gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova.network import model
from nova import test
from nova.tests import fake_network_cache_model
from nova.virt import netutils
class RouteTests(test.NoDBTestCase):
    """Unit tests for the network-cache Route model."""

    def test_create_route_with_attrs(self):
        created = fake_network_cache_model.new_route()
        fake_network_cache_model.new_ip({'address': '192.168.1.1'})
        self.assertEqual(created['cidr'], '0.0.0.0/24')
        self.assertEqual(created['gateway']['address'], '192.168.1.1')
        self.assertEqual(created['interface'], 'eth0')

    def test_routes_equal(self):
        # Two default-constructed routes compare equal.
        lhs = model.Route()
        rhs = model.Route()
        self.assertEqual(lhs, rhs)

    def test_routes_not_equal(self):
        # Differing cidr, gateway or interface each break equality.
        lhs = model.Route(cidr='1.1.1.0/24')
        rhs = model.Route(cidr='2.2.2.0/24')
        self.assertNotEqual(lhs, rhs)
        lhs = model.Route(cidr='1.1.1.1/24', gateway='1.1.1.1')
        rhs = model.Route(cidr='1.1.1.1/24', gateway='1.1.1.2')
        self.assertNotEqual(lhs, rhs)
        lhs = model.Route(cidr='1.1.1.1/24', interface='tap0')
        rhs = model.Route(cidr='1.1.1.1/24', interface='tap1')
        self.assertNotEqual(lhs, rhs)

    def test_hydrate(self):
        # Hydrating from a partial dict leaves unset keys as None.
        hydrated = model.Route.hydrate(
            {'gateway': fake_network_cache_model.new_ip(
                {'address': '192.168.1.1'})})
        self.assertIsNone(hydrated['cidr'])
        self.assertEqual(hydrated['gateway']['address'], '192.168.1.1')
        self.assertIsNone(hydrated['interface'])
class IPTests(test.NoDBTestCase):
    """Unit tests for IP model equality semantics."""

    def test_ip_equal(self):
        lhs = model.IP(address='127.0.0.1')
        rhs = model.IP(address='127.0.0.1')
        self.assertEqual(lhs, rhs)

    def test_ip_not_equal(self):
        # Differing address, type or version each break equality.
        lhs = model.IP(address='127.0.0.1')
        rhs = model.IP(address='172.0.0.3')
        self.assertNotEqual(lhs, rhs)
        lhs = model.IP(address='127.0.0.1', type=1)
        rhs = model.IP(address='172.0.0.1', type=2)
        self.assertNotEqual(lhs, rhs)
        lhs = model.IP(address='127.0.0.1', version=4)
        rhs = model.IP(address='172.0.0.1', version=6)
        self.assertNotEqual(lhs, rhs)
class FixedIPTests(test.NoDBTestCase):
    """Unit tests for the FixedIP model."""

    def test_createnew_fixed_ip_with_attrs(self):
        fixed = model.FixedIP(address='192.168.1.100')
        self.assertEqual(fixed['address'], '192.168.1.100')
        self.assertEqual(fixed['floating_ips'], [])
        self.assertEqual(fixed['type'], 'fixed')
        self.assertEqual(fixed['version'], 4)

    def test_create_fixed_ipv6(self):
        # Version is derived from the address family.
        fixed = model.FixedIP(address='::1')
        self.assertEqual(fixed['address'], '::1')
        self.assertEqual(fixed['floating_ips'], [])
        self.assertEqual(fixed['type'], 'fixed')
        self.assertEqual(fixed['version'], 6)

    def test_create_fixed_bad_ip_fails(self):
        # An unparsable address is rejected at construction time.
        self.assertRaises(exception.InvalidIpAddressError,
                          model.FixedIP,
                          address='picklespicklespickles')

    def test_equate_two_fixed_ips(self):
        lhs = model.FixedIP(address='::1')
        rhs = model.FixedIP(address='::1')
        self.assertEqual(lhs, rhs)

    def test_equate_two_dissimilar_fixed_ips_fails(self):
        # Differing address, type, version or floating_ips each break
        # equality.
        lhs = model.FixedIP(address='::1')
        rhs = model.FixedIP(address='::2')
        self.assertNotEqual(lhs, rhs)
        lhs = model.FixedIP(address='::1', type='1')
        rhs = model.FixedIP(address='::1', type='2')
        self.assertNotEqual(lhs, rhs)
        lhs = model.FixedIP(address='::1', version='6')
        rhs = model.FixedIP(address='::1', version='4')
        self.assertNotEqual(lhs, rhs)
        lhs = model.FixedIP(address='::1', floating_ips='1.1.1.1')
        rhs = model.FixedIP(address='::1', floating_ips='8.8.8.8')
        self.assertNotEqual(lhs, rhs)

    def test_hydrate(self):
        fixed = model.FixedIP.hydrate({})
        self.assertEqual(fixed['floating_ips'], [])
        self.assertIsNone(fixed['address'])
        self.assertEqual(fixed['type'], 'fixed')
        self.assertIsNone(fixed['version'])

    def test_add_floating_ip(self):
        fixed = model.FixedIP(address='192.168.1.100')
        fixed.add_floating_ip('192.168.1.101')
        self.assertEqual(fixed['floating_ips'], ['192.168.1.101'])

    def test_add_floating_ip_repeatedly_only_one_instance(self):
        # Re-adding an identical floating IP must not create duplicates.
        fixed = model.FixedIP(address='192.168.1.100')
        for _ in xrange(10):
            fixed.add_floating_ip('192.168.1.101')
        self.assertEqual(fixed['floating_ips'], ['192.168.1.101'])
class SubnetTests(test.NoDBTestCase):
    """Unit tests for the network-cache Subnet model."""

    def test_create_subnet_with_attrs(self):
        # new_subnet() defaults: cidr 10.10.0.0/24, two dns entries, one
        # gateway, two fixed ips, one route, version 4.
        subnet = fake_network_cache_model.new_subnet()
        route1 = fake_network_cache_model.new_route()
        self.assertEqual(subnet['cidr'], '10.10.0.0/24')
        self.assertEqual(subnet['dns'],
                         [fake_network_cache_model.new_ip(dict(address='1.2.3.4')),
                          fake_network_cache_model.new_ip(dict(address='2.3.4.5'))])
        self.assertEqual(subnet['gateway']['address'], '10.10.0.1')
        self.assertEqual(subnet['ips'],
                         [fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.2')),
                          fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.3'))])
        self.assertEqual(subnet['routes'], [route1])
        self.assertEqual(subnet['version'], 4)

    def test_subnet_equal(self):
        subnet1 = fake_network_cache_model.new_subnet()
        subnet2 = fake_network_cache_model.new_subnet()
        self.assertEqual(subnet1, subnet2)

    def test_subnet_not_equal(self):
        # Each differing field breaks equality in turn.
        subnet1 = model.Subnet(cidr='1.1.1.0/24')
        subnet2 = model.Subnet(cidr='2.2.2.0/24')
        self.assertNotEqual(subnet1, subnet2)
        subnet1 = model.Subnet(dns='1.1.1.0/24')
        subnet2 = model.Subnet(dns='2.2.2.0/24')
        self.assertNotEqual(subnet1, subnet2)
        subnet1 = model.Subnet(gateway='1.1.1.1/24')
        subnet2 = model.Subnet(gateway='2.2.2.1/24')
        self.assertNotEqual(subnet1, subnet2)
        subnet1 = model.Subnet(ips='1.1.1.0/24')
        subnet2 = model.Subnet(ips='2.2.2.0/24')
        self.assertNotEqual(subnet1, subnet2)
        subnet1 = model.Subnet(routes='1.1.1.0/24')
        subnet2 = model.Subnet(routes='2.2.2.0/24')
        self.assertNotEqual(subnet1, subnet2)
        subnet1 = model.Subnet(version='4')
        subnet2 = model.Subnet(version='6')
        self.assertNotEqual(subnet1, subnet2)

    def test_add_route(self):
        subnet = fake_network_cache_model.new_subnet()
        route1 = fake_network_cache_model.new_route()
        route2 = fake_network_cache_model.new_route({'cidr': '1.1.1.1/24'})
        subnet.add_route(route2)
        self.assertEqual(subnet['routes'], [route1, route2])

    def test_add_route_a_lot(self):
        # Repeatedly adding an equal route must not create duplicates.
        subnet = fake_network_cache_model.new_subnet()
        route1 = fake_network_cache_model.new_route()
        route2 = fake_network_cache_model.new_route({'cidr': '1.1.1.1/24'})
        for i in xrange(10):
            subnet.add_route(route2)
        self.assertEqual(subnet['routes'], [route1, route2])

    def test_add_dns(self):
        subnet = fake_network_cache_model.new_subnet()
        dns = fake_network_cache_model.new_ip(dict(address='9.9.9.9'))
        subnet.add_dns(dns)
        self.assertEqual(subnet['dns'],
                         [fake_network_cache_model.new_ip(dict(address='1.2.3.4')),
                          fake_network_cache_model.new_ip(dict(address='2.3.4.5')),
                          fake_network_cache_model.new_ip(dict(address='9.9.9.9'))])

    def test_add_dns_a_lot(self):
        # Repeatedly adding an equal dns entry must not create duplicates.
        subnet = fake_network_cache_model.new_subnet()
        for i in xrange(10):
            subnet.add_dns(fake_network_cache_model.new_ip(
                dict(address='9.9.9.9')))
        self.assertEqual(subnet['dns'],
                         [fake_network_cache_model.new_ip(dict(address='1.2.3.4')),
                          fake_network_cache_model.new_ip(dict(address='2.3.4.5')),
                          fake_network_cache_model.new_ip(dict(address='9.9.9.9'))])

    def test_add_ip(self):
        subnet = fake_network_cache_model.new_subnet()
        subnet.add_ip(fake_network_cache_model.new_ip(
            dict(address='192.168.1.102')))
        self.assertEqual(subnet['ips'],
                         [fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.2')),
                          fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.3')),
                          fake_network_cache_model.new_ip(
                              dict(address='192.168.1.102'))])

    def test_add_ip_a_lot(self):
        # Repeatedly adding an equal fixed ip must not create duplicates.
        subnet = fake_network_cache_model.new_subnet()
        for i in xrange(10):
            subnet.add_ip(fake_network_cache_model.new_fixed_ip(
                dict(address='192.168.1.102')))
        self.assertEqual(subnet['ips'],
                         [fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.2')),
                          fake_network_cache_model.new_fixed_ip(
                              dict(address='10.10.0.3')),
                          fake_network_cache_model.new_fixed_ip(
                              dict(address='192.168.1.102'))])

    def test_hydrate(self):
        # Round-trip a fully-specified dict through Subnet.hydrate.
        subnet_dict = {
            'cidr': '255.255.255.0',
            'dns': [fake_network_cache_model.new_ip(dict(address='1.1.1.1'))],
            'ips': [fake_network_cache_model.new_fixed_ip(
                dict(address='2.2.2.2'))],
            'routes': [fake_network_cache_model.new_route()],
            'version': 4,
            'gateway': fake_network_cache_model.new_ip(
                dict(address='3.3.3.3'))}
        subnet = model.Subnet.hydrate(subnet_dict)
        self.assertEqual(subnet['cidr'], '255.255.255.0')
        self.assertEqual(subnet['dns'], [fake_network_cache_model.new_ip(
            dict(address='1.1.1.1'))])
        self.assertEqual(subnet['gateway']['address'], '3.3.3.3')
        self.assertEqual(subnet['ips'], [fake_network_cache_model.new_fixed_ip(
            dict(address='2.2.2.2'))])
        self.assertEqual(subnet['routes'], [
            fake_network_cache_model.new_route()])
        self.assertEqual(subnet['version'], 4)
class NetworkTests(test.NoDBTestCase):
    """Unit tests for the network-cache Network model."""

    def test_create_network(self):
        net = fake_network_cache_model.new_network()
        self.assertEqual(net['id'], 1)
        self.assertEqual(net['bridge'], 'br0')
        self.assertEqual(net['label'], 'public')
        self.assertEqual(net['subnets'],
                         [fake_network_cache_model.new_subnet(),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '255.255.255.255'})])

    def test_add_subnet(self):
        net = fake_network_cache_model.new_network()
        net.add_subnet(fake_network_cache_model.new_subnet(
            {'cidr': '0.0.0.0'}))
        self.assertEqual(net['subnets'],
                         [fake_network_cache_model.new_subnet(),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '255.255.255.255'}),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '0.0.0.0'})])

    def test_add_subnet_a_lot(self):
        # Re-adding an equal subnet many times must not duplicate it.
        net = fake_network_cache_model.new_network()
        for _ in xrange(10):
            net.add_subnet(fake_network_cache_model.new_subnet(
                {'cidr': '0.0.0.0'}))
        self.assertEqual(net['subnets'],
                         [fake_network_cache_model.new_subnet(),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '255.255.255.255'}),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '0.0.0.0'})])

    def test_network_equal(self):
        lhs = model.Network()
        rhs = model.Network()
        self.assertEqual(lhs, rhs)

    def test_network_not_equal(self):
        # Differing id, bridge, label or subnets each break equality.
        lhs = model.Network(id='1')
        rhs = model.Network(id='2')
        self.assertNotEqual(lhs, rhs)
        lhs = model.Network(bridge='br-int')
        rhs = model.Network(bridge='br0')
        self.assertNotEqual(lhs, rhs)
        lhs = model.Network(label='net1')
        rhs = model.Network(label='net2')
        self.assertNotEqual(lhs, rhs)
        lhs = model.Network(subnets='1.1.1.0/24')
        rhs = model.Network(subnets='2.2.2.0/24')
        self.assertNotEqual(lhs, rhs)

    def test_hydrate(self):
        fake_network_cache_model.new_subnet()
        fake_network_cache_model.new_subnet({'cidr': '255.255.255.255'})
        net = model.Network.hydrate(fake_network_cache_model.new_network())
        self.assertEqual(net['id'], 1)
        self.assertEqual(net['bridge'], 'br0')
        self.assertEqual(net['label'], 'public')
        self.assertEqual(net['subnets'],
                         [fake_network_cache_model.new_subnet(),
                          fake_network_cache_model.new_subnet(
                              {'cidr': '255.255.255.255'})])
class VIFTests(test.NoDBTestCase):
    """Unit tests for the network-cache VIF model."""

    def test_create_vif(self):
        vif = fake_network_cache_model.new_vif()
        self.assertEqual(vif['id'], 1)
        self.assertEqual(vif['address'], 'aa:aa:aa:aa:aa:aa')
        self.assertEqual(vif['network'],
                         fake_network_cache_model.new_network())

    def test_vif_equal(self):
        vif1 = model.VIF()
        vif2 = model.VIF()
        self.assertEqual(vif1, vif2)

    def test_vif_not_equal(self):
        # Each differing field breaks equality in turn.
        vif1 = model.VIF(id=1)
        vif2 = model.VIF(id=2)
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(address='00:00:00:00:00:11')
        vif2 = model.VIF(address='00:00:00:00:00:22')
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(network='net1')
        vif2 = model.VIF(network='net2')
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(type='ovs')
        vif2 = model.VIF(type='linuxbridge')
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(devname='ovs1234')
        vif2 = model.VIF(devname='linuxbridge1234')
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(qbh_params=1)
        vif2 = model.VIF(qbh_params=None)
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(qbg_params=1)
        vif2 = model.VIF(qbg_params=None)
        self.assertNotEqual(vif1, vif2)
        vif1 = model.VIF(active=True)
        vif2 = model.VIF(active=False)
        self.assertNotEqual(vif1, vif2)

    def test_create_vif_with_type(self):
        vif_dict = dict(
            id=1,
            address='aa:aa:aa:aa:aa:aa',
            network=fake_network_cache_model.new_network(),
            type='bridge')
        vif = fake_network_cache_model.new_vif(vif_dict)
        self.assertEqual(vif['id'], 1)
        self.assertEqual(vif['address'], 'aa:aa:aa:aa:aa:aa')
        self.assertEqual(vif['type'], 'bridge')
        self.assertEqual(vif['network'],
                         fake_network_cache_model.new_network())

    def test_vif_get_fixed_ips(self):
        vif = fake_network_cache_model.new_vif()
        fixed_ips = vif.fixed_ips()
        # * 2 because the fake network carries two subnets, presumably each
        # listing the same two fixed IPs -- see new_network() usage above.
        ips = [
            fake_network_cache_model.new_fixed_ip(dict(address='10.10.0.2')),
            fake_network_cache_model.new_fixed_ip(dict(address='10.10.0.3'))
        ] * 2
        self.assertEqual(fixed_ips, ips)

    def test_vif_get_floating_ips(self):
        vif = fake_network_cache_model.new_vif()
        vif['network']['subnets'][0]['ips'][0].add_floating_ip('192.168.1.1')
        floating_ips = vif.floating_ips()
        self.assertEqual(floating_ips, ['192.168.1.1'])

    def test_vif_get_labeled_ips(self):
        vif = fake_network_cache_model.new_vif()
        labeled_ips = vif.labeled_ips()
        ip_dict = {
            'network_id': 1,
            'ips': [fake_network_cache_model.new_ip(
                        {'address': '10.10.0.2', 'type': 'fixed'}),
                    fake_network_cache_model.new_ip(
                        {'address': '10.10.0.3', 'type': 'fixed'})] * 2,
            'network_label': 'public'}
        self.assertEqual(labeled_ips, ip_dict)

    def test_hydrate(self):
        fake_network_cache_model.new_network()
        vif = model.VIF.hydrate(fake_network_cache_model.new_vif())
        self.assertEqual(vif['id'], 1)
        self.assertEqual(vif['address'], 'aa:aa:aa:aa:aa:aa')
        self.assertEqual(vif['network'],
                         fake_network_cache_model.new_network())

    def test_hydrate_vif_with_type(self):
        vif_dict = dict(
            id=1,
            address='aa:aa:aa:aa:aa:aa',
            network=fake_network_cache_model.new_network(),
            type='bridge')
        vif = model.VIF.hydrate(fake_network_cache_model.new_vif(vif_dict))
        self.assertEqual(vif['id'], 1)
        self.assertEqual(vif['address'], 'aa:aa:aa:aa:aa:aa')
        self.assertEqual(vif['type'], 'bridge')
        self.assertEqual(vif['network'],
                         fake_network_cache_model.new_network())
class NetworkInfoTests(test.NoDBTestCase):
    """Unit tests for NetworkInfo, its async wrapper and the interfaces(5)
    template injection helper in nova.virt.netutils.

    NOTE(review): the expected-template literals below appear to have lost
    their original internal blank lines / option indentation in this copy
    of the file -- verify them against netutils' actual template output.
    """

    def test_create_model(self):
        ninfo = model.NetworkInfo([fake_network_cache_model.new_vif(),
                                   fake_network_cache_model.new_vif(
                                       {'address': 'bb:bb:bb:bb:bb:bb'})])
        # * 4: two vifs, each contributing the fixed IPs of two subnets.
        self.assertEqual(ninfo.fixed_ips(),
                         [fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.2'}),
                          fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.3'})] * 4)

    def test_create_async_model(self):
        # The async wrapper must transparently expose the wrapped model.
        def async_wrapper():
            return model.NetworkInfo(
                [fake_network_cache_model.new_vif(),
                 fake_network_cache_model.new_vif(
                     {'address': 'bb:bb:bb:bb:bb:bb'})])
        ninfo = model.NetworkInfoAsyncWrapper(async_wrapper)
        self.assertEqual(ninfo.fixed_ips(),
                         [fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.2'}),
                          fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.3'})] * 4)

    def test_create_async_model_exceptions(self):
        # Exceptions raised in the wrapped callable surface on first wait().
        def async_wrapper():
            raise test.TestingException()
        ninfo = model.NetworkInfoAsyncWrapper(async_wrapper)
        self.assertRaises(test.TestingException, ninfo.wait)
        # 2nd one doesn't raise
        self.assertIsNone(ninfo.wait())
        # Test that do_raise=False works on .wait()
        ninfo = model.NetworkInfoAsyncWrapper(async_wrapper)
        self.assertIsNone(ninfo.wait(do_raise=False))
        # Test we also raise calling a method
        ninfo = model.NetworkInfoAsyncWrapper(async_wrapper)
        self.assertRaises(test.TestingException, ninfo.fixed_ips)

    def test_get_floating_ips(self):
        vif = fake_network_cache_model.new_vif()
        vif['network']['subnets'][0]['ips'][0].add_floating_ip('192.168.1.1')
        ninfo = model.NetworkInfo([vif,
                                   fake_network_cache_model.new_vif(
                                       {'address': 'bb:bb:bb:bb:bb:bb'})])
        self.assertEqual(ninfo.floating_ips(), ['192.168.1.1'])

    def test_hydrate(self):
        ninfo = model.NetworkInfo([fake_network_cache_model.new_vif(),
                                   fake_network_cache_model.new_vif(
                                       {'address': 'bb:bb:bb:bb:bb:bb'})])
        model.NetworkInfo.hydrate(ninfo)
        self.assertEqual(ninfo.fixed_ips(),
                         [fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.2'}),
                          fake_network_cache_model.new_fixed_ip(
                              {'address': '10.10.0.3'})] * 4)

    def _setup_injected_network_scenario(self, should_inject=True,
                                         use_ipv4=True, use_ipv6=False,
                                         gateway=True, dns=True,
                                         two_interfaces=False,
                                         libvirt_virt_type=None):
        """Check that netutils properly decides whether to inject based on
        whether the supplied subnet is static or dynamic.
        """
        network = fake_network_cache_model.new_network({'subnets': []})
        subnet_dict = {}
        if not gateway:
            subnet_dict['gateway'] = None
        if not dns:
            subnet_dict['dns'] = None
        if not should_inject:
            # Presence of a dhcp_server marks the subnet as dynamic,
            # which suppresses injection.
            subnet_dict['dhcp_server'] = '10.10.0.1'
        if use_ipv4:
            network.add_subnet(
                fake_network_cache_model.new_subnet(subnet_dict))
        if should_inject and use_ipv6:
            # Build an IPv6 subnet alongside the (optional) IPv4 one.
            gateway_ip = fake_network_cache_model.new_ip(dict(
                address='1234:567::1'))
            ip = fake_network_cache_model.new_ip(dict(
                address='1234:567::2'))
            ipv6_subnet_dict = dict(
                cidr='1234:567::/48',
                gateway=gateway_ip,
                dns=[fake_network_cache_model.new_ip(
                         dict(address='2001:4860:4860::8888')),
                     fake_network_cache_model.new_ip(
                         dict(address='2001:4860:4860::8844'))],
                ips=[ip])
            if not gateway:
                ipv6_subnet_dict['gateway'] = None
            network.add_subnet(fake_network_cache_model.new_subnet(
                ipv6_subnet_dict))
        # Behave as though CONF.flat_injected is True
        network['meta']['injected'] = True
        vif = fake_network_cache_model.new_vif({'network': network})
        vifs = [vif]
        if two_interfaces:
            # The same vif twice yields eth0 and eth1 in the template.
            vifs.append(vif)
        nwinfo = model.NetworkInfo(vifs)
        return netutils.get_injected_network_template(
            nwinfo, use_ipv6=use_ipv6, libvirt_virt_type=libvirt_virt_type)

    def test_injection_dynamic(self):
        # Dynamic (dhcp) subnets produce no injected template.
        expected = None
        template = self._setup_injected_network_scenario(should_inject=False)
        self.assertEqual(expected, template)

    def test_injection_static(self):
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
"""
        template = self._setup_injected_network_scenario()
        self.assertEqual(expected, template)

    def test_injection_static_no_gateway(self):
        # Without a gateway, the gateway line is omitted from the template.
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
dns-nameservers 1.2.3.4 2.3.4.5
"""
        template = self._setup_injected_network_scenario(gateway=False)
        self.assertEqual(expected, template)

    def test_injection_static_no_dns(self):
        # Without dns entries, the dns-nameservers line is omitted.
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
"""
        template = self._setup_injected_network_scenario(dns=False)
        self.assertEqual(expected, template)

    def test_injection_static_ipv6(self):
        # IPv6 adds an inet6 static stanza after the IPv4 one.
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
iface eth0 inet6 static
address 1234:567::2
netmask 48
gateway 1234:567::1
dns-nameservers 2001:4860:4860::8888 2001:4860:4860::8844
"""
        template = self._setup_injected_network_scenario(use_ipv6=True)
        self.assertEqual(expected, template)

    def test_injection_static_ipv6_no_gateway(self):
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
dns-nameservers 1.2.3.4 2.3.4.5
iface eth0 inet6 static
address 1234:567::2
netmask 48
dns-nameservers 2001:4860:4860::8888 2001:4860:4860::8844
"""
        template = self._setup_injected_network_scenario(use_ipv6=True,
                                                         gateway=False)
        self.assertEqual(expected, template)

    def test_injection_static_with_ipv4_off(self):
        # No IPv4 subnet and no IPv6 requested: nothing to inject.
        expected = None
        template = self._setup_injected_network_scenario(use_ipv4=False)
        self.assertEqual(expected, template)

    def test_injection_ipv6_two_interfaces(self):
        # Two vifs produce stanzas for both eth0 and eth1.
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
iface eth0 inet6 static
address 1234:567::2
netmask 48
gateway 1234:567::1
dns-nameservers 2001:4860:4860::8888 2001:4860:4860::8844
auto eth1
iface eth1 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
iface eth1 inet6 static
address 1234:567::2
netmask 48
gateway 1234:567::1
dns-nameservers 2001:4860:4860::8888 2001:4860:4860::8844
"""
        template = self._setup_injected_network_scenario(use_ipv6=True,
                                                         two_interfaces=True)
        self.assertEqual(expected, template)

    def test_injection_ipv6_with_lxc(self):
        # For lxc the IPv6 config is emitted as post-up ip commands
        # instead of a separate inet6 stanza.
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
post-up ip -6 addr add 1234:567::2/48 dev ${IFACE}
post-up ip -6 route add default via 1234:567::1 dev ${IFACE}
auto eth1
iface eth1 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
gateway 10.10.0.1
dns-nameservers 1.2.3.4 2.3.4.5
post-up ip -6 addr add 1234:567::2/48 dev ${IFACE}
post-up ip -6 route add default via 1234:567::1 dev ${IFACE}
"""
        template = self._setup_injected_network_scenario(
            use_ipv6=True, two_interfaces=True, libvirt_virt_type='lxc')
        self.assertEqual(expected, template)

    def test_injection_ipv6_with_lxc_no_gateway(self):
        expected = """\
# Injected by Nova on instance boot
#
# This file describes the network interfaces available on your system
# and how to activate them. For more information, see interfaces(5).
# The loopback network interface
auto lo
iface lo inet loopback
auto eth0
iface eth0 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
dns-nameservers 1.2.3.4 2.3.4.5
post-up ip -6 addr add 1234:567::2/48 dev ${IFACE}
auto eth1
iface eth1 inet static
address 10.10.0.2
netmask 255.255.255.0
broadcast 10.10.0.255
dns-nameservers 1.2.3.4 2.3.4.5
post-up ip -6 addr add 1234:567::2/48 dev ${IFACE}
"""
        template = self._setup_injected_network_scenario(
            use_ipv6=True, gateway=False, two_interfaces=True,
            libvirt_virt_type='lxc')
        self.assertEqual(expected, template)
|
|
"""
Models for the in-game communication system.
The comm system could take the form of channels, but can also be
adopted for storing tells or in-game mail.
The comsystem's main component is the Message (Msg), which carries the
actual information between two parties. Msgs are stored in the
database and usually not deleted. A Msg always has one sender (a
user), but can have any number of targets, both users and channels.
For non-persistent (and slightly faster) use one can also use the
TempMsg, which mimics the Msg API but without actually saving to the
database.
Channels are central objects that act as targets for Msgs. Players can
connect to channels by use of a ChannelConnect object (this object is
necessary to easily be able to delete connections on the fly).
"""
from django.conf import settings
from django.utils import timezone
from django.db import models
from evennia.typeclasses.models import TypedObject
from evennia.utils.idmapper.models import SharedMemoryModel
from evennia.comms import managers
from evennia.locks.lockhandler import LockHandler
from evennia.utils.utils import crop, make_iter, lazy_property
__all__ = ("Msg", "TempMsg", "ChannelDB")
_GA = object.__getattribute__
_SA = object.__setattr__
_DA = object.__delattr__
#------------------------------------------------------------
#
# Msg
#
#------------------------------------------------------------
class Msg(SharedMemoryModel):
"""
A single message. This model describes all ooc messages
sent in-game, both to channels and between players.
The Msg class defines the following database fields (all
accessed via specific handler methods):
- db_sender_players: Player senders
- db_sender_objects: Object senders
- db_sender_external: External senders (defined as string names)
- db_receivers_players: Receiving players
- db_receivers_objects: Receiving objects
- db_receivers_channels: Receiving channels
- db_header: Header text
- db_message: The actual message text
- db_date_sent: time message was sent
- db_hide_from_sender: bool if message should be hidden from sender
- db_hide_from_receivers: list of receiver objects to hide message from
- db_hide_from_channels: list of channels objects to hide message from
- db_lock_storage: Internal storage of lock strings.
"""
#
# Msg database model setup
#
#
# These database fields are all set using their corresponding properties,
# named the same as the field, but without the db_* prefix.
# Sender is either a player, an object or an external sender, like
# an IRC channel; normally there is only one, but if co-modification of
# a message is allowed, there may be more than one "author"
# NOTE(review): null=True has no effect on a Django ManyToManyField;
# presumably historical -- confirm before cleaning up, since removing it
# may trigger (no-op) migrations.
db_sender_players = models.ManyToManyField("players.PlayerDB", related_name='sender_player_set',
                                           null=True, verbose_name='sender(player)', db_index=True)
db_sender_objects = models.ManyToManyField("objects.ObjectDB", related_name='sender_object_set',
                                           null=True, verbose_name='sender(object)', db_index=True)
# External senders are stored by plain string identifier (no FK), e.g. an
# IRC nick. NOTE(review): 'exixtence' typo lives in runtime help_text;
# left untouched here to keep this change comment-only.
db_sender_external = models.CharField('external sender', max_length=255, null=True, db_index=True,
                                      help_text="identifier for external sender, for example a sender over an "
                                      "IRC connection (i.e. someone who doesn't have an exixtence in-game).")
# The destination objects of this message. Stored as a
# comma-separated string of object dbrefs. Can be defined along
# with channels below.
db_receivers_players = models.ManyToManyField('players.PlayerDB', related_name='receiver_player_set',
                                              null=True, help_text="player receivers")
db_receivers_objects = models.ManyToManyField('objects.ObjectDB', related_name='receiver_object_set',
                                              null=True, help_text="object receivers")
db_receivers_channels = models.ManyToManyField("ChannelDB", related_name='channel_set',
                                               null=True, help_text="channel recievers")
# header could be used for meta-info about the message if your system needs
# it, or as a separate store for the mail subject line maybe.
db_header = models.TextField('header', null=True, blank=True)
# the message body itself
# NOTE(review): 'messsage' is a typo in the verbose name; changing it would
# only alter admin labels, so it is flagged rather than fixed here.
db_message = models.TextField('messsage')
# send date
db_date_sent = models.DateTimeField('date sent', editable=False, auto_now_add=True, db_index=True)
# lock storage
db_lock_storage = models.TextField('locks', blank=True,
                                   help_text='access locks on this message.')
# these can be used to filter/hide a given message from supplied objects/players/channels
db_hide_from_players = models.ManyToManyField("players.PlayerDB", related_name='hide_from_players_set', null=True)
db_hide_from_objects = models.ManyToManyField("objects.ObjectDB", related_name='hide_from_objects_set', null=True)
db_hide_from_channels = models.ManyToManyField("ChannelDB", related_name='hide_from_channels_set', null=True)
# Database manager
objects = managers.MsgManager()
# in-memory deletion flag; presumably consulted by higher layers to mark
# messages deleted without a db hit -- TODO confirm against callers.
_is_deleted = False
def __init__(self, *args, **kwargs):
    # Initialize the Django model, then set up the per-instance list of
    # non-persistent (external string) senders used by the senders property.
    SharedMemoryModel.__init__(self, *args, **kwargs)
    self.extra_senders = []
class Meta:
    "Define Django meta options"
    # Human-readable name shown e.g. in the Django admin.
    verbose_name = "Message"
# Wrapper properties to easily set database fields. These are
# @property decorators that allows to access these fields using
# normal python operations (without having to remember to save()
# etc). So e.g. a property 'attr' has a get/set/del decorator
# defined that allows the user to do self.attr = value,
# value = self.attr and del self.attr respectively (where self
# is the object in question).
# sender property (wraps db_sender_*)
#@property
def __senders_get(self):
    "Getter. Allows for value = self.sender"
    # Combine persistent player/object senders with the in-memory
    # external (string) senders.
    all_senders = list(self.db_sender_players.all())
    all_senders.extend(self.db_sender_objects.all())
    all_senders.extend(self.extra_senders)
    return all_senders
#@sender.setter
def __senders_set(self, senders):
    "Setter. Allows for self.sender = value"
    for new_sender in make_iter(senders):
        if not new_sender:
            continue
        if isinstance(new_sender, basestring):
            # External senders are plain strings; store and persist, then
            # move on to the next entry.
            self.db_sender_external = new_sender
            self.extra_senders.append(new_sender)
            self.save(update_fields=["db_sender_external"])
            continue
        if not hasattr(new_sender, "__dbclass__"):
            raise ValueError("This is a not a typeclassed object!")
        dbclass_name = new_sender.__dbclass__.__name__
        if dbclass_name == "PlayerDB":
            self.db_sender_players.add(new_sender)
        elif dbclass_name == "ObjectDB":
            self.db_sender_objects.add(new_sender)
#@sender.deleter
def __senders_del(self):
    "Deleter. Clears all senders"
    # Wipe both persistent relations and the in-memory extras, then save.
    self.db_sender_objects.clear()
    self.db_sender_players.clear()
    self.db_sender_external = ""
    self.extra_senders = []
    self.save()
senders = property(__senders_get, __senders_set, __senders_del)
def remove_sender(self, senders):
    """
    Remove a single sender or a list of senders.

    Args:
        senders (Player, Object, str or list): Senders to remove.

    Raises:
        ValueError: If a non-string sender is not a typeclassed object.
    """
    for sender in make_iter(senders):
        if not sender:
            continue
        if isinstance(sender, basestring):
            # External (string) senders have no typeclass; clear the field
            # and move on, mirroring the guard in __senders_set. Without
            # this `continue`, the hasattr check below would raise
            # ValueError for every string sender (after clearing).
            self.db_sender_external = ""
            self.save(update_fields=["db_sender_external"])
            continue
        if not hasattr(sender, "__dbclass__"):
            raise ValueError("This is a not a typeclassed object!")
        clsname = sender.__dbclass__.__name__
        if clsname == "ObjectDB":
            self.db_sender_objects.remove(sender)
        elif clsname == "PlayerDB":
            self.db_sender_players.remove(sender)
# receivers property
#@property
def __receivers_get(self):
    """
    Getter. Allows for value = self.receivers.
    Returns a combined list of player and object receivers (channel
    receivers are accessed separately via the `channels` property).
    """
    return list(self.db_receivers_players.all()) + list(self.db_receivers_objects.all())
#@receivers.setter
def __receivers_set(self, receivers):
    """
    Setter. Allows for self.receivers = value.
    This appends a new receiver to the message.
    """
    for target in make_iter(receivers):
        if not target:
            continue
        if not hasattr(target, "__dbclass__"):
            raise ValueError("This is a not a typeclassed object!")
        dbclass_name = target.__dbclass__.__name__
        if dbclass_name == "PlayerDB":
            self.db_receivers_players.add(target)
        elif dbclass_name == "ObjectDB":
            self.db_receivers_objects.add(target)
#@receivers.deleter
def __receivers_del(self):
    "Deleter. Clears all receivers"
    # Clear both receiver relations (order irrelevant), then persist.
    self.db_receivers_objects.clear()
    self.db_receivers_players.clear()
    self.save()
receivers = property(__receivers_get, __receivers_set, __receivers_del)
def remove_receiver(self, receivers):
"""
Remove a single receiver or a list of receivers.
Args:
receivers (Player, Object, Channel or list): Receiver to remove.
"""
for receiver in make_iter(receivers):
if not receiver:
continue
if not hasattr(receiver, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = receiver.__dbclass__.__name__
if clsname == "ObjectDB":
self.db_receivers_objects.remove(receiver)
elif clsname == "PlayerDB":
self.db_receivers_players.remove(receiver)
# channels property
#@property
def __channels_get(self):
"Getter. Allows for value = self.channels. Returns a list of channels."
return self.db_receivers_channels.all()
#@channels.setter
def __channels_set(self, value):
"""
Setter. Allows for self.channels = value.
Requires a channel to be added.
"""
for val in (v for v in make_iter(value) if v):
self.db_receivers_channels.add(val)
#@channels.deleter
def __channels_del(self):
"Deleter. Allows for del self.channels"
self.db_receivers_channels.clear()
self.save()
channels = property(__channels_get, __channels_set, __channels_del)
def __hide_from_get(self):
"""
Getter. Allows for value = self.hide_from.
Returns 3 lists of players, objects and channels
"""
return self.db_hide_from_players.all(), self.db_hide_from_objects.all(), self.db_hide_from_channels.all()
#@hide_from_sender.setter
def __hide_from_set(self, hiders):
"Setter. Allows for self.hide_from = value. Will append to hiders"
for hider in make_iter(hiders):
if not hider:
continue
if not hasattr(hider, "__dbclass__"):
raise ValueError("This is a not a typeclassed object!")
clsname = hider.__dbclass__.__name__
if clsname == "PlayerDB":
self.db_hide_from_players.add(hider.__dbclass__)
elif clsname == "ObjectDB":
self.db_hide_from_objects.add(hider.__dbclass__)
elif clsname == "ChannelDB":
self.db_hide_from_channels.add(hider.__dbclass__)
#@hide_from_sender.deleter
def __hide_from_del(self):
"Deleter. Allows for del self.hide_from_senders"
self.db_hide_from_players.clear()
self.db_hide_from_objects.clear()
self.db_hide_from_channels.clear()
self.save()
hide_from = property(__hide_from_get, __hide_from_set, __hide_from_del)
#
# Msg class methods
#
def __str__(self):
"This handles what is shown when e.g. printing the message"
senders = ",".join(obj.key for obj in self.senders)
receivers = ",".join(["[%s]" % obj.key for obj in self.channels] + [obj.key for obj in self.receivers])
return "%s->%s: %s" % (senders, receivers, crop(self.message, width=40))
#------------------------------------------------------------
#
# TempMsg
#
#------------------------------------------------------------
class TempMsg(object):
    """
    This is a non-persistent object for sending temporary messages
    that will not be stored. It mimics the "real" Msg object, but
    doesn't require sender to be given.
    """
    def __init__(self, senders=None, receivers=None, channels=None,
                 message="", header="", type="", lockstring="", hide_from=None):
        """
        Creates the temp message.

        Args:
            senders (any or list, optional): Senders of the message.
            receivers (Player, Object, Channel or list, optional): Receivers of this message.
            channels (Channel or list, optional): Channels to send to.
            message (str, optional): Message to send.
            header (str, optional): Header of message.
            type (str, optional): Message class, if any.
            lockstring (str, optional): Lock for the message.
            hide_from (Player, Object, Channel or list, optional): Entities to hide this message from.
        """
        self.senders = make_iter(senders) if senders else []
        self.receivers = make_iter(receivers) if receivers else []
        self.channels = make_iter(channels) if channels else []
        self.type = type
        self.header = header
        self.message = message
        self.lock_storage = lockstring
        self.hide_from = make_iter(hide_from) if hide_from else []
        self.date_sent = timezone.now()

    @lazy_property
    def locks(self):
        # Lock handler operating on self.lock_storage.
        return LockHandler(self)

    def __str__(self):
        """
        This handles what is shown when e.g. printing the message.
        """
        senders = ",".join(obj.key for obj in self.senders)
        receivers = ",".join(["[%s]" % obj.key for obj in self.channels]
                             + [obj.key for obj in self.receivers])
        return "%s->%s: %s" % (senders, receivers, crop(self.message, width=40))

    def remove_sender(self, sender):
        """
        Remove a sender or a list of senders.

        Args:
            sender (Object, Player, str or list): Senders to remove.
        """
        for o in make_iter(sender):
            try:
                self.senders.remove(o)
            except ValueError:
                pass  # nothing to remove

    def remove_receiver(self, receiver):
        """
        Remove a receiver or a list of receivers.

        Args:
            receiver (Object, Player, Channel, str or list): Receivers to remove.
        """
        for o in make_iter(receiver):
            try:
                # Bug fix: this previously removed from self.senders
                # (copy-paste from remove_sender), leaving the receiver
                # in place.
                self.receivers.remove(o)
            except ValueError:
                pass  # nothing to remove

    def access(self, accessing_obj, access_type='read', default=False):
        """
        Checks lock access.

        Args:
            accessing_obj (Object or Player): The object trying to gain access.
            access_type (str, optional): The type of lock access to check.
            default (bool): Fallback to use if `access_type` lock is not defined.

        Returns:
            result (bool): If access was granted or not.
        """
        return self.locks.check(accessing_obj,
                                access_type=access_type, default=default)
#------------------------------------------------------------
#
# Channel
#
#------------------------------------------------------------
class SubscriptionHandler(object):
    """
    This handler manages subscriptions to the
    channel and hides away which type of entity is
    subscribing (Player or Object).
    """
    def __init__(self, obj):
        """
        Initialize the handler.

        Args:
            obj (ChannelDB): The channel the handler sits on.
        """
        self.obj = obj

    def has(self, entity):
        """
        Check if the given entity subscribes to this channel.

        Args:
            entity (Player or Object): The entity to check.

        Returns:
            bool or None: Whether the entity subscribes; None if the
                entity is neither a Player nor an Object.
        """
        clsname = entity.__dbclass__.__name__
        if clsname == "PlayerDB":
            return entity in self.obj.db_subscriptions.all()
        elif clsname == "ObjectDB":
            return entity in self.obj.db_object_subscriptions.all()

    def add(self, entity):
        """
        Subscribe an entity to this channel.

        Args:
            entity (Player, Object or list): The entity or
                list of entities to subscribe to this channel.

        Note:
            No access-checking is done here, this must have
            been done before calling this method. Also
            no hooks will be called.
        """
        for subscriber in make_iter(entity):
            if subscriber:
                clsname = subscriber.__dbclass__.__name__
                # chooses the right type
                if clsname == "ObjectDB":
                    self.obj.db_object_subscriptions.add(subscriber)
                elif clsname == "PlayerDB":
                    self.obj.db_subscriptions.add(subscriber)

    def remove(self, entity):
        """
        Remove a subscriber from the channel.

        Args:
            entity (Player, Object or list): The entity or
                entities to un-subscribe from the channel.
        """
        for subscriber in make_iter(entity):
            if subscriber:
                clsname = subscriber.__dbclass__.__name__
                # Bug fix: this previously removed `entity` (possibly the
                # whole input list) instead of the current `subscriber`,
                # breaking removal when a list was passed in.
                if clsname == "PlayerDB":
                    self.obj.db_subscriptions.remove(subscriber)
                elif clsname == "ObjectDB":
                    self.obj.db_object_subscriptions.remove(subscriber)

    def all(self):
        """
        Get all subscriptions to this channel.

        Returns:
            subscribers (list): The subscribers. This
                may be a mix of Players and Objects!
        """
        return list(self.obj.db_subscriptions.all()) + \
            list(self.obj.db_object_subscriptions.all())

    def clear(self):
        """
        Remove all subscribers from channel.
        """
        self.obj.db_subscriptions.clear()
        self.obj.db_object_subscriptions.clear()
class ChannelDB(TypedObject):
    """
    This is the basis of a comm channel, only implementing
    the very basics of distributing messages.

    The Channel class defines the following database fields
    beyond the ones inherited from TypedObject:

      - db_subscriptions: The Player subscriptions (this is the most
        usual case, named this way for legacy.
      - db_object_subscriptions: The Object subscriptions.
    """
    # Player subscribers (legacy field name; the most common case).
    db_subscriptions = models.ManyToManyField("players.PlayerDB",
            related_name="subscription_set", null=True, verbose_name='subscriptions', db_index=True)
    # Object subscribers, held in a separate relation from players.
    # NOTE(review): verbose_name duplicates the player field's label and
    # null=True has no effect on ManyToManyField -- confirm before changing,
    # since altering either touches migrations/admin display.
    db_object_subscriptions = models.ManyToManyField("objects.ObjectDB",
            related_name="object_subscription_set", null=True, verbose_name='subscriptions', db_index=True)

    # Database manager
    objects = managers.ChannelDBManager()

    # Typeclass pathing used by the typeclass system to locate the
    # configured and fallback channel typeclasses.
    __settingclasspath__ = settings.BASE_CHANNEL_TYPECLASS
    __defaultclasspath__ = "evennia.comms.comms.DefaultChannel"
    __applabel__ = "comms"

    class Meta:
        "Define Django meta options"
        verbose_name = "Channel"
        verbose_name_plural = "Channels"

    def __str__(self):
        "Echoes the text representation of the channel."
        return "Channel '%s' (%s)" % (self.key, self.db.desc)

    @lazy_property
    def subscriptions(self):
        # Handler hiding whether a subscriber is a Player or an Object.
        return SubscriptionHandler(self)
|
|
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
class AddRuleLink(tables.LinkAction):
    """Table action linking to the add-firewall-rule modal."""
    name = "addrule"
    verbose_name = _("Add Rule")
    url = "horizon:project:firewalls:addrule"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_firewall_rule"),)
class AddPolicyLink(tables.LinkAction):
    """Table action linking to the add-firewall-policy modal."""
    name = "addpolicy"
    verbose_name = _("Add Policy")
    url = "horizon:project:firewalls:addpolicy"
    classes = ("ajax-modal", "btn-addpolicy",)
    icon = "plus"
    policy_rules = (("network", "create_firewall_policy"),)
class AddFirewallLink(tables.LinkAction):
    """Table action linking to the create-firewall modal."""
    name = "addfirewall"
    verbose_name = _("Create Firewall")
    url = "horizon:project:firewalls:addfirewall"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_firewall"),)
class DeleteRuleLink(policy.PolicyTargetMixin, tables.DeleteAction):
    """Batch/row action deleting one or more firewall rules."""
    name = "deleterule"

    @staticmethod
    def action_present(count):
        # Button label, pluralized on the number of selected rows.
        return ungettext_lazy(
            u"Delete Rule",
            u"Delete Rules",
            count
        )

    @staticmethod
    def action_past(count):
        # Past-tense message shown once the deletion has been scheduled.
        return ungettext_lazy(
            u"Scheduled deletion of Rule",
            u"Scheduled deletion of Rules",
            count
        )

    policy_rules = (("network", "delete_firewall_rule"),)
class DeletePolicyLink(policy.PolicyTargetMixin, tables.DeleteAction):
    """Batch/row action deleting one or more firewall policies."""
    name = "deletepolicy"

    @staticmethod
    def action_present(count):
        # Button label, pluralized on the number of selected rows.
        return ungettext_lazy(
            u"Delete Policy",
            u"Delete Policies",
            count
        )

    @staticmethod
    def action_past(count):
        # Past-tense message shown once the deletion has been scheduled.
        return ungettext_lazy(
            u"Scheduled deletion of Policy",
            u"Scheduled deletion of Policies",
            count
        )

    policy_rules = (("network", "delete_firewall_policy"),)
class DeleteFirewallLink(policy.PolicyTargetMixin,
                         tables.DeleteAction):
    """Batch/row action deleting one or more firewalls."""
    name = "deletefirewall"

    @staticmethod
    def action_present(count):
        # Button label, pluralized on the number of selected rows.
        return ungettext_lazy(
            u"Delete Firewall",
            u"Delete Firewalls",
            count
        )

    @staticmethod
    def action_past(count):
        # Past-tense message shown once the deletion has been scheduled.
        return ungettext_lazy(
            u"Scheduled deletion of Firewall",
            u"Scheduled deletion of Firewalls",
            count
        )

    policy_rules = (("network", "delete_firewall"),)
class UpdateRuleLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the edit dialog for a firewall rule."""
    name = "updaterule"
    verbose_name = _("Edit Rule")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_firewall_rule"),)

    def get_link_url(self, rule):
        """Build the edit-rule URL for the given rule row."""
        return reverse("horizon:project:firewalls:updaterule",
                       kwargs={'rule_id': rule.id})
class UpdatePolicyLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the edit dialog for a firewall policy."""
    name = "updatepolicy"
    verbose_name = _("Edit Policy")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_firewall_policy"),)

    def get_link_url(self, policy):
        """Build the edit-policy URL for the given policy row."""
        return reverse("horizon:project:firewalls:updatepolicy",
                       kwargs={'policy_id': policy.id})
class UpdateFirewallLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the edit dialog for a firewall."""
    name = "updatefirewall"
    verbose_name = _("Edit Firewall")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_firewall"),)

    def get_link_url(self, firewall):
        """Build the edit-firewall URL for the given firewall row."""
        return reverse("horizon:project:firewalls:updatefirewall",
                       kwargs={'firewall_id': firewall.id})

    def allowed(self, request, firewall):
        """Disallow editing while the firewall is in a transitional state."""
        pending_states = ("PENDING_CREATE",
                          "PENDING_UPDATE",
                          "PENDING_DELETE")
        return firewall.status not in pending_states
class InsertRuleToPolicyLink(policy.PolicyTargetMixin,
                             tables.LinkAction):
    """Row action opening the dialog to insert a rule into a policy."""
    name = "insertrule"
    verbose_name = _("Insert Rule")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "get_firewall_policy"),
                    ("network", "insert_rule"),)

    def get_link_url(self, policy):
        """Build the insert-rule URL for the given policy row."""
        return reverse("horizon:project:firewalls:insertrule",
                       kwargs={'policy_id': policy.id})
class RemoveRuleFromPolicyLink(policy.PolicyTargetMixin,
                               tables.LinkAction):
    """Row action opening the dialog to remove a rule from a policy."""
    name = "removerule"
    verbose_name = _("Remove Rule")
    classes = ("ajax-modal", "btn-danger",)
    policy_rules = (("network", "get_firewall_policy"),
                    ("network", "remove_rule"),)

    def get_link_url(self, policy):
        """Build the remove-rule URL for the given policy row."""
        return reverse("horizon:project:firewalls:removerule",
                       kwargs={'policy_id': policy.id})

    def allowed(self, request, policy):
        """Only offer the action when the policy actually has rules."""
        return len(policy.rules) > 0
class AddRouterToFirewallLink(policy.PolicyTargetMixin,
                              tables.LinkAction):
    """Row action to associate an additional router with a firewall."""
    name = "addrouter"
    verbose_name = _("Add Router")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "get_firewall"),
                    ("network", "add_router"),)

    def get_link_url(self, firewall):
        """Build the add-router URL for the given firewall row."""
        return reverse("horizon:project:firewalls:addrouter",
                       kwargs={'firewall_id': firewall.id})

    def allowed(self, request, firewall):
        """Show only when router insertion is supported and the tenant
        still has routers without a firewall."""
        if not api.neutron.is_extension_supported(request,
                                                  'fwaasrouterinsertion'):
            return False
        free_routers = api.fwaas.firewall_unassociated_routers_list(
            request, firewall['tenant_id'])
        return bool(free_routers)
class RemoveRouterFromFirewallLink(policy.PolicyTargetMixin,
                                   tables.LinkAction):
    """Row action to detach a router from a firewall."""
    name = "removerouter"
    verbose_name = _("Remove Router")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "get_firewall"),
                    ("network", "remove_router"),)

    def get_link_url(self, firewall):
        """Build the remove-router URL for the given firewall row."""
        return reverse("horizon:project:firewalls:removerouter",
                       kwargs={'firewall_id': firewall.id})

    def allowed(self, request, firewall):
        """Show only when router insertion is supported and the firewall
        has at least one associated router."""
        if not api.neutron.is_extension_supported(request,
                                                  'fwaasrouterinsertion'):
            return False
        return bool(firewall['router_ids'])
def get_rules_name(datum):
    """Comma-separated display names of a policy's rules; unnamed rules
    fall back to a 13-character id prefix."""
    labels = [rule.name or rule.id[:13] for rule in datum.rules]
    return ', '.join(labels)
def get_routers_name(firewall):
    """Comma-separated names of the firewall's routers, or None when the
    firewall has no associated routers."""
    if not firewall.routers:
        return None
    return ', '.join(router['name'] for router in firewall.routers)
def get_policy_name(datum):
    """Display name of the policy attached to datum (id fallback), or
    None when no policy is attached."""
    policy_obj = datum.policy
    if not policy_obj:
        return None
    return policy_obj.name or policy_obj.id
def get_policy_link(datum):
    """URL of the details page for the policy attached to datum."""
    url_kwargs = {'policy_id': datum.policy.id}
    return reverse('horizon:project:firewalls:policydetails',
                   kwargs=url_kwargs)
class RulesTable(tables.DataTable):
    """Table listing firewall rules with add/edit/delete actions."""
    # Raw API action value -> translatable display label.
    ACTION_DISPLAY_CHOICES = (
        ("Allow", pgettext_lazy("Action Name of a Firewall Rule", u"ALLOW")),
        ("Deny", pgettext_lazy("Action Name of a Firewall Rule", u"DENY")),
    )
    name = tables.Column("name_or_id",
                         verbose_name=_("Name"),
                         link="horizon:project:firewalls:ruledetails")
    description = tables.Column('description', verbose_name=_('Description'))
    # An unset protocol is shown as "ANY"; protocols are upper-cased.
    protocol = tables.Column("protocol",
                             filters=(lambda v: filters.default(v, _("ANY")),
                                      filters.upper,),
                             verbose_name=_("Protocol"))
    source_ip_address = tables.Column("source_ip_address",
                                      verbose_name=_("Source IP"))
    source_port = tables.Column("source_port",
                                verbose_name=_("Source Port"))
    destination_ip_address = tables.Column("destination_ip_address",
                                           verbose_name=_("Destination IP"))
    destination_port = tables.Column("destination_port",
                                     verbose_name=_("Destination Port"))
    action = tables.Column("action",
                           display_choices=ACTION_DISPLAY_CHOICES,
                           verbose_name=_("Action"))
    shared = tables.Column("shared",
                           verbose_name=_("Shared"),
                           filters=(filters.yesno, filters.capfirst))
    enabled = tables.Column("enabled",
                            verbose_name=_("Enabled"),
                            filters=(filters.yesno, filters.capfirst))
    # Links to the policy the rule belongs to, if any.
    firewall_policy_id = tables.Column(get_policy_name,
                                       link=get_policy_link,
                                       verbose_name=_("In Policy"))

    class Meta(object):
        name = "rulestable"
        verbose_name = _("Rules")
        table_actions = (AddRuleLink, DeleteRuleLink)
        row_actions = (UpdateRuleLink, DeleteRuleLink)
class PoliciesTable(tables.DataTable):
    """Table listing firewall policies with their rules and flags."""
    name = tables.Column("name_or_id",
                         verbose_name=_("Name"),
                         link="horizon:project:firewalls:policydetails")
    description = tables.Column('description', verbose_name=_('Description'))
    firewall_rules = tables.Column(get_rules_name,
                                   verbose_name=_("Rules"))
    audited = tables.Column("audited",
                            verbose_name=_("Audited"),
                            filters=(filters.yesno, filters.capfirst))
    # Bug fix: `shared` used to be defined twice with identical arguments;
    # the second definition silently rebound the attribute and the first
    # was dead weight. The surviving definition keeps the later position
    # (after `audited`) so the displayed column order is unchanged.
    shared = tables.Column("shared",
                           verbose_name=_("Shared"),
                           filters=(filters.yesno, filters.capfirst))

    class Meta(object):
        name = "policiestable"
        verbose_name = _("Policies")
        table_actions = (AddPolicyLink, DeletePolicyLink)
        row_actions = (UpdatePolicyLink, InsertRuleToPolicyLink,
                       RemoveRuleFromPolicyLink, DeletePolicyLink)
class FirewallsTable(tables.DataTable):
    """Table listing firewalls; router-association actions appear only
    when the 'fwaasrouterinsertion' Neutron extension is available."""
    # Raw API status -> translatable display label.
    STATUS_DISPLAY_CHOICES = (
        ("Active", pgettext_lazy("Current status of a Firewall",
                                 u"Active")),
        ("Down", pgettext_lazy("Current status of a Firewall",
                               u"Down")),
        ("Error", pgettext_lazy("Current status of a Firewall",
                                u"Error")),
        ("Created", pgettext_lazy("Current status of a Firewall",
                                  u"Created")),
        ("Pending_Create", pgettext_lazy("Current status of a Firewall",
                                         u"Pending Create")),
        ("Pending_Update", pgettext_lazy("Current status of a Firewall",
                                         u"Pending Update")),
        ("Pending_Delete", pgettext_lazy("Current status of a Firewall",
                                         u"Pending Delete")),
        ("Inactive", pgettext_lazy("Current status of a Firewall",
                                   u"Inactive")),
    )
    # Raw admin state -> translatable display label.
    ADMIN_STATE_DISPLAY_CHOICES = (
        ("UP", pgettext_lazy("Admin state of a Firewall", u"UP")),
        ("DOWN", pgettext_lazy("Admin state of a Firewall", u"DOWN")),
    )
    name = tables.Column("name_or_id",
                         verbose_name=_("Name"),
                         link="horizon:project:firewalls:firewalldetails")
    description = tables.Column('description', verbose_name=_('Description'))
    firewall_policy_id = tables.Column(get_policy_name,
                                       link=get_policy_link,
                                       verbose_name=_("Policy"))
    # Removed in __init__ when router insertion is unsupported.
    router_ids = tables.Column(get_routers_name,
                               verbose_name=_("Associated Routers"))
    status = tables.Column("status",
                           verbose_name=_("Status"),
                           display_choices=STATUS_DISPLAY_CHOICES)
    shared = tables.Column("shared",
                           verbose_name=_("Shared"),
                           filters=(filters.yesno, filters.capfirst))
    admin_state = tables.Column("admin_state",
                                verbose_name=_("Admin State"),
                                display_choices=ADMIN_STATE_DISPLAY_CHOICES)

    class Meta(object):
        name = "firewallstable"
        verbose_name = _("Firewalls")
        table_actions = (AddFirewallLink, DeleteFirewallLink)
        row_actions = (UpdateFirewallLink, DeleteFirewallLink,
                       AddRouterToFirewallLink, RemoveRouterFromFirewallLink)

    def __init__(self, request, data=None, needs_form_wrapper=None, **kwargs):
        """Build the table, dropping the routers column on clouds without
        the 'fwaasrouterinsertion' extension."""
        super(FirewallsTable, self).__init__(
            request, data=data,
            needs_form_wrapper=needs_form_wrapper, **kwargs)
        try:
            if not api.neutron.is_extension_supported(request,
                                                      'fwaasrouterinsertion'):
                del self.columns['router_ids']
        except Exception as e:
            # Report the failure but keep rendering the table.
            msg = _('Failed to verify extension support %(reason)s') % {
                'reason': e}
            LOG.error(msg)
            exceptions.handle(request, msg)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import re
from heat.common import exception
# Recognized schema dict keys (left side names each string constant).
SCHEMA_KEYS = (
    REQUIRED, IMPLEMENTED, DEFAULT, TYPE, SCHEMA,
    PATTERN, MIN_VALUE, MAX_VALUE, VALUES,
) = (
    'Required', 'Implemented', 'Default', 'Type', 'Schema',
    'AllowedPattern', 'MinValue', 'MaxValue', 'AllowedValues',
)

# Recognized property types.
SCHEMA_TYPES = (
    INTEGER,
    STRING, NUMBER, BOOLEAN,
    MAP, LIST
) = (
    'Integer',
    'String', 'Number', 'Boolean',
    'Map', 'List'
)


class Property(object):
    """Validator for a single property described by a template schema.

    `schema` is a dict using SCHEMA_KEYS; `name` is only used when
    reporting errors from nested map schemas.
    """

    def __init__(self, schema, name=None):
        self.schema = schema
        self.name = name
        # NOTE: asserts guard against programmer error in the schema
        # definition itself (not user input); kept for compatibility.
        for key in self.schema:
            assert key in SCHEMA_KEYS, 'Unknown schema key "%s"' % key
        assert self.type() in SCHEMA_TYPES,\
            'Unknown property type "%s"' % self.type()

    def required(self):
        """Whether the property must be supplied (defaults to False)."""
        return self.schema.get(REQUIRED, False)

    def implemented(self):
        """Whether the property is implemented (defaults to True)."""
        return self.schema.get(IMPLEMENTED, True)

    def has_default(self):
        return DEFAULT in self.schema

    def default(self):
        return self.schema[DEFAULT]

    def type(self):
        return self.schema[TYPE]

    def _check_allowed(self, value):
        # Enforce the AllowedValues constraint when present.
        if VALUES in self.schema:
            allowed = self.schema[VALUES]
            if value not in allowed:
                raise ValueError('"%s" is not an allowed value %s' %
                                 (value, str(allowed)))

    @staticmethod
    def str_to_num(value):
        """Coerce a string (or number) to int when possible, else float."""
        try:
            return int(value)
        except ValueError:
            return float(value)

    def _validate_integer(self, value):
        if value is None:
            value = self.has_default() and self.default() or 0
        if not isinstance(value, int):
            raise TypeError('value is not an integer')
        return self._validate_number(value)

    def _validate_number(self, value):
        """Check AllowedValues and Min/MaxValue bounds; returns the
        original (unconverted) value."""
        if value is None:
            value = self.has_default() and self.default() or 0
        self._check_allowed(value)
        num = self.str_to_num(value)
        # A missing bound defaults to the value itself, i.e. no constraint.
        minn = self.str_to_num(self.schema.get(MIN_VALUE, value))
        maxn = self.str_to_num(self.schema.get(MAX_VALUE, value))
        if num > maxn or num < minn:
            # Renamed from `format` to avoid shadowing the builtin.
            fmt = '%d' if isinstance(num, int) else '%f'
            raise ValueError('%s is out of range' % (fmt % num))
        return value

    def _validate_string(self, value):
        if value is None:
            value = self.has_default() and self.default() or ''
        if not isinstance(value, basestring):
            raise ValueError('Value must be a string')
        self._check_allowed(value)
        if PATTERN in self.schema:
            pattern = self.schema[PATTERN]
            # The pattern must match the entire value, not just a prefix.
            match = re.match(pattern, value)
            if match is None or match.end() != len(value):
                raise ValueError('"%s" does not match pattern "%s"' %
                                 (value, pattern))
        return value

    def _validate_map(self, value):
        if value is None:
            value = self.has_default() and self.default() or {}
        if not isinstance(value, collections.Mapping):
            raise TypeError('"%s" is not a map' % value)
        if SCHEMA in self.schema:
            # Validate children against the nested schema.
            children = dict(Properties(self.schema[SCHEMA], value,
                                       parent_name=self.name))
        else:
            children = value
        return children

    def _validate_list(self, value):
        if value is None:
            value = self.has_default() and self.default() or []
        if (not isinstance(value, collections.Sequence) or
                isinstance(value, basestring)):
            raise TypeError('"%s" is not a list' % repr(value))
        for v in value:
            self._check_allowed(v)
        if SCHEMA in self.schema:
            prop = Property(self.schema[SCHEMA])
            children = [prop.validate_data(d) for d in value]
        else:
            children = value
        return children

    def _validate_bool(self, value):
        if value is None:
            value = self.has_default() and self.default() or False
        if isinstance(value, bool):
            return value
        normalised = value.lower()
        if normalised not in ['true', 'false']:
            # Bug fix: the format string previously lacked `% value`, so
            # the error message contained a literal "%s".
            raise ValueError('"%s" is not a valid boolean' % value)
        return normalised == 'true'

    def validate_data(self, value):
        """Validate `value` against this property's type and return the
        validated (possibly normalised) value."""
        t = self.type()
        if t == STRING:
            return self._validate_string(value)
        elif t == INTEGER:
            return self._validate_integer(value)
        elif t == NUMBER:
            return self._validate_number(value)
        elif t == MAP:
            return self._validate_map(value)
        elif t == LIST:
            return self._validate_list(value)
        elif t == BOOLEAN:
            return self._validate_bool(value)
class Properties(collections.Mapping):
    """Read-only mapping of validated property values for a resource.

    Wraps raw `data` with per-key `Property` validators built from
    `schema`; values are validated lazily on item access.
    """
    def __init__(self, schema, data, resolver=lambda d: d, parent_name=None):
        # One Property validator per schema key.
        self.props = dict((k, Property(s, k)) for k, s in schema.items())
        # Callable applied to raw values before validation (e.g. to
        # resolve template functions); defaults to the identity.
        self.resolve = resolver
        self.data = data
        if parent_name is None:
            self.error_prefix = ''
        else:
            self.error_prefix = parent_name + ': '

    def validate(self):
        """Validate every schema key; raises StackValidationFailed on the
        first validation error, unimplemented property, or unknown key."""
        for (key, prop) in self.props.items():
            try:
                # Item access triggers validation of the value.
                self[key]
            except ValueError as e:
                msg = "Property error : %s" % str(e)
                raise exception.StackValidationFailed(message=msg)
            # are there unimplemented Properties
            if not prop.implemented() and key in self.data:
                msg = "Property %s not implemented yet" % key
                raise exception.StackValidationFailed(message=msg)
        for key in self.data:
            if key not in self.props:
                msg = "Unknown Property %s" % key
                raise exception.StackValidationFailed(message=msg)

    def __getitem__(self, key):
        """Return the validated value for `key`: the resolved data value
        if present, else the schema default, else an error for required
        keys. NOTE(review): implicitly returns None for an optional key
        with no data and no default.
        """
        if key not in self:
            raise KeyError(self.error_prefix + 'Invalid Property %s' % key)
        prop = self.props[key]
        if key in self.data:
            value = self.resolve(self.data[key])
            try:
                return prop.validate_data(value)
            except ValueError as e:
                # Re-raise with the parent-name prefix for nested maps.
                raise ValueError(self.error_prefix + '%s %s' % (key, str(e)))
        elif prop.has_default():
            return prop.default()
        elif prop.required():
            raise ValueError(self.error_prefix +
                             'Property %s not assigned' % key)

    def __len__(self):
        return len(self.props)

    def __contains__(self, key):
        return key in self.props

    def __iter__(self):
        return iter(self.props)
|
|
import json
import os
from pokemongo_bot import inventory
from pokemongo_bot.human_behaviour import action_delay
from pokemongo_bot.base_task import BaseTask
from pokemongo_bot.inventory import Pokemons, Pokemon
class TransferPokemon(BaseTask):
    """Task that releases ("transfers") pokemon according to the release
    rules in the bot configuration, optionally keeping the best ones by
    CP and/or IV first."""
    SUPPORTED_TASK_API_VERSION = 1

    def work(self):
        """Walk every pokemon group and release those matching the rules."""
        pokemon_groups = self._release_pokemon_get_groups()
        # .items() instead of the Python-2-only .iteritems().
        for pokemon_id, group in pokemon_groups.items():
            pokemon_name = Pokemons.name_for(pokemon_id)
            keep_best, keep_best_cp, keep_best_iv = self._validate_keep_best_config(pokemon_name)
            if keep_best:
                best_pokemon_ids = set()
                order_criteria = 'none'
                if keep_best_cp >= 1:
                    best_cp_pokemons = sorted(group, key=lambda x: (x.cp, x.iv),
                                              reverse=True)[:keep_best_cp]
                    best_pokemon_ids = set(pokemon.id for pokemon in best_cp_pokemons)
                    order_criteria = 'cp'
                if keep_best_iv >= 1:
                    best_iv_pokemons = sorted(group, key=lambda x: (x.iv, x.cp),
                                              reverse=True)[:keep_best_iv]
                    best_pokemon_ids |= set(pokemon.id for pokemon in best_iv_pokemons)
                    if order_criteria == 'cp':
                        order_criteria = 'cp and iv'
                    else:
                        order_criteria = 'iv'
                # Bug fix: partition the group instead of calling
                # list.remove() while iterating the same list, which can
                # silently skip elements.
                best_pokemons = [p for p in group if p.id in best_pokemon_ids]
                rest_pokemons = [p for p in group if p.id not in best_pokemon_ids]
                transfer_pokemons = [p for p in rest_pokemons
                                     if self.should_release_pokemon(p, True)]
                if transfer_pokemons:
                    if best_pokemons:
                        self.emit_event(
                            'keep_best_release',
                            formatted="Keeping best {amount} {pokemon}, based on {criteria}",
                            data={
                                'amount': len(best_pokemons),
                                'pokemon': pokemon_name,
                                'criteria': order_criteria
                            }
                        )
                    for pokemon in transfer_pokemons:
                        self.release_pokemon(pokemon)
            else:
                group = sorted(group, key=lambda x: x.cp, reverse=True)
                for pokemon in group:
                    if self.should_release_pokemon(pokemon):
                        self.release_pokemon(pokemon)

    def _release_pokemon_get_groups(self):
        """Group transferable pokemon by species id, skipping favorites
        and pokemon deployed in forts."""
        pokemon_groups = {}
        # TODO: Use new inventory everywhere and then remove the inventory update
        for pokemon in inventory.pokemons(True).all():
            if pokemon.in_fort or pokemon.is_favorite:
                continue
            pokemon_groups.setdefault(pokemon.pokemon_id, []).append(pokemon)
        return pokemon_groups

    def should_release_pokemon(self, pokemon, keep_best_mode=False):
        """Decide whether `pokemon` should be released.

        In keep-best mode a pokemon with no explicit release rules is
        released by default; otherwise the CP/IV thresholds combined with
        the configured logic ('and'/'or') decide.
        """
        release_config = self._get_release_config_for(pokemon.name)
        # `in` instead of the removed-in-Python-3 dict.has_key().
        if (keep_best_mode
                and 'never_release' not in release_config
                and 'always_release' not in release_config
                and 'release_below_cp' not in release_config
                and 'release_below_iv' not in release_config):
            return True
        cp_iv_logic = release_config.get('logic')
        if not cp_iv_logic:
            cp_iv_logic = self._get_release_config_for('any').get('logic', 'and')
        release_results = {
            'cp': False,
            'iv': False,
        }
        if release_config.get('never_release', False):
            return False
        if release_config.get('always_release', False):
            return True
        release_cp = release_config.get('release_below_cp', 0)
        if pokemon.cp < release_cp:
            release_results['cp'] = True
        release_iv = release_config.get('release_below_iv', 0)
        if pokemon.iv < release_iv:
            release_results['iv'] = True
        logic_to_function = {
            'or': lambda x, y: x or y,
            'and': lambda x, y: x and y
        }
        # NOTE: *release_results.values() relies on dict ordering, but both
        # operands are booleans fed to a symmetric and/or, so the order
        # cannot change the outcome.
        if logic_to_function[cp_iv_logic](*release_results.values()):
            self.emit_event(
                'future_pokemon_release',
                formatted="Releasing {pokemon} [CP {cp}] [IV {iv}] based on rule: CP < {below_cp} {cp_iv_logic} IV < {below_iv}",
                data={
                    'pokemon': pokemon.name,
                    'cp': pokemon.cp,
                    'iv': pokemon.iv,
                    'below_cp': release_cp,
                    'cp_iv_logic': cp_iv_logic.upper(),
                    'below_iv': release_iv
                }
            )
        return logic_to_function[cp_iv_logic](*release_results.values())

    def release_pokemon(self, pokemon):
        """Release a single pokemon via the API and update local state.

        :type pokemon: Pokemon
        """
        try:
            if self.bot.config.test:
                candy_awarded = 1
            else:
                response_dict = self.bot.api.release_pokemon(pokemon_id=pokemon.id)
                candy_awarded = response_dict['responses']['RELEASE_POKEMON']['candy_awarded']
        except KeyError:
            # Malformed response; skip the local bookkeeping.
            return
        # We could refresh here too, but adding 1 saves a inventory request
        candy = inventory.candies().get(pokemon.pokemon_id)
        candy.add(candy_awarded)
        inventory.pokemons().remove(pokemon.id)
        self.bot.metrics.released_pokemon()
        self.emit_event(
            'pokemon_release',
            formatted='Exchanged {pokemon} [CP {cp}] [IV {iv}] for candy.',
            data={
                'pokemon': pokemon.name,
                'cp': pokemon.cp,
                'iv': pokemon.iv,
                'ncp': pokemon.cp_percent,
                'dps': pokemon.moveset.dps
            }
        )
        # Human-like pause between API actions.
        action_delay(self.bot.config.action_wait_min, self.bot.config.action_wait_max)

    def _get_release_config_for(self, pokemon):
        """Release config for the named pokemon, falling back to the
        'any' section, then an empty dict."""
        release_config = self.bot.config.release.get(pokemon)
        if not release_config:
            release_config = self.bot.config.release.get('any')
        if not release_config:
            release_config = {}
        return release_config

    def _validate_keep_best_config(self, pokemon_name):
        """Return (keep_best, keep_best_cp, keep_best_iv) from the release
        config, coercing counts to int and disabling keep-best on bad or
        non-positive values."""
        keep_best = False
        release_config = self._get_release_config_for(pokemon_name)
        keep_best_cp = release_config.get('keep_best_cp', 0)
        keep_best_iv = release_config.get('keep_best_iv', 0)
        if keep_best_cp or keep_best_iv:
            keep_best = True
            try:
                keep_best_cp = int(keep_best_cp)
            except ValueError:
                keep_best_cp = 0
            try:
                keep_best_iv = int(keep_best_iv)
            except ValueError:
                keep_best_iv = 0
            if keep_best_cp < 0 or keep_best_iv < 0:
                keep_best = False
            if keep_best_cp == 0 and keep_best_iv == 0:
                keep_best = False
        return keep_best, keep_best_cp, keep_best_iv
|
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import thrift
from django.utils.encoding import smart_str, force_unicode
from django.utils.translation import ugettext as _
import hadoop.cluster
from desktop.lib import thrift_util
from hive_metastore import ThriftHiveMetastore
from desktop.conf import KERBEROS
from beeswaxd import BeeswaxService
from beeswaxd.ttypes import QueryNotFoundException
from beeswax import conf
from beeswax import models
from beeswax import hive_site
from beeswax.models import BeeswaxQueryHandle
from beeswax.server.dbms import Table, DataTable
LOG = logging.getLogger(__name__)
class BeeswaxTable(Table):
    """Read-only adapter exposing a Thrift metastore table struct through
    the generic ``Table`` interface."""

    def __init__(self, table_obj):
        # table_obj: Thrift table struct returned by the Hive metastore.
        self.table_obj = table_obj

    @property
    def name(self):
        """Table name as recorded in the metastore."""
        return self.table_obj.tableName

    @property
    def comment(self):
        """Optional free-text comment stored in the table parameters."""
        return self.table_obj.parameters.get('comment')

    @property
    def is_view(self):
        """True when the metastore entry is a virtual view."""
        return self.table_obj.tableType == 'VIRTUAL_VIEW'

    @property
    def cols(self):
        """Column schemas from the storage descriptor."""
        return self.table_obj.sd.cols

    @property
    def partition_keys(self):
        """Partitioning columns of the table."""
        return self.table_obj.partitionKeys

    @property
    def parameters(self):
        """Raw key/value table parameters."""
        return self.table_obj.parameters

    @property
    def path_location(self):
        """Storage location (e.g. HDFS path) of the table data."""
        return self.table_obj.sd.location
class BeeswaxDataTable(DataTable):
    """Adapter wrapping a Beeswax fetch result as a ``DataTable``."""

    def __init__(self, results):
        # Cache the commonly used fields of the Thrift Results struct.
        self.results = results
        self.has_more = results.has_more
        self.startRowOffset = results.start_row
        self.columns = results.columns

    @property
    def ready(self):
        """Whether the server has result rows ready to read."""
        return self.results.ready

    def cols(self):
        """Column names of the result set."""
        return self.results.columns

    def rows(self):
        """Yield each result row as a list of cell strings.

        Results come back as tab-delimited lines; split them back up into
        reasonable things here.
        """
        for line in self.results.data:
            yield line.split("\t")
class BeeswaxClient:
    """Client for a Beeswax (or Impala) query server plus the Hive metastore.

    Both Thrift services are wrapped in unicode-converting proxies so that
    callers deal only in unicode strings (this is Python 2 era code).
    """
    # Statement types that never produce a result set; used to fake
    # has_result_set below since Beeswax does not report it.
    NO_RESULT_SET_RE = re.compile('DROP|CREATE|ALTER|LOAD|USE', re.IGNORECASE)

    def __init__(self, query_server, user):
        # query_server: dict with at least server_host/server_port/server_name
        # (and 'principal' for kerberized setups) — inferred from usage below.
        self.user = user
        self.query_server = query_server
        # NOTE: these rebind the method names db_client/meta_client with the
        # constructed client instances; the methods act as one-shot factories.
        self.db_client = self.db_client(query_server)
        self.meta_client = self.meta_client()

    def make_query(self, hql_query, statement=0):
        """Build a Thrift BeeswaxService.Query for one statement of hql_query."""
        # HUE-535 without having to modify Beeswaxd, add 'use database' as first option
        if self.query_server['server_name'] == 'impala':
            # Impala takes its settings as a single comma-joined key=value list.
            configuration = [','.join(['%(key)s=%(value)s' % setting for setting in hql_query.settings])]
        else:
            configuration = ['use ' + hql_query.query.get('database', 'default')]
            configuration.extend(hql_query.get_configuration())
        query_statement = hql_query.get_query_statement(statement)
        thrift_query = BeeswaxService.Query(query=query_statement, configuration=configuration)
        thrift_query.hadoop_user = self.user.username
        return thrift_query

    def get_databases(self, *args, **kwargs):
        # Impala (at this version) exposes no database listing; only 'default'.
        if self.query_server['server_name'] == 'impala':
            return ['default']
        else:
            return self.meta_client.get_all_databases()

    def get_tables(self, *args, **kwargs):
        return self.meta_client.get_tables(*args, **kwargs)

    def get_table(self, *args, **kwargs):
        table = self.meta_client.get_table(*args, **kwargs)
        return BeeswaxTable(table)

    def query(self, query, statement=0):
        """Submit one statement; returns a BeeswaxQueryHandle."""
        thrift_query = self.make_query(query, statement)
        handle = self.db_client.query(thrift_query)
        # Fake has_result_set
        # `not m is not None` parses as `not (m is not None)`, i.e. `m is None`:
        # DDL/LOAD/USE statements (which match the regex) carry no rows.
        has_result_set = not BeeswaxClient.NO_RESULT_SET_RE.match(thrift_query.query) is not None
        return BeeswaxQueryHandle(secret=handle.id, has_result_set=has_result_set, log_context=handle.log_context)

    def fetch(self, handle, start_over=True, rows=-1):
        """Fetch up to ``rows`` rows (-1 means the server-side default)."""
        if rows is None:
            rows = -1
        rpc_handle = handle.get_rpc_handle()
        results = self.db_client.fetch(rpc_handle, start_over, rows)
        if results.ready:
            # Impala does not return the name of the columns, need to fetch separately
            if self.query_server['server_name'] == 'impala':
                results.columns = [column.name for column in self.get_results_metadata(handle).schema.fieldSchemas]
        return BeeswaxDataTable(results)

    def cancel_operation(self, handle):
        # Beeswax exposes no cancel RPC; fail loudly rather than no-op.
        raise Exception(_('Query cancelation is not supported by the Beeswax interface. Please use the Hive Server 2 interface instead.'))

    def get_log(self, handle):
        return self.db_client.get_log(handle.log_context)

    def get_state(self, handle):
        return self.db_client.get_state(handle)

    def get_results_metadata(self, handle):
        handle = handle.get_rpc_handle()
        return self.db_client.get_results_metadata(handle)

    def close(self, handle):
        handle = handle.get_rpc_handle()
        self.db_client.close(handle)
        # Also release the server-side log buffer associated with this query.
        self.db_client.clean(handle.log_context)

    def get_partitions(self, db_name, tbl_name, max_parts):
        if max_parts is None:
            max_parts = -1  # metastore convention: -1 means "no limit"
        return self.meta_client.get_partitions(db_name, tbl_name, max_parts)

    def explain(self, statement):
        thrift_query = self.make_query(statement)
        return self.db_client.explain(thrift_query)

    def echo(self, text):
        return self.db_client.echo(text)

    def getStatus(self):
        return self.meta_client.getStatus()

    def get_default_configuration(self, *args, **kwargs):
        return self.db_client.get_default_configuration(*args, **kwargs)

    @classmethod
    def get_security(cls, query_server=None):
        """Return (use_sasl, kerberos principal short name) for this cluster."""
        cluster_conf = hadoop.cluster.get_cluster_conf_for_job_submission()
        use_sasl = cluster_conf is not None and cluster_conf.SECURITY_ENABLED.get()
        if query_server is not None:
            principal = query_server['principal']
        else:
            principal = KERBEROS.HUE_PRINCIPAL.get()
        # We should integrate hive_site.get_metastore() here in the future
        if principal:
            # 'service/host@REALM' -> 'service'
            kerberos_principal_short_name = principal.split('/', 1)[0]
        else:
            kerberos_principal_short_name = None
        return use_sasl, kerberos_principal_short_name

    def db_client(self, query_server):
        """Get the Thrift client to talk to beeswax server"""

        class UnicodeBeeswaxClient(object):
            """Wrap the thrift client to take and return Unicode"""
            def __init__(self, client):
                self._client = client

            def __getattr__(self, attr):
                # NOTE(review): __getattr__ only fires after normal lookup
                # fails, so this __dict__ check looks effectively dead; kept
                # byte-identical regardless.
                if attr in self.__dict__:
                    return self.__dict__[attr]
                return getattr(self._client, attr)

            def query(self, query):
                # utf8-encode the HQL text before it goes over the wire.
                _encode_struct_attr(query, 'query')
                return self._client.query(query)

            def explain(self, query):
                _encode_struct_attr(query, 'query')
                res = self._client.explain(query)
                return _decode_struct_attr(res, 'textual')

            def fetch(self, *args, **kwargs):
                res = self._client.fetch(*args, **kwargs)
                if res.ready:
                    # Decode column names and row data to unicode in place.
                    res.columns = [ force_unicode(col, errors='replace') for col in res.columns ]
                    res.data = [ force_unicode(row, errors='replace') for row in res.data ]
                return res

            def get_state(self, handle):
                """
                get_query_state(query_history) --> state enum
                Find out the *server* state of this query, and translate it to the *client* state.
                Expects to find the server_id from the ``query_history``.
                Return None on error. (It catches all anticipated exceptions.)
                """
                rpc_handle = handle.get_rpc_handle()
                try:
                    rpc_state = self._client.get_state(rpc_handle)
                    return models.BeeswaxQueryHistory.STATE_MAP[rpc_state]
                except QueryNotFoundException:
                    LOG.debug("Query id %s has expired" % (handle.secret,))
                    return models.QueryHistory.STATE.expired
                except thrift.transport.TTransport.TTransportException, ex:  # NOTE(review): Python 2-only except syntax
                    LOG.error("Failed to retrieve server state of submitted query id %s: %s" % (handle.secret, ex)) # queryhistory.id
                    return None

            def dump_config(self):
                res = self._client.dump_config()
                return force_unicode(res, errors='replace')

            def echo(self, msg):
                return self._client.echo(smart_str(msg))

            def get_log(self, *args, **kwargs):
                res = self._client.get_log(*args, **kwargs)
                return force_unicode(res, errors='replace')

            def get_default_configuration(self, *args, **kwargs):
                config_list = self._client.get_default_configuration(*args, **kwargs)
                for config in config_list:
                    _decode_struct_attr(config, 'key')
                    _decode_struct_attr(config, 'value')
                    _decode_struct_attr(config, 'desc')
                return config_list

            def get_results_metadata(self, *args, **kwargs):
                res = self._client.get_results_metadata(*args, **kwargs)
                return _decode_struct_attr(res, 'table_dir')

        use_sasl, kerberos_principal_short_name = BeeswaxClient.get_security(query_server)
        client = thrift_util.get_client(BeeswaxService.Client,
                                        query_server['server_host'],
                                        query_server['server_port'],
                                        service_name=query_server['server_name'],
                                        kerberos_principal=kerberos_principal_short_name,
                                        use_sasl=use_sasl,
                                        timeout_seconds=conf.BEESWAX_SERVER_CONN_TIMEOUT.get())
        return UnicodeBeeswaxClient(client)

    def meta_client(self):
        """Get the Thrift client to talk to the metastore"""

        class UnicodeMetastoreClient(object):
            """Wrap the thrift client to take and return Unicode."""
            def __init__(self, client):
                self._client = client

            def __getattr__(self, attr):
                # See the note on UnicodeBeeswaxClient.__getattr__.
                if attr in self.__dict__:
                    return self.__dict__[attr]
                return getattr(self._client, attr)

            def _encode_storage_descriptor(self, sd):
                # utf8-encode mutable string fields of a StorageDescriptor in place.
                _encode_struct_attr(sd, 'location')
                for col in sd.cols:
                    _encode_struct_attr(col, 'comment')
                self._encode_map(sd.parameters)

            def _decode_storage_descriptor(self, sd):
                _decode_struct_attr(sd, 'location')
                for col in sd.cols:
                    _decode_struct_attr(col, 'comment')
                self._decode_map(sd.parameters)

            def _encode_map(self, mapp):
                # In-place utf8-encode of every value (Python 2 dict API).
                for key, value in mapp.iteritems():
                    mapp[key] = smart_str(value, strings_only=True)

            def _decode_map(self, mapp):
                for key, value in mapp.iteritems():
                    mapp[key] = force_unicode(value, strings_only=True, errors='replace')

            def create_database(self, name, description):
                description = smart_str(description)
                return self._client.create_database(name, description)

            def get_database(self, *args, **kwargs):
                db = self._client.get_database(*args, **kwargs)
                return _decode_struct_attr(db, 'description')

            def get_fields(self, *args, **kwargs):
                res = self._client.get_fields(*args, **kwargs)
                for fschema in res:
                    _decode_struct_attr(fschema, 'comment')
                return res

            def get_table(self, *args, **kwargs):
                res = self._client.get_table(*args, **kwargs)
                self._decode_storage_descriptor(res.sd)
                self._decode_map(res.parameters)
                return res

            def alter_table(self, dbname, tbl_name, new_tbl):
                self._encode_storage_descriptor(new_tbl.sd)
                self._encode_map(new_tbl.parameters)
                return self._client.alter_table(dbname, tbl_name, new_tbl)

            def _encode_partition(self, part):
                self._encode_storage_descriptor(part.sd)
                self._encode_map(part.parameters)
                return part

            def _decode_partition(self, part):
                self._decode_storage_descriptor(part.sd)
                self._decode_map(part.parameters)
                return part

            def add_partition(self, new_part):
                self._encode_partition(new_part)
                part = self._client.add_partition(new_part)
                return self._decode_partition(part)

            def get_partition(self, *args, **kwargs):
                part = self._client.get_partition(*args, **kwargs)
                return self._decode_partition(part)

            def get_partitions(self, *args, **kwargs):
                part_list = self._client.get_partitions(*args, **kwargs)
                for part in part_list:
                    self._decode_partition(part)
                return part_list

            def alter_partition(self, db_name, tbl_name, new_part):
                self._encode_partition(new_part)
                return self._client.alter_partition(db_name, tbl_name, new_part)

        # Use service name from kerberos principal set in hive-site.xml
        _, host, port, metastore_kerberos_principal = hive_site.get_metastore()
        use_sasl, kerberos_principal_short_name = BeeswaxClient.get_security()
        # The principal from get_security() is immediately overwritten by the
        # metastore's own principal; only use_sasl is kept from the call above.
        kerberos_principal_short_name = metastore_kerberos_principal and metastore_kerberos_principal.split('/', 1)[0] or None
        client = thrift_util.get_client(ThriftHiveMetastore.Client,
                                        host,
                                        port,
                                        service_name="Hive Metastore Server",
                                        kerberos_principal=kerberos_principal_short_name,
                                        use_sasl=use_sasl,
                                        timeout_seconds=conf.METASTORE_CONN_TIMEOUT.get())
        return UnicodeMetastoreClient(client)
def _decode_struct_attr(struct, attr):
    """Force-decode ``struct.attr`` to unicode in place.

    Missing attributes are left alone; the struct is always returned so the
    call can be used in a return expression.
    """
    try:
        raw = getattr(struct, attr)
    except AttributeError:
        return struct
    setattr(struct, attr, force_unicode(raw, strings_only=True, errors='replace'))
    return struct
def _encode_struct_attr(struct, attr):
    """Encode ``struct.attr`` to a byte string in place.

    Missing attributes are left alone; the struct is always returned so the
    call can be used in a return expression.
    """
    try:
        text = getattr(struct, attr)
    except AttributeError:
        return struct
    setattr(struct, attr, smart_str(text, strings_only=True))
    return struct
|
|
# coding: utf-8
import plumber
from xylose.scielodocument import Article, Journal
import xylose
from articlemeta import export_sci
from articlemeta import export_rsps
from articlemeta import export_doaj
from articlemeta import export_pubmed
from articlemeta import export_crossref
class CustomArticle(Article):
    """Article variant that derives the issue publication date from the
    raw ``v65`` field instead of xylose's default behaviour."""

    @property
    def issue_publication_date(self):
        """Parsed ``v65`` date, or None when the field is absent."""
        record = self.data['article']
        if 'v65' not in record:
            return None
        return xylose.tools.get_date(record['v65'][0]['_'])
class JournalExport:
    """Serializes a raw journal record into a plain dict for scielo.org."""

    def __init__(self, journal):
        # journal: raw journal record as stored by articlemeta.
        self._journal = journal

    def pipeline_scieloorg(self):
        """Return a dict of journal metadata.

        Every field is resolved best-effort: accessors that fail for any
        reason yield None rather than aborting the whole export.
        """
        journal = Journal(self._journal)

        def _safegetter(func):
            # Xylose accessors raise on missing/malformed fields; treat any
            # failure as "value unavailable".  Catching Exception instead of
            # a bare except keeps KeyboardInterrupt/SystemExit propagating.
            try:
                return func()
            except Exception:
                return None

        return {'title': _safegetter(lambda: journal.title),
                'subtitle': _safegetter(lambda: journal.subtitle),
                'previous_title': _safegetter(lambda: journal.previous_title),
                'acronym': _safegetter(lambda: journal.acronym),
                'scielo_url': _safegetter(lambda: journal.url()),
                'institutional_url': _safegetter(lambda: journal.institutional_url),
                'subject_areas': _safegetter(lambda: journal.subject_areas),
                'wos_subject_areas': _safegetter(lambda: journal.wos_subject_areas),
                'publisher_city': _safegetter(lambda: journal.publisher_city),
                'publisher_country': _safegetter(lambda: journal.publisher_country),
                'publisher_name': _safegetter(lambda: journal.publisher_name),
                'status': _safegetter(lambda: journal.current_status)}
class Export(object):
    """Transforms a raw article record into the various export formats:
    SciELO XML, SciELO PS (rsps), DOAJ, PubMed, CrossRef and the OPAC dict.

    Each pipeline_* method builds a plumber pipeline of pipes from the
    corresponding export module and runs the article through it once.
    """

    def __init__(self, article):
        # article: raw (dict-like) article record as stored by articlemeta.
        self._article = article

    def pipeline_sci(self):
        """Return the article serialized as SciELO XML."""
        xylose_article = Article(self._article)
        ppl = plumber.Pipeline(export_sci.SetupArticlePipe(),
                               export_sci.XMLArticlePipe(),
                               export_sci.XMLFrontPipe(),
                               export_sci.XMLJournalMetaJournalIdPipe(),
                               export_sci.XMLJournalMetaJournalTitleGroupPipe(),
                               export_sci.XMLJournalMetaISSNPipe(),
                               export_sci.XMLJournalMetaCollectionPipe(),
                               export_sci.XMLJournalMetaPublisherPipe(),
                               export_sci.XMLArticleMetaUniqueArticleIdPipe(),
                               export_sci.XMLArticleMetaArticleIdPublisherPipe(),
                               export_sci.XMLArticleMetaArticleIdDOIPipe(),
                               export_sci.XMLArticleMetaArticleCategoriesPipe(),
                               export_sci.XMLArticleMetaTitleGroupPipe(),
                               export_sci.XMLArticleMetaTranslatedTitleGroupPipe(),
                               export_sci.XMLArticleMetaContribGroupPipe(),
                               export_sci.XMLArticleMetaAffiliationPipe(),
                               export_sci.XMLArticleMetaDatesInfoPipe(),
                               export_sci.XMLArticleMetaIssueInfoPipe(),
                               export_sci.XMLArticleMetaElocationInfoPipe(),
                               export_sci.XMLArticleMetaPagesInfoPipe(),
                               export_sci.XMLArticleMetaPermissionPipe(),
                               export_sci.XMLArticleMetaURLsPipe(),
                               export_sci.XMLArticleMetaAbstractsPipe(),
                               export_sci.XMLArticleMetaKeywordsPipe(),
                               export_sci.XMLArticleMetaCitationsPipe(),
                               export_sci.XMLClosePipe())
        # rewrap=True makes run() yield a single transformed item.
        transformed_data = ppl.run(xylose_article, rewrap=True)
        return next(transformed_data)

    def pipeline_rsps(self):
        """Return the article serialized as SciELO PS (rsps) XML."""
        xylose_article = Article(self._article)
        ppl = plumber.Pipeline(export_rsps.SetupArticlePipe(),
                               export_rsps.XMLArticlePipe(),
                               export_rsps.XMLFrontPipe(),
                               export_rsps.XMLJournalMetaJournalIdPipe(),
                               export_rsps.XMLJournalMetaJournalTitleGroupPipe(),
                               export_rsps.XMLJournalMetaISSNPipe(),
                               export_rsps.XMLJournalMetaPublisherPipe(),
                               export_rsps.XMLArticleMetaArticleIdPublisherPipe(),
                               export_rsps.XMLArticleMetaArticleIdDOIPipe(),
                               export_rsps.XMLArticleMetaArticleCategoriesPipe(),
                               export_rsps.XMLArticleMetaTitleGroupPipe(),
                               export_rsps.XMLArticleMetaTranslatedTitleGroupPipe(),
                               export_rsps.XMLArticleMetaContribGroupPipe(),
                               export_rsps.XMLArticleMetaAffiliationPipe(),
                               export_rsps.XMLArticleMetaDatesInfoPipe(),
                               export_rsps.XMLArticleMetaIssueInfoPipe(),
                               export_rsps.XMLArticleMetaElocationInfoPipe(),
                               export_rsps.XMLArticleMetaPagesInfoPipe(),
                               export_rsps.XMLArticleMetaHistoryPipe(),
                               export_rsps.XMLArticleMetaPermissionPipe(),
                               export_rsps.XMLArticleMetaSelfUriPipe(),
                               export_rsps.XMLArticleMetaAbstractsPipe(),
                               export_rsps.XMLArticleMetaKeywordsPipe(),
                               export_rsps.XMLArticleMetaCountsPipe(),
                               export_rsps.XMLBodyPipe(),
                               export_rsps.XMLArticleMetaCitationsPipe(),
                               export_rsps.XMLSubArticlePipe(),
                               export_rsps.XMLClosePipe())
        transformed_data = ppl.run(xylose_article, rewrap=True)
        return next(transformed_data)

    def pipeline_doaj(self):
        """Return the article serialized as DOAJ XML (ISO 639-2 languages)."""
        xylose_article = Article(self._article, iso_format='iso 639-2')
        ppl = plumber.Pipeline(export_doaj.SetupArticlePipe(),
                               export_doaj.XMLArticlePipe(),
                               export_doaj.XMLJournalMetaPublisherPipe(),
                               export_doaj.XMLJournalMetaJournalTitlePipe(),
                               export_doaj.XMLJournalMetaISSNPipe(),
                               export_doaj.XMLArticleMetaPublicationDatePipe(),
                               export_doaj.XMLArticleMetaVolumePipe(),
                               export_doaj.XMLArticleMetaIssuePipe(),
                               export_doaj.XMLArticleMetaStartPagePipe(),
                               export_doaj.XMLArticleMetaEndPagePipe(),
                               export_doaj.XMLArticleMetaArticleIdDOIPipe(),
                               export_doaj.XMLArticleMetaIdPipe(),
                               export_doaj.XMLArticleMetaDocumentTypePipe(),
                               export_doaj.XMLArticleMetaTitlePipe(),
                               export_doaj.XMLArticleMetaAuthorsPipe(),
                               export_doaj.XMLArticleMetaAffiliationPipe(),
                               export_doaj.XMLArticleMetaAbstractsPipe(),
                               export_doaj.XMLArticleMetaFullTextUrlPipe(),
                               export_doaj.XMLArticleMetaKeywordsPipe(),
                               export_doaj.XMLClosePipe())
        transformed_data = ppl.run(xylose_article, rewrap=True)
        return next(transformed_data)

    def pipeline_pubmed(self):
        """Return the article serialized as PubMed XML (ISO 639-2 languages)."""
        xylose_article = Article(self._article, iso_format='iso 639-2')
        ppl = plumber.Pipeline(export_pubmed.SetupArticleSetPipe(),
                               export_pubmed.XMLArticlePipe(),
                               export_pubmed.XMLJournalPipe(),
                               export_pubmed.XMLPublisherNamePipe(),
                               export_pubmed.XMLJournalTitlePipe(),
                               export_pubmed.XMLISSNPipe(),
                               export_pubmed.XMLVolumePipe(),
                               export_pubmed.XMLIssuePipe(),
                               export_pubmed.XMLPubDatePipe(),
                               export_pubmed.XMLReplacesPipe(),
                               export_pubmed.XMLArticleTitlePipe(),
                               export_pubmed.XMLFirstPagePipe(),
                               export_pubmed.XMLLastPagePipe(),
                               export_pubmed.XMLElocationIDPipe(),
                               export_pubmed.XMLLanguagePipe(),
                               export_pubmed.XMLAuthorListPipe(),
                               export_pubmed.XMLPublicationTypePipe(),
                               export_pubmed.XMLArticleIDListPipe(),
                               export_pubmed.XMLHistoryPipe(),
                               export_pubmed.XMLAbstractPipe(),
                               export_pubmed.XMLClosePipe())
        transformed_data = ppl.run(xylose_article, rewrap=True)
        return next(transformed_data)

    def pipeline_crossref(self):
        """Return the article serialized as a CrossRef deposit XML.

        Uses CustomArticle so the issue publication date comes from v65.
        """
        xylose_article = CustomArticle(self._article)
        ppl = plumber.Pipeline(
            export_crossref.SetupDoiBatchPipe(),
            export_crossref.XMLHeadPipe(),
            export_crossref.XMLBodyPipe(),
            export_crossref.XMLDoiBatchIDPipe(),
            export_crossref.XMLTimeStampPipe(),
            export_crossref.XMLDepositorPipe(),
            export_crossref.XMLRegistrantPipe(),
            export_crossref.XMLJournalPipe(),
            export_crossref.XMLJournalMetadataPipe(),
            export_crossref.XMLJournalTitlePipe(),
            export_crossref.XMLAbbreviatedJournalTitlePipe(),
            export_crossref.XMLISSNPipe(),
            export_crossref.XMLJournalIssuePipe(),
            export_crossref.XMLPubDatePipe(),
            export_crossref.XMLVolumePipe(),
            export_crossref.XMLIssuePipe(),
            export_crossref.XMLJournalArticlePipe(),
            export_crossref.XMLArticleTitlesPipe(),
            export_crossref.XMLArticleTitlePipe(),
            export_crossref.XMLArticleContributorsPipe(),
            export_crossref.XMLArticleAbstractPipe(),
            export_crossref.XMLArticlePubDatePipe(),
            export_crossref.XMLPagesPipe(),
            export_crossref.XMLPIDPipe(),
            export_crossref.XMLPermissionsPipe(),
            export_crossref.XMLElocationPipe(),
            export_crossref.XMLDOIDataPipe(),
            export_crossref.XMLDOIPipe(),
            export_crossref.XMLResourcePipe(),
            export_crossref.XMLCollectionPipe(),
            export_crossref.XMLArticleCitationsPipe(),
            export_crossref.XMLClosePipe()
        )
        transformed_data = ppl.run(xylose_article, rewrap=True)
        return next(transformed_data)

    def pipeline_opac(self):
        """Return a copy of the raw article dict with control fields stripped."""
        article = self._article.copy()
        # Internal bookkeeping/validation fields that must not be exposed.
        keys_to_remove = ['citations', '_shard_id', 'validated_scielo',
                          'doaj_id', 'normalized', 'sent_doaj', 'sent_wos',
                          'validated_wos', 'applicable']
        for k in keys_to_remove:
            try:
                del(article[k])
            except KeyError:
                pass
        return article
|
|
#!/usr/bin/env python3
import time
import copy
import random
import colorsys
import signal
import asyncio
from enum import Enum
# from termcolor import colored
import helper
def stringify(container, vertical_border='', horizontal_border=''):
    """Render a column-major grid of Color cells as ASCII art.

    Each cell prints as its brightness scaled to a digit 1-9, or as a space
    when the cell is None or fully dark.  Optional border strings frame the
    output: ``horizontal_border`` is repeated across the top and bottom,
    ``vertical_border`` wraps each row.
    """
    s = ''
    if len(horizontal_border) > 0:
        s += vertical_border + horizontal_border * len(container) + vertical_border + '\n'
    if len(vertical_border) > 0:
        s += vertical_border
    prev_y = None
    for (x, y, color) in helper.row_wise(container):
        if prev_y is not None and y != prev_y:
            # Row break: close the finished row and open the next one.
            s += vertical_border + '\n' + vertical_border
        if color is None:
            s += ' '
        else:
            brightness = int(color.brightness * 9)
            if brightness == 0:
                s += ' '
            else:
                # termcolor rendering is disabled; the dead attrs/color
                # computation that only fed colored() was removed.  To
                # re-enable, restore something like:
                #   s += colored(str(brightness),
                #                'red' if color.hue > 0.6 else 'green',
                #                attrs=['reverse' if brightness > 6 else 'dark'])
                s += str(brightness)
        prev_y = y
    if len(vertical_border) > 0:
        s += vertical_border
    if len(horizontal_border) > 0:
        s += '\n' + vertical_border + horizontal_border * len(container) + vertical_border
    return s
class Color:
    """A mutable HSB color supporting smooth interpolation.

    Hue lives on a circle in [0, 1); blending moves along the shortest arc
    around that circle, while saturation and brightness blend linearly.
    """

    def __init__(self, hue=0.0, saturation=0.0, brightness=0.0):
        self.hue = float(hue)
        self.saturation = float(saturation)
        self.brightness = float(brightness)

    def __eq__(self, other):
        if other is None:
            return False
        return (self.hue == other.hue
                and self.saturation == other.saturation
                and self.brightness == other.brightness)

    def __str__(self):
        return "h: %.1f s: %.1f b: %.1f" % (self.hue, self.saturation, self.brightness)

    def blend_towards(self, target_color, current_progress, new_progress):
        """Advance this color from ``current_progress`` to ``new_progress``
        of a blend that finishes exactly at ``target_color``."""
        self.hue = self.__blend_hue(self.hue, target_color.hue, current_progress, new_progress)
        self.saturation = self.__blend_value(self.saturation, target_color.saturation, current_progress, new_progress)
        self.brightness = self.__blend_value(self.brightness, target_color.brightness, current_progress, new_progress)

    def __blend_value(self, current_value, target_value, current_progress, new_progress):
        # Reconstruct the implied start value from how far along we already
        # are, then take one linear step towards the target.
        remaining_progress = 1.0 - current_progress
        if remaining_progress < 0.0001:
            return target_value
        remaining_difference = target_value - current_value
        start_value = target_value - 1.0 / remaining_progress * remaining_difference
        total_difference = target_value - start_value
        return current_value + total_difference * (new_progress - current_progress)

    def __blend_hue(self, current_value, target_value, current_progress, new_progress):
        # Same scheme as __blend_value, but the difference is measured along
        # three candidate paths around the hue circle (direct, wrap up,
        # wrap down) and the shortest one wins at both ends.
        remaining_progress = 1.0 - current_progress
        if remaining_progress < 0.0001:
            return target_value
        remaining_difference = self.__abs_min(
            target_value - current_value,
            target_value + 1 - current_value,
            target_value - 1 - current_value)
        start_value = target_value - (1.0 / remaining_progress * remaining_difference)
        total_difference = self.__abs_min(
            target_value - start_value,
            target_value + 1 - start_value,
            target_value - 1 - start_value)
        step_difference = total_difference * (new_progress - current_progress)
        return self.__wrap_1(current_value + step_difference)

    def __abs_min(self, a, b, c):
        # Value with the smallest magnitude, sign preserved.
        smallest = a if abs(a) < abs(b) else b
        return smallest if abs(smallest) < abs(c) else c

    def __wrap_1(self, value):
        # Keep hue inside [0, 1], assuming at most one wrap per step.
        if value > 1:
            return value - 1
        if value < 0:
            return value + 1
        return value

    def rgb(self):
        """(r, g, b) floats in [0, 1]."""
        return colorsys.hsv_to_rgb(self.hue, self.saturation, self.brightness)

    def hsb(self):
        """(hue, saturation, brightness) tuple."""
        return (self.hue, self.saturation, self.brightness)
class Shape(Enum):
    """The seven tetromino shapes as column-major cell masks.

    Each value is a list of columns; ``True`` marks an occupied cell and
    ``None`` an empty one.  Brick.__init__ deep-copies a mask and replaces
    every True with a Color instance.
    """
    T = [[True, None], [True, True], [True, None]]
    O = [[True, True], [True, True]]
    I = [[True, True, True, True]]
    J = [[None, None, True], [True, True, True]]
    L = [[True, True, True], [None, None, True]]
    Z = [[True, None], [True, True], [None, True]]
    S = [[None, True], [True, True], [True, None]]
class Brick:
    """A falling tetromino: a column-major pattern of Color cells plus a
    (x, y) position on the field."""

    def __init__(self, shape, color, x, y):
        self.shape = shape
        self.position = (x, y)
        self.gravity_affected = True
        # Copy the shape mask and replace every occupied cell with the color.
        self.pattern = copy.deepcopy(shape.value)
        for (col, row, occupied) in helper.column_wise(self.pattern):
            if occupied is None:
                continue
            self.pattern[col][row] = color

    @property
    def x(self):
        return self.position[0]

    @property
    def y(self):
        return self.position[1]

    @property
    def width(self):
        """Number of columns in the pattern."""
        return len(self.pattern)

    @property
    def height(self):
        """Number of rows in the pattern."""
        return len(self.pattern[0])

    def set_saturation(self, saturation):
        """Apply one saturation value to every cell color."""
        for (_, _, cell) in helper.column_wise(self.pattern):
            cell.saturation = saturation

    def set_brightness(self, brightness):
        """Apply one brightness value to every cell color."""
        for (_, _, cell) in helper.column_wise(self.pattern):
            cell.brightness = brightness

    def __str__(self):
        return stringify(self.pattern)

    def rotate_cw(self):
        # Transpose, then reverse the column order.
        self.pattern = list(zip(*self.pattern))[::-1]

    def rotate_ccw(self):
        # Reverse the column order, then transpose.
        self.pattern = list(zip(*self.pattern[::-1]))
class Field:
    """Fixed-size column-major grid of Color cells (None = empty)."""

    def __init__(self, width, height):
        self.field = helper.array_2d(width, height)

    @property
    def width(self):
        return len(self.field)

    @property
    def height(self):
        return len(self.field[0])

    def clear(self):
        """Reset every cell to empty."""
        self.field = helper.array_2d(self.width, self.height)

    def set_all_saturation(self, saturation):
        for (x, y, color) in helper.column_wise(self.field):
            color.saturation = saturation

    def set_all_brightness(self, brightness):
        for (x, y, color) in helper.column_wise(self.field):
            color.brightness = brightness

    def can_move(self, brick, new_position):
        """True when ``brick`` can occupy ``new_position`` without leaving
        the sides/bottom or overlapping a settled cell."""
        if (new_position[0] < 0 or
                new_position[0] + brick.width > self.width or
                new_position[1] + brick.height > self.height):
            return False
        for (x, y, color) in helper.column_wise(brick.pattern):
            if color is None:
                continue
            cell_y = new_position[1] + y
            if cell_y < 0:
                # Cells above the top of the field are always free.  The old
                # code indexed with the raw (negative) y, which wrapped
                # around to the *bottom* rows and tested the wrong cells.
                continue
            if self.field[new_position[0] + x][cell_y] is not None:
                return False
        return True

    def is_outside(self, brick):
        """True when no cell of the brick's bounding box can lie inside
        the field."""
        # >= / <= fix two off-by-ones: a brick starting at x == width (or
        # ending exactly at x == 0, or starting at y == height) has no cell
        # inside the field, but the old > / < comparisons kept it "inside".
        return (brick.position[0] + brick.width <= 0 or
                brick.position[0] >= self.width or
                brick.position[1] >= self.height)

    def __str__(self):
        return stringify(self.field)

    def merge(self, brick):
        """Stamp the brick's cells into the field, clipping at the edges."""
        for (x, y, color) in helper.column_wise(brick.pattern):
            if color is None:
                continue
            if brick.x + x < 0 or brick.x + x >= self.width:
                continue
            if brick.y + y < 0 or brick.y + y >= self.height:
                continue
            self.field[brick.x + x][brick.y + y] = color
class Game:
    """Holds the playing field plus the falling bricks and advances the
    simulation on the asyncio event loop."""

    def __init__(self, loop, width, height, logger=None):
        self._loop = loop
        self.field = Field(width, height)
        self.bricks = []
        self.logger = logger
        self.update_interval = 1  # seconds between gravity steps
        # asyncio.async() is a SyntaxError on Python >= 3.7 ('async' became
        # a keyword); ensure_future() is the direct replacement (3.4.4+).
        asyncio.ensure_future(self.update(), loop=loop)

    @property
    def width(self):
        return self.field.width

    @property
    def height(self):
        return self.field.height

    @asyncio.coroutine
    def update(self):
        """Gravity loop: each tick, move every brick down one cell; bricks
        that can no longer move are merged into the field and retired."""
        while True:
            to_remove = []
            for brick in self.bricks:
                new_position = (brick.x, brick.y + 1)
                if self.field.can_move(brick, new_position):
                    brick.position = new_position
                else:
                    self.field.merge(brick)
                    to_remove.append(brick)
                if self.field.is_outside(brick):
                    # Guard against double-append: a brick that both merged
                    # and ended up outside used to be listed twice, making
                    # the second .remove() below raise ValueError.
                    if brick not in to_remove:
                        to_remove.append(brick)
            for brick in to_remove:
                self.bricks.remove(brick)
            yield from asyncio.sleep(self.update_interval)

    def place_brick(self, brick):
        """Add a brick; non-gravity bricks are stamped directly into the field."""
        if brick.gravity_affected:
            self.bricks.append(brick)
        else:
            self.field.merge(brick)

    def set_all_saturation(self, saturation):
        """Set the saturation of every field cell and every brick cell."""
        self.field.set_all_saturation(saturation)
        for brick in self.bricks:
            # was: `brick.set_saturation = saturation`, which silently
            # overwrote the bound method instead of calling it
            brick.set_saturation(saturation)

    def set_all_brightness(self, brightness):
        """Set the brightness of every field cell and every brick cell."""
        # was: called field.set_all_saturation with the undefined name
        # `saturation`, raising NameError at runtime
        self.field.set_all_brightness(brightness)
        for brick in self.bricks:
            brick.set_brightness(brightness)

    def state(self):
        """2D array of Color objects: the field with all bricks merged in."""
        snapshot = copy.deepcopy(self.field)
        for brick in self.bricks:
            snapshot.merge(brick)
        return snapshot.field
class ConsoleStateView:
    """Periodically prints a stateful object's grid to the console.

    With ``in_place`` set, the cursor jumps back up each frame so the output
    overwrites itself instead of scrolling.
    """

    def __init__(self, loop, stateful, in_place=False):
        self.stateful = stateful
        self._needs_jump = False
        self.in_place = in_place
        # asyncio.async() is a SyntaxError on Python >= 3.7; ensure_future()
        # is the compatible replacement (3.4.4+).
        asyncio.ensure_future(self.update(), loop=loop)

    @asyncio.coroutine
    def update(self):
        while True:
            if self.in_place and self._needs_jump:
                # ANSI cursor-up: grid height + 3 border/newline rows.
                print("\033[%dA" % (len(self.stateful.state()[0]) + 3))
            print(stringify(self.stateful.state(), vertical_border = '|', horizontal_border = '-'))
            self._needs_jump = True
            yield from asyncio.sleep(0.5)
class ColorBlendingView:
    """Wraps a Game so that state() colors fade smoothly towards the game's
    current colors instead of jumping."""

    def __init__(self, loop, game):
        self._loop = loop
        self.game = game
        self.update_interval = 0.05  # seconds between blend steps
        self.blend_time = 2          # seconds for a complete blend
        # Per-cell bookkeeping, all sized like the game state: the displayed
        # color, the target it was last blending towards, and the progress.
        self.current_state = game.state()
        self.previous_target = game.state()
        self.blend_progress = game.state()
        for (x, y, _) in helper.column_wise(self.current_state):
            self.current_state[x][y] = None
            self.previous_target[x][y] = None
            self.blend_progress[x][y] = 0
        # asyncio.async() is a SyntaxError on Python >= 3.7; ensure_future()
        # is the compatible replacement (3.4.4+).
        asyncio.ensure_future(self.update(), loop=loop)

    @property
    def width(self):
        return len(self.current_state)

    @property
    def height(self):
        return len(self.current_state[0])

    @asyncio.coroutine
    def update(self):
        last_update = self._loop.time()
        while True:
            now = self._loop.time()
            elapsed_time = now - last_update
            game_state = self.game.state()
            for (x, y, current_color) in helper.column_wise(self.current_state):
                target_color = game_state[x][y]
                if self.previous_target[x][y] != target_color:
                    # Target changed: restart this cell's blend.
                    self.blend_progress[x][y] = 0
                    self.previous_target[x][y] = target_color
                if current_color == target_color:
                    continue
                # Treat missing colors as "same hue, zero brightness" so
                # cells fade in/out instead of snapping.
                if target_color is None:
                    target_color = Color(hue=current_color.hue, saturation=current_color.saturation, brightness=0)
                if current_color is None:
                    current_color = Color(hue=target_color.hue, saturation=target_color.saturation, brightness=0)
                    self.current_state[x][y] = current_color
                progress = min(self.blend_progress[x][y] + elapsed_time / self.blend_time, 1)
                current_color.blend_towards(target_color, self.blend_progress[x][y], progress)
                self.blend_progress[x][y] = progress
            last_update = now
            yield from asyncio.sleep(self.update_interval)

    def state(self):
        """Deep copy of the current (blended) grid of Colors."""
        return copy.deepcopy(self.current_state)
|
|
# Natural Language Toolkit: Graphical Representations for Trees
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Graphically display a Tree.
"""
from six.moves.tkinter import IntVar, Menu, Tk
from nltk.util import in_idle
from nltk.tree import Tree
from nltk.draw.util import (CanvasFrame, CanvasWidget, BoxWidget,
TextWidget, ParenWidget, OvalWidget)
##//////////////////////////////////////////////////////
## Tree Segment
##//////////////////////////////////////////////////////
class TreeSegmentWidget(CanvasWidget):
"""
A canvas widget that displays a single segment of a hierarchical
tree. Each ``TreeSegmentWidget`` connects a single "node widget"
to a sequence of zero or more "subtree widgets". By default, the
bottom of the node is connected to the top of each subtree by a
single line. However, if the ``roof`` attribute is set, then a
single triangular "roof" will connect the node to all of its
children.
Attributes:
- ``roof``: What sort of connection to draw between the node and
its subtrees. If ``roof`` is true, draw a single triangular
"roof" over the subtrees. If ``roof`` is false, draw a line
between each subtree and the node. Default value is false.
- ``xspace``: The amount of horizontal space to leave between
subtrees when managing this widget. Default value is 10.
- ``yspace``: The amount of space to place between the node and
its children when managing this widget. Default value is 15.
- ``color``: The color of the lines connecting the node to its
subtrees; and of the outline of the triangular roof. Default
value is ``'#006060'``.
- ``fill``: The fill color for the triangular roof. Default
value is ``''`` (no fill).
- ``width``: The width of the lines connecting the node to its
subtrees; and of the outline of the triangular roof. Default
value is 1.
- ``orientation``: Determines whether the tree branches downwards
or rightwards. Possible values are ``'horizontal'`` and
``'vertical'``. The default value is ``'vertical'`` (i.e.,
branch downwards).
- ``draggable``: whether the widget can be dragged by the user.
"""
def __init__(self, canvas, label, subtrees, **attribs):
    """
    Create a segment connecting ``label`` to ``subtrees`` on ``canvas``.

    :type node:
    :type subtrees: list(CanvasWidgetI)
    """
    self._label = label
    self._subtrees = subtrees
    # Attributes
    self._horizontal = 0
    self._roof = 0
    self._xspace = 10
    self._yspace = 15
    self._ordered = False
    # Create canvas objects.
    # One connecting line per subtree; the roof polygon starts hidden and is
    # shown instead of the lines when the 'roof' attribute is set.
    self._lines = [canvas.create_line(0,0,0,0, fill='#006060')
                   for c in subtrees]
    self._polygon = canvas.create_polygon(0,0, fill='', state='hidden',
                                          outline='#006060')
    # Register child widgets (label + subtrees)
    self._add_child_widget(label)
    for subtree in subtrees:
        self._add_child_widget(subtree)
    # Are we currently managing?
    self._managing = False
    # Runs last: presumably CanvasWidget.__init__ applies **attribs through
    # __setitem__, which needs the canvas items above to exist — confirm.
    CanvasWidget.__init__(self, canvas, **attribs)
def __setitem__(self, attr, value):
    """Set a display attribute, updating the underlying canvas items.

    ``attr`` may also be a tuple ``('color', i)`` to color the i-th
    subtree's connecting line individually.
    """
    canvas = self.canvas()
    if attr == 'roof':
        self._roof = value
        # Toggle between the per-subtree lines and the single roof polygon.
        if self._roof:
            for l in self._lines: canvas.itemconfig(l, state='hidden')
            canvas.itemconfig(self._polygon, state='normal')
        else:
            for l in self._lines: canvas.itemconfig(l, state='normal')
            canvas.itemconfig(self._polygon, state='hidden')
    elif attr == 'orientation':
        if value == 'horizontal': self._horizontal = 1
        elif value == 'vertical': self._horizontal = 0
        else:
            raise ValueError('orientation must be horizontal or vertical')
    elif attr == 'color':
        for l in self._lines: canvas.itemconfig(l, fill=value)
        canvas.itemconfig(self._polygon, outline=value)
    elif isinstance(attr, tuple) and attr[0] == 'color':
        # Set the color of an individual line.
        l = self._lines[int(attr[1])]
        canvas.itemconfig(l, fill=value)
    elif attr == 'fill':
        canvas.itemconfig(self._polygon, fill=value)
    elif attr == 'width':
        canvas.itemconfig(self._polygon, {attr:value})
        for l in self._lines: canvas.itemconfig(l, {attr:value})
    elif attr in ('xspace', 'yspace'):
        if attr == 'xspace': self._xspace = value
        elif attr == 'yspace': self._yspace = value
        # Spacing affects layout, so reposition relative to the label.
        self.update(self._label)
    elif attr == 'ordered':
        self._ordered = value
    else:
        # Everything else (e.g. 'draggable') is handled by the base class.
        CanvasWidget.__setitem__(self, attr, value)
    def __getitem__(self, attr):
        """
        Return the current value of attribute ``attr``.  ``attr`` may be
        a tuple ``('color', i)`` to query the color of line ``i``.
        """
        if attr == 'roof': return self._roof
        elif attr == 'width':
            # 'width' is stored on the canvas item, not on self.
            return self.canvas().itemcget(self._polygon, attr)
        elif attr == 'color':
            return self.canvas().itemcget(self._polygon, 'outline')
        elif isinstance(attr, tuple) and attr[0] == 'color':
            l = self._lines[int(attr[1])]
            return self.canvas().itemcget(l, 'fill')
        elif attr == 'xspace': return self._xspace
        elif attr == 'yspace': return self._yspace
        elif attr == 'orientation':
            if self._horizontal: return 'horizontal'
            else: return 'vertical'
        elif attr == 'ordered':
            return self._ordered
        else:
            return CanvasWidget.__getitem__(self, attr)
    def label(self):
        """Return the canvas widget used as this segment's node label."""
        return self._label
def subtrees(self):
return self._subtrees[:]
    def set_label(self, label):
        """
        Set the node label to ``label``.

        :param label: the new ``CanvasWidget`` to use as the node label
        """
        # Swap the widget registration before rebinding self._label, then
        # redraw the lines from the new label.
        self._remove_child_widget(self._label)
        self._add_child_widget(label)
        self._label = label
        self.update(self._label)
def replace_child(self, oldchild, newchild):
"""
Replace the child ``oldchild`` with ``newchild``.
"""
index = self._subtrees.index(oldchild)
self._subtrees[index] = newchild
self._remove_child_widget(oldchild)
self._add_child_widget(newchild)
self.update(newchild)
def remove_child(self, child):
index = self._subtrees.index(child)
del self._subtrees[index]
self._remove_child_widget(child)
self.canvas().delete(self._lines.pop())
self.update(self._label)
def insert_child(self, index, child):
canvas = self.canvas()
self._subtrees.insert(index, child)
self._add_child_widget(child)
self._lines.append(canvas.create_line(0,0,0,0, fill='#006060'))
self.update(self._label)
# but.. lines???
def _tags(self):
if self._roof:
return [self._polygon]
else:
return self._lines
def _subtree_top(self, child):
if isinstance(child, TreeSegmentWidget):
bbox = child.label().bbox()
else:
bbox = child.bbox()
if self._horizontal:
return (bbox[0], (bbox[1]+bbox[3])/2.0)
else:
return ((bbox[0]+bbox[2])/2.0, bbox[1])
def _node_bottom(self):
bbox = self._label.bbox()
if self._horizontal:
return (bbox[2], (bbox[1]+bbox[3])/2.0)
else:
return ((bbox[0]+bbox[2])/2.0, bbox[3])
    def _update(self, child):
        """
        Redraw connecting lines (and the roof polygon) after ``child``
        has moved or resized.  Moving the label redraws every line;
        moving one subtree redraws only its own line (plus any siblings
        displaced by order maintenance).
        """
        if len(self._subtrees) == 0: return
        # Label not yet drawn on the canvas; nothing to anchor lines to.
        if self._label.bbox() is None: return # [XX] ???
        # Which lines need to be redrawn?
        if child is self._label: need_update = self._subtrees
        else: need_update = [child]
        if self._ordered and not self._managing:
            # Widgets pushed aside to preserve ordering also need redraws.
            need_update = self._maintain_order(child)
        # Update the polygon.
        (nodex, nodey) = self._node_bottom()
        # Roof spans the union bounding box of all subtrees.
        (xmin, ymin, xmax, ymax) = self._subtrees[0].bbox()
        for subtree in self._subtrees[1:]:
            bbox = subtree.bbox()
            xmin = min(xmin, bbox[0])
            ymin = min(ymin, bbox[1])
            xmax = max(xmax, bbox[2])
            ymax = max(ymax, bbox[3])
        if self._horizontal:
            self.canvas().coords(self._polygon, nodex, nodey, xmin,
                                 ymin, xmin, ymax, nodex, nodey)
        else:
            self.canvas().coords(self._polygon, nodex, nodey, xmin,
                                 ymin, xmax, ymin, nodex, nodey)
        # Redraw all lines that need it.
        for subtree in need_update:
            (nodex, nodey) = self._node_bottom()
            # Lines and subtrees correspond positionally.
            line = self._lines[self._subtrees.index(subtree)]
            (subtreex, subtreey) = self._subtree_top(subtree)
            self.canvas().coords(line, nodex, nodey, subtreex, subtreey)
def _maintain_order(self, child):
if self._horizontal:
return self._maintain_order_horizontal(child)
else:
return self._maintain_order_vertical(child)
    def _maintain_order_vertical(self, child):
        """
        After ``child`` moves, push any overlapping siblings (and possibly
        the node label) out of the way so the left-to-right subtree order
        is preserved.  Returns the list of widgets whose lines must be
        redrawn.
        """
        (left, top, right, bot) = child.bbox()
        if child is self._label:
            # Check all the leaves: none may overlap the label vertically.
            for subtree in self._subtrees:
                (x1, y1, x2, y2) = subtree.bbox()
                if bot+self._yspace > y1:
                    subtree.move(0,bot+self._yspace-y1)
            return self._subtrees
        else:
            moved = [child]
            index = self._subtrees.index(child)
            # Check leaves to our right.
            x = right + self._xspace
            for i in range(index+1, len(self._subtrees)):
                (x1, y1, x2, y2) = self._subtrees[i].bbox()
                if x > x1:
                    self._subtrees[i].move(x-x1, 0)
                    x += x2-x1 + self._xspace
                    moved.append(self._subtrees[i])
            # Check leaves to our left.
            x = left - self._xspace
            for i in range(index-1, -1, -1):
                (x1, y1, x2, y2) = self._subtrees[i].bbox()
                if x < x2:
                    self._subtrees[i].move(x-x2, 0)
                    x -= x2-x1 + self._xspace
                    moved.append(self._subtrees[i])
            # Check the node: if it had to move, every line needs a redraw.
            (x1, y1, x2, y2) = self._label.bbox()
            if y2 > top-self._yspace:
                self._label.move(0, top-self._yspace-y2)
                moved = self._subtrees
            # Return a list of the nodes we moved
            return moved
def _maintain_order_horizontal(self, child):
(left, top, right, bot) = child.bbox()
if child is self._label:
# Check all the leaves
for subtree in self._subtrees:
(x1, y1, x2, y2) = subtree.bbox()
if right+self._xspace > x1:
subtree.move(right+self._xspace-x1)
return self._subtrees
else:
moved = [child]
index = self._subtrees.index(child)
# Check leaves below us.
y = bot + self._yspace
for i in range(index+1, len(self._subtrees)):
(x1, y1, x2, y2) = self._subtrees[i].bbox()
if y > y1:
self._subtrees[i].move(0, y-y1)
y += y2-y1 + self._yspace
moved.append(self._subtrees[i])
# Check leaves above us
y = top - self._yspace
for i in range(index-1, -1, -1):
(x1, y1, x2, y2) = self._subtrees[i].bbox()
if y < y2:
self._subtrees[i].move(0, y-y2)
y -= y2-y1 + self._yspace
moved.append(self._subtrees[i])
# Check the node
(x1, y1, x2, y2) = self._label.bbox()
if x2 > left-self._xspace:
self._label.move(left-self._xspace-x2, 0)
moved = self._subtrees
# Return a list of the nodes we moved
return moved
    def _manage_horizontal(self):
        """
        Lay the subtrees out in a vertical column to the right of the
        node label, then shift the column so it is vertically centered
        on the label.
        """
        (nodex, nodey) = self._node_bottom()
        # Put the subtrees in a line.
        y = 20
        for subtree in self._subtrees:
            subtree_bbox = subtree.bbox()
            dx = nodex - subtree_bbox[0] + self._xspace
            dy = y - subtree_bbox[1]
            subtree.move(dx, dy)
            y += subtree_bbox[3] - subtree_bbox[1] + self._yspace
        # Find the center of their tops.
        center = 0.0
        for subtree in self._subtrees:
            center += self._subtree_top(subtree)[1]
        center /= len(self._subtrees)
        # Center the subtrees with the node.
        for subtree in self._subtrees:
            subtree.move(0, nodey-center)
    def _manage_vertical(self):
        """
        Lay the subtrees out in a horizontal row below the node label,
        then shift the row so it is horizontally centered on the label.
        """
        (nodex, nodey) = self._node_bottom()
        # Put the subtrees in a line.
        x = 0
        for subtree in self._subtrees:
            subtree_bbox = subtree.bbox()
            dy = nodey - subtree_bbox[1] + self._yspace
            dx = x - subtree_bbox[0]
            subtree.move(dx, dy)
            x += subtree_bbox[2] - subtree_bbox[0] + self._xspace
        # Find the center of their tops (averaged incrementally).
        center = 0.0
        for subtree in self._subtrees:
            center += self._subtree_top(subtree)[0]/len(self._subtrees)
        # Center the subtrees with the node.
        for subtree in self._subtrees:
            subtree.move(nodex-center, 0)
def _manage(self):
self._managing = True
(nodex, nodey) = self._node_bottom()
if len(self._subtrees) == 0: return
if self._horizontal: self._manage_horizontal()
else: self._manage_vertical()
# Update lines to subtrees.
for subtree in self._subtrees:
self._update(subtree)
self._managing = False
def __repr__(self):
return '[TreeSeg %s: %s]' % (self._label, self._subtrees)
def _tree_to_treeseg(canvas, t, make_node, make_leaf,
                     tree_attribs, node_attribs,
                     leaf_attribs, loc_attribs):
    """Recursively build a widget hierarchy mirroring the tree ``t``."""
    if not isinstance(t, Tree):
        # Leaf token: a bare leaf widget, no segment wrapper needed.
        return make_leaf(canvas, t, **leaf_attribs)
    node_widget = make_node(canvas, t.label(), **node_attribs)
    child_widgets = [_tree_to_treeseg(canvas, kid, make_node, make_leaf,
                                      tree_attribs, node_attribs,
                                      leaf_attribs, loc_attribs)
                     for kid in t]
    return TreeSegmentWidget(canvas, node_widget, child_widgets,
                             **tree_attribs)
def tree_to_treesegment(canvas, t, make_node=TextWidget,
                        make_leaf=TextWidget, **attribs):
    """
    Convert a Tree into a ``TreeSegmentWidget``.

    :param make_node: A ``CanvasWidget`` constructor or a function that
        creates ``CanvasWidgets``, used to convert the Tree's nodes into
        ``CanvasWidgets``.  Defaults to ``TextWidget``.
    :param make_leaf: A ``CanvasWidget`` constructor or a function that
        creates ``CanvasWidgets``, used to convert the Tree's leaves into
        ``CanvasWidgets``.  Defaults to ``TextWidget``.
    :param attribs: Attributes for the widgets that make up the returned
        ``TreeSegmentWidget``.  Attributes prefixed ``'tree_'``,
        ``'node_'``, ``'leaf_'``, and ``'loc_'`` are routed (with the
        prefix removed) to the tree segments, nodes, leaves, and text
        locations respectively.
    """
    # Sort the keyword attributes into per-widget-type buckets by prefix.
    tree_attribs = {}
    node_attribs = {}
    leaf_attribs = {}
    loc_attribs = {}
    buckets = (('tree_', tree_attribs), ('node_', node_attribs),
               ('leaf_', leaf_attribs), ('loc_', loc_attribs))
    for key, value in list(attribs.items()):
        for prefix, bucket in buckets:
            if key.startswith(prefix):
                bucket[key[len(prefix):]] = value
                break
        else:
            raise ValueError('Bad attribute: %s' % key)
    return _tree_to_treeseg(canvas, t, make_node, make_leaf,
                            tree_attribs, node_attribs,
                            leaf_attribs, loc_attribs)
##//////////////////////////////////////////////////////
## Tree Widget
##//////////////////////////////////////////////////////
class TreeWidget(CanvasWidget):
    """
    A canvas widget that displays a single Tree.
    ``TreeWidget`` manages a group of ``TreeSegmentWidgets`` that are
    used to display a Tree.

    Attributes:
      - ``node_attr``: Sets the attribute ``attr`` on all of the
        node widgets for this ``TreeWidget``.
      - ``leaf_attr``: Sets the attribute ``attr`` on all of the
        leaf widgets for this ``TreeWidget``.
      - ``loc_attr``: Sets the attribute ``attr`` on all of the
        location widgets for this ``TreeWidget`` (if it was built from
        a Tree).  Note that a location widget is a ``TextWidget``.
      - ``xspace``: The amount of horizontal space to leave between
        subtrees when managing this widget.  Default value is 10.
      - ``yspace``: The amount of space to place between the node and
        its children when managing this widget.  Default value is 15.
      - ``line_color``: The color of the lines connecting each expanded
        node to its subtrees.
      - ``roof_color``: The color of the outline of the triangular roof
        for collapsed trees.
      - ``roof_fill``: The fill color for the triangular roof for
        collapsed trees.
      - ``width``
      - ``orientation``: Determines whether the tree branches downwards
        or rightwards.  Possible values are ``'horizontal'`` and
        ``'vertical'``.  The default value is ``'vertical'`` (i.e.,
        branch downwards).
      - ``shapeable``: whether the subtrees can be independently
        dragged by the user.  THIS property simply sets the
        ``DRAGGABLE`` property on all of the ``TreeWidget``'s tree
        segments.
      - ``draggable``: whether the widget can be dragged by the user.
    """
    def __init__(self, canvas, t, make_node=TextWidget,
                 make_leaf=TextWidget, **attribs):
        # Node & leaf canvas widget constructors
        self._make_node = make_node
        self._make_leaf = make_leaf
        self._tree = t
        # Attributes.
        self._nodeattribs = {}
        self._leafattribs = {}
        self._locattribs = {'color': '#008000'}
        self._line_color = '#008080'
        self._line_width = 1
        self._roof_color = '#008080'
        self._roof_fill = '#c0c0c0'
        self._shapeable = False
        self._xspace = 10
        self._yspace = 10
        self._orientation = 'vertical'
        self._ordered = False
        # Build trees.  Collapsed (roofed) and expanded variants are both
        # pre-built, keyed by their path from the root; toggle_collapsed()
        # swaps between them.
        self._keys = {} # treeseg -> key
        self._expanded_trees = {}
        self._collapsed_trees = {}
        self._nodes = []
        self._leaves = []
        #self._locs = []
        self._make_collapsed_trees(canvas, t, ())
        self._treeseg = self._make_expanded_tree(canvas, t, ())
        self._add_child_widget(self._treeseg)
        CanvasWidget.__init__(self, canvas, **attribs)
    def expanded_tree(self, *path_to_tree):
        """
        Return the expanded ``TreeSegmentWidget`` for the specified subtree.

        :param path_to_tree: A list of indices i1, i2, ..., in, where
            the desired widget is the widget corresponding to
            ``tree.children()[i1].children()[i2]....children()[in]``.
            For the root, the path is ``()``.
        """
        return self._expanded_trees[path_to_tree]
    def collapsed_tree(self, *path_to_tree):
        """
        Return the collapsed ``TreeSegmentWidget`` for the specified subtree.

        :param path_to_tree: A list of indices i1, i2, ..., in, where
            the desired widget is the widget corresponding to
            ``tree.children()[i1].children()[i2]....children()[in]``.
            For the root, the path is ``()``.
        """
        return self._collapsed_trees[path_to_tree]
    def bind_click_trees(self, callback, button=1):
        """
        Add a click binding to all tree segments (both variants).
        """
        for tseg in list(self._expanded_trees.values()):
            tseg.bind_click(callback, button)
        for tseg in list(self._collapsed_trees.values()):
            tseg.bind_click(callback, button)
    def bind_drag_trees(self, callback, button=1):
        """
        Add a drag binding to all tree segments (both variants).
        """
        for tseg in list(self._expanded_trees.values()):
            tseg.bind_drag(callback, button)
        for tseg in list(self._collapsed_trees.values()):
            tseg.bind_drag(callback, button)
    def bind_click_leaves(self, callback, button=1):
        """
        Add a click binding to all leaves.
        """
        # Bug fix: the binding loop was duplicated in the original, so
        # each callback fired twice per click.
        for leaf in self._leaves: leaf.bind_click(callback, button)
    def bind_drag_leaves(self, callback, button=1):
        """
        Add a drag binding to all leaves.
        """
        # Bug fix: duplicate binding loop removed (see bind_click_leaves).
        for leaf in self._leaves: leaf.bind_drag(callback, button)
    def bind_click_nodes(self, callback, button=1):
        """
        Add a click binding to all nodes.
        """
        # Bug fix: duplicate binding loop removed (see bind_click_leaves).
        for node in self._nodes: node.bind_click(callback, button)
    def bind_drag_nodes(self, callback, button=1):
        """
        Add a drag binding to all nodes.
        """
        # Bug fix: duplicate binding loop removed (see bind_click_leaves).
        for node in self._nodes: node.bind_drag(callback, button)
    def _make_collapsed_trees(self, canvas, t, key):
        # Build (hidden) roofed segments for every subtree, recursively.
        if not isinstance(t, Tree): return
        make_node = self._make_node
        make_leaf = self._make_leaf
        node = make_node(canvas, t.label(), **self._nodeattribs)
        self._nodes.append(node)
        leaves = [make_leaf(canvas, l, **self._leafattribs)
                  for l in t.leaves()]
        self._leaves += leaves
        treeseg = TreeSegmentWidget(canvas, node, leaves, roof=1,
                                    color=self._roof_color,
                                    fill=self._roof_fill,
                                    width=self._line_width)
        self._collapsed_trees[key] = treeseg
        self._keys[treeseg] = key
        #self._add_child_widget(treeseg)
        treeseg.hide()
        # Build trees for children.
        for i in range(len(t)):
            child = t[i]
            self._make_collapsed_trees(canvas, child, key + (i,))
    def _make_expanded_tree(self, canvas, t, key):
        # Build the visible, fully-expanded segment hierarchy.
        make_node = self._make_node
        make_leaf = self._make_leaf
        if isinstance(t, Tree):
            node = make_node(canvas, t.label(), **self._nodeattribs)
            self._nodes.append(node)
            children = t
            subtrees = [self._make_expanded_tree(canvas, children[i], key+(i,))
                        for i in range(len(children))]
            treeseg = TreeSegmentWidget(canvas, node, subtrees,
                                        color=self._line_color,
                                        width=self._line_width)
            self._expanded_trees[key] = treeseg
            self._keys[treeseg] = key
            return treeseg
        else:
            leaf = make_leaf(canvas, t, **self._leafattribs)
            self._leaves.append(leaf)
            return leaf
    def __setitem__(self, attr, value):
        if attr[:5] == 'node_':
            for node in self._nodes: node[attr[5:]] = value
        elif attr[:5] == 'leaf_':
            for leaf in self._leaves: leaf[attr[5:]] = value
        elif attr == 'line_color':
            self._line_color = value
            for tseg in list(self._expanded_trees.values()): tseg['color'] = value
        elif attr == 'line_width':
            self._line_width = value
            for tseg in list(self._expanded_trees.values()): tseg['width'] = value
            for tseg in list(self._collapsed_trees.values()): tseg['width'] = value
        elif attr == 'roof_color':
            self._roof_color = value
            for tseg in list(self._collapsed_trees.values()): tseg['color'] = value
        elif attr == 'roof_fill':
            self._roof_fill = value
            for tseg in list(self._collapsed_trees.values()): tseg['fill'] = value
        elif attr == 'shapeable':
            self._shapeable = value
            for tseg in list(self._expanded_trees.values()):
                tseg['draggable'] = value
            for tseg in list(self._collapsed_trees.values()):
                tseg['draggable'] = value
            for leaf in self._leaves: leaf['draggable'] = value
        elif attr == 'xspace':
            self._xspace = value
            for tseg in list(self._expanded_trees.values()):
                tseg['xspace'] = value
            for tseg in list(self._collapsed_trees.values()):
                tseg['xspace'] = value
            self.manage()
        elif attr == 'yspace':
            self._yspace = value
            for tseg in list(self._expanded_trees.values()):
                tseg['yspace'] = value
            for tseg in list(self._collapsed_trees.values()):
                tseg['yspace'] = value
            self.manage()
        elif attr == 'orientation':
            self._orientation = value
            for tseg in list(self._expanded_trees.values()):
                tseg['orientation'] = value
            for tseg in list(self._collapsed_trees.values()):
                tseg['orientation'] = value
            self.manage()
        elif attr == 'ordered':
            self._ordered = value
            for tseg in list(self._expanded_trees.values()):
                tseg['ordered'] = value
            for tseg in list(self._collapsed_trees.values()):
                tseg['ordered'] = value
        else: CanvasWidget.__setitem__(self, attr, value)
    def __getitem__(self, attr):
        if attr[:5] == 'node_':
            return self._nodeattribs.get(attr[5:], None)
        elif attr[:5] == 'leaf_':
            return self._leafattribs.get(attr[5:], None)
        elif attr[:4] == 'loc_':
            return self._locattribs.get(attr[4:], None)
        elif attr == 'line_color': return self._line_color
        elif attr == 'line_width': return self._line_width
        elif attr == 'roof_color': return self._roof_color
        elif attr == 'roof_fill': return self._roof_fill
        elif attr == 'shapeable': return self._shapeable
        elif attr == 'xspace': return self._xspace
        elif attr == 'yspace': return self._yspace
        elif attr == 'orientation': return self._orientation
        # Consistency fix: 'ordered' was settable via __setitem__ but not
        # readable here (it fell through to the base class).
        elif attr == 'ordered': return self._ordered
        else: return CanvasWidget.__getitem__(self, attr)
    def _tags(self): return []
    def _manage(self):
        # Each segment must be visible while it lays itself out.
        segs = list(self._expanded_trees.values()) + list(self._collapsed_trees.values())
        for tseg in segs:
            if tseg.hidden():
                tseg.show()
                tseg.manage()
                tseg.hide()
    def toggle_collapsed(self, treeseg):
        """
        Collapse/expand a tree.
        """
        old_treeseg = treeseg
        if old_treeseg['roof']:
            new_treeseg = self._expanded_trees[self._keys[old_treeseg]]
        else:
            new_treeseg = self._collapsed_trees[self._keys[old_treeseg]]
        # Replace the old tree with the new tree.
        if old_treeseg.parent() is self:
            self._remove_child_widget(old_treeseg)
            self._add_child_widget(new_treeseg)
            self._treeseg = new_treeseg
        else:
            old_treeseg.parent().replace_child(old_treeseg, new_treeseg)
        # Move the new tree to where the old tree was.  Show it first,
        # so we can find its bounding box.
        new_treeseg.show()
        (newx, newy) = new_treeseg.label().bbox()[:2]
        (oldx, oldy) = old_treeseg.label().bbox()[:2]
        new_treeseg.move(oldx-newx, oldy-newy)
        # Hide the old tree
        old_treeseg.hide()
        # We could do parent.manage() here instead, if we wanted.
        new_treeseg.parent().update(new_treeseg)
##//////////////////////////////////////////////////////
## draw_trees
##//////////////////////////////////////////////////////
class TreeView(object):
    """
    A simple Tk window that displays one or more trees laid out in a
    grid, with a menubar offering printing, zooming, and exit commands.
    """
    def __init__(self, *trees):
        from math import sqrt, ceil
        self._trees = trees
        self._top = Tk()
        self._top.title('NLTK')
        self._top.bind('<Control-x>', self.destroy)
        self._top.bind('<Control-q>', self.destroy)
        cf = self._cframe = CanvasFrame(self._top)
        self._top.bind('<Control-p>', self._cframe.print_to_file)
        # Size is variable.
        self._size = IntVar(self._top)
        self._size.set(12)
        bold = ('helvetica', -self._size.get(), 'bold')
        helv = ('helvetica', -self._size.get())
        # Lay the trees out in a square.
        self._width = int(ceil(sqrt(len(trees))))
        self._widgets = []
        for i in range(len(trees)):
            widget = TreeWidget(cf.canvas(), trees[i], node_font=bold,
                                leaf_color='#008040', node_color='#004080',
                                roof_color='#004040', roof_fill='white',
                                line_color='#004040', draggable=1,
                                leaf_font=helv)
            widget.bind_click_trees(widget.toggle_collapsed)
            self._widgets.append(widget)
            cf.add_widget(widget, 0, 0)
        self._layout()
        self._cframe.pack(expand=1, fill='both')
        self._init_menubar()
    def _layout(self):
        # Place widgets left-to-right, wrapping to a new row every
        # self._width widgets.
        i = x = y = ymax = 0
        width = self._width
        for i in range(len(self._widgets)):
            widget = self._widgets[i]
            (oldx, oldy) = widget.bbox()[:2]
            if i % width == 0:
                y = ymax
                x = 0
            widget.move(x-oldx, y-oldy)
            x = widget.bbox()[2] + 10
            ymax = max(ymax, widget.bbox()[3] + 10)
    def _init_menubar(self):
        # Build the File (print/exit) and Zoom menus.
        menubar = Menu(self._top)
        filemenu = Menu(menubar, tearoff=0)
        filemenu.add_command(label='Print to Postscript', underline=0,
                             command=self._cframe.print_to_file,
                             accelerator='Ctrl-p')
        filemenu.add_command(label='Exit', underline=1,
                             command=self.destroy, accelerator='Ctrl-x')
        menubar.add_cascade(label='File', underline=0, menu=filemenu)
        zoommenu = Menu(menubar, tearoff=0)
        zoommenu.add_radiobutton(label='Tiny', variable=self._size,
                                 underline=0, value=10, command=self.resize)
        zoommenu.add_radiobutton(label='Small', variable=self._size,
                                 underline=0, value=12, command=self.resize)
        zoommenu.add_radiobutton(label='Medium', variable=self._size,
                                 underline=0, value=14, command=self.resize)
        zoommenu.add_radiobutton(label='Large', variable=self._size,
                                 underline=0, value=28, command=self.resize)
        zoommenu.add_radiobutton(label='Huge', variable=self._size,
                                 underline=0, value=50, command=self.resize)
        menubar.add_cascade(label='Zoom', underline=0, menu=zoommenu)
        self._top.config(menu=menubar)
    def resize(self, *e):
        # Rescale fonts, spacing, and line widths to the selected size.
        bold = ('helvetica', -self._size.get(), 'bold')
        helv = ('helvetica', -self._size.get())
        xspace = self._size.get()
        yspace = self._size.get()
        for widget in self._widgets:
            widget['node_font'] = bold
            widget['leaf_font'] = helv
            widget['xspace'] = xspace
            widget['yspace'] = yspace
            if self._size.get() < 20: widget['line_width'] = 1
            elif self._size.get() < 30: widget['line_width'] = 2
            else: widget['line_width'] = 3
        self._layout()
    def destroy(self, *e):
        # Idempotent: safe to call more than once.
        if self._top is None: return
        self._top.destroy()
        self._top = None
    def mainloop(self, *args, **kwargs):
        """
        Enter the Tkinter mainloop.  This function must be called if
        this demo is created from a non-interactive program (e.g.
        from a script); otherwise, the demo will close as soon as
        the script completes.
        """
        if in_idle(): return
        self._top.mainloop(*args, **kwargs)
def draw_trees(*trees):
    """
    Open a new window containing a graphical diagram of the given
    trees.

    :rtype: None
    """
    TreeView(*trees).mainloop()
##//////////////////////////////////////////////////////
## Demo Code
##//////////////////////////////////////////////////////
def demo():
    """
    Interactive demonstration of the tree-drawing widgets: builds several
    trees with different widget constructors and click/drag bindings.
    """
    import random
    def fill(cw):
        cw['fill'] = '#%06d' % random.randint(0,999999)
    cf = CanvasFrame(width=550, height=450, closeenough=2)
    t = Tree.fromstring('''
    (S (NP the very big cat)
       (VP (Adv sorta) (V saw) (NP (Det the) (N dog))))''')
    # Tree widget with default (text) nodes and leaves.
    tc = TreeWidget(cf.canvas(), t, draggable=1,
                    node_font=('helvetica', -14, 'bold'),
                    leaf_font=('helvetica', -12, 'italic'),
                    roof_fill='white', roof_color='black',
                    leaf_color='green4', node_color='blue2')
    cf.add_widget(tc,10,10)
    # Custom widget constructors: boxed nodes and oval leaves.
    def boxit(canvas, text):
        big = ('helvetica', -16, 'bold')
        return BoxWidget(canvas, TextWidget(canvas, text,
                                            font=big), fill='green')
    def ovalit(canvas, text):
        return OvalWidget(canvas, TextWidget(canvas, text),
                          fill='cyan')
    treetok = Tree.fromstring('(S (NP this tree) (VP (V is) (AdjP shapeable)))')
    tc2 = TreeWidget(cf.canvas(), treetok, boxit, ovalit, shapeable=1)
    def color(node):
        node['color'] = '#%04d00' % random.randint(0,9999)
    def color2(treeseg):
        treeseg.label()['fill'] = '#%06d' % random.randint(0,9999)
        treeseg.label().child()['color'] = 'white'
    tc.bind_click_trees(tc.toggle_collapsed)
    tc2.bind_click_trees(tc2.toggle_collapsed)
    tc.bind_click_nodes(color, 3)
    tc2.expanded_tree(1).bind_click(color2, 3)
    tc2.expanded_tree().bind_click(color2, 3)
    paren = ParenWidget(cf.canvas(), tc2)
    cf.add_widget(paren, tc.bbox()[2]+10, 10)
    tree3 = Tree.fromstring('''
    (S (NP this tree) (AUX was)
       (VP (V built) (PP (P with) (NP (N tree_to_treesegment)))))''')
    tc3 = tree_to_treesegment(cf.canvas(), tree3, tree_color='green4',
                              tree_xspace=2, tree_width=2)
    tc3['draggable'] = 1
    cf.add_widget(tc3, 10, tc.bbox()[3]+10)
    # Toggle the fourth tree's orientation (and its text labels) on click.
    def orientswitch(treewidget):
        if treewidget['orientation'] == 'horizontal':
            treewidget.expanded_tree(1,1).subtrees()[0].set_text('vertical')
            treewidget.collapsed_tree(1,1).subtrees()[0].set_text('vertical')
            treewidget.collapsed_tree(1).subtrees()[1].set_text('vertical')
            treewidget.collapsed_tree().subtrees()[3].set_text('vertical')
            treewidget['orientation'] = 'vertical'
        else:
            treewidget.expanded_tree(1,1).subtrees()[0].set_text('horizontal')
            treewidget.collapsed_tree(1,1).subtrees()[0].set_text('horizontal')
            treewidget.collapsed_tree(1).subtrees()[1].set_text('horizontal')
            treewidget.collapsed_tree().subtrees()[3].set_text('horizontal')
            treewidget['orientation'] = 'horizontal'
    text = """
Try clicking, right clicking, and dragging
different elements of each of the trees.
The top-left tree is a TreeWidget built from
a Tree.  The top-right is a TreeWidget built
from a Tree, using non-default widget
constructors for the nodes & leaves (BoxWidget
and OvalWidget).  The bottom-left tree is
built from tree_to_treesegment."""
    twidget = TextWidget(cf.canvas(), text.strip())
    textbox = BoxWidget(cf.canvas(), twidget, fill='white', draggable=1)
    cf.add_widget(textbox, tc3.bbox()[2]+10, tc2.bbox()[3]+10)
    tree4 = Tree.fromstring('(S (NP this tree) (VP (V is) (Adj horizontal)))')
    tc4 = TreeWidget(cf.canvas(), tree4, draggable=1,
                     line_color='brown2', roof_color='brown2',
                     node_font=('helvetica', -12, 'bold'),
                     node_color='brown4', orientation='horizontal')
    tc4.manage()
    cf.add_widget(tc4, tc3.bbox()[2]+10, textbox.bbox()[3]+10)
    tc4.bind_click(orientswitch)
    tc4.bind_click_trees(tc4.toggle_collapsed, 3)
    # Run mainloop
    cf.mainloop()
# Run the interactive demonstration when executed as a script.
if __name__ == '__main__':
    demo()
|
|
import os
import pickle
import sys
from itertools import chain

import numpy as np
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
from sklearn.ensemble import AdaBoostClassifier, GradientBoostingClassifier, RandomForestClassifier
from sklearn.linear_model import RidgeClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.naive_bayes import GaussianNB, MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neural_network import MLPClassifier
# Fixed: ``sklearn.preprocessing.data`` is a private module that was removed
# in scikit-learn 0.24; the supported public path is ``sklearn.preprocessing``.
from sklearn.preprocessing import MinMaxScaler
from sklearn.svm import SVC, LinearSVC
from tqdm import tqdm

from feature_engineering import refuting_features, polarity_features, hand_features, word_overlap_features
from feature_engineering import clean, get_tokenized_lemmas, gen_or_load_feats, has_feature
from utils.dataset import DataSet
from utils.generate_test_splits import kfold_split, get_stances_for_folds
from utils.score import LABELS, score_submission, report_score
from utils.system import parse_params, check_version
ORIGINAL_LABELS = ['agree', 'disagree', 'discuss', 'unrelated']
def gen_or_load_texts(text_file, gen_fn):
    """
    Return texts cached in ``text_file`` if it exists; otherwise generate
    them with ``gen_fn``, pickle them to ``text_file``, and return them.
    """
    if not os.path.isfile(text_file):
        texts = gen_fn()
        with open(text_file, 'wb') as handle:
            pickle.dump(texts, handle)
        return texts
    with open(text_file, 'rb') as handle:
        return pickle.load(handle)
def transform_labels_for_binary_relevance(labels):
    """
    Collapse 4-class label indices (into ORIGINAL_LABELS) to binary:
    1 for 'unrelated', 0 for everything else (the related classes).
    """
    return [1 if ORIGINAL_LABELS[index] == 'unrelated' else 0
            for index in labels]
def transform_features_for_binary_relevance(X_overlap, X_hand, X_refuting, firstStage):
    """
    Select the feature columns used by each classification stage:
    hand-crafted + overlap features for stage 1 (related/unrelated),
    refuting + overlap features for stage 2 (agree/disagree/discuss).
    """
    if firstStage:
        return np.c_[X_hand, X_overlap]
    return np.c_[X_refuting, X_overlap]
def scaleMinMax(train_xs, test_xs):
    """
    Fit a MinMaxScaler on the concatenation of train and test features
    and return the SCALED (train, test) arrays.

    Bug fix: the original returned the unscaled inputs (``train_xs,
    test_xs``) instead of the transformed arrays, silently making the
    scaling a no-op for every caller.

    NOTE(review): fitting on train+test leaks test statistics into the
    scaler; kept as the author intended, but fitting on train only would
    be the standard approach.
    """
    all_xs = list(train_xs) + list(test_xs)
    scaler = MinMaxScaler()
    scaler.fit(all_xs)
    return scaler.transform(train_xs), scaler.transform(test_xs)
def generate_features(stances, all_articles, name):
    """
    Build (or load from the ``features/`` cache) the word-overlap,
    hand-crafted, and refuting feature matrices for ``stances``.

    :param stances: list of dicts with 'Stance', 'Headline', 'Body ID'
    :param all_articles: mapping from body ID to article body text
    :param name: cache-file suffix identifying this data split
    :return: (X_overlap, X_hand, X_refuting, y), where y is the list of
        label indices into LABELS
    """
    overlap_file = "features/overlap."+name+".npy"
    refuting_file = "features/refuting."+name+".npy"
    hand_file = "features/hand."+name+".npy"
    h, b, y = [],[],[]
    for stance in stances:
        y.append(LABELS.index(stance['Stance']))
        h.append(stance['Headline'])
        b.append(all_articles[stance['Body ID']])
    clean_headlines = []
    clean_bodies = []
    clean_lemmatized_headlines = []
    clean_lemmatized_bodies = []
    # Some feature is missing: only then pay for cleaning/lemmatization,
    # since gen_or_load_feats ignores these inputs when the cache is warm.
    if not (has_feature(overlap_file) and has_feature(refuting_file) and has_feature(hand_file)):
        print("Cleaning texts")
        clean_headlines = gen_or_load_texts("features/clean_heads."+name+".p",
            lambda: list(map(clean, tqdm(h))))
        clean_bodies = gen_or_load_texts("features/clean_bodies."+name+".p",
            lambda: list(map(clean, tqdm(b))))
        print("Lemmatizing texts")
        clean_lemmatized_headlines = gen_or_load_texts("features/lemmas_heads."+name+".p",
            lambda: list(map(get_tokenized_lemmas, tqdm(clean_headlines))))
        clean_lemmatized_bodies = gen_or_load_texts("features/lemmas_bodies."+name+".p",
            lambda: list(map(get_tokenized_lemmas, tqdm(clean_bodies))))
    print("Computing 3 features")
    X_overlap = gen_or_load_feats(word_overlap_features,
        clean_lemmatized_headlines, clean_lemmatized_bodies, overlap_file)
    X_refuting = gen_or_load_feats(refuting_features,
        clean_lemmatized_headlines, clean_lemmatized_bodies, refuting_file)
    X_hand = gen_or_load_feats(hand_features, clean_headlines, clean_bodies, hand_file)
    return X_overlap,X_hand,X_refuting,y
def evaluate_double_stage_on_test():
    """
    Train and score a two-stage pipeline on the competition test set:
    stage 1 classifies related vs. unrelated; stage 2 re-classifies the
    stage-1 'related' predictions into agree/disagree/discuss.
    """
    print("Evaluating 2-stage classifier on TEST dataset")
    # Load TRAIN data and all features on it
    train_dataset = DataSet()
    print("Computing features for TRAIN data")
    train_overlap, train_hand, train_refuting, train_all_ys = generate_features(
        train_dataset.stances, train_dataset.articles, 'train_dataset')
    train_stage1_ys = transform_labels_for_binary_relevance(train_all_ys)
    train_stage1_xs = transform_features_for_binary_relevance(train_overlap, train_hand, train_refuting, True)
    train_stage2_xs = transform_features_for_binary_relevance(train_overlap, train_hand, train_refuting, False)
    test_dataset = DataSet("competition_test")
    # Load TEST data and all features on it
    print("Computing features for TEST data")
    test_overlap, test_hand, test_refuting, test_all_ys = generate_features(
        test_dataset.stances, test_dataset.articles, 'test_dataset')
    test_stage1_ys = transform_labels_for_binary_relevance(test_all_ys)
    test_stage1_xs = transform_features_for_binary_relevance(test_overlap, test_hand, test_refuting, True)
    test_stage2_xs = transform_features_for_binary_relevance(test_overlap, test_hand, test_refuting, False)
    train_stage1_xs, test_stage1_xs = scaleMinMax(train_stage1_xs, test_stage1_xs)
    train_stage2_xs, test_stage2_xs = scaleMinMax(train_stage2_xs, test_stage2_xs)
    print("Training stage 1")
    classifier_stage1 = MLPClassifier(random_state=14128, hidden_layer_sizes=(60, 30))
    classifier_stage1.fit(train_stage1_xs, train_stage1_ys)
    # Stage 2 is trained only on the examples whose TRUE stage-1 label
    # is 'related' (y == 0).
    train_xs_related = []
    train_ys_related = []
    for i, y in enumerate(train_stage1_ys):
        if y == 0: # related
            train_xs_related.append(train_stage2_xs[i])
            train_ys_related.append(train_all_ys[i])
    # NOTE(review): (200) is an int, not a 1-tuple; sklearn accepts a
    # scalar here, but (200,) would be clearer.
    classifier_stage2 = MLPClassifier(random_state=14128, hidden_layer_sizes=(200))
    print("Training stage 2")
    classifier_stage2.fit(train_xs_related, train_ys_related)
    print("Classifying TEST data stage 1")
    labels = ['related', 'unrelated']
    relatedLabels = []
    predicted = [labels[int(a)] for a in classifier_stage1.predict(test_stage1_xs)]
    actual_first_stage = [labels[int(a)] for a in test_stage1_ys]
    print("First stage results")
    report_score(actual_first_stage, predicted, labels, relatedLabels)
    print("Classifying TEST data stage 2")
    labels = ORIGINAL_LABELS
    relatedLabels = labels[0:3]
    # Replace each stage-1 'related' prediction with the stage-2 class.
    for i, label in enumerate(predicted):
        if label == 'related':
            new_pred = classifier_stage2.predict([test_stage2_xs[i]])[0]
            predicted[i] = labels[int(new_pred)]
    actual = [labels[int(a)] for a in test_all_ys]
    print("Both stages results")
    report_score(actual, predicted, labels, relatedLabels)
def evaluate_single_stage_on_test():
    """
    Train a single 4-class MLP on all features from the training set and
    score it on the competition test set.
    """
    print("Evaluating 1-stage classifier on TEST dataset")
    # Load TRAIN data and all features on it
    train_dataset = DataSet()
    print("Computing features for TRAIN data")
    train_overlap, train_hand, train_refuting, train_ys = generate_features(
        train_dataset.stances, train_dataset.articles, 'train_dataset')
    train_xs = np.c_[train_overlap, train_hand, train_refuting]
    test_dataset = DataSet("competition_test")
    # Load TEST data and all features on it
    print("Computing features for TEST data")
    test_overlap, test_hand, test_refuting, test_ys = generate_features(
        test_dataset.stances, test_dataset.articles, 'test_dataset')
    test_xs = np.c_[test_overlap, test_hand, test_refuting]
    train_xs, test_xs = scaleMinMax(train_xs, test_xs)
    classifier = MLPClassifier(random_state=14128)
    print("Training")
    classifier.fit(train_xs, train_ys)
    print("Classifying")
    labels = ORIGINAL_LABELS
    predicted = [labels[int(a)] for a in classifier.predict(test_xs)]
    actual = [labels[int(a)] for a in test_ys]
    relatedLabels = labels[0:3]
    report_score(actual, predicted, labels, relatedLabels)
def evaluate_single_stage_on_train():
    """Evaluate the single-stage classifier on TRAIN via 10-fold CV.

    Features are precomputed per fold, min-max scaled with a scaler fit on
    all folds, then each fold is held out in turn and scored with a
    RandomForest trained on the remaining folds. Prints the per-fold and
    average normalized scores.
    """
    labels = ORIGINAL_LABELS
    relatedLabels = labels[0:3]
    print("Evaluating 1-stage classifier on TRAIN dataset with 10-fold cross-validation")
    train_dataset = DataSet()
    folds = kfold_split(train_dataset, n_folds=10)
    fold_stances = get_stances_for_folds(train_dataset, folds)
    Xs = dict()
    ys = dict()
    # Load/Precompute all features now
    for fold in fold_stances:
        print("Computing features for fold "+ str(fold))
        fold_overlap, fold_hand, fold_refuting, ys[fold] = generate_features(
            fold_stances[fold], train_dataset.articles, str(fold))
        Xs[fold] = np.c_[fold_overlap, fold_hand, fold_refuting]
    # Fit the scaler on the rows of every fold, then transform each fold.
    all_data = [item for fold in list(Xs.values()) for item in fold]
    scaler = MinMaxScaler()
    scaler.fit(all_data)
    for fold in fold_stances:
        Xs[fold] = scaler.transform(Xs[fold])
    print("Classifying folds")
    average_score = 0
    total_count = 0  # NOTE(review): never read below — candidate for removal
    # Classifier for each fold
    for fold in fold_stances:
        # Train on every fold except the held-out one.
        ids = list(range(len(folds)))
        del ids[fold]
        X_train = np.vstack(tuple([Xs[i] for i in ids]))
        y_train = np.hstack(tuple([ys[i] for i in ids]))
        X_test = Xs[fold]
        y_test = ys[fold]
        clf = RandomForestClassifier(n_estimators=200, random_state=14128)
        clf.fit(X_train, y_train)
        # NOTE(review): indices are mapped through LABELS here, but `labels`
        # (= ORIGINAL_LABELS) is what gets passed to score_submission below.
        # Confirm the two label lists agree; otherwise the scoring runs on
        # mismatched label names.
        predicted = [LABELS[int(a)] for a in clf.predict(X_test)]
        actual = [LABELS[int(a)] for a in y_test]
        # Normalize each fold's score by the best achievable score on it.
        fold_score, _ = score_submission(actual, predicted, labels, relatedLabels)
        max_fold_score, _ = score_submission(actual, actual, labels, relatedLabels)
        score = fold_score / max_fold_score
        average_score += score
        print("Score for fold "+ str(fold) + " = " + str(score))
    average_score /= len(fold_stances)
    print("Average score " + str(average_score))
if __name__ == "__main__":
    # Verify the runtime version and read CLI options before evaluating.
    check_version()
    parse_params()
    # Alternate entry points — enable exactly one of the three:
    #evaluate_double_stage_on_test()
    #evaluate_single_stage_on_test()
    evaluate_single_stage_on_train()
|
|
"""Unit tests illustrating usage of the ``history_meta.py``
module functions."""
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
from .history_meta import Versioned, versioned_session
from sqlalchemy import create_engine, Column, Integer, String, \
ForeignKey, Boolean, select
from sqlalchemy.orm import clear_mappers, Session, deferred, relationship, \
column_property
from sqlalchemy.testing import AssertsCompiledSQL, eq_, assert_raises
from sqlalchemy.testing.entities import ComparableEntity
from sqlalchemy.orm import exc as orm_exc
import warnings
# Promote all warnings to errors so deprecation noise fails the test run.
warnings.simplefilter("error")
# Module-wide engine; populated once by setup_module() below.
engine = None
def setup_module():
    """Create the shared in-memory SQLite engine (pytest module-level fixture)."""
    global engine
    engine = create_engine('sqlite://', echo=True)
class TestVersioning(TestCase, AssertsCompiledSQL):
    """Tests for the ``history_meta`` versioning extension.

    Each test declares its mapped classes against a fresh declarative base,
    creates the schema, mutates objects through a versioned session, and
    inspects the generated ``__history_mapper__`` class for the expected
    archived rows and ``version`` counters.
    """
    __dialect__ = 'default'
    def setUp(self):
        # Fresh session and declarative base per test; hook the versioning
        # listeners into the session.
        self.session = Session(engine)
        self.Base = declarative_base()
        versioned_session(self.session)
    def tearDown(self):
        self.session.close()
        clear_mappers()
        self.Base.metadata.drop_all(engine)
    def create_tables(self):
        # Helper: emit CREATE TABLE for everything declared so far.
        self.Base.metadata.create_all(engine)
    def test_plain(self):
        """Basic flow: each flushed change archives the prior row state."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
        self.create_tables()
        sess = self.session
        sc = SomeClass(name='sc1')
        sess.add(sc)
        sess.commit()
        sc.name = 'sc1modified'
        sess.commit()
        assert sc.version == 2
        SomeClassHistory = SomeClass.__history_mapper__.class_
        eq_(
            sess.query(SomeClassHistory).filter(
                SomeClassHistory.version == 1).all(),
            [SomeClassHistory(version=1, name='sc1')]
        )
        # The query below autoflushes this pending change (version -> 3).
        sc.name = 'sc1modified2'
        eq_(
            sess.query(SomeClassHistory).order_by(
                SomeClassHistory.version).all(),
            [
                SomeClassHistory(version=1, name='sc1'),
                SomeClassHistory(version=2, name='sc1modified')
            ]
        )
        assert sc.version == 3
        sess.commit()
        # Set-and-revert within one transaction nets out to "no change":
        # no new history row and no version bump.
        sc.name = 'temp'
        sc.name = 'sc1modified2'
        sess.commit()
        eq_(
            sess.query(SomeClassHistory).order_by(
                SomeClassHistory.version).all(),
            [
                SomeClassHistory(version=1, name='sc1'),
                SomeClassHistory(version=2, name='sc1modified')
            ]
        )
        # Deleting the object archives its final state.
        sess.delete(sc)
        sess.commit()
        eq_(
            sess.query(SomeClassHistory).order_by(
                SomeClassHistory.version).all(),
            [
                SomeClassHistory(version=1, name='sc1'),
                SomeClassHistory(version=2, name='sc1modified'),
                SomeClassHistory(version=3, name='sc1modified2')
            ]
        )
    def test_w_mapper_versioning(self):
        """history_meta versioning combined with the mapper's own
        ``version_id_col`` optimistic-concurrency check."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
        SomeClass.__mapper__.version_id_col = SomeClass.__table__.c.version
        self.create_tables()
        sess = self.session
        sc = SomeClass(name='sc1')
        sess.add(sc)
        sess.commit()
        s2 = Session(sess.bind)
        sc2 = s2.query(SomeClass).first()
        sc2.name = 'sc1modified'
        sc.name = 'sc1modified_again'
        sess.commit()
        eq_(sc.version, 2)
        # The second session now holds a stale version id and must fail.
        assert_raises(
            orm_exc.StaleDataError,
            s2.flush
        )
    def test_from_null(self):
        """A NULL-to-value change still produces a version bump."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
        self.create_tables()
        sess = self.session
        sc = SomeClass()
        sess.add(sc)
        sess.commit()
        sc.name = 'sc1'
        sess.commit()
        assert sc.version == 2
    def test_insert_null(self):
        """NULL values are archived faithfully in the history table."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            boole = Column(Boolean, default=False)
        self.create_tables()
        sess = self.session
        sc = SomeClass(boole=True)
        sess.add(sc)
        sess.commit()
        sc.boole = None
        sess.commit()
        sc.boole = False
        sess.commit()
        SomeClassHistory = SomeClass.__history_mapper__.class_
        eq_(
            sess.query(SomeClassHistory.boole).order_by(
                SomeClassHistory.id).all(),
            [(True, ), (None, )]
        )
        eq_(sc.version, 3)
    def test_deferred(self):
        """test versioning of unloaded, deferred columns."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            data = deferred(Column(String(25)))
        self.create_tables()
        sess = self.session
        sc = SomeClass(name='sc1', data='somedata')
        sess.add(sc)
        sess.commit()
        sess.close()
        sc = sess.query(SomeClass).first()
        # The deferred column is not yet loaded on the instance ...
        assert 'data' not in sc.__dict__
        sc.name = 'sc1modified'
        sess.commit()
        assert sc.version == 2
        SomeClassHistory = SomeClass.__history_mapper__.class_
        # ... yet its value is still captured in the history row.
        eq_(
            sess.query(SomeClassHistory).filter(
                SomeClassHistory.version == 1).all(),
            [SomeClassHistory(version=1, name='sc1', data='somedata')]
        )
    def test_joined_inheritance(self):
        """Versioning across joined-table inheritance, with subclasses using
        both a separate PK column and a shared (FK) PK."""
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'basetable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(20))
            __mapper_args__ = {
                'polymorphic_on': type,
                'polymorphic_identity': 'base'}
        class SubClassSeparatePk(BaseClass):
            __tablename__ = 'subtable1'
            id = column_property(
                Column(Integer, primary_key=True),
                BaseClass.id
            )
            base_id = Column(Integer, ForeignKey('basetable.id'))
            subdata1 = Column(String(50))
            __mapper_args__ = {'polymorphic_identity': 'sep'}
        class SubClassSamePk(BaseClass):
            __tablename__ = 'subtable2'
            id = Column(
                Integer, ForeignKey('basetable.id'), primary_key=True)
            subdata2 = Column(String(50))
            __mapper_args__ = {'polymorphic_identity': 'same'}
        self.create_tables()
        sess = self.session
        sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
        base1 = BaseClass(name='base1')
        same1 = SubClassSamePk(name='same1', subdata2='same1subdata')
        sess.add_all([sep1, base1, same1])
        sess.commit()
        base1.name = 'base1mod'
        same1.subdata2 = 'same1subdatamod'
        sep1.name = 'sep1mod'
        sess.commit()
        BaseClassHistory = BaseClass.__history_mapper__.class_
        SubClassSeparatePkHistory = \
            SubClassSeparatePk.__history_mapper__.class_
        SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_
        # History is polymorphic: querying the base history class returns
        # subclass history instances where appropriate.
        eq_(
            sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
            [
                SubClassSeparatePkHistory(
                    id=1, name='sep1', type='sep', version=1),
                BaseClassHistory(id=2, name='base1', type='base', version=1),
                SubClassSamePkHistory(
                    id=3, name='same1', type='same', version=1)
            ]
        )
        # Subsequent changes autoflush via the queries below.
        same1.subdata2 = 'same1subdatamod2'
        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [
                SubClassSeparatePkHistory(
                    id=1, name='sep1', type='sep', version=1),
                BaseClassHistory(id=2, name='base1', type='base', version=1),
                SubClassSamePkHistory(
                    id=3, name='same1', type='same', version=1),
                SubClassSamePkHistory(
                    id=3, name='same1', type='same', version=2)
            ]
        )
        base1.name = 'base1mod2'
        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [
                SubClassSeparatePkHistory(
                    id=1, name='sep1', type='sep', version=1),
                BaseClassHistory(id=2, name='base1', type='base', version=1),
                BaseClassHistory(
                    id=2, name='base1mod', type='base', version=2),
                SubClassSamePkHistory(
                    id=3, name='same1', type='same', version=1),
                SubClassSamePkHistory(
                    id=3, name='same1', type='same', version=2)
            ]
        )
    def test_joined_inheritance_multilevel(self):
        """Three-level joined inheritance: the history tables join on both
        id and version at every level."""
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'basetable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(20))
            __mapper_args__ = {
                'polymorphic_on': type,
                'polymorphic_identity': 'base'}
        class SubClass(BaseClass):
            __tablename__ = 'subtable'
            id = column_property(
                Column(Integer, primary_key=True),
                BaseClass.id
            )
            base_id = Column(Integer, ForeignKey('basetable.id'))
            subdata1 = Column(String(50))
            __mapper_args__ = {'polymorphic_identity': 'sub'}
        class SubSubClass(SubClass):
            __tablename__ = 'subsubtable'
            id = Column(Integer, ForeignKey('subtable.id'), primary_key=True)
            subdata2 = Column(String(50))
            __mapper_args__ = {'polymorphic_identity': 'subsub'}
        self.create_tables()
        SubSubHistory = SubSubClass.__history_mapper__.class_
        sess = self.session
        q = sess.query(SubSubHistory)
        self.assert_compile(
            q,
            "SELECT "
            "subsubtable_history.id AS subsubtable_history_id, "
            "subtable_history.id AS subtable_history_id, "
            "basetable_history.id AS basetable_history_id, "
            "subsubtable_history.changed AS subsubtable_history_changed, "
            "subtable_history.changed AS subtable_history_changed, "
            "basetable_history.changed AS basetable_history_changed, "
            "basetable_history.name AS basetable_history_name, "
            "basetable_history.type AS basetable_history_type, "
            "subsubtable_history.version AS subsubtable_history_version, "
            "subtable_history.version AS subtable_history_version, "
            "basetable_history.version AS basetable_history_version, "
            "subtable_history.base_id AS subtable_history_base_id, "
            "subtable_history.subdata1 AS subtable_history_subdata1, "
            "subsubtable_history.subdata2 AS subsubtable_history_subdata2 "
            "FROM basetable_history "
            "JOIN subtable_history "
            "ON basetable_history.id = subtable_history.base_id "
            "AND basetable_history.version = subtable_history.version "
            "JOIN subsubtable_history ON subtable_history.id = "
            "subsubtable_history.id AND subtable_history.version = "
            "subsubtable_history.version"
        )
        ssc = SubSubClass(name='ss1', subdata1='sd1', subdata2='sd2')
        sess.add(ssc)
        sess.commit()
        # A freshly inserted row has no history yet.
        eq_(
            sess.query(SubSubHistory).all(),
            []
        )
        ssc.subdata1 = 'sd11'
        ssc.subdata2 = 'sd22'
        sess.commit()
        eq_(
            sess.query(SubSubHistory).all(),
            [SubSubHistory(name='ss1', subdata1='sd1',
                           subdata2='sd2', type='subsub', version=1)]
        )
        eq_(ssc, SubSubClass(
            name='ss1', subdata1='sd11',
            subdata2='sd22', version=2))
    def test_joined_inheritance_changed(self):
        """Base and sub history rows written in one flush share the same
        ``changed`` timestamp."""
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'basetable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(20))
            __mapper_args__ = {
                'polymorphic_on': type,
                'polymorphic_identity': 'base'
            }
        class SubClass(BaseClass):
            __tablename__ = 'subtable'
            id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
            __mapper_args__ = {'polymorphic_identity': 'sep'}
        self.create_tables()
        BaseClassHistory = BaseClass.__history_mapper__.class_
        SubClassHistory = SubClass.__history_mapper__.class_
        sess = self.session
        s1 = SubClass(name='s1')
        sess.add(s1)
        sess.commit()
        s1.name = 's2'
        sess.commit()
        actual_changed_base = sess.scalar(
            select([BaseClass.__history_mapper__.local_table.c.changed]))
        actual_changed_sub = sess.scalar(
            select([SubClass.__history_mapper__.local_table.c.changed]))
        h1 = sess.query(BaseClassHistory).first()
        eq_(h1.changed, actual_changed_base)
        eq_(h1.changed, actual_changed_sub)
        h1 = sess.query(SubClassHistory).first()
        eq_(h1.changed, actual_changed_base)
        eq_(h1.changed, actual_changed_sub)
    def test_single_inheritance(self):
        """Single-table inheritance: base and subclass share one history
        table; subclass UNIQUE constraints must not break archiving."""
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'basetable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(50))
            __mapper_args__ = {
                'polymorphic_on': type,
                'polymorphic_identity': 'base'}
        class SubClass(BaseClass):
            subname = Column(String(50), unique=True)
            __mapper_args__ = {'polymorphic_identity': 'sub'}
        self.create_tables()
        sess = self.session
        b1 = BaseClass(name='b1')
        sc = SubClass(name='s1', subname='sc1')
        sess.add_all([b1, sc])
        sess.commit()
        b1.name = 'b1modified'
        BaseClassHistory = BaseClass.__history_mapper__.class_
        SubClassHistory = SubClass.__history_mapper__.class_
        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [BaseClassHistory(id=1, name='b1', type='base', version=1)]
        )
        sc.name = 's1modified'
        b1.name = 'b1modified2'
        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [
                BaseClassHistory(id=1, name='b1', type='base', version=1),
                BaseClassHistory(
                    id=1, name='b1modified', type='base', version=2),
                SubClassHistory(id=2, name='s1', type='sub', version=1)
            ]
        )
        # test the unique constraint on the subclass
        # column
        sc.name = "modifyagain"
        sess.flush()
    def test_unique(self):
        """A UNIQUE column on the versioned table doesn't block archiving
        successive values of other columns."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50), unique=True)
            data = Column(String(50))
        self.create_tables()
        sess = self.session
        sc = SomeClass(name='sc1', data='sc1')
        sess.add(sc)
        sess.commit()
        sc.data = 'sc1modified'
        sess.commit()
        assert sc.version == 2
        sc.data = 'sc1modified2'
        sess.commit()
        assert sc.version == 3
    def test_relationship(self):
        """Changing an FK-backed relationship bumps the version and archives
        the old foreign-key value."""
        class SomeRelated(self.Base, ComparableEntity):
            __tablename__ = 'somerelated'
            id = Column(Integer, primary_key=True)
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            related_id = Column(Integer, ForeignKey('somerelated.id'))
            related = relationship("SomeRelated", backref='classes')
        SomeClassHistory = SomeClass.__history_mapper__.class_
        self.create_tables()
        sess = self.session
        sc = SomeClass(name='sc1')
        sess.add(sc)
        sess.commit()
        assert sc.version == 1
        sr1 = SomeRelated()
        sc.related = sr1
        sess.commit()
        assert sc.version == 2
        eq_(
            sess.query(SomeClassHistory).filter(
                SomeClassHistory.version == 1).all(),
            [SomeClassHistory(version=1, name='sc1', related_id=None)]
        )
        # Autoflushed by the query below (version -> 3).
        sc.related = None
        eq_(
            sess.query(SomeClassHistory).order_by(
                SomeClassHistory.version).all(),
            [
                SomeClassHistory(version=1, name='sc1', related_id=None),
                SomeClassHistory(version=2, name='sc1', related_id=sr1.id)
            ]
        )
        assert sc.version == 3
    def test_backref_relationship(self):
        """Mutations on the unversioned side of a backref leave the
        versioned object's version untouched."""
        class SomeRelated(self.Base, ComparableEntity):
            __tablename__ = 'somerelated'
            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            related_id = Column(Integer, ForeignKey('sometable.id'))
            related = relationship("SomeClass", backref='related')
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
        self.create_tables()
        sess = self.session
        sc = SomeClass()
        sess.add(sc)
        sess.commit()
        assert sc.version == 1
        sr = SomeRelated(name='sr', related=sc)
        sess.add(sr)
        sess.commit()
        assert sc.version == 1
        sr.name = 'sr2'
        sess.commit()
        assert sc.version == 1
        sess.delete(sr)
        sess.commit()
        assert sc.version == 1
    def test_create_double_flush(self):
        """An object modified between two flushes of one transaction is
        versioned across both flushes."""
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = 'sometable'
            id = Column(Integer, primary_key=True)
            name = Column(String(30))
            other = Column(String(30))
        self.create_tables()
        sc = SomeClass()
        self.session.add(sc)
        self.session.flush()
        sc.name = 'Foo'
        self.session.flush()
        assert sc.version == 2
    def test_mutate_plain_column(self):
        """History is recorded for an ordinary (same-named) column."""
        class Document(self.Base, Versioned):
            __tablename__ = 'document'
            id = Column(Integer, primary_key=True, autoincrement=True)
            name = Column(String, nullable=True)
            description_ = Column('description', String, nullable=True)
        self.create_tables()
        document = Document()
        self.session.add(document)
        document.name = 'Foo'
        self.session.commit()
        document.name = 'Bar'
        self.session.commit()
        DocumentHistory = Document.__history_mapper__.class_
        v2 = self.session.query(Document).one()
        v1 = self.session.query(DocumentHistory).one()
        self.assertEqual(v1.id, v2.id)
        self.assertEqual(v2.name, 'Bar')
        self.assertEqual(v1.name, 'Foo')
    def test_mutate_named_column(self):
        """History is recorded for an attribute whose Python name differs
        from its database column name."""
        class Document(self.Base, Versioned):
            __tablename__ = 'document'
            id = Column(Integer, primary_key=True, autoincrement=True)
            name = Column(String, nullable=True)
            description_ = Column('description', String, nullable=True)
        self.create_tables()
        document = Document()
        self.session.add(document)
        document.description_ = 'Foo'
        self.session.commit()
        document.description_ = 'Bar'
        self.session.commit()
        DocumentHistory = Document.__history_mapper__.class_
        v2 = self.session.query(Document).one()
        v1 = self.session.query(DocumentHistory).one()
        self.assertEqual(v1.id, v2.id)
        self.assertEqual(v2.description_, 'Bar')
        self.assertEqual(v1.description_, 'Foo')
|
|
#!/usr/bin/env python
# Copyright 2013 Google, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import itertools
import unittest

import botocore.exceptions
import botocore.session
class TestAddresses(unittest.TestCase):
    """Parameter-building tests for the GCE ``compute.addresses`` operations.

    The original enumerated every proper subset of required kwargs by hand
    for each ``*_missing_params`` test; that duplication is folded into
    :meth:`_assert_missing_combos_raise`, which generates the identical
    subsets with ``itertools.combinations``.
    """
    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')
    def _assert_missing_combos_raise(self, op, **required):
        """Assert every proper subset of *required* kwargs raises
        MissingParametersError from ``op.build_parameters``."""
        names = sorted(required)
        for size in range(len(names)):
            for combo in itertools.combinations(names, size):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters,
                    **{name: required[name] for name in combo})
    def test_addresses_aggregatedList(self):
        op = self.gce.get_operation('compute.addresses.aggregatedList')
        params = op.build_parameters(project='project')
        self.assertIn('project', params['uri_params'])
    def test_addresses_aggregatedList_missing_params(self):
        op = self.gce.get_operation('compute.addresses.aggregatedList')
        self._assert_missing_combos_raise(op, project='project')
    def test_addresses_delete(self):
        op = self.gce.get_operation('compute.addresses.delete')
        params = op.build_parameters(project='project',
                                     region='region',
                                     address='address')
        for name in ('project', 'region', 'address'):
            self.assertIn(name, params['uri_params'])
    def test_addresses_delete_missing_params(self):
        op = self.gce.get_operation('compute.addresses.delete')
        self._assert_missing_combos_raise(
            op, project='project', region='region', address='address')
    def test_addresses_get(self):
        op = self.gce.get_operation('compute.addresses.get')
        params = op.build_parameters(project='project',
                                     region='region',
                                     address='address')
        for name in ('project', 'region', 'address'):
            self.assertIn(name, params['uri_params'])
    def test_addresses_get_missing_params(self):
        op = self.gce.get_operation('compute.addresses.get')
        self._assert_missing_combos_raise(
            op, project='project', region='region', address='address')
    def test_addresses_insert(self):
        op = self.gce.get_operation('compute.addresses.insert')
        params = op.build_parameters(body='body',
                                     project='project',
                                     region='region')
        # ``body`` becomes the request payload, not a URI parameter.
        self.assertTrue(params['payload'])
        for name in ('project', 'region'):
            self.assertIn(name, params['uri_params'])
    def test_addresses_insert_missing_params(self):
        op = self.gce.get_operation('compute.addresses.insert')
        self._assert_missing_combos_raise(
            op, body='body', project='project', region='region')
    def test_addresses_list(self):
        op = self.gce.get_operation('compute.addresses.list')
        params = op.build_parameters(region='region',
                                     project='project')
        for name in ('region', 'project'):
            self.assertIn(name, params['uri_params'])
    def test_addresses_list_missing_params(self):
        op = self.gce.get_operation('compute.addresses.list')
        self._assert_missing_combos_raise(op, region='region', project='project')
class TestDisks(unittest.TestCase):
    """Parameter-building tests for the GCE ``compute.disks`` operations.

    The hand-written enumeration of every proper subset of required kwargs
    (15 cases for ``createSnapshot`` alone) is replaced by
    :meth:`_assert_missing_combos_raise`, which produces the identical
    subsets with ``itertools.combinations``.
    """
    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')
    def _assert_missing_combos_raise(self, op, **required):
        """Assert every proper subset of *required* kwargs raises
        MissingParametersError from ``op.build_parameters``."""
        names = sorted(required)
        for size in range(len(names)):
            for combo in itertools.combinations(names, size):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters,
                    **{name: required[name] for name in combo})
    def test_disks_aggregatedList(self):
        op = self.gce.get_operation('compute.disks.aggregatedList')
        params = op.build_parameters(project='project')
        self.assertIn('project', params['uri_params'])
    def test_disks_aggregatedList_missing_params(self):
        op = self.gce.get_operation('compute.disks.aggregatedList')
        self._assert_missing_combos_raise(op, project='project')
    def test_disks_createSnapshot(self):
        op = self.gce.get_operation('compute.disks.createSnapshot')
        params = op.build_parameters(body='body',
                                     project='project',
                                     disk='disk',
                                     zone='zone')
        # ``body`` becomes the request payload, not a URI parameter.
        self.assertTrue(params['payload'])
        for name in ('project', 'disk', 'zone'):
            self.assertIn(name, params['uri_params'])
    def test_disks_createSnapshot_missing_params(self):
        op = self.gce.get_operation('compute.disks.createSnapshot')
        self._assert_missing_combos_raise(
            op, body='body', project='project', disk='disk', zone='zone')
    def test_disks_delete(self):
        op = self.gce.get_operation('compute.disks.delete')
        params = op.build_parameters(project='project',
                                     disk='disk',
                                     zone='zone')
        for name in ('project', 'disk', 'zone'):
            self.assertIn(name, params['uri_params'])
    def test_disks_delete_missing_params(self):
        op = self.gce.get_operation('compute.disks.delete')
        self._assert_missing_combos_raise(
            op, project='project', disk='disk', zone='zone')
    def test_disks_get(self):
        op = self.gce.get_operation('compute.disks.get')
        params = op.build_parameters(project='project',
                                     disk='disk',
                                     zone='zone')
        for name in ('project', 'disk', 'zone'):
            self.assertIn(name, params['uri_params'])
    def test_disks_get_missing_params(self):
        op = self.gce.get_operation('compute.disks.get')
        self._assert_missing_combos_raise(
            op, project='project', disk='disk', zone='zone')
    def test_disks_insert(self):
        op = self.gce.get_operation('compute.disks.insert')
        params = op.build_parameters(body='body',
                                     project='project',
                                     zone='zone')
        self.assertTrue(params['payload'])
        for name in ('project', 'zone'):
            self.assertIn(name, params['uri_params'])
    def test_disks_insert_missing_params(self):
        op = self.gce.get_operation('compute.disks.insert')
        self._assert_missing_combos_raise(
            op, body='body', project='project', zone='zone')
    def test_disks_list(self):
        op = self.gce.get_operation('compute.disks.list')
        params = op.build_parameters(project='project',
                                     zone='zone')
        for name in ('project', 'zone'):
            self.assertIn(name, params['uri_params'])
    def test_disks_list_missing_params(self):
        op = self.gce.get_operation('compute.disks.list')
        self._assert_missing_combos_raise(op, project='project', zone='zone')
class TestFirewalls(unittest.TestCase):
    def setUp(self):
        # One botocore session/service pair per test.
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')
def test_firewalls_delete(self):
op = self.gce.get_operation('compute.firewalls.delete')
params = op.build_parameters(firewall='firewall',
project='project')
self.assertTrue('firewall' in params['uri_params'])
self.assertTrue('project' in params['uri_params'])
def test_firewalls_delete_missing_params(self):
op = self.gce.get_operation('compute.firewalls.delete')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
project='project'
)
def test_firewalls_get(self):
op = self.gce.get_operation('compute.firewalls.get')
params = op.build_parameters(firewall='firewall',
project='project')
self.assertTrue('firewall' in params['uri_params'])
self.assertTrue('project' in params['uri_params'])
def test_firewalls_get_missing_params(self):
op = self.gce.get_operation('compute.firewalls.get')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
project='project'
)
def test_firewalls_insert(self):
op = self.gce.get_operation('compute.firewalls.insert')
params = op.build_parameters(body='body',
project='project')
self.assertTrue(params['payload'])
self.assertTrue('project' in params['uri_params'])
def test_firewalls_insert_missing_params(self):
op = self.gce.get_operation('compute.firewalls.insert')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
project='project'
)
def test_firewalls_list(self):
op = self.gce.get_operation('compute.firewalls.list')
params = op.build_parameters(project='project')
self.assertTrue('project' in params['uri_params'])
def test_firewalls_list_missing_params(self):
op = self.gce.get_operation('compute.firewalls.list')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
def test_firewalls_patch(self):
op = self.gce.get_operation('compute.firewalls.patch')
params = op.build_parameters(body='body',
firewall='firewall',
project='project')
self.assertTrue(params['payload'])
self.assertTrue('firewall' in params['uri_params'])
self.assertTrue('project' in params['uri_params'])
def test_firewalls_patch_missing_params(self):
op = self.gce.get_operation('compute.firewalls.patch')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
project='project'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body',
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body',
project='project'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall',
project='project'
)
def test_firewalls_update(self):
op = self.gce.get_operation('compute.firewalls.update')
params = op.build_parameters(body='body',
firewall='firewall',
project='project')
self.assertTrue(params['payload'])
self.assertTrue('firewall' in params['uri_params'])
self.assertTrue('project' in params['uri_params'])
def test_firewalls_update_missing_params(self):
op = self.gce.get_operation('compute.firewalls.update')
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
project='project'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body',
firewall='firewall'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
body='body',
project='project'
)
self.assertRaises(
botocore.exceptions.MissingParametersError,
op.build_parameters,
firewall='firewall',
project='project'
)
class TestGlobaloperations(unittest.TestCase):
    """Parameter-building checks for the compute.globalOperations operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _build(self, op_name, **kwargs):
        """Resolve *op_name* and build its parameters from *kwargs*."""
        return self.gce.get_operation(op_name).build_parameters(**kwargs)

    def _assert_requires(self, op_name, required):
        """Assert that omitting any of *required* raises MissingParametersError.

        Tries every strict subset of the required kwargs, including {}.
        """
        op = self.gce.get_operation(op_name)
        names = list(required)
        complete = (1 << len(names)) - 1
        for mask in range(complete):
            kwargs = dict(
                (name, required[name])
                for bit, name in enumerate(names)
                if mask & (1 << bit)
            )
            self.assertRaises(
                botocore.exceptions.MissingParametersError,
                op.build_parameters,
                **kwargs
            )

    def test_globalOperations_aggregatedList(self):
        params = self._build('compute.globalOperations.aggregatedList',
                             project='project')
        self.assertTrue('project' in params['uri_params'])

    def test_globalOperations_aggregatedList_missing_params(self):
        self._assert_requires('compute.globalOperations.aggregatedList',
                              {'project': 'project'})

    def test_globalOperations_delete(self):
        params = self._build('compute.globalOperations.delete',
                             project='project',
                             operation='operation')
        for name in ('project', 'operation'):
            self.assertTrue(name in params['uri_params'])

    def test_globalOperations_delete_missing_params(self):
        self._assert_requires('compute.globalOperations.delete',
                              {'project': 'project',
                               'operation': 'operation'})

    def test_globalOperations_get(self):
        params = self._build('compute.globalOperations.get',
                             project='project',
                             operation='operation')
        for name in ('project', 'operation'):
            self.assertTrue(name in params['uri_params'])

    def test_globalOperations_get_missing_params(self):
        self._assert_requires('compute.globalOperations.get',
                              {'project': 'project',
                               'operation': 'operation'})

    def test_globalOperations_list(self):
        params = self._build('compute.globalOperations.list',
                             project='project')
        self.assertTrue('project' in params['uri_params'])

    def test_globalOperations_list_missing_params(self):
        self._assert_requires('compute.globalOperations.list',
                              {'project': 'project'})
class TestImages(unittest.TestCase):
    """Parameter-building checks for the compute.images operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _build(self, op_name, **kwargs):
        """Resolve *op_name* and build its parameters from *kwargs*."""
        return self.gce.get_operation(op_name).build_parameters(**kwargs)

    def _assert_requires(self, op_name, required):
        """Assert that omitting any of *required* raises MissingParametersError.

        Tries every strict subset of the required kwargs, including {}.
        """
        op = self.gce.get_operation(op_name)
        names = list(required)
        complete = (1 << len(names)) - 1
        for mask in range(complete):
            kwargs = dict(
                (name, required[name])
                for bit, name in enumerate(names)
                if mask & (1 << bit)
            )
            self.assertRaises(
                botocore.exceptions.MissingParametersError,
                op.build_parameters,
                **kwargs
            )

    def test_images_delete(self):
        params = self._build('compute.images.delete',
                             project='project',
                             image='image')
        for name in ('project', 'image'):
            self.assertTrue(name in params['uri_params'])

    def test_images_delete_missing_params(self):
        self._assert_requires('compute.images.delete',
                              {'project': 'project', 'image': 'image'})

    def test_images_deprecate(self):
        params = self._build('compute.images.deprecate',
                             body='body',
                             project='project',
                             image='image')
        self.assertTrue(params['payload'])
        for name in ('project', 'image'):
            self.assertTrue(name in params['uri_params'])

    def test_images_deprecate_missing_params(self):
        self._assert_requires('compute.images.deprecate',
                              {'body': 'body',
                               'project': 'project',
                               'image': 'image'})

    def test_images_get(self):
        params = self._build('compute.images.get',
                             project='project',
                             image='image')
        for name in ('project', 'image'):
            self.assertTrue(name in params['uri_params'])

    def test_images_get_missing_params(self):
        self._assert_requires('compute.images.get',
                              {'project': 'project', 'image': 'image'})

    def test_images_insert(self):
        params = self._build('compute.images.insert',
                             body='body',
                             project='project')
        self.assertTrue(params['payload'])
        self.assertTrue('project' in params['uri_params'])

    def test_images_insert_missing_params(self):
        self._assert_requires('compute.images.insert',
                              {'body': 'body', 'project': 'project'})

    def test_images_list(self):
        params = self._build('compute.images.list', project='project')
        self.assertTrue('project' in params['uri_params'])

    def test_images_list_missing_params(self):
        self._assert_requires('compute.images.list', {'project': 'project'})
class TestInstances(unittest.TestCase):
    """Parameter-building checks for the compute.instances operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _build(self, op_name, **kwargs):
        """Resolve *op_name* and build its parameters from *kwargs*."""
        return self.gce.get_operation(op_name).build_parameters(**kwargs)

    def _assert_requires(self, op_name, required):
        """Assert that omitting any of *required* raises MissingParametersError.

        Tries every strict subset of the required kwargs, including {}.
        """
        op = self.gce.get_operation(op_name)
        names = list(required)
        complete = (1 << len(names)) - 1
        for mask in range(complete):
            kwargs = dict(
                (name, required[name])
                for bit, name in enumerate(names)
                if mask & (1 << bit)
            )
            self.assertRaises(
                botocore.exceptions.MissingParametersError,
                op.build_parameters,
                **kwargs
            )

    def test_instances_addAccessConfig(self):
        params = self._build('compute.instances.addAccessConfig',
                             body='body',
                             instance='instance',
                             zone='zone',
                             project='project',
                             network_interface='network_interface')
        self.assertTrue(params['payload'])
        for name in ('instance', 'zone', 'project', 'networkInterface'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_addAccessConfig_missing_params(self):
        self._assert_requires('compute.instances.addAccessConfig',
                              {'body': 'body',
                               'instance': 'instance',
                               'zone': 'zone',
                               'project': 'project',
                               'network_interface': 'network_interface'})

    def test_instances_aggregatedList(self):
        params = self._build('compute.instances.aggregatedList',
                             project='project')
        self.assertTrue('project' in params['uri_params'])

    def test_instances_aggregatedList_missing_params(self):
        self._assert_requires('compute.instances.aggregatedList',
                              {'project': 'project'})

    def test_instances_attachDisk(self):
        params = self._build('compute.instances.attachDisk',
                             body='body',
                             instance='instance',
                             zone='zone',
                             project='project')
        self.assertTrue(params['payload'])
        for name in ('instance', 'zone', 'project'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_attachDisk_missing_params(self):
        self._assert_requires('compute.instances.attachDisk',
                              {'body': 'body',
                               'instance': 'instance',
                               'zone': 'zone',
                               'project': 'project'})

    def test_instances_delete(self):
        params = self._build('compute.instances.delete',
                             project='project',
                             instance='instance',
                             zone='zone')
        for name in ('project', 'instance', 'zone'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_delete_missing_params(self):
        self._assert_requires('compute.instances.delete',
                              {'project': 'project',
                               'instance': 'instance',
                               'zone': 'zone'})

    def test_instances_deleteAccessConfig(self):
        params = self._build('compute.instances.deleteAccessConfig',
                             access_config='access_config',
                             instance='instance',
                             zone='zone',
                             project='project',
                             network_interface='network_interface')
        for name in ('accessConfig', 'instance', 'zone', 'project',
                     'networkInterface'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_deleteAccessConfig_missing_params(self):
        self._assert_requires('compute.instances.deleteAccessConfig',
                              {'access_config': 'access_config',
                               'instance': 'instance',
                               'zone': 'zone',
                               'project': 'project',
                               'network_interface': 'network_interface'})

    def test_instances_detachDisk(self):
        params = self._build('compute.instances.detachDisk',
                             project='project',
                             device_name='device_name',
                             zone='zone',
                             instance='instance')
        for name in ('project', 'deviceName', 'zone', 'instance'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_detachDisk_missing_params(self):
        self._assert_requires('compute.instances.detachDisk',
                              {'project': 'project',
                               'device_name': 'device_name',
                               'zone': 'zone',
                               'instance': 'instance'})

    def test_instances_get(self):
        params = self._build('compute.instances.get',
                             project='project',
                             instance='instance',
                             zone='zone')
        for name in ('project', 'instance', 'zone'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_get_missing_params(self):
        self._assert_requires('compute.instances.get',
                              {'project': 'project',
                               'instance': 'instance',
                               'zone': 'zone'})

    def test_instances_getSerialPortOutput(self):
        params = self._build('compute.instances.getSerialPortOutput',
                             project='project',
                             instance='instance',
                             zone='zone')
        for name in ('project', 'instance', 'zone'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_getSerialPortOutput_missing_params(self):
        self._assert_requires('compute.instances.getSerialPortOutput',
                              {'project': 'project',
                               'instance': 'instance',
                               'zone': 'zone'})

    def test_instances_insert(self):
        params = self._build('compute.instances.insert',
                             body='body',
                             project='project',
                             zone='zone')
        self.assertTrue(params['payload'])
        for name in ('project', 'zone'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_insert_missing_params(self):
        self._assert_requires('compute.instances.insert',
                              {'body': 'body',
                               'project': 'project',
                               'zone': 'zone'})

    def test_instances_list(self):
        params = self._build('compute.instances.list',
                             project='project',
                             zone='zone')
        for name in ('project', 'zone'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_list_missing_params(self):
        self._assert_requires('compute.instances.list',
                              {'project': 'project', 'zone': 'zone'})

    def test_instances_setMetadata(self):
        params = self._build('compute.instances.setMetadata',
                             body='body',
                             instance='instance',
                             zone='zone',
                             project='project')
        self.assertTrue(params['payload'])
        for name in ('instance', 'zone', 'project'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_setMetadata_missing_params(self):
        self._assert_requires('compute.instances.setMetadata',
                              {'body': 'body',
                               'instance': 'instance',
                               'zone': 'zone',
                               'project': 'project'})

    def test_instances_setTags(self):
        params = self._build('compute.instances.setTags',
                             body='body',
                             instance='instance',
                             zone='zone',
                             project='project')
        self.assertTrue(params['payload'])
        for name in ('instance', 'zone', 'project'):
            self.assertTrue(name in params['uri_params'])

    def test_instances_setTags_missing_params(self):
        self._assert_requires('compute.instances.setTags',
                              {'body': 'body',
                               'instance': 'instance',
                               'zone': 'zone',
                               'project': 'project'})
class TestKernels(unittest.TestCase):
    """Parameter building for the GCE compute.kernels operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_kernels_get(self):
        op = self.gce.get_operation('compute.kernels.get')
        built = op.build_parameters(project='project', kernel='kernel')
        for name in ('project', 'kernel'):
            self.assertIn(name, built['uri_params'])

    def test_kernels_get_missing_params(self):
        op = self.gce.get_operation('compute.kernels.get')
        self._assert_requires_all(
            op, {'project': 'project', 'kernel': 'kernel'})

    def test_kernels_list(self):
        op = self.gce.get_operation('compute.kernels.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_kernels_list_missing_params(self):
        op = self.gce.get_operation('compute.kernels.list')
        self._assert_requires_all(op, {'project': 'project'})
class TestMachinetypes(unittest.TestCase):
    """Parameter building for the GCE compute.machineTypes operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_machineTypes_aggregatedList(self):
        op = self.gce.get_operation('compute.machineTypes.aggregatedList')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_machineTypes_aggregatedList_missing_params(self):
        op = self.gce.get_operation('compute.machineTypes.aggregatedList')
        self._assert_requires_all(op, {'project': 'project'})

    def test_machineTypes_get(self):
        op = self.gce.get_operation('compute.machineTypes.get')
        built = op.build_parameters(project='project',
                                    machine_type='machine_type',
                                    zone='zone')
        # machine_type is surfaced as the camelCase URI parameter.
        for name in ('project', 'machineType', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_machineTypes_get_missing_params(self):
        op = self.gce.get_operation('compute.machineTypes.get')
        self._assert_requires_all(op, {'project': 'project',
                                       'machine_type': 'machine_type',
                                       'zone': 'zone'})

    def test_machineTypes_list(self):
        op = self.gce.get_operation('compute.machineTypes.list')
        built = op.build_parameters(project='project', zone='zone')
        for name in ('project', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_machineTypes_list_missing_params(self):
        op = self.gce.get_operation('compute.machineTypes.list')
        self._assert_requires_all(op, {'project': 'project', 'zone': 'zone'})
class TestNetworks(unittest.TestCase):
    """Parameter building for the GCE compute.networks operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_networks_delete(self):
        op = self.gce.get_operation('compute.networks.delete')
        built = op.build_parameters(project='project', network='network')
        for name in ('project', 'network'):
            self.assertIn(name, built['uri_params'])

    def test_networks_delete_missing_params(self):
        op = self.gce.get_operation('compute.networks.delete')
        self._assert_requires_all(
            op, {'project': 'project', 'network': 'network'})

    def test_networks_get(self):
        op = self.gce.get_operation('compute.networks.get')
        built = op.build_parameters(project='project', network='network')
        for name in ('project', 'network'):
            self.assertIn(name, built['uri_params'])

    def test_networks_get_missing_params(self):
        op = self.gce.get_operation('compute.networks.get')
        self._assert_requires_all(
            op, {'project': 'project', 'network': 'network'})

    def test_networks_insert(self):
        op = self.gce.get_operation('compute.networks.insert')
        built = op.build_parameters(body='body', project='project')
        self.assertTrue(built['payload'])
        self.assertIn('project', built['uri_params'])

    def test_networks_insert_missing_params(self):
        op = self.gce.get_operation('compute.networks.insert')
        self._assert_requires_all(op, {'body': 'body', 'project': 'project'})

    def test_networks_list(self):
        op = self.gce.get_operation('compute.networks.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_networks_list_missing_params(self):
        op = self.gce.get_operation('compute.networks.list')
        self._assert_requires_all(op, {'project': 'project'})
class TestProjects(unittest.TestCase):
    """Parameter building for the GCE compute.projects operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_projects_get(self):
        op = self.gce.get_operation('compute.projects.get')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_projects_get_missing_params(self):
        op = self.gce.get_operation('compute.projects.get')
        self._assert_requires_all(op, {'project': 'project'})

    def test_projects_setCommonInstanceMetadata(self):
        op = self.gce.get_operation('compute.projects.setCommonInstanceMetadata')
        built = op.build_parameters(body='body', project='project')
        self.assertTrue(built['payload'])
        self.assertIn('project', built['uri_params'])

    def test_projects_setCommonInstanceMetadata_missing_params(self):
        op = self.gce.get_operation('compute.projects.setCommonInstanceMetadata')
        self._assert_requires_all(op, {'body': 'body', 'project': 'project'})
class TestRegionoperations(unittest.TestCase):
    """Parameter building for the GCE compute.regionOperations operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_regionOperations_delete(self):
        op = self.gce.get_operation('compute.regionOperations.delete')
        built = op.build_parameters(project='project',
                                    operation='operation',
                                    region='region')
        for name in ('project', 'operation', 'region'):
            self.assertIn(name, built['uri_params'])

    def test_regionOperations_delete_missing_params(self):
        op = self.gce.get_operation('compute.regionOperations.delete')
        self._assert_requires_all(op, {'project': 'project',
                                       'operation': 'operation',
                                       'region': 'region'})

    def test_regionOperations_get(self):
        op = self.gce.get_operation('compute.regionOperations.get')
        built = op.build_parameters(project='project',
                                    operation='operation',
                                    region='region')
        for name in ('project', 'operation', 'region'):
            self.assertIn(name, built['uri_params'])

    def test_regionOperations_get_missing_params(self):
        op = self.gce.get_operation('compute.regionOperations.get')
        self._assert_requires_all(op, {'project': 'project',
                                       'operation': 'operation',
                                       'region': 'region'})

    def test_regionOperations_list(self):
        op = self.gce.get_operation('compute.regionOperations.list')
        built = op.build_parameters(region='region', project='project')
        for name in ('region', 'project'):
            self.assertIn(name, built['uri_params'])

    def test_regionOperations_list_missing_params(self):
        op = self.gce.get_operation('compute.regionOperations.list')
        self._assert_requires_all(
            op, {'region': 'region', 'project': 'project'})
class TestRegions(unittest.TestCase):
    """Parameter building for the GCE compute.regions operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_regions_get(self):
        op = self.gce.get_operation('compute.regions.get')
        built = op.build_parameters(project='project', region='region')
        for name in ('project', 'region'):
            self.assertIn(name, built['uri_params'])

    def test_regions_get_missing_params(self):
        op = self.gce.get_operation('compute.regions.get')
        self._assert_requires_all(
            op, {'project': 'project', 'region': 'region'})

    def test_regions_list(self):
        op = self.gce.get_operation('compute.regions.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_regions_list_missing_params(self):
        op = self.gce.get_operation('compute.regions.list')
        self._assert_requires_all(op, {'project': 'project'})
class TestRoutes(unittest.TestCase):
    """Parameter building for the GCE compute.routes operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_routes_delete(self):
        op = self.gce.get_operation('compute.routes.delete')
        built = op.build_parameters(project='project', route='route')
        for name in ('project', 'route'):
            self.assertIn(name, built['uri_params'])

    def test_routes_delete_missing_params(self):
        op = self.gce.get_operation('compute.routes.delete')
        self._assert_requires_all(
            op, {'project': 'project', 'route': 'route'})

    def test_routes_get(self):
        op = self.gce.get_operation('compute.routes.get')
        built = op.build_parameters(project='project', route='route')
        for name in ('project', 'route'):
            self.assertIn(name, built['uri_params'])

    def test_routes_get_missing_params(self):
        op = self.gce.get_operation('compute.routes.get')
        self._assert_requires_all(
            op, {'project': 'project', 'route': 'route'})

    def test_routes_insert(self):
        op = self.gce.get_operation('compute.routes.insert')
        built = op.build_parameters(body='body', project='project')
        self.assertTrue(built['payload'])
        self.assertIn('project', built['uri_params'])

    def test_routes_insert_missing_params(self):
        op = self.gce.get_operation('compute.routes.insert')
        self._assert_requires_all(op, {'body': 'body', 'project': 'project'})

    def test_routes_list(self):
        op = self.gce.get_operation('compute.routes.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_routes_list_missing_params(self):
        op = self.gce.get_operation('compute.routes.list')
        self._assert_requires_all(op, {'project': 'project'})
class TestSnapshots(unittest.TestCase):
    """Parameter building for the GCE compute.snapshots operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_snapshots_delete(self):
        op = self.gce.get_operation('compute.snapshots.delete')
        built = op.build_parameters(project='project', snapshot='snapshot')
        for name in ('project', 'snapshot'):
            self.assertIn(name, built['uri_params'])

    def test_snapshots_delete_missing_params(self):
        op = self.gce.get_operation('compute.snapshots.delete')
        self._assert_requires_all(
            op, {'project': 'project', 'snapshot': 'snapshot'})

    def test_snapshots_get(self):
        op = self.gce.get_operation('compute.snapshots.get')
        built = op.build_parameters(project='project', snapshot='snapshot')
        for name in ('project', 'snapshot'):
            self.assertIn(name, built['uri_params'])

    def test_snapshots_get_missing_params(self):
        op = self.gce.get_operation('compute.snapshots.get')
        self._assert_requires_all(
            op, {'project': 'project', 'snapshot': 'snapshot'})

    def test_snapshots_list(self):
        op = self.gce.get_operation('compute.snapshots.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_snapshots_list_missing_params(self):
        op = self.gce.get_operation('compute.snapshots.list')
        self._assert_requires_all(op, {'project': 'project'})
class TestZoneoperations(unittest.TestCase):
    """Parameter building for the GCE compute.zoneOperations operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_zoneOperations_delete(self):
        op = self.gce.get_operation('compute.zoneOperations.delete')
        built = op.build_parameters(project='project',
                                    operation='operation',
                                    zone='zone')
        for name in ('project', 'operation', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_zoneOperations_delete_missing_params(self):
        op = self.gce.get_operation('compute.zoneOperations.delete')
        self._assert_requires_all(op, {'project': 'project',
                                       'operation': 'operation',
                                       'zone': 'zone'})

    def test_zoneOperations_get(self):
        op = self.gce.get_operation('compute.zoneOperations.get')
        built = op.build_parameters(project='project',
                                    operation='operation',
                                    zone='zone')
        for name in ('project', 'operation', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_zoneOperations_get_missing_params(self):
        op = self.gce.get_operation('compute.zoneOperations.get')
        self._assert_requires_all(op, {'project': 'project',
                                       'operation': 'operation',
                                       'zone': 'zone'})

    def test_zoneOperations_list(self):
        op = self.gce.get_operation('compute.zoneOperations.list')
        built = op.build_parameters(project='project', zone='zone')
        for name in ('project', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_zoneOperations_list_missing_params(self):
        op = self.gce.get_operation('compute.zoneOperations.list')
        self._assert_requires_all(op, {'project': 'project', 'zone': 'zone'})
class TestZones(unittest.TestCase):
    """Parameter building for the GCE compute.zones operations."""

    def setUp(self):
        self.session = botocore.session.get_session()
        self.gce = self.session.get_service('gce')

    def _assert_requires_all(self, op, required):
        # build_parameters must raise when any required kwarg is absent:
        # exercise every proper subset of the required parameters.
        subsets = [{}]
        for name in sorted(required):
            subsets += [dict(s, **{name: required[name]}) for s in subsets]
        for kwargs in subsets:
            if len(kwargs) < len(required):
                self.assertRaises(
                    botocore.exceptions.MissingParametersError,
                    op.build_parameters, **kwargs)

    def test_zones_get(self):
        op = self.gce.get_operation('compute.zones.get')
        built = op.build_parameters(project='project', zone='zone')
        for name in ('project', 'zone'):
            self.assertIn(name, built['uri_params'])

    def test_zones_get_missing_params(self):
        op = self.gce.get_operation('compute.zones.get')
        self._assert_requires_all(op, {'project': 'project', 'zone': 'zone'})

    def test_zones_list(self):
        op = self.gce.get_operation('compute.zones.list')
        built = op.build_parameters(project='project')
        self.assertIn('project', built['uri_params'])

    def test_zones_list_missing_params(self):
        op = self.gce.get_operation('compute.zones.list')
        self._assert_requires_all(op, {'project': 'project'})
|
|
"""
Read SAS7BDAT files
Based on code written by Jared Hobbs:
https://bitbucket.org/jaredhobbs/sas7bdat
See also:
https://github.com/BioStatMatt/sas7bdat
Partial documentation of the file format:
https://cran.r-project.org/web/packages/sas7bdat/vignettes/sas7bdat.pdf
Reference for binary data compression:
http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1992/9210/ross/ross.htm
"""
import pandas as pd
from pandas import compat
from pandas.io.common import get_filepath_or_buffer, BaseIterator
from pandas.errors import EmptyDataError
import numpy as np
import struct
import pandas.io.sas.sas_constants as const
from pandas.io.sas._sas import Parser
class _subheader_pointer(object):
    """Plain attribute holder for a parsed subheader pointer.

    Fields (offset, length, compression, ptype) are assigned in
    ``SAS7BDATReader._process_subheader_pointers``.
    """
    pass
class _column(object):
    """Plain attribute holder for per-column metadata.

    Fields are assigned dynamically by the reader (presumably while
    processing column subheaders -- confirm against the full module).
    """
    pass
# SAS7BDAT represents a SAS data file in SAS7BDAT format.
class SAS7BDATReader(BaseIterator):
"""
Read SAS files in SAS7BDAT format.
Parameters
----------
path_or_buf : path name or buffer
Name of SAS file or file-like object pointing to SAS file
contents.
index : column identifier, defaults to None
Column to use as index.
convert_dates : boolean, defaults to True
Attempt to convert dates to Pandas datetime values. Note that
some rarely used SAS date formats may be unsupported.
blank_missing : boolean, defaults to True
Convert empty strings to missing values (SAS uses blanks to
indicate missing character variables).
chunksize : int, defaults to None
Return SAS7BDATReader object for iterations, returns chunks
with given number of lines.
encoding : string, defaults to None
String encoding.
convert_text : bool, defaults to True
If False, text variables are left as raw bytes.
convert_header_text : bool, defaults to True
If False, header text, including column names, are left as raw
bytes.
"""
def __init__(self, path_or_buf, index=None, convert_dates=True,
blank_missing=True, chunksize=None, encoding=None,
convert_text=True, convert_header_text=True):
self.index = index
self.convert_dates = convert_dates
self.blank_missing = blank_missing
self.chunksize = chunksize
self.encoding = encoding
self.convert_text = convert_text
self.convert_header_text = convert_header_text
self.default_encoding = "latin-1"
self.compression = ""
self.column_names_strings = []
self.column_names = []
self.column_types = []
self.column_formats = []
self.columns = []
self._current_page_data_subheader_pointers = []
self._cached_page = None
self._column_data_lengths = []
self._column_data_offsets = []
self._current_row_in_file_index = 0
self._current_row_on_page_index = 0
self._current_row_in_file_index = 0
self._path_or_buf, _, _, _ = get_filepath_or_buffer(path_or_buf)
if isinstance(self._path_or_buf, compat.string_types):
self._path_or_buf = open(self._path_or_buf, 'rb')
self.handle = self._path_or_buf
self._get_properties()
self._parse_metadata()
    def close(self):
        """Close the underlying file handle.

        ``self.handle`` is only set when ``__init__`` opened a path
        itself; when a buffer was passed in, the attribute lookup raises
        AttributeError, which is deliberately ignored.
        """
        try:
            self.handle.close()
        except AttributeError:
            pass
    def _get_properties(self):
        """Parse the file header and record file-level properties.

        Reads the first 288 bytes (then the remainder of the header)
        and sets: U64/_int_length/_page_bit_offset (alignment),
        byte_order, file_encoding, platform, name, file_type,
        date_created/date_modified, header_length, _page_length,
        _page_count, sas_release, server_type, os_version and os_name.
        Raises ValueError on a bad magic number or truncated header.
        """
        # Check magic number
        self._path_or_buf.seek(0)
        self._cached_page = self._path_or_buf.read(288)
        if self._cached_page[0:len(const.magic)] != const.magic:
            self.close()
            raise ValueError("magic number mismatch (not a SAS file?)")

        # Get alignment information.  The two alignment checkers also
        # determine whether this is a 64-bit (u64) file, which changes
        # integer widths and several page offsets.
        align1, align2 = 0, 0
        buf = self._read_bytes(const.align_1_offset, const.align_1_length)
        if buf == const.u64_byte_checker_value:
            align2 = const.align_2_value
            self.U64 = True
            self._int_length = 8
            self._page_bit_offset = const.page_bit_offset_x64
            self._subheader_pointer_length = const.subheader_pointer_length_x64
        else:
            self.U64 = False
            self._page_bit_offset = const.page_bit_offset_x86
            self._subheader_pointer_length = const.subheader_pointer_length_x86
            self._int_length = 4
        buf = self._read_bytes(const.align_2_offset, const.align_2_length)
        if buf == const.align_1_checker_value:
            align1 = const.align_2_value
        total_align = align1 + align2

        # Get endianness information
        buf = self._read_bytes(const.endianness_offset,
                               const.endianness_length)
        if buf == b'\x01':
            self.byte_order = "<"
        else:
            self.byte_order = ">"

        # Get encoding information
        buf = self._read_bytes(const.encoding_offset, const.encoding_length)[0]
        if buf in const.encoding_names:
            self.file_encoding = const.encoding_names[buf]
        else:
            self.file_encoding = "unknown (code=%s)" % str(buf)

        # Get platform information
        buf = self._read_bytes(const.platform_offset, const.platform_length)
        if buf == b'1':
            self.platform = "unix"
        elif buf == b'2':
            self.platform = "windows"
        else:
            self.platform = "unknown"

        # Dataset name and file type (decoded only when requested).
        buf = self._read_bytes(const.dataset_offset, const.dataset_length)
        self.name = buf.rstrip(b'\x00 ')
        if self.convert_header_text:
            self.name = self.name.decode(
                self.encoding or self.default_encoding)
        buf = self._read_bytes(const.file_type_offset, const.file_type_length)
        self.file_type = buf.rstrip(b'\x00 ')
        if self.convert_header_text:
            self.file_type = self.file_type.decode(
                self.encoding or self.default_encoding)

        # Timestamp is epoch 01/01/1960
        epoch = pd.datetime(1960, 1, 1)
        x = self._read_float(const.date_created_offset + align1,
                             const.date_created_length)
        self.date_created = epoch + pd.to_timedelta(x, unit='s')
        x = self._read_float(const.date_modified_offset + align1,
                             const.date_modified_length)
        self.date_modified = epoch + pd.to_timedelta(x, unit='s')

        self.header_length = self._read_int(const.header_size_offset + align1,
                                            const.header_size_length)

        # Read the rest of the header into cached_page.
        buf = self._path_or_buf.read(self.header_length - 288)
        self._cached_page += buf
        if len(self._cached_page) != self.header_length:
            self.close()
            raise ValueError("The SAS7BDAT file appears to be truncated.")

        self._page_length = self._read_int(const.page_size_offset + align1,
                                           const.page_size_length)
        self._page_count = self._read_int(const.page_count_offset + align1,
                                          const.page_count_length)

        # SAS release / host metadata, offset by the total alignment.
        buf = self._read_bytes(const.sas_release_offset + total_align,
                               const.sas_release_length)
        self.sas_release = buf.rstrip(b'\x00 ')
        if self.convert_header_text:
            self.sas_release = self.sas_release.decode(
                self.encoding or self.default_encoding)
        buf = self._read_bytes(const.sas_server_type_offset + total_align,
                               const.sas_server_type_length)
        self.server_type = buf.rstrip(b'\x00 ')
        if self.convert_header_text:
            self.server_type = self.server_type.decode(
                self.encoding or self.default_encoding)
        buf = self._read_bytes(const.os_version_number_offset + total_align,
                               const.os_version_number_length)
        self.os_version = buf.rstrip(b'\x00 ')
        if self.convert_header_text:
            self.os_version = self.os_version.decode(
                self.encoding or self.default_encoding)

        # OS name: fall back to the "os maker" field when empty.
        buf = self._read_bytes(const.os_name_offset + total_align,
                               const.os_name_length)
        buf = buf.rstrip(b'\x00 ')
        if len(buf) > 0:
            self.os_name = buf.decode(self.encoding or self.default_encoding)
        else:
            buf = self._read_bytes(const.os_maker_offset + total_align,
                                   const.os_maker_length)
            self.os_name = buf.rstrip(b'\x00 ')
            if self.convert_header_text:
                self.os_name = self.os_name.decode(
                    self.encoding or self.default_encoding)
def __next__(self):
da = self.read(nrows=self.chunksize or 1)
if da is None:
raise StopIteration
return da
# Read a single float of the given width (4 or 8).
def _read_float(self, offset, width):
if width not in (4, 8):
self.close()
raise ValueError("invalid float width")
buf = self._read_bytes(offset, width)
fd = "f" if width == 4 else "d"
return struct.unpack(self.byte_order + fd, buf)[0]
# Read a single signed integer of the given width (1, 2, 4 or 8).
def _read_int(self, offset, width):
if width not in (1, 2, 4, 8):
self.close()
raise ValueError("invalid int width")
buf = self._read_bytes(offset, width)
it = {1: "b", 2: "h", 4: "l", 8: "q"}[width]
iv = struct.unpack(self.byte_order + it, buf)[0]
return iv
def _read_bytes(self, offset, length):
if self._cached_page is None:
self._path_or_buf.seek(offset)
buf = self._path_or_buf.read(length)
if len(buf) < length:
self.close()
msg = "Unable to read {:d} bytes from file position {:d}."
raise ValueError(msg.format(length, offset))
return buf
else:
if offset + length > len(self._cached_page):
self.close()
raise ValueError("The cached page is too small.")
return self._cached_page[offset:offset + length]
def _parse_metadata(self):
done = False
while not done:
self._cached_page = self._path_or_buf.read(self._page_length)
if len(self._cached_page) <= 0:
break
if len(self._cached_page) != self._page_length:
self.close()
raise ValueError(
"Failed to read a meta data page from the SAS file.")
done = self._process_page_meta()
def _process_page_meta(self):
self._read_page_header()
pt = [const.page_meta_type, const.page_amd_type] + const.page_mix_types
if self._current_page_type in pt:
self._process_page_metadata()
return ((self._current_page_type in [256] + const.page_mix_types) or
(self._current_page_data_subheader_pointers is not None))
def _read_page_header(self):
bit_offset = self._page_bit_offset
tx = const.page_type_offset + bit_offset
self._current_page_type = self._read_int(tx, const.page_type_length)
tx = const.block_count_offset + bit_offset
self._current_page_block_count = self._read_int(
tx, const.block_count_length)
tx = const.subheader_count_offset + bit_offset
self._current_page_subheaders_count = (
self._read_int(tx, const.subheader_count_length))
    def _process_page_metadata(self):
        """Walk the subheader pointers on the current page and process
        each non-empty, non-truncated subheader."""
        bit_offset = self._page_bit_offset
        for i in range(self._current_page_subheaders_count):
            pointer = self._process_subheader_pointers(
                const.subheader_pointers_offset + bit_offset, i)
            # Zero-length subheaders carry no data.
            if pointer.length == 0:
                continue
            # Truncated subheaders are skipped entirely.
            if pointer.compression == const.truncated_subheader_id:
                continue
            subheader_signature = self._read_subheader_signature(
                pointer.offset)
            subheader_index = (
                self._get_subheader_index(subheader_signature,
                                          pointer.compression, pointer.ptype))
            self._process_subheader(subheader_index, pointer)
    def _get_subheader_index(self, signature, compression, ptype):
        """Map a subheader signature to a SASIndex value.

        An unrecognized signature is treated as a data subheader when
        the file is compressed and the pointer's compression/type flags
        allow it; otherwise it is an error.
        """
        index = const.subheader_signature_to_index.get(signature)
        if index is None:
            f1 = ((compression == const.compressed_subheader_id) or
                  (compression == 0))
            f2 = (ptype == const.compressed_subheader_type)
            if (self.compression != "") and f1 and f2:
                index = const.SASIndex.data_subheader_index
            else:
                self.close()
                raise ValueError("Unknown subheader signature")
        return index
def _process_subheader_pointers(self, offset, subheader_pointer_index):
subheader_pointer_length = self._subheader_pointer_length
total_offset = (offset +
subheader_pointer_length * subheader_pointer_index)
subheader_offset = self._read_int(total_offset, self._int_length)
total_offset += self._int_length
subheader_length = self._read_int(total_offset, self._int_length)
total_offset += self._int_length
subheader_compression = self._read_int(total_offset, 1)
total_offset += 1
subheader_type = self._read_int(total_offset, 1)
x = _subheader_pointer()
x.offset = subheader_offset
x.length = subheader_length
x.compression = subheader_compression
x.ptype = subheader_type
return x
def _read_subheader_signature(self, offset):
subheader_signature = self._read_bytes(offset, self._int_length)
return subheader_signature
def _process_subheader(self, subheader_index, pointer):
offset = pointer.offset
length = pointer.length
if subheader_index == const.SASIndex.row_size_index:
processor = self._process_rowsize_subheader
elif subheader_index == const.SASIndex.column_size_index:
processor = self._process_columnsize_subheader
elif subheader_index == const.SASIndex.column_text_index:
processor = self._process_columntext_subheader
elif subheader_index == const.SASIndex.column_name_index:
processor = self._process_columnname_subheader
elif subheader_index == const.SASIndex.column_attributes_index:
processor = self._process_columnattributes_subheader
elif subheader_index == const.SASIndex.format_and_label_index:
processor = self._process_format_subheader
elif subheader_index == const.SASIndex.column_list_index:
processor = self._process_columnlist_subheader
elif subheader_index == const.SASIndex.subheader_counts_index:
processor = self._process_subheader_counts
elif subheader_index == const.SASIndex.data_subheader_index:
self._current_page_data_subheader_pointers.append(pointer)
return
else:
raise ValueError("unknown subheader index")
processor(offset, length)
def _process_rowsize_subheader(self, offset, length):
int_len = self._int_length
lcs_offset = offset
lcp_offset = offset
if self.U64:
lcs_offset += 682
lcp_offset += 706
else:
lcs_offset += 354
lcp_offset += 378
self.row_length = self._read_int(
offset + const.row_length_offset_multiplier * int_len, int_len)
self.row_count = self._read_int(
offset + const.row_count_offset_multiplier * int_len, int_len)
self.col_count_p1 = self._read_int(
offset + const.col_count_p1_multiplier * int_len, int_len)
self.col_count_p2 = self._read_int(
offset + const.col_count_p2_multiplier * int_len, int_len)
mx = const.row_count_on_mix_page_offset_multiplier * int_len
self._mix_page_row_count = self._read_int(offset + mx, int_len)
self._lcs = self._read_int(lcs_offset, 2)
self._lcp = self._read_int(lcp_offset, 2)
def _process_columnsize_subheader(self, offset, length):
int_len = self._int_length
offset += int_len
self.column_count = self._read_int(offset, int_len)
if (self.col_count_p1 + self.col_count_p2 !=
self.column_count):
print("Warning: column count mismatch (%d + %d != %d)\n",
self.col_count_p1, self.col_count_p2, self.column_count)
    # Unknown purpose
    def _process_subheader_counts(self, offset, length):
        # Deliberately a no-op: the subheader-counts content is not needed
        # to parse the file.
        pass
    def _process_columntext_subheader(self, offset, length):
        # Collect one column-text block.  The first block is special: it
        # also identifies the compression scheme and the creator proc name.
        offset += self._int_length
        text_block_size = self._read_int(offset, const.text_block_size_length)
        buf = self._read_bytes(offset, text_block_size)
        cname_raw = buf[0:text_block_size].rstrip(b"\x00 ")
        cname = cname_raw
        if self.convert_header_text:
            cname = cname.decode(self.encoding or self.default_encoding)
        self.column_names_strings.append(cname)
        # Only the first text block can carry the compression literal.
        if len(self.column_names_strings) == 1:
            compression_literal = ""
            for cl in const.compression_literals:
                if cl in cname_raw:
                    compression_literal = cl
            self.compression = compression_literal
            offset -= self._int_length
            # Fixed field offsets shift by 4 bytes on 64-bit (U64) files.
            offset1 = offset + 16
            if self.U64:
                offset1 += 4
            buf = self._read_bytes(offset1, self._lcp)
            compression_literal = buf.rstrip(b"\x00")
            # NOTE(review): ``buf.rstrip(b"\x00")`` yields bytes, so the
            # comparisons against ``""`` (and possibly
            # ``const.rle_compression``) may never match on Python 3 --
            # confirm the intended types before relying on these branches.
            if compression_literal == "":
                self._lcs = 0
                offset1 = offset + 32
                if self.U64:
                    offset1 += 4
                buf = self._read_bytes(offset1, self._lcp)
                self.creator_proc = buf[0:self._lcp]
            elif compression_literal == const.rle_compression:
                offset1 = offset + 40
                if self.U64:
                    offset1 += 4
                buf = self._read_bytes(offset1, self._lcp)
                self.creator_proc = buf[0:self._lcp]
            elif self._lcs > 0:
                self._lcp = 0
                offset1 = offset + 16
                if self.U64:
                    offset1 += 4
                buf = self._read_bytes(offset1, self._lcs)
                self.creator_proc = buf[0:self._lcp]
        if self.convert_header_text:
            # creator_proc is only set on some branches above.
            if hasattr(self, "creator_proc"):
                self.creator_proc = self.creator_proc.decode(
                    self.encoding or self.default_encoding)
def _process_columnname_subheader(self, offset, length):
int_len = self._int_length
offset += int_len
column_name_pointers_count = (length - 2 * int_len - 12) // 8
for i in range(column_name_pointers_count):
text_subheader = offset + const.column_name_pointer_length * \
(i + 1) + const.column_name_text_subheader_offset
col_name_offset = offset + const.column_name_pointer_length * \
(i + 1) + const.column_name_offset_offset
col_name_length = offset + const.column_name_pointer_length * \
(i + 1) + const.column_name_length_offset
idx = self._read_int(
text_subheader, const.column_name_text_subheader_length)
col_offset = self._read_int(
col_name_offset, const.column_name_offset_length)
col_len = self._read_int(
col_name_length, const.column_name_length_length)
name_str = self.column_names_strings[idx]
self.column_names.append(name_str[col_offset:col_offset + col_len])
def _process_columnattributes_subheader(self, offset, length):
int_len = self._int_length
column_attributes_vectors_count = (
length - 2 * int_len - 12) // (int_len + 8)
self.column_types = np.empty(
column_attributes_vectors_count, dtype=np.dtype('S1'))
self._column_data_lengths = np.empty(
column_attributes_vectors_count, dtype=np.int64)
self._column_data_offsets = np.empty(
column_attributes_vectors_count, dtype=np.int64)
for i in range(column_attributes_vectors_count):
col_data_offset = (offset + int_len +
const.column_data_offset_offset +
i * (int_len + 8))
col_data_len = (offset + 2 * int_len +
const.column_data_length_offset +
i * (int_len + 8))
col_types = (offset + 2 * int_len +
const.column_type_offset + i * (int_len + 8))
x = self._read_int(col_data_offset, int_len)
self._column_data_offsets[i] = x
x = self._read_int(col_data_len, const.column_data_length_length)
self._column_data_lengths[i] = x
x = self._read_int(col_types, const.column_type_length)
if x == 1:
self.column_types[i] = b'd'
else:
self.column_types[i] = b's'
    def _process_columnlist_subheader(self, offset, length):
        # unknown purpose
        # Deliberately a no-op: column-list content is not needed to parse
        # the file.
        pass
def _process_format_subheader(self, offset, length):
int_len = self._int_length
text_subheader_format = (
offset +
const.column_format_text_subheader_index_offset +
3 * int_len)
col_format_offset = (offset +
const.column_format_offset_offset +
3 * int_len)
col_format_len = (offset +
const.column_format_length_offset +
3 * int_len)
text_subheader_label = (
offset +
const.column_label_text_subheader_index_offset +
3 * int_len)
col_label_offset = (offset +
const.column_label_offset_offset +
3 * int_len)
col_label_len = offset + const.column_label_length_offset + 3 * int_len
x = self._read_int(text_subheader_format,
const.column_format_text_subheader_index_length)
format_idx = min(x, len(self.column_names_strings) - 1)
format_start = self._read_int(
col_format_offset, const.column_format_offset_length)
format_len = self._read_int(
col_format_len, const.column_format_length_length)
label_idx = self._read_int(
text_subheader_label,
const.column_label_text_subheader_index_length)
label_idx = min(label_idx, len(self.column_names_strings) - 1)
label_start = self._read_int(
col_label_offset, const.column_label_offset_length)
label_len = self._read_int(col_label_len,
const.column_label_length_length)
label_names = self.column_names_strings[label_idx]
column_label = label_names[label_start: label_start + label_len]
format_names = self.column_names_strings[format_idx]
column_format = format_names[format_start: format_start + format_len]
current_column_number = len(self.columns)
col = _column()
col.col_id = current_column_number
col.name = self.column_names[current_column_number]
col.label = column_label
col.format = column_format
col.ctype = self.column_types[current_column_number]
col.length = self._column_data_lengths[current_column_number]
self.column_formats.append(column_format)
self.columns.append(col)
def read(self, nrows=None):
if (nrows is None) and (self.chunksize is not None):
nrows = self.chunksize
elif nrows is None:
nrows = self.row_count
if len(self.column_types) == 0:
self.close()
raise EmptyDataError("No columns to parse from file")
if self._current_row_in_file_index >= self.row_count:
return None
m = self.row_count - self._current_row_in_file_index
if nrows > m:
nrows = m
nd = (self.column_types == b'd').sum()
ns = (self.column_types == b's').sum()
self._string_chunk = np.empty((ns, nrows), dtype=np.object)
self._byte_chunk = np.empty((nd, 8 * nrows), dtype=np.uint8)
self._current_row_in_chunk_index = 0
p = Parser(self)
p.read(nrows)
rslt = self._chunk_to_dataframe()
if self.index is not None:
rslt = rslt.set_index(self.index)
return rslt
def _read_next_page(self):
self._current_page_data_subheader_pointers = []
self._cached_page = self._path_or_buf.read(self._page_length)
if len(self._cached_page) <= 0:
return True
elif len(self._cached_page) != self._page_length:
self.close()
msg = ("failed to read complete page from file "
"(read {:d} of {:d} bytes)")
raise ValueError(msg.format(len(self._cached_page),
self._page_length))
self._read_page_header()
if self._current_page_type == const.page_meta_type:
self._process_page_metadata()
pt = [const.page_meta_type, const.page_data_type]
pt += [const.page_mix_types]
if self._current_page_type not in pt:
return self._read_next_page()
return False
    def _chunk_to_dataframe(self):
        # Convert the raw byte/string chunks filled in by the Parser into a
        # DataFrame whose index continues the running file-row counter.
        n = self._current_row_in_chunk_index
        m = self._current_row_in_file_index
        ix = range(m - n, m)
        rslt = pd.DataFrame(index=ix)
        js, jb = 0, 0
        for j in range(self.column_count):
            name = self.column_names[j]
            if self.column_types[j] == b'd':
                # Numeric column: reinterpret the packed bytes as doubles
                # in the file's byte order.
                rslt[name] = self._byte_chunk[jb, :].view(
                    dtype=self.byte_order + 'd')
                rslt[name] = np.asarray(rslt[name], dtype=np.float64)
                if self.convert_dates:
                    unit = None
                    if self.column_formats[j] in const.sas_date_formats:
                        unit = 'd'
                    elif self.column_formats[j] in const.sas_datetime_formats:
                        unit = 's'
                    if unit:
                        # SAS stores dates/datetimes relative to 1960-01-01.
                        rslt[name] = pd.to_datetime(rslt[name], unit=unit,
                                                    origin="1960-01-01")
                jb += 1
            elif self.column_types[j] == b's':
                rslt[name] = self._string_chunk[js, :]
                if self.convert_text and (self.encoding is not None):
                    rslt[name] = rslt[name].str.decode(
                        self.encoding or self.default_encoding)
                if self.blank_missing:
                    # Treat empty strings as missing values.
                    ii = rslt[name].str.len() == 0
                    rslt.loc[ii, name] = np.nan
                js += 1
            else:
                self.close()
                raise ValueError("unknown column type %s" %
                                 self.column_types[j])
        return rslt
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.logging.v2 ConfigServiceV2 API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.api import monitored_resource_pb2
from google.cloud.logging_v2.gapic import config_service_v2_client_config
from google.cloud.logging_v2.gapic import enums
from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport
from google.cloud.logging_v2.proto import log_entry_pb2
from google.cloud.logging_v2.proto import logging_config_pb2
from google.cloud.logging_v2.proto import logging_config_pb2_grpc
from google.cloud.logging_v2.proto import logging_pb2
from google.cloud.logging_v2.proto import logging_pb2_grpc
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
# Version of the installed google-cloud-logging distribution; sent to the
# service as part of the client's user-agent string.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    'google-cloud-logging', ).version
class ConfigServiceV2Client(object):
"""
Service for configuring sinks used to export log entries out of
Logging.
"""
SERVICE_ADDRESS = 'logging.googleapis.com:443'
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = 'google.logging.v2.ConfigServiceV2'
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConfigServiceV2Client: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
'projects/{project}',
project=project,
)
@classmethod
def sink_path(cls, project, sink):
"""Return a fully-qualified sink string."""
return google.api_core.path_template.expand(
'projects/{project}/sinks/{sink}',
project=project,
sink=sink,
)
@classmethod
def exclusion_path(cls, project, exclusion):
"""Return a fully-qualified exclusion string."""
return google.api_core.path_template.expand(
'projects/{project}/exclusions/{exclusion}',
project=project,
exclusion=exclusion,
)
    def __init__(self,
                 transport=None,
                 channel=None,
                 credentials=None,
                 client_config=None,
                 client_info=None):
        """Constructor.
        Args:
            transport (Union[~.ConfigServiceV2GrpcTransport,
                    Callable[[~.Credentials, type], ~.ConfigServiceV2GrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config is not None:
            warnings.warn(
                'The `client_config` argument is deprecated.',
                PendingDeprecationWarning,
                stacklevel=2)
        else:
            client_config = config_service_v2_client_config.config
        if channel:
            warnings.warn(
                'The `channel` argument is deprecated; use '
                '`transport` instead.',
                PendingDeprecationWarning,
                stacklevel=2)
        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            # A callable transport acts as a factory: it receives the
            # credentials and the default transport class.
            if callable(transport):
                self.transport = transport(
                    credentials=credentials,
                    default_class=config_service_v2_grpc_transport.
                    ConfigServiceV2GrpcTransport,
                )
            else:
                if credentials:
                    raise ValueError(
                        'Received both a transport instance and '
                        'credentials; these are mutually exclusive.')
                self.transport = transport
        else:
            self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport(
                address=self.SERVICE_ADDRESS,
                channel=channel,
                credentials=credentials,
            )
        # Stamp the library version onto the client info used for the
        # user-agent header.
        if client_info is None:
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION, )
        else:
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info
        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config['interfaces'][self._INTERFACE_NAME], )
        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}
# Service calls
def list_sinks(self,
parent,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists sinks.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.ConfigServiceV2Client()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_sinks(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_sinks(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The parent resource whose sinks are to be listed:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.logging_v2.types.LogSink` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_sinks' not in self._inner_api_calls:
self._inner_api_calls[
'list_sinks'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_sinks,
default_retry=self._method_configs['ListSinks'].retry,
default_timeout=self._method_configs['ListSinks'].timeout,
client_info=self._client_info,
)
request = logging_config_pb2.ListSinksRequest(
parent=parent,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_sinks'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='sinks',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
def get_sink(self,
sink_name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Gets a sink.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.ConfigServiceV2Client()
>>>
>>> sink_name = client.sink_path('[PROJECT]', '[SINK]')
>>>
>>> response = client.get_sink(sink_name)
Args:
sink_name (str): Required. The resource name of the sink:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.logging_v2.types.LogSink` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_sink' not in self._inner_api_calls:
self._inner_api_calls[
'get_sink'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_sink,
default_retry=self._method_configs['GetSink'].retry,
default_timeout=self._method_configs['GetSink'].timeout,
client_info=self._client_info,
)
request = logging_config_pb2.GetSinkRequest(sink_name=sink_name, )
return self._inner_api_calls['get_sink'](
request, retry=retry, timeout=timeout, metadata=metadata)
def create_sink(self,
parent,
sink,
unique_writer_identity=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Creates a sink that exports specified log entries to a destination. The
export of newly-ingested log entries begins immediately, unless the
sink's ``writer_identity`` is not permitted to write to the destination.
A sink can export log entries only from the resource owning the sink.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.ConfigServiceV2Client()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `sink`:
>>> sink = {}
>>>
>>> response = client.create_sink(parent, sink)
Args:
parent (str): Required. The resource in which to create the sink:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
Examples: ``"projects/my-logging-project"``,
``"organizations/123456789"``.
sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The new sink, whose ``name`` parameter is a sink identifier
that is not already in use.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.logging_v2.types.LogSink`
unique_writer_identity (bool): Optional. Determines the kind of IAM identity returned as
``writer_identity`` in the new sink. If this value is omitted or set to
false, and if the sink's parent is a project, then the value returned as
``writer_identity`` is the same group or service account used by Logging
before the addition of writer identities to this API. The sink's
destination must be in the same project as the sink itself.
If this field is set to true, or if the sink is owned by a non-project
resource such as an organization, then the value of ``writer_identity``
will be a unique service account used only for exports from the new
sink. For more information, see ``writer_identity`` in ``LogSink``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.logging_v2.types.LogSink` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'create_sink' not in self._inner_api_calls:
self._inner_api_calls[
'create_sink'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_sink,
default_retry=self._method_configs['CreateSink'].retry,
default_timeout=self._method_configs['CreateSink'].timeout,
client_info=self._client_info,
)
request = logging_config_pb2.CreateSinkRequest(
parent=parent,
sink=sink,
unique_writer_identity=unique_writer_identity,
)
return self._inner_api_calls['create_sink'](
request, retry=retry, timeout=timeout, metadata=metadata)
def update_sink(self,
sink_name,
sink,
unique_writer_identity=None,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Updates a sink. This method replaces the following fields in the
existing sink with values from the new sink: ``destination``, and
``filter``. The updated sink might also have a new ``writer_identity``;
see the ``unique_writer_identity`` field.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.ConfigServiceV2Client()
>>>
>>> sink_name = client.sink_path('[PROJECT]', '[SINK]')
>>>
>>> # TODO: Initialize `sink`:
>>> sink = {}
>>>
>>> response = client.update_sink(sink_name, sink)
Args:
sink_name (str): Required. The full resource name of the sink to update, including the
parent resource and the sink identifier:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
sink (Union[dict, ~google.cloud.logging_v2.types.LogSink]): Required. The updated sink, whose name is the same identifier that
appears as part of ``sink_name``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.logging_v2.types.LogSink`
unique_writer_identity (bool): Optional. See
`sinks.create <https://cloud.google.com/logging/docs/api/reference/rest/v2/projects.sinks/create>`__
for a description of this field. When updating a sink, the effect of
this field on the value of ``writer_identity`` in the updated sink
depends on both the old and new values of this field:
- If the old and new values of this field are both false or both true,
then there is no change to the sink's ``writer_identity``.
- If the old value is false and the new value is true, then
``writer_identity`` is changed to a unique service account.
- It is an error if the old value is true and the new value is set to
false or defaulted to false.
update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Optional. Field mask that specifies the fields in ``sink`` that need an
update. A sink field will be overwritten if, and only if, it is in the
update mask. ``name`` and output only fields cannot be updated.
An empty updateMask is temporarily treated as using the following mask
for backwards compatibility purposes: destination,filter,includeChildren
At some point in the future, behavior will be removed and specifying an
empty updateMask will be an error.
For a detailed ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask
Example: ``updateMask=filter``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.logging_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.logging_v2.types.LogSink` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'update_sink' not in self._inner_api_calls:
self._inner_api_calls[
'update_sink'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_sink,
default_retry=self._method_configs['UpdateSink'].retry,
default_timeout=self._method_configs['UpdateSink'].timeout,
client_info=self._client_info,
)
request = logging_config_pb2.UpdateSinkRequest(
sink_name=sink_name,
sink=sink,
unique_writer_identity=unique_writer_identity,
update_mask=update_mask,
)
return self._inner_api_calls['update_sink'](
request, retry=retry, timeout=timeout, metadata=metadata)
def delete_sink(self,
sink_name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Deletes a sink. If the sink has a unique ``writer_identity``, then that
service account is also deleted.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.ConfigServiceV2Client()
>>>
>>> sink_name = client.sink_path('[PROJECT]', '[SINK]')
>>>
>>> client.delete_sink(sink_name)
Args:
sink_name (str): Required. The full resource name of the sink to delete, including the
parent resource and the sink identifier:
::
"projects/[PROJECT_ID]/sinks/[SINK_ID]"
"organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
"folders/[FOLDER_ID]/sinks/[SINK_ID]"
Example: ``"projects/my-project-id/sinks/my-sink-id"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'delete_sink' not in self._inner_api_calls:
self._inner_api_calls[
'delete_sink'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_sink,
default_retry=self._method_configs['DeleteSink'].retry,
default_timeout=self._method_configs['DeleteSink'].timeout,
client_info=self._client_info,
)
request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name, )
self._inner_api_calls['delete_sink'](
request, retry=retry, timeout=timeout, metadata=metadata)
def list_exclusions(self,
                    parent,
                    page_size=None,
                    retry=google.api_core.gapic_v1.method.DEFAULT,
                    timeout=google.api_core.gapic_v1.method.DEFAULT,
                    metadata=None):
    """Lists all the exclusions in a parent resource.

    Example:
        >>> from google.cloud import logging_v2
        >>>
        >>> client = logging_v2.ConfigServiceV2Client()
        >>> parent = client.project_path('[PROJECT]')
        >>>
        >>> # Iterate over all results
        >>> for element in client.list_exclusions(parent):
        ...     pass  # process element
        >>>
        >>> # Or iterate over results one page at a time
        >>> for page in client.list_exclusions(parent).pages:
        ...     for element in page:
        ...         pass  # process element

    Args:
        parent (str): Required. The parent resource whose exclusions are
            to be listed, e.g. ``"projects/[PROJECT_ID]"``,
            ``"organizations/[ORGANIZATION_ID]"``,
            ``"billingAccounts/[BILLING_ACCOUNT_ID]"`` or
            ``"folders/[FOLDER_ID]"``.
        page_size (int): The maximum number of resources contained in each
            underlying API response page.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests; ``None`` disables retries.
        timeout (Optional[float]): Per-attempt timeout, in seconds.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.gax.PageIterator` instance yielding
        :class:`~google.cloud.logging_v2.types.LogExclusion` instances,
        configurable to iterate page-by-page.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request fails.
        google.api_core.exceptions.RetryError: If retries are exhausted.
        ValueError: If the parameters are invalid.
    """
    # Wrap the raw transport method with retry/timeout logic once and
    # cache the wrapped callable for subsequent calls.
    if 'list_exclusions' not in self._inner_api_calls:
        config = self._method_configs['ListExclusions']
        self._inner_api_calls['list_exclusions'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_exclusions,
                default_retry=config.retry,
                default_timeout=config.timeout,
                client_info=self._client_info,
            )
    request = logging_config_pb2.ListExclusionsRequest(
        parent=parent,
        page_size=page_size,
    )
    # Bind the per-call options so the iterator can re-issue the request
    # for each page.
    bound_call = functools.partial(
        self._inner_api_calls['list_exclusions'],
        retry=retry,
        timeout=timeout,
        metadata=metadata)
    return google.api_core.page_iterator.GRPCIterator(
        client=None,
        method=bound_call,
        request=request,
        items_field='exclusions',
        request_token_field='page_token',
        response_token_field='next_page_token',
    )
def get_exclusion(self,
                  name,
                  retry=google.api_core.gapic_v1.method.DEFAULT,
                  timeout=google.api_core.gapic_v1.method.DEFAULT,
                  metadata=None):
    """Gets the description of an exclusion.

    Example:
        >>> from google.cloud import logging_v2
        >>>
        >>> client = logging_v2.ConfigServiceV2Client()
        >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
        >>> response = client.get_exclusion(name)

    Args:
        name (str): Required. The resource name of an existing exclusion,
            e.g. ``"projects/my-project-id/exclusions/my-exclusion-id"``
            (also supported under ``organizations/``,
            ``billingAccounts/`` and ``folders/``).
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests; ``None`` disables retries.
        timeout (Optional[float]): Per-attempt timeout, in seconds.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.cloud.logging_v2.types.LogExclusion` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request fails.
        google.api_core.exceptions.RetryError: If retries are exhausted.
        ValueError: If the parameters are invalid.
    """
    # Wrap the raw transport method with retry/timeout logic once and
    # cache the wrapped callable for subsequent calls.
    if 'get_exclusion' not in self._inner_api_calls:
        config = self._method_configs['GetExclusion']
        self._inner_api_calls['get_exclusion'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_exclusion,
                default_retry=config.retry,
                default_timeout=config.timeout,
                client_info=self._client_info,
            )
    request = logging_config_pb2.GetExclusionRequest(name=name, )
    return self._inner_api_calls['get_exclusion'](
        request, retry=retry, timeout=timeout, metadata=metadata)
def create_exclusion(self,
                     parent,
                     exclusion,
                     retry=google.api_core.gapic_v1.method.DEFAULT,
                     timeout=google.api_core.gapic_v1.method.DEFAULT,
                     metadata=None):
    """Creates a new exclusion in a specified parent resource.

    Only log entries belonging to that resource can be excluded. A
    resource may hold up to 10 exclusions.

    Example:
        >>> from google.cloud import logging_v2
        >>>
        >>> client = logging_v2.ConfigServiceV2Client()
        >>> parent = client.project_path('[PROJECT]')
        >>> exclusion = {}  # TODO: Initialize `exclusion`
        >>> response = client.create_exclusion(parent, exclusion)

    Args:
        parent (str): Required. The parent resource in which to create the
            exclusion, e.g. ``"projects/my-logging-project"`` or
            ``"organizations/123456789"`` (also ``billingAccounts/`` and
            ``folders/``).
        exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]):
            Required. The new exclusion, whose ``name`` must not already
            be used in the parent resource. A dict must match the
            :class:`~google.cloud.logging_v2.types.LogExclusion` protobuf.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests; ``None`` disables retries.
        timeout (Optional[float]): Per-attempt timeout, in seconds.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.cloud.logging_v2.types.LogExclusion` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request fails.
        google.api_core.exceptions.RetryError: If retries are exhausted.
        ValueError: If the parameters are invalid.
    """
    # Wrap the raw transport method with retry/timeout logic once and
    # cache the wrapped callable for subsequent calls.
    if 'create_exclusion' not in self._inner_api_calls:
        config = self._method_configs['CreateExclusion']
        self._inner_api_calls['create_exclusion'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_exclusion,
                default_retry=config.retry,
                default_timeout=config.timeout,
                client_info=self._client_info,
            )
    request = logging_config_pb2.CreateExclusionRequest(
        parent=parent,
        exclusion=exclusion,
    )
    return self._inner_api_calls['create_exclusion'](
        request, retry=retry, timeout=timeout, metadata=metadata)
def update_exclusion(self,
                     name,
                     exclusion,
                     update_mask,
                     retry=google.api_core.gapic_v1.method.DEFAULT,
                     timeout=google.api_core.gapic_v1.method.DEFAULT,
                     metadata=None):
    """Changes one or more properties of an existing exclusion.

    Example:
        >>> from google.cloud import logging_v2
        >>>
        >>> client = logging_v2.ConfigServiceV2Client()
        >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
        >>> exclusion = {}    # TODO: Initialize `exclusion`
        >>> update_mask = {}  # TODO: Initialize `update_mask`
        >>> response = client.update_exclusion(name, exclusion, update_mask)

    Args:
        name (str): Required. The resource name of the exclusion to update,
            e.g. ``"projects/my-project-id/exclusions/my-exclusion-id"``
            (also supported under ``organizations/``,
            ``billingAccounts/`` and ``folders/``).
        exclusion (Union[dict, ~google.cloud.logging_v2.types.LogExclusion]):
            Required. New values for the existing exclusion; only the
            fields named in ``update_mask`` are relevant. A dict must
            match the :class:`~google.cloud.logging_v2.types.LogExclusion`
            protobuf.
        update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]):
            Required. A nonempty list of fields to change; fields not
            mentioned keep their current value and are ignored in the
            request. For example, ``"filter,description"`` updates the
            filter and description. A dict must match the
            :class:`~google.cloud.logging_v2.types.FieldMask` protobuf.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests; ``None`` disables retries.
        timeout (Optional[float]): Per-attempt timeout, in seconds.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Returns:
        A :class:`~google.cloud.logging_v2.types.LogExclusion` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request fails.
        google.api_core.exceptions.RetryError: If retries are exhausted.
        ValueError: If the parameters are invalid.
    """
    # Wrap the raw transport method with retry/timeout logic once and
    # cache the wrapped callable for subsequent calls.
    if 'update_exclusion' not in self._inner_api_calls:
        config = self._method_configs['UpdateExclusion']
        self._inner_api_calls['update_exclusion'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.update_exclusion,
                default_retry=config.retry,
                default_timeout=config.timeout,
                client_info=self._client_info,
            )
    request = logging_config_pb2.UpdateExclusionRequest(
        name=name,
        exclusion=exclusion,
        update_mask=update_mask,
    )
    return self._inner_api_calls['update_exclusion'](
        request, retry=retry, timeout=timeout, metadata=metadata)
def delete_exclusion(self,
                     name,
                     retry=google.api_core.gapic_v1.method.DEFAULT,
                     timeout=google.api_core.gapic_v1.method.DEFAULT,
                     metadata=None):
    """Deletes an exclusion.

    Example:
        >>> from google.cloud import logging_v2
        >>>
        >>> client = logging_v2.ConfigServiceV2Client()
        >>> name = client.exclusion_path('[PROJECT]', '[EXCLUSION]')
        >>> client.delete_exclusion(name)

    Args:
        name (str): Required. The resource name of an existing exclusion to
            delete, e.g.
            ``"projects/my-project-id/exclusions/my-exclusion-id"`` (also
            supported under ``organizations/``, ``billingAccounts/`` and
            ``folders/``).
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests; ``None`` disables retries.
        timeout (Optional[float]): Per-attempt timeout, in seconds.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            provided to the method.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request fails.
        google.api_core.exceptions.RetryError: If retries are exhausted.
        ValueError: If the parameters are invalid.
    """
    # Wrap the raw transport method with retry/timeout logic once and
    # cache the wrapped callable for subsequent calls.
    if 'delete_exclusion' not in self._inner_api_calls:
        config = self._method_configs['DeleteExclusion']
        self._inner_api_calls['delete_exclusion'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.delete_exclusion,
                default_retry=config.retry,
                default_timeout=config.timeout,
                client_info=self._client_info,
            )
    request = logging_config_pb2.DeleteExclusionRequest(name=name, )
    self._inner_api_calls['delete_exclusion'](
        request, retry=retry, timeout=timeout, metadata=metadata)
|
|
from datetime import datetime, timedelta
import itertools
import sys
import os
import time
import unittest
from lxml import html
from tpb.tpb import TPB, Search, Recent, Top, List, Paginated
from tpb.constants import ConstantType, Constants, ORDERS, CATEGORIES
from tpb.utils import URL
if sys.version_info >= (3, 0):
from urllib.request import urlopen
from tests.cases import RemoteTestCase
unicode = str
else:
from urllib2 import urlopen
from cases import RemoteTestCase
class ConstantsTestCase(RemoteTestCase):
    def test_extension(self):
        """Every nested constant class carries the ConstantType metaclass."""
        pending = [ORDERS, CATEGORIES]
        while pending:
            constants = pending.pop()
            for name, attr in constants.__dict__.items():
                if isinstance(attr, type):
                    self.assertTrue(attr.__class__, ConstantType)
                    # Recurse into nested constant classes as well.
                    pending.append(attr)

    def test_repr(self):
        """repr()/str() of a Constants subclass renders an indented tree."""
        class Alphanum(Constants):
            greek = True

            class Alpha:
                alpha = 'a'
                beta = 'b'
                gamma = 'c'

            class Num:
                alpha = 1
                beta = 2
                gamma = 3

        output = """\
Alphanum:
    Alpha:
        alpha: 'a'
        beta: 'b'
        gamma: 'c'
    Num:
        alpha: 1
        beta: 2
        gamma: 3
    greek: True
"""
        self.assertEqual(repr(Alphanum), output)
        self.assertEqual(str(Alphanum), output)
class PathSegmentsTestCase(RemoteTestCase):
    def setUp(self):
        """Build a URL with three named path segments and their defaults."""
        self.segments = ['alpha', 'beta', 'gamma']
        self.defaults = ['0', '1', '2']
        self.url = URL('', '/', self.segments, self.defaults)

    def test_attributes(self):
        """Each URL instance exposes only its own segment names."""
        foreign_segments = ['one', 'two', 'three']
        foreign_url = URL('', '/', foreign_segments, self.defaults)
        for own, foreign in zip(self.segments, foreign_segments):
            self.assertTrue(hasattr(self.url, own))
            self.assertFalse(hasattr(foreign_url, own))
            self.assertTrue(hasattr(foreign_url, foreign))
            self.assertFalse(hasattr(self.url, foreign))

    def test_properties(self):
        """Assigning segment attributes changes the rendered path."""
        self.assertEqual(str(self.url), '/0/1/2')
        self.url.alpha = '9'
        self.url.beta = '8'
        self.url.gamma = '7'
        self.assertEqual(str(self.url), '/9/8/7')
class ParsingTestCase(RemoteTestCase):
    def setUp(self):
        self.torrents = Search(self.url, 'tpb afk')

    def test_items(self):
        """A search result page yields exactly 30 torrents, however iterated."""
        self.assertEqual(len(list(self.torrents.items())), 30)
        self.assertEqual(len(list(iter(self.torrents))), 30)

    def test_creation_dates(self):
        """
        Make sure torrents aren't lazily created.
        """
        start = time.time()
        # Create torrents
        torrents = self.torrents.items()
        time.sleep(1)
        # If they were lazily evaluated, they would be created now
        self.assertTrue(next(torrents)._created[1] - start > 1)

    def test_torrent_rows(self):
        """The parsed result table of the raw page holds 30 rows."""
        page = urlopen(str(self.torrents.url))
        document = html.parse(page)
        rows = self.torrents._get_torrent_rows(document.getroot())
        self.assertEqual(len(rows), 30)

    def test_torrent_build(self):
        """Spot-check parsed fields of a well-known torrent in the results."""
        for torrent in self.torrents.items():
            is_known = (torrent.title == 'TPB.AFK.2013.720p.h264-SimonKlose'
                        and torrent.user == 'SimonKlose')
            if is_known:
                self.assertEqual(torrent.user_status, 'VIP')
                self.assertTrue(torrent.comments >= 313)
                self.assertEqual(torrent.has_cover, 'Yes')
                break
class TorrentTestCase(RemoteTestCase):
    def setUp(self):
        self.torrents = Search(self.url, 'tpb afk')

    def assertEqualDatetimes(self, *datetimes):
        """Compare datetimes while ignoring sub-second differences."""
        truncated = [moment.replace(microsecond=0) for moment in datetimes]
        return self.assertEqual(*truncated)

    def test_created_timestamp_parse(self):
        """Relative timestamp strings resolve against the capture time."""
        for torrent in self.torrents.items():
            torrent.created
            # Simple "<n> <unit> ago" stamps captured just now.
            for stamp, delta in [('1 sec ago', timedelta(seconds=1)),
                                 ('1 min ago', timedelta(minutes=1)),
                                 ('1 hour ago', timedelta(hours=1))]:
                torrent._created = (stamp, time.time())
                self.assertEqualDatetimes(torrent.created,
                                          datetime.now() - delta)
            torrent._created = ('Today', time.time())
            self.assertEqual(torrent.created.date(), datetime.now().date())
            torrent._created = ('Y-day', time.time())
            self.assertEqual(torrent.created.date(),
                             (datetime.now() - timedelta(days=1)).date())
            # A relative stamp captured a day ago shifts by that day too.
            torrent._created = ('1 sec ago', time.time() - 60 * 60 * 24)
            self.assertEqualDatetimes(
                torrent.created,
                datetime.now() - timedelta(days=1, seconds=1))

    def test_info(self):
        """Every torrent exposes a non-blank info text."""
        for torrent in self.torrents.items():
            self.assertNotEqual('', torrent.info.strip())

    def test_files(self):
        """Every torrent lists at least one file."""
        for torrent in self.torrents.items():
            self.assertTrue(len(torrent.files) > 0)
class PaginationTestCase(RemoteTestCase):
    """Tests for single-page and multi-page result iteration."""

    def setUp(self):
        self.torrents = Search(self.url, 'tpb afk')

    def test_page_items(self):
        """A single page yields exactly 30 torrents."""
        self.assertEqual(len(list(self.torrents.items())), 30)

    def test_multipage_items(self):
        """multipage() iteration crosses page boundaries transparently."""
        self.torrents.multipage()
        items = list(itertools.islice(self.torrents.items(), 50))
        self.assertEqual(len(items), 50)
        # 50 items at 30 per page means iteration stopped on page 1.
        self.assertEqual(self.torrents.page(), 1)

    def test_last_page(self):
        """Multipage iteration stops cleanly when pages run out."""
        class DummyList(List):
            # Fake list yielding 10 items per page for 5 pages.
            pages_left = 5

            def items(self):
                if self.pages_left == 0:
                    # PEP 479: raising StopIteration inside a generator
                    # becomes RuntimeError on Python 3.7+; a bare return
                    # is the correct way to end the generator early.
                    return
                for i in range(10):
                    yield i
                self.pages_left -= 1

        class DummySearch(Search, Paginated, DummyList):
            pass

        self.torrents = DummySearch(self.url, 'tpb afk').multipage()
        self.assertEqual(len(list(iter(self.torrents))), 50)
class SearchTestCase(RemoteTestCase):
    def setUp(self):
        self.torrents = Search(self.url, 'tpb afk')

    def test_url(self):
        """Query, page, order and category mutate the generated URL."""
        self.assertEqual(str(self.torrents.url),
                         self.url + '/search/tpb%20afk/0/7/0')
        self.torrents.query('something').page(1).next().previous()
        self.torrents.order(9).category(100)
        for actual, expected in [(self.torrents.query(), 'something'),
                                 (self.torrents.page(), 1),
                                 (self.torrents.order(), 9),
                                 (self.torrents.category(), 100)]:
            self.assertEqual(actual, expected)
        self.assertEqual(str(self.torrents.url),
                         self.url + '/search/something/1/9/100')

    def test_torrents(self):
        """Every result has text title/user and a /torrent/ detail URL."""
        for item in self.torrents:
            self.assertEqual(unicode, type(item.title))
            self.assertEqual(unicode, type(item.user))
            self.assertTrue(hasattr(item, 'url'))
            # ensure the URL points to the /torrent/ html page
            self.assertTrue(item.url.path().startswith('/torrent/'))
class RecentTestCase(RemoteTestCase):
    def setUp(self):
        self.torrents = Recent(self.url)

    def test_url(self):
        """Paging controls are reflected in the /recent/<page> URL."""
        self.assertEqual(str(self.torrents.url), self.url + '/recent/0')
        # next() then previous() cancel out, leaving page 1.
        self.torrents.page(1).next().previous()
        self.assertEqual(str(self.torrents.url), self.url + '/recent/1')
class TopTestCase(RemoteTestCase):
    def setUp(self):
        self.torrents = Top(self.url)

    def test_url(self):
        """The chosen category becomes the /top/<category> URL suffix."""
        self.assertEqual(str(self.torrents.url), self.url + '/top/0')
        self.torrents.category(100)
        self.assertEqual(str(self.torrents.url), self.url + '/top/100')

    def test_results(self):
        """The top listing always contains exactly 100 torrents."""
        self.assertEqual(len(list(self.torrents.items())), 100)
class TPBTestCase(RemoteTestCase):
    def setUp(self):
        self.tpb = TPB(self.url)

    def _assert_equivalent(self, via_tpb, direct, cls):
        """Both construction paths yield the same type and URL."""
        self.assertTrue(isinstance(via_tpb, cls))
        self.assertTrue(isinstance(direct, cls))
        self.assertEqual(str(via_tpb.url), str(direct.url))

    def test_search(self):
        """TPB.search() matches a directly-constructed Search."""
        kwargs = {'query': 'tpb afk', 'page': 5, 'order': 9, 'category': 100}
        self._assert_equivalent(self.tpb.search(**kwargs),
                                Search(self.url, **kwargs), Search)

    def test_recent(self):
        """TPB.recent() matches a directly-constructed Recent."""
        kwargs = {'page': 5}
        self._assert_equivalent(self.tpb.recent(**kwargs),
                                Recent(self.url, **kwargs), Recent)

    def test_top(self):
        """TPB.top() matches a directly-constructed Top."""
        kwargs = {'category': 100}
        self._assert_equivalent(self.tpb.top(**kwargs),
                                Top(self.url, **kwargs), Top)
def load_tests(loader, tests, discovery):
    """Toggle RemoteTestCase's local/remote flags from the environment.

    The LOCAL and REMOTE environment variables, when present, enable or
    disable the matching test mode ('true' or '1' mean enabled).
    """
    for attr, variable in (('_do_local', 'LOCAL'), ('_do_remote', 'REMOTE')):
        value = os.environ.get(variable)
        if value is not None:
            setattr(RemoteTestCase, attr, value.lower() in ['true', '1'])
    return unittest.TestSuite(tests)
if __name__ == '__main__':
unittest.main()
|
|
"""
Views for SymmetricalEureka
"""
from importlib import import_module
try:
from inspect import signature
except ImportError:
# pylint: disable=import-error
from funcsigs import signature
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.urlresolvers import reverse_lazy
from django.db.models import base
from django.http import (Http404, HttpResponseBadRequest, HttpResponseRedirect,
JsonResponse)
from django.shortcuts import get_object_or_404
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import DetailView, ListView
from django.views.generic.base import TemplateView, View
from django.views.generic.detail import BaseDetailView
try:
from django.contrib.auth.mixins import LoginRequiredMixin,\
PermissionRequiredMixin
except ImportError:
from SymmetricalEureka.backports.django_contrib_auth_mixins import\
LoginRequiredMixin, PermissionRequiredMixin
# pylint: disable=wrong-import-order
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import authentication, generics, permissions
from .models import (AbilityScores, CASTER_CLASSES, Character, SpellListing,
SpellClasses, UserProfile)
from .forms import AbilityScoresForm, CharacterForm
from .serializers import SpellListingSerializer, SpellClassesSerializer
# pylint: disable=too-many-ancestors
class PlayerLoggedIn(LoginRequiredMixin):
    """
    Mixin that loads page-header data for the logged-in user.

    Inherits LoginRequiredMixin, so anonymous users are rejected.
    """
    # Populated on each request with the user's characters, sorted by
    # name, for rendering the header navigation.
    character_list = None

    def get_context_data(self, **kwargs):
        """Refresh character_list before delegating to the parent view."""
        self.character_list = Character.objects.filter(
            player__user=self.request.user).order_by('character_name')
        return super(PlayerLoggedIn, self).get_context_data(**kwargs)
class LoginView(PermissionRequiredMixin, TemplateView):
    """
    Login page, shown only to anonymous visitors.

    PermissionRequiredMixin is slightly abused here: a user "has
    permission" for this page exactly when they are not logged in yet.
    """
    template_name = 'SymmetricalEureka/login.html'

    def has_permission(self):
        """Grant access only to unauthenticated users."""
        return not self.request.user.is_authenticated()

    def handle_no_permission(self):
        """Already logged in: bounce straight to the home page."""
        return HttpResponseRedirect(reverse_lazy('SE_home'))

    def get_context_data(self, **kwargs):
        """Expose the post-login destination to the template."""
        context = super(LoginView, self).get_context_data(**kwargs)
        context['next_path'] = reverse_lazy('SE_home')
        return context
class LoggedInHomeView(PlayerLoggedIn, TemplateView):
    """
    Home page variant rendered for authenticated users.
    """
    template_name = 'SymmetricalEureka/home.html'
class NotLoggedInHomeView(TemplateView):
    """
    Home page variant rendered for anonymous visitors.
    """
    template_name = 'SymmetricalEureka/home.html'
class HomeView(View):
    """
    Dispatcher for the home page.

    Hands the request to a logged-in or anonymous variant of the view.
    """
    def get(self, request, *args, **kwargs):
        """Route GET to the variant matching the authentication state."""
        handler = (LoggedInHomeView.as_view()
                   if request.user.is_authenticated()
                   else NotLoggedInHomeView.as_view())
        return handler(request, *args, **kwargs)
class DisplayCharacterView(PlayerLoggedIn, DetailView):
    """
    View displaying a single character owned by the logged-in user.
    """
    template_name = 'SymmetricalEureka/character.html'
    pk_url_kwarg = 'Char_uuid'
    model = Character

    def dispatch(self, request, *args, **kwargs):
        """
        Override TemplateView.dispatch to test if Character belongs to User.

        Uses get_object_or_404 so an unknown Char_uuid yields a 404
        instead of an unhandled Character.DoesNotExist (HTTP 500).
        """
        # pylint: disable=attribute-defined-outside-init
        self.player_character = get_object_or_404(
            Character, Char_uuid=kwargs['Char_uuid'])
        if self.player_character.player.user.id != request.user.id:
            raise PermissionDenied()
        return super(DisplayCharacterView, self).dispatch(request, *args,
                                                          **kwargs)

    def get_context_data(self, **kwargs):
        """Add the character's ability scores to the template context."""
        kwargs['ability_scores'] = self.get_ability_scores()
        return super(DisplayCharacterView, self).get_context_data(**kwargs)

    def get_ability_scores(self):
        """ generate a list of the ability scores for the character."""
        return AbilityScores.objects.filter(character=self.player_character)
def build_kwargs(func, data):
    """Map each parameter name of *func* to its value in *data*.

    Parameters absent from *data* are mapped to None; extra keys in
    *data* are ignored.
    """
    params = signature(func).parameters
    return dict((name, data.get(name, None)) for name in params)
class ClassMethodView(View):
    """Expose classmethods of Django models as a JSON GET api."""
    # Dotted path searched for model classes.
    module = 'SymmetricalEureka.models'
    # Companion methods evaluated (best-effort) alongside the requested one.
    extra_methods = {'ability_score_mod': ['abs_saving_throw']}

    def get(self, request, *args, **kwargs):
        """Resolve kwargs['model']/kwargs['method'] and call with GET args."""
        try:
            model_cls = getattr(import_module(self.module), kwargs['model'])
        except AttributeError:
            raise Http404()
        # Only real Django models may be queried this way.
        if not issubclass(model_cls, base.Model):
            raise Http404()
        method_name = kwargs['method']
        try:
            method = getattr(model_cls, method_name)
        except AttributeError:
            raise Http404()
        try:
            result = method(**build_kwargs(method, request.GET))
        except (TypeError, ValueError):
            return HttpResponseBadRequest()
        payload = {method_name: result}
        # Companion methods are best-effort: call failures are skipped,
        # and a missing method contributes a None value.
        for extra_name in self.extra_methods.get(method_name, []):
            try:
                extra = getattr(model_cls, extra_name)
            except AttributeError:
                def extra():
                    return None
            try:
                payload[extra_name] = extra(**build_kwargs(extra,
                                                           request.GET))
            except (TypeError, ValueError):
                pass
        return JsonResponse(payload)
class CharacterAtributeView(LoginRequiredMixin, BaseDetailView):
    """ View that exposes Character Attributes as JSON api."""
    # Look the Character up by its UUID and render responses as JSON
    # instead of HTML.
    model = Character
    pk_url_kwarg = 'Char_uuid'
    raise_exception = True
    response_class = JsonResponse

    def get_object(self, queryset=None):
        """Return the AbilityScores row named in the URL.

        Resolves the Character first (PermissionDenied unless owned by
        the requesting user), then looks up the requested ability-score
        row on it (Http404 when the name or row does not exist).
        """
        character = super(CharacterAtributeView, self).get_object(queryset)
        if character.player.user != self.request.user:
            raise PermissionDenied(self.get_permission_denied_message())
        # Translate the English attribute name from the URL into the
        # internal AbilityScores key; unknown names yield attr=None.
        # pylint: disable=unsubscriptable-object
        attr = AbilityScores.WHICH_ENG_2_KEY.get(self.kwargs['attribute'],
                                                 None)
        queryset = AbilityScores.objects.all().filter(character=character,
                                                      which=attr)
        try:
            obj = queryset.get()
        except queryset.model.DoesNotExist:
            raise Http404
        return obj

    def render_to_response(self, context, **response_kwargs):
        """ Render a response.

        The JSON body maps the English attribute name to its value; any
        extra response_kwargs (e.g. derived modifiers) are merged in.
        """
        obj = context['object']
        response = {AbilityScores.WHICH_KEY_2_ENG[obj.which]: obj.value}
        response.update(response_kwargs)
        return self.response_class(response)

    # pylint: disable=unused-argument
    def post(self, *args, **kwargs):
        """
        Handle post requests by storing to the database and returning the new
        attribute value.
        """
        # pylint: disable=attribute-defined-outside-init
        self.object = self.get_object()
        request = args[0]
        try:
            val = request.POST['value']
        except MultiValueDictKeyError:
            # The POST body did not include a 'value' field.
            return HttpResponseBadRequest()
        try:
            # Validate through the model field so malformed values are
            # rejected before being saved.
            # pylint: disable=protected-access
            field = self.object._meta.get_field('value')
            val = field.clean(val, self.object)
        except ValidationError:
            return HttpResponseBadRequest()
        self.object.value = val
        self.object.save()
        func = AbilityScores.ability_score_mod
        context = self.get_context_data(object=self.object)
        # Return the recomputed modifier and saving throw alongside the
        # stored value.
        return self.render_to_response(context,
                                       ability_score_mod=func(val),
                                       saving_throw=self.object.saving_throw)
class NewCharacterView(PlayerLoggedIn, TemplateView):
    """
    View for creating a new character together with its ability scores.
    """
    template_name = 'SymmetricalEureka/new_character.html'

    def get_context_data(self, **kwargs):
        """Provide unbound forms and saving throws unless already given."""
        if 'character_form' not in kwargs:
            kwargs['character_form'] = CharacterForm(instance=Character())
        if 'as_forms' not in kwargs:
            kwargs['as_forms'] = [
                AbilityScoresForm(prefix=choice[1], instance=AbilityScores())
                for choice in AbilityScores.WHICH_CHOICES]
        if 'sav_throw' not in kwargs:
            kwargs['sav_throw'] = [AbilityScores(which=choice[0])
                                   for choice in AbilityScores.WHICH_CHOICES]
        return super(NewCharacterView, self).get_context_data(**kwargs)

    def post(self, request, *args, **kwargs):
        """Validate and persist the character plus one form per ability."""
        character_form = CharacterForm(request.POST, instance=Character())
        as_forms = [AbilityScoresForm(request.POST, prefix=choice[1],
                                      instance=AbilityScores())
                    for choice in AbilityScores.WHICH_CHOICES]
        # The list comprehension deliberately validates every ability
        # form so each one's errors are populated for re-rendering.
        if character_form.is_valid() and \
                all([form.is_valid() for form in as_forms]):
            character = character_form.save(commit=False)
            character.player = UserProfile.objects.get(user=self.request.user)
            character.save()
            for ability_form in as_forms:
                score = ability_form.save(commit=False)
                score.character = character
                score.which = AbilityScores.WHICH_ENG_2_KEY[
                    ability_form.prefix]
                score.save()
            return HttpResponseRedirect(character.get_absolute_url())
        # Invalid input: re-render the page with the bound (erroring) forms.
        context = self.get_context_data(character_form=character_form,
                                        as_forms=as_forms)
        return self.render_to_response(context)
class SpellListView(ListView):
    """
    Listing page for all spells, with the user's starred spells marked.
    """
    model = SpellListing
    template_name = 'SymmetricalEureka/spell_list.html'

    def get_queryset(self):
        """Spells sorted alphabetically by name."""
        return SpellListing.objects.order_by('name')

    def get_context_data(self, **kwargs):
        """Add caster classes plus the user's profile and starred spells."""
        profile = UserProfile.objects.get(user=self.request.user)
        kwargs['caster_classes'] = CASTER_CLASSES
        kwargs['profile'] = profile
        kwargs['starred'] = profile.spells.all()
        return super(SpellListView, self).get_context_data(**kwargs)
class SpellListDetail(generics.RetrieveAPIView):
    """
    REST endpoint returning the details of a single spell.
    """
    queryset = SpellListing.objects.all()
    serializer_class = SpellListingSerializer
class SpellClassesView(generics.ListAPIView):
    """
    REST endpoint listing the spells available to one caster class.
    """
    serializer_class = SpellClassesSerializer

    def get_queryset(self):
        """Filter spell/class rows by the ``cls`` URL keyword argument."""
        return SpellClasses.objects.filter(
            caster_class__exact=self.kwargs['cls'])
class UserSpellView(APIView):
    """
    REST endpoint toggling a spell's "starred" state for the current user.
    """
    authentication_classes = (authentication.SessionAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)

    def post(self, request, pk):
        """Star the spell named ``pk`` if unstarred, otherwise unstar it.

        Returns JSON of the form ``{"Spell": pk, "starred": bool}`` where
        ``starred`` reflects the state after the toggle.
        """
        profile = get_object_or_404(UserProfile, user=request.user)
        queryset = profile.spells.filter(name=pk)
        starred = False
        # exists() is cheaper than count() == 0: it issues a LIMIT 1
        # query instead of a full COUNT.
        if queryset.exists():
            profile.spells.remove(queryset[0])
        else:
            spell = get_object_or_404(SpellListing, name=pk)
            profile.spells.add(spell)
            starred = True
        data = {
            "Spell": pk,
            "starred": starred
        }
        return Response(data)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012-2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A helper class for proxy objects to remote APIs.
For more information about rpc API version numbers, see:
rpc/dispatcher.py
"""
from trove.openstack.common import rpc
from trove.openstack.common.rpc import common as rpc_common
from trove.openstack.common.rpc import serializer as rpc_serializer
class RpcProxy(object):
    """A helper class for rpc clients.

    This class is a wrapper around the RPC client API. It allows you to
    specify the topic and API version in a single place. This is intended to
    be used as a base class for a class that implements the client side of an
    rpc API.
    """

    # The default namespace, which can be overridden in a subclass.
    RPC_API_NAMESPACE = None

    def __init__(self, topic, default_version, version_cap=None,
                 serializer=None):
        """Initialize an RpcProxy.

        :param topic: The topic to use for all messages.
        :param default_version: The default API version to request in all
               outgoing messages.  This can be overridden on a per-message
               basis.
        :param version_cap: Optionally cap the maximum version used for sent
               messages.
        :param serializer: Optionally (de-)serialize entities with a
               provided helper.
        """
        self.topic = topic
        self.default_version = default_version
        self.version_cap = version_cap
        if serializer is None:
            # Fall back to a pass-through serializer when none is supplied.
            serializer = rpc_serializer.NoOpSerializer()
        self.serializer = serializer
        super(RpcProxy, self).__init__()

    def _set_version(self, msg, vers):
        """Helper method to set the version in a message.

        :param msg: The message having a version added to it.
        :param vers: The version number to add to the message.
        :raises: rpc_common.RpcVersionCapError when the requested version
                 exceeds the configured version cap.
        """
        v = vers if vers else self.default_version
        if (self.version_cap and not
                rpc_common.version_is_compatible(self.version_cap, v)):
            raise rpc_common.RpcVersionCapError(version_cap=self.version_cap)
        msg['version'] = v

    def _get_topic(self, topic):
        """Return the topic to use for a message (override or default)."""
        return topic if topic else self.topic

    def can_send_version(self, version):
        """Check to see if a version is compatible with the version cap."""
        return (not self.version_cap or
                rpc_common.version_is_compatible(self.version_cap, version))

    @staticmethod
    def make_namespaced_msg(method, namespace, **kwargs):
        """Build a message dict with an explicit namespace."""
        return {'method': method, 'namespace': namespace, 'args': kwargs}

    def make_msg(self, method, **kwargs):
        """Build a message dict in this proxy's default namespace."""
        return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE,
                                        **kwargs)

    def _serialize_msg_args(self, context, kwargs):
        """Helper method called to serialize message arguments.

        This calls our serializer on each argument, returning a new
        set of args that have been serialized.

        :param context: The request context
        :param kwargs: The arguments to serialize
        :returns: A new set of serialized arguments
        """
        new_kwargs = dict()
        # Use items() instead of the Python 2-only iteritems() so this
        # helper also works under Python 3; iteration behavior is identical.
        for argname, arg in kwargs.items():
            new_kwargs[argname] = self.serializer.serialize_entity(context,
                                                                   arg)
        return new_kwargs

    def call(self, context, msg, topic=None, version=None, timeout=None):
        """rpc.call() a remote method.

        :param context: The request context
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :param timeout: (Optional) A timeout to use when waiting for the
               response.  If no timeout is specified, a default timeout will
               be used that is usually sufficient.
        :returns: The return value from the remote method.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        real_topic = self._get_topic(topic)
        try:
            result = rpc.call(context, real_topic, msg, timeout)
            return self.serializer.deserialize_entity(context, result)
        except rpc.common.Timeout as exc:
            # Re-raise with the resolved topic/method for better diagnostics.
            raise rpc.common.Timeout(
                exc.info, real_topic, msg.get('method'))

    def multicall(self, context, msg, topic=None, version=None, timeout=None):
        """rpc.multicall() a remote method.

        :param context: The request context
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :param timeout: (Optional) A timeout to use when waiting for the
               response.  If no timeout is specified, a default timeout will
               be used that is usually sufficient.
        :returns: An iterator that lets you process each of the returned
               values from the remote method as they arrive.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        real_topic = self._get_topic(topic)
        try:
            result = rpc.multicall(context, real_topic, msg, timeout)
            return self.serializer.deserialize_entity(context, result)
        except rpc.common.Timeout as exc:
            # Re-raise with the resolved topic/method for better diagnostics.
            raise rpc.common.Timeout(
                exc.info, real_topic, msg.get('method'))

    def cast(self, context, msg, topic=None, version=None):
        """rpc.cast() a remote method.

        :param context: The request context
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :returns: None.  rpc.cast() does not wait on any return value from
               the remote method.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        rpc.cast(context, self._get_topic(topic), msg)

    def fanout_cast(self, context, msg, topic=None, version=None):
        """rpc.fanout_cast() a remote method.

        :param context: The request context
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :returns: None.  rpc.fanout_cast() does not wait on any return value
               from the remote method.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        rpc.fanout_cast(context, self._get_topic(topic), msg)

    def cast_to_server(self, context, server_params, msg, topic=None,
                       version=None):
        """rpc.cast_to_server() a remote method.

        :param context: The request context
        :param server_params: Server parameters.  See rpc.cast_to_server()
               for details.
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :returns: None.  rpc.cast_to_server() does not wait on any
               return values.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        rpc.cast_to_server(context, server_params, self._get_topic(topic), msg)

    def fanout_cast_to_server(self, context, server_params, msg, topic=None,
                              version=None):
        """rpc.fanout_cast_to_server() a remote method.

        :param context: The request context
        :param server_params: Server parameters.  See rpc.cast_to_server()
               for details.
        :param msg: The message to send, including the method and args.
        :param topic: Override the topic for this message.
        :param version: (Optional) Override the requested API version in this
               message.
        :returns: None.  rpc.fanout_cast_to_server() does not wait on any
               return values.
        """
        self._set_version(msg, version)
        msg['args'] = self._serialize_msg_args(context, msg['args'])
        rpc.fanout_cast_to_server(context, server_params,
                                  self._get_topic(topic), msg)
|
|
import json
import urllib
from collections import OrderedDict
from datetime import datetime
from django.conf import settings
from django.core.cache import cache
from django.db.models import Q
import commonware.log
from elasticsearch_dsl import filter as es_filter
from tower import ugettext_lazy as _lazy
import mkt
from mkt.abuse.models import AbuseReport
from mkt.access import acl
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.files.models import File
from mkt.ratings.models import Review
from mkt.reviewers.models import EscalationQueue, RereviewQueue, ReviewerScore
from mkt.site.helpers import product_as_dict
from mkt.site.models import manual_order
from mkt.site.utils import cached_property, JSONEncoder
from mkt.translations.query import order_by_translation
from mkt.versions.models import Version
from mkt.webapps.models import Webapp
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.tasks import set_storefront_data
from mkt.websites.models import Website
log = commonware.log.getLogger('z.mailer')
def get_review_type(request, webapp, version):
    """Return which review queue the webapp currently belongs to.

    Escalation takes precedence over re-review; an app in neither queue is
    treated as a plain 'pending' review.
    """
    if EscalationQueue.objects.filter(webapp=webapp).exists():
        return 'escalated'
    if RereviewQueue.objects.filter(webapp=webapp).exists():
        return 'rereview'
    return 'pending'
class ReviewBase(object):
    """Base class holding the shared state and helpers for a review action.

    Subclasses provide process_*() methods; this base collects the
    request/webapp/version context, queue-membership flags and the optional
    attachment/tested-on formsets they need.
    """
    def __init__(self, request, webapp, version, attachment_formset=None,
                 testedon_formset=None):
        # Core review context.
        self.request = request
        self.user = self.request.user
        self.webapp = webapp
        self.version = version
        self.review_type = get_review_type(request, webapp, version)
        # Populated later (e.g. by subclasses via set_data()).
        self.files = None
        self.comm_thread = None
        self.attachment_formset = attachment_formset
        self.testedon_formset = testedon_formset
        # Queue membership flags, computed once up front.
        self.in_pending = self.webapp.status == mkt.STATUS_PENDING
        self.in_rereview = RereviewQueue.objects.filter(
            webapp=self.webapp).exists()
        self.in_escalate = EscalationQueue.objects.filter(
            webapp=self.webapp).exists()
    def get_attachments(self):
        """
        Returns a list of triples suitable to be attached to an email.

        Each triple is (filename, bytes, content_type) taken from the
        uploaded files referenced by the attachment formset.  Returns []
        when the formset's TOTAL_FORMS count is missing or non-numeric.
        NOTE(review): assumes self.attachment_formset is not None here
        (AttributeError otherwise) -- confirm against callers.
        """
        try:
            num = int(self.attachment_formset.data['attachment-TOTAL_FORMS'])
        except (ValueError, TypeError):
            return []
        else:
            files = []
            for i in xrange(num):
                attachment_name = 'attachment-%d-attachment' % i
                attachment = self.request.FILES.get(attachment_name)
                if attachment:
                    attachment.open()
                    files.append((attachment.name, attachment.read(),
                                  attachment.content_type))
            return files
    def set_webapp(self, **kw):
        """Alters webapp using provided kwargs (post-save signals disabled)."""
        self.webapp.update(_signal=False, **kw)
    def set_reviewed(self):
        """Sets reviewed timestamp on version (signals disabled)."""
        self.version.update(_signal=False, reviewed=datetime.now())
    def set_files(self, status, files, hide_disabled_file=False):
        """Change the files to be the new status and hide as appropriate.

        Refreshes datestatuschanged/reviewed timestamps on every file and
        suppresses post-save signals.
        """
        for file in files:
            file.update(_signal=False, datestatuschanged=datetime.now(),
                        reviewed=datetime.now(), status=status)
            if hide_disabled_file:
                file.hide_disabled_file()
    def create_note(self, action):
        """
        Create the review note for ``action``: a comm note plus an
        ActivityLog entry.

        Permissions default to developers + reviewers + Mozilla contacts.
        For escalation/comment, exclude the developer from the conversation.
        """
        details = {'comments': self.data['comments'],
                   'reviewtype': self.review_type}
        if self.files:
            details['files'] = [f.id for f in self.files]
        # Append the "Tested on ..." summary to the visible comment body.
        tested = self.get_tested()
        if tested:
            self.data['comments'] += '\n\n%s' % tested
        # Commbadge (the future).
        note_type = comm.ACTION_MAP(action.id)
        self.comm_thread, self.comm_note = create_comm_note(
            self.webapp, self.version, self.request.user,
            self.data['comments'], note_type=note_type,
            attachments=self.attachment_formset)
        # ActivityLog (ye olde).
        mkt.log(action, self.webapp, self.version, user=self.user,
                created=datetime.now(), details=details)
    def get_tested(self):
        """
        Get string indicating devices/browsers used by reviewer to test.
        Will be automatically attached to the note body.

        Returns '' when there is no formset or it has no usable rows.
        """
        tested_on_text = []
        if not self.testedon_formset:
            return ''
        for form in self.testedon_formset.forms:
            if form.cleaned_data:
                dtype = form.cleaned_data.get('device_type', None)
                device = form.cleaned_data.get('device', None)
                version = form.cleaned_data.get('version', None)
                # Build the most specific description the row allows.
                if device and version:
                    text = ('%s platform on %s with version %s' %
                            (dtype, device, version))
                elif device and not version:
                    text = '%s platform on %s' % (dtype, device)
                elif not device and version:
                    text = '%s with version %s' % (dtype, version)
                else:
                    text = dtype
                if text:
                    tested_on_text.append(text)
        if not len(tested_on_text):
            return ''
        else:
            return 'Tested on ' + '; '.join(tested_on_text)
class ReviewApp(ReviewBase):
    """Concrete review handler: each process_*() performs one review action
    on the webapp/version held by the base class."""
    def set_data(self, data):
        # Stash validated form data and the version's files for later use
        # by create_note() and the process_*() methods.
        self.data = data
        self.files = self.version.files.all()
    def process_approve(self):
        """
        Handle the approval of apps and/or files.

        Routes to public/unlisted/private processing based on publish_type,
        then performs post-approval bookkeeping.  Returns the result of
        ReviewerScore.award_points() (None if the app is incomplete).
        """
        if self.webapp.has_incomplete_status():
            # Failsafe.
            return
        # Hold onto the status before we change it.
        status = self.webapp.status
        if self.webapp.publish_type == mkt.PUBLISH_IMMEDIATE:
            self._process_public(mkt.STATUS_PUBLIC)
        elif self.webapp.publish_type == mkt.PUBLISH_HIDDEN:
            self._process_public(mkt.STATUS_UNLISTED)
        else:
            self._process_private()
        # Note: Post save signals shouldn't happen here. All the set_*()
        # methods pass _signal=False to prevent them from being sent. They are
        # manually triggered in the view after the transaction is committed to
        # avoid multiple indexing tasks getting fired with stale data.
        #
        # This does mean that we need to call update_version() manually to get
        # the webapp in the correct state before updating names. We do that,
        # passing _signal=False again to prevent it from sending
        # 'version_changed'. The post_save() that happen in the view will
        # call it without that parameter, sending 'version_changed' normally.
        self.webapp.update_version(_signal=False)
        if self.webapp.is_packaged:
            self.webapp.update_name_from_package_manifest()
            self.webapp.update_supported_locales()
        self.webapp.resend_version_changed_signal = True
        if self.in_escalate:
            EscalationQueue.objects.filter(webapp=self.webapp).delete()
        # Clear priority_review flag on approval - it's not persistent.
        if self.webapp.priority_review:
            self.webapp.update(priority_review=False)
        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.webapp,
                                          status)
    def _process_private(self):
        """Make an app private (approved, not publicly listed)."""
        if self.webapp.has_incomplete_status():
            # Failsafe.
            return
        self.webapp.sign_if_packaged(self.version.pk)
        # If there are no prior PUBLIC versions we set the file status to
        # PUBLIC no matter what ``publish_type`` was chosen since at least one
        # version needs to be PUBLIC when an app is approved to set a
        # ``current_version``.
        if File.objects.filter(version__webapp__pk=self.webapp.pk,
                               status=mkt.STATUS_PUBLIC).count() == 0:
            self.set_files(mkt.STATUS_PUBLIC, self.version.files.all())
        else:
            self.set_files(mkt.STATUS_APPROVED, self.version.files.all())
        if self.webapp.status not in (mkt.STATUS_PUBLIC, mkt.STATUS_UNLISTED):
            self.set_webapp(status=mkt.STATUS_APPROVED,
                            highest_status=mkt.STATUS_APPROVED)
        self.set_reviewed()
        self.create_note(mkt.LOG.APPROVE_VERSION_PRIVATE)
        log.info(u'Making %s approved' % self.webapp)
    def _process_public(self, status):
        """Changes status to a publicly viewable status (PUBLIC/UNLISTED)."""
        if self.webapp.has_incomplete_status():
            # Failsafe.
            return
        self.webapp.sign_if_packaged(self.version.pk)
        # Save files first, because set_webapp checks to make sure there
        # is at least one public file or it won't make the webapp public.
        self.set_files(mkt.STATUS_PUBLIC, self.version.files.all())
        # If app is already an approved status, don't change it when approving
        # a version.
        if self.webapp.status not in mkt.WEBAPPS_APPROVED_STATUSES:
            self.set_webapp(status=status, highest_status=status)
        self.set_reviewed()
        set_storefront_data.delay(self.webapp.pk)
        self.create_note(mkt.LOG.APPROVE_VERSION)
        log.info(u'Making %s public' % self.webapp)
    def process_reject(self):
        """
        Reject an app.
        Changes status to Rejected.
        Creates Rejection note.

        Returns the result of ReviewerScore.award_points().
        """
        # Hold onto the status before we change it.
        status = self.webapp.status
        self.set_files(mkt.STATUS_DISABLED, self.version.files.all(),
                       hide_disabled_file=True)
        # If this app is not packaged (packaged apps can have multiple
        # versions) or if there aren't other versions with already reviewed
        # files, reject the app also.
        if (not self.webapp.is_packaged or
            not self.webapp.versions.exclude(id=self.version.id)
                .filter(files__status__in=mkt.REVIEWED_STATUSES).exists()):
            self.set_webapp(status=mkt.STATUS_REJECTED)
        if self.in_escalate:
            EscalationQueue.objects.filter(webapp=self.webapp).delete()
        if self.in_rereview:
            RereviewQueue.objects.filter(webapp=self.webapp).delete()
        self.create_note(mkt.LOG.REJECT_VERSION)
        log.info(u'Making %s disabled' % self.webapp)
        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.webapp,
                                          status, in_rereview=self.in_rereview)
    def process_request_information(self):
        """Send a message to the authors (no status change)."""
        self.create_note(mkt.LOG.REQUEST_INFORMATION)
        self.version.update(has_info_request=True)
        log.info(u'Sending reviewer message for %s to authors' % self.webapp)
    def process_escalate(self):
        """
        Ask for escalation for an app (EscalationQueue).
        Doesn't change status.
        Creates Escalation note.
        """
        EscalationQueue.objects.get_or_create(webapp=self.webapp)
        self.create_note(mkt.LOG.ESCALATE_MANUAL)
        log.info(u'Escalated review requested for %s' % self.webapp)
    def process_comment(self):
        """
        Editor comment (not visible to developer).
        Doesn't change status.
        Creates Reviewer Comment note.
        """
        self.version.update(has_editor_comment=True)
        self.create_note(mkt.LOG.COMMENT_VERSION)
    def process_manual_rereview(self):
        """
        Adds the app to the rereview queue.
        Doesn't change status.
        Creates Reviewer Comment note.
        """
        RereviewQueue.objects.get_or_create(webapp=self.webapp)
        self.create_note(mkt.LOG.REREVIEW_MANUAL)
        log.info(u'Re-review manually requested for %s' % self.webapp)
    def process_clear_escalation(self):
        """
        Clear app from escalation queue.
        Doesn't change status.
        Creates Reviewer-only note.
        """
        EscalationQueue.objects.filter(webapp=self.webapp).delete()
        self.create_note(mkt.LOG.ESCALATION_CLEARED)
        log.info(u'Escalation cleared for app: %s' % self.webapp)
    def process_clear_rereview(self):
        """
        Clear app from re-review queue.
        Doesn't change status.
        Creates Reviewer-only note.

        Returns the result of ReviewerScore.award_points().
        """
        RereviewQueue.objects.filter(webapp=self.webapp).delete()
        self.create_note(mkt.LOG.REREVIEW_CLEARED)
        log.info(u'Re-review cleared for app: %s' % self.webapp)
        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.webapp,
                                          self.webapp.status, in_rereview=True)
    def process_disable(self):
        """
        Bans app from Marketplace, clears app from all queues.
        Changes status to Disabled.
        Creates Banned/Disabled note.

        No-op unless the requesting user has the Apps:Edit permission.
        """
        if not acl.action_allowed(self.request, 'Apps', 'Edit'):
            return
        # Disable disables all files, not just those in this version.
        self.set_files(mkt.STATUS_DISABLED,
                       File.objects.filter(version__webapp=self.webapp),
                       hide_disabled_file=True)
        self.webapp.update(status=mkt.STATUS_DISABLED)
        if self.in_escalate:
            EscalationQueue.objects.filter(webapp=self.webapp).delete()
        if self.in_rereview:
            RereviewQueue.objects.filter(webapp=self.webapp).delete()
        set_storefront_data.delay(self.webapp.pk, disable=True)
        self.create_note(mkt.LOG.APP_DISABLED)
        log.info(u'App %s has been banned by a reviewer.' % self.webapp)
class ReviewHelper(object):
    """
    A class that builds enough to render the form back to the user and
    process off to the correct handler.

    Wraps a ReviewApp handler and exposes the set of review actions that
    are valid for the given webapp/version.
    """

    def __init__(self, request=None, webapp=None, version=None,
                 attachment_formset=None, testedon_formset=None):
        self.handler = None
        self.required = {}
        self.webapp = webapp
        self.version = version
        self.all_files = version and version.files.all()
        self.attachment_formset = attachment_formset
        self.testedon_formset = testedon_formset
        self.handler = ReviewApp(request, webapp, version,
                                 attachment_formset=self.attachment_formset,
                                 testedon_formset=self.testedon_formset)
        self.review_type = self.handler.review_type
        self.actions = self.get_actions()

    def set_data(self, data):
        """Forward validated form data to the underlying handler."""
        self.handler.set_data(data)

    def get_actions(self):
        """Return an OrderedDict of the actions available for this review.

        Each value maps 'method' (handler callable), 'label', 'minimal'
        and 'details' for form rendering.  Returns an empty dict when the
        app has no version (incomplete app).
        """
        public = {
            'method': self.handler.process_approve,
            'minimal': False,
            'label': _lazy(u'Approve'),
            'details': _lazy(u'This will approve the app and allow the '
                             u'author(s) to publish it.')}
        reject = {
            'method': self.handler.process_reject,
            'label': _lazy(u'Reject'),
            'minimal': False,
            'details': _lazy(u'This will reject the app, remove it from '
                             u'the review queue and un-publish it if already '
                             u'published.')}
        info = {
            'method': self.handler.process_request_information,
            'label': _lazy(u'Message developer'),
            'minimal': True,
            'details': _lazy(u'This will send the author(s) - and other '
                             u'thread subscribers - a message. This will not '
                             u'change the app\'s status.')}
        escalate = {
            'method': self.handler.process_escalate,
            'label': _lazy(u'Escalate'),
            'minimal': True,
            'details': _lazy(u'Flag this app for an admin to review. The '
                             u'comments are sent to the admins, '
                             u'not the author(s).')}
        comment = {
            'method': self.handler.process_comment,
            'label': _lazy(u'Private comment'),
            'minimal': True,
            'details': _lazy(u'Make a private reviewer comment on this app. '
                             u'The message won\'t be visible to the '
                             u'author(s), and no notification will be sent '
                             u'them.')}
        manual_rereview = {
            'method': self.handler.process_manual_rereview,
            'label': _lazy(u'Request Re-review'),
            'minimal': True,
            # Bug fix: the last two fragments previously concatenated to
            # "sent tothem." -- a trailing space was missing.
            'details': _lazy(u'Add this app to the re-review queue. Any '
                             u'comments here won\'t be visible to the '
                             u'author(s), and no notification will be sent to '
                             u'them.')}
        clear_escalation = {
            'method': self.handler.process_clear_escalation,
            'label': _lazy(u'Clear Escalation'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the escalation queue. The '
                             u'author(s) will get no email or see comments '
                             u'here.')}
        clear_rereview = {
            'method': self.handler.process_clear_rereview,
            'label': _lazy(u'Clear Re-review'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the re-review queue. The '
                             u'author(s) will get no email or see comments '
                             u'here.')}
        disable = {
            'method': self.handler.process_disable,
            'label': _lazy(u'Ban app'),
            'minimal': True,
            'details': _lazy(u'Ban the app from Marketplace. Similar to '
                             u'Reject but the author(s) can\'t resubmit. To '
                             u'only be used in extreme cases.')}

        actions = OrderedDict()
        if not self.version:
            # Return early if there is no version, this app is incomplete.
            return actions

        file_status = self.version.files.values_list('status', flat=True)
        multiple_versions = (File.objects.exclude(version=self.version)
                                         .filter(
                                             version__webapp=self.webapp,
                                             status__in=mkt.REVIEWED_STATUSES)
                                         .exists())
        show_privileged = (not self.version.is_privileged or
                           acl.action_allowed(self.handler.request, 'Apps',
                                              'ReviewPrivileged'))

        # Public.
        if ((self.webapp.is_packaged and
             mkt.STATUS_PUBLIC not in file_status and show_privileged) or
            (not self.webapp.is_packaged and
             self.webapp.status != mkt.STATUS_PUBLIC)):
            actions['public'] = public

        # Reject.
        if self.webapp.is_packaged and show_privileged:
            # Packaged apps reject the file only, or the app itself if there's
            # only a single version.
            if (not multiple_versions and
                self.webapp.status not in [mkt.STATUS_REJECTED,
                                           mkt.STATUS_DISABLED]):
                actions['reject'] = reject
            elif multiple_versions and mkt.STATUS_DISABLED not in file_status:
                actions['reject'] = reject
        elif not self.webapp.is_packaged:
            # Hosted apps reject the app itself.
            if self.webapp.status not in [mkt.STATUS_REJECTED,
                                          mkt.STATUS_DISABLED]:
                actions['reject'] = reject

        # Ban/Disable.
        if (acl.action_allowed(self.handler.request, 'Apps', 'Edit') and (
                self.webapp.status != mkt.STATUS_DISABLED or
                mkt.STATUS_DISABLED not in file_status)):
            actions['disable'] = disable

        # Clear re-review.
        if self.handler.in_rereview:
            actions['clear_rereview'] = clear_rereview
        else:
            # Manual re-review.
            actions['manual_rereview'] = manual_rereview

        # Clear escalation.
        if self.handler.in_escalate:
            actions['clear_escalation'] = clear_escalation
        else:
            # Escalate.
            actions['escalate'] = escalate

        # Request info and comment are always shown.
        actions['info'] = info
        actions['comment'] = comment

        return actions

    def process(self):
        """Dispatch to the handler method for the selected action.

        :raises NotImplementedError: when no action was supplied in the
            handler's data (kept for backward compatibility with callers).
        """
        action = self.handler.data.get('action', '')
        if not action:
            raise NotImplementedError
        return self.actions[action]['method']()
def clean_sort_param(request, date_sort='created'):
    """
    Handles empty and invalid values for sort and sort order.
    'created' by ascending is the default ordering.

    Returns a (sort, order) tuple of sanitized values.
    """
    allowed_sorts = ('name', 'created', 'nomination')
    requested_sort = request.GET.get('sort', date_sort)
    requested_order = request.GET.get('order', 'asc')
    sort = requested_sort if requested_sort in allowed_sorts else date_sort
    order = requested_order if requested_order in ('desc', 'asc') else 'asc'
    return sort, order
def clean_sort_param_es(request, date_sort='created'):
    """
    Handles empty and invalid values for sort and sort order.
    'created' by ascending is the default ordering.

    Like clean_sort_param() but maps the sort key onto its Elasticsearch
    field name before returning.
    """
    es_field_by_sort = {
        'name': 'name_sort',
        'nomination': 'latest_version.nomination_date',
    }
    requested_sort = request.GET.get('sort', date_sort)
    requested_order = request.GET.get('order', 'asc')
    if requested_sort not in ('name', 'created', 'nomination'):
        requested_sort = date_sort
    # Sorts without an ES-specific field fall back to date_sort.
    sort = es_field_by_sort.get(requested_sort, date_sort)
    order = requested_order if requested_order in ('desc', 'asc') else 'asc'
    return sort, order
def create_sort_link(pretty_name, sort_field, get_params, sort, order):
    """Generate table header sort links.

    pretty_name -- name displayed on table header
    sort_field -- name of the sort_type GET parameter for the column
    get_params -- additional get_params to include in the sort_link
                  (NOTE: this list is mutated in place -- 'sort' and
                  'order' pairs are appended to it)
    sort -- the current sort type
    order -- the current sort order

    Returns an HTML anchor as a unicode string.  pretty_name is inserted
    unescaped -- presumably always trusted/static text; verify callers.
    """
    get_params.append(('sort', sort_field))
    if sort == sort_field and order == 'asc':
        # Have link reverse sort order to desc if already sorting by asc.
        get_params.append(('order', 'desc'))
    else:
        # Default to ascending.
        get_params.append(('order', 'asc'))
    # Show little sorting sprite if sorting by this field.
    url_class = ''
    if sort == sort_field:
        url_class = ' class="sort-icon ed-sprite-sort-%s"' % order
    return u'<a href="?%s"%s>%s</a>' % (urllib.urlencode(get_params, True),
                                        url_class, pretty_name)
class AppsReviewing(object):
    """
    Class to manage the list of apps a reviewer is currently reviewing.
    Data is stored in memcache.

    The cache key holds a comma-separated string of webapp ids per user.
    """
    def __init__(self, request):
        self.request = request
        self.user_id = request.user.id
        # Per-user cache key listing the app ids under review.
        self.key = '%s:myapps:%s' % (settings.CACHE_PREFIX, self.user_id)
    def get_apps(self):
        """Return [{'app': webapp, 'app_attrs': json}, ...] for the apps
        this user is still actively viewing (per the review_viewing lock)."""
        ids = []
        my_apps = cache.get(self.key)
        if my_apps:
            for id in my_apps.split(','):
                # Only keep apps whose "viewing" lock is still held by us.
                valid = cache.get(
                    '%s:review_viewing:%s' % (settings.CACHE_PREFIX, id))
                if valid and valid == self.user_id:
                    ids.append(id)
        apps = []
        for app in Webapp.objects.filter(id__in=ids):
            apps.append({
                'app': app,
                'app_attrs': json.dumps(
                    product_as_dict(self.request, app, False, 'reviewer'),
                    cls=JSONEncoder),
            })
        return apps
    def add(self, webapp_id):
        """Add a webapp id to this reviewer's list (deduplicated)."""
        my_apps = cache.get(self.key)
        if my_apps:
            apps = my_apps.split(',')
        else:
            apps = []
        apps.append(webapp_id)
        # set() deduplicates; the entry expires after two viewing intervals.
        cache.set(self.key, ','.join(map(str, set(apps))),
                  mkt.EDITOR_VIEWING_INTERVAL * 2)
def log_reviewer_action(webapp, user, msg, action, **kwargs):
    """Record a reviewer action against a webapp's latest version.

    Creates both a comm note (visible per the action's permissions) and an
    ActivityLog entry carrying the comment text.  Extra kwargs are passed
    through to mkt.log().
    """
    create_comm_note(webapp, webapp.latest_version, user, msg,
                     note_type=comm.ACTION_MAP(action.id))
    mkt.log(action, webapp, webapp.latest_version, details={'comments': msg},
            **kwargs)
class ReviewersQueuesHelper(object):
    """Builds the reviewer queue querysets, either from the database or
    from Elasticsearch depending on ``use_es``."""
    def __init__(self, request=None, use_es=False):
        self.request = request
        # When True, get_*_queue() returns ES searches instead of ORM
        # querysets.
        self.use_es = use_es
    @cached_property
    def excluded_ids(self):
        # We need to exclude Escalated Apps from almost all queries, so store
        # the result once.
        return self.get_escalated_queue().values_list('webapp', flat=True)
    def get_escalated_queue(self):
        """Queue of escalated, non-disabled apps."""
        if self.use_es:
            must = [
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=True),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return EscalationQueue.objects.filter(
            webapp__disabled_by_user=False)
    def get_pending_queue(self):
        """Queue of pending first-time reviews, oldest nomination first.

        ORM path returns Version objects; ES path returns a Webapp search.
        """
        if self.use_es:
            must = [
                es_filter.Term(status=mkt.STATUS_PENDING),
                es_filter.Term(**{'latest_version.status':
                                  mkt.STATUS_PENDING}),
                es_filter.Term(is_escalated=False),
                es_filter.Term(is_disabled=False),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return (Version.objects.filter(
            files__status=mkt.STATUS_PENDING,
            webapp__disabled_by_user=False,
            webapp__status=mkt.STATUS_PENDING)
            .exclude(webapp__id__in=self.excluded_ids)
            .order_by('nomination', 'created')
            .select_related('webapp', 'files').no_transforms())
    def get_rereview_queue(self):
        """Queue of apps flagged for re-review (excluding escalated)."""
        if self.use_es:
            must = [
                es_filter.Term(is_rereviewed=True),
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=False),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return (RereviewQueue.objects.
                filter(webapp__disabled_by_user=False).
                exclude(webapp__in=self.excluded_ids))
    def get_updates_queue(self):
        """Queue of pending version updates for already-approved packaged
        apps, oldest nomination first."""
        if self.use_es:
            must = [
                es_filter.Terms(status=mkt.WEBAPPS_APPROVED_STATUSES),
                es_filter.Term(**{'latest_version.status':
                                  mkt.STATUS_PENDING}),
                es_filter.Terms(app_type=[mkt.WEBAPP_PACKAGED,
                                          mkt.WEBAPP_PRIVILEGED]),
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=False),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return (Version.objects.filter(
            # Note: this will work as long as we disable files of existing
            # unreviewed versions when a new version is uploaded.
            files__status=mkt.STATUS_PENDING,
            webapp__disabled_by_user=False,
            webapp__is_packaged=True,
            webapp__status__in=mkt.WEBAPPS_APPROVED_STATUSES)
            .exclude(webapp__id__in=self.excluded_ids)
            .order_by('nomination', 'created')
            .select_related('webapp', 'files').no_transforms())
    def get_moderated_queue(self):
        """Queue of flagged user reviews awaiting moderation."""
        return (Review.objects
                .exclude(Q(webapp__isnull=True) | Q(reviewflag__isnull=True))
                .exclude(webapp__status=mkt.STATUS_DELETED)
                .filter(editorreview=True)
                .order_by('reviewflag__created'))
    def get_abuse_queue(self):
        """Apps with unread abuse reports, oldest app first."""
        report_ids = (AbuseReport.objects
                      .exclude(webapp__isnull=True)
                      .exclude(webapp__status=mkt.STATUS_DELETED)
                      .filter(read=False)
                      .select_related('webapp')
                      .values_list('webapp', flat=True))
        return Webapp.objects.filter(id__in=report_ids).order_by('created')
    def get_abuse_queue_websites(self):
        """Websites with unread abuse reports, oldest website first."""
        report_ids = (AbuseReport.objects
                      .exclude(website__isnull=True)
                      .exclude(website__status=mkt.STATUS_DELETED)
                      .filter(read=False)
                      .select_related('website')
                      .values_list('website', flat=True))
        return Website.objects.filter(id__in=report_ids).order_by('created')
    def sort(self, qs, date_sort='created'):
        """Given a queue queryset, return the sorted version."""
        if self.use_es:
            return self._do_sort_es(qs, date_sort)
        if qs.model == Webapp:
            return self._do_sort_webapp(qs, date_sort)
        return self._do_sort_queue_obj(qs, date_sort)
    def _do_sort_webapp(self, qs, date_sort):
        """
        Column sorting logic based on request GET parameters.
        """
        sort_type, order = clean_sort_param(self.request, date_sort=date_sort)
        order_by = ('-' if order == 'desc' else '') + sort_type
        # Sort.
        if sort_type == 'name':
            # Sorting by name translation.
            return order_by_translation(qs, order_by)
        else:
            # priority_review apps always float to the top.
            return qs.order_by('-priority_review', order_by)
    def _do_sort_queue_obj(self, qs, date_sort):
        """
        Column sorting logic based on request GET parameters.
        Deals with objects with joins on the Webapp (e.g. RereviewQueue,
        Version). Returns qs of apps.
        """
        sort_type, order = clean_sort_param(self.request, date_sort=date_sort)
        sort_str = sort_type
        if sort_type not in [date_sort, 'name']:
            sort_str = 'webapp__' + sort_type
        # sort_str includes possible joins when ordering.
        # sort_type is the name of the field to sort on without desc/asc
        # markers. order_by is the name of the field to sort on with desc/asc
        # markers.
        order_by = ('-' if order == 'desc' else '') + sort_str
        # Sort.
        if sort_type == 'name':
            # Sorting by name translation through an webapp foreign key.
            return order_by_translation(
                Webapp.objects.filter(
                    id__in=qs.values_list('webapp', flat=True)), order_by)
        # Convert sorted queue object queryset to sorted app queryset.
        sorted_app_ids = (qs.order_by('-webapp__priority_review', order_by)
                            .values_list('webapp', flat=True))
        qs = Webapp.objects.filter(id__in=sorted_app_ids)
        # Preserve the queue ordering on the resulting Webapp queryset.
        return manual_order(qs, sorted_app_ids, 'webapps.id')
    def _do_sort_es(self, qs, date_sort):
        """Apply the sanitized sort to an Elasticsearch search object."""
        sort_type, order = clean_sort_param_es(self.request,
                                               date_sort=date_sort)
        order_by = ('-' if order == 'desc' else '') + sort_type
        return qs.sort(order_by)
|
|
# coding= utf-8
# todo 1 (interaction, feature) +1: multiline TODO
# todo 2131 (interaction, feature) +0: popup todo history
# todo 232 (feature) +1: introduce sub-todo's that are part of other, todo yyy: becomes xxx/yyy
# todo 2141 (feature) +0: 232, create sub-todo's with inherited state and priority
import sublime, sublime_plugin
import sys, re
if sys.version < '3':
from db import *
from cache import *
from c import *
else:
from .db import *
from .cache import *
from .c import *
#callback at end of fetching DB
def dbMaintainance():
	"""Run the 'typetodo_maintain' command on the currently active view.

	Used as the callback fired at the end of a DB fetch; silently does
	nothing when no window or view currently has focus.
	"""
	window = sublime.active_window()
	if not window:
		return
	view = window.active_view()
	if not view:
		return
	view.run_command('typetodo_maintain', {})
class TypetodoEvent(sublime_plugin.EventListener):
mutexUnlocked= 1
view= None
# React on all Sublime window close event.
# React on switching project in window.
	def on_deactivated(self, _view):
		"""Handle view deactivation: re-run activation shortly after (to
		catch project switches in the same window) and schedule the cache
		exit handler."""
		# todo 1783 (cleanup, uncertain) -1: switching project in window not clearly fixed, need review
		sublime.set_timeout(lambda: self.on_activated(_view), 200) #spike to catch switching project in existing window
		sublime.set_timeout(WCache().exitHandler, 0) #sublime's timeout is needed to let sublime.windows() be [] at exit
# React on switching into view, initializing DB fetch-synchronize-save.
# Switching into view actually duplicated number of times with several
# handlers, but this handler is main and is used for TodoDb() creation.
	def on_activated(self, _view):
		"""Handle switching into a view: trigger DB fetch/synchronize and
		ensure 'file'-engine databases get the TypeTodo syntax."""
		constCorrect(_view) #coz settings are delayed at load
		cDb= WCache().getDB(True, dbMaintainance) #really applies only once
		#set 'file' syntax where it is not right and check consistency
		if cDb:
			cDb.pushReset()
			#set .do syntax for all 'file' databases
			for cSetting in cDb.config.getSettings():
				if cSetting.engine=='file':
					if cSetting.fullName==_view.file_name() and _view.settings().get('syntax')!='Packages/TypeTodo/typeTodo.tmLanguage':
						_view.set_syntax_file('Packages/TypeTodo/typeTodo.tmLanguage')
		self.on_load_activate(_view)
# Shortcut for on_activated()
def on_load(self, _view):
self.on_load_activate(_view)
# Set readonly for results; maintain and colorize
def on_load_activate(self, _view):
if WCache().checkResultsView(_view.buffer_id()):
sublime.set_timeout(lambda: _view.set_read_only(True), 0)
sublime.set_timeout(lambda: _view.run_command('typetodo_maintain', {}), 0)
# Wipe results view from cache
def on_close(self, _view):
WCache().checkResultsView(_view.buffer_id(), True)
# Both on_modified and on_selection_modified deal with cursor position,
# saving current inside-doplet context to be used later,
# on_modified also reacts on doplet editing.
def on_selection_modified(self, _view):
#not for results view
if WCache().checkResultsView(_view.buffer_id()):
return
if self.mutexUnlocked:
self.mutexUnlocked= 0
self.view= _view
sublime.set_timeout(self.matchTodo, 0) #negative undo effects if no timeout
self.mutexUnlocked= 1
def on_modified(self, _view):
if self.mutexUnlocked:
self.mutexUnlocked= 0
self.view= _view
self.matchTodo(True)
self.mutexUnlocked= 1
# Use previously format autocompletion list.
# It is formed every time when cursor moves into doplet.
def on_query_completions(self, view, prefix, locations):
return self.autoList
lastCat= ['general']
lastLvl= '+0'
prevTriggerNew= None
prevStateMod= None
todoCursorPlace= False
todoMatch= None
autoList= False
# Collect all tags from existing database and form them for autocompletion.
#
# Return list suitable for autocompletion.
def tagsAutoCollect(self):
cDb= WCache().getDB()
if not cDb:
return
tagsA= []
if cDb:
todosA= cDb.todoA
for cTask in todosA:
for cTag in todosA[cTask].tagsA:
if cTag not in tagsA:
tagsA.append(cTag)
tagsListA= [(' ','')]
for cTag in tagsA:
tagsListA.append(('tag: '+cTag, cTag))
return tagsListA
# Check current line at every cursor movement or text modification.
# If line is doplet then:
# context is saved for later use (on_query_context)
# autocomplete list is formed from database tags
# readonly is set
# doplet edits are saved into database
# If line is doplet keyword (#todo):
# new doplet is created
# If line is neither:
# readonly is removed
def matchTodo(self, _modified= False):
self.autoList= False
self.todoCursorPlace= False
if len(self.view.sel())!=1: #more than one cursors skipped for number of reasons
return;
todoRegion = self.view.line(self.view.sel()[0])
todoText = self.view.substr(todoRegion)
self.todoMatch= todoModMatch= RE_TODO_EXISTING.match(todoText) #mod goes first to allow midline todo
if todoModMatch:
#resolve cursor place
selStart= self.view.rowcol(self.view.sel()[0].a)[1]
selEnd= selStart +self.view.sel()[0].b -self.view.sel()[0].a
if selStart>selEnd:
tmp= selStart
selStart= selEnd
selEnd= tmp
#store doplet field name under cursor
self.todoCursorPlace= 'preTodo'
if selStart>=todoModMatch.end('prefix') and selEnd<=todoModMatch.end('postfix'):
self.todoCursorPlace= 'todo'
for rangeName in ('prefix', 'state', 'tags', 'priority', 'postfix'):
if selStart>=todoModMatch.start(rangeName) and selEnd<=todoModMatch.end(rangeName):
self.todoCursorPlace= rangeName
break
#protect fields
self.view.set_read_only(self.todoCursorPlace=='todo')
# todo 1239 (interaction, unsolved) +0: get rid of snippets for tags autocomplete
#toggle default autocomplete to avoid exceeding entries for doplet
self.view.settings().erase('auto_complete_selector')
if self.todoCursorPlace=='tags':
self.autoList= self.tagsAutoCollect()
self.view.settings().set('auto_complete_selector', 'source')
#should trigger at '+' or '!' entered
doWipe= todoModMatch.group('state')=='+' and self.prevStateMod!='+'
if not doWipe: doWipe= todoModMatch.group('state')=='!' and self.prevStateMod!='!'
self.prevStateMod= todoModMatch.group('state')
if _modified:
self.substUpdate(todoModMatch.group('state'), todoModMatch.group('id'), todoModMatch.group('tags'), todoModMatch.group('priority'), todoModMatch.group('comment'), todoModMatch.group('prefix'), todoRegion, doWipe)
sublime.set_timeout(lambda: self.view.run_command('typetodo_maintain', {'_delayed':0}), 0)
return
self.view.set_read_only(False)
todoNewMatch = RE_TODO_NEW.match(todoText)
if todoNewMatch:
#should trigger at ':' entered
doTrigger= todoNewMatch.group('trigger')==':' and self.prevTriggerNew!=':'
self.prevTriggerNew= todoNewMatch.group('trigger')
if _modified and doTrigger:
self.substNew(todoNewMatch.group('prefix'), todoNewMatch.group('comment'), todoRegion)
sublime.set_timeout(lambda: self.view.run_command('typetodo_maintain', {'_delayed':0}), 0)
return
# Create new todo in db and return string to replace original 'todo:'
# Saves first version of task if _postfx supplied, that is used when
# creating doplet in mid-line.
# todo 2128 (general, ux) +0: allow '//todo xxx:' expanding into existing xxx
def substNew(self, _prefx, _postfx, _region):
todoId= self.cfgStore(0, '', self.lastCat[0], self.lastLvl, self.view.file_name(), '')
todoComment= _prefx + ' ' +STATE_DEFAULT[0] +'todo ' +str(todoId) +' (${1:' +self.lastCat[0] +'}) ${2:' +self.lastLvl +'}: ${0:}' +_postfx +''
self.view.run_command('typetodo_reg_replace', {'_regStart': _region.a, '_regEnd': _region.b})
self.view.run_command("insert_snippet", {"contents": todoComment})
if _postfx != '': #need to save if have comment at creation
self.substUpdate('', todoId, self.lastCat[0], self.lastLvl, _postfx, _prefx, _region)
return todoId
# Store to db and, if changed state, remove comment
#
# Return function to be used in substUpdate()
def substDoUpdate(self, _updVals):
def func(_txt=False):
if _txt==False or _txt=='':
_txt= _updVals['_comment']
cView= _updVals['_view']
if _updVals['_tags'] != None:
self.lastCat[0]= _updVals['_tags']
_updVals['_id']= self.cfgStore(_updVals['_id'], _updVals['_state'], _updVals['_tags'], _updVals['_lvl'] or 0, self.view.file_name(), _txt)
if _updVals['_wipe']:
todoRegion= cView.full_line(_updVals['_region'])
if _updVals['_prefix']!='': #midline todo
todoRegion= sublime.Region(
todoRegion.a +len(_updVals['_prefix']),
todoRegion.b
)
cView.run_command('typetodo_reg_replace', {'_regStart': todoRegion.a, '_regEnd': todoRegion.b-1})
return func
#Cancel deleting todo
#
def substRestore(self, _updVals):
def func(_txt=False):
cDb= WCache().getDB()
if not cDb:
return
#restore todo string
cString= self.view.substr(_updVals['_region'])
cTodo= RE_TODO_EXISTING.match(cString)
storedTask= cDb.todoA[int(_updVals['_id'])]
replaceTodo= storedTask.state +'todo ' +str(storedTask.id) +' (' +', '.join(storedTask.tagsA) +') +' +str(storedTask.lvl) +': ' +storedTask.comment
self.view.run_command('typetodo_reg_replace', {'_regStart': _updVals['_region'].a+cTodo.start('state'), '_regEnd': _updVals['_region'].a+cTodo.end('comment'), '_replaceWith': replaceTodo})
return func
# Update existing task.
# Ask for 'reason' for 'cancel' state.
def substUpdate(self, _state, _id, _tags, _lvl, _comment, _prefix, _region, _wipe=False):
updVals= {'_view':self.view, '_state':_state, '_id':_id, '_tags':_tags, '_lvl':_lvl, '_comment':_comment, '_prefix':_prefix, '_region':_region, '_wipe':_wipe}
if _state=='!' and _comment!='':
self.view.window().show_input_panel('Reason of canceling:', '', self.substDoUpdate(updVals), None, self.substRestore(updVals))
else:
self.substDoUpdate(updVals)()
# Gate to TodoDB().store()
# Store new or existing values as task.
def cfgStore(self, _id, _state, _tags, _lvl, _fileName, _comment):
cDb= WCache().getDB()
if cDb:
return cDb.store(_id, _state, (_tags or '').split(','), _lvl, _fileName, _comment)
sublime.message_dialog('TypeTodo error:\n\n\tTypeTodo was not properly initialized. \n\tMaybe reinstalling will help')
# Optionally pull in the plugin's self-test module. It is not present in
# every installation, so an import failure must never break plugin load —
# but a bare 'except:' (which also swallows SystemExit/KeyboardInterrupt)
# and a no-op 'None' expression were the wrong way to express that.
try:
	if sys.version < '3':
		from test import *
	else:
		from .test import *
except Exception: #best-effort: ignore a missing/broken test module
	pass
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A fake (in-memory) hypervisor+api.
Allows nova testing w/o a hypervisor. This module also documents the
semantics of real hypervisor connections.
"""
import collections
import contextlib
import time
import uuid
import fixtures
import os_resource_classes as orc
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import versionutils
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova.console import type as ctype
from nova import context as nova_context
from nova import exception
from nova import objects
from nova.objects import diagnostics as diagnostics_obj
from nova.objects import fields as obj_fields
from nova.objects import migrate_data
from nova.virt import driver
from nova.virt import hardware
from nova.virt.ironic import driver as ironic
from nova.virt import virtapi
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
class FakeInstance(object):
    """Lightweight record for a guest tracked by the fake hypervisor.

    Supports both attribute access and dict-style indexing, mirroring how
    nova code reads instance objects.
    """
    def __init__(self, name, state, uuid):
        # Bind the three identifying fields under their canonical names.
        for attr, value in (('name', name), ('state', state),
                            ('uuid', uuid)):
            setattr(self, attr, value)
    def __getitem__(self, key):
        """Alias dict-style lookup to attribute access."""
        return getattr(self, key)
class Resources(object):
    """In-memory accounting of fake hypervisor capacity and usage."""
    # Class-level defaults so the usage counters exist before any claim().
    vcpus = 0
    memory_mb = 0
    local_gb = 0
    vcpus_used = 0
    memory_mb_used = 0
    local_gb_used = 0
    def __init__(self, vcpus=8, memory_mb=8000, local_gb=500):
        # Total capacity advertised by the fake hypervisor.
        self.vcpus = vcpus
        self.memory_mb = memory_mb
        self.local_gb = local_gb
    def claim(self, vcpus=0, mem=0, disk=0):
        """Consume the given amounts from the free pool."""
        self._adjust(1, vcpus, mem, disk)
    def release(self, vcpus=0, mem=0, disk=0):
        """Return the given amounts to the free pool."""
        self._adjust(-1, vcpus, mem, disk)
    def _adjust(self, sign, vcpus, mem, disk):
        # Shared bookkeeping for claim()/release().
        self.vcpus_used += sign * vcpus
        self.memory_mb_used += sign * mem
        self.local_gb_used += sign * disk
    def dump(self):
        """Return totals and current usage as a plain dict."""
        return {attr: getattr(self, attr)
                for attr in ('vcpus', 'memory_mb', 'local_gb',
                             'vcpus_used', 'memory_mb_used',
                             'local_gb_used')}
class FakeDriver(driver.ComputeDriver):
    # These must match the traits in
    # nova.tests.functional.integrated_helpers.ProviderUsageBaseTestCase
    capabilities = {
        "has_imagecache": True,
        "supports_evacuate": True,
        "supports_migrate_to_same_host": False,
        "supports_attach_interface": True,
        "supports_device_tagging": True,
        "supports_tagged_attach_interface": True,
        "supports_tagged_attach_volume": True,
        "supports_extend_volume": True,
        "supports_multiattach": True,
        "supports_trusted_certs": True,
        "supports_pcpus": False,
        "supports_accelerators": True,
        # Supported image types
        "supports_image_type_raw": True,
        "supports_image_type_vhd": False,
    }
    # Since we don't have a real hypervisor, pretend we have lots of
    # disk and ram so this driver can be used to test large instances.
    vcpus = 1000
    memory_mb = 800000
    local_gb = 600000
    # NOTE(review): this string is not the class docstring (it is not the
    # first statement in the class body); kept in place for byte fidelity.
    """Fake hypervisor driver."""
    def __init__(self, virtapi, read_only=False):
        """Initialize empty in-memory state for guests, volumes and VIFs."""
        super(FakeDriver, self).__init__(virtapi)
        # uuid -> FakeInstance for every "running" guest on this driver.
        self.instances = {}
        self.resources = Resources(
            vcpus=self.vcpus,
            memory_mb=self.memory_mb,
            local_gb=self.local_gb)
        # Template merged into get_available_resource() results.
        self.host_status_base = {
            'hypervisor_type': 'fake',
            'hypervisor_version': versionutils.convert_version_to_int('1.0'),
            'hypervisor_hostname': CONF.host,
            'cpu_info': {},
            'disk_available_least': 0,
            'supported_instances': [(
                obj_fields.Architecture.X86_64,
                obj_fields.HVType.FAKE,
                obj_fields.VMMode.HVM)],
            'numa_topology': None,
        }
        # instance name -> {mountpoint: connection_info} of attached volumes.
        self._mounts = {}
        # vif id -> vif dict of attached interfaces.
        self._interfaces = {}
        self.active_migrations = {}
        self._host = None
        self._nodes = None
    def init_host(self, host):
        """Record the host name and derive the node name list."""
        self._host = host
        # NOTE(gibi): this is unnecessary complex and fragile but this is
        # how many current functional sample tests expect the node name.
        self._nodes = (['fake-mini'] if self._host == 'compute'
                       else [self._host])
    def _set_nodes(self, nodes):
        # NOTE(gibi): this is not part of the driver interface but used
        # by our tests to customize the discovered nodes by the fake
        # driver.
        self._nodes = nodes
    def list_instances(self):
        """Return the names of all tracked guests."""
        return [self.instances[uuid].name for uuid in self.instances.keys()]
    def list_instance_uuids(self):
        """Return the uuids of all tracked guests."""
        return list(self.instances.keys())
    def plug_vifs(self, instance, network_info):
        """Plug VIFs into networks."""
        pass
    def unplug_vifs(self, instance, network_info):
        """Unplug VIFs from networks."""
        pass
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, allocations, network_info=None,
              block_device_info=None, power_on=True, accel_info=None):
        """Claim resources and start tracking the guest in memory."""
        if network_info:
            for vif in network_info:
                # simulate a real driver triggering the async network
                # allocation as it might cause an error
                vif.fixed_ips()
                # store the vif as attached so we can allow detaching it later
                # with a detach_interface() call.
                self._interfaces[vif['id']] = vif
        # NOTE(review): local name shadows the module-level 'uuid' import
        # within this method; harmless here since the module is not used.
        uuid = instance.uuid
        state = power_state.RUNNING if power_on else power_state.SHUTDOWN
        flavor = instance.flavor
        self.resources.claim(
            vcpus=flavor.vcpus,
            mem=flavor.memory_mb,
            disk=flavor.root_gb)
        fake_instance = FakeInstance(instance.name, state, uuid)
        self.instances[uuid] = fake_instance
    def snapshot(self, context, instance, image_id, update_task_state):
        """Pretend to snapshot; just advance the task state."""
        if instance.uuid not in self.instances:
            raise exception.InstanceNotRunning(instance_id=instance.uuid)
        update_task_state(task_state=task_states.IMAGE_UPLOADING)
    def reboot(self, context, instance, network_info, reboot_type,
               block_device_info=None, bad_volumes_callback=None,
               accel_info=None):
        # If the guest is not on the hypervisor and we're doing a hard reboot
        # then mimic the libvirt driver by spawning the guest.
        if (instance.uuid not in self.instances and
                reboot_type.lower() == 'hard'):
            injected_files = admin_password = allocations = None
            self.spawn(context, instance, instance.image_meta, injected_files,
                       admin_password, allocations,
                       block_device_info=block_device_info)
        else:
            # Just try to power on the guest.
            self.power_on(context, instance, network_info,
                          block_device_info=block_device_info)
    def get_host_ip_addr(self):
        """Return a fixed fake host IP."""
        return '192.168.0.1'
    def set_admin_password(self, instance, new_pass):
        pass
    def resume_state_on_host_boot(self, context, instance, network_info,
                                  block_device_info=None):
        pass
    def rescue(self, context, instance, network_info, image_meta,
               rescue_password, block_device_info):
        pass
    def unrescue(
        self,
        context: nova_context.RequestContext,
        instance: 'objects.Instance',
    ):
        # Unrescue simply flips the tracked guest back to RUNNING.
        self.instances[instance.uuid].state = power_state.RUNNING
    def poll_rebooting_instances(self, timeout, instances):
        pass
    def migrate_disk_and_power_off(self, context, instance, dest,
                                   flavor, network_info,
                                   block_device_info=None,
                                   timeout=0, retry_interval=0):
        pass
    def finish_revert_migration(self, context, instance, network_info,
                                migration, block_device_info=None,
                                power_on=True):
        """Re-track the guest on the source host after a reverted resize."""
        state = power_state.RUNNING if power_on else power_state.SHUTDOWN
        self.instances[instance.uuid] = FakeInstance(
            instance.name, state, instance.uuid)
    def post_live_migration_at_destination(self, context, instance,
                                           network_info,
                                           block_migration=False,
                                           block_device_info=None):
        # Called from the destination host after a successful live migration
        # so spawn the instance on this host to track it properly.
        image_meta = injected_files = admin_password = allocations = None
        self.spawn(context, instance, image_meta, injected_files,
                   admin_password, allocations)
    def power_off(self, instance, timeout=0, retry_interval=0):
        """Mark the guest SHUTDOWN; raise if it is not tracked."""
        if instance.uuid in self.instances:
            self.instances[instance.uuid].state = power_state.SHUTDOWN
        else:
            raise exception.InstanceNotFound(instance_id=instance.uuid)
    def power_on(self, context, instance, network_info,
                 block_device_info=None, accel_info=None):
        """Mark the guest RUNNING; raise if it is not tracked."""
        if instance.uuid in self.instances:
            self.instances[instance.uuid].state = power_state.RUNNING
        else:
            raise exception.InstanceNotFound(instance_id=instance.uuid)
    def trigger_crash_dump(self, instance):
        pass
    def soft_delete(self, instance):
        pass
    def restore(self, instance):
        pass
    def pause(self, instance):
        pass
    def unpause(self, instance):
        pass
    def suspend(self, context, instance):
        pass
    def resume(self, context, instance, network_info, block_device_info=None):
        pass
    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True):
        """Release the guest's resources and stop tracking it."""
        key = instance.uuid
        if key in self.instances:
            flavor = instance.flavor
            self.resources.release(
                vcpus=flavor.vcpus,
                mem=flavor.memory_mb,
                disk=flavor.root_gb)
            del self.instances[key]
        else:
            LOG.warning("Key '%(key)s' not in instances '%(inst)s'",
                        {'key': key,
                         'inst': self.instances}, instance=instance)
    def cleanup(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None, destroy_vifs=True):
        # cleanup() should not be called when the guest has not been destroyed.
        if instance.uuid in self.instances:
            raise exception.InstanceExists(
                "Instance %s has not been destroyed." % instance.uuid)
    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        """Attach the disk to the instance at mountpoint using info."""
        instance_name = instance.name
        if instance_name not in self._mounts:
            self._mounts[instance_name] = {}
        self._mounts[instance_name][mountpoint] = connection_info
    def detach_volume(self, context, connection_info, instance, mountpoint,
                      encryption=None):
        """Detach the disk attached to the instance."""
        try:
            del self._mounts[instance.name][mountpoint]
        except KeyError:
            # Detaching an unknown mount is a silent no-op, matching the
            # best-effort behavior the tests rely on.
            pass
    def swap_volume(self, context, old_connection_info, new_connection_info,
                    instance, mountpoint, resize_to):
        """Replace the disk attached to the instance."""
        instance_name = instance.name
        if instance_name not in self._mounts:
            self._mounts[instance_name] = {}
        self._mounts[instance_name][mountpoint] = new_connection_info
    def extend_volume(self, context, connection_info, instance,
                      requested_size):
        """Extend the disk attached to the instance."""
        pass
    def attach_interface(self, context, instance, image_meta, vif):
        """Track the VIF; fail if it is already attached."""
        if vif['id'] in self._interfaces:
            raise exception.InterfaceAttachFailed(
                instance_uuid=instance.uuid)
        self._interfaces[vif['id']] = vif
    def detach_interface(self, context, instance, vif):
        """Forget the VIF; fail if it was never attached."""
        try:
            del self._interfaces[vif['id']]
        except KeyError:
            raise exception.InterfaceDetachFailed(
                instance_uuid=instance.uuid)
    def get_info(self, instance, use_cache=True):
        """Return the power state of the tracked guest."""
        if instance.uuid not in self.instances:
            raise exception.InstanceNotFound(instance_id=instance.uuid)
        i = self.instances[instance.uuid]
        return hardware.InstanceInfo(state=i.state)
    def get_diagnostics(self, instance):
        """Return canned legacy-format diagnostics."""
        return {'cpu0_time': 17300000000,
                'memory': 524288,
                'vda_errors': -1,
                'vda_read': 262144,
                'vda_read_req': 112,
                'vda_write': 5778432,
                'vda_write_req': 488,
                'vnet1_rx': 2070139,
                'vnet1_rx_drop': 0,
                'vnet1_rx_errors': 0,
                'vnet1_rx_packets': 26701,
                'vnet1_tx': 140208,
                'vnet1_tx_drop': 0,
                'vnet1_tx_errors': 0,
                'vnet1_tx_packets': 662,
                }
    def get_instance_diagnostics(self, instance):
        """Return canned Diagnostics object with one CPU, NIC and disk."""
        diags = diagnostics_obj.Diagnostics(
            state='running', driver='libvirt', hypervisor='kvm',
            hypervisor_os='ubuntu', uptime=46664, config_drive=True)
        diags.add_cpu(id=0, time=17300000000, utilisation=15)
        diags.add_nic(mac_address='01:23:45:67:89:ab',
                      rx_octets=2070139,
                      rx_errors=100,
                      rx_drop=200,
                      rx_packets=26701,
                      rx_rate=300,
                      tx_octets=140208,
                      tx_errors=400,
                      tx_drop=500,
                      tx_packets = 662,
                      tx_rate=600)
        diags.add_disk(read_bytes=262144,
                       read_requests=112,
                       write_bytes=5778432,
                       write_requests=488,
                       errors_count=1)
        diags.memory_details = diagnostics_obj.MemoryDiagnostics(
            maximum=524288, used=0)
        return diags
    def get_all_volume_usage(self, context, compute_host_bdms):
        """Return usage info for volumes attached to vms on
           a given host.
        """
        volusage = []
        if compute_host_bdms:
            volusage = [{'volume': compute_host_bdms[0][
                'instance_bdms'][0]['volume_id'],
                         'instance': compute_host_bdms[0]['instance'],
                         'rd_bytes': 0,
                         'rd_req': 0,
                         'wr_bytes': 0,
                         'wr_req': 0}]
        return volusage
    def get_host_cpu_stats(self):
        """Return canned host CPU time counters."""
        stats = {'kernel': 5664160000000,
                 'idle': 1592705190000000,
                 'user': 26728850000000,
                 'iowait': 6121490000000}
        stats['frequency'] = 800
        return stats
    def block_stats(self, instance, disk_id):
        """Return zeroed block device counters."""
        return [0, 0, 0, 0, None]
    def get_console_output(self, context, instance):
        """Return fixed fake console output."""
        return 'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE'
    def get_vnc_console(self, context, instance):
        return ctype.ConsoleVNC(internal_access_path='FAKE',
                                host='fakevncconsole.com',
                                port=6969)
    def get_spice_console(self, context, instance):
        return ctype.ConsoleSpice(internal_access_path='FAKE',
                                  host='fakespiceconsole.com',
                                  port=6969,
                                  tlsPort=6970)
    def get_rdp_console(self, context, instance):
        return ctype.ConsoleRDP(internal_access_path='FAKE',
                                host='fakerdpconsole.com',
                                port=6969)
    def get_serial_console(self, context, instance):
        return ctype.ConsoleSerial(internal_access_path='FAKE',
                                   host='fakerdpconsole.com',
                                   port=6969)
    def get_mks_console(self, context, instance):
        return ctype.ConsoleMKS(internal_access_path='FAKE',
                                host='fakemksconsole.com',
                                port=6969)
    def get_available_resource(self, nodename):
        """Updates compute manager resource info on ComputeNode table.
           Since we don't have a real hypervisor, pretend we have lots of
           disk and ram.
        """
        cpu_info = collections.OrderedDict([
            ('arch', 'x86_64'),
            ('model', 'Nehalem'),
            ('vendor', 'Intel'),
            ('features', ['pge', 'clflush']),
            ('topology', {
                'cores': 1,
                'threads': 1,
                'sockets': 4,
            }),
        ])
        if nodename not in self.get_available_nodes():
            return {}
        host_status = self.host_status_base.copy()
        host_status.update(self.resources.dump())
        host_status['hypervisor_hostname'] = nodename
        host_status['host_hostname'] = nodename
        host_status['host_name_label'] = nodename
        host_status['cpu_info'] = jsonutils.dumps(cpu_info)
        return host_status
    def update_provider_tree(self, provider_tree, nodename, allocations=None):
        # NOTE(yikun): If the inv record does not exists, the allocation_ratio
        # will use the CONF.xxx_allocation_ratio value if xxx_allocation_ratio
        # is set, and fallback to use the initial_xxx_allocation_ratio
        # otherwise.
        inv = provider_tree.data(nodename).inventory
        ratios = self._get_allocation_ratios(inv)
        inventory = {
            'VCPU': {
                'total': self.vcpus,
                'min_unit': 1,
                'max_unit': self.vcpus,
                'step_size': 1,
                'allocation_ratio': ratios[orc.VCPU],
                'reserved': CONF.reserved_host_cpus,
            },
            'MEMORY_MB': {
                'total': self.memory_mb,
                'min_unit': 1,
                'max_unit': self.memory_mb,
                'step_size': 1,
                'allocation_ratio': ratios[orc.MEMORY_MB],
                'reserved': CONF.reserved_host_memory_mb,
            },
            'DISK_GB': {
                'total': self.local_gb,
                'min_unit': 1,
                'max_unit': self.local_gb,
                'step_size': 1,
                'allocation_ratio': ratios[orc.DISK_GB],
                'reserved': self._get_reserved_host_disk_gb_from_config(),
            },
        }
        provider_tree.update_inventory(nodename, inventory)
    def get_instance_disk_info(self, instance, block_device_info=None):
        # No disks to report for the fake hypervisor; returns None.
        return
    def live_migration(self, context, instance, dest,
                       post_method, recover_method, block_migration=False,
                       migrate_data=None):
        # Immediately report the migration as successful.
        post_method(context, instance, dest, block_migration,
                    migrate_data)
        return
    def live_migration_force_complete(self, instance):
        return
    def live_migration_abort(self, instance):
        return
    def cleanup_live_migration_destination_check(self, context,
                                                 dest_check_data):
        return
    def check_can_live_migrate_destination(self, context, instance,
                                           src_compute_info, dst_compute_info,
                                           block_migration=False,
                                           disk_over_commit=False):
        """Return canned LibvirtLiveMigrateData accepting the migration."""
        data = migrate_data.LibvirtLiveMigrateData()
        data.filename = 'fake'
        data.image_type = CONF.libvirt.images_type
        data.graphics_listen_addr_vnc = CONF.vnc.server_listen
        data.graphics_listen_addr_spice = CONF.spice.server_listen
        data.serial_listen_addr = None
        # Notes(eliqiao): block_migration and disk_over_commit are not
        # nullable, so just don't set them if they are None
        if block_migration is not None:
            data.block_migration = block_migration
        if disk_over_commit is not None:
            data.disk_over_commit = disk_over_commit
        data.disk_available_mb = 100000
        data.is_shared_block_storage = True
        data.is_shared_instance_path = True
        return data
    def check_can_live_migrate_source(self, context, instance,
                                      dest_check_data, block_device_info=None):
        """Accept the migration; echo the destination check data back."""
        return dest_check_data
    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance,
                         allocations, block_device_info=None, power_on=True):
        injected_files = admin_password = None
        # Finish migration is just like spawning the guest on a destination
        # host during resize/cold migrate, so re-use the spawn() fake to
        # claim resources and track the instance on this "hypervisor".
        self.spawn(context, instance, image_meta, injected_files,
                   admin_password, allocations,
                   block_device_info=block_device_info, power_on=power_on)
    def confirm_migration(self, context, migration, instance, network_info):
        # Confirm migration cleans up the guest from the source host so just
        # destroy the guest to remove it from the list of tracked instances
        # unless it is a same-host resize.
        if migration.source_compute != migration.dest_compute:
            self.destroy(context, instance, network_info)
    def pre_live_migration(self, context, instance, block_device_info,
                           network_info, disk_info, migrate_data):
        """No-op pre-check; echo the migrate data back unchanged."""
        return migrate_data
    def rollback_live_migration_at_destination(self, context, instance,
                                               network_info,
                                               block_device_info,
                                               destroy_disks=True,
                                               migrate_data=None):
        return
    def _test_remove_vm(self, instance_uuid):
        """Removes the named VM, as if it crashed. For testing."""
        self.instances.pop(instance_uuid)
    def host_power_action(self, action):
        """Reboots, shuts down or powers up the host."""
        return action
    def host_maintenance_mode(self, host, mode):
        """Start/Stop host maintenance window. On start, it triggers
           guest VMs evacuation.
        """
        if not mode:
            return 'off_maintenance'
        return 'on_maintenance'
    def set_host_enabled(self, enabled):
        """Sets the specified host's ability to accept new instances."""
        if enabled:
            return 'enabled'
        return 'disabled'
    def get_volume_connector(self, instance):
        """Return a minimal fake volume connector for this host."""
        return {'ip': CONF.my_block_storage_ip,
                'initiator': 'fake',
                'host': self._host}
    def get_available_nodes(self, refresh=False):
        # Node list is computed once in init_host()/_set_nodes().
        return self._nodes
    def instance_on_disk(self, instance):
        return False
    def quiesce(self, context, instance, image_meta):
        pass
    def unquiesce(self, context, instance, image_meta):
        pass
class FakeVirtAPI(virtapi.VirtAPI):
    """VirtAPI stand-in whose event waits complete immediately."""
    @contextlib.contextmanager
    def wait_for_instance_event(self, instance, event_names, deadline=300,
                                error_callback=None):
        # NOTE(danms): Don't actually wait for any events, just
        # fall through
        yield
    def exit_wait_early(self, events):
        # We never wait, so there is nothing to exit early
        pass
    def update_compute_provider_status(self, context, rp_uuid, enabled):
        # No-op: the fake driver has no placement status to toggle.
        pass
class SmallFakeDriver(FakeDriver):
    # The api samples expect specific cpu memory and disk sizes. In order to
    # allow the FakeVirt driver to be used outside of the unit tests, provide
    # a separate class that has the values expected by the api samples. So
    # instead of requiring new samples every time those
    # values are adjusted allow them to be overwritten here.
    vcpus = 2
    memory_mb = 8192
    local_gb = 1028
class MediumFakeDriver(FakeDriver):
    # Fake driver that has enough resources to host more than one instance
    # but not that much that cannot be exhausted easily
    vcpus = 10
    memory_mb = 8192
    local_gb = 1028
class SameHostColdMigrateDriver(MediumFakeDriver):
    """MediumFakeDriver variant that supports same-host cold migrate."""
    # Same mapping as dict(FakeDriver.capabilities, ...), expressed with
    # PEP 448 unpacking: copy the base capabilities, flip one flag.
    capabilities = {**FakeDriver.capabilities,
                    'supports_migrate_to_same_host': True}
class RescueBFVDriver(MediumFakeDriver):
    """MediumFakeDriver variant advertising boot-from-volume rescue."""
    # Copy the base capabilities and enable the bfv-rescue flag.
    capabilities = {**FakeDriver.capabilities, 'supports_bfv_rescue': True}
class PowerUpdateFakeDriver(SmallFakeDriver):
    # A specific fake driver for the power-update external event testing.
    def __init__(self, virtapi):
        # NOTE(review): the parent is built with virtapi=None while the
        # real virtapi is handed to the embedded IronicDriver below --
        # presumably deliberate since only the ironic path is exercised;
        # TODO confirm.
        super(PowerUpdateFakeDriver, self).__init__(virtapi=None)
        self.driver = ironic.IronicDriver(virtapi=virtapi)
    def power_update_event(self, instance, target_power_state):
        """Update power state of the specified instance in the nova DB."""
        self.driver.power_update_event(instance, target_power_state)
class MediumFakeDriverWithNestedCustomResources(MediumFakeDriver):
    # A MediumFakeDriver variant that also reports CUSTOM_MAGIC resources on
    # a nested resource provider
    vcpus = 10
    memory_mb = 8192
    local_gb = 1028
    # Inventory advertised on the '<nodename>-child' nested provider.
    child_resources = {
        'CUSTOM_MAGIC': {
            'total': 10,
            'reserved': 0,
            'min_unit': 1,
            'max_unit': 10,
            'step_size': 1,
            'allocation_ratio': 1,
        }
    }
    def update_provider_tree(self, provider_tree, nodename, allocations=None):
        """Report the parent inventory, then attach the CUSTOM_MAGIC child."""
        super(
            MediumFakeDriverWithNestedCustomResources,
            self).update_provider_tree(
            provider_tree, nodename,
            allocations=allocations)
        # Create the child provider only once; refresh its inventory always.
        if not provider_tree.exists(nodename + '-child'):
            provider_tree.new_child(name=nodename + '-child',
                                    parent=nodename)
        provider_tree.update_inventory(nodename + '-child',
                                       self.child_resources)
class FakeFinishMigrationFailDriver(FakeDriver):
    """FakeDriver variant that will raise an exception from finish_migration"""
    def finish_migration(self, *args, **kwargs):
        # Always fail so tests can exercise resize/migrate error handling.
        raise exception.VirtualInterfaceCreateException()
class PredictableNodeUUIDDriver(SmallFakeDriver):
    """SmallFakeDriver variant that reports a predictable node uuid in
    get_available_resource, like IronicDriver.
    """
    def get_available_resource(self, nodename):
        resources = super(
            PredictableNodeUUIDDriver, self).get_available_resource(nodename)
        # This is used in ComputeNode.update_from_virt_driver which is called
        # from the ResourceTracker when creating a ComputeNode.
        # uuid5 is deterministic for a given nodename, hence "predictable".
        resources['uuid'] = uuid.uuid5(uuid.NAMESPACE_DNS, nodename)
        return resources
class FakeRescheduleDriver(FakeDriver):
    """FakeDriver derivative that triggers a reschedule on the first spawn
    attempt. This is expected to only be used in tests that have more than
    one compute service.
    """
    # dict, keyed by instance uuid, mapped to a boolean telling us if the
    # instance has been rescheduled or not
    rescheduled = {}
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, allocations, network_info=None,
              block_device_info=None, power_on=True, accel_info=None):
        """Raise ComputeResourcesUnavailable on the first spawn attempt per
        instance so the request is rescheduled; spawn normally afterwards.
        """
        if not self.rescheduled.get(instance.uuid, False):
            # We only reschedule on the first time something hits spawn().
            self.rescheduled[instance.uuid] = True
            raise exception.ComputeResourcesUnavailable(
                reason='FakeRescheduleDriver')
        # Forward every argument; the original dropped accel_info, silently
        # discarding accelerator requests on the retried spawn.
        super(FakeRescheduleDriver, self).spawn(
            context, instance, image_meta, injected_files,
            admin_password, allocations, network_info=network_info,
            block_device_info=block_device_info, power_on=power_on,
            accel_info=accel_info)
class FakeRescheduleDriverWithNestedCustomResources(
        FakeRescheduleDriver, MediumFakeDriverWithNestedCustomResources):
    # Combines first-spawn rescheduling with nested CUSTOM_MAGIC reporting.
    pass
class FakeBuildAbortDriver(FakeDriver):
    """FakeDriver derivative that always fails on spawn() with a
    BuildAbortException so no reschedule is attempted.
    """
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, allocations, network_info=None,
              block_device_info=None, power_on=True, accel_info=None):
        # Unconditional abort: the build must fail without retrying.
        raise exception.BuildAbortException(
            instance_uuid=instance.uuid, reason='FakeBuildAbortDriver')
class FakeBuildAbortDriverWithNestedCustomResources(
        FakeBuildAbortDriver, MediumFakeDriverWithNestedCustomResources):
    # Combines the always-abort spawn with nested CUSTOM_MAGIC reporting.
    pass
class FakeUnshelveSpawnFailDriver(FakeDriver):
    """FakeDriver derivative that always fails on spawn() with a
    VirtualInterfaceCreateException when unshelving an offloaded instance.
    """
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, allocations, network_info=None,
              block_device_info=None, power_on=True, accel_info=None):
        """Fail unshelve-from-offloaded spawns; build normally otherwise."""
        if instance.vm_state == vm_states.SHELVED_OFFLOADED:
            raise exception.VirtualInterfaceCreateException(
                'FakeUnshelveSpawnFailDriver')
        # Otherwise spawn normally during the initial build, forwarding every
        # argument; the original dropped accel_info, silently discarding
        # accelerator requests.
        super(FakeUnshelveSpawnFailDriver, self).spawn(
            context, instance, image_meta, injected_files,
            admin_password, allocations, network_info=network_info,
            block_device_info=block_device_info, power_on=power_on,
            accel_info=accel_info)
class FakeUnshelveSpawnFailDriverWithNestedCustomResources(
        FakeUnshelveSpawnFailDriver, MediumFakeDriverWithNestedCustomResources):
    # Combines the unshelve-failure spawn with nested CUSTOM_MAGIC reporting.
    pass
class FakeLiveMigrateDriver(FakeDriver):
    """FakeDriver derivative to handle force_complete and abort calls.
    This module serves those tests that need to abort or force-complete
    the live migration, thus the live migration will never be finished
    without the force_complete_migration or delete_migration API calls.
    """
    def __init__(self, virtapi, read_only=False):
        super(FakeLiveMigrateDriver, self).__init__(virtapi, read_only)
        # Flags polled by live_migration() below to decide its outcome.
        self._migrating = True
        self._abort_migration = True
    def live_migration(self, context, instance, dest,
                       post_method, recover_method, block_migration=False,
                       migrate_data=None):
        """Poll until aborted or force-completed, then report the outcome."""
        self._abort_migration = False
        self._migrating = True
        count = 0
        # 50 iterations of 0.1s bound the wait (~5s) so a test that never
        # aborts or completes the migration cannot hang forever.
        while self._migrating and count < 50:
            time.sleep(0.1)
            count = count + 1
        if self._abort_migration:
            recover_method(context, instance, dest, migrate_data,
                           migration_status='cancelled')
        else:
            post_method(context, instance, dest, block_migration,
                        migrate_data)
    def live_migration_force_complete(self, instance):
        # Stop the poll loop and drop the guest from this (source) host.
        self._migrating = False
        if instance.uuid in self.instances:
            del self.instances[instance.uuid]
    def live_migration_abort(self, instance):
        # Stop the poll loop and have live_migration() report 'cancelled'.
        self._abort_migration = True
        self._migrating = False
    def post_live_migration(self, context, instance, block_device_info,
                            migrate_data=None):
        # Runs on the source host, called from
        # ComputeManager._post_live_migration so just delete the instance
        # from being tracked on the source host.
        self.destroy(context, instance, network_info=None,
                     block_device_info=block_device_info)
class FakeLiveMigrateDriverWithNestedCustomResources(
        FakeLiveMigrateDriver, MediumFakeDriverWithNestedCustomResources):
    """FakeLiveMigrateDriver that also reports the nested custom resources of
    MediumFakeDriverWithNestedCustomResources (mixin via MRO only).
    """
    pass
class FakeDriverWithPciResources(SmallFakeDriver):
    """SmallFakeDriver that reports a fixed fake PCI topology.

    Three physical functions (PFs), each with one virtual function (VF);
    see get_available_resource() for the exact inventory.
    """

    PCI_ADDR_PF1 = '0000:01:00.0'
    PCI_ADDR_PF1_VF1 = '0000:01:00.1'
    PCI_ADDR_PF2 = '0000:02:00.0'
    PCI_ADDR_PF2_VF1 = '0000:02:00.1'
    PCI_ADDR_PF3 = '0000:03:00.0'
    PCI_ADDR_PF3_VF1 = '0000:03:00.1'
    # NOTE(gibi): Always use this fixture along with the
    # FakeDriverWithPciResources to make the necessary configuration for the
    # driver.
    class FakeDriverWithPciResourcesConfigFixture(fixtures.Fixture):
        def setUp(self):
            """Whitelist the fake PCI devices before the compute starts."""
            super(FakeDriverWithPciResources.
                  FakeDriverWithPciResourcesConfigFixture, self).setUp()
            # Set passthrough_whitelist before the compute node starts to match
            # with the PCI devices reported by this fake driver.
            # NOTE(gibi): 0000:01:00 is tagged to physnet1 and therefore not a
            # match based on physnet to our sriov port
            # 'port_with_sriov_resource_request' as the network of that port
            # points to physnet2 with the attribute
            # 'provider:physical_network'. Nova pci handling already enforces
            # this rule.
            #
            # 0000:02:00 and 0000:03:00 are both tagged to physnet2 and
            # therefore a good match for our sriov port based on physnet.
            # Having two PFs on the same physnet will allow us to test the
            # placement allocation - physical allocation matching based on the
            # bandwidth allocation in the future.
            CONF.set_override('passthrough_whitelist', override=[
                jsonutils.dumps(
                    {
                        "address": {
                            "domain": "0000",
                            "bus": "01",
                            "slot": "00",
                            "function": ".*"},
                        "physical_network": "physnet1",
                    }
                ),
                jsonutils.dumps(
                    {
                        "address": {
                            "domain": "0000",
                            "bus": "02",
                            "slot": "00",
                            "function": ".*"},
                        "physical_network": "physnet2",
                    }
                ),
                jsonutils.dumps(
                    {
                        "address": {
                            "domain": "0000",
                            "bus": "03",
                            "slot": "00",
                            "function": ".*"},
                        "physical_network": "physnet2",
                    }
                ),
            ],
                group='pci')

    def get_available_resource(self, nodename):
        """Extend the base resource report with the fake PCI device list."""
        host_status = super(
            FakeDriverWithPciResources, self).get_available_resource(nodename)
        # 01:00.0 - PF - ens1
        #  |---- 01:00.1 - VF
        #
        # 02:00.0 - PF - ens2
        #  |---- 02:00.1 - VF
        #
        # 03:00.0 - PF - ens3
        #  |---- 03:00.1 - VF
        host_status['pci_passthrough_devices'] = jsonutils.dumps([
            {
                'address': self.PCI_ADDR_PF1,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-PF',
                'parent_addr': None,
                'numa_node': 0,
                'label': 'fake-label',
            },
            {
                'address': self.PCI_ADDR_PF1_VF1,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-VF',
                'parent_addr': self.PCI_ADDR_PF1,
                'numa_node': 0,
                'label': 'fake-label',
                "parent_ifname": self._host + "-ens1",
            },
            {
                'address': self.PCI_ADDR_PF2,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-PF',
                'parent_addr': None,
                'numa_node': 0,
                'label': 'fake-label',
            },
            {
                'address': self.PCI_ADDR_PF2_VF1,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-VF',
                'parent_addr': self.PCI_ADDR_PF2,
                'numa_node': 0,
                'label': 'fake-label',
                "parent_ifname": self._host + "-ens2",
            },
            {
                'address': self.PCI_ADDR_PF3,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-PF',
                'parent_addr': None,
                'numa_node': 0,
                'label': 'fake-label',
            },
            {
                'address': self.PCI_ADDR_PF3_VF1,
                'product_id': 'fake-product_id',
                'vendor_id': 'fake-vendor_id',
                'status': 'available',
                'dev_type': 'type-VF',
                'parent_addr': self.PCI_ADDR_PF3,
                'numa_node': 0,
                'label': 'fake-label',
                "parent_ifname": self._host + "-ens3",
            },
        ])
        return host_status
class FakeLiveMigrateDriverWithPciResources(
        FakeLiveMigrateDriver, FakeDriverWithPciResources):
    """Combines FakeLiveMigrateDriver's stall-until-force-complete/abort
    live migration behaviour with the fake PCI inventory of
    FakeDriverWithPciResources (mixin via MRO only).
    """
class FakeDriverWithCaching(FakeDriver):
    """FakeDriver that records which image ids have been pre-cached."""

    def __init__(self, *a, **k):
        super(FakeDriverWithCaching, self).__init__(*a, **k)
        # Image ids already "downloaded" to this fake host's cache.
        self.cached_images = set()

    def cache_image(self, context, image_id):
        """Cache image_id; True if newly cached, False if already present."""
        if image_id in self.cached_images:
            # Already cached: report that no download happened.
            return False
        self.cached_images.add(image_id)
        return True
|
|
#Copyright 2012 EasyDevStdio , wes342
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
#!/usr/bin/env python
from scripts.GI import *
from kivy.uix.textinput import TextInput
from kivy.uix.button import Button
from kivy.uix.checkbox import CheckBox
from kivy.uix.switch import Switch
from kivy.uix.settings import SettingItem, SettingsPanel, SettingOptions
from scripts.EdsNotify import EdsNotify
from BeautifulSoup import BeautifulSoup
#from bs4 import BeautifulSoup
import urllib2
import re
# Load the user's EDS settings (e.g. the 'changelog' toggle read below)
# from eds.ini under the user directory. `Usr` comes from scripts.GI.
config = ConfigParser()
config.read('%s/eds.ini' % Usr)
class LoadDialog(FloatLayout):
    """Kivy UI logic for selecting, downloading and tweaking a base ROM.

    Written for Python 2 (print statements, urllib2, old BeautifulSoup).
    Many names used here (Rom, Usr, Changelog, timestamp, cyan_s/cyan_r/
    cyan_n, CustomButton, Popup, Initd, Rom_Initd, webbrowser, ...) come
    from the star-import of scripts.GI at module level -- assumed, confirm.
    """
    # Callbacks wired up by the caller of the file-browser popup.
    load = ObjectProperty(None)
    cancel = ObjectProperty(None)
    def dismiss(self):
        # Close the popup most recently opened by show_base_browse().
        self._popup.dismiss()
    def show_base_browse(self):
        # NOTE(review): self.dismiss_popup is not defined in this class;
        # presumably provided by a subclass/mixin or kv rule -- confirm.
        content = LoadDialog(load=self.load, cancel=self.dismiss_popup)
        self._popup = Popup(background='atlas://images/eds/pop',title="EDS File Browser", content=content, size_hint=(0.9, 0.9))
        self._popup.open()
    def select_base_file(self, path, filename):
        """Wipe the ROM dir, extract the chosen zip, optionally start a changelog."""
        try:
            shutil.rmtree(Rom)
            os.mkdir(Rom)
            global fileb
            fileb = os.path.join(path, filename[0])
            self.dismiss_popup()
        except:
            # NOTE(review): bare except; when nothing was selected, `fileb`
            # may be undefined below and zipfile.ZipFile will raise -- confirm.
            print 'no file selected'
        destpath = '%s/' % (Rom)
        z = zipfile.ZipFile(fileb)
        z.extractall(destpath)
        if config.getint('Config', 'changelog'):
            # Start a fresh changelog for the newly extracted base ROM.
            text_file = open(Changelog, "w")
            text_file.write("######## Changelog #########\n")
            text_file.write("Created: %s\n" % timestamp)
            text_file.close()
        else:
            print "Auto Changelog Disabled"
        EdsNotify().run("Base Rom Extracted Successfully", 'Selected file was:\n' + fileb)
        self.dismiss_popup()
    def select_base(self):
        """Build the 'Base Rom Selection' panel and wire its buttons."""
        self.panel_layout.clear_widgets()
        title = Label(text='[b][color=#22A0D6][size=20]Base Rom Selection[/size][/color][/b]', markup = True, pos_hint={'x':-.05, 'y':.20})
        browse = CustomButton(text='BROWSE', pos_hint={'x':.0, 'y':.550}, size_hint=(.90, .06))
        download = CustomButton(text='Download', pos_hint={'x':.0, 'y':.450}, size_hint=(.90, .06))
        extract = CustomButton(text='Extract From RUU', pos_hint={'x':.0, 'y':.250}, size_hint=(.90, .06))
        port = CustomButton(text='Rom Porting (Experimental)', pos_hint={'x':.0, 'y':.150}, size_hint=(.90, .06))
        clean = Button(text='Clean Out Old Rom Files', pos_hint={'x':.0, 'y':-.05}, size_hint=(.90, .06), background_color=(1.4, 0, 0, 0.6))
        self.panel_layout.add_widget(title)
        self.panel_layout.add_widget(browse)
        self.panel_layout.add_widget(download)
        # NOTE(review): dl_base_type is referenced as a bare name instead of
        # self.dl_base_type; works only if also bound at module scope -- confirm.
        download.bind(on_release=dl_base_type)
        self.panel_layout.add_widget(extract)
        self.panel_layout.add_widget(port)
        self.panel_layout.add_widget(clean)
        def browse_files(instance):
            # NOTE(review): calls show_base_browse as a bare name; likely
            # intended self.show_base_browse() -- confirm.
            show_base_browse(self)
        browse.bind(on_release=browse_files)
        def clean_files(instance):
            # Confirmation popup before wiping the extracted ROM files.
            root = BoxLayout(orientation='vertical', spacing=20)
            btn_layout = GridLayout(cols=2, row_force_default=True, row_default_height=50, spacing=25)
            remove = Button(text='Clean', size_hint_x=None, width=150)
            cancel = Button(text='Cancel', size_hint_x=None, width=150)
            root.add_widget(Label(text='Are You Sure You Want To\nClean Out Current Rom Files?'))
            root.add_widget(btn_layout)
            btn_layout.add_widget(remove)
            btn_layout.add_widget(cancel)
            popup = Popup(background='atlas://images/eds/pop', title='Add Option',content=root, auto_dismiss=False,
                          size_hint=(None, None), size=(350, 200))
            cancel.bind(on_release=popup.dismiss)
            popup.open()
            def clean_now(self):
                shutil.rmtree(Rom)
                os.mkdir(Rom)
                EdsNotify().run("Clean Successful", 'Rom Files Have Been Removed')
            remove.bind(on_press=clean_now)
            remove.bind(on_release=popup.dismiss)
        clean.bind(on_release=clean_files)
    def dl_base_type(self):
        """Popup listing the CyanogenMod download channels (stable/RC/nightly)."""
        Box = BoxLayout(orientation="vertical", spacing=10)
        msg = GridLayout(cols=1, padding=15, spacing=20, size_hint_y=None)
        btn_layout = GridLayout(cols=1)
        done = Button(text="Done")
        btn_layout.add_widget(done)
        msg.bind(minimum_height=msg.setter('height'))
        Cm = Label(text="[b][color=#32D1D1][size=20]CyanogenMod[/color][/size][/b]", markup=True)
        stable = CustomButton(text='STABLE', size=(560, 45), size_hint=(None, None))
        stable.bind(on_press=load_cm_s)
        rc = CustomButton(text='RELEASE CANDIDATE', size=(560, 45), size_hint=(None, None))
        rc.bind(on_release=load_cm_r)
        night = CustomButton(text='NIGHTLY', size=(560, 45), size_hint=(None, None))
        night.bind(on_release=load_cm_n)
        msg.add_widget(Cm)
        msg.add_widget(stable)
        msg.add_widget(rc)
        msg.add_widget(night)
        root = ScrollView(size_hint=(None, None),bar_margin=-22, size=(575, 350), do_scroll_x=False)
        root.add_widget(msg)
        Box.add_widget(root)
        Box.add_widget(btn_layout)
        popup = Popup(background='atlas://images/eds/pop', title='Download Rom',content=Box, auto_dismiss=True,
                      size_hint=(None, None), size=(620, 460))
        done.bind(on_release=popup.dismiss)
        popup.open()
    def load_cm_s(self):
        """Scrape get.cm for STABLE builds and list one download row per device."""
        Box = BoxLayout(orientation="vertical", spacing=10)
        panel = SettingsPanel(title="CyanogenMod", settings=self)
        msg = GridLayout(cols=1, size_hint=(None, 8.8), width=750)
        btn_layout = GridLayout(cols=1)
        done = Button(text="Done")
        btn_layout.add_widget(done)
        msg.bind(minimum_height=msg.setter('height'))
        try:
            html_page = urllib2.urlopen(cyan_s)
            soup = BeautifulSoup(html_page)
            for link in soup.findAll('a'):
                if "/get/jenkins/" in link.get('href'):
                    name = link.get('href').split(".html")[0]
                    # Piecewise regex (txt2re style) that dissects the
                    # download URL into version/device components.
                    re1='((?:[a-z][a-z]+))' # Word 1
                    re2='(.)' # Any Single Character 1
                    re3='(.)' # Any Single Character 2
                    re4='(.)' # Any Single Character 3
                    re5='((?:[a-z][a-z]+))' # Word 2
                    re6='(.)' # Any Single Character 4
                    re7='((?:[a-z][a-z]+))' # Word 3
                    re8='(.)' # Any Single Character 5
                    re9='((?:[a-z][a-z]+))' # Word 4
                    re10='(.)' # Any Single Character 6
                    re11='((?:[a-z][a-z]+))' # Word 5
                    re12='(.)' # Any Single Character 7
                    re13='(\\d+)' # Integer Number 1
                    re14='(.)' # Any Single Character 8
                    re15='((?:[a-z][a-z]+))' # Word 6
                    re16='(.)' # Any Single Character 9
                    re17='(\\d+)' # Integer Number 2
                    re18='(.)' # Any Single Character 10
                    re19='(\\d+)' # Integer Number 3
                    re20='(.)' # Any Single Character 11
                    re21='(\\d+)' # Integer Number 4
                    re22='(.)' # Any Single Character 12
                    re23='(\w+)' # Alphanum 1
                    re24='(.)' # Any Single Character 13
                    re25='((?:[a-z][a-z]+))' # Word 7
                    rg = re.compile(re1+re2+re3+re4+re5+re6+re7+re8+re9+re10+re11+re12+re13+re14+re15+re16+re17+re18+re19+re20+re21+re22+re23+re24+re25,re.IGNORECASE|re.DOTALL)
                    m = rg.search(name)
                    if m:
                        word1=m.group(1)
                        c1=m.group(2)
                        c2=m.group(3)
                        c3=m.group(4)
                        word2=m.group(5)
                        c4=m.group(6)
                        word3=m.group(7)
                        c5=m.group(8)
                        word4=m.group(9)
                        c6=m.group(10)
                        word5=m.group(11)
                        c7=m.group(12)
                        int1=m.group(13)
                        c8=m.group(14)
                        word6=m.group(15)
                        c9=m.group(16)
                        int2=m.group(17)
                        c10=m.group(18)
                        int3=m.group(19)
                        c11=m.group(20)
                        int4=m.group(21)
                        c12=m.group(22)
                        alphanum1=m.group(23)
                        c13=m.group(24)
                        word7=m.group(25)
                        # Reassemble URL tail, version string and device name.
                        url = c7+int1+c8+word6+c9+int2+c10+int3+c11+int4+c12+alphanum1+c13+word7
                        ver = word6+c9+int2+c10+int3+c11+int4
                        device = alphanum1
                        stable = SettingItem(panel = panel, title = "%s" % device.upper(), disabled=False, desc = "Build: %s" % ver)
                        stable_btn = CustomButton(text="%s" % url, size_hint=(None, None),width=330, height=40)
                        # NOTE(review): stable_b2 is created but never used -- confirm.
                        stable_b2 = CustomButton(text="%s" % url)
                        stable.add_widget(stable_btn)
                        msg.add_widget(stable)
                        def cm_stable(self):
                            # `self` here is the pressed button; its text is the URL tail.
                            webbrowser.open('http://get.cm/get/jenkins%s' % self.text)
                        stable_btn.bind(on_release=cm_stable)
            root = ScrollView(size_hint=(None, None), size=(730, 390), do_scroll_x=False)
            root.add_widget(msg)
            Box.add_widget(root)
            Box.add_widget(btn_layout)
            popup = Popup(background='atlas://images/eds/pop', title='CyanogenMod (STABLE)',content=Box, auto_dismiss=True,
                          size_hint=(None, None), size=(750, 500))
            done.bind(on_release=popup.dismiss)
            popup.open()
        except:
            # NOTE(review): bare except hides the real failure reason.
            EdsNotify().run("'Url Not Found", 'Error Loading: http://get.cm')
    def load_cm_r(self):
        """Scrape get.cm for RELEASE CANDIDATE builds, one row per device."""
        Box = BoxLayout(orientation="vertical", spacing=10)
        panel = SettingsPanel(title="CyanogenMod", settings=self)
        msg = GridLayout(cols=1, size_hint=(None, 8.8), width=750)
        btn_layout = GridLayout(cols=1)
        done = Button(text="Done")
        btn_layout.add_widget(done)
        msg.bind(minimum_height=msg.setter('height'))
        try:
            html_page = urllib2.urlopen(cyan_r)
            soup = BeautifulSoup(html_page)
            for link in soup.findAll('a'):
                if "/get/jenkins/" in link.get('href'):
                    name = link.get('href').split(".html")[0]
                    # Piecewise regex matching the full RC download URL.
                    re1='(http)'
                    re2='(:)'
                    re3='(\\/)'
                    re4='(\\/)'
                    re5='(get)'
                    re6='(\\.)'
                    re7='(cm)'
                    re8='(\\/)'
                    re9='(get)'
                    re10='(\\/)'
                    re11='((?:[a-z][a-z]+))'
                    re12='(\\/)'
                    re13='(\\d+)'
                    re14='(\\/)'
                    re15='(cm)'
                    re16='(-)'
                    re17='(\\d+)'
                    re18='(\\.)'
                    re19='(\\d+)'
                    re20='(\\.)'
                    re21='(\\d+)'
                    re22='(-)'
                    re23='((?:[a-z][a-z]*[0-9]+[a-z0-9]*))'
                    re24='(-)'
                    re25='(\w+)'
                    re26='(\\.)'
                    re27='(zip)'
                    rg = re.compile(re1+re2+re3+re4+re5+re6+re7+re8+re9+re10+re11+re12+re13+re14+re15+re16+re17+re18+re19+re20+re21+re22+re23+re24+re25+re26+re27,re.IGNORECASE|re.DOTALL)
                    m = rg.search(name)
                    if m:
                        var1=m.group(1)
                        c1=m.group(2)
                        c2=m.group(3)
                        c3=m.group(4)
                        word1=m.group(5)
                        c4=m.group(6)
                        word2=m.group(7)
                        c5=m.group(8)
                        word3=m.group(9)
                        c6=m.group(10)
                        word4=m.group(11)
                        c7=m.group(12)
                        int1=m.group(13)
                        c8=m.group(14)
                        word5=m.group(15)
                        c9=m.group(16)
                        int2=m.group(17)
                        c10=m.group(18)
                        int3=m.group(19)
                        c11=m.group(20)
                        int4=m.group(21)
                        c12=m.group(22)
                        alphanum1=m.group(23)
                        c13=m.group(24)
                        alphanum2=m.group(25)
                        c14=m.group(26)
                        word6=m.group(27)
                        url = int1+c8+word5+c9+int2+c10+int3+c11+int4+c12+alphanum1+c13+alphanum2+c14+word6
                        ver = word5+c9+int2+c10+int3+c11+int4
                        device = alphanum2
                        stable = SettingItem(panel = panel, title = "%s" % device.upper(), disabled=False, desc = "Build: %s" % ver)
                        stable_btn = CustomButton(text="%s" % url, size_hint=(None, None),width=330, height=40)
                        # NOTE(review): stable_b2 is created but never used -- confirm.
                        stable_b2 = CustomButton(text="%s" % url)
                        stable.add_widget(stable_btn)
                        msg.add_widget(stable)
                        def cm_stable(self):
                            # NOTE(review): c7 is captured late-binding from the
                            # loop; all buttons see its final value -- confirm.
                            webbrowser.open('http://get.cm/get/jenkins'+c7+'%s' % self.text)
                        stable_btn.bind(on_release=cm_stable)
            root = ScrollView(size_hint=(None, None), size=(730, 390), do_scroll_x=False)
            root.add_widget(msg)
            Box.add_widget(root)
            Box.add_widget(btn_layout)
            popup = Popup(background='atlas://images/eds/pop', title='CyanogenMod (RELEASE CANDIDATE)',content=Box, auto_dismiss=True,
                          size_hint=(None, None), size=(750, 500))
            done.bind(on_release=popup.dismiss)
            popup.open()
        except:
            # NOTE(review): bare except hides the real failure reason.
            EdsNotify().run("'Url Not Found", 'Error Loading: http://get.cm')
    def load_cm_n(self):
        """Scrape get.cm for NIGHTLY builds, one row per device with build date."""
        Box = BoxLayout(orientation="vertical", spacing=10)
        panel = SettingsPanel(title="CyanogenMod", settings=self)
        msg = GridLayout(cols=1, size_hint=(None, 8.8), width=750)
        btn_layout = GridLayout(cols=1)
        done = Button(text="Done")
        btn_layout.add_widget(done)
        msg.bind(minimum_height=msg.setter('height'))
        try:
            html_page = urllib2.urlopen(cyan_n)
            soup = BeautifulSoup(html_page)
            for link in soup.findAll('a'):
                if "/get/jenkins/" in link.get('href'):
                    name = link.get('href').split(".html")[0]
                    re1='(http)' # Variable Name 1
                    re2='(:)' # Any Single Character 1
                    re3='(\\/)' # Any Single Character 2
                    re4='(\\/)' # Any Single Character 3
                    re5='(get\\.cm)' # Fully Qualified Domain Name 1
                    re6='(\\/)' # Any Single Character 4
                    re7='(get)' # Word 1
                    re8='(\\/)' # Any Single Character 5
                    re9='((?:[a-z][a-z]+))' # Word 2
                    re10='(\\/)' # Any Single Character 6
                    re11='(\\d+)' # Integer Number 1
                    re12='(\\/)' # Any Single Character 7
                    re13='(cm)' # Word 3
                    re14='([-+]\\d+)' # Integer Number 1
                    re15='(-)' # Any Single Character 8
                    re16='((?:(?:[1]{1}\\d{1}\\d{1}\\d{1})|(?:[2]{1}\\d{3}))(?:[0]?[1-9]|[1][012])(?:(?:[0-2]?\\d{1})|(?:[3][01]{1})))(?![\\d])' # YYYYMMDD 1
                    re17='(-)' # Any Single Character 9
                    re18='(NIGHTLY)' # Word 4
                    re19='(-)' # Any Single Character 10
                    re20='(\w+)' # Alphanum 1
                    re21='(\\.)' # Any Single Character 11
                    re22='(zip)' # Word 5
                    rg = re.compile(re1+re2+re3+re4+re5+re6+re7+re8+re9+re10+re11+re12+re13+re14+re15+re16+re17+re18+re19+re20+re21+re22,re.IGNORECASE|re.DOTALL)
                    m = rg.search(name)
                    if m:
                        var1=m.group(1)
                        c1=m.group(2)
                        c2=m.group(3)
                        c3=m.group(4)
                        fqdn1=m.group(5)
                        c4=m.group(6)
                        word1=m.group(7)
                        c5=m.group(8)
                        word2=m.group(9)
                        c6=m.group(10)
                        int1=m.group(11)
                        c7=m.group(12)
                        word3=m.group(13)
                        signed_int1=m.group(14)
                        c8=m.group(15)
                        yyyymmdd1=m.group(16)
                        c9=m.group(17)
                        word4=m.group(18)
                        c10=m.group(19)
                        alphanum1=m.group(20)
                        c11=m.group(21)
                        word5=m.group(22)
                        url = int1+c7+word3+signed_int1+c8+yyyymmdd1+c9+word4+c10+alphanum1
                        ver = word3+signed_int1
                        device = alphanum1
                        night = SettingItem(panel = panel, title = "%s" % device.upper(), disabled=False, desc = "Build: %s" % ver + "\nBuild Date: %s" % yyyymmdd1)
                        night_btn = CustomButton(text="%s" % url ,size_hint=(None, None),width=330, height=40)
                        night.add_widget(night_btn)
                        msg.add_widget(night)
                        def cm_nightly(self):
                            # NOTE(review): c6/c11/word5 are captured late-binding
                            # from the loop (all buttons see their final values),
                            # and %-formatting applies only to c6 before the
                            # concatenation -- confirm intended URL.
                            webbrowser.open('http://get.cm/get/jenkins%s' % c6+self.text+c11+word5)
                        night_btn.bind(on_release=cm_nightly)
            root = ScrollView(size_hint=(None, None), size=(730, 390), do_scroll_x=False)
            root.add_widget(msg)
            Box.add_widget(root)
            Box.add_widget(btn_layout)
            popup = Popup(background='atlas://images/eds/pop', title='CyanogenMod (NIGHTLY)',content=Box, auto_dismiss=True,
                          size_hint=(None, None), size=(750, 500))
            done.bind(on_release=popup.dismiss)
            popup.open()
        except:
            # NOTE(review): bare except hides the real failure reason.
            EdsNotify().run("'Url Not Found", 'Error Loading: http://get.cm')
    def boot_scripts(self):
        """Panel offering init.d tweak scripts to copy into the ROM."""
        self.panel_layout.clear_widgets()
        title = Label(text='[b][color=#22A0D6][size=20]Init.d Scripts[/size][/color][/b]', markup = True, pos_hint={'x':.0, 'y':.20})
        grid_layout = GridLayout(cols=3, row_force_default=True, row_default_height=40, spacing=10, pos_hint={'x':.0, 'y':-.35})
        ext = CustomButton(text='Ext4 Tweak', pos_hint={'x':.0, 'y':.550}, size_hint=(.90, .06))
        sd = CustomButton(text='Sd Card Speed Fix', pos_hint={'x':.0, 'y':.550}, size_hint=(.90, .06))
        # NOTE(review): local name `zip` shadows the builtin within this method.
        zip = CustomButton(text='Zipalign', pos_hint={'x':.0, 'y':.550}, size_hint=(.90, .06))
        self.panel_layout.add_widget(title)
        self.panel_layout.add_widget(grid_layout)
        grid_layout.add_widget(ext)
        grid_layout.add_widget(sd)
        grid_layout.add_widget(zip)
        def cp_ext(self):
            # Copy the ext4 tweak script into the ROM's init.d directory.
            shutil.copy('%s/00ext4'% (Initd), (Rom_Initd))
            if config.getint('Config', 'changelog'):
                text_file = open(Changelog, "a")
                text_file.write("\n%s -- Added ext4 tweak\n" % timestamp)
                text_file.close()
            EdsNotify().run("Script Added Successfully", '')
        ext.bind(on_release=cp_ext)
        def cp_sd(self):
            # Copy the SD card speed fix script into the ROM's init.d directory.
            shutil.copy('%s/05sdcardspeedfix'% (Initd), (Rom_Initd))
            if config.getint('Config', 'changelog'):
                text_file = open(Changelog, "a")
                text_file.write("\n%s -- Added Sd Card Speed Fix\n" % timestamp)
                text_file.close()
            EdsNotify().run("Added Successfully", '')
        sd.bind(on_release=cp_sd)
        def cp_zip(self):
            # Copy the zipalign-on-boot script into the ROM's init.d directory.
            shutil.copy('%s/06zipalign'% (Initd), (Rom_Initd))
            if config.getint('Config', 'changelog'):
                text_file = open(Changelog, "a")
                text_file.write("\n%s -- Added Zipalign on boot\n" % timestamp)
                text_file.close()
            EdsNotify().run("Added Successfully", '')
        zip.bind(on_release=cp_zip)
    def trans_scripts(self):
        # Placeholder: transitions feature not implemented yet.
        print 'Hello Transitions'
    def boot_anims(self):
        # Placeholder: boot animation feature not implemented yet.
        print 'Doing boot animations'
|
|
import __builtin__
import ast
import inspect
import types
from collections import OrderedDict
import numpy as np
from dsltools import NestedBlocks, ScopedDict
from .. import config, names, prims, syntax
from ..names import NameNotFound
from ..ndtypes import Type
from ..prims import Prim
from ..syntax import (Expr,
Assign, If, ForLoop, Return,
Var, PrimCall, Cast, Select,
Map, Reduce, IndexMap,
Enumerate, Zip, Len, Range,
Slice, Tuple, Array,
Const, Call, Index,
FormalArgs, ActualArgs,
UntypedFn, Closure,
SourceInfo)
from ..syntax.helpers import (none, true, false, one_i64, zero_i64, zero_i24,
is_python_constant, const)
from ..syntax.wrappers import build_untyped_prim_fn, build_untyped_expr_fn, build_untyped_cast_fn
from ..transforms import subst_expr, subst_stmt_list
from decorators import jit, macro
from python_ref import GlobalValueRef, ClosureCellRef
class UnsupportedSyntax(Exception):
  """Raised when the translator meets a Python construct Parakeet can't handle."""
  def __init__(self, node, function_name = None, filename = None):
    self.node = node
    self.function_name = function_name
    self.filename = filename

  def __str__(self):
    node_kind = self.node.__class__.__name__
    # Prefer the most specific message the available context allows.
    if (self.function_name is not None and
        self.filename is not None and
        self.node.lineno is not None):
      return "Parakeet doesn't support %s from function '%s' in file %s on line %d" % \
          (node_kind, self.function_name, self.filename, self.node.lineno)
    if self.function_name is not None:
      return "Parakeet doesn't support %s in '%s'" % (node_kind, self.function_name)
    return "Parakeet doesn't support %s" % node_kind
class ExternalValue(object):
  """Wrapper marking a Python value referenced from outside the translated
  function (e.g. a global module/object), so attribute lookups on it can be
  resolved later."""

  def __init__(self, python_value):
    self.value = python_value

  def __str__(self):
    return "ExternalValue(%s)" % self.value
def mk_reduce_call(fn, positional, init = None):
  """Build a Reduce node combining `positional` args with `fn` along axis 0.

  The per-element map function is the identity; `init` defaults to the
  `none` sentinel when not supplied.
  """
  init = none if init is None else init
  axis = zero_i64
  # Imported here (not at module top) -- presumably to avoid a circular
  # import with the lib package.
  from .. import lib
  return Reduce(fn = translate_function_value(lib.identity),
                combine = fn,
                args = positional,
                axis = axis,
                init = init)
def mk_simple_fn(mk_body, input_name = "x", fn_name = "cast"):
  """Build a one-argument UntypedFn whose body is produced by mk_body(var)."""
  unique_arg_name = names.fresh(input_name)
  unique_fn_name = names.fresh(fn_name)
  var = Var(unique_arg_name)
  formals = FormalArgs()
  formals.add_positional(unique_arg_name, input_name)
  body = mk_body(var)
  return UntypedFn(unique_fn_name, formals, body)
def is_hashable(x):
  """Return True if x can be hashed (i.e. used as a dict key / set member).

  hash() raises TypeError for unhashable values (list, dict, set, ...).
  Catch only TypeError -- the original bare `except` would also have
  swallowed KeyboardInterrupt and SystemExit.
  """
  try:
    hash(x)
    return True
  except TypeError:
    return False
def is_prim(v):
  """True if v is a Prim node or a Python value registered in the prim table.

  The hashability guard is needed because unhashable values can't be
  membership-tested against prim_lookup_by_value.
  """
  return isinstance(v, (Prim)) or (is_hashable(v) and v in prims.prim_lookup_by_value)
def is_builtin_function(v):
  """True for built-in callables and for types (Python 2: types.TypeType is `type`)."""
  return isinstance(v, (types.TypeType, types.BuiltinFunctionType))
def is_user_function(v):
  """True for plain Python functions and Parakeet-decorated (@jit / @macro) ones."""
  return isinstance(v, (types.FunctionType, jit, macro))
def is_function_value(v):
  """True for anything callable that the translator knows how to convert."""
  return is_user_function(v) or is_builtin_function(v) or is_prim(v)
def is_static_value(v):
  """True if v can be baked into the IR at translation time
  (Python literal constant, numpy dtype, or a convertible function)."""
  return is_python_constant(v) or \
         type(v) is np.dtype or \
         is_function_value(v)
def value_to_syntax(v):
  """Convert a Python value into Parakeet syntax.

  Existing Expr nodes pass through; literal constants become Const nodes;
  everything else is treated as a function value and translated.
  """
  if isinstance(v, Expr):
    return v
  elif is_python_constant(v):
    return const(v)
  else:
    return translate_function_value(v)
class AST_Translator(ast.NodeVisitor):
  def __init__(self,
               globals_dict=None,
               closure_cell_dict=None,
               parent=None,
               function_name = None,
               filename = None):
    """Translator from one Python function's AST to untyped Parakeet IR.

    globals_dict / closure_cell_dict supply the function's global and
    closure environments; parent links nested translators (for closures);
    function_name / filename are used only for error reporting.
    """
    # assignments which need to get prepended at the beginning of the
    # function
    self.globals = globals_dict
    self.blocks = NestedBlocks()
    self.parent = parent
    self.scopes = ScopedDict()
    # NOTE(review): self.globals and self.globals_dict hold the same dict.
    self.globals_dict = globals_dict
    self.closure_cell_dict = closure_cell_dict
    # mapping from names/paths to either a closure cell reference or a
    # global value
    self.python_refs = OrderedDict()
    self.original_outer_names = []
    self.localized_outer_names = []
    self.filename = filename
    self.function_name = function_name
    # Start with one (scope, block) level for the function body itself.
    self.push()
def push(self, scope = None, block = None):
if scope is None:
scope = {}
if block is None:
block = []
self.scopes.push(scope)
self.blocks.push(block)
def pop(self):
scope = self.scopes.pop()
block = self.blocks.pop()
return scope, block
  def fresh_name(self, original_name):
    """Allocate a unique SSA name for original_name and bind it in scope."""
    fresh_name = names.fresh(original_name)
    self.scopes[original_name] = fresh_name
    return fresh_name
  def fresh_names(self, original_names):
    """Allocate an SSA name for each given name (Python 2 map: a list)."""
    return map(self.fresh_name, original_names)
  def fresh_var(self, name):
    """Allocate an SSA name for `name` and wrap it in a Var node."""
    return Var(self.fresh_name(name))
  def fresh_vars(self, original_names):
    """Allocate a Var with a fresh SSA name for each given name."""
    return map(self.fresh_var, original_names)
  def current_block(self):
    """The statement list currently being appended to."""
    return self.blocks.top()
  def current_scope(self):
    """The innermost name->SSA-name mapping."""
    return self.scopes.top()
  def ast_to_value(self, expr):
    """Statically evaluate a restricted constant expression to a Python value.

    Handles numeric literals, tuples of evaluable elements, global names,
    and attribute chains rooted at globals (unwrapping ExternalValue).
    NOTE(review): any other node type falls through and returns None --
    confirm that is intended.
    """
    if isinstance(expr, ast.Num):
      return expr.n
    elif isinstance(expr, ast.Tuple):
      return tuple(self.ast_to_value(elt) for elt in expr.elts)
    elif isinstance(expr, ast.Name):
      return self.lookup_global(expr.id)
    elif isinstance(expr, ast.Attribute):
      left = self.ast_to_value(expr.value)
      if isinstance(left, ExternalValue):
        left = left.value
      return getattr(left, expr.attr)
  def lookup_global(self, key):
    """Fetch the Python value bound to `key` in the function's globals.

    `key` may be a single-element list/tuple (attribute-path form) or a
    str. Falls back to the __builtin__ namespace; a translator without its
    own globals dict delegates to its parent. Asserts on failure.
    """
    if isinstance(key, (list, tuple)):
      assert len(key) == 1
      key = key[0]
    else:
      assert isinstance(key, str), "Invalid global key: %s" % (key,)
    if self.globals:
      if key in self.globals:
        return self.globals[key]
      elif key in __builtin__.__dict__:
        return __builtin__.__dict__[key]
      else:
        assert False, "Couldn't find global name %s" % key
    else:
      assert self.parent is not None
      return self.parent.lookup_global(key)
def is_global(self, key):
if isinstance(key, (list, tuple)):
key = key[0]
if key in self.scopes:
return False
elif self.closure_cell_dict and key in self.closure_cell_dict:
return False
if self.globals:
return key in self.globals or key in __builtins__
assert self.parent is not None
return self.parent.is_global(key)
  def local_ref_name(self, ref, python_name):
    """Map an external reference (closure cell / global array) to a local Var.

    Reuses the existing local name if the same ref was registered before;
    otherwise allocates a fresh name and records the ref so the runtime can
    pass the referenced value in as an extra argument.
    """
    for (local_name, other_ref) in self.python_refs.iteritems():
      if ref == other_ref:
        return Var(local_name)
    local_name = names.fresh(python_name)
    self.scopes[python_name] = local_name
    self.original_outer_names.append(python_name)
    self.localized_outer_names.append(local_name)
    self.python_refs[local_name] = ref
    return Var(local_name)
def is_visible_name(self, name):
if name in self.scopes:
return True
if self.parent:
return self.parent.is_visible_name(name)
else:
return self.is_global(name)
  def lookup(self, name):
    """Resolve `name` to IR: a local Var, a localized outer-scope Var, a
    closure-cell reference, static syntax for a global, or an ExternalValue
    for opaque globals. Raises NameNotFound when the name is not visible.
    """
    #if name in reserved_names:
    #  return reserved_names[name]
    if name in self.scopes:
      return Var(self.scopes[name])
    elif self.parent and self.parent.is_visible_name(name):
      # don't actually keep the outer binding name, we just
      # need to check that it's possible and tell the outer scope
      # to register any necessary python refs
      local_name = names.fresh(name)
      self.scopes[name] = local_name
      self.original_outer_names.append(name)
      self.localized_outer_names.append(local_name)
      return Var(local_name)
    elif self.closure_cell_dict and name in self.closure_cell_dict:
      ref = ClosureCellRef(self.closure_cell_dict[name], name)
      return self.local_ref_name(ref, name)
    elif self.is_global(name):
      value = self.lookup_global(name)
      if is_static_value(value):
        return value_to_syntax(value)
      elif isinstance(value, np.ndarray):
        # Global arrays are passed in by reference at call time.
        ref = GlobalValueRef(value)
        return self.local_ref_name(ref, name)
      else:
        # assume that this is a module or object which will have some
        # statically convertible value pulled out of it
        return ExternalValue(value)
      #else:
      #  assert False, "Can't use global value %s" % value
    else:
      raise NameNotFound(name)
  def visit_list(self, nodes):
    """Translate each AST node in `nodes` (Python 2 map: returns a list)."""
    return map(self.visit, nodes)
  def tuple_arg_assignments(self, elts, var):
    """
    Recursively decompose a nested tuple argument like
      def f((x,(y,z))):
        ...
    into a single name and a series of assignments:
      def f(tuple_arg):
        x = tuple_arg[0]
        tuple_arg_elt = tuple_arg[1]
        y = tuple_arg_elt[0]
        z = tuple_arg_elt[1]
    """
    assignments = []
    for (i, sub_arg) in enumerate(elts):
      if isinstance(sub_arg, ast.Tuple):
        name = "tuple_arg_elt"
      else:
        assert isinstance(sub_arg, ast.Name)
        name = sub_arg.id
      lhs = self.fresh_var(name)
      stmt = Assign(lhs, Index(var, Const(i)))
      assignments.append(stmt)
      # Nested tuple patterns recurse on the freshly bound element.
      if isinstance(sub_arg, ast.Tuple):
        more_stmts = self.tuple_arg_assignments(sub_arg.elts, lhs)
        assignments.extend(more_stmts)
    return assignments
  def translate_args(self, args):
    """Convert an ast.arguments node into (FormalArgs, prelude assignments).

    Tuple-pattern parameters become a synthetic positional argument plus
    unpacking assignments; default values are evaluated statically via
    ast_to_value. **kwargs is not supported (asserted away).
    """
    assert not args.kwarg
    formals = FormalArgs()
    assignments = []
    for arg in args.args:
      if isinstance(arg, ast.Name):
        visible_name = arg.id
        local_name = self.fresh_name(visible_name)
        formals.add_positional(local_name, visible_name)
      else:
        assert isinstance(arg, ast.Tuple)
        arg_name = self.fresh_name("tuple_arg")
        formals.add_positional(arg_name)
        var = Var(arg_name)
        stmts = self.tuple_arg_assignments(arg.elts, var)
        assignments.extend(stmts)
    n_defaults = len(args.defaults)
    if n_defaults > 0:
      # Defaults align with the trailing positional parameters.
      local_names = formals.positional[-n_defaults:]
      for (k,expr) in zip(local_names, args.defaults):
        v = self.ast_to_value(expr)
        # for now we're putting literal python
        # values in the defaults dictionary of
        # a function's formal arguments
        formals.defaults[k] = v
    if args.vararg:
      assert isinstance(args.vararg, str)
      formals.starargs = self.fresh_name(args.vararg)
    return formals, assignments
  def visit_Name(self, expr):
    """Translate a name reference by resolving it through the scope chain."""
    assert isinstance(expr, ast.Name), "Expected AST Name object: %s" % expr
    return self.lookup(expr.id)
def create_phi_nodes(self, left_scope, right_scope, new_names = {}):
"""
Phi nodes make explicit the possible sources of each variable's values and
are needed when either two branches merge or when one was optionally taken.
"""
merge = {}
for (name, ssa_name) in left_scope.iteritems():
left = Var(ssa_name)
if name in right_scope:
right = Var(right_scope[name])
else:
try:
right = self.lookup(name)
except NameNotFound:
continue
if name in new_names:
new_name = new_names[name]
else:
new_name = self.fresh_name(name)
merge[new_name] = (left, right)
for (name, ssa_name) in right_scope.iteritems():
if name not in left_scope:
try:
left = self.lookup(name)
right = Var(ssa_name)
if name in new_names:
new_name = new_names[name]
else:
new_name = self.fresh_name(name)
merge[new_name] = (left, right)
except names.NameNotFound:
# for now skip over variables which weren't defined before
# a control flow split, which means that loop-local variables
# can't be used after the loop.
# TODO: Fix this. Maybe with 'undef' nodes?
pass
return merge
  def visit_Index(self, expr):
    """Unwrap ast.Index; the inner value is the actual subscript expression."""
    return self.visit(expr.value)
  def visit_Ellipsis(self, expr):
    """`...` in subscripts is not supported by Parakeet."""
    raise RuntimeError("Ellipsis operator unsupported")
  def visit_Slice(self, expr):
    """
    x[l:u:s]
    Optional fields
      expr.lower
      expr.upper
      expr.step
    """
    # Missing pieces become the `none` sentinel, not Python None.
    start = self.visit(expr.lower) if expr.lower else none
    stop = self.visit(expr.upper) if expr.upper else none
    step = self.visit(expr.step) if expr.step else none
    return Slice(start, stop, step)
  def visit_ExtSlice(self, expr):
    """Translate a multi-dimensional subscript a[i, j:k] into a Tuple of
    translated dims (or the single dim, unwrapped)."""
    slice_elts = map(self.visit, expr.dims)
    if len(slice_elts) > 1:
      return Tuple(slice_elts)
    else:
      return slice_elts[0]
  def visit_UnaryOp(self, expr):
    """Translate a unary op into a PrimCall; unary `+` is the identity."""
    ssa_val = self.visit(expr.operand)
    # UAdd doesn't do anything!
    if expr.op.__class__.__name__ == 'UAdd':
      return ssa_val
    prim = prims.find_ast_op(expr.op)
    return PrimCall(prim, [ssa_val])
  def visit_BinOp(self, expr):
    """Translate a binary op into a PrimCall on the translated operands."""
    ssa_left = self.visit(expr.left)
    ssa_right = self.visit(expr.right)
    prim = prims.find_ast_op(expr.op)
    return PrimCall(prim, [ssa_left, ssa_right])
  def visit_BoolOp(self, expr):
    """Translate an and/or chain into left-nested binary PrimCalls."""
    values = map(self.visit, expr.values)
    prim = prims.find_ast_op(expr.op)
    # Python, strangely, allows more than two arguments to
    # Boolean operators
    result = values[0]
    for v in values[1:]:
      result = PrimCall(prim, [result, v])
    return result
  def visit_Compare(self, expr):
    """Translate a comparison into a PrimCall; chained comparisons
    (a < b < c) are not supported (asserted away)."""
    lhs = self.visit(expr.left)
    assert len(expr.ops) == 1
    prim = prims.find_ast_op(expr.ops[0])
    assert len(expr.comparators) == 1
    rhs = self.visit(expr.comparators[0])
    return PrimCall(prim, [lhs, rhs])
  def visit_Subscript(self, expr):
    """Translate a[i] into an Index node on the translated value and index."""
    value = self.visit(expr.value)
    index = self.visit(expr.slice)
    return Index(value, index)
def generic_visit(self, expr):
raise UnsupportedSyntax(expr,
function_name = self.function_name,
filename = self.filename)
def visit(self, node):
res = ast.NodeVisitor.visit(self, node)
source_info = SourceInfo(filename = self.filename,
line = getattr(node, 'lineno', None),
col = getattr(node, 'e.col_offset', None),
function = self.function_name, )
res.source_info = source_info
return res
def translate_value_call(self, value, positional, keywords_dict= {}, starargs_expr = None):
if value is sum:
return mk_reduce_call(build_untyped_prim_fn(prims.add), positional, zero_i24)
elif value is max:
if len(positional) == 1:
return mk_reduce_call(build_untyped_prim_fn(prims.maximum), positional)
else:
assert len(positional) == 2
return PrimCall(prims.maximum, positional)
elif value is min:
if len(positional) == 1:
return mk_reduce_call(build_untyped_prim_fn(prims.minimum), positional)
else:
assert len(positional) == 2
return PrimCall(prims.minimum, positional)
elif value is map:
assert len(keywords_dict) == 0
assert len(positional) > 1
axis = keywords_dict.get("axis", None)
return Map(fn = positional[0], args = positional[1:], axis = axis)
elif value is enumerate:
assert len(positional) == 1, "Wrong number of args for 'enumerate': %s" % positional
assert len(keywords_dict) == 0, \
"Didn't expect keyword arguments for 'enumerate': %s" % keywords_dict
return Enumerate(positional[0])
elif value is len:
assert len(positional) == 1, "Wrong number of args for 'len': %s" % positional
assert len(keywords_dict) == 0, \
"Didn't expect keyword arguments for 'len': %s" % keywords_dict
return self.len(positional[0])
elif value is zip:
assert len(positional) > 1, "Wrong number of args for 'zip': %s" % positional
assert len(keywords_dict) == 0, \
"Didn't expect keyword arguments for 'zip': %s" % keywords_dict
return Zip(values = positional)
from ..mappings import function_mappings
if value in function_mappings:
value = function_mappings[value]
if isinstance(value, macro):
return value.transform(positional, keywords_dict)
fn = translate_function_value(value)
return Call(fn, ActualArgs(positional, keywords_dict, starargs_expr))
  def visit_Call(self, expr):
    """
    Translate a Python call expression, distinguishing calls through a
    global attribute chain (np.add, math.sqrt) from calls of locally
    defined Parakeet expressions.

    TODO:
    The logic here is broken and haphazard, eventually try to handle nested
    scopes correctly, along with globals, cell refs, etc..
    """
    fn, args, keywords_list, starargs, kwargs = \
        expr.func, expr.args, expr.keywords, expr.starargs, expr.kwargs
    assert kwargs is None, "Dictionary of keyword args not supported"
    positional = self.visit_list(args)
    # translate keyword arguments into a name -> IR-expression dict
    keywords_dict = {}
    for kwd in keywords_list:
      keywords_dict[kwd.arg] = self.visit(kwd.value)
    if starargs:
      starargs_expr = self.visit(starargs)
    else:
      starargs_expr = None
    # is the callee a dotted chain of names rooted in a plain Name,
    # e.g. 'np.linalg.norm'?
    def is_attr_chain(expr):
      return isinstance(expr, ast.Name) or \
             (isinstance(expr, ast.Attribute) and is_attr_chain(expr.value))
    # flatten such a chain into a list of name strings
    def extract_attr_chain(expr):
      if isinstance(expr, ast.Name):
        return [expr.id]
      else:
        base = extract_attr_chain(expr.value)
        base.append(expr.attr)
        return base
    # resolve the chain against the caller's globals, trying attribute
    # access first and falling back to item lookup (for dict-like modules)
    def lookup_attr_chain(names):
      value = self.lookup_global(names[0])
      for name in names[1:]:
        if hasattr(value, name):
          value = getattr(value, name)
        else:
          try:
            value = value[name]
          except:
            assert False, "Couldn't find global name %s" % ('.'.join(names))
      return value
    if is_attr_chain(fn):
      names = extract_attr_chain(fn)
      if self.is_global(names):
        # calling a concrete Python value: dispatch on that value
        return self.translate_value_call(lookup_attr_chain(names),
                                         positional, keywords_dict, starargs_expr)
    # otherwise translate the callee like any other expression
    fn_node = self.visit(fn)
    if isinstance(fn_node, syntax.Expr):
      actuals = ActualArgs(positional, keywords_dict, starargs_expr)
      return Call(fn_node, actuals)
    else:
      # the callee turned out to be an external Python value after all
      assert isinstance(fn_node, ExternalValue)
      return self.translate_value_call(fn_node.value,
                                       positional, keywords_dict, starargs_expr)
  def visit_List(self, expr):
    # list literals become Parakeet arrays
    return Array(self.visit_list(expr.elts))
  def visit_Expr(self, expr):
    # an expression used as a statement; since the translated IR is made of
    # assignments, bind the value to a throwaway variable
    # dummy assignment to allow for side effects on RHS
    lhs = self.fresh_var("dummy")
    if isinstance(expr.value, ast.Str):
      # bare string (e.g. a stray docstring): nothing to evaluate
      return Assign(lhs, zero_i64)
      # return syntax.Comment(expr.value.s.strip().replace('\n', ''))
    else:
      rhs = self.visit(expr.value)
      return syntax.Assign(lhs, rhs)
  def visit_GeneratorExp(self, expr):
    # generator expressions are translated eagerly, like list comprehensions
    return self.visit_ListComp(expr)
  def visit_ListComp(self, expr):
    """
    Translate a single-generator list comprehension [f(x) for x in seq]
    into a Map (or IndexMap when the sequence is a simple 0-based range).
    """
    gens = expr.generators
    assert len(gens) == 1
    gen = gens[0]
    target = gen.target
    # the comprehension variable may be a single name or a tuple of names
    if target.__class__ is ast.Name:
      arg_vars = [target]
    else:
      assert target.__class__ is ast.Tuple and \
             all(e.__class__ is ast.Name for e in target.elts),\
        "Expected comprehension target to be variable or tuple of variables, got %s" % \
        ast.dump(target)
      arg_vars = [ast.Tuple(elts = target.elts)]
    # build a lambda as a Python ast representing
    # what we do to each element
    args = ast.arguments(args = arg_vars,
                         vararg = None,
                         kwarg = None,
                         defaults = ())
    fn = translate_function_ast(name = "comprehension_map",
                                args = args,
                                body = [ast.Return(expr.elt)],
                                parent = self)
    seq = self.visit(gen.iter)
    ifs = gen.ifs
    assert len(ifs) == 0, "Parakeet: Conditions in array comprehensions not yet supported"
    # short-circuit conversion from Map(Range) to IndexMap for simple cases
    if seq.__class__ is Range and \
       (seq.start is None or seq.start == zero_i64) and \
       (seq.step is None or seq.step == one_i64):
      return IndexMap(fn = fn, shape = seq.stop)
    else:
      return Map(fn = fn, args=(seq,), axis = zero_i64)
  def visit_Attribute(self, expr):
    """
    Translate attribute access: external Python values are resolved
    eagerly, known properties (.T, .shape, ...) are expanded via the
    mappings tables, and known methods become closures over the object.
    """
    # TODO:
    # Recursive lookup to see if:
    # (1) base object is local, if so-- create chain of attributes
    # (2) base object is global but an adverb primitive-- use it locally
    #     without adding it to nonlocals
    # (3) not local at all-- in which case, add the whole chain of strings
    #     to nonlocals
    #
    # AN IDEA:
    # Allow external values to be brought into the syntax tree as
    # a designated ExternalValue node
    # and then here check if the LHS is an ExternalValue and if so,
    # pull out the value. If it's a constant, then make it into syntax,
    # if it's a function, then parse it, else raise an error.
    #
    from ..mappings import property_mappings, method_mappings
    value = self.visit(expr.value)
    attr = expr.attr
    if isinstance(value, ExternalValue):
      # attribute of an opaque Python object: fetch it now, at translation
      # time, and embed either its syntax form or a new ExternalValue
      value = value.value
      assert hasattr(value, attr), "Couldn't find attribute '%s' in %s" % (attr, value)
      value = getattr(value, attr)
      if is_static_value(value):
        return value_to_syntax(value)
      else:
        return ExternalValue(value)
    elif attr in property_mappings:
      # mapped property: expand a macro or call its implementation fn
      fn = property_mappings[attr]
      if isinstance(fn, macro):
        return fn.transform( [value] )
      else:
        return Call(translate_function_value(fn),
                    ActualArgs(positional = (value,)))
    elif attr in method_mappings:
      # mapped method: partially apply the implementation to the object
      fn_python = method_mappings[attr]
      fn_syntax = translate_function_value(fn_python)
      return Closure(fn_syntax, args=(value,))
    else:
      assert False, "Attribute %s not supported" % attr
  def visit_Num(self, expr):
    # numeric literal -> constant node
    return Const(expr.n)
  def visit_Tuple(self, expr):
    # tuple literal: translate each element
    return syntax.Tuple(self.visit_list(expr.elts))
  def visit_IfExp(self, expr):
    # ternary 'a if c else b' -> Select node
    cond = self.visit(expr.test)
    if_true = self.visit(expr.body)
    if_false = self.visit(expr.orelse)
    return Select(cond, if_true, if_false)
  def visit_lhs(self, lhs):
    """
    Translate an assignment target: names get fresh SSA variables,
    tuple targets recurse element-wise, anything else (slices,
    attributes) is translated like an ordinary expression.
    """
    if isinstance(lhs, ast.Name):
      return self.fresh_var(lhs.id)
    elif isinstance(lhs, ast.Tuple):
      return syntax.Tuple( map(self.visit_lhs, lhs.elts))
    else:
      # in case of slicing or attributes
      res = self.visit(lhs)
      return res
  def visit_Assign(self, stmt):
    # important to evaluate RHS before LHS for statements like 'x = x + 1'
    # NOTE(review): only targets[0] is translated, so a chained assignment
    # 'a = b = rhs' silently drops the extra targets — confirm intended
    ssa_rhs = self.visit(stmt.value)
    ssa_lhs = self.visit_lhs(stmt.targets[0])
    return Assign(ssa_lhs, ssa_rhs)
  def visit_AugAssign(self, stmt):
    # 'x += v' desugars to 'x = x + v': read old SSA value, bind new one
    ssa_incr = self.visit(stmt.value)
    ssa_old_value = self.visit(stmt.target)
    ssa_new_value = self.visit_lhs(stmt.target)
    prim = prims.find_ast_op(stmt.op)
    return Assign(ssa_new_value, PrimCall(prim, [ssa_old_value, ssa_incr]))
  def visit_Return(self, stmt):
    return syntax.Return(self.visit(stmt.value))
  def visit_If(self, stmt):
    # translate both branches in child scopes, then merge variables
    # assigned on either side with SSA phi nodes
    cond = self.visit(stmt.test)
    true_scope, true_block = self.visit_block(stmt.body)
    false_scope, false_block = self.visit_block(stmt.orelse)
    merge = self.create_phi_nodes(true_scope, false_scope)
    return syntax.If(cond, true_block, false_block, merge)
  def visit_loop_body(self, body, *exprs):
    """
    Translate a loop body into SSA form.

    Any variable that exists before the loop and is reassigned inside it
    gets a fresh '<name>_loop' variable with a phi-style merge of its
    pre-loop and end-of-body values; references to the old name in the
    body and in the extra expressions (e.g. a while-condition) are
    substituted accordingly.

    Returns (body, merge, exprs) where merge maps each loop variable to
    its (before, after) value pair.
    """
    merge = {}
    substitutions = {}
    curr_scope = self.current_scope()
    # translate the auxiliary expressions (e.g. the loop condition) first,
    # in the pre-body scope
    exprs = [self.visit(expr) for expr in exprs]
    scope_after, body = self.visit_block(body)
    for (k, name_after) in scope_after.iteritems():
      if k in self.scopes:
        # variable existed before the loop: introduce the merged name
        name_before = self.scopes[k]
        new_name = names.fresh(k + "_loop")
        merge[new_name] = (Var(name_before), Var(name_after))
        substitutions[name_before]  = new_name
        curr_scope[k] = new_name
    # rewrite both the condition expressions and the body to use the
    # merged loop variables
    exprs = [subst_expr(expr, substitutions) for expr in exprs]
    body = subst_stmt_list(body, substitutions)
    return body, merge, exprs
  def visit_While(self, stmt):
    # 'while ... else ...' not supported
    assert not stmt.orelse
    body, merge, (cond,) = self.visit_loop_body(stmt.body, stmt.test)
    return syntax.While(cond, body, merge)
  def assign(self, lhs, rhs):
    # append an assignment statement to the block under construction
    self.current_block().append(Assign(lhs,rhs))
  def assign_to_var(self, rhs, name = None):
    """
    Bind *rhs* to a fresh variable (emitting the assignment) and return
    that variable; variables and constants are returned unchanged.
    """
    if isinstance(rhs, (Var, Const)):
      return rhs
    if name is None:
      name = "temp"
    var = self.fresh_var(name)
    self.assign(var, rhs)
    return var
  # arithmetic helpers: build a prim call and, unless temp=False,
  # bind it to a named temporary so later code can reference it
  def add(self, x, y, temp = True):
    expr = PrimCall(prims.add, [x,y])
    if temp:
      return self.assign_to_var(expr, "add")
    else:
      return expr
  def sub(self, x, y, temp = True):
    expr = PrimCall(prims.subtract, [x,y])
    if temp:
      return self.assign_to_var(expr, "sub")
    else:
      return expr
  def mul(self, x, y, temp = True):
    expr = PrimCall(prims.multiply, [x,y])
    if temp:
      return self.assign_to_var(expr, "mul")
    else:
      return expr
  def div(self, x, y, temp = True):
    expr = PrimCall(prims.divide, [x,y])
    if temp:
      return self.assign_to_var(expr, "div")
    else:
      return expr
def len(self, x):
if isinstance(x, Enumerate):
return self.len(x.value)
elif isinstance(x, Zip):
elt_lens = [self.len(v) for v in x.values]
result = elt_lens[0]
for n in elt_lens[1:]:
result = PrimCall(prims.minimum, [result, n])
return result
elif isinstance(x, (Array, Tuple)):
return Const(len(x.elts))
elif isinstance(x, Range):
# if it's a range from 0..len(x), then just return len(x)
if isinstance(x.stop, Len):
if isinstance(x.start, Const) and x.start.value == 0:
if isinstance(x.step, Const) and x.stop.value in (1,-1, None):
return x.stop
seq_var = self.assign_to_var(x, "len_input")
return self.assign_to_var(Len(seq_var), "len_result")
def is_none(self, v):
return v is None or isinstance(v, Const) and v.value is None
def for_loop_bindings(self, idx, lhs, rhs):
if isinstance(rhs, Enumerate):
array = rhs.value
elt = Index(array, idx)
if isinstance(lhs, Tuple):
var_names = ", ".join(str(elt) for elt in lhs.elts)
if len(lhs.elts) < 2:
raise SyntaxError("Too many values to unpack: 'enumerate' expects 2 but given %s" % var_names)
elif len(lhs.elts) > 2:
raise SyntaxError("Need more than 2 values to unpack for LHS of %s" % var_names)
idx_var, seq_var = lhs.elts
other_bindings = self.for_loop_bindings(idx, seq_var, array)
return [Assign(idx_var, idx)] + other_bindings
elif isinstance(lhs, Var):
seq_var = self.fresh_var("seq_elt")
other_bindings = self.for_loop_bindings(idx, seq_var, array)
return [Assign(lhs, Tuple(idx, seq_var))] + other_bindings
else:
raise SyntaxError("Unexpected binding in for loop: %s = %s" % (lhs,rhs))
elif isinstance(rhs, Zip):
values_str = ", ".join(str(v) for v in rhs.values)
if len(rhs.values) < 2:
raise SyntaxError("'zip' must take at least two arguments, given: %s" % values_str)
if isinstance(lhs, Tuple):
if len(lhs.elts) < len(rhs.values):
raise SyntaxError("Too many values to unpack in %s = %s" % (lhs, rhs))
elif len(lhs.elts) > len(rhs.values):
raise SyntaxError("Too few values on LHS of bindings in %s = %s" % (lhs,rhs))
result = []
for lhs_var, rhs_value in zip(lhs.elts, rhs.values):
result.extend(self.for_loop_bindings(idx, lhs_var, rhs_value))
return result
elif isinstance(lhs, Var):
lhs_vars = [self.fresh_var("elt%d" % i) for i in xrange(len(rhs.values))]
result = []
for lhs_var, rhs_value in zip(lhs_vars, rhs.values):
result.extend(self.for_loop_bindings(idx, lhs_var, rhs_value))
result.append(Assign(lhs, Tuple(elts=lhs_vars)))
return result
else:
raise SyntaxError("Unexpected binding in for loop: %s = %s" % (lhs,rhs))
elif isinstance(rhs, Range):
if isinstance(lhs, Tuple):
raise SyntaxError("Too few values in unpack in for loop binding %s = %s" % (lhs,rhs))
elif isinstance(lhs, Var):
start = rhs.start
if self.is_none(start):
start = zero_i64
step = rhs.step
if self.is_none(step):
step = one_i64
return [Assign(lhs, self.add(start, self.mul(idx, step, temp = False), temp= False))]
else:
raise SyntaxError("Unexpected binding in for loop: %s = %s" % (lhs,rhs))
else:
return [Assign(lhs, Index(rhs,idx))]
  def visit_For(self, stmt):
    """
    Translate a for-loop. A loop over a Range becomes a ForLoop directly;
    any other sequence is iterated by index, with the target bound to the
    current element at the top of the body.
    """
    # 'for ... else ...' not supported
    assert not stmt.orelse
    var = self.visit_lhs(stmt.target)
    seq = self.visit(stmt.iter)
    body, merge, _ = self.visit_loop_body(stmt.body)
    if isinstance(seq, Range):
      assert isinstance(var, Var), "Expect loop variable to be simple but got '%s'" % var
      return ForLoop(var, seq.start, seq.stop, seq.step, body, merge)
    else:
      # iterate 0..len(seq) and bind the target from seq[idx] each iteration
      idx = self.fresh_var("idx")
      n = self.len(seq)
      bindings = self.for_loop_bindings(idx, var, seq)
      return ForLoop(idx, zero_i64, n, one_i64, bindings + body, merge)
def visit_block(self, stmts):
self.push()
curr_block = self.current_block()
for stmt in stmts:
parakeet_stmt = self.visit(stmt)
curr_block.append(parakeet_stmt)
return self.pop()
  def visit_FunctionDef(self, node):
    """
    Translate a nested function
    """
    # translate with this translator as parent scope, then bind the
    # resulting function object to a local variable of the same name
    fundef = translate_function_ast(node.name, node.args, node.body, parent = self)
    local_var = self.fresh_var(node.name)
    return Assign(local_var, fundef)
  def visit_Lambda(self, node):
    # a lambda is a one-expression function whose body is 'return <expr>'
    return translate_function_ast("lambda", node.args, [ast.Return(node.body)], parent = self)
def translate_function_ast(name, args, body,
                           globals_dict = None,
                           closure_vars = [],
                           closure_cells = [],
                           parent = None,
                           filename = None):
  """
  Helper to launch translation of a python function's AST, and then construct
  an untyped parakeet function from the arguments, refs, and translated body.

  Exactly one of globals_dict (top-level function) or parent (function
  nested inside another translation) should be provided; closure_vars and
  closure_cells pair free-variable names with their Python ref cells.
  NOTE(review): the mutable defaults [] are never mutated here, so they
  are safe, but None would be the conventional default.
  """
  assert len(closure_vars) == len(closure_cells)
  closure_cell_dict = dict(zip(closure_vars, closure_cells))
  if filename is None and parent is not None:
    filename = parent.filename
  translator = AST_Translator(globals_dict, closure_cell_dict,
                              parent, function_name = name, filename = filename)
  assert not args.kwarg, "Parakeet doesn't support **kwargs, found in %s%s(%s)" % \
    (filename +":" if filename else "", name, args)
  ssa_args, assignments = translator.translate_args(args)
  # pull off a leading docstring so it isn't translated as code
  doc_string = None
  if len(body) > 0 and isinstance(body[0], ast.Expr):
    if isinstance(body[0].value, ast.Str):
      doc_string = body[0].value.s
      body = body[1:]
  _, body = translator.visit_block(body)
  # argument-unpacking assignments run before the translated body
  body = assignments + body
  ssa_fn_name = names.fresh(name)
  # if function was nested in parakeet, it can have references to its
  # surrounding parakeet scope, which can't be captured with a python ref cell
  original_outer_names = translator.original_outer_names
  localized_outer_names = translator.localized_outer_names
  python_refs = translator.python_refs
  ssa_args.prepend_nonlocal_args(localized_outer_names)
  if globals_dict:
    # top-level function: free variables resolve to Python ref cells
    # NOTE(review): this branch passes python_refs.values() where the other
    # branch passes [] and no doc_string — confirm UntypedFn's signature
    assert parent is None
    assert len(original_outer_names) == len(python_refs)
    return UntypedFn(ssa_fn_name, ssa_args, body, python_refs.values(), [])
  else:
    # nested function: free variables resolve to the parent's SSA values,
    # producing a Closure when any exist
    assert parent
    fn = UntypedFn(ssa_fn_name, ssa_args, body, [], original_outer_names, doc_string = doc_string)
    if len(original_outer_names) > 0:
      try:
        outer_ssa_vars = [parent.lookup(x) for x in original_outer_names]
      except NameNotFound:
        print "Failure while trying to look up non-local names used by function %s" % name
        raise
      return syntax.Closure(fn, outer_ssa_vars)
    else:
      return fn
def strip_leading_whitespace(source):
  """
  Dedent *source* by the indentation of its first line.

  However many leading whitespace characters the first line carries are
  chopped off the front of every line. A source whose first line is
  already flush left is returned untouched (so any trailing newline is
  preserved).
  """
  lines = source.splitlines()
  assert len(lines) > 0
  head = lines[0]
  indent = len(head) - len(head.lstrip())
  if indent == 0:
    return source
  return '\n'.join(line[indent:] for line in lines)
def translate_function_source(source,
                              globals_dict,
                              closure_vars = [],
                              closure_cells = [],
                              filename = None):
  """
  Parse Python source text containing a single function definition and
  translate it into an untyped Parakeet function.
  """
  assert len(closure_vars) == len(closure_cells)
  # dedent first so functions defined inside classes/functions parse cleanly
  syntax_tree = ast.parse(strip_leading_whitespace(source))
  # unwrap the module/interactive/expression wrapper down to the FunctionDef
  if isinstance(syntax_tree, (ast.Module, ast.Interactive)):
    assert len(syntax_tree.body) == 1
    syntax_tree = syntax_tree.body[0]
  elif isinstance(syntax_tree, ast.Expression):
    syntax_tree = syntax_tree.body
  assert isinstance(syntax_tree, ast.FunctionDef), \
      "Unexpected Python syntax node: %s" % ast.dump(syntax_tree)
  return translate_function_ast(syntax_tree.name,
                                syntax_tree.args,
                                syntax_tree.body,
                                globals_dict,
                                closure_vars,
                                closure_cells,
                                filename = filename)
# python value of a user-defined function mapped to its
# untyped representation (translation cache, grows for the process lifetime)
_known_python_functions = {}
# keep track of which functions are being translated at this moment
# to check for recursive calls (recursion is unsupported)
_currently_processing = set([])
def _translate_function_value(fn):
  """
  The core of function translation, should only end up here
  if the python function's intermediate representation isn't cached.

  Handles primitives, cast-like types, expression classes and macros via
  dedicated builders; everything else is treated as a plain Python
  function whose source is fetched and fed through the AST translator.
  """
  # recursion guard: a function being translated must not call itself
  assert fn not in _currently_processing, \
    "Recursion detected through function value %s" % (fn,)
  if isinstance(fn, Prim):
    fundef = build_untyped_prim_fn(fn)
  elif isinstance(fn, (Type, np.dtype, int, bool, long, float)):
    # types/dtypes act as cast functions
    fundef = build_untyped_cast_fn(fn)
  elif isinstance(fn, type) and Expr in fn.mro():
    fundef = build_untyped_expr_fn(fn)
  elif isinstance(fn, macro):
    fundef = fn.as_fn()
  else:
    # if it's not a macro or some sort of internal expression
    # then we're really dealing with a Python function
    # so get to work pulling apart its AST and translating
    # it into Parakeet IR
    assert type(fn) not in \
      (types.BuiltinFunctionType, types.TypeType, np.ufunc, types.MethodType), \
      "Unsupported function: %s" % (fn,)
    _currently_processing.add(fn)
    try:
      assert hasattr(fn, 'func_globals'), "Expected function to have globals: %s" % fn
      assert hasattr(fn, 'func_closure'), "Expected function to have closure cells: %s" % fn
      assert hasattr(fn, 'func_code'), "Expected function to have code object: %s" % fn
      source = inspect.getsource(fn)
      filename = inspect.getsourcefile(fn)
    except:
      # source unavailable (builtins, REPL definitions, compiled code)
      _currently_processing.remove(fn)
      assert False, "Parakeet couldn't access source of function %s" % fn
    globals_dict = fn.func_globals
    free_vars = fn.func_code.co_freevars
    closure_cells = fn.func_closure
    if closure_cells is None:
      closure_cells = ()
    try:
      fundef = translate_function_source(source,
                                         globals_dict,
                                         free_vars,
                                         closure_cells,
                                         filename = filename)
    except:
      # always clear the recursion guard, even on failure
      _currently_processing.remove(fn)
      raise
    if config.print_untyped_function:
      print "[ast_conversion] Translated %s into untyped function:\n%s" % (fn, repr(fundef))
    _currently_processing.remove(fn)
    _known_python_functions[fn] = fundef
  return fundef
import threading
# serializes first-time translation so concurrent lookups don't translate
# the same function twice in parallel
_lock = threading.RLock()
def translate_function_value(fn):
  """
  Return the untyped Parakeet representation of the Python value *fn*,
  translating it on first use and caching the result.
  """
  if fn in _known_python_functions:
    return _known_python_functions[fn]
  # if it's already a Parakeet function, just return it
  if isinstance(fn, UntypedFn):
    return fn
  # short-circuit logic for turning dtypes and Python types into
  # functions for casting from any value to those types
  elif isinstance(fn, (np.dtype, int, long, float, bool)):
    # BUG FIX: previously this shortcut's result was discarded because the
    # 'with _lock' block below ran unconditionally and recomputed fundef
    # via _translate_function_value
    fundef = build_untyped_cast_fn(fn)
  else:
    # none of the obvious shortcuts worked,
    # we're going to have to translate this function for real
    from ..mappings import function_mappings
    if fn in function_mappings:
      fn = function_mappings[fn]
    # ...unless we forgot to add it to mappings but some equivalent primitive
    # got registered
    if fn in prims.prim_lookup_by_value:
      fn = prims.prim_lookup_by_value[fn]
    # if the function has been wrapped with a decorator, unwrap it
    while isinstance(fn, jit):
      fn = fn.f
    # check again if, after unwrapping the function a bunch, it isn't in the cache
    if fn in _known_python_functions:
      return _known_python_functions[fn]
    with _lock:
      fundef = _translate_function_value(fn)
  _known_python_functions[fn] = fundef
  return fundef
|
|
import argparse
import os
import random
import time
import numpy as np
from keras.layers import (LSTM, BatchNormalization, Convolution3D, Dense, Dropout, Flatten, Input,
MaxPooling3D, TimeDistributed, ZeroPadding3D)
from keras.models import Model, Sequential
from src.data import import_labels
from src.io import get_duration, get_num_frames, video_to_array
from src.processing import activity_localization, get_classification, smoothing
# per-stage wall-clock timings, one entry appended per processed video;
# dumped to 'runtime_2.out' at the end of the benchmark run
runtime_measures = {
    'load_video': [],
    'extract_features_c3d': [],
    'temporal_localization_network': [],
    'post-processing': [],
    'video_duration': []
}
def run_runtime_tests(input_video, model_features, c3d_mean, model_localization):
    """Run the full detection pipeline on one video, timing each stage.

    Stages: load video -> C3D feature extraction -> temporal localization
    LSTM -> post-processing (classification + activity localization).
    Timings are appended to the module-level runtime_measures dict;
    raises if the video can't be read.
    """
    # C3D expects 16-frame clips of 112x112 RGB crops
    input_size = (112, 112)
    length = 16
    # Setup post-processing variables
    smoothing_k = 5
    activity_threshold = .2
    # Load labels
    with open('dataset/labels.txt', 'r') as f:
        labels = import_labels(f)
    print('')
    print('#'*50)
    print(input_video)
    print('Reading Video...')
    t_s = time.time()
    video_array = video_to_array(input_video, resize=input_size)
    t_e = time.time()
    print('Loading Video: {:.2f}s'.format(t_e-t_s))
    runtime_measures['load_video'].append(t_e-t_s)
    if video_array is None:
        raise Exception('The video could not be read')
    nb_frames = get_num_frames(input_video)
    duration = get_duration(input_video)
    fps = nb_frames / duration
    runtime_measures['video_duration'].append(duration)
    print('Duration: {:.1f}s'.format(duration))
    print('FPS: {:.1f}'.format(fps))
    print('Number of frames: {}'.format(nb_frames))
    # reshape the frame stream into (nb_clips, 3, length, 112, 112),
    # dropping any trailing frames that don't fill a whole clip
    nb_clips = nb_frames // length
    video_array = video_array.transpose(1, 0, 2, 3)
    video_array = video_array[:nb_clips*length,:,:,:]
    video_array = video_array.reshape((nb_clips, length, 3, 112, 112))
    video_array = video_array.transpose(0, 2, 1, 3, 4)
    # Extract features
    print('Extracting features...')
    t_s = time.time()
    # mean-subtract with the Sports-1M channel mean before C3D
    X = video_array - c3d_mean
    Y = model_features.predict(X, batch_size=1, verbose=1)
    t_e = time.time()
    print('Extracting C3D features: {:.2f}s'.format(t_e-t_s))
    runtime_measures['extract_features_c3d'].append(t_e-t_s)
    # Predict with the temporal localization network
    print('Predicting...')
    t_s = time.time()
    # the stateful LSTM consumes one 4096-d feature vector per time step
    Y = Y.reshape(nb_clips, 1, 4096)
    prediction = model_localization.predict(Y, batch_size=1, verbose=1)
    prediction = prediction.reshape(nb_clips, 201)
    t_e = time.time()
    print('Prediction temporal activities: {:.2f}s'.format(t_e-t_s))
    runtime_measures['temporal_localization_network'].append(t_e-t_s)
    # Post processing the predited output
    print('Post-processing output...')
    t_s = time.time()
    labels_idx, scores = get_classification(prediction, k=5)
    print('Video: {}\n'.format(input_video))
    print('Classification:')
    for idx, score in zip(labels_idx, scores):
        label = labels[idx]
        print('{:.4f}\t{}'.format(score, label))
    prediction_smoothed = smoothing(prediction, k=smoothing_k)
    activities_idx, startings, endings, scores = activity_localization(
        prediction_smoothed,
        activity_threshold
    )
    t_e = time.time()
    runtime_measures['post-processing'].append(t_e-t_s)
    print('Post-processing runtime: {:.2f}s'.format(t_e-t_s))
    print('\nDetection:')
    print('Score\tInterval\t\tActivity')
    for idx, s, e, score in zip(activities_idx, startings, endings, scores):
        # convert clip indices back to seconds
        start = s * float(length) / fps
        end = e * float(length) / fps
        label = labels[idx]
        print('{:.4f}\t{:.1f}s - {:.1f}s\t\t{}'.format(score, start, end, label))
def C3D_conv_features(summary=False):
    """ Return the Keras model of the network until the fc6 layer where the
    convolutional features can be extracted.

    Builds the C3D architecture, loads Sports-1M pretrained weights, then
    pops the top 4 layers (do1/fc7/do2/fc8) so the model's output is the
    4096-d fc6 activation. All kept layers are frozen (trainable=False).
    """
    from keras.layers.convolutional import Convolution3D, MaxPooling3D, ZeroPadding3D
    from keras.layers.core import Dense, Dropout, Flatten
    from keras.models import Sequential

    model = Sequential()
    # 1st layer group
    model.add(Convolution3D(64, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv1',
                            subsample=(1, 1, 1),
                            input_shape=(3, 16, 112, 112),
                            trainable=False))
    model.add(MaxPooling3D(pool_size=(1, 2, 2), strides=(1, 2, 2),
                           border_mode='valid', name='pool1'))
    # 2nd layer group
    model.add(Convolution3D(128, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv2',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2),
                           border_mode='valid', name='pool2'))
    # 3rd layer group
    model.add(Convolution3D(256, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv3a',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(Convolution3D(256, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv3b',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2),
                           border_mode='valid', name='pool3'))
    # 4th layer group
    model.add(Convolution3D(512, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv4a',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(Convolution3D(512, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv4b',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2),
                           border_mode='valid', name='pool4'))
    # 5th layer group
    model.add(Convolution3D(512, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv5a',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(Convolution3D(512, 3, 3, 3, activation='relu',
                            border_mode='same', name='conv5b',
                            subsample=(1, 1, 1),
                            trainable=False))
    model.add(ZeroPadding3D(padding=(0, 1, 1), name='zeropadding'))
    model.add(MaxPooling3D(pool_size=(2, 2, 2), strides=(2, 2, 2),
                           border_mode='valid', name='pool5'))
    model.add(Flatten(name='flatten'))
    # FC layers group
    model.add(Dense(4096, activation='relu', name='fc6', trainable=False))
    model.add(Dropout(.5, name='do1'))
    model.add(Dense(4096, activation='relu', name='fc7'))
    model.add(Dropout(.5, name='do2'))
    model.add(Dense(487, activation='softmax', name='fc8'))
    # Load weights
    model.load_weights('data/models/c3d-sports1M_weights.h5')
    # drop do1/fc7/do2/fc8 so fc6 is the output
    # NOTE(review): pop_layer() is the old Keras 0.x API; newer Keras uses
    # model.pop() — confirm against the pinned Keras version
    for _ in range(4):
        model.pop_layer()
    if summary:
        print(model.summary())
    return model
def temporal_localization_network(summary=False):
    """Build and load the temporal-localization model.

    A stateful LSTM over per-clip 4096-d C3D features (batch of 1, one
    time step per call) followed by a 201-way softmax per step; weights
    are loaded from disk.
    """
    input_features = Input(batch_shape=(1, 1, 4096,), name='features')
    input_normalized = BatchNormalization(name='normalization')(input_features)
    input_dropout = Dropout(p=.5)(input_normalized)
    # stateful=True: LSTM state persists across predict() calls within a video
    lstm = LSTM(512, return_sequences=True, stateful=True, name='lsmt1')(input_dropout)
    output_dropout = Dropout(p=.5)(lstm)
    output = TimeDistributed(Dense(201, activation='softmax'), name='fc')(output_dropout)
    model = Model(input=input_features, output=output)
    model.load_weights('data/models/temporal-location_weights.hdf5')
    if summary:
        model.summary()
    return model
if __name__ == '__main__':
    videos_dir = '/imatge/amontes/work/datasets/ActivityNet/v1.3/videos'
    N = 20  # Number of random videos
    R = 3   # Repetitions
    # Read dataset and choose N random videos from it
    # NOTE(review): start_video is drawn before sampling, so it may also
    # appear inside videos_ids — confirm that's acceptable for warm-up
    videos_ids = [v for v in os.listdir(videos_dir) if v[-4:] == '.mp4']
    start_video = random.choice(videos_ids)
    videos_ids = random.sample(videos_ids, N)
    # Load C3D model and mean
    print('Loading C3D network...')
    model_features = C3D_conv_features(True)
    model_features.compile(optimizer='sgd', loss='mse')
    mean_total = np.load('data/models/c3d-sports1M_mean.npy')
    c3d_mean = np.mean(mean_total, axis=(0, 2, 3, 4), keepdims=True)
    # Load the temporal localization network
    print('Loading temporal localization network...')
    model_localization = temporal_localization_network(True)
    model_localization.compile(optimizer='rmsprop', loss='categorical_crossentropy')
    # First lets start with a random video (The timing of the first video is not significant as
    # Keras working with Theano require to compile which increase the runnig time only the first time)
    video_path = os.path.join(videos_dir, start_video)
    run_runtime_tests(video_path, model_features, c3d_mean, model_localization)
    # benchmark each sampled video R times
    for i in range(N):
        video_path = os.path.join(videos_dir, videos_ids[i])
        for _ in range(R):
            run_runtime_tests(video_path, model_features, c3d_mean, model_localization)
    # dump one 'stage;v1;v2;...;' line per measured stage
    with open('runtime_2.out', 'w') as f:
        for k in runtime_measures.keys():
            f.write(k+';')
            values = runtime_measures[k]
            for v in values:
                f.write(str(v))
                f.write(';')
            f.write('\n')
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pandas as pd
from pandas.api.types import CategoricalDtype
from pyspark import pandas as ps
from pyspark.pandas.tests.data_type_ops.testing_utils import OpsTestBase
class BinaryOpsTest(OpsTestBase):
    @property
    def pser(self):
        # reference pandas Series of bytes values
        return pd.Series([b"1", b"2", b"3"])

    @property
    def psser(self):
        # pandas-on-Spark counterpart of self.pser
        return ps.from_pandas(self.pser)

    @property
    def byte_pdf(self):
        # two-column pandas DataFrame of bytes, for binary-op pairings
        psers = {
            "this": self.pser,
            "that": pd.Series([b"2", b"3", b"4"]),
        }
        return pd.concat(psers, axis=1)

    @property
    def byte_psdf(self):
        # pandas-on-Spark counterpart of self.byte_pdf
        return ps.from_pandas(self.byte_pdf)
    def test_add(self):
        """bytes + bytes concatenates and matches pandas; non-bytes operands raise."""
        byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
        pser, psser = byte_pdf["this"], byte_psdf["this"]
        other_pser, other_psser = byte_pdf["that"], byte_psdf["that"]
        self.assert_eq(psser + b"1", pser + b"1")
        self.assert_eq(psser + psser, pser + pser)
        self.assert_eq(psser + psser.astype("bytes"), pser + pser.astype("bytes"))
        # adding non-bytes values is a type error
        self.assertRaises(TypeError, lambda: psser + "x")
        self.assertRaises(TypeError, lambda: psser + 1)
        self.assert_eq(pser + pser, psser + psser)
        self.assert_eq(pser + other_pser, psser + other_psser)
        # adding Series of any other dtype is a type error too
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser + psser)
    # the remaining arithmetic operators are undefined for bytes:
    # each must raise TypeError for scalar and Series operands alike
    def test_sub(self):
        self.assertRaises(TypeError, lambda: self.psser - "x")
        self.assertRaises(TypeError, lambda: self.psser - 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser - psser)

    def test_mul(self):
        self.assertRaises(TypeError, lambda: self.psser * "x")
        self.assertRaises(TypeError, lambda: self.psser * 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser * psser)

    def test_truediv(self):
        self.assertRaises(TypeError, lambda: self.psser / "x")
        self.assertRaises(TypeError, lambda: self.psser / 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser / psser)

    def test_floordiv(self):
        self.assertRaises(TypeError, lambda: self.psser // "x")
        self.assertRaises(TypeError, lambda: self.psser // 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser // psser)

    def test_mod(self):
        self.assertRaises(TypeError, lambda: self.psser % "x")
        self.assertRaises(TypeError, lambda: self.psser % 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser % psser)

    def test_pow(self):
        self.assertRaises(TypeError, lambda: self.psser ** "x")
        self.assertRaises(TypeError, lambda: self.psser ** 1)
        for psser in self.pssers:
            self.assertRaises(TypeError, lambda: self.psser ** psser)
    # reflected operators: only bytes + bytes is valid; all other
    # reflected arithmetic on a bytes Series raises TypeError
    def test_radd(self):
        self.assert_eq(b"1" + self.psser, b"1" + self.pser)
        self.assertRaises(TypeError, lambda: "x" + self.psser)
        self.assertRaises(TypeError, lambda: 1 + self.psser)

    def test_rsub(self):
        self.assertRaises(TypeError, lambda: "x" - self.psser)
        self.assertRaises(TypeError, lambda: 1 - self.psser)

    def test_rmul(self):
        self.assertRaises(TypeError, lambda: "x" * self.psser)
        self.assertRaises(TypeError, lambda: 2 * self.psser)

    def test_rtruediv(self):
        self.assertRaises(TypeError, lambda: "x" / self.psser)
        self.assertRaises(TypeError, lambda: 1 / self.psser)

    def test_rfloordiv(self):
        self.assertRaises(TypeError, lambda: "x" // self.psser)
        self.assertRaises(TypeError, lambda: 1 // self.psser)

    def test_rmod(self):
        self.assertRaises(TypeError, lambda: 1 % self.psser)

    def test_rpow(self):
        self.assertRaises(TypeError, lambda: "x" ** self.psser)
        self.assertRaises(TypeError, lambda: 1 ** self.psser)
    # bitwise/logical operators are undefined for bytes Series,
    # in both direct and reflected form
    def test_and(self):
        self.assertRaises(TypeError, lambda: self.psser & True)
        self.assertRaises(TypeError, lambda: self.psser & False)
        self.assertRaises(TypeError, lambda: self.psser & self.psser)

    def test_rand(self):
        self.assertRaises(TypeError, lambda: True & self.psser)
        self.assertRaises(TypeError, lambda: False & self.psser)

    def test_or(self):
        self.assertRaises(TypeError, lambda: self.psser | True)
        self.assertRaises(TypeError, lambda: self.psser | False)
        self.assertRaises(TypeError, lambda: self.psser | self.psser)

    def test_ror(self):
        self.assertRaises(TypeError, lambda: True | self.psser)
        self.assertRaises(TypeError, lambda: False | self.psser)
def test_from_to_pandas(self):
data = [b"1", b"2", b"3"]
pser = pd.Series(data)
psser = ps.Series(data)
self.assert_eq(pser, psser.to_pandas())
self.assert_eq(ps.from_pandas(pser), psser)
def test_isnull(self):
self.assert_eq(self.pser.isnull(), self.psser.isnull())
def test_astype(self):
pser = self.pser
psser = self.psser
self.assert_eq(psser.astype(str), psser.astype(str))
self.assert_eq(pser.astype(bool), psser.astype(bool))
self.assert_eq(pser.astype("category"), psser.astype("category"))
cat_type = CategoricalDtype(categories=[b"2", b"3", b"1"])
self.assert_eq(pser.astype(cat_type), psser.astype(cat_type))
def test_neg(self):
self.assertRaises(TypeError, lambda: -self.psser)
def test_abs(self):
self.assertRaises(TypeError, lambda: abs(self.psser))
def test_invert(self):
self.assertRaises(TypeError, lambda: ~self.psser)
def test_eq(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] == byte_pdf["that"], byte_psdf["this"] == byte_psdf["that"])
self.assert_eq(byte_pdf["this"] == byte_pdf["this"], byte_psdf["this"] == byte_psdf["this"])
def test_ne(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] != byte_pdf["that"], byte_psdf["this"] != byte_psdf["that"])
self.assert_eq(byte_pdf["this"] != byte_pdf["this"], byte_psdf["this"] != byte_psdf["this"])
def test_lt(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] < byte_pdf["that"], byte_psdf["this"] < byte_psdf["that"])
self.assert_eq(byte_pdf["this"] < byte_pdf["this"], byte_psdf["this"] < byte_psdf["this"])
def test_le(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] <= byte_pdf["that"], byte_psdf["this"] <= byte_psdf["that"])
self.assert_eq(byte_pdf["this"] <= byte_pdf["this"], byte_psdf["this"] <= byte_psdf["this"])
def test_gt(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] > byte_pdf["that"], byte_psdf["this"] > byte_psdf["that"])
self.assert_eq(byte_pdf["this"] > byte_pdf["this"], byte_psdf["this"] > byte_psdf["this"])
def test_ge(self):
byte_pdf, byte_psdf = self.byte_pdf, self.byte_psdf
self.assert_eq(byte_pdf["this"] >= byte_pdf["that"], byte_psdf["this"] >= byte_psdf["that"])
self.assert_eq(byte_pdf["this"] >= byte_pdf["this"], byte_psdf["this"] >= byte_psdf["this"])
if __name__ == "__main__":
    import unittest

    from pyspark.pandas.tests.data_type_ops.test_binary_ops import *  # noqa: F401

    try:
        import xmlrunner  # type: ignore[import]

        test_runner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
    except ImportError:
        # Fall back to the default text runner when xmlrunner is unavailable.
        test_runner = None
    unittest.main(testRunner=test_runner, verbosity=2)
|
|
# -*- coding: utf-8 -*-
"""Inception V3 model for Keras.
Note that the input image format for this model is different than for
the VGG16 and ResNet models (299x299 instead of 224x224),
and that the input preprocessing function is also different (same as Xception).
# Reference
- [Rethinking the Inception Architecture for Computer Vision](http://arxiv.org/abs/1512.00567)
"""
from __future__ import print_function
from __future__ import absolute_import
import warnings
import numpy as np
from keras.models import Model
from keras import layers
from keras.layers import Activation
from keras.layers import Dense
from keras.layers import Input
from keras.layers import BatchNormalization
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import AveragePooling2D
from keras.layers import GlobalAveragePooling2D
from keras.layers import GlobalMaxPooling2D
from keras.engine.topology import get_source_inputs
from keras.utils.layer_utils import convert_all_kernels_in_model
from keras.utils.data_utils import get_file
from keras import backend as K
from keras.applications.imagenet_utils import decode_predictions
from keras.applications.imagenet_utils import _obtain_input_shape
from keras.preprocessing import image
# Release URLs for the pretrained ImageNet weights: the full model and a
# "no top" variant without the final fully-connected classification layer.
WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.5/inception_v3_weights_tf_dim_ordering_tf_kernels.h5'
WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.5/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
def conv2d_bn(x,
              filters,
              num_row,
              num_col,
              padding='same',
              strides=(1, 1),
              name=None):
  """Apply a Conv2D followed by BatchNormalization and a ReLU activation.

  Arguments:
      x: input tensor.
      filters: number of filters in the convolution.
      num_row: height of the convolution kernel.
      num_col: width of the convolution kernel.
      padding: padding mode passed to `Conv2D`.
      strides: strides passed to `Conv2D`.
      name: optional base name; the conv layer is named `name + '_conv'`
          and the batch-norm layer `name + '_bn'`.

  Returns:
      Output tensor after applying `Conv2D` and `BatchNormalization`.
  """
  if name is None:
    conv_name = bn_name = None
  else:
    conv_name = name + '_conv'
    bn_name = name + '_bn'
  # Batch norm runs over the channel axis, whose position depends on the
  # configured image data format.
  bn_axis = 1 if K.image_data_format() == 'channels_first' else 3
  x = Conv2D(filters,
             (num_row, num_col),
             strides=strides,
             padding=padding,
             use_bias=False,
             name=conv_name)(x)
  x = BatchNormalization(axis=bn_axis, scale=False, name=bn_name)(x)
  return Activation('relu', name=name)(x)
def InceptionV3(include_top=True,
                weights='imagenet',
                input_tensor=None,
                input_shape=None,
                pooling=None,
                classes=1000):
  """Instantiates the Inception v3 architecture.
  Optionally loads weights pre-trained
  on ImageNet. Note that when using TensorFlow,
  for best performance you should set
  `image_data_format="channels_last"` in your Keras config
  at ~/.keras/keras.json.
  The model and the weights are compatible with both
  TensorFlow and Theano. The data format
  convention used by the model is the one
  specified in your Keras config file.
  Note that the default input image size for this model is 299x299.
  Arguments:
      include_top: whether to include the fully-connected
          layer at the top of the network.
      weights: one of `None` (random initialization)
          or "imagenet" (pre-training on ImageNet).
      input_tensor: optional Keras tensor (i.e. output of `layers.Input()`)
          to use as image input for the model.
      input_shape: optional shape tuple, only to be specified
          if `include_top` is False (otherwise the input shape
          has to be `(299, 299, 3)` (with `channels_last` data format)
          or `(3, 299, 299)` (with `channels_first` data format).
          It should have exactly 3 inputs channels,
          and width and height should be no smaller than 139.
          E.g. `(150, 150, 3)` would be one valid value.
      pooling: Optional pooling mode for feature extraction
          when `include_top` is `False`.
          - `None` means that the output of the model will be
              the 4D tensor output of the
              last convolutional layer.
          - `avg` means that global average pooling
              will be applied to the output of the
              last convolutional layer, and thus
              the output of the model will be a 2D tensor.
          - `max` means that global max pooling will
              be applied.
      classes: optional number of classes to classify images
          into, only to be specified if `include_top` is True, and
          if no `weights` argument is specified.
  Returns:
      A Keras model instance.
  Raises:
      ValueError: in case of invalid argument for `weights`,
          or invalid input shape.
  """
  # Validate the weights/classes configuration before building the graph.
  if weights not in {'imagenet', None}:
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization) or `imagenet` '
                     '(pre-training on ImageNet).')
  if weights == 'imagenet' and include_top and classes != 1000:
    raise ValueError('If using `weights` as imagenet with `include_top`'
                     ' as true, `classes` should be 1000')
  # Determine proper input shape
  input_shape = _obtain_input_shape(
      input_shape,
      default_size=299,
      min_size=139,
      data_format=K.image_data_format(),
      include_top=include_top)
  # Build a fresh input layer, or wrap the caller-provided tensor.
  if input_tensor is None:
    img_input = Input(shape=input_shape)
  else:
    img_input = Input(tensor=input_tensor, shape=input_shape)
  if K.image_data_format() == 'channels_first':
    channel_axis = 1
  else:
    channel_axis = 3
  # Stem: strided convolutions and max pooling reduce spatial size
  # before the inception ("mixed") modules begin.
  x = conv2d_bn(img_input, 32, 3, 3, strides=(2, 2), padding='valid')
  x = conv2d_bn(x, 32, 3, 3, padding='valid')
  x = conv2d_bn(x, 64, 3, 3)
  x = MaxPooling2D((3, 3), strides=(2, 2))(x)
  x = conv2d_bn(x, 80, 1, 1, padding='valid')
  x = conv2d_bn(x, 192, 3, 3, padding='valid')
  x = MaxPooling2D((3, 3), strides=(2, 2))(x)
  # mixed 0, 1, 2: 35 x 35 x 256
  branch1x1 = conv2d_bn(x, 64, 1, 1)
  branch5x5 = conv2d_bn(x, 48, 1, 1)
  branch5x5 = conv2d_bn(branch5x5, 64, 5, 5)
  branch3x3dbl = conv2d_bn(x, 64, 1, 1)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch_pool = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(x)
  branch_pool = conv2d_bn(branch_pool, 32, 1, 1)
  x = layers.concatenate(
      [branch1x1, branch5x5, branch3x3dbl, branch_pool],
      axis=channel_axis,
      name='mixed0')
  # mixed 1: 35 x 35 x 256
  branch1x1 = conv2d_bn(x, 64, 1, 1)
  branch5x5 = conv2d_bn(x, 48, 1, 1)
  branch5x5 = conv2d_bn(branch5x5, 64, 5, 5)
  branch3x3dbl = conv2d_bn(x, 64, 1, 1)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch_pool = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(x)
  branch_pool = conv2d_bn(branch_pool, 64, 1, 1)
  x = layers.concatenate(
      [branch1x1, branch5x5, branch3x3dbl, branch_pool],
      axis=channel_axis,
      name='mixed1')
  # mixed 2: 35 x 35 x 256
  branch1x1 = conv2d_bn(x, 64, 1, 1)
  branch5x5 = conv2d_bn(x, 48, 1, 1)
  branch5x5 = conv2d_bn(branch5x5, 64, 5, 5)
  branch3x3dbl = conv2d_bn(x, 64, 1, 1)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch_pool = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(x)
  branch_pool = conv2d_bn(branch_pool, 64, 1, 1)
  x = layers.concatenate(
      [branch1x1, branch5x5, branch3x3dbl, branch_pool],
      axis=channel_axis,
      name='mixed2')
  # mixed 3: 17 x 17 x 768
  branch3x3 = conv2d_bn(x, 384, 3, 3, strides=(2, 2), padding='valid')
  branch3x3dbl = conv2d_bn(x, 64, 1, 1)
  branch3x3dbl = conv2d_bn(branch3x3dbl, 96, 3, 3)
  branch3x3dbl = conv2d_bn(
      branch3x3dbl, 96, 3, 3, strides=(2, 2), padding='valid')
  branch_pool = MaxPooling2D((3, 3), strides=(2, 2))(x)
  x = layers.concatenate(
      [branch3x3, branch3x3dbl, branch_pool], axis=channel_axis, name='mixed3')
  # mixed 4: 17 x 17 x 768
  branch1x1 = conv2d_bn(x, 192, 1, 1)
  branch7x7 = conv2d_bn(x, 128, 1, 1)
  branch7x7 = conv2d_bn(branch7x7, 128, 1, 7)
  branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
  branch7x7dbl = conv2d_bn(x, 128, 1, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 7, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 1, 7)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 128, 7, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
  branch_pool = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(x)
  branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
  x = layers.concatenate(
      [branch1x1, branch7x7, branch7x7dbl, branch_pool],
      axis=channel_axis,
      name='mixed4')
  # mixed 5, 6: 17 x 17 x 768
  for i in range(2):
    branch1x1 = conv2d_bn(x, 192, 1, 1)
    branch7x7 = conv2d_bn(x, 160, 1, 1)
    branch7x7 = conv2d_bn(branch7x7, 160, 1, 7)
    branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
    branch7x7dbl = conv2d_bn(x, 160, 1, 1)
    branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 7, 1)
    branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 1, 7)
    branch7x7dbl = conv2d_bn(branch7x7dbl, 160, 7, 1)
    branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
    branch_pool = AveragePooling2D(
        (3, 3), strides=(1, 1), padding='same')(x)
    branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
    x = layers.concatenate(
        [branch1x1, branch7x7, branch7x7dbl, branch_pool],
        axis=channel_axis,
        name='mixed' + str(5 + i))
  # mixed 7: 17 x 17 x 768
  branch1x1 = conv2d_bn(x, 192, 1, 1)
  branch7x7 = conv2d_bn(x, 192, 1, 1)
  branch7x7 = conv2d_bn(branch7x7, 192, 1, 7)
  branch7x7 = conv2d_bn(branch7x7, 192, 7, 1)
  branch7x7dbl = conv2d_bn(x, 192, 1, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 7, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 7, 1)
  branch7x7dbl = conv2d_bn(branch7x7dbl, 192, 1, 7)
  branch_pool = AveragePooling2D((3, 3), strides=(1, 1), padding='same')(x)
  branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
  x = layers.concatenate(
      [branch1x1, branch7x7, branch7x7dbl, branch_pool],
      axis=channel_axis,
      name='mixed7')
  # mixed 8: 8 x 8 x 1280
  branch3x3 = conv2d_bn(x, 192, 1, 1)
  branch3x3 = conv2d_bn(branch3x3, 320, 3, 3,
                        strides=(2, 2), padding='valid')
  branch7x7x3 = conv2d_bn(x, 192, 1, 1)
  branch7x7x3 = conv2d_bn(branch7x7x3, 192, 1, 7)
  branch7x7x3 = conv2d_bn(branch7x7x3, 192, 7, 1)
  branch7x7x3 = conv2d_bn(
      branch7x7x3, 192, 3, 3, strides=(2, 2), padding='valid')
  branch_pool = MaxPooling2D((3, 3), strides=(2, 2))(x)
  x = layers.concatenate(
      [branch3x3, branch7x7x3, branch_pool], axis=channel_axis, name='mixed8')
  # mixed 9: 8 x 8 x 2048
  for i in range(2):
    branch1x1 = conv2d_bn(x, 320, 1, 1)
    branch3x3 = conv2d_bn(x, 384, 1, 1)
    branch3x3_1 = conv2d_bn(branch3x3, 384, 1, 3)
    branch3x3_2 = conv2d_bn(branch3x3, 384, 3, 1)
    branch3x3 = layers.concatenate(
        [branch3x3_1, branch3x3_2], axis=channel_axis, name='mixed9_' + str(i))
    branch3x3dbl = conv2d_bn(x, 448, 1, 1)
    branch3x3dbl = conv2d_bn(branch3x3dbl, 384, 3, 3)
    branch3x3dbl_1 = conv2d_bn(branch3x3dbl, 384, 1, 3)
    branch3x3dbl_2 = conv2d_bn(branch3x3dbl, 384, 3, 1)
    branch3x3dbl = layers.concatenate(
        [branch3x3dbl_1, branch3x3dbl_2], axis=channel_axis)
    branch_pool = AveragePooling2D(
        (3, 3), strides=(1, 1), padding='same')(x)
    branch_pool = conv2d_bn(branch_pool, 192, 1, 1)
    x = layers.concatenate(
        [branch1x1, branch3x3, branch3x3dbl, branch_pool],
        axis=channel_axis,
        name='mixed' + str(9 + i))
  if include_top:
    # Classification block
    x = GlobalAveragePooling2D(name='avg_pool')(x)
    x = Dense(classes, activation='softmax', name='predictions')(x)
  else:
    # Optional global pooling when the classification top is omitted.
    if pooling == 'avg':
      x = GlobalAveragePooling2D()(x)
    elif pooling == 'max':
      x = GlobalMaxPooling2D()(x)
  # Ensure that the model takes into account
  # any potential predecessors of `input_tensor`.
  if input_tensor is not None:
    inputs = get_source_inputs(input_tensor)
  else:
    inputs = img_input
  # Create model.
  model = Model(inputs, x, name='inception_v3')
  # load weights
  if weights == 'imagenet':
    if K.image_data_format() == 'channels_first':
      if K.backend() == 'tensorflow':
        warnings.warn('You are using the TensorFlow backend, yet you '
                      'are using the Theano '
                      'image data format convention '
                      '(`image_data_format="channels_first"`). '
                      'For best performance, set '
                      '`image_data_format="channels_last"` in '
                      'your Keras config '
                      'at ~/.keras/keras.json.')
    if include_top:
      weights_path = get_file(
          'inception_v3_weights_tf_dim_ordering_tf_kernels.h5',
          WEIGHTS_PATH,
          cache_subdir='models',
          md5_hash='9a0d58056eeedaa3f26cb7ebd46da564')
    else:
      # Replace this with a local copy for reproducibility
      # weights_path = get_file(
      #     'inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5',
      #     WEIGHTS_PATH_NO_TOP,
      #     cache_subdir='models',
      #     md5_hash='bcbd6486424b2319ff4ef7d526e38f63')
      # NOTE(review): relative path assumes an 'inception/' directory under
      # the current working directory — confirm before shipping.
      weights_path = 'inception/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'
    model.load_weights(weights_path)
    # The weights file is TF-kernel-ordered (see its filename); convert the
    # kernels in place when running on the Theano backend.
    if K.backend() == 'theano':
      convert_all_kernels_in_model(model)
  return model
def preprocess_input(x):
  """Scale pixel values from [0, 255] to [-1, 1] (same scheme as Xception).
  Operates in place via augmented assignment; pass a float array
  (e.g. the output of `image.img_to_array`).
  """
  x /= 255.
  x -= 0.5
  x *= 2.
  return x
if __name__ == '__main__':
  # Smoke test: classify one local image with the pretrained network.
  model = InceptionV3(include_top=True, weights='imagenet')
  # Assumes 'elephant.jpg' exists in the current working directory.
  img_path = 'elephant.jpg'
  img = image.load_img(img_path, target_size=(299, 299))
  x = image.img_to_array(img)
  # Add the batch dimension: (1, 299, 299, 3).
  x = np.expand_dims(x, axis=0)
  x = preprocess_input(x)
  preds = model.predict(x)
  print('Predicted:', decode_predictions(preds))
|
|
# coding=utf-8
import vdebug.ui.interface
import vdebug.util
import vim
import vdebug.log
import vdebug.opts
class Ui(vdebug.ui.interface.Ui):
    """Ui layer which manages the Vim windows.
    """
    def __init__(self,breakpoints):
        # breakpoints: store rendered into the breakpoint window on open.
        vdebug.ui.interface.Ui.__init__(self)
        self.is_open = False
        self.breakpoint_store = breakpoints
        # NOTE(review): `emptybuffer` appears unused; open() sets
        # `empty_buf_num` instead — confirm before removing.
        self.emptybuffer = None
        self.breakpointwin = BreakpointWindow(self,'rightbelow 7new')
        self.current_tab = "1"
        self.tabnr = None
    def is_modified(self):
        # True when the current buffer has unsaved changes (&mod is 1).
        modified = int(vim.eval('&mod'))
        if modified:
            return True
        else:
            return False
    def open(self):
        """Open the debugger tab and create watch/stack/status/log windows."""
        if self.is_open:
            return
        self.is_open = True
        try:
            existing_buffer = True
            cur_buf_name = vim.eval("bufname('%')")
            if cur_buf_name is None:
                existing_buffer = False
                cur_buf_name = ''
            self.current_tab = vim.eval("tabpagenr()")
            vim.command('silent tabnew')
            self.empty_buf_num = vim.eval('bufnr("%")')
            if existing_buffer:
                vim.command('call Vdebug_edit("%s")' % cur_buf_name)
            self.tabnr = vim.eval("tabpagenr()")
            srcwin_name = self.__get_srcwin_name()
            self.watchwin = WatchWindow(self,'vertical belowright new')
            self.watchwin.create()
            self.stackwin = StackWindow(self,'belowright new')
            self.stackwin.create()
            self.statuswin = StatusWindow(self,'belowright new')
            self.statuswin.create()
            self.statuswin.set_status("loading")
            self.watchwin.set_height(20)
            self.statuswin.set_height(5)
            logwin = LogWindow(self,'rightbelow 6new')
            vdebug.log.Log.set_logger(\
                    vdebug.log.WindowLogger(\
                    vdebug.opts.Options.get('debug_window_level'),\
                    logwin))
            winnr = self.__get_srcwinno_by_name(srcwin_name)
            self.sourcewin = SourceWindow(self,winnr)
            self.sourcewin.focus()
        except Exception as e:
            # Roll the flag back so a failed open can be retried.
            self.is_open = False
            raise e
    def set_source_position(self,file,lineno):
        # Remote (dbgp:) paths have no local buffer to display.
        if str(file).startswith('dbgp:'):
            return
        self.sourcewin.set_file(file)
        self.sourcewin.set_line(lineno)
        self.sourcewin.place_pointer(lineno)
    def mark_as_stopped(self):
        if self.is_open:
            if self.sourcewin:
                self.sourcewin.remove_pointer()
            if self.statuswin:
                self.statuswin.set_status("stopped")
                self.remove_conn_details()
    def set_conn_details(self,addr,port):
        self.statuswin.insert("Connected to %s:%s" %(addr,port),2,True)
    def remove_conn_details(self):
        self.statuswin.insert("Not connected",2,True)
    def set_listener_details(self,addr,port,idekey):
        details = "Listening on %s:%s" %(addr,port)
        if len(idekey):
            details += " (IDE key: %s)" % idekey
        self.statuswin.insert(details,1,True)
    def get_current_file(self):
        return vdebug.util.LocalFilePath(vim.current.buffer.name)
    def get_current_row(self):
        return vim.current.window.cursor[0]
    def get_current_line(self):
        return self.get_line(self.get_current_row())
    def get_line(self,row):
        return vim.eval("getline(" + str(row) + ")")
    def register_breakpoint(self,breakpoint):
        # Only line breakpoints get a sign placed in the source window.
        if breakpoint.type == 'line':
            self.place_breakpoint(breakpoint.id,\
                    breakpoint.file,breakpoint.line)
        if self.breakpointwin.is_open:
            self.breakpointwin.add_breakpoint(breakpoint)
    def place_breakpoint(self,sign_id,file,line):
        vim.command('sign place '+str(sign_id)+\
                ' name=breakpt line='+str(line)+\
                ' file='+file.as_local())
    def remove_breakpoint(self,breakpoint):
        id = breakpoint.id
        vim.command('sign unplace %i' % id)
        if self.breakpointwin.is_open:
            self.breakpointwin.remove_breakpoint(id)
    def get_breakpoint_sign_positions(self):
        # Parse `sign place` output into {breakpoint id: line number}.
        sign_lines = self.command('sign place').split("\n")
        positions = {}
        for line in sign_lines:
            if "name=breakpt" in line:
                attributes = line.strip().split()
                lineinfo = attributes[0].split('=')
                idinfo = attributes[1].split('=')
                positions[idinfo[1]] = lineinfo[1]
        return positions
    # Execute a vim command and return the output.
    def command(self,cmd):
        vim.command('redir => _tmp')
        vim.command('silent %s' % cmd)
        vim.command('redir END')
        return vim.eval('_tmp')
    def say(self,string):
        """ Vim picks up Python prints, so just print """
        print str(string)
        vdebug.log.Log(string,vdebug.log.Logger.INFO)
    def error(self,string):
        # Echo with Error highlighting; double quotes must be escaped.
        vim.command('echohl Error | echo "'+\
                str(string).replace('"','\\"')+\
                '" | echohl None')
        vdebug.log.Log(string,vdebug.log.Logger.ERROR)
    def close(self):
        """Tear down all debugger windows and restore the previous tab."""
        if not self.is_open:
            return
        self.is_open = False
        vdebug.log.Log.remove_logger('WindowLogger')
        if self.tabnr:
            vim.command('silent! '+self.tabnr+'tabc!')
        if self.current_tab:
            vim.command('tabn '+self.current_tab)
        if self.empty_buf_num:
            vim.command('bw' + self.empty_buf_num)
        if self.watchwin:
            self.watchwin.destroy()
        if self.stackwin:
            self.stackwin.destroy()
        if self.statuswin:
            self.statuswin.destroy()
        self.watchwin = None
        self.stackwin = None
        self.statuswin = None
    def __get_srcwin_name(self):
        return vim.current.buffer.name
    def __get_srcwinno_by_name(self,name):
        # Find the window number currently displaying the buffer `name`.
        i = 1
        vdebug.log.Log("Searching for win by name %s" % name,\
                vdebug.log.Logger.INFO)
        for w in vim.windows:
            vdebug.log.Log("Win %d, name %s" %(i,w.buffer.name),\
                    vdebug.log.Logger.INFO)
            if w.buffer.name == name:
                break
            else:
                i += 1
        vdebug.log.Log("Returning window number %d" % i,\
                vdebug.log.Logger.INFO)
        return i
    def __get_buf_list(self):
        return vim.eval("range(1, bufnr('$'))")
class SourceWindow(vdebug.ui.interface.Window):
    """Wrapper around the Vim window that displays the file being debugged."""
    file = None
    # Fixed sign ids for the execution pointer and breakpoint markers.
    pointer_sign_id = '6145'
    breakpoint_sign_id = '6146'
    def __init__(self,ui,winno):
        self.winno = str(winno)
    def focus(self):
        # Jump to this window by its number.
        vim.command(self.winno+"wincmd w")
    def command(self,cmd,silent = True):
        # Run a wincmd against this window, optionally with `silent`.
        self.focus()
        prepend = "silent " if silent else ""
        command_str = prepend + self.winno + "wincmd " + cmd
        vim.command(command_str)
    def set_file(self,file):
        # No-op when the requested file is already displayed.
        if file == self.file:
            return
        self.file = file
        vdebug.log.Log("Setting source file: "+file,vdebug.log.Logger.INFO)
        self.focus()
        vim.command('call Vdebug_edit("%s")' % str(file).replace("\\", "\\\\"))
    def set_line(self,lineno):
        # Move the cursor to the given line.
        self.focus()
        vim.command("normal %sgg" % str(lineno))
    def get_file(self):
        self.focus()
        self.file = vdebug.util.LocalFilePath(vim.eval("expand('%:p')"))
        return self.file
    def clear_signs(self):
        vim.command('sign unplace *')
    def place_pointer(self,line):
        # Remote (dbgp:) files have no local buffer for signs.
        if str(self.file).startswith('dbgp:'):
            return
        vdebug.log.Log("Placing pointer sign on line "+str(line),\
                vdebug.log.Logger.INFO)
        self.remove_pointer()
        vim.command('sign place '+self.pointer_sign_id+\
                ' name=current line='+str(line)+\
                ' file='+self.file)
    def remove_pointer(self):
        vim.command('sign unplace %s' % self.pointer_sign_id)
class Window(vdebug.ui.interface.Window):
    """Base class for the auxiliary debugger windows.

    Subclasses set `name` and implement on_create(); this class handles
    creating, writing to, and destroying the backing Vim buffer.
    """
    name = "WINDOW"
    open_cmd = "new"
    creation_count = 0
    def __init__(self,ui,open_cmd):
        self.buffer = None
        self.ui = ui
        self.open_cmd = open_cmd
        self.is_open = False
    def getwinnr(self):
        """Return the Vim window number showing this buffer."""
        return int(vim.eval("bufwinnr('"+self.name+"')"))
    def set_height(self,height):
        """Set the window height, respecting &winminheight and a floor of 1."""
        height = int(height)
        minheight = int(vim.eval("&winminheight"))
        if height < minheight:
            height = minheight
        if height <= 0:
            height = 1
        self.command('set winheight=%i' % height)
    def write(self, msg, return_focus = True, after = "normal G"):
        """Append msg to the buffer, creating the window first if needed."""
        if not self.is_open:
            self.create()
        if return_focus:
            prev_win = vim.eval('winnr()')
        if self.buffer_empty():
            self.buffer[:] = str(msg).split('\n')
        else:
            self.buffer.append(str(msg).split('\n'))
        self.command(after)
        if return_focus:
            vim.command('%swincmd W' % prev_win)
    def insert(self, msg, lineno = None, overwrite = False, allowEmpty = False):
        """Insert msg at lineno (or the cursor line), optionally overwriting."""
        if not self.is_open:
            self.create()
        # Empty messages are dropped unless the caller explicitly allows them.
        if len(msg) == 0 and not allowEmpty:
            return
        if self.buffer_empty():
            self.buffer[:] = str(msg).split('\n')
        else:
            # BUG FIX (idiom): compare with None using `is`, not `==`.
            if lineno is None:
                (lineno, rol) = vim.current.window.cursor
            remaining_buffer = str(msg).split('\n')
            if overwrite:
                lfrom = lineno + 1
            else:
                lfrom = lineno
            remaining_buffer.extend(self.buffer[lfrom:])
            del self.buffer[lineno:]
            if self.buffer_empty():
                self.buffer[:] = remaining_buffer
            else:
                for line in remaining_buffer:
                    self.buffer.append(line)
            self.command(str(lfrom))
    def delete(self,start_line,end_line):
        """Delete the lines [start_line, end_line) from the buffer."""
        try:
            self.buffer[end_line]
            remaining_buffer = self.buffer[end_line:]
            del self.buffer[start_line:]
            self.buffer.append(remaining_buffer)
        except IndexError:
            # end_line is past the buffer: just truncate from start_line.
            del self.buffer[start_line:]
    def buffer_empty(self):
        """True when the buffer contains a single empty line."""
        if len(self.buffer) == 1 \
                and len(self.buffer[0]) == 0:
            return True
        else:
            return False
    def create(self):
        """ create window """
        vim.command('silent ' + self.open_cmd + ' ' + self.name)
        vim.command("setlocal buftype=nofile modifiable "+ \
                "winfixheight winfixwidth")
        self.buffer = vim.current.buffer
        self.is_open = True
        self.creation_count += 1
        self.on_create()
    def destroy(self):
        """ destroy window """
        # BUG FIX (idiom): identity comparison with None, not equality.
        if self.buffer is None or len(dir(self.buffer)) == 0:
            return
        self.is_open = False
        if int(vim.eval('buffer_exists("'+self.name+'")')) == 1:
            vim.command('bwipeout ' + self.name)
    def clean(self):
        """ clean all datas in buffer """
        self.buffer[:] = []
    def command(self, cmd):
        """ go to my window & execute command """
        winnr = self.getwinnr()
        if winnr != int(vim.eval("winnr()")):
            vim.command(str(winnr) + 'wincmd w')
        vim.command(cmd)
    def accept_renderer(self,renderer):
        self.write(renderer.render())
class BreakpointWindow(Window):
    """Window listing every registered breakpoint."""
    name = "DebuggerBreakpoints"
    is_visible = False
    header = """===========================================================
ID | TYPE | DATA
==========================================================="""
    def on_create(self):
        self.clean()
        self.write(self.header)
        self.command('setlocal syntax=debugger_breakpoint')
        for bp in self.ui.breakpoint_store.get_sorted_list():
            self.add_breakpoint(bp)
        if self.creation_count == 1:
            cmd = 'silent! au BufWinLeave %s :silent! bdelete %s' %(self.name,self.name)
            vim.command('%s | python debugger.runner.ui.breakpointwin.is_open = False' % cmd)
    def add_breakpoint(self,breakpoint):
        """Append a one-line description of `breakpoint` to the window."""
        bp_str = " %-7i | %-11s | " %(breakpoint.id,breakpoint.type)
        if breakpoint.type == 'line':
            bp_str += "%s:%s" %(breakpoint.file,str(breakpoint.line))
        elif breakpoint.type == 'conditional':
            bp_str += "%s:%s when (%s)" \
                %(breakpoint.file,str(breakpoint.line),breakpoint.condition)
        elif breakpoint.type == 'exception':
            bp_str += "Exception: %s" % breakpoint.exception
        elif breakpoint.type == 'call' or \
                breakpoint.type == 'return':
            bp_str += "Function: %s" % breakpoint.function
        self.write(bp_str)
    def remove_breakpoint(self,breakpoint_id):
        """Delete the line for `breakpoint_id` from the window buffer.

        BUG FIX: the previous implementation deleted buffer lines while
        iterating over the buffer and kept advancing its index afterwards,
        which could skip lines or remove the wrong one. Locate the matching
        line first, delete it, and stop.
        """
        bp_str = " %i " % breakpoint_id
        bp_id_len = len(bp_str)
        for i, line in enumerate(self.buffer):
            if line[:bp_id_len] == bp_str:
                del self.buffer[i]
                break
class LogWindow(Window):
    """Window that displays Vdebug's log output."""
    name = "DebuggerLog"
    def on_create(self):
        self.command('setlocal syntax=debugger_log')
        if self.creation_count == 1:
            vim.command('silent! au BufWinLeave %s :silent! bdelete %s' %(self.name,self.name))
    def write(self, msg, return_focus = True):
        # BUG FIX: forward the caller's return_focus instead of forcing True.
        Window.write(self, msg, return_focus=return_focus)
class StackWindow(Window):
    """Window showing the current call stack."""
    name = "DebuggerStack"
    depth = 0
    def on_create(self):
        # <cr> and double-click jump to the selected stack frame.
        self.command('inoremap <buffer> <cr> <esc>'+\
                ':python debugger.handle_return_keypress()<cr>')
        self.command('nnoremap <buffer> <cr> '+\
                ':python debugger.handle_return_keypress()<cr>')
        self.command('nnoremap <buffer> <2-LeftMouse> '+\
                ':python debugger.handle_double_click()<cr>')
        self.command('setlocal syntax=debugger_stack')
        if self.creation_count == 1:
            cmd = 'silent! au BufWinLeave %s :silent! bdelete %s' %(self.name,self.name)
            vim.command('%s | python debugger.runner.ui.stackwin.is_open = False' % cmd)
    def write(self, msg, return_focus = True):
        # BUG FIX: propagate return_focus instead of silently dropping it.
        Window.write(self, msg, return_focus=return_focus, after="normal gg")
    def clean(self):
        """ clean all datas in buffer """
        Window.clean(self)
        self.depth = 0
class WatchWindow(Window):
    """Window showing the watched variables / current context."""
    name = "DebuggerWatch"
    def on_create(self):
        # <cr> and double-click expand/collapse the selected property.
        self.command('inoremap <buffer> <cr> <esc>'+\
                ':python debugger.handle_return_keypress()<cr>')
        self.command('nnoremap <buffer> <cr> '+\
                ':python debugger.handle_return_keypress()<cr>')
        self.command('nnoremap <buffer> <2-LeftMouse> '+\
                ':python debugger.handle_double_click()<cr>')
        self.command('setlocal syntax=debugger_watch')
        if self.creation_count == 1:
            cmd = 'silent! au BufWinLeave %s :silent! bdelete %s' %(self.name,self.name)
            vim.command('%s | python debugger.runner.ui.watchwin.is_open = False' % cmd)
    def write(self, msg, return_focus = True):
        # BUG FIX: propagate return_focus instead of silently dropping it.
        Window.write(self, msg, return_focus=return_focus, after="normal gg")
class StatusWindow(Window):
    """Window showing debugger status, listener details and key hints."""
    name = "DebuggerStatus"
    status = None
    def on_create(self):
        keys = vdebug.util.Keymapper()
        output = "Status: starting\nListening on port\nNot connected\n\n"
        output += "Press %s to start debugging, " %(keys.run_key())
        output += "%s to stop/close. " %(keys.close_key())
        output += "Type :help Vdebug for more information."
        self.write(output)
        self.command('setlocal syntax=debugger_status')
        if self.creation_count == 1:
            cmd = 'au BufWinLeave %s :silent! bdelete %s' %(self.name,self.name)
            vim.command('%s | python debugger.runner.ui.statuswin.is_open = False' % cmd)
    def set_status(self,status):
        # The status is always rendered on the first line of the window.
        self.insert("Status: "+str(status),0,True)
        self.status = status
    def get_status(self):
        return self.status
class ResponseRenderer:
    """Base class for rendering a debugger protocol response as text."""
    def __init__(self,response):
        self.response = response
    def render(self):
        """Return the response as a string; implemented by subclasses."""
        pass
class StackGetResponseRenderer(ResponseRenderer):
    """Render a stack_get response, one '[level] where @ file:line' per frame."""
    def render(self):
        rows = []
        for frame in self.response.get_stack():
            where = frame.get('where') if frame.get('where') else 'main'
            path = vdebug.util.FilePath(frame.get('filename'))
            rows.append("[%(num)s] %(where)s @ %(file)s:%(line)s" \
                    %{'num':frame.get('level'),'where':where,\
                    'file':str(path.as_local()),'line':frame.get('lineno')})
        # Each frame is terminated by a newline, matching the original output.
        return "".join([row + "\n" for row in rows])
class ContextGetResponseRenderer(ResponseRenderer):
    """Render a context_get response as an indented property tree.
    Produces the text shown in the watch window, including the optional
    context tabs and, in 'expanded' style, tree connectors between rows.
    """
    def __init__(self,response,title = None,contexts = None,current_context = 0):
        ResponseRenderer.__init__(self,response)
        self.title = title
        # BUG FIX: `contexts` previously defaulted to a mutable `{}`, which
        # is shared by every instance constructed without the argument.
        self.contexts = contexts if contexts is not None else {}
        self.current_context = current_context
    def render(self,indent = 0):
        res = self.__create_tabs()
        if self.title:
            res += "- %s\n\n" % self.title
        properties = self.response.get_context()
        num_props = len(properties)
        vdebug.log.Log("Writing %i properties to the context window" % num_props,\
                vdebug.log.Logger.INFO )
        for idx, prop in enumerate(properties):
            final = False
            try:
                next_prop = properties[idx+1]
            except IndexError:
                # Last property: there is no sibling to connect to.
                final = True
                next_prop = None
            res += self.__render_property(prop,next_prop,final,indent)
        vdebug.log.Log("Writing to context window:\n"+res,vdebug.log.Logger.DEBUG)
        return res
    def __create_tabs(self):
        # One "[ name ]" tab per context; the current one is starred.
        res = []
        if self.contexts:
            for id,name in self.contexts.iteritems():
                if self.current_context == id:
                    name = "*"+name
                res.append("[ %s ]" % name)
        if res:
            return " ".join(res) + "\n\n"
        else:
            return ""
    def __render_property(self,p,next_p,last = False,indent = 0):
        line = "%(indent)s %(marker)s %(name)s = (%(type)s)%(value)s" \
                %{'indent':"".rjust((p.depth * 2)+indent),\
                'marker':self.__get_marker(p),'name':p.display_name.encode('latin1'),\
                'type':p.type_and_size(),'value': " " + p.value}
        line = line.rstrip() + "\n"
        if vdebug.opts.Options.get('watch_window_style') == 'expanded':
            depth = p.depth
            if next_p and not last:
                # Connector depends on whether the next property is a
                # sibling (|), shallower (/) or deeper (\).
                next_depth = next_p.depth
                if depth == next_depth:
                    next_sep = "|"
                    num_spaces = depth * 2
                elif depth > next_depth:
                    next_sep = "/"
                    num_spaces = (depth * 2) - 1
                else:
                    next_sep = "\\"
                    num_spaces = (depth * 2) + 1
                line += "".rjust(num_spaces+indent) + " " + next_sep + "\n"
            elif depth > 0:
                line += "".rjust((depth * 2) - 1 + indent) + " /" + "\n"
        return line
    def __get_marker(self,property):
        """Pick the tree marker: leaf, closed subtree, or open subtree."""
        char = vdebug.opts.Options.get('marker_default')
        if property.numchildren > 0:
            if property.child_count() == 0:
                char = vdebug.opts.Options.get('marker_closed_tree')
            else:
                char = vdebug.opts.Options.get('marker_open_tree')
        return char
|
|
#!/usr/bin/env python
# Copyright 2017 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Bootstrap script to clone and forward to the recipe engine tool.
*******************
** DO NOT MODIFY **
*******************
This is a copy of https://github.com/luci/recipes-py/blob/master/doc/recipes.py.
To fix bugs, fix in the github repo then run the autoroller.
"""
import argparse
import json
import logging
import os
import random
import subprocess
import sys
import time
import urlparse
from collections import namedtuple
from cStringIO import StringIO
# The dependency entry for the recipe_engine in the client repo's recipes.cfg
#
# url (str) - the url to the engine repo we want to use.
# revision (str) - the git revision for the engine to get.
# path_override (str) - the subdirectory in the engine repo we should use to
# find it's recipes.py entrypoint. This is here for completeness, but will
# essentially always be empty. It would be used if the recipes-py repo was
# merged as a subdirectory of some other repo and you depended on that
# subdirectory.
# branch (str) - the branch to fetch for the engine as an absolute ref (e.g.
# refs/heads/master)
# repo_type ("GIT"|"GITILES") - An ignored enum which will be removed soon.
EngineDep = namedtuple('EngineDep',
'url revision path_override branch repo_type')
class MalformedRecipesCfg(Exception):
  """Raised when recipes.cfg is missing required data or is inconsistent."""

  def __init__(self, msg, path):
    message = 'malformed recipes.cfg: %s: %r' % (msg, path)
    super(MalformedRecipesCfg, self).__init__(message)
def parse(repo_root, recipes_cfg_path):
  """Parse is a lightweight recipes.cfg file parser.

  Args:
    repo_root (str) - native path to the root of the repo we're trying to run
      recipes for.
    recipes_cfg_path (str) - native path to the recipes.cfg file to process.

  Returns (as tuple):
    engine_dep (EngineDep): The recipe_engine dependency.
    recipes_path (str) - native path to where the recipes live inside of the
      current repo (i.e. the folder containing `recipes/` and/or
      `recipe_modules`)

  Raises:
    MalformedRecipesCfg - on unknown api_version, missing engine url, or
      unsupported repo_type.
  """
  # 'r' instead of 'rU': universal newlines are the default on Python 3 and
  # the 'U' flag was removed in 3.11; json is newline-agnostic either way.
  with open(recipes_cfg_path, 'r') as fh:
    pb = json.load(fh)
  try:
    if pb['api_version'] != 2:
      raise MalformedRecipesCfg('unknown version %d' % pb['api_version'],
                                recipes_cfg_path)
    engine = pb['deps']['recipe_engine']
    if 'url' not in engine:
      raise MalformedRecipesCfg(
        'Required field "url" in dependency "recipe_engine" not found',
        recipes_cfg_path)
    # Fill optional fields so EngineDep(**engine) always has every member.
    engine.setdefault('revision', '')
    engine.setdefault('path_override', '')
    engine.setdefault('branch', 'refs/heads/master')
    recipes_path = pb.get('recipes_path', '')
    # TODO(iannucci): only support absolute refs
    if not engine['branch'].startswith('refs/'):
      engine['branch'] = 'refs/heads/' + engine['branch']
    engine.setdefault('repo_type', 'GIT')
    if engine['repo_type'] not in ('GIT', 'GITILES'):
      raise MalformedRecipesCfg(
        'Unsupported "repo_type" value in dependency "recipe_engine"',
        recipes_cfg_path)
    recipes_path = os.path.join(
      repo_root, recipes_path.replace('/', os.path.sep))
    return EngineDep(**engine), recipes_path
  except KeyError as ex:
    # BUGFIX: `ex.message` is Python-2-only and raises AttributeError on
    # Python 3; `ex.args[0]` carries the same missing-key value on both.
    raise MalformedRecipesCfg(ex.args[0], recipes_cfg_path)
# On Windows/Cygwin use the .bat shim so git resolves through cmd's PATHEXT.
GIT = 'git.bat' if sys.platform.startswith(('win', 'cygwin')) else 'git'
def _subprocess_call(argv, **kwargs):
logging.info('Running %r', argv)
return subprocess.call(argv, **kwargs)
def _git_check_call(argv, **kwargs):
  """Run `git <argv>`, logging the full command; raises CalledProcessError
  on a non-zero exit."""
  cmd = [GIT] + argv
  logging.info('Running %r', cmd)
  subprocess.check_call(cmd, **kwargs)
def _git_output(argv, **kwargs):
  """Run `git <argv>` and return its captured stdout; raises
  CalledProcessError on a non-zero exit."""
  cmd = [GIT] + argv
  logging.info('Running %r', cmd)
  return subprocess.check_output(cmd, **kwargs)
def parse_args(argv):
  """This extracts a subset of the arguments that this bootstrap script cares
  about. Currently this consists of:
    * an override for the recipe engine in the form of `-O recipe_engine=/path`
    * the --package option.

  Args:
    argv (list[str]) - command-line args (without the program name).

  Returns (as tuple):
    engine_override (str|None) - local path to use for the engine when an
      `-O recipe_engine=...` override was supplied, else None.
    package (str|None) - absolute path given via --package, else None.
  """
  PREFIX = 'recipe_engine='
  p = argparse.ArgumentParser(add_help=False)
  p.add_argument('-O', '--project-override', action='append')
  p.add_argument('--package', type=os.path.abspath)
  # parse_known_args: every other flag is forwarded untouched to recipes.py.
  args, _ = p.parse_known_args(argv)
  for override in args.project_override or ():
    if override.startswith(PREFIX):
      return override[len(PREFIX):], args.package
  return None, args.package
def checkout_engine(engine_path, repo_root, recipes_cfg_path):
  """Ensure the pinned recipe engine exists locally; return its native path.

  If `engine_path` is falsy, clones/fetches the engine pinned in recipes.cfg
  into `<recipes_path>/.recipe_deps/recipe_engine` and hard-resets it to the
  pinned revision. A provided `engine_path` (or a file:// engine url) skips
  the git work entirely.
  """
  dep, recipes_path = parse(repo_root, recipes_cfg_path)
  url = dep.url
  # A file:// dependency url doubles as a local override.
  if not engine_path and url.startswith('file://'):
    engine_path = urlparse.urlparse(url).path
  if not engine_path:
    revision = dep.revision
    subpath = dep.path_override
    branch = dep.branch
    # Ensure that we have the recipe engine cloned.
    engine = os.path.join(recipes_path, '.recipe_deps', 'recipe_engine')
    engine_path = os.path.join(engine, subpath)
    with open(os.devnull, 'w') as NUL:
      # Note: this logic mirrors the logic in recipe_engine/fetch.py
      _git_check_call(['init', engine], stdout=NUL)
      # Fetch only when the pinned revision isn't already present locally.
      try:
        _git_check_call(['rev-parse', '--verify', '%s^{commit}' % revision],
                        cwd=engine, stdout=NUL, stderr=NUL)
      except subprocess.CalledProcessError:
        _git_check_call(['fetch', url, branch], cwd=engine, stdout=NUL,
                        stderr=NUL)
      # Reset only when the work tree differs from the pinned revision.
      try:
        _git_check_call(['diff', '--quiet', revision], cwd=engine)
      except subprocess.CalledProcessError:
        _git_check_call(['reset', '-q', '--hard', revision], cwd=engine)
  return engine_path
def main():
  """Bootstrap entry point.

  Locates recipes.cfg (via --package or `git rev-parse --show-toplevel`),
  makes sure the pinned recipe engine is checked out, then re-runs this
  invocation through the engine's own recipes.py, forwarding all original
  arguments. Returns that subprocess's exit code.
  """
  if '--verbose' in sys.argv:
    logging.getLogger().setLevel(logging.INFO)
  args = sys.argv[1:]
  engine_override, recipes_cfg_path = parse_args(args)
  if recipes_cfg_path:
    # calculate repo_root from recipes_cfg_path
    # (recipes.cfg lives at <repo_root>/infra/config/recipes.cfg)
    repo_root = os.path.dirname(
        os.path.dirname(
            os.path.dirname(recipes_cfg_path)))
  else:
    # find repo_root with git and calculate recipes_cfg_path
    repo_root = (_git_output(
        ['rev-parse', '--show-toplevel'],
        cwd=os.path.abspath(os.path.dirname(__file__))).strip())
    repo_root = os.path.abspath(repo_root)
    recipes_cfg_path = os.path.join(repo_root, 'infra', 'config', 'recipes.cfg')
    # Forward the derived --package so the real recipes.py agrees with us.
    args = ['--package', recipes_cfg_path] + args
  engine_path = checkout_engine(engine_override, repo_root, recipes_cfg_path)
  # -u: unbuffered output from the forwarded interpreter.
  return _subprocess_call([
    sys.executable, '-u',
    os.path.join(engine_path, 'recipes.py')] + args)
# Propagate the forwarded recipes.py invocation's exit code as our own.
if __name__ == '__main__':
  sys.exit(main())
|
|
from __future__ import annotations
from io import (
BytesIO,
StringIO,
)
import os
from urllib.error import HTTPError
import numpy as np
import pytest
import pandas.util._test_decorators as td
from pandas import DataFrame
import pandas._testing as tm
from pandas.io.xml import read_xml
"""
CHECK LIST
[x] - ValueError: "Values for parser can only be lxml or etree."
etree
[X] - ImportError: "lxml not found, please install or use the etree parser."
[X] - TypeError: "expected str, bytes or os.PathLike object, not NoneType"
[X] - ValueError: "Either element or attributes can be parsed not both."
[X] - ValueError: "xpath does not return any nodes..."
[X] - SyntaxError: "You have used an incorrect or unsupported XPath"
[X] - ValueError: "names does not match length of child elements in xpath."
[X] - TypeError: "...is not a valid type for names"
[X] - ValueError: "To use stylesheet, you need lxml installed..."
[] - URLError: (GENERAL ERROR WITH HTTPError AS SUBCLASS)
[X] - HTTPError: "HTTP Error 404: Not Found"
[] - OSError: (GENERAL ERROR WITH FileNotFoundError AS SUBCLASS)
[X] - FileNotFoundError: "No such file or directory"
[] - ParseError (FAILSAFE CATCH ALL FOR VERY COMPLEX XML)
[X] - UnicodeDecodeError: "'utf-8' codec can't decode byte 0xe9..."
[X] - UnicodeError: "UTF-16 stream does not start with BOM"
[X] - BadZipFile: "File is not a zip file"
[X] - OSError: "Invalid data stream"
[X] - LZMAError: "Input format not supported by decoder"
[X] - ValueError: "Unrecognized compression type"
[X] - PermissionError: "Forbidden"
lxml
[X] - ValueError: "Either element or attributes can be parsed not both."
[X] - AttributeError: "__enter__"
[X] - XSLTApplyError: "Cannot resolve URI"
[X] - XSLTParseError: "document is not a stylesheet"
[X] - ValueError: "xpath does not return any nodes."
[X] - XPathEvalError: "Invalid expression"
[] - XPathSyntaxError: (OLD VERSION IN lxml FOR XPATH ERRORS)
[X] - TypeError: "empty namespace prefix is not supported in XPath"
[X] - ValueError: "names does not match length of child elements in xpath."
[X] - TypeError: "...is not a valid type for names"
[X] - LookupError: "unknown encoding"
[] - URLError: (USUALLY DUE TO NETWORKING)
    [X] - HTTPError: "HTTP Error 404: Not Found"
[X] - OSError: "failed to load external entity"
[X] - XMLSyntaxError: "Start tag expected, '<' not found"
    [] - ParserError: (FAILSAFE CATCH ALL FOR VERY COMPLEX XML)
[X] - ValueError: "Values for parser can only be lxml or etree."
[X] - UnicodeDecodeError: "'utf-8' codec can't decode byte 0xe9..."
[X] - UnicodeError: "UTF-16 stream does not start with BOM"
[X] - BadZipFile: "File is not a zip file"
[X] - OSError: "Invalid data stream"
[X] - LZMAError: "Input format not supported by decoder"
[X] - ValueError: "Unrecognized compression type"
[X] - PermissionError: "Forbidden"
"""
geom_df = DataFrame(
{
"shape": ["square", "circle", "triangle"],
"degrees": [360, 360, 180],
"sides": [4, np.nan, 3],
}
)
xml_default_nmsp = """\
<?xml version='1.0' encoding='utf-8'?>
<data xmlns="http://example.com">
<row>
<shape>square</shape>
<degrees>360</degrees>
<sides>4</sides>
</row>
<row>
<shape>circle</shape>
<degrees>360</degrees>
<sides/>
</row>
<row>
<shape>triangle</shape>
<degrees>180</degrees>
<sides>3</sides>
</row>
</data>"""
xml_prefix_nmsp = """\
<?xml version='1.0' encoding='utf-8'?>
<doc:data xmlns:doc="http://example.com">
<doc:row>
<doc:shape>square</doc:shape>
<doc:degrees>360</doc:degrees>
<doc:sides>4.0</doc:sides>
</doc:row>
<doc:row>
<doc:shape>circle</doc:shape>
<doc:degrees>360</doc:degrees>
<doc:sides/>
</doc:row>
<doc:row>
<doc:shape>triangle</doc:shape>
<doc:degrees>180</doc:degrees>
<doc:sides>3.0</doc:sides>
</doc:row>
</doc:data>"""
df_kml = DataFrame(
{
"id": {
0: "ID_00001",
1: "ID_00002",
2: "ID_00003",
3: "ID_00004",
4: "ID_00005",
},
"name": {
0: "Blue Line (Forest Park)",
1: "Red, Purple Line",
2: "Red, Purple Line",
3: "Red, Purple Line",
4: "Red, Purple Line",
},
"styleUrl": {
0: "#LineStyle01",
1: "#LineStyle01",
2: "#LineStyle01",
3: "#LineStyle01",
4: "#LineStyle01",
},
"extrude": {0: 0, 1: 0, 2: 0, 3: 0, 4: 0},
"altitudeMode": {
0: "clampedToGround",
1: "clampedToGround",
2: "clampedToGround",
3: "clampedToGround",
4: "clampedToGround",
},
"coordinates": {
0: (
"-87.77678526964958,41.8708863930319,0 "
"-87.77826234150609,41.87097820122218,0 "
"-87.78251583439344,41.87130129991005,0 "
"-87.78418294588424,41.87145055520308,0 "
"-87.7872369165933,41.8717239119163,0 "
"-87.79160214925886,41.87210797280065,0"
),
1: (
"-87.65758750947528,41.96427269188822,0 "
"-87.65802133507393,41.96581929055245,0 "
"-87.65819033925305,41.96621846093642,0 "
"-87.6583189819129,41.96650362897086,0 "
"-87.65835858701473,41.96669002089185,0 "
"-87.65838428411853,41.96688150295095,0 "
"-87.65842208882658,41.96745896091846,0 "
"-87.65846556843937,41.9683761425439,0 "
"-87.65849296214573,41.96913893870342,0"
),
2: (
"-87.65492939166126,41.95377494531437,0 "
"-87.65557043199591,41.95376544118533,0 "
"-87.65606302030132,41.95376391658746,0 "
"-87.65623502146268,41.95377379126367,0 "
"-87.65634748981634,41.95380103566435,0 "
"-87.65646537904269,41.95387703994676,0 "
"-87.65656532461145,41.95396622645799,0 "
"-87.65664760856414,41.95404201996044,0 "
"-87.65671750555913,41.95416647054043,0 "
"-87.65673983607117,41.95429949810849,0 "
"-87.65673866475777,41.95441024240925,0 "
"-87.6567690255541,41.95490657227902,0 "
"-87.65683672482363,41.95692259283837,0 "
"-87.6568900886376,41.95861070983142,0 "
"-87.65699865558875,41.96181418669004,0 "
"-87.65756347177603,41.96397045777844,0 "
"-87.65758750947528,41.96427269188822,0"
),
3: (
"-87.65362593118043,41.94742799535678,0 "
"-87.65363554415794,41.94819886386848,0 "
"-87.6536456393239,41.95059994675451,0 "
"-87.65365831235026,41.95108288489359,0 "
"-87.6536604873874,41.9519954657554,0 "
"-87.65362592053201,41.95245597302328,0 "
"-87.65367158496069,41.95311153649393,0 "
"-87.65368468595476,41.9533202828916,0 "
"-87.65369271253692,41.95343095587119,0 "
"-87.65373335834569,41.95351536301472,0 "
"-87.65378605844126,41.95358212680591,0 "
"-87.65385067928185,41.95364452823767,0 "
"-87.6539390793817,41.95370263886964,0 "
"-87.6540786298351,41.95373403675265,0 "
"-87.65430648647626,41.9537535411832,0 "
"-87.65492939166126,41.95377494531437,0"
),
4: (
"-87.65345391792157,41.94217681262115,0 "
"-87.65342448305786,41.94237224420864,0 "
"-87.65339745703922,41.94268217746244,0 "
"-87.65337753982941,41.94288140770284,0 "
"-87.65336256753105,41.94317369618263,0 "
"-87.65338799707138,41.94357253961736,0 "
"-87.65340240886648,41.94389158188269,0 "
"-87.65341837392448,41.94406444407721,0 "
"-87.65342275247338,41.94421065714904,0 "
"-87.65347469646018,41.94434829382345,0 "
"-87.65351486483024,41.94447699917548,0 "
"-87.65353483605053,41.9453896864472,0 "
"-87.65361975532807,41.94689193720703,0 "
"-87.65362593118043,41.94742799535678,0"
),
},
}
)
@pytest.fixture(params=["rb", "r"])
def mode(request):
    # File-open mode: run each test against both binary and text handles.
    return request.param
@pytest.fixture(params=[pytest.param("lxml", marks=td.skip_if_no("lxml")), "etree"])
def parser(request):
    # Parser backend under test; the lxml param is skipped when lxml is absent.
    return request.param
# FILE / URL
@td.skip_if_no("lxml")
def test_parser_consistency_file(datapath):
    # Both parser backends must yield identical frames for the same file.
    path = datapath("io", "data", "xml", "books.xml")
    tm.assert_frame_equal(
        read_xml(path, parser="lxml"),
        read_xml(path, parser="etree"),
    )
@tm.network
@pytest.mark.slow
@td.skip_if_no("lxml")
def test_parser_consistency_url(datapath):
url = (
"https://data.cityofchicago.org/api/views/"
"8pix-ypme/rows.xml?accessType=DOWNLOAD"
)
df_url_lxml = read_xml(url, xpath=".//row/row", parser="lxml")
df_url_etree = read_xml(url, xpath=".//row/row", parser="etree")
tm.assert_frame_equal(df_url_lxml, df_url_etree)
def test_file_like(datapath, parser, mode):
    # read_xml must accept an already-open file handle, not just a path.
    filename = datapath("io", "data", "xml", "books.xml")
    with open(filename, mode) as f:
        df_file = read_xml(f, parser=parser)
    expected_data = {
        "category": ["cooking", "children", "web"],
        "title": ["Everyday Italian", "Harry Potter", "Learning XML"],
        "author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
        "year": [2005, 2005, 2003],
        "price": [30.00, 29.99, 39.95],
    }
    tm.assert_frame_equal(df_file, DataFrame(expected_data))
def test_file_io(datapath, parser, mode):
filename = datapath("io", "data", "xml", "books.xml")
with open(filename, mode) as f:
xml_obj = f.read()
df_io = read_xml(
(BytesIO(xml_obj) if isinstance(xml_obj, bytes) else StringIO(xml_obj)),
parser=parser,
)
df_expected = DataFrame(
{
"category": ["cooking", "children", "web"],
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_io, df_expected)
def test_file_buffered_reader_string(datapath, parser, mode):
filename = datapath("io", "data", "xml", "books.xml")
with open(filename, mode) as f:
xml_obj = f.read()
df_str = read_xml(xml_obj, parser=parser)
df_expected = DataFrame(
{
"category": ["cooking", "children", "web"],
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_str, df_expected)
def test_file_buffered_reader_no_xml_declaration(datapath, parser, mode):
filename = datapath("io", "data", "xml", "books.xml")
with open(filename, mode) as f:
next(f)
xml_obj = f.read()
df_str = read_xml(xml_obj, parser=parser)
df_expected = DataFrame(
{
"category": ["cooking", "children", "web"],
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_str, df_expected)
def test_file_handle_close(datapath, parser):
    # Reading from a BytesIO copy must leave the caller's own handle open.
    xml_file = datapath("io", "data", "xml", "books.xml")
    with open(xml_file, "rb") as f:
        read_xml(BytesIO(f.read()), parser=parser)
        assert not f.closed
@td.skip_if_no("lxml")
@pytest.mark.parametrize("val", ["", b""])
def test_empty_string_lxml(val):
from lxml.etree import XMLSyntaxError
with pytest.raises(XMLSyntaxError, match="Document is empty"):
read_xml(val, parser="lxml")
@pytest.mark.parametrize("val", ["", b""])
def test_empty_string_etree(val):
    # Empty str/bytes input surfaces etree's "no element found" ParseError.
    from xml.etree.ElementTree import ParseError

    empty = val
    with pytest.raises(ParseError, match="no element found"):
        read_xml(empty, parser="etree")
@td.skip_if_no("lxml")
def test_wrong_file_path_lxml():
from lxml.etree import XMLSyntaxError
filename = os.path.join("data", "html", "books.xml")
with pytest.raises(
XMLSyntaxError,
match=("Start tag expected, '<' not found"),
):
read_xml(filename, parser="lxml")
def test_wrong_file_path_etree():
from xml.etree.ElementTree import ParseError
filename = os.path.join("data", "html", "books.xml")
with pytest.raises(
ParseError,
match=("not well-formed"),
):
read_xml(filename, parser="etree")
@tm.network
@td.skip_if_no("lxml")
def test_url():
url = "https://www.w3schools.com/xml/books.xml"
df_url = read_xml(url, xpath=".//book[count(*)=4]")
df_expected = DataFrame(
{
"category": ["cooking", "children", "web"],
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
"cover": [None, None, "paperback"],
}
)
tm.assert_frame_equal(df_url, df_expected)
@tm.network
def test_wrong_url(parser):
with pytest.raises(HTTPError, match=("HTTP Error 404: Not Found")):
url = "https://www.w3schools.com/xml/python.xml"
read_xml(url, xpath=".//book[count(*)=4]", parser=parser)
# XPATH
@td.skip_if_no("lxml")
def test_empty_xpath_lxml(datapath):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(ValueError, match=("xpath does not return any nodes")):
read_xml(filename, xpath=".//python", parser="lxml")
def test_bad_xpath_etree(datapath):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(
SyntaxError, match=("You have used an incorrect or unsupported XPath")
):
read_xml(filename, xpath=".//[book]", parser="etree")
@td.skip_if_no("lxml")
def test_bad_xpath_lxml(datapath):
from lxml.etree import XPathEvalError
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(XPathEvalError, match=("Invalid expression")):
read_xml(filename, xpath=".//[book]", parser="lxml")
# NAMESPACE
def test_default_namespace(parser):
df_nmsp = read_xml(
xml_default_nmsp,
xpath=".//ns:row",
namespaces={"ns": "http://example.com"},
parser=parser,
)
df_expected = DataFrame(
{
"shape": ["square", "circle", "triangle"],
"degrees": [360, 360, 180],
"sides": [4.0, float("nan"), 3.0],
}
)
tm.assert_frame_equal(df_nmsp, df_expected)
def test_prefix_namespace(parser):
df_nmsp = read_xml(
xml_prefix_nmsp,
xpath=".//doc:row",
namespaces={"doc": "http://example.com"},
parser=parser,
)
df_expected = DataFrame(
{
"shape": ["square", "circle", "triangle"],
"degrees": [360, 360, 180],
"sides": [4.0, float("nan"), 3.0],
}
)
tm.assert_frame_equal(df_nmsp, df_expected)
@td.skip_if_no("lxml")
def test_consistency_default_namespace():
df_lxml = read_xml(
xml_default_nmsp,
xpath=".//ns:row",
namespaces={"ns": "http://example.com"},
parser="lxml",
)
df_etree = read_xml(
xml_default_nmsp,
xpath=".//doc:row",
namespaces={"doc": "http://example.com"},
parser="etree",
)
tm.assert_frame_equal(df_lxml, df_etree)
@td.skip_if_no("lxml")
def test_consistency_prefix_namespace():
df_lxml = read_xml(
xml_prefix_nmsp,
xpath=".//doc:row",
namespaces={"doc": "http://example.com"},
parser="lxml",
)
df_etree = read_xml(
xml_prefix_nmsp,
xpath=".//doc:row",
namespaces={"doc": "http://example.com"},
parser="etree",
)
tm.assert_frame_equal(df_lxml, df_etree)
# PREFIX
def test_missing_prefix_with_default_namespace(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(ValueError, match=("xpath does not return any nodes")):
read_xml(filename, xpath=".//Placemark", parser=parser)
def test_missing_prefix_definition_etree(datapath):
filename = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(SyntaxError, match=("you used an undeclared namespace prefix")):
read_xml(filename, xpath=".//kml:Placemark", parser="etree")
@td.skip_if_no("lxml")
def test_missing_prefix_definition_lxml(datapath):
from lxml.etree import XPathEvalError
filename = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(XPathEvalError, match=("Undefined namespace prefix")):
read_xml(filename, xpath=".//kml:Placemark", parser="lxml")
@td.skip_if_no("lxml")
@pytest.mark.parametrize("key", ["", None])
def test_none_namespace_prefix(key):
with pytest.raises(
TypeError, match=("empty namespace prefix is not supported in XPath")
):
read_xml(
xml_default_nmsp,
xpath=".//kml:Placemark",
namespaces={key: "http://www.opengis.net/kml/2.2"},
parser="lxml",
)
# ELEMS AND ATTRS
def test_file_elems_and_attrs(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
df_file = read_xml(filename, parser=parser)
df_expected = DataFrame(
{
"category": ["cooking", "children", "web"],
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_file, df_expected)
def test_file_only_attrs(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
df_file = read_xml(filename, attrs_only=True, parser=parser)
df_expected = DataFrame({"category": ["cooking", "children", "web"]})
tm.assert_frame_equal(df_file, df_expected)
def test_file_only_elems(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
df_file = read_xml(filename, elems_only=True, parser=parser)
df_expected = DataFrame(
{
"title": ["Everyday Italian", "Harry Potter", "Learning XML"],
"author": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"year": [2005, 2005, 2003],
"price": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_file, df_expected)
def test_elem_and_attrs_only(datapath, parser):
filename = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(
ValueError,
match=("Either element or attributes can be parsed not both"),
):
read_xml(filename, elems_only=True, attrs_only=True, parser=parser)
@td.skip_if_no("lxml")
def test_attribute_centric_xml():
xml = """\
<?xml version="1.0" encoding="UTF-8"?>
<TrainSchedule>
<Stations>
<station Name="Manhattan" coords="31,460,195,498"/>
<station Name="Laraway Road" coords="63,409,194,455"/>
<station Name="179th St (Orland Park)" coords="0,364,110,395"/>
<station Name="153rd St (Orland Park)" coords="7,333,113,362"/>
<station Name="143rd St (Orland Park)" coords="17,297,115,330"/>
<station Name="Palos Park" coords="128,281,239,303"/>
<station Name="Palos Heights" coords="148,257,283,279"/>
<station Name="Worth" coords="170,230,248,255"/>
<station Name="Chicago Ridge" coords="70,187,208,214"/>
<station Name="Oak Lawn" coords="166,159,266,185"/>
<station Name="Ashburn" coords="197,133,336,157"/>
<station Name="Wrightwood" coords="219,106,340,133"/>
<station Name="Chicago Union Sta" coords="220,0,360,43"/>
</Stations>
</TrainSchedule>"""
df_lxml = read_xml(xml, xpath=".//station")
df_etree = read_xml(xml, xpath=".//station", parser="etree")
tm.assert_frame_equal(df_lxml, df_etree)
# NAMES
def test_names_option_output(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
df_file = read_xml(
filename, names=["Col1", "Col2", "Col3", "Col4", "Col5"], parser=parser
)
df_expected = DataFrame(
{
"Col1": ["cooking", "children", "web"],
"Col2": ["Everyday Italian", "Harry Potter", "Learning XML"],
"Col3": ["Giada De Laurentiis", "J K. Rowling", "Erik T. Ray"],
"Col4": [2005, 2005, 2003],
"Col5": [30.00, 29.99, 39.95],
}
)
tm.assert_frame_equal(df_file, df_expected)
def test_names_option_wrong_length(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(ValueError, match=("names does not match length")):
read_xml(filename, names=["Col1", "Col2", "Col3"], parser=parser)
def test_names_option_wrong_type(datapath, parser):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(TypeError, match=("is not a valid type for names")):
read_xml(
filename, names="Col1, Col2, Col3", parser=parser # type: ignore[arg-type]
)
# ENCODING
def test_wrong_encoding(datapath, parser):
filename = datapath("io", "data", "xml", "baby_names.xml")
with pytest.raises(UnicodeDecodeError, match=("'utf-8' codec can't decode")):
read_xml(filename, parser=parser)
def test_utf16_encoding(datapath, parser):
filename = datapath("io", "data", "xml", "baby_names.xml")
with pytest.raises(
UnicodeError,
match=(
"UTF-16 stream does not start with BOM|"
"'utf-16-le' codec can't decode byte"
),
):
read_xml(filename, encoding="UTF-16", parser=parser)
def test_unknown_encoding(datapath, parser):
filename = datapath("io", "data", "xml", "baby_names.xml")
with pytest.raises(LookupError, match=("unknown encoding: UFT-8")):
read_xml(filename, encoding="UFT-8", parser=parser)
def test_ascii_encoding(datapath, parser):
filename = datapath("io", "data", "xml", "baby_names.xml")
with pytest.raises(UnicodeDecodeError, match=("'ascii' codec can't decode byte")):
read_xml(filename, encoding="ascii", parser=parser)
@td.skip_if_no("lxml")
def test_parser_consistency_with_encoding(datapath):
filename = datapath("io", "data", "xml", "baby_names.xml")
df_lxml = read_xml(filename, parser="lxml", encoding="ISO-8859-1")
df_etree = read_xml(filename, parser="etree", encoding="iso-8859-1")
tm.assert_frame_equal(df_lxml, df_etree)
# PARSER
@td.skip_if_installed("lxml")
def test_default_parser_no_lxml(datapath):
filename = datapath("io", "data", "xml", "books.xml")
with pytest.raises(
ImportError, match=("lxml not found, please install or use the etree parser.")
):
read_xml(filename)
def test_wrong_parser(datapath):
    # Any backend other than "lxml"/"etree" is rejected up front.
    filename = datapath("io", "data", "xml", "books.xml")
    msg = "Values for parser can only be lxml or etree."
    with pytest.raises(ValueError, match=msg):
        read_xml(filename, parser="bs4")
# STYLESHEET
@td.skip_if_no("lxml")
def test_stylesheet_file(datapath):
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "flatten_doc.xsl")
df_style = read_xml(
kml,
xpath=".//k:Placemark",
namespaces={"k": "http://www.opengis.net/kml/2.2"},
stylesheet=xsl,
)
tm.assert_frame_equal(df_kml, df_style)
@td.skip_if_no("lxml")
def test_stylesheet_file_like(datapath, mode):
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "flatten_doc.xsl")
with open(xsl, mode) as f:
df_style = read_xml(
kml,
xpath=".//k:Placemark",
namespaces={"k": "http://www.opengis.net/kml/2.2"},
stylesheet=f,
)
tm.assert_frame_equal(df_kml, df_style)
@td.skip_if_no("lxml")
def test_stylesheet_io(datapath, mode):
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "flatten_doc.xsl")
xsl_obj: BytesIO | StringIO
with open(xsl, mode) as f:
if mode == "rb":
xsl_obj = BytesIO(f.read())
else:
xsl_obj = StringIO(f.read())
df_style = read_xml(
kml,
xpath=".//k:Placemark",
namespaces={"k": "http://www.opengis.net/kml/2.2"},
stylesheet=xsl_obj,
)
tm.assert_frame_equal(df_kml, df_style)
@td.skip_if_no("lxml")
def test_stylesheet_buffered_reader(datapath, mode):
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "flatten_doc.xsl")
with open(xsl, mode) as f:
xsl_obj = f.read()
df_style = read_xml(
kml,
xpath=".//k:Placemark",
namespaces={"k": "http://www.opengis.net/kml/2.2"},
stylesheet=xsl_obj,
)
tm.assert_frame_equal(df_kml, df_style)
@td.skip_if_no("lxml")
def test_not_stylesheet(datapath):
from lxml.etree import XSLTParseError
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "books.xml")
with pytest.raises(XSLTParseError, match=("document is not a stylesheet")):
read_xml(kml, stylesheet=xsl)
@td.skip_if_no("lxml")
def test_incorrect_xsl_syntax(datapath):
from lxml.etree import XMLSyntaxError
xsl = """\
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:k="http://www.opengis.net/kml/2.2"/>
<xsl:output method="xml" omit-xml-declaration="yes"
cdata-section-elements="k:description" indent="yes"/>
<xsl:strip-space elements="*"/>
<xsl:template match="node()|@*">
<xsl:copy>
<xsl:apply-templates select="node()|@*"/>
</xsl:copy>
</xsl:template>
<xsl:template match="k:MultiGeometry|k:LineString">
<xsl:apply-templates select='*'/>
</xsl:template>
<xsl:template match="k:description|k:Snippet|k:Style"/>
</xsl:stylesheet>"""
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(
XMLSyntaxError, match=("Extra content at the end of the document")
):
read_xml(kml, stylesheet=xsl)
@td.skip_if_no("lxml")
def test_incorrect_xsl_eval(datapath):
from lxml.etree import XSLTParseError
xsl = """\
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:k="http://www.opengis.net/kml/2.2">
<xsl:output method="xml" omit-xml-declaration="yes"
cdata-section-elements="k:description" indent="yes"/>
<xsl:strip-space elements="*"/>
<xsl:template match="node(*)|@*">
<xsl:copy>
<xsl:apply-templates select="node()|@*"/>
</xsl:copy>
</xsl:template>
<xsl:template match="k:MultiGeometry|k:LineString">
<xsl:apply-templates select='*'/>
</xsl:template>
<xsl:template match="k:description|k:Snippet|k:Style"/>
</xsl:stylesheet>"""
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(XSLTParseError, match=("failed to compile")):
read_xml(kml, stylesheet=xsl)
@td.skip_if_no("lxml")
def test_incorrect_xsl_apply(datapath):
from lxml.etree import XSLTApplyError
xsl = """\
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:output method="xml" encoding="utf-8" indent="yes" />
<xsl:strip-space elements="*"/>
<xsl:template match="@*|node()">
<xsl:copy>
<xsl:copy-of select="document('non_existent.xml')/*"/>
</xsl:copy>
</xsl:template>
</xsl:stylesheet>"""
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
with pytest.raises(XSLTApplyError, match=("Cannot resolve URI")):
read_xml(kml, stylesheet=xsl)
@td.skip_if_no("lxml")
def test_wrong_stylesheet():
from lxml.etree import XMLSyntaxError
kml = os.path.join("data", "xml", "cta_rail_lines.kml")
xsl = os.path.join("data", "xml", "flatten.xsl")
with pytest.raises(
XMLSyntaxError,
match=("Start tag expected, '<' not found"),
):
read_xml(kml, stylesheet=xsl)
@td.skip_if_no("lxml")
def test_stylesheet_file_close(datapath, mode):
kml = datapath("io", "data", "xml", "cta_rail_lines.kml")
xsl = datapath("io", "data", "xml", "flatten_doc.xsl")
xsl_obj: BytesIO | StringIO
with open(xsl, mode) as f:
if mode == "rb":
xsl_obj = BytesIO(f.read())
else:
xsl_obj = StringIO(f.read())
read_xml(kml, stylesheet=xsl_obj)
assert not f.closed
@td.skip_if_no("lxml")
def test_stylesheet_with_etree(datapath):
kml = os.path.join("data", "xml", "cta_rail_lines.kml")
xsl = os.path.join("data", "xml", "flatten_doc.xsl")
with pytest.raises(
ValueError, match=("To use stylesheet, you need lxml installed")
):
read_xml(kml, parser="etree", stylesheet=xsl)
@td.skip_if_no("lxml")
@pytest.mark.parametrize("val", ["", b""])
def test_empty_stylesheet(val):
from lxml.etree import XMLSyntaxError
kml = os.path.join("data", "xml", "cta_rail_lines.kml")
with pytest.raises(
XMLSyntaxError, match=("Document is empty|Start tag expected, '<' not found")
):
read_xml(kml, stylesheet=val)
@tm.network
@td.skip_if_no("lxml")
def test_online_stylesheet():
xml = "https://www.w3schools.com/xml/cdcatalog_with_xsl.xml"
xsl = "https://www.w3schools.com/xml/cdcatalog.xsl"
df_xsl = read_xml(
xml,
xpath=".//tr[td and position() <= 6]",
names=["title", "artist"],
stylesheet=xsl,
)
df_expected = DataFrame(
{
"title": {
0: "Empire Burlesque",
1: "Hide your heart",
2: "Greatest Hits",
3: "Still got the blues",
4: "Eros",
},
"artist": {
0: "Bob Dylan",
1: "Bonnie Tyler",
2: "Dolly Parton",
3: "Gary Moore",
4: "Eros Ramazzotti",
},
}
)
tm.assert_frame_equal(df_expected, df_xsl)
# COMPRESSION
@pytest.mark.parametrize("comp", ["bz2", "gzip", "xz", "zip"])
def test_compression_read(parser, comp):
with tm.ensure_clean() as path:
geom_df.to_xml(path, index=False, parser=parser, compression=comp)
xml_df = read_xml(path, parser=parser, compression=comp)
tm.assert_frame_equal(xml_df, geom_df)
@pytest.mark.parametrize("comp", ["gzip", "xz", "zip"])
def test_wrong_compression_bz2(parser, comp):
with tm.ensure_clean() as path:
geom_df.to_xml(path, parser=parser, compression=comp)
with pytest.raises(OSError, match="Invalid data stream"):
read_xml(path, parser=parser, compression="bz2")
@pytest.mark.parametrize("comp", ["bz2", "xz", "zip"])
def test_wrong_compression_gz(parser, comp):
with tm.ensure_clean() as path:
geom_df.to_xml(path, parser=parser, compression=comp)
with pytest.raises(OSError, match="Not a gzipped file"):
read_xml(path, parser=parser, compression="gzip")
@pytest.mark.parametrize("comp", ["bz2", "gzip", "zip"])
def test_wrong_compression_xz(parser, comp):
from lzma import LZMAError
with tm.ensure_clean() as path:
geom_df.to_xml(path, parser=parser, compression=comp)
with pytest.raises(LZMAError, match="Input format not supported by decoder"):
read_xml(path, parser=parser, compression="xz")
@pytest.mark.parametrize("comp", ["bz2", "gzip", "xz"])
def test_wrong_compression_zip(parser, comp):
    """Reading non-zip data with compression="zip" raises BadZipFile."""
    from zipfile import BadZipFile

    with tm.ensure_clean() as tmp_file:
        geom_df.to_xml(tmp_file, parser=parser, compression=comp)
        with pytest.raises(BadZipFile, match="File is not a zip file"):
            read_xml(tmp_file, parser=parser, compression="zip")
def test_unsuported_compression(datapath, parser):
    """An unrecognized compression codec is rejected with ValueError."""
    with tm.ensure_clean() as tmp_file:
        with pytest.raises(ValueError, match="Unrecognized compression type"):
            read_xml(tmp_file, parser=parser, compression="7z")
# STORAGE OPTIONS
@tm.network
@td.skip_if_no("s3fs")
@td.skip_if_no("lxml")
def test_s3_parser_consistency():
    """lxml and etree parsers must agree on the same S3-hosted document."""
    # Python Software Foundation (2019 IRS-990 RETURN)
    s3_path = "s3://irs-form-990/201923199349319487_public.xml"
    shared_kwargs = dict(
        xpath=".//irs:Form990PartVIISectionAGrp",
        namespaces={"irs": "http://www.irs.gov/efile"},
        storage_options={"anon": True},
    )
    df_lxml = read_xml(s3_path, parser="lxml", **shared_kwargs)
    df_etree = read_xml(s3_path, parser="etree", **shared_kwargs)
    tm.assert_frame_equal(df_lxml, df_etree)
|
|
# -*- coding: utf-8 -*-
"""
Example:
```
df = pd.DataFrame({
'date': ['2014-01-01', '2014-01-02', '2014-01-03', '2014-01-04'],
'c1': [1.0, 3.0, 4.0, 2.4],
'c2': [1.0, 3.0, 4.0, 9.4],
'c3': [1.0, 3.0, 4.0, 1.4],
})
p = Plot()
p.add(df['date'], df['c1'])
p.add(df['date'], df['c2'])
p.add(df['date'], df['c3'])
p.save("hoge.png", max_col=1)
```
"""
import datetime
import matplotlib.pylab as plt
import matplotlib.dates as mdates
from ume.utils import dynamic_load
# Default qualitative color palette as 0-1 RGB tuples (8-bit values / 256.0).
BASE_COLORS = [
    (55/256.0, 166/256.0, 134/256.0),
    (54/256.0, 89/256.0, 63/256.0),
    (242/256.0, 186/256.0, 82/256.0),
    (217/256.0, 118/256.0, 61/256.0),
    (217/256.0, 65/256.0, 65/256.0),
]
# Whitelists of keys forwarded from a plate's `params` dict to the matching
# matplotlib call; anything not listed is silently dropped by the plate_*
# helpers below.
PLATE_TS_KWARGS = [
    'color',
    'marker',
    'linewidth',
]
PLATE_HIST_KWARGS = [
    'bins',
    'color',
]
PLATE_BAR_KWARGS = [
    'linewidth',
    'color',
]
AX_PLOT_KWARGS = [
    'linewidth',
    'color',
    'markersize',
    'marker',
]
# Keys accepted by Axes.annotate. Fixed: 'fontsize' was listed twice.
AX_ANNOTATE_KWARGS = [
    's',
    'xy',
    'xytext',
    'textcoords',
    'ha',
    'va',
    'bbox',
    'fontsize',
    'arrowprops',
]
AX_HLINE_KWARGS = [
    'y',
    'linewidth',
    'color',
    'xmin',
    'xmax',
    'alpha',
    'facecolor',
]
AX_VLINE_KWARGS = [
    'x',
    'linewidth',
    'color',
    'ymin',
    'ymax',
    'alpha',
    'facecolor',
]
# Pure black, expressed in the same 0-1 RGB form as BASE_COLORS.
BLACK = (0/256.0, 0/256.0, 0/256.0)
def _change_tick_fontsize(ax, size):
for tl in ax.get_xticklabels():
tl.set_fontsize(size)
for tl in ax.get_yticklabels():
tl.set_fontsize(size)
def _transform_date(l, fmt="%Y-%m-%d"):
return [datetime.datetime.strptime(x, fmt) for x in l]
def plate_timeseries(ax, X, y, params):
    """Plot ``y`` against the date strings in ``X`` (parsed to datetimes)."""
    kwargs = {key: val for key, val in params.items() if key in PLATE_TS_KWARGS}
    # 'dateformat' overrides the default strptime format used for X.
    fmt_kwargs = {'fmt': params['dateformat']} if 'dateformat' in params else {}
    ax.plot(_transform_date(X, **fmt_kwargs), y, **kwargs)
def plate_hist(ax, X, _y, params):
    """Draw a histogram of ``X``; the ``_y`` argument is ignored."""
    kwargs = {key: params[key] for key in params if key in PLATE_HIST_KWARGS}
    ax.hist(X, **kwargs)
def plate_bar(ax, X, y, params):
    """Draw a bar chart of ``y`` at positions ``X``."""
    kwargs = {key: params[key] for key in params if key in PLATE_BAR_KWARGS}
    ax.bar(X, y, **kwargs)
def plate_hline(ax, _X, _y, params):
    """Draw a horizontal line; position/appearance come from ``params``."""
    kwargs = {key: params[key] for key in params if key in AX_HLINE_KWARGS}
    ax.axhline(**kwargs)
def plate_vline(ax, _X, _y, params):
    """Draw a vertical line; position/appearance come from ``params``."""
    kwargs = {key: params[key] for key in params if key in AX_VLINE_KWARGS}
    ax.axvline(**kwargs)
def plate_annotate(ax, X, y, params):
    """Place a single text annotation on ``ax`` (``X``/``y`` are unused).

    http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.annotate
    """
    if 's' not in params:
        raise RuntimeError("A `s` parameter is required.")
    kwargs = {key: params[key] for key in params if key in AX_ANNOTATE_KWARGS}
    ax.annotate(**kwargs)
def plate_annotate_all(ax, X, y, params):
    """Annotate every (x, y) point with the matching label from ``params['s']``.

    ``params['s']`` must be a sequence parallel to ``X``/``y``; other keys in
    AX_ANNOTATE_KWARGS are forwarded to each Axes.annotate call.

    http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.annotate
    """
    if 's' not in params:
        # Fixed: the message used to say `label`, but the key checked is `s`.
        raise RuntimeError("A `s` parameter is required.")
    plot_params = {
        k: params[k]
        for k in params.keys() if k in AX_ANNOTATE_KWARGS
    }
    x_list = list(X)
    y_list = list(y)
    s_list = list(plot_params['s'])
    for x_single, y_single, s_single in zip(x_list, y_list, s_list):
        plot_params['xy'] = (x_single, y_single)
        plot_params['s'] = s_single
        ax.annotate(**plot_params)
def plate_line(ax, X, y, params):
    """Draw a plain line plot of ``y`` versus ``X``."""
    kwargs = {key: params[key] for key in params if key in AX_PLOT_KWARGS}
    ax.plot(X, y, **kwargs)
def plate_scatter(ax, X, y, params):
    """Draw ``y`` versus ``X`` as point markers (``'.'`` style)."""
    kwargs = {key: params[key] for key in params if key in AX_PLOT_KWARGS}
    ax.plot(X, y, '.', **kwargs)
class Plot(object):
    """Grid of subplots built from per-cell plot descriptions.

    Each entry added via ``add`` is a dict with a ``plot`` list (one dict per
    layer holding ``X``, ``y``, optional ``params`` and ``plot_func``) and an
    optional ``ax_param`` dict of axis-level options. An empty dict produces
    a blank cell.
    """

    def __init__(self, title="No title"):
        self.datastore = []  # one entry per subplot cell, in add() order
        self.title = title

    def add(self, plot_data):
        """Append one subplot description (or an empty dict for a blank cell)."""
        self.datastore.append(plot_data)

    def _ax_plot_with_params(self, ax, plate):
        """Dispatch a single plot layer to its plotting function."""
        params = {} if 'params' not in plate else plate['params']
        plot_func = plate.get('plot_func', 'line')
        func = dynamic_load(plot_func)
        func(ax, plate["X"], plate["y"], params)

    def _plot(self, idx, ax):
        """Render every layer of cell ``idx`` onto ``ax``, then style the axes."""
        plot_data_list = self.datastore[idx]
        if isinstance(plot_data_list, dict) and 'plot' in plot_data_list:
            for plot_data in plot_data_list['plot']:
                self._ax_plot_with_params(ax, plot_data)
            # Robustness fix: a missing 'ax_param' used to raise KeyError.
            self._apply_ax_params(idx, ax, plot_data_list.get('ax_param', {}))
        elif 'plot' not in plot_data_list:
            pass  # empty space
        else:
            raise RuntimeError("Plot#add is required list parameter")

    def _apply_ax_params(self, idx, ax, params):
        """Apply axis-level options: limits, labels, legend, grid, ticks."""
        if 'xlim' in params:
            plt.xlim(**params['xlim'])
        if 'ylim' in params:
            plt.ylim(**params['ylim'])
        if 'xlabel' in params:
            # Fixed: the label was previously applied twice (plt.xlabel and
            # set_xlabel); the axes-level call with fontsize is the one kept.
            ax.set_xlabel(params['xlabel'], fontsize=8)
        if 'ylabel' in params:
            ax.set_ylabel(params['ylabel'], fontsize=8)
        if 'legend' in params:
            patches = []
            legend_names = []
            for entry in params['legend']:
                legend_names.append(entry['name'])
                patches.append(
                    plt.Rectangle(
                        (0.2, 0.2),
                        1.5,
                        0.5,
                        fc=entry['color'],
                        lw=0.1
                    )
                )
            # Fixed: the legend was only drawn when 'legend_args' was also
            # present (and 'legend_args' without 'legend' raised NameError).
            legend_kwargs = {
                'fontsize': 8,
            }
            legend_kwargs.update(params.get('legend_args', {}))
            ax.legend(patches, legend_names, **legend_kwargs)
        if 'grid' in params and (
                params['grid'] == 'True' or params['grid'] is True):
            ax.grid(color=(200/256.0, 200/256.0, 200/256.0),
                    linestyle=':',
                    linewidth=0.5)
        if 'dateformat' in params:
            plt.gcf().axes[idx].xaxis.set_major_formatter(
                mdates.DateFormatter('%Y-%m-%d'))
        if 'xticks_rotation' in params:
            plt.xticks(rotation=params['xticks_rotation'])
        if 'invert_xaxis' in params:
            plt.gca().invert_xaxis()
        if 'invert_yaxis' in params:
            plt.gca().invert_yaxis()
        _change_tick_fontsize(ax, 8)

    def save(self, plot_filename, max_col=2):
        """Lay the cells out in a grid of ``max_col`` columns and save the figure."""
        fig = plt.figure()
        fig.suptitle(self.title, fontsize=8)
        sz = len(self.datastore)
        if sz <= max_col:
            colsz, rowsz = sz, 1
        else:
            colsz = max_col
            # Fixed: integer (floor) division — `/` yields a float under
            # Python 3, which add_subplot rejects.
            rowsz = sz // max_col
            if sz % max_col != 0:
                rowsz += 1
        for i, d in enumerate(self.datastore):
            if len(d) == 0:
                continue  # empty space
            ax = fig.add_subplot(rowsz, colsz, i + 1)
            self._plot(i, ax)
        plt.tight_layout()
        plt.savefig(plot_filename)
        return self
|
|
from base import BaseClient
class MetadataClient(BaseClient):
    """Client for the Salesforce Metadata API.

    Creates custom objects and custom fields and deploys them through the
    metadata SOAP service (``self._metadata``) provided by BaseClient.
    """

    _supportedFieldTypes = [
        'AutoNumber',
        'Checkbox',
        'Currency',
        'Date',
        'DateTime',
        'Email',
        'Lookup',
        'MasterDetail',
        'MultiselectPicklist',
        'Number',
        'Percent',
        'Phone',
        'Picklist',
        'Text',
        'TextArea',
        'LongTextArea',
        'Url',
        'EncryptedText']

    # Some default values
    _apiVersion = 30.0
    _defaultCheckboxVal = False
    _defaultTextLength = 255
    _defaultLongTextAreaLength = 32768
    _defaultVisibleLines = 4
    _defaultCurrencyScale = 2
    _defaultNumberScale = 0
    _defaultPercentScale = 0
    _defaultCurrencyPrecision = 18
    # Fixed: was only defined under the misspelling `_defultNumberPrecision`,
    # so the `_defaultNumberPrecision` lookup in createField raised
    # AttributeError. The old name is kept as an alias for compatibility.
    _defaultNumberPrecision = 18
    _defultNumberPrecision = 18
    _defaultPercentPrecision = 3

    def __init__(self, partnerWsdl, metadataWsdl):
        super(MetadataClient, self).__init__(partnerWsdl, metadataWsdl)

    def listMetadata(self, metadataType):
        '''
        Retrieves property information on metadata components
            metadataType => the type of metadata component that you want
                property information for, e.g. CustomObject
        '''
        query = self._metadata.factory.create('ListMetadataQuery')
        query.type = metadataType
        return self._metadata.service.listMetadata(query, self._apiVersion)

    def createObject(self, **kwargs):
        '''
        Creates a custom object in Salesforce.
        Object properties are passed in through key word arguments.
        Returns a save result object.
        '''
        obj = self._metadata.factory.create('CustomObject')
        obj.fullName = kwargs.get('Full Name')
        obj.label = kwargs.get('Label')
        obj.pluralLabel = kwargs.get('Plural Label')
        deploymentStatus = self._metadata.factory.create('DeploymentStatus')
        obj.deploymentStatus = deploymentStatus.Deployed
        sharingModel = self._metadata.factory.create('SharingModel')
        obj.sharingModel = sharingModel.ReadWrite
        # Every custom object needs a standard Name field; default it to a
        # Text field labelled "<Label> Name".
        nameField = self._metadata.factory.create('CustomField')
        fieldType = self._metadata.factory.create('FieldType')
        nameField.type = fieldType.Text
        nameField.label = obj.label + ' Name'
        obj.nameField = nameField
        return self._metadata.service.createMetadata(obj)

    def createField(self, **kwargs):
        '''
        Creates a custom field in Salesforce.
            Field Type => A string containing the field type.
                Must match an option in _supportedFieldTypes.
        Field properties are passed in through key word arguments.
        Returns the field.
        '''
        field = self._metadata.factory.create('CustomField')
        fieldType = kwargs.get('Field Type', None)
        field.type = self._getFieldType(fieldType)
        field.fullName = kwargs.get('Object Name', '') + '.' + kwargs.get('Label', '').replace(' ', '_') + '__c'
        field.label = kwargs.get('Label', '')
        field.description = kwargs.get('Description', None)
        field.inlineHelpText = kwargs.get('Inline Help Text', None)
        field.externalId = kwargs.get('External Id', False)
        field.required = kwargs.get('Required', False)
        field.unique = kwargs.get('Unique', False)
        field.length = kwargs.get('Length', None)
        if field.type in ('Text', 'EncryptedText') and field.length is None:
            field.length = self._defaultTextLength
        if field.type == 'TextArea' and field.length is not None:
            # TextArea fields may not carry a length.
            field.length = None
        if field.length == '':
            field.length = None
        field.visibleLines = kwargs.get('Visible Lines', None)
        # Fixed: was `self._defaultTextAreaLength`, an attribute that does
        # not exist (AttributeError at runtime).
        if field.type == 'LongTextArea' and field.length is None:
            field.length = self._defaultLongTextAreaLength
        if field.type in ('LongTextArea', 'MultiselectPicklist') and field.visibleLines is None:
            field.visibleLines = self._defaultVisibleLines
        field.defaultValue = kwargs.get('Default Value', None)
        if field.defaultValue is None and field.type == 'Checkbox':
            field.defaultValue = self._defaultCheckboxVal
        field.scale = kwargs.get('Scale', None)
        if field.scale is None:
            if field.type == 'Currency':
                field.scale = self._defaultCurrencyScale
            elif field.type == 'Number':
                field.scale = self._defaultNumberScale
            elif field.type == 'Percent':
                # Fixed: was `self._defaultPrecisionScale` (nonexistent).
                field.scale = self._defaultPercentScale
        field.precision = kwargs.get('Precision', None)
        if field.precision is None:
            if field.type == 'Currency':
                field.precision = self._defaultCurrencyPrecision
            elif field.type == 'Number':
                field.precision = self._defaultNumberPrecision
            elif field.type == 'Percent':
                field.precision = self._defaultPercentPrecision
        if field.type == 'EncryptedText':
            field.maskType = self._getMaskType(kwargs.get('Mask Type', None))
            field.maskChar = self._getMaskChar(kwargs.get('Mask Char', None))
        if field.type in ('Picklist', 'MultiselectPicklist'):
            field.picklist = self._getPicklist(
                kwargs.get('Picklist Values', ''),
                kwargs.get('Is Sorted', False))
        if field.type in ('Lookup', 'MasterDetail'):
            field.referenceTo = kwargs.get('Reference To')
            field.relationshipLabel = kwargs.get('Relationship Label', '')
            field.relationshipName = kwargs.get('Relationship Label', '').replace(' ', '')
        if field.type == 'AutoNumber':
            field.startingNumber = kwargs.get('Starting Number', 1)
        return field

    def _getFieldType(self, fieldType):
        '''
        Create a field type object.
            fieldType => A string containing the field type.
                Must match an option in _supportedFieldTypes.
        Returns a field type object. Returns None if it can't
        find a field type that matches the input string.
        '''
        fieldTypes = self._metadata.factory.create('FieldType')
        for t in fieldTypes:
            if t[0] == fieldType and t[0] in self._supportedFieldTypes:
                return t[1]
        return None

    def _getPicklist(self, values, isSorted):
        '''
        Create the picklist values available for the field.
        Currently does not support dependent picklists or modifying the
        default value.
            values => A semicolon delimited list of picklist values.
            isSorted => A boolean to determine whether the values should be
                sorted alphabetically or displayed in the order entered.
        Returns a picklist object.
        '''
        picklist = self._metadata.factory.create('Picklist')
        picklist.sorted = isSorted
        picklistValues = []
        for value in values.split(';'):
            picklistValue = self._metadata.factory.create('PicklistValue')
            picklistValue.fullName = value
            picklistValue.default = False
            picklistValues.append(picklistValue)
        picklist.picklistValues = picklistValues
        return picklist

    def _getMaskType(self, maskType):
        '''
        Get an encrypted text mask type: e.g. SSN, credit card, etc.
            maskType => The mask type name.
        Returns a mask type object. Defaults to masking all characters
        if it can't find a match with the requested mask type.
        '''
        maskTypes = self._metadata.factory.create('EncryptedFieldMaskType')
        for t in maskTypes:
            if t[0] == maskType:
                return t[1]
        return maskTypes.all

    def _getMaskChar(self, maskChar):
        '''
        Get an encrypted text mask char: either * or X.
            maskChar => The desired masking character.
        Returns a mask char object. Defaults to asterisk if it
        can't find a mask character that matches the input string.
        '''
        maskChars = self._metadata.factory.create('EncryptedFieldMaskChar')
        for m in maskChars:
            if m[0] == maskChar:
                return m[1]
        return maskChars.asterisk

    def deployFields(self, fields):
        '''
        Deploy list of fields to Salesforce.com.
            fields => A list of field objects to be deployed.
        Returns a list of SaveResult objects.
        '''
        return self._metadata.service.createMetadata(fields)
|
|
# Copyright (c) 2016 GeoSpark
#
# Released under the MIT License (MIT)
# See the LICENSE file, or visit http://opensource.org/licenses/MIT
"""ILI9341 Font Packer v1.1
Converts TrueType fonts to a compact binary bitmap format for use with Paul Stoffregen's ILI9341 library for the Teensy.
Usage:
font_packer.py --height=<pixels> [--range=<range-string>] [--packed|--code|--smoke] <font-file> [<output>]
font_packer.py -h | --help
font_packer.py --version
Options:
-h --help Show this screen.
--version Show version.
--height=<pixels> The maximum height of the glyphs.
--range=<range-string> A range of Unicode codepoints to generate glyphs for [default: 32-126].
--packed Sends the packed binary bitmap font to output or stdout.
--code Generates C structs to output or stdout.
--smoke Smoke proof. Displays to output or stdout the bitmaps of each character as asterisks.
output Output file name or stdout if not supplied.
"""
import os
import sys
import struct
import math
import binascii
from collections import OrderedDict
import io
from bitstring import Bits, BitString
from docopt import docopt
from font import Font
from range_parser import parse_disjoint_range
def generate(height, codepoints, font_file_path, output_file_name, packed, code, smoke):
    """Render ``codepoints`` from the font and emit them in the chosen format.

    Exactly one of ``packed`` (binary font blob), ``code`` (C source) or
    ``smoke`` (ASCII-art dump) is expected (docopt enforces mutual
    exclusion); output goes to ``output_file_name`` or stdout.
    """
    font = Font(font_file_path, height)
    if smoke:
        glyphs = [chr(x) for x in codepoints]
        # Fixed: this branch writes str objects, so default to the *text*
        # stdout stream (sys.stdout.buffer is binary and raised TypeError).
        opened = output_file_name is not None
        f = open(output_file_name, 'wt') if opened else sys.stdout
        for cur_glyph in glyphs:
            ch = font.render_character(cur_glyph)
            f.write(cur_glyph + '\n')
            f.write(repr(ch))
            f.write('\n\n')
        if opened:
            # Only close handles we opened; never close stdout.
            f.close()
        return
    # Font header. Field order matters: it is struct-packed verbatim below.
    ili9341_t3_font = OrderedDict()
    ili9341_t3_font['index'] = 0
    ili9341_t3_font['unicode'] = 0
    ili9341_t3_font['data'] = 0
    ili9341_t3_font['version'] = 1
    ili9341_t3_font['reserved'] = 0
    ili9341_t3_font['index1_first'] = (len(codepoints) & 0xff00) >> 8
    ili9341_t3_font['index1_last'] = len(codepoints) & 0xff
    ili9341_t3_font['index2_first'] = 0
    ili9341_t3_font['index2_last'] = 0
    ili9341_t3_font['bits_index'] = 0
    ili9341_t3_font['bits_width'] = 0
    ili9341_t3_font['bits_height'] = 0
    ili9341_t3_font['bits_xoffset'] = 0
    ili9341_t3_font['bits_yoffset'] = 0
    ili9341_t3_font['bits_delta'] = 0
    ili9341_t3_font['line_space'] = font.height
    e_cap = font.glyph_for_character('E')
    ili9341_t3_font['cap_height'] = e_cap.height - e_cap.descent
    # First pass: render every glyph and track field maxima so each header
    # field gets the minimum bit width that can represent it.
    max_width = 1
    max_height = 1
    max_xoffset = 1
    max_yoffset = 1
    max_delta = 1
    glyph_data = dict()
    for codepoint in codepoints:
        cur_glyph = chr(codepoint)
        glyph_header = build_glyph(cur_glyph, font, glyph_data)
        max_width = max(max_width, glyph_header['width'])
        max_height = max(max_height, glyph_header['height'])
        max_xoffset = max(abs(max_xoffset), abs(glyph_header['xoffset']))
        max_yoffset = max(abs(max_yoffset), abs(glyph_header['yoffset']))
        max_delta = max(max_delta, glyph_header['delta'])
    ili9341_t3_font['bits_width'] = int(math.floor(math.log(max_width, 2))) + 1
    ili9341_t3_font['bits_height'] = int(math.floor(math.log(max_height, 2))) + 1
    # Offsets are signed, hence one extra bit compared to the unsigned fields.
    ili9341_t3_font['bits_xoffset'] = int(math.floor(math.log(max_xoffset, 2))) + 2
    ili9341_t3_font['bits_yoffset'] = int(math.floor(math.log(max_yoffset, 2))) + 2
    ili9341_t3_font['bits_delta'] = int(math.floor(math.log(max_delta, 2))) + 1
    # Second pass: pack each glyph and record its byte offset in the index.
    output_data = bytearray()
    index = list()
    total_size = 0
    for codepoint in codepoints:
        ch = chr(codepoint)
        index.append(total_size)
        glyph = glyph_data[ch]
        glyph_bytes = pack_glyph(glyph, ili9341_t3_font)
        output_data.extend(glyph_bytes)
        total_size += len(glyph_bytes)
    ili9341_t3_font['bits_index'] = int(math.floor(math.log(total_size, 2))) + 1
    index_bits = BitString()
    for idx in index:
        index_bits.append(Bits(uint=idx, length=ili9341_t3_font['bits_index']))
    # 21 bits covers the full Unicode range (up to U+10FFFF).
    codepoint_table = BitString()
    for codepoint in codepoints:
        codepoint_table.append(Bits(uint=codepoint, length=21))
    if packed:
        opened = output_file_name is not None
        f = open(output_file_name, 'wb') if opened else sys.stdout.buffer
        f.write(struct.pack('<3I14Bxx', *tuple(ili9341_t3_font.values())))
        index_bits.tofile(f)
        codepoint_table.tofile(f)
        f.write(output_data)
        if opened:
            # Fixed: only close handles we opened; never close stdout's buffer.
            f.close()
    if code:
        opened = output_file_name is not None
        f = open(output_file_name, 'wb') if opened else sys.stdout.buffer
        variable_name = os.path.splitext(os.path.basename(font_file_path))[0] + '_' + str(height)
        c = io.StringIO()
        c.write('// extern const ILI9341_t3_font_t {};\n\n'.format(variable_name))
        c.write('static const unsigned char {}_data[] = {{\n'.format(variable_name))
        data_byte_array = ['0x' + binascii.hexlify(bytes([x])).decode() for x in output_data]
        for i in range(0, len(data_byte_array), 10):
            c.write(','.join(data_byte_array[i:i + 10]) + ',\n')
        c.write('};\n')
        c.write('/* font data size: {} bytes */\n\n'.format(len(data_byte_array)))
        c.write('static const unsigned char {}_index[] = {{\n'.format(variable_name))
        index_byte_array = ['0x' + binascii.hexlify(bytes([x])).decode() for x in index_bits.tobytes()]
        for i in range(0, len(index_byte_array), 10):
            c.write(','.join(index_byte_array[i:i + 10]) + ',\n')
        c.write('};\n')
        c.write('/* font index size: {} bytes */\n\n'.format(len(index_byte_array)))
        c.write('static const unsigned char {}_codepoints[] = {{\n'.format(variable_name))
        codepoint_byte_array = ['0x' + binascii.hexlify(bytes([x])).decode() for x in codepoint_table.tobytes()]
        for i in range(0, len(codepoint_byte_array), 10):
            c.write(','.join(codepoint_byte_array[i:i + 10]) + ',\n')
        c.write('};\n')
        c.write('/* Unicode codepoint table size: {} bytes */\n\n'.format(len(codepoint_byte_array)))
        c.write('const ILI9341_t3_font_t {} = {{\n'.format(variable_name))
        c.write('    {}_index,\n'.format(variable_name))
        c.write('    {}_codepoints,\n'.format(variable_name))
        c.write('    {}_data,\n'.format(variable_name))
        c.write('    {},\n'.format(ili9341_t3_font['version']))
        c.write('    {},\n'.format(ili9341_t3_font['reserved']))
        c.write('    {},\n'.format(ili9341_t3_font['index1_first']))
        c.write('    {},\n'.format(ili9341_t3_font['index1_last']))
        c.write('    {},\n'.format(ili9341_t3_font['index2_first']))
        c.write('    {},\n'.format(ili9341_t3_font['index2_last']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_index']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_width']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_height']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_xoffset']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_yoffset']))
        c.write('    {},\n'.format(ili9341_t3_font['bits_delta']))
        c.write('    {},\n'.format(ili9341_t3_font['line_space']))
        c.write('    {}\n'.format(ili9341_t3_font['cap_height']))
        c.write('};\n')
        f.write(c.getvalue().encode('ascii'))
        if opened:
            # Fixed: the opened output file was previously never closed.
            f.close()
def pack_glyph(glyph, ili9341_t3_font):
    """Serialize one glyph (header + RLE rows) into its packed byte form.

    Field bit widths come from the ``bits_*`` entries of ``ili9341_t3_font``;
    width/height/delta are unsigned, the offsets signed.
    """
    header = glyph['header']
    bits = BitString()
    bits.append(Bits(uint=header['encoding'], length=3))
    bits.append(Bits(uint=header['width'], length=ili9341_t3_font['bits_width']))
    bits.append(Bits(uint=header['height'], length=ili9341_t3_font['bits_height']))
    bits.append(Bits(int=header['xoffset'], length=ili9341_t3_font['bits_xoffset']))
    bits.append(Bits(int=header['yoffset'], length=ili9341_t3_font['bits_yoffset']))
    bits.append(Bits(uint=header['delta'], length=ili9341_t3_font['bits_delta']))
    for row in glyph['data']:
        repeat = row['repeat']
        if repeat == 0:
            # Flag bit 0: a single, non-repeated row follows.
            bits.append(Bits(bool=False))
        else:
            # Flag bit 1 plus a 3-bit (repeat - 1) count.
            bits.append(Bits(bool=True))
            bits.append(Bits(uint=repeat - 1, length=3))
        for pixel in row['bits']:
            bits.append(Bits(bool=pixel == 1))
    return bits.tobytes()
def build_glyph(cur_glyph, font, glyph_data, isplaceholder=False):
    """Render one character; store its header and run-length-encoded rows.

    Identical consecutive bitmap rows are collapsed into one entry with a
    ``repeat`` count (at most 7 extra repeats, matching the 3-bit field used
    by pack_glyph). The entry is stored in ``glyph_data`` under the character
    itself, or under 'placeholder' when ``isplaceholder`` is set. Returns the
    glyph header dict.
    """
    g = font.glyph_for_character(cur_glyph)
    glyph_header = {
        'encoding': 0,
        'width': g.width,
        'height': g.height,
        'xoffset': g.x_bearing,
        'yoffset': g.top - g.height,
        'delta': g.advance_width,
    }
    width = g.bitmap.width
    rows = [{'repeat': 0, 'bits': g.bitmap.pixels[0:width]}]
    for row in range(1, g.bitmap.height):
        start = row * width
        bits = g.bitmap.pixels[start:start + width]
        last = rows[-1]
        if last['repeat'] < 7 and bits == last['bits']:
            last['repeat'] += 1
        else:
            rows.append({'repeat': 0, 'bits': bits})
    key = 'placeholder' if isplaceholder else cur_glyph
    glyph_data[key] = {'header': glyph_header, 'data': rows}
    return glyph_header
if __name__ == '__main__':
    args = docopt(__doc__, version='ILI9341 Font Packer v1.1')
    # Default to --packed when no output mode was chosen on the command line.
    if not any((args['--packed'], args['--code'], args['--smoke'])):
        args['--packed'] = True
    r, invalid = parse_disjoint_range(args['--range'])
    if invalid:
        sys.stderr.write('Warning, invalid values in range: {}'.format(invalid))
    generate(int(args['--height']), r, args['<font-file>'], args['<output>'],
             args['--packed'], args['--code'], args['--smoke'])
|
|
# -*- coding: utf-8 -*-
import base64
import hashlib
import json
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import traceback
import urlparse
import uuid
import zipfile
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
import requests
from appvalidator import validate_app, validate_packaged_app
from celery import task
from django_statsd.clients import statsd
from PIL import Image
from tower import ugettext as _
import mkt
from lib.post_request_task.task import task as post_request_task
from mkt.constants import APP_PREVIEW_SIZES
from mkt.constants.regions import REGIONS_CHOICES_ID_DICT
from mkt.files.helpers import copyfileobj
from mkt.files.models import File, FileUpload, FileValidation
from mkt.files.utils import SafeUnzip
from mkt.site.decorators import set_modified_on, use_master
from mkt.site.helpers import absolutify
from mkt.site.mail import send_mail_jinja
from mkt.site.storage_utils import private_storage
from mkt.site.utils import (remove_icons, remove_promo_imgs, resize_image,
strip_bom)
from mkt.webapps.models import AddonExcludedRegion, Preview, Webapp
from mkt.webapps.utils import iarc_get_app_info
# Module-level logger for developer-task activity.
log = logging.getLogger('z.mkt.developers.task')
# MDN documentation URL on how app manifests must be served.
CT_URL = (
    'https://developer.mozilla.org/docs/Web/Apps/Manifest#Serving_manifests'
)
# Headers used when fetching remote content (see _fetch_content below);
# presents a Firefox OS-style UA — presumably so servers respond as they
# would to a device; confirm against the servers being fetched.
REQUESTS_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Mobile; rv:18.0) Gecko/18.0 Firefox/18.0'
}
@post_request_task
@use_master
def validator(upload_id, **kw):
    """Validate an uploaded app and store the JSON result on its FileUpload.

    Any preliminary validation already stored on the upload (marked with a
    'prelim' key) is merged into the fresh result: messages and error counts
    are combined, and success is only kept if both runs succeeded. On
    failure the traceback is saved on the upload; the exception is re-raised
    unless Celery runs eagerly.
    """
    if not settings.VALIDATE_ADDONS:
        return None
    log.info(u'[FileUpload:%s] Validating app.' % upload_id)
    try:
        upload = FileUpload.objects.get(pk=upload_id)
    except FileUpload.DoesNotExist:
        log.info(u'[FileUpload:%s] Does not exist.' % upload_id)
        return
    try:
        validation_result = run_validator(upload.path, url=kw.get('url'))
        if upload.validation:
            # If there's any preliminary validation result, merge it with the
            # actual validation result.
            dec_prelim_result = json.loads(upload.validation)
            if 'prelim' in dec_prelim_result:
                dec_validation_result = json.loads(validation_result)
                # Merge the messages.
                dec_validation_result['messages'] += (
                    dec_prelim_result['messages'])
                # Merge the success value.
                if dec_validation_result['success']:
                    dec_validation_result['success'] = (
                        dec_prelim_result['success'])
                # Merge the error count (we only raise errors, not warnings).
                dec_validation_result['errors'] += dec_prelim_result['errors']
                # Put the validation result back into JSON.
                validation_result = json.dumps(dec_validation_result)
        upload.validation = validation_result
        upload.save()  # We want to hit the custom save().
    except Exception:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        # Don't raise if we're being eager, setting the error is enough.
        if not settings.CELERY_ALWAYS_EAGER:
            raise
@task
@use_master
def file_validator(file_id, **kw):
    """Run the app validator against a File and persist a FileValidation."""
    if not settings.VALIDATE_ADDONS:
        return None
    log.info(u'[File:%s] Validating file.' % file_id)
    try:
        file_obj = File.objects.get(pk=file_id)
    except File.DoesNotExist:
        log.info(u'[File:%s] Does not exist.' % file_id)
        return
    # Unlike upload validation, let the validator raise an exception if there
    # is one.
    result = run_validator(file_obj.file_path,
                           url=file_obj.version.addon.manifest_url)
    return FileValidation.from_json(file_obj, result)
def run_validator(file_path, url=None):
    """A pre-configured wrapper around the app validator.

    Copies the (possibly remote) file to a local temp path, runs the
    packaged- or hosted-app validator on it and returns the validator's
    result. The temporary copy is always removed.
    """
    # Make a copy of the file since we can't assume the
    # uploaded file is on the local filesystem.
    temp_path = tempfile.mktemp()
    with open(temp_path, 'wb') as local_f:
        with private_storage.open(file_path) as remote_f:
            copyfileobj(remote_f, local_f)
    try:
        with statsd.timer('mkt.developers.validator'):
            is_packaged = zipfile.is_zipfile(temp_path)
            if is_packaged:
                log.info(u'Running `validate_packaged_app` for path: %s'
                         % (file_path))
                with statsd.timer('mkt.developers.validate_packaged_app'):
                    return validate_packaged_app(
                        temp_path,
                        market_urls=settings.VALIDATOR_IAF_URLS,
                        timeout=settings.VALIDATOR_TIMEOUT,
                        spidermonkey=settings.SPIDERMONKEY)
            else:
                log.info(u'Running `validate_app` for path: %s' % (file_path))
                with statsd.timer('mkt.developers.validate_app'):
                    # Fixed: read through a context manager so the handle is
                    # closed instead of leaking until GC.
                    with open(temp_path) as manifest_f:
                        contents = manifest_f.read()
                    return validate_app(contents,
                                        market_urls=settings.VALIDATOR_IAF_URLS,
                                        url=url)
    finally:
        # Clean up the copied file. Fixed: this cleanup was previously dead
        # code — both branches return above, so the temp file leaked.
        os.unlink(temp_path)
def _hash_file(fd):
return hashlib.md5(fd.read()).hexdigest()[:8]
@post_request_task
@set_modified_on
def resize_icon(src, dst, sizes, locally=False, **kw):
    """Resize an addon/website icon to each square size in ``sizes``.

    Each result is crushed asynchronously; the source file is hashed and
    then deleted. Returns {'icon_hash': ...} for set_modified_on, or None
    on error (which is logged, not raised).
    """
    log.info('[1@None] Resizing icon: %s' % dst)
    # Pick filesystem vs. storage backends once, up front.
    open_ = open if locally else storage.open
    delete = os.unlink if locally else storage.delete
    try:
        for size in sizes:
            size_dst = '%s-%s.png' % (dst, size)
            resize_image(src, size_dst, (size, size),
                         remove_src=False, locally=locally)
            pngcrush_image.delay(size_dst, **kw)
        with open_(src) as fd:
            icon_hash = _hash_file(fd)
        delete(src)
        log.info('Icon resizing completed for: %s' % dst)
        return {'icon_hash': icon_hash}
    except Exception as e:
        log.error("Error resizing icon: %s; %s" % (e, dst))
@post_request_task
@set_modified_on
def resize_promo_imgs(src, dst, sizes, locally=False, **kw):
    """Resize webapp/website promo images to each width in ``sizes``.

    Mirrors resize_icon: each result is crushed asynchronously, the source
    is hashed and then deleted. Returns {'promo_img_hash': ...} for
    set_modified_on, or None on error (which is logged, not raised).
    """
    log.info('[1@None] Resizing promo imgs: %s' % dst)
    # Consistency with resize_icon: select the backends once instead of
    # duplicating the whole hash/delete sequence per backend.
    open_ = open if locally else storage.open
    delete = os.remove if locally else storage.delete
    try:
        for s in sizes:
            size_dst = '%s-%s.png' % (dst, s)
            # Crop only to the width, keeping the aspect ratio.
            resize_image(src, size_dst, (s, 0),
                         remove_src=False, locally=locally)
            pngcrush_image.delay(size_dst, **kw)
        with open_(src) as fd:
            promo_img_hash = _hash_file(fd)
        delete(src)
        log.info('Promo img hash resizing completed for: %s' % dst)
        return {'promo_img_hash': promo_img_hash}
    except Exception as e:
        log.error("Error resizing promo img hash: %s; %s" % (e, dst))
@task
@set_modified_on
def pngcrush_image(src, hash_field='image_hash', **kw):
    """
    Optimizes a PNG image by running it through Pngcrush. Returns hash.

    src -- filesystem image path
    hash_field -- field name to save the new hash on instance if passing
                  instance through set_modified_on
    """
    log.info('[1@None] Optimizing image: %s' % src)
    try:
        # pngcrush -ow has some issues, use a temporary file and do the final
        # renaming ourselves.
        suffix = '.opti.png'
        tmp_path = '%s%s' % (os.path.splitext(src)[0], suffix)
        cmd = [settings.PNGCRUSH_BIN, '-q', '-rem', 'alla', '-brute',
               '-reduce', '-e', suffix, src]
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if proc.returncode != 0:
            log.error('Error optimizing image: %s; %s' % (src, stderr.strip()))
            pngcrush_image.retry(args=[src], kwargs=kw, max_retries=3)
            return False
        shutil.move(tmp_path, src)
        log.info('Image optimization completed for: %s' % src)
        # Return hash for set_modified_on.
        with open(src) as fd:
            image_hash = _hash_file(fd)
        return {hash_field: image_hash}
    except Exception as e:
        log.error('Error optimizing image: %s; %s' % (src, e))
@post_request_task
@set_modified_on
def resize_preview(src, pk, **kw):
    """Resize a preview image and record the resulting sizes on the Preview."""
    instance = Preview.objects.get(pk=pk)
    thumb_dst, full_dst = instance.thumbnail_path, instance.image_path
    sizes = instance.sizes or {}
    log.info('[1@None] Resizing preview and storing size: %s' % thumb_dst)
    try:
        thumbnail_size = APP_PREVIEW_SIZES[0][:2]
        image_size = APP_PREVIEW_SIZES[1][:2]
        with storage.open(src, 'rb') as fp:
            width, height = Image.open(fp).size
            if width > height:
                # Landscape source: reverse the wanted sizes so the original
                # aspect ratio is kept while resizing to the right dimensions.
                thumbnail_size = thumbnail_size[::-1]
                image_size = image_size[::-1]
        if kw.get('generate_thumbnail', True):
            sizes['thumbnail'] = resize_image(src, thumb_dst,
                                              thumbnail_size,
                                              remove_src=False)
        if kw.get('generate_image', True):
            sizes['image'] = resize_image(src, full_dst,
                                          image_size,
                                          remove_src=False)
        instance.sizes = sizes
        instance.save()
        log.info('Preview resized to: %s' % thumb_dst)
        # Remove src file now that it has been processed.
        try:
            os.remove(src)
        except OSError:
            pass
        return True
    except Exception as e:
        log.error("Error saving preview: %s; %s" % (e, thumb_dst))
def _fetch_content(url):
    """GET ``url`` (streaming, 30s timeout) and return the response object.

    Raises a plain Exception with a user-facing message on connection
    failure, non-2xx status, or missing headers; statsd success/error
    counters are recorded in every case.
    """
    with statsd.timer('developers.tasks.fetch_content'):
        # Note: the plain Exceptions raised below were never caught by the
        # RequestException handler, so only the request itself needs the try.
        try:
            res = requests.get(url, timeout=30, stream=True,
                               headers=REQUESTS_HEADERS)
        except requests.RequestException as e:
            statsd.incr('developers.tasks.fetch_content.error')
            log.error('fetch_content connection error: %s' % e)
            raise Exception('The file could not be retrieved.')
        if not 200 <= res.status_code < 300:
            statsd.incr('developers.tasks.fetch_content.error')
            raise Exception('An invalid HTTP status code was returned.')
        if not res.headers.keys():
            statsd.incr('developers.tasks.fetch_content.error')
            raise Exception('The HTTP server did not return headers.')
        statsd.incr('developers.tasks.fetch_content.success')
        return res
class ResponseTooLargeException(Exception):
    """Raised when a fetched resource exceeds the allowed maximum size."""
    pass
def get_content_and_check_size(response, max_size):
    """Return the response body, guaranteed to be at most ``max_size`` bytes.

    Reads one extra byte so oversized bodies are detected without
    downloading the whole file; raises ResponseTooLargeException when the
    server sent more than ``max_size`` bytes.
    """
    # Read one extra byte. Reject if it's too big so we don't have issues
    # downloading huge files.
    # Fixed: use the next() builtin instead of the Python-2-only `.next()`
    # method — identical on Python 2.6+ and forward-compatible with 3.x.
    content = next(response.iter_content(chunk_size=max_size + 1))
    if len(content) > max_size:
        raise ResponseTooLargeException('Too much data.')
    return content
def save_icon(obj, icon_content):
    """
    Saves the icon for `obj` to its final destination. `obj` can be an app or a
    website.
    """
    # Stage the raw bytes in a uniquely-named temp file first.
    temp_path = os.path.join(settings.TMP_PATH, 'icon', uuid.uuid4().hex)
    with storage.open(temp_path, 'wb') as temp_file:
        temp_file.write(icon_content)
    final_path = os.path.join(obj.get_icon_dir(), '%s' % obj.pk)
    # Clear any previously generated icons before writing the new ones.
    remove_icons(final_path)
    icon_hash = resize_icon(temp_path, final_path, mkt.CONTENT_ICON_SIZES,
                            set_modified_on=[obj])
    # Need to set icon type so .get_icon_url() works normally
    # submit step 4 does it through AppFormMedia, but we want to beat them to
    # the punch. resize_icon outputs pngs so we know it's 'image/png'.
    obj.icon_hash = icon_hash['icon_hash']  # In case, we're running not async.
    obj.icon_type = 'image/png'
    obj.save()
def save_promo_imgs(obj, img_content):
    """
    Saves the promo image for `obj` to its final destination.
    `obj` can be an app or a website.
    """
    # Stage the raw bytes in a uniquely-named temp file first.
    temp_path = os.path.join(settings.TMP_PATH, 'promo_imgs', uuid.uuid4().hex)
    with storage.open(temp_path, 'wb') as temp_file:
        temp_file.write(img_content)
    final_path = os.path.join(obj.get_promo_img_dir(), '%s' % obj.pk)
    # Clear any previously generated images before resizing the new one.
    remove_promo_imgs(final_path)
    resize_promo_imgs(
        temp_path, final_path, mkt.PROMO_IMG_SIZES,
        set_modified_on=[obj])
@post_request_task
@use_master
def fetch_icon(pk, file_pk=None, **kw):
    """
    Downloads a webapp icon from the location specified in the manifest.
    Returns False if icon was not able to be retrieved
    If `file_pk` is not provided it will use the file from the app's
    `current_version`.
    """
    webapp = Webapp.objects.get(pk=pk)
    log.info(u'[1@None] Fetching icon for webapp %s.' % webapp.name)
    if file_pk:
        file_obj = File.objects.get(pk=file_pk)
    else:
        # Falls back to the first file of the current version; this is
        # falsy when the app has no current version.
        file_obj = (webapp.current_version and
                    webapp.current_version.all_files[0])
    manifest = webapp.get_manifest_json(file_obj)
    if not manifest or 'icons' not in manifest:
        # Set the icon type to empty.
        webapp.update(icon_type='')
        return
    try:
        # Manifest icon keys are pixel sizes as strings; pick the largest.
        biggest = max(int(size) for size in manifest['icons'])
    except ValueError:
        log.error('No icon to fetch for webapp "%s"' % webapp.name)
        return False
    icon_url = manifest['icons'][str(biggest)]
    if icon_url.startswith('data:image'):
        # Inline data URI: decode it directly, no fetch needed.
        image_string = icon_url.split('base64,')[1]
        content = base64.decodestring(image_string)
    else:
        if webapp.is_packaged:
            # Get icons from package.
            if icon_url.startswith('/'):
                icon_url = icon_url[1:]
            try:
                zf = SafeUnzip(storage.open(file_obj.file_path))
                zf.is_valid()
                content = zf.extract_path(icon_url)
            except (KeyError, forms.ValidationError): # Not found in archive.
                log.error(u'[Webapp:%s] Icon %s not found in archive'
                          % (webapp, icon_url))
                return False
        else:
            # Hosted app: resolve relative URLs against the app's origin.
            if not urlparse.urlparse(icon_url).scheme:
                icon_url = webapp.origin + icon_url
            try:
                response = _fetch_content(icon_url)
            except Exception, e:
                log.error(u'[Webapp:%s] Failed to fetch icon for webapp: %s'
                          % (webapp, e))
                # Set the icon type to empty.
                webapp.update(icon_type='')
                return False
            try:
                content = get_content_and_check_size(
                    response, settings.MAX_ICON_UPLOAD_SIZE)
            except ResponseTooLargeException:
                log.warning(u'[Webapp:%s] Icon exceeds maximum size.' % webapp)
                return False
    log.info('Icon fetching completed for app "%s"; saving icon' % webapp.name)
    save_icon(webapp, content)
def failed_validation(*messages, **kwargs):
    """Return a validation object that looks like the add-on validator."""
    upload = kwargs.pop('upload', None)
    # Start from the upload's existing messages when we have them.
    if upload is not None and upload.validation:
        msgs = json.loads(upload.validation)['messages']
    else:
        msgs = []
    msgs.extend({'type': 'error', 'message': msg, 'tier': 1}
                for msg in messages)
    error_count = len([m for m in msgs if m['type'] == 'error'])
    return json.dumps({
        'errors': error_count,
        'success': False,
        'messages': msgs,
        'prelim': True,
    })
def _fetch_manifest(url, upload=None):
    """Fetch and sanity-check a webapp manifest from `url`.

    Returns the manifest body (a str) or None on a fatal failure. When
    `upload` is given, failures are recorded on it as validation results
    instead of raising.
    """
    def fail(message, upload=None):
        if upload is None:
            # If `upload` is None, that means we're using one of @washort's old
            # implementations that expects an exception back.
            raise Exception(message)
        upload.update(validation=failed_validation(message, upload=upload))
    try:
        response = _fetch_content(url)
    except Exception, e:
        log.error('Failed to fetch manifest from %r: %s' % (url, e))
        fail(_('No manifest was found at that URL. Check the address and try '
               'again.'), upload=upload)
        return
    ct = response.headers.get('content-type', '')
    if not ct.startswith('application/x-web-app-manifest+json'):
        # NOTE(review): deliberately no `return` here -- a wrong
        # Content-Type is recorded as a failure but processing continues;
        # confirm this is intended before "fixing" it.
        fail(_('Manifests must be served with the HTTP header '
               '"Content-Type: application/x-web-app-manifest+json". See %s '
               'for more information.') % CT_URL,
             upload=upload)
    try:
        max_webapp_size = settings.MAX_WEBAPP_UPLOAD_SIZE
        content = get_content_and_check_size(response, max_webapp_size)
    except ResponseTooLargeException:
        fail(_('Your manifest must be less than %s bytes.') % max_webapp_size,
             upload=upload)
        return
    # Only validating the encoding here; the decoded value is discarded.
    try:
        content.decode('utf_8')
    except (UnicodeDecodeError, UnicodeEncodeError), exc:
        log.info('Manifest decode error: %s: %s' % (url, exc))
        fail(_('Your manifest file was not encoded as valid UTF-8.'),
             upload=upload)
        return
    # Get the individual parts of the content type.
    ct_split = map(str.strip, ct.split(';'))
    if len(ct_split) > 1:
        # Figure out if we've got a charset specified.
        kv_pairs = dict(tuple(p.split('=', 1)) for p in ct_split[1:] if
                        '=' in p)
        if 'charset' in kv_pairs and kv_pairs['charset'].lower() != 'utf-8':
            # Also no `return`: the mismatch is recorded but the manifest
            # is still returned below.
            fail(_("The manifest's encoding does not match the charset "
                   'provided in the HTTP Content-Type.'),
                 upload=upload)
    content = strip_bom(content)
    return content
@post_request_task
@use_master
def fetch_manifest(url, upload_pk=None, **kw):
    """Fetch the manifest at `url`, attach it to the FileUpload, then send
    the upload to the validator."""
    log.info(u'[1@None] Fetching manifest: %s.' % url)
    upload = FileUpload.objects.get(pk=upload_pk)
    content = _fetch_manifest(url, upload)
    if content is None:
        # _fetch_manifest already recorded the failure on `upload`.
        return
    upload.add_file([content], url, len(content))
    # Send the upload to the validator.
    validator(upload.pk, url=url)
@task
def region_email(ids, region_ids, **kw):
    """Email the developers of the given apps about newly added regions."""
    regions = [REGIONS_CHOICES_ID_DICT[id] for id in region_ids]
    # NOTE(review): this rebinds BOTH names to the sorted list of unicode
    # names -- from here on `regions` holds strings, not region objects.
    region_names = regions = sorted([unicode(r.name) for r in regions])
    # Format the region names with commas and fanciness.
    if len(regions) == 2:
        suffix = 'two'
        region_names = ' '.join([regions[0], _(u'and'), regions[1]])
    else:
        if len(regions) == 1:
            suffix = 'one'
        elif len(regions) > 2:
            suffix = 'many'
            # Prefix "and" onto the last name before comma-joining.
            region_names[-1] = _(u'and') + ' ' + region_names[-1]
        region_names = ', '.join(region_names)
    log.info('[%s@%s] Emailing devs about new region(s): %s.' %
             (len(ids), region_email.rate_limit, region_names))
    for id_ in ids:
        log.info('[Webapp:%s] Emailing devs about new region(s): %s.' %
                 (id_, region_names))
        product = Webapp.objects.get(id=id_)
        # De-duplicated author addresses.
        to = set(product.authors.values_list('email', flat=True))
        if len(regions) == 1:
            subject = _(
                u'{region} region added to the Firefox Marketplace').format(
                    region=regions[0])
        else:
            subject = _(u'New regions added to the Firefox Marketplace')
        dev_url = absolutify(product.get_dev_url('edit'),
                             settings.SITE_URL) + '#details'
        context = {'app': product.name,
                   'regions': region_names,
                   'dev_url': dev_url}
        # Template chosen by `suffix` ('one'/'two'/'many').
        send_mail_jinja('%s: %s' % (product.name, subject),
                        'developers/emails/new_regions_%s.ltxt' % suffix,
                        context, recipient_list=to,
                        perm_setting='app_regions')
@task
@use_master
def region_exclude(ids, region_ids, **kw):
    """Mark the given regions as excluded for each of the given add-ons."""
    regions = [REGIONS_CHOICES_ID_DICT[id] for id in region_ids]
    region_names = ', '.join(sorted(unicode(r.name) for r in regions))
    log.info('[%s@%s] Excluding new region(s): %s.' %
             (len(ids), region_exclude.rate_limit, region_names))
    for addon_id in ids:
        log.info('[Webapp:%s] Excluding region(s): %s.' %
                 (addon_id, region_names))
        for region in regions:
            # Already excluded? Swag!
            AddonExcludedRegion.objects.get_or_create(addon_id=addon_id,
                                                      region=region.id)
@task
def save_test_plan(f, filename, addon):
    """Write the uploaded test plan `f` under the add-on's directory."""
    destination_dir = os.path.join(settings.ADDONS_PATH, str(addon.id))
    destination_path = os.path.join(destination_dir, filename)
    with open(destination_path, 'wb+') as out:
        for chunk in f.chunks():
            out.write(chunk)
@task
@use_master
def refresh_iarc_ratings(ids, **kw):
    """
    Refresh old or corrupt IARC ratings by re-fetching the certificate.
    """
    for app in Webapp.objects.filter(id__in=ids):
        data = iarc_get_app_info(app)
        rows = data.get('rows')
        if not rows:
            continue
        row = rows[0]
        # We found a rating, so store the id and code for future use.
        app.set_descriptors(row.get('descriptors', []))
        app.set_interactives(row.get('interactives', []))
        app.set_content_ratings(row.get('ratings', {}))
|
|
from __future__ import absolute_import
import binascii
import contextlib
import datetime
import errno
import io
import logging
import os
import struct
import types
import weakref
from collections import namedtuple

from . import sched_basic as sched
def use_gevent():
    """Switch the module-level `sched` binding to the gevent implementation.
    Call before any channel/server code captures members of `sched`."""
    global sched
    from . import sched_gevent as sched
def default_wrapper(func):
    """Wrap a socket read callable, mapping transient conditions to None
    (meaning "retry") and terminal conditions to b"" (meaning EOF)."""
    def wrap(*args, **kwargs):
        socket = sched.socket
        try:
            return func(*args, **kwargs)
        except socket.timeout:
            # Poll timeout: caller should retry.
            return None
        except socket.error as e:
            # BUGFIX: use the errno module directly; `os.errno` only ever
            # worked because os happened to import errno, and it was
            # removed from the os namespace in Python 3.10.
            if e.errno in (errno.EAGAIN, errno.ECONNRESET, errno.EBADF):
                return b""
            elif e.errno in (errno.EINTR,):
                return None
            raise
        except KeyboardInterrupt:
            return b""
    return wrap
class ReadWrapper(object):
    """Decorates a channel's reads with `read_wrap`, and short-circuits to
    EOF (b"") once the channel is closed. Also usable as a context manager
    that temporarily installs itself as the channel's read wrapper."""

    def __init__(self, channel, read_wrap):
        self.channel = channel
        self.read_wrap = read_wrap

    def __enter__(self):
        # Remember whichever wrapper is active so __exit__ can restore it.
        self.installed_wrapper = self.channel.read_wrap
        self.channel.read_wrap = self
        return self.channel

    def __exit__(self, *exc_info):
        self.channel.read_wrap = self.installed_wrapper

    def __call__(self, func):
        def guarded(*args, **kwargs):
            # A closed channel always reads EOF.
            if self.channel.closed:
                return b""
            return self.read_wrap(func)(*args, **kwargs)
        return guarded
class Channel(object):
    '''
    Openflow abstract connection class
    This is not only for TCP but also for UDP.
    This is the reason that the name is not "Connection" but "Channel".
    You can subclass this to have instance members, of which lifecycle is
    the same with channel.
    '''
    def __init__(self, *args, **kwargs):
        self._socket = kwargs.pop("socket", None) # dedicated socket
        self._sendto = kwargs.pop("sendto", None) # only if channel prefers sendto()
        # `reader` overrides socket.recv as the read source (used for
        # datagram channels that are fed one buffered message at a time).
        self.reader = kwargs.pop("reader", None)
        # Callable applied around every read; see default_wrapper.
        self.read_wrap = kwargs.pop("read_wrap", default_wrapper)
        self.remote_address = kwargs.pop("remote_address", None)
        self.local_address = kwargs.pop("local_address", None)
        if self._socket:
            # Derive addresses from the socket when not supplied.
            if self.remote_address is None:
                self.remote_address = self._socket.getpeername()
            if self.local_address is None:
                self.local_address = self._socket.getsockname()
            # Short timeout keeps blocking reads responsive to shutdown.
            if hasattr(self._socket, "settimeout") and self._socket.gettimeout() == None:
                self._socket.settimeout(0.5)
    def attach(self, stream, **kwargs):
        # Late-bind a socket onto a channel created without one.
        self._socket = stream
        if hasattr(self._socket, "settimeout") and self._socket.gettimeout() == None:
            self._socket.settimeout(0.5)
        self.remote_address = stream.getpeername()
        self.local_address = stream.getsockname()
    @property
    def closed(self):
        # This is not self._socket.closed because in some use cases,
        # self._socket is not available, for example with gevent.server.DatagramServer
        return self.remote_address is None
    def close(self):
        if self._socket:
            self._socket.close()
        if self.remote_address is not None:
            # Clearing remote_address is what marks the channel closed.
            self.remote_address = None
    def send(self, message, **kwargs):
        if self._sendto:
            self._sendto(message, self.remote_address)
        elif self._socket:
            self._socket.send(message)
        else:
            raise ValueError("socket or sendto is required")
    def _recv(self, num):
        # Prefer the injected reader (datagram mode) over the raw socket.
        if self.reader:
            reader = self.reader
        else:
            reader = self._socket.recv
        return ReadWrapper(self, self.read_wrap)(reader)(num)
class Error(Exception):
    """Base class for errors raised by this openflow channel module."""
    pass
class ChannelClose(Error):
    """Raised by handlers to request an orderly close of the channel."""
    pass
class OpenflowBaseChannel(Channel):
    """Frames the byte stream into complete openflow messages using the
    length field of the 8-byte openflow header."""
    version = None # The negotiated version
    accept_versions = [4,] # defaults to openflow 1.3

    def __init__(self, *args, **kwargs):
        super(OpenflowBaseChannel, self).__init__(*args, **kwargs)
        self.buffer = b""

    def __iter__(self):
        """Yield complete messages until EOF."""
        while True:
            ret = self.recv()
            if ret:
                yield ret
            else:
                break

    def _fill(self, required_len):
        """Read from the transport until the buffer holds at least
        `required_len` bytes. Returns False on EOF, True otherwise.
        (Factored out of recv(), which previously duplicated this loop.)"""
        while len(self.buffer) < required_len:
            tmp = super(OpenflowBaseChannel, self)._recv(8192)
            if tmp is None:
                continue  # timeout/EINTR: just retry
            elif len(tmp) == 0:
                return False  # EOF
            self.buffer += tmp
        return True

    def recv(self):
        """Return one complete openflow message, or b"" on EOF."""
        # The header is 8 bytes; its third field is the total length.
        if not self._fill(8):
            return b""
        p = struct.unpack_from("!BBHI", self.buffer)
        required_len = p[2]
        if not self._fill(required_len):
            return b""
        ret = self.buffer[0:required_len]
        self.buffer = self.buffer[required_len:]
        return ret
class LoggingChannel(OpenflowBaseChannel):
    """Mixin that logs connect/close events and hexdumps every message
    sent or received."""
    channel_log_name = "channel"
    send_log_name = "send"
    recv_log_name = "recv"

    def __init__(self, *args, **kwargs):
        super(LoggingChannel, self).__init__(*args, **kwargs)
        logging.getLogger(self.channel_log_name).info("%s connect" % self)

    def send(self, message, **kwargs):
        hexdump = binascii.b2a_hex(message)
        logging.getLogger(self.send_log_name).info("%s %s" % (self, hexdump))
        return super(LoggingChannel, self).send(message, **kwargs)

    def recv(self):
        message = super(LoggingChannel, self).recv()
        if message: # ignore b"" and None
            hexdump = binascii.b2a_hex(message)
            logging.getLogger(self.recv_log_name).info("%s %s" % (self, hexdump))
        return message

    def close(self):
        # Guard clause: only log the first close.
        if self.closed:
            return
        super(LoggingChannel, self).close()
        logging.getLogger(self.channel_log_name).info("%s close" % self)
class OpenflowChannel(OpenflowBaseChannel):
    """Adds HELLO exchange / version negotiation on top of framing."""
    _start = None  # becomes True once our HELLO has been sent

    def attach(self, stream, **kwargs):
        # BUGFIX: the super() call previously named OpenflowBaseChannel,
        # which only resolved correctly because that class defines no
        # attach(); naming this class walks the MRO from the right place.
        super(OpenflowChannel, self).attach(stream, **kwargs)
        if kwargs.get("autostart", True):
            self.start()

    def start(self):
        """Send our HELLO exactly once."""
        if self._start is None:
            self.send(hello(self.accept_versions))
            self._start = True

    def recv(self):
        message = super(OpenflowChannel, self).recv()
        if message:
            (version, oftype, length, xid) = parse_ofp_header(message)
            if oftype==0: # HELLO
                accept_versions = ofp_version_normalize(self.accept_versions)
                if not accept_versions:
                    accept_versions = set([1,])
                # Intersect the peer's offer with ours and pick the highest.
                cross_versions = parse_hello(message) & accept_versions
                if cross_versions:
                    self.version = max(cross_versions)
                else:
                    # No common version: send an error (oftype 1) carrying
                    # a human-readable list, then tear the channel down.
                    ascii_txt = "Accept versions: %s" % ["- 1.0 1.1 1.2 1.3 1.4".split()[x] for x in list(accept_versions)]
                    self.send(struct.pack("!BBHIHH", max(accept_versions), 1,
                        struct.calcsize("!BBHIHH")+len(ascii_txt), hms_xid(),
                        0, 0) + ascii_txt)
                    raise ChannelClose(ascii_txt)
        return message
def parse_ofp_header(message):
    '''
    @return (version, oftype, message_len, xid)
    '''
    version, oftype, message_len, xid = struct.unpack_from("!BBHI", message)
    return (version, oftype, message_len, xid)
def ofp_header_only(oftype, version=1, xid=None):
    """Build an 8-byte openflow message that is all header, no body."""
    effective_xid = hms_xid() if xid is None else xid
    return struct.pack("!BBHI", version, oftype, 8, effective_xid)
def hms_xid():
    '''Xid looks readable datetime like format when logged as int.'''
    now = datetime.datetime.now()
    # hhmmss + microseconds/100 packed as digits.
    candidate = int("%02d%02d%02d%04d" % (
        now.hour, now.minute, now.second, now.microsecond/100))
    # Function attribute keeps the last value so xids strictly increase.
    previous = getattr(hms_xid, "dedup", None)
    if previous is not None and previous >= candidate:
        candidate = previous + 1
    hms_xid.dedup = candidate
    return candidate
def ofp_version_normalize(versions):
    """Normalize a collection of versions (wire ints or dotted floats like
    1.3) into a set of wire version ints; None yields the empty set."""
    if isinstance(versions, (list, tuple, set)):
        normalized = set()
        for version in versions:
            if isinstance(version, float):
                # Dotted release number -> wire version (1.3 -> 4).
                version = [1.0, 1.1, 1.2, 1.3, 1.4].index(version) + 1
            assert isinstance(version, int), "unknown version %s" % version
            normalized.add(version)
        return normalized
    elif versions is None:
        return set()
    assert False, "unknown versions %s" % versions
def hello(versions, **kwargs):
    """Build an OFPT_HELLO message offering `versions` (None means v1).

    For a highest version >= 4 (openflow 1.3+) a VERSIONBITMAP hello
    element is appended listing every offered version.
    """
    # BUGFIX: kwargs.get("xid", hms_xid()) evaluated hms_xid() eagerly,
    # bumping its dedup counter even when the caller supplied an xid.
    xid = kwargs.get("xid")
    if xid is None:
        xid = hms_xid()
    if versions:
        vset = ofp_version_normalize(versions)
    else:
        vset = set((1,))
    version = max(vset)
    if version < 4:
        # Pre-1.3: plain header, the version field carries the offer.
        return struct.pack("!BBHI", version, 0, 8, xid)
    else:
        # One 32-bit bitmap unit per block of 32 versions.
        units = [0,]*(1 + version//32)
        for v in vset:
            units[v//32] |= 1<<(v%32)
        versionbitmap_length = 4 + len(units)*4
        fmt = "!BBHIHH%dI%dx" % (len(units), 8*((len(units)-1)%2))
        return struct.pack(fmt, version, 0, struct.calcsize(fmt), xid, # HELLO
            1, versionbitmap_length, *units) # VERSIONBITMAP
def parse_hello(message):
    """Return the set of openflow versions offered by a HELLO message."""
    (version, oftype, length, xid) = parse_ofp_header(message)
    assert oftype==0 # HELLO
    versions = set()
    if length == 8:
        # Bare header: the header version field is the whole offer.
        versions.add(version)
    else:
        (subtype, sublength) = struct.unpack_from("!HH", message, offset=8)
        assert subtype == 1 # VERSIONBITMAP
        units = struct.unpack_from("!%dI" % (sublength/4 - 1), message, offset=12)
        # Each set bit s in unit idx offers version idx*32 + s.
        for idx, unit in enumerate(units):
            versions.update(idx*32 + s for s in range(32) if unit & (1<<s))
    return versions
class OpenflowServerChannel(OpenflowChannel):
    """Channel that pumps incoming messages through handle()."""

    def loop(self):
        # Drain messages until EOF; ChannelClose triggers an orderly close.
        try:
            while True:
                message = self.recv()
                if not message:
                    break
                self.handle_proxy(self.handle)(message, self)
        except ChannelClose:
            self.close()

    def handle_proxy(self, handle):
        # Hook point for mixins to wrap the handler; identity by default.
        return handle

    def handle(self, message, channel):
        logging.getLogger(__name__).warn("check MRO")
        pass
class AutoEchoChannel(OpenflowServerChannel):
    '''
    Steals ECHO_REQUEST messages and automatically sends the echo reply;
    everything else is passed through to the wrapped handler.
    '''
    def handle_proxy(self, handle):
        def intercept(message, channel):
            if not message:
                return
            (version, oftype, length, xid) = parse_ofp_header(message)
            if oftype == 2:  # ECHO_REQUEST: answer in place, same payload
                self.send(struct.pack("!BBHI", self.version, 3, length, xid) + message[8:])
            else:
                super(AutoEchoChannel, self).handle_proxy(handle)(message, channel)
        return intercept
class WeakCallbackCaller(object):
    """Mixin exposing `callback`: the object behind the weakref stored in
    `cbref`, or None when no reference is held (or the target is gone)."""

    @property
    def callback(self):
        ref = self.cbref
        if not ref:
            return None
        return ref()
class Barrier(WeakCallbackCaller):
    """Marker for an outstanding barrier request identified by `xid`,
    optionally holding a weak reference to its reply handler."""
    def __init__(self, xid, message_handler=None):
        self.xid = xid
        self.cbref = weakref.ref(message_handler) if message_handler else None
class Chunk(WeakCallbackCaller):
    """Marker for a run of queued messages sharing one reply handler,
    held via weak reference."""
    def __init__(self, message_handler):
        self.cbref = weakref.ref(message_handler) if message_handler else None
class ControllerChannel(OpenflowServerChannel, WeakCallbackCaller):
    """Controller-side channel that routes replies to per-send callbacks.

    Sends are sequenced into Chunk/Barrier markers (`self.seq`): messages
    sharing a callback form a Chunk, and barriers delimit chunks so replies
    can be matched back to the right handler in handle_proxy().
    """
    datapath = None   # datapath id, learned from FEATURES_REPLY
    auxiliary = None  # auxiliary connection id (openflow 1.3+)
    cbref = None      # weakref to the most recent callback (see callback)

    def __init__(self, *args, **kwargs):
        super(ControllerChannel, self).__init__(*args, **kwargs)
        self.seq_lock = sched.Lock()
        self.seq = []

    def send(self, message, **kwargs):
        with self.seq_lock:
            return self.locked_send(message, **kwargs)

    def locked_send(self, message, **kwargs):
        message_handler = kwargs.get("callback") # callable object
        if message_handler is None:
            pass
        else:
            assert isinstance(message_handler, object)
            assert callable(message_handler)
        (version, oftype, length, xid) = parse_ofp_header(message)
        if (oftype==18 and version==1) or (oftype==20 and version!=1): # OFPT_BARRIER_REQUEST
            self.seq.append(Barrier(xid, message_handler))
        elif self.seq:
            seq_last = self.seq[-1]
            if isinstance(seq_last, Chunk):
                if seq_last.callback != message_handler:
                    # Callback changed: inject a barrier to fence off the
                    # previous chunk, then open a new chunk.
                    bxid = hms_xid()
                    if self.version==1:
                        msg = ofp_header_only(18, version=1, xid=bxid) # OFPT_BARRIER_REQUEST=18 (v1.0)
                    else:
                        msg = ofp_header_only(20, version=self.version, xid=bxid) # OFPT_BARRIER_REQUEST=20 (v1.1--v1.4)
                    self.seq.append(Barrier(bxid))
                    self.seq.append(Chunk(message_handler))
                    super(ControllerChannel, self).send(msg)
            elif isinstance(seq_last, Barrier):
                self.seq.append(Chunk(message_handler))
            else:
                assert False, "seq element must be Chunk or Barrier"
        else:
            if self.callback != message_handler:
                self.seq.append(Chunk(message_handler))
        if message_handler:
            # BUGFIX: this previously assigned `self.cbfunc`, which nothing
            # reads; the `callback` property (WeakCallbackCaller) and the
            # class default above use `cbref`, so the fallback callback was
            # never actually installed.
            self.cbref = weakref.ref(message_handler)
        super(ControllerChannel, self).send(message)

    def recv(self):
        message = super(ControllerChannel, self).recv()
        if message:
            (version, oftype, length, xid) = parse_ofp_header(message)
            if oftype==6: # FEATURES_REPLY
                if self.version < 4:
                    (self.datapath,) = struct.unpack_from("!Q", message, offset=8) # v1.0--v1.2
                else:
                    (self.datapath,_1,_2,self.auxiliary) = struct.unpack_from("!QIBB", message, offset=8) # v1.3--v1.4
        return message

    def handle_proxy(self, handle):
        def intercept(message, channel):
            (version, oftype, length, xid) = parse_ofp_header(message)
            if hasattr(self, "handle_async") and oftype in (10,11,12):
                # bypass method call for async message
                return super(ControllerChannel, self).handle_proxy(self.handle_async)(message, channel)
            with self.seq_lock:
                if self.seq:
                    if (oftype==19 and version==1) or (oftype==21 and version!=1): # is barrier
                        # A barrier reply retires everything up to and
                        # including its Barrier marker.
                        chunk_drop = False
                        for e in self.seq:
                            if isinstance(e, Barrier):
                                if e.xid == xid:
                                    self.seq = self.seq[self.seq.index(e)+1:]
                                    if e.callback:
                                        return e.callback(message, self)
                                    return True
                                else:
                                    assert False, "missing barrier(xid=%x) before barrier(xid=%x)" % (e.xid, xid)
                            elif isinstance(e, Chunk):
                                assert chunk_drop==False, "dropping multiple chunks at a time"
                                chunk_drop = True
                        assert False, "got unknown barrier xid=%x" % xid
                    else:
                        # Non-barrier reply belongs to the oldest chunk.
                        e = self.seq[0]
                        if isinstance(e, Chunk):
                            if e.callback:
                                return e.callback(message, self)
            if self.callback:
                return self.callback(message, self)
            else:
                return super(ControllerChannel, self).handle_proxy(handle)(message, channel)
            logging.getLogger(__name__).warn("No callback found for handling message %s" % binascii.b2a_hex(message))
        return intercept
class RateLimit(object):
    """Runs spawned tasks with at most `size` running concurrently."""

    def __init__(self, size):
        self.size = size
        self.cold_lock = sched.Lock()
        self.cold = []  # queued (func, args, kwargs) tuples
        self.loop_lock = sched.Lock()

    def spawn(self, func, *args, **kwargs):
        """Queue a task and kick the drain loop."""
        with self.cold_lock:
            self.cold.append((func, args, kwargs))
        sched.spawn(self.loop)

    def loop(self):
        # loop_lock means at most one drain loop runs; extra spawns find
        # the queue empty and exit immediately.
        with self.loop_lock:
            while len(self.cold) > 0:
                hot_lock = sched.Lock()
                hot = []       # tasks currently running
                children = {}  # id(task) -> job handle
                while len(hot) < self.size and len(self.cold) > 0:
                    task = None
                    with self.cold_lock:
                        task = self.cold.pop(0)
                    if task:
                        # BUGFIX: bind `task` as a default argument -- the
                        # original closure read the loop variables late, so
                        # a proxy scheduled on one iteration could run the
                        # task popped on a later one.
                        def proxy(task=task):
                            (func, args, kwargs) = task
                            func(*args, **kwargs)
                            with hot_lock:
                                hot.remove(task)
                        hot.append(task)
                        children[id(task)] = sched.spawn(proxy)
                for task_id, job in tuple(children.items()):
                    running = False
                    with hot_lock:
                        if task_id in [id(task) for task in hot]:
                            running = True
                    if running:
                        job.join(0.5)
                    else:
                        # BUGFIX: was `chilren.pop(task)` -- a NameError
                        # typo and the wrong key; the dict is keyed by
                        # id(task).
                        children.pop(task_id)
                    break
class ParallelChannel(OpenflowServerChannel):
    # mixin for parent channel
    socket_dir = None  # directory for helper unix sockets (see socket_path)
    async_rate = 0     # >0 routes async messages through a rate-limited pool
    def __init__(self, *args, **kwargs):
        super(ParallelChannel, self).__init__(*args, **kwargs)
        self.close_lock = sched.Lock()
        self.async_pool = RateLimit(self.async_rate)
    def close(self):
        # Serialize close() against concurrently running handlers.
        with self.close_lock:
            super(ParallelChannel, self).close()
    def handle_proxy(self, handle):
        def intercept(message, channel):
            def proxy(message, channel):
                # Run the real handler; any error tears the channel down.
                try:
                    handle(message, channel)
                except ChannelClose:
                    logging.getLogger(__name__).info("closing", exc_info=True)
                    channel.close()
                except:
                    logging.getLogger(__name__).error("handle error", exc_info=True)
                    channel.close()
            rated_call = False
            if self.async_rate:
                (version, oftype, length, xid) = parse_ofp_header(message)
                # Types 10,11,12 are the ones ControllerChannel also treats
                # as async (see its handle_async bypass).
                if oftype in (10, 11, 12):
                    rated_call = True
            if rated_call:
                # Throttle async floods through the rate-limited pool.
                self.async_pool.spawn(proxy, message, channel)
            else:
                sched.spawn(proxy, message, channel)
        return super(ParallelChannel, self).handle_proxy(intercept)
    def socket_path(self, path):
        # Resolve helper socket paths relative to socket_dir when set.
        if self.socket_dir:
            path = os.path.join(self.socket_dir, path)
        return os.path.abspath(path)
    def helper_path(self, suffix):
        # Helper sockets are renamed to include the datapath id once it is
        # known. NOTE(review): self.datapath is provided by a sibling mixin
        # (e.g. ControllerChannel) -- confirm in the composed class.
        old = self.socket_path("unknown-%x.%s" % (id(self), suffix))
        if self.datapath:
            new = self.socket_path("%x-%x.%s" % (self.datapath, id(self), suffix))
            try:
                os.rename(old, new)
            except OSError:
                pass
            return new
        return old
    def override_required(self, *args, **kwargs):
        raise Error("Concrete MixIn required")
def bound_socket(info, socktype):
    """Return a bound socket described by `info`: an existing socket is
    passed through, a (host, port, ...) tuple/list is resolved via
    getaddrinfo, and a string is treated as a unix domain socket path."""
    socket = sched.socket
    if isinstance(info, socket.socket):
        # Already a socket: assume the caller bound it.
        return info
    elif isinstance(info, (tuple, list)):
        candidates = [o for o in socket.getaddrinfo(*info)
                      if o[1] in (socktype, 0)]
        (family, socktype, proto, canonname, sockaddr) = candidates[0]
        s = socket.socket(family, socktype)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(sockaddr)
        return s
    elif isinstance(info, str):
        # Path string: unix domain socket.
        s = socket.socket(socket.AF_UNIX, socktype)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(info)
        return s
    else:
        raise ValueError("unexpected %s" % info)
def stream_socket(info):
    """Return a bound SOCK_STREAM socket for `info` (see bound_socket)."""
    return bound_socket(info, sched.socket.SOCK_STREAM)
def dgram_socket(info):
    """Return a bound SOCK_DGRAM socket for `info` (see bound_socket)."""
    return bound_socket(info, sched.socket.SOCK_DGRAM)
class StreamServer(object):
    """Accept loop for stream (TCP/unix) sockets; spawns one channel of
    `channel_cls` per accepted client."""
    channel_cls = None  # must be provided by subclass / type() factory
    def __init__(self, bound_sock, **kwargs):
        self.accepting = False
        self.sock = stream_socket(bound_sock)
        self.channels_lock = sched.Lock()
        self.channels = set()
        self.server_address = self.sock.getsockname()
    def start(self):
        self.accepting = True
        sock = self.sock
        # Timeout keeps accept() polling so stop() takes effect promptly.
        sock.settimeout(0.5)
        sock.listen(10)
        sched.spawn(self.run)
    def run(self):
        try:
            while self.accepting:
                try:
                    s = self.sock.accept()
                except sched.socket.timeout:
                    continue
                ch = self.channel_cls(socket=s[0], remote_address=s[1], read_wrap=self.read_wrap)
                ch.start()
                sched.spawn(self._loop_runner, ch)
        finally:
            self.sock.close()
    def _loop_runner(self, ch):
        # Register the channel so stop() can close it, pump it, unregister.
        with self.channels_lock:
            self.channels.add(ch)
        ch.loop()
        ch.close()
        with self.channels_lock:
            self.channels.remove(ch)
    def read_wrap(self, func):
        def wrap(*args, **kwargs):
            # Force EOF on all child channels once the server stops.
            if self.accepting==False:
                return b""
            return default_wrapper(func)(*args, **kwargs)
        return wrap
    def stop(self):
        self.accepting = False
        for ch in list(self.channels):
            ch.close()
class DgramServer(object):
    """Datagram (UDP/unix) server: one channel per remote address, with a
    per-remote lock so each peer's datagrams are handled in order."""
    channel_cls = None  # must be provided by subclass / type() factory

    def __init__(self, bound_sock):
        self.accepting = False
        self.sock = dgram_socket(bound_sock)
        self.remotes_lock = sched.Lock()
        self.remotes = {}       # remote_address -> channel
        self.remote_locks = {}  # remote_address -> per-peer lock

    def start(self):
        self.accepting = True
        sched.spawn(self.run)

    def run(self):
        sock = self.sock
        while self.accepting:
            try:
                # BUGFIX: this read must be recvfrom() -- recv() takes a
                # buffer size and returns only the data, never the
                # (data, address) pair this unpacking expects.
                data, remote_address = sock.recvfrom(65536)
            except sched.socket.timeout:
                continue
            with self.remotes_lock:
                if remote_address in self.remotes:
                    ch = self.remotes[remote_address]
                    lock = self.remote_locks[remote_address]
                else:
                    # First datagram from this peer: create its channel.
                    ch = self.channel_cls(sendto=sock.sendto, remote_address=remote_address, local_address=sock.getsockname())
                    ch.start()
                    self.remotes[remote_address] = ch
                    lock = sched.Lock()
                    self.remote_locks[remote_address] = lock
            sched.spawn(self.locked_loop, ch, lock, data)
        sock.close()

    def locked_loop(self, ch, lock, data):
        # Feed this single datagram to the channel as its read source.
        with lock:
            # BUGFIX: StringIO was referenced but never imported; BytesIO
            # matches the bytes-oriented reads done by Channel._recv.
            ch.reader = io.BytesIO(data).read
            ch.loop()

    def stop(self):
        self.accepting = False
class ParentChannel(ControllerChannel, ParallelChannel):
    """Switch-facing channel that can expose helper unix sockets: "jackin"
    (proxy commands through this channel) and "monitor" (observe traffic)."""
    jackin = False
    monitor = False
    jackin_shutdown = None   # stop() of the jackin helper server, once up
    monitor_shutdown = None  # stop() of the monitor helper server, once up
    monitors = set()         # channels of attached monitor clients
    def close(self):
        # Tear down helper servers and their socket files before closing.
        if self.jackin_shutdown:
            self.jackin_shutdown()
            try:
                os.remove(self.helper_path("jackin"))
            except OSError:
                pass
        if self.monitor_shutdown:
            self.monitor_shutdown()
            try:
                os.remove(self.helper_path("monitor"))
            except OSError:
                pass
        super(ParentChannel, self).close()
    def recv(self):
        message = super(ParentChannel, self).recv()
        if message:
            (version, oftype, length, xid) = parse_ofp_header(message)
            if oftype==0:
                # HELLO: negotiation done, bring up the helper servers.
                if self.jackin:
                    serv, addr = self.jackin_server()
                    self.jackin_shutdown = serv.stop
                    serv.start() # start after assignment especially for pthread
                if self.monitor:
                    serv, addr = self.monitor_server()
                    self.monitor_shutdown = serv.stop
                    self.monitors = serv.channels
                    serv.start() # start after assignment especially for pthread
            else:
                if oftype==6: # FEATURES_REPLY
                    # datapath id is known now; helper_path() renames the
                    # socket files to include it.
                    if self.jackin:
                        self.helper_path("jackin")
                    if self.monitor:
                        self.helper_path("monitor")
                # Mirror every non-HELLO message to attached monitors.
                for ch in self.monitors:
                    ch.send(message)
        return message
    def jackin_server(self):
        # Dynamically built StreamServer whose child channels proxy to us.
        path = self.helper_path("jackin")
        serv = type("JackinServer", (StreamServer,), dict(
            channel_cls = type("JackinCChannel",(JackinChildChannel, AutoEchoChannel, LoggingChannel),{
                "accept_versions":[self.version,],
                "parent": self })))(path)
        return serv, path
    def monitor_server(self):
        # Like jackin_server, but children only observe (ChildChannel).
        path = self.helper_path("monitor")
        serv = type("MonitorServer", (StreamServer,), dict(
            channel_cls = type("MonitorCChannel",(ChildChannel, AutoEchoChannel, LoggingChannel),{
                "accept_versions":[self.version,],
                "parent": self })))(path)
        return serv, path
    def temp_server(self):
        # Ephemeral jackin-style server on a random loopback TCP port.
        s = sched.socket.socket(sched.socket.AF_INET, sched.socket.SOCK_STREAM)
        s.setsockopt(sched.socket.SOL_SOCKET, sched.socket.SO_REUSEADDR, 1)
        s.bind(("127.0.0.1", 0))
        serv = type("TempServer", (StreamServer,), dict(
            channel_cls = type("TempCChannel",(JackinChildChannel, AutoEchoChannel, LoggingChannel),{
                "accept_versions":[self.version,],
                "parent": self })))(s)
        return serv.start, serv.stop, s.getsockname()
class JackinChannel(ParentChannel):
    '''
    JackinChannel opens unix domain sockets for openflow operators
    (jackin programs), such as ovs-ofctl.
    '''
    jackin = True
class MonitorChannel(ParentChannel):
    '''
    MonitorChannel opens unix domain sockets for openflow message
    listeners (monitors).
    '''
    monitor = True
class ChildChannel(OpenflowChannel):
    """Channel for a helper (jackin/monitor) client connection."""
    parent = None # must be set
    def send(self, message, **kwargs):
        # Plain pass-through; kept as an explicit hook point for subclasses.
        super(ChildChannel, self).send(message, **kwargs)
    def handle(self, message, channel):
        pass # ignore all messages
class WeakCallback(object):
    """Callback object that forwards any message it receives to the given
    channel's send(); designed to be held via weak reference."""

    def __init__(self, channel):
        self.channel = channel

    def __call__(self, message, upstream_channel):
        # The upstream channel is ignored; only the payload is forwarded.
        self.channel.send(message)
class JackinChildChannel(ChildChannel):
    """Child channel that forwards its client's messages to the parent
    switch connection, routing replies back via a weak callback."""

    def __init__(self, *args, **kwargs):
        super(JackinChildChannel, self).__init__(*args, **kwargs)
        self.cbfunc = WeakCallback(self)

    def handle(self, message, channel):
        oftype = parse_ofp_header(message)[1]
        # Everything except HELLO goes upstream through the parent.
        if oftype != 0:
            self.parent.send(message, callback=self.cbfunc)

    def close(self):
        self.cbfunc = None  # drop the ref so the WeakCallback can be collected
        super(JackinChildChannel, self).close()
class SyncTracker(object):
    """Bookkeeping for one in-flight synchronous request: its xid, the
    event signalled on completion, and the accumulated reply data."""

    def __init__(self, xid, ev):
        self.xid = xid
        self.ev = ev
        self.data = None
class SyncChannel(ParallelChannel):
    '''
    SyncChannel adds synchronous methods.
    '''
    def __init__(self, *args, **kwargs):
        super(SyncChannel, self).__init__(*args, **kwargs)
        self.syncs = {}  # xid -> SyncTracker for in-flight requests
        self.syncs_lock = sched.Lock()
    def recv(self):
        message = super(SyncChannel, self).recv()
        if message:
            (version, oftype, length, xid) = parse_ofp_header(message)
            if xid in self.syncs:
                x = self.syncs[xid]
                if (version==1 and oftype==17) or (version!=1 and oftype==19): # multipart
                    # Multipart replies are concatenated until the MORE
                    # flag (bit 0 of the flags field at offset 10) clears.
                    with self.syncs_lock:
                        if x.data is None:
                            x.data = message
                        else:
                            x.data += message
                        if not struct.unpack_from("!H", message, offset=10)[0] & 1:
                            x.ev.set()
                else:
                    x.data = message
                    x.ev.set()
        return message
    def send_sync(self, message, **kwargs):
        # Send and block until a reply with the same xid arrives.
        # NOTE(review): self.timeout is not set by this class; it appears
        # to come from a sibling mixin (PortMonitorChannel sets one) --
        # confirm in the composed class.
        (version, oftype, length, xid) = parse_ofp_header(message)
        x = SyncTracker(xid, sched.Event())
        with self.syncs_lock:
            self.syncs[x.xid] = x
        self.send(message, **kwargs)
        x.ev.wait(timeout=self.timeout)
        with self.syncs_lock:
            self.syncs.pop(x.xid)
        return x.data
    def _sync_simple(self, req_oftype, res_oftype):
        # Round-trip a body-less request and sanity-check the reply type.
        # NOTE(review): OpenflowError is not defined in this chunk --
        # confirm it exists elsewhere in the module.
        message = self.send_sync(ofp_header_only(req_oftype, version=self.version))
        if message:
            (version, oftype, length, xid) = parse_ofp_header(message)
            if oftype != res_oftype:
                raise OpenflowError(message)
        else:
            raise ChannelClose("no response")
        return message
    def close(self):
        # Unblock anything waiting in send_sync before closing.
        if self.syncs is not None:
            for k,x in self.syncs.items():
                x.data = ""
                x.ev.set()
        super(SyncChannel, self).close()
    def echo(self):
        return self._sync_simple(2, 3)
    def feature(self):
        return self._sync_simple(5, 6)
    def get_config(self):
        return self._sync_simple(7, 8)
    def barrier(self):
        if self.version==1:
            return self._sync_simple(18, 19) # OFPT_BARRIER_REQUEST=18 (v1.0)
        else:
            return self._sync_simple(20, 21) # OFPT_BARRIER_REQUEST=20 (v1.1, v1.2, v1.3)
    def single(self, message, **kwargs):
        return self.multi((message,), **kwargs).pop()
    def multi(self, messages, **kwargs):
        # Send several requests, then barrier so we know they completed;
        # collect each reply (or None) in request order.
        prepared = []
        for message in messages:
            (version, oftype, length, xid) = parse_ofp_header(message)
            x = SyncTracker(xid, sched.Event())
            with self.syncs_lock:
                self.syncs[x.xid] = x
            self.send(message, **kwargs)
            prepared.append(xid)
        self.barrier()
        results = []
        for xid in prepared:
            if xid in self.syncs:
                results.append(self.syncs[xid].data)
                with self.syncs_lock:
                    self.syncs.pop(xid)
            else:
                results.append(None)
        return results
class PortMonitorChannel(ControllerChannel, ParallelChannel):
'''
PortMonitorChannel exposes `ports` property, which will be synced with the openflow switch.
'''
    def __init__(self, *args, **kwargs):
        super(PortMonitorChannel, self).__init__(*args, **kwargs)
        self.timeout = kwargs.get("timeout", 2.0)
        self._ports_lock = sched.Lock()
        self._ports = []  # current list of ofp_port namedtuples
        self._ports_init = sched.Event()  # set when the first full port list arrives
        self._port_monitor_multi = dict()  # xid -> ports accumulating from multipart replies
        # NOTE(review): these look like weak registries of attach/detach
        # observers used outside this chunk -- confirm against callers.
        self._attach = weakref.WeakValueDictionary()
        self._detach = weakref.WeakValueDictionary()
def recv(self):
message = super(PortMonitorChannel, self).recv()
if message:
ofp_port = "!H6s16sIIIIII" # ofp_port v1.0
ofp_port_names = '''port_no hw_addr name
config state
curr advertised supported peer'''
if self.version in (2,3,4):
ofp_port = "!I4x6s2x16sIIIIIIII"
ofp_port_names = '''port_no hw_addr name
config state
curr advertised supported peer
curr_speed max_speed'''
elif self.version == 5:
ofp_port = "!IH2x6s2x6sII"
ofp_port_names = '''port_no length hw_addr name
config state'''
(version, oftype, length, xid) = parse_ofp_header(message)
if xid in self._port_monitor_multi and oftype==19: # MULTIPART_REPLY
assert self.version in (4,5)
(mptype, flags) = struct.unpack_from("!HH4x", message, offset=8)
if mptype==13: # OFPMP_PORT_DESC
ports = self._port_monitor_multi[xid]
offset = 16
while offset < length:
port = list(struct.unpack_from(ofp_port, message, offset=offset))
port[2] = port[2].partition('\0')[0]
ports.append(namedtuple("ofp_port", ofp_port_names)(*port))
offset += struct.calcsize(ofp_port)
if not flags&1:
with self._ports_lock:
self._ports_replace(ports)
self._ports_init.set()
del(self._port_monitor_multi[xid])
elif oftype==6 and self.version != 4: # FEATURES_REPLY
fmt = "!BBHIQIB3x"
assert struct.calcsize(fmt) % 8 == 0
offset = struct.calcsize(fmt+"II")
ports = []
while offset < length:
port = list(struct.unpack_from(ofp_port, message, offset=offset))
port[2] = port[2].partition('\0')[0]
ports.append(namedtuple("ofp_port", ofp_port_names)(*port))
offset += struct.calcsize(ofp_port)
with self._ports_lock:
self._ports_replace(ports)
self._ports_init.set()
elif oftype==12: # PORT_STATUS
p = struct.unpack_from("!B7x"+ofp_port[1:], message, offset=8)
reason = p[0]
port = list(p[1:])
port[2] = port[2].partition('\0')[0]
self._update_port(reason, namedtuple("ofp_port", ofp_port_names)(*port))
return message
def _update_port(self, reason, port):
with self._ports_lock:
hit = [x for x in self._ports if x[0]==port[0]] # check with port_no(0)
if reason==0: # ADD
if self._ports_init.is_set():
assert not hit
self._ports.append(port)
s = self._attach.get(port.port_no, self._attach.get(port.name))
if s:
s.set()
self._attach.pop(s)
elif reason==1: # DELETE
if self._ports_init.is_set():
assert hit
if hit:
assert len(hit) == 1
self._ports.remove(hit.pop())
s = self._detach.get(port.port_no, self._detach.get(port.name))
if s:
s.set()
self._detach.pop(s)
elif reason==2: # MODIFY
if self._ports_init.is_set():
assert hit
if hit:
assert len(hit) == 1
old = hit.pop()
idx = self._ports.index(old)
self._ports.remove(old)
self._ports.insert(idx, port)
else:
self._ports.append(port)
else:
assert False, "unknown reason %d" % reason
@property
def ports(self):
if not self._ports_init.is_set():
if self.version in (4, 5):
xid = hms_xid()
with self._ports_lock:
self._port_monitor_multi[xid] = []
self.send(struct.pack("!BBHIHH4x", self.version,
18, # MULTIPART_REQUEST (v1.3, v1.4)
16, # struct.calcsize(fmt)==16
xid,
13, # PORT_DESC
0, # no REQ_MORE
))
else:
self.send(ofp_header_only(5, version=self.version)) # FEATURES_REQUEST
self._ports_init.wait(timeout=self.timeout)
return tuple(self._ports)
def _ports_replace(self, new_ports):
old_ports = self._ports
self._ports = new_ports
old_nums = set([p.port_no for p in old_ports])
old_names = set([p.name for p in old_ports])
new_nums = set([p.port_no for p in new_ports])
new_names = set([p.name for p in new_ports])
for port in old_ports:
if port.port_no in old_nums-new_nums:
with self._ports_lock:
s = self._detach.get(port.port_no)
if s:
s.set()
self._detach.pop(s)
if port.name in old_names-new_names:
with self._ports_lock:
s = self._detach.get(port.name)
if s:
s.set()
self._detach.pop(s)
for port in new_ports:
if port.port_no in new_nums-old_nums:
with self._ports_lock:
s = self._attach.get(port.port_no)
if s:
s.set()
self._attach.pop(s)
if port.name in new_names-old_names:
with self._ports_lock:
s = self._attach.get(port.name)
if s:
s.set()
self._attach.pop(s)
def close(self):
self._ports_init.set() # unlock the event
super(PortMonitorChannel, self).close()
def wait_attach(self, num_or_name, timeout=10):
for port in self._ports:
if port.port_no == num_or_name or port.name == num_or_name:
return port
with self._ports_lock:
if num_or_name not in self._attach:
result = self._attach[num_or_name] = sched.Event()
else:
result = self._attach[num_or_name]
if result.wait(timeout=timeout):
for port in self._ports:
if port.port_no == num_or_name or port.name == num_or_name:
return port
def wait_detach(self, num_or_name, timeout=10):
hit = False
for port in self._ports:
if port.port_no == num_or_name or port.name == num_or_name:
hit = True
if not hit:
return num_or_name # already detached
with self._ports_lock:
if num_or_name not in self._detach:
result = self._detach[num_or_name] = sched.Event()
else:
result = self._detach[num_or_name]
if result.wait(timeout=timeout):
return num_or_name
|
|
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from .mininode import *
from .blockstore import BlockStore, TxStore
from .util import p2p_port
'''
This is a tool for comparing two or more ulmds to each other
using a script provided.
To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager. get_tests() should be a python
generator that returns TestInstance objects. See below for definition.
'''
# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore
global mininode_lock
class RejectResult(object):
    '''
    Expected rejection outcome: a reject code plus an optional reason
    prefix that the node's actual reject reason must start with.
    '''

    def __init__(self, code, reason=b''):
        self.code = code
        self.reason = reason

    def match(self, other):
        '''True when `other` carries the same code and its reason begins
        with our (possibly empty) reason prefix.'''
        same_code = (self.code == other.code)
        return same_code and other.reason.startswith(self.reason)

    def __repr__(self):
        return '%i:%s' % (self.code,self.reason or '*')
class TestNode(NodeConnCB):
    '''P2P callback object used by comptool.

    Serves headers/blocks/transactions out of the shared BlockStore and
    TxStore, and records what the node announced (inv, tip, rejects) and
    requested (getdata) so TestManager can compare nodes afterwards.
    '''
    def __init__(self, block_store, tx_store):
        NodeConnCB.__init__(self)
        self.conn = None
        self.bestblockhash = None  # last chain tip announced via headers
        self.block_store = block_store
        self.block_request_map = {}  # block hash -> True once requested
        self.tx_store = tx_store
        self.tx_request_map = {}  # tx hash -> True once requested
        # hash -> RejectResult observed from the node's reject messages
        self.block_reject_map = {}
        self.tx_reject_map = {}
        # When the pingmap is non-empty we're waiting for
        # a response
        self.pingMap = {}
        self.lastInv = []
        self.closed = False
    def on_close(self, conn):
        self.closed = True
    def add_connection(self, conn):
        self.conn = conn
    def on_headers(self, conn, message):
        # Track the tip: the last header in a headers message.
        if len(message.headers) > 0:
            best_header = message.headers[-1]
            best_header.calc_sha256()
            self.bestblockhash = best_header.sha256
    def on_getheaders(self, conn, message):
        response = self.block_store.headers_for(message.locator, message.hashstop)
        if response is not None:
            conn.send_message(response)
    def on_getdata(self, conn, message):
        [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
        [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
        # Record what was asked for (inv type 1 = tx, 2 = block).
        for i in message.inv:
            if i.type == 1:
                self.tx_request_map[i.hash] = True
            elif i.type == 2:
                self.block_request_map[i.hash] = True
    def on_inv(self, conn, message):
        self.lastInv = [x.hash for x in message.inv]
    def on_pong(self, conn, message):
        try:
            del self.pingMap[message.nonce]
        except KeyError:
            raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
    def on_reject(self, conn, message):
        # Remember why the node rejected a tx/block, keyed by its hash.
        if message.message == b'tx':
            self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
        if message.message == b'block':
            self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
    def send_inv(self, obj):
        mtype = 2 if isinstance(obj, CBlock) else 1
        self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
    def send_getheaders(self):
        # We ask for headers from their last tip.
        m = msg_getheaders()
        m.locator = self.block_store.get_locator(self.bestblockhash)
        self.conn.send_message(m)
    # This assumes BIP31
    def send_ping(self, nonce):
        self.pingMap[nonce] = True
        self.conn.send_message(msg_ping(nonce))
    def received_ping_response(self, nonce):
        return nonce not in self.pingMap
    def send_mempool(self):
        self.lastInv = []
        self.conn.send_message(msg_mempool())
# TestInstance:
#
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
# "blocks_and_transactions" should be an array of
# [obj, True/False/None, hash/None]:
# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
# - the third value is the hash to test the tip against (if None or omitted,
# use the hash of the block)
# - NOTE: if a block header, no test is performed; instead the header is
# just added to the block_store. This is to facilitate block delivery
# when communicating with headers-first clients (when withholding an
# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
    '''One generated test case: the objects to deliver plus the two
    sync-policy flags described in the comment block above this class.'''

    def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
        # Fall back to a fresh list when no (or an empty) object list is given.
        self.blocks_and_transactions = [] if not objects else objects
        self.sync_every_block = sync_every_block
        self.sync_every_tx = sync_every_tx
class TestManager(object):
    '''Runs the test instances produced by a generator against every
    connected node, delivering blocks/transactions and comparing each
    node's resulting tip and mempool with the expected outcome.'''
    def __init__(self, testgen, datadir):
        self.test_generator = testgen
        self.connections = []
        self.test_nodes = []
        self.block_store = BlockStore(datadir)
        self.tx_store = TxStore(datadir)
        self.ping_counter = 1  # nonce for ping-based synchronization
    def add_all_connections(self, nodes):
        '''Open one p2p connection (with its own TestNode callback) per node.'''
        for i in range(len(nodes)):
            # Create a p2p connection to each node
            test_node = TestNode(self.block_store, self.tx_store)
            self.test_nodes.append(test_node)
            self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
            # Make sure the TestNode (callback class) has a reference to its
            # associated NodeConn
            test_node.add_connection(self.connections[-1])
    def clear_all_connections(self):
        self.connections = []
        self.test_nodes = []
    def wait_for_disconnections(self):
        '''Block until every TestNode reports its connection closed.'''
        def disconnected():
            return all(node.closed for node in self.test_nodes)
        return wait_until(disconnected, timeout=10)
    def wait_for_verack(self):
        '''Block until the version handshake completed on every connection.'''
        def veracked():
            return all(node.verack_received for node in self.test_nodes)
        return wait_until(veracked, timeout=10)
    def wait_for_pings(self, counter):
        '''Block until every node ponged the ping with nonce `counter`.'''
        def received_pongs():
            return all(node.received_ping_response(counter) for node in self.test_nodes)
        return wait_until(received_pongs)
    # sync_blocks: Wait for all connections to request the blockhash given
    # then send get_headers to find out the tip of each node, and synchronize
    # the response by using a ping (and waiting for pong with same nonce).
    def sync_blocks(self, blockhash, num_blocks):
        def blocks_requested():
            return all(
                blockhash in node.block_request_map and node.block_request_map[blockhash]
                for node in self.test_nodes
            )
        # --> error if not requested
        if not wait_until(blocks_requested, attempts=20*num_blocks):
            # print [ c.cb.block_request_map for c in self.connections ]
            raise AssertionError("Not all nodes requested block")
        # Send getheaders message
        [ c.cb.send_getheaders() for c in self.connections ]
        # Send ping and wait for response -- synchronization hack
        [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
        self.wait_for_pings(self.ping_counter)
        self.ping_counter += 1
    # Analogous to sync_block (see above)
    def sync_transaction(self, txhash, num_events):
        # Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
        def transaction_requested():
            return all(
                txhash in node.tx_request_map and node.tx_request_map[txhash]
                for node in self.test_nodes
            )
        # --> error if not requested
        if not wait_until(transaction_requested, attempts=20*num_events):
            # print [ c.cb.tx_request_map for c in self.connections ]
            raise AssertionError("Not all nodes requested transaction")
        # Get the mempool
        [ c.cb.send_mempool() for c in self.connections ]
        # Send ping and wait for response -- synchronization hack
        [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
        self.wait_for_pings(self.ping_counter)
        self.ping_counter += 1
        # Sort inv responses from each node
        with mininode_lock:
            [ c.cb.lastInv.sort() for c in self.connections ]
    # Verify that the tip of each connection all agree with each other, and
    # with the expected outcome (if given)
    def check_results(self, blockhash, outcome):
        with mininode_lock:
            for c in self.connections:
                if outcome is None:
                    if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
                        return False
                elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
                    if c.cb.bestblockhash == blockhash:
                        return False
                    if blockhash not in c.cb.block_reject_map:
                        print 'Block not in reject map: %064x' % (blockhash)
                        return False
                    if not outcome.match(c.cb.block_reject_map[blockhash]):
                        print 'Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash)
                        return False
                elif ((c.cb.bestblockhash == blockhash) != outcome):
                    # print c.cb.bestblockhash, blockhash, outcome
                    return False
            return True
    # Either check that the mempools all agree with each other, or that
    # txhash's presence in the mempool matches the outcome specified.
    # This is somewhat of a strange comparison, in that we're either comparing
    # a particular tx to an outcome, or the entire mempools altogether;
    # perhaps it would be useful to add the ability to check explicitly that
    # a particular tx's existence in the mempool is the same across all nodes.
    def check_mempool(self, txhash, outcome):
        with mininode_lock:
            for c in self.connections:
                if outcome is None:
                    # Make sure the mempools agree with each other
                    if c.cb.lastInv != self.connections[0].cb.lastInv:
                        # print c.rpc.getrawmempool()
                        return False
                elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
                    if txhash in c.cb.lastInv:
                        return False
                    if txhash not in c.cb.tx_reject_map:
                        print 'Tx not in reject map: %064x' % (txhash)
                        return False
                    if not outcome.match(c.cb.tx_reject_map[txhash]):
                        print 'Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash)
                        return False
                elif ((txhash in c.cb.lastInv) != outcome):
                    # print c.rpc.getrawmempool(), c.cb.lastInv
                    return False
            return True
    def run(self):
        '''Main loop: pull TestInstances from the generator, deliver their
        objects to all nodes, and assert the expected outcomes.'''
        # Wait until verack is received
        self.wait_for_verack()
        test_number = 1
        for test_instance in self.test_generator.get_tests():
            # We use these variables to keep track of the last block
            # and last transaction in the tests, which are used
            # if we're not syncing on every block or every tx.
            [ block, block_outcome, tip ] = [ None, None, None ]
            [ tx, tx_outcome ] = [ None, None ]
            invqueue = []
            for test_obj in test_instance.blocks_and_transactions:
                b_or_t = test_obj[0]
                outcome = test_obj[1]
                # Determine if we're dealing with a block or tx
                if isinstance(b_or_t, CBlock): # Block test runner
                    block = b_or_t
                    block_outcome = outcome
                    tip = block.sha256
                    # each test_obj can have an optional third argument
                    # to specify the tip we should compare with
                    # (default is to use the block being tested)
                    if len(test_obj) >= 3:
                        tip = test_obj[2]
                    # Add to shared block_store, set as current block
                    # If there was an open getdata request for the block
                    # previously, and we didn't have an entry in the
                    # block_store, then immediately deliver, because the
                    # node wouldn't send another getdata request while
                    # the earlier one is outstanding.
                    first_block_with_hash = True
                    if self.block_store.get(block.sha256) is not None:
                        first_block_with_hash = False
                    with mininode_lock:
                        self.block_store.add_block(block)
                        for c in self.connections:
                            if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
                                # There was a previous request for this block hash
                                # Most likely, we delivered a header for this block
                                # but never had the block to respond to the getdata
                                c.send_message(msg_block(block))
                            else:
                                c.cb.block_request_map[block.sha256] = False
                    # Either send inv's to each node and sync, or add
                    # to invqueue for later inv'ing.
                    if (test_instance.sync_every_block):
                        [ c.cb.send_inv(block) for c in self.connections ]
                        self.sync_blocks(block.sha256, 1)
                        if (not self.check_results(tip, outcome)):
                            raise AssertionError("Test failed at test %d" % test_number)
                    else:
                        invqueue.append(CInv(2, block.sha256))
                elif isinstance(b_or_t, CBlockHeader):
                    block_header = b_or_t
                    self.block_store.add_header(block_header)
                else: # Tx test runner
                    assert(isinstance(b_or_t, CTransaction))
                    tx = b_or_t
                    tx_outcome = outcome
                    # Add to shared tx store and clear map entry
                    with mininode_lock:
                        self.tx_store.add_transaction(tx)
                        for c in self.connections:
                            c.cb.tx_request_map[tx.sha256] = False
                    # Again, either inv to all nodes or save for later
                    if (test_instance.sync_every_tx):
                        [ c.cb.send_inv(tx) for c in self.connections ]
                        self.sync_transaction(tx.sha256, 1)
                        if (not self.check_mempool(tx.sha256, outcome)):
                            raise AssertionError("Test failed at test %d" % test_number)
                    else:
                        invqueue.append(CInv(1, tx.sha256))
                # Ensure we're not overflowing the inv queue
                if len(invqueue) == MAX_INV_SZ:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
            # Do final sync if we weren't syncing on every block or every tx.
            if (not test_instance.sync_every_block and block is not None):
                if len(invqueue) > 0:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
                self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
                if (not self.check_results(tip, block_outcome)):
                    raise AssertionError("Block test failed at test %d" % test_number)
            if (not test_instance.sync_every_tx and tx is not None):
                if len(invqueue) > 0:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
                self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
                if (not self.check_mempool(tx.sha256, tx_outcome)):
                    raise AssertionError("Mempool test failed at test %d" % test_number)
            print "Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ]
            test_number += 1
        [ c.disconnect_node() for c in self.connections ]
        self.wait_for_disconnections()
        self.block_store.close()
        self.tx_store.close()
|
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
import pickle
import os
import tensorflow as tf
import ray
from ray.rllib.common import Agent, TrainingResult
from ray.rllib.dqn import logger, models
from ray.rllib.dqn.common.wrappers import wrap_dqn
from ray.rllib.dqn.common.schedules import LinearSchedule
from ray.rllib.dqn.replay_buffer import ReplayBuffer, PrioritizedReplayBuffer
"""The default configuration dict for the DQN algorithm.
dueling: bool
whether to use dueling dqn
double_q: bool
whether to use double dqn
hiddens: array<int>
hidden layer sizes of the state and action value networks
model: dict
config options to pass to the model constructor
lr: float
learning rate for adam optimizer
schedule_max_timesteps: int
max num timesteps for annealing schedules
timesteps_per_iteration: int
number of env steps to optimize for before returning
buffer_size: int
size of the replay buffer
exploration_fraction: float
fraction of entire training period over which the exploration rate is
annealed
exploration_final_eps: float
final value of random action probability
sample_batch_size: int
update the replay buffer with this many samples at once
num_workers: int
the number of workers to use for parallel batch sample collection
train_batch_size: int
size of a batched sampled from replay buffer for training
print_freq: int
how often to print out training progress
set to None to disable printing
learning_starts: int
how many steps of the model to collect transitions for before learning
starts
gamma: float
discount factor
grad_norm_clipping: int or None
if not None, clip gradients during optimization at this value
target_network_update_freq: int
update the target network every `target_network_update_freq` steps.
prioritized_replay: bool
if True prioritized replay buffer will be used.
prioritized_replay_alpha: float
alpha parameter for prioritized replay buffer
prioritized_replay_beta0: float
initial value of beta for prioritized replay buffer
prioritized_replay_beta_iters: int
number of iterations over which beta will be annealed from initial
value to 1.0. If set to None equals to schedule_max_timesteps
prioritized_replay_eps: float
epsilon to add to the TD errors when updating priorities.
num_cpu: int
number of cpus to use for training
"""
# Default hyperparameters for DQNAgent. Each key is described in the module
# docstring above, except `gpu_offset`, which shifts the CUDA device index
# assigned to each remote worker (worker i gets device i + gpu_offset).
DEFAULT_CONFIG = dict(
    dueling=True,
    double_q=True,
    hiddens=[256],
    model={},
    gpu_offset=0,
    lr=5e-4,
    schedule_max_timesteps=100000,
    timesteps_per_iteration=1000,
    buffer_size=50000,
    exploration_fraction=0.1,
    exploration_final_eps=0.02,
    sample_batch_size=1,
    num_workers=1,
    train_batch_size=32,
    print_freq=1,
    learning_starts=1000,
    gamma=1.0,
    grad_norm_clipping=10,
    target_network_update_freq=500,
    prioritized_replay=False,
    prioritized_replay_alpha=0.6,
    prioritized_replay_beta0=0.4,
    prioritized_replay_beta_iters=None,
    prioritized_replay_eps=1e-6,
    num_cpu=16)
class Actor(object):
    """Holds one environment, one TF session/graph and one replay buffer.

    Serves both as the driver-side learner and, wrapped by RemoteActor,
    as a remote sample-collection worker.
    """
    def __init__(self, env_creator, config, logdir):
        env = env_creator()
        env = wrap_dqn(env, config["model"])
        self.env = env
        self.config = config
        num_cpu = config["num_cpu"]
        tf_config = tf.ConfigProto(
            inter_op_parallelism_threads=num_cpu,
            intra_op_parallelism_threads=num_cpu)
        self.sess = tf.Session(config=tf_config)
        self.dqn_graph = models.DQNGraph(env, config)
        # Create the replay buffer
        if config["prioritized_replay"]:
            self.replay_buffer = PrioritizedReplayBuffer(
                config["buffer_size"],
                alpha=config["prioritized_replay_alpha"])
            prioritized_replay_beta_iters = \
                config["prioritized_replay_beta_iters"]
            if prioritized_replay_beta_iters is None:
                # Default: anneal beta over the whole training run.
                prioritized_replay_beta_iters = \
                    config["schedule_max_timesteps"]
            self.beta_schedule = LinearSchedule(
                prioritized_replay_beta_iters,
                initial_p=config["prioritized_replay_beta0"],
                final_p=1.0)
        else:
            self.replay_buffer = ReplayBuffer(config["buffer_size"])
            self.beta_schedule = None
        # Create the schedule for exploration starting from 1.
        self.exploration = LinearSchedule(
            schedule_timesteps=int(
                config["exploration_fraction"] *
                config["schedule_max_timesteps"]),
            initial_p=1.0,
            final_p=config["exploration_final_eps"])
        # Initialize the parameters and copy them to the target network.
        self.sess.run(tf.global_variables_initializer())
        self.dqn_graph.update_target(self.sess)
        # Handle used to get/set the network weights as flat arrays so the
        # driver can ship them to workers via ray.put.
        self.variables = ray.experimental.TensorFlowVariables(
            tf.group(self.dqn_graph.q_tp1, self.dqn_graph.q_t), self.sess)
        self.episode_rewards = [0.0]
        self.episode_lengths = [0.0]
        self.saved_mean_reward = None
        self.obs = self.env.reset()
        self.file_writer = tf.summary.FileWriter(logdir, self.sess.graph)
    def step(self, cur_timestep):
        """Take one epsilon-greedy env step; return the transition tuple
        (obs, action, reward, new_obs, done)."""
        # Take action and update exploration to the newest value
        action = self.dqn_graph.act(
            self.sess, np.array(self.obs)[None],
            self.exploration.value(cur_timestep))[0]
        new_obs, rew, done, _ = self.env.step(action)
        ret = (self.obs, action, rew, new_obs, float(done))
        self.obs = new_obs
        self.episode_rewards[-1] += rew
        self.episode_lengths[-1] += 1
        if done:
            self.obs = self.env.reset()
            self.episode_rewards.append(0.0)
            self.episode_lengths.append(0.0)
        return ret
    def do_steps(self, num_steps, cur_timestep):
        """Collect `num_steps` transitions into the replay buffer."""
        for _ in range(num_steps):
            obs, action, rew, new_obs, done = self.step(cur_timestep)
            self.replay_buffer.add(obs, action, rew, new_obs, done)
    def get_gradient(self, cur_timestep):
        """Sample a batch from the replay buffer and return the gradient
        of the TD loss (updating priorities if prioritized replay)."""
        if self.config["prioritized_replay"]:
            experience = self.replay_buffer.sample(
                self.config["train_batch_size"],
                beta=self.beta_schedule.value(cur_timestep))
            (obses_t, actions, rewards, obses_tp1,
             dones, _, batch_idxes) = experience
        else:
            obses_t, actions, rewards, obses_tp1, dones = \
                self.replay_buffer.sample(self.config["train_batch_size"])
            batch_idxes = None
        # Uniform importance weights (ones); prioritization is handled via
        # the priority update below rather than loss weighting here.
        td_errors, grad = self.dqn_graph.compute_gradients(
            self.sess, obses_t, actions, rewards, obses_tp1, dones,
            np.ones_like(rewards))
        if self.config["prioritized_replay"]:
            new_priorities = (
                np.abs(td_errors) + self.config["prioritized_replay_eps"])
            self.replay_buffer.update_priorities(
                batch_idxes, new_priorities)
        return grad
    def apply_gradients(self, grad):
        self.dqn_graph.apply_gradients(self.sess, grad)
    def stats(self, num_timesteps):
        """Return (mean reward, mean length over last 100 episodes,
        episode count, current exploration eps, replay buffer size)."""
        mean_100ep_reward = round(np.mean(self.episode_rewards[-101:-1]), 1)
        mean_100ep_length = round(np.mean(self.episode_lengths[-101:-1]), 1)
        exploration = self.exploration.value(num_timesteps)
        return (
            mean_100ep_reward,
            mean_100ep_length,
            len(self.episode_rewards),
            exploration,
            len(self.replay_buffer))
    def get_weights(self):
        return self.variables.get_weights()
    def set_weights(self, weights):
        self.variables.set_weights(weights)
    def save(self):
        """Return the picklable non-TF state (TF vars go via tf.Saver)."""
        return [
            self.beta_schedule,
            self.exploration,
            self.episode_rewards,
            self.episode_lengths,
            self.saved_mean_reward,
            self.obs,
            self.replay_buffer]
    def restore(self, data):
        """Inverse of save(): reinstall the pickled non-TF state."""
        self.beta_schedule = data[0]
        self.exploration = data[1]
        self.episode_rewards = data[2]
        self.episode_lengths = data[3]
        self.saved_mean_reward = data[4]
        self.obs = data[5]
        self.replay_buffer = data[6]
@ray.remote
class RemoteActor(Actor):
    """Ray-remote Actor restricted to the CUDA device(s) in `gpu_mask`."""
    def __init__(self, env_creator, config, logdir, gpu_mask):
        # Must be set before TF initializes in this worker process.
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu_mask
        Actor.__init__(self, env_creator, config, logdir)
class DQNAgent(Agent):
    """Parallel DQN: remote workers collect samples and compute gradients;
    the driver actor applies them and periodically syncs weights/targets.
    """
    _agent_name = "DQN"
    _default_config = DEFAULT_CONFIG

    def _init(self):
        self.actor = Actor(self.env_creator, self.config, self.logdir)
        self.workers = [
            RemoteActor.remote(
                self.env_creator, self.config, self.logdir,
                "{}".format(i + self.config["gpu_offset"]))
            for i in range(self.config["num_workers"])]
        self.cur_timestep = 0
        self.num_iterations = 0
        self.num_target_updates = 0
        self.steps_since_update = 0
        self.file_writer = tf.summary.FileWriter(
            self.logdir, self.actor.sess.graph)
        self.saver = tf.train.Saver(max_to_keep=None)

    def _update_worker_weights(self):
        """Broadcast the driver's current network weights to all workers.

        BUG FIX: the previous version fetched the weights twice (the first
        result was bound to `w` and immediately discarded) and reused `w`
        as the loop variable; the redundant fetch is removed.
        """
        weights = ray.put(self.actor.get_weights())
        for worker in self.workers:
            worker.set_weights.remote(weights)

    def _train(self):
        """Run one training iteration and return a TrainingResult."""
        config = self.config
        sample_time, sync_time, learn_time, apply_time = 0, 0, 0, 0
        iter_init_timesteps = self.cur_timestep
        num_loop_iters = 0
        steps_per_iter = config["sample_batch_size"] * len(self.workers)
        while (self.cur_timestep - iter_init_timesteps <
               config["timesteps_per_iteration"]):
            dt = time.time()
            # All workers collect sample_batch_size env steps in parallel.
            ray.get([
                w.do_steps.remote(
                    config["sample_batch_size"], self.cur_timestep)
                for w in self.workers])
            num_loop_iters += 1
            self.cur_timestep += steps_per_iter
            self.steps_since_update += steps_per_iter
            sample_time += time.time() - dt
            if self.cur_timestep > config["learning_starts"]:
                dt = time.time()
                # Minimize the error in Bellman's equation on a batch sampled
                # from replay buffer.
                self._update_worker_weights()
                sync_time += (time.time() - dt)
                dt = time.time()
                gradients = ray.get(
                    [w.get_gradient.remote(self.cur_timestep)
                     for w in self.workers])
                learn_time += (time.time() - dt)
                dt = time.time()
                for grad in gradients:
                    self.actor.apply_gradients(grad)
                apply_time += (time.time() - dt)
            if (self.cur_timestep > config["learning_starts"] and
                    self.steps_since_update >
                    config["target_network_update_freq"]):
                self.actor.dqn_graph.update_target(self.actor.sess)
                # Update target network periodically.
                self._update_worker_weights()
                self.steps_since_update -= config["target_network_update_freq"]
                self.num_target_updates += 1
        mean_100ep_reward = 0.0
        mean_100ep_length = 0.0
        num_episodes = 0
        buffer_size_sum = 0
        for mean_rew, mean_len, episodes, exploration, buf_sz in ray.get(
                [w.stats.remote(self.cur_timestep) for w in self.workers]):
            mean_100ep_reward += mean_rew
            mean_100ep_length += mean_len
            num_episodes += episodes
            buffer_size_sum += buf_sz
        mean_100ep_reward /= len(self.workers)
        mean_100ep_length /= len(self.workers)
        info = [
            ("mean_100ep_reward", mean_100ep_reward),
            # NOTE(review): `exploration` is the value reported by the last
            # worker in the loop above; all workers share the same schedule.
            ("exploration_frac", exploration),
            ("steps", self.cur_timestep),
            ("episodes", num_episodes),
            ("buffer_sizes_sum", buffer_size_sum),
            ("target_updates", self.num_target_updates),
            ("sample_time", sample_time),
            ("weight_sync_time", sync_time),
            ("apply_time", apply_time),
            ("learn_time", learn_time),
            ("samples_per_s",
             num_loop_iters * np.float64(steps_per_iter) / sample_time),
            ("learn_samples_per_s",
             num_loop_iters * np.float64(config["train_batch_size"]) *
             np.float64(config["num_workers"]) / learn_time),
        ]
        for k, v in info:
            logger.record_tabular(k, v)
        logger.dump_tabular()
        result = TrainingResult(
            episode_reward_mean=mean_100ep_reward,
            episode_len_mean=mean_100ep_length,
            timesteps_this_iter=self.cur_timestep - iter_init_timesteps,
            info=info)
        return result

    def _save(self):
        """Checkpoint TF variables plus the pickled non-TF state."""
        checkpoint_path = self.saver.save(
            self.actor.sess,
            os.path.join(self.logdir, "checkpoint"),
            global_step=self.num_iterations)
        extra_data = [
            self.actor.save(),
            ray.get([w.save.remote() for w in self.workers]),
            self.cur_timestep,
            self.num_iterations,
            self.num_target_updates,
            self.steps_since_update]
        pickle.dump(extra_data, open(checkpoint_path + ".extra_data", "wb"))
        return checkpoint_path

    def _restore(self, checkpoint_path):
        """Inverse of _save(): restore TF variables and non-TF state."""
        self.saver.restore(self.actor.sess, checkpoint_path)
        extra_data = pickle.load(open(checkpoint_path + ".extra_data", "rb"))
        self.actor.restore(extra_data[0])
        ray.get([
            w.restore.remote(d) for (d, w)
            in zip(extra_data[1], self.workers)])
        self.cur_timestep = extra_data[2]
        self.num_iterations = extra_data[3]
        self.num_target_updates = extra_data[4]
        self.steps_since_update = extra_data[5]

    def compute_action(self, observation):
        """Greedy (eps=0) action for a single observation."""
        return self.actor.dqn_graph.act(
            self.actor.sess, np.array(observation)[None], 0.0)[0]
|
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import random
import re
import string
import time
import jsonpatch
import jsonpointer
from oslo_serialization import base64
import six
from yaql.language import specs
from yaql.language import utils
from yaql.language import yaqltypes
from murano.common import config as cfg
from murano.dsl import constants
from murano.dsl import dsl
from murano.dsl import helpers
from murano.dsl import yaql_integration
# Lazily-initialized counter; presumably consumed by a random-string helper
# defined later in this module (outside this chunk) — verify before reuse.
_random_string_counter = None
@specs.parameter('value', yaqltypes.String())
@specs.extension_method
def base64encode(value):
    """YAQL extension: return *value* base64-encoded as a text string."""
    return base64.encode_as_text(value)
@specs.parameter('value', yaqltypes.String())
@specs.extension_method
def base64decode(value):
    """YAQL extension: decode base64 *value* back to a text string."""
    return base64.decode_as_text(value)
@specs.parameter('collection', yaqltypes.Iterable())
@specs.parameter('composer', yaqltypes.Lambda())
@specs.extension_method
def pselect(collection, composer):
    """YAQL extension: apply *composer* to each item in parallel
    (parallel counterpart of select)."""
    return helpers.parallel_select(collection, composer)
@specs.parameter('mappings', collections.Mapping)
@specs.extension_method
def bind(obj, mappings):
    """Recursively substitute '$'-prefixed macro strings in *obj*.

    Strings starting with '$' are expanded via _convert_macro_parameter;
    sequences and mappings are processed recursively; anything else (or a
    macro that expands to None) is returned unchanged.

    BUG FIX: a duplicate `elif isinstance(obj, six.string_types) and
    obj.startswith('$')` branch was removed -- it repeated the first
    condition verbatim and was therefore unreachable dead code.
    """
    if isinstance(obj, six.string_types) and obj.startswith('$'):
        value = _convert_macro_parameter(obj[1:], mappings)
        if value is not None:
            return value
    elif utils.is_sequence(obj):
        return [bind(t, mappings) for t in obj]
    elif isinstance(obj, collections.Mapping):
        result = {}
        for key, value in six.iteritems(obj):
            result[bind(key, mappings)] = bind(value, mappings)
        return result
    return obj
def _convert_macro_parameter(macro, mappings):
    """Resolve a macro string against *mappings*.

    Every '{key}' placeholder in *macro* is replaced with the mapped
    value. If no placeholder was found, the whole *macro* is treated as a
    key and looked up directly (raising KeyError when absent).
    """
    substituted = [False]

    def _substitute(match):
        substituted[0] = True
        return six.text_type(mappings.get(match.group(1)))

    expanded = re.sub('{(\\w+?)}', _substitute, macro)
    return expanded if substituted[0] else mappings[macro]
@specs.parameter('group', yaqltypes.String())
@specs.parameter('setting', yaqltypes.String())
@specs.parameter('read_as_file', bool)
def config(group, setting, read_as_file=False):
    """Return the oslo.config option *setting* from section *group*.

    When *read_as_file* is True, the option value is treated as a file
    path and the contents of that file are returned instead.
    """
    config_value = cfg.CONF[group][setting]
    if read_as_file:
        with open(config_value) as target_file:
            return target_file.read()
    else:
        return config_value
@specs.parameter('setting', yaqltypes.String())
@specs.name('config')
def config_default(setting):
    """Return the oslo.config option *setting* from the DEFAULT section."""
    return cfg.CONF[setting]
@specs.parameter('string', yaqltypes.String())
@specs.parameter('start', int)
@specs.parameter('length', int)
@specs.inject('delegate', yaqltypes.Delegate('substring', method=True))
@specs.extension_method
def substr(delegate, string, start, length=-1):
    """Legacy alias delegating to the YAQL 'substring' method.

    Registered only for old runtimes (see get_context); length=-1 means
    "to the end of the string".
    """
    return delegate(string, start, length)
@specs.extension_method
def patch_(engine, obj, patch):
    """Apply a JSON patch (RFC 6902 operations) to *obj*.

    *patch* may be a single operation or a tuple of operations. If the
    patch references a path that does not exist in *obj*, the (mutable
    copy of the) original object is returned unchanged instead of
    raising.
    """
    if not isinstance(patch, tuple):
        # Normalize a single operation into a one-element sequence.
        patch = (patch,)
    patch = dsl.to_mutable(patch, engine)
    patch = jsonpatch.JsonPatch(patch)
    try:
        obj = dsl.to_mutable(obj, engine)
        return patch.apply(obj, in_place=True)
    except jsonpointer.JsonPointerException:
        # Bad/missing path: best-effort semantics, return object as-is.
        return obj
def _int2base(x, base):
"""Converts decimal integers into another number base
Converts decimal integers into another number base
from base-2 to base-36.
:param x: decimal integer
:param base: number base, max value is 36
:return: integer converted to the specified base
"""
digs = string.digits + string.ascii_lowercase
if x < 0:
sign = -1
elif x == 0:
return '0'
else:
sign = 1
x *= sign
digits = []
while x:
digits.append(digs[x % base])
x //= base
if sign < 0:
digits.append('-')
digits.reverse()
return ''.join(digits)
def random_name():
    """Generate a short, mostly-unique lowercase name (e.g. for a host).

    The name is composed of three parts:
      * 5 random lowercase letters,
      * the current millisecond timestamp in base-36 (up to 8 chars),
      * a per-process wrap-around counter in base-36 (up to 2 chars).

    :return: generated name string
    """
    global _random_string_counter
    # Counter starts at 1 (also re-seeds to 1 after wrapping to 0 below).
    counter = _random_string_counter or 1
    # generate first 5 random chars
    prefix = ''.join(random.choice(string.ascii_lowercase) for _ in range(5))
    # convert timestamp to higher base to shorten hostname string
    # (up to 8 chars)
    timestamp = _int2base(int(time.time() * 1000), 36)[:8]
    # third part of random name up to 2 chars
    # (1295 is last 2-digit number in base-36, 1296 is first 3-digit number)
    suffix = _int2base(counter, 36)
    _random_string_counter = (counter + 1) % 1296
    return prefix + timestamp + suffix
@specs.parameter('collection', yaqltypes.Iterable())
@specs.parameter('default', nullable=True)
@specs.extension_method
def first_or_default(collection, default=None):
    """Return the first element of *collection*, or *default* if empty."""
    return next(iter(collection), default)
@specs.parameter('logger_name', yaqltypes.String(True))
def logger(context, logger_name):
    """Instantiate an io.murano.system.Logger with the given name."""
    log = yaql_integration.call_func(
        context, 'new', 'io.murano.system.Logger',
        logger_name=logger_name)
    return log
@helpers.memoize
def get_context(runtime_version):
    """Build (cached per runtime version) the YAQL context with extensions.

    Registers the extension functions defined in this module and, for
    legacy runtimes (<= 1.1), the deprecated 'substr' alias. It then
    re-exports a fixed set of standard string/collection methods from the
    root context so they are also available here.
    """
    context = yaql_integration.create_empty_context()
    context.register_function(base64decode)
    context.register_function(base64encode)
    context.register_function(pselect)
    context.register_function(bind)
    context.register_function(random_name)
    context.register_function(patch_)
    context.register_function(logger)
    # 'substr' is only kept for old runtime versions.
    if runtime_version <= constants.RUNTIME_VERSION_1_1:
        context.register_function(substr)
    context.register_function(first_or_default)
    root_context = yaql_integration.create_context(runtime_version)
    # Mirror selected built-in methods from the root context into this one.
    for t in ('to_lower', 'to_upper', 'trim', 'join', 'split',
              'starts_with', 'ends_with', 'matches', 'replace',
              'flatten'):
        for spec in utils.to_extension_method(t, root_context):
            context.register_function(spec)
    return context
@helpers.memoize
def get_restricted_context():
    """Build (cached) a minimal YAQL context exposing only config helpers."""
    context = yaql_integration.create_empty_context()
    context.register_function(config)
    context.register_function(config_default)
    return context
|
|
"""This module contains tests for the weighted graph implementation."""
"""This module contains the tests for a graph data structure."""
# Until we refactor our WGraph class to be ble to take in negative numbers, we're leaving negatives out for Floyd Warshall.
import pytest
TEST_NODES = [1, 2, 3, 4, "five", "six", "seven", "apple"]
TEST_EDGES = [(1, 2, 1), (2, 3, 2), (1, 3, 4), (3, 1, 2), (5, "six", 2), ("pear", "bear", 1)]
TRAVERSAL_NODES = ["A", "B", "C", "D", "E", "F", "G"]
TRAVERSAL_EDGES = [("A", "B", 1), ("A", "C", 1), ("B", "D", 1), ("B", "E", 1), ("C", "F", 1), ("C", "G", 1)]
CYCLICAL_NODES = [1, 2, 3]
CYCLICAL_EDGES = [(1, 2, 1), (2, 3, 1), (3, 1, 2)]
MED_CYCLICAL_NODES = [1, 2, 3, 4, 5, 6, 7]
MED_CYCLICAL_EDGES = [(1, 2, 1), (2, 3, 1), (3, 4, 2), (4, 5, 3), (5, 6, 4), (6, 7, 5), (7, 1, 6)]
COMPLEX_TRAVERSAL_NODES = [1, 2, 3, 4, 5, 6, 7, 8, 9]
COMPLEX_TRAVERSAL_EDGES = [(1, 3, 2), (1, 7, 3), (2, 4, 1), (2, 8, 4), (3, 6, 1), (3, 2, 1), (4, 5, 1), (4, 1, 1), (4, 9, 1), (5, 8, 1), (5, 2, 1), (5, 9, 1), (6, 3, 1), (6, 8, 1), (6, 5, 1), (7, 3, 1), (7, 1, 1), (7, 8, 1), (8, 1, 1), (8, 3, 1), (8, 5, 1), (8, 7, 1), (9, 2, 1), (9, 4, 1), (9, 6, 1)]
CIRCLE_WITH_TAIL_NODES = [1, 3, 2, 4, 5, 6]
CIRCLE_WITH_TAIL_EDGES = [(1, 2, 1), (2, 4, 1), (2, 3, 2), (3, 1, 3), (4, 5, 4), (5, 6, 5)]
@pytest.fixture
def empty_graph():
    """Create an empty graph."""
    from weighted_graph import WGraph
    return WGraph()
@pytest.fixture
def graph_nodes():
    """Create a graph populated with TEST_NODES and no edges."""
    from weighted_graph import WGraph
    g = WGraph()
    for node in TEST_NODES:
        g.add_node(node)
    return g
@pytest.fixture
def graph_edges():
    """Create a graph with TEST_NODES and TEST_EDGES."""
    from weighted_graph import WGraph
    g = WGraph()
    for node in TEST_NODES:
        g.add_node(node)
    for edge in TEST_EDGES:
        g.add_edge(edge[0], edge[1], edge[2])
    return g
@pytest.fixture
def traversal_graph():
    """Create the tree-shaped traversal test graph."""
    from weighted_graph import WGraph
    tg = WGraph()
    for node in TRAVERSAL_NODES:
        tg.add_node(node)
    for edge in TRAVERSAL_EDGES:
        tg.add_edge(edge[0], edge[1], edge[2])
    return tg
@pytest.fixture
def cyclical_graph():
    """Create the small 3-node cyclical graph."""
    from weighted_graph import WGraph
    cg = WGraph()
    for node in CYCLICAL_NODES:
        cg.add_node(node)
    for edge in CYCLICAL_EDGES:
        cg.add_edge(edge[0], edge[1], edge[2])
    return cg
@pytest.fixture
def med_cyclical_graph():
    """Create the 7-node cyclical graph."""
    from weighted_graph import WGraph
    mcg = WGraph()
    for node in MED_CYCLICAL_NODES:
        mcg.add_node(node)
    for edge in MED_CYCLICAL_EDGES:
        mcg.add_edge(edge[0], edge[1], edge[2])
    return mcg
@pytest.fixture
def complex_traversal_graph():
    """Create the densely connected 9-node traversal graph."""
    from weighted_graph import WGraph
    ctg = WGraph()
    for node in COMPLEX_TRAVERSAL_NODES:
        ctg.add_node(node)
    for edge in COMPLEX_TRAVERSAL_EDGES:
        ctg.add_edge(edge[0], edge[1], edge[2])
    return ctg
@pytest.fixture
def circle_with_tail_graph():
    """Create the cycle-with-a-tail graph."""
    from weighted_graph import WGraph
    cwtg = WGraph()
    for node in CIRCLE_WITH_TAIL_NODES:
        cwtg.add_node(node)
    for edge in CIRCLE_WITH_TAIL_EDGES:
        cwtg.add_edge(edge[0], edge[1], edge[2])
    return cwtg
@pytest.fixture
def simple_wgraph():
    # NOTE(review): this fixture is dead code -- an identically named
    # fixture is redefined near the bottom of this module and pytest uses
    # that later definition. This copy also reads SIMPLE_WGRAPH, which is
    # only defined further down the file (harmless at fixture call time,
    # but confusing). Consider deleting this copy.
    from weighted_graph import WGraph
    wg = WGraph()
    for edge in SIMPLE_WGRAPH:
        wg.add_edge(edge[0], edge[1], edge[2])
    return wg
# def test_init_graph(empty_graph):
# """Test initialization of an empty graph."""
# assert empty_graph._gdict == {}
# def test_nodes_returns_empty_list_empty_graph(empty_graph):
# """Test nodes() returns empty list for an empty graph."""
# assert empty_graph.nodes() == []
# def test_nodes_returns_list_non_empty_graph(graph_nodes):
# """Test nodes() returns correct list."""
# node_list = graph_nodes.nodes()
# for node in node_list:
# assert node in TEST_NODES
# assert len(node_list) == len(TEST_NODES)
# def test_add_node_creates_new_node(empty_graph):
# """Test add_node() creates a new node."""
# g = empty_graph
# g.add_node(1)
# assert g._gdict[1] == {}
# def test_add_node_already_exists(empty_graph):
# """Test adding a node that already exists."""
# empty_graph.add_node(1)
# empty_graph.add_node(1)
# assert empty_graph._gdict[1] == {}
# def test_add_edge_create_new_edge_no_weight(empty_graph):
# """Test Adding an edge to an empty graph."""
# empty_graph.add_edge("dog", "cat")
# assert "dog", "cat" in empty_graph._gict.keys()
# assert empty_graph._gdict["dog"] == {"cat": 1}
# assert empty_graph._gdict["dog"]["cat"] == 1
# def test_add_edge_create_new_edge_with_weight(empty_graph):
# """Test Adding an edge to an empty graph."""
# empty_graph.add_edge("dog", "cat", 3)
# assert "dog", "cat" in empty_graph._gict.keys()
# assert empty_graph._gdict["dog"] == {"cat": 3}
# assert empty_graph._gdict["dog"]["cat"] == 3
# def test_add_edge_non_int_weight_raises_error(empty_graph):
# """Test that creating an edge with a non-integer weight raises an error."""
# with pytest.raises(TypeError):
# empty_graph.add_edge("dog", "cat", "turtle")
# def test_add_edge_n1_already_exists(graph_edges):
# """Test adding an edge where n1 already exsists."""
# graph_edges.add_edge(1, "grapple")
# assert "grapple" in graph_edges._gdict[1]
# assert "grapple" in graph_edges.nodes()
# def test_add_edge_n1_does_not_exist_n2_does(graph_edges):
# """Test case where n1 does not exist, n2 does."""
# graph_edges.add_edge("grapple", 1)
# assert 1 in graph_edges._gdict["grapple"]
# assert "grapple" in graph_edges.nodes()
# def test_add_edge_already_exists(graph_edges):
# """Testing adding an edge to a graph already with that edge, doesn't change it."""
# old_edges = graph_edges._gdict[1]
# graph_edges.add_edge(1, 2)
# assert old_edges == graph_edges._gdict[1]
# def test_del_node_deletes_node_and_all_points_to_node(graph_edges):
# """Test that del_node() delets the node and all pointers to that node."""
# graph_edges.del_node(3)
# assert 3 not in graph_edges.nodes()
# for edge in graph_edges.edges():
# assert 3 != edge[1]
# def test_del_node_doesnt_exist_raises_keyerror(graph_edges):
# """Test that del_node() raises an error if node does not exist."""
# with pytest.raises(KeyError):
# graph_edges.del_node("widgets")
# def test_del_edge_raise_key_error_no_n1(graph_edges):
# """Test that del_edge() raises key error if n1 does not exist."""
# with pytest.raises(KeyError):
# graph_edges.del_edge("widgets", "anything")
# def test_del_edge_raise_value_error_no_n2(graph_edges):
# """Test that del edge raises a value error if n2 does not exist."""
# with pytest.raises(KeyError):
# graph_edges.del_edge(1, "widgets")
# def test_del_edge_functions_correctly(graph_edges):
# """Test that del edge functions correctly."""
# graph_edges.del_edge(1, 2)
# assert 2 not in graph_edges._gdict[1]
# graph_edges.del_edge(1, 3)
# assert 3 not in graph_edges._gdict[1]
# assert len(graph_edges._gdict[1]) == 0
# def test_has_node_return_false(graph_edges):
# """Test that has_node() returns false if node doesn't exist."""
# assert graph_edges.has_node("widgets") is False
# graph_edges.del_node(1)
# assert graph_edges.has_node(1) is False
# def test_has_node_return_true(graph_edges):
# """Test that has_node returns True if node exists."""
# assert graph_edges.has_node(1) is True
# graph_edges.add_node("widgets")
# assert graph_edges.has_node("widgets") is True
# def test_adjacent_returns_true(graph_edges):
# """Test that adjacent() returns true if given nodes are neighbors."""
# assert graph_edges.adjacent(1, 2) is True
# graph_edges.add_edge(1, 5)
# assert graph_edges.adjacent(1, 5) is True
# assert graph_edges.adjacent(5, "six") is True
# def test_adjacent_returns_false(graph_edges):
# """Test that adjacent() returns false if given nodes are not neighbors."""
# assert graph_edges.adjacent(1, 5) is False
# graph_edges.del_node(2)
# with pytest.raises(KeyError):
# graph_edges.adjacent(1, 2)
# def test_adjacent_raises_error_if_n_not_in_graph(graph_edges):
# """Test that adjacent() raises an error if nodes not in graph."""
# with pytest.raises(KeyError):
# graph_edges.adjacent(0, 1)
# with pytest.raises(KeyError):
# graph_edges.adjacent(1, 0)
# with pytest.raises(KeyError):
# graph_edges.adjacent("widget", "gizmo")
# def test_neighbors_returns_correct_list(graph_edges):
# """Test that neighbors returns correct list."""
# assert graph_edges.neighbors(1) == [2, 3]
# graph_edges.add_edge(1, 5)
# assert graph_edges.neighbors(1) == [2, 3, 5]
# def test_nieghbors_raises_key_error_no_node(graph_edges):
# """Test that neighbros raises a key error if the node does not exist."""
# with pytest.raises(KeyError):
# graph_edges.neighbors("widget")
# """Test traversal of graph."""
# def test_depth_traversal_of_simple_tree_graph_a_to_g(traversal_graph):
# """The module tests depth traversal of graph."""
# assert traversal_graph.depth_first_traversal("A") == ["A", "B", "D", "E", "C", "F", "G"]
# def test_depth_traversal_of_cyclical_graph(cyclical_graph):
# """The module tests depth traversal of a cyclical graph: 1 to 2, 2 to 3, 3 to 1."""
# assert cyclical_graph.depth_first_traversal(1) == [1, 2, 3]
# def test_breadth_traversal_of_simple_tree_graph_a_to_g(traversal_graph):
# """The module tests breadth traversal of graph."""
# assert traversal_graph.breadth_first_traversal("A") == ["A", "B", "C", "D", "E", "F", "G"]
# def test_breadth_traversal_of_medium_cyclical_graph(med_cyclical_graph):
# """The module tests breadth traversal of a cyclical graph: 1 to 7."""
# assert med_cyclical_graph.breadth_first_traversal(1) == [1, 2, 3, 4, 5, 6, 7]
# def test_breadth_traversal_of_complex_graph(complex_traversal_graph):
# """The module tests the breadth traversal of a complex graph."""
# assert complex_traversal_graph.breadth_first_traversal(1) == [1, 3, 7, 6, 2, 8, 5, 4, 9]
# def test_breadth_traversal_of_circle_with_tail(circle_with_tail_graph):
# """Test breadth traversal on a circle with a tail."""
# assert circle_with_tail_graph.breadth_first_traversal(1) == [1, 2, 4, 3, 5, 6]
# def test_depth_traversal_of_circle_with_tail(circle_with_tail_graph):
# """Test breadth traversal on a circle with a tail."""
# assert circle_with_tail_graph.breadth_first_traversal(1) == [1, 2, 4, 3, 5, 6]
# # TEST SHORTEST PATHS HERE
# Edge lists (source, target, weight) for the shortest-path fixtures below.
SIMPLE_WGRAPH = [("A", "B", 3), ("A", "D", 4), ("B", "C", 2), ("B", "E", 5), ("C", "D" ,1), ("C", "G", 6), ("D","E",3), ("D","B",4), ("E","G",5), ("E","C",2), ("E","F",5), ("F","G",2)]
BASIC_WGRAPH = [("A", "C", 3), ("A", "D", 0), ("B", "A", 1), ("B", "D", 1), ("C", "D", 1), ("D", "B", 4)]
COMPLEX_WGRAPH = [("A", "B", 3), ("A", "D", 4), ("B", "C", 2), ("B", "E", 5), ("C", "D" ,1), ("C", "G", 6), ("D","E",3), ("D","B",4), ("E","G",5), ("E","C",2), ("E","F",5), ("F","G",2), ("F","D",2), ("G","D",2), ("C","A",3), ("G","F",2), ("G","B",3), ("G","A",0), ("C","E",1), ("G","C",0), ("F","C",2)]
# Negative weights are unsupported by WGraph for now (see note at file top).
# ALL_NEGATIVE_WGRAPH = [("A", "B", -3), ("A", "D", -4), ("B", "C", -2), ("B", "E", -5), ("C", "D" ,-1), ("C", "G", -6), ("D","E",-3), ("D","B",-4), ("E","G",-5), ("E","C",-2), ("E","F",-5), ("F","G",-2)]
@pytest.fixture
def simple_wgraph():
    """Build the simple weighted graph used by the shortest-path tests."""
    from weighted_graph import WGraph
    graph = WGraph()
    for source, target, weight in SIMPLE_WGRAPH:
        graph.add_edge(source, target, weight)
    return graph
@pytest.fixture
def basic_wgraph():
    """Build the basic weighted graph used by the shortest-path tests."""
    from weighted_graph import WGraph
    graph = WGraph()
    for source, target, weight in BASIC_WGRAPH:
        graph.add_edge(source, target, weight)
    return graph
@pytest.fixture
def complex_wgraph():
    """Build the complex weighted graph used by the shortest-path tests."""
    from weighted_graph import WGraph
    graph = WGraph()
    for source, target, weight in COMPLEX_WGRAPH:
        graph.add_edge(source, target, weight)
    return graph
# Until we refactor our WGraph class to be ble to take in negative numbers, we're leaving negatives out.
# @pytest.fixture
# def all_negative_wgraph():
# """Fixture to seet up an all negative weighted-graph for use in testing."""
# from weighted_graph import WGraph
# wg = WGraph()
# for edge in ALL_NEGATIVE_WGRAPH:
# wg.add_edge(edge[0], edge[1], edge[2])
# return wg
# --- Dijkstra shortest-path tests -------------------------------------
def test_dijkstra_shortest_path(simple_wgraph):
    """Test dijkstra shortest path."""
    # NOTE(review): exact duplicate of test_dijkstra_shortest_path_a_to_g
    # below; consider removing one of the two.
    assert simple_wgraph.shortest_dijkstra("A", "G") == ["A", "B", "C", "G"]
def test_dijkstra_shortest_path_a_to_g(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, A to G."""
    assert simple_wgraph.shortest_dijkstra("A", "G") == ["A", "B", "C", "G"]
def test_dijkstra_shortest_path_b_to_d(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, B to D."""
    assert simple_wgraph.shortest_dijkstra("B", "D") == ["B", "C", "D"]
def test_dijkstra_shortest_path_a_to_f(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, A to F."""
    assert simple_wgraph.shortest_dijkstra("A", "F") == ["A", "D", "E", "F"]
def test_dijkstra_shortest_path_d_to_c(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, D to C."""
    assert simple_wgraph.shortest_dijkstra("D", "C") == ["D", "E", "C"]
def test_dijkstra_shortest_path_d_to_g(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, D to G."""
    assert simple_wgraph.shortest_dijkstra("D", "G") == ["D", "E", "G"]
def test_dijkstra_shortest_path_d_to_f(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, D to F."""
    assert simple_wgraph.shortest_dijkstra("D", "F") == ["D", "E", "F"]
def test_dijkstra_shortest_path_e_to_b(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, E to B."""
    assert simple_wgraph.shortest_dijkstra("E", "B") == ["E", "C", "D", "B"]
def test_dijkstra_shortest_path_e_to_d(simple_wgraph):
    """Test the Dijkstra shortest path for simple graph, E to D."""
    assert simple_wgraph.shortest_dijkstra("E", "D") == ["E", "C", "D"]
# complex_wgraph
def test_dijkstra_shortest_path_a_to_g_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, A to G."""
    assert complex_wgraph.shortest_dijkstra("A", "G") == ["A", "B", "C", "G"]
def test_dijkstra_shortest_path_b_to_d_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, B to D."""
    assert complex_wgraph.shortest_dijkstra("B", "D") == ["B", "C", "D"]
def test_dijkstra_shortest_path_a_to_f_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, A to F."""
    assert complex_wgraph.shortest_dijkstra("A", "F") == ["A", "B", "C", "E", "F"]
def test_dijkstra_shortest_path_d_to_c_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, D to C."""
    assert complex_wgraph.shortest_dijkstra("D", "C") == ["D", "E", "C"]
def test_dijkstra_shortest_path_d_to_g_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, D to G."""
    assert complex_wgraph.shortest_dijkstra("D", "G") == ["D", "E", "G"]
def test_dijkstra_shortest_path_d_to_f_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, D to F."""
    assert complex_wgraph.shortest_dijkstra("D", "F") == ["D", "E", "F"]
def test_dijkstra_shortest_path_e_to_b_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, E to B."""
    assert complex_wgraph.shortest_dijkstra("E", "B") == ["E", "C", "D", "B"]
def test_dijkstra_shortest_path_e_to_d_complex(complex_wgraph):
    """Test the Dijkstra shortest path for complex graph, E to D."""
    assert complex_wgraph.shortest_dijkstra("E", "D") == ["E", "C", "D"]
# basic_wgraph
def test_dijkstra_shortest_path_a_to_b_basic(basic_wgraph):
    """Test the Dijkstra shortest path for basic graph, A to B."""
    assert basic_wgraph.shortest_dijkstra("A", "B") == ["A", "D", "B"]
def test_dijkstra_shortest_path_b_to_c_basic(basic_wgraph):
    """Test the Dijkstra shortest path for basic graph, B to C."""
    assert basic_wgraph.shortest_dijkstra("B", "C") == ["B", "A", "C"]
def test_dijkstra_shortest_path_d_to_c_basic(basic_wgraph):
    """Test the Dijkstra shortest path for basic graph, D to C."""
    assert basic_wgraph.shortest_dijkstra("D", "C") == ["D", "B", "A", "C"]
def test_dijkstra_shortest_path_c_to_a_basic(basic_wgraph):
    """Test the Dijkstra shortest path for basic graph, C to A."""
    assert basic_wgraph.shortest_dijkstra("C", "A") == ["C", "D", "B", "A"]
def test_dijkstra_shortest_path_c_to_b_basic(basic_wgraph):
    """Test the Dijkstra shortest path for basic graph, C to B."""
    assert basic_wgraph.shortest_dijkstra("C", "B") == ["C", "D", "B"]
# --- Floyd-Warshall shortest-path tests (mirror the Dijkstra cases) ----
def test_floyd_warshall_shortest_path_a_to_g(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, A to G."""
    assert simple_wgraph.shortest_floyd_warshall("A", "G") == ["A", "B", "C", "G"]
def test_floyd_warshall_shortest_path_b_to_d(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, B to D."""
    assert simple_wgraph.shortest_floyd_warshall("B", "D") == ["B", "C", "D"]
def test_floyd_warshall_shortest_path_a_to_f(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, A to F."""
    assert simple_wgraph.shortest_floyd_warshall("A", "F") == ["A", "D", "E", "F"]
def test_floyd_warshall_shortest_path_d_to_c(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, D to C."""
    assert simple_wgraph.shortest_floyd_warshall("D", "C") == ["D", "E", "C"]
def test_floyd_warshall_shortest_path_d_to_g(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, D to G."""
    assert simple_wgraph.shortest_floyd_warshall("D", "G") == ["D", "E", "G"]
def test_floyd_warshall_shortest_path_d_to_f(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, D to F."""
    assert simple_wgraph.shortest_floyd_warshall("D", "F") == ["D", "E", "F"]
def test_floyd_warshall_shortest_path_e_to_b(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, E to B."""
    assert simple_wgraph.shortest_floyd_warshall("E", "B") == ["E", "C", "D", "B"]
def test_floyd_warshall_shortest_path_e_to_d(simple_wgraph):
    """Test the floyd Warshall shortest path for simple graph, E to D."""
    assert simple_wgraph.shortest_floyd_warshall("E", "D") == ["E", "C", "D"]
# complex_wgraph
def test_floyd_warshall_shortest_path_a_to_g_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, A to G."""
    assert complex_wgraph.shortest_floyd_warshall("A", "G") == ["A", "B", "C", "G"]
def test_floyd_warshall_shortest_path_b_to_d_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, B to D."""
    assert complex_wgraph.shortest_floyd_warshall("B", "D") == ["B", "C", "D"]
def test_floyd_warshall_shortest_path_a_to_f_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, A to F."""
    assert complex_wgraph.shortest_floyd_warshall("A", "F") == ["A", "B", "C", "E", "F"]
def test_floyd_warshall_shortest_path_d_to_c_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, D to C."""
    assert complex_wgraph.shortest_floyd_warshall("D", "C") == ["D", "E", "C"]
def test_floyd_warshall_shortest_path_d_to_g_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, D to G."""
    assert complex_wgraph.shortest_floyd_warshall("D", "G") == ["D", "E", "G"]
def test_floyd_warshall_shortest_path_d_to_f_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, D to F."""
    assert complex_wgraph.shortest_floyd_warshall("D", "F") == ["D", "E", "F"]
def test_floyd_warshall_shortest_path_e_to_b_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, E to B."""
    assert complex_wgraph.shortest_floyd_warshall("E", "B") == ["E", "C", "D", "B"]
def test_floyd_warshall_shortest_path_e_to_d_complex(complex_wgraph):
    """Test the floyd Warshall shortest path for complex graph, E to D."""
    assert complex_wgraph.shortest_floyd_warshall("E", "D") == ["E", "C", "D"]
# basic_wgraph
def test_floyd_warshall_shortest_path_a_to_b_basic(basic_wgraph):
    """Test the floyd Warshall shortest path for basic graph, A to B."""
    assert basic_wgraph.shortest_floyd_warshall("A", "B") == ["A", "D", "B"]
def test_floyd_warshall_shortest_path_b_to_c_basic(basic_wgraph):
    """Test the floyd Warshall shortest path for basic graph, B to C."""
    assert basic_wgraph.shortest_floyd_warshall("B", "C") == ["B", "A", "C"]
def test_floyd_warshall_shortest_path_d_to_c_basic(basic_wgraph):
    """Test the floyd Warshall shortest path for basic graph, D to C."""
    assert basic_wgraph.shortest_floyd_warshall("D", "C") == ["D", "B", "A", "C"]
def test_floyd_warshall_shortest_path_c_to_a_basic(basic_wgraph):
    """Test the floyd Warshall shortest path for basic graph, C to A."""
    assert basic_wgraph.shortest_floyd_warshall("C", "A") == ["C", "D", "B", "A"]
def test_floyd_warshall_shortest_path_c_to_b_basic(basic_wgraph):
    """Test the floyd Warshall shortest path for basic graph, C to B."""
    assert basic_wgraph.shortest_floyd_warshall("C", "B") == ["C", "D", "B"]
# all_negative_wgraph
# Until we refactor our WGraph class to be ble to take in negative numbers, we're leaving negatives out.
# def test_floyd_warshall_shortest_path_a_to_g_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, A to G."""
# assert all_negative_wgraph.shortest_floyd_warshall("A", "G") == ["A", "C", "G"]
# def test_floyd_warshall_shortest_path_b_to_d_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, B to D."""
# assert all_negative_wgraph.shortest_floyd_warshall("B", "D") == ["B", "C", "D"]
# def test_floyd_warshall_shortest_path_a_to_f_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, A to F."""
# assert all_negative_wgraph.shortest_floyd_warshall("A", "F") == ["A", "C", "F"]
# def test_floyd_warshall_shortest_path_d_to_c_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, d to c."""
# assert all_negative_wgraph.shortest_floyd_warshall("D", "C") == ["D", "C", "C"]
# def test_floyd_warshall_shortest_path_d_to_g_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, d to g."""
# assert all_negative_wgraph.shortest_floyd_warshall("D", "G") == ["D", "C", "G"]
# def test_floyd_warshall_shortest_path_d_to_f_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, d to f."""
# assert all_negative_wgraph.shortest_floyd_warshall("D", "F") == ["D", "C", "F"]
# def test_floyd_warshall_shortest_path_e_to_b_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, e to b."""
# assert all_negative_wgraph.shortest_floyd_warshall("E", "B") == ["E", "C", "B"]
# def test_floyd_warshall_shortest_path_e_to_d_all_negative(all_negative_wgraph):
# """Test the floyd Warshall shortest path for all negative graph, e to d."""
# assert all_negative_wgraph.shortest_floyd_warshall("E", "D") == ["E", "C", "D"]
|
|
import collections
import functools
import json
import mock
import six
from fabric import api as fab
import fabricio
from fabricio import docker, utils
from fabricio.docker import stack as stack_module
from tests import SucceededResult, args_parser, FabricioTestCase
def as_ordereddict(result):
    """Return *result*'s items as an OrderedDict sorted by key."""
    sorted_items = sorted(result.items())
    return collections.OrderedDict(sorted_items)
class StackTestCase(FabricioTestCase):
maxDiff = None
    def setUp(self):
        # Replace the module-level open() used by the stack module with a
        # MagicMock so tests never touch the real filesystem, and patch
        # fabric's cd() context manager for the duration of each test.
        stack_module.open = mock.MagicMock()
        self.cd = mock.patch.object(fab, 'cd')
        self.cd.start()
    def tearDown(self):
        # Restore the real builtin open() on the stack module and stop the
        # fab.cd patch started in setUp.
        stack_module.open = open
        self.cd.stop()
    @mock.patch.object(fabricio, 'log')
    @mock.patch.object(stack_module, 'dict', new=collections.OrderedDict)
    @mock.patch.object(stack_module, 'set', new=utils.OrderedSet)
    @mock.patch.object(json, 'loads', new=functools.partial(json.loads, object_hook=as_ordereddict))
    @mock.patch.object(fab, 'put')
    def test_update(self, put, *args):
        """Data-driven check of Stack.update().

        Each entry in ``cases`` supplies constructor/update kwargs, the
        queued results for the mocked ``fabricio.run`` (``side_effect``),
        the exact shell commands expected in order
        (``expected_command_args``), the expected return value of
        ``update()`` and, when a config file should have been read, its
        expected filename.  Deterministic dict/set/json ordering is forced
        via the decorators above so generated commands are stable.
        """
        cases = dict(
            worker=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs={},
                side_effect=[
                    SucceededResult('  Is Manager: false'),  # manager status
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                ],
                expected_result=None,
                all_hosts=['host1', 'host2'],
            ),
            no_changes=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs={},
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'Y29tcG9zZS55bWw=',
                            'fabricio.digests': 'e30=',
                        },
                    }}])),  # image info
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=False,
                expected_config_filename='docker-compose.yml',
            ),
            forced=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(force=True),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # backup image info
                    fabricio.Error(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {
                        'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack'],
                    },
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            created=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # current image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # backup image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            created_skip_sentinels_errors=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    fabricio.Error(),  # update sentinel images
                    fabricio.Error(),  # stack images
                    fabricio.Error(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            created_with_custom_compose=dict(
                init_kwargs=dict(name='stack', options=dict(config='compose.yml')),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='compose.yml',
            ),
            created_with_custom_compose2=dict(
                init_kwargs=dict(name='stack', options={'compose-file': 'compose.yml'}),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='compose.yml',
            ),
            created_with_custom_compose3=dict(
                init_kwargs=dict(name='stack', options={'compose-file': '/custom/compose.yml'}),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='/custom/compose.yml',
            ),
            created_with_custom_image=dict(
                init_kwargs=dict(name='stack', image='image:tag'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM image:tag\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            created_with_custom_image_update_params=dict(
                init_kwargs=dict(name='stack', image='image:tag'),
                update_kwargs=dict(tag='new-tag', registry='registry', account='account'),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM registry/account/image:new-tag\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            created_from_empty_image_with_custom_image_update_params=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(tag='registry/account/image:tag'),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult(),  # stack images
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['echo', 'FROM registry/account/image:tag\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            updated_compose_changed=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'b2xkLWNvbXBvc2UueW1s',
                            'fabricio.digests': 'eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=',
                        },
                    }}])),  # current image info
                    SucceededResult(),  # stack deploy
                    SucceededResult('[{"Parent": "backup_parent_id"}]'),  # backup image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult('service image:tag'),  # stack images
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image pull
                    SucceededResult('digest'),  # images digests
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack' , 'backup_parent_id;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {'args': ['docker', 'tag', 'image:tag', 'fabricio-temp-image:image', '&&', 'docker', 'rmi', 'image:tag']}, {'args': ['docker', 'pull', 'image:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image']},
                    {
                        'args': ['docker', 'inspect', '--type', 'image', '--format', '{{index .RepoDigests 0}}', 'image:tag'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            updated_image_changed=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'Y29tcG9zZS55bWw=',
                            'fabricio.digests': 'eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=',
                        },
                    }}])),  # image info
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image pull
                    SucceededResult('new-digest'),  # images digests
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # backup image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult('service image:tag'),  # stack images
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image pull
                    SucceededResult('new-digest'),  # images digests
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {'args': ['docker', 'tag', 'image:tag', 'fabricio-temp-image:image', '&&', 'docker', 'rmi', 'image:tag']}, {'args': ['docker', 'pull', 'image:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image']},
                    {
                        'args': ['docker', 'inspect', '--type', 'image', '--format', '{{index .RepoDigests 0}}', 'image:tag'],
                    },
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {'args': ['docker', 'tag', 'image:tag', 'fabricio-temp-image:image', '&&', 'docker', 'rmi', 'image:tag']}, {'args': ['docker', 'pull', 'image:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image']},
                    {
                        'args': ['docker', 'inspect', '--type', 'image', '--format', '{{index .RepoDigests 0}}', 'image:tag'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=eyJpbWFnZTp0YWciOiAibmV3LWRpZ2VzdCJ9\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
            updated_images_changed=dict(
                init_kwargs=dict(name='stack'),
                update_kwargs=dict(),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'Y29tcG9zZS55bWw=',
                            'fabricio.digests': 'eyJpbWFnZTE6dGFnIjogImRpZ2VzdDEiLCAiaW1hZ2UyOnRhZyI6ICJkaWdlc3QyIn0=',
                        },
                    }}])),  # image info
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image1 pull
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image2 pull
                    SucceededResult('new-digest1\nnew-digest2\n'),  # images digests
                    SucceededResult(),  # stack deploy
                    docker.ImageNotFoundError(),  # backup image info
                    SucceededResult(),  # update sentinel images
                    SucceededResult('service1 image1:tag\nservice2 image2:tag\n'),  # stack images
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image1 pull
                    SucceededResult(), SucceededResult(), SucceededResult(),  # image2 pull
                    SucceededResult('new-digest1\nnew-digest2\n'),  # images digests
                    SucceededResult(),  # build new sentinel image
                    SucceededResult(),  # remove config file
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {'args': ['docker', 'tag', 'image1:tag', 'fabricio-temp-image:image1', '&&', 'docker', 'rmi', 'image1:tag']}, {'args': ['docker', 'pull', 'image1:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image1']},
                    {'args': ['docker', 'tag', 'image2:tag', 'fabricio-temp-image:image2', '&&', 'docker', 'rmi', 'image2:tag']}, {'args': ['docker', 'pull', 'image2:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image2']},
                    {
                        'args': ['docker', 'inspect', '--type', 'image', '--format', '{{index .RepoDigests 0}}', 'image1:tag', 'image2:tag'],
                    },
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {'args': ['docker', 'tag', 'image1:tag', 'fabricio-temp-image:image1', '&&', 'docker', 'rmi', 'image1:tag']}, {'args': ['docker', 'pull', 'image1:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image1']},
                    {'args': ['docker', 'tag', 'image2:tag', 'fabricio-temp-image:image2', '&&', 'docker', 'rmi', 'image2:tag']}, {'args': ['docker', 'pull', 'image2:tag']}, {'args': ['docker', 'rmi', 'fabricio-temp-image:image2']},
                    {
                        'args': ['docker', 'inspect', '--type', 'image', '--format', '{{index .RepoDigests 0}}', 'image1:tag', 'image2:tag'],
                    },
                    {
                        'args': ['echo', 'FROM scratch\nLABEL fabricio.configuration=Y29tcG9zZS55bWw= fabricio.digests=eyJpbWFnZTE6dGFnIjogIm5ldy1kaWdlc3QxIiwgImltYWdlMjp0YWciOiAibmV3LWRpZ2VzdDIifQ==\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                ],
                expected_result=True,
                expected_config_filename='docker-compose.yml',
            ),
        )
        # Run every case as a subtest with a fresh Stack and mocked run().
        for case, data in cases.items():
            with self.subTest(case):
                fab.env.command = '{0}__{1}'.format(self, case)
                with mock.patch.dict(fab.env, dict(all_hosts=data.get('all_hosts', ['host']))):
                    stack_module.open.return_value = six.BytesIO(b'compose.yml')
                    stack_module.open.reset_mock()
                    put.reset_mock()
                    stack = docker.Stack(**data.get('init_kwargs', {}))
                    side_effect = self.command_checker(
                        args_parsers=args_parser,
                        expected_args_set=data.get('expected_command_args', []),
                        side_effects=data.get('side_effect', []),
                    )
                    with mock.patch.object(fabricio, 'run', side_effect=side_effect) as run:
                        with mock.patch('fabricio.operations.run', run):
                            with mock.patch('six.BytesIO') as compose_file:
                                result = stack.update(**data.get('update_kwargs', {}))
                    self.assertEqual(data['expected_result'], result)
                    expected_compose_file_name = data.get('expected_config_filename')
                    if expected_compose_file_name:
                        stack_module.open.assert_called_once_with(expected_compose_file_name, 'rb')
                        put.assert_called_once()
                        compose_file.assert_called_once_with(b'compose.yml')
    @mock.patch.object(stack_module, 'dict', new=collections.OrderedDict)
    @mock.patch.object(fab, 'put')
    def test_revert(self, put, *args):
        """Data-driven check of Stack.revert().

        Each case provides the mocked command results (``side_effect``),
        the exact expected shell commands in order, and — when the backup
        sentinel carries a compose configuration — the bytes expected to be
        uploaded as the restored compose file.
        """
        cases = dict(
            worker=dict(
                init_kwargs=dict(name='stack'),
                side_effect=[
                    SucceededResult('  Is Manager: false'),  # manager status
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                ],
                all_hosts=['host1', 'host2'],
            ),
            reverted=dict(
                init_kwargs=dict(name='stack'),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'b2xkLWNvbXBvc2UueW1s',
                        },
                    }}])),  # backup image info
                    SucceededResult(),  # stack deploy
                    SucceededResult(),  # remove config file
                    SucceededResult('[{"Parent": "current_parent_id"}]'),  # current image info
                    SucceededResult(),  # update sentinel images
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-current-stack:stack', 'current_parent_id;', 'docker', 'tag', 'fabricio-backup-stack:stack', 'fabricio-current-stack:stack;', 'docker', 'rmi', 'fabricio-backup-stack:stack'],
                    },
                ],
                expected_compose_file=b'old-compose.yml',
            ),
            reverted_with_service_update=dict(
                init_kwargs=dict(name='stack'),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'Y29tcG9zZS55bWw=',  # compose.yml
                            'fabricio.digests': 'eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=',  # {"image:tag": "digest"}
                        },
                    }}])),  # image info
                    SucceededResult(),  # stack deploy
                    SucceededResult('service image:tag\n'),  # stack services
                    SucceededResult(),  # service update
                    SucceededResult(),  # remove config file
                    SucceededResult('[{"Parent": "current_parent_id"}]'),  # current image info
                    SucceededResult(),  # update sentinel images
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {
                        'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['docker', 'service', 'update', '--image', 'digest', 'service'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-current-stack:stack', 'current_parent_id;', 'docker', 'tag', 'fabricio-backup-stack:stack', 'fabricio-current-stack:stack;', 'docker', 'rmi', 'fabricio-backup-stack:stack'],
                    },
                ],
                expected_compose_file=b'compose.yml',
            ),
            reverted_with_services_updates=dict(
                init_kwargs=dict(name='stack'),
                side_effect=[
                    SucceededResult('  Is Manager: true'),  # manager status
                    SucceededResult(json.dumps([{'Config': {
                        'Labels': {
                            'fabricio.configuration': 'Y29tcG9zZS55bWw=',  # compose.yml
                            'fabricio.digests': 'eyJpbWFnZTE6dGFnIjogImRpZ2VzdDEiLCAiaW1hZ2UyOnRhZyI6ICJkaWdlc3QyIn0=',  # {"image1:tag": "digest1", "image2:tag": "digest2"}
                        },
                    }}])),  # image info
                    SucceededResult(),  # stack deploy
                    SucceededResult('service1 image1:tag\nservice2 image2:tag'),  # stack services
                    SucceededResult(),  # service update
                    SucceededResult(),  # service update
                    SucceededResult(),  # remove config file
                    SucceededResult('[{"Parent": "current_parent_id"}]'),  # current image info
                    SucceededResult(),  # update sentinel images
                ],
                expected_command_args=[
                    {
                        'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                    },
                    {
                        'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack'],
                    },
                    {
                        'args': ['docker', 'stack', 'services', '--format', '{{.Name}} {{.Image}}', 'stack'],
                    },
                    {
                        'args': ['docker', 'service', 'update', '--image', 'digest1', 'service1'],
                    },
                    {
                        'args': ['docker', 'service', 'update', '--image', 'digest2', 'service2'],
                    },
                    {'args': ['rm', '-f', 'docker-compose.yml']},
                    {'args': ['docker', 'inspect', '--type', 'image', 'fabricio-current-stack:stack']},
                    {
                        'args': ['docker', 'rmi', 'fabricio-current-stack:stack', 'current_parent_id;', 'docker', 'tag', 'fabricio-backup-stack:stack', 'fabricio-current-stack:stack;', 'docker', 'rmi', 'fabricio-backup-stack:stack'],
                    },
                ],
                expected_compose_file=b'compose.yml',
            ),
        )
        # Run every case as a subtest with a fresh Stack and mocked run().
        for case, data in cases.items():
            with self.subTest(case=case):
                fab.env.command = '{0}__{1}'.format(self, case)
                put.reset_mock()
                with mock.patch.dict(fab.env, dict(all_hosts=data.get('all_hosts', ['host']))):
                    stack = docker.Stack(**data.get('init_kwargs', {}))
                    side_effects = data.get('side_effect', [])
                    side_effect = self.command_checker(
                        args_parsers=args_parser,
                        expected_args_set=data.get('expected_command_args', []),
                        side_effects=side_effects,
                    )
                    with mock.patch.object(fabricio, 'run', side_effect=side_effect) as run:
                        with mock.patch('fabricio.operations.run', run):
                            with mock.patch('six.BytesIO') as compose_file:
                                stack.revert()
                    self.assertEqual(run.call_count, len(side_effects))
                    expected_compose_file = data.get('expected_compose_file')
                    if expected_compose_file:
                        put.assert_called_once()
                        compose_file.assert_called_once_with(expected_compose_file)
                    else:
                        put.assert_not_called()
def test_revert_raises_error_when_backup_not_found(self):
side_effect = self.command_checker(
args_parsers=args_parser,
expected_args_set=[
{
'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
},
{'args': ['docker', 'inspect', '--type', 'image', 'fabricio-backup-stack:stack']},
],
side_effects=[
SucceededResult(' Is Manager: true'), # manager status
docker.ImageNotFoundError(), # image info
],
)
with mock.patch.object(fabricio, 'run', side_effect=side_effect):
fab.env.command = 'test_stack_revert_raises_error_when_backup_not_found'
stack = docker.Stack(name='stack')
with self.assertRaises(docker.ServiceError):
stack.revert()
@mock.patch.object(docker.Stack, 'is_manager', return_value=True)
@mock.patch.object(fabricio, 'run', side_effect=Exception())
def test_revert_does_not_rollback_sentinels_on_error(self, *args):
with mock.patch.object(docker.Stack, 'rotate_sentinel_images') as rotate_sentinel_images:
fab.env.command = 'test_stack_revert_does_not_rollback_sentinels_on_error'
stack = docker.Stack(name='stack')
with self.assertRaises(Exception):
stack.revert()
stack.revert()
rotate_sentinel_images.assert_not_called()
@mock.patch.object(docker.ManagedService, 'is_manager', return_value=True)
@mock.patch.object(fabricio, 'run')
def test_destroy(self, run, *_):
run.side_effect = [SucceededResult('service image')] + [SucceededResult('[{"Parent": "parent_id"}]')] * 4
stack = docker.Stack(name='name')
stack.destroy()
self.assertListEqual(
[
mock.call('docker stack services --format "{{.Name}} {{.Image}}" name'),
mock.call('docker stack rm name'),
mock.call('docker inspect --type image fabricio-current-stack:name', abort_exception=docker.ImageNotFoundError),
mock.call('docker inspect --type image fabricio-backup-stack:name', abort_exception=docker.ImageNotFoundError),
mock.call('docker rmi fabricio-current-stack:name fabricio-backup-stack:name parent_id parent_id image', ignore_errors=True),
],
run.mock_calls,
)
|
|
import urllib2
import urllib
import re
import sys
import os
import time
from threading import Thread, Lock, Event
class Task(Thread):
    """Worker thread that delegates its work to a wrapped object's run()."""

    # Class-level default so the attribute exists even before __init__ runs.
    _obj = None

    def __init__(self, obj, args=None):
        super(Task, self).__init__()
        self._obj = obj
        self._args = args
        # Control flags intended to be polled by the wrapped job.
        self._flag_stop = Event()
        self._flag_pause = Event()
        self._flag_resume = Event()

    def run(self):
        print('starting task...')
        job = self._obj
        if job:
            job.run()
class PyDownloader():
CHUNK_SIZE = 1024 * 1024
DOWNLOAD_DIR = 'download'
def __init__(self):
self.task = None
self.linkList = []
self.link = None
self.urlName = ''
self.chunkSize = self.CHUNK_SIZE
self.downloadPath = self.DOWNLOAD_DIR
self.fileType = 'mp4'
self.keyword = 'test'
self.webData = ''
self.totalFileCount = 0
self.downloadedFileCount = 0
self.currentDownloadFile = 'None'
self.currentDownloadProgress = 0
self.currentDownloadSize = 0
self.currentDownloadSpeed = 0
def resetStats(self):
self.webData = ''
self.totalFileCount = 0
self.downloadedFileCount = 0
self.currentDownloadFile = 'None'
self.currentDownloadProgress = 0
self.currentDownloadSize = 0
self.linkList = []
    def getDownloadLinks(self):
        # Return the live list of parsed download URLs (a reference, not a copy).
        return self.linkList
def getTaskStatus(self):
if self.task and self.task.isAlive():
return "active"
else:
return "stopped"
def setChunkSize(self, chunkSize):
if chunkSize != None and chunkSize != '':
self.chunkSize = chunkSize
else:
self.chunkSize = self.CHUNK_SIZE
def setFileType(self, fileType):
if fileType != None:
self.fileType = fileType
def setKeyword(self, keyword):
if keyword != None:
self.keyword = keyword
def setDownloadPath(self, path):
if path != None and path != '':
self.downloadPath = path
else:
self.downloadPath = self.DOWNLOAD_DIR
if not os.path.isdir(self.downloadPath):
os.mkdir(self.downloadPath)
def stop(self):
if self.task:
self.task._flag_stop.set()
return ({'status':'ok', 'response':{'error':'task stop request received'}})
else:
return ({'status':'ok', 'response':{'error':'task not started'}})
def start(self):
if self.getTaskStatus() != 'active':
self.resetStats()
self.task = Task(self)
self.task.start()
return ({'status':'ok', 'response':{'error':'task started'}})
else:
return ({'status':'ok', 'response':{'error':'task is already active'}})
def run(self):
"""Download task """
try:
if self.keyword != '' or self.fileType != '':
print('parsing web link...')
self.webData = self.link.read()
self.getLinks()
else:
print('downloading web link...')
self.linkList.append(self.urlName)
self.totalFileCount = 1
if len(self.linkList):
print('\n'.join([urllib.unquote(i) for i in self.linkList]))
print('\n---------------------------------------------------------')
print('total links = %d' % self.totalFileCount)
print('\n---------------------------------------------------------')
ret = self.downloadLinks()
print(ret)
#self.linkList = [urllib.unquote(i) for i in self.linkList]
except Exception as e:
raise
def getLinks(self):
self.linkList = []
if self.keyword != '' and self.fileType != '':
self.linkList = re.findall(r'<a\s+href\s*=\s*\"(.*%s.*\.%s)\s*\"' % \
(self.keyword, self.fileType), self.webData, re.I)
elif self.keyword == '' and self.fileType != '':
self.linkList = re.findall(r'<a\s+href\s*=\s*\"(.+?\.%s)\s*\"' % \
(self.fileType), self.webData, re.I)
elif self.keyword != '' and self.fileType == '':
self.linkList = re.findall(r'<a\s+href\s*=\s*\"(.*%s.*\..+)\s*\"' % \
(self.keyword), self.webData, re.I)
self.totalFileCount = len(self.linkList)
"""
def getLinks(self):
self.linkList = []
if self.keyword != '' and self.fileType != '':
self.linkList = re.findall(r'\".*(http.*?%s.*\.%s)\s*\"' % \
(self.keyword, self.fileType), self.webData, re.I)
elif self.keyword == '' and self.fileType != '':
self.linkList = re.findall(r'\".*(http.+?\.%s)\s*\"' % \
(self.fileType), self.webData, re.I)
elif self.keyword != '' and self.fileType == '':
self.linkList = re.findall(r'\".*(http.*?%s.*\..+)\s*\"' % \
(self.keyword), self.webData, re.I)
self.totalFileCount = len(self.linkList)
"""
def getURLInfo(self, url):
info = {}
request = urllib2.Request(url)
link = urllib2.urlopen(request)
try:
if link.info()['accept-ranges'] == 'bytes':
info['resumeSupport'] = True
else:
info['resumeSupport'] = False
except:
info['resumeSupport'] = False
info['fileSize'] = link.info()['content-length']
info['date'] = link.info()['date']
return info
def downloadLinks(self):
"""Download all the links sequentially"""
for item in self.linkList:
self.currentDownloadProgress = 0
sizeCompleted = 0
if 'http' not in item:
self.currentDownloadFile = self.urlName + item
else:
self.currentDownloadFile = item
try:
localFileName = self.downloadPath + '/' + urllib.unquote(item).split('/')[-1]
urlInfo = self.getURLInfo(self.currentDownloadFile)
if urlInfo['resumeSupport']:
print("server file resume supported")
else:
print("server file resume NOT supported")
if os.path.isfile(localFileName) and urlInfo['resumeSupport']:
sizeCompleted = os.path.getsize(localFileName)
if sizeCompleted >= int(urlInfo['fileSize']):
self.downloadedFileCount += 1
continue
self.fd = open(localFileName, 'ab+')
self.fd.seek(sizeCompleted)
else:
self.fd = open(localFileName, 'wb')
request = urllib2.Request(self.currentDownloadFile)
if urlInfo['resumeSupport']:
request.headers['range'] = 'bytes=%s-' % (sizeCompleted)
self.link = urllib2.urlopen(request)
self.fileSize = int(urlInfo['fileSize'])
self.currentDownloadProgress = int((sizeCompleted / float(self.fileSize)) * 100)
self.currentDownloadSize = self.fileSize
print('downloading %s [%d bytes]...' % (urllib.unquote(item), self.fileSize))
while True:
if self.task._flag_stop.is_set():
self.fd.close()
return ({'status':'success', 'response':{'error':'user stopped service'}})
timeEnter = time.time()
chunk = self.link.read(self.chunkSize)
timeExit = time.time()
self.currentDownloadSpeed = int((self.chunkSize) / ((timeExit - timeEnter) * 1024.0))
if not chunk:
break
else:
self.fd.write(chunk)
sizeCompleted += self.chunkSize
self.currentDownloadProgress = int((sizeCompleted / float(self.fileSize)) * 100)
if self.currentDownloadProgress > 100: self.currentDownloadProgress = 100
sys.stdout.write('\r%3d%%' % (self.currentDownloadProgress))
sys.stdout.flush()
self.fd.close()
self.downloadedFileCount += 1
print(' (%d/%d) downloaded\n' % (self.downloadedFileCount, self.totalFileCount))
except Exception as e:
continue
#return ({'status':'error', 'response':{'error':'%s' % str(e)}})
return ({'status':'success', 'response':{'file_count':'%d' % self.downloadedFileCount}})
def setURL(self, url):
self.url = url
try:
self.link = urllib2.urlopen(url)
self.urlName = self.link.geturl()
return ({'status':'ok', 'response':{'url':'%s' % self.urlName}})
except Exception as e:
return ({'status':'error', 'response':{'error':'%s' % str(e)}})
|
|
#
"""leveldb log datastore.
Format is described at:
https://github.com/google/leveldb/blob/master/doc/log_format.md
block := record* trailer?
record :=
checksum: uint32 // crc32c of type and data[] ; little-endian
length: uint16 // little-endian
type: uint8 // One of FULL, FIRST, MIDDLE, LAST
data: uint8[length]
header :=
ident: char[4]
magic: uint16
version: uint8
"""
from __future__ import print_function
import logging
import os
import struct
import zlib
import wandb
logger = logging.getLogger(__name__)
# On-disk framing constants (see module docstring for the leveldb log format).
LEVELDBLOG_HEADER_LEN = 7  # bytes: uint32 crc + uint16 length + uint8 type
LEVELDBLOG_BLOCK_LEN = 32768  # records never span a 32 KiB block boundary
LEVELDBLOG_DATA_LEN = LEVELDBLOG_BLOCK_LEN - LEVELDBLOG_HEADER_LEN
# Record fragment types: FULL fits in one record; larger payloads are split
# into FIRST, zero or more MIDDLE, then LAST.
LEVELDBLOG_FULL = 1
LEVELDBLOG_FIRST = 2
LEVELDBLOG_MIDDLE = 3
LEVELDBLOG_LAST = 4
# File header fields written/validated by DataStore.
LEVELDBLOG_HEADER_IDENT = ":W&B"
LEVELDBLOG_HEADER_MAGIC = (
    0xBEE1  # zlib.crc32(bytes("Weights & Biases", 'iso8859-1')) & 0xffff
)
LEVELDBLOG_HEADER_VERSION = 0
try:
    bytes("", "ascii")

    def strtobytes(x):
        """Convert a str to bytes (Python 3 branch; Latin-1 preserves bytes 1:1)."""
        return bytes(x, "iso8859-1")

    # def bytestostr(x):
    #     return str(x, 'iso8859-1')
except Exception:
    # Python 2: str already is bytes, so conversion is the identity.
    strtobytes = str
    # bytestostr = str
class DataStore(object):
    """Append-only record store using the leveldb log format.

    Payloads are framed into length-prefixed, crc32-checksummed records that
    never span a 32 KiB block boundary; a payload that does not fit into the
    space remaining in the current block is split across FIRST/MIDDLE/LAST
    fragments.  See the module docstring for the on-disk layout.
    """

    def __init__(self):
        self._opened_for_scan = False
        self._fp = None  # underlying file object (None until opened)
        self._index = 0  # current byte offset within the file
        self._size_bytes = 0  # total file size, set when opened for scan
        # Pre-seed one crc32 per record type (index 0 unused) so each record
        # checksum only has to fold in the payload bytes on top of the type.
        self._crc = [0] * (LEVELDBLOG_LAST + 1)
        for x in range(1, LEVELDBLOG_LAST + 1):
            self._crc[x] = zlib.crc32(strtobytes(chr(x))) & 0xFFFFFFFF
        # NOTE(review): this asserts the truthiness of the attribute itself;
        # if _assert_is_internal_process is a callable it is always truthy —
        # confirm whether it should be invoked instead.
        assert (
            wandb._assert_is_internal_process
        ), "DataStore can only be used in the internal process"

    def open_for_write(self, fname):
        """Create a new log file and write the 7-byte header.

        Uses exclusive-create mode ("xb") so an existing file is never
        clobbered; raises FileExistsError in that case.
        """
        self._fname = fname
        logger.info("open: %s", fname)
        open_flags = "xb"
        self._fp = open(fname, open_flags)
        self._write_header()

    def open_for_append(self, fname):
        # TODO: implement
        # NOTE(review): "wb" truncates the file, so this does not append yet,
        # and _index is not positioned after the existing data.
        self._fname = fname
        logger.info("open: %s", fname)
        self._fp = open(fname, "wb")
        # do something with _index

    def open_for_scan(self, fname):
        """Open an existing log file for sequential reading and validate its header."""
        self._fname = fname
        logger.info("open for scan: %s", fname)
        self._fp = open(fname, "rb")
        self._index = 0
        self._size_bytes = os.stat(fname).st_size
        self._opened_for_scan = True
        self._read_header()

    def in_last_block(self):
        """When reading, we want to know if we're in the last block to
        handle in progress writes"""
        return self._index > self._size_bytes - LEVELDBLOG_DATA_LEN

    def scan_record(self):
        """Read one record fragment.

        Returns (type, payload-bytes), or None at end of file.
        Raises AssertionError on a truncated header or checksum mismatch.
        """
        assert self._opened_for_scan, "file not open for scanning"
        # TODO(jhr): handle some assertions as file corruption issues
        # assume we have enough room to read header, checked by caller?
        header = self._fp.read(LEVELDBLOG_HEADER_LEN)
        if len(header) == 0:
            return None
        assert (
            len(header) == LEVELDBLOG_HEADER_LEN
        ), "record header is {} bytes instead of the expected {}".format(
            len(header), LEVELDBLOG_HEADER_LEN
        )
        fields = struct.unpack("<IHB", header)
        checksum, dlength, dtype = fields
        # check len, better fit in the block
        self._index += LEVELDBLOG_HEADER_LEN
        data = self._fp.read(dlength)
        # Stored checksum covers the type byte (via the pre-seeded crc)
        # followed by the payload.
        checksum_computed = zlib.crc32(data, self._crc[dtype]) & 0xFFFFFFFF
        assert (
            checksum == checksum_computed
        ), "record checksum is invalid, data may be corrupt"
        self._index += dlength
        return dtype, data

    def scan_data(self):
        """Read one complete logical payload, reassembling split fragments.

        Returns the payload bytes, or None at end of file.
        """
        # TODO(jhr): handle some assertions as file corruption issues
        # If fewer than LEVELDBLOG_HEADER_LEN bytes remain in this block, the
        # writer zero-padded to the block boundary; consume and verify it.
        offset = self._index % LEVELDBLOG_BLOCK_LEN
        space_left = LEVELDBLOG_BLOCK_LEN - offset
        if space_left < LEVELDBLOG_HEADER_LEN:
            pad_check = strtobytes("\x00" * space_left)
            pad = self._fp.read(space_left)
            # verify they are zero
            assert pad == pad_check, "invalid padding"
            self._index += space_left
        record = self.scan_record()
        if record is None:  # eof
            return None
        dtype, data = record
        if dtype == LEVELDBLOG_FULL:
            return data
        assert (
            dtype == LEVELDBLOG_FIRST
        ), "expected record to be type {} but found {}".format(LEVELDBLOG_FIRST, dtype)
        # Accumulate MIDDLE fragments until the LAST fragment closes the payload.
        while True:
            record = self.scan_record()
            if record is None:  # eof
                return None
            dtype, new_data = record
            if dtype == LEVELDBLOG_LAST:
                data += new_data
                break
            assert (
                dtype == LEVELDBLOG_MIDDLE
            ), "expected record to be type {} but found {}".format(
                LEVELDBLOG_MIDDLE, dtype
            )
            data += new_data
        return data

    def _write_header(self):
        """Write the 7-byte file header (ident, magic, version)."""
        data = struct.pack(
            "<4sHB",
            strtobytes(LEVELDBLOG_HEADER_IDENT),
            LEVELDBLOG_HEADER_MAGIC,
            LEVELDBLOG_HEADER_VERSION,
        )
        assert (
            len(data) == LEVELDBLOG_HEADER_LEN
        ), "header size is {} bytes, expected {}".format(
            len(data), LEVELDBLOG_HEADER_LEN
        )
        self._fp.write(data)
        self._index += len(data)

    def _read_header(self):
        """Read and validate the file header; raises Exception on any mismatch."""
        header = self._fp.read(LEVELDBLOG_HEADER_LEN)
        assert (
            len(header) == LEVELDBLOG_HEADER_LEN
        ), "header is {} bytes instead of the expected {}".format(
            len(header), LEVELDBLOG_HEADER_LEN
        )
        ident, magic, version = struct.unpack("<4sHB", header)
        if ident != strtobytes(LEVELDBLOG_HEADER_IDENT):
            raise Exception("Invalid header")
        if magic != LEVELDBLOG_HEADER_MAGIC:
            raise Exception("Invalid header")
        if version != LEVELDBLOG_HEADER_VERSION:
            raise Exception("Invalid header")
        self._index += len(header)

    def _write_record(self, s, dtype=None):
        """Write record that must fit into a block.

        :param s: bytes Payload for this single record fragment
        :param dtype: Record type constant; defaults to LEVELDBLOG_FULL
        """
        # double check that there is enough space
        # (this is a precondition to calling this method)
        assert len(s) + LEVELDBLOG_HEADER_LEN <= (
            LEVELDBLOG_BLOCK_LEN - self._index % LEVELDBLOG_BLOCK_LEN
        ), "not enough space to write new records"
        dlength = len(s)
        dtype = dtype or LEVELDBLOG_FULL
        checksum = zlib.crc32(s, self._crc[dtype]) & 0xFFFFFFFF
        self._fp.write(struct.pack("<IHB", checksum, dlength, dtype))
        if dlength:
            self._fp.write(s)
        self._index += LEVELDBLOG_HEADER_LEN + len(s)

    def _write_data(self, s):
        """Write payload ``s``, splitting across blocks as needed, then fsync.

        Returns (file_offset, bytes_written, flush_index, flush_offset);
        the flush_* values are currently always 0.
        """
        file_offset = self._index
        flush_index = 0
        flush_offset = 0
        offset = self._index % LEVELDBLOG_BLOCK_LEN
        space_left = LEVELDBLOG_BLOCK_LEN - offset
        data_used = 0
        data_left = len(s)
        # Too little room even for a record header: zero-pad to the boundary.
        if space_left < LEVELDBLOG_HEADER_LEN:
            pad = "\x00" * space_left
            self._fp.write(strtobytes(pad))
            self._index += space_left
            offset = 0
            space_left = LEVELDBLOG_BLOCK_LEN
        # does it fit in first (possibly partial) block?
        if data_left + LEVELDBLOG_HEADER_LEN <= space_left:
            self._write_record(s)
        else:
            # write first record (we could still be in the middle of a block,
            # but this write will end on a block boundary)
            data_room = space_left - LEVELDBLOG_HEADER_LEN
            self._write_record(s[:data_room], LEVELDBLOG_FIRST)
            data_used += data_room
            data_left -= data_room
            assert data_left, "data_left should be non-zero"
            # write middles (if any)
            while data_left > LEVELDBLOG_DATA_LEN:
                self._write_record(
                    s[data_used : data_used + LEVELDBLOG_DATA_LEN],  # noqa: E203
                    LEVELDBLOG_MIDDLE,
                )
                data_used += LEVELDBLOG_DATA_LEN
                data_left -= LEVELDBLOG_DATA_LEN
            # write last and flush the entire block to disk
            self._write_record(s[data_used:], LEVELDBLOG_LAST)
        self._fp.flush()
        os.fsync(self._fp.fileno())
        return file_offset, self._index - file_offset, flush_index, flush_offset

    def write(self, obj):
        """Write a protocol buffer.
        Arguments:
            obj: Protocol buffer to write.
        Returns:
            (file_offset, length, flush_index, flush_offset) if successful,
            None otherwise
        """
        raw_size = obj.ByteSize()
        s = obj.SerializeToString()
        assert len(s) == raw_size, "invalid serialization"
        ret = self._write_data(s)
        return ret

    def close(self):
        """Close the underlying file, if one is open."""
        if self._fp is not None:
            logger.info("close: %s", self._fname)
            self._fp.close()
|
|
#!/usr/bin/env python
__author__ = 'Brian McKenna <bmckenna@asascience.com>'
from collections import Counter
import gevent
from nose.plugins.attrib import attr
import os
import smtplib
import time
import unittest
from pyon.event.event import EventPublisher
from pyon.public import IonObject, OT, CFG
from pyon.util.int_test import IonIntegrationTestCase
from interface.objects import UserInfo
from interface.objects import DeliveryModeEnum, NotificationFrequencyEnum
from interface.objects import NotificationTypeEnum
from interface.services.dm.iuser_notification_service import UserNotificationServiceClient
outbox=[] # sent emails wind up here
@unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
@attr('LOCOINT')
@attr('INT', group='dm')
class RealTimeNotificationTestCase(IonIntegrationTestCase):
    """Integration tests for REAL_TIME user notifications.

    Exercises email and SMS delivery (via a monkey-patched smtplib.SMTP),
    the per-day notification limit, and the persistence/expiry of
    notification counts in the object store.
    """
    def setUp(self):
        """Start the container and create one user with four NotificationRequests
        covering the delivery-mode/frequency combinations used by the test."""
        self._start_container()
        # patch the CFG service.user_notification.max_daily_notifications value so we only test 10
        # NOTE(review): original_CFG_max is saved here but never restored in
        # setUp/tearDown; the test method re-reads and restores its own copy.
        original_CFG_max = CFG.get_safe("service.user_notification.max_daily_notifications", 1000)
        CFG['service']['user_notification']['max_daily_notifications'] = 10
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        self.object_store = self.container.object_store
        self.resource_registry = self.container.resource_registry
        self.user_notification = UserNotificationServiceClient()
        self.event_publisher = EventPublisher()
        # create UserInfo object (user)
        user = UserInfo()
        user.name = 'Iceman'
        user.contact.email = 'iceman@example.com'
        user_id, _ = self.resource_registry.create(user)
        self.user = self.resource_registry.read(user_id)
        # create NotificationRequest objects (notifications)
        # 4 notifications are created:
        #     REAL_TIME, EMAIL(user default via UserInfo)
        #     REAL_TIME, EMAIL(in DeliveryConfiguration)
        #     DISABLED, EMAIL(in DeliveryConfiguration)
        #     REAL_TIME, SMS(in DeliveryConfiguration)
        # REAL_TIME, EMAIL(user default via UserInfo)
        delivery_configuration = IonObject(OT.DeliveryConfiguration,
                                           mode=DeliveryModeEnum.EMAIL,
                                           frequency=NotificationFrequencyEnum.REAL_TIME)
        notification_request = IonObject(OT.NotificationRequest,
                                         name='REAL_TIME to default UserInfo email',
                                         type=NotificationTypeEnum.SIMPLE,
                                         origin='Miramar',
                                         event_type=OT.ResourceLifecycleEvent,
                                         delivery_configurations=[delivery_configuration])
        # store this notification_id to check disabled_by_system status later
        self.notification_id = self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
        # REAL_TIME, EMAIL(in DeliveryConfiguration), 10 notifications/day max
        delivery_configuration = IonObject(OT.DeliveryConfiguration,
                                           email='slider@example.com',
                                           mode=DeliveryModeEnum.EMAIL,
                                           frequency=NotificationFrequencyEnum.REAL_TIME)
        notification_request = IonObject(OT.NotificationRequest,
                                         name='REAL_TIME to alternate email, 10 notifications/day max',
                                         type=NotificationTypeEnum.SIMPLE,
                                         origin="Miramar",
                                         event_type=OT.ResourceLifecycleEvent,
                                         delivery_configurations=[delivery_configuration])
        self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
        # DISABLED, EMAIL(in DeliveryConfiguration)
        delivery_configuration = IonObject(OT.DeliveryConfiguration,
                                           email='charlie@example.com',
                                           mode=DeliveryModeEnum.EMAIL,
                                           frequency=NotificationFrequencyEnum.DISABLED)
        notification_request = IonObject(OT.NotificationRequest,
                                         name='DISABLED to alternate email',
                                         type=NotificationTypeEnum.SIMPLE,
                                         origin="Miramar",
                                         event_type=OT.ResourceLifecycleEvent,
                                         delivery_configurations=[delivery_configuration])
        self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
        # REAL_TIME, SMS(in DeliveryConfiguration)
        delivery_configuration = IonObject(OT.DeliveryConfiguration,
                                           email='snot_nosed_jockey@example.com',
                                           mode=DeliveryModeEnum.SMS,
                                           frequency=NotificationFrequencyEnum.REAL_TIME)
        notification_request = IonObject(OT.NotificationRequest,
                                         name='SMS to alternate email',
                                         type=NotificationTypeEnum.SIMPLE,
                                         origin="Miramar",
                                         event_type=OT.ResourceLifecycleEvent,
                                         delivery_configurations=[delivery_configuration])
        self.user_notification.create_notification(notification=notification_request, user_id=self.user._id)
    def test_realtime_notifications(self):
        """Publish events and verify delivery routing, the daily limit, and
        the persisted notification counts (using monkey-patched SMTP/time)."""
        # monkey patch smtplib.SMTP to capture sent emails
        original_SMTP = smtplib.SMTP # store original for restoration
        class MonkeyPatchSMTP(object):
            """Stand-in for smtplib.SMTP that records messages in ``outbox``."""
            def __init__(self, address, host):
                self.address = address
                self.host = host
            def login(self,username,password):
                self.username = username
                self.password = password
            def sendmail(self,from_addr, to_addrs, msg):
                global outbox
                outbox.append((from_addr, to_addrs, msg,time.time()))
                return []
            def quit(self):
                pass
        smtplib.SMTP=MonkeyPatchSMTP
        # patch the CFG service.user_notification.max_daily_notifications value so we only test 10
        original_CFG_max = CFG.get_safe("service.user_notification.max_daily_notifications", 1000)
        CFG['service']['user_notification']['max_daily_notifications'] = 10
        # publish event(s) - one should trigger notifications, the other not
        self.event_publisher.publish_event(origin='Miramar', event_type=OT.ResourceLifecycleEvent)
        self.event_publisher.publish_event(origin='Hong Kong', event_type=OT.ResourceLifecycleEvent)
        time.sleep(1) # wait some non-trivial time for events to be processed by NotificationWorker
        # outbox now contains tuple(from_addr, to_addrs, msg,time.time()) for each message sent
        # verifies the alternate email requirement (eg. email specified in DeliveryConfiguration)
        self.assertEqual(len([t[1] for t in outbox if t[1] == 'iceman@example.com']), 1) # 1 message to Iceman
        self.assertEqual(len([t[1] for t in outbox if t[1] == 'slider@example.com']), 1) # 1 message to Slider
        self.assertEqual(len([t[1] for t in outbox if t[1] == 'charlie@example.com']), 0 ) # no messages to Charlie (CIV)
        # check SMS sent
        self.assertEqual(len([t[1] for t in outbox if t[1] == 'snot_nosed_jockey@example.com']), 1 )
        # check SMS <= 140 characters
        self.assertLessEqual(len([(t[2]) for t in outbox if t[1] == 'snot_nosed_jockey@example.com'][0]), 140)
        # publish 9 more events (already have 1), notification should be disabled at number 10
        for x in xrange(9):
            self.event_publisher.publish_event(origin='Miramar', event_type=OT.ResourceLifecycleEvent)
        time.sleep(2) # give system time to update NotificationRequest (1 sec saw occasional fail)
        # publish 1 more event, should NOT trigger any additional notifications
        self.event_publisher.publish_event(origin='Miramar', event_type=OT.ResourceLifecycleEvent)
        time.sleep(1) # wait some non-trivial time for events to be processed by NotificationWorker
        # check there are 10 for Iceman, not 11 even though 11 have been published
        # verifies the max_daily notification limit
        self.assertEquals(len([t[1] for t in outbox if t[1] == 'iceman@example.com']), 10) # 10 to Iceman (reached limit)
        # check NotificationRequest has been disabled_by_system
        notification = self.resource_registry.read(self.notification_id)
        self.assertTrue(notification.disabled_by_system)
        # MONKEY PATCH time.time() fast forward to trigger NotificationSentScanner to persist counts
        CONSTANT_TIME = time.time() + 600 # forward 10 minutes (from now FREEZES time.time())
        def new_time():
            return CONSTANT_TIME
        old_time = time.time
        time.time = new_time
        # sleep 5s longer than interval taken from ion/processes/event/event_persister.py
        time.sleep(float(CFG.get_safe("process.event_persister.persist_interval", 1.0))+5)
        # get notification_counts from ObjectStore
        notification_counts_obj = self.object_store.read('notification_counts')
        # persisted as standard dicts, convert to Counter objects
        notification_counts = {k:Counter(v) for k,v in notification_counts_obj.items() if not (k == '_id' or k == '_rev') }
        self.assertEqual(int(notification_counts.get(self.user._id).get('all')), 30) # 3 DeliveryConfigurations * 10 notifications
        # restore MONKEY PATCHed time (so we can go ahead from actual wall time)
        time.time = old_time
        # MONKEY PATCH time.time() fast forward to trigger NotificationSentScanner to flush/reset counts
        CONSTANT_TIME = time.time() + 86400 # forward 1 day (from now FREEZES time.time())
        def new_time():
            return CONSTANT_TIME
        old_time = time.time
        time.time = new_time
        # sleep 5s longer than interval taken from ion/processes/event/event_persister.py
        time.sleep(float(CFG.get_safe("process.event_persister.persist_interval", 1.0))+5)
        # get notification_counts from ObjectStore
        notification_counts_obj = self.object_store.read('notification_counts')
        # persisted as standard dicts, convert to Counter objects
        notification_counts = {k:Counter(v) for k,v in notification_counts_obj.items() if not (k == '_id' or k == '_rev') }
        self.assertEqual(notification_counts.get(self.user._id, None), None)
        # restore MONKEY PATCHed time
        time.time = old_time
        # restore original smtplib.SMTP and CFG value
        smtplib.SMTP = original_SMTP
        CFG['service']['user_notification']['max_daily_notifications'] = original_CFG_max
|
|
"""
Written by Jason Pitt
Revised by Dominic Fitzgerald on 30 May 2017
swiftseq run [args]
The entry point for starting a new SwiftSeq run.
"""
import os
import warnings
from collections import defaultdict
from itertools import product
import six
from swiftseq.core import SwiftSeqStrings
from swiftseq.core.readgroups import create_readgroup_files
from swiftseq.core.workflow import Workflow
from swiftseq.util.path import mkdirs, mksym
# Named index constants used when slicing paths and os.path.splitext() results.
ROOT = 0  # splitext() index of the filename without its extension
GERMLINE = 100  # run-type code — presumably mirrors Workflow.GERMLINE; unused in visible code
TUMOR_NORMAL_PAIRS = 101  # run-type code — unused in visible code; TODO confirm
PATIENT_NAME = -1  # index of the patient-name segment in a '/'-split relative path
BEFORE_EXTENSION = 0  # splitext() index of the name before the extension
EXTENSION = 1  # splitext() index of the extension (includes the leading dot)
""" First just write this so it can find the appropriate files and make
the proper directories. Later, add sanity checks, ability to handle .RG
info, fastqs, etc. """
#########
# TO-DO
#########
# Need to handle if RG ID contains non-ascii
# Need to determine how Picard handles RG ID
# Should likely be writing results to some form of set-up log
"""
Common Functions
"""
def create_analysis_dir(inputdata_type, inputdata_filepaths, inputdata_dir_root, analysis_dir_root):
    """
    Verifies that the root of the directory given as input adheres to the required directory structure
    before the rest of the program proceeds. Anything that violates the requirements is ignored. Everything
    that passed verification is then re-created structurally at a different root; this becomes the
    place where all analysis results will be written to. The inputdata files in the analysis directory
    are replaced with symbolic links back to the original files.
    Side effects: Re-creates the original data file structure using symlinks in place of
                  the actual files
    :param inputdata_type: Either Workflow.TUMOR_NORMAL_PAIR or Workflow.GERMLINE; else will throw an exception
    :param inputdata_filepaths: list Relative paths of the original data files
    :param inputdata_dir_root: str Path to the directory housing all the original data
    :param analysis_dir_root: str Path to the root of the location for the analysis results
    :return: list Relative paths to all symlinks linked to the original data
    """
    # Verify the correct directory structure
    verified_filepaths = list()
    if inputdata_type == Workflow.TUMOR_NORMAL_PAIR:
        for data_filepath in inputdata_filepaths:
            # Tumor/normal layout requires .../<tumor|normal>/<sample>/<file>
            if data_filepath.split('/')[-3] not in {'tumor', 'normal'}:
                warnings.warn('Warning: {} does not adhere to required directory structure. Will be ignored'.format(
                    data_filepath
                ))
                continue
            verified_filepaths.append(data_filepath)
    elif inputdata_type == Workflow.GERMLINE:
        for data_filepath in inputdata_filepaths:
            parent_dir = os.path.join(inputdata_dir_root, os.path.dirname(data_filepath))
            # Germline layout allows at most one .bam per sample directory
            num_bam_files = len(
                [f for f in os.listdir(parent_dir)
                 if os.path.splitext(f)[EXTENSION] == '.bam']
            )
            if num_bam_files > 1:
                warnings.warn('Warning: {} does not adhere to required directory structure (multiple '
                              'seq files in the same directory). Will be ignored'.format(data_filepath))
                continue
            verified_filepaths.append(data_filepath)
    else:
        raise ValueError('Unsupported data type: {}'.format(inputdata_type))
    # Create symlinks in the analysis directory
    verified_symlinks = list()
    for data_filepath in verified_filepaths:
        source_filepath = os.path.join(inputdata_dir_root, data_filepath)
        destination_symlink = os.path.join(analysis_dir_root, data_filepath)
        try:
            mksym(source_filepath, destination_symlink)
        except (os.error, AttributeError):
            # NOTE(review): message arguments read as "link to <destination>
            # for <source>" — confirm the intended wording/order.
            warnings.warn('Could not create a symbolic link to {} for {}'.format(
                destination_symlink,
                source_filepath
            ))
            continue
        verified_symlinks.append(destination_symlink)
    return verified_symlinks
def find_data_filepaths(swiftseq_data_dir, suffix):
    """
    Recursively collects the files under the data directory whose names end
    with the given suffix. Symbolic links are followed while walking.
    Side effects: None
    :param swiftseq_data_dir: str Root of the swiftseq data directory
    :param suffix: str Suffix to search for
    :return: list Paths, relative to the data directory, of files ending with suffix
    """
    matching_filepaths = list()
    for dirpath, _dirnames, filenames in os.walk(swiftseq_data_dir, followlinks=True):
        relative_dir = os.path.relpath(dirpath, swiftseq_data_dir)
        for filename in filenames:
            if filename.endswith(suffix):
                matching_filepaths.append(os.path.join(relative_dir, filename))
    return matching_filepaths
"""
Germline Functions
"""
def create_germline_samples_file(germline_samples_filepath, inputdata_symlinks):
    """
    Writes a file to disk mapping patient IDs to the directories containing their data.
    Side effects: Writes a file to disk
    :param germline_samples_filepath: str Path to write out germline samples file
    :param inputdata_symlinks: list Relative paths to all symlinks linked to the original data
    """
    # Map each symlink's parent directory (with trailing slash) to its filename.
    # Last writer wins if a directory holds several files; upstream germline
    # verification guarantees at most one seq file per directory.
    dir_to_filename = {}
    for symlink_path in inputdata_symlinks:
        dir_to_filename[os.path.dirname(symlink_path) + '/'] = os.path.basename(symlink_path)
    with open(germline_samples_filepath, 'w') as germline_samples:
        # Header line first
        germline_samples.write('ID dir\n')
        # One line per patient directory
        for parent_dir, filename in six.iteritems(dir_to_filename):
            # TODO Remove trailing slash after debugging
            # NOTE(review): parent_dir already ends in '/', so this emits a
            # doubled slash — confirm against the TODO above.
            germline_samples.write('{id} {dir}/\n'.format(
                id=os.path.splitext(filename)[ROOT],
                dir=parent_dir
            ))
"""
Tumor Normal Functions
"""
def create_tn_inputdata_map(inputdata_symlinks):
    """
    Splits each inputdata symlink path into a 3-level nested mapping:
        1) Relative path up to and including the patient id
        2) Tissue type ('tumor' or 'normal')
        3) Sample directory name
    The innermost value is the sample's filename.
    Example:
        (relative/path/to/patient_id)/(tumor)/(sample_123)/(sample_123.bam) becomes
        tn_inputdata_map['relative/path/to/patient_id']['tumor']['sample_123'] = 'sample_123.bam'
    :param inputdata_symlinks: list Relative paths to all symlinks linked to the original data
    :return: dict Mapping of important parts of inputdata symlink paths to the inputdata filename
    """
    sample_map = defaultdict(lambda: defaultdict(dict))
    for symlink_path in inputdata_symlinks:
        parts = symlink_path.strip().split('/')
        # Everything above <tissue>/<sample>/<filename> identifies the patient.
        patient_relpath = '/'.join(parts[:-3])
        tissue, sample_dir, filename = parts[-3], parts[-2], parts[-1]
        sample_map[patient_relpath][tissue][sample_dir] = filename
    return sample_map
def create_tn_patients_map_file(tn_patients_map_filepath, patient_id_relpaths):
    """
    Given a list of patient id relative paths, maps each relative path to its
    patient id and writes the mapping out to a file.
    Side effects: Creates a file on disk
    :param tn_patients_map_filepath: str Path to the mapping file to be created on disk
    :param patient_id_relpaths: list All patient id relative paths
    """
    # Build all output lines first, header included.
    output_lines = ['patient dir\n']
    for patient_id_relpath in patient_id_relpaths:
        # The final path segment is the patient id itself.
        patient_id = patient_id_relpath.split('/')[PATIENT_NAME]
        # TODO Remove trailing slash after debugging
        output_lines.append('{patient_id} {patient_id_relpath}/\n'.format(
            patient_id=patient_id,
            patient_id_relpath=patient_id_relpath
        ))
    with open(tn_patients_map_filepath, 'w') as tn_samples:
        tn_samples.writelines(output_lines)
def create_tissue_samples_files(tn_inputdata_map):
    """
    For every (patient id, tissue type) pair, writes a file into that tissue's
    directory listing all samples and their files for that pair. This handles
    the case where a tissue has more than one sample (e.g. 2 files for the tumor).
    Side effects: Creates a file on disk
    :param tn_inputdata_map: dict Mapping of important parts of inputdata symlink paths
                                  to the inputdata filename
    """
    for patient_relpath, tissues in six.iteritems(tn_inputdata_map):
        for tissue_name, samples in six.iteritems(tissues):
            # Relative path up to and including the tissue ('tumor' or 'normal')
            tissue_dir = os.path.join(patient_relpath, tissue_name)
            samples_filepath = os.path.join(tissue_dir, SwiftSeqStrings.sample_out_filename)
            with open(samples_filepath, 'w') as samples_file:
                # Header line first
                samples_file.write('ID sampleDir dir filepath\n')
                # One line per sample belonging to this patient/tissue pair
                for sample_dir, sample_filename in six.iteritems(samples):
                    sample_path = os.path.join(tissue_dir, sample_dir)
                    samples_file.write('{id} {sample_dir} {dir} {filepath}\n'.format(
                        id=os.path.splitext(sample_filename)[BEFORE_EXTENSION],
                        sample_dir=sample_dir,
                        dir=sample_path,
                        filepath=os.path.join(sample_path, sample_filename)
                    ))
def create_paired_output_dirs(tn_inputdata_map):
    """
    Inside each patient id's directory, creates one output directory per
    (tumor sample, normal sample) pair, named '<tumor>___<normal>'.
    Side effects: Creates directories on disk
    :param tn_inputdata_map: dict Mapping of important parts of inputdata symlink paths
                                  to the inputdata filename
    """
    for patient_relpath, tissues in six.iteritems(tn_inputdata_map):
        # Cartesian product of every tumor sample with every normal sample
        for tumor_sample, normal_sample in product(tissues['tumor'], tissues['normal']):
            pair_dirname = '___'.join((tumor_sample, normal_sample))
            mkdirs(os.path.join(patient_relpath, SwiftSeqStrings.paired_analysis_dir, pair_dirname))
"""
Main Function
"""
def process_samples(inputdata_type, inputdata_filepaths, data_root, analysis_root, samtools_path):
    """
    Prepare the analysis directory for a run.

    Steps performed:
    1) Validates that input data is in the required format; files that do not
       adhere are excluded from the analysis
    2) Recreates the directory structure of the input files in a separate analysis directory
    3) Symlinks each original data file into its corresponding directory in
       the analysis directory
    4) Writes a readgroups file for each sample as a sibling of the sample symlink
    If this is a Tumor-Normal run, additionally:
    5) Writes a file at the analysis root mapping patient ids to the relative paths
       housing each patient id's sample files
    6) Writes, in each tissue's root directory, a file mapping that tissue type to
       the sample files associated with it for that patient id
    7) Creates a <tumor_name>___<normal_name> directory for every pair of tumor and
       normal samples for every patient id
    Otherwise (Germline run):
    5) Writes a file mapping patient ids to the directories containing their data

    :param inputdata_type: int Either Workflow.GERMLINE or Workflow.TUMOR_NORMAL_PAIR
    :param inputdata_filepaths: list Relative paths to all original data files
    :param data_root: str Path to the directory housing all the original data
    :param analysis_root: str Path to the root of the location for the analysis results
    :param samtools_path: str Path to the samtools executable
    :return: (int, int, list) Number of individuals and samples to be analyzed, relative paths
    to all symlinks linked to the original data
    """
    # Mirror the input tree under the analysis root; returns relative symlink paths
    symlink_relpaths = create_analysis_dir(
        inputdata_type=inputdata_type,
        inputdata_filepaths=inputdata_filepaths,
        inputdata_dir_root=data_root,
        analysis_dir_root=analysis_root
    )
    # Write a readgroups file beside every symlinked sample
    create_readgroup_files(
        inputdata_symlinks=symlink_relpaths,
        samtools_path=samtools_path
    )
    # By default one individual per sample; tumor-normal runs override this below
    n_samples = len(symlink_relpaths)
    n_individuals = n_samples
    if inputdata_type == Workflow.TUMOR_NORMAL_PAIR:
        # Ex. tn_map['relative/path/to/patient_id']['tumor']['sample_123'] = 'sample_123.bam'
        tn_map = create_tn_inputdata_map(symlink_relpaths)
        # Map patient id relative paths to patient ids at the analysis root
        # TODO Remove extra slash
        create_tn_patients_map_file(
            tn_patients_map_filepath=os.path.join(analysis_root, SwiftSeqStrings.patient_out_filename),
            patient_id_relpaths=tn_map.keys()
        )
        # Per-tissue mapping files (headers: ID sampleDir dir filepath)
        create_tissue_samples_files(tn_map)
        # One directory per (tumor, normal) sample pair for each patient id
        create_paired_output_dirs(tn_map)
        # For tumor-normal runs, individuals are patients rather than samples
        n_individuals = len(tn_map)
    elif inputdata_type == Workflow.GERMLINE:
        create_germline_samples_file(
            germline_samples_filepath=os.path.join(analysis_root, SwiftSeqStrings.patient_out_filename),
            inputdata_symlinks=symlink_relpaths
        )
    else:
        raise ValueError('Unsupported data type: {}'.format(inputdata_type))
    return n_individuals, n_samples, symlink_relpaths
|
|
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
import androguard.session as session_module
from androguard.gui.DataModel import ApkModel, DexClassModel
from androguard.gui.apiwindow import APIWindow
from androguard.gui.binwindow import binWidget
from androguard.gui.fileloading import FileLoadingThread
from androguard.gui.helpers import class2func
from androguard.gui.methodswindow import MethodsWindow
from androguard.gui.resourceswindow import ResourcesWindow
from androguard.gui.sourcewindow import SourceWindow
from androguard.gui.stringswindow import StringsWindow
from androguard.gui.treewindow import TreeWindow
import os
import logging
log = logging.getLogger("androguard.gui")
def load_module(module_name, file_path):
    """
    Load a Python module by name from an explicit file path.

    :param module_name: str Name to register the loaded module under
    :param file_path: str Path to the .py file to load
    :return: the loaded module object, or None if the module could not be loaded
    """
    if sys.version_info >= (3, 5,):
        import importlib.util
        spec = importlib.util.spec_from_file_location(module_name, file_path)
        # spec is None when the path cannot be mapped to a loadable module;
        # spec.loader can also be None (e.g. namespace-package specs), which
        # would make exec_module below raise AttributeError instead of the
        # documented "return None on failure"
        if spec is None or spec.loader is None:
            return None
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
    else:
        # Legacy path for Python < 3.5; imp is deprecated and removed in 3.12
        import imp
        mod = imp.load_source(module_name, file_path)
        return mod
class TabsWindow(QtWidgets.QTabWidget):
    """Central tab widget of the main window.

    Holds all analysis tabs and keeps the shared ``bin_windows`` mapping
    (tab tooltip title -> binWidget) in sync with the currently selected tab,
    enabling the selected window and disabling the others.
    """

    def __init__(self, bin_windows, parent=None):
        super().__init__(parent)
        # Mapping owned by MainWindow: tab tooltip title -> binWidget
        self.bin_windows = bin_windows
        self.setTabsClosable(True)
        self.tabCloseRequested.connect(self.tabCloseRequestedHandler)
        self.currentChanged.connect(self.currentTabChanged)
        # Context-menu actions (see contextMenuEvent)
        self.closeAllTabs = QtWidgets.QAction(
            "Close all tabs",
            self,
            triggered=self.actioncloseAllTabs)
        self.closeOtherTabs = QtWidgets.QAction(
            "Close other tabs",
            self,
            triggered=self.actioncloseOtherTabs)
        self.closeLeftTabs = QtWidgets.QAction(
            "Close left tabs",
            self,
            triggered=self.actioncloseLeftTabs)
        self.closeRightTabs = QtWidgets.QAction(
            "Close right tabs",
            self,
            triggered=self.actioncloseRightTabs)

    def actioncloseAllTabs(self):
        """Remove every tab."""
        self.clear()

    def actioncloseOtherTabs(self):
        """Remove every tab except the current one."""
        # Left side: iterate downwards so remaining indices stay valid
        for i in range(self.currentIndex() - 1, -1, -1):
            self.removeTab(i)
        # Right side. Fixed off-by-one: was range(self.count(), ...), whose
        # first call removeTab(self.count()) was an out-of-range no-op.
        for i in range(self.count() - 1, self.currentIndex(), -1):
            self.removeTab(i)

    def actioncloseLeftTabs(self):
        """Remove every tab to the left of the current one."""
        for i in range(self.currentIndex() - 1, -1, -1):
            self.removeTab(i)

    def actioncloseRightTabs(self):
        """Remove every tab to the right of the current one."""
        # Fixed off-by-one: was range(self.count(), ...) (first call a no-op)
        for i in range(self.count() - 1, self.currentIndex(), -1):
            self.removeTab(i)

    def tabCloseRequestedHandler(self, index):
        """Close-button handler: remove the tab at *index*."""
        self.removeTab(index)

    def currentTabChanged(self, index):
        """Enable the binWidget behind the newly selected tab, disable the rest.

        Windows are keyed by their tab tooltip text.
        """
        log.debug("currentTabChanged -> %d (%s)" % (index, self.tabToolTip(index)))
        if index == -1:
            # All tabs were closed
            return
        current_title = self.tabToolTip(index)
        for title in self.bin_windows:
            if title != current_title:
                log.debug("Disable %s" % title)
                self.bin_windows[title].disable()
        if current_title in self.bin_windows:
            # Bug fix: previously logged the stale loop variable `title`
            # instead of the window actually being enabled
            log.debug("Enable %s" % current_title)
            self.bin_windows[current_title].enable()

    def contextMenuEvent(self, event):
        """Show the close-tabs context menu at the cursor position."""
        menu = QtWidgets.QMenu(self)
        menu.addAction(self.closeAllTabs)
        menu.addAction(self.closeOtherTabs)
        menu.addAction(self.closeLeftTabs)
        menu.addAction(self.closeRightTabs)
        menu.exec_(event.globalPos())
class MainWindow(QtWidgets.QMainWindow):
    """Main window:
    self.central: QTabWidget in center area
    self.dock: QDockWidget in left area
    self.tree: TreeWindow(QTreeWidget) in self.dock
    """

    def __init__(self, parent=None, session=None, input_file=None, input_plugin=None):
        """Build the GUI and optionally load *input_file* right away.

        :param parent: optional parent widget
        :param session: androguard Session; a fresh one is created when None
        :param input_file: path of a file to open at startup (may be None)
        :param input_plugin: path of a plugin to run once loading succeeds
        """
        super().__init__(parent)
        # Bug fix: the default used to be `session=session_module.Session()`,
        # which is evaluated once at import time, so every window created
        # without an explicit session silently shared a single Session object.
        self.session = session if session is not None else session_module.Session()
        self.bin_windows = {}
        self.setupFileMenu()
        self.setupViewMenu()
        self.setupPluginsMenu()
        self.setupHelpMenu()
        self.setupCentral()
        self.setupEmptyTree()
        self.setupDock()
        self.setupSession()
        self.setWindowTitle("Androguard GUI")
        self.showStatus("Androguard GUI")
        self.installEventFilter(self)
        self.input_plugin = input_plugin
        if input_file:
            self._openFile(input_file)
        root = os.path.dirname(os.path.realpath(__file__))
        self.setWindowIcon(QtGui.QIcon(os.path.join(root, "androguard.ico")))

    def eventFilter(self, watched, event):
        """Forward every event to all open binWidgets; never consume it."""
        for bin_window in list(self.bin_windows.values()):
            bin_window.eventFilter(watched, event)
        return False

    def showStatus(self, msg):
        """Helper function called by any window to display a message
        in status bar.
        """
        log.debug(msg)
        self.statusBar().showMessage(msg)

    def about(self):
        """User clicked About menu. Display a Message box."""
        QtWidgets.QMessageBox.about(self, "About Androguard GUI",
                                    "<p><b>Androguard GUI</b> is basically a GUI for Androguard :)." \
                                    "<br>Have fun !</p>")

    def _no_apk_loaded(self):
        """Show a message if no APK was loaded yet..."""
        QtWidgets.QMessageBox.information(self, "No APK loaded yet!",
                                          "<p>There was no APK loaded yet. Please load one using File->Open.</p>")

    def setupSession(self):
        """Create the background loader thread and hook its completion signal."""
        log.debug("Setup Session")
        self.fileLoadingThread = FileLoadingThread(self)
        self.fileLoadingThread.file_loaded.connect(self.loadedFile)

    def loadedFile(self, success):
        """Loader-thread callback: refresh the tree and tabs on success."""
        if not success:
            self.showStatus("Analysis of %s failed :(" %
                            str(self.fileLoadingThread.file_path))
            return
        self.updateDockWithTree()
        self.cleanCentral()
        self.showStatus("Analysis of %s done!" %
                        str(self.fileLoadingThread.file_path))
        # Run the plugin supplied on the command line, if any
        if self.input_plugin:
            self._runPlugin(self.input_plugin)

    def openFile(self):
        """File->Open: reset the session and analyze a newly chosen file."""
        self.session.reset()
        filepath, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Open File", '.',
            "Android Files (*.apk *.jar *.dex *.odex *.dey);;Androguard Session (*.ag)")
        self._openFile(filepath)

    def _openFile(self, filepath=None):
        """Start asynchronous analysis of *filepath* (no-op when falsy)."""
        if filepath:
            self.setupTree()
            self.showStatus("Analyzing %s..." % str(filepath))
            self.fileLoadingThread.load(filepath)

    def addFile(self):
        """File->Add: analyze another file into the existing session."""
        if not self.session.isOpen():
            log.debug(self.session.analyzed_digest)
            self._no_apk_loaded()
            return
        filepath, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Add File", '',
            "Android Files (*.apk *.jar *.dex *.odex *.dey)")
        if filepath:
            self.showStatus("Analyzing %s..." % str(filepath))
            self.fileLoadingThread.load(filepath)

    def saveFile(self):
        """User clicked Save menu. Display a Dialog to ask where to save."""
        filepath, _ = QtWidgets.QFileDialog.getSaveFileName(
            self, "Save File", '', "Androguard Session (*.ag)")
        if filepath:
            # Ensure .ag as file ending
            if not filepath.endswith(".ag"):
                filepath = "{}.ag".format(filepath)
            self.showStatus("Saving %s..." % str(filepath))
            self.saveSession(filepath)
            self.showStatus("Saved Session to %s!" % str(filepath))

    def saveSession(self, filepath):
        """Save androguard session."""
        try:
            session_module.Save(self.session, filepath)
        except RuntimeError as e:
            log.exception(e)
            # Remove the partially written session file
            os.remove(filepath)
            log.warning("Session not saved")

    def _runPlugin(self, filepath):
        """Load the Python file at *filepath* and call its PluginEntry(session)."""
        module_name = os.path.splitext(os.path.basename(filepath))[0]
        log.debug("RUN plugin '{}' from {}".format(module_name, filepath))
        mod = load_module(module_name, filepath)
        log.debug("Loaded %s", mod)
        if not mod or not hasattr(mod, 'PluginEntry'):
            QtWidgets.QMessageBox.warning(self, "Not a valid Plugin",
                                          "<p>This python file does not look like a valid plugin.</p>")
            return
        mod.PluginEntry(self.session)

    def openRunPluginWindow(self):
        """Plugins->Run: pick a Python file and execute it as a plugin."""
        filepath, _ = QtWidgets.QFileDialog.getOpenFileName(
            self, "Open File", '',
            "Python Files (*.py);;")
        if filepath:
            self._runPlugin(filepath)

    def closeEvent(self, event):
        """Clicked [x] to close main window"""
        event.accept()

    def setupEmptyTree(self):
        """Setup empty Tree at startup. """
        if hasattr(self, "tree"):
            del self.tree
        self.tree = QtWidgets.QTreeWidget(self)
        self.tree.header().close()

    def setupDock(self):
        """Setup empty Dock at startup. """
        self.dock = QtWidgets.QDockWidget("Classes", self)
        self.dock.setWidget(self.tree)
        self.dock.setFeatures(QtWidgets.QDockWidget.NoDockWidgetFeatures)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.dock)

    def setupTree(self):
        """Replace the dock's widget with a session-backed class tree."""
        log.debug("Setup Tree")
        self.tree = TreeWindow(win=self, session=self.session)
        self.tree.setWindowTitle("Tree model")
        self.dock.setWidget(self.tree)

    def setupCentral(self):
        """Setup empty window supporting tabs at startup. """
        self.central = TabsWindow(self.bin_windows, self)
        self.setCentralWidget(self.central)

    def cleanCentral(self):
        """Close all tabs in the central widget."""
        self.central.actioncloseAllTabs()

    def setupFileMenu(self):
        """Create the File menu and its actions."""
        log.debug("Setup File Menu")
        self.fileMenu = self.menuBar().addMenu("&File")
        self.fileMenu.addAction("&Open...", self.openFile, "Ctrl+O")
        self.fileMenu.addAction("&Add...", self.addFile, "Ctrl+A")
        self.fileMenu.addAction("&Save...", self.saveFile, "Ctrl+S")
        self.fileMenu.addAction("E&xit", self.close, "Ctrl+Q")

    def setupViewMenu(self):
        """Create the View menu and its actions."""
        log.debug("Setup View Menu")
        self.viewMenu = self.menuBar().addMenu("&View")
        self.viewMenu.addAction("&Strings...", self.openStringsWindow)
        self.viewMenu.addAction("&Methods...", self.openMethodsWindow)
        self.viewMenu.addAction("&API...", self.openAPIWindow)
        self.viewMenu.addAction("&APK...", self.openApkWindow)
        self.viewMenu.addAction("&Resources...", self.openResourcesWindow)

    def setupPluginsMenu(self):
        """Create the Plugins menu and its actions."""
        log.debug("Setup Plugins Menu")
        self.pluginsMenu = self.menuBar().addMenu("&Plugins")
        self.pluginsMenu.addAction("&Run...", self.openRunPluginWindow)

    def setupHelpMenu(self):
        """Create the Help menu and its actions."""
        log.debug("Setup Help Menu")
        self.helpMenu = self.menuBar().addMenu("&Help")
        self.helpMenu.addAction("&About", self.about)
        self.helpMenu.addAction("About &Qt", QtWidgets.qApp.aboutQt)

    def updateDockWithTree(self, empty=False):
        """Update the class tree. Called when
        - a new APK has been imported
        - a class has been renamed (displayed in the tree)

        NOTE(review): the *empty* parameter is currently unused; kept for
        signature compatibility with existing callers.
        """
        self.setupTree()
        self.tree.fill()

    def openStringsWindow(self):
        """Open (or add) the Strings tab."""
        stringswin = StringsWindow(win=self, session=self.session)
        self.central.addTab(stringswin, stringswin.title)
        self.central.setTabToolTip(self.central.indexOf(stringswin),
                                   stringswin.title)
        self.central.setCurrentWidget(stringswin)

    def openMethodsWindow(self):
        """Open (or add) the Methods tab."""
        methodswin = MethodsWindow(win=self, session=self.session)
        self.central.addTab(methodswin, methodswin.title)
        self.central.setTabToolTip(self.central.indexOf(methodswin),
                                   methodswin.title)
        self.central.setCurrentWidget(methodswin)

    def openResourcesWindow(self):
        """Open (or add) the Resources tab."""
        resourceswin = ResourcesWindow(win=self, session=self.session)
        self.central.addTab(resourceswin, resourceswin.title)
        self.central.setTabToolTip(self.central.indexOf(resourceswin),
                                   resourceswin.title)
        self.central.setCurrentWidget(resourceswin)

    def openAPIWindow(self):
        """Open (or add) the API tab."""
        apiwin = APIWindow(win=self, session=self.session)
        self.central.addTab(apiwin, apiwin.title)
        self.central.setTabToolTip(self.central.indexOf(apiwin),
                                   apiwin.title)
        self.central.setCurrentWidget(apiwin)

    def openApkWindow(self):
        """Open a binWidget tab showing the raw APK structure."""
        log.debug("openApkWindow for %s" % self.session.analyzed_apk)
        if not self.fileLoadingThread.file_path:
            self._no_apk_loaded()
            return
        bin_window = binWidget(self, ApkModel(self.session.get_objects_apk(self.fileLoadingThread.file_path)[0]), "APK")
        bin_window.activateWindow()
        self.central.addTab(bin_window, bin_window.title)
        # Bug fix: set the tooltip like every other open* method does.
        # TabsWindow.currentTabChanged and getMeOpenedWindowIfExists key
        # windows by tabToolTip, so without it this window was never
        # enabled/disabled or found again.
        self.central.setTabToolTip(self.central.indexOf(bin_window),
                                   bin_window.title)
        self.central.setCurrentWidget(bin_window)
        self.bin_windows[bin_window.title] = bin_window

    def openBinWindow(self, current_class):
        """Open (or refocus) a hex/bin view of *current_class*."""
        log.debug("openBinWindow for %s" % current_class)
        dx = self.session.get_analysis(current_class)
        bin_window = self.getMeOpenedWindowIfExists(current_class.current_title)
        if not bin_window:
            bin_window = binWidget(self, DexClassModel(current_class, dx), current_class.get_name())
            bin_window.activateWindow()
            self.central.addTab(bin_window, current_class.current_title)
            self.central.setTabToolTip(self.central.indexOf(bin_window),
                                       current_class.current_title)
            self.bin_windows[current_class.current_title] = bin_window
        bin_window.enable()
        self.central.setCurrentWidget(bin_window)

    def openSourceWindow(self, current_class, method=None):
        """Main function to open a decompile source window
        It checks if it already opened and open that tab,
        otherwise, initialize a new window.
        """
        log.debug("openSourceWindow for %s" % current_class)
        sourcewin = self.getMeOpenedWindowIfExists(current_class.current_title + "(S)")
        if not sourcewin:
            current_filename = self.session.get_filename_by_class(current_class)
            current_digest = self.session.get_digest_by_class(current_class)
            sourcewin = SourceWindow(win=self,
                                     current_class=current_class,
                                     current_title=current_class.current_title + "(S)",
                                     current_filename=current_filename,
                                     current_digest=current_digest,
                                     session=self.session)
            sourcewin.reload_java_sources()
            self.central.addTab(sourcewin, sourcewin.title)
            self.central.setTabToolTip(self.central.indexOf(sourcewin),
                                       sourcewin.title)
        if method:
            sourcewin.browse_to_method(method)
        self.central.setCurrentWidget(sourcewin)

    def getMeOpenedWindowIfExists(self, name):
        """Return the already-open tab widget whose tooltip is *name*, else None."""
        for idx in range(self.central.count()):
            if name == self.central.tabToolTip(idx):
                log.debug("Tab %s already opened at: %d" %
                          (name, idx))
                return self.central.widget(idx)
        return None

    def doesClassExist(self, path):
        """Return True if *path* (converted via class2func) is an attribute of self.d.

        NOTE(review): self.d is never assigned anywhere in this class —
        presumably set externally; confirm before relying on this method.
        """
        arg = class2func(path)
        try:
            getattr(self.d, arg)
        except AttributeError:
            return False
        return True
|
|
# -*- coding: utf-8 -*-
# FOGLAMP_BEGIN
# See: http://foglamp.readthedocs.io/
# FOGLAMP_END
import http.client
import json
import urllib.parse
from foglamp.common import logger
from foglamp.common.microservice_management_client import exceptions as client_exceptions
__author__ = "Ashwin Gopalakrishnan"
__copyright__ = "Copyright (c) 2017 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_logger = logger.setup(__name__)
class MicroserviceManagementClient(object):
    """Thin HTTP client for the core service's microservice management API.

    Every public method issues one HTTP request over the shared connection,
    raises MicroserviceManagementClientError on any 4xx/5xx status, and
    returns the decoded JSON response body.
    """
    _management_client_conn = None

    def __init__(self, microservice_management_host, microservice_management_port):
        # Single connection object reused for every request of this client
        self._management_client_conn = http.client.HTTPConnection("{0}:{1}".format(microservice_management_host, microservice_management_port))

    def _do_request(self, method, url, body=None):
        """Send one HTTP request and return the parsed JSON response.

        Centralizes the request/status-check/read/close/decode sequence that
        was previously duplicated in every public method.

        :param method: str HTTP method ('GET', 'POST', 'PUT' or 'DELETE')
        :param url: str request path (with query string if any)
        :param body: str|None request body
        :return: decoded JSON response (dict or list)
        :raises client_exceptions.MicroserviceManagementClientError: on any
            4xx or 5xx response status
        """
        self._management_client_conn.request(method=method, url=url, body=body)
        r = self._management_client_conn.getresponse()
        if 400 <= r.status < 500:
            _logger.error("Client error code: %d, Reason: %s", r.status, r.reason)
            raise client_exceptions.MicroserviceManagementClientError(status=r.status, reason=r.reason)
        if 500 <= r.status < 600:
            _logger.error("Server error code: %d, Reason: %s", r.status, r.reason)
            raise client_exceptions.MicroserviceManagementClientError(status=r.status, reason=r.reason)
        res = r.read().decode()
        self._management_client_conn.close()
        return json.loads(res)

    def register_service(self, service_registration_payload):
        """ Registers a newly created microservice with the core service

        The core service will persist this information in memory rather than write it to the storage layer since it will
        change on every run of FogLAMP.

        :param service_registration_payload: A dict object describing the microservice and giving details of the
        management interface for that microservice
        :return: a JSON object containing the UUID of the newly registered service
        """
        response = self._do_request('POST', '/foglamp/service',
                                    body=json.dumps(service_registration_payload))
        try:
            # Sanity check: the core service must echo back an "id"
            response["id"]
        except Exception as ex:
            _logger.exception("Could not register the microservice, From request %s, Reason: %s", json.dumps(service_registration_payload), str(ex))
            raise
        return response

    def unregister_service(self, microservice_id):
        """ Removes the registration record for a microservice

        This is usually called by the microservice itself as part of its shutdown procedure, although this may not be
        the only time it is called. A service may unregister, do some maintenance type operation and then re-register
        if it desires.

        :param microservice_id: string UUID of microservice
        :return: a JSON object containing the UUID of the unregistered service
        """
        response = self._do_request('DELETE', '/foglamp/service/{}'.format(microservice_id))
        try:
            response["id"]
        except Exception as ex:
            _logger.exception("Could not unregister the micro-service having uuid %s, Reason: %s",
                              microservice_id, str(ex))
            raise
        return response

    def register_interest(self, category, microservice_id):
        """ Register an interest of microservice in a configuration category

        :param category: configuration category
        :param microservice_id: microservice's UUID string
        :return: A JSON object containing a registration ID for this registration
        """
        payload = json.dumps({"category": category, "service": microservice_id}, sort_keys=True)
        response = self._do_request('POST', '/foglamp/interest', body=payload)
        try:
            response["id"]
        except Exception as ex:
            _logger.exception("Could not register interest, for request payload %s, Reason: %s",
                              payload, str(ex))
            raise
        return response

    def unregister_interest(self, registered_interest_id):
        """ Remove a previously registered interest in a configuration category

        :param registered_interest_id: registered interest id for a configuration category
        :return: A JSON object containing the unregistered interest id
        """
        response = self._do_request('DELETE', '/foglamp/interest/{}'.format(registered_interest_id))
        try:
            response["id"]
        except Exception as ex:
            _logger.exception("Could not unregister interest for %s, Reason: %s", registered_interest_id, str(ex))
            raise
        return response

    def get_services(self, service_name=None, service_type=None):
        """ Retrieve the details of one or more services that are registered

        :param service_name: filter the returned services by name
        :param service_type: filter the returned services by type
        :return: list of registered microservices, all or based on filter(s) applied
        """
        url = '/foglamp/service'
        delimiter = '?'
        if service_name:
            url = '{}{}name={}'.format(url, delimiter, urllib.parse.quote(service_name))
            delimiter = '&'
        if service_type:
            url = '{}{}type={}'.format(url, delimiter, service_type)
        response = self._do_request('GET', url)
        try:
            response["services"]
        except Exception as ex:
            _logger.exception("Could not find the micro-service for requested url %s, Reason: %s", url, str(ex))
            raise
        return response

    def get_configuration_category(self, category_name=None):
        """ Fetch all configuration categories, or one category by name.

        :param category_name: optional category to fetch
        :return: decoded JSON response
        """
        url = '/foglamp/service/category'
        if category_name:
            url = "{}/{}".format(url, urllib.parse.quote(category_name))
        return self._do_request('GET', url)

    def get_configuration_item(self, category_name, config_item):
        """ Fetch a single configuration item of a category.

        :param category_name: category to look in
        :param config_item: item to fetch
        :return: decoded JSON response
        """
        url = "/foglamp/service/category/{}/{}".format(urllib.parse.quote(category_name), urllib.parse.quote(config_item))
        return self._do_request('GET', url)

    def create_configuration_category(self, category_data):
        """ Create (or merge) a configuration category.

        :param category_data: e.g. '{"key": "TEST", "description": "description", "value": {"info": {"description": "Test", "type": "boolean", "default": "true"}}}'
        :return: decoded JSON response
        """
        data = json.loads(category_data)
        if 'keep_original_items' in data:
            # The flag travels as a query parameter, not in the body
            keep_original_item = 'true' if data['keep_original_items'] is True else 'false'
            url = '/foglamp/service/category?keep_original_items={}'.format(keep_original_item)
            del data['keep_original_items']
        else:
            url = '/foglamp/service/category'
        return self._do_request('POST', url, body=json.dumps(data))

    def create_child_category(self, parent, children):
        """ Attach child categories to a parent category.

        :param parent: str parent category name
        :param children: list of child category names
        :return: decoded JSON response
        """
        data = {"children": children}
        url = '/foglamp/service/category/{}/children'.format(urllib.parse.quote(parent))
        return self._do_request('POST', url, body=json.dumps(data))

    def update_configuration_item(self, category_name, config_item, category_data):
        """ Update the value of a configuration item.

        :param category_name: category the item belongs to
        :param config_item: item to update
        :param category_data: e.g. '{"value": "true"}'
        :return: decoded JSON response
        """
        url = "/foglamp/service/category/{}/{}".format(urllib.parse.quote(category_name), urllib.parse.quote(config_item))
        return self._do_request('PUT', url, body=category_data)

    def delete_configuration_item(self, category_name, config_item):
        """ Reset a configuration item's value.

        :param category_name: category the item belongs to
        :param config_item: item whose value is deleted
        :return: decoded JSON response
        """
        url = "/foglamp/service/category/{}/{}/value".format(urllib.parse.quote(category_name), urllib.parse.quote(config_item))
        return self._do_request('DELETE', url)

    def get_asset_tracker_events(self):
        """ Fetch all asset tracker events.

        :return: decoded JSON response
        """
        return self._do_request('GET', '/foglamp/track')

    def create_asset_tracker_event(self, asset_event):
        """ Record a new asset tracker event.

        :param asset_event: dict,
        e.g. {"asset": "AirIntake", "event": "Ingest", "service": "PT100_In1", "plugin": "PT100"}
        :return: decoded JSON response
        """
        return self._do_request('POST', '/foglamp/track', body=json.dumps(asset_event))
|
|
"""
Algorithm:
starting number of schedules
places all of these in a pq
do batches of randomizations of schedules, rescore
in each randomization of a schedule generate more schedules, each in a varying level of randomization
put them all back along with the original, unchanged schedule
randomization level based on score
store number of conflicts, randomize based on that
high randomization = randomizing days totally randomly
low randomization = swapping out profs/groups with high conflicts or swapping between
when the pq gets over some length (1000000?) take out half the elements, dump the pq, and load the elements back into the pq
"""
from Scheduler import *
from heapq import *
import time, threading
class Algorithm:
    """Heap-based local search over exam schedules.

    Builds a pool (the "base") of randomly generated Schedule objects kept in
    a min-heap ordered by score, then repeatedly mutates the best schedules,
    re-inserting any mutation that scores better than its parent.
    Depends on Professor, Group and Schedule from the Scheduler module.
    """

    def __init__(self):
        # Problem dimensions.
        self.num_days = 8
        self.num_rooms = 5
        self.max_students_per_room = 30
        # creating professors; the boolean list is per-day availability
        # (one flag per day, length == num_days) -- TODO confirm against Scheduler.
        prof1 = Professor("Boutell", [True, False, True, True, True, True, True, True] )
        prof2 = Professor("Aidoo", [False, False, True, True, True, True, True, True] )
        prof3 = Professor("Song", [True, True, False, True, True, True, True, True] )
        prof4 = Professor("DeVasher", [True, True, True, True, True, False, True, True] )
        prof5 = Professor("Rupakheti", [True, True, True, True, False, True, True, True] )
        prof6 = Professor("Coleman", [True, True, True, True, True, True, False, False] )
        prof7 = Professor("Steve", [True, False, True, True, True, True, True, True] )
        prof8 = Professor("Copinger", [True, True, True, True, False, True, True, True] )
        prof9 = Professor("Jane Doe", [True, True, True, False, True, True, True, True] )
        self.profs = [prof1, prof2, prof3, prof4, prof5, prof6, prof7, prof8, prof9]
        # creating groups: (size, advisor name, professor pool, per-day availability)
        group1 = Group(13, "Boutell", self.profs, [True, True, False, True, True, True, True, True] )
        group2 = Group(18, "Coleman", self.profs, [True, True, True, False, True, True, True, True] )
        group3 = Group(16, "Aidoo", self.profs, [True, True, True, True, False, True, True, True] )
        group4 = Group(15, "Rupakheti", self.profs, [True, True, True, True, True, False, True, True] )
        group5 = Group(17, "DeVasher", self.profs, [True, True, False, True, True, True, False, True] )
        group6 = Group(16, "Song", self.profs, [True, True, True, True, True, True, True, False] )
        self.groups = [group1, group2, group3, group4, group5, group6]
        # for use in generating the base
        self.num_threads = 1
        self.base_list = []
        self.base_num_to_generate = 100
        self.base_num_lecture_score_limit = 0
        self.base_repeat_lecture_limit = 50
        # Processing in the second "phase"
        self.batch_size = 100
        self.iterations = 10000
        self.max_base_list_size = 200

    def run(self):
        """Generate the base pool, shuffle/optimize it, and print results."""
        # turns out python uses something called a global interpreter lock meaning only
        # one thread can be executed at any given time. As a result trying to multithread
        # it actually significantly slows it down.
        start_time = time.time()
        self.generate_base()
        heapify(self.base_list)
        print("Base created in {:.2f} seconds.".format(time.time()-start_time))
        # should now only switch around who is already in there
        self.shuffle_base()
        # just to glance at the results
        print(len(self.base_list))
        for i in range(10):
            # NOTE(review): to_optimize is re-created on every iteration, so only
            # the 10th popped schedule survives into the loops below; the reset
            # probably belongs before this loop -- confirm intended behavior.
            to_optimize = []
            to_optimize.append(heappop(self.base_list))
            print(to_optimize[len(to_optimize)-1])
        for sched in to_optimize:
            sched.eliminate_repeat_lectures()
            sched.score_schedule()
        for sched in to_optimize:
            print(sched)
        time_taken = time.time() - start_time
        print("Complete.\nRun time: {:.2f} seconds".format(time_taken))

    def shuffle_base(self):
        """Local-search phase: mutate the best schedules and keep improvements.

        Each iteration pops a batch of the best schedules, tries per-day group
        randomization and whole-day scrambling on each, and pushes back the
        originals plus any strictly better mutants.  The heap is periodically
        shrunk to the best half when it exceeds max_base_list_size.
        """
        for unused_var in range(self.iterations):
            if unused_var % 100 == 0:
                print( "Iteration: {:d}".format(unused_var) )
                print( "Size of base: {:d}".format(len(self.base_list)) )
            if len(self.base_list) > self.max_base_list_size:
                # Keep only the best half; pops are the smallest (best) scores.
                print("Shrinking base list...")
                temp = []
                for i in range(self.max_base_list_size//2):
                    temp.append( heappop(self.base_list) )
                self.base_list = []
                while len(temp) > 0:
                    heappush( self.base_list, temp.pop() )
                print("Done.")
            batch = []
            to_put_back = []
            for i in range(self.batch_size):
                batch.append( heappop(self.base_list) )
            for sched in batch:
                # The parent always goes back; mutants only if strictly better.
                to_put_back.append(sched)
                for day_num in range(self.num_days):
                    alternate = sched.copy()
                    alternate.randomize_groups_day( day_num )
                    alternate.score_schedule()
                    if alternate < sched:
                        to_put_back.append( alternate )
                # this will be more useful when schedule conflicts are added
                alternate = sched.copy()
                alternate.scramble_days()
                alternate.score_schedule()
                if alternate < sched:
                    to_put_back.append( alternate )
            for sched in to_put_back:
                heappush(self.base_list, sched)

    def generate_base(self):
        """Fill self.base_list with random schedules passing the score limits."""
        sched = Schedule( self.profs, self.groups, self.num_days, self.num_rooms, self.max_students_per_room )
        i = 0
        for unused_var in range( self.base_num_to_generate//self.num_threads ):
            sched = sched.copy()
            sched.generate_random_schedule()
            # Re-roll until both quality thresholds are met.
            while sched.score_num_of_prof_lectures() > self.base_num_lecture_score_limit:
                sched.randomize_all_professors()
            while sched.score_repeat_lectures() > self.base_repeat_lecture_limit:
                sched.randomize_all_groups()
            sched.score_schedule()
            self.base_list.append(sched)
            if len(self.base_list) >= 100 * i:
                # Coarse progress indicator (every ~100 schedules).
                i += 1
                print(len(self.base_list))
if __name__ == "__main__":
    # Build the solver and run the full generate/optimize pipeline.
    alg = Algorithm()
    alg.run()
|
|
# -*- encoding: utf-8 -*-
"""Prints the location of python object definition in your file-system.
"""
from __future__ import print_function
import sys
import argparse
import importlib
import inspect
import re
import os
from textwrap import dedent
import ast
from collections import namedtuple
# =============== #
# Compute version #
# =============== #
# Placeholders substituted by the release process; 'dev'/'git' mean we are
# running from a working copy and must ask the version script instead.
__version__ = 'dev'
__revision__ = 'git'
# Helper script used to compute version/revision in development mode.
_VERSION_SCRIPT = os.path.join(os.path.dirname(__file__),
                               "script", "version")
def get_version():
    """Return the package version, probing the repository when in dev mode."""
    if __version__ != 'dev':
        # Baked-in version from a real release.
        return __version__
    import subprocess as sp
    return sp.check_output([_VERSION_SCRIPT, "get"]).decode().strip()
def get_revision():
    """Return the VCS revision, probing the repository when in git mode."""
    if __revision__ != 'git':
        # Baked-in revision from a real release.
        return __revision__
    import subprocess as sp
    return sp.check_output([_VERSION_SCRIPT, "revision"]).decode().strip()
# ===== #
# PyLoc #
# ===== #
class PylocError(Exception):
    """Root of the pyloc exception hierarchy; catch this to handle any pyloc failure."""
class ModuleNameError(PylocError):
    """Raised when the requested module cannot be imported."""

    def __init__(self, name, error):
        self.name = name    # dotted module name we tried to import
        self.error = error  # the underlying ImportError (or subclass)

    def __str__(self):
        return "failed to import '{}' ({}: {})".format(
            self.name, type(self.error).__name__, self.error)
class AttributeNameError(PylocError):
    """Raised when an attribute lookup along the qualname chain fails."""

    def __init__(self, prefix, name):
        self.prefix = prefix  # dotted path successfully resolved so far
        self.name = name      # attribute that could not be fetched

    def __str__(self):
        return "cannot get attribute '%s' from '%s'" % (self.name, self.prefix)
# A source position: (filename, line, column); line/column may be None when unknown.
Location = namedtuple('Location', 'filename line column')
class _ClassDefVisitor(ast.NodeVisitor):
    """Collect ClassDef nodes whose dotted class path equals *qualname*."""

    def __init__(self, qualname):
        self.qualname = qualname
        self.candidates = []  # ClassDef nodes matching qualname
        self.path = []        # stack of enclosing ClassDef nodes

    def visit_ClassDef(self, node):
        self.path.append(node)
        if self.qualname == ".".join(cls.name for cls in self.path):
            self.candidates.append(node)
        result = self.generic_visit(node)
        self.path.pop()
        return result

    def visit_FunctionDef(self, node):
        # Deliberately do not descend: classes local to a function are not
        # addressable by qualname here.
        pass
def _get_file_content(filename):
    """Return the entire text of *filename*."""
    with open(filename) as stream:
        return stream.read()
def _search_classdef(filename, qualname):
    """Parse *filename* and return the ClassDef nodes matching *qualname*."""
    tree = ast.parse(_get_file_content(filename), filename)
    finder = _ClassDefVisitor(qualname)
    finder.visit(tree)
    return finder.candidates
def _iter_class_methods(obj):
    """Yield every plain function or method found among *obj*'s attributes."""
    for attr_name in dir(obj):
        member = getattr(obj, attr_name)
        if inspect.ismethod(member) or inspect.isfunction(member):
            yield member
def _get_line(obj):
    """Best-effort first line number of *obj*, or None when undeterminable.

    Unwraps method -> function -> code and traceback -> frame -> code, then
    reads ``co_firstlineno`` from the resulting code object.
    """
    if inspect.ismethod(obj):
        obj = obj.__func__
    if inspect.isfunction(obj):
        obj = obj.__code__
    if inspect.istraceback(obj):
        obj = obj.tb_frame
    if inspect.isframe(obj):
        obj = obj.f_code
    if inspect.iscode(obj) and hasattr(obj, 'co_firstlineno'):
        return obj.co_firstlineno
    return None
def _disamb_class_loc(candidates, obj):
    """Pick the ClassDef closest above *obj*'s first method, or None.

    When several classes share the same qualname, the real one must be
    defined just before the earliest of the class's methods.
    """
    methods = list(_iter_class_methods(obj))
    if not methods:
        return None
    first_method_line = min(_get_line(m) for m in methods)
    # Only ClassDefs strictly before the first method can be the real one.
    preceding = [c for c in candidates if c.lineno < first_method_line]
    if not preceding:
        return None
    return max(preceding, key=lambda c: c.lineno)
def _candidate_nodes_to_locations(filename, candidates):
    """Convert AST nodes into a sorted list of Location tuples in *filename*."""
    locations = [Location(filename, node.lineno, node.col_offset)
                 for node in candidates]
    locations.sort()
    return locations
def _get_node_name(node):
    """Return the bound name of an AST node.

    Handles import aliases (honoring ``as``), def/class nodes (``name``) and
    plain Name nodes (``id``); raises ValueError for anything else.
    """
    if hasattr(node, "name"):
        return node.asname if getattr(node, "asname", None) else node.name
    if hasattr(node, "id"):
        return node.id
    raise ValueError("do not know how to get name of node: {!r}".format(node))
def _iter_assigned_names(node):
    """Yield every ast.Name bound anywhere in the targets of an Assign."""
    assert isinstance(node, ast.Assign)
    for target in node.targets:
        # walk() covers tuple/list unpacking and nested targets.
        for descendant in ast.walk(target):
            if isinstance(descendant, ast.Name):
                yield descendant
class _AssignVisitor(ast.NodeVisitor):
    """Collect Assign / ImportFrom nodes binding *qualname* at module or class scope."""

    def __init__(self, qualname):
        self.qualname = qualname
        self.candidates = []  # nodes whose bound name matches qualname
        self.path = []        # stack of enclosing ClassDef nodes

    def _qualified(self, name_node):
        # Dotted path of the binding: enclosing classes plus the bound name.
        return ".".join(_get_node_name(n) for n in self.path + [name_node])

    def visit_ClassDef(self, node):
        self.path.append(node)
        result = self.generic_visit(node)
        self.path.pop()
        return result

    def visit_Assign(self, node):
        for name_node in _iter_assigned_names(node):
            if self._qualified(name_node) == self.qualname:
                self.candidates.append(node)

    def visit_ImportFrom(self, node):
        for alias in node.names:
            if self._qualified(alias) == self.qualname:
                self.candidates.append(node)

    def visit_FunctionDef(self, node):
        # Deliberately do not descend: function locals are not addressable.
        pass
def _search_assign(filename, qualname):
    """Parse *filename* and return Assign/ImportFrom nodes binding *qualname*."""
    tree = ast.parse(_get_file_content(filename), filename)
    finder = _AssignVisitor(qualname)
    finder.visit(tree)
    return finder.candidates
def _is_inspectable(obj):
    """True when *obj* is a kind of object inspect can locate source for."""
    checks = (inspect.isclass, inspect.ismethod,
              inspect.isfunction, inspect.ismodule)
    return any(check(obj) for check in checks)
def _find_frozen_file(obj, qualname, filename):
    """Map a '<frozen mod>' pseudo-filename to the module's real source file.

    Returns *filename* unchanged when it is not a frozen marker or the module
    cannot be imported.
    """
    match = re.match(r"^<frozen (.*)>$", filename)
    if match is None:
        return filename
    try:
        return importlib.import_module(match.group(1)).__file__
    except ImportError:
        return filename
def _find_file_harder(obj, qualname, filename):
    """Return an existing source path, applying fallback strategies in turn.

    Raises RuntimeError when every strategy is exhausted without producing a
    file that exists on disk.
    """
    strategies = (_find_frozen_file,)
    if os.path.exists(filename):
        return filename
    for strategy in strategies:
        filename = strategy(obj, qualname, filename)
        if os.path.exists(filename):
            return filename
    raise RuntimeError("failed to get an existing source file name")
def _get_locations(obj, qualname):
    """Return candidate Locations for *obj*, best effort.

    Falls back progressively: no source file -> bare filename; module -> file
    only; class -> ClassDef search (disambiguated via its methods), then
    Assign search; anything else -> code-object line number.
    """
    filename = inspect.getsourcefile(obj)
    if not filename:
        return [Location(inspect.getfile(obj), None, None)]
    filename = _find_file_harder(obj, qualname, filename)
    if inspect.ismodule(obj):
        return [Location(filename, None, None)]
    if inspect.isclass(obj):
        ### Search for ClassDef node in AST.
        candidates = _search_classdef(filename, qualname)
        if candidates:
            if len(candidates) > 1:
                # Try to disambiguate by locating the methods defined in the
                # class.
                candidate = _disamb_class_loc(candidates, obj)
                if candidate is not None:
                    return [Location(filename,
                                     candidate.lineno,
                                     candidate.col_offset)]
            return _candidate_nodes_to_locations(filename, candidates)
        ### Search for Assign node in AST (e.g. Alias = SomeClass).
        candidates = _search_assign(filename, qualname)
        if candidates:
            return _candidate_nodes_to_locations(filename, candidates)
        return [Location(filename, None, None)]
    return [Location(filename, _get_line(obj), None)]
def _has_same_filename(locs):
    """True when every Location in *locs* shares the first one's filename."""
    reference = locs[0].filename
    return all(loc.filename == reference for loc in locs)
def _from_pydoc_format(target):
    """Convert pydoc's all-dots target into 'module:qualname' form.

    The pydoc target format has no colon separating the package/module part
    from the object part, so probe successively shorter dotted prefixes until
    one imports.
    """
    parts = target.split(".")
    if len(parts) <= 1:
        return target
    for split_at in range(len(parts), 0, -1):
        module_part = ".".join(parts[:split_at])
        try:
            importlib.import_module(module_part)
        except ImportError:
            continue
        qual_part = ".".join(parts[split_at:])
        return module_part + ':' + qual_part if qual_part else module_part
    # We cannot import the target at all. Return it as is. The rest of the
    # program will report the error.
    return target
def pyloc(target):
    """Return possible location defining ``target`` object.

    ``target`` named "module[:qualname]".

    Return a list of Location namedtuples where the first value is
    the filename, the second the line number and the third the column number.
    The line and column number may be None if not applicable (i.e. for a module
    or package) or if they cannot be found.

    :raises ModuleNameError: when the module part cannot be imported.
    :raises AttributeNameError: when the qualname chain cannot be resolved.

    Inspired by 'inspect._main()' and 'inspect.findsource()' by
    Ka-Ping Yee <ping@lfw.org> and
    Yury Selivanov <yselivanov@sprymix.com>
    """
    if not target:
        raise ValueError("target must be a non-empty string")
    if ":" not in target:
        target = _from_pydoc_format(target)
    # has_qualname is the separator returned by partition: '' (falsy) if absent.
    mod_name, has_qualname, qualname = target.partition(":")
    ### Try to import the module containing the given target.
    try:
        module = importlib.import_module(mod_name)
    except ImportError as exc:
        raise ModuleNameError(mod_name, exc)
    ### Get location of module
    if not has_qualname:
        return _get_locations(module, None)
    ### Get the object in module
    attrs = qualname.split(".")
    obj = module
    # Track the deepest attribute along the chain that inspect can locate;
    # plain data attributes (ints, strings, ...) are not inspectable.
    last_inspectable_obj = obj
    last_inspectable_idx = 0
    for i in range(len(attrs)):
        attr = attrs[i]
        try:
            obj = getattr(obj, attr)
        except AttributeError:
            raise AttributeNameError(".".join([module.__name__]+attrs[:i]),
                                     attr)
        else:
            # Follow functools.wraps-style wrappers to the wrapped object.
            obj = getattr(obj, "__wrapped__", obj)
            if _is_inspectable(obj):
                last_inspectable_obj = obj
                last_inspectable_idx = i
    last_inspectable_obj_qualname = ".".join(attrs[:last_inspectable_idx+1])
    ### Get location
    last_inspectable_locs = _get_locations(last_inspectable_obj,
                                           last_inspectable_obj_qualname)
    if last_inspectable_obj == obj:
        return last_inspectable_locs
    ### Further investigate location of non-inspect-able object by searching
    ### for an assignment to its name in the enclosing file.
    assert _has_same_filename(last_inspectable_locs)
    filename = last_inspectable_locs[0].filename
    candidates = _search_assign(filename, qualname)
    if candidates:
        return _candidate_nodes_to_locations(filename, candidates)
    return [Location(filename, None, None)]
# =============================== #
# Command line interface function #
# =============================== #
# Output format used when neither --format nor PYLOC_DEFAULT_FORMAT is given.
DEFAULT_LOC_FORMAT = "emacs"

def format_loc(loc, format=DEFAULT_LOC_FORMAT):
    """Render a Location in the requested output *format*.

    'emacs'/'vi' produce "+LINE:COL FILENAME" (suitable for editor command
    lines); 'human' produces a labelled multi-line block.  Line/column parts
    are omitted when falsy.
    """
    if format in ('emacs', 'vi'):
        prefix = ""
        if loc.line:
            prefix = "+%d" % (loc.line,)
            if loc.column:
                prefix += ":%d " % (loc.column,)
            else:
                prefix += " "
        return prefix + loc.filename
    if format == 'human':
        text = "Filename: %s" % (loc.filename,)
        if loc.line:
            text += "\nLine: %d" % (loc.line,)
            if loc.column:
                text += "\nColumn: %d" % (loc.column,)
        return text
    raise ValueError("unsupported format: {}".format(format))
# Help-text epilogue for argparse; run through textwrap.dedent() at parser
# construction time.
_EPILOGUE = """
environment variables:
PYLOC_DEFAULT_FORMAT - default output format (default: {default_format})
Copyright (c) 2015-2016, Nicolas Despres
All right reserved.
""".format(
    default_format=DEFAULT_LOC_FORMAT,
)
def _build_cli():
    """Construct and return the argparse command-line parser for pyloc."""
    class LazyVersionAction(argparse.Action):
        """Replacement for the default 'version' action.

        This action lazily evaluates the 'version' keyword argument (a
        callable). The default 'version' action does not provide this feature.

        When called in develop mode some version strings might be expensive
        to compute since they require probing the underlying code repository.
        """
        def __init__(self, option_strings, dest, nargs=None, **kwargs):
            # Reject argparse keywords that make no sense for a version flag.
            if nargs is not None:
                raise ValueError("'nargs' is not allowed.")
            if "choices" in kwargs:
                raise ValueError("'choices' is not allowed")
            if "type" in kwargs:
                raise ValueError("'type' is not allowed")
            # 'version' is a zero-argument callable, invoked only when the
            # flag is actually used.
            self.version = kwargs.pop('version')
            super(LazyVersionAction, self).__init__(option_strings, dest,
                                                    nargs=0,
                                                    **kwargs)
        def __call__(self, parser, namespace, values, option_string=None):
            print(self.version())
            sys.exit(0)
    class RawDescriptionWithArgumentDefaultsHelpFormatter(
            argparse.ArgumentDefaultsHelpFormatter,
            argparse.RawDescriptionHelpFormatter,
    ):
        """Mix both formatters: keep raw description AND show defaults."""
    def _version():
        # Built lazily via LazyVersionAction; see above.
        return \
            "pyloc {v} "\
            "on python {pyv.major}.{pyv.minor}.{pyv.micro} "\
            "(rev: {rev})"\
            .format(v=get_version(),
                    pyv=sys.version_info,
                    rev=get_revision())
    parser = argparse.ArgumentParser(
        description=__doc__,
        epilog=dedent(_EPILOGUE),
        formatter_class=RawDescriptionWithArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "-f", "--format",
        action="store",
        choices=("emacs", "vi", "human"),
        default=os.environ.get("PYLOC_DEFAULT_FORMAT", DEFAULT_LOC_FORMAT),
        help="How to write object location")
    parser.add_argument(
        "-a", "--all",
        action="store_true",
        help="Print all possible location in case ambiguities")
    parser.add_argument(
        "--version",
        action=LazyVersionAction,
        version=_version)
    parser.add_argument(
        "object_name",
        action="store",
        help="A python object named: module[:qualname]")
    return parser
def _error(msg):
    """Write *msg* to stderr prefixed with the program name."""
    sys.stderr.write("pyloc: %s\n" % (msg,))
def _main():
    """CLI entry point: parse arguments, resolve the target, print locations.

    Returns a process exit status: 0 on success, 1 on any pyloc error.
    """
    cli = _build_cli()
    options = cli.parse_args(sys.argv[1:])
    try:
        locs = pyloc(options.object_name)
    except PylocError as e:
        _error(str(e))
        return 1
    else:
        if options.all:
            locs_to_print = locs
        else:
            if len(locs) > 1:
                # All candidates share one file; printing the first suffices.
                assert _has_same_filename(locs)
                locs_to_print = [locs[0]]
            else:
                locs_to_print = locs
        for loc in locs_to_print:
            sys.stdout.write(format_loc(loc, format=options.format))
            sys.stdout.write("\n")
    return 0
if __name__ == "__main__":
    # Propagate _main()'s status code as the process exit code.
    sys.exit(_main())
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Optional per-call hook: (pipeline_response, deserialized_body, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class OrdersOperations:
    """OrdersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.databoxedge.v2019_07_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE: AutoRest-generated code -- edits will be lost on regeneration.
    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list_by_data_box_edge_device(
        self,
        device_name: str,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.OrderList"]:
        """Lists all the orders related to a Data Box Edge/Data Box Gateway device.

        :param device_name: The device name.
        :type device_name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OrderList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databoxedge.v2019_07_01.models.OrderList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.OrderList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page uses the templated operation URL; follow-up pages use
            # the opaque next_link returned by the service as-is.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_data_box_edge_device.metadata['url'] # type: ignore
                path_format_arguments = {
                    'deviceName': self._serialize.url("device_name", device_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Returns (continuation token, items) for AsyncItemPaged.
            deserialized = self._deserialize('OrderList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # Caller-provided hook may transform the page's items.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_data_box_edge_device.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders'} # type: ignore

    async def get(
        self,
        device_name: str,
        resource_group_name: str,
        **kwargs
    ) -> "_models.Order":
        """Gets a specific order by name.

        :param device_name: The device name.
        :type device_name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Order, or the result of cls(response)
        :rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.Order
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Order"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Order', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default'} # type: ignore

    async def _create_or_update_initial(
        self,
        device_name: str,
        resource_group_name: str,
        order: "_models.Order",
        **kwargs
    ) -> Optional["_models.Order"]:
        # Fires the initial PUT of the create-or-update LRO; the poller in
        # begin_create_or_update drives it to completion.
        cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Order"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(order, 'Order')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 202 Accepted carries no body, so the return value stays None until
        # polling completes.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Order', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default'} # type: ignore

    async def begin_create_or_update(
        self,
        device_name: str,
        resource_group_name: str,
        order: "_models.Order",
        **kwargs
    ) -> AsyncLROPoller["_models.Order"]:
        """Creates or updates an order.

        :param device_name: The device name.
        :type device_name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :param order: The order to be created or updated.
        :type order: ~azure.mgmt.databoxedge.v2019_07_01.models.Order
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Order or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databoxedge.v2019_07_01.models.Order]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Order"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse so the poller can
            # drive the long-running operation from it.
            raw_result = await self._create_or_update_initial(
                device_name=device_name,
                resource_group_name=resource_group_name,
                order=order,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('Order', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default'} # type: ignore

    async def _delete_initial(
        self,
        device_name: str,
        resource_group_name: str,
        **kwargs
    ) -> None:
        # Fires the initial DELETE of the delete LRO; begin_delete polls it.
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default'} # type: ignore

    async def begin_delete(
        self,
        device_name: str,
        resource_group_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the order related to the device.

        :param device_name: The device name.
        :type device_name: str
        :param resource_group_name: The resource group name.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse for the poller.
            raw_result = await self._delete_initial(
                device_name=device_name,
                resource_group_name=resource_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'deviceName': self._serialize.url("device_name", device_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/orders/default'} # type: ignore
|
|
#!/usr/bin/env python
"""Tests for grr.lib.aff4_objects.standard."""
import hashlib
import StringIO
import zlib
from grr.lib import aff4
from grr.lib import rdfvalue
from grr.lib import test_lib
class BlobImageTest(test_lib.GRRBaseTest):
  """Tests for the BlobImage AFF4 stream implementation."""

  def testAppendContentError(self):
    """Appending to a stream that does not end on a chunk boundary fails."""
    content = "ABCD" * 10
    source = StringIO.StringIO(content)
    image = aff4.FACTORY.Create(aff4.ROOT_URN.Add("temp"),
                                "BlobImage", token=self.token, mode="rw")
    image.SetChunksize(7)
    image.AppendContent(source)
    image.Seek(0)
    self.assertEqual(image.Read(5000), content)
    # 40 bytes is not a multiple of the 7-byte chunksize, so a second
    # append must be rejected.
    source.seek(0)
    self.assertRaises(IOError, image.AppendContent, source)

  def testAppendContent(self):
    """Test writing content where content length % chunksize == 0."""
    content = "ABCDEFG" * 10  # exactly 10 chunks of 7 bytes
    source = StringIO.StringIO(content)
    image = aff4.FACTORY.Create(aff4.ROOT_URN.Add("temp"),
                                "BlobImage", token=self.token, mode="rw")
    self.assertEqual(image.Get(image.Schema.HASHES), None)
    image.SetChunksize(7)
    image.AppendContent(source)
    self.assertEqual(int(image.Get(image.Schema.SIZE)), len(content))
    self.assertTrue(image.Get(image.Schema.HASHES))
    image.Seek(0)
    self.assertEqual(image.Read(5000), content)
    # A second chunk-aligned append doubles the stream size and content.
    source.seek(0)
    image.AppendContent(source)
    self.assertEqual(image.size, 2 * len(content))
    self.assertEqual(int(image.Get(image.Schema.SIZE)),
                     2 * len(content))
    image.Seek(0)
    self.assertEqual(image.Read(5000), content + content)
class IndexTest(test_lib.AFF4ObjectTest):
  """Tests for creating, querying and deleting AFF4Index entries."""

  def testIndexesCreation(self):
    """Check indexes can be created and queried."""
    client1 = aff4.FACTORY.Create("C.0000000000000001", "VFSGRRClient",
                                  mode="w", token=self.token)
    client2 = aff4.FACTORY.Create("C.0000000000000002", "VFSGRRClient",
                                  mode="w", token=self.token)
    client_schema = client1.Schema
    client1.Set(client_schema.HOSTNAME("client1"))
    client1.Flush()
    client2.Set(client_schema.HOSTNAME("client2"))
    client2.Flush()
    index = aff4.FACTORY.Create("aff4:/index/myfirstindex", "AFF4Index",
                                mode="w", token=self.token)
    # client1 is indexed under test1/test2; client2 under a value that merely
    # contains "test1" as a substring, plus test2.
    index.Add(client1.urn, client_schema.LABELS, "test1")
    index.Add(client1.urn, client_schema.LABELS, "test2")
    index.Add(client2.urn, client_schema.LABELS, "extra-test1-extra")
    index.Add(client2.urn, client_schema.LABELS, "test2")
    # Flush/Close synchronously so the entries are visible when reopened.
    index.Flush(sync=True)
    index.Close(sync=True)
    # Reopen for querying.
    index = aff4.FACTORY.Open("aff4:/index/myfirstindex", aff4_type="AFF4Index",
                              token=self.token)
    # A plain value only matches client1's exact "test1" entry.
    results = list(index.Query([client_schema.LABELS], "test1"))
    self.assertEqual(len(results), 1)
    # Regex patterns match both clients.
    results = list(index.Query([client_schema.LABELS], ".*test.*"))
    self.assertEqual(len(results), 2)
    results = list(index.Query([client_schema.LABELS], "^test1.*"))
    self.assertEqual(len(results), 1)
    results = list(index.Query([client_schema.LABELS], ".*test1$"))
    self.assertEqual(len(results), 1)
    # Check limit works.
    results = list(index.Query([client_schema.LABELS], ".*test.*", limit=1))
    self.assertEqual(len(results), 1)

  def testIndexesDeletion(self):
    """Check index entries can be deleted again."""
    client1 = aff4.FACTORY.Create("C.0000000000000001", "VFSGRRClient",
                                  mode="w", token=self.token)
    client2 = aff4.FACTORY.Create("C.0000000000000002", "VFSGRRClient",
                                  mode="w", token=self.token)
    client_schema = client1.Schema
    client1.Set(client_schema.HOSTNAME("client1"))
    client1.Flush()
    client2.Set(client_schema.HOSTNAME("client2"))
    client2.Flush()
    index = aff4.FACTORY.Create("aff4:/index/myfirstindex", "AFF4Index",
                                mode="w", token=self.token)
    index.Add(client1.urn, client_schema.LABELS, "test1")
    index.Add(client1.urn, client_schema.LABELS, "test2")
    index.Add(client2.urn, client_schema.LABELS, "test2")
    index.Add(client1.urn, client_schema.LABELS, "test2")
    index.Add(client2.urn, client_schema.LABELS, "test3")
    index.Flush(sync=True)
    # Deleting client1's "test1" entry must make the query come up empty.
    index.DeleteAttributeIndexesForURN(client_schema.LABELS, "test1",
                                       client1.urn)
    index.Flush(sync=True)
    results = list(index.Query([client_schema.LABELS], "test1"))
    self.assertEqual(len(results), 0)
    # Reopen read/write and delete client1's "test2"; client2's entry remains.
    index = aff4.FACTORY.Create("aff4:/index/myfirstindex", "AFF4Index",
                                mode="rw", token=self.token)
    index.DeleteAttributeIndexesForURN(client_schema.LABELS, "test2",
                                       client1.urn)
    index.Flush(sync=True)
    results = list(index.Query([client_schema.LABELS], "test2"))
    self.assertEqual(len(results), 1)
class AFF4IndexSetTest(test_lib.GRRBaseTest):
  """Tests for AFF4IndexSet: an unordered set of values stored in AFF4."""

  def CreateIndex(self, token=None):
    # Opened for writing; the tests use it as a context manager so pending
    # changes are flushed when the "with" block exits.
    return aff4.FACTORY.Create("aff4:/index/foo", "AFF4IndexSet",
                               mode="w", token=token)

  def ReadIndex(self, token=None):
    # Reopens the index from the data store for verification.
    return aff4.FACTORY.Open("aff4:/index/foo", aff4_type="AFF4IndexSet",
                             token=token)

  def testValueAddedToTheIndexIsThenListed(self):
    """A flushed value shows up in a freshly opened index."""
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(["wow"], list(index.ListValues()))
    # A second write session must not clobber previously stored values.
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow2")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(["wow", "wow2"], sorted(index.ListValues()))

  def testValuesAddedToTheIndexAreListedBeforeFlushing(self):
    """Unflushed additions are visible through the same handle."""
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow")
      index.Add("wow2")
      self.assertListEqual(["wow", "wow2"], sorted(index.ListValues()))

  def testValueRemovedFromTheIndexIsNotListed(self):
    """A flushed removal no longer shows up on reopen."""
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow")
      index.Add("wow2")
      index.Add("wow3")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(["wow", "wow2", "wow3"], sorted(index.ListValues()))
    with self.CreateIndex(token=self.token) as index:
      index.Remove("wow2")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(["wow", "wow3"], sorted(index.ListValues()))

  def testValueRemovedFromTheIndexIsNotListedBeforeFlushing(self):
    """An unflushed removal is already hidden from ListValues()."""
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow")
      index.Add("wow2")
      index.Add("wow3")
    index = self.ReadIndex(token=self.token)
    index.Remove("wow2")
    self.assertListEqual(["wow", "wow3"], sorted(index.ListValues()))

  # NOTE(review): "Fremoved" in the name is a typo for "Removed"; kept as-is
  # so the test id stays stable for any tooling that references it.
  def testValuesAddedAndThenFremovedAreNotListedBeforeFlushing(self):
    """Add followed by Remove in one session hides the value immediately."""
    with self.CreateIndex(token=self.token) as index:
      index.Add("wow")
      index.Add("wow2")
      index.Add("wow3")
      index.Remove("wow2")
      self.assertListEqual(["wow", "wow3"], sorted(index.ListValues()))
class AFF4LabelsIndexTest(test_lib.GRRBaseTest):
  """Tests for AFF4LabelsIndex: maps (label name, owner) pairs to URNs."""

  def CreateIndex(self, token=None):
    # Opened for writing; used as a context manager so changes flush on exit.
    return aff4.FACTORY.Create("aff4:/index/labels", "AFF4LabelsIndex",
                               mode="w", token=token)

  def ReadIndex(self, token=None):
    # Reopens the index from the data store for verification.
    return aff4.FACTORY.Open("aff4:/index/labels", aff4_type="AFF4LabelsIndex",
                             token=token)

  def testIndexSeparatorNotAllowedInLabelName(self):
    """Label names may not contain the index's internal separator."""
    self.assertRaises(ValueError, rdfvalue.AFF4ObjectLabel,
                      name=aff4.AFF4LabelsIndex.SEPARATOR)

  def testAddedLabelIsCorrectlyListed(self):
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(index.ListUsedLabels(),
                         [rdfvalue.AFF4ObjectLabel(name="foo",
                                                   owner="testuser")])

  def testMultipleLabelsWithDifferentOwnersAreCorrectlyListed(self):
    """Same label name under two owners yields two distinct entries."""
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser1")
      index.AddLabel(urn, "foo", owner="testuser2")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(index.ListUsedLabels(),
                         [rdfvalue.AFF4ObjectLabel(name="foo",
                                                   owner="testuser1"),
                          rdfvalue.AFF4ObjectLabel(name="foo",
                                                   owner="testuser2")])

  def testUrnWithAddedLabelCanBeFound(self):
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertListEqual(found_urns, [urn])

  def testUrnWithAddedLabelCanBeFoundWithOwnerSpecified(self):
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo", owner="testuser")
    self.assertListEqual(found_urns, [urn])

  def testUrnsWithAddedLabelNotFoundWithAnotherOwner(self):
    """Owner filtering excludes labels added by a different owner."""
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo", owner="another")
    self.assertFalse(found_urns)

  def testUrnWithAddedLabelCanBeFoundViaLabelRegex(self):
    """Regex search returns a mapping from matching labels to URNs."""
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "bar", owner="testuser2")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar3"),
                     "foo", owner="testuser3")
    index = self.ReadIndex(token=self.token)
    # "f.*o" matches "foo" but not "bar", so only bar1 and bar3 are found.
    found_urns = index.FindUrnsByLabelNameRegex("f.*o")
    self.assertEqual(len(found_urns), 2)
    self.assertListEqual(
        found_urns[rdfvalue.AFF4ObjectLabel(name="foo", owner="testuser1")],
        [rdfvalue.RDFURN("aff4:/foo/bar1")])
    self.assertListEqual(
        found_urns[rdfvalue.AFF4ObjectLabel(name="foo", owner="testuser3")],
        [rdfvalue.RDFURN("aff4:/foo/bar3")])

  def testUrnWithAddedLabelCanBeFoundViaLabelRegexAndOwner(self):
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "bar", owner="testuser2")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar3"),
                     "foo", owner="testuser3")
    index = self.ReadIndex(token=self.token)
    # Owner filter narrows the regex match down to testuser3's label only.
    found_urns = index.FindUrnsByLabelNameRegex("f.*o", owner="testuser3")
    self.assertEqual(len(found_urns), 1)
    self.assertListEqual(
        found_urns[rdfvalue.AFF4ObjectLabel(name="foo", owner="testuser3")],
        [rdfvalue.RDFURN("aff4:/foo/bar3")])

  def testUrnWithAddedLabelNotFoundWithWrongOwner(self):
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "bar", owner="testuser2")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar3"),
                     "foo", owner="testuser3")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabelNameRegex("f.*o", owner="another")
    self.assertEqual(len(found_urns), 0)

  def testTimestampInformationIsNotStoredInIndex(self):
    """Only name/owner are indexed; the timestamp field stays unset."""
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="user")
    index = self.ReadIndex(token=self.token)
    used_labels = index.ListUsedLabels()
    self.assertEqual(len(used_labels), 1)
    self.assertFalse(used_labels[0].HasField("timestamp"))

  def testOwnerInformationIsStoredInIndex(self):
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    used_labels = index.ListUsedLabels()
    self.assertEqual(len(used_labels), 1)
    self.assertEqual("testuser", used_labels[0].owner)

  def testDeletedLabelIsRemovedFromUrnsAndLabelsMapping(self):
    """RemoveLabel drops the URN from label->URN lookups."""
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertListEqual(found_urns, [urn])
    with self.CreateIndex(token=self.token) as index:
      index.RemoveLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertFalse(found_urns)

  def testDeletedLabelIsNotRemovedFromUsedLabelsList(self):
    """RemoveLabel does NOT retire the label from ListUsedLabels()."""
    label = rdfvalue.AFF4ObjectLabel(name="foo", owner="testuser")
    urn = rdfvalue.RDFURN("aff4:/foo/bar")
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(index.ListUsedLabels(), [label])
    with self.CreateIndex(token=self.token) as index:
      index.RemoveLabel(urn, "foo", owner="testuser")
    index = self.ReadIndex(token=self.token)
    self.assertListEqual(index.ListUsedLabels(), [label])

  def testLabelsWhoseNamesAreSubstringsAreDistinguished1(self):
    """Searching "foo" must not match the prefix-extended "foobar"."""
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "foobar", owner="testuser2")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertEqual(len(found_urns), 1)
    self.assertListEqual(
        found_urns,
        [rdfvalue.RDFURN("aff4:/foo/bar1")])

  def testLabelsWhoseNamesAreSubstringsAreDistinguished2(self):
    """Searching "foo" must not match "barfoobar" (infix)."""
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "barfoobar", owner="testuser2")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertEqual(len(found_urns), 1)
    self.assertListEqual(
        found_urns,
        [rdfvalue.RDFURN("aff4:/foo/bar1")])

  def testLabelsWhoseNamesAreSubstringsAreDistinguished3(self):
    """Searching "foo" must not match "barfoo" (suffix)."""
    with self.CreateIndex(token=self.token) as index:
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar1"),
                     "foo", owner="testuser1")
      index.AddLabel(rdfvalue.RDFURN("aff4:/foo/bar2"),
                     "barfoo", owner="testuser2")
    index = self.ReadIndex(token=self.token)
    found_urns = index.FindUrnsByLabel("foo")
    self.assertEqual(len(found_urns), 1)
    self.assertListEqual(
        found_urns,
        [rdfvalue.RDFURN("aff4:/foo/bar1")])
class AFF4SparseImageTest(test_lib.GRRBaseTest):
  """Tests for AFF4SparseImage: a chunked image backed by the blob store."""

  def AddBlobToBlobStore(self, blob_contents):
    """Stores blob_contents in the blob store and returns its sha256 digest."""
    blob_hash = hashlib.sha256(blob_contents).digest()
    # The compressed blob data.
    cdata = zlib.compress(blob_contents)
    urn = rdfvalue.RDFURN("aff4:/blobs").Add(blob_hash.encode("hex"))
    # Write the blob to the data store. We cheat here and just store the
    # compressed data to avoid recompressing it.
    blob_fd = aff4.FACTORY.Create(urn, "AFF4MemoryStream", mode="w",
                                  token=self.token)
    blob_fd.Set(blob_fd.Schema.CONTENT(cdata))
    blob_fd.Set(blob_fd.Schema.SIZE(len(blob_contents)))
    # Bypass AFF4MemoryStream.Close by calling its base class directly —
    # presumably to avoid re-serializing the already-compressed content;
    # NOTE(review): confirm against AFF4MemoryStream.Close.
    super(aff4.AFF4MemoryStream, blob_fd).Close(sync=True)
    return blob_hash

  def assertChunkEqual(self, fd, chunk, contents):
    """Asserts that the given chunk of fd holds exactly `contents`."""
    fd.Seek(chunk * fd.chunksize)
    self.assertEqual(fd.Read(len(contents)), contents)

  def testAddChunk(self):
    """Makes sure we can add a chunk and modify it."""
    urn = aff4.ROOT_URN.Add("temp_sparse_image.dd")
    fd = aff4.FACTORY.Create(urn, aff4_type="AFF4SparseImage",
                             token=self.token, mode="rw")
    chunk_number = 0
    # 64*1024 characters.
    blob_contents = "test" * 1024 * 16
    blob_hash = self.AddBlobToBlobStore(blob_contents)
    fd.AddBlob(blob_hash=blob_hash, length=len(blob_contents),
               chunk_number=chunk_number)
    fd.index.seek(0)
    fd.index.read(32)
    fd.Flush()
    # Make sure us and our index have been increased in size properly.
    self.assertEqual(fd.size, len(blob_contents))
    self.assertEqual(fd.index.size, len(blob_hash))
    self.assertChunkEqual(fd, chunk_number, blob_contents)
    # Change the contents of the blob.
    blob_contents = blob_contents.replace("test", "estt")
    blob_hash = self.AddBlobToBlobStore(blob_contents)
    # This time we're updating the blob.
    fd.AddBlob(blob_hash, len(blob_contents), chunk_number=chunk_number)
    # The size shouldn't get any bigger, since we got rid of the old blob.
    self.assertEqual(fd.size, len(blob_contents))
    # Similarly for the index.
    self.assertEqual(fd.index.size, len(blob_hash))
    self.assertChunkEqual(fd, chunk_number, blob_contents)

  def testReadAhead(self):
    """Read a chunk, and test that the next few are in cache."""
    urn = aff4.ROOT_URN.Add("temp_sparse_image.dd")
    fd = aff4.FACTORY.Create(urn, aff4_type="AFF4SparseImage",
                             token=self.token, mode="rw")
    start_chunk = 1000
    blob_hashes = []
    blobs = []
    num_chunks = 5
    for chunk in xrange(start_chunk, start_chunk + num_chunks):
      # Make sure the blobs have unique content.
      blob_contents = str(chunk % 10)*64*1024
      blobs.append(blob_contents)
      blob_hash = self.AddBlobToBlobStore(blob_contents)
      fd.AddBlob(blob_hash=blob_hash, length=len(blob_contents),
                 chunk_number=chunk)
      blob_hashes.append(blob_hash)
    self.assertEqual(fd.size, fd.chunksize * num_chunks)
    self.assertEqual(fd.index.size, fd.index.chunksize * num_chunks)
    # Read the first chunk.
    fd.Seek(start_chunk * fd.chunksize)
    fd.Read(fd.chunksize)
    # The cache will have the chunks, but maybe in a different order, so we use
    # assertItemsEqual here, not assertSequenceEqual.
    self.assertItemsEqual(blob_hashes, fd.chunk_cache._hash.keys())
    fd.Flush()
    # They shouldn't be in cache anymore, so the chunk_cache should be empty.
    self.assertFalse(fd.chunk_cache._hash.keys())
    # Make sure the contents of the file are what we put into it.
    fd.Seek(start_chunk * fd.chunksize)
    self.assertEqual(fd.Read(fd.chunksize * num_chunks),
                     "".join(blobs))

  def testReadingAfterLastChunk(self):
    """Reads past the end return "", reads before the start raise."""
    urn = aff4.ROOT_URN.Add("temp_sparse_image.dd")
    fd = aff4.FACTORY.Create(urn, aff4_type="AFF4SparseImage",
                             token=self.token, mode="rw")
    # We shouldn't be able to get any chunks yet.
    self.assertFalse(fd.Read(10000))
    start_chunk = 1000
    num_chunks = 5
    for chunk in xrange(start_chunk, start_chunk + num_chunks):
      # Make sure the blobs have unique content.
      blob_contents = str(chunk % 10)*64*1024
      blob_hash = self.AddBlobToBlobStore(blob_contents)
      fd.AddBlob(blob_hash=blob_hash, length=len(blob_contents),
                 chunk_number=chunk)
    # Make sure we can read the chunks we just wrote without error.
    fd.Seek(start_chunk * fd.chunksize)
    fd.Read(num_chunks * fd.chunksize)
    # Seek past the end of our chunks.
    fd.Seek((start_chunk + num_chunks) * fd.chunksize)
    # We should get the empty string back.
    self.assertEqual(fd.Read(10000), "")
    # Seek to before our chunks start.
    fd.Seek((start_chunk - 1) * fd.chunksize)
    # There should be no chunk there and we should raise.
    with self.assertRaises(aff4.ChunkNotFoundError):
      fd.Read(fd.chunksize)
class VFSDirectoryTest(test_lib.GRRBaseTest):
  """Tests for the VFSDirectory aff4 object."""

  def testRealPathspec(self):
    """real_pathspec works for a directory without a stored PATHSPEC."""
    client_id = rdfvalue.ClientURN("C.%016X" % 1234)

    # Store pathspecs for "a/b" and "a/b/c/d" — deliberately not "a/b/c".
    for vfs_path in ("a/b", "a/b/c/d"):
      directory = aff4.FACTORY.Create(client_id.Add("fs/os").Add(vfs_path),
                                      aff4_type="VFSDirectory",
                                      token=self.token)
      directory.Set(directory.Schema.PATHSPEC,
                    rdfvalue.PathSpec(path=vfs_path,
                                      pathtype=rdfvalue.PathSpec.PathType.OS))
      directory.Close()

    # "a/b/c" has no PATHSPEC of its own; real_pathspec should still collapse
    # to the full path.
    directory = aff4.FACTORY.Create(client_id.Add("fs/os").Add("a/b/c"),
                                    aff4_type="VFSDirectory", mode="rw",
                                    token=self.token)
    self.assertEqual(directory.real_pathspec.CollapsePath(), "a/b/c")
|
|
"""The tests for the Tasmota binary sensor platform."""
import copy
from datetime import timedelta
import json
from hatasmota.utils import (
get_topic_stat_status,
get_topic_stat_switch,
get_topic_tele_sensor,
get_topic_tele_will,
)
from homeassistant.components import binary_sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import (
ATTR_ASSUMED_STATE,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
import homeassistant.util.dt as dt_util
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message, async_fire_time_changed
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    mac = config["mac"]

    # Publish the discovery payload so the binary_sensor entity is created.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    # Before the device announces itself online, the entity is unavailable.
    state = hass.states.get("binary_sensor.test")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # LWT "Online" makes the entity available, defaulting to off.
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_OFF
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test normal state update
    async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_ON

    async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"OFF"}')
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_OFF

    # Test periodic state update
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_ON

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"OFF"}')
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_OFF

    # Test polled state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS8", '{"StatusSNS":{"Switch1":"ON"}}'
    )
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS8", '{"StatusSNS":{"Switch1":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_OFF
async def test_off_delay(hass, mqtt_mock, setup_tasmota):
    """Test off_delay option."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 13  # PUSHON: 1s off_delay
    mac = config["mac"]

    # Publish the discovery payload so the binary_sensor entity is created.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    # Record every state change so the exact event sequence can be asserted.
    events = []

    @ha.callback
    def callback(event):
        """Verify event got called."""
        events.append(event.data["new_state"].state)

    hass.bus.async_listen(EVENT_STATE_CHANGED, callback)

    # Coming online flips the sensor from unavailable to "off".
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    await hass.async_block_till_done()
    assert events == ["off"]

    async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_ON
    assert events == ["off", "on"]

    # A repeated ON fires another state-changed event (off_delay re-trigger).
    async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_ON
    assert events == ["off", "on", "on"]

    # After the 1 second off_delay the sensor resets to off on its own.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.test")
    assert state.state == STATE_OFF
    assert events == ["off", "on", "on", "off"]
async def test_availability_when_connection_lost(
    hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
    """Test availability after MQTT disconnection."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    # Configure switch 1 so a binary_sensor entity is discovered.
    sensor_config["swc"][0] = 1
    await help_test_availability_when_connection_lost(
        hass, mqtt_client_mock, mqtt_mock, binary_sensor.DOMAIN, sensor_config
    )
async def test_availability(hass, mqtt_mock, setup_tasmota):
    """Test availability."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    # Configure switch 1 so a binary_sensor entity is discovered.
    sensor_config["swc"][0] = 1
    await help_test_availability(
        hass, mqtt_mock, binary_sensor.DOMAIN, sensor_config
    )
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
    """Test availability discovery update."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    # Configure switch 1 so a binary_sensor entity is discovered.
    sensor_config["swc"][0] = 1
    await help_test_availability_discovery_update(
        hass, mqtt_mock, binary_sensor.DOMAIN, sensor_config
    )
async def test_availability_poll_state(
    hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
    """Test polling after MQTT connection (re)established."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    # Configure switch 1 so a binary_sensor entity is discovered.
    sensor_config["swc"][0] = 1
    await help_test_availability_poll_state(
        hass,
        mqtt_client_mock,
        mqtt_mock,
        binary_sensor.DOMAIN,
        sensor_config,
        "tasmota_49A3BC/cmnd/STATUS",
        "8",
    )
async def test_discovery_removal_binary_sensor(hass, mqtt_mock, caplog, setup_tasmota):
    """Test removal of discovered binary_sensor."""
    # First discovery message exposes switch 1; the second reconfigures it so
    # the entity should be removed.
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config["swc"][0] = 1
    removed_config = copy.deepcopy(DEFAULT_CONFIG)
    removed_config["swc"][0] = 0
    await help_test_discovery_removal(
        hass, mqtt_mock, caplog, binary_sensor.DOMAIN, sensor_config, removed_config
    )
async def test_discovery_update_unchanged_binary_sensor(
    hass, mqtt_mock, caplog, setup_tasmota
):
    """Test update of discovered binary_sensor."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config["swc"][0] = 1
    patch_target = (
        "homeassistant.components.tasmota.binary_sensor."
        "TasmotaBinarySensor.discovery_update"
    )
    with patch(patch_target) as discovery_update:
        await help_test_discovery_update_unchanged(
            hass, mqtt_mock, caplog, binary_sensor.DOMAIN, sensor_config, discovery_update
        )
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
    """Test device registry remove."""
    device_config = copy.deepcopy(DEFAULT_CONFIG)
    device_config["swc"][0] = 1
    # Unique id follows the "<mac>_binary_sensor_switch_<idx>" convention.
    unique_id = "{}_binary_sensor_switch_0".format(DEFAULT_CONFIG["mac"])
    await help_test_discovery_device_remove(
        hass, mqtt_mock, binary_sensor.DOMAIN, unique_id, device_config
    )
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
    """Test MQTT subscriptions are managed when entity_id is updated."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config["swc"][0] = 1
    # Topics the entity is expected to (re)subscribe to after the rename.
    expected_topics = [
        get_topic_stat_switch(sensor_config, 0),
        get_topic_tele_sensor(sensor_config),
        get_topic_stat_status(sensor_config, 8),
        get_topic_tele_will(sensor_config),
    ]
    await help_test_entity_id_update_subscriptions(
        hass, mqtt_mock, binary_sensor.DOMAIN, sensor_config, expected_topics
    )
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
    """Test MQTT discovery update when entity_id is updated."""
    sensor_config = copy.deepcopy(DEFAULT_CONFIG)
    # Configure switch 1 so a binary_sensor entity is discovered.
    sensor_config["swc"][0] = 1
    await help_test_entity_id_update_discovery_update(
        hass, mqtt_mock, binary_sensor.DOMAIN, sensor_config
    )
|
|
# Copyright (C) 2013-2015 MetaMorph Software, Inc
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this data, including any software or models in source or binary
# form, as well as any drawings, specifications, and documentation
# (collectively "the Data"), to deal in the Data without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Data, and to
# permit persons to whom the Data is furnished to do so, subject to the
# following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Data.
# THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
# =======================
# This version of the META tools is a fork of an original version produced
# by Vanderbilt University's Institute for Software Integrated Systems (ISIS).
# Their license statement:
# Copyright (C) 2011-2014 Vanderbilt University
# Developed with the sponsorship of the Defense Advanced Research Projects
# Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights
# as defined in DFARS 252.227-7013.
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this data, including any software or models in source or binary
# form, as well as any drawings, specifications, and documentation
# (collectively "the Data"), to deal in the Data without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Data, and to
# permit persons to whom the Data is furnished to do so, subject to the
# following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Data.
# THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
import os
import time
import shutil
import datetime
import logging
import subprocess
from py_modelica.modelica_simulation_tools.tool_base import ToolBase
from py_modelica.modelica_simulation_tools.openmodelica_element_tree import OpenModelicaElementTree
from py_modelica.utility_functions import subprocess_call
from py_modelica.exception_classes import ModelicaInstantiationError, ModelicaCompilationError,\
ModelicaSimulationError
class OpenModelica(ToolBase):
    """py_modelica tool wrapper that drives the OpenModelica compiler (omc)."""

    tool_name = "OpenModelica"
    # NOTE(review): these mutable values are class-level attributes and are
    # therefore shared between all OpenModelica instances; if more than one
    # instance is ever alive at a time they should move into __init__ —
    # confirm intended usage.
    model_statistics = {}
    compile_errors = []
    om_home = ""  # Root of the OpenModelica installation (OPENMODELICAHOME).
    om_etree = None  # OpenModelicaElementTree for parsing .xml files
    short_name = ""  # File-name prefix for generated artifacts.
    def __init__(self, model_config, om_home=""):
        """
        Constructor for the OpenModelica class, called before ToolBase's
        _initialize.

        Resolves the OpenModelica installation directory, either from the
        om_home argument or from the OPENMODELICAHOME environment variable,
        and sets self.tool_path to its "bin" directory.

        Raises ModelicaInstantiationError if no usable installation is found.
        """
        if not om_home:
            # No explicit home given - fall back to the environment variable.
            self.om_home = os.getenv("OPENMODELICAHOME")
            if self.om_home:
                if os.path.exists(self.om_home):
                    self.tool_path = os.path.join(self.om_home, "bin")
                else:
                    msg = "OpenModelica path not found at env: {0}".format(self.om_home)
                    raise ModelicaInstantiationError(msg)
            else:
                if os.name == "nt":
                    # On Windows the env var is mandatory.
                    msg = "No environment variable OPENMODELICAHOME defined."
                    raise ModelicaInstantiationError(msg)
                # NOTE(review): on posix with neither om_home nor the env var
                # set, execution falls through with tool_path unset —
                # presumably omc is expected on PATH; confirm.
        elif os.path.exists(om_home):
            self.om_home = om_home
            self.tool_path = os.path.join(self.om_home, "bin")
        else:
            msg = "Given OpenModelica home not found at: {0}".format(om_home)
            raise ModelicaInstantiationError(msg)
        self._initialize(model_config)
    # end of __init__
    def _write_mos_script(self, log):
        """
        Writes out the .mos-script used to translate the modelica model.

        The script loads the Modelica Standard Library (at the configured
        version), any additional library packages, optionally the model's own
        file, and finally calls translateModel with self.short_name as the
        file-name prefix for the generated artifacts.
        """
        log.debug("Entered _write_mos_script")
        # NOTE(review): opened in binary mode but written with a str — fine on
        # Python 2 (which this file targets); would need "w" on Python 3.
        with open(self.mos_file_name, 'wb') as file_out:
            lines = ['// OpenModelica script file to run a model']
            lines.append('loadModel(Modelica, {"' + self.msl_version + '"});')
            for lib_name in self.lib_package_names:
                lines.append('loadModel({0});'.format(lib_name))
            if self.model_file_name != "":
                # The model lives in its own file: cd to its directory (omc
                # expects forward slashes) and load it.
                lines.append('cd("{0}");'.format(self.mo_dir.replace("\\", "/")))
                lines.append('loadFile("{0}");'.format(os.path.basename(self.model_file_name)))
            translate = 'translateModel(' + self.model_name
            #if self.variable_filter:
            #    translate += ', variable_filter=['
            #    for var in self.variable_filter:
            #        translate += var
            #    translate += ']'
            translate += ', fileNamePrefix="{0}"'.format(self.short_name)
            translate += ');'
            lines.append(translate)
            # getErrorString() makes omc report accumulated errors/warnings.
            lines.append('getErrorString();')
            file_out.write("\n".join(lines))
        log.debug("Generated .mos-script at : {0}".format(self.mos_file_name))
    # end of _write_mos_script
def _setup_libs_env_vars(self, log):
"""
Function adds given library paths to environment-variable OPENMODELICALIBRARY.
Which is updated in os.environ and returned.
If path does not exist on hard-drive it looks for it at; /working_dir/Modelica
(This is where the packages are put during remote execution.)
"""
log.debug('Entered _setup_libs_env_vars')
my_env = os.environ
lib_paths = ""
remote = False
# supports relative paths and adds the package paths if exist
for lib_path in self.lib_package_paths:
lib_full_path = os.path.abspath(lib_path)
if os.path.exists(lib_full_path):
lib_paths += lib_full_path
lib_paths += os.pathsep
else:
print "The library path {0} does not exist, this might lead to errors.".format(lib_full_path)
if 'OPENMODELICALIBRARY' in my_env:
my_env['OPENMODELICALIBRARY'] += os.pathsep + lib_paths
log.debug("Added paths to existing OPENMODELICALIBRARY environment variable; ")
else:
if os.name == 'nt':
om_std_lib = os.path.join(self.om_home, 'lib', 'omlibrary')
elif os.name == 'posix':
om_std_lib = os.sep + os.path.join('usr', 'lib', 'omlibrary')
else:
raise ModelicaInstantiationError('Only Windows and Linux are supported by py_modelica.')
om_lib = {'OPENMODELICALIBRARY': '{0}{1}{2}'.format(om_std_lib, os.pathsep, lib_paths)}
log.debug("No environment variable OPENMODELICALIBRARY found, created;")
my_env.update(om_lib)
log.debug('OPENMODELICALIBRARY : {0}'.format(my_env['OPENMODELICALIBRARY']))
return my_env
# end of _setup_libs_env_vars
def _print_revision_number(self, log):
"""
Gets the revision number of the omc-compiler
"""
log.debug("Entered _print_revision_number")
command = '"{0}" +version'.format(os.path.join(self.tool_path, "omc"))
try:
return_str = subprocess_call(command, log)
version = return_str.split('(')
self.tool_version = version[0].strip()
self.tool_version_number = version[1].strip().strip(')')
except subprocess.CalledProcessError as err:
raise ModelicaCompilationError("Could not call omc.", sp_msg=err.returncode)
# end of _print_revision_number
    def _translate_modelica_model(self, log, my_env):
        """
        Calls omc(.exe) to translate the modelica model into c-code.

        :param log: logger used for progress output
        :param my_env: environment mapping handed to the omc sub-process
            (carries OPENMODELICALIBRARY when extra libraries are used)
        :raises ModelicaCompilationError: if omc fails or no *_init.xml is
            produced
        """
        os.chdir(self.mo_dir)
        # +q : quiet, +s : generate simulation code from the .mos script
        command = '"{0}" +q +s "{1}"'.format(os.path.join(self.tool_path, 'omc'), self.mos_file_name)
        t_stamp = time.time()
        try:
            return_string = subprocess_call(command, log, my_env)
        except subprocess.CalledProcessError as err:
            raise ModelicaCompilationError('OMC could not compile model.', sp_msg=err.returncode)
        self.translation_time = time.time() - t_stamp
        # omc can exit with 0 even on failure; the generated _init.xml file
        # is the real success indicator.
        if os.path.exists(os.path.join(self.mo_dir, self.short_name) + '_init.xml'):
            self.model_is_compiled = True
        else:
            msg = 'Subprocess call with command = "{0}" returned with 0, but _init.xml does not '\
                  'exist - something went wrong during translation of model.'.format(command)
            raise ModelicaCompilationError(msg, return_string)
        # N.B. this is not used and thus not maintained
        if not self.working_dir == self.mo_dir:
            # move every omc-generated artifact into the working directory
            files_to_move = [self.short_name + '.c',
                             self.short_name + '.makefile',
                             self.short_name + '.o',
                             self.short_name + '_functions.c',
                             self.short_name + '_functions.h',
                             self.short_name + '_init.xml',
                             self.short_name + '_records.c',
                             self.short_name + '_records.o',
                             '_' + self.short_name + '.h']
            for this_file in files_to_move:
                dst_file_name = os.path.join(self.working_dir, this_file)
                if os.path.exists(dst_file_name):
                    # remove dst file if it already exists
                    os.remove(dst_file_name)
                this_file = os.path.join(self.mo_dir, this_file)
                if os.path.exists(this_file):
                    shutil.move(this_file, self.working_dir)
        # leave the process in the working directory for the make step
        os.chdir(self.working_dir)
    # end of _translate_modelica_model
    def _make_model(self, log):
        """
        Compiles the generated c-code into an executable

        :param log: logger used for progress output
        :raises ModelicaCompilationError: if the make step fails

        NOTE(review): on an os.name other than 'nt'/'posix' this silently
        does nothing and self.make_time stays unset — confirm intended.
        """
        if os.name == 'nt':
            # Windows
            # Make model
            # Add %OPENMODELICAHOME%\MinGW\bin to environment variables
            env_var_mingw = os.path.join(os.getenv('OPENMODELICAHOME'), 'mingw', 'bin')
            # NOTE(review): this is os.environ itself, so PATH grows on every
            # call within the same process.
            my_env = os.environ
            my_env["PATH"] += os.pathsep + env_var_mingw
            command = 'mingw32-make.exe -f {0}.makefile'.format(self.short_name)
            # compile the c-code
            t_stamp = time.time()
            try:
                subprocess_call(command, log, my_env)
            except subprocess.CalledProcessError as err:
                raise ModelicaCompilationError("Generated C-code from omc could not be compiled",
                                               sp_msg=err.returncode)
            self.make_time = time.time() - t_stamp
        elif os.name == 'posix':
            # Unix
            # make -f model_name.makefile
            command = "make -f {0}.makefile".format(self.short_name)
            t_stamp = time.time()
            try:
                subprocess_call(command, log)
            except subprocess.CalledProcessError as err:
                raise ModelicaCompilationError("Generated C-code from omc could not be compiled",
                                               sp_msg=err.returncode)
            self.make_time = time.time() - t_stamp
    # end of _make_model
    def compile_model(self):
        """
        Compile the model

        Creates the working directory, writes the .mos script, translates the
        Modelica model to C with omc and builds the executable with make.
        The generated *_init.xml is then parsed into self.om_etree.

        :returns: True when compilation succeeded (self.model_is_compiled)
        """
        log = logging.getLogger()
        log.debug("Entered compile_model")
        # create a directory for the compiled model
        self.working_dir = os.path.normpath(os.path.join(os.getcwd(), self.output_dir))
        if not os.path.exists(self.working_dir):
            os.makedirs(self.working_dir)
        log.debug("Working dir : {0}".format(self.working_dir))
        # write .mos script (short_name is used for output files)
        if not self.short_name:
            # default the result name to the last segment of the model name
            self.short_name = self.model_name.split('.')[-1]
            log.info('Result mat-file name not given using default : {0}_res.mat'.format(
                self.short_name))
        else:
            log.info('Result mat-file name set to : {0}_res.mat'.format(self.short_name))
        self.result_mat = '{0}_res.mat'.format(self.short_name)
        self.mos_file_name = 'om_sim.mos'
        self._write_mos_script(log)
        # print and save revision number
        self._print_revision_number(log)
        # Add paths to additional modelica-libraries in OPENMODELICALIBRARY
        my_env = os.environ
        if self.lib_package_paths:
            my_env = self._setup_libs_env_vars(log)
        # translate the modelica code according to the .mos script
        self._translate_modelica_model(log, my_env)
        # compile the c-code into executable
        self._make_model(log)
        self.compilation_time = self.translation_time + self.make_time
        # Load *_init.xml into data-tree-structure
        xml_file = os.path.join(self.working_dir, '{0}_init.xml'.format(self.short_name))
        self.om_etree = OpenModelicaElementTree(xml_file)
        self.model_statistic = self.om_etree.get_statistics()
        return self.model_is_compiled
    # end of compile_model
def simulate_model(self):
"""
Simulate model using current settings
"""
log = logging.getLogger()
log.debug("Entered simulate_model")
if not self.model_is_compiled:
msg = 'The model was never compiled!'
log.error(msg)
raise ModelicaSimulationError('The model was never compiled!')
# change current directory to working directory
os.chdir(self.working_dir)
# short_name.exe
command = ''
if os.name == 'nt':
command = '{0}.exe'.format(self.short_name)
elif os.name == 'posix':
command = './{0}'.format(self.short_name)
# run the simulation
total_cnt = 0 # Counter to check to total elapsed simulation time
om_stdout = open('om_stdout.txt', 'w')
t_stamp = time.time()
sim_process = subprocess.Popen([command], stdout=om_stdout, stderr=om_stdout)
log.debug('OpenModelica simulation sub-process opened.')
while sim_process.poll() is None:
time.sleep(1)
total_cnt += 1
if total_cnt > self.max_simulation_time:
sim_process.kill()
raise ModelicaSimulationError("OpenModelica simulation took more than {1:.1f} hours. "
"Simulation killed.".format(float(self.max_simulation_time)/3600))
self.simulation_time = time.time() - t_stamp
self.total_time = self.compilation_time + self.simulation_time
om_stdout.close()
with open('om_stdout.txt', 'r') as f_in:
return_string = '\n'.join(f_in.readlines())
log.info("Simulation output : {0}".format(return_string))
if not os.path.exists(self.result_mat):
msg = 'Subprocess call with command = "{0}" returned with 0, but the result '\
'.mat-file does not exist'.format(command)
raise ModelicaSimulationError(msg, return_string)
else:
self.model_did_simulate = True
log.info('OpenModelica simulation was successful.')
return self.model_did_simulate
# end of simulate_model
    def change_experiment(self,
                          start_time='0',
                          stop_time='1',
                          n_interval='500',
                          tolerance='1e-5',
                          solver='dassl',
                          increment='',
                          max_fixed_step='',
                          output_format='',
                          variable_filter=''):
        """
        Change the default experiment values.

        All values are strings and are written into the model's *_init.xml
        through self.om_etree; the model must be compiled first.

        :param start_time: simulation start time, default '0'
        :param stop_time: simulation stop time, default '1'
        :param n_interval: number of output intervals, default '500'
        :param tolerance: solver tolerance, default '1e-5'
        :param solver: solver name, default 'dassl'
        :param increment: output increment; when empty it is derived as
            (stop_time - start_time) / n_interval
        :param max_fixed_step: accepted for API symmetry but currently unused
        :param output_format: result-file format, optional
        :param variable_filter: filter for result variables, optional
        :returns: True if any value was changed in the xml, False otherwise
        """
        log = logging.getLogger()
        log.debug("Entered change_experiment")
        if not self.model_is_compiled:
            msg = "Model must be compiled before changing experiment."
            log.error(msg)
            return False
        os.chdir(self.working_dir)
        self.experiment.clear()
        self.experiment['StartTime'] = start_time
        self.experiment['StopTime'] = stop_time
        self.experiment['NumberOfIntervals'] = n_interval
        self.experiment['Tolerance'] = tolerance
        self.experiment['Solver'] = solver
        if increment:
            self.experiment['Increment'] = increment
        else:
            # derive the increment from the interval count
            step_size = (float(stop_time) - float(start_time))/float(n_interval)
            self.experiment['Increment'] = str(step_size)
        if output_format:
            self.experiment['OutputFormat'] = output_format
        changed = self.om_etree.change_experiment(self.experiment['StartTime'],
                                                  self.experiment['StopTime'],
                                                  self.experiment['Increment'],
                                                  self.experiment['Tolerance'],
                                                  self.experiment['Solver'],
                                                  output_format,
                                                  variable_filter)
        # generate a new date_time for saving result
        self.date_time = '{0}'.format(datetime.datetime.today())
        if changed:
            self.om_etree.write()
        os.chdir(self.root_dir)
        if changed:
            log.info("Experiment has been change to: {0}".format(self.experiment))
        else:
            log.debug("No changes were made.")
        return changed
    # end of change_experiment
def change_parameter(self, change_dict):
"""
Change parameters values of those given in change_dict
"""
log = logging.getLogger()
log.debug("Entered change_parameter")
if not self.model_is_compiled:
log.error("Model not compiled before changing parameters.")
return False
os.chdir(self.working_dir)
changed = self.om_etree.change_parameter(change_dict)
if changed:
self.om_etree.write()
os.chdir(self.root_dir)
# generate a new date_time for saving result
self.date_time = '{0}'.format(datetime.datetime.today())
return changed
# end of change_parameter
|
|
import pandas as pd
import json, hashlib, os, codecs, base64
from igf_data.igfdb.baseadaptor import BaseAdaptor
from igf_data.igfdb.igfTables import User
class UserAdaptor(BaseAdaptor):
  '''
  An adaptor class for table User
  '''
  def _email_check(self, email):
    '''
    An internal function to check if email_id has '@' or not

    :param email: a string containing the email id
    :raises ValueError: if the email id does not contain '@'
    '''
    if '@' not in email:
      raise ValueError('Email id {0} is not correctly formatted'.format(email))

  def _encrypt_password(self, series, password_column='password',
                        salt_column='encryption_salt',
                        ht_pass_column='ht_password'):
    '''
    An internal function for encrypting password

    :param series: A pandas data series
    :param password_column: Name of the password column, default password
    :param salt_column: Name of the salt column, default encryption_salt
    :param ht_pass_column: Name of the ht_password column, default ht_password
    :returns: A pandas series
    '''
    if not isinstance(series, pd.Series):
      series = pd.Series(series)  # FIX: was pd.DataFrame(), contradicting the isinstance check above
    if password_column in series.index and \
       not pd.isnull(series[password_column]):  # password is optional
      salt = codecs.encode(os.urandom(32), "hex").decode("utf-8")  # calculate salt value
      password = series[password_column]  # fetch password
      if not isinstance(password, str):
        # FIX: was str(series.password_column) — an attribute lookup of the
        # literal label — and the extra .encode() produced bytes, which broke
        # the str concatenation in salt + password below
        password = str(series[password_column])
      if password:  # always encrypt password
        ht_pass = \
          '{0}{1}'.format(
            '{SHA}',
            base64.b64encode(
              hashlib.sha1(password.encode('utf-8')).
              digest()).decode())  # calculate sha1 for htaccess password
        series[ht_pass_column] = ht_pass  # set htaccess password
        key = salt + password  # construct key using salt and password
        password = hashlib.sha512(str(key).encode('utf-8')).hexdigest()  # create password hash
        series[password_column] = password  # set hash to data series
        series[salt_column] = salt  # set salt to data series
    return series

  def _map_missing_user_status(self, data_series, categoty_column, hpc_user_column,
                               hpc_user, non_hpc_user):
    '''
    An internal function for assigning user status

    :param data_series: A pandas data series
    :param categoty_column: Name of the category column (typo'd name kept for
                            backward compatibility with keyword callers)
    :param hpc_user_column: Name of the hpc username column
    :param hpc_user: HPC user tag
    :param non_hpc_user: Non HPC user tag
    :returns: A pandas data series
    '''
    if not isinstance(data_series, pd.Series):
      data_series = pd.Series(data_series)  # FIX: was pd.DataFrame(), contradicting the isinstance check
    if categoty_column not in data_series or \
       pd.isnull(data_series[categoty_column]):
      if hpc_user_column in data_series and \
         not pd.isnull(data_series[hpc_user_column]) and \
         data_series[hpc_user_column] != '':
        data_series[categoty_column] = hpc_user  # assign hpc user
      else:
        data_series[categoty_column] = non_hpc_user  # non hpc user
    return data_series

  def _preprocess_data(self, data, password_column='password', categoty_column='category',
                       email_column='email_id', hpc_user_column='hpc_username',
                       hpc_user='HPC_USER', non_hpc_user='NON_HPC_USER',
                       user_igf_id_column='user_igf_id', username_column='username',
                       salt_column='encryption_salt'):
    '''
    An internal function for preprocessing data before loading

    :param data: A pandas dataframe or a list of dictionaries
    :param password_column: Name of the password column, default password
    :param categoty_column: Name of the user category column, default category
    :param email_column: Name of the email id column, default email_id
    :param hpc_user_column: Name of the hpc username column, default hpc_username
    :param hpc_user: Tag name for HPC user, default HPC_USER
    :param non_hpc_user: Tag name for non HPC user, default NON_HPC_USER
    :param user_igf_id_column: Name of the user id column, default user_igf_id
    :param username_column: Name of the igf username column, default username
    :param salt_column: Name of the salt column, default encryption_salt
    :returns: A pandas dataframe
    '''
    if not isinstance(data, pd.DataFrame):
      data = pd.DataFrame(data)
    new_data = data.apply(lambda x: self._encrypt_password(series=x), 1)  # encrypt password
    new_data[email_column].map(lambda x: self._email_check(email=x))  # check email id, it should contain '@'
    new_data = new_data.fillna('')
    if categoty_column not in new_data.columns:
      new_data[categoty_column] = None  # add category column if it doesn't exist
    # FIX: the result of apply() was discarded, so the assigned category
    # could be lost; capture it back into new_data
    new_data = \
      new_data.apply(
        lambda x: self._map_missing_user_status(
          data_series=x,
          categoty_column=categoty_column,
          hpc_user_column=hpc_user_column,
          hpc_user=hpc_user,
          non_hpc_user=non_hpc_user),
        axis=1)  # assign category; users with an hpc_username become 'HPC_USER'
    return new_data

  def store_user_data(self, data, autosave=True):
    '''
    Load data to user table

    :param data: A pandas dataframe or a list of dictionaries
    :param autosave: A toggle for autocommit, default True
    :returns: None
    '''
    try:
      if not isinstance(data, pd.DataFrame):
        data = pd.DataFrame(data)
      data = self._preprocess_data(data=data)
      self.store_records(table=User, data=data, mode='serial')
      if autosave:
        self.commit_session()
    except:
      if autosave:
        self.rollback_session()  # undo the partial insert before re-raising
      raise

  def fetch_user_records_igf_id(self, user_igf_id):
    '''
    A method for fetching data for User table

    :param user_igf_id: an igf id
    :returns: user object
    '''
    return \
      self.fetch_records_by_column(
        table=User,
        column_name=User.user_igf_id,
        column_id=user_igf_id,
        output_mode='one')

  def fetch_user_records_email_id(self, user_email_id):
    '''
    A method for fetching data for User table

    :param user_email_id: an email id
    :returns: user object
    '''
    return \
      self.fetch_records_by_column(
        table=User,
        column_name=User.email_id,
        column_id=user_email_id,
        output_mode='one')

  def check_user_records_email_id(self, email_id):
    '''
    A method for checking existing user data in db

    :param email_id: An email id
    :returns: True if the user is present in db, False otherwise
    '''
    user_obj = \
      self.fetch_records_by_column(
        table=User,
        column_name=User.email_id,
        column_id=email_id,
        output_mode='one_or_none')
    return user_obj is not None
|
|
from __future__ import division, absolute_import, print_function
import os
import re
import sys
import types
import shlex
import time
from copy import copy
from distutils import ccompiler
from distutils.ccompiler import *
from distutils.errors import DistutilsExecError, DistutilsModuleError, \
DistutilsPlatformError, CompileError
from distutils.sysconfig import customize_compiler
from distutils.version import LooseVersion
from numpy.distutils import log
from numpy.distutils.compat import get_exception
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \
quote_args, get_num_build_jobs, \
_commandline_dep_string
# globals for parallel build management
try:
    import threading
except ImportError:
    # threads unavailable on this build; use the no-op stand-in
    import dummy_threading as threading
# semaphore limiting concurrent compile jobs; created lazily on first compile
_job_semaphore = None
# guards _job_semaphore creation and membership changes of _processing_files
_global_lock = threading.Lock()
# object files currently being compiled by some worker thread
_processing_files = set()
def _needs_build(obj, cc_args, extra_postargs, pp_opts):
"""
Check if an objects needs to be rebuild based on its dependencies
Parameters
----------
obj : str
object file
Returns
-------
bool
"""
# defined in unixcompiler.py
dep_file = obj + '.d'
if not os.path.exists(dep_file):
return True
# dep_file is a makefile containing 'object: dependencies'
# formatted like posix shell (spaces escaped, \ line continuations)
# the last line contains the compiler commandline arguments as some
# projects may compile an extension multiple times with different
# arguments
with open(dep_file, "r") as f:
lines = f.readlines()
cmdline =_commandline_dep_string(cc_args, extra_postargs, pp_opts)
last_cmdline = lines[-1]
if last_cmdline != cmdline:
return True
contents = ''.join(lines[:-1])
deps = [x for x in shlex.split(contents, posix=True)
if x != "\n" and not x.endswith(":")]
try:
t_obj = os.stat(obj).st_mtime
# check if any of the dependencies is newer than the object
# the dependencies includes the source used to create the object
for f in deps:
if os.stat(f).st_mtime > t_obj:
return True
except OSError:
# no object counts as newer (shouldn't happen if dep_file exists)
return True
return False
def replace_method(klass, method_name, func):
    """Install *func* as method *method_name* on *klass*.

    On Python 2 a proper unbound method object is created; on Python 3
    unbound methods no longer exist (and MethodType does not work for
    this), so a plain wrapper function is attached instead.
    """
    if sys.version_info[0] >= 3:
        def wrapper(self, *args, **kw):
            return func(self, *args, **kw)
        method = wrapper
    else:
        method = types.MethodType(func, None, klass)
    setattr(klass, method_name, method)
######################################################################
## Method that subclasses may redefine. But don't call this method,
## it is private to the CCompiler class and may return unexpected
## results if used elsewhere. So, you have been warned..
def CCompiler_find_executables(self):
    """
    Does nothing here, but is called by the get_version method and can be
    overridden by subclasses. In particular it is redefined in the `FCompiler`
    class where more documentation can be found.
    """
    pass
replace_method(CCompiler, 'find_executables', CCompiler_find_executables)
# Using customized CCompiler.spawn.
def CCompiler_spawn(self, cmd, display=None):
    """
    Execute a command in a sub-process.

    Parameters
    ----------
    cmd : str
        The command to execute.
    display : str or sequence of str, optional
        The text to add to the log file kept by `numpy.distutils`.
        If not given, `display` is equal to `cmd`.

    Returns
    -------
    None

    Raises
    ------
    DistutilsExecError
        If the command failed, i.e. the exit status was not 0.
    """
    shown = cmd if display is None else display
    if is_sequence(shown):
        shown = ' '.join(list(shown))
    log.info(shown)
    status, output = exec_command(cmd)
    if not status:
        return
    # failure: report the captured output and raise
    if is_sequence(cmd):
        cmd = ' '.join(list(cmd))
    try:
        print(output)
    except UnicodeError:
        # When installing through pip, output can contain non-ascii chars
        pass
    hint = ''
    if re.search('Too many open files', output):
        hint = '\nTry rerunning setup command until build succeeds.'
    raise DistutilsExecError('Command "%s" failed with exit status %d%s' % (cmd, status, hint))
replace_method(CCompiler, 'spawn', CCompiler_spawn)
def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
    """
    Return the name of the object files for the given source files.

    Parameters
    ----------
    source_filenames : list of str
        The list of paths to source files. Paths can be either relative or
        absolute, this is handled transparently.
    strip_dir : bool, optional
        Whether to strip the directory from the returned paths. If True,
        the file name prepended by `output_dir` is returned. Default is False.
    output_dir : str, optional
        If given, this path is prepended to the returned paths to the
        object files.

    Returns
    -------
    obj_names : list of str
        The list of paths to the object files corresponding to the source
        files in `source_filenames`.
    """
    if output_dir is None:
        output_dir = ''
    obj_names = []
    for src_name in source_filenames:
        base, ext = os.path.splitext(os.path.normpath(src_name))
        base = os.path.splitdrive(base)[1]   # Chop off the drive
        base = base[os.path.isabs(base):]    # If abs, chop off leading /
        if base.startswith('..'):
            # Fold leading '..' components into the name of the directory
            # they resolve to; inner ones (if any) were already collapsed
            # by normpath above.
            cut = base.rfind('..') + 2
            resolved = os.path.basename(os.path.abspath(base[:cut]))
            base = resolved + base[cut:]
        if ext not in self.src_extensions:
            raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
        if strip_dir:
            base = os.path.basename(base)
        obj_names.append(os.path.join(output_dir, base + self.obj_extension))
    return obj_names
replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames)
def CCompiler_compile(self, sources, output_dir=None, macros=None,
                      include_dirs=None, debug=0, extra_preargs=None,
                      extra_postargs=None, depends=None):
    """
    Compile one or more source files.

    Please refer to the Python distutils API reference for more details.

    Parameters
    ----------
    sources : list of str
        A list of filenames
    output_dir : str, optional
        Path to the output directory.
    macros : list of tuples
        A list of macro definitions.
    include_dirs : list of str, optional
        The directories to add to the default include file search path for
        this compilation only.
    debug : bool, optional
        Whether or not to output debug symbols in or alongside the object
        file(s).
    extra_preargs, extra_postargs : ?
        Extra pre- and post-arguments.
    depends : list of str, optional
        A list of file names that all targets depend on.

    Returns
    -------
    objects : list of str
        A list of object file names, one per source file `sources`.

    Raises
    ------
    CompileError
        If compilation fails.

    Notes
    -----
    Builds may run in parallel (thread pool) when multiple build jobs are
    configured; a module-level semaphore caps the total number of
    concurrent compiles across extensions.
    """
    # This method is effective only with Python >=2.3 distutils.
    # Any changes here should be applied also to fcompiler.compile
    # method to support pre Python 2.3 distutils.
    global _job_semaphore
    jobs = get_num_build_jobs()
    # setup semaphore to not exceed number of compile jobs when parallelized at
    # extension level (python >= 3.5)
    with _global_lock:
        if _job_semaphore is None:
            _job_semaphore = threading.Semaphore(jobs)
    if not sources:
        return []
    # FIXME:RELATIVE_IMPORT
    if sys.version_info[0] < 3:
        from .fcompiler import FCompiler, is_f_file, has_f90_header
    else:
        from numpy.distutils.fcompiler import (FCompiler, is_f_file,
                                               has_f90_header)
    # log which compiler front-ends will be used
    if isinstance(self, FCompiler):
        display = []
        for fc in ['f77', 'f90', 'fix']:
            fcomp = getattr(self, 'compiler_'+fc)
            if fcomp is None:
                continue
            display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp)))
        display = '\n'.join(display)
    else:
        ccomp = self.compiler_so
        display = "C compiler: %s\n" % (' '.join(ccomp),)
    log.info(display)
    macros, objects, extra_postargs, pp_opts, build = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
    display = "compile options: '%s'" % (' '.join(cc_args))
    if extra_postargs:
        display += "\nextra options: '%s'" % (' '.join(extra_postargs))
    log.info(display)
    def single_compile(args):
        # args is one (obj, (src, ext)) item from the build map
        obj, (src, ext) = args
        if not _needs_build(obj, cc_args, extra_postargs, pp_opts):
            return
        # check if we are currently already processing the same object
        # happens when using the same source in multiple extensions
        while True:
            # need explicit lock as there is no atomic check and add with GIL
            with _global_lock:
                # file not being worked on, start working
                if obj not in _processing_files:
                    _processing_files.add(obj)
                    break
            # wait for the processing to end
            time.sleep(0.1)
        try:
            # retrieve slot from our #job semaphore and build
            with _job_semaphore:
                self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
        finally:
            # register being done processing
            with _global_lock:
                _processing_files.remove(obj)
    if isinstance(self, FCompiler):
        objects_to_build = list(build.keys())
        f77_objects, other_objects = [], []
        for obj in objects:
            if obj in objects_to_build:
                src, ext = build[obj]
                if self.compiler_type=='absoft':
                    obj = cyg2win32(obj)
                    src = cyg2win32(src)
                if is_f_file(src) and not has_f90_header(src):
                    f77_objects.append((obj, (src, ext)))
                else:
                    other_objects.append((obj, (src, ext)))
        # f77 objects can be built in parallel
        build_items = f77_objects
        # build f90 modules serial, module files are generated during
        # compilation and may be used by files later in the list so the
        # ordering is important
        for o in other_objects:
            single_compile(o)
    else:
        build_items = build.items()
    if len(build) > 1 and jobs > 1:
        # build parallel
        import multiprocessing.pool
        pool = multiprocessing.pool.ThreadPool(jobs)
        pool.map(single_compile, build_items)
        pool.close()
    else:
        # build serial
        for o in build_items:
            single_compile(o)
    # Return *all* object filenames, not just the ones we just built.
    return objects
replace_method(CCompiler, 'compile', CCompiler_compile)
def CCompiler_customize_cmd(self, cmd, ignore=()):
    """
    Customize compiler using distutils command.

    Parameters
    ----------
    cmd : class instance
        An instance inheriting from `distutils.cmd.Command`.
    ignore : sequence of str, optional
        List of `CCompiler` commands (without ``'set_'``) that should not be
        altered. Strings that are checked for are:
        ``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs',
        'rpath', 'link_objects')``.

    Returns
    -------
    None
    """
    log.info('customize %s using %s' % (self.__class__.__name__,
                                        cmd.__class__.__name__))
    def take(attr):
        # use a command attribute only when it is set and not ignored
        return attr not in ignore and getattr(cmd, attr, None) is not None
    if take('include_dirs'):
        self.set_include_dirs(cmd.include_dirs)
    if take('define'):
        for (name, value) in cmd.define:
            self.define_macro(name, value)
    if take('undef'):
        for macro in cmd.undef:
            self.undefine_macro(macro)
    if take('libraries'):
        self.set_libraries(self.libraries + cmd.libraries)
    if take('library_dirs'):
        self.set_library_dirs(self.library_dirs + cmd.library_dirs)
    if take('rpath'):
        self.set_runtime_library_dirs(cmd.rpath)
    if take('link_objects'):
        self.set_link_objects(cmd.link_objects)
replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd)
def _compiler_to_string(compiler):
props = []
mx = 0
keys = list(compiler.executables.keys())
for key in ['version', 'libraries', 'library_dirs',
'object_switch', 'compile_switch',
'include_dirs', 'define', 'undef', 'rpath', 'link_objects']:
if key not in keys:
keys.append(key)
for key in keys:
if hasattr(compiler, key):
v = getattr(compiler, key)
mx = max(mx, len(key))
props.append((key, repr(v)))
lines = []
format = '%-' + repr(mx+1) + 's = %s'
for prop in props:
lines.append(format % prop)
return '\n'.join(lines)
def CCompiler_show_customization(self):
    """
    Print the compiler customizations to stdout.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Notes
    -----
    Printing is only done if the distutils log threshold is < 2.
    """
    # Dead debug branch kept from upstream; flip to `if 1:` locally to log
    # the individual customization attributes.
    if 0:
        for attrname in ['include_dirs', 'define', 'undef',
                         'libraries', 'library_dirs',
                         'rpath', 'link_objects']:
            attr = getattr(self, attrname, None)
            if not attr:
                continue
            log.info("compiler '%s' is set to %s" % (attrname, attr))
    # populate self.version as a side effect; any failure is non-fatal here
    try:
        self.get_version()
    except Exception:
        pass
    if log._global_log.threshold<2:
        print('*'*80)
        print(self.__class__)
        print(_compiler_to_string(self))
        print('*'*80)
replace_method(CCompiler, 'show_customization', CCompiler_show_customization)
def CCompiler_customize(self, dist, need_cxx=0):
    """
    Do any platform-specific customization of a compiler instance.

    This method calls `distutils.sysconfig.customize_compiler` for
    platform-specific customization, as well as optionally remove a flag
    to suppress spurious warnings in case C++ code is being compiled.

    Parameters
    ----------
    dist : object
        This parameter is not used for anything.
    need_cxx : bool, optional
        Whether or not C++ has to be compiled. If so (True), the
        ``"-Wstrict-prototypes"`` option is removed to prevent spurious
        warnings. Default is False.

    Returns
    -------
    None

    Notes
    -----
    All the default options used by distutils can be extracted with::

        from distutils import sysconfig
        sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
                                  'CCSHARED', 'LDSHARED', 'SO')
    """
    # See FCompiler.customize for suggested usage.
    log.info('customize %s' % (self.__class__.__name__))
    customize_compiler(self)
    if need_cxx:
        # In general, distutils uses -Wstrict-prototypes, but this option is
        # not valid for C++ code, only for C.  Remove it if it's there to
        # avoid a spurious warning on every compilation.
        try:
            self.compiler_so.remove('-Wstrict-prototypes')
        except (AttributeError, ValueError):
            pass
        if hasattr(self, 'compiler') and 'cc' in self.compiler[0]:
            if not self.compiler_cxx:
                # derive the C++ driver name from the C driver name
                if self.compiler[0].startswith('gcc'):
                    a, b = 'gcc', 'g++'
                else:
                    a, b = 'cc', 'c++'
                self.compiler_cxx = [self.compiler[0].replace(a, b)]\
                                    + self.compiler[1:]
        else:
            if hasattr(self, 'compiler'):
                log.warn("#### %s #######" % (self.compiler,))
            if not hasattr(self, 'compiler_cxx'):
                log.warn('Missing compiler_cxx fix for ' + self.__class__.__name__)
    # check if compiler supports gcc style automatic dependencies
    # run on every extension so skip for known good compilers
    if hasattr(self, 'compiler') and ('gcc' in self.compiler[0] or
                                      'g++' in self.compiler[0] or
                                      'clang' in self.compiler[0]):
        self._auto_depends = True
    elif os.name == 'posix':
        # probe with a tiny test compile whether -MMD/-MF is accepted
        import tempfile
        import shutil
        tmpdir = tempfile.mkdtemp()
        try:
            fn = os.path.join(tmpdir, "file.c")
            with open(fn, "w") as f:
                f.write("int a;\n")
            self.compile([fn], output_dir=tmpdir,
                         extra_preargs=['-MMD', '-MF', fn + '.d'])
            self._auto_depends = True
        except CompileError:
            self._auto_depends = False
        finally:
            shutil.rmtree(tmpdir)
    return
replace_method(CCompiler, 'customize', CCompiler_customize)
def simple_version_match(pat=r'[-.\d]+', ignore='', start=''):
    """
    Simple matching of version numbers, for use in CCompiler and FCompiler.

    Parameters
    ----------
    pat : str, optional
        A regular expression matching version numbers.
        Default is ``r'[-.\\d]+'``.
    ignore : str, optional
        A regular expression matching patterns to skip.
        Default is ``''``, in which case nothing is skipped.
    start : str, optional
        A regular expression matching the start of where to start looking
        for version numbers.
        Default is ``''``, in which case searching is started at the
        beginning of the version string given to `matcher`.

    Returns
    -------
    matcher : callable
        A function that is appropriate to use as the ``.version_match``
        attribute of a `CCompiler` class. `matcher` takes a single parameter,
        a version string.
    """
    def matcher(self, version_string):
        # the version may sit on a later line, so flatten newlines first
        version_string = version_string.replace('\n', ' ')
        pos = 0
        if start:
            head = re.match(start, version_string)
            if not head:
                return None
            pos = head.end()
        while True:
            found = re.search(pat, version_string[pos:])
            if not found:
                return None
            if not (ignore and re.match(ignore, found.group(0))):
                return found.group(0)
            # skip this candidate and keep searching
            pos = found.end()
    return matcher
def CCompiler_get_version(self, force=False, ok_status=[0]):
    """
    Return compiler version, or None if compiler is not available.

    Parameters
    ----------
    force : bool, optional
        If True, force a new determination of the version, even if the
        compiler already has a version attribute. Default is False.
    ok_status : list of int, optional
        The list of status values returned by the version look-up process
        for which a version string is returned. If the status value is not
        in `ok_status`, None is returned. Default is ``[0]``.

    Returns
    -------
    version : str or None
        Version string, in the format of `distutils.version.LooseVersion`.
    """
    # return the cached value unless a re-probe was requested
    if not force and hasattr(self, 'version'):
        return self.version
    # allow subclasses to locate their executables first (no-op by default)
    self.find_executables()
    try:
        version_cmd = self.version_cmd
    except AttributeError:
        return None
    if not version_cmd or not version_cmd[0]:
        return None
    try:
        matcher = self.version_match
    except AttributeError:
        # fall back to a matcher built from version_pattern, which must
        # define a named group 'version'
        try:
            pat = self.version_pattern
        except AttributeError:
            return None
        def matcher(version_string):
            m = re.match(pat, version_string)
            if not m:
                return None
            version = m.group('version')
            return version
    status, output = exec_command(version_cmd, use_tee=0)
    version = None
    if status in ok_status:
        version = matcher(output)
        if version:
            version = LooseVersion(version)
    # cache the result (possibly None) on the instance
    self.version = version
    return version
replace_method(CCompiler, 'get_version', CCompiler_get_version)
def CCompiler_cxx_compiler(self):
    """
    Return the C++ compiler.

    Parameters
    ----------
    None

    Returns
    -------
    cxx : class instance
        The C++ compiler, as a `CCompiler` instance. For MSVC-style
        compilers this is the instance itself; otherwise a copy whose
        compile and link front-ends use the configured C++ driver.
    """
    if self.compiler_type in ('msvc', 'intelw', 'intelemw'):
        return self
    cxx = copy(self)
    cxx.compiler_so = [cxx.compiler_cxx[0]] + cxx.compiler_so[1:]
    uses_aix_script = sys.platform.startswith('aix') and 'ld_so_aix' in cxx.linker_so[0]
    if uses_aix_script:
        # AIX needs the ld_so_aix script included with Python; keep it in
        # front and swap the compiler that follows it.
        cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \
                        + cxx.linker_so[2:]
    else:
        cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:]
    return cxx
replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler)
# Register the extra compilers numpy.distutils knows about, in the format
# distutils expects: name -> (module, class, description).
compiler_class['intel'] = ('intelccompiler', 'IntelCCompiler',
                           "Intel C Compiler for 32-bit applications")
compiler_class['intele'] = ('intelccompiler', 'IntelItaniumCCompiler',
                            "Intel C Itanium Compiler for Itanium-based applications")
compiler_class['intelem'] = ('intelccompiler', 'IntelEM64TCCompiler',
                             "Intel C Compiler for 64-bit applications")
compiler_class['intelw'] = ('intelccompiler', 'IntelCCompilerW',
                            "Intel C Compiler for 32-bit applications on Windows")
compiler_class['intelemw'] = ('intelccompiler', 'IntelEM64TCCompilerW',
                              "Intel C Compiler for 64-bit applications on Windows")
compiler_class['pathcc'] = ('pathccompiler', 'PathScaleCCompiler',
                            "PathScale Compiler for SiCortex-based applications")
# make the new entries selectable as defaults on their platforms
ccompiler._default_compilers += (('linux.*', 'intel'),
                                 ('linux.*', 'intele'),
                                 ('linux.*', 'intelem'),
                                 ('linux.*', 'pathcc'),
                                 ('nt', 'intelw'),
                                 ('nt', 'intelemw'))
if sys.platform == 'win32':
    compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
                                 "Mingw32 port of GNU C Compiler for Win32"\
                                 "(for MSC built Python)")
    if mingw32():
        # On windows platforms, we want to default to mingw32 (gcc)
        # because msvc can't build blitz stuff.
        log.info('Setting mingw32 as default compiler for nt.')
        ccompiler._default_compilers = (('nt', 'mingw32'),) \
                                       + ccompiler._default_compilers
_distutils_new_compiler = new_compiler
def new_compiler (plat=None,
                  compiler=None,
                  verbose=0,
                  dry_run=0,
                  force=0):
    """Create a CCompiler instance, preferring numpy.distutils' compiler
    modules and falling back to plain distutils ones.

    NOTE(review): `verbose` is accepted for distutils API compatibility but
    is not forwarded to the compiler class here — confirm intended.
    """
    # Try first C compilers from numpy.distutils.
    if plat is None:
        plat = os.name
    try:
        if compiler is None:
            compiler = get_default_compiler(plat)
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError(msg)
    module_name = "numpy.distutils." + module_name
    try:
        __import__ (module_name)
    except ImportError:
        msg = str(get_exception())
        log.info('%s in numpy.distutils; trying from distutils',
                 str(msg))
        # strip the 'numpy.' prefix and retry with the plain distutils module
        module_name = module_name[6:]
        try:
            __import__(module_name)
        except ImportError:
            msg = str(get_exception())
            raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \
                  module_name)
    try:
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except KeyError:
        raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " +
                                    "in module '%s'") % (class_name, module_name))
    compiler = klass(None, dry_run, force)
    log.debug('new_compiler returns %s' % (klass))
    return compiler
ccompiler.new_compiler = new_compiler
# Keep a reference to the distutils implementation being wrapped.
_distutils_gen_lib_options = gen_lib_options
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
    """Quote directory arguments, then flatten any nested sequences in the
    option list returned by distutils' gen_lib_options."""
    library_dirs = quote_args(library_dirs)
    runtime_library_dirs = quote_args(runtime_library_dirs)
    r = _distutils_gen_lib_options(compiler, library_dirs,
                                   runtime_library_dirs, libraries)
    lib_opts = []
    for i in r:
        if is_sequence(i):
            # Some compilers return per-library option *lists*; splice
            # them into the flat option list.
            lib_opts.extend(list(i))
        else:
            lib_opts.append(i)
    return lib_opts
ccompiler.gen_lib_options = gen_lib_options
# Also fix up the various compiler modules, which do
#   from distutils.ccompiler import gen_lib_options
# and therefore hold their own (stale) reference to the original.
# Don't bother with mwerks, as we don't support Classic Mac.
for _cc in ['msvc9', 'msvc', '_msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']:
    _m = sys.modules.get('distutils.' + _cc + 'compiler')
    if _m is not None:
        setattr(_m, 'gen_lib_options', gen_lib_options)

# Wrap gen_preprocess_options so include directories (which may contain
# spaces) are quoted before distutils builds the option strings.
_distutils_gen_preprocess_options = gen_preprocess_options
def gen_preprocess_options (macros, include_dirs):
    include_dirs = quote_args(include_dirs)
    return _distutils_gen_preprocess_options(macros, include_dirs)
ccompiler.gen_preprocess_options = gen_preprocess_options
##Fix distutils.util.split_quoted:
# NOTE: I removed this fix in revision 4481 (see ticket #619), but it appears
# that removing this fix causes f2py problems on Windows XP (see ticket #723).
# Specifically, on WinXP when gfortran is installed in a directory path, which
# contains spaces, then f2py is unable to find it.
import string
# A run of characters that is neither whitespace, backslash nor a quote.
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
# A complete single-/double-quoted string, honoring backslash escapes.
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
_has_white_re = re.compile(r'\s')
def split_quoted(s):
    """Split `s` on whitespace like a shell would, honoring quotes and
    backslash escapes.  Unlike distutils' version, quotes around a word
    that contains no whitespace are *kept* (needed for f2py on Windows)."""
    s = s.strip()
    words = []
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break

        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
            words.append(s[:end])       # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':            # preserve whatever is being escaped;
                                        # will become part of the current word
            s = s[:end] + s[end+1:]
            pos = end+1

        else:
            if s[end] == "'":           # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':         # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            (beg, end) = m.span()
            if _has_white_re.search(s[beg+1:end-1]):
                # Quoted word contains whitespace: strip the quotes and keep
                # scanning from inside the (now shorter) string.
                s = s[:beg] + s[beg+1:end-1] + s[end:]
                pos = m.end() - 2
            else:
                # Keeping quotes when a quoted word does not contain
                # white-space. XXX: send a patch to distutils
                pos = m.end()

        if pos >= len(s):
            words.append(s)
            break

    return words
ccompiler.split_quoted = split_quoted
##Fix distutils.util.split_quoted:
##Fix distutils.util.split_quoted:
|
|
from interpreter.kmp_py import reader, evaluator,schemeExceptions, initialize
from interpreter.kmp_py.scheme import *
from unittest import TestCase
import sys
# Set up the global and syntax environments with their initial bindings
# once, before any expression is evaluated.
initialize.initialize()

def eval_string(string, env = evaluator.SchemeEvaluator.globalEnv):
    """Read one scheme expression from *string* and evaluate it in *env*."""
    stream = SchemeStringStream(string)
    expression = reader.SchemeReader().read(stream)
    return evaluator.SchemeEvaluator().evaluate(expression, env)
class SchemeEvaluatorArithmetic(TestCase):
    """Tests for the evaluator's arithmetic and comparison builtins.

    Fixes over the previous revision:
    - two tests were misspelled ``teest_...`` and therefore never ran;
      they are renamed to ``test_...``.
    - float-valued results are compared with assertAlmostEqual, since
      e.g. ``3.4 + 1.2 != 4.6`` exactly in IEEE-754 arithmetic.
    """

    def test_eval_number(self):
        obj = eval_string('1')
        self.assertEqual(obj.type, 'schemeNumber', 'A number should evaluate to itself.')
        self.assertEqual(obj.value, 1, 'Evaluated number does not have the right value.')

    def test_eval_number_negative(self):
        obj = eval_string('-3')
        self.assertEqual(obj.type, 'schemeNumber', 'A number should evaluate to itself.')
        self.assertEqual(obj.value, -3, 'Evaluated number does not have the right value.')

    def test_eval_float(self):
        obj = eval_string('3.14')
        self.assertEqual(obj.type, 'schemeNumber', 'A number should evaluate to itself.')
        self.assertEqual(obj.value, 3.14, 'Evaluated number does not have the right value.')

    def test_eval_float_negative(self):
        obj = eval_string('-3.14')
        self.assertEqual(obj.type, 'schemeNumber', 'A number should evaluate to itself.')
        self.assertEqual(obj.value, -3.14, 'Evaluated number does not have the right value.')

    def test_eval_plus(self):
        obj = eval_string('(+ 1 2)')
        self.assertEqual(obj.type, 'schemeNumber', 'Adding two numbers should result in another number.')
        self.assertEqual(obj.value, 3, '(+ 1 2) should result in 3')

    def test_eval_plus_with_negative(self):
        obj = eval_string('(+ -3 5)')
        self.assertEqual(obj.type, 'schemeNumber', 'Adding two numbers should result in another number.')
        self.assertEqual(obj.value, 2, '(+ -3 5) should result in 2')

    def test_eval_plus_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(+ "hello" "world")')

    # was: teest_eval_plus_float -- the typo kept this test from running
    def test_eval_plus_float(self):
        obj = eval_string('(+ 3.4 3)')
        self.assertEqual(obj.type, 'schemeNumber', 'Adding two numbers should result in another number, even if one is float.')
        self.assertAlmostEqual(obj.value, 6.4, msg='(+ 3.4 3) should result in 6.4')

    # was: teest_eval_plus_two_float -- the typo kept this test from running
    def test_eval_plus_two_float(self):
        obj = eval_string('(+ 3.4 1.2)')
        self.assertEqual(obj.type, 'schemeNumber', 'Adding two floats should result in schemeNumber.')
        self.assertAlmostEqual(obj.value, 4.6, msg='(+ 3.4 1.2) should result in 4.6')

    def test_eval_minus(self):
        obj = eval_string('(- 44 2)')
        self.assertEqual(obj.type, 'schemeNumber', 'Subtracting two numbers should result in another number.')
        self.assertEqual(obj.value, 42, '(- 44 2) should result in 42')

    def test_eval_minus_with_negative_result(self):
        obj = eval_string('(- 10 20)')
        self.assertEqual(obj.type, 'schemeNumber', 'Subtracting two numbers should result in another number.')
        self.assertEqual(obj.value, -10, '(- 10 20) should result in -10')

    def test_eval_minus_oneArgument(self):
        obj = eval_string('(- 3)')
        self.assertEqual(obj.type, 'schemeNumber', 'Subtracting should result in a number.')
        self.assertEqual(obj.value, -3, '(- 3) should result in -3')

    def test_eval_minus_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(- "hello" "world")')

    def test_eval_minus_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(-)')

    def test_eval_mul(self):
        obj = eval_string('(* 10 20)')
        self.assertEqual(obj.type, 'schemeNumber', 'Multiplying two numbers should result in another number.')
        self.assertEqual(obj.value, 200, '(* 10 20) should result in 200')

    def test_eval_mul_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(* "hello" "world")')

    def test_eval_div(self):
        obj = eval_string('(/ 20 10)')
        self.assertEqual(obj.type, 'schemeNumber', 'Dividing a number by another should result in another number.')
        self.assertEqual(obj.value, 2, '(/ 20 10) should result in 2')

    def test_eval_div_divisionByZero(self):
        self.assertRaises(schemeExceptions.DivisionByZero, eval_string, '(/ 3 0)')

    def test_eval_div_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(/ "hello" 3)')

    def test_eval_div_oneArgument(self):
        obj = eval_string('(/ 4)')
        self.assertEqual(obj.type, 'schemeNumber', 'Division should result in a number.')
        self.assertEqual(obj.value, 0.25, '(/ 4) should result in 1/4, i. e. in 0.25.')

    def test_eval_div_oneArgument_Zero(self):
        self.assertRaises(schemeExceptions.DivisionByZero, eval_string, '(/ 0)')

    def test_eval_div_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(/)')

    def test_eval_modulo(self):
        obj = eval_string('(% 20 10)')
        self.assertEqual(obj.type, 'schemeNumber', 'Dividing a number by another should result in another number.')
        self.assertEqual(obj.value, 0, '(% 20 10) should result in 0')
        obj = eval_string('(% 7 4)')
        self.assertEqual(obj.type, 'schemeNumber', 'Dividing a number by another should result in another number.')
        self.assertEqual(obj.value, 3, '(% 7 4) should result in 3')
        obj = eval_string('(% 4 7)')
        self.assertEqual(obj.type, 'schemeNumber', 'Dividing a number by another should result in another number.')
        self.assertEqual(obj.value, 4, '(% 4 7) should result in 4')
        obj = eval_string('(% 0 7)')
        self.assertEqual(obj.type, 'schemeNumber', 'Dividing a number by another should result in another number.')
        self.assertEqual(obj.value, 0, '(% 0 7) should result in 0')

    def test_eval_modulo_noInt(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(% 3.2 5.3)')

    def test_eval_modulo_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(% "hello" 3)')
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(% 3 "hello")')

    def test_eval_modulo_ZeroArgument(self):
        self.assertRaises(schemeExceptions.DivisionByZero, eval_string, '(% 2 0)')

    def test_eval_modulo_tooManyArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(% 3 4 5)')

    def test_eval_modulo_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(% 10)')

    def test_eval_gt(self):
        obj = eval_string('(> 20 10)')
        self.assertEqual(obj.type, 'schemeTrue', '20 is greater than 10, so (> 20 10) should result in schemeTrue.')
        obj = eval_string('(> 10 20)')
        self.assertEqual(obj.type, 'schemeFalse', '10 is not greater than 20, so (> 10 20) should result in schemeFalse.')
        obj = eval_string('(>= 20 10)')
        self.assertEqual(obj.type, 'schemeTrue', '20 is greater than 10, so (>= 20 10) should result in schemeTrue.')
        obj = eval_string('(>= 10 20)')
        self.assertEqual(obj.type, 'schemeFalse', '10 is greater than 20, so (>= 10 20) should result in schemeFalse.')
        obj = eval_string('(>= 20 20)')
        self.assertEqual(obj.type, 'schemeTrue', '20 equal to 20, so (>= 20 20) should result in schemeTrue.')

    def test_eval_gt_tooManyArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(> 10 20 30)')
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(>= 10 20 30)')

    def test_eval_gt_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(> 10)')
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(>= 10)')

    def test_eval_lt(self):
        obj = eval_string('(< 20 10)')
        self.assertEqual(obj.type, 'schemeFalse', '20 is not greater than 10, so (< 20 10) should result in schemeFalse.')
        obj = eval_string('(< 10 20)')
        self.assertEqual(obj.type, 'schemeTrue', '10 is not greater than 20, so (< 10 20) should result in schemeTrue.')
        obj = eval_string('(<= 20 10)')
        self.assertEqual(obj.type, 'schemeFalse', '20 is greater than 10, so (<= 20 10) should result in schemeFalse.')
        obj = eval_string('(<= 10 20)')
        self.assertEqual(obj.type, 'schemeTrue', '10 is greater than 20, so (<= 10 20) should result in schemeTrue.')
        obj = eval_string('(<= 20 20)')
        self.assertEqual(obj.type, 'schemeTrue', '20 equal to 20, so (<= 20 20) should result in schemeTrue.')

    def test_eval_lt_tooManyArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(< 10 20 30)')
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(<= 10 20 30)')

    def test_eval_lt_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(< 10)')
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(<= 10)')

    def test_eval_plus_mul(self):
        obj = eval_string('(+ (* 3 2) 2)')
        self.assertEqual(obj.type, 'schemeNumber', 'Adding and multiplying two numbers should result in another number.')
        self.assertEqual(obj.value, 8, '(+ (* 3 2) 2) should result in 8')

    def test_eval_minus_plus_eq(self):
        obj = eval_string('(eq? (- 6 3) (+ 1 2))')
        self.assertEqual(obj.type, 'schemeTrue', 'A schemeTrue was expected. Got a {0} instead'.format(obj.type))

    def test_eval_abs(self):
        obj = eval_string('(abs 5)')
        self.assertEqual(obj.type, 'schemeNumber', '(abs 5) should evaluate to type schemeNumber.')
        self.assertEqual(obj.value, 5, '(abs 5) should evaluate to schemeNumber 5.')
        obj = eval_string('(abs -5)')
        self.assertEqual(obj.type, 'schemeNumber', '(abs -5) should evaluate to type schemeNumber.')
        self.assertEqual(obj.value, 5, '(abs 5) should evaluate to schemeNumber 5.')
        obj = eval_string('(abs 0)')
        self.assertEqual(obj.type, 'schemeNumber', '(abs 0) should evaluate to type schemeNumber.')
        self.assertEqual(obj.value, 0, '(abs 0) should evaluate to schemeNumber 0.')

    def test_eval_abs_tooManyArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(abs 1 2 3)')

    def test_eval_abs_tooFewArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(abs)')

    def test_eval_abs_noNumber(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(abs "hello")')

    def test_eval_eq_arithmetic(self):
        obj = eval_string('(= 1 1)')
        self.assertEqual(obj.type, 'schemeTrue', '(= 1 1) should evaluate to schemeTrue')

    def test_eval_eq_arithmetic_toManyArguments(self):
        self.assertRaises(schemeExceptions.ArgumentCountException, eval_string, '(= 1 2 3)')

    def test_eval_eq_arithmetic_noNumberArgument(self):
        self.assertRaises(schemeExceptions.ArgumentTypeException, eval_string, '(= "hello" "hello")')
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Utilities for using modules
"""
import frappe, os, json
import frappe.utils
from frappe import _
# Doctypes whose exported folder/file names are scrubbed to lowercase,
# code-friendly form (see scrub_dt_dn below).
lower_case_files_for = ['DocType', 'Page', 'Report',
	"Workflow", 'Module Def', 'Desktop Item', 'Workflow State', 'Workflow Action', 'Print Format',
	"Website Theme", 'Web Form', 'Email Alert']
def export_module_json(doc, is_standard, module):
	"""Make a folder for the given doc and add its json file (make it a standard
	object that will be synced).

	Only acts when not importing, developer_mode is on and `is_standard`
	is truthy; in that case returns the extension-less path of the
	exported record file, otherwise returns None (implicitly).
	"""
	if (not frappe.flags.in_import and getattr(frappe.get_conf(),'developer_mode', 0)
		and is_standard):
		from frappe.modules.export_file import export_to_files
		from frappe.modules import get_module_path

		# json
		export_to_files(record_list=[[doc.doctype, doc.name]], record_module=module)

		path = os.path.join(get_module_path(module), scrub(doc.doctype),
			scrub(doc.name), scrub(doc.name))

		return path
def get_doc_module(module, doctype, name):
	"""Get custom module for given document"""
	dotted_path = "{app}.{module}.{doctype}.{name}.{name}".format(
		app = frappe.local.module_app[scrub(module)],
		doctype = scrub(doctype),
		module = scrub(module),
		name = scrub(name)
	)
	return frappe.get_module(dotted_path)
@frappe.whitelist()
def export_customizations(module, doctype, sync_on_migrate=0):
	"""Export Custom Field and Property Setter for the current document to the app folder.
	This will be synced with bench migrate.

	:param module: module (app folder) the json file is written into
	:param doctype: doctype whose customizations (and those of its child
		tables) are exported
	:param sync_on_migrate: when truthy, the exported file is re-applied
		on `bench migrate` (see sync_customizations)
	"""
	if not frappe.get_conf().developer_mode:
		# The original did `raise 'Not developer mode'` -- raising a string
		# is illegal and produces a TypeError; raise a proper exception.
		frappe.throw(_('Not developer mode'))

	# cint: whitelisted calls may pass sync_on_migrate as the string "0"/"1"
	custom = {'custom_fields': [], 'property_setters': [],
		'doctype': doctype, 'sync_on_migrate': frappe.utils.cint(sync_on_migrate)}

	def add(_doctype):
		# collect customizations for one doctype
		custom['custom_fields'] += frappe.get_all('Custom Field',
			fields='*', filters={'dt': _doctype})
		custom['property_setters'] += frappe.get_all('Property Setter',
			fields='*', filters={'doc_type': _doctype})

	add(doctype)

	# add custom fields and property setters for all child tables
	for d in frappe.get_meta(doctype).get_table_fields():
		add(d.options)

	folder_path = os.path.join(get_module_path(module), 'custom')
	if not os.path.exists(folder_path):
		os.makedirs(folder_path)

	path = os.path.join(folder_path, scrub(doctype)+ '.json')
	with open(path, 'w') as f:
		f.write(frappe.as_json(custom))

	frappe.msgprint('Customizations exported to {0}'.format(path))
def sync_customizations(app=None):
	'''Sync custom fields and property setters from custom folder in each app module'''
	apps = [app] if app else frappe.get_installed_apps()

	for app_name in apps:
		module_names = frappe.local.app_modules.get(app_name) or []
		for module_name in module_names:
			folder = frappe.get_app_path(app_name, module_name, 'custom')
			if not os.path.exists(folder):
				continue
			for fname in os.listdir(folder):
				with open(os.path.join(folder, fname), 'r') as f:
					data = json.loads(f.read())
				# only files explicitly flagged for migrate-time sync
				if data.get('sync_on_migrate'):
					sync_customizations_for_doctype(data)
def sync_customizations_for_doctype(data):
	'''Sync doctype customzations for a particular data set.

	`data` is a dict exported by export_customizations: existing Custom
	Field / Property Setter rows for the doctype are deleted and replaced
	by the exported ones, then the doctype's fields are re-validated.'''
	from frappe.core.doctype.doctype.doctype import validate_fields_for_doctype

	doctype = data['doctype']
	if data['custom_fields']:
		# wipe-and-reinsert so the file is the single source of truth
		frappe.db.sql('delete from `tabCustom Field` where dt=%s', doctype)
		for d in data['custom_fields']:
			d['doctype'] = 'Custom Field'
			doc = frappe.get_doc(d)
			doc.db_insert()

	if data['property_setters']:
		frappe.db.sql('delete from `tabProperty Setter` where doc_type=%s', doctype)
		for d in data['property_setters']:
			d['doctype'] = 'Property Setter'
			doc = frappe.get_doc(d)
			doc.db_insert()

	print 'Updating customizations for {0}'.format(doctype)
	validate_fields_for_doctype(doctype)
def scrub(txt):
	"""Return the lowercase, code-friendly form of `txt` (delegates to frappe.scrub)."""
	return frappe.scrub(txt)
def scrub_dt_dn(dt, dn):
	"""Returns in lowercase and code friendly names of doctype and name for certain types"""
	# only the doctypes listed in lower_case_files_for get scrubbed names
	if dt in lower_case_files_for:
		return scrub(dt), scrub(dn)
	return dt, dn
def get_module_path(module):
	"""Returns path of the given module (delegates to frappe.get_module_path)."""
	return frappe.get_module_path(module)
def get_doc_path(module, doctype, name):
	"""Return the folder path for a document: <module path>/<doctype>/<name>,
	scrubbed for the doctypes listed in lower_case_files_for."""
	dt, dn = scrub_dt_dn(doctype, name)
	return os.path.join(get_module_path(module), dt, dn)
def reload_doc(module, dt=None, dn=None, force=False, reset_permissions=False):
	"""Reload a document's definition from its exported file on disk."""
	from frappe.modules.import_file import import_files
	return import_files(module, dt, dn, force=force, reset_permissions=reset_permissions)
def export_doc(doctype, name, module=None):
	"""Write a doc to standard path.

	If `module` is not given, it is looked up from the DocType record.
	"""
	from frappe.modules.export_file import write_document_file
	# NOTE: removed a stray debug `print doctype, name` left in the original.
	if not module: module = frappe.db.get_value('DocType', name, 'module')
	write_document_file(frappe.get_doc(doctype, name), module)
def get_doctype_module(doctype):
	"""Returns **Module Def** name of given doctype."""
	def make_modules_dict():
		# doctype -> module map, built once and kept in the frappe cache
		return dict(frappe.db.sql("select name, module from tabDocType"))
	return frappe.cache().get_value("doctype_modules", make_modules_dict)[doctype]
# Process-level cache of imported controller modules, keyed by
# (app, doctype, prefix, suffix).
doctype_python_modules = {}
def load_doctype_module(doctype, module=None, prefix="", suffix=""):
	"""Returns the module object for given doctype.

	The imported module is cached in doctype_python_modules; a failed
	import is re-raised as ImportError naming the doctype and module.
	"""
	if not module:
		module = get_doctype_module(doctype)

	app = get_module_app(module)
	key = (app, doctype, prefix, suffix)
	module_name = get_module_name(doctype, module, prefix, suffix)

	try:
		if key not in doctype_python_modules:
			doctype_python_modules[key] = frappe.get_module(module_name)
	except ImportError:
		# parenthesized raise: same behavior, but valid on Python 2 AND 3
		# (the original `raise ImportError, msg` is Python-2-only syntax)
		raise ImportError('Module import failed for {0} ({1})'.format(doctype, module_name))

	return doctype_python_modules[key]
def get_module_name(doctype, module, prefix="", suffix="", app=None):
	"""Return the dotted python path of a doctype's controller module."""
	app_part = scrub(app or get_module_app(module))
	module_part = scrub(module)
	doctype_part = scrub(doctype)
	return '{app}.{module}.doctype.{doctype}.{prefix}{doctype}{suffix}'.format(
		app=app_part,
		module=module_part,
		doctype=doctype_part,
		prefix=prefix,
		suffix=suffix)
def get_module_app(module):
	"""Return the app that provides `module` (from frappe.local.module_app)."""
	return frappe.local.module_app[scrub(module)]
def get_app_publisher(module):
	"""Return the `app_publisher` hook of the app that provides `module`."""
	app = frappe.local.module_app[scrub(module)]
	if not app:
		frappe.throw(_("App not found"))
	app_publisher = frappe.get_hooks(hook="app_publisher", app_name=app)[0]
	return app_publisher
def make_boilerplate(template, doc, opts=None):
	"""Create a file for `doc` from a core boilerplate `template`,
	substituting app publisher, classname and doctype name.

	An existing target file is never overwritten.  `opts` supplies extra
	format() keyword substitutions for the template.
	"""
	target_path = get_doc_path(doc.module, doc.doctype, doc.name)
	# e.g. "controller.py" -> "<scrubbed doc name>.py"
	template_name = template.replace("controller", scrub(doc.name))
	target_file_path = os.path.join(target_path, template_name)
	app_publisher = get_app_publisher(doc.module)

	if not os.path.exists(target_file_path):
		if not opts:
			opts = {}

		with open(target_file_path, 'w') as target:
			with open(os.path.join(get_module_path("core"), "doctype", scrub(doc.doctype),
				"boilerplate", template), 'r') as source:
				target.write(frappe.utils.encode(
					frappe.utils.cstr(source.read()).format(app_publisher=app_publisher,
						classname=doc.name.replace(" ", ""), doctype=doc.name, **opts)
				))
|
|
import os
import sys
import time
import atexit
import datetime
import argparse
CUR_DIR = os.path.dirname(os.path.realpath(__file__))
try:
import RPi.GPIO as GPIO
except ImportError:
# If you aren't running this on a Pi, you won't have
# the GPIO avaialble, so there is a file in utilities that
# stubs out the necessary values.
import utilities.gpio_dev as GPIO
class OpenSprinkler():
    """
    Driver for OpenSprinkler hardware: a GPIO-driven shift register that
    switches irrigation stations, plus filesystem flags (STATUS, <pid>.pid
    and DELAY files in this directory) used to coordinate runs.

    Fixes over the previous revision: file handles are managed with
    ``with`` (no leaked descriptors), and the run loop sleeps between
    polls instead of busy-waiting at 100% CPU.
    """

    ### Low-Level Hardware Stuff. Don't mess with these. ###

    def _enable_shift_register_output(self):
        """
        Low-level function to enable shift register output. Don't call this
        yourself unless you know why you are doing it.
        """
        # NOE is active-low: driving it False enables the outputs.
        GPIO.output(self.PIN_SR_NOE, False)

    def _disable_shift_register_output(self):
        """
        Low-level function to disable shift register output. Don't call this
        yourself unless you know why you are doing it.
        """
        GPIO.output(self.PIN_SR_NOE, True)

    def _set_shift_registers(self, new_values):
        """
        Clock the per-station bits in `new_values` out to the shift register
        and latch them onto the outputs.  Use the higher-level interface
        (operate_station / reset_all_stations) instead of calling this.
        """
        GPIO.output(self.PIN_SR_CLK, False)
        GPIO.output(self.PIN_SR_LAT, False)
        # Bits are shifted out highest-numbered station first.
        for s in range(0, self.number_of_stations):
            GPIO.output(self.PIN_SR_CLK, False)
            GPIO.output(self.PIN_SR_DAT, new_values[self.number_of_stations-1-s])
            GPIO.output(self.PIN_SR_CLK, True)
        # Raising LAT latches the shifted bits onto the outputs.
        GPIO.output(self.PIN_SR_LAT, True)
        # Update the status file
        self._update_status_file()

    def _initialize_hardware(self):
        """
        Configure the GPIO pins that drive the shift register and push the
        initial station values out to the hardware.
        """
        self.PIN_SR_CLK = 4
        self.PIN_SR_NOE = 17
        self.PIN_SR_LAT = 22
        self.PIN_SR_DAT = 21

        # The 2nd revision of the RPI has a different pin value
        if GPIO.RPI_REVISION == 2:
            self.PIN_SR_DAT = 27

        # Clear any GPIO state left over from a previous (possibly crashed) run.
        GPIO.cleanup()

        # setup GPIO pins to interface with shift register. Don't muck with this
        # stuff unless you know why you are doing it.
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(self.PIN_SR_CLK, GPIO.OUT)
        GPIO.setup(self.PIN_SR_NOE, GPIO.OUT)
        self._disable_shift_register_output()
        GPIO.setup(self.PIN_SR_DAT, GPIO.OUT)
        GPIO.setup(self.PIN_SR_LAT, GPIO.OUT)
        self._set_shift_registers(self.station_values)
        self._enable_shift_register_output()

    def cleanup(self):
        """
        This runs at the termination of the file, turning off all stations, making
        sure that any PID files are removed, and running GPIO cleanup.
        """
        self.log("Running Cleanup.")
        self.reset_all_stations()
        self._remove_pid_file()
        GPIO.cleanup()

    ### Convenience methods for filesystem operations. You don't need to call these
    ### manually, they are handled by the higher-level operations.

    def _update_status_file(self):
        """
        Updates the STATUS file with the value for each station.
        """
        file_path = os.path.join(CUR_DIR, 'STATUS')
        # `with` guarantees the handle is closed even if the write fails.
        with open(file_path, 'w') as f:
            f.write("%s" % "".join([str(s) for s in self.station_values]))

    def _create_pid_file(self, minutes_to_run):
        """
        Writes a PID file to the directory to indicate what the PID of the
        current program is and when it expires.
        """
        expiration = datetime.datetime.now() + datetime.timedelta(minutes=minutes_to_run)
        file_path = os.path.join(CUR_DIR, '%s.pid' % self.pid)
        with open(file_path, 'w') as f:
            f.write("%s" % expiration.strftime('%Y-%m-%d %H:%M'))

    def _remove_pid_file(self):
        """
        Handles removal of the PID file, if one exists.
        """
        file_path = os.path.join(CUR_DIR, '%s.pid' % self.pid)
        if os.path.exists(file_path):
            os.remove(file_path)

    def create_delay(self, hours):
        """
        Creates a delay file that will expire after the number of hours
        passed.
        """
        # Calculate what the datetime object will be by adding the current time
        # and the number of hours to delay. This will be the body of the DELAY file.
        future_time = datetime.datetime.now() + datetime.timedelta(hours=hours)
        expiration = future_time.strftime('%Y-%m-%d %H:%M')

        # Write out the DELAY file and make the body the expiration time.
        self.log("Creating DELAY file with expiration %s" % expiration)
        delay_file_path = os.path.join(CUR_DIR, 'DELAY')
        with open(delay_file_path, 'w') as f:
            f.write(expiration)

    def check_for_delay(self):
        """
        Look at the filesystem to see if a DELAY file exists.  Returns the
        expiration datetime while the delay is still active, else None.
        Expired or unreadable DELAY files are removed.
        """
        delay_file_path = os.path.join(CUR_DIR, 'DELAY')
        if not os.path.exists(delay_file_path):
            return None

        # Read the file so we can inspect the contents
        with open(delay_file_path, 'r') as f:
            data = f.read()

        # The file might have a bad value. Check carefully.
        try:
            # Try to turn the body into a datetime object
            expiration = datetime.datetime.strptime(data, '%Y-%m-%d %H:%M')
            now = datetime.datetime.now()
            # If the expiration time is less than now (i.e. it has passed)
            # then go ahead and remove the file.
            if now >= expiration:
                self.log("Expiration has passed. Removing DELAY file.")
                os.remove(delay_file_path)
            else:
                return expiration
        except ValueError:
            # If we can't cast the value of the file into a date object, there is
            # no sense keeping the file around. Delete it.
            self.log("Could not read date in file. Removing file.")
            os.remove(delay_file_path)
        return None

    ### Logging functionality ###

    def log(self, message):
        """
        A convenience method for writing operations to a log file. If debugging
        is enabled, the message is output to the console.
        """
        file_path = os.path.join(CUR_DIR, 'log.txt')
        now_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        msg = '%s\t%s\t%s\n' % (now_time, self.pid, message)
        with open(file_path, 'a') as f:
            f.write(msg)
        if self.debug:
            # single-argument print() behaves identically on Python 2 and 3
            print(msg)

    ### Higher-Level Interface. These are the functions you want to call

    def operate_station(self, station_number, minutes, queue, callback_function):
        """
        Turn on a single station for `minutes` (all others off).  A pid file
        marks the run; a message on `queue` aborts it early.  When the run
        ends, ALL stations are turned off, the pid file is removed, and
        `callback_function(station_number)` is invoked if provided.
        """
        self.log("Operating station %d for %d minutes." % (station_number, minutes))

        # Check to see if a delay is in effect
        if self.check_for_delay():
            self.log("Delay in effect. Job will not run.")
            return

        # First, set all stations to zero
        self.station_values = [0] * self.number_of_stations

        # Next, enable just the station to run (adjusting for 0-based index)
        try:
            self.station_values[station_number-1] = 1
        except IndexError:
            # NOTE(review): as in the original, an invalid station number
            # still falls through and runs the timer with all stations off.
            self.log("Invalid station number %d passed. Skipping." % station_number)

        # Send the command
        self._set_shift_registers(self.station_values)

        # Create a filesystem flag to indicate that the system is running
        self._create_pid_file(minutes)

        # After the number of minutes have passed, turn it off.
        # We want to stop it 5 seconds early so it's completely off when the
        # time is up. This ensures that a program that is set to start the
        # next minute won't have to kill this one.
        time_to_stop = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
        time_to_stop -= datetime.timedelta(seconds=5)

        while True:
            # If the queue is not empty, it's because a message was passed from the
            # parent thread.
            if queue and not queue.empty():
                self.log("Recieved Kill Signal in Thread")
                # Remove the item from the queue
                queue.get(1)
                self._remove_pid_file()
                self.reset_all_stations()
                break
            if datetime.datetime.now() >= time_to_stop:
                self.log("Finished operating station.")
                # We don't know if a new job started while we were snoozing.
                # If one did, we don't want to close all valves anymore.
                # We need a way to check and see if this process is the most
                # recent one.
                self._remove_pid_file()
                self.reset_all_stations()
                break
            # Sleep briefly between polls instead of busy-waiting (the
            # original spun at 100% CPU here).  One second is well within
            # the 5-second early-stop margin above.
            time.sleep(1)

        # If a callback function was passed, we call it now.
        if callback_function:
            callback_function(station_number)

    def reset_all_stations(self):
        """
        A convenience method for turning everything off.
        """
        self.log("Turning Off All Stations.")
        off_values = [0] * self.number_of_stations
        self.station_values = off_values
        self._set_shift_registers(off_values)

    def __init__(self, debug=False, number_of_stations=8):
        self.number_of_stations = number_of_stations

        # If debug is true, we print log messages to console
        self.debug = debug

        # We need to save the PID of the current process.
        self.pid = os.getpid()

        # Initial values are zero (off) for all stations.
        self.station_values = [0] * number_of_stations

        # Get the hardware ready for operations
        self._initialize_hardware()
# This module only defines the OpenSprinkler class; running it directly
# currently does nothing.
if __name__ == "__main__":
    pass
|
|
import sys
import json
import re
from deriva.core import ErmrestCatalog, AttrDict, ermrest_model, get_credential
from deriva.config.base_config import BaseSpec, BaseSpecList, ConfigUtil, ConfigBaseCLI
# Python 3 has no `unicode` builtin; alias it to `str` so the rest of the
# module can call unicode() on either major version.
if sys.version_info > (3,):
    unicode = str

# Version of this configuration tool.
MY_VERSION = 0.99
class NoForeignKeyError(ValueError):
    """Raised when an expected foreign key cannot be found."""
    pass
class AttrSpecList(BaseSpecList):
    """Spec list for annotation configs: AttrSpec entries plus the
    managed/ignored annotation lists taken from the config's
    `known_attributes` section."""

    # Config keys that hold per-model-element annotation specs.
    SPEC_TYPES = ["catalog_annotations", "schema_annotations", "table_annotations", "column_annotations",
                  "foreign_key_annotations"]

    def __init__(self, known_attrs, specdict, strict=False):
        self.ignore_unmanaged = False
        self.managed_annotations = self.annotation_list(known_attrs.get(u'managed'))
        if self.managed_annotations is None:
            # a config without a 'managed' list is unusable
            raise ValueError("No 'managed' attribute list")
        if known_attrs.get(u'ignore_all_unmanaged'):
            self.ignore_unmanaged = True
        self.ignored_annotations = self.annotation_list(known_attrs.get(u'ignored'))
        if self.ignored_annotations is None:
            self.ignored_annotations = []
        BaseSpecList.__init__(self, AttrSpec, specdict, strict)

    def annotation_list(self, orig_list):
        """Normalize every entry to unicode; None propagates unchanged so
        callers can distinguish an absent list from an empty one."""
        if orig_list is None:
            return None
        new = []
        for item in orig_list:
            new.append(unicode(item))
        return new

    def add_list(self, dictlist):
        """Create and register an AttrSpec for every non-empty dict."""
        for d in dictlist:
            if len(d) > 0:
                s = AttrSpec(d, self.managed_annotations, self.ignore_unmanaged, self.ignored_annotations)
                self.add_spec(s)
class AttrSpec(BaseSpec):
    """A single annotation spec entry (uri + value) from the config."""

    def __init__(self, specdict, managed_annotations, ignore_unmanaged, ignored_annotations):
        BaseSpec.__init__(self, specdict, ["uri", "value"], "attributes", ignore_unmanaged)
        self.ignore_unmanaged = ignore_unmanaged
        self.managed_annotations = managed_annotations
        self.ignored_annotations = ignored_annotations
        self.known_annotations = self.managed_annotations + self.ignored_annotations
        # NOTE(review): the boolean result is discarded here, so a spec with
        # an unmanaged uri is silently accepted -- confirm whether this was
        # meant to raise instead.
        self.validate_annotation()

    def validate_annotation(self):
        # True when this spec's uri is one of the managed annotations.
        return self.specdict.get("uri") in self.managed_annotations
class AttrConfig:
    """Synchronizes managed annotations from a JSON config onto an ERMrest catalog.

    Two snapshots of the catalog model are fetched: `saved_toplevel_config`
    (the untouched baseline) and `toplevel_config` (the working copy that is
    mutated by set_attributes() and then applied as a diff by apply_annotations()).
    """

    def __init__(self, server, catalog_id, config_file, credentials, verbose=False, schema_name=None, table_name=None):
        # FIX: close the config file deterministically; the original used
        # json.load(open(config_file)) and leaked the file handle.
        with open(config_file) as config_fp:
            self.config = json.load(config_fp)
        self.ignored_schema_patterns = []
        ip = self.config.get("ignored_schema_patterns")
        if ip is not None:
            for p in ip:
                self.ignored_schema_patterns.append(re.compile(p))
        self.known_attrs = self.config.get(u'known_attributes')
        self.managed_annotations = self.known_attrs.get(u'managed')
        self.known_annotations = self.managed_annotations
        self.all_annotations = self.known_annotations
        self.ignored_annotations = self.known_attrs.get(u'ignored')
        if self.ignored_annotations is not None:
            self.all_annotations = self.all_annotations + self.ignored_annotations
        self.ignore_unmanaged = self.known_attrs.get(u'ignore_all_unmanaged')
        # One AttrSpecList per node kind (catalog/schema/table/column/fkey).
        self.annotation_specs = dict()
        for key in AttrSpecList.SPEC_TYPES:
            self.annotation_specs[key] = self.make_speclist(key)
        self.server = server
        self.catalog_id = catalog_id
        self.verbose = verbose
        # Fetch the model twice: one copy stays untouched as the baseline for
        # the final diff/apply, the other is mutated in place.
        old_catalog = ErmrestCatalog('https', self.server, self.catalog_id, credentials)
        self.saved_toplevel_config = ConfigUtil.find_toplevel_node(old_catalog.getCatalogModel(), schema_name,
                                                                   table_name)
        self.catalog = ErmrestCatalog('https', self.server, self.catalog_id, credentials)
        self.toplevel_config = ConfigUtil.find_toplevel_node(self.catalog.getCatalogModel(), schema_name, table_name)

    def make_speclist(self, name):
        """Build the AttrSpecList for config section `name` (empty if absent)."""
        d = self.config.get(unicode(name))
        if d is None:
            d = [dict()]
        return AttrSpecList(self.known_attrs, d)

    def find_best_schema_specs(self, schema_name):
        """Map each managed annotation key to its best-matching schema spec."""
        specs = dict()
        for key in self.managed_annotations:
            specs[key] = self.annotation_specs["schema_annotations"].find_best_schema_spec(schema_name, key=key)
        return specs

    def find_best_table_specs(self, schema_name, table_name):
        """Map each managed annotation key to its best-matching table spec."""
        specs = dict()
        for key in self.managed_annotations:
            specs[key] = self.annotation_specs["table_annotations"].find_best_table_spec(schema_name, table_name,
                                                                                         key=key)
        return specs

    def find_best_fkey_specs(self, fkey):
        """Map each managed annotation key to its best-matching foreign-key spec."""
        specs = dict()
        for key in self.managed_annotations:
            specs[key] = self.annotation_specs["foreign_key_annotations"].find_best_foreign_key_spec(fkey.table.schema.name,
                                                                                                     fkey.table.name,
                                                                                                     fkey.names,
                                                                                                     key=key)
        return specs

    def find_best_column_specs(self, schema_name, table_name, column_name):
        """Map each managed annotation key to its best-matching column spec."""
        specs = dict()
        for key in self.managed_annotations:
            specs[key] = self.annotation_specs["column_annotations"].find_best_column_spec(schema_name, table_name,
                                                                                           column_name, key=key)
        return specs

    def node_name(self, node):
        """Human-readable name for a model node, used in verbose/error output."""
        if isinstance(node, ermrest_model.Schema):
            return "schema {s}".format(s=str(node.name))
        if isinstance(node, ermrest_model.Table):
            return "table {s}.{t}".format(s=str(node.schema.name), t=str(node.name))
        if isinstance(node, ermrest_model.Column):
            return "column {s}.{t}.{c}".format(s=str(node.table.schema.name), t=str(node.table.name), c=str(node.name))
        if isinstance(node, ermrest_model.ForeignKey):
            return "foreign key {n}".format(n=str(node.names))
        return str("unknown node type {t}".format(t=type(node)))

    def set_node_annotations(self, node, specs, saved_node):
        """Apply `specs` (key -> spec or None) to one node's annotations.

        With specs=None, all annotations are cleared unless unmanaged ones are
        ignored. Raises ValueError for a leftover annotation that is neither
        managed nor ignored (when ignore_unmanaged is off).
        """
        if specs is None:
            if not self.ignore_unmanaged:
                if self.verbose:
                    print("{n}: clearing annotations".format(n=self.node_name(node)))
                node.annotations.clear()
            return
        for k in self.managed_annotations:
            s = specs.get(k)
            if s is not None and u'value' in s:
                if self.verbose:
                    print("{n}: setting {k} to {v}".format(n=self.node_name(node), k=k, v=s[u'value']))
                node.annotations[k] = s[u'value']
            elif k in node.annotations:
                # Managed key with no configured value: remove it.
                if self.verbose:
                    print("{n}: clearing {k}".format(n=self.node_name(node), k=k))
                node.annotations.pop(k)
        if not self.ignore_unmanaged:
            for k in node.annotations.keys():
                if k not in self.all_annotations:
                    raise ValueError("annotation key {k} is neither managed nor ignored".format(k=k))

    def set_table_annotations(self, table, saved_table):
        """Apply specs to a table, then recurse into its columns and fkeys."""
        self.set_node_annotations(table, self.find_best_table_specs(table.schema.name, table.name), saved_table)
        for column in table.column_definitions:
            self.set_column_annotations(column, self.find_named_column(saved_table, column.name))
        for fkey in table.foreign_keys:
            self.set_fkey_annotations(fkey, self.find_corresponding_fkey(saved_table, fkey))

    def find_corresponding_fkey(self, table, base_fkey):
        """Find the fkey in `table` whose first (schema, name) pair matches `base_fkey`."""
        if table is None:
            return None
        if base_fkey.names is None or len(base_fkey.names) == 0:
            return None
        names = base_fkey.names[0]
        if len(names) != 2:
            return None
        for fkey in table.foreign_keys:
            if fkey is not None and fkey.names is not None and len(fkey.names) > 0:
                for n in fkey.names:
                    if len(n) == 2 and n[0] == names[0] and n[1] == names[1]:
                        return fkey
        return None

    def find_named_column(self, table, column_name):
        """Return the column of `table` named `column_name`, or None."""
        if table is None:
            return None
        for column in table.column_definitions:
            if column.name == column_name:
                return column
        return None

    def find_named_schema(self, catalog, schema_name):
        """Return the schema of `catalog` named `schema_name`, or None."""
        if catalog is None or catalog.schemas is None:
            return None
        return catalog.schemas.get(schema_name)

    def find_named_table(self, schema, table_name):
        """Return the table of `schema` named `table_name`, or None."""
        if schema is None:
            return None
        if schema.tables is None:
            return None
        return schema.tables.get(table_name)

    def set_fkey_annotations(self, fkey, saved_fkey):
        """Apply the best-matching specs to one foreign key."""
        self.set_node_annotations(fkey, self.find_best_fkey_specs(fkey), saved_fkey)

    def set_column_annotations(self, column, saved_column):
        """Apply the best-matching specs to one column."""
        self.set_node_annotations(column, self.find_best_column_specs(column.table.schema.name, column.table.name, column.name),
                                  saved_column)

    def set_schema_annotations(self, schema, saved_schema):
        """Apply specs to a schema (unless ignored) and recurse into its tables."""
        for pat in self.ignored_schema_patterns:
            if pat.match(schema.name) is not None:
                print("ignoring schema {s}".format(s=schema.name))
                return
        specs = self.find_best_schema_specs(schema.name)
        self.set_node_annotations(schema, specs, saved_schema)
        for table in schema.tables.values():
            self.set_table_annotations(table, self.find_named_table(saved_schema, table.name))

    def set_catalog_annotations(self):
        """Apply catalog-level specs and recurse into every schema."""
        specs = dict()
        for key in self.managed_annotations:
            specs[key] = self.annotation_specs["catalog_annotations"].find_catalog_spec(key)
        self.set_node_annotations(self.toplevel_config, specs, self.saved_toplevel_config)
        for schema in self.toplevel_config.schemas.values():
            self.set_schema_annotations(schema, self.find_named_schema(self.saved_toplevel_config, schema.name))

    def set_attributes(self):
        """Dispatch on the toplevel node type (catalog / schema / table)."""
        if isinstance(self.toplevel_config, ermrest_model.Model):
            self.set_catalog_annotations()
        elif isinstance(self.toplevel_config, ermrest_model.Schema):
            self.set_schema_annotations(self.toplevel_config, self.saved_toplevel_config)
        elif isinstance(self.toplevel_config, ermrest_model.Table):
            self.set_table_annotations(self.toplevel_config, self.saved_toplevel_config)
        else:
            raise ValueError("toplevel config is a {t}".format(t=str(type(self.toplevel_config))))

    def apply_annotations(self):
        """Push the mutated model to the server as a diff against the saved state."""
        self.toplevel_config.apply(self.saved_toplevel_config)
def main():
    """CLI entry point: apply the annotation config to each requested schema."""
    cli = ConfigBaseCLI("annotation config tool", None, version=MY_VERSION)
    args = cli.parse_cli()
    table_name = cli.get_table_arg(args)
    schema_names = cli.get_schema_arg_list(args)
    credentials = get_credential(args.host, args.credential_file)
    chatty = args.verbose or args.debug
    for schema_name in schema_names:
        config = AttrConfig(args.host, args.catalog, args.config_file, credentials,
                            chatty, schema_name, table_name)
        config.set_attributes()
        # --dryrun computes everything but never pushes to the server.
        if not args.dryrun:
            config.apply_annotations()
# Script entry point; main() returns None, which sys.exit maps to status 0.
if __name__ == '__main__':
    sys.exit(main())
|
|
## IMPORTS #####################################################################
import math
import numpy as np
from numpy import atleast_2d as twod
from numpy import asmatrix as mat
from numpy import asmatrix as arr
from .utils import toIndex
################################################################################
## Base (abstract) "classify" class and associated functions ###################
################################################################################
class classifier:
    """Abstract base class for classifiers.

    Provides generic evaluation methods (err, nll, auc, confusion, roc) built
    on top of the derived class's predict()/predictSoft() implementations.
    """

    def __init__(self, *args, **kwargs):
        """Constructor for abstract base class for various classifiers.

        This class implements methods that generalize to different classifiers.
        Optional arguments X,Y,... call train(X,Y,...) to initialize the model
        """
        self.classes = []
        # TODO: if Y!=None init classes from data? (leave to train?)
        # NOTE(review): returning train()'s value from __init__ raises
        # TypeError unless train() returns None — confirm derived classes.
        if len(args) or len(kwargs):
            return self.train(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        """Provides syntatic sugar for prediction; calls "predict". """
        return self.predict(*args, **kwargs)

    def predict(self, X):
        """Abstract method, implemented by derived classes.

        Args:
            X (arr): M,N array of M data points with N features each

        Returns:
            arr: M, or M,1 array of the predicted class for each data point

        Derived classes do not need to implement this function if predictSoft is
        implemented; by default it uses predictSoft and converts to the most likely class.
        """
        idx = np.argmax( self.predictSoft(X) , axis=1 )  # find most likely class (index)
        return np.asarray(self.classes)[idx]             # convert to saved class values

    def predictSoft(self,X):
        """Abstract method, implemented by derived classes.

        Args:
            X (arr): M,N array of M data points with N features each

        Returns:
            arr: MxC array of C class probabilities for each data point
        """
        raise NotImplementedError

    ####################################################
    # Standard loss f'n definitions for classifiers    #
    ####################################################

    def err(self, X, Y):
        """This method computes the error rate on a data set (X,Y)

        Args:
            X (arr): M,N array of M data points with N features each
            Y (arr): M, or M,1 array of target class values for each data point

        Returns:
            float: fraction of prediction errors, 1/M \sum (Y[i]!=f(X[i]))
        """
        Y = arr( Y )
        Yhat = arr( self.predict(X) )
        # Reshape so hard predictions compare elementwise against targets.
        return np.mean(Yhat.reshape(Y.shape) != Y)

    def nll(self, X, Y):
        """Compute the (average) negative log-likelihood of the soft predictions

        Using predictSoft, normalizes and inteprets as conditional probabilities to compute
          (1/M) \sum_i log Pr[ y^{(i)} | f, x^{(i)} ]

        Args:
            X (arr): M,N array of M data points with N features each
            Y (arr): M, or M,1 array of target class values for each data point

        Returns:
            float: Negative log likelihood of the predictions
        """
        M,N = X.shape
        P = np.asarray( self.predictSoft(X) )
        P /= np.sum(P, axis=1, keepdims=True)     # normalize to sum to one
        Y = toIndex(Y, self.classes)              # map class values to column indices
        return - np.mean( np.log( P[ np.arange(M), Y ] ) )  # evaluate

    def auc(self, X, Y):
        """Compute the area under the roc curve on the given test data.

        Args:
            X (arr): M,N array of M data points with N features each
            Y (arr): M, or M,1 array of target class values for each data point

        Returns:
            float: Area under the ROC curve

        This method only works on binary classifiers.
        """
        if len(self.classes) != 2:
            raise ValueError('This method can only supports binary classification ')
        try:                  # compute 'response' (soft binary classification score)
            soft = self.predictSoft(X)[:,1]  # p(class = 2nd)
        except (AttributeError, IndexError):  # or we can use 'hard' binary prediction if soft is unavailable
            soft = self.predict(X)
        n,d = twod(soft).shape             # ensure soft is the correct shape
        soft = soft.flatten() if n==1 else soft.T.flatten()
        indices = np.argsort(soft)         # sort data by score value
        Y = Y[indices]
        sorted_soft = soft[indices]
        # compute rank (averaged for ties) of sorted data
        dif = np.hstack( ([True],np.diff(sorted_soft)!=0,[True]) )
        r1 = np.argwhere(dif).flatten()
        r2 = r1[0:-1] + 0.5*(r1[1:]-r1[0:-1]) + 0.5
        rnk = r2[np.cumsum(dif[:-1])-1]
        # number of true negatives and positives
        n0,n1 = sum(Y == self.classes[0]), sum(Y == self.classes[1])
        if n0 == 0 or n1 == 0:
            raise ValueError('Data of both class values not found')
        # compute AUC using Mann-Whitney U statistic
        result = (np.sum(rnk[Y == self.classes[1]]) - n1 * (n1 + 1.0) / 2.0) / n1 / n0
        return result

    def confusion(self, X, Y):
        """Estimate the confusion matrix (Y x Y_hat) from test data.

        Args:
            X (arr): M,N array of M data points with N features each
            Y (arr): M, or M,1 array of target class values for each data point

        Returns:
            C (arr): C[i,j] = # of data from class i that were predicted as class j
        """
        Y_hat = self.predict(X)
        num_classes = len(self.classes)
        # Encode each (true, predicted) pair as a single histogram bin index.
        indices = toIndex(Y, self.classes) + num_classes * (toIndex(Y_hat, self.classes) - 1)
        C = np.histogram(indices, np.arange(1, num_classes**2 + 2))[0]
        C = np.reshape(C, (num_classes, num_classes))
        return np.transpose(C)

    def roc(self, X, Y):
        """Compute the receiver operating charateristic curve on a data set.

        Args:
            X (arr): M,N array of M data points with N features each
            Y (arr): M, or M,1 array of target class values for each data point

        Returns:
            tuple : (fpr,tpr,tnr) where
                    fpr = false positive rate (1xN numpy vector)
                    tpr = true positive rate (1xN numpy vector)
                    tnr = true negative rate (1xN numpy vector)

        This method is only defined for binary classifiers.
        Plot fpr vs. tpr to see the ROC curve.
        Plot tpr vs. tnr to see the sensitivity/specificity curve.
        """
        if len(self.classes) > 2:
            raise ValueError('This method can only supports binary classification ')
        try:                  # compute 'response' (soft binary classification score)
            soft = self.predictSoft(X)[:,1]  # p(class = 2nd)
        except (AttributeError, IndexError):
            soft = self.predict(X)        # or we can use 'hard' binary prediction if soft is unavailable
        n,d = twod(soft).shape
        if n == 1:
            soft = soft.flatten()
        else:
            soft = soft.T.flatten()
        # number of true negatives and positives
        n0 = float(np.sum(Y == self.classes[0]))
        n1 = float(np.sum(Y == self.classes[1]))
        if n0 == 0 or n1 == 0:
            raise ValueError('Data of both class values not found')
        # sort data by score value
        indices = np.argsort(soft)
        Y = Y[indices]
        sorted_soft = soft[indices]  #np.sort(soft)
        # compute false positives and true positive rates
        tpr = np.divide(np.cumsum(Y[::-1] == self.classes[1]).astype(float), n1)
        fpr = np.divide(np.cumsum(Y[::-1] == self.classes[0]).astype(float), n0)
        tnr = np.divide(np.cumsum(Y == self.classes[0]).astype(float), n0)[::-1]
        # find ties in the sorting score; keep only one point per distinct score
        same = np.append(np.asarray(sorted_soft[0:-1] == sorted_soft[1:]), 0)
        tpr = np.append([0], tpr[np.logical_not(same)])
        fpr = np.append([0], fpr[np.logical_not(same)])
        tnr = np.append([1], tnr[np.logical_not(same)])
        return fpr, tpr, tnr
################################################################################
## REGRESS #####################################################################
################################################################################
class regressor:
    """Abstract base class for regressors; supplies the common loss functions."""

    def __init__(self, *args, **kwargs):
        """Base constructor; any arguments are forwarded straight to train()."""
        if len(args) or len(kwargs):
            return self.train(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        """Calling the object is shorthand for predict()."""
        return self.predict(*args, **kwargs)

    ####################################################
    # Standard loss f'n definitions for regressors     #
    ####################################################

    def mae(self, X, Y):
        """Mean absolute error, (1/M) sum_i |f(x_i) - y_i|, on test data (X, Y).

        Args:
            X (arr): M x N array that contains M data points with N features
            Y (arr): M x 1 array of target values for each data point

        Returns:
            float: mean absolute error
        """
        residual = Y - self.predict(X).reshape(Y.shape)
        return np.mean(np.absolute(residual), axis=0)

    def mse(self, X, Y):
        """Mean squared error, (1/M) sum_i (f(x_i) - y_i)^2, on test data (X, Y).

        Args:
            X (arr): M x N array that contains M data points with N features
            Y (arr): M x 1 array of target values for each data point

        Returns:
            float: mean squared error
        """
        residual = Y - self.predict(X).reshape(Y.shape)
        return np.mean(residual ** 2, axis=0)

    def rmse(self, X, Y):
        """Root mean squared error, sqrt(mse(X, Y)), on test data (X, Y).

        Args:
            X (arr): M x N array that contains M data points with N features
            Y (arr): M x 1 array of target values for each data point

        Returns:
            float: root mean squared error
        """
        return np.sqrt(self.mse(X, Y))
################################################################################
################################################################################
################################################################################
|
|
#!python3
#-*- coding: utf-8 -*-
import os
import sys
import json
import webbrowser
import operator
from math import ceil
from geopy.distance import vincenty
def dump_poke_groups(poke_groups, pokeGroups):
    """Persist every nest's info dict (groupNode[1]) to data/locs.json."""
    data = [poke_groups['@' + str(i)][1] for i in range(pokeGroups)]
    with open('data/locs.json', 'w') as locs:
        json.dump(data, locs, indent=2)
def add_nest(poke_groups, pokeGroups):
    """Interactively create a new nest and register it in poke_groups.

    Prompts for a name, coordinates (raw "lat,lng" or a Google Maps URL that
    embeds "@lat,lng,..."), a radius in meters, and an optional comma-separated
    list of common Pokemon. The nest is stored under id '@<pokeGroups>' and
    persisted through update_nest().

    Returns:
        (poke_groups, pokeGroups): the updated mapping and the new group count.
    """
    print('\n- New Nest -')
    name = input('Name: ')
    coords = None
    while (coords == None):
        coords = input('Lat,Lng (or Google Maps link): ')
        if ('@' in coords):
            # Google Maps URLs embed the position as ".../@lat,lng,zoom..."
            coords = coords.split('@')[1]
        coords = coords.split(',')
        try:
            lat, lng = float(coords[0]), float(coords[1])
        except (ValueError, IndexError):
            # FIX: the original bare `except:` also swallowed KeyboardInterrupt,
            # making Ctrl-C unable to break out of the prompt loop.
            coords = None
    rad = None
    while (rad == None):
        try:
            rad = int(input('Radius: '))
        except ValueError:
            pass  # non-numeric radius: re-prompt
    aux = input('Common list (Pokemon1, Pokemon2, ...): ')
    if (aux != ''):
        common = [entry.strip() for entry in aux.split(',')]
    else:
        common = []
    node = [[], {}]
    groupId = '@' + str(pokeGroups)
    poke_groups[groupId] = node
    pokeGroups += 1
    update_nest(poke_groups, pokeGroups, groupId, lat, lng, name, rad, common)
    return poke_groups, pokeGroups
def update_nest(poke_groups, pokeGroups, groupId, lat=None, lng=None, name=None, rad=None, common=None):
    """Overwrite the non-None fields of nest `groupId`, then persist all nests.

    Returns:
        poke_groups, with the named group's info dict updated in place.
    """
    groupInfo = poke_groups[groupId][1]
    candidates = {'lat': lat, 'lng': lng, 'name': name, 'rad': rad, 'common': common}
    for field, value in candidates.items():
        if value is not None:
            groupInfo[field] = value
    dump_poke_groups(poke_groups, pokeGroups)
    return poke_groups
def add_spawn(spawnInfo, poke_groups, pokeGroups):
    """Attach one spawn point to every nest whose radius covers it.

    If no nest covers the spawn, the user is asked (interactively) to either
    grow the closest nest to reach it or create a brand-new nest; the outer
    loop then retries until the spawn has joined at least one nest.

    Returns:
        (poke_groups, pokeGroups), possibly grown by a new nest.
    """
    joined = spawnInfo['joined']  # NOTE(review): captured but never used afterwards
    closerId = None
    closerDist = None
    while (spawnInfo['joined'] == 0):
        for groupNum in range(pokeGroups):
            groupId = '@'+str(groupNum)
            groupNode = poke_groups[groupId]
            groupInfo = groupNode[1]
            point1 = (spawnInfo['lat'],spawnInfo['lng'])
            point2 = (groupInfo['lat'],groupInfo['lng'])
            dist = vincenty(point1, point2)  # geodesic distance via geopy
            if (dist.meters <= groupInfo['rad']):
                # Within range: record (spawn, distance) on this nest.
                spawnInfo['joined'] += 1
                groupNode[0].append((spawnInfo,dist.meters))
            else:
                # Track the nearest out-of-range nest for the prompt below.
                if (closerId == None) or (dist.meters < closerDist):
                    closerId = groupId
                    closerDist = dist.meters
        if spawnInfo['joined'] == 0:
            # No nest reached this spawn: show it on a map and ask the user.
            closerInfo = poke_groups[closerId][1]
            pos = str(spawnInfo['lat']) + ',' + str(spawnInfo['lng'])
            print ('\nSpawn at [' + pos + '] outside of any nest range.')
            url = 'https://www.google.com/maps/?q=' + pos
            webbrowser.open(url)
            print('Closer nest identified: ' + closerInfo['name'] + ' (Radius: ' + str(closerInfo['rad']) + 'm)')
            print('Increase range of \'' + closerInfo['name'] + '\' to ' + str(ceil(closerDist)) + 'm?')
            choice = ''
            while (choice == ''):
                choice = input('y or n: ')
                choice = choice.lower()
                if (choice == 'y'):
                    # Grow the nearest nest just enough to include this spawn.
                    poke_groups = update_nest(poke_groups, pokeGroups, closerId, rad=ceil(closerDist))
                elif (choice == 'n'):
                    poke_groups, pokeGroups = add_nest(poke_groups, pokeGroups)
                else:
                    choice = ''  # anything else: re-prompt
    return poke_groups, pokeGroups
def parse_groups(nest_locs, poke_spawns):
    """Seed nest groups from saved locations, then assign each spawn to a nest.

    Returns:
        (poke_groups, pokeGroups): mapping of '@<n>' -> [spawn_list, info] and
        the total number of groups (may grow while assigning spawns).
    """
    poke_groups = {}
    for index, loc in enumerate(nest_locs):
        poke_groups['@' + str(index)] = [[], loc]
    pokeGroups = len(nest_locs)
    for spawnInfo in poke_spawns:
        spawnInfo.pop('time', None)  # timestamps are irrelevant for nest detection
        spawnInfo['joined'] = 0
        poke_groups, pokeGroups = add_spawn(spawnInfo, poke_groups, pokeGroups)
    return poke_groups, pokeGroups
def eval_nests(poke_groups, pokeGroups):
    """Yield (groupNode, counts) for each active nest, counts sorted by frequency.

    A species whose sightings in this nest are all shared with other nests
    (spawn 'joined' > 1) gets its count negated, flagging it as a likely
    misplaced spawn -- unless *every* spawn of the nest is shared.
    Nests with radius 0 are skipped.
    """
    for num in range(pokeGroups):
        node = poke_groups['@' + str(num)]
        if node[1]['rad'] == 0:
            continue  # disabled nest
        counts = {}
        shared = {}
        exclusive_seen = False  # did any spawn belong to this nest alone?
        for spawnInfo, _dist in node[0]:
            pokeId = spawnInfo['pokemonId']
            counts[pokeId] = counts.get(pokeId, 0) + 1
            if spawnInfo['joined'] > 1:
                shared[pokeId] = shared.get(pokeId, 0) + 1
            else:
                exclusive_seen = False or True
                exclusive_seen = True
        for pokeId in shared:
            # Negate counts made up entirely of shared sightings.
            if exclusive_seen and counts[pokeId] <= shared[pokeId]:
                counts[pokeId] *= -1
        yield node, sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
def print_nest(groupNode, nestInfo, poke_list, global_common):
    """Print one nest's spawn statistics and wait for a keypress.

    Args:
        groupNode: [spawn_list, info_dict] for the nest.
        nestInfo: (pokemonId, count) pairs sorted descending; negative counts
            mark likely misplaced spawns (see eval_nests).
        poke_list: pokemon names indexed by pokemonId - 1.
        global_common: names considered common everywhere.
    """
    nest_common = set(global_common + groupNode[1]['common'])
    os.system('cls' if os.name == 'nt' else 'clear')
    # Split nestInfo into the positive prefix and the negative suffix
    # (negatives sort to the end because the list is in descending order).
    pos_len = len(nestInfo)
    neg_len = 0
    i = pos_len - 1
    while (i >= 0 and nestInfo[i][1] < 0):
        pos_len -= 1
        neg_len += 1
        i -= 1
    print ('- ' + groupNode[1]['name'] + ' -')
    # Denominator excludes the misplaced (negative) spawns.
    total_len = len(groupNode[0]) - neg_len
    # A non-common species covering >10% of spawns suggests a nest.
    i = 0
    while (i < pos_len and (nestInfo[i][1] / total_len) > 0.1):
        id = nestInfo[i][0] - 1  # pokemonId is 1-based; poke_list is 0-based
        name = poke_list[id]
        if name not in nest_common:
            print('Possible nest of:', name)
            i = pos_len  # report only the first qualifying species
        else:
            i += 1
    print('\nUncommon spawning rate:')
    for i in range(0, pos_len):
        id = nestInfo[i][0] - 1
        name = poke_list[id]
        if name not in global_common:
            spawnCount = nestInfo[i][1]
            print('%-12s' % (name), '\t-\t' + '%2.f' % ((spawnCount / total_len)*100) + '% (' + str(spawnCount) + ' out of ' + str(total_len) + ')')
    print('\nCommon spawning rate:')
    for i in range(0, pos_len):
        id = nestInfo[i][0] - 1
        name = poke_list[id]
        if name in global_common:
            spawnCount = nestInfo[i][1]
            print('%-12s' % (name), '\t-\t' + '%2.f' % ((spawnCount / total_len)*100) + '% (' + str(spawnCount) + ' out of ' + str(total_len) + ')')
    if (neg_len > 0):
        # Walk the negative tail backwards (most negative last in the list).
        print('\nLikely misplaced spawns rate:')
        for i in range(-1, (-1*neg_len)-1, -1):
            id = nestInfo[i][0] - 1
            name = poke_list[id]
            spawnCount = nestInfo[i][1] * -1
            total_len = len(groupNode[0])  # misplaced rates use the full total
            print('%-12s' % (name), '\t-\t' + '%2.f' % ((spawnCount / total_len)*100) + '% (' + str(spawnCount) + ' out of ' + str(total_len) + ')')
    input('\nPress any key to continue . . . ')
def load_data():
    """Load the four JSON data files, creating missing ones from examples.

    The counter `i` tracks how many files loaded successfully; a missing file
    is recreated from its '<name>-example.json' sibling and the whole load is
    restarted (i = 0). Malformed JSON is reported and re-raised.

    Returns:
        (nest_locs, poke_spawns, poke_list, global_common)
    """
    i = 0
    while (i < 4):
        try:
            file = 'data/locs.json'
            with open(file, 'r') as locs:
                nest_locs = json.load(locs)
            i += 1
            file = 'data/pokealert_spawn_points.json'
            with open(file, 'r') as spawns:
                poke_spawns = json.load(spawns)
            i += 1
            file = 'data/common_pokemon.json'
            with open(file, 'r') as commons:
                global_common = json.load(commons)
            i += 1
            file = 'data/pokemon_list.json'
            with open(file, 'r') as pokemons:
                poke_list = json.load(pokemons)
            i += 1
        except FileNotFoundError:
            print('\nFile', '\"' + file + '\"', 'not found.')
            # Derive 'data/<name>-example.json' from the missing path.
            # NOTE(review): rstrip('json') strips a *character set*, not the
            # suffix; it works for these filenames but is fragile in general.
            exampleFile = file.rstrip('json')[:-1] + '-example' + '.json'
            try:
                with open(exampleFile, 'r') as example:
                    data = json.load(example)
                print('Creating it with the contents of', '\"' + exampleFile + '\"' + '.')
                with open(file, 'w+') as out:
                    json.dump(data, out, indent=2)
            except:
                raise  # no example available either: give up
            i = 0  # restart the load from the first file
        except:
            # Any other failure is a JSON/content problem: say which file.
            if (i == 0):
                print('Incorrect data on \'locs.json\' file.',)
            elif (i == 1):
                print('Incorrect data on \'pokealert_spawn_points.json\' file.',)
            elif (i == 2):
                print('Incorrect data on \'common_pokemon.json\' file.',)
            else:
                print('Incorrect data on \'pokemon_list.json\' file.')
            raise
    return nest_locs, poke_spawns, poke_list, global_common
# Script entry point: load data, group spawns into nests, then page through
# each nest's statistics.
if __name__ == '__main__':
    os.system('cls' if os.name == 'nt' else 'clear')
    try:
        nest_locs, poke_spawns, poke_list, global_common = load_data()
    except Exception as err:
        # FIX: the original bare `except: print('bug')` hid the real failure;
        # load_data() already names the offending file, so show the cause too.
        print('Failed to load data:', err)
        sys.exit()
    if (len(nest_locs) == 0):
        print('No nest location found on \'locs.json\'')
        sys.exit()
    if (len(poke_spawns) == 0):
        print('No spawn point found on \'pokealert_spawn_points.json\'')
        sys.exit()
    poke_groups, pokeGroups = parse_groups(nest_locs, poke_spawns)
    input('\nNests parsed! Press any key to start . . . ')
    for groupNode, nestInfo in eval_nests(poke_groups, pokeGroups):
        print_nest(groupNode, nestInfo, poke_list, global_common)
    os.system('cls' if os.name == 'nt' else 'clear')
    input('Press any key to continue . . . ')
|
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import os
import tensorflow as tf
import tensorflow_transform as tft
import tensorflow_model_analysis as tfma
from tensorflow.python.lib.io import file_io
from tensorflow_transform.beam.tft_beam_io import transform_fn_io
from tensorflow_transform.saved import input_fn_maker
from tensorflow_transform.saved import saved_transform_io
from tensorflow_transform.tf_metadata import dataset_metadata
from tensorflow_transform.tf_metadata import dataset_schema
from tensorflow_transform.tf_metadata import metadata_io
# Length of the embedding vector that preprocessing substitutes for an
# IMAGE_URL column (used by build_feature_columns).
IMAGE_EMBEDDING_SIZE = 2048
# Target dtypes that imply classification vs regression (see is_classification).
CLASSIFICATION_TARGET_TYPES = [tf.bool, tf.int32, tf.int64]
REGRESSION_TARGET_TYPES = [tf.float32, tf.float64]
TARGET_TYPES = CLASSIFICATION_TARGET_TYPES + REGRESSION_TARGET_TYPES
def parse_arguments():
    """Parse command-line flags for the training job.

    Returns:
        argparse.Namespace with all flags; `hidden_layer_size` is converted
        from a comma-separated string into a list of ints.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--job-dir',
                        type=str,
                        required=True,
                        help='GCS or local directory.')
    parser.add_argument('--transformed-data-dir',
                        type=str,
                        required=True,
                        help='GCS path containing tf-transformed training and eval data.')
    parser.add_argument('--schema',
                        type=str,
                        required=True,
                        help='GCS json schema file path.')
    parser.add_argument('--target',
                        type=str,
                        required=True,
                        help='The name of the column to predict in training data.')
    parser.add_argument('--learning-rate',
                        type=float,
                        default=0.1,
                        help='Learning rate for training.')
    parser.add_argument('--optimizer',
                        choices=['Adam', 'SGD', 'Adagrad'],
                        default='Adagrad',
                        help='Optimizer for training. If not provided, '
                             'tf.estimator default will be used.')
    parser.add_argument('--hidden-layer-size',
                        type=str,
                        default='100',
                        help='comma separated hidden layer sizes. For example "200,100,50".')
    parser.add_argument('--steps',
                        type=int,
                        help='Maximum number of training steps to perform. If unspecified, will '
                             'honor epochs.')
    parser.add_argument('--epochs',
                        type=int,
                        help='Maximum number of training data epochs on which to train. If '
                             'both "steps" and "epochs" are specified, the training '
                             'job will run for "steps" or "epochs", whichever occurs first.')
    parser.add_argument('--preprocessing-module',
                        type=str,
                        required=False,
                        help=('GCS path to a python file defining '
                              '"preprocess" and "get_feature_columns" functions.'))
    args = parser.parse_args()
    # "200,100,50" -> [200, 100, 50]
    args.hidden_layer_size = [int(x.strip()) for x in args.hidden_layer_size.split(',')]
    return args
def is_classification(transformed_data_dir, target):
    """Whether the scenario is classification (vs regression).

    Args:
        transformed_data_dir: directory holding tf-transform output metadata.
        target: name of the target column.

    Returns:
        The number of classes if the target represents a classification
        problem, or None if it does not.

    Raises:
        ValueError: if the target is absent, non-scalar, or of a dtype outside
            TARGET_TYPES.
    """
    transformed_metadata = metadata_io.read_metadata(
        os.path.join(transformed_data_dir, transform_fn_io.TRANSFORMED_METADATA_DIR))
    transformed_feature_spec = transformed_metadata.schema.as_feature_spec()
    if target not in transformed_feature_spec:
        raise ValueError('Cannot find target "%s" in transformed data.' % target)
    feature = transformed_feature_spec[target]
    # Target must be a fixed-length scalar of a supported dtype.
    if (not isinstance(feature, tf.FixedLenFeature) or feature.shape != [] or
            feature.dtype not in TARGET_TYPES):
        raise ValueError('target "%s" is of invalid type.' % target)
    if feature.dtype in CLASSIFICATION_TARGET_TYPES:
        if feature.dtype == tf.bool:
            return 2
        # Integer targets: class count comes from the tf-transform vocabulary.
        return get_vocab_size(transformed_data_dir, target)
    return None
def make_tft_input_metadata(schema):
    """Create tf-transform metadata from the given raw schema.

    NUMBER columns become float32 scalars (default 0.0); CATEGORY, TEXT,
    IMAGE_URL and KEY columns become string scalars (default ''). Columns of
    any other type are dropped, matching the original behavior.
    """
    tft_schema = {}
    for col_schema in schema:
        name = col_schema['name']
        kind = col_schema['type']
        if kind == 'NUMBER':
            representation = dataset_schema.FixedColumnRepresentation(default_value=0.0)
            tft_schema[name] = dataset_schema.ColumnSchema(tf.float32, [], representation)
        elif kind in ('CATEGORY', 'TEXT', 'IMAGE_URL', 'KEY'):
            representation = dataset_schema.FixedColumnRepresentation(default_value='')
            tft_schema[name] = dataset_schema.ColumnSchema(tf.string, [], representation)
    return dataset_metadata.DatasetMetadata(dataset_schema.Schema(tft_schema))
def make_training_input_fn(transformed_data_dir, mode, batch_size, target_name, num_epochs=None):
    """Creates an input function reading from transformed data.

    Args:
        transformed_data_dir: Directory to read transformed data and metadata from.
        mode: 'train' or 'eval'; also selects which TFRecord files are read
            (files matching '<mode>-*').
        batch_size: Batch size.
        target_name: name of the target column.
        num_epochs: number of training data epochs (ignored in 'eval' mode,
            which always uses a single epoch).

    Returns:
        The input function for training or eval.
    """
    transformed_metadata = metadata_io.read_metadata(
        os.path.join(transformed_data_dir, transform_fn_io.TRANSFORMED_METADATA_DIR))
    transformed_feature_spec = transformed_metadata.schema.as_feature_spec()

    def _input_fn():
        """Input function for training and eval."""
        epochs = 1 if mode == 'eval' else num_epochs
        transformed_features = tf.contrib.learn.io.read_batch_features(
            os.path.join(transformed_data_dir, mode + '-*'),
            batch_size, transformed_feature_spec, tf.TFRecordReader, num_epochs=epochs)
        # Extract features and label from the transformed tensors.
        transformed_labels = transformed_features.pop(target_name)
        return transformed_features, transformed_labels

    return _input_fn
def make_serving_input_fn(transformed_data_dir, schema, target_name):
    """Creates a serving input fn that applies the tf-transform graph to raw CSV.

    Args:
        transformed_data_dir: Directory containing the exported transform_fn.
        schema: the raw data schema (list of {'name', 'type'} dicts).
        target_name: name of the target column; excluded from serving inputs.

    Returns:
        The input function for serving.
    """
    raw_metadata = make_tft_input_metadata(schema)
    # All raw columns except the target are expected at serving time.
    # (The original also computed raw_metadata.schema.as_feature_spec() into an
    # unused local; that dead code is removed here.)
    raw_keys = [col['name'] for col in schema]
    raw_keys.remove(target_name)
    return input_fn_maker.build_csv_transforming_serving_input_receiver_fn(
        raw_metadata=raw_metadata,
        transform_savedmodel_dir=transformed_data_dir + '/transform_fn',
        raw_keys=raw_keys)
def get_vocab_size(transformed_data_dir, feature_name):
    """Get vocab size of a given text or category column.

    Counts the lines of the vocabulary asset that tf-transform wrote for the
    column, located at <dir>/transform_fn/assets/vocab_<feature_name>.
    """
    vocab_file = os.path.join(transformed_data_dir,
                              transform_fn_io.TRANSFORM_FN_DIR,
                              'assets',
                              'vocab_' + feature_name)
    size = 0
    with file_io.FileIO(vocab_file, 'r') as vocab:
        for _line in vocab:
            size += 1
    return size
def build_feature_columns(schema, transformed_data_dir, target):
    """Build the list of feature columns that tf.estimator expects.

    Args:
        schema: raw schema, a list of {'name': ..., 'type': ...} dicts.
        transformed_data_dir: tf-transform output dir (source of vocab sizes).
        target: name of the target column; skipped (it is the label).

    Returns:
        A list of tf.feature_column objects for all non-target columns.
    """
    feature_columns = []
    for entry in schema:
        name = entry['name']
        datatype = entry['type']
        if name == target:
            continue  # the target is the label, not an input feature
        if datatype == 'NUMBER':
            feature_columns.append(tf.feature_column.numeric_column(name, shape=()))
        elif datatype == 'IMAGE_URL':
            # FIX: use the shared IMAGE_EMBEDDING_SIZE constant instead of a
            # duplicated literal 2048, and pass a real 1-tuple — the original
            # `shape=(2048)` is just the int 2048, not a tuple.
            feature_columns.append(
                tf.feature_column.numeric_column(name, shape=(IMAGE_EMBEDDING_SIZE,)))
        elif datatype == 'CATEGORY':
            vocab_size = get_vocab_size(transformed_data_dir, name)
            category_column = tf.feature_column.categorical_column_with_identity(name, num_buckets=vocab_size)
            indicator_column = tf.feature_column.indicator_column(category_column)
            feature_columns.append(indicator_column)
        elif datatype == 'TEXT':
            vocab_size = get_vocab_size(transformed_data_dir, name)
            # Text columns arrive as '<name>_indices'/'<name>_weights' pairs;
            # the extra bucket presumably covers out-of-vocabulary — confirm
            # against the preprocessing step.
            indices_column = tf.feature_column.categorical_column_with_identity(name + '_indices', num_buckets=vocab_size + 1)
            weighted_column = tf.feature_column.weighted_categorical_column(indices_column, name + '_weights')
            indicator_column = tf.feature_column.indicator_column(weighted_column)
            feature_columns.append(indicator_column)
    return feature_columns
def get_estimator(schema, transformed_data_dir, target_name, output_dir, hidden_units,
                  optimizer, learning_rate, feature_columns):
    """Get proper tf.estimator (DNNClassifier or DNNRegressor).

    Args:
        schema: raw data schema (unused; kept for interface compatibility).
        transformed_data_dir: tf-transform output dir, used to decide
            classification vs regression.
        target_name: name of the target column.
        output_dir: model_dir for checkpoints and exports.
        hidden_units: list of hidden-layer sizes.
        optimizer: one of 'Adam', 'SGD', 'Adagrad'.
        learning_rate: learning rate for the chosen optimizer.
        feature_columns: list of tf.feature_column inputs.

    Returns:
        A configured tf.estimator.DNNClassifier or DNNRegressor.
    """
    # BUG FIX: the original rebound `optimizer` to an AdagradOptimizer *before*
    # comparing it against the strings 'Adam'/'SGD', so those branches could
    # never match and the requested optimizer was silently ignored.
    if optimizer == 'Adam':
        optimizer = tf.train.AdamOptimizer(learning_rate)
    elif optimizer == 'SGD':
        optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    else:
        optimizer = tf.train.AdagradOptimizer(learning_rate)
    # Set how often to run checkpointing in terms of steps.
    config = tf.contrib.learn.RunConfig(save_checkpoints_steps=1000)
    n_classes = is_classification(transformed_data_dir, target_name)
    if n_classes:
        estimator = tf.estimator.DNNClassifier(
            feature_columns=feature_columns,
            hidden_units=hidden_units,
            n_classes=n_classes,
            config=config,
            model_dir=output_dir,
            # The original omitted the optimizer for the classifier (only the
            # regressor received it); pass it so both honor the flag.
            optimizer=optimizer)
    else:
        estimator = tf.estimator.DNNRegressor(
            feature_columns=feature_columns,
            hidden_units=hidden_units,
            config=config,
            model_dir=output_dir,
            optimizer=optimizer)
    return estimator
def eval_input_receiver_fn(tf_transform_dir, schema, target):
    """Build everything needed for tf-model-analysis to run the model.

    Args:
      tf_transform_dir: directory in which the tf-transform model was written
          during the preprocessing step.
      schema: the raw data schema.
      target: name of the target column.

    Returns:
      EvalInputReceiver function, which contains:
        - a Tensorflow graph which parses raw untransformed features and
          applies the tf-transform preprocessing operators;
        - the set of transformed features;
        - the label against which predictions will be compared.
    """
    # Feature spec describing the *raw* (pre-transform) serialized examples.
    metadata = make_tft_input_metadata(schema)
    feature_spec = metadata.schema.as_feature_spec()

    # Placeholder that receives serialized tf.Example protos at eval time.
    examples_placeholder = tf.placeholder(
        dtype=tf.string, shape=[None], name='input_example_tensor')
    raw_features = tf.parse_example(examples_placeholder, feature_spec)

    # Replay the saved tf-transform graph on the parsed raw features.
    _, transformed = (
        saved_transform_io.partially_apply_saved_transform(
            os.path.join(tf_transform_dir, transform_fn_io.TRANSFORM_FN_DIR),
            raw_features))

    return tfma.export.EvalInputReceiver(
        features=transformed,
        receiver_tensors={'examples': examples_placeholder},
        labels=transformed[target])
def main():
    """Train-and-evaluate entry point for the trainer container.

    Reads the parsed schema, builds (or loads user-supplied) feature columns,
    trains a DNN estimator, exports serving and tf-model-analysis models, and
    writes the Kubeflow pipeline UI/output metadata files.
    """
    # configure the TF_CONFIG such that the tensorflow recoginzes the MASTER in the yaml file as the chief.
    # TODO: kubeflow is working on fixing the problem and this TF_CONFIG can be
    # removed then.
    args = parse_arguments()
    tf.logging.set_verbosity(tf.logging.INFO)

    # Schema is a JSON list of {'name': ..., 'type': ...} column entries.
    schema = json.loads(file_io.read_file_to_string(args.schema))
    feature_columns = None
    if args.preprocessing_module:
        # Copy the user-supplied preprocessing module next to this file so the
        # plain `import preprocessing` below resolves to it.
        module_dir = os.path.abspath(os.path.dirname(__file__))
        preprocessing_module_path = os.path.join(module_dir, 'preprocessing.py')
        with open(preprocessing_module_path, 'w+') as preprocessing_file:
            preprocessing_file.write(
                file_io.read_file_to_string(args.preprocessing_module))
        import preprocessing
        feature_columns = preprocessing.get_feature_columns(args.transformed_data_dir)
    else:
        feature_columns = build_feature_columns(schema, args.transformed_data_dir, args.target)

    estimator = get_estimator(schema, args.transformed_data_dir, args.target, args.job_dir,
                              args.hidden_layer_size, args.optimizer, args.learning_rate,
                              feature_columns)

    # TODO: Expose batch size.
    train_input_fn = make_training_input_fn(
        args.transformed_data_dir,
        'train',
        32,
        args.target,
        num_epochs=args.epochs)
    eval_input_fn = make_training_input_fn(
        args.transformed_data_dir,
        'eval',
        32,
        args.target)
    serving_input_fn = make_serving_input_fn(
        args.transformed_data_dir,
        schema,
        args.target)

    exporter = tf.estimator.FinalExporter('export', serving_input_fn)
    train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=args.steps)
    eval_spec = tf.estimator.EvalSpec(input_fn=eval_input_fn, exporters=[exporter])
    tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)

    # Export a second saved model in the format tf-model-analysis consumes.
    eval_model_dir = os.path.join(args.job_dir, 'tfma_eval_model_dir')
    tfma.export.export_eval_savedmodel(
        estimator=estimator,
        export_dir_base=eval_model_dir,
        eval_input_receiver_fn=(
            lambda: eval_input_receiver_fn(
                args.transformed_data_dir, schema, args.target)))

    # Kubeflow pipeline UI metadata: point a tensorboard viewer at job_dir.
    metadata = {
        'outputs' : [{
            'type': 'tensorboard',
            'source': args.job_dir,
        }]
    }
    with open('/mlpipeline-ui-metadata.json', 'w') as f:
        json.dump(metadata, f)
    with open('/output.txt', 'w') as f:
        f.write(args.job_dir)

if __name__ == '__main__':
    main()
|
|
""" Principal Component Analysis
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <d.engemann@fz-juelich.de>
#
# License: BSD 3 clause
from math import log, sqrt
import warnings
import numpy as np
from scipy import linalg
from scipy.special import gammaln
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils import check_random_state, as_float_array, check_array
#from sklearn.utils import atleast2d_or_csr
from sklearn.utils import deprecated
from sklearn.utils.extmath import (fast_logdet, safe_sparse_dot, randomized_svd)#,
#fast_dot)
def _assess_dimension_(spectrum, rank, n_samples, n_features):
"""Compute the likelihood of a rank ``rank`` dataset
The dataset is assumed to be embedded in gaussian noise of shape(n,
dimf) having spectrum ``spectrum``.
Parameters
----------
spectrum: array of shape (n)
data spectrum
rank: int,
tested rank value
n_samples: int,
number of samples
dim: int,
embedding/empirical dimension
Returns
-------
ll: float,
The log-likelihood
Notes
-----
This implements the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
"""
if rank > len(spectrum):
raise ValueError("The tested rank cannot exceed the rank of the"
" dataset")
pu = -rank * log(2.)
for i in range(rank):
pu += (gammaln((n_features - i) / 2.)
- log(np.pi) * (n_features - i) / 2.)
pl = np.sum(np.log(spectrum[:rank]))
pl = -pl * n_samples / 2.
if rank == n_features:
pv = 0
v = 1
else:
v = np.sum(spectrum[rank:]) / (n_features - rank)
pv = -np.log(v) * n_samples * (n_features - rank) / 2.
m = n_features * rank - rank * (rank + 1.) / 2.
pp = log(2. * np.pi) * (m + rank + 1.) / 2.
pa = 0.
spectrum_ = spectrum.copy()
spectrum_[rank:n_features] = v
for i in range(rank):
for j in range(i + 1, len(spectrum)):
pa += log((spectrum[i] - spectrum[j]) *
(1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples)
ll = pu + pl + pv + pp - pa / 2. - rank * log(n_samples) / 2.
return ll
def _infer_dimension_(spectrum, n_samples, n_features):
    """Infer the dimension of a dataset of shape (n_samples, n_features).

    The dataset is described by its spectrum ``spectrum``; the candidate rank
    with the highest Minka log-likelihood is returned.
    """
    log_likelihoods = np.array(
        [_assess_dimension_(spectrum, rank, n_samples, n_features)
         for rank in range(len(spectrum))])
    return log_likelihoods.argmax()
class PCA(BaseEstimator, TransformerMixin):
    """Principal component analysis (PCA)

    Linear dimensionality reduction using Singular Value Decomposition of the
    data and keeping only the most significant singular vectors to project the
    data to a lower dimensional space.

    This implementation uses the scipy.linalg implementation of the singular
    value decomposition. It only works for dense arrays and is not scalable to
    large dimensional data.

    The time complexity of this implementation is ``O(n ** 3)`` assuming
    n ~ n_samples ~ n_features.

    Parameters
    ----------
    n_components : int, None or string
        Number of components to keep.
        if n_components is not set all components are kept::
            n_components == min(n_samples, n_features)
        if n_components == 'mle', Minka's MLE is used to guess the dimension
        if ``0 < n_components < 1``, select the number of components such that
        the amount of variance that needs to be explained is greater than the
        percentage specified by n_components

    copy : bool
        If False, data passed to fit are overwritten and running
        fit(X).transform(X) will not yield the expected results,
        use fit_transform(X) instead.

    whiten : bool, optional
        When True (False by default) the `components_` vectors are divided
        by n_samples times singular values to ensure uncorrelated outputs
        with unit component-wise variances.

    Attributes
    ----------
    `components_` : array, [n_components, n_features]
        Components with maximum variance.

    `explained_variance_ratio_` : array, [n_components]
        Percentage of variance explained by each of the selected components.

    `n_components_` : int
        The estimated number of components. Relevant when n_components is set
        to 'mle' or a number between 0 and 1 to select using explained
        variance.

    `noise_variance_` : float
        The estimated noise covariance following the Probabilistic PCA model
        from Tipping and Bishop 1999. See "Pattern Recognition and Machine
        Learning" by C. Bishop, 12.2.1 p. 574 or
        http://www.miketipping.com/papers/met-mppca.pdf. It is required to
        compute the estimated data covariance and score samples.

    Notes
    -----
    For n_components='mle', this class uses the method of `Thomas P. Minka:
    Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`.

    Implements the probabilistic PCA model from M. Tipping and C. Bishop,
    Probabilistic Principal Component Analysis, JRSS B, 61(3), pp. 611-622
    via the score and score_samples methods.

    Due to implementation subtleties of the Singular Value Decomposition
    (SVD), running fit twice on the same matrix can lead to principal
    components with signs flipped (change in direction); always use the same
    estimator object to transform data in a consistent fashion.
    """

    def __init__(self, n_components=None, copy=True, whiten=False):
        self.n_components = n_components
        self.copy = copy
        self.whiten = whiten

    def fit(self, X, y=None):
        """Fit the model with X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training data.

        Returns
        -------
        self : object
            Returns the instance itself.
        """
        self._fit(X)
        return self

    def fit_transform(self, X, y=None):
        """Fit the model with X and apply the dimensionality reduction on X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training data.

        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        """
        # BUG FIX: _fit returns a 4-tuple (U, S, V, mean); the original code
        # unpacked it into three names, which raised ValueError at call time.
        U, S, V, _ = self._fit(X)
        U = U[:, :self.n_components_]
        if self.whiten:
            # X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)
            U *= sqrt(X.shape[0])
        else:
            # X_new = X * V = U * S * V^T * V = U * S
            U *= S[:self.n_components_]
        return U

    def _fit(self, X, U=None, S=None, V=None, mean=None):
        """Fit the model on X, or on a precomputed SVD of centered X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vectors.
        U, S, V : ndarray, optional
            Precomputed SVD factors of already-centered data; when all three
            are given the SVD step is skipped and `mean` must hold the column
            mean that was subtracted.

        Returns
        -------
        U, S, V, mean : the SVD of the (centered) input data and the column
            mean that was removed.
        """
        X = check_array(X)
        n_samples, n_features = X.shape
        if S is None and V is None:
            X = as_float_array(X, copy=self.copy)
            # Center data.
            self.mean_ = np.mean(X, axis=0)
            X -= self.mean_
            U, S, V = linalg.svd(X, full_matrices=False)
        else:
            # Caller supplied a precomputed SVD of already-centered data.
            assert S is not None and V is not None and U is not None
            self.mean_ = mean
        explained_variance_ = (S ** 2) / n_samples
        explained_variance_ratio_ = (explained_variance_ /
                                     explained_variance_.sum())

        if self.whiten:
            components_ = V / (S[:, np.newaxis] / sqrt(n_samples))
        else:
            components_ = V

        n_components = self.n_components
        if n_components is None:
            n_components = n_features
        elif n_components == 'mle':
            if n_samples < n_features:
                raise ValueError("n_components='mle' is only supported "
                                 "if n_samples >= n_features")
            n_components = _infer_dimension_(explained_variance_,
                                             n_samples, n_features)

        if 0 < n_components < 1.0:
            # Number of components for which the cumulated explained variance
            # percentage exceeds the desired threshold.
            ratio_cumsum = explained_variance_ratio_.cumsum()
            n_components = np.sum(ratio_cumsum < n_components) + 1

        # Compute noise variance using the Probabilistic PCA model: the
        # sigma2 maximum likelihood (cf. Bishop eq. 12.46).
        if n_components < n_features:
            self.noise_variance_ = explained_variance_[n_components:].mean()
        else:
            self.noise_variance_ = 0.

        # Store n_samples to revert whitening when getting covariance.
        self.n_samples_ = n_samples

        self.components_ = components_[:n_components]
        self.explained_variance_ = explained_variance_[:n_components]
        self.explained_variance_ratio_ = explained_variance_ratio_[:n_components]
        self.n_components_ = n_components
        return U, S, V, self.mean_

    def get_covariance(self):
        """Compute data covariance with the generative model.

        ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
        where S**2 contains the explained variances.

        Returns
        -------
        cov : array, shape=(n_features, n_features)
            Estimated covariance of data.
        """
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            # Undo whitening so the components carry the variance again.
            components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
        cov = np.dot(components_.T * exp_var_diff, components_)
        cov.flat[::len(cov) + 1] += self.noise_variance_  # modify diag inplace
        return cov

    def _noise_logdet(self, n_cols):
        """Log-determinant contribution of the (scalar or diagonal) noise."""
        if np.isscalar(self.noise_variance_):
            return np.log(self.noise_variance_) * n_cols
        assert self.noise_variance_.shape[0] == n_cols, "self.noise_variance_.shape[0] == X.shape[1]"
        return np.log(self.noise_variance_).sum()

    def dot_precision(self, X, logdet=False):
        """Compute the dot product of X with the data precision matrix.

        Uses the matrix inversion lemma so the full precision matrix is never
        materialised (except in the full-rank corner case).

        Parameters
        ----------
        X : array, shape (n_samples, n_features)
        logdet : bool, optional
            When True, also return the log-determinant of the model
            covariance.

        Returns
        -------
        Y : array, shape=(n_samples, n_features)
            X * precision (and optionally the covariance log-determinant).
        """
        n_features = self.components_.shape[1]
        # Handle corner cases first.
        if self.n_components_ == 0:
            # Pure-noise model: precision is 1 / noise_variance on the diag.
            if logdet:
                return X / self.noise_variance_, self._noise_logdet(X.shape[1])
            return X / self.noise_variance_
        if self.n_components_ == n_features:
            # Full-rank model: fall back to a dense inverse.
            covariance = self.get_covariance()
            if logdet:
                return X.dot(linalg.inv(covariance)), fast_logdet(covariance)
            return X.dot(linalg.inv(covariance))
        # Low-rank model: apply the matrix inversion lemma factor by factor.
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
        Xprecision = (1.0 / exp_var_diff) + (1.0 / self.noise_variance_)
        if logdet:
            # log|C| assembled from the noise diagonal plus the low-rank
            # correction (matrix determinant lemma).
            logdet_cov = self._noise_logdet(X.shape[1])
            logdet_cov += np.log(((1.0 / exp_var_diff) + (1.0 / self.noise_variance_))).sum()
            logdet_cov += np.log(exp_var_diff).sum()
        Xprecision *= (self.noise_variance_ * self.noise_variance_)
        Xprecision = (X.dot(components_.T / (-Xprecision))).dot(components_)
        Xprecision += X / self.noise_variance_
        if logdet:
            return Xprecision, logdet_cov
        return Xprecision

    def get_precision(self):
        """Compute data precision matrix with the generative model.

        Equals the inverse of the covariance but computed with
        the matrix inversion lemma for efficiency.

        Returns
        -------
        precision : array, shape=(n_features, n_features)
            Estimated precision of data.
        """
        n_features = self.components_.shape[1]
        # Handle corner cases first.
        if self.n_components_ == 0:
            return np.eye(n_features) / self.noise_variance_
        if self.n_components_ == n_features:
            return linalg.inv(self.get_covariance())
        # Get precision using the matrix inversion lemma.
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
        cprecision = (((1.0 / exp_var_diff) + (1.0 / self.noise_variance_)) *
                      (-self.noise_variance_ * self.noise_variance_))
        cprecision = (components_.T / cprecision).dot(components_)
        cprecision.flat[::len(cprecision) + 1] += 1. / self.noise_variance_
        return cprecision

    def transform(self, X):
        """Apply the dimensionality reduction on X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            New data.

        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        """
        X = check_array(X)
        if self.mean_ is not None:
            X = X - self.mean_
        X_transformed = np.dot(X, self.components_.T)
        return X_transformed

    def inverse_transform(self, X):
        """Transform data back to its original space, i.e. return an input
        X_original whose transform would be X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_components)
            New data.

        Returns
        -------
        X_original : array-like, shape (n_samples, n_features)

        Notes
        -----
        If whitening is enabled, inverse_transform does not compute the
        exact inverse operation of transform.
        """
        Xtrans = np.dot(X, self.components_)
        return Xtrans + self.mean_

    def score_samples(self, X):
        """Return the log-likelihood of each sample.

        See "Pattern Recognition and Machine Learning"
        by C. Bishop, 12.2.1 p. 574
        or http://www.miketipping.com/papers/met-mppca.pdf

        Parameters
        ----------
        X : array, shape(n_samples, n_features)
            The data.

        Returns
        -------
        ll : array, shape (n_samples,)
            Log-likelihood of each sample under the current model.
        """
        Xr = X - self.mean_
        n_features = X.shape[1]
        # Quadratic form and log-determinant via the factored precision;
        # avoids materialising the full precision matrix.
        XrP, ldet = self.dot_precision(X=Xr, logdet=True)
        log_like = -.5 * (Xr * XrP).sum(axis=1)
        log_like -= .5 * (n_features * log(2. * np.pi) + ldet)
        return log_like

    def score(self, X, y=None):
        """Return the average log-likelihood of all samples.

        See "Pattern Recognition and Machine Learning"
        by C. Bishop, 12.2.1 p. 574
        or http://www.miketipping.com/papers/met-mppca.pdf

        Parameters
        ----------
        X : array, shape(n_samples, n_features)
            The data.

        Returns
        -------
        ll : float
            Average log-likelihood of the samples under the current model.
        """
        return np.mean(self.score_samples(X))
@deprecated("ProbabilisticPCA will be removed in 0.16. WARNING: the covariance"
            " estimation was previously incorrect, your output might be different "
            " than under the previous versions. Use PCA that implements score"
            " and score_samples. To work with homoscedastic=False, you should use"
            " FactorAnalysis.")
class ProbabilisticPCA(PCA):
    """Additional layer on top of PCA that adds a probabilistic evaluation"""
    __doc__ += PCA.__doc__

    def fit(self, X, y=None, homoscedastic=True, compute_cov=False):
        """Additionally to PCA.fit, learns a covariance model

        Parameters
        ----------
        X : array of shape(n_samples, n_features)
            The data to fit
        homoscedastic : bool, optional,
            If True, average variance across remaining dimensions
        compute_cov : bool, optional
            If True, compute the dense covariance model even in the
            homoscedastic case.
        """
        self.covariance_ = None
        # BUG FIX: score() tests `self.precision_ is None`, but precision_
        # was never initialised anywhere, raising AttributeError; start both
        # caches empty here.
        self.precision_ = None
        PCA.fit(self, X)
        n_samples, n_features = X.shape
        # BUG FIX: use the *fitted* component count. self.n_components may be
        # None, 'mle' or a fraction; n_components_ (set by PCA._fit) is the
        # resolved integer in every case.
        n_components = self.n_components_
        explained_variance = self.explained_variance_.copy()
        if homoscedastic:
            explained_variance -= self.noise_variance_
        if n_features == n_components:
            delta = 0.
        elif homoscedastic:
            delta = self.noise_variance_
        else:
            # Per-feature residual variance of the discarded subspace.
            Xr = X - self.mean_
            Xr -= np.dot(np.dot(Xr, self.components_.T), self.components_)
            delta = (Xr ** 2).mean(axis=0) / (n_features - n_components)
        # BUG FIX: `~homoscedastic` is *bitwise* not on a Python bool
        # (~True == -2, ~False == -1, both truthy), so the covariance was
        # unconditionally computed; use boolean `not` as intended.
        if compute_cov or (not homoscedastic):
            # Make the low rank part of the estimated covariance.
            self.covariance_ = np.dot(self.components_[:n_components].T *
                                      explained_variance,
                                      self.components_[:n_components])
            # Add delta to the diagonal without extra allocation.
            self.covariance_.flat[::n_features + 1] += delta
        return self

    def score(self, X, y=None):
        """Return a score associated to new data

        Parameters
        ----------
        X : array of shape(n_samples, n_features)
            The data to test

        Returns
        -------
        ll : array of shape (n_samples),
            log-likelihood of each row of X under the current model
        """
        Xr = X - self.mean_
        n_features = X.shape[1]
        if (self.precision_ is None) and (self.covariance_ is None):
            # No dense covariance model: use the factored precision product.
            XrP, ldet = self.dot_precision(X=Xr, logdet=True)
        else:
            if self.precision_ is None:
                # Invert and cache the dense covariance on first use.
                self.precision_ = linalg.inv(self.covariance_)
            XrP = np.dot(Xr, self.precision_)
            ldet = fast_logdet(self.covariance_)
        log_like = -.5 * (Xr * XrP).sum(axis=1)
        log_like -= .5 * (ldet + n_features * log(2. * np.pi))
        return log_like
class RandomizedPCA(BaseEstimator, TransformerMixin):
    """Principal component analysis (PCA) using randomized SVD

    Linear dimensionality reduction using approximated Singular Value
    Decomposition of the data and keeping only the most significant
    singular vectors to project the data to a lower dimensional space.

    Parameters
    ----------
    n_components : int, optional
        Maximum number of components to keep. When not given or None, this
        is set to n_features (the second dimension of the training data).

    copy : bool
        If False, data passed to fit are overwritten and running
        fit(X).transform(X) will not yield the expected results,
        use fit_transform(X) instead.

    iterated_power : int, optional
        Number of iterations for the power method. 3 by default.

    whiten : bool, optional
        When True (False by default) the `components_` vectors are divided
        by the singular values to ensure uncorrelated outputs with unit
        component-wise variances.

        Whitening will remove some information from the transformed signal
        (the relative variance scales of the components) but can sometime
        improve the predictive accuracy of the downstream estimators by
        making their data respect some hard-wired assumptions.

    random_state : int or RandomState instance or None (default)
        Pseudo Random Number generator seed control. If None, use the
        numpy.random singleton.

    Attributes
    ----------
    `components_` : array, [n_components, n_features]
        Components with maximum variance.

    `explained_variance_ratio_` : array, [n_components]
        Percentage of variance explained by each of the selected components.
        NOTE: the ratio is computed against the sum over the *kept*
        components only, so the values sum to 1.0 even when components are
        truncated.

    References
    ----------
    .. [Halko2009] `Finding structure with randomness: Stochastic algorithms
      for constructing approximate matrix decompositions Halko, et al., 2009
      (arXiv:909)`

    .. [MRT] `A randomized algorithm for the decomposition of matrices
      Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`

    Notes
    -----
    This class supports sparse matrix input for backward compatibility, but
    actually computes a truncated SVD instead of a PCA in that case (i.e. no
    centering is performed). This support is deprecated; use the class
    TruncatedSVD for sparse matrix support.
    """
    def __init__(self, n_components=None, copy=True, iterated_power=3,
                 whiten=False, random_state=None):
        self.n_components = n_components
        self.copy = copy
        self.iterated_power = iterated_power
        self.whiten = whiten
        self.mean_ = None
        self.random_state = random_state

    def fit(self, X, y=None):
        """Fit the model with X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training data, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        self : object
            Returns the instance itself.
        """
        self._fit(X)
        return self

    def _fit(self, X):
        """Fit the model to the data X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        X : ndarray, shape (n_samples, n_features)
            The input data, copied, centered and whitened when requested.
        """
        random_state = check_random_state(self.random_state)
        if hasattr(X, 'todense'):
            # Sparse input: no centering is performed (truncated SVD).
            warnings.warn("Sparse matrix support is deprecated"
                          " and will be dropped in 0.16."
                          " Use TruncatedSVD instead.",
                          DeprecationWarning)
        else:
            # not a sparse matrix, ensure this is a 2D array
            X = np.atleast_2d(as_float_array(X, copy=self.copy))

        n_samples = X.shape[0]

        if not hasattr(X, 'todense'):
            # Center data
            self.mean_ = np.mean(X, axis=0)
            X -= self.mean_
        if self.n_components is None:
            n_components = X.shape[1]
        else:
            n_components = self.n_components

        U, S, V = randomized_svd(X, n_components,
                                 n_iter=self.iterated_power,
                                 random_state=random_state)

        self.explained_variance_ = exp_var = (S ** 2) / n_samples
        self.explained_variance_ratio_ = exp_var / exp_var.sum()

        if self.whiten:
            self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
        else:
            self.components_ = V

        return X

    def transform(self, X, y=None):
        """Apply dimensionality reduction on X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            New data, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        """
        # XXX remove scipy.sparse support here in 0.16
        X = check_array(X)
        if self.mean_ is not None:
            X = X - self.mean_

        X = safe_sparse_dot(X, self.components_.T)
        return X

    def fit_transform(self, X, y=None):
        """Fit the model to X and apply dimensionality reduction on X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            New data, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        """
        X = check_array(X)
        # _fit returns the centered data, so the projection below does not
        # need to subtract the mean again.
        X = self._fit(X)
        X = safe_sparse_dot(X, self.components_.T)
        return X

    def inverse_transform(self, X, y=None):
        """Transform data back to its original space.

        Returns an array X_original whose transform would be X.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_components)
            New data, where n_samples is the number of samples
            and n_components is the number of components.

        Returns
        -------
        X_original : array-like, shape (n_samples, n_features)

        Notes
        -----
        If whitening is enabled, inverse_transform does not compute the
        exact inverse operation of transform.
        """
        # XXX remove scipy.sparse support here in 0.16
        X_original = safe_sparse_dot(X, self.components_)
        if self.mean_ is not None:
            X_original = X_original + self.mean_
        return X_original
|
|
import numpy as np
import os
import cv2
import logging
import scipy
import scipy.interpolate
import ImageTools
import StackTools
import CompleteAlignParallel
import FastAlignParallel
import ComputeStdevImage
import FrameStack
logger = logging.getLogger(__name__)
class AoRecording:
    """Class for information about a single AO (adaptive optics) recording.
    """
    # Default values for xcorr frame alignment (all sizes in pixels).
    templateSize = 128
    largeFrameSize = 231
    smallFrameSize = 91
    # Default values for continuous alignment.
    smallSzRow = 25
    largeSzRow = 125
    smallSzCol = 151
    largeSzCol = 175
    numberPointsToAlign = 55
    maxStdDist = 49 # max allowable variance (std) in distances moved
    maxDist = 20 # max allowable distance of deviation in pixels
    def __init__(self, filepath=None):
        """Initialise an AoRecording object.

        PARAMS:
        [filepath] = full path to the source video file
        """
        self.filepath = filepath
        self.nframes = 0               # number of frames in the loaded video
        self.frameheight = 0           # frame height in pixels
        self.framewidth = 0            # frame width in pixels
        self.data = []                 # FrameStack of frames once loaded
        self.mask = None               # optional 2-D boolean analysis mask
        self.goodFrames = None         # indices of frames passing filtering
        self.templateFrame = None      # frame used as alignment template
        self.filterResults = None      # per-frame filtering statistics
        self.timeTics = None
        self.currentStack = None       # working (aligned) frame stack
        self.currentStdevFrame = None  # per-pixel stdev image, if computed
        self.currentAverageFrame = None  # averaged image, if computed
        self.b_continue = 1  # flag to check if an error in processing has occurred
def get_masked(self):
#returns an np.maskedarray type
if self.mask is None:
masked_data = np.ma.array(self.data,
mask = np.ones((self.data.shape),dtype=np.bool))
else:
masked_data = np.ma.array(self.data,
mask = np.tile(~self.mask,[self.data.frameCount,1,1]))
return masked_data
    def set_mask(self, roi=None):
        """Create a mask for the image.

        Params:
        roi - [(x1,y1),(x2,y2)]
        If roi is None user is prompted to draw a mask, otherwise mask is
        created from roi.
        """
        if roi is None:
            # Interactive path: user draws the mask region.
            # NOTE(review): `self.data[0:,:,]` slices *all* frames rather than
            # frame 0 — possibly intended to be `self.data[0,:,:]`; confirm.
            mask = ImageTools.click_and_crop(self.data[0:,:,], types=['mask'])
            self.mask = mask['mask']
        else:
            x1, y1 = roi[0]
            x2, y2 = roi[1]
            # Validate the ROI lies within the frame bounds (asserts are
            # stripped under `python -O`; callers should pass sane values).
            assert x1 >= 0
            assert x2 >= x1 and x2 <= self.data.frameWidth
            assert y1 >= 0
            assert y2 >= y1 and y2 <= self.data.frameHeight
            mask = np.zeros((self.data.frameHeight, self.data.frameWidth), dtype=np.bool)
            mask[y1:y2, x1:x2] = 1
            self.mask = mask
    def write_video(self, filename):
        """Write the current framestack to an avi via FrameStack.write_stack."""
        self.data.write_stack(filename)
    def load_video(self, cropInterlace=True):
        """Loads an AO video.

        Loads the video identified by filepath into an
        nframes x height x width numpy array wrapped in a FrameStack.

        PARAMS:
        cropInterlace - boolean, attempt to crop interlace bars at the sides
            of the video
        """
        RGB = False  # indicator that video is in RGB format, in which case only the G channel is used
        cap = cv2.VideoCapture(self.filepath)
        if not cap.isOpened():
            logger.warning('Failed opening video: %s', self.filepath)
            return
        nframes = cap.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)
        frameheight = cap.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)
        framewidth = cap.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)
        # Preallocate a numpy array for all frames (grayscale, 8-bit).
        data = np.empty([int(nframes), int(frameheight), int(framewidth)], dtype=np.uint8)
        ret, frame = cap.read()  # get the first frame
        if len(frame.shape) > 2:
            # Frames are RGB format, using only G channel.
            logger.debug('Video is in RGB format, using only G channel')
            RGB = True
        while(ret):
            # CV_CAP_PROP_POS_FRAMES is the index of the *next* frame, so the
            # frame just read is stored at frame_idx - 1.
            frame_idx = int(cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES))
            if RGB:
                data[frame_idx - 1,:,:] = frame[:,:,1]
            else:
                data[frame_idx -1 ,:,:] = frame[:,:]
            ret, frame = cap.read()
        cap.release()
        if cropInterlace:
            # Interlace bars show up as all-zero columns in the summed image;
            # crop to the innermost zero column on each side.
            frameSums = data.sum(0)
            # NOTE(review): this module uses the Python-2-era cv2.cv API, so
            # `/2` is integer division here; under Python 3 it yields a float.
            midRow = frameSums.shape[1]/2
            r,c = np.where(frameSums[:,0:midRow]==0)
            left = max(c) + 1
            r,c = np.where(frameSums[:,midRow:]==0)
            right = (min(c) - 1) + midRow
            # Limit the interlace to a maximum of 200 pixels, otherwise can
            # cause problems with very dark videos.
            left = min([left,200])
            right = max([right,framewidth-200])
            data = data[:,:,left:right]
        self.data = FrameStack.FrameStack(data)
def write_average_frame(self,filename):
if self.currentAverageFrame is None:
logger.error('Average frame not created')
raise ValueError
#check to see if an error has occured before this
if not self.b_continue:
return
ImageTools.write_image(filename, self.currentAverageFrame)
def write_frame(self,filename,frameTypes):
#check to see if an error has occured before this
if self.data is None:
return
if isinstance(frameTypes,str):
frameTypes = [frameTypes]
for frameType in frameTypes:
assert frameType in ['average','stdev']
if frameType == 'average':
self.write_average_frame(filename)
if frameType == 'stdev':
if self.currentStdevFrame is not None:
ImageTools.write_image(filename,self.currentStdevFrame)
def filter_frames(self, minCorr=0.38):
'''Perform an initial filtering on a frame set
filterFrames()
minCorr default 0.38 is ok for cones, other structures require lower values
'''
#check to see if an error has occured before this
if not self.b_continue:
return
framestack = self.data
# calculate mean brightness for each frame,
# if framestack is a masked array masked values are ignored
frame_brightnesses = np.apply_over_axes(np.mean, framestack, [1,2]).flatten()
max_bright = frame_brightnesses.max()
#find frame index for frames >50% max_bright
# frames not in this list will be excluded
good_brights = np.array(frame_brightnesses > max_bright * 0.5, dtype=np.bool)
#good_brights = [i for i, val in enumerate(frame_brightnesses) if val > 0.5* max_bright]
brightestFrames = np.array(frame_brightnesses > max_bright * 0.85, dtype=np.bool)
framelist = np.where(good_brights)[0]
framestack.filter_frames_by_idx(good_brights) # only going to use good frames from here on.
if len(good_brights) < 1:
logger.error("No good frames found")
self.data = None
raise RuntimeError("No good frames found:Brightness too low")
results = []
midRow = int(framestack.frameWidth / 2)
midCol = int(framestack.frameHeight / 2)
for iFrame in np.arange(1,len(framelist)):
currImage = framestack[iFrame - 1,:,:] #target frame
tempImage = framestack[iFrame,:,:] #template frame
shear = ImageTools.find_frame_shear(currImage, tempImage)
tempImage = tempImage[midRow - self.templateSize / 2 : midRow + self.templateSize / 2,
midCol - self.templateSize / 2 : midCol + self.templateSize / 2]
displacement = ImageTools.find_frame_shift(currImage,
tempImage,
topLeft=[(0,0),
(midRow - self.templateSize / 2,midCol - self.templateSize / 2)],
method='xcorr',
applyBlur=True,
attemptSubPixelAlignment=False)
motion = (displacement['coords'][0]**2 + displacement['coords'][0]**2)**0.5
results.append({'frameid':framestack.frameIds[iFrame],
'shear':shear['shearval'],
'correlation':displacement['maxcorr'],
'shift':displacement['coords'],
'motion':motion})
#filter frames where sheer > 20
#results = [r for r in results if r['motion'] <= 20]
#if len(results) < 1:
#logger.error("No good frames found")
#self.data = None
#raise RuntimeError("No good frames found:Shear too high")
#data for frame 0 is missing, use the data from the first remaining frame
#r=[r for r in results if r['frameid'] == 1]
if not results:
raise RuntimeError('Could not get displacements')
r =dict(results[0]) #make a copy of this item
r['frameid']=framelist[0]
r['shift']=(0,0)
r['motion']=0
results.append(r)
maxCorr = max([result['correlation'] for result in results])
if maxCorr < minCorr:
#all correlations are crummy, just bail
#TODO
logger.warning('No good frames found')
raise RuntimeError("No good frames found:Correlation too low")
else:
goodFrames = [result['frameid'] for result in results if result['shear'] < 20 and result['correlation'] > 0.5 * maxCorr and result['motion'] < 50 ]
badFrames = [frameid for frameid in self.data.frameIds if frameid not in goodFrames]
if not goodFrames:
logger.warning('No good frames found')
raise RuntimeError('No good frames found:Group criteria (Shear, Correlation, Motion) not met.')
logger.info('Removing frames {} due to brightness or shear'.format(badFrames))
self.data.filter_frames_by_id(goodFrames)
self.data.templateFrameId = goodFrames[frame_brightnesses[goodFrames].argmax()] #return the brightest of the remaining frames as a potential template
self.filterResults = results #store this for debugging
def fixed_align_frames(self,maxDisplacement=50):
'''perform fixed alignment on the framestack
maxDisplacement=50 - maximum allowed displacement, frames with > than this will be removed from the stack'''
if self.data is None:
logger.warning('No frames found')
return
if self.data.templateFrame is None:
logger.warning('template frame not set')
return
framesToProcess = [i for i in self.data.frameIds if i != self.data.templateFrameId]
midRow = int(self.data.frameWidth / 2)
midCol = int(self.data.frameHeight / 2)
targetFrame = self.data.templateFrame
targetFrame = targetFrame[midRow - self.largeFrameSize : midRow + self.largeFrameSize,
midCol - self.largeFrameSize : midRow + self.largeFrameSize]
results = []
#ensure the target frame is included in the output
results.append({'frameid':self.data.templateFrameId,
'correlation':1,
'shift':(0,0)})
for iFrame in framesToProcess:
templateFrame = self.data.get_frame_by_id(iFrame)
templateFrame = templateFrame[midRow - self.smallFrameSize : midRow + self.smallFrameSize,
midCol - self.smallFrameSize : midCol + self.smallFrameSize]
displacement = ImageTools.find_frame_shift(targetFrame,
templateFrame,
topLeft=[(midRow - self.largeFrameSize,midCol - self.largeFrameSize),
(midRow - self.smallFrameSize,midCol - self.smallFrameSize)],
applyBlur=True,
method='xcorr',
attemptSubPixelAlignment=False)
results.append({'frameid':iFrame,
'correlation':displacement['maxcorr'],
'shift':displacement['coords']})
#Check displacement is les than 50 pixels
good_results = [result for result in results
if abs(result['shift'][1])<=maxDisplacement
and abs(result['shift'][0]) <= maxDisplacement]
bad_results = [result['frameid'] for result in results
if abs(result['shift'][1]) > maxDisplacement
or abs(result['shift'][0]) > maxDisplacement]
logger.info('Removing frames {} for too large displacements'.format(bad_results))
if not good_results:
#no good frames found
logger.warning('frame displacements are too large')
raise RuntimeError('frame displacements are too large')
alignedData = StackTools.apply_displacements(self.data,good_results)
self.data = alignedData
self.currentStack = alignedData
    def complete_align(self,minCorr = 0.38):
        """Takes a roughly aligned stack and performs a complete alignment
        minCorr (default 0.38, minimum correlation for inclusion in the output stack)

        Each frame is cut into numberPointsToAlign horizontal strips; every
        strip is cross-correlated against a larger strip of the template
        frame.  The per-strip displacements are then fitted with splines
        (_make_valid_points) and applied by fast_align.
        """
        if self.data is None:
            logger.warning('Aborting:No good frames found')
            return
        nrows,ncols = self.data.frameHeight, self.data.frameWidth
        targetFrameData = self.data.templateFrame
        framesToProcess = [frameid for frameid in self.data.frameIds if not frameid == self.data.templateFrameId]
        #apply a mask to the target frame (non-zero pixels are valid data)
        mask = np.zeros(targetFrameData.shape,dtype=np.bool)
        mask[targetFrameData > 0] = 1
        #convert the targetFrameData to a masked array for simple calculation of means
        targetFrameData = np.ma.array(targetFrameData,
                                      mask=~mask)
        # fill the invalid region with noise matching the frame statistics so
        # the cross-correlation is not biased by a constant background
        randomData = targetFrameData.std() * np.random.standard_normal(targetFrameData.shape) + targetFrameData.mean()
        targetFrameData = (targetFrameData.data * ~targetFrameData.mask) + (randomData * targetFrameData.mask) #no longer a masked array
        #setup the row indices
        defaultStep = int((nrows - self.smallSzRow + 1) / (self.numberPointsToAlign))
        smallRowStart = np.array(range(self.numberPointsToAlign)) * defaultStep
        #the large rows should be centered on the small rows
        halfDifference = int((self.largeSzRow - self.smallSzRow) / 2)
        largeRowStart = smallRowStart - halfDifference # this gives some values out of bounds
        largeRowStart[largeRowStart < 0] = 0
        maxRowStart = nrows - self.largeSzRow
        largeRowStart[largeRowStart > maxRowStart] = maxRowStart
        # NOTE(review): these divisions are integer under Python 2 but float
        # under Python 3 (and are used as slice bounds) -- confirm interpreter
        smallColStart = (ncols / 2) - (self.smallSzCol / 2)
        largeColStart = (ncols / 2) - (self.largeSzCol / 2)
        results = []
        for frameId in framesToProcess:
            #loop through all the frames here
            #need to generate a new mask for each frame
            image = self.data.get_frame_by_id(frameId)
            mask = np.zeros(image.shape,dtype=np.bool)
            mask[image > 0] = 1
            image = np.ma.array(image,
                                mask=~mask)
            # same noise-fill trick as for the template frame above
            randomData = image.std() * np.random.standard_normal(image.shape) + image.mean()
            image = (image.data * ~image.mask) + (randomData * image.mask) #no longer a masked array
            results.append({'frameid':frameId,'stripResults':[]})
            for idxStrip in range(len(smallRowStart)):
                #loop through the strips here
                stripResults = [result['stripResults'] for result in results if result['frameid'] == frameId][0]
                smallStrip = image[smallRowStart[idxStrip]:smallRowStart[idxStrip]+self.smallSzRow,
                                   smallColStart:smallColStart + self.smallSzCol]
                largeStrip = targetFrameData[largeRowStart[idxStrip]:largeRowStart[idxStrip]+self.largeSzRow,
                                             largeColStart:largeColStart + self.largeSzCol]
                displacement = ImageTools.find_frame_shift(largeStrip,
                                                           smallStrip,
                                                           topLeft=[(largeRowStart[idxStrip],largeColStart),
                                                                    (smallRowStart[idxStrip],smallColStart)],
                                                           method='xcorr',
                                                           applyBlur=True,
                                                           attemptSubPixelAlignment=True)
                #the offsets returned here are for the small strip within the large strip
                #coords = displacement['coords']
                #displacement['coords'] = (coords[0] + largeRowStart[idxStrip],
                #coords[1] + largeColStart)
                stripResults.append(displacement)
        # per-strip acquisition times: the centre pixel of each small strip
        newCoords = self._get_coords(nrows, ncols)
        timetics=[]
        for jndx in range(self.numberPointsToAlign):
            timetics.append(newCoords['times'][(smallRowStart[jndx]+int(self.smallSzRow/2)),
                                               (smallColStart+int(self.smallSzCol/2)-1)])
        self.timeTics = np.array(timetics)
        self.times = newCoords['times']
        alignmentSplines = self._make_valid_points(results,minCorr)
        self.data = self.fast_align(alignmentSplines)
    def complete_align_parallel(self,minCorr = 0.38):
        """Takes a roughly aligned stack and performs a complete alignment
        minCorr (default 0.38, minimum correlation for inclusion in the output stack)

        Parallel variant of complete_align(): the per-strip displacement
        search is delegated to CompleteAlignParallel, then spline fitting
        (_make_valid_points) and resampling (fast_align_parallel) follow.
        """
        #check to see if an error has occured before this
        if self.data is None:
            logger.warning('Aborting:No good frames found')
            return
        nrows,ncols = self.data.frameHeight, self.data.frameWidth
        newCoords = self._get_coords(nrows, ncols)
        #setup the row indices
        defaultStep = int((nrows - self.smallSzRow + 1) / (self.numberPointsToAlign))
        smallRowStart = np.array(range(self.numberPointsToAlign)) * defaultStep
        #the large rows should be centered on the small rows
        halfDifference = int((self.largeSzRow - self.smallSzRow) / 2)
        largeRowStart = smallRowStart - halfDifference # this gives some values out of bounds
        largeRowStart[largeRowStart < 0] = 0
        maxRowStart = nrows - self.largeSzRow
        largeRowStart[largeRowStart > maxRowStart] = maxRowStart
        # NOTE(review): integer division under Python 2, float under
        # Python 3 -- confirm target interpreter
        smallColStart = (ncols / 2) - (self.smallSzCol / 2)
        largeColStart = (ncols / 2) - (self.largeSzCol / 2)
        logging.debug('Starting parallel alignment')
        CompleteAlignParallel.complete_align_parallel(self.data,
                                                      (smallRowStart,largeRowStart),
                                                      (smallColStart,largeColStart),
                                                      (self.smallSzRow,self.largeSzRow),
                                                      (self.smallSzCol,self.largeSzCol))
        # per-strip acquisition times: the centre pixel of each small strip
        timetics=[]
        for jndx in range(self.numberPointsToAlign):
            timetics.append(newCoords['times'][(smallRowStart[jndx]+int(self.smallSzRow/2)),
                                               (smallColStart+int(self.smallSzCol/2)-1)])
        self.timeTics = np.array(timetics)
        self.times = newCoords['times']
        alignmentSplines = self._make_valid_points(CompleteAlignParallel.results['splines'],minCorr)
        self.data = self.fast_align_parallel(alignmentSplines)
    def _make_valid_points(self,displacements,minCorr):
        """Takes the displacements created by complete_align() and converts them into a series of fitted splines
        returns a list of dicts, one dict for each frame
        {'frameid':original frame number
         'ppx':splines for generating x coords
         'ppy':splines for generating y coords
        }
        N.B. there may not be a dict entry for every frame

        Frames are discarded (and removed from self.data) when fewer than
        10 strips pass the correlation/distance checks, or when the spread
        of strip displacements exceeds self.maxStdDist.

        Raises:
            ValueError: if complete_align() has not populated self.timeTics.
        """
        if self.timeTics is None:
            logger.debug('Complete alignment not completed')
            raise ValueError
        #convert the complete alignment results to arrays
        correls = np.empty((len(displacements),self.numberPointsToAlign),dtype=np.float32) #frames by strips N.B. template frame is excluded
        xShifts = np.empty((len(displacements),self.numberPointsToAlign),dtype=np.float32)
        yShifts = np.empty((len(displacements),self.numberPointsToAlign),dtype=np.float32)
        frameids = [frame['frameid'] for frame in displacements]
        frameIdx = -1
        for frameDisplacement in displacements:
            frameIdx = frameIdx + 1
            stripResults = frameDisplacement['stripResults']
            correls[frameIdx,:] = [stripResult['maxcorr'] for stripResult in stripResults]
            xShifts[frameIdx,:] = [stripResult['coords'][0] for stripResult in stripResults]
            yShifts[frameIdx,:] = [stripResult['coords'][1] for stripResult in stripResults]
        #this trims the first and last strips
        #ugly code... zeroing the correlations guarantees the two outermost
        #strips at each edge fail the minCorr test below and are never used
        correls[:,0:2]=0
        correls[:,(correls.shape[1]-2):(correls.shape[1])]=0
        goodCorrels = correls > minCorr
        dists = np.sqrt(xShifts**2 + yShifts**2)
        stdDist = np.std(dists,axis=1) #deviation of displacements per frame
        goodStdDists = stdDist < self.maxStdDist
        goodDists = dists < self.maxDist
        goodPoints = np.logical_and(goodCorrels,goodDists)
        #require at least 10 good points per frame
        goodFrames = goodPoints.sum(axis=1) > 9
        goodFrames = np.logical_and(goodFrames,goodStdDists)
        output = []
        goodFrame_list = np.where(goodFrames)[0].tolist()
        badFrames = np.array(frameids)[~goodFrames].tolist()
        if len(badFrames)>0:
            logger.info('Removing frames {} for bad strip alignments'.format(badFrames))
            self.data.delete_frame_by_id(badFrames)
        else:
            logger.info('All frames have good strip alignments')
        for iFrame in goodFrame_list:
            #work through the list of good frames
            displaceX = xShifts[iFrame,goodPoints[iFrame,:]]
            displaceY = yShifts[iFrame,goodPoints[iFrame,:]]
            times = self.timeTics[goodPoints[iFrame,:]]
            #going to apply a moving average of size 5, need to padd the sequences with 2 extra values at each end
            #while we are at it, padd with an extra value for time = 0 and time = maxTime for the spline fitting
            nrep = 3
            displaceX = np.insert(displaceX, 0, [displaceX[0]] * nrep )
            displaceX = np.insert(displaceX, len(displaceX), [displaceX[-1]] * nrep)
            displaceY = np.insert(displaceY, 0, [displaceY[0]] * nrep)
            displaceY = np.insert(displaceY, len(displaceY), [displaceY[-1]] * nrep)
            times = np.insert(times,[0,-1],[0,self.times.max()])
            displaceX = self._smooth(displaceX)
            displaceY = self._smooth(displaceY)
            # resample at a higher frequency before fitting the splines
            freqFactor = 20
            displaceX = np.interp(np.linspace(0,len(displaceX),num=len(displaceX)*freqFactor),
                                  np.linspace(0,len(displaceX),num=len(displaceX)),
                                  displaceX)
            displaceY = np.interp(np.linspace(0,len(displaceY),num=len(displaceY)*freqFactor),
                                  np.linspace(0,len(displaceY),num=len(displaceY)),
                                  displaceY)
            times = np.interp(np.linspace(0,len(times),num=len(times)*freqFactor),
                              np.linspace(0,len(times),num=len(times)),
                              times)
            # fit smoothing splines: displacement as a function of time
            displaceX = scipy.interpolate.UnivariateSpline(times,
                                                           displaceX)
            displaceY = scipy.interpolate.UnivariateSpline(times,
                                                           displaceY)
            output.append({'frameid':frameids[iFrame],
                           'ppx':displaceX,
                           'ppy':displaceY})
        return output
def _smooth(self,seq,n=5):
"""Apply a moving average to an input sequence
returns a smoothed sequence of length len(seq) - n + 1"""
ret = np.cumsum(seq, dtype=float)
ret[n:] = ret[n:] - ret[:-n]
return ret[n - 1:] / n
def _get_coords(self,numrows,numcols):
"""return the timing of pixels in a padded frame in terms of the original samples
"""
pixPerLineOrig = np.float(1000)
#pixPerLineOrig = np.float(5) #debug code
linesPerFrameOrig = np.float(1024)
sampRate = np.float(23) #Hz
sampTime = np.float(1 / (pixPerLineOrig * linesPerFrameOrig * sampRate)) #sample time per pixel
#sampTime = 1 #debug code
lineTime = np.float(pixPerLineOrig * sampTime)
frameTime = np.float(numrows * lineTime)
#i think this is an error in sburns code
#rows = np.array(range(numrows))
#cols = np.array(range(numcols))
rows = np.array(range(numrows))
cols = np.array(range(numcols))
coltime = np.atleast_2d(cols * sampTime)
rowtime = np.atleast_2d(rows * lineTime)
[rowlocs,collocs] = np.meshgrid(rows,cols);
[rowlocs,collocs]= [a.T for a in [rowlocs, collocs]]
times = rowtime + coltime.T
times = times.T
return {'rowlocs':rowlocs,'collocs':collocs,'times':times,'FrameTimeIncrement':frameTime}
def fast_align_parallel(self,alignmentSplines):
newCoords = self._get_coords(self.data.frameHeight,
self.data.frameWidth)
alignedData = FastAlignParallel.fast_align_parallel(self.data,
alignmentSplines,
newCoords)
return alignedData
    def fast_align(self,alignmentSplines):
        """Apply per-frame alignment splines to build a de-warped stack.

        alignmentSplines: list of dicts from _make_valid_points(), each
        containing 'frameid' plus 'ppx'/'ppy' spline callables mapping
        pixel acquisition time to x/y displacement.

        Returns a new FrameStack padded by 30 pixels on every side; pixels
        that receive no source data are left at the sentinel value -1.
        """
        outputmargin = 30
        nrows = self.data.frameHeight
        ncols = self.data.frameWidth
        nframes = len(alignmentSplines)
        templateFrameIdx = self.data.get_idx_from_id(self.data.templateFrameId)
        outputSizeRows = nrows + 2*outputmargin
        outputSizeCols = ncols + 2*outputmargin
        # output stack initialized to -1 (sentinel for "no data")
        outstack = np.ones((nframes + 1, outputSizeRows, outputSizeCols))
        outstack = outstack * -1
        #insert the template frame unchanged in first position
        outstack[templateFrameIdx,
                 outputmargin:outputmargin+nrows,
                 outputmargin:outputmargin+ncols] = self.data.templateFrame
        interiorMask = outstack[templateFrameIdx,:,:] > -0.001 #there has to be a better way to do this.
        mask = self.data.templateFrame > 0 #this mask is the size of the rough aligned images,true over the region of the template image, we will use it to ensure we only sample valid points
        newCoords = self._get_coords(self.data.frameHeight,
                                     self.data.frameWidth)
        times = newCoords['times']
        times = times.ravel()
        for frame in alignmentSplines:
            frameIdx = self.data.get_idx_from_id(frame['frameid'])
            logging.debug('Aligning frame{}'.format(frame['frameid']))
            # mask the source frame; zero pixels become -1 sentinels
            srcImg = self.data.get_frame_by_id(frame['frameid']) * mask
            srcImg = srcImg + ((srcImg==0) * -1)
            tmpFrame = np.ones(interiorMask.shape) * -1
            tmpFrame = tmpFrame + interiorMask
            # evaluate the displacement splines at every pixel's sample time
            newx = frame['ppx'](times).reshape(nrows,ncols)
            newy = frame['ppy'](times).reshape(nrows,ncols)
            finalCols = np.int64(np.round(newCoords['collocs'] + newx + outputmargin))
            finalRows = np.int64(np.round(newCoords['rowlocs'] + newy + outputmargin))
            # keep only displaced pixels that land inside the output frame
            mask2 = mask * (finalRows > 0.5)
            mask2 = mask2 * (finalRows < outputSizeRows)
            mask2 = mask2 * (finalCols > 0.5)
            mask2 = mask2 *(finalCols < outputSizeCols)
            validRows, validCols = np.where(mask2)
            for idx in range(len(validRows)):
                #for each valid pixel, take it from the source image and place it in the new location
                tmpFrame[finalRows[validRows[idx],validCols[idx]],
                         finalCols[validRows[idx],validCols[idx]]] = srcImg[validRows[idx],
                                                                            validCols[idx]]
            outstack[frameIdx,:,:] = tmpFrame
        return FrameStack.FrameStack(outstack,
                                     frameIds = self.data.frameIds,
                                     templateFrame = self.data.templateFrameId)
def create_average_frame(self,type='mean'):
assert type in ['lucky','mean']
#check to see if an error has occured before this
if self.data is None:
logger.debug('')
if type == 'lucky':
#creating a lucky average
if self.data.frameCount > 20:
self.currentAverageFrame = StackTools.compute_lucky_image(self.data)
else:
self.currentAverageFrame = None
logger.warning('Too few frames to create lucky average')
else:
self.currentAverageFrame = self.data.mean(axis=0)
    def create_stdev_frame(self):
        """Compute the per-pixel standard-deviation image of the current
        stack and store it in self.currentStdevFrame."""
        #check to see if an error has occured before this
        if not self.b_continue:
            return
        self.currentStdevFrame = ComputeStdevImage.compute_stdev_image(self.data)
    def fixInterlace(self):
        """Correct interlacing artefacts in the current stack.

        Delegates to StackTools.interlaceStack; the return value is
        ignored, so presumably the stack is modified in place -- confirm.
        """
        StackTools.interlaceStack(self.data)
|
|
# Copyright 2017 The PDFium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Classes that draw conclusions out of a comparison and represent them."""
from collections import Counter
# ANSI terminal escape sequences; calling .format(value) on one of these
# wraps the value in the corresponding color (reset at the end).
FORMAT_RED = '\033[01;31m{0}\033[00m'
FORMAT_GREEN = '\033[01;32m{0}\033[00m'
FORMAT_MAGENTA = '\033[01;35m{0}\033[00m'
FORMAT_CYAN = '\033[01;36m{0}\033[00m'
FORMAT_NORMAL = '{0}'

# Possible ratings for a single test-case comparison.
RATING_FAILURE = 'failure'
RATING_REGRESSION = 'regression'
RATING_IMPROVEMENT = 'improvement'
RATING_NO_CHANGE = 'no_change'
RATING_SMALL_CHANGE = 'small_change'

# All ratings, in the order they are reported in summaries.
RATINGS = [
    RATING_FAILURE, RATING_REGRESSION, RATING_IMPROVEMENT, RATING_NO_CHANGE,
    RATING_SMALL_CHANGE
]

# Color used when printing each rating.
RATING_TO_COLOR = {
    RATING_FAILURE: FORMAT_MAGENTA,
    RATING_REGRESSION: FORMAT_RED,
    RATING_IMPROVEMENT: FORMAT_CYAN,
    RATING_NO_CHANGE: FORMAT_GREEN,
    RATING_SMALL_CHANGE: FORMAT_NORMAL,
}
class ComparisonConclusions(object):
  """Consolidates all conclusions drawn from one before/after comparison.

  Starts empty; each test case is fed in through ProcessCase() with its two
  time measurements and receives one of the following ratings:
    "failure"       one or both runs did not produce a measurement.
    "regression"    significantly slower than before.
    "improvement"   significantly faster than before.
    "no_change"     exactly the same time as before.
    "small_change"  changed, but within the significance threshold.
  """

  def __init__(self, threshold_significant):
    """Creates an empty ComparisonConclusions.

    Args:
      threshold_significant: Float tolerance beyond which a change in a
        measurement counts as significant. It is interpreted
        multiplicatively, not additively: a threshold of 1.0 flags cases
        over 100% slower (> 200% of the previous time) or over 100%
        faster (< 50% of the previous time). For example:
          threshold_significant 0.02 -> 98.04% to 102% is not significant
          threshold_significant 0.1  -> 90.9% to 110% is not significant
          threshold_significant 0.25 -> 80% to 125% is not significant
          threshold_significant 1    -> 50% to 200% is not significant
          threshold_significant 4    -> 20% to 500% is not significant
    """
    self.threshold_significant = threshold_significant
    # Negative counterpart of the threshold, e.g. +100% pairs with -50%.
    self.threshold_significant_negative = (1 / (1 + threshold_significant)) - 1
    self.params = {'threshold': threshold_significant}
    self.summary = ComparisonSummary()
    self.case_results = {}

  def ProcessCase(self, case_name, before, after):
    """Records and rates the measurements of one test case.

    Args:
      case_name: String identifying the case.
      before: Measurement for the "before" version of the code.
      after: Measurement for the "after" version of the code.
    """
    # Zeros denote failed runs; store them as None so they serialize as
    # "null" in the json output.
    before = before or None
    after = after or None
    if before is None or after is None:
      ratio = None
      rating = RATING_FAILURE
    else:
      ratio = (float(after) / before) - 1.0
      if ratio > self.threshold_significant:
        rating = RATING_REGRESSION
      elif ratio < self.threshold_significant_negative:
        rating = RATING_IMPROVEMENT
      elif ratio == 0:
        rating = RATING_NO_CHANGE
      else:
        rating = RATING_SMALL_CHANGE
    case_result = CaseResult(case_name, before, after, ratio, rating)
    self.summary.ProcessCaseResult(case_result)
    self.case_results[case_name] = case_result

  def GetSummary(self):
    """Returns the ComparisonSummary with the consolidated totals."""
    return self.summary

  def GetCaseResults(self):
    """Returns a dict mapping each test case identifier to its CaseResult."""
    return self.case_results

  def GetOutputDict(self):
    """Serializes all conclusions drawn into a plain dict.

    Returns:
      A serializable dict of the form:
        {"version": 1,
         "params": {"threshold": <float>},
         "summary": {"total": ..., "failure": ..., "regression": ...,
                     "improvement": ..., "no_change": ..., "small_change": ...},
         "comparison_by_case": {
             <case name>: {"before": <measurement or None>,
                           "after": <measurement or None>,
                           "ratio": <float or None>,
                           "rating": <rating string>},
             ...}}
    """
    comparison_by_case = {
        cr.case_name.decode('utf-8'): cr.GetOutputDict()
        for cr in self.GetCaseResults().values()
    }
    return {
        'version': 1,
        'params': {'threshold': self.threshold_significant},
        'summary': self.summary.GetOutputDict(),
        'comparison_by_case': comparison_by_case,
    }
class ComparisonSummary(object):
  """Aggregated per-rating totals for a whole comparison."""

  def __init__(self):
    # Maps rating name -> number of cases that received that rating.
    self.rating_counter = Counter()

  def ProcessCaseResult(self, case_result):
    """Accumulates one CaseResult into the totals."""
    self.rating_counter[case_result.rating] += 1

  def GetTotal(self):
    """Gets the number of test cases processed."""
    return sum(self.rating_counter.values())

  def GetCount(self, rating):
    """Gets the number of test cases processed with a given rating."""
    return self.rating_counter[rating]

  def GetOutputDict(self):
    """Returns a dict that can be serialized with all the totals."""
    counts = {rating: self.GetCount(rating) for rating in RATINGS}
    counts['total'] = self.GetTotal()
    return counts
class CaseResult(object):
  """Comparison outcome for one individual test case."""

  def __init__(self, case_name, before, after, ratio, rating):
    """Stores the comparison data for a single test case.

    Args:
      case_name: String identifying the case.
      before: Measurement for the "before" version of the code.
      after: Measurement for the "after" version of the code.
      ratio: Difference between |after| and |before| as a fraction of |before|.
      rating: Rating for this test case.
    """
    self.case_name = case_name
    self.before = before
    self.after = after
    self.ratio = ratio
    self.rating = rating

  def GetOutputDict(self):
    """Returns the case's conclusions as a serializable dict."""
    return {
        'before': self.before,
        'after': self.after,
        'ratio': self.ratio,
        'rating': self.rating,
    }
def PrintConclusionsDictHumanReadable(conclusions_dict, colored, key=None):
  """Prints a conclusions dict in a human-readable way.

  Python 2 module (print statements, dict.iteritems).

  Args:
    conclusions_dict: Dict to print (shape produced by
        ComparisonConclusions.GetOutputDict()).
    colored: Whether to color the output to highlight significant changes.
    key: String with the CaseResult dictionary key to sort the cases.
  """
  # Print header
  print '=' * 80
  print '{0:>11s} {1:>15s} {2}'.format('% Change', 'Time after', 'Test case')
  print '-' * 80
  color = FORMAT_NORMAL
  # Print cases, optionally sorted by one of the CaseResult fields
  if key is not None:
    case_pairs = sorted(
        conclusions_dict['comparison_by_case'].iteritems(),
        key=lambda kv: kv[1][key])
  else:
    case_pairs = sorted(conclusions_dict['comparison_by_case'].iteritems())
  for case_name, case_dict in case_pairs:
    if colored:
      color = RATING_TO_COLOR[case_dict['rating']]
    # Failed cases have no ratio / time to format
    if case_dict['rating'] == RATING_FAILURE:
      print u'{} to measure time for {}'.format(
          color.format('Failed'), case_name).encode('utf-8')
      continue
    print u'{0} {1:15,d} {2}'.format(
        color.format('{:+11.4%}'.format(case_dict['ratio'])),
        case_dict['after'], case_name).encode('utf-8')
  # Print totals, coloring each count by whether it is "good news"
  totals = conclusions_dict['summary']
  print '=' * 80
  print 'Test cases run: %d' % totals['total']
  if colored:
    color = FORMAT_MAGENTA if totals[RATING_FAILURE] else FORMAT_GREEN
  print('Failed to measure: %s' % color.format(totals[RATING_FAILURE]))
  if colored:
    color = FORMAT_RED if totals[RATING_REGRESSION] else FORMAT_GREEN
  print('Regressions: %s' % color.format(totals[RATING_REGRESSION]))
  if colored:
    color = FORMAT_CYAN if totals[RATING_IMPROVEMENT] else FORMAT_GREEN
  print('Improvements: %s' % color.format(totals[RATING_IMPROVEMENT]))
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import AffectedMoveResource
from ._models_py3 import AutomaticResolutionProperties
from ._models_py3 import AvailabilitySetResourceSettings
from ._models_py3 import AzureResourceReference
from ._models_py3 import CloudErrorBody
from ._models_py3 import CommitRequest
from ._models_py3 import DiscardRequest
from ._models_py3 import Display
from ._models_py3 import Identity
from ._models_py3 import JobStatus
from ._models_py3 import LBBackendAddressPoolResourceSettings
from ._models_py3 import LBFrontendIPConfigurationResourceSettings
from ._models_py3 import LoadBalancerBackendAddressPoolReference
from ._models_py3 import LoadBalancerNatRuleReference
from ._models_py3 import LoadBalancerResourceSettings
from ._models_py3 import ManualResolutionProperties
from ._models_py3 import MoveCollection
from ._models_py3 import MoveCollectionProperties
from ._models_py3 import MoveCollectionResultList
from ._models_py3 import MoveErrorInfo
from ._models_py3 import MoveResource
from ._models_py3 import MoveResourceCollection
from ._models_py3 import MoveResourceDependency
from ._models_py3 import MoveResourceDependencyOverride
from ._models_py3 import MoveResourceError
from ._models_py3 import MoveResourceErrorBody
from ._models_py3 import MoveResourceFilter
from ._models_py3 import MoveResourceFilterProperties
from ._models_py3 import MoveResourceProperties
from ._models_py3 import MoveResourcePropertiesErrors
from ._models_py3 import MoveResourcePropertiesMoveStatus
from ._models_py3 import MoveResourcePropertiesSourceResourceSettings
from ._models_py3 import MoveResourceStatus
from ._models_py3 import NetworkInterfaceResourceSettings
from ._models_py3 import NetworkSecurityGroupResourceSettings
from ._models_py3 import NicIpConfigurationResourceSettings
from ._models_py3 import NsgSecurityRule
from ._models_py3 import OperationErrorAdditionalInfo
from ._models_py3 import OperationStatus
from ._models_py3 import OperationStatusError
from ._models_py3 import OperationsDiscovery
from ._models_py3 import OperationsDiscoveryCollection
from ._models_py3 import PrepareRequest
from ._models_py3 import ProxyResourceReference
from ._models_py3 import PublicIPAddressResourceSettings
from ._models_py3 import ResourceGroupResourceSettings
from ._models_py3 import ResourceMoveRequest
from ._models_py3 import ResourceSettings
from ._models_py3 import SqlDatabaseResourceSettings
from ._models_py3 import SqlElasticPoolResourceSettings
from ._models_py3 import SqlServerResourceSettings
from ._models_py3 import SubnetReference
from ._models_py3 import SubnetResourceSettings
from ._models_py3 import UnresolvedDependency
from ._models_py3 import UnresolvedDependencyCollection
from ._models_py3 import UpdateMoveCollectionRequest
from ._models_py3 import VirtualMachineResourceSettings
from ._models_py3 import VirtualNetworkResourceSettings
except (SyntaxError, ImportError):
from ._models import AffectedMoveResource # type: ignore
from ._models import AutomaticResolutionProperties # type: ignore
from ._models import AvailabilitySetResourceSettings # type: ignore
from ._models import AzureResourceReference # type: ignore
from ._models import CloudErrorBody # type: ignore
from ._models import CommitRequest # type: ignore
from ._models import DiscardRequest # type: ignore
from ._models import Display # type: ignore
from ._models import Identity # type: ignore
from ._models import JobStatus # type: ignore
from ._models import LBBackendAddressPoolResourceSettings # type: ignore
from ._models import LBFrontendIPConfigurationResourceSettings # type: ignore
from ._models import LoadBalancerBackendAddressPoolReference # type: ignore
from ._models import LoadBalancerNatRuleReference # type: ignore
from ._models import LoadBalancerResourceSettings # type: ignore
from ._models import ManualResolutionProperties # type: ignore
from ._models import MoveCollection # type: ignore
from ._models import MoveCollectionProperties # type: ignore
from ._models import MoveCollectionResultList # type: ignore
from ._models import MoveErrorInfo # type: ignore
from ._models import MoveResource # type: ignore
from ._models import MoveResourceCollection # type: ignore
from ._models import MoveResourceDependency # type: ignore
from ._models import MoveResourceDependencyOverride # type: ignore
from ._models import MoveResourceError # type: ignore
from ._models import MoveResourceErrorBody # type: ignore
from ._models import MoveResourceFilter # type: ignore
from ._models import MoveResourceFilterProperties # type: ignore
from ._models import MoveResourceProperties # type: ignore
from ._models import MoveResourcePropertiesErrors # type: ignore
from ._models import MoveResourcePropertiesMoveStatus # type: ignore
from ._models import MoveResourcePropertiesSourceResourceSettings # type: ignore
from ._models import MoveResourceStatus # type: ignore
from ._models import NetworkInterfaceResourceSettings # type: ignore
from ._models import NetworkSecurityGroupResourceSettings # type: ignore
from ._models import NicIpConfigurationResourceSettings # type: ignore
from ._models import NsgSecurityRule # type: ignore
from ._models import OperationErrorAdditionalInfo # type: ignore
from ._models import OperationStatus # type: ignore
from ._models import OperationStatusError # type: ignore
from ._models import OperationsDiscovery # type: ignore
from ._models import OperationsDiscoveryCollection # type: ignore
from ._models import PrepareRequest # type: ignore
from ._models import ProxyResourceReference # type: ignore
from ._models import PublicIPAddressResourceSettings # type: ignore
from ._models import ResourceGroupResourceSettings # type: ignore
from ._models import ResourceMoveRequest # type: ignore
from ._models import ResourceSettings # type: ignore
from ._models import SqlDatabaseResourceSettings # type: ignore
from ._models import SqlElasticPoolResourceSettings # type: ignore
from ._models import SqlServerResourceSettings # type: ignore
from ._models import SubnetReference # type: ignore
from ._models import SubnetResourceSettings # type: ignore
from ._models import UnresolvedDependency # type: ignore
from ._models import UnresolvedDependencyCollection # type: ignore
from ._models import UpdateMoveCollectionRequest # type: ignore
from ._models import VirtualMachineResourceSettings # type: ignore
from ._models import VirtualNetworkResourceSettings # type: ignore
from ._region_move_service_api_enums import (
DependencyType,
MoveResourceInputType,
MoveState,
ProvisioningState,
ResolutionType,
ResourceIdentityType,
TargetAvailabilityZone,
ZoneRedundant,
)
# Public API of this models package: every model class re-exported from
# ._models above, followed by the enums from ._region_move_service_api_enums.
__all__ = [
    'AffectedMoveResource',
    'AutomaticResolutionProperties',
    'AvailabilitySetResourceSettings',
    'AzureResourceReference',
    'CloudErrorBody',
    'CommitRequest',
    'DiscardRequest',
    'Display',
    'Identity',
    'JobStatus',
    'LBBackendAddressPoolResourceSettings',
    'LBFrontendIPConfigurationResourceSettings',
    'LoadBalancerBackendAddressPoolReference',
    'LoadBalancerNatRuleReference',
    'LoadBalancerResourceSettings',
    'ManualResolutionProperties',
    'MoveCollection',
    'MoveCollectionProperties',
    'MoveCollectionResultList',
    'MoveErrorInfo',
    'MoveResource',
    'MoveResourceCollection',
    'MoveResourceDependency',
    'MoveResourceDependencyOverride',
    'MoveResourceError',
    'MoveResourceErrorBody',
    'MoveResourceFilter',
    'MoveResourceFilterProperties',
    'MoveResourceProperties',
    'MoveResourcePropertiesErrors',
    'MoveResourcePropertiesMoveStatus',
    'MoveResourcePropertiesSourceResourceSettings',
    'MoveResourceStatus',
    'NetworkInterfaceResourceSettings',
    'NetworkSecurityGroupResourceSettings',
    'NicIpConfigurationResourceSettings',
    'NsgSecurityRule',
    'OperationErrorAdditionalInfo',
    'OperationStatus',
    'OperationStatusError',
    'OperationsDiscovery',
    'OperationsDiscoveryCollection',
    'PrepareRequest',
    'ProxyResourceReference',
    'PublicIPAddressResourceSettings',
    'ResourceGroupResourceSettings',
    'ResourceMoveRequest',
    'ResourceSettings',
    'SqlDatabaseResourceSettings',
    'SqlElasticPoolResourceSettings',
    'SqlServerResourceSettings',
    'SubnetReference',
    'SubnetResourceSettings',
    'UnresolvedDependency',
    'UnresolvedDependencyCollection',
    'UpdateMoveCollectionRequest',
    'VirtualMachineResourceSettings',
    'VirtualNetworkResourceSettings',
    # Enums re-exported from ._region_move_service_api_enums.
    'DependencyType',
    'MoveResourceInputType',
    'MoveState',
    'ProvisioningState',
    'ResolutionType',
    'ResourceIdentityType',
    'TargetAvailabilityZone',
    'ZoneRedundant',
]
# ==========================================================================
import os
import logging, logging.handlers
# Live site settings (others should override in local.py)
ROOT_PATH = os.path.dirname(__file__)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
TESTING = False # Override this in test_settings.py
# Database connection (legacy single-database Django settings style).
DATABASE_ENGINE = 'mysql'
DATABASE_NAME = 'bookworm'
DATABASE_USER = 'threepress'
DATABASE_PASSWORD = '3press'  # NOTE(review): credential committed in source; should be overridden in local.py
DATABASE_HOST = ''
DATABASE_PORT = ''
SITE_ID = 1  # NOTE(review): re-assigned to 2 further down in this module; this value never takes effect
# base url: leave '' if deployed at webserver root. don't forget the trailing slash
# Example:
# BASE_URL = 'apps/bookworm/'
BASE_URL = ''
# Django settings for bookworm project.
ADMINS = (
    ('Bookworm', 'bookworm@oreilly.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(ROOT_PATH, 'library', 'storage')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = '/static/'
ORM_MEDIA_URL = '/orm-media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''  # NOTE(review): empty; a real secret must be set in local.py before deploying
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
)
# Context processors: Django builtins plus the project's nav/mobile/profile
# extras and the search app's processor.
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    "bookworm.library.context_processors.nav",
    "bookworm.library.context_processors.mobile",
    "bookworm.library.context_processors.local_settings",
    "bookworm.library.context_processors.profile",
    "bookworm.search.context_processors.search"
)
# Middleware order matters: UpdateCacheMiddleware first and
# FetchFromCacheMiddleware near the end bracket the cache layer.
MIDDLEWARE_CLASSES = (
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'bookworm.middleware.Language',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.http.ConditionalGetMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django_authopenid.middleware.OpenIDMiddleware',
    'django.middleware.http.SetRemoteAddrFromForwardedFor',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
    'bookworm.minidetector.Middleware',
    'bookworm.middleware.Mobile',
    'bookworm.api.middleware.SSLRedirect',
    'bookworm.api.middleware.APIKeyCheck',
)
def ugettext(s):
    """No-op translation marker.

    Returns *s* unchanged; it only tags the literals below (LANGUAGES,
    LOGIN_URL) for later translation without importing Django's i18n
    machinery while settings are loading.  A ``def`` is used instead of
    the original ``lambda`` assignment (PEP 8 E731).
    """
    return s
# Only allow the list of languages for which we have translations
LANGUAGES = (
    ('de', ugettext('German')),
    ('en', ugettext('English')),
    ('da', ugettext('Danish')),
    ('fi', ugettext('Finnish')),
    ('it', ugettext('Italian')),
    ('es', ugettext('Spanish')),
#    ('zh-tw', ugettext('Simplified Chinese')),
#    ('he', ugettext('Hebrew')),
)
ROOT_URLCONF = 'urls'
# Template search order: most specific (auth/host) first, then the general
# per-app template directories.
TEMPLATE_DIRS = (
    '%s/library/templates/auth' % ROOT_PATH,
    '%s/library/templates/host' % ROOT_PATH,
    '%s/library/templates' % ROOT_PATH,
    '%s/library/templates/includes' % ROOT_PATH,
    '%s/search/templates' % ROOT_PATH,
    '%s/librarything/templates' % ROOT_PATH,
    '%s/orm/templates' % ROOT_PATH,
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django.contrib.flatpages',
    'django.contrib.sitemaps',
    'django.contrib.humanize',
    'django_authopenid',
    'bookworm.minidetector',
    'bookworm.library',
    'bookworm.search',
    'bookworm.librarything',
    'bookworm.mobile',
    'bookworm.api',
)
# Model holding per-user preferences for django.contrib.auth profiles.
AUTH_PROFILE_MODULE = "library.userpref"
# NOTE(review): this re-defines the identical ugettext already bound earlier
# in this module; harmless, but one of the two definitions is redundant.
def ugettext(s):
    """No-op translation marker (duplicate of the earlier definition).

    Written as a ``def`` rather than a lambda assignment (PEP 8 E731).
    """
    return s
# Login URL assembled from the deployment prefix plus translatable segments.
LOGIN_URL = '/%s%s%s' % (BASE_URL, ugettext('account/'), ugettext('signin/'))
SITE_ID = 2  # NOTE(review): overrides the SITE_ID = 1 set earlier in this module
# Defaults for paginated library listings.
DEFAULT_NUM_RESULTS = 20
DEFAULT_START_PAGE = 1
DEFAULT_ORDER_FIELD = 'created_time'
DEFAULT_ORDER_DIRECTION = 'desc'
VALID_ORDER_DIRECTIONS = ('asc', 'desc')
VALID_ORDER_FIELDS = ('created_time', 'title', 'orderable_author')
# Search database info
SEARCH_ROOT = os.path.join(ROOT_PATH, 'search')
# Are we running with mobile settings on?
MOBILE = False
FORCE_SCRIPT_NAME = ''
# Domain which to redirect requests that are coming from a mobile device
MOBILE_HOST = 'http://m.bookworm.oreilly.com/'
# Hosting credit
HOSTING_CREDIT = "O'Reilly Media"
HOSTING_CREDIT_URL = 'http://oreilly.com/'
# Email reply-to address
REPLYTO_EMAIL = 'donotreply@oreilly.com'
DEFAULT_FROM_EMAIL = REPLYTO_EMAIL
# The admin address that's displayed on the site in help pages
DISPLAY_ADMIN_EMAIL = 'bookworm@oreilly.com'
ADMIN_EMAIL = DISPLAY_ADMIN_EMAIL
# Set up logging
LOG_DIR = '%s/log/' % ROOT_PATH
LOG_NAME = 'bookworm.log'
TEST_DATABASE_CHARSET = 'utf8'
TEST_DATABASE_COLLATION='utf8_unicode_ci'
SEARCH_ROOT = os.path.join(ROOT_PATH, 'search', 'dbs')  # NOTE(review): overrides SEARCH_ROOT defined above; only this value takes effect
CACHE_BACKEND = 'file:///tmp/bookworm/django_cache'
CACHE_MIDDLEWARE_ANONYMOUS_ONLY = True
CACHE_TEMPLATE_TIMEOUT = 60 * 60 * 1  # one hour, in seconds
DATE_FORMAT = "l, N j Y"
LIBRARYTHING_KEY = ''  # NOTE(review): empty; set a real key in local.py if LibraryThing is used
# Access time, filename/function#line-number message
log_formatter = logging.Formatter(
    "[%(asctime)s %(filename)s/%(funcName)s#%(lineno)d] %(message)s")
# Make sure the log directory exists before the handler opens its file:
# TimedRotatingFileHandler raises at import time on a fresh deployment
# if LOG_DIR is missing.  (Guarded form kept for Python 2 compatibility.)
if not os.path.isdir(LOG_DIR):
    os.makedirs(LOG_DIR)
# This should roll logs over at midnight and date-stamp them appropriately
handler = logging.handlers.TimedRotatingFileHandler(
    filename=os.path.join(LOG_DIR, LOG_NAME), when='midnight')
handler.setFormatter(log_formatter)
# Configure the root logger so all apps inherit this handler.
log = logging.getLogger('')
log.setLevel(logging.INFO)
log.addHandler(handler)
# If set, the templates will load jQuery locally instead of from Google
OFFLINE = False
# Google Analytics key
ANALYTICS_KEY = 'UA-162955-4'
# The email addresses of the users who should receive an error email
# (should be a list)
ERROR_EMAIL_RECIPIENTS = (ADMINS[0][1], )
# The URL for the epubcheck webservice
EPUBCHECK_WEBSERVICE = 'http://threepress.org/epubcheck-service/'
# Apps to test
TEST_APPS = ('library',)
TESTING = False  # NOTE(review): duplicates the TESTING = False set near the top of this module
# Feedbooks OPDS feed
FEEDBOOKS_OPDS_FEED = 'http://feedbooks.com/books/top.atom'
# Always upload files to the filesystem
FILE_UPLOAD_HANDLERS = ("django.core.files.uploadhandler.TemporaryFileUploadHandler",)
# Maximum number of CSS files to attempt to display at once
MAX_CSS_FILES = 10
# API key field name
API_FIELD_NAME = 'api_key'
# Hostname (no trailing slash)
HOSTNAME = 'http://bookworm.oreilly.com'
# Secure hostname (no trailing slash)
SECURE_HOSTNAME = 'https://bookworm.oreilly.com'
# XSL stylesheets used to convert DTBook content to XHTML.
XSLT_DIR = os.path.join(ROOT_PATH, 'library', 'xsl')
DTBOOK2XHTML = os.path.join(XSLT_DIR, 'dtbook2xhtml.xsl')
# Don't ever try to call epubcheck -- useful in testing offline
SKIP_EPUBCHECK = False
CUSTOMER_SERVICE_URL = 'http://getsatisfaction.com/oreilly'
CUSTOMER_SERVICE_NAME = 'Get Satisfaction'
# Deployment-specific overrides: a sibling local.py may replace any of the
# settings above.  Its absence (e.g. in development) is deliberately ignored.
try:
    from local import *
except ImportError:
    pass
# ==========================================================================
##########################################################################
#
# Copyright 2008-2009 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################
"""d3d.h"""
from winapi import *
from ddraw import *
from d3dtypes import *
from d3dcaps import *
def OutPointer(type):
    """Shorthand: wrap *type* in a Pointer and mark it as an output
    argument named "out"."""
    pointee = Pointer(type)
    return Out(pointee, "out")
# Flag sets used to decode DWORD arguments symbolically in traces.
# Navigation flags for IDirect3DViewport::NextLight-style enumeration.
d3dnextFlags = Flags(DWORD, [
    "D3DNEXT_NEXT",
    "D3DNEXT_HEAD",
    "D3DNEXT_TAIL",
])
direct3dFlags = Flags(DWORD, [
    "DIRECT3D_VERSION",
])
# DrawPrimitive behaviour flags (D3DDP_*).
d3ddpFlags = Flags(DWORD, [
    "D3DDP_WAIT",
    "D3DDP_OUTOFORDER",
    "D3DDP_DONOTCLIP",
    "D3DDP_DONOTUPDATEEXTENTS",
    "D3DDP_DONOTLIGHT",
])
# Direct3D return codes, decoded as a symbolic enum rather than a raw DWORD.
# NOTE(review): this rebinds any HRESULT brought in via `from winapi import *`;
# presumably intentional so D3D-specific codes decode by name -- confirm.
HRESULT = Enum("HRESULT", [
    "D3D_OK",
    "D3DERR_BADMAJORVERSION",
    "D3DERR_BADMINORVERSION",
    "D3DERR_INVALID_DEVICE",
    "D3DERR_INITFAILED",
    "D3DERR_DEVICEAGGREGATED",
    "D3DERR_EXECUTE_CREATE_FAILED",
    "D3DERR_EXECUTE_DESTROY_FAILED",
    "D3DERR_EXECUTE_LOCK_FAILED",
    "D3DERR_EXECUTE_UNLOCK_FAILED",
    "D3DERR_EXECUTE_LOCKED",
    "D3DERR_EXECUTE_NOT_LOCKED",
    "D3DERR_EXECUTE_FAILED",
    "D3DERR_EXECUTE_CLIPPED_FAILED",
    "D3DERR_TEXTURE_NO_SUPPORT",
    "D3DERR_TEXTURE_CREATE_FAILED",
    "D3DERR_TEXTURE_DESTROY_FAILED",
    "D3DERR_TEXTURE_LOCK_FAILED",
    "D3DERR_TEXTURE_UNLOCK_FAILED",
    "D3DERR_TEXTURE_LOAD_FAILED",
    "D3DERR_TEXTURE_SWAP_FAILED",
    "D3DERR_TEXTURE_LOCKED",
    "D3DERR_TEXTURE_NOT_LOCKED",
    "D3DERR_TEXTURE_GETSURF_FAILED",
    "D3DERR_MATRIX_CREATE_FAILED",
    "D3DERR_MATRIX_DESTROY_FAILED",
    "D3DERR_MATRIX_SETDATA_FAILED",
    "D3DERR_MATRIX_GETDATA_FAILED",
    "D3DERR_SETVIEWPORTDATA_FAILED",
    "D3DERR_INVALIDCURRENTVIEWPORT",
    "D3DERR_INVALIDPRIMITIVETYPE",
    "D3DERR_INVALIDVERTEXTYPE",
    "D3DERR_TEXTURE_BADSIZE",
    "D3DERR_INVALIDRAMPTEXTURE",
    "D3DERR_MATERIAL_CREATE_FAILED",
    "D3DERR_MATERIAL_DESTROY_FAILED",
    "D3DERR_MATERIAL_SETDATA_FAILED",
    "D3DERR_MATERIAL_GETDATA_FAILED",
    "D3DERR_INVALIDPALETTE",
    "D3DERR_ZBUFF_NEEDS_SYSTEMMEMORY",
    "D3DERR_ZBUFF_NEEDS_VIDEOMEMORY",
    "D3DERR_SURFACENOTINVIDMEM",
    "D3DERR_LIGHT_SET_FAILED",
    "D3DERR_LIGHTHASVIEWPORT",
    "D3DERR_LIGHTNOTINTHISVIEWPORT",
    "D3DERR_SCENE_IN_SCENE",
    "D3DERR_SCENE_NOT_IN_SCENE",
    "D3DERR_SCENE_BEGIN_FAILED",
    "D3DERR_SCENE_END_FAILED",
    "D3DERR_INBEGIN",
    "D3DERR_NOTINBEGIN",
    "D3DERR_NOVIEWPORTS",
    "D3DERR_VIEWPORTDATANOTSET",
    "D3DERR_VIEWPORTHASNODEVICE",
    "D3DERR_NOCURRENTVIEWPORT",
    "D3DERR_INVALIDVERTEXFORMAT",
    "D3DERR_COLORKEYATTACHED",
    "D3DERR_VERTEXBUFFEROPTIMIZED",
    "D3DERR_VBUF_CREATE_FAILED",
    "D3DERR_VERTEXBUFFERLOCKED",
    "D3DERR_VERTEXBUFFERUNLOCKFAILED",
    "D3DERR_ZBUFFER_NOTPRESENT",
    "D3DERR_STENCILBUFFER_NOTPRESENT",
    "D3DERR_WRONGTEXTUREFORMAT",
    "D3DERR_UNSUPPORTEDCOLOROPERATION",
    "D3DERR_UNSUPPORTEDCOLORARG",
    "D3DERR_UNSUPPORTEDALPHAOPERATION",
    "D3DERR_UNSUPPORTEDALPHAARG",
    "D3DERR_TOOMANYOPERATIONS",
    "D3DERR_CONFLICTINGTEXTUREFILTER",
    "D3DERR_UNSUPPORTEDFACTORVALUE",
    "D3DERR_CONFLICTINGRENDERSTATE",
    "D3DERR_UNSUPPORTEDTEXTUREFILTER",
    "D3DERR_TOOMANYPRIMITIVES",
    "D3DERR_INVALIDMATRIX",
    "D3DERR_TOOMANYVERTICES",
    "D3DERR_CONFLICTINGTEXTUREPALETTE",
    "D3DERR_INVALIDSTATEBLOCK",
    "D3DERR_INBEGINSTATEBLOCK",
    "D3DERR_NOTINBEGINSTATEBLOCK",
])
IDirect3D = Interface("IDirect3D", IUnknown)
IDirect3D2 = Interface("IDirect3D2", IUnknown)
IDirect3D3 = Interface("IDirect3D3", IUnknown)
IDirect3D7 = Interface("IDirect3D7", IUnknown)
IDirect3DDevice = Interface("IDirect3DDevice", IUnknown)
IDirect3DDevice2 = Interface("IDirect3DDevice2", IUnknown)
IDirect3DDevice3 = Interface("IDirect3DDevice3", IUnknown)
IDirect3DDevice7 = Interface("IDirect3DDevice7", IUnknown)
IDirect3DExecuteBuffer = Interface("IDirect3DExecuteBuffer", IUnknown)
IDirect3DLight = Interface("IDirect3DLight", IUnknown)
IDirect3DMaterial = Interface("IDirect3DMaterial", IUnknown)
IDirect3DMaterial2 = Interface("IDirect3DMaterial2", IUnknown)
IDirect3DMaterial3 = Interface("IDirect3DMaterial3", IUnknown)
IDirect3DTexture = Interface("IDirect3DTexture", IUnknown)
IDirect3DTexture2 = Interface("IDirect3DTexture2", IUnknown)
IDirect3DViewport = Interface("IDirect3DViewport", IUnknown)
IDirect3DViewport2 = Interface("IDirect3DViewport2", IDirect3DViewport)
IDirect3DViewport3 = Interface("IDirect3DViewport3", IDirect3DViewport2)
IDirect3DVertexBuffer = Interface("IDirect3DVertexBuffer", IUnknown)
IDirect3DVertexBuffer7 = Interface("IDirect3DVertexBuffer7", IUnknown)
LPUNKNOWN = Pointer(IUnknown)
LPDIRECT3D = Pointer(IDirect3D)
LPDIRECT3DDEVICE = Pointer(IDirect3DDevice)
LPDIRECT3DEXECUTEBUFFER = Pointer(IDirect3DExecuteBuffer)
LPDIRECT3DLIGHT = Pointer(IDirect3DLight)
LPDIRECT3DMATERIAL = Pointer(IDirect3DMaterial)
LPDIRECT3DTEXTURE = Pointer(IDirect3DTexture)
LPDIRECT3DVIEWPORT = Pointer(IDirect3DViewport)
LPDIRECT3D2 = Pointer(IDirect3D2)
LPDIRECT3DDEVICE2 = Pointer(IDirect3DDevice2)
LPDIRECT3DMATERIAL2 = Pointer(IDirect3DMaterial2)
LPDIRECT3DTEXTURE2 = Pointer(IDirect3DTexture2)
LPDIRECT3DVIEWPORT2 = Pointer(IDirect3DViewport2)
LPDIRECT3D3 = Pointer(IDirect3D3)
LPDIRECT3DDEVICE3 = Pointer(IDirect3DDevice3)
LPDIRECT3DMATERIAL3 = Pointer(IDirect3DMaterial3)
LPDIRECT3DVIEWPORT3 = Pointer(IDirect3DViewport3)
LPDIRECT3DVERTEXBUFFER = Pointer(IDirect3DVertexBuffer)
LPDIRECT3D7 = Pointer(IDirect3D7)
LPDIRECT3DDEVICE7 = Pointer(IDirect3DDevice7)
LPDIRECT3DVERTEXBUFFER7 = Pointer(IDirect3DVertexBuffer7)
# IDirect3D (DX3) methods, in vtable order after IUnknown.
IDirect3D.methods += [
    Method(HRESULT, "Initialize", [(REFCLSID, "riid")]),
    Method(HRESULT, "EnumDevices", [(LPD3DENUMDEVICESCALLBACK, "lpEnumDevicesCallback"), (LPVOID, "lpUserArg")]),
    Method(HRESULT, "CreateLight", [Out(Pointer(LPDIRECT3DLIGHT), "lplpDirect3DLight"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateMaterial", [Out(Pointer(LPDIRECT3DMATERIAL), "lplpDirect3DMaterial"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateViewport", [Out(Pointer(LPDIRECT3DVIEWPORT), "lplpD3DViewport"), (LPUNKNOWN, "pUnkOuter")]),
    # NOTE(review): lplpD3DDevice is filled in by FindDevice -- possibly
    # should be Out(), as IDirect3D3 declares it; confirm before changing.
    Method(HRESULT, "FindDevice", [(LPD3DFINDDEVICESEARCH, "lpD3DDFS"), (LPD3DFINDDEVICERESULT, "lplpD3DDevice")]),
]
# IDirect3D2 (DX5): drops Initialize, adds CreateDevice.
IDirect3D2.methods += [
    Method(HRESULT, "EnumDevices", [(LPD3DENUMDEVICESCALLBACK, "lpEnumDevicesCallback"), (LPVOID, "lpUserArg")]),
    Method(HRESULT, "CreateLight", [Out(Pointer(LPDIRECT3DLIGHT), "lplpDirect3DLight"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateMaterial", [Out(Pointer(LPDIRECT3DMATERIAL2), "lplpDirect3DMaterial2"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateViewport", [Out(Pointer(LPDIRECT3DVIEWPORT2), "lplpD3DViewport2"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "FindDevice", [(LPD3DFINDDEVICESEARCH, "lpD3DDFS"), (LPD3DFINDDEVICERESULT, "lpD3DFDR")]),
    Method(HRESULT, "CreateDevice", [(REFCLSID, "rclsid"), (LPDIRECTDRAWSURFACE, "lpDDS"), Out(Pointer(LPDIRECT3DDEVICE2), "lplpD3DDevice2")]),
]
# IDirect3D3 (DX6): adds vertex buffers and Z-buffer format enumeration.
IDirect3D3.methods += [
    Method(HRESULT, "EnumDevices", [(LPD3DENUMDEVICESCALLBACK, "lpEnumDevicesCallback"), (LPVOID, "lpUserArg")]),
    Method(HRESULT, "CreateLight", [Out(Pointer(LPDIRECT3DLIGHT), "lplpDirect3DLight"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateMaterial", [Out(Pointer(LPDIRECT3DMATERIAL3), "lplpDirect3DMaterial3"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "CreateViewport", [Out(Pointer(LPDIRECT3DVIEWPORT3), "lplpD3DViewport3"), (LPUNKNOWN, "pUnkOuter")]),
    Method(HRESULT, "FindDevice", [(LPD3DFINDDEVICESEARCH, "lpD3DDFS"), Out(LPD3DFINDDEVICERESULT, "lpD3DFDR")]),
    Method(HRESULT, "CreateDevice", [(REFCLSID, "rclsid"), (LPDIRECTDRAWSURFACE4, "lpDDS"), Out(Pointer(LPDIRECT3DDEVICE3), "lplpD3DDevice3"), (LPUNKNOWN, "lpUnk")]),
    Method(HRESULT, "CreateVertexBuffer", [(LPD3DVERTEXBUFFERDESC, "lpD3DVertBufDesc"), Out(Pointer(LPDIRECT3DVERTEXBUFFER), "lplpD3DVertBuf"), (DWORD, "dwFlags"), (LPUNKNOWN, "lpUnk")]),
    Method(HRESULT, "EnumZBufferFormats", [(REFCLSID, "riidDevice"), (LPD3DENUMPIXELFORMATSCALLBACK, "lpEnumCallback"), (LPVOID, "lpContext")]),
    Method(HRESULT, "EvictManagedTextures", []),
]
# IDirect3D7 (DX7): lights/materials/viewports are gone; devices take DDraw7 surfaces.
IDirect3D7.methods += [
    Method(HRESULT, "EnumDevices", [(LPD3DENUMDEVICESCALLBACK7, "lpEnumDevicesCallback"), (LPVOID, "lpUserArg")]),
    Method(HRESULT, "CreateDevice", [(REFCLSID, "rclsid"), (LPDIRECTDRAWSURFACE7, "lpDDS"), Out(Pointer(LPDIRECT3DDEVICE7), "lplpD3DDevice")]),
    Method(HRESULT, "CreateVertexBuffer", [(LPD3DVERTEXBUFFERDESC, "lpD3DVertBufDesc"), Out(Pointer(LPDIRECT3DVERTEXBUFFER7), "lplpD3DVertBuf"), (DWORD, "dwFlags")]),
    Method(HRESULT, "EnumZBufferFormats", [(REFCLSID, "riidDevice"), (LPD3DENUMPIXELFORMATSCALLBACK, "lpEnumCallback"), (LPVOID, "lpContext")]),
    Method(HRESULT, "EvictManagedTextures", []),
]
# IDirect3DDevice (DX3, execute-buffer era) methods, in vtable order.
IDirect3DDevice.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3D, "lpDirect3D"), (LPGUID, "lpGUID"), (LPD3DDEVICEDESC, "lpD3DDVDesc")]),
    Method(HRESULT, "GetCaps", [Out(LPD3DDEVICEDESC, "lpD3DHWDevDesc"), Out(LPD3DDEVICEDESC, "lpD3DHELDevDesc")]),
    Method(HRESULT, "SwapTextureHandles", [(LPDIRECT3DTEXTURE, "lpD3Dtex1"), (LPDIRECT3DTEXTURE, "lpD3DTex2")]),
    Method(HRESULT, "CreateExecuteBuffer", [(LPD3DEXECUTEBUFFERDESC, "lpDesc"), Out(Pointer(LPDIRECT3DEXECUTEBUFFER), "lplpDirect3DExecuteBuffer"), (LPUNKNOWN, "pUnkOuter")]),
    # Fix: GetStats fills the caller-supplied D3DSTATS, so mark the argument
    # Out -- consistent with the IDirect3DDevice2 declaration of the same
    # method further below.
    Method(HRESULT, "GetStats", [Out(LPD3DSTATS, "lpD3DStats")]),
    Method(HRESULT, "Execute", [(LPDIRECT3DEXECUTEBUFFER, "lpDirect3DExecuteBuffer"), (LPDIRECT3DVIEWPORT, "lpDirect3DViewport"), (DWORD, "dwFlags")]),
    Method(HRESULT, "AddViewport", [(LPDIRECT3DVIEWPORT, "lpDirect3DViewport")]),
    Method(HRESULT, "DeleteViewport", [(LPDIRECT3DVIEWPORT, "lpDirect3DViewport")]),
    Method(HRESULT, "NextViewport", [(LPDIRECT3DVIEWPORT, "lpDirect3DViewport"), Out(Pointer(LPDIRECT3DVIEWPORT), "lplpDirect3DViewport"), (DWORD, "dwFlags")]),
    Method(HRESULT, "Pick", [(LPDIRECT3DEXECUTEBUFFER, "lpDirect3DExecuteBuffer"), (LPDIRECT3DVIEWPORT, "lpDirect3DViewport"), (DWORD, "dwFlags"), (LPD3DRECT, "lpRect")]),
    Method(HRESULT, "GetPickRecords", [(LPDWORD, "lpCount"), (LPD3DPICKRECORD, "lpD3DPickRec")]),
    Method(HRESULT, "EnumTextureFormats", [(LPD3DENUMTEXTUREFORMATSCALLBACK, "lpD3DEnumTextureProc"), (LPVOID, "lpArg")]),
    Method(HRESULT, "CreateMatrix", [Out(LPD3DMATRIXHANDLE, "lpD3DMatHandle")]),
    Method(HRESULT, "SetMatrix", [(D3DMATRIXHANDLE, "D3DMatHandle"), (Const(LPD3DMATRIX), "lpD3DMatrix")]),
    Method(HRESULT, "GetMatrix", [(D3DMATRIXHANDLE, "D3DMatHandle"), Out(LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "DeleteMatrix", [(D3DMATRIXHANDLE, "D3DMatHandle")]),
    Method(HRESULT, "BeginScene", []),
    Method(HRESULT, "EndScene", []),
    Method(HRESULT, "GetDirect3D", [Out(Pointer(LPDIRECT3D), "lplpDirect3D")]),
]
# IDirect3DDevice2 (DX5): execute buffers replaced by Begin/End and
# DrawPrimitive-style immediate mode.
IDirect3DDevice2.methods += [
    Method(HRESULT, "GetCaps", [Out(LPD3DDEVICEDESC, "lpD3DHWDevDesc"), Out(LPD3DDEVICEDESC, "lpD3DHELDevDesc")]),
    Method(HRESULT, "SwapTextureHandles", [(LPDIRECT3DTEXTURE2, "lpD3DTex1"), (LPDIRECT3DTEXTURE2, "lpD3DTex2")]),
    Method(HRESULT, "GetStats", [Out(LPD3DSTATS, "lpD3DStats")]),
    Method(HRESULT, "AddViewport", [(LPDIRECT3DVIEWPORT2, "lpDirect3DViewport2")]),
    Method(HRESULT, "DeleteViewport", [(LPDIRECT3DVIEWPORT2, "lpDirect3DViewport2")]),
    Method(HRESULT, "NextViewport", [(LPDIRECT3DVIEWPORT2, "lpDirect3DViewport2"), Out(Pointer(LPDIRECT3DVIEWPORT2), "lplpDirect3DViewport2"), (DWORD, "dwFlags")]),
    Method(HRESULT, "EnumTextureFormats", [(LPD3DENUMTEXTUREFORMATSCALLBACK, "lpD3DEnumTextureProc"), (LPVOID, "lpArg")]),
    Method(HRESULT, "BeginScene", []),
    Method(HRESULT, "EndScene", []),
    Method(HRESULT, "GetDirect3D", [Out(Pointer(LPDIRECT3D2), "lplpDirect3D2")]),
    Method(HRESULT, "SetCurrentViewport", [(LPDIRECT3DVIEWPORT2, "lpDirect3DViewport2")]),
    Method(HRESULT, "GetCurrentViewport", [Out(Pointer(LPDIRECT3DVIEWPORT2), "lplpDirect3DViewport2")]),
    Method(HRESULT, "SetRenderTarget", [(LPDIRECTDRAWSURFACE, "lpNewRenderTarget"), (DWORD, "dwFlags")]),
    Method(HRESULT, "GetRenderTarget", [Out(Pointer(LPDIRECTDRAWSURFACE), "lplpRenderTarget")]),
    Method(HRESULT, "Begin", [(D3DPRIMITIVETYPE, "d3dpt"), (D3DVERTEXTYPE, "dwVertexTypeDesc"), (DWORD, "dwFlags")]),
    Method(HRESULT, "BeginIndexed", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (D3DVERTEXTYPE, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwNumVertices"), (DWORD, "dwFlags")]),
    Method(HRESULT, "Vertex", [(LPVOID, "lpVertexType")]),
    Method(HRESULT, "Index", [(WORD, "wVertexIndex")]),
    Method(HRESULT, "End", [(DWORD, "dwFlags")]),
    Method(HRESULT, "GetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), Out(LPDWORD, "lpdwRenderState")]),
    Method(HRESULT, "SetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), (DWORD, "dwRenderState")]),
    Method(HRESULT, "GetLightState", [(D3DLIGHTSTATETYPE, "dwLightStateType"), Out(LPDWORD, "lpdwLightState")]),
    Method(HRESULT, "SetLightState", [(D3DLIGHTSTATETYPE, "dwLightStateType"), (DWORD, "dwLightState")]),
    Method(HRESULT, "SetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "GetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), Out(LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "MultiplyTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "DrawPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (D3DVERTEXTYPE, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (D3DVERTEXTYPE, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (LPWORD, "dwIndices"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "SetClipStatus", [(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
    Method(HRESULT, "GetClipStatus", [(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
]
# IDirect3DDevice3 (DX6): adds strided/VB draws, multitexture stage state,
# and FVF (DWORD vertex-type) instead of D3DVERTEXTYPE.
IDirect3DDevice3.methods += [
    Method(HRESULT, "GetCaps", [Out(LPD3DDEVICEDESC, "lpD3DHWDevDesc"), Out(LPD3DDEVICEDESC, "lpD3DHELDevDesc")]),
    Method(HRESULT, "GetStats", [Out(LPD3DSTATS, "lpD3DStats")]),
    Method(HRESULT, "AddViewport", [(LPDIRECT3DVIEWPORT3, "lpDirect3DViewport3")]),
    Method(HRESULT, "DeleteViewport", [(LPDIRECT3DVIEWPORT3, "lpDirect3DViewport3")]),
    Method(HRESULT, "NextViewport", [(LPDIRECT3DVIEWPORT3, "lpDirect3DViewport3"), Out(Pointer(LPDIRECT3DVIEWPORT3), "lplpDirect3DViewport3"), (DWORD, "dwFlags")]),
    Method(HRESULT, "EnumTextureFormats", [(LPD3DENUMPIXELFORMATSCALLBACK, "lpD3DEnumPixelProc"), (LPVOID, "lpArg")]),
    Method(HRESULT, "BeginScene", []),
    Method(HRESULT, "EndScene", []),
    Method(HRESULT, "GetDirect3D", [Out(Pointer(LPDIRECT3D3), "lplpDirect3D3")]),
    Method(HRESULT, "SetCurrentViewport", [(LPDIRECT3DVIEWPORT3, "lpDirect3DViewport3")]),
    Method(HRESULT, "GetCurrentViewport", [Out(Pointer(LPDIRECT3DVIEWPORT3), "lplpDirect3DViewport3")]),
    Method(HRESULT, "SetRenderTarget", [(LPDIRECTDRAWSURFACE4, "lpNewRenderTarget"), (DWORD, "dwFlags")]),
    Method(HRESULT, "GetRenderTarget", [Out(Pointer(LPDIRECTDRAWSURFACE4), "lplpRenderTarget")]),
    Method(HRESULT, "Begin", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "dwVertexTypeDesc"), (DWORD, "dwFlags")]),
    Method(HRESULT, "BeginIndexed", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwNumVertices"), (DWORD, "dwFlags")]),
    Method(HRESULT, "Vertex", [(LPVOID, "lpVertexType")]),
    Method(HRESULT, "Index", [(WORD, "wVertexIndex")]),
    Method(HRESULT, "End", [(DWORD, "dwFlags")]),
    Method(HRESULT, "GetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), Out(LPDWORD, "lpdwRenderState")]),
    Method(HRESULT, "SetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), (DWORD, "dwRenderState")]),
    Method(HRESULT, "GetLightState", [(D3DLIGHTSTATETYPE, "dwLightStateType"), Out(LPDWORD, "lpdwLightState")]),
    Method(HRESULT, "SetLightState", [(D3DLIGHTSTATETYPE, "dwLightStateType"), (DWORD, "dwLightState")]),
    Method(HRESULT, "SetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "GetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), Out(LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "MultiplyTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "DrawPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (LPWORD, "dwIndices"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "SetClipStatus", [(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
    Method(HRESULT, "GetClipStatus", [Out(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
    Method(HRESULT, "DrawPrimitiveStrided", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "dwVertexType"), (LPD3DDRAWPRIMITIVESTRIDEDDATA, "lpD3DDrawPrimStrideData"), (DWORD, "dwVertexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitiveStrided", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "dwVertexType"), (LPD3DDRAWPRIMITIVESTRIDEDDATA, "lpD3DDrawPrimStrideData"), (DWORD, "dwVertexCount"), (LPWORD, "lpIndex"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawPrimitiveVB", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (LPDIRECT3DVERTEXBUFFER, "lpD3DVertexBuf"), (DWORD, "dwStartVertex"), (DWORD, "dwNumVertices"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitiveVB", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (LPDIRECT3DVERTEXBUFFER, "lpD3DVertexBuf"), (LPWORD, "lpwIndices"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "ComputeSphereVisibility", [(LPD3DVECTOR, "lpCenters"), (LPD3DVALUE, "lpRadii"), (DWORD, "dwNumSpheres"), (DWORD, "dwFlags"), (LPDWORD, "lpdwReturnValues")]),
    Method(HRESULT, "GetTexture", [(DWORD, "dwStage"), Out(Pointer(LPDIRECT3DTEXTURE2), "lplpTexture2")]),
    Method(HRESULT, "SetTexture", [(DWORD, "dwStage"), (LPDIRECT3DTEXTURE2, "lpTexture2")]),
    Method(HRESULT, "GetTextureStageState", [(DWORD, "dwStage"), (D3DTEXTURESTAGESTATETYPE, "d3dTexStageStateType"), Out(LPDWORD, "lpdwState")]),
    Method(HRESULT, "SetTextureStageState", [(DWORD, "dwStage"), (D3DTEXTURESTAGESTATETYPE, "d3dTexStageStateType"), (DWORD, "dwState")]),
    Method(HRESULT, "ValidateDevice", [(LPDWORD, "lpdwPasses")]),
]
# IDirect3DDevice7 (DX7): viewports/materials/lights become device state;
# adds Clear, state blocks, clip planes and texture management.
IDirect3DDevice7.methods += [
    Method(HRESULT, "GetCaps", [Out(LPD3DDEVICEDESC7, "lpD3DHELDevDesc")]),
    Method(HRESULT, "EnumTextureFormats", [(LPD3DENUMPIXELFORMATSCALLBACK, "lpD3DEnumPixelProc"), (LPVOID, "lpArg")]),
    Method(HRESULT, "BeginScene", []),
    Method(HRESULT, "EndScene", []),
    # Fix: the out-argument was mislabeled "lplpDirect3D3" (copy-paste from
    # the IDirect3DDevice3 block above); the type is LPDIRECT3D7.
    Method(HRESULT, "GetDirect3D", [Out(Pointer(LPDIRECT3D7), "lplpDirect3D7")]),
    Method(HRESULT, "SetRenderTarget", [(LPDIRECTDRAWSURFACE7, "lpNewRenderTarget"), (DWORD, "dwFlags")]),
    Method(HRESULT, "GetRenderTarget", [Out(Pointer(LPDIRECTDRAWSURFACE7), "lplpRenderTarget")]),
    Method(HRESULT, "Clear", [(DWORD, "dwCount"), (LPD3DRECT, "lpRects"), (DWORD, "dwFlags"), (D3DCOLOR, "dwColor"), (D3DVALUE, "dvZ"), (DWORD, "dwStencil")]),
    Method(HRESULT, "SetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "GetTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), Out(LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "SetViewport", [(LPD3DVIEWPORT7, "lpData")]),
    Method(HRESULT, "MultiplyTransform", [(D3DTRANSFORMSTATETYPE, "dtstTransformStateType"), (LPD3DMATRIX, "lpD3DMatrix")]),
    Method(HRESULT, "GetViewport", [Out(LPD3DVIEWPORT7, "lpData")]),
    Method(HRESULT, "SetMaterial", [(LPD3DMATERIAL7, "lpMat")]),
    Method(HRESULT, "GetMaterial", [Out(LPD3DMATERIAL7, "lpMat")]),
    Method(HRESULT, "SetLight", [(DWORD, "dwLightIndex"), (LPD3DLIGHT7, "lpLight")]),
    # Fix: GetLight fills the caller-supplied D3DLIGHT7; mark it Out like
    # every other Get* out-parameter in this block (GetMaterial, GetViewport,
    # GetTransform, GetRenderState, ...).
    Method(HRESULT, "GetLight", [(DWORD, "dwLightIndex"), Out(LPD3DLIGHT7, "lpLight")]),
    Method(HRESULT, "SetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), (DWORD, "dwRenderState")]),
    Method(HRESULT, "GetRenderState", [(D3DRENDERSTATETYPE, "dwRenderStateType"), Out(LPDWORD, "lpdwRenderState")]),
    Method(HRESULT, "BeginStateBlock", []),
    Method(HRESULT, "EndStateBlock", [Out(LPDWORD, "lpdwBlockHandle")]),
    Method(HRESULT, "PreLoad", [(LPDIRECTDRAWSURFACE7, "lpddsTexture")]),
    Method(HRESULT, "DrawPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitive", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "d3dvtVertexType"), (LPVOID, "lpvVertices"), (DWORD, "dwVertexCount"), (LPWORD, "dwIndices"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "SetClipStatus", [(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
    Method(HRESULT, "GetClipStatus", [Out(LPD3DCLIPSTATUS, "lpD3DClipStatus")]),
    Method(HRESULT, "DrawPrimitiveStrided", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "dwVertexType"), (LPD3DDRAWPRIMITIVESTRIDEDDATA, "lpD3DDrawPrimStrideData"), (DWORD, "dwVertexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitiveStrided", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (DWORD, "dwVertexType"), (LPD3DDRAWPRIMITIVESTRIDEDDATA, "lpD3DDrawPrimStrideData"), (DWORD, "dwVertexCount"), (LPWORD, "lpIndex"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawPrimitiveVB", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (LPDIRECT3DVERTEXBUFFER7, "lpD3DVertexBuf"), (DWORD, "dwStartVertex"), (DWORD, "dwNumVertices"), (DWORD, "dwFlags")]),
    Method(HRESULT, "DrawIndexedPrimitiveVB", [(D3DPRIMITIVETYPE, "d3dptPrimitiveType"), (LPDIRECT3DVERTEXBUFFER7, "lpD3DVertexBuf"), (DWORD, "dwStartVertex"), (DWORD, "dwNumVertices"), (LPWORD, "lpwIndices"), (DWORD, "dwIndexCount"), (DWORD, "dwFlags")]),
    Method(HRESULT, "ComputeSphereVisibility", [(LPD3DVECTOR, "lpCenters"), (LPD3DVALUE, "lpRadii"), (DWORD, "dwNumSpheres"), (DWORD, "dwFlags"), (LPDWORD, "lpdwReturnValues")]),
    Method(HRESULT, "GetTexture", [(DWORD, "dwStage"), Out(Pointer(LPDIRECTDRAWSURFACE7), "lpTexture")]),
    Method(HRESULT, "SetTexture", [(DWORD, "dwStage"), (LPDIRECTDRAWSURFACE7, "lpTexture")]),
    Method(HRESULT, "GetTextureStageState", [(DWORD, "dwStage"), (D3DTEXTURESTAGESTATETYPE, "d3dTexStageStateType"), Out(LPDWORD, "lpdwState")]),
    Method(HRESULT, "SetTextureStageState", [(DWORD, "dwStage"), (D3DTEXTURESTAGESTATETYPE, "d3dTexStageStateType"), (DWORD, "dwState")]),
    Method(HRESULT, "ValidateDevice", [Out(LPDWORD, "lpdwPasses")]),
    Method(HRESULT, "ApplyStateBlock", [(DWORD, "dwBlockHandle")]),
    Method(HRESULT, "CaptureStateBlock", [(DWORD, "dwBlockHandle")]),
    Method(HRESULT, "DeleteStateBlock", [(DWORD, "dwBlockHandle")]),
    Method(HRESULT, "CreateStateBlock", [(D3DSTATEBLOCKTYPE, "d3dsbType"), Out(LPDWORD, "lpdwBlockHandle")]),
    Method(HRESULT, "Load", [(LPDIRECTDRAWSURFACE7, "lpDestTex"), (LPPOINT, "lpDestPoint"), (LPDIRECTDRAWSURFACE7, "lpSrcTex"), (LPRECT, "lprcSrcRect"), (DWORD, "dwFlags")]),
    Method(HRESULT, "LightEnable", [(DWORD, "dwLightIndex"), (BOOL, "bEnable")]),
    Method(HRESULT, "GetLightEnable", [(DWORD, "dwLightIndex"), Out(Pointer(BOOL), "pbEnable")]),
    Method(HRESULT, "SetClipPlane", [(DWORD, "dwIndex"), (Pointer(D3DVALUE), "pPlaneEquation")]),
    Method(HRESULT, "GetClipPlane", [(DWORD, "dwIndex"), Out(Pointer(D3DVALUE), "pPlaneEquation")]),
    Method(HRESULT, "GetInfo", [(DWORD, "dwDevInfoID"), Out(LPVOID, "pDevInfoStruct"), (DWORD, "dwSize")]),
]
# Method tables for the legacy Direct3D COM interfaces.  Each Method() entry
# is (return type, name, [parameters]); Out(...) marks parameters the callee
# writes through, which the tracer serializes after the call instead of
# before it.
IDirect3DExecuteBuffer.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3DDEVICE, "lpDirect3DDevice"), (LPD3DEXECUTEBUFFERDESC, "lpDesc")]),
    Method(HRESULT, "Lock", [(LPD3DEXECUTEBUFFERDESC, "lpDesc")]),
    Method(HRESULT, "Unlock", []),
    Method(HRESULT, "SetExecuteData", [(LPD3DEXECUTEDATA, "lpData")]),
    Method(HRESULT, "GetExecuteData", [Out(LPD3DEXECUTEDATA, "lpData")]),
    Method(HRESULT, "Validate", [(LPDWORD, "lpdwOffset"), (LPD3DVALIDATECALLBACK, "lpFunc"), (LPVOID, "lpUserArg"), (DWORD, "dwReserved")]),
    Method(HRESULT, "Optimize", [(DWORD, "dwDummy")]),
]
IDirect3DLight.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3D, "lpDirect3D")]),
    Method(HRESULT, "SetLight", [(LPD3DLIGHT, "lpLight")]),
    Method(HRESULT, "GetLight", [Out(LPD3DLIGHT, "lpLight")]),
]
IDirect3DMaterial.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3D, "lpDirect3D")]),
    Method(HRESULT, "SetMaterial", [(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetMaterial", [Out(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetHandle", [(LPDIRECT3DDEVICE, "lpDirect3DDevice"), Out(LPD3DMATERIALHANDLE, "lpHandle")]),
    Method(HRESULT, "Reserve", []),
    Method(HRESULT, "Unreserve", []),
]
# The v2/v3 material interfaces drop Initialize/Reserve/Unreserve and only
# differ in the device interface version GetHandle takes.
IDirect3DMaterial2.methods += [
    Method(HRESULT, "SetMaterial", [(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetMaterial", [Out(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetHandle", [(LPDIRECT3DDEVICE2, "lpDirect3DDevice2"), Out(LPD3DMATERIALHANDLE, "lpHandle")]),
]
IDirect3DMaterial3.methods += [
    Method(HRESULT, "SetMaterial", [(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetMaterial", [Out(LPD3DMATERIAL, "lpMat")]),
    Method(HRESULT, "GetHandle", [(LPDIRECT3DDEVICE3, "lpDirect3DDevice3"), Out(LPD3DMATERIALHANDLE, "lpHandle")]),
]
IDirect3DTexture.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3DDEVICE, "lpDirect3DDevice"), (LPDIRECTDRAWSURFACE, "lpDDSurface")]),
    Method(HRESULT, "GetHandle", [(LPDIRECT3DDEVICE, "lpDirect3DDevice"), Out(LPD3DTEXTUREHANDLE, "lpHandle")]),
    Method(HRESULT, "PaletteChanged", [(DWORD, "dwStart"), (DWORD, "dwCount")]),
    Method(HRESULT, "Load", [(LPDIRECT3DTEXTURE, "lpD3DTexture")]),
    Method(HRESULT, "Unload", []),
]
IDirect3DTexture2.methods += [
    Method(HRESULT, "GetHandle", [(LPDIRECT3DDEVICE2, "lpDirect3DDevice2"), Out(LPD3DTEXTUREHANDLE, "lpHandle")]),
    Method(HRESULT, "PaletteChanged", [(DWORD, "dwStart"), (DWORD, "dwCount")]),
    Method(HRESULT, "Load", [(LPDIRECT3DTEXTURE2, "lpD3DTexture2")]),
]
IDirect3DViewport.methods += [
    Method(HRESULT, "Initialize", [(LPDIRECT3D, "lpDirect3D")]),
    Method(HRESULT, "GetViewport", [Out(LPD3DVIEWPORT, "lpData")]),
    Method(HRESULT, "SetViewport", [(LPD3DVIEWPORT, "lpData")]),
    Method(HRESULT, "TransformVertices", [(DWORD, "dwVertexCount"), (LPD3DTRANSFORMDATA, "lpData"), (DWORD, "dwFlags"), (LPDWORD, "lpOffScreen")]),
    Method(HRESULT, "LightElements", [(DWORD, "dwElementCount"), (LPD3DLIGHTDATA, "lpData")]),
    Method(HRESULT, "SetBackground", [(D3DMATERIALHANDLE, "hMat")]),
    Method(HRESULT, "GetBackground", [Out(LPD3DMATERIALHANDLE, "lphMat"), Out(LPBOOL, "lpValid")]),
    Method(HRESULT, "SetBackgroundDepth", [(LPDIRECTDRAWSURFACE, "lpDDSurface")]),
    Method(HRESULT, "GetBackgroundDepth", [Out(Pointer(LPDIRECTDRAWSURFACE), "lplpDDSurface"), Out(LPBOOL, "lpValid")]),
    Method(HRESULT, "Clear", [(DWORD, "dwCount"), (LPD3DRECT, "lpRects"), (DWORD, "dwFlags")]),
    Method(HRESULT, "AddLight", [(LPDIRECT3DLIGHT, "lpDirect3DLight")]),
    Method(HRESULT, "DeleteLight", [(LPDIRECT3DLIGHT, "lpDirect3DLight")]),
    Method(HRESULT, "NextLight", [(LPDIRECT3DLIGHT, "lpDirect3DLight"), Out(Pointer(LPDIRECT3DLIGHT), "lplpDirect3DLight"), (DWORD, "dwFlags")]),
]
IDirect3DViewport2.methods += [
    Method(HRESULT, "GetViewport2", [Out(LPD3DVIEWPORT2, "lpData")]),
    Method(HRESULT, "SetViewport2", [(LPD3DVIEWPORT2, "lpData")]),
]
# IDirect3DViewport3 additions over IDirect3DViewport2.
IDirect3DViewport3.methods += [
    Method(HRESULT, "SetBackgroundDepth2", [(LPDIRECTDRAWSURFACE4, "lpDDS")]),
    # lpValid is written by the implementation (it reports whether a
    # background depth is set), exactly like GetBackgroundDepth on
    # IDirect3DViewport above, so it must be wrapped in Out() for the
    # tracer to serialize it after the call.
    Method(HRESULT, "GetBackgroundDepth2", [Out(Pointer(LPDIRECTDRAWSURFACE4), "lplpDDS"), Out(LPBOOL, "lpValid")]),
    Method(HRESULT, "Clear2", [(DWORD, "dwCount"), (LPD3DRECT, "lpRects"), (DWORD, "dwFlags"), (D3DCOLOR, "dwColor"), (D3DVALUE, "dvZ"), (DWORD, "dwStencil")]),
]
IDirect3DVertexBuffer.methods += [
    Method(HRESULT, "Lock", [(DWORD, "dwFlags"), Out(Pointer(LPVOID), "lplpData"), (LPDWORD, "lpdwSize")]),
    Method(HRESULT, "Unlock", []),
    Method(HRESULT, "ProcessVertices", [(DWORD, "dwVertexOp"), (DWORD, "dwDestIndex"), (DWORD, "dwCount"), (LPDIRECT3DVERTEXBUFFER, "lpSrcBuffer"), (DWORD, "dwSrcIndex"), (LPDIRECT3DDEVICE3, "lpD3DDevice"), (DWORD, "dwFlags")]),
    Method(HRESULT, "GetVertexBufferDesc", [Out(LPD3DVERTEXBUFFERDESC, "lpD3DVertexBufferDesc")]),
    Method(HRESULT, "Optimize", [(LPDIRECT3DDEVICE3, "lpD3DDevice"), (DWORD, "dwFlags")]),
]
# The DX7 vertex buffer mirrors the DX6 one against the v7 device interface
# and adds strided-vertex processing.
IDirect3DVertexBuffer7.methods += [
    Method(HRESULT, "Lock", [(DWORD, "dwFlags"), Out(Pointer(LPVOID), "lplpData"), (LPDWORD, "lpdwSize")]),
    Method(HRESULT, "Unlock", []),
    Method(HRESULT, "ProcessVertices", [(DWORD, "dwVertexOp"), (DWORD, "dwDestIndex"), (DWORD, "dwCount"), (LPDIRECT3DVERTEXBUFFER7, "lpSrcBuffer"), (DWORD, "dwSrcIndex"), (LPDIRECT3DDEVICE7, "lpD3DDevice"), (DWORD, "dwFlags")]),
    Method(HRESULT, "GetVertexBufferDesc", [Out(LPD3DVERTEXBUFFERDESC, "lpD3DVertexBufferDesc")]),
    Method(HRESULT, "Optimize", [(LPDIRECT3DDEVICE7, "lpD3DDevice"), (DWORD, "dwFlags")]),
    Method(HRESULT, "ProcessVerticesStrided", [(DWORD, "dwVertexOp"), (DWORD, "dwDestIndex"), (DWORD, "dwCount"), (LPD3DDRAWPRIMITIVESTRIDEDDATA, "lpStrideData"), (DWORD, "dwVertexTypeDesc"), (LPDIRECT3DDEVICE7, "lpD3DDevice"), (DWORD, "dwFlags")]),
]
# Only the root COM interfaces are registered here; the secondary interfaces
# above are presumably reached through these at runtime — TODO confirm
# against ddraw.add_interfaces semantics.
interfaces = [
    IDirectDraw,
    IDirectDraw2,
    IDirectDraw4,
    IDirectDraw7,
    IDirect3D,
    IDirect3D2,
    IDirect3D3,
    IDirect3D7,
]
ddraw.add_interfaces(interfaces)
|
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import json
import testtools
import six
from six.moves.urllib import parse
from glanceclient.v1 import client
from glanceclient.v1 import images
from glanceclient.v1 import shell
from tests import utils
# Canned HTTP exchanges for utils.FakeAPI, keyed by request path and then by
# HTTP method.  Each value is a (headers, body) pair; bodies are either
# pre-parsed dicts (list endpoints), raw strings (image data downloads), or
# JSON-encoded strings (create/update responses), mirroring what the real
# service returns for that call.
fixtures = {
    '/v1/images': {
        'POST': (
            {
                'location': '/v1/images/1',
                'x-openstack-request-id': 'req-1234',
            },
            json.dumps(
                {'image': {
                    'id': '1',
                    'name': 'image-1',
                    'container_format': 'ovf',
                    'disk_format': 'vhd',
                    'owner': 'asdf',
                    'size': '1024',
                    'min_ram': '512',
                    'min_disk': '10',
                    'properties': {'a': 'b', 'c': 'd'},
                    'is_public': False,
                    'protected': False,
                    'deleted': False,
                }},
            ),
        ),
    },
    # Listing fixtures: one entry per distinct query string the list tests
    # are expected to generate (page size, marker, filters, sorting).
    '/v1/images/detail?limit=20': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?is_public=None&limit=20': {
        'GET': (
            {'x-openstack-request-id': 'req-1234'},
            {'images': [
                {
                    'id': 'a',
                    'owner': 'A',
                    'is_public': 'True',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'owner': 'B',
                    'is_public': 'False',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'c',
                    'is_public': 'False',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?is_public=None&limit=5': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'owner': 'A',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'owner': 'B',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b2',
                    'owner': 'B',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'c',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=5': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'owner': 'A',
                    'is_public': 'False',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'owner': 'A',
                    'is_public': 'False',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b2',
                    'owner': 'B',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'c',
                    'is_public': 'True',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=20&marker=a': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'b',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'c',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=1': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-0',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=1&marker=a': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'b',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=2': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=2&marker=b': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'c',
                    'name': 'image-3',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=20&name=foo': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=20&property-ping=pong': {
        'GET': (
            {},
            {'images': [
                {
                    'id': '1',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=20&sort_dir=desc': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    '/v1/images/detail?limit=20&sort_key=name': {
        'GET': (
            {},
            {'images': [
                {
                    'id': 'a',
                    'name': 'image-1',
                    'properties': {'arch': 'x86_64'},
                },
                {
                    'id': 'b',
                    'name': 'image-2',
                    'properties': {'arch': 'x86_64'},
                },
            ]},
        ),
    },
    # Single-image fixtures.  HEAD carries the metadata in x-image-meta-*
    # headers; GET returns the raw image payload.
    '/v1/images/1': {
        'HEAD': (
            {
                'x-image-meta-id': '1',
                'x-image-meta-name': 'image-1',
                'x-image-meta-property-arch': 'x86_64',
                'x-image-meta-is_public': 'false',
                'x-image-meta-protected': 'false',
                'x-image-meta-deleted': 'false',
            },
            None),
        'GET': (
            {},
            'XXX',
        ),
        'PUT': (
            {},
            json.dumps(
                {'image': {
                    'id': '1',
                    'name': 'image-2',
                    'container_format': 'ovf',
                    'disk_format': 'vhd',
                    'owner': 'asdf',
                    'size': '1024',
                    'min_ram': '512',
                    'min_disk': '10',
                    'properties': {'a': 'b', 'c': 'd'},
                    'is_public': False,
                    'protected': False,
                }},
            ),
        ),
        'DELETE': ({}, None),
    },
    # Image 2: checksum header deliberately does not match the payload, to
    # exercise checksum verification failure paths.
    '/v1/images/2': {
        'HEAD': (
            {
                'x-image-meta-id': '2'
            },
            None,
        ),
        'GET': (
            {
                'x-image-meta-checksum': 'wrong'
            },
            'YYY',
        ),
    },
    # Image 3: non-ASCII name plus a checksum that really is md5('ZZZ').
    '/v1/images/3': {
        'HEAD': (
            {
                'x-image-meta-id': '3',
                'x-image-meta-name': u"ni\xf1o"
            },
            None,
        ),
        'GET': (
            {
                'x-image-meta-checksum': '0745064918b49693cca64d6b6a13d28a'
            },
            'ZZZ',
        ),
    },
    # Image 4: every response carries x-openstack-request-id, for the
    # return_req_id tests.
    '/v1/images/4': {
        'HEAD': (
            {
                'x-image-meta-id': '4',
                'x-image-meta-name': 'image-4',
                'x-image-meta-property-arch': 'x86_64',
                'x-image-meta-is_public': 'false',
                'x-image-meta-protected': 'false',
                'x-image-meta-deleted': 'false',
                'x-openstack-request-id': 'req-1234',
            },
            None),
        'GET': (
            {
                'x-openstack-request-id': 'req-1234',
            },
            'XXX',
        ),
        'PUT': (
            {
                'x-openstack-request-id': 'req-1234',
            },
            json.dumps(
                {'image': {
                    'id': '4',
                    'name': 'image-4',
                    'container_format': 'ovf',
                    'disk_format': 'vhd',
                    'owner': 'asdf',
                    'size': '1024',
                    'min_ram': '512',
                    'min_disk': '10',
                    'properties': {'a': 'b', 'c': 'd'},
                    'is_public': False,
                    'protected': False,
                }},
            ),
        ),
        'DELETE': (
            {
                'x-openstack-request-id': 'req-1234',
            },
            None),
    },
    # An image originally created through the v2 API (note size is None),
    # updated through v1.
    '/v1/images/v2_created_img': {
        'PUT': (
            {},
            json.dumps({
                "image": {
                    "status": "queued",
                    "deleted": False,
                    "container_format": "bare",
                    "min_ram": 0,
                    "updated_at": "2013-12-20T01:51:45",
                    "owner": "foo",
                    "min_disk": 0,
                    "is_public": False,
                    "deleted_at": None,
                    "id": "v2_created_img",
                    "size": None,
                    "name": "bar",
                    "checksum": None,
                    "created_at": "2013-12-20T01:50:38",
                    "disk_format": "qcow2",
                    "properties": {},
                    "protected": False
                }
            })
        ),
    },
}
class ImageManagerTest(testtools.TestCase):
    """Exercises images.ImageManager against the canned `fixtures` above.

    Every test inspects self.api.calls, the (method, url, headers, body)
    tuples recorded by utils.FakeAPI, to verify the exact HTTP requests the
    manager issues.
    """

    def setUp(self):
        super(ImageManagerTest, self).setUp()
        self.api = utils.FakeAPI(fixtures)
        self.mgr = images.ImageManager(self.api)

    def test_paginated_list(self):
        # With page_size=2 the manager must follow the marker to fetch the
        # third image on a second request.
        images = list(self.mgr.list(page_size=2))
        expect = [
            ('GET', '/v1/images/detail?limit=2', {}, None),
            ('GET', '/v1/images/detail?limit=2&marker=b', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(3, len(images))
        self.assertEqual('a', images[0].id)
        self.assertEqual('b', images[1].id)
        self.assertEqual('c', images[2].id)

    def test_list_with_limit_less_than_page_size(self):
        # limit caps the yielded results, but the request still asks for a
        # full page.
        results = list(self.mgr.list(page_size=2, limit=1))
        expect = [('GET', '/v1/images/detail?limit=2', {}, None)]
        self.assertEqual(1, len(results))
        self.assertEqual(expect, self.api.calls)

    def test_list_with_limit_greater_than_page_size(self):
        images = list(self.mgr.list(page_size=1, limit=2))
        expect = [
            ('GET', '/v1/images/detail?limit=1', {}, None),
            ('GET', '/v1/images/detail?limit=1&marker=a', {}, None),
        ]
        self.assertEqual(2, len(images))
        self.assertEqual('a', images[0].id)
        self.assertEqual('b', images[1].id)
        self.assertEqual(expect, self.api.calls)

    def test_list_with_marker(self):
        list(self.mgr.list(marker='a'))
        url = '/v1/images/detail?limit=20&marker=a'
        expect = [('GET', url, {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_list_with_filter(self):
        list(self.mgr.list(filters={'name': "foo"}))
        url = '/v1/images/detail?limit=20&name=foo'
        expect = [('GET', url, {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_list_with_property_filters(self):
        # Custom property filters are encoded as property-<name> params.
        list(self.mgr.list(filters={'properties': {'ping': 'pong'}}))
        url = '/v1/images/detail?limit=20&property-ping=pong'
        expect = [('GET', url, {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_list_with_sort_dir(self):
        list(self.mgr.list(sort_dir='desc'))
        url = '/v1/images/detail?limit=20&sort_dir=desc'
        expect = [('GET', url, {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_list_with_sort_key(self):
        list(self.mgr.list(sort_key='name'))
        url = '/v1/images/detail?limit=20&sort_key=name'
        expect = [('GET', url, {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_get(self):
        # get() uses HEAD and builds the image from x-image-meta-* headers;
        # string booleans in headers come back as real booleans.
        image = self.mgr.get('1')
        expect = [('HEAD', '/v1/images/1', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('1', image.id)
        self.assertEqual('image-1', image.name)
        self.assertEqual(False, image.is_public)
        self.assertEqual(False, image.protected)
        self.assertEqual(False, image.deleted)
        self.assertEqual({u'arch': u'x86_64'}, image.properties)

    def test_get_int(self):
        # An integer id must be stringified into the URL.
        image = self.mgr.get(1)
        expect = [('HEAD', '/v1/images/1', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('1', image.id)
        self.assertEqual('image-1', image.name)
        self.assertEqual(False, image.is_public)
        self.assertEqual(False, image.protected)
        self.assertEqual(False, image.deleted)
        self.assertEqual({u'arch': u'x86_64'}, image.properties)

    def test_get_encoding(self):
        image = self.mgr.get('3')
        self.assertEqual(u"ni\xf1o", image.name)

    def test_get_req_id(self):
        # Passing return_req_id=[] collects the request ids in that list.
        params = {'return_req_id': []}
        self.mgr.get('4', **params)
        expect_req_id = ['req-1234']
        self.assertEqual(expect_req_id, params['return_req_id'])

    def test_data(self):
        data = ''.join([b for b in self.mgr.data('1', do_checksum=False)])
        expect = [('GET', '/v1/images/1', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('XXX', data)
        expect += [('GET', '/v1/images/1', {}, None)]
        data = ''.join([b for b in self.mgr.data('1')])
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('XXX', data)

    def test_data_with_wrong_checksum(self):
        # do_checksum=False must ignore the bogus checksum header; the
        # default (checksumming) path must raise EPIPE on consumption.
        data = ''.join([b for b in self.mgr.data('2', do_checksum=False)])
        expect = [('GET', '/v1/images/2', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('YYY', data)
        expect += [('GET', '/v1/images/2', {}, None)]
        data = self.mgr.data('2')
        self.assertEqual(expect, self.api.calls)
        try:
            data = ''.join([b for b in data])
            self.fail('data did not raise an error.')
        except IOError as e:
            self.assertEqual(errno.EPIPE, e.errno)
            msg = 'was fd7c5c4fdaa97163ee4ba8842baa537a expected wrong'
            self.assertTrue(msg in str(e))

    def test_data_req_id(self):
        params = {
            'do_checksum': False,
            'return_req_id': [],
        }
        ''.join([b for b in self.mgr.data('4', **params)])
        expect_req_id = ['req-1234']
        self.assertEqual(expect_req_id, params['return_req_id'])

    def test_data_with_checksum(self):
        data = ''.join([b for b in self.mgr.data('3', do_checksum=False)])
        expect = [('GET', '/v1/images/3', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('ZZZ', data)
        expect += [('GET', '/v1/images/3', {}, None)]
        data = ''.join([b for b in self.mgr.data('3')])
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('ZZZ', data)

    def test_delete(self):
        self.mgr.delete('1')
        expect = [('DELETE', '/v1/images/1', {}, None)]
        self.assertEqual(expect, self.api.calls)

    def test_delete_req_id(self):
        params = {
            'return_req_id': []
        }
        self.mgr.delete('4', **params)
        expect = [('DELETE', '/v1/images/4', {}, None)]
        self.assertEqual(self.api.calls, expect)
        expect_req_id = ['req-1234']
        self.assertEqual(expect_req_id, params['return_req_id'])

    def test_create_without_data(self):
        # Metadata-only create: fields become x-image-meta-* headers and the
        # body stays empty; copy_from becomes the x-glance-api-copy-from
        # header.
        params = {
            'id': '1',
            'name': 'image-1',
            'container_format': 'ovf',
            'disk_format': 'vhd',
            'owner': 'asdf',
            'size': 1024,
            'min_ram': 512,
            'min_disk': 10,
            'copy_from': 'http://example.com',
            'properties': {'a': 'b', 'c': 'd'},
        }
        image = self.mgr.create(**params)
        expect_headers = {
            'x-image-meta-id': '1',
            'x-image-meta-name': 'image-1',
            'x-image-meta-container_format': 'ovf',
            'x-image-meta-disk_format': 'vhd',
            'x-image-meta-owner': 'asdf',
            'x-image-meta-size': '1024',
            'x-image-meta-min_ram': '512',
            'x-image-meta-min_disk': '10',
            'x-glance-api-copy-from': 'http://example.com',
            'x-image-meta-property-a': 'b',
            'x-image-meta-property-c': 'd',
        }
        expect = [('POST', '/v1/images', expect_headers, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('1', image.id)
        self.assertEqual('image-1', image.name)
        self.assertEqual('ovf', image.container_format)
        self.assertEqual('vhd', image.disk_format)
        self.assertEqual('asdf', image.owner)
        self.assertEqual(1024, image.size)
        self.assertEqual(512, image.min_ram)
        self.assertEqual(10, image.min_disk)
        self.assertEqual(False, image.is_public)
        self.assertEqual(False, image.protected)
        self.assertEqual(False, image.deleted)
        self.assertEqual({'a': 'b', 'c': 'd'}, image.properties)

    def test_create_with_data(self):
        # The size of the supplied file-like body is computed automatically.
        image_data = six.StringIO('XXX')
        self.mgr.create(data=image_data)
        expect_headers = {'x-image-meta-size': '3'}
        expect = [('POST', '/v1/images', expect_headers, image_data)]
        self.assertEqual(expect, self.api.calls)

    def test_create_req_id(self):
        params = {
            'id': '4',
            'name': 'image-4',
            'container_format': 'ovf',
            'disk_format': 'vhd',
            'owner': 'asdf',
            'size': 1024,
            'min_ram': 512,
            'min_disk': 10,
            'copy_from': 'http://example.com',
            'properties': {'a': 'b', 'c': 'd'},
            'return_req_id': [],
        }
        image = self.mgr.create(**params)
        expect_headers = {
            'x-image-meta-id': '4',
            'x-image-meta-name': 'image-4',
            'x-image-meta-container_format': 'ovf',
            'x-image-meta-disk_format': 'vhd',
            'x-image-meta-owner': 'asdf',
            'x-image-meta-size': '1024',
            'x-image-meta-min_ram': '512',
            'x-image-meta-min_disk': '10',
            'x-glance-api-copy-from': 'http://example.com',
            'x-image-meta-property-a': 'b',
            'x-image-meta-property-c': 'd',
        }
        expect = [('POST', '/v1/images', expect_headers, None)]
        self.assertEqual(self.api.calls, expect)
        # The POST fixture always answers with image id '1', regardless of
        # the id sent in the request headers.
        self.assertEqual(image.id, '1')
        expect_req_id = ['req-1234']
        self.assertEqual(expect_req_id, params['return_req_id'])

    def test_update(self):
        fields = {
            'name': 'image-2',
            'container_format': 'ovf',
            'disk_format': 'vhd',
            'owner': 'asdf',
            'size': 1024,
            'min_ram': 512,
            'min_disk': 10,
            'copy_from': 'http://example.com',
            'properties': {'a': 'b', 'c': 'd'},
            'deleted': False,
        }
        image = self.mgr.update('1', **fields)
        expect_hdrs = {
            'x-image-meta-name': 'image-2',
            'x-image-meta-container_format': 'ovf',
            'x-image-meta-disk_format': 'vhd',
            'x-image-meta-owner': 'asdf',
            'x-image-meta-size': '1024',
            'x-image-meta-min_ram': '512',
            'x-image-meta-min_disk': '10',
            'x-glance-api-copy-from': 'http://example.com',
            'x-image-meta-property-a': 'b',
            'x-image-meta-property-c': 'd',
            'x-image-meta-deleted': 'False',
            'x-glance-registry-purge-props': 'false',
        }
        expect = [('PUT', '/v1/images/1', expect_hdrs, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('1', image.id)
        self.assertEqual('image-2', image.name)
        self.assertEqual(1024, image.size)
        self.assertEqual(512, image.min_ram)
        self.assertEqual(10, image.min_disk)

    def test_update_with_data(self):
        image_data = six.StringIO('XXX')
        self.mgr.update('1', data=image_data)
        expect_headers = {'x-image-meta-size': '3',
                          'x-glance-registry-purge-props': 'false'}
        expect = [('PUT', '/v1/images/1', expect_headers, image_data)]
        self.assertEqual(expect, self.api.calls)

    def test_update_with_purge_props(self):
        self.mgr.update('1', purge_props=True)
        expect_headers = {'x-glance-registry-purge-props': 'true'}
        expect = [('PUT', '/v1/images/1', expect_headers, None)]
        self.assertEqual(expect, self.api.calls)

    def test_update_with_purge_props_false(self):
        self.mgr.update('1', purge_props=False)
        expect_headers = {'x-glance-registry-purge-props': 'false'}
        expect = [('PUT', '/v1/images/1', expect_headers, None)]
        self.assertEqual(expect, self.api.calls)

    def test_update_req_id(self):
        fields = {
            'purge_props': True,
            'return_req_id': [],
        }
        self.mgr.update('4', **fields)
        expect_headers = {'x-glance-registry-purge-props': 'true'}
        expect = [('PUT', '/v1/images/4', expect_headers, None)]
        self.assertEqual(self.api.calls, expect)
        expect_req_id = ['req-1234']
        self.assertEqual(expect_req_id, fields['return_req_id'])

    def test_image_meta_from_headers_encoding(self):
        # On py2 the header arrives UTF-8 encoded and must be decoded; on
        # py3 it is already text.
        value = u"ni\xf1o"
        if six.PY2:
            fields = {"x-image-meta-name": "ni\xc3\xb1o"}
        else:
            fields = {"x-image-meta-name": value}
        headers = self.mgr._image_meta_from_headers(fields)
        self.assertEqual(value, headers["name"])

    def test_image_list_with_owner(self):
        # Owner filtering happens client-side over the fixture listing.
        images = self.mgr.list(owner='A', page_size=20)
        image_list = list(images)
        self.assertEqual('A', image_list[0].owner)
        self.assertEqual('a', image_list[0].id)
        self.assertEqual(1, len(image_list))

    def test_image_list_with_owner_req_id(self):
        fields = {
            'owner': 'A',
            'return_req_id': [],
        }
        images = self.mgr.list(**fields)
        next(images)
        self.assertEqual(fields['return_req_id'], ['req-1234'])

    def test_image_list_with_notfound_owner(self):
        images = self.mgr.list(owner='X', page_size=20)
        self.assertEqual(0, len(list(images)))

    def test_image_list_with_empty_string_owner(self):
        # owner='' matches only images with no owner attribute at all.
        images = self.mgr.list(owner='', page_size=20)
        image_list = list(images)
        self.assertRaises(AttributeError, lambda: image_list[0].owner)
        self.assertEqual('c', image_list[0].id)
        self.assertEqual(1, len(image_list))

    def test_image_list_with_unspecified_owner(self):
        # owner=None disables the owner filter entirely.
        images = self.mgr.list(owner=None, page_size=5)
        image_list = list(images)
        self.assertEqual('A', image_list[0].owner)
        self.assertEqual('a', image_list[0].id)
        self.assertEqual('A', image_list[1].owner)
        self.assertEqual('b', image_list[1].id)
        self.assertEqual('B', image_list[2].owner)
        self.assertEqual('b2', image_list[2].id)
        self.assertRaises(AttributeError, lambda: image_list[3].owner)
        self.assertEqual('c', image_list[3].id)
        self.assertEqual(4, len(image_list))

    def test_image_list_with_owner_and_limit(self):
        images = self.mgr.list(owner='B', page_size=5, limit=1)
        image_list = list(images)
        self.assertEqual('B', image_list[0].owner)
        self.assertEqual('b', image_list[0].id)
        self.assertEqual(1, len(image_list))

    def test_image_list_all_tenants(self):
        images = self.mgr.list(is_public=None, page_size=5)
        image_list = list(images)
        self.assertEqual('A', image_list[0].owner)
        self.assertEqual('a', image_list[0].id)
        self.assertEqual('B', image_list[1].owner)
        self.assertEqual('b', image_list[1].id)
        self.assertEqual('B', image_list[2].owner)
        self.assertEqual('b2', image_list[2].id)
        self.assertRaises(AttributeError, lambda: image_list[3].owner)
        self.assertEqual('c', image_list[3].id)
        self.assertEqual(4, len(image_list))

    def test_update_v2_created_image_using_v1(self):
        # A v2-created image has size None; the v1 manager must coerce that
        # to 0 rather than blow up.
        fields_to_update = {
            'name': 'bar',
            'container_format': 'bare',
            'disk_format': 'qcow2',
        }
        image = self.mgr.update('v2_created_img', **fields_to_update)
        expect_hdrs = {
            'x-image-meta-name': 'bar',
            'x-image-meta-container_format': 'bare',
            'x-image-meta-disk_format': 'qcow2',
            'x-glance-registry-purge-props': 'false',
        }
        expect = [('PUT', '/v1/images/v2_created_img', expect_hdrs, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('v2_created_img', image.id)
        self.assertEqual('bar', image.name)
        self.assertEqual(0, image.size)
        self.assertEqual('bare', image.container_format)
        self.assertEqual('qcow2', image.disk_format)
class ImageTest(testtools.TestCase):
    """Exercises the Image resource object's convenience methods.

    Each resource method re-fetches the image (hence the doubled HEAD calls
    in the expectations: one from get(), one from the resource refresh)
    before delegating to the manager.
    """

    def setUp(self):
        super(ImageTest, self).setUp()
        self.api = utils.FakeAPI(fixtures)
        self.mgr = images.ImageManager(self.api)

    def test_delete(self):
        image = self.mgr.get('1')
        image.delete()
        expect = [
            ('HEAD', '/v1/images/1', {}, None),
            ('HEAD', '/v1/images/1', {}, None),
            ('DELETE', '/v1/images/1', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)

    def test_update(self):
        image = self.mgr.get('1')
        image.update(name='image-5')
        expect = [
            ('HEAD', '/v1/images/1', {}, None),
            ('HEAD', '/v1/images/1', {}, None),
            ('PUT', '/v1/images/1',
             {'x-image-meta-name': 'image-5',
              'x-glance-registry-purge-props': 'false'}, None),
        ]
        self.assertEqual(expect, self.api.calls)

    def test_data(self):
        image = self.mgr.get('1')
        data = ''.join([b for b in image.data()])
        expect = [
            ('HEAD', '/v1/images/1', {}, None),
            ('HEAD', '/v1/images/1', {}, None),
            ('GET', '/v1/images/1', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('XXX', data)
        data = ''.join([b for b in image.data(do_checksum=False)])
        expect += [('GET', '/v1/images/1', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('XXX', data)

    def test_data_with_wrong_checksum(self):
        # Image 2's fixture advertises a checksum that doesn't match 'YYY';
        # consuming the checksummed iterator must raise EPIPE.
        image = self.mgr.get('2')
        data = ''.join([b for b in image.data(do_checksum=False)])
        expect = [
            ('HEAD', '/v1/images/2', {}, None),
            ('HEAD', '/v1/images/2', {}, None),
            ('GET', '/v1/images/2', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('YYY', data)
        data = image.data()
        expect += [('GET', '/v1/images/2', {}, None)]
        self.assertEqual(expect, self.api.calls)
        try:
            data = ''.join([b for b in image.data()])
            self.fail('data did not raise an error.')
        except IOError as e:
            self.assertEqual(errno.EPIPE, e.errno)
            msg = 'was fd7c5c4fdaa97163ee4ba8842baa537a expected wrong'
            self.assertTrue(msg in str(e))

    def test_data_with_checksum(self):
        image = self.mgr.get('3')
        data = ''.join([b for b in image.data(do_checksum=False)])
        expect = [
            ('HEAD', '/v1/images/3', {}, None),
            ('HEAD', '/v1/images/3', {}, None),
            ('GET', '/v1/images/3', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('ZZZ', data)
        data = ''.join([b for b in image.data()])
        expect += [('GET', '/v1/images/3', {}, None)]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual('ZZZ', data)
class ParameterFakeAPI(utils.FakeAPI):
    """Fake API that records the last URL it was asked to GET.

    Every GET answers with the same canned two-image listing; tests then
    inspect ``self.url`` to verify how query parameters were encoded.
    """

    image_list = {'images': [
        {'id': 'a',
         'name': 'image-1',
         'properties': {'arch': 'x86_64'}},
        {'id': 'b',
         'name': 'image-2',
         'properties': {'arch': 'x86_64'}},
    ]}

    def get(self, url, **kwargs):
        """Remember *url* and return a canned (response, body) pair."""
        self.url = url
        return utils.FakeResponse({}), self.image_list
class FakeArg(object):
    """Minimal stand-in for an argparse Namespace, backed by a dict.

    Names present in the dict resolve to their values; any other attribute
    lookup yields None instead of raising AttributeError.
    """

    def __init__(self, arg_dict):
        # Keep the raw mapping and expose its keys the way the shell
        # helpers expect them (an iterable of field names).
        self.arg_dict = arg_dict
        self.fields = arg_dict.keys()

    def __getattr__(self, name):
        # Only reached for names not found via normal attribute lookup;
        # dict.get already returns None for missing keys.
        return self.arg_dict.get(name)
class UrlParameterTest(testtools.TestCase):
    """Checks how shell commands encode arguments into the query string."""

    def setUp(self):
        super(UrlParameterTest, self).setUp()
        # Swap the client's image manager onto the recording fake so the
        # requested URL can be inspected after the shell call.
        self.api = ParameterFakeAPI({})
        self.gc = client.Client("http://fakeaddress.com")
        self.gc.images = images.ImageManager(self.api)

    def test_is_public_list(self):
        # do_image_list must forward is_public into the query string.
        shell.do_image_list(self.gc, FakeArg({"is_public": "True"}))
        parts = parse.urlparse(self.api.url)
        qs_dict = parse.parse_qs(parts.query)
        self.assertTrue('is_public' in qs_dict)
        self.assertTrue(qs_dict['is_public'][0].lower() == "true")
|
|
# Copyright (c) 2014 OpenStack Foundation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_const
from neutron.common import exceptions as n_exc
from neutron.common import utils as n_utils
from neutron.db import l3_attrs_db
from neutron.db import l3_db
from neutron.db import l3_dvrscheduler_db as l3_dvrsched_db
from neutron.db import models_v2
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron.i18n import _LI, _LW
from neutron import manager
from neutron.plugins.common import constants
from neutron.plugins.common import utils as p_utils
LOG = logging.getLogger(__name__)

# Deployment-wide default for the kind of router tenants get; per the help
# text, only admins can override it per router.
router_distributed_opts = [
    cfg.BoolOpt('router_distributed',
                default=False,
                help=_("System-wide flag to determine the type of router "
                       "that tenants can create. Only admin can override.")),
]
cfg.CONF.register_opts(router_distributed_opts)
class L3_NAT_with_dvr_db_mixin(l3_db.L3_NAT_db_mixin,
l3_attrs_db.ExtraAttributesMixin):
"""Mixin class to enable DVR support."""
router_device_owners = (
l3_db.L3_NAT_db_mixin.router_device_owners +
(l3_const.DEVICE_OWNER_DVR_INTERFACE,
l3_const.DEVICE_OWNER_ROUTER_SNAT,
l3_const.DEVICE_OWNER_AGENT_GW))
extra_attributes = (
l3_attrs_db.ExtraAttributesMixin.extra_attributes + [{
'name': "distributed",
'default': cfg.CONF.router_distributed
}])
    def _create_router_db(self, context, router, tenant_id):
        """Create a router db object with dvr additions.

        Resolves the 'distributed' flag (request value or config default)
        before delegating to the base class, then persists the extra
        attributes within the same transaction.

        :param context: neutron request context
        :param router: router dict from the API request body
        :param tenant_id: owner tenant of the new router
        :returns: the created router DB object
        """
        router['distributed'] = is_distributed_router(router)
        with context.session.begin(subtransactions=True):
            router_db = super(
                L3_NAT_with_dvr_db_mixin, self)._create_router_db(
                context, router, tenant_id)
            self._process_extra_attr_router_create(context, router_db, router)
            return router_db
    def _validate_router_migration(self, context, router_db, router_res):
        """Allow centralized -> distributed state transition only.

        :raises n_exc.BadRequest: on a distributed -> centralized request,
            or when upgrading a router that is still admin_state_up
        :raises l3.RouterInUse: when registered callbacks veto the upgrade
            with more than one error
        """
        if (router_db.extra_attributes.distributed and
                router_res.get('distributed') is False):
            LOG.info(_LI("Centralizing distributed router %s "
                         "is not supported"), router_db['id'])
            raise n_exc.BadRequest(
                resource='router',
                msg=_("Migration from distributed router to centralized is "
                      "not supported"))
        elif (not router_db.extra_attributes.distributed and
              router_res.get('distributed')):
            # router should be disabled in order for upgrade
            if router_db.admin_state_up:
                msg = _('Cannot upgrade active router to distributed. Please '
                        'set router admin_state_up to False prior to upgrade.')
                raise n_exc.BadRequest(resource='router', msg=msg)

            # Notify advanced services of the imminent state transition
            # for the router.
            try:
                kwargs = {'context': context, 'router': router_db}
                registry.notify(
                    resources.ROUTER, events.BEFORE_UPDATE, self, **kwargs)
            except exceptions.CallbackFailure as e:
                with excutils.save_and_reraise_exception():
                    # NOTE(armax): preserve old check's behavior
                    if len(e.errors) == 1:
                        raise e.errors[0].error
                    raise l3.RouterInUse(router_id=router_db['id'],
                                         reason=e)
    def _update_distributed_attr(
        self, context, router_id, router_db, data, gw_info):
        """Update the model to support the dvr case of a router.

        When the update sets distributed=True, retag every legacy router
        interface port (both the RouterPort row and the port itself) with
        the DVR interface device owner.
        """
        if data.get('distributed'):
            old_owner = l3_const.DEVICE_OWNER_ROUTER_INTF
            new_owner = l3_const.DEVICE_OWNER_DVR_INTERFACE
            for rp in router_db.attached_ports.filter_by(port_type=old_owner):
                rp.port_type = new_owner
                rp.port.device_owner = new_owner
    def _update_router_db(self, context, router_id, data, gw_info):
        """Update a router, handling a centralized -> distributed upgrade.

        On migration to distributed this also creates the SNAT interface
        ports (when a gateway port exists) and unbinds the router from its
        current L3 agents so it can be rescheduled as a DVR router.
        """
        with context.session.begin(subtransactions=True):
            router_db = super(
                L3_NAT_with_dvr_db_mixin, self)._update_router_db(
                context, router_id, data, gw_info)
            # Capture the migration condition before extra_attributes is
            # overwritten with the requested data below.
            migrating_to_distributed = (
                not router_db.extra_attributes.distributed and
                data.get('distributed') is True)
            self._validate_router_migration(context, router_db, data)
            router_db.extra_attributes.update(data)
            self._update_distributed_attr(
                context, router_id, router_db, data, gw_info)
            if migrating_to_distributed:
                if router_db['gw_port_id']:
                    # If the Legacy router is getting migrated to a DVR
                    # router, make sure to create corresponding
                    # snat interface ports that are to be consumed by
                    # the Service Node.
                    if not self._create_snat_intf_ports_if_not_exists(
                        context.elevated(), router_db):
                        LOG.debug("SNAT interface ports not created: %s",
                                  router_db['id'])
                cur_agents = self.list_l3_agents_hosting_router(
                    context, router_db['id'])['agents']
                for agent in cur_agents:
                    self._unbind_router(context, router_db['id'],
                                        agent['id'])
            return router_db
    def _delete_current_gw_port(self, context, router_id, router, new_network):
        """Overriden here to handle deletion of dvr internal ports.

        If there is a valid router update with gateway port to be deleted,
        then go ahead and delete the csnat ports and the floatingip
        agent gateway port associated with the dvr router.
        """
        # Capture the old external network id before the base class drops
        # the gateway port.
        gw_ext_net_id = (
            router.gw_port['network_id'] if router.gw_port else None)

        super(L3_NAT_with_dvr_db_mixin,
              self)._delete_current_gw_port(context, router_id,
                                            router, new_network)
        if (is_distributed_router(router) and
                gw_ext_net_id != new_network):
            self.delete_csnat_router_interface_ports(
                context.elevated(), router)
            # NOTE(Swami): Delete the Floatingip agent gateway port
            # on all hosts when it is the last gateway port in the
            # given external network.
            filters = {'network_id': [gw_ext_net_id],
                       'device_owner': [l3_const.DEVICE_OWNER_ROUTER_GW]}
            ext_net_gw_ports = self._core_plugin.get_ports(
                context.elevated(), filters)
            if not ext_net_gw_ports:
                self.delete_floatingip_agent_gateway_port(
                    context.elevated(), None, gw_ext_net_id)
    def _create_gw_port(self, context, router_id, router, new_network,
                        ext_ips):
        """Create the router gateway port and, for DVR, its SNAT ports."""
        super(L3_NAT_with_dvr_db_mixin,
              self)._create_gw_port(context, router_id, router, new_network,
                                    ext_ips)
        # Make sure that the gateway port exists before creating the
        # snat interface ports for distributed router.
        if router.extra_attributes.distributed and router.gw_port:
            snat_p_list = self._create_snat_intf_ports_if_not_exists(
                context.elevated(), router)
            if not snat_p_list:
                LOG.debug("SNAT interface ports not created: %s", snat_p_list)
def _get_device_owner(self, context, router=None):
"""Get device_owner for the specified router."""
router_is_uuid = isinstance(router, six.string_types)
if router_is_uuid:
router = self._get_router(context, router)
if is_distributed_router(router):
return l3_const.DEVICE_OWNER_DVR_INTERFACE
return super(L3_NAT_with_dvr_db_mixin,
self)._get_device_owner(context, router)
def _get_interface_ports_for_network(self, context, network_id):
router_intf_qry = context.session.query(l3_db.RouterPort)
router_intf_qry = router_intf_qry.join(models_v2.Port)
return router_intf_qry.filter(
models_v2.Port.network_id == network_id,
l3_db.RouterPort.port_type.in_(l3_const.ROUTER_INTERFACE_OWNERS)
)
    def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
        """Override to create floating agent gw port for DVR.

        Floating IP Agent gateway port will be created when a
        floatingIP association happens.

        :param fip: floating IP dict from the request
        :param floatingip_db: floating IP DB object being updated
        :param external_port: port on the external network backing the FIP
        """
        fip_port = fip.get('port_id')
        super(L3_NAT_with_dvr_db_mixin, self)._update_fip_assoc(
            context, fip, floatingip_db, external_port)
        associate_fip = fip_port and floatingip_db['id']
        if associate_fip and floatingip_db.get('router_id'):
            admin_ctx = context.elevated()
            router_dict = self.get_router(
                admin_ctx, floatingip_db['router_id'])
            # Check if distributed router and then create the
            # FloatingIP agent gateway port
            if router_dict.get('distributed'):
                vm_hostid = self._get_vm_port_hostid(
                    context, fip_port)
                if vm_hostid:
                    # FIXME (Swami): This FIP Agent Gateway port should be
                    # created only once and there should not be a duplicate
                    # for the same host. Until we find a good solution for
                    # augmenting multiple server requests we should use the
                    # existing flow.
                    fip_agent_port = (
                        self.create_fip_agent_gw_port_if_not_exists(
                            admin_ctx, external_port['network_id'],
                            vm_hostid))
                    LOG.debug("FIP Agent gateway port: %s", fip_agent_port)
def _get_floatingip_on_port(self, context, port_id=None):
"""Helper function to retrieve the fip associated with port."""
fip_qry = context.session.query(l3_db.FloatingIP)
floating_ip = fip_qry.filter_by(fixed_port_id=port_id)
return floating_ip.first()
    def add_router_interface(self, context, router_id, interface_info):
        """Attach a subnet or port to the router.

        For a distributed router with a gateway a matching csnat interface
        port is created as well; if that fails for a newly created port,
        the port is deleted to roll back.

        :returns: router interface info dict for the attached port
        """
        add_by_port, add_by_sub = self._validate_interface_info(interface_info)
        router = self._get_router(context, router_id)
        device_owner = self._get_device_owner(context, router)

        # This should be True unless adding an IPv6 prefix to an existing port
        new_port = True

        if add_by_port:
            port, subnets = self._add_interface_by_port(
                    context, router, interface_info['port_id'], device_owner)
        elif add_by_sub:
            port, subnets, new_port = self._add_interface_by_subnet(
                    context, router, interface_info['subnet_id'], device_owner)

        if new_port:
            if router.extra_attributes.distributed and router.gw_port:
                try:
                    admin_context = context.elevated()
                    self._add_csnat_router_interface_port(
                        admin_context, router, port['network_id'],
                        port['fixed_ips'][-1]['subnet_id'])
                except Exception:
                    with excutils.save_and_reraise_exception():
                        # we need to preserve the original state prior
                        # the request by rolling back the port creation
                        # that led to new_port=True
                        self._core_plugin.delete_port(
                            admin_context, port['id'])

            with context.session.begin(subtransactions=True):
                router_port = l3_db.RouterPort(
                    port_id=port['id'],
                    router_id=router.id,
                    port_type=device_owner
                )
                context.session.add(router_port)

        router_interface_info = self._make_router_interface_info(
            router_id, port['tenant_id'], port['id'], subnets[-1]['id'],
            [subnet['id'] for subnet in subnets])
        self.notify_router_interface_action(
            context, router_interface_info, 'add')
        return router_interface_info
def _port_has_ipv6_address(self, port):
"""Overridden to return False if DVR SNAT port."""
if port['device_owner'] == l3_const.DEVICE_OWNER_ROUTER_SNAT:
return False
return super(L3_NAT_with_dvr_db_mixin,
self)._port_has_ipv6_address(port)
    def _check_dvr_router_remove_required_and_notify_agent(
        self, context, router, port, subnets):
        """Clean up DVR state after an interface removal and notify agents.

        Deletes the csnat port of the removed subnet (when a gateway is
        set), removes the router from L3 agents that no longer host ports
        for it, then emits the 'remove' interface notification.

        :returns: router interface info dict for the removed port
        """
        if router.extra_attributes.distributed:
            if router.gw_port and subnets[0]['id']:
                self.delete_csnat_router_interface_ports(
                    context.elevated(), router, subnet_id=subnets[0]['id'])
            plugin = manager.NeutronManager.get_service_plugins().get(
                        constants.L3_ROUTER_NAT)
            l3_agents = plugin.get_l3_agents_hosting_routers(context,
                                                             [router['id']])
            for l3_agent in l3_agents:
                if not plugin.check_ports_exist_on_l3agent(context, l3_agent,
                                                           router['id']):
                    plugin.remove_router_from_l3_agent(
                        context, l3_agent['id'], router['id'])

        router_interface_info = self._make_router_interface_info(
            router['id'], port['tenant_id'], port['id'], subnets[0]['id'],
            [subnet['id'] for subnet in subnets])
        self.notify_router_interface_action(
            context, router_interface_info, 'remove')
        return router_interface_info
    def remove_router_interface(self, context, router_id, interface_info):
        """Detach a subnet or port from the router.

        :returns: router interface info dict for the removed port
        """
        remove_by_port, remove_by_subnet = (
            self._validate_interface_info(interface_info, for_removal=True)
        )
        port_id = interface_info.get('port_id')
        subnet_id = interface_info.get('subnet_id')
        router = self._get_router(context, router_id)
        device_owner = self._get_device_owner(context, router)

        if remove_by_port:
            port, subnets = self._remove_interface_by_port(
                    context, router_id, port_id, subnet_id, device_owner)

        # remove_by_subnet is not used here, because the validation logic of
        # _validate_interface_info ensures that at least one of remove_by_*
        # is True.
        else:
            port, subnets = self._remove_interface_by_subnet(
                    context, router_id, subnet_id, device_owner)

        router_interface_info = (
            self._check_dvr_router_remove_required_and_notify_agent(
                context, router, port, subnets))
        return router_interface_info
    def _get_snat_sync_interfaces(self, context, router_ids):
        """Query router interfaces that relate to list of router_ids.

        :returns: defaultdict mapping router_id -> list of SNAT port
            dicts. NOTE(review): an empty *router_ids* returns a plain
            list, not a dict - callers appear to only index it when
            router ids exist; confirm before relying on the type.
        """
        if not router_ids:
            return []
        qry = context.session.query(l3_db.RouterPort)
        qry = qry.filter(
            l3_db.RouterPort.router_id.in_(router_ids),
            l3_db.RouterPort.port_type == l3_const.DEVICE_OWNER_ROUTER_SNAT
        )
        interfaces = collections.defaultdict(list)
        for rp in qry:
            interfaces[rp.router_id].append(
                self._core_plugin._make_port_dict(rp.port, None))
        LOG.debug("Return the SNAT ports: %s", interfaces)
        return interfaces
    def _build_routers_list(self, context, routers, gw_ports):
        """Augment router dicts with gateway port and SNAT host info."""
        # Perform a single query up front for all routers
        if not routers:
            return []
        router_ids = [r['id'] for r in routers]
        snat_binding = l3_dvrsched_db.CentralizedSnatL3AgentBinding
        query = (context.session.query(snat_binding).
                 filter(snat_binding.router_id.in_(router_ids))).all()
        bindings = dict((b.router_id, b) for b in query)

        for rtr in routers:
            gw_port_id = rtr['gw_port_id']
            # Collect gw ports only if available
            if gw_port_id and gw_ports.get(gw_port_id):
                rtr['gw_port'] = gw_ports[gw_port_id]
                if 'enable_snat' in rtr[l3.EXTERNAL_GW_INFO]:
                    rtr['enable_snat'] = (
                        rtr[l3.EXTERNAL_GW_INFO]['enable_snat'])

                binding = bindings.get(rtr['id'])
                if not binding:
                    # No centralized SNAT binding yet for this router.
                    rtr['gw_port_host'] = None
                    LOG.debug('No snat is bound to router %s', rtr['id'])
                    continue

                rtr['gw_port_host'] = binding.l3_agent.host

        return routers
    def _process_routers(self, context, routers):
        """Index routers by id, attaching their SNAT interface ports."""
        routers_dict = {}
        snat_intfs_by_router_id = self._get_snat_sync_interfaces(
            context, [r['id'] for r in routers])
        for router in routers:
            routers_dict[router['id']] = router
            if router['gw_port_id']:
                snat_router_intfs = snat_intfs_by_router_id[router['id']]
                LOG.debug("SNAT ports returned: %s ", snat_router_intfs)
                router[l3_const.SNAT_ROUTER_INTF_KEY] = snat_router_intfs
        return routers_dict
    def _process_floating_ips_dvr(self, context, routers_dict,
                                  floating_ips, host, agent):
        """Attach floating IPs (and FIP agent ports) to the router dicts.

        For distributed routers only the floating IPs hosted on *host*
        are included; the agent's FIP gateway ports are fetched lazily,
        at most once per call.
        """
        fip_sync_interfaces = None
        LOG.debug("FIP Agent : %s ", agent.id)
        for floating_ip in floating_ips:
            router = routers_dict.get(floating_ip['router_id'])
            if router:
                router_floatingips = router.get(l3_const.FLOATINGIP_KEY, [])
                if router['distributed']:
                    # Skip FIPs whose VM port is bound elsewhere.
                    if floating_ip.get('host', None) != host:
                        continue
                    LOG.debug("Floating IP host: %s", floating_ip['host'])
                router_floatingips.append(floating_ip)
                router[l3_const.FLOATINGIP_KEY] = router_floatingips
                if not fip_sync_interfaces:
                    fip_sync_interfaces = self._get_fip_sync_interfaces(
                        context, agent.id)
                    LOG.debug("FIP Agent ports: %s", fip_sync_interfaces)
                router[l3_const.FLOATINGIP_AGENT_INTF_KEY] = (
                    fip_sync_interfaces)
def _get_fip_sync_interfaces(self, context, fip_agent_id):
"""Query router interfaces that relate to list of router_ids."""
if not fip_agent_id:
return []
filters = {'device_id': [fip_agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]}
interfaces = self._core_plugin.get_ports(context.elevated(), filters)
LOG.debug("Return the FIP ports: %s ", interfaces)
return interfaces
    def _get_dvr_sync_data(self, context, host, agent, router_ids=None,
                           active=None):
        """Build the router sync payload sent to a DVR-aware L3 agent.

        Joins routers, interfaces and floating IPs, annotates floating
        IPs of distributed routers with their VM port binding host, and
        populates subnet info on every port included in the payload.
        """
        routers, interfaces, floating_ips = self._get_router_info_list(
            context, router_ids=router_ids, active=active,
            device_owners=l3_const.ROUTER_INTERFACE_OWNERS)
        dvr_router_ids = set(router['id'] for router in routers
                             if is_distributed_router(router))
        floating_ip_port_ids = [fip['port_id'] for fip in floating_ips
                                if fip['router_id'] in dvr_router_ids]
        if floating_ip_port_ids:
            port_filter = {portbindings.HOST_ID: [host],
                           'id': floating_ip_port_ids}
            ports = self._core_plugin.get_ports(context, port_filter)
            port_dict = dict((port['id'], port) for port in ports)
            # Add the port binding host to the floatingip dictionary
            for fip in floating_ips:
                vm_port = port_dict.get(fip['port_id'], None)
                if vm_port:
                    fip['host'] = self._get_vm_port_hostid(context,
                                                           fip['port_id'],
                                                           port=vm_port)
        routers_dict = self._process_routers(context, routers)
        self._process_floating_ips_dvr(context, routers_dict,
                                       floating_ips, host, agent)
        # Collect every port that needs subnet details populated.
        ports_to_populate = []
        for router in routers_dict.values():
            if router.get('gw_port'):
                ports_to_populate.append(router['gw_port'])
            if router.get(l3_const.FLOATINGIP_AGENT_INTF_KEY):
                ports_to_populate += router[l3_const.FLOATINGIP_AGENT_INTF_KEY]
            if router.get(l3_const.SNAT_ROUTER_INTF_KEY):
                ports_to_populate += router[l3_const.SNAT_ROUTER_INTF_KEY]
        ports_to_populate += interfaces
        self._populate_subnets_for_ports(context, ports_to_populate)
        self._process_interfaces(routers_dict, interfaces)
        return list(routers_dict.values())
    def _get_vm_port_hostid(self, context, port_id, port=None):
        """Return the portbinding host_id.

        Implicitly returns None when the port's device_owner is neither
        DVR-serviced nor a FIP agent gateway.
        """
        vm_port_db = port or self._core_plugin.get_port(context, port_id)
        device_owner = vm_port_db['device_owner'] if vm_port_db else ""
        if (n_utils.is_dvr_serviced(device_owner) or
            device_owner == l3_const.DEVICE_OWNER_AGENT_GW):
            return vm_port_db[portbindings.HOST_ID]
def _get_agent_gw_ports_exist_for_network(
self, context, network_id, host, agent_id):
"""Return agent gw port if exist, or None otherwise."""
if not network_id:
LOG.debug("Network not specified")
return
filters = {
'network_id': [network_id],
'device_id': [agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]
}
ports = self._core_plugin.get_ports(context, filters)
if ports:
return ports[0]
def _get_router_ids(self, context):
"""Function to retrieve router IDs for a context without joins"""
query = self._model_query(context, l3_db.Router.id)
return [row[0] for row in query]
    def delete_floatingip_agent_gateway_port(
        self, context, host_id, ext_net_id):
        """Function to delete FIP gateway port with given ext_net_id.

        With a host_id only that host's port is removed (the loop stops
        after the first match); with host_id None every FIP agent gateway
        port on the network is deleted.
        """
        # delete any fip agent gw port
        device_filter = {'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW],
                         'network_id': [ext_net_id]}
        ports = self._core_plugin.get_ports(context,
                                            filters=device_filter)
        for p in ports:
            if not host_id or p[portbindings.HOST_ID] == host_id:
                self._core_plugin.ipam.delete_port(context, p['id'])
                if host_id:
                    return
    def create_fip_agent_gw_port_if_not_exists(
        self, context, network_id, host):
        """Function to return the FIP Agent GW port.

        This function will create a FIP Agent GW port
        if required. If the port already exists, it
        will return the existing port and will not
        create a new one.

        :returns: the new or existing agent gateway port, or None
            (implicitly) when no L3 agent is registered on *host*
        :raises n_exc.BadRequest: when the port cannot be created
        """
        l3_agent_db = self._get_agent_by_type_and_host(
            context, l3_const.AGENT_TYPE_L3, host)
        if l3_agent_db:
            LOG.debug("Agent ID exists: %s", l3_agent_db['id'])
            f_port = self._get_agent_gw_ports_exist_for_network(
                context, network_id, host, l3_agent_db['id'])
            if not f_port:
                LOG.info(_LI('Agent Gateway port does not exist,'
                             ' so create one: %s'), f_port)
                port_data = {'tenant_id': '',
                             'network_id': network_id,
                             'device_id': l3_agent_db['id'],
                             'device_owner': l3_const.DEVICE_OWNER_AGENT_GW,
                             'binding:host_id': host,
                             'admin_state_up': True,
                             'name': ''}
                agent_port = p_utils.create_port(self._core_plugin, context,
                                                 {'port': port_data})
                if agent_port:
                    self._populate_subnets_for_ports(context, [agent_port])
                    return agent_port
                msg = _("Unable to create the Agent Gateway Port")
                raise n_exc.BadRequest(resource='router', msg=msg)
            else:
                self._populate_subnets_for_ports(context, [f_port])
                return f_port
def _get_snat_interface_ports_for_router(self, context, router_id):
"""Return all existing snat_router_interface ports."""
qry = context.session.query(l3_db.RouterPort)
qry = qry.filter_by(
router_id=router_id,
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
)
ports = [self._core_plugin._make_port_dict(rp.port, None)
for rp in qry]
return ports
    def _add_csnat_router_interface_port(
        self, context, router, network_id, subnet_id, do_pop=True):
        """Add SNAT interface to the specified router and subnet.

        :param do_pop: when True, populate subnet info on the new port and
            return that call's result; when False, return the bare port
            dict so the caller can populate subnets in bulk later
        :raises n_exc.BadRequest: when the port cannot be created
        """
        port_data = {'tenant_id': '',
                     'network_id': network_id,
                     'fixed_ips': [{'subnet_id': subnet_id}],
                     'device_id': router.id,
                     'device_owner': l3_const.DEVICE_OWNER_ROUTER_SNAT,
                     'admin_state_up': True,
                     'name': ''}
        snat_port = p_utils.create_port(self._core_plugin, context,
                                        {'port': port_data})
        if not snat_port:
            msg = _("Unable to create the SNAT Interface Port")
            raise n_exc.BadRequest(resource='router', msg=msg)

        with context.session.begin(subtransactions=True):
            router_port = l3_db.RouterPort(
                port_id=snat_port['id'],
                router_id=router.id,
                port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
            )
            context.session.add(router_port)

        if do_pop:
            return self._populate_subnets_for_ports(context, [snat_port])

        return snat_port
    def _create_snat_intf_ports_if_not_exists(self, context, router):
        """Function to return the snat interface port list.

        This function will return the snat interface port list
        if it exists. If the port does not exist it will create
        new ports and then return the list.
        """
        port_list = self._get_snat_interface_ports_for_router(
            context, router.id)
        if port_list:
            self._populate_subnets_for_ports(context, port_list)
            return port_list
        port_list = []

        # One csnat port is created per DVR interface of the router.
        int_ports = (
            rp.port for rp in
            router.attached_ports.filter_by(
                port_type=l3_const.DEVICE_OWNER_DVR_INTERFACE
            )
        )
        LOG.info(_LI('SNAT interface port list does not exist,'
                     ' so create one: %s'), port_list)
        for intf in int_ports:
            if intf.fixed_ips:
                # Passing the subnet for the port to make sure the IP's
                # are assigned on the right subnet if multiple subnet
                # exists
                snat_port = self._add_csnat_router_interface_port(
                    context, router, intf['network_id'],
                    intf['fixed_ips'][0]['subnet_id'], do_pop=False)
                port_list.append(snat_port)
        if port_list:
            self._populate_subnets_for_ports(context, port_list)
        return port_list
    def dvr_vmarp_table_update(self, context, port_dict, action):
        """Notify L3 agents of VM ARP table changes.

        When a VM goes up or down, look for one DVR router on the port's
        subnet, and send the VM's ARP details to all L3 agents hosting the
        router.

        :param action: "add" or "del". NOTE(review): any other value
            leaves notify_action unbound (NameError) - callers must pass
            one of these two.
        """
        # Check this is a valid VM or service port
        if not (n_utils.is_dvr_serviced(port_dict['device_owner']) and
                port_dict['fixed_ips']):
            return
        ip_address = port_dict['fixed_ips'][0]['ip_address']
        subnet = port_dict['fixed_ips'][0]['subnet_id']
        filters = {'fixed_ips': {'subnet_id': [subnet]}}
        ports = self._core_plugin.get_ports(context, filters=filters)
        for port in ports:
            if port['device_owner'] == l3_const.DEVICE_OWNER_DVR_INTERFACE:
                router_id = port['device_id']
                router_dict = self._get_router(context, router_id)
                if router_dict.extra_attributes.distributed:
                    arp_table = {'ip_address': ip_address,
                                 'mac_address': port_dict['mac_address'],
                                 'subnet_id': subnet}
                    if action == "add":
                        notify_action = self.l3_rpc_notifier.add_arp_entry
                    elif action == "del":
                        notify_action = self.l3_rpc_notifier.del_arp_entry
                    notify_action(context, router_id, arp_table)
                # Stop after the first DVR interface found on the subnet.
                return
    def delete_csnat_router_interface_ports(self, context,
                                            router, subnet_id=None):
        """Delete the router's csnat ports, optionally for one subnet only.

        Each csnat router interface port is associated with a subnet,
        so the subnet id is used to pick the right ports when given.
        """
        # TODO(markmcclain): This is suboptimal but was left to reduce
        # changeset size since it is late in cycle
        ports = (
            rp.port.id for rp in
            router.attached_ports.filter_by(
                port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT)
            if rp.port
        )

        c_snat_ports = self._core_plugin.get_ports(
            context,
            filters={'id': ports}
        )
        for p in c_snat_ports:
            if subnet_id is None:
                self._core_plugin.delete_port(context,
                                              p['id'],
                                              l3_port_check=False)
            else:
                if p['fixed_ips'][0]['subnet_id'] == subnet_id:
                    LOG.debug("Subnet matches: %s", subnet_id)
                    self._core_plugin.delete_port(context,
                                                  p['id'],
                                                  l3_port_check=False)
def create_floatingip(self, context, floatingip,
initial_status=l3_const.FLOATINGIP_STATUS_ACTIVE):
floating_ip = self._create_floatingip(
context, floatingip, initial_status)
self._notify_floating_ip_change(context, floating_ip)
return floating_ip
    def _notify_floating_ip_change(self, context, floating_ip):
        """Notify the right L3 agents after a floating IP change.

        For an associated FIP on a distributed router only the host that
        binds the fixed port is notified; otherwise a regular router
        update notification is sent.
        """
        router_id = floating_ip['router_id']
        fixed_port_id = floating_ip['port_id']
        # we need to notify agents only in case Floating IP is associated
        if not router_id or not fixed_port_id:
            return

        try:
            router = self._get_router(context, router_id)
        except l3.RouterNotFound:
            # TODO(obondarev): bug 1507602 was filed to investigate race
            # condition here. For now we preserve original behavior and do
            # broad notification
            LOG.warning(_LW("Router %s was not found. "
                            "Doing broad notification."),
                        router_id)
            self.notify_router_updated(context, router_id)
            return

        if is_distributed_router(router):
            host = self._get_vm_port_hostid(context, fixed_port_id)
            self.l3_rpc_notifier.routers_updated_on_host(
                context, [router_id], host)
        else:
            self.notify_router_updated(context, router_id)
def update_floatingip(self, context, id, floatingip):
old_floatingip, floatingip = self._update_floatingip(
context, id, floatingip)
self._notify_floating_ip_change(context, old_floatingip)
if (floatingip['router_id'] != old_floatingip['router_id'] or
floatingip['port_id'] != old_floatingip['port_id']):
self._notify_floating_ip_change(context, floatingip)
return floatingip
def delete_floatingip(self, context, id):
floating_ip = self._delete_floatingip(context, id)
self._notify_floating_ip_change(context, floating_ip)
def is_distributed_router(router):
    """Return True if router to be handled is distributed."""
    try:
        # DB objects expose the flag through extra_attributes.
        requested = router.extra_attributes.distributed
    except AttributeError:
        # Otherwise assume a request body dict.
        requested = router.get('distributed')
    # Fall back to the system-wide default when the flag was not set.
    return (requested if attributes.is_attr_set(requested)
            else cfg.CONF.router_distributed)
|
|
"""
Allow Panflute to be run as a command line executable
to be used as a Pandoc filter or used in Pandoctools
shell scripts as Pandoc filter with arguments.
Exports ``main`` and ``panfl``.
"""
import os
import os.path as p
from pathlib import Path
import sys
import click
from io import StringIO
from .io import load, dump
from .utils import debug, ContextImport
# sys.path without '' / '.' and with non-existent directories dropped;
# used when searching for filters in panfl mode.
reduced_sys_path = [d for d in sys.path if (d not in ('', '.')) and p.isdir(d)]
def get_filter_dirs(hardcoded=True):
    """
    Return directories where we expect to find filters.

    If hardcoded=True use paths we know ex ante; if False ask Pandoc for a list

    :param hardcoded: bool
    :return: list of str
    """
    if hardcoded:
        if os.name == 'nt':
            base = Path(os.environ['APPDATA'])
            d1 = base / 'pandoc' / 'filters'
            return [str(d1)]
        else:
            from .tools import pandoc_version
            version = pandoc_version.version
            base = Path(os.environ['HOME'])
            res = []
            # str() converts Path to str, which is necessary for
            # test_panfl.test_get_filter_dirs and for consistency of the
            # return types.
            # pandoc 2.12 dropped the historical ~/.pandoc convention:
            if version[:2] < (2, 12):
                res.append(str(base / '.pandoc' / 'filters'))
            res.append(str(base / '.local' / 'share' / 'pandoc' / 'filters'))
            return res
    else:
        from .tools import pandoc_version
        return pandoc_version.data_dir
def stdio(filters=None, search_dirs=None, data_dir=True, sys_path=True,
          panfl_=False, input_stream=None, output_stream=None):
    """
    Reads JSON from stdin and second CLI argument:
    ``sys.argv[1]``. Dumps JSON doc to the stdout.

    :param filters: Union[List[str], None]
        if None then read from metadata
    :param search_dirs: Union[List[str], None]
        if None then read from metadata
    :param data_dir: bool
    :param sys_path: bool
    :param panfl_: bool
    :param input_stream: io.StringIO or None
        for debug purpose
    :param output_stream: io.StringIO or None
        for debug purpose
    :return: None
    """
    doc = load(input_stream)
    verbose = doc.get_metadata('panflute-verbose', False)

    if search_dirs is None:
        # metadata 'panflute-path' can be a list, a string, or missing
        # `search_dirs` should be a list of str
        search_dirs = doc.get_metadata('panflute-path', [])
        if type(search_dirs) != list:
            search_dirs = [search_dirs]
        # Two pseudo-entries act as flags instead of directories:
        if '--data-dir' in search_dirs:
            data_dir = True
        if '--no-sys-path' in search_dirs:
            sys_path = False
        search_dirs = [dir_ for dir_ in search_dirs
                       if dir_ not in ('--data-dir', '--no-sys-path')]

    if verbose:
        debug('panflute: data_dir={} sys_path={}'.format(data_dir, sys_path))

    # Expand ~ and env vars, then normalize every user-provided directory.
    search_dirs = [p.normpath(p.expanduser(p.expandvars(dir_))) for dir_ in search_dirs]
    if not panfl_:
        # default panflute behaviour:
        search_dirs.append('.')
        if data_dir:
            search_dirs.extend(get_filter_dirs())
        if sys_path:
            search_dirs += sys.path
    else:
        # panfl/pandoctools behaviour:
        if data_dir:
            search_dirs.extend(get_filter_dirs())
        if sys_path:
            search_dirs += reduced_sys_path

    if verbose:
        debug('panflute will search for filters in the following folders:')
        debug(' '.join('"{}"'.format(f) for f in search_dirs))

    # Display message (tests that everything is working ok)
    msg = doc.get_metadata('panflute-echo', False)
    if msg:
        debug(msg)

    if filters is None:
        # metadata 'panflute-filters' can be a list, a string, or missing
        # `filters` should be a list of str
        filters = doc.get_metadata('panflute-filters', [])
        if type(filters) != list:
            filters = [filters]

    if filters:
        if verbose:
            msg = "panflute: will run the following filters:"
            debug(msg, ' '.join(filters))
        doc = autorun_filters(filters, doc, search_dirs, verbose)
    elif verbose:
        debug("panflute: no filters were provided")

    dump(doc, output_stream)
def main():
    """
    Allows Panflute to be run as a command line executable
    to be used as a Pandoc filter.

    Reads the document from stdin and writes the filtered JSON to stdout.
    """
    stdio()
# CLI help text attached to the `panfl` click command (string content is
# user-facing and left untouched).
help_str = """Allows Panflute to be run as a command line executable:
* to be used in Pandoctools shell scripts as Pandoc filter with
multiple arguments (should have -t/--to option in this case):
`pandoc example.md -t json | panfl foo.bar -t markdown | pandoc -f json`
* to be used as a Pandoc filter (in this case only one positional
argument is allowed of all options): `pandoc --filter panfl`
Filters may be set with or without .py extension.
It can be relative or absolute paths to files or modules specs
like `foo.bar`.
MIND THAT Panflute temporarily prepends folder of the filter
(or relevant dir provided if module spec) TO THE `sys.path` before
importing the filter!
Search preserves directories order (except for --data-dir and
`sys.path`).
"""
@click.command(help=help_str)
@click.argument('filters', nargs=-1)
@click.option('-w', '-t', '--write', '--to', 'to', type=str, default=None,
              help='Derivative of Pandoc writer option that Pandoc passes to filters.')
@click.option('--dir', '-d', 'search_dirs', multiple=True,
              help="Search filters in provided directories: `-d dir1 -d dir2`.")
@click.option('--data-dir', is_flag=True, default=False,
              help="Search filters in default user data directory listed in `pandoc --version` " +
                   "(in it's `filters` subfolder actually). It's appended to the search list.")
@click.option('--no-sys-path', 'sys_path', is_flag=True, default=True,
              help="Disable search filters in python's `sys.path` (without '' and '.') " +
                   "that is appended to the search list.")
def panfl(filters, to, search_dirs, data_dir, sys_path):
    """
    Allows Panflute to be run as a command line executable:
    * to be used in Pandoctools shell scripts as Pandoc filter with
    multiple arguments (should have -t/--to option in this case):
    ``pandoc -t json | panfl -t markdown foo.bar | pandoc -f json``
    * to be used as a Pandoc filter (in this case only one positional
    argument is allowed of all options):
    ``pandoc --filter panfl``
    MIND THAT Panflute temporarily prepends folder of the filter
    (or relevant dir provided if module spec) TO THE `sys.path` before
    importing the filter!
    """
    if to is None:
        # Pandoc filter mode: exactly one positional argument, no options.
        if (len(filters) > 1) or search_dirs or not sys_path or data_dir:
            raise ValueError('When no `--to` option then Pandoc filter mode assumed and ' +
                             'only one positional argument is allowed of all options.')
        else:
            filters, search_dirs = None, None
            sys_path, data_dir = True, False
    else:
        filters, search_dirs = list(filters), list(search_dirs)

    # `load()` in `stdio()` needs `to` in the 2nd arg
    sys.argv[1:] = []
    sys.argv.append(to)
    stdio(filters, search_dirs, data_dir, sys_path, panfl_=True)
def autorun_filters(filters, doc, search_dirs, verbose):
    """
    Resolve each filter spec to a file, then import and run it on the doc.

    :param filters: list of str
        module specs like ``foo.bar`` or file paths (with/without ``.py``)
    :param doc: panflute.Doc
    :param search_dirs: list of str
        directories searched in order; absolute file paths match directly
    :param verbose: bool
    :return: panflute.Doc
    :raises Exception: when a filter cannot be found or fails to run
    """
    def remove_py(s):
        # Strip a trailing '.py' extension if present.
        return s[:-3] if s.endswith('.py') else s

    # --- resolution phase: map every filter spec to a concrete file ---
    filter_paths = []
    for filter_ in filters:
        filter_exp = p.normpath(p.expanduser(p.expandvars(filter_)))
        if filter_exp == remove_py(p.basename(filter_exp)).lstrip('.'):
            # Bare dotted name like `foo.bar`: treat as a module spec.
            # import .foo # is not supported
            module = True
            mod_path = filter_exp.replace('.', p.sep)
            path_postfixes = (p.join(mod_path, '__init__.py'), mod_path + '.py')
        else:
            module = False
            # allow with and without .py ending
            path_postfixes = (remove_py(filter_exp) + '.py',)

        for path, path_postf in [(path, path_postf)
                                 for path in search_dirs
                                 for path_postf in path_postfixes]:
            if p.isabs(path_postf):
                filter_path = path_postf
            else:
                filter_path = p.abspath(p.normpath(p.join(path, path_postf)))

            if p.isfile(filter_path):
                if verbose:
                    debug(f'panflute: filter "{filter_}" found in {filter_path}')
                if module and not (path in reduced_sys_path):
                    # `path` already doesn't contain `.`, `..`, env vars or `~`
                    extra_dir = p.abspath(path)
                else:
                    extra_dir = None
                module_ = filter_exp if module else filter_path
                filter_paths.append((filter_, filter_path, module_, extra_dir))
                break
            elif p.isabs(path_postf):
                # An absolute spec can only match itself - fail fast.
                if verbose:
                    debug(f'  filter "{filter_}" NOT found in {filter_path}')
                raise Exception("filter not found: " + filter_)
            elif verbose:
                debug(f'  filter "{filter_}" NOT found in {filter_path}')
        else:
            raise Exception("filter not found: " + filter_)

    # --- execution phase ---
    # Intercept any print() statements made by filters (which would cause
    # Pandoc to fail)
    sys.stdout = alt_stdout = StringIO()
    for filter_, filter_path, module_, extra_dir in filter_paths:
        if verbose:
            debug("panflute: running filter <{}>".format(filter_))
        with ContextImport(module_, extra_dir) as module:
            try:
                module.main(doc)
            except Exception as e:
                debug("Failed to run filter: " + filter_)
                if not hasattr(module, 'main'):
                    debug(' - Possible cause: filter lacks a main() function')
                debug('Filter code:')
                debug('-' * 64)
                with open(filter_path) as fp:
                    debug(fp.read())
                debug('-' * 64)
                raise Exception(e)
        if verbose:
            debug("panflute: filter <{}> completed".format(filter_))
        alt_stdout_data = alt_stdout.getvalue()
        if alt_stdout_data:
            # BUG FIX: a trailing comma previously made `msg` a 2-tuple,
            # so debug() printed the tuple repr; implicit string literal
            # concatenation yields the intended single-line warning.
            msg = (
                f'Panflute Warning: filter "{filter_}" wrote to stdout, '
                'but Pandoc does not allow that')
            debug(msg)
            debug(alt_stdout_data)
            sys.stderr.flush()
            # Reset the capture buffer for the next filter.
            sys.stdout = alt_stdout = StringIO()

    # Restore stdout
    sys.stdout = sys.__stdout__
    return doc
|
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import json
import uuid
import mock
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_serialization import jsonutils
import webob
from cinder.api.contrib import volume_actions
from cinder import exception
from cinder.image import glance
from cinder import test
from cinder.tests.api import fakes
from cinder.tests.api.v2 import stubs
from cinder import volume
from cinder.volume import api as volume_api
from cinder.volume import rpcapi as volume_rpcapi
CONF = cfg.CONF
class VolumeActionsTest(test.TestCase):
    """Tests for the os-* volume action API (POST /volumes/<id>/action).

    The volume API layer is patched out in setUp, so each test exercises
    only request parsing / response codes of the WSGI extension.
    """
    # simple actions that need no request body and should return 202
    _actions = ('os-reserve', 'os-unreserve')
    # volume.API methods stubbed with mocks returning True
    _methods = ('attach', 'detach', 'reserve_volume', 'unreserve_volume')
    def setUp(self):
        """Patch volume.API methods and get()/update() so no RPC happens."""
        super(VolumeActionsTest, self).setUp()
        self.UUID = uuid.uuid4()
        self.api_patchers = {}
        for _meth in self._methods:
            self.api_patchers[_meth] = mock.patch('cinder.volume.API.' + _meth)
            self.api_patchers[_meth].start()
            self.addCleanup(self.api_patchers[_meth].stop)
            self.api_patchers[_meth].return_value = True
        # canonical volume dict returned by the mocked get()/update()
        vol = {'id': 'fake', 'host': 'fake', 'status': 'available', 'size': 1,
               'migration_status': None, 'volume_type_id': 'fake'}
        self.get_patcher = mock.patch('cinder.volume.API.get')
        self.mock_volume_get = self.get_patcher.start()
        self.addCleanup(self.get_patcher.stop)
        self.mock_volume_get.return_value = vol
        self.update_patcher = mock.patch('cinder.volume.API.update')
        self.mock_volume_update = self.update_patcher.start()
        self.addCleanup(self.update_patcher.stop)
        self.mock_volume_update.return_value = vol
        self.flags(rpc_backend='cinder.openstack.common.rpc.impl_fake')
    def test_simple_api_actions(self):
        """Every action in _actions should be accepted with 202."""
        app = fakes.wsgi_app()
        for _action in self._actions:
            req = webob.Request.blank('/v2/fake/volumes/%s/action' %
                                      self.UUID)
            req.method = 'POST'
            req.body = jsonutils.dumps({_action: None})
            req.content_type = 'application/json'
            res = req.get_response(app)
            self.assertEqual(res.status_int, 202)
    def test_initialize_connection(self):
        """os-initialize_connection with a connector returns 200."""
        with mock.patch.object(volume_api.API,
                               'initialize_connection') as init_conn:
            init_conn.return_value = {}
            body = {'os-initialize_connection': {'connector': 'fake'}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 200)
    def test_initialize_connection_without_connector(self):
        """Missing 'connector' key is a client error (400)."""
        with mock.patch.object(volume_api.API,
                               'initialize_connection') as init_conn:
            init_conn.return_value = {}
            body = {'os-initialize_connection': {}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 400)
    def test_initialize_connection_exception(self):
        """Backend failure surfaces as a 500 error."""
        with mock.patch.object(volume_api.API,
                               'initialize_connection') as init_conn:
            init_conn.side_effect = \
                exception.VolumeBackendAPIException(data=None)
            body = {'os-initialize_connection': {'connector': 'fake'}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 500)
    def test_terminate_connection(self):
        """os-terminate_connection with a connector returns 202."""
        with mock.patch.object(volume_api.API,
                               'terminate_connection') as terminate_conn:
            terminate_conn.return_value = {}
            body = {'os-terminate_connection': {'connector': 'fake'}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 202)
    def test_terminate_connection_without_connector(self):
        """Missing 'connector' key is a client error (400)."""
        with mock.patch.object(volume_api.API,
                               'terminate_connection') as terminate_conn:
            terminate_conn.return_value = {}
            body = {'os-terminate_connection': {}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 400)
    def test_terminate_connection_with_exception(self):
        """Backend failure surfaces as a 500 error."""
        with mock.patch.object(volume_api.API,
                               'terminate_connection') as terminate_conn:
            terminate_conn.side_effect = \
                exception.VolumeBackendAPIException(data=None)
            body = {'os-terminate_connection': {'connector': 'fake'}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, 500)
    def test_attach_to_instance(self):
        """Attaching to an instance with explicit 'rw' mode returns 202."""
        body = {'os-attach': {'instance_uuid': 'fake',
                              'mountpoint': '/dev/vdc',
                              'mode': 'rw'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 202)
    def test_attach_to_host(self):
        # using 'read-write' mode attach volume by default
        body = {'os-attach': {'host_name': 'fake_host',
                              'mountpoint': '/dev/vdc'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 202)
    def test_detach(self):
        """os-detach with an attachment id returns 202."""
        body = {'os-detach': {'attachment_id': 'fakeuuid'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(202, res.status_int)
    def test_attach_with_invalid_arguments(self):
        """Bad os-attach bodies (no target, two targets, bad mode) give 400."""
        # Invalid request to attach volume an invalid target
        body = {'os-attach': {'mountpoint': '/dev/vdc'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.headers["content-type"] = "application/json"
        req.body = jsonutils.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
        # Invalid request to attach volume to an instance and a host
        body = {'os-attach': {'instance_uuid': 'fake',
                              'host_name': 'fake_host',
                              'mountpoint': '/dev/vdc'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.headers["content-type"] = "application/json"
        req.body = jsonutils.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
        # Invalid request to attach volume with an invalid mode
        body = {'os-attach': {'instance_uuid': 'fake',
                              'mountpoint': '/dev/vdc',
                              'mode': 'rr'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.headers["content-type"] = "application/json"
        req.body = jsonutils.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
        body = {'os-attach': {'host_name': 'fake_host',
                              'mountpoint': '/dev/vdc',
                              'mode': 'ww'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.headers["content-type"] = "application/json"
        req.body = jsonutils.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
    def test_begin_detaching(self):
        """os-begin_detaching is accepted with 202."""
        def fake_begin_detaching(*args, **kwargs):
            return {}
        self.stubs.Set(volume.API, 'begin_detaching',
                       fake_begin_detaching)
        body = {'os-begin_detaching': {'fake': 'fake'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 202)
    def test_roll_detaching(self):
        """os-roll_detaching is accepted with 202."""
        def fake_roll_detaching(*args, **kwargs):
            return {}
        self.stubs.Set(volume.API, 'roll_detaching',
                       fake_roll_detaching)
        body = {'os-roll_detaching': {'fake': 'fake'}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 202)
    def test_extend_volume(self):
        """os-extend with a numeric new_size is accepted with 202."""
        def fake_extend_volume(*args, **kwargs):
            return {}
        self.stubs.Set(volume.API, 'extend',
                       fake_extend_volume)
        body = {'os-extend': {'new_size': 5}}
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = "POST"
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 202)
    def test_update_readonly_flag(self):
        """os-update_readonly_flag accepts bool-ish values, rejects others."""
        def fake_update_readonly_flag(*args, **kwargs):
            return {}
        self.stubs.Set(volume.API, 'update_readonly_flag',
                       fake_update_readonly_flag)
        def make_update_readonly_flag_test(self, readonly, return_code):
            body = {"os-update_readonly_flag": {"readonly": readonly}}
            if readonly is None:
                body = {"os-update_readonly_flag": {}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, return_code)
        make_update_readonly_flag_test(self, True, 202)
        make_update_readonly_flag_test(self, False, 202)
        make_update_readonly_flag_test(self, '1', 202)
        make_update_readonly_flag_test(self, '0', 202)
        make_update_readonly_flag_test(self, 'true', 202)
        make_update_readonly_flag_test(self, 'false', 202)
        make_update_readonly_flag_test(self, 'tt', 400)
        make_update_readonly_flag_test(self, 11, 400)
        make_update_readonly_flag_test(self, None, 400)
    def test_set_bootable(self):
        """os-set_bootable accepts bool-ish values, rejects others."""
        def make_set_bootable_test(self, bootable, return_code):
            body = {"os-set_bootable": {"bootable": bootable}}
            if bootable is None:
                body = {"os-set_bootable": {}}
            req = webob.Request.blank('/v2/fake/volumes/1/action')
            req.method = "POST"
            req.body = jsonutils.dumps(body)
            req.headers["content-type"] = "application/json"
            res = req.get_response(fakes.wsgi_app())
            self.assertEqual(res.status_int, return_code)
        make_set_bootable_test(self, True, 200)
        make_set_bootable_test(self, False, 200)
        make_set_bootable_test(self, '1', 200)
        make_set_bootable_test(self, '0', 200)
        make_set_bootable_test(self, 'true', 200)
        make_set_bootable_test(self, 'false', 200)
        make_set_bootable_test(self, 'tt', 400)
        make_set_bootable_test(self, 11, 400)
        make_set_bootable_test(self, None, 400)
class VolumeRetypeActionsTest(VolumeActionsTest):
    """Tests for the os-retype volume action.

    Volume-type / QoS / quota lookups are patched out so retype request
    validation can be exercised in isolation.
    """
    def setUp(self):
        """Start volume-type/QoS/quota patchers before the base setUp."""
        def get_vol_type(*args, **kwargs):
            # 'fake' is the volume's current type, 'foo' the retype target
            d1 = {'id': 'fake', 'qos_specs_id': 'fakeqid1', 'extra_specs': {}}
            d2 = {'id': 'foo', 'qos_specs_id': 'fakeqid2', 'extra_specs': {}}
            return d1 if d1['id'] == args[1] else d2
        self.retype_patchers = {}
        self.retype_mocks = {}
        paths = ['cinder.volume.volume_types.get_volume_type',
                 'cinder.volume.volume_types.get_volume_type_by_name',
                 'cinder.volume.qos_specs.get_qos_specs',
                 'cinder.quota.QUOTAS.add_volume_type_opts',
                 'cinder.quota.QUOTAS.reserve']
        for path in paths:
            name = path.split('.')[-1]
            self.retype_patchers[name] = mock.patch(path)
            self.retype_mocks[name] = self.retype_patchers[name].start()
            self.addCleanup(self.retype_patchers[name].stop)
        self.retype_mocks['get_volume_type'].side_effect = get_vol_type
        self.retype_mocks['get_volume_type_by_name'].side_effect = get_vol_type
        self.retype_mocks['add_volume_type_opts'].return_value = None
        self.retype_mocks['reserve'].return_value = None
        super(VolumeRetypeActionsTest, self).setUp()
    def _retype_volume_exec(self, expected_status, new_type='foo'):
        """POST an os-retype request and assert the HTTP status code."""
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        retype_body = {'new_type': new_type, 'migration_policy': 'never'}
        req.body = jsonutils.dumps({'os-retype': retype_body})
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, expected_status)
    @mock.patch('cinder.volume.qos_specs.get_qos_specs')
    def test_retype_volume_success(self, _mock_get_qspecs):
        # Test that the retype API works for both available and in-use
        self._retype_volume_exec(202)
        self.mock_volume_get.return_value['status'] = 'in-use'
        specs = {'id': 'fakeqid1', 'name': 'fake_name1',
                 'consumer': 'back-end', 'specs': {'key1': 'value1'}}
        _mock_get_qspecs.return_value = specs
        self._retype_volume_exec(202)
    def test_retype_volume_no_body(self):
        # Request with no body should fail
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        req.body = jsonutils.dumps({'os-retype': None})
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
    def test_retype_volume_bad_policy(self):
        # Request with invalid migration policy should fail
        req = webob.Request.blank('/v2/fake/volumes/1/action')
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        retype_body = {'new_type': 'foo', 'migration_policy': 'invalid'}
        req.body = jsonutils.dumps({'os-retype': retype_body})
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
    def test_retype_volume_bad_status(self):
        # Should fail if volume does not have proper status
        self.mock_volume_get.return_value['status'] = 'error'
        self._retype_volume_exec(400)
    def test_retype_type_no_exist(self):
        # Should fail if new type does not exist
        exc = exception.VolumeTypeNotFound('exc')
        self.retype_mocks['get_volume_type'].side_effect = exc
        self._retype_volume_exec(404)
    def test_retype_same_type(self):
        # Should fail if new type and old type are the same
        self._retype_volume_exec(400, new_type='fake')
    def test_retype_over_quota(self):
        # Should fail if going over quota for new type
        exc = exception.OverQuota(overs=['gigabytes'],
                                  quotas={'gigabytes': 20},
                                  usages={'gigabytes': {'reserved': 5,
                                                        'in_use': 15}})
        self.retype_mocks['reserve'].side_effect = exc
        self._retype_volume_exec(413)
    @mock.patch('cinder.volume.qos_specs.get_qos_specs')
    def _retype_volume_diff_qos(self, vol_status, consumer, expected_status,
                                _mock_get_qspecs):
        """Retype between types whose QoS specs differ; check status code."""
        def fake_get_qos(ctxt, qos_id):
            d1 = {'id': 'fakeqid1', 'name': 'fake_name1',
                  'consumer': consumer, 'specs': {'key1': 'value1'}}
            d2 = {'id': 'fakeqid2', 'name': 'fake_name2',
                  'consumer': consumer, 'specs': {'key1': 'value1'}}
            return d1 if d1['id'] == qos_id else d2
        self.mock_volume_get.return_value['status'] = vol_status
        _mock_get_qspecs.side_effect = fake_get_qos
        self._retype_volume_exec(expected_status)
    def test_retype_volume_diff_qos_fe_in_use(self):
        # should fail if changing qos enforced by front-end for in-use volumes
        self._retype_volume_diff_qos('in-use', 'front-end', 400)
    def test_retype_volume_diff_qos_fe_available(self):
        # should NOT fail if changing qos enforced by FE for available volumes
        self._retype_volume_diff_qos('available', 'front-end', 202)
    def test_retype_volume_diff_qos_be(self):
        # should NOT fail if changing qos enforced by back-end
        self._retype_volume_diff_qos('available', 'back-end', 202)
        self._retype_volume_diff_qos('in-use', 'back-end', 202)
def stub_volume_get(self, context, volume_id):
    """Fake volume_api.API.get; volume id 5 is reported as attached."""
    vol = stubs.stub_volume(volume_id)
    vol['status'] = 'in-use' if volume_id == 5 else 'available'
    return vol
def stub_upload_volume_to_image_service(self, context, volume, metadata,
                                        force):
    """Fake volume_api.API.copy_volume_to_image.

    Echoes selected fields of *volume* back in the response dict that the
    real API would build, with fixed image metadata.
    """
    response = dict(
        id=volume['id'],
        updated_at=datetime.datetime(1, 1, 1, 1, 1, 1),
        status='uploading',
        display_description=volume['display_description'],
        size=volume['size'],
        volume_type=volume['volume_type'],
        image_id=1,
        container_format='bare',
        disk_format='raw',
        image_name='image_name',
    )
    return response
class VolumeImageActionsTest(test.TestCase):
    """Tests for the os-volume_upload_image action (volume -> image)."""
    def setUp(self):
        super(VolumeImageActionsTest, self).setUp()
        self.controller = volume_actions.VolumeActionsController()
        # module-level stub_volume_get fakes volume lookups
        self.stubs.Set(volume_api.API, 'get', stub_volume_get)
    def _get_os_volume_upload_image(self):
        """Return a canonical os-volume_upload_image request body."""
        vol = {
            "container_format": 'bare',
            "disk_format": 'raw',
            "updated_at": datetime.datetime(1, 1, 1, 1, 1, 1),
            "image_name": 'image_name',
            "is_public": False,
            "force": True}
        body = {"os-volume_upload_image": vol}
        return body
    def fake_image_service_create(self, *args):
        """Fake glance create(); returns a queued image record."""
        ret = {
            'status': u'queued',
            'name': u'image_name',
            'deleted': False,
            'container_format': u'bare',
            'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
            'disk_format': u'raw',
            'updated_at': datetime.datetime(1, 1, 1, 1, 1, 1),
            'id': 1,
            'min_ram': 0,
            'checksum': None,
            'min_disk': 0,
            'is_public': False,
            'deleted_at': None,
            'properties': {u'x_billing_code_license': u'246254365'},
            'size': 0}
        return ret
    def fake_rpc_copy_volume_to_image(self, *args):
        """No-op stand-in for the copy_volume_to_image RPC cast."""
        pass
    def test_copy_volume_to_image(self):
        """Happy path: upload response echoes the stubbed API result."""
        self.stubs.Set(volume_api.API,
                       "copy_volume_to_image",
                       stub_upload_volume_to_image_service)
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": 'image_name',
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        res_dict = self.controller._volume_upload_image(req, id, body)
        expected = {'os-volume_upload_image':
                    {'id': id,
                     'updated_at': datetime.datetime(1, 1, 1, 1, 1, 1),
                     'status': 'uploading',
                     'display_description': 'displaydesc',
                     'size': 1,
                     'volume_type': {'name': 'vol_type_name'},
                     'image_id': 1,
                     'container_format': 'bare',
                     'disk_format': 'raw',
                     'image_name': 'image_name'}}
        self.assertDictMatch(res_dict, expected)
    def test_copy_volume_to_image_volumenotfound(self):
        """Unknown volume id maps to HTTP 404."""
        def stub_volume_get_raise_exc(self, context, volume_id):
            raise exception.VolumeNotFound(volume_id=volume_id)
        self.stubs.Set(volume_api.API, 'get', stub_volume_get_raise_exc)
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": 'image_name',
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._volume_upload_image,
                          req,
                          id,
                          body)
    def test_copy_volume_to_image_invalidvolume(self):
        """InvalidVolume from the API maps to HTTP 400."""
        def stub_upload_volume_to_image_service_raise(self, context, volume,
                                                      metadata, force):
            raise exception.InvalidVolume(reason='blah')
        self.stubs.Set(volume_api.API,
                       "copy_volume_to_image",
                       stub_upload_volume_to_image_service_raise)
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": 'image_name',
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._volume_upload_image,
                          req,
                          id,
                          body)
    def test_copy_volume_to_image_valueerror(self):
        """ValueError from the API maps to HTTP 400."""
        def stub_upload_volume_to_image_service_raise(self, context, volume,
                                                      metadata, force):
            raise ValueError
        self.stubs.Set(volume_api.API,
                       "copy_volume_to_image",
                       stub_upload_volume_to_image_service_raise)
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": 'image_name',
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._volume_upload_image,
                          req,
                          id,
                          body)
    def test_copy_volume_to_image_remoteerror(self):
        """RPC RemoteError maps to HTTP 400."""
        def stub_upload_volume_to_image_service_raise(self, context, volume,
                                                      metadata, force):
            raise messaging.RemoteError
        self.stubs.Set(volume_api.API,
                       "copy_volume_to_image",
                       stub_upload_volume_to_image_service_raise)
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": 'image_name',
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._volume_upload_image,
                          req,
                          id,
                          body)
    def test_volume_upload_image_typeerror(self):
        """An unrecognized action key is rejected with 400."""
        id = 1
        body = {"os-volume_upload_image_fake": "fake"}
        req = webob.Request.blank('/v2/tenant1/volumes/%s/action' % id)
        req.method = 'POST'
        req.headers['Content-Type'] = 'application/json'
        req.body = json.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
    def test_volume_upload_image_without_type(self):
        """An empty action key is rejected with 400."""
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": None,
               "force": True}
        body = {"": vol}
        req = webob.Request.blank('/v2/tenant1/volumes/%s/action' % id)
        req.method = 'POST'
        req.headers['Content-Type'] = 'application/json'
        req.body = json.dumps(body)
        res = req.get_response(fakes.wsgi_app())
        self.assertEqual(res.status_int, 400)
    def test_extend_volume_valueerror(self):
        """Non-numeric new_size raises HTTPBadRequest from _extend."""
        id = 1
        body = {'os-extend': {'new_size': 'fake'}}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._extend,
                          req,
                          id,
                          body)
    def test_copy_volume_to_image_notimagename(self):
        """A null image_name raises HTTPBadRequest."""
        id = 1
        vol = {"container_format": 'bare',
               "disk_format": 'raw',
               "image_name": None,
               "force": True}
        body = {"os-volume_upload_image": vol}
        req = fakes.HTTPRequest.blank('/v2/tenant1/volumes/%s/action' % id)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._volume_upload_image,
                          req,
                          id,
                          body)
    def test_copy_volume_to_image_with_protected_prop(self):
        """Test create image from volume with protected properties."""
        id = 1
        def fake_get_volume_image_metadata(*args):
            meta_dict = {
                "volume_id": id,
                "key": "x_billing_code_license",
                "value": "246254365"}
            return meta_dict
        # Need to mock get_volume_image_metadata, create,
        # update and copy_volume_to_image
        with mock.patch.object(volume_api.API, "get_volume_image_metadata") \
                as mock_get_volume_image_metadata:
            mock_get_volume_image_metadata.side_effect = \
                fake_get_volume_image_metadata
            with mock.patch.object(glance.GlanceImageService, "create") \
                    as mock_create:
                mock_create.side_effect = self.fake_image_service_create
                with mock.patch.object(volume_api.API, "update") \
                        as mock_update:
                    mock_update.side_effect = stubs.stub_volume_update
                    with mock.patch.object(volume_rpcapi.VolumeAPI,
                                           "copy_volume_to_image") \
                            as mock_copy_volume_to_image:
                        mock_copy_volume_to_image.side_effect = \
                            self.fake_rpc_copy_volume_to_image
                        req = fakes.HTTPRequest.blank(
                            '/v2/tenant1/volumes/%s/action' % id)
                        body = self._get_os_volume_upload_image()
                        res_dict = self.controller._volume_upload_image(req,
                                                                        id,
                                                                        body)
                        expected_res = {
                            'os-volume_upload_image': {
                                'id': id,
                                'updated_at': datetime.datetime(1900, 1, 1,
                                                                1, 1, 1),
                                'status': 'uploading',
                                'display_description': 'displaydesc',
                                'size': 1,
                                'volume_type': {'name': 'vol_type_name'},
                                'image_id': 1,
                                'container_format': 'bare',
                                'disk_format': 'raw',
                                'image_name': 'image_name'
                            }
                        }
                        self.assertDictMatch(res_dict, expected_res)
    def test_copy_volume_to_image_without_glance_metadata(self):
        """Test create image from volume if volume is created without image.
        In this case volume glance metadata will not be available for this
        volume.
        """
        id = 1
        def fake_get_volume_image_metadata_raise(*args):
            raise exception.GlanceMetadataNotFound(id=id)
        # Need to mock get_volume_image_metadata, create,
        # update and copy_volume_to_image
        with mock.patch.object(volume_api.API, "get_volume_image_metadata") \
                as mock_get_volume_image_metadata:
            mock_get_volume_image_metadata.side_effect = \
                fake_get_volume_image_metadata_raise
            with mock.patch.object(glance.GlanceImageService, "create") \
                    as mock_create:
                mock_create.side_effect = self.fake_image_service_create
                with mock.patch.object(volume_api.API, "update") \
                        as mock_update:
                    mock_update.side_effect = stubs.stub_volume_update
                    with mock.patch.object(volume_rpcapi.VolumeAPI,
                                           "copy_volume_to_image") \
                            as mock_copy_volume_to_image:
                        mock_copy_volume_to_image.side_effect = \
                            self.fake_rpc_copy_volume_to_image
                        req = fakes.HTTPRequest.blank(
                            '/v2/tenant1/volumes/%s/action' % id)
                        body = self._get_os_volume_upload_image()
                        res_dict = self.controller._volume_upload_image(req,
                                                                        id,
                                                                        body)
                        expected_res = {
                            'os-volume_upload_image': {
                                'id': id,
                                'updated_at': datetime.datetime(1900, 1, 1,
                                                                1, 1, 1),
                                'status': 'uploading',
                                'display_description': 'displaydesc',
                                'size': 1,
                                'volume_type': {'name': 'vol_type_name'},
                                'image_id': 1,
                                'container_format': 'bare',
                                'disk_format': 'raw',
                                'image_name': 'image_name'
                            }
                        }
                        self.assertDictMatch(res_dict, expected_res)
    def test_copy_volume_to_image_without_protected_prop(self):
        """Test protected property is not defined with the root image."""
        id = 1
        def fake_get_volume_image_metadata(*args):
            return []
        # Need to mock get_volume_image_metadata, create,
        # update and copy_volume_to_image
        with mock.patch.object(volume_api.API, "get_volume_image_metadata") \
                as mock_get_volume_image_metadata:
            mock_get_volume_image_metadata.side_effect = \
                fake_get_volume_image_metadata
            with mock.patch.object(glance.GlanceImageService, "create") \
                    as mock_create:
                mock_create.side_effect = self.fake_image_service_create
                with mock.patch.object(volume_api.API, "update") \
                        as mock_update:
                    mock_update.side_effect = stubs.stub_volume_update
                    with mock.patch.object(volume_rpcapi.VolumeAPI,
                                           "copy_volume_to_image") \
                            as mock_copy_volume_to_image:
                        mock_copy_volume_to_image.side_effect = \
                            self.fake_rpc_copy_volume_to_image
                        req = fakes.HTTPRequest.blank(
                            '/v2/tenant1/volumes/%s/action' % id)
                        body = self._get_os_volume_upload_image()
                        res_dict = self.controller._volume_upload_image(req,
                                                                        id,
                                                                        body)
                        expected_res = {
                            'os-volume_upload_image': {
                                'id': id,
                                'updated_at': datetime.datetime(1900, 1, 1,
                                                                1, 1, 1),
                                'status': 'uploading',
                                'display_description': 'displaydesc',
                                'size': 1,
                                'volume_type': {'name': 'vol_type_name'},
                                'image_id': 1,
                                'container_format': 'bare',
                                'disk_format': 'raw',
                                'image_name': 'image_name'
                            }
                        }
                        self.assertDictMatch(res_dict, expected_res)
    def test_copy_volume_to_image_without_core_prop(self):
        """Test glance_core_properties defined in cinder.conf is empty."""
        id = 1
        # Need to mock create, update, copy_volume_to_image
        with mock.patch.object(glance.GlanceImageService, "create") \
                as mock_create:
            mock_create.side_effect = self.fake_image_service_create
            with mock.patch.object(volume_api.API, "update") \
                    as mock_update:
                mock_update.side_effect = stubs.stub_volume_update
                with mock.patch.object(volume_rpcapi.VolumeAPI,
                                       "copy_volume_to_image") \
                        as mock_copy_volume_to_image:
                    mock_copy_volume_to_image.side_effect = \
                        self.fake_rpc_copy_volume_to_image
                    self.override_config('glance_core_properties', [])
                    req = fakes.HTTPRequest.blank(
                        '/v2/tenant1/volumes/%s/action' % id)
                    body = self._get_os_volume_upload_image()
                    res_dict = self.controller._volume_upload_image(req,
                                                                    id,
                                                                    body)
                    expected_res = {
                        'os-volume_upload_image': {
                            'id': id,
                            'updated_at': datetime.datetime(1900, 1, 1,
                                                            1, 1, 1),
                            'status': 'uploading',
                            'display_description': 'displaydesc',
                            'size': 1,
                            'volume_type': {'name': 'vol_type_name'},
                            'image_id': 1,
                            'container_format': 'bare',
                            'disk_format': 'raw',
                            'image_name': 'image_name'
                        }
                    }
                    self.assertDictMatch(res_dict, expected_res)
|
|
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
from Common.StringUtils import *
from Common.DataType import *
from Common.Misc import *
from types import *
from collections import OrderedDict
from Workspace.BuildClassObject import PackageBuildClassObject, StructurePcd, PcdClassObject
## Platform build information from DEC file
#
# This class is used to retrieve information stored in database and convert them
# into PackageBuildClassObject form for easier use for AutoGen.
#
class DecBuildData(PackageBuildClassObject):
    """Package (DEC) build information retrieved from the metadata database.

    Converts raw DEC records into PackageBuildClassObject form so AutoGen
    can consume them. Most members are populated lazily by properties.
    """
    # dict used to convert PCD type in database to string used by build tool
    _PCD_TYPE_STRING_ = {
        MODEL_PCD_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
        MODEL_PCD_PATCHABLE_IN_MODULE : TAB_PCDS_PATCHABLE_IN_MODULE,
        MODEL_PCD_FEATURE_FLAG : TAB_PCDS_FEATURE_FLAG,
        MODEL_PCD_DYNAMIC : TAB_PCDS_DYNAMIC,
        MODEL_PCD_DYNAMIC_DEFAULT : TAB_PCDS_DYNAMIC,
        MODEL_PCD_DYNAMIC_HII : TAB_PCDS_DYNAMIC_HII,
        MODEL_PCD_DYNAMIC_VPD : TAB_PCDS_DYNAMIC_VPD,
        MODEL_PCD_DYNAMIC_EX : TAB_PCDS_DYNAMIC_EX,
        MODEL_PCD_DYNAMIC_EX_DEFAULT : TAB_PCDS_DYNAMIC_EX,
        MODEL_PCD_DYNAMIC_EX_HII : TAB_PCDS_DYNAMIC_EX_HII,
        MODEL_PCD_DYNAMIC_EX_VPD : TAB_PCDS_DYNAMIC_EX_VPD,
    }
    # dict used to convert part of [Defines] to members of DecBuildData directly
    # (maps [Defines] key -> backing attribute name; see __setitem__/__getitem__)
    _PROPERTY_ = {
        #
        # Required Fields
        #
        TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
        TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
        TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
        TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
    }
    ## Constructor of DecBuildData
    #
    #  Initialize object of DecBuildData
    #
    #   @param      File            The path of package description file (DEC)
    #   @param      RawData         The raw data of DEC file
    #   @param      BuildDataBase   Database used to retrieve module information
    #   @param      Arch            The target architecture
    #   @param      Target          The build target (not used for DecBuildData)
    #   @param      Toolchain       The toolchain tag (not used for DecBuildData)
    #
    def __init__(self, File, RawData, BuildDataBase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
        self.MetaFile = File
        self._PackageDir = File.Dir
        self._RawData = RawData
        self._Bdb = BuildDataBase
        self._Arch = Arch
        self._Target = Target
        self._Toolchain = Toolchain
        # reset all lazily-populated members to None
        self._Clear()
## XXX[key] = value
def __setitem__(self, key, value):
self.__dict__[self._PROPERTY_[key]] = value
## value = XXX[key]
def __getitem__(self, key):
return self.__dict__[self._PROPERTY_[key]]
## "in" test support
def __contains__(self, key):
return key in self._PROPERTY_
## Set all internal used members of DecBuildData to None
def _Clear(self):
self._Header = None
self._PackageName = None
self._Guid = None
self._Version = None
self._PkgUniFile = None
self._Protocols = None
self._Ppis = None
self._Guids = None
self._Includes = None
self._CommonIncludes = None
self._LibraryClasses = None
self._Pcds = None
self._MacroDict = None
self._PrivateProtocols = None
self._PrivatePpis = None
self._PrivateGuids = None
self._PrivateIncludes = None
## Get current effective macros
@property
def _Macros(self):
if self._MacroDict is None:
self._MacroDict = dict(GlobalData.gGlobalDefines)
return self._MacroDict
    ## Get architecture
    @property
    def Arch(self):
        """Target architecture this package data was loaded for."""
        return self._Arch
## Retrieve all information in [Defines] section
#
# (Retriving all [Defines] information in one-shot is just to save time.)
#
def _GetHeaderInfo(self):
RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
for Record in RecordList:
Name = Record[1]
if Name in self:
self[Name] = Record[2]
self._Header = 'DUMMY'
## Retrieve package name
@property
def PackageName(self):
if self._PackageName is None:
if self._Header is None:
self._GetHeaderInfo()
if self._PackageName is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
return self._PackageName
## Retrieve file guid
@property
def PackageName(self):
if self._Guid is None:
if self._Header is None:
self._GetHeaderInfo()
if self._Guid is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
return self._Guid
## Retrieve package version
@property
def Version(self):
if self._Version is None:
if self._Header is None:
self._GetHeaderInfo()
if self._Version is None:
self._Version = ''
return self._Version
    ## Retrieve protocol definitions (name/value pairs)
    @property
    def Protocols(self):
        """Public protocol name->GUID mapping for this package (lazy).

        Also populates self._PrivateProtocols as a side effect. A name
        declared both PRIVATE and non-PRIVATE is a fatal conflict.
        """
        if self._Protocols is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # protocol definition for given ARCH
            #
            ProtocolDict = tdict(True)
            PrivateProtocolDict = tdict(True)
            NameList = []
            PrivateNameList = []
            PublicNameList = []
            # find out all protocol definitions for specific and 'common' arch
            RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
            for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                if PrivateFlag == 'PRIVATE':
                    if Name not in PrivateNameList:
                        PrivateNameList.append(Name)
                        PrivateProtocolDict[Arch, Name] = Guid
                    if Name in PublicNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                else:
                    if Name not in PublicNameList:
                        PublicNameList.append(Name)
                    if Name in PrivateNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                if Name not in NameList:
                    NameList.append(Name)
                    ProtocolDict[Arch, Name] = Guid
            # use OrderedDict to keep the order
            self._Protocols = OrderedDict()
            self._PrivateProtocols = OrderedDict()
            for Name in NameList:
                #
                # limit the ARCH to self._Arch, if no self._Arch found, tdict
                # will automatically turn to 'common' ARCH for trying
                #
                self._Protocols[Name] = ProtocolDict[self._Arch, Name]
            for Name in PrivateNameList:
                self._PrivateProtocols[Name] = PrivateProtocolDict[self._Arch, Name]
        return self._Protocols
## Retrieve PPI definitions (name/value pairs)
@property
def Ppis(self):
if self._Ppis is None:
#
# tdict is a special kind of dict, used for selecting correct
# PPI defition for given ARCH
#
PpiDict = tdict(True)
PrivatePpiDict = tdict(True)
NameList = []
PrivateNameList = []
PublicNameList = []
# find out all PPI definitions for specific arch and 'common' arch
RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
if PrivateFlag == 'PRIVATE':
if Name not in PrivateNameList:
PrivateNameList.append(Name)
PrivatePpiDict[Arch, Name] = Guid
if Name in PublicNameList:
EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
else:
if Name not in PublicNameList:
PublicNameList.append(Name)
if Name in PrivateNameList:
EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
if Name not in NameList:
NameList.append(Name)
PpiDict[Arch, Name] = Guid
# use OrderedDict to keep the order
self._Ppis = OrderedDict()
self._PrivatePpis = OrderedDict()
for Name in NameList:
#
# limit the ARCH to self._Arch, if no self._Arch found, tdict
# will automatically turn to 'common' ARCH for trying
#
self._Ppis[Name] = PpiDict[self._Arch, Name]
for Name in PrivateNameList:
self._PrivatePpis[Name] = PrivatePpiDict[self._Arch, Name]
return self._Ppis
## Retrieve GUID definitions (name/value pairs)
@property
def Guids(self):
if self._Guids is None:
#
# tdict is a special kind of dict, used for selecting correct
# GUID defition for given ARCH
#
GuidDict = tdict(True)
PrivateGuidDict = tdict(True)
NameList = []
PrivateNameList = []
PublicNameList = []
# find out all protocol definitions for specific and 'common' arch
RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
if PrivateFlag == 'PRIVATE':
if Name not in PrivateNameList:
PrivateNameList.append(Name)
PrivateGuidDict[Arch, Name] = Guid
if Name in PublicNameList:
EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
else:
if Name not in PublicNameList:
PublicNameList.append(Name)
if Name in PrivateNameList:
EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
if Name not in NameList:
NameList.append(Name)
GuidDict[Arch, Name] = Guid
# use OrderedDict to keep the order
self._Guids = OrderedDict()
self._PrivateGuids = OrderedDict()
for Name in NameList:
#
# limit the ARCH to self._Arch, if no self._Arch found, tdict
# will automatically turn to 'common' ARCH for trying
#
self._Guids[Name] = GuidDict[self._Arch, Name]
for Name in PrivateNameList:
self._PrivateGuids[Name] = PrivateGuidDict[self._Arch, Name]
return self._Guids
    ## Retrieve public include paths declared in this package
    @property
    def Includes(self):
        if self._Includes is None or self._CommonIncludes is None:
            self._CommonIncludes = []
            self._Includes = []
            self._PrivateIncludes = []
            PublicInclues = []
            RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
            Macros = self._Macros
            # NOTE(review): this mutates the cached _MacroDict in place, so
            # every later user of _Macros also sees EDK_SOURCE.
            Macros["EDK_SOURCE"] = GlobalData.gEcpSource
            for Record in RecordList:
                File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
                LineNo = Record[-1]
                # validate the path
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
                # avoid duplicate include path
                if File not in self._Includes:
                    self._Includes.append(File)
                # A path must not be declared both Private and non-Private.
                if Record[4] == 'PRIVATE':
                    if File not in self._PrivateIncludes:
                        self._PrivateIncludes.append(File)
                        if File in PublicInclues:
                            EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
                else:
                    if File not in PublicInclues:
                        PublicInclues.append(File)
                        if File in self._PrivateIncludes:
                            EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
                # Paths declared for the 'COMMON' arch are additionally
                # tracked for the CommonIncludes property.
                if Record[3] == TAB_COMMON:
                    self._CommonIncludes.append(File)
        return self._Includes
    ## Retrieve library class declarations (not used in build at present)
    @property
    def LibraryClasses(self):
        if self._LibraryClasses is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # library class declaration for given ARCH
            #
            LibraryClassDict = tdict(True)
            LibraryClassSet = set()
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
            Macros = self._Macros
            for LibraryClass, File, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
                # check the file validation
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
                LibraryClassSet.add(LibraryClass)
                LibraryClassDict[Arch, LibraryClass] = File
            self._LibraryClasses = OrderedDict()
            # NOTE(review): iterating a set makes the resulting OrderedDict
            # order non-deterministic across runs — confirm callers don't
            # depend on the order here.
            for LibraryClass in LibraryClassSet:
                self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
        return self._LibraryClasses
## Retrieve PCD declarations
@property
def Pcds(self):
if self._Pcds is None:
self._Pcds = OrderedDict()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
return self._Pcds
def ParsePcdName(self,TokenCName):
TokenCName = TokenCName.strip()
if TokenCName.startswith("["):
if "." in TokenCName:
Demesionattr = TokenCName[:TokenCName.index(".")]
Fields = TokenCName[TokenCName.index(".")+1:]
else:
Demesionattr = TokenCName
Fields = ""
else:
Demesionattr = ""
Fields = TokenCName
return Demesionattr,Fields
    ## Fold raw structured-PCD records into StructurePcd objects.
    #
    #  @param  StructurePcdRawDataSet  iterable of (PcdClassObject, LineNo)
    #  @retval list    one StructurePcd per "TokenSpaceGuid.TokenCName" key
    def ProcessStructurePcd(self, StructurePcdRawDataSet):
        s_pcd_set = OrderedDict()
        # Bucket the records by their combined "TokenSpace.TokenCName" key
        # (stored in TokenSpaceGuidCName for field records).
        for s_pcd, LineNo in StructurePcdRawDataSet:
            if s_pcd.TokenSpaceGuidCName not in s_pcd_set:
                s_pcd_set[s_pcd.TokenSpaceGuidCName] = []
            s_pcd_set[s_pcd.TokenSpaceGuidCName].append((s_pcd, LineNo))
        str_pcd_set = []
        for pcdname in s_pcd_set:
            dep_pkgs = []
            struct_pcd = StructurePcd()
            for item, LineNo in s_pcd_set[pcdname]:
                if not item.TokenCName:
                    continue
                if "<HeaderFiles>" in item.TokenCName:
                    # Header file declaring the PCD's C structure.
                    struct_pcd.StructuredPcdIncludeFile.append(item.DefaultValue)
                elif "<Packages>" in item.TokenCName:
                    # Package dependency needed to resolve the structure.
                    dep_pkgs.append(item.DefaultValue)
                elif item.DatumType == item.TokenCName:
                    # The declaration record itself: seed the StructurePcd.
                    struct_pcd.copy(item)
                    struct_pcd.TokenValue = struct_pcd.TokenValue.strip("{").strip()
                    struct_pcd.TokenSpaceGuidCName, struct_pcd.TokenCName = pcdname.split(".")
                    struct_pcd.PcdDefineLineNo = LineNo
                    struct_pcd.PkgPath = self.MetaFile.File
                    struct_pcd.SetDecDefaultValue(item.DefaultValue)
                else:
                    # A per-field override record: register its default value.
                    DemesionAttr, Fields = self.ParsePcdName(item.TokenCName)
                    struct_pcd.AddDefaultValue(Fields, item.DefaultValue, self.MetaFile.File, LineNo,DemesionAttr)
            struct_pcd.PackageDecs = dep_pkgs
            str_pcd_set.append(struct_pcd)
        return str_pcd_set
    ## Retrieve PCD declarations for given type
    #
    #  @param  Type    one of the MODEL_PCD_* constants
    #  @retval OrderedDict  (CName, TokenSpaceGuid, TypeString) -> PcdClassObject
    def _GetPcd(self, Type):
        Pcds = OrderedDict()
        #
        # tdict is a special kind of dict, used for selecting correct
        # PCD declaration for given ARCH
        #
        PcdDict = tdict(True, 3)
        # for summarizing PCD
        PcdSet = []
        # find out all PCDs of the 'type'
        StrPcdSet = []
        RecordList = self._RawData[Type, self._Arch]
        for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList:
            PcdDict[Arch, PcdCName, TokenSpaceGuid] = (Setting, Dummy2)
            if not (PcdCName, TokenSpaceGuid) in PcdSet:
                PcdSet.append((PcdCName, TokenSpaceGuid))
        DefinitionPosition = {}
        for PcdCName, TokenSpaceGuid in PcdSet:
            #
            # limit the ARCH to self._Arch, if no self._Arch found, tdict
            # will automatically turn to 'common' ARCH and try again
            #
            Setting, LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
            if Setting is None:
                continue
            DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
            validateranges, validlists, expressions = self._RawData.GetValidExpression(TokenSpaceGuid, PcdCName)
            PcdObj = PcdClassObject(
                    PcdCName,
                    TokenSpaceGuid,
                    self._PCD_TYPE_STRING_[Type],
                    DatumType,
                    DefaultValue,
                    TokenNumber,
                    '',
                    {},
                    False,
                    None,
                    list(validateranges),
                    list(validlists),
                    list(expressions)
                    )
            DefinitionPosition[PcdObj] = (self.MetaFile.File, LineNo)
            # A '.' inside the token-space field marks a structured-PCD
            # field record (see ProcessStructurePcd).
            if "." in TokenSpaceGuid:
                StrPcdSet.append((PcdObj, LineNo))
            else:
                Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdObj
        StructurePcds = self.ProcessStructurePcd(StrPcdSet)
        for pcd in StructurePcds:
            Pcds[pcd.TokenCName, pcd.TokenSpaceGuidCName, self._PCD_TYPE_STRING_[Type]] = pcd
        for pcd in Pcds.values():
            # Only scalar C types, VOID* and BOOLEAN are legal non-structure
            # datum types; everything else must be a known structure.
            if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
                if not pcd.IsAggregateDatumType():
                    EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", DefinitionPosition[pcd][0], DefinitionPosition[pcd][1])
                elif not pcd.IsArray() and not pcd.StructuredPcdIncludeFile:
                    # NOTE(review): this branch reads pcd.DefinitionPosition
                    # (an attribute) while the one above uses the local
                    # DefinitionPosition dict — confirm both are intended.
                    EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "The structure Pcd %s.%s header file is not found in %s line %s \n" % (pcd.TokenSpaceGuidCName, pcd.TokenCName, pcd.DefinitionPosition[0], pcd.DefinitionPosition[1] ))
        return Pcds
    ## Retrieve include paths declared for the 'COMMON' arch only
    @property
    def CommonIncludes(self):
        if self._CommonIncludes is None:
            # The Includes property populates _CommonIncludes as a side effect.
            self.Includes
        return self._CommonIncludes
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for MapDefunOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.python.client import session
from tensorflow.python.data.experimental.ops import map_defun
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
@test_util.run_v1_only("b/123903858: Add eager and V2 test coverage")
class MapDefunTest(test_base.DatasetTestBase):
  """Unit tests for the MapDefunOp (vectorized map of a defun over axis 0)."""
  def testMapDefunSimple(self):
    """Elementwise function applied across the leading dimension."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def simple_fn(x):
      return x * 2 + 3
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(simple_fn, [elems], [dtypes.int32], [(2,)])[0]
    expected = elems * 2 + 3
    self.assertAllEqual(self.evaluate(r), self.evaluate(expected))
  def testMapDefunMismatchedTypes(self):
    """Declared output dtype differing from the fn's actual dtype must fail."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def fn(x):
      return math_ops.cast(x, dtypes.float64)
    nums = [1, 2, 3, 4, 5, 6]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(fn, [elems], [dtypes.int32], [()])[0]
    with self.assertRaises(errors.InvalidArgumentError):
      self.evaluate(r)
  def testMapDefunReduceDim(self):
    # Tests where the output has a different rank from the input
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def fn(x):
      return array_ops.gather(x, 0)
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(fn, [elems], [dtypes.int32], [()])[0]
    expected = constant_op.constant([1, 3, 5])
    self.assertAllEqual(self.evaluate(r), self.evaluate(expected))
  def testMapDefunMultipleOutputs(self):
    """A fn returning a tuple yields one stacked output tensor per element."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def fn(x):
      return (x, math_ops.cast(x * 2 + 3, dtypes.float64))
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(fn, [elems], [dtypes.int32, dtypes.float64], [(2,),
                                                                          (2,)])
    expected = [elems, elems * 2 + 3]
    self.assertAllEqual(self.evaluate(r), self.evaluate(expected))
  def testMapDefunShapeInference(self):
    """Static shape is the batch dim prepended to the declared output shape."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def fn(x):
      return x
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    result = map_defun.map_defun(fn, [elems], [dtypes.int32], [(2,)])[0]
    self.assertEqual(result.get_shape(), (3, 2))
  def testMapDefunPartialShapeInference(self):
    """Unknown batch dimension propagates as None in the output shape."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def fn(x):
      return x
    elems = array_ops.placeholder(dtypes.int64, (None, 2))
    result = map_defun.map_defun(fn, [elems], [dtypes.int32], [(2,)])
    self.assertEqual(result[0].get_shape().as_list(), [None, 2])
  def testMapDefunRaisesErrorOnRuntimeShapeMismatch(self):
    """Inputs with differing dim-0 sizes must fail at run time."""
    @function.defun(input_signature=[
        tensor_spec.TensorSpec(None, dtypes.int32),
        tensor_spec.TensorSpec(None, dtypes.int32)
    ])
    def fn(x, y):
      return x, y
    elems1 = array_ops.placeholder(dtypes.int32)
    elems2 = array_ops.placeholder(dtypes.int32)
    result = map_defun.map_defun(fn, [elems1, elems2],
                                 [dtypes.int32, dtypes.int32], [(), ()])
    with self.cached_session() as sess:
      with self.assertRaisesWithPredicateMatch(
          errors.InvalidArgumentError,
          "All inputs must have the same dimension 0."):
        sess.run(result, feed_dict={elems1: [1, 2, 3, 4, 5], elems2: [1, 2, 3]})
  def testMapDefunRaisesDefunError(self):
    """An error raised inside the mapped fn surfaces through MapDefunOp."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def fn(x):
      with ops.control_dependencies([check_ops.assert_equal(x, 0)]):
        return array_ops.identity(x)
    elems = constant_op.constant([0, 0, 0, 37, 0])
    result = map_defun.map_defun(fn, [elems], [dtypes.int32], [()])
    with self.assertRaises(errors.InvalidArgumentError):
      self.evaluate(result)
  def testMapDefunCancelledCorrectly(self):
    @function.defun(input_signature=[tensor_spec.TensorSpec([5], dtypes.int64)])
    def defun(x):
      # x has leading dimension 5, this will raise an error
      return array_ops.gather(x, 10)
    c = array_ops.tile(
        array_ops.expand_dims(
            constant_op.constant([1, 2, 3, 4, 5], dtype=dtypes.int64), 0),
        [100, 1])
    map_defun_op = map_defun.map_defun(defun, [c], [dtypes.int64], [()])[0]
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 r"indices = 10 is not in \[0, 5\)"):
      self.evaluate(map_defun_op)
  def testMapDefunWithUnspecifiedOutputShape(self):
    """None / partially-known output shapes are accepted and still computed."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def simple_fn(x):
      res = x * 2 + 3
      return (res, res + 1, res + 2)
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(simple_fn, [elems],
                            [dtypes.int32, dtypes.int32, dtypes.int32],
                            [None, (None,), (2,)])
    expected = elems * 2 + 3
    self.assertAllEqual(self.evaluate(r[0]), self.evaluate(expected))
    self.assertAllEqual(self.evaluate(r[1]), self.evaluate(expected + 1))
    self.assertAllEqual(self.evaluate(r[2]), self.evaluate(expected + 2))
  def testMapDefunWithDifferentOutputShapeEachRun(self):
    """The same op handles differently-shaped feeds across session runs."""
    @function.defun(
        input_signature=[tensor_spec.TensorSpec(None, dtypes.int32)])
    def simple_fn(x):
      return x * 2 + 3
    elems = array_ops.placeholder(dtypes.int32, name="data")
    r = map_defun.map_defun(simple_fn, [elems], [dtypes.int32], [None])[0]
    with session.Session() as sess:
      self.assertAllEqual(sess.run(r, feed_dict={elems: [0]}), [3])
      self.assertAllEqual(
          sess.run(r, feed_dict={elems: [[0], [1]]}), [[3], [5]])
  def testMapDefunWithWrongOutputShape(self):
    """A declared output shape that contradicts the fn's result must fail."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([2], dtypes.int32)])
    def simple_fn(x):
      return x * 2 + 3
    nums = [[1, 2], [3, 4], [5, 6]]
    elems = constant_op.constant(nums, dtype=dtypes.int32, name="data")
    r = map_defun.map_defun(simple_fn, [elems], [dtypes.int32], [(1,)])[0]
    with self.assertRaises(errors.InvalidArgumentError):
      self.evaluate(r)
  def testMapDefunWithInvalidInput(self):
    """Rank-0 input is rejected: statically when known, at run time otherwise."""
    @function.defun(
        input_signature=[tensor_spec.TensorSpec(None, dtypes.int32)])
    def simple_fn(x):
      return x * 2
    c = constant_op.constant(2)
    with self.assertRaises(ValueError):
      # Fails at graph construction time for inputs with known shapes.
      r = map_defun.map_defun(simple_fn, [c], [dtypes.int32], [None])[0]
    p = array_ops.placeholder(dtypes.int32)
    r = map_defun.map_defun(simple_fn, [p], [dtypes.int32], [None])[0]
    with session.Session() as sess:
      with self.assertRaises(errors.InvalidArgumentError):
        sess.run(r, feed_dict={p: 0})
  def _assert_op_cancelled(self, sess, map_defun_op):
    # Helper run on a side thread: expects the op to report cancellation.
    with self.assertRaisesRegexp(errors.CancelledError, "was cancelled"):
      self.evaluate(map_defun_op)
  def testMapDefunWithParentCancellation(self):
    # Checks that a cancellation of the parent graph is threaded through to
    # MapDefunOp correctly.
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def simple_fn(x):
      del x
      queue = data_flow_ops.FIFOQueue(10, dtypes.int32, ())
      # Blocking
      return queue.dequeue_many(5)
    c = constant_op.constant([1, 2, 3, 4, 5])
    map_defun_op = map_defun.map_defun(simple_fn, [c], [dtypes.int32], [()])[0]
    with self.cached_session() as sess:
      thread = self.checkedThread(
          self._assert_op_cancelled, args=(sess, map_defun_op))
      thread.start()
      time.sleep(0.2)
      sess.close()
      thread.join()
  def testMapDefunWithCapturedInputs(self):
    """Tensors captured by the defun (not mapped over) work correctly."""
    c = constant_op.constant(2)
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def fn(x):
      return x + c
    x = constant_op.constant([1, 2, 3, 4])
    map_defun_op = map_defun.map_defun(fn, [x], [dtypes.int32], [()])[0]
    expected = x + c
    self.assertAllEqual(self.evaluate(expected), self.evaluate(map_defun_op))
  def testMapDefunWithVariantTensor(self):
    """DT_VARIANT elements (serialized sparse tensors) pass through the map."""
    @function.defun(
        input_signature=[tensor_spec.TensorSpec([], dtypes.variant)])
    def fn(x):
      return x
    st = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
    serialized = sparse_ops.serialize_sparse_v2(st, out_type=dtypes.variant)
    serialized = array_ops.stack([serialized, serialized])
    map_defun_op = map_defun.map_defun(fn, [serialized], [dtypes.variant],
                                       [None])[0]
    deserialized = sparse_ops.deserialize_sparse(map_defun_op, dtypes.int32)
    expected = sparse_tensor.SparseTensorValue(
        indices=[[0, 0, 0], [0, 1, 2], [1, 0, 0], [1, 1, 2]],
        values=[1, 2, 1, 2],
        dense_shape=[2, 3, 4])
    actual = self.evaluate(deserialized)
    self.assertSparseValuesEqual(expected, actual)
  def testMapDefunWithVariantTensorAsCaptured(self):
    """A DT_VARIANT tensor captured by the defun is returned per element."""
    st = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
    serialized = sparse_ops.serialize_sparse_v2(st, out_type=dtypes.variant)
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def fn(x):
      del x
      return serialized
    x = constant_op.constant([0, 0])
    map_defun_op = map_defun.map_defun(fn, [x], [dtypes.variant], [None])[0]
    deserialized = sparse_ops.deserialize_sparse(map_defun_op, dtypes.int32)
    expected = sparse_tensor.SparseTensorValue(
        indices=[[0, 0, 0], [0, 1, 2], [1, 0, 0], [1, 1, 2]],
        values=[1, 2, 1, 2],
        dense_shape=[2, 3, 4])
    actual = self.evaluate(deserialized)
    self.assertSparseValuesEqual(expected, actual)
  def testMapDefunWithStrTensor(self):
    """Same round-trip as the variant test, using string serialization."""
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.string)])
    def fn(x):
      return x
    st = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
    serialized = sparse_ops.serialize_sparse_v2(st, out_type=dtypes.string)
    serialized = array_ops.stack([serialized, serialized])
    map_defun_op = map_defun.map_defun(fn, [serialized], [dtypes.string],
                                       [None])[0]
    deserialized = sparse_ops.deserialize_sparse(map_defun_op, dtypes.int32)
    expected = sparse_tensor.SparseTensorValue(
        indices=[[0, 0, 0], [0, 1, 2], [1, 0, 0], [1, 1, 2]],
        values=[1, 2, 1, 2],
        dense_shape=[2, 3, 4])
    actual = self.evaluate(deserialized)
    self.assertSparseValuesEqual(expected, actual)
if __name__ == "__main__":
test.main()
|
|
# engine/default.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Default implementations of per-dialect sqlalchemy.engine classes.
These are semi-private implementation classes which are only of importance
to database dialect authors; dialects will usually use the classes here
as the base class for their own corresponding classes.
"""
import codecs
import random
import re
import weakref
from . import interfaces
from . import reflection
from . import result
from .. import event
from .. import exc
from .. import pool
from .. import processors
from .. import types as sqltypes
from .. import util
from ..sql import compiler
from ..sql import expression
from ..sql import schema
from ..sql.elements import quoted_name
# Statements that imply data/schema modification; matching SQL triggers
# autocommit when executed without an explicit transaction.
AUTOCOMMIT_REGEXP = re.compile(
    r"\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)", re.I | re.UNICODE
)
# When we're handed literal SQL, ensure it's a SELECT query
SERVER_SIDE_CURSOR_RE = re.compile(r"\s*SELECT", re.I | re.UNICODE)
class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
statement_compiler = compiler.SQLCompiler
ddl_compiler = compiler.DDLCompiler
type_compiler = compiler.GenericTypeCompiler
preparer = compiler.IdentifierPreparer
supports_alter = True
supports_comments = False
inline_comments = False
# the first value we'd get for an autoincrement
# column.
default_sequence_base = 1
# most DBAPIs happy with this for execute().
# not cx_oracle.
execute_sequence_format = tuple
supports_views = True
supports_sequences = False
sequences_optional = False
preexecute_autoincrement_sequences = False
postfetch_lastrowid = True
implicit_returning = False
supports_right_nested_joins = True
cte_follows_insert = False
supports_native_enum = False
supports_native_boolean = False
non_native_boolean_check_constraint = True
supports_simple_order_by_label = True
tuple_in_values = False
engine_config_types = util.immutabledict(
[
("convert_unicode", util.bool_or_str("force")),
("pool_timeout", util.asint),
("echo", util.bool_or_str("debug")),
("echo_pool", util.bool_or_str("debug")),
("pool_recycle", util.asint),
("pool_size", util.asint),
("max_overflow", util.asint),
("pool_threadlocal", util.asbool),
]
)
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
supports_native_decimal = False
if util.py3k:
supports_unicode_statements = True
supports_unicode_binds = True
returns_unicode_strings = True
description_encoding = None
else:
supports_unicode_statements = False
supports_unicode_binds = False
returns_unicode_strings = False
description_encoding = "use_encoding"
name = "default"
# length at which to truncate
# any identifier.
max_identifier_length = 9999
_user_defined_max_identifier_length = None
# length at which to truncate
# the name of an index.
# Usually None to indicate
# 'use max_identifier_length'.
# thanks to MySQL, sigh
max_index_name_length = None
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
colspecs = {}
default_paramstyle = "named"
supports_default_values = False
supports_empty_insert = True
supports_multivalues_insert = False
supports_is_distinct_from = True
supports_server_side_cursors = False
server_version_info = None
construct_arguments = None
"""Optional set of argument specifiers for various SQLAlchemy
constructs, typically schema items.
To implement, establish as a series of tuples, as in::
construct_arguments = [
(schema.Index, {
"using": False,
"where": None,
"ops": None
})
]
If the above construct is established on the PostgreSQL dialect,
the :class:`.Index` construct will now accept the keyword arguments
    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
Any other argument specified to the constructor of :class:`.Index`
which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
A dialect which does not include a ``construct_arguments`` member will
not participate in the argument validation system. For such a dialect,
any argument name is accepted by all participating constructs, within
the namespace of arguments prefixed with that dialect name. The rationale
here is so that third-party dialects that haven't yet implemented this
feature continue to function in the old way.
.. versionadded:: 0.9.2
.. seealso::
:class:`.DialectKWArgs` - implementing base class which consumes
:attr:`.DefaultDialect.construct_arguments`
"""
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
# if this is True, the methods normalize_name()
# and denormalize_name() must be provided.
requires_name_normalize = False
reflection_options = ()
dbapi_exception_translation_map = util.immutabledict()
"""mapping used in the extremely unusual case that a DBAPI's
published exceptions don't actually have the __name__ that they
are linked towards.
.. versionadded:: 1.0.5
"""
    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`_sa.create_engine.convert_unicode` parameter "
            "and corresponding dialect-level parameters are deprecated, "
            "and will be removed in a future release. Modern DBAPIs support "
            "Python Unicode natively and this parameter is unnecessary.",
        )
    )
    def __init__(
        self,
        convert_unicode=False,
        encoding="utf-8",
        paramstyle=None,
        dbapi=None,
        implicit_returning=None,
        supports_right_nested_joins=None,
        case_sensitive=True,
        supports_native_boolean=None,
        empty_in_strategy="static",
        max_identifier_length=None,
        label_length=None,
        **kwargs
    ):
        """Construct the dialect, resolving paramstyle, unicode handling
        and identifier-length settings from the given arguments and the
        supplied DBAPI module.  Unknown keyword arguments are ignored.
        """
        if not getattr(self, "ported_sqla_06", True):
            util.warn(
                "The %s dialect is not yet ported to the 0.6 format"
                % self.name
            )
        self.convert_unicode = convert_unicode
        self.encoding = encoding
        self.positional = False
        self._ischema = None
        self.dbapi = dbapi
        # paramstyle precedence: explicit argument, then the DBAPI's own
        # declared paramstyle, then the dialect's class-level default.
        if paramstyle is not None:
            self.paramstyle = paramstyle
        elif self.dbapi is not None:
            self.paramstyle = self.dbapi.paramstyle
        else:
            self.paramstyle = self.default_paramstyle
        if implicit_returning is not None:
            self.implicit_returning = implicit_returning
        self.positional = self.paramstyle in ("qmark", "format", "numeric")
        self.identifier_preparer = self.preparer(self)
        # NOTE: replaces the class-level type_compiler *class* with an
        # instance bound to this dialect.
        self.type_compiler = self.type_compiler(self)
        if supports_right_nested_joins is not None:
            self.supports_right_nested_joins = supports_right_nested_joins
        if supports_native_boolean is not None:
            self.supports_native_boolean = supports_native_boolean
        self.case_sensitive = case_sensitive
        self.empty_in_strategy = empty_in_strategy
        if empty_in_strategy == "static":
            self._use_static_in = True
        elif empty_in_strategy in ("dynamic", "dynamic_warn"):
            self._use_static_in = False
            self._warn_on_empty_in = empty_in_strategy == "dynamic_warn"
        else:
            raise exc.ArgumentError(
                "empty_in_strategy may be 'static', "
                "'dynamic', or 'dynamic_warn'"
            )
        # A user-supplied limit overrides the class default permanently
        # (see also initialize()).
        self._user_defined_max_identifier_length = max_identifier_length
        if self._user_defined_max_identifier_length:
            self.max_identifier_length = (
                self._user_defined_max_identifier_length
            )
        self.label_length = label_length
        if self.description_encoding == "use_encoding":
            self._description_decoder = (
                processors.to_unicode_processor_factory
            )(encoding)
        elif self.description_encoding is not None:
            self._description_decoder = (
                processors.to_unicode_processor_factory
            )(self.description_encoding)
        self._encoder = codecs.getencoder(self.encoding)
        self._decoder = processors.to_unicode_processor_factory(self.encoding)
    @util.memoized_property
    def _type_memos(self):
        # Per-TypeEngine memoization storage; weak keys let type objects
        # be garbage-collected when no longer referenced elsewhere.
        return weakref.WeakKeyDictionary()
@property
def dialect_description(self):
return self.name + "+" + self.driver
    @property
    def supports_sane_rowcount_returning(self):
        """True if this dialect supports sane rowcount even if RETURNING is
        in use.
        For dialects that don't support RETURNING, this is synonymous
        with supports_sane_rowcount.
        """
        return self.supports_sane_rowcount
    @classmethod
    def get_pool_class(cls, url):
        # Dialects may pin a specific pool implementation via a
        # ``poolclass`` class attribute; QueuePool is the default.
        return getattr(cls, "poolclass", pool.QueuePool)
@classmethod
def load_provisioning(cls):
package = ".".join(cls.__module__.split(".")[0:-1])
try:
__import__(package + ".provision")
except ImportError:
pass
    def initialize(self, connection):
        # Probe the live connection for server version, default schema,
        # default isolation level and unicode behavior; each probe is
        # optional for a dialect (NotImplementedError -> None).
        try:
            self.server_version_info = self._get_server_version_info(
                connection
            )
        except NotImplementedError:
            self.server_version_info = None
        try:
            self.default_schema_name = self._get_default_schema_name(
                connection
            )
        except NotImplementedError:
            self.default_schema_name = None
        try:
            self.default_isolation_level = self.get_isolation_level(
                connection.connection
            )
        except NotImplementedError:
            self.default_isolation_level = None
        self.returns_unicode_strings = self._check_unicode_returns(connection)
        # If descriptions already come back as unicode, disable decoding.
        if (
            self.description_encoding is not None
            and self._check_unicode_description(connection)
        ):
            self._description_decoder = self.description_encoding = None
        # A server-reported identifier-length limit overrides the class
        # default, but never a user-supplied max_identifier_length.
        if not self._user_defined_max_identifier_length:
            max_ident_length = self._check_max_identifier_length(connection)
            if max_ident_length:
                self.max_identifier_length = max_ident_length
        if (
            self.label_length
            and self.label_length > self.max_identifier_length
        ):
            raise exc.ArgumentError(
                "Label length of %d is greater than this dialect's"
                " maximum identifier length of %d"
                % (self.label_length, self.max_identifier_length)
            )
    def on_connect(self):
        # inherits the docstring from interfaces.Dialect.on_connect
        # Default dialect performs no per-connection setup.
        return None
    def _check_max_identifier_length(self, connection):
        """Perform a connection / server version specific check to determine
        the max_identifier_length.

        If the dialect's class level max_identifier_length should be used,
        can return None.

        .. versionadded:: 1.3.9

        """
        # base implementation: no server-side probe; use the class default
        return None
    def _check_unicode_returns(self, connection, additional_tests=None):
        """Probe whether the DBAPI returns unicode for VARCHAR / NVARCHAR
        results.

        Returns True, False, or the string ``"conditional"`` when the two
        probes disagree.
        """
        # choose a statement type the DBAPI will accept
        if util.py2k and not self.supports_unicode_statements:
            cast_to = util.binary_type
        else:
            cast_to = util.text_type

        if self.positional:
            parameters = self.execute_sequence_format()
        else:
            parameters = {}

        def check_unicode(test):
            # render one probe SELECT and inspect the returned value's type
            statement = cast_to(
                expression.select([test]).compile(dialect=self)
            )
            try:
                cursor = connection.connection.cursor()
                connection._cursor_execute(cursor, statement, parameters)
                row = cursor.fetchone()
                cursor.close()
            except exc.DBAPIError as de:
                # note that _cursor_execute() will have closed the cursor
                # if an exception is thrown.
                util.warn(
                    "Exception attempting to "
                    "detect unicode returns: %r" % de
                )
                return False
            else:
                return isinstance(row[0], util.text_type)

        tests = [
            # detect plain VARCHAR
            expression.cast(
                expression.literal_column("'test plain returns'"),
                sqltypes.VARCHAR(60),
            ),
            # detect if there's an NVARCHAR type with different behavior
            # available
            expression.cast(
                expression.literal_column("'test unicode returns'"),
                sqltypes.Unicode(60),
            ),
        ]

        if additional_tests:
            tests += additional_tests

        results = {check_unicode(test) for test in tests}

        if results.issuperset([True, False]):
            # the probes disagree with each other
            return "conditional"
        else:
            return results == {True}
    def _check_unicode_description(self, connection):
        """Probe whether ``cursor.description`` labels come back as
        unicode text."""
        # all DBAPIs on Py2K return cursor.description as encoded,
        # until pypy2.1beta2 with sqlite, so let's just check it -
        # it's likely others will start doing this too in Py2k.
        if util.py2k and not self.supports_unicode_statements:
            cast_to = util.binary_type
        else:
            cast_to = util.text_type

        cursor = connection.connection.cursor()
        try:
            cursor.execute(
                cast_to(
                    expression.select(
                        [expression.literal_column("'x'").label("some_label")]
                    ).compile(dialect=self)
                )
            )
            return isinstance(cursor.description[0][0], util.text_type)
        finally:
            # always release the probe cursor
            cursor.close()
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
the generic object which comes from the types module.
This method looks for a dictionary called
``colspecs`` as a class or instance-level variable,
and passes on to :func:`_types.adapt_type`.
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
    def reflecttable(
        self,
        connection,
        table,
        include_columns,
        exclude_columns,
        resolve_fks,
        **opts
    ):
        """Load table metadata by delegating to an Inspector constructed
        from *connection*; remaining arguments are passed through."""
        insp = reflection.Inspector.from_engine(connection)
        return insp.reflecttable(
            table, include_columns, exclude_columns, resolve_fks, **opts
        )
def get_pk_constraint(self, conn, table_name, schema=None, **kw):
"""Compatibility method, adapts the result of get_primary_keys()
for those dialects which don't implement get_pk_constraint().
"""
return {
"constrained_columns": self.get_primary_keys(
conn, table_name, schema=schema, **kw
)
}
def validate_identifier(self, ident):
if len(ident) > self.max_identifier_length:
raise exc.IdentifierError(
"Identifier '%s' exceeds maximum length of %d characters"
% (ident, self.max_identifier_length)
)
def connect(self, *cargs, **cparams):
# inherits the docstring from interfaces.Dialect.connect
return self.dbapi.connect(*cargs, **cparams)
def create_connect_args(self, url):
# inherits the docstring from interfaces.Dialect.create_connect_args
opts = url.translate_connect_args()
opts.update(url.query)
return [[], opts]
    def set_engine_execution_options(self, engine, opts):
        """Install engine-wide execution options by attaching
        "engine_connect" event listeners for the isolation level and
        schema translate map options."""
        if "isolation_level" in opts:
            isolation_level = opts["isolation_level"]

            @event.listens_for(engine, "engine_connect")
            def set_isolation(connection, branch):
                # apply only to root (non-branched) connections
                if not branch:
                    self._set_connection_isolation(connection, isolation_level)

        if "schema_translate_map" in opts:
            getter = schema._schema_getter(opts["schema_translate_map"])
            engine.schema_for_object = getter

            @event.listens_for(engine, "engine_connect")
            def set_schema_translate_map(connection, branch):
                connection.schema_for_object = getter
def set_connection_execution_options(self, connection, opts):
if "isolation_level" in opts:
self._set_connection_isolation(connection, opts["isolation_level"])
if "schema_translate_map" in opts:
getter = schema._schema_getter(opts["schema_translate_map"])
connection.schema_for_object = getter
    def _set_connection_isolation(self, connection, level):
        """Set the isolation level on *connection*, warning when a
        transaction is already in progress, and arrange for the level to
        be reset when the connection is returned to the pool."""
        if connection.in_transaction():
            util.warn(
                "Connection is already established with a Transaction; "
                "setting isolation_level may implicitly rollback or commit "
                "the existing transaction, or have no effect until "
                "next transaction"
            )
        self.set_isolation_level(connection.connection, level)
        # restore the default level when the pooled connection is recycled
        connection.connection._connection_record.finalize_callback.append(
            self.reset_isolation_level
        )
    def do_begin(self, dbapi_connection):
        # no-op by default; no explicit BEGIN is emitted
        pass

    def do_rollback(self, dbapi_connection):
        # delegate directly to the raw DBAPI connection
        dbapi_connection.rollback()

    def do_commit(self, dbapi_connection):
        # delegate directly to the raw DBAPI connection
        dbapi_connection.commit()

    def do_close(self, dbapi_connection):
        # delegate directly to the raw DBAPI connection
        dbapi_connection.close()
    @util.memoized_property
    def _dialect_specific_select_one(self):
        # cached "SELECT 1" string compiled for this dialect; used by
        # do_ping()
        return str(expression.select([1]).compile(dialect=self))
def do_ping(self, dbapi_connection):
cursor = None
try:
cursor = dbapi_connection.cursor()
try:
cursor.execute(self._dialect_specific_select_one)
finally:
cursor.close()
except self.dbapi.Error as err:
if self.is_disconnect(err, dbapi_connection, cursor):
return False
else:
raise
else:
return True
def create_xid(self):
"""Create a random two-phase transaction ID.
This id will be passed to do_begin_twophase(), do_rollback_twophase(),
do_commit_twophase(). Its format is unspecified.
"""
return "_sa_%032x" % random.randint(0, 2 ** 128)
    def do_savepoint(self, connection, name):
        # emit SAVEPOINT via the SQL expression construct
        connection.execute(expression.SavepointClause(name))

    def do_rollback_to_savepoint(self, connection, name):
        # emit ROLLBACK TO SAVEPOINT via the SQL expression construct
        connection.execute(expression.RollbackToSavepointClause(name))

    def do_release_savepoint(self, connection, name):
        # emit RELEASE SAVEPOINT via the SQL expression construct
        connection.execute(expression.ReleaseSavepointClause(name))
def do_executemany(self, cursor, statement, parameters, context=None):
cursor.executemany(statement, parameters)
def do_execute(self, cursor, statement, parameters, context=None):
cursor.execute(statement, parameters)
def do_execute_no_params(self, cursor, statement, context=None):
cursor.execute(statement)
    def is_disconnect(self, e, connection, cursor):
        # default: never interpret an error as a disconnect situation
        return False
def reset_isolation_level(self, dbapi_conn):
# default_isolation_level is read from the first connection
# after the initial set of 'isolation_level', if any, so is
# the configured default of this dialect.
self.set_isolation_level(dbapi_conn, self.default_isolation_level)
    def normalize_name(self, name):
        """Normalize a database-reported identifier: all-uppercase
        unquoted names become lowercase; see denormalize_name for the
        inverse."""
        if name is None:
            return None
        if util.py2k:
            if isinstance(name, str):
                name = name.decode(self.encoding)

        name_lower = name.lower()
        name_upper = name.upper()

        if name_upper == name_lower:
            # name has no upper/lower conversion, e.g. non-european characters.
            # return unchanged
            return name
        elif name_upper == name and not (
            self.identifier_preparer._requires_quotes
        )(name_lower):
            # name is all uppercase and doesn't require quoting; normalize
            # to all lower case
            return name_lower
        elif name_lower == name:
            # name is all lower case, which if denormalized means we need to
            # force quoting on it
            return quoted_name(name, quote=True)
        else:
            # name is mixed case, means it will be quoted in SQL when used
            # later, no normalization needed
            return name
    def denormalize_name(self, name):
        """Inverse of normalize_name: all-lowercase unquoted names are
        converted back to uppercase for the database."""
        if name is None:
            return None

        name_lower = name.lower()
        name_upper = name.upper()

        if name_upper == name_lower:
            # name has no upper/lower conversion, e.g. non-european characters.
            # return unchanged
            return name
        elif name_lower == name and not (
            self.identifier_preparer._requires_quotes
        )(name_lower):
            # all-lowercase and unquoted: send upper case to the database
            name = name_upper
        if util.py2k:
            if not self.supports_unicode_binds:
                name = name.encode(self.encoding)
            else:
                name = unicode(name)  # noqa
        return name
class _RendersLiteral(object):
def literal_processor(self, dialect):
def process(value):
return "'%s'" % value
return process
# String-compilation variants of the temporal types: render literals via
# the _RendersLiteral mixin.
class _StrDateTime(_RendersLiteral, sqltypes.DateTime):
    pass


class _StrDate(_RendersLiteral, sqltypes.Date):
    pass


class _StrTime(_RendersLiteral, sqltypes.Time):
    pass
class StrCompileDialect(DefaultDialect):
    """Dialect used for compiling statements to generic strings, not tied
    to any database."""

    statement_compiler = compiler.StrSQLCompiler
    ddl_compiler = compiler.DDLCompiler
    type_compiler = compiler.StrSQLTypeCompiler
    preparer = compiler.IdentifierPreparer

    supports_sequences = True
    sequences_optional = True
    preexecute_autoincrement_sequences = False
    implicit_returning = False

    supports_native_boolean = True

    supports_simple_order_by_label = True

    # temporal types render as quoted string literals
    colspecs = {
        sqltypes.DateTime: _StrDateTime,
        sqltypes.Date: _StrDate,
        sqltypes.Time: _StrTime,
    }
class DefaultExecutionContext(interfaces.ExecutionContext):
    """Default implementation of :class:`.ExecutionContext`, holding the
    state of a single statement execution."""

    # statement-kind flags, set by the _init_* constructors
    isinsert = False
    isupdate = False
    isdelete = False
    is_crud = False
    is_text = False
    isddl = False
    # True when multiple parameter sets are executed via executemany()
    executemany = False
    compiled = None
    statement = None
    result_column_struct = None
    returned_defaults = None
    _is_implicit_returning = False
    _is_explicit_returning = False

    # a hook for SQLite's translation of
    # result column names
    _translate_colname = None

    # mapping of expanding-IN parameter name -> list of generated key names
    _expanded_parameters = util.immutabledict()
    @classmethod
    def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl):
        """Initialize execution context for a DDLElement construct."""

        # bypass __init__; state is assembled field by field
        self = cls.__new__(cls)
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.dialect = connection.dialect

        self.compiled = compiled = compiled_ddl
        self.isddl = True

        self.execution_options = compiled.execution_options
        if connection._execution_options:
            # merge connection-level options over the compiled options
            self.execution_options = dict(self.execution_options)
            self.execution_options.update(connection._execution_options)

        if not dialect.supports_unicode_statements:
            self.unicode_statement = util.text_type(compiled)
            self.statement = dialect._encoder(self.unicode_statement)[0]
        else:
            self.statement = self.unicode_statement = util.text_type(compiled)

        self.cursor = self.create_cursor()
        self.compiled_parameters = []

        if dialect.positional:
            self.parameters = [dialect.execute_sequence_format()]
        else:
            self.parameters = [{}]

        return self
    @classmethod
    def _init_compiled(
        cls, dialect, connection, dbapi_connection, compiled, parameters
    ):
        """Initialize execution context for a Compiled construct.

        Determines statement encoding, CRUD/RETURNING flags, processes
        Python-side defaults, and converts the compiled parameters into
        the positional or keyword form the DBAPI expects.
        """

        self = cls.__new__(cls)
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.dialect = connection.dialect

        self.compiled = compiled

        # this should be caught in the engine before
        # we get here
        assert compiled.can_execute

        self.execution_options = compiled.execution_options.union(
            connection._execution_options
        )

        self.result_column_struct = (
            compiled._result_columns,
            compiled._ordered_columns,
            compiled._textual_ordered_columns,
        )

        self.unicode_statement = util.text_type(compiled)
        if not dialect.supports_unicode_statements:
            self.statement = self.unicode_statement.encode(
                self.dialect.encoding
            )
        else:
            self.statement = self.unicode_statement

        self.isinsert = compiled.isinsert
        self.isupdate = compiled.isupdate
        self.isdelete = compiled.isdelete
        self.is_text = compiled.isplaintext

        if not parameters:
            self.compiled_parameters = [compiled.construct_params()]
        else:
            self.compiled_parameters = [
                compiled.construct_params(m, _group_number=grp)
                for grp, m in enumerate(parameters)
            ]

            self.executemany = len(parameters) > 1

        self.cursor = self.create_cursor()

        if self.isinsert or self.isupdate or self.isdelete:
            self.is_crud = True
            self._is_explicit_returning = bool(compiled.statement._returning)
            self._is_implicit_returning = bool(
                compiled.returning and not compiled.statement._returning
            )

        # fire Python-side column defaults before parameter conversion
        if self.compiled.insert_prefetch or self.compiled.update_prefetch:
            if self.executemany:
                self._process_executemany_defaults()
            else:
                self._process_executesingle_defaults()

        processors = compiled._bind_processors

        if compiled.contains_expanding_parameters:
            # copy processors for this case as they will be mutated
            processors = dict(processors)
            positiontup = self._expand_in_parameters(compiled, processors)
        elif compiled.positional:
            positiontup = self.compiled.positiontup

        # Convert the dictionary of bind parameter values
        # into a dict or list to be sent to the DBAPI's
        # execute() or executemany() method.
        parameters = []
        if compiled.positional:
            for compiled_params in self.compiled_parameters:
                param = []
                for key in positiontup:
                    if key in processors:
                        param.append(processors[key](compiled_params[key]))
                    else:
                        param.append(compiled_params[key])
                parameters.append(dialect.execute_sequence_format(param))
        else:
            # keyword parameters; encode keys if the DBAPI can't take unicode
            encode = not dialect.supports_unicode_statements
            for compiled_params in self.compiled_parameters:
                if encode:
                    param = dict(
                        (
                            dialect._encoder(key)[0],
                            processors[key](compiled_params[key])
                            if key in processors
                            else compiled_params[key],
                        )
                        for key in compiled_params
                    )
                else:
                    param = dict(
                        (
                            key,
                            processors[key](compiled_params[key])
                            if key in processors
                            else compiled_params[key],
                        )
                        for key in compiled_params
                    )

                parameters.append(param)

        self.parameters = dialect.execute_sequence_format(parameters)

        return self
    def _expand_in_parameters(self, compiled, processors):
        """handle special 'expanding' parameters, IN tuples that are rendered
        on a per-parameter basis for an otherwise fixed SQL statement string.

        Each expanding parameter is replaced by one numbered parameter per
        element; the ``[EXPANDING_name]`` placeholder in the statement text
        is substituted with the rendered bind list.  Returns the rebuilt
        position tuple for positional dialects, else None.
        """
        if self.executemany:
            raise exc.InvalidRequestError(
                "'expanding' parameters can't be used with " "executemany()"
            )

        if self.compiled.positional and self.compiled._numeric_binds:
            # I'm not familiar with any DBAPI that uses 'numeric'
            raise NotImplementedError(
                "'expanding' bind parameters not supported with "
                "'numeric' paramstyle at this time."
            )

        self._expanded_parameters = {}

        compiled_params = self.compiled_parameters[0]
        if compiled.positional:
            positiontup = []
        else:
            positiontup = None

        replacement_expressions = {}
        to_update_sets = {}

        for name in (
            self.compiled.positiontup
            if compiled.positional
            else self.compiled.binds
        ):
            parameter = self.compiled.binds[name]
            if parameter.expanding:

                if name in replacement_expressions:
                    # a positional name may repeat; reuse the expansion
                    to_update = to_update_sets[name]
                else:
                    # we are removing the parameter from compiled_params
                    # because it is a list value, which is not expected by
                    # TypeEngine objects that would otherwise be asked to
                    # process it. the single name is being replaced with
                    # individual numbered parameters for each value in the
                    # param.
                    values = compiled_params.pop(name)

                    if not values:
                        # empty IN list: render the dialect's empty-set SQL
                        to_update = to_update_sets[name] = []
                        replacement_expressions[
                            name
                        ] = self.compiled.visit_empty_set_expr(
                            parameter._expanding_in_types
                            if parameter._expanding_in_types
                            else [parameter.type]
                        )

                    elif isinstance(values[0], (tuple, list)):
                        # tuple-valued IN: one numbered param per element
                        # of each tuple
                        to_update = to_update_sets[name] = [
                            ("%s_%s_%s" % (name, i, j), value)
                            for i, tuple_element in enumerate(values, 1)
                            for j, value in enumerate(tuple_element, 1)
                        ]
                        replacement_expressions[name] = (
                            "VALUES " if self.dialect.tuple_in_values else ""
                        ) + ", ".join(
                            "(%s)"
                            % ", ".join(
                                self.compiled.bindtemplate
                                % {
                                    "name": to_update[
                                        i * len(tuple_element) + j
                                    ][0]
                                }
                                for j, value in enumerate(tuple_element)
                            )
                            for i, tuple_element in enumerate(values)
                        )
                    else:
                        # scalar IN: one numbered param per value
                        to_update = to_update_sets[name] = [
                            ("%s_%s" % (name, i), value)
                            for i, value in enumerate(values, 1)
                        ]
                        replacement_expressions[name] = ", ".join(
                            self.compiled.bindtemplate % {"name": key}
                            for key, value in to_update
                        )

                compiled_params.update(to_update)
                processors.update(
                    (key, processors[name])
                    for key, value in to_update
                    if name in processors
                )
                if compiled.positional:
                    positiontup.extend(name for name, value in to_update)
                self._expanded_parameters[name] = [
                    expand_key for expand_key, value in to_update
                ]
            elif compiled.positional:
                positiontup.append(name)

        def process_expanding(m):
            return replacement_expressions[m.group(1)]

        # swap the [EXPANDING_name] placeholders for the rendered binds
        self.statement = re.sub(
            r"\[EXPANDING_(\S+)\]", process_expanding, self.statement
        )
        return positiontup
    @classmethod
    def _init_statement(
        cls, dialect, connection, dbapi_connection, statement, parameters
    ):
        """Initialize execution context for a string SQL statement."""

        self = cls.__new__(cls)
        self.root_connection = connection
        self._dbapi_connection = dbapi_connection
        self.dialect = connection.dialect
        self.is_text = True

        # plain text statement
        self.execution_options = connection._execution_options

        if not parameters:
            # no parameters: a single empty parameter set of the
            # dialect-appropriate shape
            if self.dialect.positional:
                self.parameters = [dialect.execute_sequence_format()]
            else:
                self.parameters = [{}]
        elif isinstance(parameters[0], dialect.execute_sequence_format):
            self.parameters = parameters
        elif isinstance(parameters[0], dict):
            if dialect.supports_unicode_statements:
                self.parameters = parameters
            else:
                # encode the keys of each parameter dict
                self.parameters = [
                    {dialect._encoder(k)[0]: d[k] for k in d}
                    for d in parameters
                ] or [{}]
        else:
            self.parameters = [
                dialect.execute_sequence_format(p) for p in parameters
            ]

        self.executemany = len(parameters) > 1

        if not dialect.supports_unicode_statements and isinstance(
            statement, util.text_type
        ):
            self.unicode_statement = statement
            self.statement = dialect._encoder(statement)[0]
        else:
            self.statement = self.unicode_statement = statement

        self.cursor = self.create_cursor()
        return self
@classmethod
def _init_default(cls, dialect, connection, dbapi_connection):
"""Initialize execution context for a ColumnDefault construct."""
self = cls.__new__(cls)
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
self.execution_options = connection._execution_options
self.cursor = self.create_cursor()
return self
    @util.memoized_property
    def engine(self):
        # the Engine owning the root connection
        return self.root_connection.engine

    @util.memoized_property
    def postfetch_cols(self):
        # columns whose server-generated values must be fetched after
        # execution
        return self.compiled.postfetch

    @util.memoized_property
    def prefetch_cols(self):
        # columns whose Python-side defaults are computed before execution;
        # depends on whether this is an INSERT or UPDATE
        if self.isinsert:
            return self.compiled.insert_prefetch
        elif self.isupdate:
            return self.compiled.update_prefetch
        else:
            return ()
@util.memoized_property
def returning_cols(self):
self.compiled.returning
    @util.memoized_property
    def no_parameters(self):
        # True when the "no_parameters" execution option was set
        return self.execution_options.get("no_parameters", False)
    @util.memoized_property
    def should_autocommit(self):
        """Whether this execution should trigger a COMMIT when no
        transaction is in progress."""
        # default for plain-text statements is the PARSE_AUTOCOMMIT
        # sentinel, meaning "inspect the statement text"
        autocommit = self.execution_options.get(
            "autocommit",
            not self.compiled
            and self.statement
            and expression.PARSE_AUTOCOMMIT
            or False,
        )

        if autocommit is expression.PARSE_AUTOCOMMIT:
            return self.should_autocommit_text(self.unicode_statement)
        else:
            return autocommit
    def _execute_scalar(self, stmt, type_):
        """Execute a string statement on the current cursor, returning a
        scalar result.

        Used to fire off sequences, default phrases, and "select lastrowid"
        types of statements individually or in the context of a parent INSERT
        or UPDATE statement.

        :param stmt: statement string to execute.
        :param type_: optional TypeEngine whose result processor is applied
            to the scalar value before returning.
        """

        conn = self.root_connection
        if (
            isinstance(stmt, util.text_type)
            and not self.dialect.supports_unicode_statements
        ):
            stmt = self.dialect._encoder(stmt)[0]

        if self.dialect.positional:
            default_params = self.dialect.execute_sequence_format()
        else:
            default_params = {}

        conn._cursor_execute(self.cursor, stmt, default_params, context=self)
        # scalar result: first column of the first row
        r = self.cursor.fetchone()[0]
        if type_ is not None:
            # apply type post processors to the result
            proc = type_._cached_result_processor(
                self.dialect, self.cursor.description[0][1]
            )
            if proc:
                return proc(r)
        return r
    @property
    def connection(self):
        # the Connection in use, as a branch of the root connection
        return self.root_connection._branch()
    def should_autocommit_text(self, statement):
        # regex heuristic on the statement text; pattern is defined at
        # module level
        return AUTOCOMMIT_REGEXP.match(statement)
    def _use_server_side_cursor(self):
        """Decide whether a server-side cursor should be used for this
        execution, based on dialect capability, the ``stream_results``
        execution option, and the kind of statement being run."""
        if not self.dialect.supports_server_side_cursors:
            return False

        if self.dialect.server_side_cursors:
            # dialect-wide server-side mode: on unless stream_results is
            # explicitly disabled, and only for SELECT-like statements
            use_server_side = self.execution_options.get(
                "stream_results", True
            ) and (
                (
                    self.compiled
                    and isinstance(
                        self.compiled.statement, expression.Selectable
                    )
                    or (
                        (
                            not self.compiled
                            or isinstance(
                                self.compiled.statement, expression.TextClause
                            )
                        )
                        and self.statement
                        and SERVER_SIDE_CURSOR_RE.match(self.statement)
                    )
                )
            )
        else:
            # opt-in per execution via the stream_results option
            use_server_side = self.execution_options.get(
                "stream_results", False
            )

        return use_server_side
def create_cursor(self):
if self._use_server_side_cursor():
self._is_server_side = True
return self.create_server_side_cursor()
else:
self._is_server_side = False
return self._dbapi_connection.cursor()
    def create_server_side_cursor(self):
        # must be provided by dialects that support server-side cursors
        raise NotImplementedError()

    def pre_exec(self):
        # hook invoked before statement execution; no-op by default
        pass

    def post_exec(self):
        # hook invoked after statement execution; no-op by default
        pass
    def get_result_processor(self, type_, colname, coltype):
        """Return a 'result processor' for a given type as present in
        cursor.description.

        This has a default implementation that dialects can override
        for context-sensitive result type handling.

        :param type_: the TypeEngine for the result column.
        :param colname: column name from cursor.description (unused here).
        :param coltype: DBAPI type code from cursor.description.
        """
        return type_._cached_result_processor(self.dialect, coltype)
    def get_lastrowid(self):
        """return self.cursor.lastrowid, or equivalent, after an INSERT.

        This may involve calling special cursor functions,
        issuing a new SELECT on the cursor (or a new one),
        or returning a stored value that was
        calculated within post_exec().

        This function will only be called for dialects
        which support "implicit" primary key generation,
        keep preexecute_autoincrement_sequences set to False,
        and when no explicit id value was bound to the
        statement.

        The function is called once, directly after
        post_exec() and before the transaction is committed
        or ResultProxy is generated. If the post_exec()
        method assigns a value to `self._lastrowid`, the
        value is used in place of calling get_lastrowid().

        Note that this method is *not* equivalent to the
        ``lastrowid`` method on ``ResultProxy``, which is a
        direct proxy to the DBAPI ``lastrowid`` accessor
        in all cases.
        """
        # default: the pep-249 standard cursor attribute
        return self.cursor.lastrowid
    def handle_dbapi_exception(self, e):
        # hook invoked when a DBAPI error is raised; no-op by default
        pass
def get_result_proxy(self):
if self._is_server_side:
return result.BufferedRowResultProxy(self)
else:
return result.ResultProxy(self)
    @property
    def rowcount(self):
        # direct proxy to the DBAPI cursor's rowcount
        return self.cursor.rowcount

    def supports_sane_rowcount(self):
        # delegate to the dialect's capability flag
        return self.dialect.supports_sane_rowcount

    def supports_sane_multi_rowcount(self):
        # delegate to the dialect's capability flag
        return self.dialect.supports_sane_multi_rowcount
    def _setup_crud_result_proxy(self):
        """Build the ResultProxy for an INSERT/UPDATE/DELETE, populating
        inserted_primary_key / returned_defaults and closing the result
        when no rows are expected back."""
        if self.isinsert and not self.executemany:
            if (
                not self._is_implicit_returning
                and not self.compiled.inline
                and self.dialect.postfetch_lastrowid
            ):
                self._setup_ins_pk_from_lastrowid()

            elif not self._is_implicit_returning:
                self._setup_ins_pk_from_empty()

        # local name shadows the module-level "result" within this method
        result = self.get_result_proxy()

        if self.isinsert:
            if self._is_implicit_returning:
                # consume the single RETURNING row, then close
                row = result.fetchone()
                self.returned_defaults = row
                self._setup_ins_pk_from_implicit_returning(row)
                result._soft_close()
                result._metadata = None
            elif not self._is_explicit_returning:
                result._soft_close()
                result._metadata = None
        elif self.isupdate and self._is_implicit_returning:
            row = result.fetchone()
            self.returned_defaults = row
            result._soft_close()
            result._metadata = None
        elif result._metadata is None:
            # no results, get rowcount
            # (which requires open cursor on some drivers
            # such as kintersbasdb, mxodbc)
            result.rowcount
            result._soft_close()
        return result
    def _setup_ins_pk_from_lastrowid(self):
        """Populate inserted_primary_key using cursor.lastrowid for the
        autoincrement column, falling back to compiled parameter values
        for the remaining primary key columns."""
        key_getter = self.compiled._key_getters_for_crud_column[2]
        table = self.compiled.statement.table
        compiled_params = self.compiled_parameters[0]

        lastrowid = self.get_lastrowid()
        if lastrowid is not None:
            autoinc_col = table._autoincrement_column
            if autoinc_col is not None:
                # apply type post processors to the lastrowid
                proc = autoinc_col.type._cached_result_processor(
                    self.dialect, None
                )
                if proc is not None:
                    lastrowid = proc(lastrowid)
            self.inserted_primary_key = [
                lastrowid
                if c is autoinc_col
                else compiled_params.get(key_getter(c), None)
                for c in table.primary_key
            ]
        else:
            # don't have a usable lastrowid, so
            # do the same as _setup_ins_pk_from_empty
            self.inserted_primary_key = [
                compiled_params.get(key_getter(c), None)
                for c in table.primary_key
            ]
def _setup_ins_pk_from_empty(self):
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
self.inserted_primary_key = [
compiled_params.get(key_getter(c), None) for c in table.primary_key
]
def _setup_ins_pk_from_implicit_returning(self, row):
if row is None:
self.inserted_primary_key = None
return
key_getter = self.compiled._key_getters_for_crud_column[2]
table = self.compiled.statement.table
compiled_params = self.compiled_parameters[0]
self.inserted_primary_key = [
row[col] if value is None else value
for col, value in [
(col, compiled_params.get(key_getter(col), None))
for col in table.primary_key
]
]
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and bool(
self.compiled.postfetch
)
    def set_input_sizes(
        self, translate=None, include_types=None, exclude_types=None
    ):
        """Given a cursor and ClauseParameters, call the appropriate
        style of ``setinputsizes()`` on the cursor, using DB-API types
        from the bind parameter's ``TypeEngine`` objects.

        This method only called by those dialects which require it,
        currently cx_oracle.

        :param translate: optional mapping used to rename bind keys.
        :param include_types: restrict to these DBAPI types / impl classes.
        :param exclude_types: skip these DBAPI types / impl classes.
        """
        if not hasattr(self.compiled, "bind_names"):
            return

        # map each bind parameter to its DBAPI type, subject to the
        # include/exclude filters
        inputsizes = {}
        for bindparam in self.compiled.bind_names:
            dialect_impl = bindparam.type._unwrapped_dialect_impl(self.dialect)
            dialect_impl_cls = type(dialect_impl)
            dbtype = dialect_impl.get_dbapi_type(self.dialect.dbapi)

            if (
                dbtype is not None
                and (
                    not exclude_types
                    or dbtype not in exclude_types
                    and dialect_impl_cls not in exclude_types
                )
                and (
                    not include_types
                    or dbtype in include_types
                    or dialect_impl_cls in include_types
                )
            ):
                inputsizes[bindparam] = dbtype
            else:
                inputsizes[bindparam] = None

        if self.dialect._has_events:
            self.dialect.dispatch.do_setinputsizes(
                inputsizes, self.cursor, self.statement, self.parameters, self
            )

        if self.dialect.positional:
            positional_inputsizes = []
            for key in self.compiled.positiontup:
                bindparam = self.compiled.binds[key]
                dbtype = inputsizes.get(bindparam, None)
                if dbtype is not None:
                    if key in self._expanded_parameters:
                        # expanding IN parameters occupy multiple positions
                        positional_inputsizes.extend(
                            [dbtype] * len(self._expanded_parameters[key])
                        )
                    else:
                        positional_inputsizes.append(dbtype)
            try:
                self.cursor.setinputsizes(*positional_inputsizes)
            except BaseException as e:
                self.root_connection._handle_dbapi_exception(
                    e, None, None, None, self
                )
        else:
            keyword_inputsizes = {}
            for bindparam, key in self.compiled.bind_names.items():
                dbtype = inputsizes.get(bindparam, None)
                if dbtype is not None:
                    if translate:
                        # TODO: this part won't work w/ the
                        # expanded_parameters feature, e.g. for cx_oracle
                        # quoted bound names
                        key = translate.get(key, key)
                    if not self.dialect.supports_unicode_binds:
                        key = self.dialect._encoder(key)[0]
                    if key in self._expanded_parameters:
                        keyword_inputsizes.update(
                            (expand_key, dbtype)
                            for expand_key in self._expanded_parameters[key]
                        )
                    else:
                        keyword_inputsizes[key] = dbtype
            try:
                self.cursor.setinputsizes(**keyword_inputsizes)
            except BaseException as e:
                self.root_connection._handle_dbapi_exception(
                    e, None, None, None, self
                )
    def _exec_default(self, column, default, type_):
        """Produce the value for a column default: fire a sequence, call a
        Python callable, execute a SQL clause element, or return the
        scalar argument directly."""
        if default.is_sequence:
            return self.fire_sequence(default, type_)
        elif default.is_callable:
            # expose the current column to user-defined default functions
            self.current_column = column
            return default.arg(self)
        elif default.is_clause_element:
            # TODO: expensive branching here should be
            # pulled into _exec_scalar()
            conn = self.connection
            if not default._arg_is_typed:
                default_arg = expression.type_coerce(default.arg, type_)
            else:
                default_arg = default.arg
            c = expression.select([default_arg]).compile(bind=conn)
            return conn._execute_compiled(c, (), {}).scalar()
        else:
            return default.arg
current_parameters = None
"""A dictionary of parameters applied to the current row.
This attribute is only available in the context of a user-defined default
generation function, e.g. as described at :ref:`context_default_functions`.
It consists of a dictionary which includes entries for each column/value
pair that is to be part of the INSERT or UPDATE statement. The keys of the
dictionary will be the key value of each :class:`_schema.Column`,
which is usually
synonymous with the name.
Note that the :attr:`.DefaultExecutionContext.current_parameters` attribute
does not accommodate for the "multi-values" feature of the
:meth:`_expression.Insert.values` method. The
:meth:`.DefaultExecutionContext.get_current_parameters` method should be
preferred.
.. seealso::
:meth:`.DefaultExecutionContext.get_current_parameters`
:ref:`context_default_functions`
"""
    def get_current_parameters(self, isolate_multiinsert_groups=True):
        """Return a dictionary of parameters applied to the current row.

        This method can only be used in the context of a user-defined default
        generation function, e.g. as described at
        :ref:`context_default_functions`. When invoked, a dictionary is
        returned which includes entries for each column/value pair that is part
        of the INSERT or UPDATE statement. The keys of the dictionary will be
        the key value of each :class:`_schema.Column`,
        which is usually synonymous
        with the name.

        :param isolate_multiinsert_groups=True: indicates that multi-valued
         INSERT constructs created using :meth:`_expression.Insert.values`
         should be
         handled by returning only the subset of parameters that are local
         to the current column default invocation.   When ``False``, the
         raw parameters of the statement are returned including the
         naming convention used in the case of multi-valued INSERT.

        .. versionadded:: 1.2  added
           :meth:`.DefaultExecutionContext.get_current_parameters`
           which provides more functionality over the existing
           :attr:`.DefaultExecutionContext.current_parameters`
           attribute.

        .. seealso::

            :attr:`.DefaultExecutionContext.current_parameters`

            :ref:`context_default_functions`

        """
        try:
            parameters = self.current_parameters
            column = self.current_column
        except AttributeError:
            raise exc.InvalidRequestError(
                "get_current_parameters() can only be invoked in the "
                "context of a Python side column default function"
            )
        if (
            isolate_multiinsert_groups
            and self.isinsert
            and self.compiled.statement._has_multi_parameters
        ):
            # narrow the raw multi-values parameter dict down to the
            # group corresponding to the current column invocation
            if column._is_multiparam_column:
                index = column.index + 1
                d = {column.original.key: parameters[column.key]}
            else:
                d = {column.key: parameters[column.key]}
                index = 0
            keys = self.compiled.statement.parameters[0].keys()
            # multi-values parameters are suffixed "_m<group index>"
            d.update(
                (key, parameters["%s_m%d" % (key, index)]) for key in keys
            )
            return d
        else:
            return parameters
def get_insert_default(self, column):
if column.default is None:
return None
else:
return self._exec_default(column, column.default, column.type)
def get_update_default(self, column):
if column.onupdate is None:
return None
else:
return self._exec_default(column, column.onupdate, column.type)
    def _process_executemany_defaults(self):
        """Compute and fill Python-side column defaults into every
        parameter set of an executemany() run."""
        key_getter = self.compiled._key_getters_for_crud_column[2]

        scalar_defaults = {}

        insert_prefetch = self.compiled.insert_prefetch
        update_prefetch = self.compiled.update_prefetch

        # pre-determine scalar Python-side defaults
        # to avoid many calls of get_insert_default()/
        # get_update_default()
        for c in insert_prefetch:
            if c.default and c.default.is_scalar:
                scalar_defaults[c] = c.default.arg
        for c in update_prefetch:
            if c.onupdate and c.onupdate.is_scalar:
                scalar_defaults[c] = c.onupdate.arg

        for param in self.compiled_parameters:
            # current_parameters exposes this row to user default functions
            self.current_parameters = param
            for c in insert_prefetch:
                if c in scalar_defaults:
                    val = scalar_defaults[c]
                else:
                    val = self.get_insert_default(c)
                if val is not None:
                    param[key_getter(c)] = val
            for c in update_prefetch:
                if c in scalar_defaults:
                    val = scalar_defaults[c]
                else:
                    val = self.get_update_default(c)
                if val is not None:
                    param[key_getter(c)] = val

        del self.current_parameters
    def _process_executesingle_defaults(self):
        """Compute and fill Python-side column defaults for a single-row
        execution."""
        key_getter = self.compiled._key_getters_for_crud_column[2]
        # current_parameters exposes this row to user default functions
        self.current_parameters = (
            compiled_parameters
        ) = self.compiled_parameters[0]

        for c in self.compiled.insert_prefetch:
            if c.default and not c.default.is_sequence and c.default.is_scalar:
                # fast path for plain scalar defaults
                val = c.default.arg
            else:
                val = self.get_insert_default(c)

            if val is not None:
                compiled_parameters[key_getter(c)] = val

        for c in self.compiled.update_prefetch:
            val = self.get_update_default(c)

            if val is not None:
                compiled_parameters[key_getter(c)] = val
        del self.current_parameters
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
# ---------------------------------------------------------------------------
# (boundary between concatenated sources; stray "|" artifact characters
# replaced, as they are not valid Python)
# ---------------------------------------------------------------------------
"""
This module defines the :class:`GenericFunction` class, which is the base for
the implementation of spatial functions in GeoAlchemy. The spatial functions
supported by GeoAlchemy are also defined in this module. See
:class:`GenericFunction` to learn how to create new spatial functions.
.. note::
By convention the names of spatial functions are prefixed by ``ST_``. This
is to be consistent with PostGIS', which itself is based on the ``SQL-MM``
standard.
Functions created by subclassing :class:`GenericFunction` can be called
in several ways:
* By using the ``func`` object, which is the SQLAlchemy standard way of calling
a function. For example, without the ORM::
select([func.ST_Area(lake_table.c.geom)])
and with the ORM::
Session.query(func.ST_Area(Lake.geom))
* By applying the function to a geometry column. For example, without the
ORM::
select([lake_table.c.geom.ST_Area()])
and with the ORM::
Session.query(Lake.geom.ST_Area())
* By applying the function to a :class:`geoalchemy2.elements.WKBElement`
object (:class:`geoalchemy2.elements.WKBElement` is the type into
which GeoAlchemy converts geometry values read from the database), or
to a :class:`geoalchemy2.elements.WKTElement` object. For example,
without the ORM::
conn.scalar(lake['geom'].ST_Area())
and with the ORM::
session.scalar(lake.geom.ST_Area())
Reference
---------
"""
import re
from sqlalchemy import inspect
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import annotation
from sqlalchemy.sql import functions
from sqlalchemy.sql.elements import ColumnElement
from . import elements
from ._functions import _FUNCTIONS
# SQLAlchemy changed how GenericFunction subclasses are registered: before
# 2.0 a metaclass (_GenericMeta) drove registration, from 2.0 onwards it is
# done through __init_subclass__.  Detect the installed version at import
# time and expose the chosen strategy under the common names
# _GeoFunctionBase / _GeoFunctionParent used by the rest of this module.
try:
    # SQLAlchemy < 2
    from sqlalchemy.sql.functions import _GenericMeta
    from sqlalchemy.util import with_metaclass

    class _GeoGenericMeta(_GenericMeta):
        """Extend the metaclass mechanism of sqlalchemy to register the functions in
        a specific registry for geoalchemy2"""

        # Do not register the base class built below, only its subclasses.
        _register = False

        def __init__(cls, clsname, bases, clsdict):
            # Register the function
            elements.function_registry.add(clsname.lower())
            super(_GeoGenericMeta, cls).__init__(clsname, bases, clsdict)

    _GeoFunctionBase = with_metaclass(_GeoGenericMeta, functions.GenericFunction)
    _GeoFunctionParent = functions.GenericFunction
except ImportError:
    # SQLAlchemy >= 2
    class GeoGenericFunction(functions.GenericFunction):
        def __init_subclass__(cls) -> None:
            # Skip SQLAlchemy-internal Annotated subclasses; only real
            # user-defined function classes are registered.
            if annotation.Annotated not in cls.__mro__:
                cls._register_geo_function(cls.__name__, cls.__dict__)
            super().__init_subclass__()

        @classmethod
        def _register_geo_function(cls, clsname, clsdict):
            # Check _register attribute status
            cls._register = getattr(cls, "_register", True)

            # Register the function if required
            if cls._register:
                elements.function_registry.add(clsname.lower())
            else:
                # Set _register to True to register child classes by default
                cls._register = True

    _GeoFunctionBase = GeoGenericFunction
    _GeoFunctionParent = GeoGenericFunction
class TableRowElement(ColumnElement):
    """Column-like SQL element standing for a whole table row.

    Used by ``ST_AsGeoJSON`` so an entire selectable can be handed to the
    function (PostGIS 3 "feature" mode).
    """

    inherit_cache = False

    def __init__(self, selectable):
        # The selectable whose row this element represents.
        self.selectable = selectable

    @property
    def _from_objects(self):
        # Contribute the wrapped selectable to the FROM clause.
        return [self.selectable]
class ST_AsGeoJSON(_GeoFunctionBase):
    """Special process for the ST_AsGeoJSON() function to be able to work with its
    feature version introduced in PostGIS 3."""

    name = "ST_AsGeoJSON"
    inherit_cache = True

    def __init__(self, *args, **kwargs):
        # `expr` is the bound expression when called as
        # `some_column.ST_AsGeoJSON(...)`; it becomes the first argument.
        expr = kwargs.pop('expr', None)
        args = list(args)
        if expr is not None:
            args = [expr] + args
        for idx, element in enumerate(args):
            if isinstance(element, functions.Function):
                # Already a SQL function expression: pass through unchanged.
                continue
            elif isinstance(element, elements.HasFunction):
                # WKB/WKT elements are wrapped in the matching GeomFrom*
                # conversion function before being passed to the database.
                if element.extended:
                    func_name = element.geom_from_extended_version
                    func_args = [element.data]
                else:
                    func_name = element.geom_from
                    func_args = [element.data, element.srid]
                args[idx] = getattr(functions.func, func_name)(*func_args)
            else:
                # Anything inspectable with a selectable (ORM entity, table,
                # alias, ...) becomes a TableRowElement so PostGIS 3 renders
                # a GeoJSON "feature" for the whole row.
                try:
                    insp = inspect(element)
                    if hasattr(insp, "selectable"):
                        args[idx] = TableRowElement(insp.selectable)
                except Exception:
                    # Not inspectable: leave the argument as-is.
                    continue
        _GeoFunctionParent.__init__(self, *args, **kwargs)

    # Public __doc__ of the class; runtime string kept verbatim.
    __doc__ = (
        'Return the geometry as a GeoJSON "geometry" object, or the row as a '
        'GeoJSON feature" object (PostGIS 3 only). (Cf GeoJSON specifications RFC '
        '7946). 2D and 3D Geometries are both supported. GeoJSON only support SFS '
        '1.1 geometry types (no curve support for example). '
        'See https://postgis.net/docs/ST_AsGeoJSON.html')
@compiles(TableRowElement)
def _compile_table_row_thing(element, compiler, **kw):
    """Render a :class:`TableRowElement` as just its table/alias name.

    :param element: the ``TableRowElement`` being compiled.
    :param compiler: the SQL compiler in use.
    :return: the (possibly quoted) table or alias name as a string.
    """
    # In order to get a name as reliably as possible, noting that some
    # SQL compilers don't say "table AS name" and might not have the "AS",
    # table and alias names can have spaces in them, etc., get it from
    # a column instead because that's what we want to be showing here anyway.
    compiled = compiler.process(list(element.selectable.columns)[0], **kw)

    # 1. check for exact name of the selectable is here, use that.
    # This way if it has dots and spaces and anything else in it, we
    # can get it w/ correct quoting
    schema = getattr(element.selectable, "schema", "")
    name = element.selectable.name
    # Escape the schema/table names before interpolating them into the
    # pattern: they may contain regex metacharacters (dots, brackets, ...).
    # The surrounding ``.?`` allows one optional quoting character.
    pattern = r"(.?%s.?\.)?(.?%s.?)\." % (re.escape(schema or ""),
                                          re.escape(name))
    m = re.match(pattern, compiled)
    if m:
        return m.group(2)

    # 2. just split on the dot, assume anonymized name
    return compiled.split(".")[0]
class GenericFunction(_GeoFunctionBase):
    """The base class for GeoAlchemy functions.

    It derives from ``sqlalchemy.sql.functions.GenericFunction``, so
    subclasses can declare a fixed return type through their ``type``
    attribute (e.g. :class:`geoalchemy2.types.Geometry` for
    :class:`ST_Buffer` and :class:`ST_Envelope`).

    It also enables constructs like ``Lake.geom.ST_Buffer(2)``: the
    ``Function`` instance is bound to an expression (``Lake.geom`` here)
    which is passed to the function when it is actually called.

    To use a spatial function GeoAlchemy does not provide (for instance
    ``ST_TransScale``), subclass this class::

        from geoalchemy2 import Geometry
        from geoalchemy2.functions import GenericFunction

        class ST_TransScale(GenericFunction):
            name = 'ST_TransScale'
            type = Geometry
    """

    # Set _register to False in order not to register this class in
    # sqlalchemy.sql.functions._registry. Only its children will be registered.
    _register = False

    def __init__(self, *args, **kwargs):
        bound_expr = kwargs.pop('expr', None)
        arguments = list(args)
        if bound_expr is not None:
            arguments.insert(0, bound_expr)
        for position, argument in enumerate(arguments):
            # WKB/WKT elements are converted into the matching
            # "GeomFrom*" call before being handed to the parent class.
            if isinstance(argument, elements.HasFunction):
                if argument.extended:
                    conversion_name = argument.geom_from_extended_version
                    conversion_args = [argument.data]
                else:
                    conversion_name = argument.geom_from
                    conversion_args = [argument.data, argument.srid]
                conversion = getattr(functions.func, conversion_name)
                arguments[position] = conversion(*conversion_args)
        _GeoFunctionParent.__init__(self, *arguments, **kwargs)
# Iterate through _FUNCTIONS and create GenericFunction classes dynamically.
# Each entry is a (name, return type, doc) triple; `doc` may be a plain
# string, a (text, postgis-doc-page) tuple, or None.
for name, type_, doc in _FUNCTIONS:
    attributes = {
        'name': name,
        'inherit_cache': True,
    }
    docs = []

    if isinstance(doc, tuple):
        # (text, page): link to the given PostGIS documentation page.
        docs.append(doc[0])
        docs.append('see http://postgis.net/docs/{0}.html'.format(doc[1]))
    elif doc is not None:
        # Plain text: link to the page named after the function itself.
        docs.append(doc)
        docs.append('see http://postgis.net/docs/{0}.html'.format(name))

    if type_ is not None:
        # Fixed return type of the generated function class.
        attributes['type'] = type_

        type_str = '{0}.{1}'.format(type_.__module__, type_.__name__)
        docs.append('Return type: :class:`{0}`.'.format(type_str))

    if len(docs) != 0:
        attributes['__doc__'] = '\n\n'.join(docs)

    # Publish the generated class at module level so it can be imported
    # directly from geoalchemy2.functions.
    globals()[name] = type(name, (GenericFunction,), attributes)
#
# Define compiled versions for functions in SpatiaLite whose names don't have
# the ST_ prefix.
#

# Maps the generic (PostGIS-style) function name to the SpatiaLite name.
_SQLITE_FUNCTIONS = {
    "ST_GeomFromEWKT": "GeomFromEWKT",
    "ST_GeomFromEWKB": "GeomFromEWKB",
    "ST_AsBinary": "AsBinary",
    "ST_AsEWKB": "AsEWKB",
    "ST_AsGeoJSON": "AsGeoJSON",
}
# Default handlers are required for SQLAlchemy < 1.1
# See more details in https://github.com/geoalchemy/geoalchemy2/issues/213
def _compiles_default(cls):
    """Register the dialect-agnostic compilation rule for function `cls`."""
    def _compile_default(element, compiler, **kw):
        processed_clauses = compiler.process(element.clauses, **kw)
        return "{}({})".format(cls, processed_clauses)
    compiles(globals()[cls])(_compile_default)
def _compiles_sqlite(cls, fn):
    """Register the SQLite-specific compilation rule: `cls` renders as `fn`."""
    def _compile_sqlite(element, compiler, **kw):
        processed_clauses = compiler.process(element.clauses, **kw)
        return "{}({})".format(fn, processed_clauses)
    compiles(globals()[cls], "sqlite")(_compile_sqlite)
def register_sqlite_mapping(mapping):
    """Register compilation mappings for the given functions.

    ``mapping`` maps generic function names onto the SQLite function names
    they should compile to::

        {
            "function_name_1": "sqlite_function_name_1",
            "function_name_2": "sqlite_function_name_2",
            ...
        }
    """
    for generic_name, sqlite_name in mapping.items():
        _compiles_default(generic_name)
        _compiles_sqlite(generic_name, sqlite_name)

register_sqlite_mapping(_SQLITE_FUNCTIONS)
|
|
""" A *Marker* is a proxy class which wraps some schema.
Immediately, the example is:
```python
from good import Schema, Required
Schema({
'name': str, # required key
Optional('age'): int, # optional key
}, default_keys=Required)
```
This way, keys marked with `Required()` will report errors if no value is provided.
Typically, a marker "decorates" a mapping key, but some of them can be "standalone":
```python
from good import Schema, Extra
Schema({
'name': str,
Extra: int # allow any keys, provided their values are integer
})
```
Each marker can have its own unique behavior since nothing is hardcoded into the core [`Schema`](#schema).
Keep on reading to learn how markers perform.
"""
from gettext import gettext as _
from .signals import RemoveValue
from .errors import Invalid, MultipleInvalid
from .util import const, get_type_name, get_literal_name
class Marker:
    """A Marker decorates a mapping key and customizes how it is matched.

    Compilation proceeds in three phases:

    1. The user sets ``Marker.key``: ``Required('name')``
    2. The key is compiled into a schema; the marker is notified through
       ``Marker.on_compiled(name, key_schema)``
    3. The marker receives a value schema through
       ``Marker.on_compiled(value_schema=value_schema)``

    When a marker is used as a mapping key, its `key_schema` is compiled as a
    matcher for performance.  During matching, CompiledSchema collects the
    input values that match the marker (`priority` decides which marker gets
    the duck) and then calls execute() so the marker can implement its logic.
    execute() is always called -- even with zero matches -- which gives every
    marker a chance to modify the input to its taste; custom markers can
    therefore validate the whole schema.

    Finally, a marker does not have to decorate anything: it can be used as a
    bare class:

    ```python
    Schema({
        'name': str,
        Extra: Reject
    })
    ```

    In that case the Marker class is automatically instantiated with an
    identity function (which matches any value): `Extra(lambda x: x)`.
    """

    # Matching order for mapping keys: higher priority matches first.
    priority = 0

    # Default error message for this marker kind (stored for convenience).
    error_message = None

    def __init__(self, key):
        # The original, user-provided key
        self.key = key
        # Human-readable marker representation
        self.name = None
        # CompiledSchema for the key
        self.key_schema = None
        # CompiledSchema for the value (when used as a key in a mapping)
        self.value_schema = None
        # Whether the marker decorates a mapping key
        self.as_mapping_key = False

    def on_compiled(self, name=None, key_schema=None, value_schema=None, as_mapping_key=None):
        """Record compilation results on this marker.

        CompiledSchema may call this several times with complementary,
        incomplete argument sets -- e.g. ``(name, key_schema, None)`` first
        and ``(None, None, value_schema)`` later -- so every attribute is
        only filled in while still unset.  A marker may legitimately end up
        without a `value_schema` at all: in ``{ Extra: Reject }``, `Reject`
        has no value schema, while `Extra` has compiled `Reject` as its value.

        :param key_schema: Compiled key schema
        :type key_schema: CompiledSchema|None
        :param value_schema: Compiled value schema
        :type value_schema: CompiledSchema|None
        :param name: Human-friendly marker name
        :type name: unicode|None
        :param as_mapping_key: Whether it's used as a mapping key?
        :type as_mapping_key: bool|None
        :rtype: Marker
        """
        self.name = name if self.name is None else self.name
        self.key_schema = key_schema if self.key_schema is None else self.key_schema
        self.value_schema = value_schema if self.value_schema is None else self.value_schema
        if as_mapping_key:
            self.as_mapping_key = True
        return self

    def __repr__(self):
        return '{0}({1})'.format(type(self).__name__, self.name or self.key)

    #region Marker is a Proxy

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        # Equal when: key == other, key == other.key, or `other` is this
        # very Marker class (supports bare-class usage, e.g. `Extra`).
        other_key = other.key if isinstance(other, Marker) else other
        return self.key == other_key or other is type(self)

    def __bytes__(self):
        return bytes(self.key)

    def __str__(self):
        return get_literal_name(self.key)

    #endregion

    def __call__(self, v):
        """Validate a key using this Marker's compiled key schema."""
        return self.key_schema(v)

    def execute(self, d, matches):
        """Execute the marker against the matching values from the input.

        Called precisely once, even when there are no matches for the marker.

        :param d: The original user input
        :type d: dict
        :param matches: List of (input-key, sanitized-input-key, input-value) triples that matched the given marker
        :type matches: list[tuple]
        :returns: The list of matches, potentially modified
        :rtype: list[tuple]
        :raises: Invalid|MultipleInvalid
        """
        # Base behavior: pass matches through untouched.
        return matches
class Required(Marker):
    """Marks a mapping key as mandatory: ``Required(key)``.

    [`Schema`](#schema) wraps keys with its `default_keys` marker, so with
    ``default_keys=Required`` every undecorated key becomes required:

    ```python
    from good import Schema, Required

    schema = Schema({
        'name': str,
        'age': int
    }, default_keys=Required)  # wrap with Required() by default

    schema({'name': 'Mark'})
    #-> Invalid: Required key not provided @ ['age']: expected age, got -none-
    ```

    Mapping keys are schemas themselves, so `Required` always expects at
    least one match:

    ```python
    schema = Schema({
        Required(str): int,
    })

    schema({})  # no `str` keys provided
    #-> Invalid: Required key not provided: expected String, got -none-
    ```

    `Required` also cooperates with [`Default`](#default), which can supply a
    value when the key is missing; see the [`Default`](#default) docs.
    """
    priority = 0
    error_message = _(u'Required key not provided')

    def execute(self, d, matches):
        # A Required() key expects to ALWAYS have one or more matches.
        if matches:
            return matches
        # No matches -- last chance: the value schema supports Undefined.
        if self.value_schema.supports_undefined:
            # Let the schema produce the value for `Undefined`.
            computed = self.value_schema(const.UNDEFINED)
            matches.append((self.key_schema.schema, self.key_schema.schema, computed))
            return matches
        # Really missing: report, with a path only when the key is a literal.
        if self.key_schema.compiled_type == const.COMPILED_TYPE.LITERAL:
            path = [self.key]
        else:
            path = []
        raise Invalid(self.error_message, self.name, _(u'-none-'), path)
class Optional(Marker):
    """Marks a mapping key as not required: the counterpart of [`Required(key)`](#required).

    It only has meaning when a [`Schema`](#schema) uses
    ``default_keys=Required``: every undecorated key is then wrapped with
    `Required()`, while keys already decorated with some marker -- such as
    `Optional()` -- are left alone.  It is therefore only used to prevent
    `Schema` from putting `Required()` on a key, and has absolutely no other
    special behavior: an optional key may be missing, but a provided value
    must still match the value schema.

    Example: use as `default_keys`:

    ```python
    schema = Schema({
        'name': str,
        'age': int
    }, default_keys=Optional)  # Make all keys optional by default

    schema({})  #-> {} -- okay
    schema({'name': None})
    #-> Invalid: Wrong type @ ['name']: expected String, got None
    ```

    Example: use to mark specific keys as not required:

    ```python
    schema = Schema({
        'name': str,
        Optional(str): int  # key is optional
    })

    schema({'name': 'Mark'})  # valid
    schema({'name': 'Mark', 'age': 10})  # valid
    schema({'name': 'Mark', 'age': 'X'})
    #-> Invalid: Wrong type @ ['age']: expected Integer number, got Binary String
    ```
    """
    priority = 0
    pass  # no-op: only its presence as a decoration matters
class Remove(Marker):
    """Drops matching keys from the input without validating their values.

    `Remove(key)` has the highest priority, so it operates before everything
    else in the schema:

    ```python
    schema = Schema({
        Remove('name'): str,  # `str` is irrelevant since the key is removed anyway
        'age': int
    })

    schema({'name': 111, 'age': 18})  #-> {'age': 18}
    ```

    It is more natural to use `Remove()` on values.  Remember that in this
    case the key still becomes [`Required()`](#required) unless decorated
    with [`Optional()`](#optional):

    ```python
    schema = Schema({
        Optional('name'): Remove
    })

    schema({'name': 111, 'age': 18})  #-> {'age': 18}
    ```

    **Bonus**: `Remove()` can be used in iterables as well:

    ```python
    schema = Schema([str, Remove(int)])
    schema(['a', 'b', 1, 2])  #-> ['a', 'b']
    ```
    """
    # Removal must happen before any other marker gets to act.
    priority = 1000

    def execute(self, d, matches):
        # Drop every matched key from the input mapping, then report an
        # empty match list so later processing does not re-assign them.
        for input_key, _sanitized_key, _value in matches:
            d.pop(input_key)
        return []

    def __call__(self, v):
        # Used on a value (not as a mapping key): signal removal.
        if not self.as_mapping_key:
            raise RemoveValue()
        return super(Remove, self).__call__(v)
class Reject(Marker):
    """Reports an [`Invalid`](#invalid) error every time it matches something in the input.

    It has a lower priority than most other schemas, so rejection only
    happens when no other schema has matched the value:

    ```python
    schema = Schema({
        Reject('name'): None, # Reject by key
        Optional('age'): Msg(Reject, u"Field is not supported anymore"), # alternative form
    })

    schema({'name': 111})
    #-> Invalid: Field is not supported anymore @ ['name']: expected -none-, got name
    ```
    """
    priority = -50
    error_message = _(u'Value rejected')

    def __call__(self, v):
        # Used on a value (not as a mapping key): complain immediately.
        if not self.as_mapping_key:
            raise Invalid(self.error_message, _(u'-none-'), get_literal_name(v), validator=self)
        return super(Reject, self).__call__(v)

    def execute(self, d, matches):
        # Raise for every matched key, bundling multiple errors together.
        if matches:
            collected = [
                Invalid(self.error_message, _(u'-none-'), get_literal_name(k), [k])
                for k, _sanitized_key, _value in matches
            ]
            raise MultipleInvalid.if_multiple(collected)
        return matches
class Allow(Marker):
    """A no-op marker that never complains on anything.

    Designed to be combined with [`Extra`](#extra).
    """
    priority = 0
    pass  # no-op by design
class Extra(Marker):
    """Catch-all marker defining the behavior for mapping keys not present in the schema.

    It has the lowest priority and delegates its function to its value, which
    can be a schema or another marker.  Given without arguments, it is
    compiled with an identity function (``lambda x: x``) which matches any
    value -- combined with its priority, `Extra` only catches values that did
    not match anything else.  Every mapping implicitly has an `Extra`; the
    `extra_keys` argument controls the default behavior.

    Example with `Extra: <schema>`:

    ```python
    schema = Schema({
        'name': str,
        Extra: int  # this will allow extra keys provided they're int
    })

    schema({'name': 'Alex', 'age': 18})  #-> ok
    schema({'name': 'Alex', 'age': 'X'})
    #-> Invalid: Wrong type @ ['age']: expected Integer number, got Binary String
    ```

    Example with `Extra: Reject`: reject all extra values:

    ```python
    schema = Schema({
        'name': str,
        Extra: Reject
    })

    schema({'name': 'Alex', 'age': 'X'})
    #-> Invalid: Extra keys not allowed @ ['age']: expected -none-, got age
    ```

    Example with `Extra: Remove`: silently discard all extra values:

    ```python
    schema = Schema({'name': str}, extra_keys=Remove)
    schema({'name': 'Alex', 'age': 'X'})  #-> {'name': 'Alex'}
    ```

    Example with `Extra: Allow`: allow any extra values:

    ```python
    schema = Schema({'name': str}, extra_keys=Allow)
    schema({'name': 'Alex', 'age': 'X'})  #-> {'name': 'Alex', 'age': 'X'}
    ```
    """
    priority = -1000  # Extra must match after everything else
    error_message = _(u'Extra keys not allowed')

    def on_compiled(self, name=None, key_schema=None, value_schema=None, as_mapping_key=None):
        # Special case: { Extra: Reject } gets a customized error message.
        if value_schema and isinstance(value_schema.compiled, Reject):
            value_schema.compiled.error_message = self.error_message
        return super(Extra, self).on_compiled(name, key_schema, value_schema, as_mapping_key)

    def execute(self, d, matches):
        # Delegate the decision to the value: when `Extra` is mapped to
        # another marker, forward execute() to it.
        compiled_value = self.value_schema.compiled
        if isinstance(compiled_value, Marker):
            return compiled_value.execute(d, matches)
        # Otherwise the value is a plain schema; CompiledSchema calls it on
        # every value at the next step anyway, so nothing to do here.
        return matches
class Entire(Optional):
    """Validates the entire mapping using the schema it is mapped to.

    Its priority is even lower than `Extra`'s, so it never matches any key,
    yet it is still executed and receives the whole input mapping.  This
    enables rules spanning multiple fields -- the [`Inclusive`](#inclusive)
    and [`Exclusive`](#exclusive) group validators are built on it.

    For example, require the mapping to have no more than 3 keys:

    ```python
    from good import Schema, Entire

    def maxkeys(n):
        # Return a validator function
        def validator(d):
            # `d` is the dictionary.
            # Validate it
            assert len(d) <= 3, 'Dict size should be <= 3'
            # Return the value since all callable schemas should do that
            return d
        return validator

    schema = Schema({
        str: int,
        Entire: maxkeys(3)
    })
    ```

    Here `Entire` is executed for every input dictionary and magically calls
    the schema it is mapped to; `Schema` converts the `AssertionError` raised
    by `maxkeys(n)` into [`Invalid`](#invalid).

    The mapped schema cannot replace the mapping object, but it may mutate
    the given mapping.
    """
    priority = -2000  # Must never match any actual key

    def execute(self, d, matches):
        # `matches` is always empty here; validate the mapping `d` itself
        # with the schema this marker is mapped to (`value_schema`).
        try:
            self.value_schema(d)
        except Invalid as e:
            e.enrich(
                expected=self.value_schema.name,
                provided=get_type_name(type(d)),
                validator=self.value_schema.schema
            )
            raise
        # Still return the same (empty) `matches` list.
        return matches
__all__ = ('Required', 'Optional', 'Remove', 'Reject', 'Allow', 'Extra', 'Entire')
|
|
# coding: utf-8
'''
The MIT License (MIT)
Copyright (c) 2014 Andreas "Akki" Nitsch
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
"""This module containes a class that is used to extract texts from balsamiq-mockup-files and to write them
into XML-files that can be used as input for translation-memory-systems.
"""
__version__ = "1.0.3"
import argparse
import glob
import logging
import os
import re
import sys
from lxml import etree
from OutputExporter import OutputExporter
from TextElement import TextElement
from TextFormatFixer import TextFormatFixer
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.WARNING)
class AdvancedMockupStringExtractor():
    """Class handling the extracting-process of text from Mockup-files."""

    # NOTE(review): the lists below are *class-level* mutable attributes,
    # shared by every instance of the extractor -- presumably intentional
    # for this single-run tool, but confirm before creating several
    # extractors in one process.

    # Names of mockup control types that contain translatable text.
    controlElementsWithText = [
        "com.balsamiq.mockups::Label",
        "com.balsamiq.mockups::Paragraph",
        "com.balsamiq.mockups::TextArea",
        "com.balsamiq.mockups::TextInput",
        "com.balsamiq.mockups::SubTitle",
        "com.balsamiq.mockups::Button",
        "com.balsamiq.mockups::RadioButton",
        "com.balsamiq.mockups::Accordion",
        "com.balsamiq.mockups::Tooltip",
        "com.balsamiq.mockups::IconLabel",
        "com.balsamiq.mockups::ComboBox",
        "com.balsamiq.mockups::ButtonBar",
        "com.balsamiq.mockups::TabBar",
        "com.balsamiq.mockups::CheckBox",
        "com.balsamiq.mockups::Link",
    ]
    # pylint: disable-msg=W0105

    # All recognized texts from balsamiq objects (TextElement instances).
    texts = []

    # Texts of balsamiq objects that were explicitly ignored.
    ignored = []

    # Control-id values marking an element that should be skipped.
    IgnoreTags = ["IGNORE", "IGNOREEXCLUDE"]

    # Matches a markdown-style list item such as "*%20item%0A".
    # Bug fix: the original character class "[a-zA-z0-9 ]" used "A-z",
    # which also matched "[", "\", "]", "^", "_" and "`"; "A-Z" is intended.
    listItemPattern = re.compile(r'\*%20[a-zA-Z0-9 ]*(%0A){0,1}')
def __init__(self, input_file_dir=None, fake=None, force=False):
    """ Constructor.

    If input_file_dir is a file, only that file is parsed.  If it is a
    directory, all bmml-files in it (and its assets subdirectory) are
    parsed.  If it is None, the current directory is used.

    @param input_file_dir: file or directory that should be parsed (default None)
    @param fake: marker string used to wrap texts as fake translations
    @param force: when True, keep going instead of exiting on errors
    """
    self.faketranslation = fake
    self.force = force
    if not input_file_dir:
        self.extract_text_from_directory(".")
    elif os.path.isfile(input_file_dir):
        self.extract_text(input_file_dir)
    else:
        self.extract_text_from_directory(input_file_dir)
def extract_text(self, input_file):
    """ Parse one mockup-file and extract the texts of all its elements.

    @param input_file: mockup-file that should be parsed for texts.
    """
    logging.info("Extracting text from file " + input_file)
    try:
        tree = etree.parse(input_file)
    except etree.XMLSyntaxError:
        logging.error("XML syntaxerror in file " + input_file)
        if not self.force:
            sys.exit(-1)
        return
    for element in tree.getroot().iter():
        try:
            self.extract_element_info(element, input_file)
        except KeyError:
            # Elements lacking a "controlTypeID" attribute are skipped.
            pass
def extract_text_from_directory(self, input_path):
for infile in glob.glob(os.path.join(input_path, '*.bmml')):
self.extract_text(infile)
for infile in glob.glob(os.path.join(input_path + "/assets", '*.bmml')):
self.extract_text(infile)
def get_control_property(self, control_properties, tag):
""" Return the text contained in an element-property with tag-element tag.
Keyword arguments:
@param control_properties: properties of an element.
@param tag: tag of the property that should be returned.
"""
for control_prop in control_properties:
if control_prop.tag == tag:
return control_prop.text
return None #control-property not contained in properties of control
def get_control_id(self, control_properties, input_file):
""" Return the control-id of an element.
Keyword arguments:
@param control_properties: properties of an element
@param input_file: input-file containing the element
"""
result = self.get_control_property(control_properties, 'customID')
if not result:
#logging.error("\tElement without custom id in file %s\n\tText in element: %s", input_file, self.get_text(control_properties))
#sys.exit(-1) #TODO hier weitermachen
return False
return result
def get_text(self, control_properties):
    """ Return the cleaned-up text contained in an element.

    @param control_properties: properties of an element.
    """
    raw_text = self.get_control_property(control_properties, 'text')
    cleaned = self.substitute_formatingchars(raw_text)
    return TextFormatFixer.fix_text(cleaned)
def get_metainformation(self, control_properties):
    """ Return the meta-information ('customData') contained in an element.

    @param control_properties: properties of an element.
    """
    meta = self.get_control_property(control_properties, 'customData')
    return self.substitute_formatingchars(meta)
def element_should_be_ignored(self, control_id):
""" Return if an element and its text should be irgnored.
Keyword arguments:
@param control_id: control-id of an element.
"""
if control_id.upper() in self.IgnoreTags:
return True
else:
return False
def checkElementWithoutText(self, element, input_file):
""" Check if an element that does not have text that should be translated has got an id.
For example this can happen if the user gives the id to a group of elements containing the
element with text instead of giving the id to the element with text.
If there is an element that has an id but should have no text, the program is aborted with a warning
if the force-flag is not set to true.
"""
for prop in element:
id = self.get_control_id(prop, input_file)
if id and not self.element_should_be_ignored(id):
logging.warning("Element with ID should have no text\n\tID: %s\n\tcontrolType: %s\n\tfile: %s\n", id, element.attrib["controlTypeID"], input_file)
if not self.force:
sys.exit(-1)
def extract_element_info(self, element, input_file):
    """ Extract text from default mockup-elements (text, button etc.)

    Gives an error-message if there is already an element with the same ID
    but different text.  Appends all text-information as instances of class
    TextElement to ``self.texts`` (or ``self.ignored`` for ignored elements).

    @param element: xml-element from mockup-file.
    @param input_file: name of input-file.
    @raise KeyError: if the element has no "controlTypeID" attribute
        (callers swallow this).
    """
    # Elements that should not contain text are only checked for stray ids.
    if element.attrib["controlTypeID"] not in self.controlElementsWithText:
        self.checkElementWithoutText(element, input_file)
        return
    # Multi-text controls get special handling (one TextElement per entry).
    if element.attrib["controlTypeID"] == "com.balsamiq.mockups::ButtonBar" or element.attrib["controlTypeID"] == "com.balsamiq.mockups::TabBar":
        return self.extract_text_from_buttonbar_tabbar(element, input_file)
    elif element.attrib["controlTypeID"] == "com.balsamiq.mockups::ComboBox":
        return self.extract_text_from_combobox(element, input_file)
    for control_properties in element:
        control_id = self.get_control_id(control_properties, input_file)  # False when no customID
        text = self.get_text(control_properties)
        if self.get_control_property(control_properties, 'customData'):
            metainfo = self.substitute_formatingchars(self.get_control_property(control_properties, 'customData'))
        else:
            metainfo = None
        try:
            if self.faketranslation:
                # Wrap the text in '#<marker>#' so fake translations are
                # easy to spot in the output.
                new_text_element = TextElement(control_id, '#'+ self.faketranslation + '# ' + text + ' #' + self.faketranslation + '#', input_file, metainfo)
            else:
                new_text_element = TextElement(control_id, text, input_file, metainfo)
            # NOTE(review): when control_id is False,
            # element_should_be_ignored() raises AttributeError
            # (False has no .upper()), which the handler below swallows --
            # presumably intentional (skip id-less elements), but confirm.
            if not self.element_should_be_ignored(control_id):
                self.checkElementIdUnique(new_text_element)
                if new_text_element in self.texts:
                    # Exact duplicate: keep only the first occurrence.
                    pass
                else:
                    self.texts.append(new_text_element)
            else:
                self.ignored.append(new_text_element)
        except AttributeError:
            pass
def checkElementIdUnique(self, newElement):
""" Check if an element with same ID was already extracted. If so, check if texts of both elements are the same.
If the texts are not the same, exit program with an error-message.
"""
for oldElement in self.texts:
if oldElement.identifier == newElement.identifier and oldElement.text != newElement.text:
logging.error("Element has got same ID but different text like other element: \n\tID: %s\n\ttext: %s\n\tfilename: %s\n\n\tID: %s\n\ttext: %s\n\tfilename: %s", newElement.identifier, newElement.text, newElement.filename, oldElement.identifier, oldElement.text, oldElement.filename)
if not self.force:
sys.exit(-1)
def get_text_from_combined_element(self, element, input_file, seperator):
    """ Extract texts from an element holding more than one text.

    The created TextElements carry an index so that the original order of
    the texts can be reconstructed.

    @param element: xml-element from mockup-file.
    @param input_file: input-file containing the element.
    @param seperator: seperator that is used to divide different texts in the text-property.
    """
    for control_properties in element:
        control_id = self.get_control_id(control_properties, input_file)  # False when no customID
        texts = self.get_text(control_properties).split(seperator)
        if self.get_control_property(control_properties, 'customData'):
            metainfo = self.substitute_formatingchars(self.get_control_property(control_properties, 'customData'))
        else:
            metainfo = None
        index = 0
        for text in texts:
            try:
                # NOTE(review): control_id may be False here; False + "_"
                # raises TypeError (not caught below) -- confirm id-less
                # combined elements cannot reach this point.
                if not self.element_should_be_ignored(control_id):
                    # The per-entry id is made unique by appending the
                    # whitespace-stripped entry text.
                    self.texts.append(TextElement(control_id + "_" + text.replace(' ', ''), text, input_file, metainfo, index))
                    index = index + 1
                    # NOTE(review): the index only advances for entries that
                    # are actually appended -- confirm this numbering is
                    # intended.
            except AttributeError:
                pass
def extract_text_from_buttonbar_tabbar(self, element, input_file):
"""Extracts texts from buttonbar- and tabbar-elements and append them to self.texts.
The appended elements will have an index to be able to distinguish the elements of the buttonbar/tabbar and to
know the order they are contained in the buttonbar/tabbar.
"""
self.get_text_from_combined_element(element, input_file, '%2C')
def extract_text_from_combobox(self, element, input_file):
    """Collect the texts of a combobox element into self.texts.

    The individual entries are separated by '<br />' in the element's text
    property; each stored TextElement carries an index so the original order
    inside the combobox can be reconstructed.
    """
    self.get_text_from_combined_element(element, input_file, '<br />')
def containes_unordered_list(self, text):
    """Return a truthy match object when the text starts an unordered list,
    otherwise None (falsy)."""
    # Delegates to the pre-compiled pattern held on the instance.
    return self.listItemPattern.match(text)
def substitute_unordered_list(self, text):
    """Replace the markdown-markup of an unordered list with html-markup.

    @param text: url-encoded mockup text; list items are delimited by '*%20'.
    @return: `text` unchanged when it does not contain a list, otherwise an
        HTML string '<ul><li>...</li>...</ul>' with one <li> per non-empty item.
    """
    if not self.containes_unordered_list(text):
        return text
    # Raw string avoids the invalid '\*' escape warning; ''.join avoids
    # quadratic string concatenation in a loop.
    items = re.split(r'\*%20', text)
    body = ''.join('<li>' + item + '</li>' for item in items if item)
    return '<ul>' + body + '</ul>'
def substitute_formatingchars(self, text):
    """ Remove all formating chars from mockup.
    @param text: Text element from mockup containing format-information.
    @return: The cleaned-up text as a string; returns "" when `text` is not
        a string (e.g. None).
    """
    result = ""
    try:
        result = text.replace("%0A", "<br />") # prevent newlines from being unquoted
        result = self.remove_multiple_whitespaces(result)
        result = result.replace("breakNewLine", ' <br />')
        # Normalize whitespace around the inserted markup tags.
        result = result.replace("<br /> ", "<br />")
        result = result.replace(" <br />", "<br />")
        result = result.replace(" <li>", "<li>")
        result = result.replace("<li> ", "<li>")
        result = self.substitute_bold(result)
    except AttributeError:
        # `text` had no .replace (None or non-string): fall through to "".
        logging.debug("Trying to substitute formating and html-chars in empty string.")
    return result
def substitute_bold(self, text):
    """Return text with '*'-delimited (markdown bold) spans replaced by
    '<b>...</b>' html tags."""
    bold_markup = r'\*(.*?)\*'
    return re.sub(bold_markup, r'<b>\1</b>', text)
def substitute_italic(self, text):
    """Return text with '_'-delimited (markdown italic) spans replaced by
    '<i>...</i>' html tags."""
    italic_markup = r'_(.*?)_'
    return re.sub(italic_markup, r'<i>\1</i>', text)
def remove_multiple_whitespaces(self, text):
    """Collapse every run of whitespace in `text` to a single space and
    strip leading/trailing whitespace.

    @param text: string to clean up.
    @return: the normalized string.
    """
    # Raw string: '\s' in a plain literal is an invalid escape sequence
    # (DeprecationWarning since Python 3.6).
    return re.sub(r'\s+', ' ', text).strip()
def check_ignored_texts(self):
    """Check that the text of every ignored text-element is also present in
    a not-ignored element.

    Logs an error for each ignored element whose text does not occur in
    self.texts; never raises.
    """
    known_texts = [element.text for element in self.texts]
    for ignored in self.ignored:
        if ignored.text not in known_texts:
            logging.error("Ignored text not in self.texts: %s %s ",
                          ignored.filename, ignored.text)
if __name__ == "__main__":
    # Command-line entry point: parse options, run the extractor over the
    # mockup file(s) and write the collected texts via OutputExporter.
    PARSER = argparse.ArgumentParser()
    PARSER.add_argument('-c', '--check', help='do not generate output, just check if all ids of textelements are given.', action='store_true')
    PARSER.add_argument('--faketranslation', help='generate fake translation-output. Will add given parameter as prefix and postfix to every text in output-file')
    PARSER.add_argument('-f', '--force', help='force generating outpu-file even if errors occure.', action='store_true')
    PARSER.add_argument('-i', '--input', help='input-file or directory that will be read. When directory is given, all mockup-files in directory will be read.')
    PARSER.add_argument('--json', help='write output in json-format instead of xml-format.', action='store_true')
    PARSER.add_argument('-min', '--minified', help='remove whitespaces from generated output.', action='store_true')
    PARSER.add_argument('-o', '--output', help='name of file that will contain the generated output.')
    PARSER.add_argument('-v', '--version', help='show version number.', action='store_true')
    PARSER.add_argument('--verbose', help='increase output verbosity.', action='store_true')
    ARGUMENTS = PARSER.parse_args()
    if ARGUMENTS.version:
        print 'AdvancedMockupStringExtractor V', __version__
        sys.exit(0)
    # An output file is mandatory unless we only check the input.
    if not ARGUMENTS.output and not ARGUMENTS.check:
        logging.error('You have to give the name of the output-file there the generated data will be stored in.')
        sys.exit(-1)
    if ARGUMENTS.verbose:
        logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
    if ARGUMENTS.force:
        # NOTE(review): this global is never read again in this script; the
        # extractor receives force=ARGUMENTS.force below -- confirm removable.
        force = True
    if ARGUMENTS.input:
        EXTRACTOR = AdvancedMockupStringExtractor(ARGUMENTS.input, fake=ARGUMENTS.faketranslation, force=ARGUMENTS.force)
    else:
        EXTRACTOR = AdvancedMockupStringExtractor(fake=ARGUMENTS.faketranslation, force=ARGUMENTS.force)
    if ARGUMENTS.faketranslation:
        EXTRACTOR.faketranslation = ARGUMENTS.faketranslation
    if ARGUMENTS.check:
        # Check-only mode: verify ignored texts, then exit without output.
        EXTRACTOR.check_ignored_texts()
        sys.exit(0)
    EXPORTER = OutputExporter(EXTRACTOR.texts)
    if ARGUMENTS.json:
        EXPORTER.json_export(ARGUMENTS.output, ARGUMENTS.minified)
    else:
        EXPORTER.xml_export(ARGUMENTS.output, ARGUMENTS.minified)
|
|
# Copyright 2017 Telstra Open Source
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pprint
import json
from collections import OrderedDict
import click
from datetime import datetime
import prettytable.prettytable as prettytable
from prettytable import PrettyTable
from kilda.probe.entity.message import create_dump_state
from kilda.probe.messaging import send_with_context
from kilda.probe.messaging import receive_with_context_async
LOG = logging.getLogger(__name__)
def print_flow(flow, border):
    """Print one flow as a table comparing forward and reverse properties."""
    forward = flow['forward']
    reverse = flow['reverse']
    table = PrettyTable(['Property', 'Forward', 'Reverse'], border=border,
                        valign='m')
    for prop, fwd_value in forward.items():
        if prop == 'flowpath':
            # Only the latency is shown from the nested flowpath dict; the
            # full path is rendered separately below.
            table.add_row(['flowpath:latency_ns', fwd_value['latency_ns'],
                           reverse[prop]['latency_ns']])
        else:
            table.add_row([prop, fwd_value, reverse[prop]])
    table.add_row(['path', print_path(forward, border),
                   print_path(reverse, border)])
    print(table)
def print_path(flow, border):
    """Build (not print) a table with one row per hop of the flow path."""
    columns = ['switch_id', 'port_no', 'segment_latency', 'seq_id']
    table = PrettyTable(columns, border=border, vrules=prettytable.NONE,
                        hrules=prettytable.HEADER,
                        padding_width=0)
    for hop in flow['flowpath']['path']:
        # Hops may lack some fields; missing ones render as None.
        table.add_row([hop.get(column, None) for column in columns])
    return table
def print_isls_tower(isls, border):
    """Print ISLs stacked vertically, one nested property/value table each."""
    outer = PrettyTable(['Isl'], border=border)
    for isl in isls:
        inner = PrettyTable(['Property', 'Value'], border=border)
        for prop, value in isl.items():
            if prop == 'path':
                # Flatten each path segment into "path:<seq_id>:<field>" rows.
                for segment in value:
                    for field, field_value in segment.items():
                        inner.add_row(
                            ['path:{}:{}'.format(segment['seq_id'], field),
                             field_value])
            else:
                inner.add_row([prop, value])
        outer.add_row([inner])
    print(outer)
def print_isls(isls, border):
    """Print all ISLs as one wide table, one row per ISL.

    Path segments are flattened into per-segment columns named
    'p<seq_id>:<field>'; cache timestamps are converted to datetimes first.
    """
    if not isls:
        return
    columns = set()
    raw = []
    for isl in isls:
        d = isl.copy()
        if 'path' in d:
            # Flatten each path segment into its own 'p<seq_id>:<key>' column.
            for p in d['path']:
                for kk, vv in p.items():
                    d['p{}:{}'.format(p['seq_id'], kk)] = vv
        raw.append(d)
        columns.update(d.keys())
    # Drop noisy columns; 'id' and the cache timestamps get fixed positions
    # at the start/end of the column order.
    columns -= {'id', 'path', 'message_type', 'p0:segment_latency',
                'created_in_cache', 'updated_in_cache', 'clazz'}
    sorted_columns = ['id'] + sorted(list(columns)) + ['created_in_cache',
                                                       'updated_in_cache']
    sorted_columns_with_names = OrderedDict(
        zip(sorted_columns, sorted_columns))
    # Shorter display names for the widest columns.
    sorted_columns_with_names.update({'available_bandwidth': 'av/bw',
                                      'created_in_cache': 'created',
                                      'updated_in_cache': 'updated',
                                      'latency_ns': 'lat'})
    table = PrettyTable(sorted_columns_with_names.values(),
                        border=border,
                        sortby='id',
                        vrules=prettytable.FRAME,
                        hrules=prettytable.FRAME)
    convert_timefied_to_human(raw)
    for d in raw:
        # '-' marks columns this particular ISL has no value for.
        table.add_row([d.get(x, '-') for x in sorted_columns_with_names.keys()])
    print(table)
def convert_timefied_to_human(data):
    """Replace epoch-seconds cache timestamps with datetime objects, in place."""
    timestamp_fields = ('created_in_cache', 'updated_in_cache')
    for record in data:
        for field in timestamp_fields:
            if field in record:
                record[field] = datetime.utcfromtimestamp(record[field])
def print_switches(switches, border):
    """Print all switches as one table, one row per switch.

    The column set is taken from the first switch dict, so every switch is
    assumed to carry the same keys; cache timestamps are converted to
    datetimes before printing.
    """
    if not switches:
        return
    columns = set(switches[0].keys())
    # 'switch_id' and the cache timestamps get fixed positions at the
    # start/end of the column order.
    columns -= {'switch_id', 'created_in_cache', 'updated_in_cache'}
    sorted_columns = ['switch_id'] + sorted(columns) + ['created_in_cache',
                                                        'updated_in_cache']
    sorted_columns_with_names = OrderedDict(
        zip(sorted_columns, sorted_columns))
    # Shorter display names for the timestamp columns.
    sorted_columns_with_names.update({'created_in_cache': 'created',
                                      'updated_in_cache': 'updated'})
    table = PrettyTable(sorted_columns_with_names.values(),
                        border=border,
                        sortby='switch_id',
                        vrules=prettytable.FRAME,
                        hrules=prettytable.FRAME)
    convert_timefied_to_human(switches)
    for s in switches:
        table.add_row([s[x] for x in sorted_columns_with_names.keys()])
    print(table)
def print_flows_from_payload(payload, border):
    """Print the 'Flows' section header plus every flow in the payload."""
    flows = payload['state']['flow']['flows']
    if not flows:
        return
    print('+----------')
    print('| Flows')
    for flow in flows:
        print_flow(flow, border)
def cache_bolt_print_table(payload, border):
    """Dump CacheBolt state: flows first, then ISLs, then switches."""
    print_flows_from_payload(payload, border)
    network = payload['state']['network']
    sections = (('Isls', network['isls'], print_isls),
                ('Switches', network['switches'], print_switches))
    for title, items, renderer in sections:
        if items:
            print('+----------')
            print('| ' + title)
            renderer(items, border)
def crud_bolt_print_table(payload, border):
    """CrudBolt state only carries flows, so print just those."""
    print_flows_from_payload(payload, border)
def print_table(records, border):
    """Pretty-print each dump-state record according to its bolt state type."""
    for record in records:
        data = json.loads(record.value)
        payload = data['payload']
        LOG.debug(pprint.pformat(data))
        # Header table identifying where this state dump came from.
        table = PrettyTable(['Topology', 'Component', 'Task ID'],
                            border=border)
        table.add_row(
            [payload['topology'], payload['component'], payload['task_id']])
        print(table)
        clazz = payload['state']['clazz']
        # Dispatch on the java class name of the serialized bolt state.
        if clazz == 'org.openkilda.messaging.ctrl.state.CacheBoltState':
            cache_bolt_print_table(payload, border)
        elif clazz == 'org.openkilda.messaging.ctrl.state.CrudBoltState':
            crud_bolt_print_table(payload, border)
        else:
            # Unknown state type: fall back to a raw pretty-printed dump.
            print(pprint.pformat(payload['state']))
        print('\n')
@click.command(name='dump-state')
@click.argument('destination')
@click.option('--border/--no-border', default=True)
@click.option('--table', 'output_type', flag_value='table', default=True)
@click.option('--json', 'output_type', flag_value='json')
@click.option('--allow-dangerous-operation/--prevent-dangerous-operation', default=False)
@click.pass_obj
def dump_state_command(ctx, destination, border, output_type, allow_dangerous_operation):
    """Request a state dump from `destination` and print it as table or json.

    Refuses to run unless --allow-dangerous-operation is given, because the
    resulting dump can exceed the Kafka message size limit.
    """
    if not allow_dangerous_operation:
        click.secho("DON'T RUN ON PRODUCTION MAY CAUSE OVERSIZED KAFKA MESSAGE",
                    blink=True, bold=True)
        return
    message = create_dump_state(ctx.correlation_id, destination=destination)
    LOG.debug('command = {}'.format(message.serialize()))
    # Start listening before sending so the response cannot be missed.
    with receive_with_context_async(ctx) as records:
        send_with_context(ctx, message.serialize())
    if output_type == 'table':
        print_table(records, border)
    elif output_type == 'json':
        for record in records:
            data = json.loads(record.value)
            print(pprint.pformat(data))
|
|
import inspect
import logging
from collections import OrderedDict
import baseEdge
from ds.vortex.core import baseNode
from ds.vortex.core import vortexEvent
from ds.vortex.nodes import allNodes
logger = logging.getLogger(__name__)
class Graph(object):
    """This graph class stores the nodes and will evaluate the graph on request.
    Simple example:
    gx = Graph("newGraph")
    t = sumNode.SumNode("newMathNode") # first create an instance of the node
    gx.addNode(t) # adds a node to the graph
    gx.getNode("newMathNode") # gets node by name
    if t in gx:
        print t.name # check to see if newNode is in the graph
    """
    # NOTE(review): these signals are class attributes and therefore shared
    # by every Graph instance -- confirm that is intended.
    addedNode = vortexEvent.VortexSignal()
    removedNode = vortexEvent.VortexSignal()
    addedEdge = vortexEvent.VortexSignal()
    deletedEdge = vortexEvent.VortexSignal()
    def __init__(self, name=""):
        """
        :param name: str, the name of the graph
        """
        self._edges = OrderedDict()
        self._name = name
        self._nodes = OrderedDict()
    def __repr__(self):
        return "{}{}".format(self.__class__.__name__, self.__dict__)
    def __len__(self):
        """Returns the length of the nodes in the graph
        :return: int, the length of nodes in this graph
        """
        return len(self._nodes)
    def __eq__(self, other):
        # Graphs are equal when both the node and the edge mappings match.
        return isinstance(other, Graph) and self._nodes == other.nodes and self._edges == other.edges
    def __contains__(self, node):
        """Returns a bool if the node is in the graph
        :param node: BaseNode instance
        :return:bool
        """
        try:
            return node in self._nodes.values()
        except TypeError:
            return False
    def get(self, fullPath):
        """Returns the node/edge/plug based on the fullPath eg. "testNode|output" would return the output plug
        :param fullPath: str
        :return: Node, Plug or Edge instance; None when nothing matches
        """
        for node in self._nodes.values():
            if node.fullPath() == fullPath:
                return node
            for plug in node.plugs.values():
                if plug.fullPath() == fullPath:
                    return plug
        for edge in self._edges.values():
            if edge.fullPath() == fullPath:
                return edge
    def addNode(self, node, **kwargs):
        """Adds a Node instance to the graph this will also add the node to the graph class instance as a attribute
        which can be accessed by graph.node
        :param node: Node instance, the node to add
        :param kwargs: plugName=plugValue, the kwargs sets the input plugs value.
        :return Node instance, or None when the node is already in the graph
        """
        if self.hasNode(node):
            return
        node.name = self.generateUniqueName(node)
        self._nodes[node.name] = node
        for plugName, plugValue in kwargs.iteritems():
            plug = node.getPlug(plugName)
            if plug.isInput():
                plug.value = plugValue
        # Keep the graph's edge registry in sync with plug connections.
        for plug in node.plugs.values():
            plug.connectionAdded.connect(self.addEdge)
            plug.connectionRemoved.connect(self.deleteEdge)
        self.addedNode.emit(node)
        return node
    @property
    def nodes(self):
        """Returns all the nodes in the graph
        :return: OrderedDict
        """
        return self._nodes
    @nodes.setter
    def nodes(self, newNodes):
        """Empties and sets the nodes dict
        :param newNodes: dict
        """
        self._nodes = newNodes
    def hasNode(self, node):
        """Checks the graph for the given node instance
        :param node: node instance
        :return: bool
        """
        return node in self._nodes.values()
    def deleteNode(self, node):
        """Removes a node from the graph
        :param node: the node instance (or node name) to delete
        """
        if isinstance(node, str):
            node = self.getNode(node)
        node.disconnectAll()
        del self._nodes[node.name]
        self.removedNode.emit(node)
    def getNode(self, nodeName):
        """Returns a node based on the name, or None when unknown
        :param nodeName: the name of the node to get
        :return:Node instance
        """
        return self._nodes.get(nodeName)
    @property
    def edges(self):
        """Returns a dict of the graph edges
        :return: Dict
        """
        return self._edges
    def getEdge(self, edgeName):
        """Returns a edge if the edge name is in edges
        :param edgeName: str, the name of the edge
        :return: Edge instance or None
        """
        return self._edges.get(edgeName)
    def addEdge(self, edge):
        """Adds the edge to the graph
        :param edge: Edge instance or edge name
        """
        # NOTE(review): leftover debug 'print' statements -- consider
        # switching them to logger.debug.
        print "trying to add edge"
        if isinstance(edge, str):
            edge = self.getEdge(edge)
        if edge not in self._edges.values():
            self._edges[edge.name] = edge
            print "creating edge"
            self.addedEdge.emit(edge)
    def deleteEdge(self, edge):
        """Removes the edge from the graph
        :param edge: Edge
        """
        if edge in self._edges.values():
            tmpEdge = edge
            del self._edges[edge.name]
            self.deletedEdge.emit(tmpEdge)
    def generateUniqueName(self, node):
        """Create a unique name for the node in the graph, on node creation a digit is appended , eg nodeName00, nodeName01
        :param node: node Instance
        :return: str, returns the new node name as a string
        """
        # NOTE(review): "%00d" applies no zero-padding, and stripping the old
        # numeric suffix by splitting on sum(digits) looks fragile (e.g.
        # 'node12' would split on '3', which never matches) -- confirm this
        # behaves as intended.
        value = "%0{}d".format(0)
        uIndex = 0
        currentIndex = [int(i) for i in node.name if i.isdigit()]
        if currentIndex:
            name = node.name.split(str(sum(currentIndex)))[0]
        else:
            name = node.name
        while name in self._nodes:
            name = name + value % uIndex
            uIndex += 1
        return name
    def clear(self):
        """Clears all the nodes and the edges from the graph
        :return: None
        """
        self._nodes.clear()
        self._edges.clear()
    def allLeaves(self):
        """Returns all the leaf nodes in the graph; a node counts as a leaf
        here when at least one of its input plugs has no connections.
        :return: list(Node)
        """
        leafNodes = []
        for node in self._nodes.values():
            if not all(plug.connections for plug in node.inputs()):
                leafNodes.append(node)
        return leafNodes
    def serializeGraph(self):
        """Creates a python dict from the graph, each node,plug and edge gets serialized
        :return: dict
        """
        logger.debug("serializing graph")
        serializedGraph = {"name": self._name,
                           "version": "1.0.0",
                           "nodes": OrderedDict(),
                           "edges": dict(),
                           "moduleName": inspect.getmodulename(__file__)
                           }
        logger.debug(serializedGraph)
        for node in self._nodes.values():
            serializedGraph["nodes"][node.name] = node.serialize()
        for edge in self._edges.values():
            serializedGraph["edges"][edge.name] = edge.serialize()
        return serializedGraph
    @classmethod
    def loadGraph(cls, graphData):
        """load a vortex graph dict creates and returns a graph object
        :param graphData: dict
        :return: Graph()
        """
        graph = cls(name=graphData.get("name"))
        for node in graphData["nodes"].values():
            moduleName = node.get("moduleName")
            nodeName = node.get("name")
            if moduleName == "baseNode":
                newNode = baseNode.BaseNode(name=nodeName)
            else:
                # Look the node class up in the node registry by module name.
                newNode = allNodes.getNode(node.get("moduleName"))(name=nodeName)
            for plugName, values in node.get("plugs").iteritems():
                plug = newNode.getPlug(plugName=plugName)
                if plug:
                    plug.value = values.get("value")
                    continue
                # Plug missing on the freshly built node: recreate it.
                newNode.addPlugByType(ioType=values.get("io"), name=plugName, value=values.get("value"))
            graph.addNode(newNode)
        for edge in graphData["edges"].values():
            inputPlug = graph.getNode(edge["input"][1]).getPlug(edge["input"][0])
            outputPlug = graph.getNode(edge["output"][1]).getPlug(edge["output"][0])
            graph.addEdge(baseEdge.Edge(name=edge["name"], inputPlug=inputPlug, outputPlug=outputPlug))
        return graph
|
|
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Handles all requests relating to schedulers.
"""
from novaclient import v1_1 as novaclient
from novaclient import exceptions as novaclient_exceptions
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import rpc
from nova import utils
from eventlet import greenpool
FLAGS = flags.FLAGS
flags.DEFINE_bool('enable_zone_routing',
False,
'When True, routing to child zones will occur.')
LOG = logging.getLogger('nova.scheduler.api')
def _call_scheduler(method, context, params=None):
    """Generic handler for RPC calls to the scheduler.

    :param method: name of the scheduler worker method to invoke
    :param context: security context for the call
    :param params: Optional dictionary of arguments to be passed to the
                   scheduler worker
    :retval: Result returned by scheduler worker
    """
    kwargs = {'method': method, 'args': params or {}}
    return rpc.call(context, FLAGS.scheduler_topic, kwargs)
def get_host_list(context):
    """Ask the scheduler service for the hosts known to this zone."""
    return _call_scheduler('get_host_list', context)
def get_zone_list(context):
    """Return a list of zones associated with this zone."""
    zones = _call_scheduler('get_zone_list', context)
    for zone in zones:
        # The scheduler escapes forward slashes in URLs; undo that here.
        zone['api_url'] = zone['api_url'].replace('\\/', '/')
    if not zones:
        # Fall back to the local database when the scheduler returns nothing.
        zones = db.zone_get_all(context)
    return zones
def zone_get(context, zone_id):
    """Fetch one child-zone record from the local database."""
    return db.zone_get(context, zone_id)
def zone_delete(context, zone_id):
    """Delete a child-zone record from the local database."""
    return db.zone_delete(context, zone_id)
def zone_create(context, data):
    """Create a child-zone record in the local database."""
    return db.zone_create(context, data)
def zone_update(context, zone_id, data):
    """Update an existing child-zone record in the local database."""
    return db.zone_update(context, zone_id, data)
def get_zone_capabilities(context):
    """Ask the scheduler for this zone's {key: value} capability map."""
    return _call_scheduler('get_zone_capabilities', context=context)
def select(context, specs=None):
    """Ask the scheduler to pick hosts matching the given request spec.

    :returns: a list of hosts.
    """
    params = {"request_spec": specs}
    return _call_scheduler('select', context=context, params=params)
def update_service_capabilities(context, service_name, host, capabilities):
    """Send an update to all the scheduler services informing them
    of the capabilities of this service.

    :param service_name: name of the reporting service (e.g. 'compute')
    :param host: host the service runs on
    :param capabilities: dict of capability key/values to publish
    """
    kwargs = dict(method='update_service_capabilities',
                  args=dict(service_name=service_name, host=host,
                            capabilities=capabilities))
    # Fan out on the configured scheduler topic rather than the hard-coded
    # 'scheduler' literal, so a non-default scheduler_topic still receives
    # updates (consistent with _call_scheduler above).
    return rpc.fanout_cast(context, FLAGS.scheduler_topic, kwargs)
def call_zone_method(context, method_name, errors_to_ignore=None,
                     novaclient_collection_name='zones', zones=None,
                     *args, **kwargs):
    """Invoke `method_name` on a novaclient collection in every child zone.

    Returns a list of (zone_id, call_result) tuples, one per zone that
    could be authenticated against.
    """
    if not isinstance(errors_to_ignore, (list, tuple)):
        # This will also handle the default None
        errors_to_ignore = [errors_to_ignore]
    pool = greenpool.GreenPool()
    results = []
    if zones is None:
        zones = db.zone_get_all(context)
    for zone in zones:
        try:
            nova = novaclient.Client(zone.username, zone.password, None,
                                     zone.api_url)
            nova.authenticate()
        except novaclient_exceptions.BadRequest, e:
            url = zone.api_url
            LOG.warn(_("Failed request to zone; URL=%(url)s: %(e)s")
                     % locals())
            #TODO (dabo) - add logic for failure counts per zone,
            # with escalation after a given number of failures.
            continue
        novaclient_collection = getattr(nova, novaclient_collection_name)
        collection_method = getattr(novaclient_collection, method_name)
        # NOTE(review): _error_trap closes over the loop-local
        # collection_method; if a green thread runs after the loop has
        # advanced, late binding could invoke the wrong zone's method --
        # confirm GreenPool.spawn scheduling makes this safe.
        def _error_trap(*args, **kwargs):
            try:
                return collection_method(*args, **kwargs)
            except Exception as e:
                # Ignored error types yield None instead of propagating.
                if type(e) in errors_to_ignore:
                    return None
                raise
        res = pool.spawn(_error_trap, *args, **kwargs)
        results.append((zone, res))
    pool.waitall()
    return [(zone.id, res.wait()) for zone, res in results]
def child_zone_helper(zone_list, func):
    """Fire off a command to each zone in the list.
    The return is [novaclient return objects] from each child zone.
    For example, if you are calling server.pause(), the list will
    be whatever the response from server.pause() is. One entry
    per child zone called."""
    def _wrap_method(function, arg1):
        """Wrap method to supply an argument."""
        def _wrap(*args, **kwargs):
            return function(arg1, *args, **kwargs)
        return _wrap
    def _process(func, zone):
        """Worker stub for green thread pool. Give the worker
        an authenticated nova client and zone info."""
        try:
            nova = novaclient.Client(zone.username, zone.password, None,
                                     zone.api_url)
            nova.authenticate()
        except novaclient_exceptions.BadRequest, e:
            url = zone.api_url
            LOG.warn(_("Failed request to zone; URL=%(url)s: %(e)s")
                     % locals())
            # This is being returned instead of raised, so that when
            # results are processed in unmarshal_result() after the
            # greenpool.imap completes, the exception can be raised
            # there if no other zones had a response.
            return exception.ZoneRequestError()
        else:
            return func(nova, zone)
    green_pool = greenpool.GreenPool()
    # imap runs _process(func, zone) concurrently for every zone in order.
    return [result for result in green_pool.imap(
            _wrap_method(_process, func), zone_list)]
def _issue_novaclient_command(nova, zone, collection,
                              method_name, *args, **kwargs):
    """Use novaclient to issue command to a single child zone.
    One of these will be run in parallel for each child zone.

    Returns None when the lookup (or find query) matches nothing in this
    zone, letting callers distinguish "not here" from a real result.
    """
    manager = getattr(nova, collection)
    # NOTE(comstud): This is not ideal, but we have to do this based on
    # how novaclient is implemented right now.
    # 'find' is special cased as novaclient requires kwargs for it to
    # filter on a 'get_all'.
    # Every other method first needs to do a 'get' on the first argument
    # passed, which should be a UUID. If it's 'get' itself that we want,
    # we just return the result. Otherwise, we next call the real method
    # that's wanted... passing other arguments that may or may not exist.
    if method_name in ['find', 'findall']:
        try:
            return getattr(manager, method_name)(**kwargs)
        except novaclient_exceptions.NotFound:
            url = zone.api_url
            LOG.debug(_("%(collection)s.%(method_name)s didn't find "
                        "anything matching '%(kwargs)s' on '%(url)s'" %
                        locals()))
            return None
    args = list(args)
    # pop off the UUID to look up
    item = args.pop(0)
    try:
        result = manager.get(item)
    except novaclient_exceptions.NotFound:
        url = zone.api_url
        LOG.debug(_("%(collection)s '%(item)s' not found on '%(url)s'" %
                    locals()))
        return None
    if method_name.lower() != 'get':
        # if we're doing something other than 'get', call it passing args.
        result = getattr(result, method_name)(*args, **kwargs)
    return result
def wrap_novaclient_function(f, collection, method_name, *args, **kwargs):
    """Bind collection, method_name and extra arguments onto *f*.

    child_zone_helper invokes the returned callable as callable(nova, zone);
    it forwards to f(nova, zone, collection, method_name, *args, **kwargs).
    """
    def _bound(nova, zone):
        return f(nova, zone, collection, method_name, *args, **kwargs)
    return _bound
class RedirectResult(exception.Error):
    """Carry pre-cooked zone results up to the HTTP API layer.

    The API catches this exception and returns `results` to the caller
    directly instead of processing them further.
    """
    def __init__(self, results):
        super(RedirectResult, self).__init__(
            message=_("Uncaught Zone redirection exception"))
        self.results = results
class reroute_compute(object):
    """
    reroute_compute is responsible for trying to lookup a resource in the
    current zone and if it's not found there, delegating the call to the
    child zones.
    Since reroute_compute will be making 'cross-zone' calls, the ID for the
    object must come in as a UUID-- if we receive an integer ID, we bail.
    The steps involved are:
    1. Validate that item_id is UUID like
    2. Lookup item by UUID in the zone local database
    3. If the item was found, then extract integer ID, and pass that to
    the wrapped method. (This ensures that zone-local code can
    continue to use integer IDs).
    4. If the item was not found, we delegate the call to a child zone
    using the UUID.
    """
    def __init__(self, method_name):
        # Name of the novaclient collection method to invoke remotely.
        self.method_name = method_name
    def _route_to_child_zones(self, context, collection, item_uuid):
        """Query every child zone for the item.

        Raises RedirectResult carrying the unmarshalled answer, or
        InstanceNotFound when routing is disabled or no zones exist.
        """
        if not FLAGS.enable_zone_routing:
            raise exception.InstanceNotFound(instance_id=item_uuid)
        # Stashed so unmarshall_result() can reference it when raising.
        self.item_uuid = item_uuid
        zones = db.zone_get_all(context)
        if not zones:
            raise exception.InstanceNotFound(instance_id=item_uuid)
        # Ask the children to provide an answer ...
        LOG.debug(_("Asking child zones ..."))
        result = self._call_child_zones(zones,
                wrap_novaclient_function(_issue_novaclient_command,
                collection, self.method_name, item_uuid))
        # Scrub the results and raise another exception
        # so the API layers can bail out gracefully ...
        raise RedirectResult(self.unmarshall_result(result))
    def __call__(self, f):
        def wrapped_f(*args, **kwargs):
            collection, context, item_id_or_uuid = \
                self.get_collection_context_and_id(args, kwargs)
            attempt_reroute = False
            if utils.is_uuid_like(item_id_or_uuid):
                item_uuid = item_id_or_uuid
                try:
                    instance = db.instance_get_by_uuid(context, item_uuid)
                except exception.InstanceNotFound, e:
                    # NOTE(sirp): since a UUID was passed in, we can attempt
                    # to reroute to a child zone
                    attempt_reroute = True
                    LOG.debug(_("Instance %(item_uuid)s not found "
                                "locally: '%(e)s'" % locals()))
                else:
                    # NOTE(sirp): since we're not re-routing in this case, and
                    # we we were passed a UUID, we need to replace that UUID
                    # with an integer ID in the argument list so that the
                    # zone-local code can continue to use integer IDs.
                    item_id = instance['id']
                    args = list(args)  # needs to be mutable to replace
                    self.replace_uuid_with_id(args, kwargs, item_id)
            if attempt_reroute:
                return self._route_to_child_zones(context, collection,
                                                  item_uuid)
            else:
                return f(*args, **kwargs)
        return wrapped_f
    def _call_child_zones(self, zones, function):
        """Ask the child zones to perform this operation.
        Broken out for testing."""
        return child_zone_helper(zones, function)
    def get_collection_context_and_id(self, args, kwargs):
        """Returns a tuple of (novaclient collection name, security
        context and resource id. Derived class should override this."""
        context = kwargs.get('context', None)
        instance_id = kwargs.get('instance_id', None)
        # NOTE(review): the positional lookups assume args[0] is the 'self'
        # of the decorated API method, so context/id live at args[1]/args[2];
        # the len() guards look off by one relative to those indexes --
        # confirm against the callers.
        if len(args) > 0 and not context:
            context = args[1]
        if len(args) > 1 and not instance_id:
            instance_id = args[2]
        return ("servers", context, instance_id)
    @staticmethod
    def replace_uuid_with_id(args, kwargs, replacement_id):
        """
        Extracts the UUID parameter from the arg or kwarg list and replaces
        it with an integer ID.
        """
        if 'instance_id' in kwargs:
            kwargs['instance_id'] = replacement_id
        elif len(args) > 1:
            args.pop(2)
            args.insert(2, replacement_id)
    def unmarshall_result(self, zone_responses):
        """Result is a list of responses from each child zone.
        Each decorator derivation is responsible to turning this
        into a format expected by the calling method. For
        example, this one is expected to return a single Server
        dict {'server':{k:v}}. Others may return a list of them, like
        {'servers':[{k,v}]}"""
        reduced_response = []
        found_exception = None
        for zone_response in zone_responses:
            if not zone_response:
                continue
            if isinstance(zone_response, BaseException):
                # Remember the failure but keep looking for a real answer.
                found_exception = zone_response
                continue
            server = zone_response.__dict__
            # Strip novaclient-internal attributes before returning.
            for k in server.keys():
                if k[0] == '_' or k == 'manager':
                    del server[k]
            reduced_response.append(dict(server=server))
        if reduced_response:
            return reduced_response[0]  # first for now.
        elif found_exception:
            raise found_exception
        raise exception.InstanceNotFound(instance_id=self.item_uuid)
def redirect_handler(f):
    """Decorator: unwrap RedirectResult into its pre-cooked payload.

    When the wrapped call raises RedirectResult (results gathered from
    child zones), return e.results directly instead of propagating the
    exception to the caller.
    """
    def new_f(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # 'except X as e' replaces the Python-2-only 'except X, e' form
        # (supported since 2.6, required by Python 3).
        except RedirectResult as e:
            return e.results
    return new_f
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the Task Queue API.
This stub only stores tasks; it doesn't actually run them. It also validates
the tasks by checking their queue name against the queue.yaml.
As well as implementing Task Queue API functions, the stub exposes various other
functions that are used by the dev_appserver's admin console to display the
application's queues and tasks.
"""
import base64
import bisect
import datetime
import logging
import os
import random
import string
import time
import taskqueue_service_pb
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import queueinfo
from google.appengine.runtime import apiproxy_errors
DEFAULT_RATE = '5.00/s'
DEFAULT_BUCKET_SIZE = 5
MAX_ETA_DELTA_DAYS = 30
admin_console_dummy_tasks = {}
BUILT_IN_HEADERS = set(['x-appengine-queuename',
'x-appengine-taskname',
'x-appengine-taskretrycount',
'x-appengine-development-payload',
'content-length'])
class _DummyTaskStore(object):
"""A class that encapsulates a sorted store of tasks.
Used for testing the admin console.
"""
def __init__(self):
  """Set up the two parallel sorted task indexes.

  Tasks are indexed twice: by (eta, name) for scheduling order and by
  name for unique lookup.
  """
  self._sorted_by_eta = []
  self._sorted_by_name = []
def _InsertTask(self, task):
  """Insert a task into the dummy store, keeping both indexes sorted.

  Args:
    task: the new task.
  """
  task_name = task.task_name()
  task_eta = task.eta_usec()
  bisect.insort_left(self._sorted_by_name, (task_name, task))
  bisect.insort_left(self._sorted_by_eta, (task_eta, task_name, task))
def Lookup(self, maximum, name=None, eta=None):
  """Lookup a number of sorted tasks from the store.

  If 'eta' is specified, the tasks are looked up in a list sorted by 'eta',
  then 'name'. Otherwise they are sorted by 'name'. We need to be able to
  sort by 'eta' and 'name' because tasks can have identical eta. If you had
  20 tasks with the same ETA, you wouldn't be able to page past them, since
  the 'next eta' would give the first one again. Names are unique, though.

  Args:
    maximum: the maximum number of tasks to return.
    name: a task name to start with; required whenever 'eta' is given.
    eta: an eta to start with.

  Returns:
    A list of up to 'maximum' tasks.

  Raises:
    ValueError: if 'eta' is supplied without 'name'.
  """
  if eta is None:
    # Name-only paging: scan the name-sorted index.
    pos = bisect.bisect_left(self._sorted_by_name, (name,))
    tasks = (x[1] for x in self._sorted_by_name[pos:pos + maximum])
    return list(tasks)
  if name is None:
    raise ValueError('must supply name or eta')
  # (eta, name) paging: scan the eta-sorted index.
  pos = bisect.bisect_left(self._sorted_by_eta, (eta, name))
  tasks = (x[2] for x in self._sorted_by_eta[pos:pos + maximum])
  return list(tasks)
def Count(self):
"""Returns the number of tasks in the store."""
return len(self._sorted_by_name)
def Oldest(self):
"""Returns the oldest eta in the store, or None if no tasks."""
if self._sorted_by_eta:
return self._sorted_by_eta[0][0]
return None
def Delete(self, name):
"""Deletes a task from the store by name.
Args:
name: the name of the task to delete.
Returns:
TaskQueueServiceError.UNKNOWN_TASK: if the task is unknown.
TaskQueueServiceError.INTERNAL_ERROR: if the store is corrupted.
TaskQueueServiceError.OK: otherwise.
"""
pos = bisect.bisect_left(self._sorted_by_name, (name,))
if pos >= len(self._sorted_by_name):
return taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK
if self._sorted_by_name[pos][1].task_name() != name:
logging.info('looking for task name %s, got task name %s', name,
self._sorted_by_name[pos][1].task_name())
return taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK
old_task = self._sorted_by_name.pop(pos)[1]
eta = old_task.eta_usec()
pos = bisect.bisect_left(self._sorted_by_eta, (eta, name, None))
if self._sorted_by_eta[pos][2] is not old_task:
logging.error('task store corrupted')
return taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERRROR
self._sorted_by_eta.pop(pos)
return taskqueue_service_pb.TaskQueueServiceError.OK
def Populate(self, num_tasks):
"""Populates the store with a number of tasks.
Args:
num_tasks: the number of tasks to insert.
"""
now = datetime.datetime.utcnow()
now_sec = time.mktime(now.timetuple())
def RandomTask():
"""Creates a new task and randomly populates values."""
task = taskqueue_service_pb.TaskQueueQueryTasksResponse_Task()
task.set_task_name(''.join(random.choice(string.ascii_lowercase)
for x in range(20)))
task.set_eta_usec(int(now_sec * 1e6) + random.randint(-10e6, 600e6))
task.set_creation_time_usec(min(now_sec * 1e6, task.eta_usec()) -
random.randint(0, 2e7))
task.set_url(random.choice(['/a', '/b', '/c', '/d']))
if random.random() < 0.2:
task.set_method(
taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.POST)
task.set_body('A' * 2000)
else:
task.set_method(
taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.GET)
task.set_retry_count(max(0, random.randint(-10, 5)))
return task
for _ in range(num_tasks):
self._InsertTask(RandomTask())
def _ParseQueueYaml(unused_self, root_path):
"""Loads the queue.yaml file and parses it.
Args:
unused_self: Allows this function to be bound to a class member. Not used.
root_path: Directory containing queue.yaml. Not used.
Returns:
None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object
populated from the queue.yaml.
"""
if root_path is None:
return None
for queueyaml in ('queue.yaml', 'queue.yml'):
try:
fh = open(os.path.join(root_path, queueyaml), 'r')
except IOError:
continue
try:
queue_info = queueinfo.LoadSingleQueue(fh)
return queue_info
finally:
fh.close()
return None
def _CompareTasksByEta(a, b):
"""Python sort comparator for tasks by estimated time of arrival (ETA).
Args:
a: A taskqueue_service_pb.TaskQueueAddRequest.
b: A taskqueue_service_pb.TaskQueueAddRequest.
Returns:
Standard 1/0/-1 comparison result.
"""
if a.eta_usec() > b.eta_usec():
return 1
if a.eta_usec() < b.eta_usec():
return -1
return 0
def _FormatEta(eta_usec):
"""Formats a task ETA as a date string in UTC."""
eta = datetime.datetime.fromtimestamp(eta_usec/1000000)
return eta.strftime('%Y/%m/%d %H:%M:%S')
def _EtaDelta(eta_usec):
"""Formats a task ETA as a relative time string."""
eta = datetime.datetime.fromtimestamp(eta_usec/1000000)
now = datetime.datetime.utcnow()
if eta > now:
return str(eta - now) + ' from now'
else:
return str(now - eta) + ' ago'
class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
  """Python only task queue service stub.

  This stub does not attempt to automatically execute tasks.  Instead, it
  stores them for display on a console.  The user may manually execute the
  tasks from the console.
  """

  queue_yaml_parser = _ParseQueueYaml

  def __init__(self, service_name='taskqueue', root_path=None):
    """Constructor.

    Args:
      service_name: Service name expected for all calls.
      root_path: Root path to the directory of the application which may
        contain a queue.yaml file. If None, then it's assumed no queue.yaml
        file is available.
    """
    super(TaskQueueServiceStub, self).__init__(service_name)
    self._taskqueues = {}
    self._next_task_id = 1
    self._root_path = root_path

    self._app_queues = {}

  def _Dynamic_Add(self, request, response):
    """Local implementation of the Add RPC in TaskQueueService.

    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
    See taskqueue_service.proto for a full description of the RPC.

    Args:
      request: A taskqueue_service_pb.TaskQueueAddRequest.
      response: A taskqueue_service_pb.TaskQueueAddResponse.

    Raises:
      apiproxy_errors.ApplicationError: INVALID_ETA for a negative or
        too-distant ETA, UNKNOWN_QUEUE for an unconfigured queue,
        TASK_ALREADY_EXISTS for a duplicate task name.
    """
    if request.eta_usec() < 0:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)

    eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
    max_eta = (datetime.datetime.utcnow() +
               datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
    if eta > max_eta:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)

    if not self._IsValidQueue(request.queue_name()):
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)

    if not request.task_name():
      request.set_task_name('task%d' % self._next_task_id)
      response.set_chosen_task_name(request.task_name())
      self._next_task_id += 1

    if request.has_transaction():
      try:
        apiproxy_stub_map.MakeSyncCall(
            'datastore_v3', 'AddAction', request, api_base_pb.VoidProto())
      # 'except X as e' works on both Python 2.6+ and 3; 'except X, e' is
      # Python-2-only syntax.
      except apiproxy_errors.ApplicationError as e:
        # Shift datastore error codes into the taskqueue error space.
        e.application_error = (e.application_error +
            taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR)
        raise e
    else:
      tasks = self._taskqueues.setdefault(request.queue_name(), [])
      for task in tasks:
        if task.task_name() == request.task_name():
          raise apiproxy_errors.ApplicationError(
              taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
      tasks.append(request)
      # Keep the queue ordered by ETA. A key-based sort replaces the old
      # cmp-style sort argument, which Python 3 removed.
      tasks.sort(key=lambda task: task.eta_usec())

  def _IsValidQueue(self, queue_name):
    """Determines whether a queue is valid, i.e. tasks can be added to it.

    Valid queues are the 'default' queue, plus any queues in the queue.yaml
    file.

    Args:
      queue_name: the name of the queue to validate.

    Returns:
      True iff queue is valid.
    """
    if queue_name == 'default':
      return True
    queue_info = self.queue_yaml_parser(self._root_path)
    if queue_info and queue_info.queue:
      for entry in queue_info.queue:
        if entry.name == queue_name:
          return True
    return False

  def GetQueues(self):
    """Gets all the applications's queues.

    Returns:
      A list of dictionaries, where each dictionary contains one queue's
      attributes. E.g.:
        [{'name': 'some-queue',
          'max_rate': '1/s',
          'bucket_size': 5,
          'oldest_task': '2009/02/02 05:37:42',
          'eta_delta': '0:00:06.342511 ago',
          'tasks_in_queue': 12}, ...]
      The list of queues always includes the default queue.
    """
    queues = []
    queue_info = self.queue_yaml_parser(self._root_path)
    has_default = False
    if queue_info and queue_info.queue:
      for entry in queue_info.queue:
        if entry.name == 'default':
          has_default = True
        queue = {}
        queues.append(queue)
        queue['name'] = entry.name
        queue['max_rate'] = entry.rate
        if entry.bucket_size:
          queue['bucket_size'] = entry.bucket_size
        else:
          queue['bucket_size'] = DEFAULT_BUCKET_SIZE

        tasks = self._taskqueues.setdefault(entry.name, [])
        if tasks:
          queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
          queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
        else:
          # Always provide both display fields so consumers can rely on the
          # documented dictionary shape (eta_delta was previously omitted).
          queue['oldest_task'] = ''
          queue['eta_delta'] = ''
        queue['tasks_in_queue'] = len(tasks)

    if not has_default:
      queue = {}
      queues.append(queue)
      queue['name'] = 'default'
      queue['max_rate'] = DEFAULT_RATE
      queue['bucket_size'] = DEFAULT_BUCKET_SIZE

      tasks = self._taskqueues.get('default', [])
      if tasks:
        queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
        queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
      else:
        queue['oldest_task'] = ''
        queue['eta_delta'] = ''
      queue['tasks_in_queue'] = len(tasks)

    return queues

  def GetTasks(self, queue_name):
    """Gets a queue's tasks.

    Args:
      queue_name: Queue's name to return tasks for.

    Returns:
      A list of dictionaries, where each dictionary contains one task's
      attributes. E.g.
        [{'name': 'task-123',
          'url': '/update',
          'method': 'GET',
          'eta': '2009/02/02 05:37:42',
          'eta_delta': '0:00:06.342511 ago',
          'body': '',
          'headers': [('user-header', 'some-value')
                      ('X-AppEngine-QueueName': 'update-queue'),
                      ('X-AppEngine-TaskName': 'task-123'),
                      ('X-AppEngine-TaskRetryCount': '0'),
                      ('X-AppEngine-Development-Payload': '1'),
                      ('Content-Length': 0),
                      ('Content-Type': 'application/octet-stream')]

    Raises:
      ValueError: A task request contains an unknown HTTP method type.
    """
    tasks = self._taskqueues.get(queue_name, [])
    result_tasks = []
    for task_request in tasks:
      task = {}
      result_tasks.append(task)
      task['name'] = task_request.task_name()
      task['url'] = task_request.url()
      method = task_request.method()
      if method == taskqueue_service_pb.TaskQueueAddRequest.GET:
        task['method'] = 'GET'
      elif method == taskqueue_service_pb.TaskQueueAddRequest.POST:
        task['method'] = 'POST'
      elif method == taskqueue_service_pb.TaskQueueAddRequest.HEAD:
        task['method'] = 'HEAD'
      elif method == taskqueue_service_pb.TaskQueueAddRequest.PUT:
        task['method'] = 'PUT'
      elif method == taskqueue_service_pb.TaskQueueAddRequest.DELETE:
        task['method'] = 'DELETE'
      else:
        raise ValueError('Unexpected method: %d' % method)

      task['eta'] = _FormatEta(task_request.eta_usec())
      task['eta_delta'] = _EtaDelta(task_request.eta_usec())
      task['body'] = base64.b64encode(task_request.body())
      # Strip the stub-generated headers, then re-append canonical values so
      # the console always shows a consistent set.
      headers = [(header.key(), header.value())
                 for header in task_request.header_list()
                 if header.key().lower() not in BUILT_IN_HEADERS]

      headers.append(('X-AppEngine-QueueName', queue_name))
      headers.append(('X-AppEngine-TaskName', task['name']))
      headers.append(('X-AppEngine-TaskRetryCount', '0'))
      headers.append(('X-AppEngine-Development-Payload', '1'))
      headers.append(('Content-Length', len(task['body'])))
      if 'content-type' not in frozenset(key.lower() for key, _ in headers):
        headers.append(('Content-Type', 'application/octet-stream'))
      task['headers'] = headers

    return result_tasks

  def DeleteTask(self, queue_name, task_name):
    """Deletes a task from a queue.

    Silently does nothing when the queue or task does not exist.

    Args:
      queue_name: the name of the queue to delete the task from.
      task_name: the name of the task to delete.
    """
    tasks = self._taskqueues.get(queue_name, [])
    for task in tasks:
      if task.task_name() == task_name:
        tasks.remove(task)
        return

  def FlushQueue(self, queue_name):
    """Removes all tasks from a queue.

    Args:
      queue_name: the name of the queue to remove tasks from.
    """
    self._taskqueues[queue_name] = []

  def _Dynamic_UpdateQueue(self, request, unused_response):
    """Local implementation of the UpdateQueue RPC in TaskQueueService.

    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
    See taskqueue_service.proto for a full description of the RPC.

    Args:
      request: A taskqueue_service_pb.TaskQueueUpdateQueueRequest.
      unused_response: A taskqueue_service_pb.TaskQueueUpdateQueueResponse.
                       Not used.
    """
    queues = self._app_queues.setdefault(request.app_id(), {})
    # Store a copy so later caller-side mutation of the request cannot
    # change what we remembered.
    defensive_copy = taskqueue_service_pb.TaskQueueUpdateQueueRequest()
    defensive_copy.CopyFrom(request)
    queues[request.queue_name()] = defensive_copy

  def _Dynamic_FetchQueues(self, request, response):
    """Local implementation of the FetchQueues RPC in TaskQueueService.

    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
    See taskqueue_service.proto for a full description of the RPC.

    Args:
      request: A taskqueue_service_pb.TaskQueueFetchQueuesRequest.
      response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.
    """
    queues = self._app_queues.get(request.app_id(), {})
    # Sort the full queue list by name BEFORE truncating; slicing the raw
    # items() view dropped an arbitrary subset first (and dict views are not
    # sliceable on Python 3).
    for unused_key, queue in sorted(queues.items())[:request.max_rows()]:
      response_queue = response.add_queue()
      response_queue.set_queue_name(queue.queue_name())
      response_queue.set_bucket_refill_per_second(
          queue.bucket_refill_per_second())
      response_queue.set_bucket_capacity(queue.bucket_capacity())
      response_queue.set_user_specified_rate(queue.user_specified_rate())

  def _Dynamic_FetchQueueStats(self, request, response):
    """Local 'random' implementation of the TaskQueueService.FetchQueueStats.

    This implementation loads some stats from the dummy store,
    the rest with random numbers.
    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
    See taskqueue_service.proto for a full description of the RPC.

    Args:
      request: A taskqueue_service_pb.TaskQueueFetchQueueStatsRequest.
      response: A taskqueue_service_pb.TaskQueueFetchQueueStatsResponse.
    """
    for queue in request.queue_name_list():
      store = self.GetDummyTaskStore(request.app_id(), queue)
      stats = response.add_queuestats()
      stats.set_num_tasks(store.Count())
      if stats.num_tasks() == 0:
        # Sentinel meaning "no tasks" for the oldest ETA.
        stats.set_oldest_eta_usec(-1)
      else:
        stats.set_oldest_eta_usec(store.Oldest())

      # 90% of the time also attach random scanner statistics.
      if random.randint(0, 9) > 0:
        scanner_info = stats.mutable_scanner_info()
        scanner_info.set_executed_last_minute(random.randint(0, 10))
        scanner_info.set_executed_last_hour(scanner_info.executed_last_minute()
                                            + random.randint(0, 100))
        scanner_info.set_sampling_duration_seconds(random.random() * 10000.0)

  def GetDummyTaskStore(self, app_id, queue_name):
    """Get the dummy task store for this app_id/queue_name pair.

    Creates an entry and populates it, if there's not already an entry.

    Args:
      app_id: the app_id.
      queue_name: the queue_name.

    Returns:
      the existing or the new dummy store.
    """
    task_store_key = (app_id, queue_name)
    if task_store_key not in admin_console_dummy_tasks:
      store = _DummyTaskStore()
      store.Populate(random.randint(10, 100))
      admin_console_dummy_tasks[task_store_key] = store
    else:
      store = admin_console_dummy_tasks[task_store_key]
    return store

  def _Dynamic_QueryTasks(self, request, response):
    """Local implementation of the TaskQueueService.QueryTasks RPC.

    Uses the dummy store, creating tasks if this is the first time the
    queue has been seen.

    Args:
      request: A taskqueue_service_pb.TaskQueueQueryTasksRequest.
      response: A taskqueue_service_pb.TaskQueueQueryTasksResponse.
    """
    store = self.GetDummyTaskStore(request.app_id(), request.queue_name())

    if request.has_start_eta_usec():
      tasks = store.Lookup(request.max_rows(), name=request.start_task_name(),
                           eta=request.start_eta_usec())
    else:
      tasks = store.Lookup(request.max_rows(), name=request.start_task_name())
    for task in tasks:
      response.add_task().MergeFrom(task)

  def _Dynamic_Delete(self, request, response):
    """Local delete implementation of TaskQueueService.Delete.

    Deletes tasks from the dummy store. A 1/20 chance of a transient error.

    Args:
      request: A taskqueue_service_pb.TaskQueueDeleteTasksRequest.
      response: A taskqueue_service_pb.TaskQueueDeleteTasksResponse.
    """
    task_store_key = (request.app_id(), request.queue_name())
    if task_store_key not in admin_console_dummy_tasks:
      for _ in request.task_name_list():
        response.add_result(
            taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
      return
    store = admin_console_dummy_tasks[task_store_key]

    for taskname in request.task_name_list():
      if random.random() <= 0.05:
        response.add_result(
            taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR)
      else:
        response.add_result(store.Delete(taskname))

  def _Dynamic_DeleteQueue(self, request, response):
    """Local delete implementation of TaskQueueService.DeleteQueue.

    Args:
      request: A taskqueue_service_pb.TaskQueueDeleteQueueRequest.
      response: A taskqueue_service_pb.TaskQueueDeleteQueueResponse.

    Raises:
      apiproxy_errors.ApplicationError: INVALID_QUEUE_NAME for an empty
        name, UNKNOWN_QUEUE for a queue that was never configured.
    """
    if not request.queue_name():
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME)

    queues = self._app_queues.get(request.app_id(), {})
    if request.queue_name() not in queues:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)

    del queues[request.queue_name()]

  def _Dynamic_PurgeQueue(self, request, response):
    """Local purge implementation of TaskQueueService.PurgeQueue.

    Args:
      request: A taskqueue_service_pb.TaskQueuePurgeQueueRequest.
      response: A taskqueue_service_pb.TaskQueuePurgeQueueResponse.

    Raises:
      apiproxy_errors.ApplicationError: INVALID_QUEUE_NAME for an empty
        name, UNKNOWN_QUEUE for an unconfigured non-default queue.
    """
    if not request.queue_name():
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME)

    queues = self._app_queues.get(request.app_id(), {})
    if request.queue_name() != 'default' and request.queue_name() not in queues:
      raise apiproxy_errors.ApplicationError(
          taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)

    store = self.GetDummyTaskStore(request.app_id(), request.queue_name())
    for task in store.Lookup(store.Count()):
      store.Delete(task.task_name())

    self.FlushQueue(request.queue_name())
|
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_switch_controller_switch_profile
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Automatically patch the module's Connection class for every test."""
    patched = mocker.patch('ansible.modules.network.fortios.fortios_switch_controller_switch_profile.Connection')
    return patched
# NOTE(review): this passes the fixture *function* object (not a mocked
# connection instance) to FortiOSHandler; the handler is only ever used as a
# call target for the mocked set/delete methods below — confirm intended.
fos_instance = FortiOSHandler(connection_mock)
def test_switch_controller_switch_profile_creation(mocker):
    """A successful POST reports no error, a change, and the mapped payload."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    profile = {
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    expected = {
        'login-passwd': 'test_value_3',
        'login-passwd-override': 'enable',
        'name': 'default_name_5'
    }
    set_mock.assert_called_with('switch-controller', 'switch-profile', data=expected, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_switch_controller_switch_profile_creation_fails(mocker):
    """A failed POST reports an error and no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    profile = {
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    expected = {
        'login-passwd': 'test_value_3',
        'login-passwd-override': 'enable',
        'name': 'default_name_5'
    }
    set_mock.assert_called_with('switch-controller', 'switch-profile', data=expected, vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_switch_controller_switch_profile_removal(mocker):
    """state=absent issues a delete and reports success and a change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    profile = {
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'absent',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    delete_mock.assert_called_with('switch-controller', 'switch-profile', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_switch_controller_switch_profile_deletion_fails(mocker):
    """A failed delete reports an error and no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    profile = {
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'absent',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    delete_mock.assert_called_with('switch-controller', 'switch-profile', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_switch_controller_switch_profile_idempotent(mocker):
    """A 404 on set means nothing to do: no error, no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    profile = {
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    expected = {
        'login-passwd': 'test_value_3',
        'login-passwd-override': 'enable',
        'name': 'default_name_5'
    }
    set_mock.assert_called_with('switch-controller', 'switch-profile', data=expected, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_switch_controller_switch_profile_filter_foreign_attributes(mocker):
    """Attributes outside the module schema are dropped from the payload."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    profile = {
        'random_attribute_not_valid': 'tag',
        'login_passwd': 'test_value_3',
        'login_passwd_override': 'enable',
        'name': 'default_name_5'
    }
    args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_switch_profile': profile,
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_switch_profile.fortios_switch_controller(args, fos_instance)

    expected = {
        'login-passwd': 'test_value_3',
        'login-passwd-override': 'enable',
        'name': 'default_name_5'
    }
    set_mock.assert_called_with('switch-controller', 'switch-profile', data=expected, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
|
from cStringIO import StringIO
import os
import urlparse
import struct
from struct import calcsize
from twisted.protocols import basic
from twisted.internet import protocol, defer
from twisted.application import service
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.web import xmlrpc, server
import rosgraph
import rospy
from rospy.impl import tcpros_service
from twisted.internet import reactor
class State(object):
    '''Enum to represent the different states in each ROS protocol'''
    # Each state is a unique sentinel; only identity comparisons are used.
    WAIT, INIT, RESPONSE, PUBLISH = object(), object(), object(), object()
class ROSClientProtocol(basic.IntNStringReceiver):
    '''A L{Protocol} for the client-side TCPROS protocol.

    Incoming frames are length-prefixed; decoded messages are delivered to
    consumers through a L{defer.DeferredQueue}.
    '''
    structFormat = "<I"
    prefixLength = calcsize(structFormat)
    state = State.WAIT

    def __init__(self):
        self.queue = defer.DeferredQueue()

    def dataReceived(self, data):
        if self.state == State.WAIT:
            basic.IntNStringReceiver.dataReceived(self, data)
        elif self.state == State.RESPONSE:
            # Strip the one-byte status prefix before the framed payload.
            # TODO: check that it's a valid response
            stripped = data[1:]
            self.state = State.WAIT
            basic.IntNStringReceiver.dataReceived(self, stripped)

    def stringReceived(self, message):
        self.queue.put(message)

    def send_header(self, header):
        '''Encode the headers and send them to the underlying transport.

        @param header: A C{dict} of key-value pairs of strings.'''
        encoded = rosgraph.network.encode_ros_handshake_header(header)
        self.transport.write(encoded)

    def send_request(self, request):
        '''Encode a request and send it.

        @param request: A generated representation of a request.'''
        buf = StringIO()
        request.serialize(buf)
        self.sendString(buf.getvalue())
        # The next bytes on the wire will carry a status-prefixed response.
        self.state = State.RESPONSE

    def send_response(self, response):
        pass
class ROSServerProtocol(basic.IntNStringReceiver):
    '''A L{Protocol} for the server-side TCPROS protocol.

    First exchanges a handshake header, then serves framed service requests.
    '''
    structFormat = "<I"
    prefixLength = calcsize(structFormat)
    state = State.INIT

    def stringReceived(self, message):
        if self.state == State.INIT:
            # Answer the handshake with our service identity, then accept
            # request frames.
            fields = {'service': self.factory.resolved_name,
                      'type': self.factory.service_class._type,
                      'md5sum': self.factory.service_class._md5sum,
                      'callerid': self.factory.caller_id}
            self.transport.write(
                rosgraph.network.encode_ros_handshake_header(fields))
            self.state = State.WAIT
        elif self.state == State.WAIT:
            request = self.factory.service_class._request_class()
            request.deserialize(message)
            deferred = defer.maybeDeferred(self.factory.handler, request)
            deferred.addCallback(self._write_response)

    def _write_response(self, response):
        buf = StringIO()
        response.serialize(buf)
        payload = buf.getvalue()
        # ok-byte (1) + length prefix + serialized response body.
        frame = (struct.pack('<B', 1) +
                 struct.pack(self.structFormat, len(payload)) +
                 payload)
        self.transport.write(frame)
class ROSClientFactory(protocol.ClientFactory):
    '''A C{ClientFactory} that builds TCPROS client-side protocol instances.'''
    # Twisted instantiates this protocol class for each new connection.
    protocol = ROSClientProtocol
class ROSMethodProxy(object):
    '''Proxy to a remote service method.

    @ivar service_class: The generated class that exposes our service.
    @ivar protocol: A L{TCPROSServiceClient} representing the underlying
        transport.
    @ivar hostname: The C{str} remote hostname.
    @ivar port: The C{int} remote port.
    '''
    def __init__(self, service_class, protocol, hostname, port, factory,
                 reactor=None):
        self.service_class = service_class
        self.protocol = protocol
        self.hostname = hostname
        self.port = port
        self.factory = factory
        # Fall back to the global reactor unless one was supplied.
        if not reactor:
            from twisted.internet import reactor
        self.reactor = reactor

    def build_factory(self):
        return self.factory()

    @defer.inlineCallbacks
    def call(self, *params):
        '''Encode the method params and issue a request to the service.'''
        endpoint = TCP4ClientEndpoint(self.reactor, self.hostname, self.port)
        proto = yield endpoint.connect(self.build_factory())
        # Handshake: write our header fields and wait for the peer's reply.
        handshake = rosgraph.network.encode_ros_handshake_header(
            self.protocol.get_header_fields())
        proto.transport.write(handshake)
        yield proto.queue.get()
        # TODO validate message
        proto.send_request(self.service_class._request_class(*params))
        raw = yield proto.queue.get()
        response = self.service_class._response_class()
        response.deserialize(raw)
        defer.returnValue(response)

    def __call__(self, *args, **kwargs):
        return self.call(*args, **kwargs)
class ROSClient(object):
    '''
    A high-level object representing a ROS Client. Manages registration to
    the ROS Master and talking to a ROS Service.

    @ivar service_class: The generated class that exposes our service.
    @ivar resolved_name: The C{str} with which we use to identify our hostname.
    @ivar caller_id: The C{str} to identify our calls to ROS Master and the
        peers.
    @ivar master: A L{Proxy} to the ROS Master.
    @ivar caller_api: The local XML-RPC endpoint.
    '''
    method_proxy = ROSMethodProxy
    factory = ROSClientFactory

    def __init__(self, service, caller_id, service_class, ros_master_uri=None,
                 reactor=None, persistent=False, headers=None):
        self.resolved_name = rospy.names.resolve_name(service)
        self.caller_id = '/' + caller_id
        if ros_master_uri is None:
            ros_master_uri = os.environ['ROS_MASTER_URI']
        self.master = xmlrpc.Proxy(ros_master_uri)
        # Fall back to the global reactor unless one was supplied.
        if reactor is None:
            from twisted.internet import reactor
        self.reactor = reactor
        self.headers = headers
        if persistent:
            # A persistent connection is signalled through the handshake
            # headers; make sure a header dict exists first.
            self.headers = self.headers or {}
            self.headers['persistent'] = '1'
        self.service_class = service_class

    def build_factory(self):
        return self.factory()

    @defer.inlineCallbacks
    def probe(self, scheme, hostname, port):
        '''Check that the endpoint is reponding to our requests.

        @param scheme: The C{str} protocol scheme for talking to the endpoint.
        @param hostname: The C{str} hostname of the remote endpoint.
        @param port: The C{int} port to talk to the endpoint.
        @return A L{ROSMethodProxy} for issuing calls to the service.
        '''
        probe_fields = {'probe': '1', 'md5sum': '*',
                        'callerid': self.caller_id,
                        'service': self.resolved_name}
        endpoint = self.build_endpoint(hostname, port)
        proto = yield endpoint.connect(self.build_factory())
        proto.send_header(probe_fields)
        # TODO: check response
        yield proto.queue.get()
        transport_protocol = tcpros_service.TCPROSServiceClient(
            self.resolved_name, self.service_class, headers=self.headers)
        proxy = self.method_proxy(self.service_class, transport_protocol,
                                  hostname, port, self.factory)
        defer.returnValue(proxy)

    def build_endpoint(self, hostname, port):
        '''Factory method to build a low level transport endpoint'''
        return TCP4ClientEndpoint(self.reactor, hostname, port)

    def lookup_service(self):
        '''Check if the service has been registered on the ROS Master'''
        return self.master.callRemote('lookupService', self.caller_id,
                                      self.resolved_name)

    def getParam(self, param):
        '''Query the ROS Master for a parameter

        @param param: The C{str} parameter to query.
        '''
        return self.master.callRemote('getParam', self.caller_id,
                                      '/' + param)

    @defer.inlineCallbacks
    def wait_for_endpoint(self):
        '''
        Lookup the service on the ROS Master and probe that the endpoint has
        been registered.

        @return A L{ROSMethodProxy} representing the remote endpoint.
        '''
        status, message, uri = yield self.lookup_service()
        if status == -1:
            raise Exception("Can't find service")
        parsed = urlparse.urlparse(uri)
        # TODO: check response
        yield self.getParam('tcp_keepalive')
        proxy = yield self.probe(parsed.scheme, parsed.hostname, parsed.port)
        defer.returnValue(proxy)
class ROSServerFactory(protocol.ServerFactory):
    '''A TCPROS server factory.

    @ivar handler: A C{callable} that will be fired when an incoming request
        is received.
    @ivar service_class: The generated class that exposes our service.
    @ivar resolved_name: The C{str} with which we use to identify our hostname.
    @ivar caller_id: The C{str} to identify our calls to ROS Master and the
        peers.
    '''
    protocol = ROSServerProtocol

    def __init__(self, handler, service_class, resolved_name, caller_id):
        (self.handler, self.service_class,
         self.resolved_name, self.caller_id) = (handler, service_class,
                                                resolved_name, caller_id)
class ROSService(service.Service):
    '''A Twisted L{Service} suitable for using as a twistd plugin.

    @ivar handler: A C{callable} that will be fired when an incoming request
        is received.
    @ivar service_class: The generated class that exposes our service.
    @ivar resolved_name: The C{str} with which we use to identify our hostname.
    @ivar caller_id: The C{str} to identify our calls to ROS Master and the
        peers.
    @ivar master: A L{Proxy} to the ROS Master.
    @ivar caller_api: The local XML-RPC endpoint.
    '''
    def __init__(self, handler, service_class, name, ros_service, rpc_uri,
                 caller_api, ros_master_uri=None):
        if ros_master_uri is None:
            ros_master_uri = os.environ['ROS_MASTER_URI']
        self.handler = handler
        self.service_class = service_class
        self.resolved_name = rospy.names.resolve_name(ros_service)
        self.caller_id = rospy.names.resolve_name(name)
        # xmlrpc is already imported at module level; the redundant
        # in-function 'from twisted.web import xmlrpc' has been removed.
        self.master = xmlrpc.Proxy(ros_master_uri)
        self.rpc_uri = rpc_uri  # local rpc
        self.caller_api = caller_api  # xmlrpc local uri

    @defer.inlineCallbacks
    def get_ros_server_factory(self):
        '''
        Register ourselves to the ROS Master and return a suitable
        L{ServerFactory}.
        '''
        yield self.register_service()
        factory = self.build_factory()
        defer.returnValue(factory)

    def build_factory(self):
        '''Build the TCPROS server factory for this service.'''
        return ROSServerFactory(self.handler, self.service_class,
                                self.resolved_name, self.caller_id)

    def register_service(self):
        '''Register ourselves to the ROS Master.'''
        return self.master.callRemote('registerService', self.caller_id,
                                      self.resolved_name, self.rpc_uri,
                                      self.caller_api)
class ROSSubscriber(service.Service):
    """A Twisted service that registers a topic subscription on the ROS Master.

    @ivar handler: A C{callable} fired for every message received on the topic.
    @ivar master: A L{Proxy} to the ROS Master.
    @ivar caller_api: The local XML-RPC endpoint URI.
    """

    def __init__(self, name, handler, topic, topic_type, caller_api,
            ros_master_uri=None):
        # Default to the conventional ROS environment variable.
        if ros_master_uri is None:
            ros_master_uri = os.environ['ROS_MASTER_URI']
        from twisted.web import xmlrpc
        self.handler = handler  # to be called when a new message comes in
        self.caller_id = rospy.names.resolve_name(name)
        self.master = xmlrpc.Proxy(ros_master_uri)
        self.topic = topic
        self.topic_type = topic_type
        self.caller_api = caller_api  # local xml uri

    def register_subscriber(self):
        """Announce this subscriber to the ROS Master.

        @return: A L{Deferred} firing with the master's reply.
        """
        return self.master.callRemote(
            'registerSubscriber', self.caller_id, self.topic,
            self.topic_type, self.caller_api)
class ROSPublisherEndpoint(object):
    '''A high-level endpoint for publishing messages.

    @ivar factory: A suitable L{ServerFactory} for ROS publishers.
    '''

    def __init__(self, factory):
        self.factory = factory

    def publish(self, text):
        '''Serialize C{text} and send it to every connected subscriber.

        @param text: The C{str} text to send.
        '''
        for peer in self.factory.subscribers:
            message = self.factory.data_class()
            message.data = text
            buf = StringIO()
            message.serialize(buf)
            peer.sendString(buf.getvalue())
class ROSPublisher(object):
    '''
    A high-level object representing a ROS Publisher. Manages registration to
    the ROS Master and talking to ROS subscribers.

    @ivar caller_id: The C{str} to identify our calls to ROS Master and the
        peers.
    @ivar master: A L{Proxy} to the ROS Master.
    @ivar caller_api: The local XML-RPC endpoint.
    @ivar factory: The L{ROSPublisherEndpoint} created by
        L{register_publisher}; C{None} until registration has completed.
    '''

    def __init__(self, name, topic, topic_type, caller_api, ros_hostname,
            ros_rpc_port, data_class, ros_master_uri=None):
        if ros_master_uri is None:
            ros_master_uri = os.environ['ROS_MASTER_URI']
        self.caller_id = rospy.names.resolve_name(name)
        from twisted.web import xmlrpc
        self.master = xmlrpc.Proxy(ros_master_uri)
        self.topic = topic
        self.topic_type = topic_type
        self.caller_api = caller_api  # local xml uri
        self.xml_rpc_api = ROSPublisherSlaveAPI(ros_hostname, ros_rpc_port,
                self.caller_id, self.topic, data_class)
        self.data_class = data_class
        self.ros_rpc_port = ros_rpc_port
        # Set by register_publisher(); publish() cannot be used before then.
        self.factory = None

    def publish(self, msg):
        '''Send a message to all the subscribers.

        @param msg: A C{str} with the text to send.
        @raise RuntimeError: if register_publisher() has not completed yet.
        '''
        # BUG FIX: self.factory was never assigned anywhere, so publish()
        # always raised AttributeError.  It is now set by
        # register_publisher(); fail with a clear message before that.
        if self.factory is None:
            raise RuntimeError('publish() called before register_publisher()')
        self.factory.publish(msg)

    @defer.inlineCallbacks
    def register_publisher(self):
        '''Register ourselves to the ROS Master and start listening for
        TCPROS subscriber connections.

        @return: A L{ROSPublisherEndpoint} (also kept as C{self.factory}).
        '''
        o = urlparse.urlparse(self.caller_api)
        reactor.listenTCP(o.port, server.Site(self.xml_rpc_api))
        response = yield self.master.callRemote('registerPublisher',
                self.caller_id, self.topic, self.topic_type, self.caller_api)
        factory = ROSPublisherFactory(self.topic, self.caller_id,
                self.data_class)
        reactor.listenTCP(self.ros_rpc_port, factory)
        endpoint = ROSPublisherEndpoint(factory)
        # Keep the endpoint (which owns .publish()) so that our own
        # publish() method works.
        self.factory = endpoint
        defer.returnValue(endpoint)
class ROSPublisherProtocol(basic.IntNStringReceiver):
    """Server-side TCPROS protocol for one subscriber connection.

    Waits for the subscriber's handshake frame, answers with our own
    handshake header, then registers the connection with the factory so
    it can stream serialized messages to us.
    """

    # TCPROS frames are prefixed with a little-endian unsigned 32-bit length.
    structFormat = "<I"
    prefixLength = calcsize(structFormat)
    state = State.WAIT

    def stringReceived(self, data):
        # First frame from the subscriber is its handshake header.
        # NOTE(review): the incoming header in `data` is never validated.
        if self.state == State.WAIT:
            header = {'topic': self.factory.topic,
                    'message_definition': self.factory.data_class._full_text,
                    'tcp_nodelay': self.factory.tcp_nodelay,
                    'md5sum': self.factory.data_class._md5sum,
                    'type': self.factory.data_class._type,
                    'callerid': self.factory.caller_id}
            out = rosgraph.network.encode_ros_handshake_header(header)
            self.transport.write(out)
            self.state = State.PUBLISH
            # From now on the factory pushes messages to this connection.
            self.factory.subscribers.append(self)
class ROSPublisherFactory(protocol.ServerFactory):
    """Server factory tracking the connected subscribers of one topic.

    @ivar subscribers: Connected L{ROSPublisherProtocol} instances; a
        protocol appends itself after a successful handshake.
    """

    protocol = ROSPublisherProtocol

    def __init__(self, topic, caller_id, data_class, tcp_nodelay=False):
        self.subscribers = []
        self.topic = topic
        self.caller_id = caller_id
        self.data_class = data_class
        self.tcp_nodelay = tcp_nodelay
class ROSSubscriberProtocol(basic.IntNStringReceiver):
    """Client-side TCPROS protocol: consume the publisher's handshake
    reply, then hand every deserialized message to the factory's handler.
    """

    structFormat = "<I"
    prefixLength = calcsize(structFormat)
    state = State.WAIT

    def stringReceived(self, data):
        if self.state == State.WAIT:
            # TODO: validate message
            self.state = State.RESPONSE
            return
        if self.state == State.RESPONSE:
            message = self.factory.data_class()
            message.deserialize(data)
            self.factory.handler(message)
class ROSSubscriberFactory(protocol.ClientFactory):
    """Client factory building L{ROSSubscriberProtocol} connections."""

    protocol = ROSSubscriberProtocol

    def __init__(self, data_class, handler):
        self.handler = handler
        self.data_class = data_class
class ROSPublisherSlaveAPI(xmlrpc.XMLRPC):
    """XML-RPC slave API exposed by a publisher node.

    Subscribers call ``requestTopic`` here to learn where our TCPROS
    server is listening.
    """

    def __init__(self, ros_rpc_hostname, ros_rpc_port, name, topic, data_class,
            allowNone=False, useDateTime=False):
        xmlrpc.XMLRPC.__init__(self, allowNone=allowNone,
                useDateTime=useDateTime)
        self.ros_rpc_hostname = ros_rpc_hostname
        self.rpc_port = ros_rpc_port
        self.caller_id = rospy.names.resolve_name(name)
        self.topic = rospy.names.resolve_name(topic)
        self.data_class = data_class

    def xmlrpc_requestTopic(self, subscriber, topic, protocol):
        """
        Tell a subscriber which TCPROS endpoint to connect to.

        NOTE(review): the ``subscriber``, ``topic`` and ``protocol``
        arguments are ignored; the same TCPROS endpoint is advertised
        for every request.
        """
        return [1, 'ready on %s:%d' % (self.ros_rpc_hostname, self.rpc_port),
                ['TCPROS', self.ros_rpc_hostname, self.rpc_port]]
class ROSSubscriberSlaveAPI(xmlrpc.XMLRPC):
    """XML-RPC slave API exposed by a subscriber node.

    Handles ``publisherUpdate`` callbacks from the ROS Master and opens a
    TCPROS connection to every newly announced publisher.
    """

    def __init__(self, ros_rpc_hostname, ros_rpc_port, name, topic, data_class,
            handler, allowNone=False, useDateTime=False):
        xmlrpc.XMLRPC.__init__(self, allowNone=allowNone,
                useDateTime=useDateTime)
        self.ros_rpc_hostname = ros_rpc_hostname
        self.rpc_port = ros_rpc_port
        self.caller_id = rospy.names.resolve_name(name)
        self.topic = rospy.names.resolve_name(topic)
        self.data_class = data_class
        self.handler = handler
        # URIs of publishers we currently hold a TCPROS connection to.
        self.connected_publishers = set()

    @defer.inlineCallbacks
    def connect_to_publishers(self, publishers):
        """Connect to every publisher URI we are not yet connected to.

        @param publishers: The full list of publisher URIs currently known
            to the master for our topic.
        """
        new_connected_publishers = set()
        for publisher in publishers:
            if publisher in self.connected_publishers:
                # BUG FIX: publishers that are still listed and already
                # connected must stay in the connected set; previously they
                # were dropped, forcing a reconnect on the next update.
                new_connected_publishers.add(publisher)
                continue
            master = xmlrpc.Proxy(publisher)
            response = yield master.callRemote('requestTopic',
                    self.caller_id, self.topic, [['TCPROS']])
            remote = response[2]
            from twisted.internet import reactor
            point = TCP4ClientEndpoint(reactor, remote[1], remote[2])
            factory = ROSSubscriberFactory(self.data_class, self.handler)
            p = yield point.connect(factory)
            header = {'topic': self.topic,
                    'message_definition': self.data_class._full_text,
                    'tcp_nodelay': '0',
                    'md5sum': self.data_class._md5sum,
                    'type': self.data_class._type,
                    'callerid': self.caller_id}
            out = rosgraph.network.encode_ros_handshake_header(header)
            p.transport.write(out)
            new_connected_publishers.add(publisher)
        # BUG FIX: this attribute was misspelled ('connected_publishsers'),
        # so the connected set was never actually updated and every
        # publisherUpdate re-connected to all publishers.
        self.connected_publishers = new_connected_publishers

    def xmlrpc_requestTopic(self, subscriber, topic, protocol):
        """
        Tell a peer which TCPROS endpoint to connect to.

        NOTE(review): the arguments are ignored; the same endpoint is
        advertised for every request.
        """
        return [1, 'ready on %s:%d' % (self.ros_rpc_hostname, self.rpc_port),
                ['TCPROS', self.ros_rpc_hostname, self.rpc_port]]

    @defer.inlineCallbacks
    def xmlrpc_publisherUpdate(self, caller_id, topic, publishers):
        """ROS Master callback: the publisher list for our topic changed."""
        yield self.connect_to_publishers(publishers)
        defer.returnValue((1, '', 0))
|
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import sys
import argparse
import logging
import numpy as np
import time
import paddle
import paddle.fluid as fluid
from paddle.fluid.framework import IrGraph
from paddle.fluid.contrib.slim.quantization import Quant2Int8MkldnnPass
from paddle.fluid import core
# The passes used below work on static graphs, so switch Paddle out of
# dynamic (imperative) mode at import time.
paddle.enable_static()

logging.basicConfig(format='%(asctime)s-%(levelname)s: %(message)s')
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
def parse_args():
    """Parse command-line options for the comparison test.

    Returns (test_args, unittest_argv): the parsed options, and an argv list
    (program name + unrecognized args) suitable for unittest.main().
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', type=int, default=1, help='Batch size.')
    parser.add_argument(
        '--skip_batch_num',
        type=int,
        default=0,
        help='Number of the first minibatches to skip in performance statistics.'
    )
    parser.add_argument(
        '--quant_model', type=str, default='', help='A path to a Quant model.')
    parser.add_argument(
        '--fp32_model',
        type=str,
        default='',
        help='A path to an FP32 model. If empty, the Quant model will be used for FP32 inference.'
    )
    parser.add_argument('--infer_data', type=str, default='', help='Data file.')
    parser.add_argument(
        '--labels', type=str, default='', help='File with labels.')
    parser.add_argument(
        '--batch_num',
        type=int,
        default=0,
        help='Number of batches to process. 0 or less means whole dataset. Default: 0.'
    )
    parser.add_argument(
        '--acc_diff_threshold',
        type=float,
        default=0.01,
        help='Accepted accuracy difference threshold.')
    parser.add_argument(
        '--ops_to_quantize',
        type=str,
        default='',
        help='A comma separated list of operators to quantize. Only quantizable operators are taken into account. If the option is not used, an attempt to quantize all quantizable operators will be made.'
    )
    parser.add_argument(
        '--op_ids_to_skip',
        type=str,
        default='',
        help='A comma separated list of operator ids to skip in quantization.')
    parser.add_argument(
        '--targets',
        type=str,
        default='quant,int8,fp32',
        help='A comma separated list of inference types to run ("int8", "fp32", "quant"). Default: "quant,int8,fp32"'
    )
    parser.add_argument(
        '--debug',
        action='store_true',
        help='If used, the graph of Quant model is drawn.')

    # Parse the known options into the `unittest` module object used as the
    # namespace; the remaining (unrecognized) arguments are returned so they
    # can be forwarded to unittest.main() untouched.
    test_args, args = parser.parse_known_args(namespace=unittest)
    return test_args, sys.argv[:1] + args
class QuantInt8NLPComparisonTest(unittest.TestCase):
    """
    Test for accuracy comparison of Quant FP32 and INT8 NLP inference.
    """

    def _reader_creator(self, data_file=None, labels_file=None):
        """Build a reader yielding (input0, input1, label) samples.

        Each dataset line holds at least two ';'-separated fields, each of
        the form '<shape tokens>:<value tokens>'; the corresponding labels
        file has one label per dataset line.
        """
        assert data_file, "The dataset file is missing."
        assert labels_file, "The labels file is missing."

        def reader():
            with open(data_file, 'r') as df:
                with open(labels_file, 'r') as lf:
                    data_lines = df.readlines()
                    labels_lines = lf.readlines()
                    assert len(data_lines) == len(
                        labels_lines
                    ), "The number of labels does not match the length of the dataset."
                    for i in range(len(data_lines)):
                        data_fields = data_lines[i].split(';')
                        assert len(
                            data_fields
                        ) >= 2, "The number of data fields in the dataset is less than 2"
                        buffers = []
                        shape = []
                        for j in range(2):
                            data = data_fields[j].split(':')
                            assert len(
                                data
                            ) >= 2, "Size of data in the dataset is less than 2"
                            # Shape is stored under index 0, while data under 1
                            shape = data[0].split()
                            # The first shape token is discarded -- presumably
                            # a leading count/batch marker in the file format;
                            # TODO confirm against the dataset spec.
                            shape.pop(0)
                            shape_np = np.array(shape).astype("int64")
                            buffer_i = data[1].split()
                            buffer_np = np.array(buffer_i).astype("int64")
                            buffer_np.shape = tuple(shape_np)
                            buffers.append(buffer_np)
                        label = labels_lines[i]
                        yield buffers[0], buffers[1], int(label)

        return reader

    def _get_batch_correct(self, batch_output=None, labels=None):
        """Count how many predictions in the batch match their labels.

        NOTE(review): np.where() returns a tuple of index arrays, so the
        comparison below relies on numpy broadcasting against the scalar
        label and only behaves as intended when the maximum is unique.
        """
        total = len(batch_output)
        assert total > 0, "The batch output is empty."
        correct = 0
        for n, output in enumerate(batch_output[0]):
            max_idx = np.where(output == output.max())
            if max_idx == labels[n]:
                correct += 1
        return correct

    def _predict(self,
                 test_reader=None,
                 model_path=None,
                 batch_size=1,
                 batch_num=1,
                 skip_batch_num=0,
                 target='quant'):
        """Run inference for one target ('quant', 'int8' or 'fp32').

        For 'int8'/'fp32' the loaded Quant program is first transformed by
        Quant2Int8MkldnnPass. Returns (avg accuracy, avg predictions/sec,
        avg latency in ms).
        """
        assert target in ['quant', 'int8', 'fp32']
        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        inference_scope = fluid.executor.global_scope()
        with fluid.scope_guard(inference_scope):
            # Support both the combined '__model__' layout and the separate
            # 'model'/'params' file layout.
            if os.path.exists(os.path.join(model_path, '__model__')):
                [inference_program, feed_target_names,
                 fetch_targets] = fluid.io.load_inference_model(model_path, exe)
            else:
                [inference_program, feed_target_names,
                 fetch_targets] = fluid.io.load_inference_model(
                     model_path, exe, 'model', 'params')
            graph = IrGraph(core.Graph(inference_program.desc), for_test=True)
            if (self._debug):
                graph.draw('.', 'quant_orig', graph.all_op_nodes())
            if (target != 'quant'):
                quant_transform_pass = Quant2Int8MkldnnPass(
                    self._quantized_ops,
                    _op_ids_to_skip=self._op_ids_to_skip,
                    _scope=inference_scope,
                    _place=place,
                    _core=core,
                    _debug=self._debug)
                if (target == 'int8'):
                    graph = quant_transform_pass.apply(graph)
                else:  # target == fp32
                    graph = quant_transform_pass.prepare_and_optimize_fp32(
                        graph)
                inference_program = graph.to_program()
            total_correct = 0
            total_samples = 0
            batch_times = []
            ppses = []  # predictions per second
            iters = 0
            infer_start_time = time.time()
            for data in test_reader():
                if batch_num > 0 and iters >= batch_num:
                    break
                # Reset counters after the warm-up batches so the totals
                # reflect only the measured part of the run.
                if iters == skip_batch_num:
                    total_samples = 0
                    infer_start_time = time.time()
                input0 = np.array([x[0] for x in data]).astype('int64')
                input1 = np.array([x[1] for x in data]).astype('int64')
                labels = np.array([x[2] for x in data]).astype('int64')
                start = time.time()
                out = exe.run(inference_program,
                              feed={
                                  feed_target_names[0]: input0,
                                  feed_target_names[1]: input1
                              },
                              fetch_list=fetch_targets)
                batch_time = (time.time() - start) * 1000  # in miliseconds
                batch_times.append(batch_time)
                batch_correct = self._get_batch_correct(out, labels)
                batch_len = len(data)
                total_samples += batch_len
                total_correct += batch_correct
                batch_acc = float(batch_correct) / float(batch_len)
                pps = batch_len / batch_time * 1000
                ppses.append(pps)
                latency = batch_time / batch_len
                iters += 1
                appx = ' (warm-up)' if iters <= skip_batch_num else ''
                _logger.info(
                    'batch {0}{4}, acc: {1:.4f}, latency: {2:.4f} ms, predictions per sec: {3:.2f}'
                    .format(iters, batch_acc, latency, pps, appx))
            # Postprocess benchmark data
            infer_total_time = time.time() - infer_start_time
            batch_latencies = batch_times[skip_batch_num:]
            batch_latency_avg = np.average(batch_latencies)
            latency_avg = batch_latency_avg / batch_size
            ppses = ppses[skip_batch_num:]
            pps_avg = np.average(ppses)
            acc_avg = float(np.sum(total_correct)) / float(total_samples)
            _logger.info('Total inference run time: {:.2f} s'.format(
                infer_total_time))
            return acc_avg, pps_avg, latency_avg

    def _print_performance(self, title, pps, lat):
        """Log average throughput and latency under the given title."""
        _logger.info(
            '{0}: avg predictions per sec: {1:.2f}, avg latency: {2:.4f} ms'.
            format(title, pps, lat))

    def _print_accuracy(self, title, acc):
        """Log the average accuracy under the given title."""
        _logger.info('{0}: avg accuracy: {1:.6f}'.format(title, acc))

    def _summarize_performance(self, int8_pps, int8_lat, fp32_pps, fp32_lat):
        """Log an INT8 (and, if measured, FP32) performance summary."""
        _logger.info('--- Performance summary ---')
        self._print_performance('INT8', int8_pps, int8_lat)
        if fp32_lat >= 0:
            self._print_performance('FP32', fp32_pps, fp32_lat)

    def _summarize_accuracy(self, quant_acc, int8_acc, fp32_acc):
        """Log an accuracy summary; FP32 is included only when measured."""
        _logger.info('--- Accuracy summary ---')
        self._print_accuracy('Quant', quant_acc)
        self._print_accuracy('INT8', int8_acc)
        if fp32_acc >= 0:
            self._print_accuracy('FP32', fp32_acc)

    def _compare_accuracy(self, threshold, quant_acc, int8_acc):
        """Assert that INT8 accuracy dropped by at most ``threshold``."""
        _logger.info(
            'Accepted accuracy drop threshold: {0}. (condition: (Quant_acc - INT8_acc) <= threshold)'
            .format(threshold))
        # Random outputs give accuracy about 0.33, we assume valid accuracy to be at least 0.5
        assert quant_acc > 0.5
        assert int8_acc > 0.5
        assert quant_acc - int8_acc <= threshold

    def _strings_from_csv(self, string):
        """Split a comma-separated string into a set of stripped strings."""
        return set(s.strip() for s in string.split(','))

    def _ints_from_csv(self, string):
        """Split a comma-separated string into a set of ints."""
        return set(map(int, string.split(',')))

    def test_graph_transformation(self):
        """End-to-end comparison driven by the module-level test_case_args."""
        # The MKL-DNN passes are the whole point of the test; skip otherwise.
        if not fluid.core.is_compiled_with_mkldnn():
            return

        quant_model_path = test_case_args.quant_model
        assert quant_model_path, 'The Quant model path cannot be empty. Please, use the --quant_model option.'
        data_path = test_case_args.infer_data
        assert data_path, 'The dataset path cannot be empty. Please, use the --infer_data option.'
        fp32_model_path = test_case_args.fp32_model
        labels_path = test_case_args.labels
        batch_size = test_case_args.batch_size
        batch_num = test_case_args.batch_num
        skip_batch_num = test_case_args.skip_batch_num
        acc_diff_threshold = test_case_args.acc_diff_threshold
        self._debug = test_case_args.debug

        # Empty set means "quantize every quantizable operator".
        self._quantized_ops = set()
        if test_case_args.ops_to_quantize:
            self._quantized_ops = self._strings_from_csv(
                test_case_args.ops_to_quantize)

        # {-1} is the pass's "skip nothing" sentinel.
        self._op_ids_to_skip = set([-1])
        if test_case_args.op_ids_to_skip:
            self._op_ids_to_skip = self._ints_from_csv(
                test_case_args.op_ids_to_skip)

        self._targets = self._strings_from_csv(test_case_args.targets)
        assert self._targets.intersection(
            {'quant', 'int8', 'fp32'}
        ), 'The --targets option, if used, must contain at least one of the targets: "quant", "int8", "fp32".'

        _logger.info('Quant & INT8 prediction run.')
        _logger.info('Quant model: {}'.format(quant_model_path))
        if fp32_model_path:
            _logger.info('FP32 model: {}'.format(fp32_model_path))
        _logger.info('Dataset: {}'.format(data_path))
        _logger.info('Labels: {}'.format(labels_path))
        _logger.info('Batch size: {}'.format(batch_size))
        _logger.info('Batch number: {}'.format(batch_num))
        _logger.info('Accuracy drop threshold: {}.'.format(acc_diff_threshold))
        _logger.info('Quantized ops: {}.'.format(','.join(
            self._quantized_ops) if self._quantized_ops else 'all quantizable'))
        _logger.info('Op ids to skip quantization: {}.'.format(','.join(
            map(str, self._op_ids_to_skip)) if test_case_args.op_ids_to_skip
            else 'none'))
        _logger.info('Targets: {}.'.format(','.join(self._targets)))

        if 'quant' in self._targets:
            _logger.info('--- Quant prediction start ---')
            val_reader = paddle.batch(
                self._reader_creator(data_path, labels_path),
                batch_size=batch_size)
            quant_acc, quant_pps, quant_lat = self._predict(
                val_reader,
                quant_model_path,
                batch_size,
                batch_num,
                skip_batch_num,
                target='quant')
            self._print_performance('Quant', quant_pps, quant_lat)
            self._print_accuracy('Quant', quant_acc)

        if 'int8' in self._targets:
            _logger.info('--- INT8 prediction start ---')
            val_reader = paddle.batch(
                self._reader_creator(data_path, labels_path),
                batch_size=batch_size)
            int8_acc, int8_pps, int8_lat = self._predict(
                val_reader,
                quant_model_path,
                batch_size,
                batch_num,
                skip_batch_num,
                target='int8')
            self._print_performance('INT8', int8_pps, int8_lat)
            self._print_accuracy('INT8', int8_acc)

        # -1 marks "not measured" for the summary helpers below.
        fp32_acc = fp32_pps = fp32_lat = -1
        if 'fp32' in self._targets and fp32_model_path:
            _logger.info('--- FP32 prediction start ---')
            val_reader = paddle.batch(
                self._reader_creator(data_path, labels_path),
                batch_size=batch_size)
            fp32_acc, fp32_pps, fp32_lat = self._predict(
                val_reader,
                fp32_model_path,
                batch_size,
                batch_num,
                skip_batch_num,
                target='fp32')
            self._print_performance('FP32', fp32_pps, fp32_lat)
            self._print_accuracy('FP32', fp32_acc)

        if {'int8', 'fp32'}.issubset(self._targets):
            self._summarize_performance(int8_pps, int8_lat, fp32_pps, fp32_lat)
        if {'int8', 'quant'}.issubset(self._targets):
            self._summarize_accuracy(quant_acc, int8_acc, fp32_acc)
            self._compare_accuracy(acc_diff_threshold, quant_acc, int8_acc)
if __name__ == '__main__':
    # NOTE(review): 'global' at module scope is a no-op; test_case_args is
    # already a module-level global, read by the test case above.
    global test_case_args
    test_case_args, remaining_args = parse_args()
    # Forward only the arguments argparse did not consume to unittest.
    unittest.main(argv=remaining_args)
|
|
import random
import minesweeper_util as u
import minesweeper as mnsw
import math
import time
class MinesweeperGame(object):
    """Abstract minesweeper game state (Python 2 code: xrange, list-filter).

    Subclasses define the board topology via gen_cells()/adjacent().
    Exactly one of num_mines (fixed total) or mine_prob (independent
    per-cell mine probability) must be supplied.
    """

    def __init__(self, num_mines=None, mine_prob=None):
        self.cell_ids = list(self.gen_cells())
        self.num_cells = len(self.cell_ids)
        assert num_mines is not None or mine_prob is not None
        if num_mines is not None:
            assert num_mines >= 0 and num_mines <= self.num_cells
            # Fixed mine count: shuffle a boolean multiset over the cells.
            mines = [True] * num_mines + [False] * (self.num_cells - num_mines)
            random.shuffle(mines)
            self.mode = 'minecount'
        else:
            assert mine_prob >= 0. and mine_prob <= 1.
            # Independent Bernoulli(mine_prob) draw per cell.
            mines = [random.random() < mine_prob for i in xrange(self.num_cells)]
            self.mode = 'mineprob'
            self.mine_prob = mine_prob
        # Python 2: filter() returns a list here, so len() is valid.
        self.num_mines = len(filter(None, mines))
        self.mines = dict((c, m) for c, m in zip(self.cell_ids, mines))
        # Cell state: None = covered, 'marked' = flagged, int = adjacency count.
        self.cells = dict((c, None) for c in self.cell_ids)
        self.yet_to_uncover = self.num_cells - self.num_mines
        self.mine_exposed = False

    def outcome(self):
        """'loss' if a mine was cleared, 'win' if all safe cells are open,
        otherwise None (game still in progress)."""
        if self.mine_exposed:
            return 'loss'
        elif self.yet_to_uncover == 0:
            return 'win'
        else:
            return None

    def can_play_cell(self, cell):
        """whether this cell can be cleared"""
        c = self.cells[cell]
        return (c is None or c == 'marked')

    def is_frontier_cell(self, cell):
        """whether this cell is represented by the 'other' term of the minesweeper solution"""
        return self.cells[cell] is None and all(self.cells[neighbor] in (None, 'marked') for neighbor in self.adjacent(cell))

    def sweep(self, cell):
        """Clear a cell; flood-clears recursively when it has no adjacent
        mines (classic minesweeper behaviour). Clearing a mine ends the game."""
        if not self.can_play_cell(cell):
            return
        if self.mines[cell]:
            self.mine_exposed = True
            return
        self.yet_to_uncover -= 1
        adj_count = len([c for c in self.adjacent(cell) if self.mines[c]])
        self.cells[cell] = adj_count
        if adj_count == 0:
            for neighbor in self.adjacent(cell):
                if self.can_play_cell(neighbor):
                    self.sweep(neighbor)

    def mark(self, cell):
        """Flag a playable cell as a suspected mine."""
        assert self.can_play_cell(cell)
        self.cells[cell] = 'marked'

    def gen_cells(self):
        # Abstract: yield every cell id on the board.
        assert False, 'abstract'

    def adjacent(self, cell_id):
        # Abstract: yield the neighbours of cell_id.
        assert False, 'abstract'
class GridMinesweeperGame(MinesweeperGame):
    """Minesweeper on a standard width x height rectangular grid."""

    def __init__(self, width, height, *args, **kwargs):
        self.width = width
        self.height = height
        super(GridMinesweeperGame, self).__init__(*args, **kwargs)

    def gen_cells(self):
        """Yield every (column, row) coordinate on the board."""
        for col in xrange(self.width):
            for row in xrange(self.height):
                yield (col, row)

    def adjacent(self, cell):
        """Yield the up-to-8 in-bounds neighbours of ``cell``."""
        i, j = cell
        for di in (-1, 0, 1):
            for dj in (-1, 0, 1):
                if (di, dj) == (0, 0):
                    continue
                ni, nj = i + di, j + dj
                if 0 <= ni < self.width and 0 <= nj < self.height:
                    yield (ni, nj)
class BoardWrapper(object):
    """Adapt a game board to the interface expected by generate_rules().

    Cell states are encoded as single-character codes: 'x' covered,
    '*' marked, '.' cleared with no adjacent mines, or the digit of the
    adjacency count.
    """

    def __init__(self, game):
        self.game = game

    def toCell(self, cell_id):
        state = self.game.cells[cell_id]
        if state is None:
            code = 'x'
        elif state == 'marked':
            code = '*'
        elif state == 0:
            code = '.'
        else:
            code = str(state)
        return u.BoardCell(code, cell_id)

    @property
    def cells(self):
        return dict((cid, self.toCell(cid)) for cid in self.game.cell_ids)

    def total_cells(self):
        return self.game.num_cells

    def adjacent(self, cell_id):
        return dict((cid, self.toCell(cid)) for cid in self.game.adjacent(cell_id))
def autoplay(game, **kwargs):
    """Play ``game`` to completion using the solver.

    Each turn: mark every cell proved to be a mine, clear every cell proved
    safe; when nothing is provably safe, clear one of the lowest-risk cells
    (optionally filtered by a 'strategy' location preference).

    Returns (result, moves, hopeless): result is 'win' or 'loss'; moves
    counts only guessing moves (forced-safe clears are free); hopeless is
    True if some guess had risk of roughly 0.5 or worse.
    """
    moves = 0
    hopeless = False
    while True:
        #print game
        #print '----'
        result = game.outcome()
        if result is not None:
            return result, moves, hopeless
        state = u.generate_rules(BoardWrapper(game), game.num_mines)
        if game.mode == 'mineprob':
            # NOTE(review): in mine-probability mode the second rule
            # argument is overwritten with the per-cell prior -- presumably
            # what mnsw.solve expects in that mode; confirm its signature.
            state[1] = game.mine_prob
        solution = mnsw.solve(*state)
        def _cells(cells):
            # Expand the solver's None placeholder ('other' cells) into the
            # concrete frontier cells it stands for.
            for c in cells:
                if c is not None:
                    yield c
                else:
                    for e in game.cell_ids:
                        if game.is_frontier_cell(e):
                            yield e
        def get_cells(p):
            # Cells whose solved mine probability equals p (float-tolerant).
            EPSILON = 1e-6
            return _cells(k for k, v in solution.iteritems() if abs(v - p) < EPSILON)
        mines = get_cells(1.)
        safe = list(get_cells(0.))
        for c in mines:
            game.mark(c)
            #print 'marking', c
        if safe:
            for c in safe:
                game.sweep(c)
                #print 'clearing', c
        else:
            # find safest
            min_risk = min(solution.values())
            if min_risk > .5 - 1e-6:
                hopeless = True
            safest = list(get_cells(min_risk))
            STRATEGY = kwargs.get('strategy')
            if STRATEGY:
                safest = locpref_strategy(STRATEGY, game, safest)
            move = random.choice(safest)
            game.sweep(move)
            #print 'safest', move
            moves += 1
# strategy like:
# [['corner']] -- prefer corners
# [['corner', 'edge']] -- prefer corners/edges
# [['corner'], ['edge']] -- prefer corners, then edges
def locpref_strategy(strategy, game, safest):
    """Filter ``safest`` cells by a location preference.

    ``strategy`` is a list of preference tiers, each a list of cell types
    ('corner', 'edge', 'interior'); the first tier matching at least one
    candidate wins. Falls back to the unfiltered candidates when no tier
    matches anything.
    """
    def cell_type(cell):
        on_edge_x = cell[0] in (0, game.width - 1)
        on_edge_y = cell[1] in (0, game.height - 1)
        if on_edge_x and on_edge_y:
            return 'corner'
        if on_edge_x or on_edge_y:
            return 'edge'
        return 'interior'

    chosen = None
    for tier in strategy:
        chosen = [cell for cell in safest if cell_type(cell) in tier]
        if chosen:
            break
    return chosen or safest
# Classic board presets, expressed as eval()-able constructor strings so
# they can be shipped to worker processes via run_trial().
BEGINNER = 'GridMinesweeperGame(8, 8, num_mines=10)'
BEGINNER_NEW = 'GridMinesweeperGame(9, 9, num_mines=10)'
INTERMEDIATE = 'GridMinesweeperGame(16, 16, num_mines=40)'
EXPERT = 'GridMinesweeperGame(16, 30, num_mines=99)'
def run_trial(args):
    """Play one game; ``args`` is a (game_expr, autoplay_kwargs) pair.

    NOTE(review): the game expression is eval()'d -- acceptable while the
    strings come from the local presets above, but never feed this
    untrusted input.
    """
    gamestr, kwargs = args
    return autoplay(eval(gamestr), **kwargs)
def trial(new_game_str, tolerance=.5e-3, first_safe=True, threaded=True, **kwargs):
    """Monte Carlo estimate of autoplay()'s win rate on a board (Python 2).

    Plays games until the standard error of the win-rate estimate drops to
    ``tolerance``. With ``first_safe``, games lost on the very first move
    are discarded. With ``threaded``, games run in a multiprocessing pool.

    Returns (total_games, total_wins, total_hopeless, hopeless_wins), where
    'hopeless' counts games that at some point had no guess safer than ~50%.
    """
    try:
        total_games = 0
        total_wins = 0
        total_hopeless = 0
        hopeless_wins = 0
        stop = False
        if threaded:
            import multiprocessing
            def gen_trials():
                pool = multiprocessing.Pool()
                def args():
                    # Keep feeding work until the enclosing scope sets stop.
                    while not stop:
                        yield (new_game_str, kwargs)
                return pool.imap_unordered(run_trial, args())
        else:
            def gen_trials():
                while not stop:
                    yield run_trial((new_game_str, kwargs))
        start = time.time()
        for t in gen_trials():
            result, moves, hopeless = t
            loss_on_first_move = (result == 'loss' and moves == 1)
            if loss_on_first_move and first_safe:
                continue
            total_games += 1
            if result == 'win':
                total_wins += 1
            if hopeless:
                total_hopeless += 1
                if result == 'win':
                    hopeless_wins += 1
            p = float(total_wins) / total_games
            # Standard error of a Bernoulli proportion.
            err = (p * (1-p) / total_games)**.5
            if err == 0:
                err = 1.
                est_trials = -1
            else:
                est_trials = int(round(p * (1-p) / tolerance**2))
            rate = total_games / (time.time() - start)
            est_time_left = (est_trials - total_games) / rate
            terminate = (err <= tolerance)
            if terminate or total_games % 5 == 0:
                # Progress line: wins/games, hopeless-wins/hopeless,
                # estimate +/- stderr, projected trials, seconds remaining.
                print '%d/%d %d/%d %.4f+/-%.4f %d %.1f' % (total_wins, total_games, hopeless_wins, total_hopeless, p, err, est_trials, est_time_left)
            if terminate:
                return (total_games, total_wins, total_hopeless, hopeless_wins)
    finally:
        # Signal the generators above to stop producing new work.
        stop = True
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v9.resources.types import geographic_view
from google.ads.googleads.v9.services.types import geographic_view_service
from .base import GeographicViewServiceTransport, DEFAULT_CLIENT_INFO
class GeographicViewServiceGrpcTransport(GeographicViewServiceTransport):
"""gRPC backend transport for GeographicViewService.
Service to manage geographic views.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
    def __init__(
        self,
        *,
        host: str = "googleads.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for grpc channel. It is ignored if ``channel`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
        """
        self._ssl_channel_credentials = ssl_channel_credentials

        if channel:
            # Sanity check: Ensure that channel and credentials are not both
            # provided.
            # NOTE(review): credentials is set to False (not None) --
            # presumably a sentinel telling the base class that credentials
            # are deliberately unused when a channel is supplied; confirm
            # against the base transport.
            credentials = False

            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        elif api_mtls_endpoint:
            # Deprecated mutual-TLS path.
            warnings.warn(
                "api_mtls_endpoint and client_cert_source are deprecated",
                DeprecationWarning,
            )

            host = (
                api_mtls_endpoint
                if ":" in api_mtls_endpoint
                else api_mtls_endpoint + ":443"
            )

            if credentials is None:
                credentials, _ = google.auth.default(
                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
                )

            # Create SSL credentials with client_cert_source or application
            # default SSL credentials.
            if client_cert_source:
                cert, key = client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
            else:
                ssl_credentials = SslCredentials().ssl_credentials

            # create a new channel. The provided one is ignored.
            self._grpc_channel = type(self).create_channel(
                host,
                credentials=credentials,
                credentials_file=credentials_file,
                ssl_credentials=ssl_credentials,
                scopes=scopes or self.AUTH_SCOPES,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            self._ssl_channel_credentials = ssl_credentials
        else:
            # Standard path: plain TLS channel to host (default port 443).
            host = host if ":" in host else host + ":443"

            if credentials is None:
                credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)

            # create a new channel. The provided one is ignored.
            self._grpc_channel = type(self).create_channel(
                host,
                credentials=credentials,
                ssl_credentials=ssl_channel_credentials,
                scopes=self.AUTH_SCOPES,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Cache of stub callables created by the RPC properties below.
        self._stubs = {}  # type: Dict[str, Callable]

        # Run the base constructor.
        super().__init__(
            host=host, credentials=credentials, client_info=client_info,
        )
    @classmethod
    def create_channel(
        cls,
        host: str = "googleads.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        scopes: Optional[Sequence[str]] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            scopes=scopes or cls.AUTH_SCOPES,
            **kwargs,
        )
def close(self):
self.grpc_channel.close()
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_geographic_view(
self,
) -> Callable[
[geographic_view_service.GetGeographicViewRequest],
geographic_view.GeographicView,
]:
r"""Return a callable for the get geographic view method over gRPC.
Returns the requested geographic view in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetGeographicViewRequest],
~.GeographicView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_geographic_view" not in self._stubs:
self._stubs["get_geographic_view"] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v9.services.GeographicViewService/GetGeographicView",
request_serializer=geographic_view_service.GetGeographicViewRequest.serialize,
response_deserializer=geographic_view.GeographicView.deserialize,
)
return self._stubs["get_geographic_view"]
__all__ = ("GeographicViewServiceGrpcTransport",)
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the ``project`` app.

    Creates the Project, Teammember, Task, Subtask, TaskSkills,
    SubtaskSkills, TaskIssue and SubtaskIssue tables plus the composite
    unique constraints that tie task/subtask/skill rows together.
    """

    def forwards(self, orm):
        """Apply the migration: create every table, then add the unique
        constraints that depend on the freshly created columns."""
        # Adding model 'Project'
        db.create_table(u'project_project', (
            ('projectid', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=400)),
            ('manager', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Employee'])),
        ))
        db.send_create_signal(u'project', ['Project'])

        # Adding model 'Teammember'
        db.create_table(u'project_teammember', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('employee', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Employee'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
        ))
        db.send_create_signal(u'project', ['Teammember'])

        # Adding model 'Task'
        db.create_table(u'project_task', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('taskid', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('name', self.gf('django.db.models.fields.CharField')(default='', max_length=200)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('priority', self.gf('django.db.models.fields.CharField')(default='Normal', max_length=200)),
            ('startdate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('deadline', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('enddate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('rating', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('description', self.gf('django.db.models.fields.CharField')(default='', max_length=400)),
            ('approved', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('review', self.gf('django.db.models.fields.CharField')(default='', max_length=400)),
            ('manager', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Employee'])),
        ))
        db.send_create_signal(u'project', ['Task'])

        # Adding unique constraint on 'Task', fields ['taskid', 'project']
        db.create_unique(u'project_task', ['taskid', 'project_id'])

        # Adding model 'Subtask'
        db.create_table(u'project_subtask', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('subtaskid', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('name', self.gf('django.db.models.fields.CharField')(default='', max_length=200)),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Task'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('priority', self.gf('django.db.models.fields.CharField')(default='Normal', max_length=200)),
            ('startdate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('deadline', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('enddate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('rating', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('description', self.gf('django.db.models.fields.CharField')(default='', max_length=400)),
            ('approved', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('review', self.gf('django.db.models.fields.CharField')(default='', max_length=400)),
            ('assignee', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Employee'])),
        ))
        db.send_create_signal(u'project', ['Subtask'])

        # Adding unique constraint on 'Subtask', fields ['subtaskid', 'task', 'project']
        db.create_unique(u'project_subtask', ['subtaskid', 'task_id', 'project_id'])

        # Adding model 'TaskSkills'
        db.create_table(u'project_taskskills', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Task'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('skill', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Skill'])),
        ))
        db.send_create_signal(u'project', ['TaskSkills'])

        # Adding unique constraint on 'TaskSkills', fields ['skill', 'task', 'project']
        db.create_unique(u'project_taskskills', ['skill_id', 'task_id', 'project_id'])

        # Adding model 'SubtaskSkills'
        db.create_table(u'project_subtaskskills', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('subtask', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Subtask'])),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Task'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('skill', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Skill'])),
        ))
        db.send_create_signal(u'project', ['SubtaskSkills'])

        # Adding unique constraint on 'SubtaskSkills', fields ['skill', 'subtask', 'task', 'project']
        db.create_unique(u'project_subtaskskills', ['skill_id', 'subtask_id', 'task_id', 'project_id'])

        # Adding model 'TaskIssue'
        db.create_table(u'project_taskissue', (
            ('taskissueid', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Task'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('createdate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('resolvedate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('priority', self.gf('django.db.models.fields.CharField')(default='Low', max_length=200)),
            ('description', self.gf('django.db.models.fields.CharField')(default='Unknown', max_length=400)),
        ))
        db.send_create_signal(u'project', ['TaskIssue'])

        # Adding model 'SubtaskIssue'
        db.create_table(u'project_subtaskissue', (
            ('subtaskissueid', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('subtask', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Subtask'])),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Task'])),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['project.Project'])),
            ('createdate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('resolvedate', self.gf('django.db.models.fields.DateField')(default=datetime.date.today)),
            ('priority', self.gf('django.db.models.fields.CharField')(default='Low', max_length=200)),
        ))
        db.send_create_signal(u'project', ['SubtaskIssue'])

    def backwards(self, orm):
        """Revert the migration: drop the unique constraints first (they
        depend on the tables), then drop every table."""
        # Removing unique constraint on 'SubtaskSkills', fields ['skill', 'subtask', 'task', 'project']
        db.delete_unique(u'project_subtaskskills', ['skill_id', 'subtask_id', 'task_id', 'project_id'])

        # Removing unique constraint on 'TaskSkills', fields ['skill', 'task', 'project']
        db.delete_unique(u'project_taskskills', ['skill_id', 'task_id', 'project_id'])

        # Removing unique constraint on 'Subtask', fields ['subtaskid', 'task', 'project']
        db.delete_unique(u'project_subtask', ['subtaskid', 'task_id', 'project_id'])

        # Removing unique constraint on 'Task', fields ['taskid', 'project']
        db.delete_unique(u'project_task', ['taskid', 'project_id'])

        # Deleting model 'Project'
        db.delete_table(u'project_project')

        # Deleting model 'Teammember'
        db.delete_table(u'project_teammember')

        # Deleting model 'Task'
        db.delete_table(u'project_task')

        # Deleting model 'Subtask'
        db.delete_table(u'project_subtask')

        # Deleting model 'TaskSkills'
        db.delete_table(u'project_taskskills')

        # Deleting model 'SubtaskSkills'
        db.delete_table(u'project_subtaskskills')

        # Deleting model 'TaskIssue'
        db.delete_table(u'project_taskissue')

        # Deleting model 'SubtaskIssue'
        db.delete_table(u'project_subtaskissue')

    # Frozen ORM snapshot used by South to build the ``orm`` object
    # passed to forwards()/backwards(); generated, do not edit by hand.
    models = {
        u'accounts.employee': {
            'Meta': {'object_name': 'Employee'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'empid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'isManager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
        },
        u'accounts.skill': {
            'Meta': {'object_name': 'Skill'},
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'skillid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'project.project': {
            'Meta': {'object_name': 'Project'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
            'manager': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'projectid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'project.subtask': {
            'Meta': {'unique_together': "(('subtaskid', 'task', 'project'),)", 'object_name': 'Subtask'},
            'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'assignee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
            'deadline': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
            'enddate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
            'priority': ('django.db.models.fields.CharField', [], {'default': "'Normal'", 'max_length': '200'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'review': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
            'startdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'subtaskid': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
        },
        u'project.subtaskissue': {
            'Meta': {'object_name': 'SubtaskIssue'},
            'createdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'priority': ('django.db.models.fields.CharField', [], {'default': "'Low'", 'max_length': '200'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'resolvedate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'subtask': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Subtask']"}),
            'subtaskissueid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
        },
        u'project.subtaskskills': {
            'Meta': {'unique_together': "(('skill', 'subtask', 'task', 'project'),)", 'object_name': 'SubtaskSkills'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Skill']"}),
            'subtask': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Subtask']"}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
        },
        u'project.task': {
            'Meta': {'unique_together': "(('taskid', 'project'),)", 'object_name': 'Task'},
            'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'deadline': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
            'enddate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'manager': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
            'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
            'priority': ('django.db.models.fields.CharField', [], {'default': "'Normal'", 'max_length': '200'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'review': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
            'startdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'taskid': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'project.taskissue': {
            'Meta': {'object_name': 'TaskIssue'},
            'createdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'description': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '400'}),
            'priority': ('django.db.models.fields.CharField', [], {'default': "'Low'", 'max_length': '200'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'resolvedate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"}),
            'taskissueid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'project.taskskills': {
            'Meta': {'unique_together': "(('skill', 'task', 'project'),)", 'object_name': 'TaskSkills'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
            'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Skill']"}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
        },
        u'project.teammember': {
            'Meta': {'object_name': 'Teammember'},
            'employee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"})
        }
    }

    # Apps whose migrations are completed by this migration.
    complete_apps = ['project']
|
|
# This class implements the AIML pattern-matching algorithm described
# by Dr. Richard Wallace at the following site:
# http://www.alicebot.org/documentation/matching.html
import marshal
import pprint
import re
import string
import sys
class PatternMgr:
    """Stores AIML patterns in a tree of nested dictionaries and matches
    user input against them.

    NOTE: this is Python 2 code (``except Exception, e`` syntax,
    ``has_key``, ``unicode``, ``string.join``, tuple-unpacking function
    parameters); it will not compile under Python 3.

    Tree layout: each node is a dict mapping a word (or one of the
    special integer keys below) to a child node; a leaf stores its
    template under ``_TEMPLATE``.
    """
    # special dictionary keys — small ints so they can never collide
    # with the (string) words stored in the same dicts.
    _UNDERSCORE = 0
    _STAR = 1
    _TEMPLATE = 2
    _THAT = 3
    _TOPIC = 4
    _BOT_NAME = 5

    def __init__(self):
        # Root node of the pattern tree.
        self._root = {}
        # Number of templates stored (one per unique pattern path).
        self._templateCount = 0
        self._botName = u"Gabby"
        # Regexes used to normalize input before matching: strip
        # punctuation, collapse whitespace.
        punctuation = "\"`~!@#$%^&*()-_=+[{]}\|;:',<.>/?"
        self._puncStripRE = re.compile("[" + re.escape(punctuation) + "]")
        # re.LOCALE with a str pattern is Python 2 only.
        self._whitespaceRE = re.compile("\s+", re.LOCALE | re.UNICODE)

    def numTemplates(self):
        """Return the number of templates currently stored."""
        return self._templateCount

    def setBotName(self, name):
        """Set the name of the bot, used to match <bot name="name"> tags in
        patterns.  The name must be a single word!
        """
        # Collapse a multi-word name into a single word
        self._botName = unicode(string.join(name.split()))

    def dump(self):
        """Print all learned patterns, for debugging purposes."""
        pprint.pprint(self._root)

    def save(self, filename):
        """Dump the current patterns to the file specified by filename.  To
        restore later, use restore().
        """
        try:
            outFile = open(filename, "wb")
            marshal.dump(self._templateCount, outFile)
            marshal.dump(self._botName, outFile)
            marshal.dump(self._root, outFile)
            outFile.close()
        except Exception, e:
            print "Error saving PatternMgr to file %s:" % filename
            raise Exception, e

    def restore(self, filename):
        """Restore a previously save()d collection of patterns."""
        try:
            inFile = open(filename, "rb")
            self._templateCount = marshal.load(inFile)
            self._botName = marshal.load(inFile)
            self._root = marshal.load(inFile)
            inFile.close()
        except Exception, e:
            print "Error restoring PatternMgr from file %s:" % filename
            raise Exception, e

    def add(self, (pattern,that,topic), template):
        """Add a [pattern/that/topic] tuple and its corresponding template
        to the node tree.
        """
        # TODO: make sure words contains only legal characters
        # (alphanumerics,*,_)

        # Navigate through the node tree to the template's location, adding
        # nodes if necessary.
        node = self._root
        for word in string.split(pattern):
            key = word
            if key == u"_":
                key = self._UNDERSCORE
            elif key == u"*":
                key = self._STAR
            elif key == u"BOT_NAME":
                key = self._BOT_NAME
            if not node.has_key(key):
                node[key] = {}
            node = node[key]

        # navigate further down, if a non-empty "that" pattern was included
        if len(that) > 0:
            if not node.has_key(self._THAT):
                node[self._THAT] = {}
            node = node[self._THAT]
            for word in string.split(that):
                key = word
                if key == u"_":
                    key = self._UNDERSCORE
                elif key == u"*":
                    key = self._STAR
                if not node.has_key(key):
                    node[key] = {}
                node = node[key]

        # navigate yet further down, if a non-empty "topic" string was included
        if len(topic) > 0:
            if not node.has_key(self._TOPIC):
                node[self._TOPIC] = {}
            node = node[self._TOPIC]
            for word in string.split(topic):
                key = word
                if key == u"_":
                    key = self._UNDERSCORE
                elif key == u"*":
                    key = self._STAR
                if not node.has_key(key):
                    node[key] = {}
                node = node[key]

        # add the template.  Only count it if this leaf did not already
        # hold one (re-adding overwrites without incrementing).
        if not node.has_key(self._TEMPLATE):
            self._templateCount += 1
        node[self._TEMPLATE] = template

    def match(self, pattern, that, topic):
        """Return the template which is the closest match to pattern. The
        'that' parameter contains the bot's previous response. The 'topic'
        parameter contains the current topic of conversation.

        Returns None if no template is found.
        """
        if len(pattern) == 0:
            return None
        # Mutilate the input.  Remove all punctuation and convert the
        # text to all caps.
        input = string.upper(pattern)
        input = re.sub(self._puncStripRE, " ", input)
        if that.strip() == u"": that = u"ULTRABOGUSDUMMYTHAT" # 'that' must never be empty
        thatInput = string.upper(that)
        thatInput = re.sub(self._puncStripRE, " ", thatInput)
        thatInput = re.sub(self._whitespaceRE, " ", thatInput)
        if topic.strip() == u"": topic = u"ULTRABOGUSDUMMYTOPIC" # 'topic' must never be empty
        topicInput = string.upper(topic)
        topicInput = re.sub(self._puncStripRE, " ", topicInput)

        # Pass the input off to the recursive call
        patMatch, template = self._match(input.split(), thatInput.split(), topicInput.split(), self._root)
        return template

    def star(self, starType, pattern, that, topic, index):
        """Returns a string, the portion of pattern that was matched by a *.

        The 'starType' parameter specifies which type of star to find.
        Legal values are:
         - 'star': matches a star in the main pattern.
         - 'thatstar': matches a star in the that pattern.
         - 'topicstar': matches a star in the topic pattern.

        'index' is the 1-based ordinal of the star to extract.
        """
        # Mutilate the input.  Remove all punctuation and convert the
        # text to all caps.
        input = string.upper(pattern)
        input = re.sub(self._puncStripRE, " ", input)
        input = re.sub(self._whitespaceRE, " ", input)
        if that.strip() == u"": that = u"ULTRABOGUSDUMMYTHAT" # 'that' must never be empty
        thatInput = string.upper(that)
        thatInput = re.sub(self._puncStripRE, " ", thatInput)
        thatInput = re.sub(self._whitespaceRE, " ", thatInput)
        if topic.strip() == u"": topic = u"ULTRABOGUSDUMMYTOPIC" # 'topic' must never be empty
        topicInput = string.upper(topic)
        topicInput = re.sub(self._puncStripRE, " ", topicInput)
        topicInput = re.sub(self._whitespaceRE, " ", topicInput)

        # Pass the input off to the recursive pattern-matcher
        patMatch, template = self._match(input.split(), thatInput.split(), topicInput.split(), self._root)
        if template == None:
            return ""

        # Extract the appropriate portion of the pattern, based on the
        # starType argument.
        words = None
        if starType == 'star':
            patMatch = patMatch[:patMatch.index(self._THAT)]
            words = input.split()
        elif starType == 'thatstar':
            patMatch = patMatch[patMatch.index(self._THAT)+1 : patMatch.index(self._TOPIC)]
            words = thatInput.split()
        elif starType == 'topicstar':
            patMatch = patMatch[patMatch.index(self._TOPIC)+1 :]
            words = topicInput.split()
        else:
            # unknown value
            raise ValueError, "starType must be in ['star', 'thatstar', 'topicstar']"

        # compare the input string to the matched pattern, word by word.
        # At the end of this loop, if foundTheRightStar is true, start and
        # end will contain the start and end indices (in "words") of
        # the substring that the desired star matched.
        foundTheRightStar = False
        start = end = j = numStars = k = 0
        for i in range(len(words)):
            # This condition is true after processing a star
            # that ISN'T the one we're looking for.
            if i < k:
                continue
            # If we're reached the end of the pattern, we're done.
            if j == len(patMatch):
                break
            if not foundTheRightStar:
                if patMatch[j] in [self._STAR, self._UNDERSCORE]: #we got a star
                    numStars += 1
                    if numStars == index:
                        # This is the star we care about.
                        foundTheRightStar = True
                    start = i
                    # Iterate through the rest of the string.
                    for k in range (i, len(words)):
                        # If the star is at the end of the pattern,
                        # we know exactly where it ends.
                        if j+1  == len (patMatch):
                            end = len (words)
                            break
                        # If the words have started matching the
                        # pattern again, the star has ended.
                        if patMatch[j+1] == words[k]:
                            end = k - 1
                            i = k
                            break
                # If we just finished processing the star we cared
                # about, we exit the loop early.
                if foundTheRightStar:
                    break
            # Move to the next element of the pattern.
            j += 1

        # extract the star words from the original, unmutilated input.
        if foundTheRightStar:
            #print string.join(pattern.split()[start:end+1])
            if starType == 'star': return string.join(pattern.split()[start:end+1])
            elif starType == 'thatstar': return string.join(that.split()[start:end+1])
            elif starType == 'topicstar': return string.join(topic.split()[start:end+1])
        else: return ""

    def _match(self, words, thatWords, topicWords, root):
        """Return a tuple (pat, tem) where pat is a list of nodes, starting
        at the root and leading to the matching pattern, and tem is the
        matched template.

        Match priority at each node: underscore, exact word, bot name,
        star (per the AIML matching specification).
        """
        # base-case: if the word list is empty, return the current node's
        # template.
        if len(words) == 0:
            # we're out of words.
            pattern = []
            template = None
            if len(thatWords) > 0:
                # If thatWords isn't empty, recursively
                # pattern-match on the _THAT node with thatWords as words.
                try:
                    pattern, template = self._match(thatWords, [], topicWords, root[self._THAT])
                    if pattern != None:
                        pattern = [self._THAT] + pattern
                except KeyError:
                    pattern = []
                    template = None
            elif len(topicWords) > 0:
                # If thatWords is empty and topicWords isn't, recursively pattern
                # on the _TOPIC node with topicWords as words.
                try:
                    pattern, template = self._match(topicWords, [], [], root[self._TOPIC])
                    if pattern != None:
                        pattern = [self._TOPIC] + pattern
                except KeyError:
                    pattern = []
                    template = None
            if template == None:
                # we're totally out of input.  Grab the template at this node.
                pattern = []
                try: template = root[self._TEMPLATE]
                except KeyError: template = None
            return (pattern, template)

        first = words[0]
        suffix = words[1:]

        # Check underscore.
        # Note: this is causing problems in the standard AIML set, and is
        # currently disabled.
        if root.has_key(self._UNDERSCORE):
            # Must include the case where suf is [] in order to handle the case
            # where a * or _ is at the end of the pattern.
            for j in range(len(suffix)+1):
                suf = suffix[j:]
                pattern, template = self._match(suf, thatWords, topicWords, root[self._UNDERSCORE])
                if template is not None:
                    newPattern = [self._UNDERSCORE] + pattern
                    return (newPattern, template)

        # Check first
        if root.has_key(first):
            pattern, template = self._match(suffix, thatWords, topicWords, root[first])
            if template is not None:
                newPattern = [first] + pattern
                return (newPattern, template)

        # check bot name
        if root.has_key(self._BOT_NAME) and first == self._botName:
            pattern, template = self._match(suffix, thatWords, topicWords, root[self._BOT_NAME])
            if template is not None:
                newPattern = [first] + pattern
                return (newPattern, template)

        # check star
        if root.has_key(self._STAR):
            # Must include the case where suf is [] in order to handle the case
            # where a * or _ is at the end of the pattern.
            for j in range(len(suffix)+1):
                suf = suffix[j:]
                pattern, template = self._match(suf, thatWords, topicWords, root[self._STAR])
                if template is not None:
                    newPattern = [self._STAR] + pattern
                    return (newPattern, template)

        # No matches were found.
        return (None, None)
|
|
import logging
import pykka
import serial
import time
logger = logging.getLogger('mopidy_arcam')
class ArcamTalker(pykka.ThreadingActor):
"""
Independent thread which does the communication with the Arcam amplifier.
"""
# Serial link config
BAUDRATE = 38400
BYTESIZE = 8
PARITY = 'N'
STOPBITS = 1
# Timeout in seconds used for read/write operations.
# If you set the timeout too low, the reads will never get complete
# confirmations and calibration will decrease volume forever. If you set
# the timeout too high, stuff takes more time. 0.2s seems like a good value
# for NAD C 355BEE.
TIMEOUT = 0.2
# Number of volume levels the amplifier supports.
# 100 for the Arcam AVR300 amplifier.
VOLUME_LEVELS = 100
# Translation map (commands and actions) for Arcam specific commands
# The key is the taken from the NAD fork and then a Arcam translation is stored in the corresponding value.
ARCAM_ZONE = "1";
ARCAM_ACTION_ASK = "9";
ARCAM_ACTION_ON = "1";
ARCAM_ACTION_OFF = "0";
ARCAM_ACTION_PREFIX_LENGTH = 6
ARCAM_VOLUME_OFFSET = 48
ASK_DEVICE_KEY = 'Ask'
commandRequestMap = {"Main.Power" :"PC_*",
"Main.Volume": "PC_/",
"Main.Mute" : "PC_.",
"Main.Source": "PC_1"}
commandResponseMap = {"AV_*" : "Main.Power",
"AV_/" : "Main.Volume",
"AV_." : "Main.Mute",
"AV_1" : "Main.Source"}
actionRequestMap = {"On" : ARCAM_ACTION_ON,
"Off" : ARCAM_ACTION_OFF,
"+" : ARCAM_ACTION_ON,
"-" : ARCAM_ACTION_OFF,
ASK_DEVICE_KEY : ARCAM_ACTION_ASK}
actionResponseMap = {"1": "On",
"0": "Off"}
sourceRequestMap = {"DVD" : "0",
"SAT" : "1",
"AV" : "2",
"PVR" : "3",
"VCR" : "4",
"CD" : "5",
"FM" : "6",
"AM" : "7",
"DVDA" : "8"}
sourceResponseMap = {"0" : "DVD",
"1" : "SAT",
"2" : "AV",
"3" : "PVR",
"4" : "VCR",
"5" : "CD",
"6" : "FM",
"7" : "AM",
"8" : "DVDA"}
_destruct = False
def buildRequestString(self, commandCode, zone, action):
if self.getRequestAction(action) != None:
resultString = str(self.commandRequestMap.get(commandCode)) + str(zone) + str(self.getRequestAction(action))
else:
resultString = str(self.commandRequestMap.get(commandCode)) + str(zone) + str(action)
return resultString
    def getRequestAction(self, action):
        """Translate a symbolic action ('On', 'Off', '+', '-', 'Ask') to
        its Arcam action code, or return None if unknown."""
        return self.actionRequestMap.get(action)
    def __init__(self, port, source, speakers_a, speakers_b):
        """Store connection settings; the serial link itself is opened
        later, in on_start()."""
        super(ArcamTalker, self).__init__()
        self.port = port
        self.source = source
        # Need to be changed to zones
        self.speakers_a = speakers_a
        self.speakers_b = speakers_b
        # Cached amplifier volume level (None until first queried).
        self._arcam_volume = None
        # Serial handle, created in _open_connection().
        self._device = None
    def on_start(self):
        # Pykka lifecycle hook: open the serial connection and drive the
        # amplifier into a known state as soon as the actor thread starts.
        self._open_connection()
        self._set_device_to_known_state()
    def on_stop(self):
        # Pykka lifecycle hook: flag shutdown so destruct() reports True.
        # (Python 2 print statement — debug output.)
        print "destruct"
        self._destruct = True
    def stop(self):
        # Explicitly delegate to the base actor's stop (triggers on_stop).
        print "stop"
        pykka.ThreadingActor.stop(self)
    def destruct(self):
        """Return True once on_stop() has run (actor is shutting down)."""
        return self._destruct
    def _open_connection(self):
        """Open the serial link to the amplifier using the class-level
        link settings (38400 8N1, short read/write timeout)."""
        logger.debug('Arcam amplifier: Connecting through "%s"', self.port)
        self._device = serial.Serial(
            port=self.port,
            baudrate=self.BAUDRATE,
            bytesize=self.BYTESIZE,
            parity=self.PARITY,
            stopbits=self.STOPBITS,
            timeout=self.TIMEOUT)
        # Return value unused; the model string is hard-coded anyway.
        self._get_device_model()
    def _set_device_to_known_state(self):
        """Power on, select the configured source, unmute and read back
        the current volume so that _arcam_volume is initialized."""
        self._power_device_on() # Starting main zone per default.
        #self._select_speakers()
        self._select_input_source()
        self.mute(False)
        #self.calibrate_volume() #We can simply just ask for the volume
        self.get_volume()
def _get_device_model(self):
# Can't get this information from the Arcam receiver, so it's hardcoded
return "Arcam AVR300"
    def _power_device_on(self):
        """Switch the main zone on and wait for the amp to boot."""
        self._check_and_set('Main.Power', 'On')
        time.sleep(5) # Wait for the amp to actually start up and be ready for more input.
    def _select_speakers(self):
        # NOTE(review): currently unused (call site is commented out in
        # _set_device_to_known_state). 'Main.SpeakerA'/'Main.SpeakerB'
        # have no entry in commandRequestMap, so enabling this as-is
        # would build a request starting with "None" — confirm before use.
        if self.speakers_a is not None:
            self._check_and_set('Main.SpeakerA', self.speakers_a.title())
        if self.speakers_b is not None:
            self._check_and_set('Main.SpeakerB', self.speakers_b.title())
    def _select_input_source(self):
        """Select the configured input source, translating the source
        name to its Arcam code (and back, for the response check)."""
        if self.source is not None:
            self._check_and_set('Main.Source', self.sourceRequestMap.get(self.source), self.sourceResponseMap)
    def mute(self, mute):
        """Set the amplifier mute state.

        NOTE(review): the branches look inverted relative to the
        parameter — mute=True sends 'Off' and mute=False sends 'On'.
        The upstream NAD implementation does the opposite; confirm
        against the Arcam wire protocol (the action codes may carry an
        inverted meaning for this command) before changing.
        """
        if mute:
            self._check_and_set('Main.Mute', 'Off') # turn Mute off
        else:
            self._check_and_set('Main.Mute', 'On') # turn Mute on
    def get_volume(self):
        """Query the amplifier for its current volume; cache and return it.

        The device answers with a single character whose ordinal, minus
        ARCAM_VOLUME_OFFSET, is the volume level.
        """
        rawVolume = ord(self._ask_device("Main.Volume"))
        self._arcam_volume = rawVolume - self.ARCAM_VOLUME_OFFSET;
        return self._arcam_volume
def set_volume(self, volume):
# Increase or decrease the amplifier volume until it matches the given
# target volume.
logger.debug('Setting volume to %d' % volume)
target_nad_volume = int(round(volume * self.VOLUME_LEVELS / 100.0))
if self._arcam_volume is None:
return # Calibration needed
while target_nad_volume > self._arcam_volume:
self._increase_volume()
self._arcam_volume += 1
while target_nad_volume < self._arcam_volume:
self._decrease_volume()
self._arcam_volume -= 1
def calc_volume_char(self, volume):
return chr(volume+self.ARCAM_VOLUME_OFFSET)
def calc_volume_int(self, volume):
return ord(volume-self.ARCAM_VOLUME_OFFSET)
    def _increase_volume(self):
        # Increase volume by one step ('+' maps to the Arcam 'On' action
        # code for the Main.Volume command).
        self._command_device("Main.Volume", "+")
def _decrease_volume(self):
# Decrease volume. Returns :class:`True` if confirmed by device.
self._command_device("Main.Volume", "-")
    def update_volume(self, volume):
        """Overwrite the cached integer volume level with *volume*."""
        self._arcam_volume = volume
    def _check_and_set(self, key, value, responseMap=None):
        """Set *key* to *value* on the device, verifying and retrying.

        Asks the device first and returns immediately if the value is
        already set.  Otherwise the command is issued, up to three
        times, returning as soon as the device echoes the requested
        value.  After the third failed attempt one final query is made
        and the give-up is logged.

        :param key: Protocol key, e.g. ``'Main.Power'``.
        :param value: Desired value for the key.
        :param responseMap: Optional mapping used to translate the raw
            device response (passed through to ``_ask_device``).
        """
        for attempt in range(1, 4):
            # Skip the command entirely if the device already reports
            # the desired value.
            if self._ask_device(key, responseMap) == value:
                return
            logger.info('Arcam amplifier: Setting "%s" to "%s" (attempt %d/3)', key, value, attempt)
            # The command's echoed reply also counts as confirmation.
            if self._command_device(key, value) == value:
                return
        if self._ask_device(key, responseMap) != value:
            logger.info(
                'Arcam amplifier: Gave up on setting "%s" to "%s"',
                key, value)
def _ask_device(self, key, responseMap=None):
self._write(self.buildRequestString(key, self.ARCAM_ZONE, self.ASK_DEVICE_KEY))
resultString = self.readline()
if len(resultString) > 0:
if responseMap != None:
resultString = responseMap.get(resultString[self.ARCAM_ACTION_PREFIX_LENGTH])
elif self.actionResponseMap.get(resultString[self.ARCAM_ACTION_PREFIX_LENGTH]) != None:
resultString = self.actionResponseMap.get(resultString[self.ARCAM_ACTION_PREFIX_LENGTH])
else:
# When no referenced action is found, return RAW type/data
resultString = resultString[self.ARCAM_ACTION_PREFIX_LENGTH]
return resultString
def _command_device(self, key, value):
#if type(value) == unicode:
# value = value.encode('utf-8')
#self._write('%s=%s' % (key, value))
self._write(self.buildRequestString(key, self.ARCAM_ZONE, value))
resultString = self.readline()
if len(resultString) > 0:
if self.actionResponseMap.get(resultString[self.ARCAM_ACTION_PREFIX_LENGTH]) != None:
resultString = self.actionResponseMap.get(resultString[self.ARCAM_ACTION_PREFIX_LENGTH])
else:
resultString = resultString[self.ARCAM_ACTION_PREFIX_LENGTH]
return resultString
    def _write(self, data):
        """Write *data* to the device, terminated by CRLF.

        Opens the serial device first if it is not already open.
        """
        # NOTE(review): the old comment (inherited from the NAD driver)
        # claimed a newline is also *prepended*, but only the trailing
        # '\r\n' is actually sent.
        if not self._device.isOpen():
            self._device.open()
        logger.debug('Trying to write: %s', data)
        self._device.write('%s\r\n' % data)
    def readline(self):
        """Read one line from the device, stripped of leading and
        trailing whitespace.

        Opens the serial device first if it is not already open.
        """
        if not self._device.isOpen():
            self._device.open()
        return self._device.readline().strip()
    def read_word(self):
        """Read a fixed-size chunk of 8 bytes from the device.

        Opens the serial device first if it is not already open.
        """
        if not self._device.isOpen():
            self._device.open()
        return self._device.read(8)
|
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2010, Tijn van der Zant and Komei Sugiura.
# Copyright (c) 2010-2013, Dirk Holz and Luca Iocchi.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# imports
import random
import sys
import copy
# read the locations, objects and sentences files
# and clean up the lists from the files
locations = []
location_categories = []
items = []
item_categories = []
cat1Sentences = []
cat2Sentences = []
cat3Situations = []
names = []
# get rid of empty lines and do not use anything that starts with a '#'
for loc in [location.strip('\n') for location in open('locations.txt', 'r').readlines()]:
if loc != '':
if loc[0] != '#':
locations.append(loc)
for loc_cat in [location_category.strip('\n') for location_category in open('location_categories.txt', 'r').readlines()]:
if loc_cat != '':
if loc_cat[0] != '#':
location_categories.append(loc_cat)
for it in [item.strip('\n') for item in open('items.txt', 'r').readlines()]:
if it != '':
if it[0] != '#':
items.append(it)
for item_cat in [item_category.strip('\n') for item_category in open('item_categories.txt', 'r').readlines()]:
if item_cat != '':
if item_cat[0] != '#':
item_categories.append(item_cat)
for sentence in [str(sent).strip('\n') for sent in open('cat1Sentences.txt' , 'r').readlines()]:
if sentence != '':
if sentence[0] != '#':
cat1Sentences.append(sentence)
for sentence in [str(sent).strip('\n') for sent in open('cat2Sentences.txt' , 'r').readlines()]:
if sentence != '':
if sentence[0] != '#':
cat2Sentences.append(sentence)
situations = []
questions = []
for sit in [str(sent).strip('\n') for sent in open('cat3Situations.txt' , 'r').readlines()]:
if sit != '':
if sit[0] != '#':
if sit.split()[0] == 'situation:':
situations.append( sit )
if sit.split()[0] == 'question:':
questions.append( sit )
cat3Situations = zip( situations, questions )
for name in [nam.strip('\n') for nam in open('names.txt', 'r').readlines()]:
if name != '':
if name[0] != '#':
names.append(name)
# are there at least two locations?
if len(locations) < 2:
print 'Not enough locations. Exiting program'
sys.exit(1)
# are there at least two items?
if len(items) < 2:
print 'Not enough items. Exiting program'
sys.exit(1)
# check if items/locations match their categories
if len(items) != len(item_categories):
print 'Number of items does not match number of item categories'
sys.exit(1)
if len(locations) != len(location_categories):
print 'Number of locations does not match number of location categories'
sys.exit(1)
# fillIn replaces the placeholder words LOCATION, ITEM and NAME in a
# sentence (optionally followed by one punctuation character) with
# actual entries from locations.txt, items.txt and names.txt.
def fillIn(sentence):
    """Return *sentence* with LOCATION/ITEM/NAME placeholders filled in."""
    # Shuffle in place so repeated placeholders pick distinct random
    # entries; note this permanently reorders the global lists.
    random.shuffle(items)
    random.shuffle(locations)
    random.shuffle(names)
    # One counter per placeholder kind, so no entry is used twice.
    sources = {'LOCATION': locations, 'ITEM': items, 'NAME': names}
    counters = {'LOCATION': 0, 'ITEM': 0, 'NAME': 0}
    filled = []
    for word in sentence.split(' '):
        if word in sources:
            # A bare placeholder.
            filled.append(sources[word][counters[word]])
            counters[word] += 1
        elif word[:-1] in sources:
            # A placeholder with trailing punctuation (comma, dot, ...).
            tag = word[:-1]
            filled.append(sources[tag][counters[tag]] + word[-1])
            counters[tag] += 1
        else:
            # An ordinary word: keep it unchanged.
            filled.append(word)
    # Re-join the word list into a sentence.
    return ' '.join(filled)
def fillInNew(sentence):
    """Return *sentence* with placeholders filled in, plus an explanation.

    Replaces LOCATION, ITEM, NAME, ITEM_CATEGORY and LOCATION_CATEGORY
    placeholders (optionally followed by one punctuation character)
    with randomly chosen entries.  Items and locations are picked via
    shuffled index lists so an entry and its category stay aligned.
    For every *_CATEGORY placeholder an explanation of the form
    "( category = concrete entry )" is appended to the result.
    """
    # list(range(...)) so shuffle works on Python 3 as well as 2.
    item_indices = list(range(len(items)))
    random.shuffle(item_indices)
    location_indices = list(range(len(locations)))
    random.shuffle(location_indices)
    itemCounter = 0
    locationCounter = 0
    nameCounter = 0
    finalSentence = []
    explanation = []
    placeholders = ('LOCATION', 'ITEM', 'NAME',
                    'ITEM_CATEGORY', 'LOCATION_CATEGORY')
    for word in sentence.split(' '):
        # Split off a single trailing punctuation character, if any.
        if word[:-1] in placeholders:
            tag, suffix = word[:-1], word[-1]
        else:
            tag, suffix = word, ''
        if tag == 'LOCATION':
            finalSentence.append(
                locations[location_indices[locationCounter]] + suffix)
            locationCounter += 1
        elif tag == 'ITEM':
            # Bug fix: trailing punctuation after ITEM used to be
            # dropped (unlike for LOCATION and NAME).
            finalSentence.append(items[item_indices[itemCounter]] + suffix)
            itemCounter += 1
        elif tag == 'NAME':
            finalSentence.append(names[nameCounter] + suffix)
            nameCounter += 1
        elif tag == 'ITEM_CATEGORY':
            finalSentence.append(
                item_categories[item_indices[itemCounter]] + suffix)
            explanation.extend(['(',
                                item_categories[item_indices[itemCounter]],
                                '=',
                                items[item_indices[itemCounter]],
                                ') '])
            itemCounter += 1
        elif tag == 'LOCATION_CATEGORY':
            finalSentence.append(
                location_categories[location_indices[locationCounter]]
                + suffix)
            # Bug fix: the explanation for a bare LOCATION_CATEGORY
            # used to name an *item* instead of the location that
            # belongs to the category (the punctuated branch already
            # did this correctly).
            explanation.extend([
                '(',
                location_categories[location_indices[locationCounter]],
                '=',
                locations[location_indices[locationCounter]],
                ') '])
            locationCounter += 1
        else:
            # An ordinary word: keep it unchanged.
            finalSentence.append(word)
    final_command = ' '.join(finalSentence)
    final_explanation = ' '.join(explanation)
    return final_command + " " + final_explanation
# the tests are defined here
# Category 1
def testOne():
    # Print one randomly generated, filled-in category 1 command.
    print '\n'
    print fillInNew( random.choice(cat1Sentences) )
    print '\n\n'
# Category 2
def testTwo():
    # Print one randomly generated, filled-in category 2 command.
    print '\n'
    print fillInNew( random.choice(cat2Sentences) )
    print '\n\n'
# Category 3
def testThree():
    # Print one randomly chosen (situation, question) pair, filled in.
    # print 'This is the situation for category 3, press enter for the question.\n\n'
    situation = random.choice( cat3Situations )
    # Lines look like 'situation: <text>' / 'question: <text>'; the
    # template to fill in is the part after the first ':'.
    print fillInNew( situation[0].split(':')[1] )
    # raw_input()
    print fillInNew( situation[1].split(':')[1] )
    print '\n\n'
############################################# MAIN LOOP ####################################
# ask the user which test this program should generate
def mainLoop():
answer = 'begin'
while True:
answer = raw_input('Which category do you want to do? 1, 2, 3 or q(uit)')
if answer == 'q':
print 'Exiting program.'
sys.exit(1)
elif answer == '1':
print 'Category 1:\n',
testOne()
elif answer == '2':
print 'Category 2:\n'
testTwo()
elif answer == '3':
print 'Category 3:\n'
testThree()
else:
print '\nNot a valid input, please try 1, 2, 3 or q(uit)\n'
# Script entry point: run the interactive menu when executed directly.
if __name__ == "__main__":
    mainLoop()
|
|
from __future__ import unicode_literals
from rbpkg.package_manager.errors import (DependencyConflictError,
PackageInstallError)
from rbpkg.package_manager.pending_install import PendingInstall
from rbpkg.repository.loaders import InMemoryPackageDataLoader, set_data_loader
from rbpkg.repository.package_bundle import PackageBundle
from rbpkg.repository.package_channel import PackageChannel
from rbpkg.repository.package_release import PackageRelease
from rbpkg.repository.package_rules import PackageRules
from rbpkg.testing.testcases import TestCase
class PendingInstallTests(TestCase):
"""Unit tests for rbpkg.package_manager.manager.PendingInstall."""
    def setUp(self):
        """Install an in-memory data loader for the repository layer."""
        super(PendingInstallTests, self).setUp()
        self.data_loader = InMemoryPackageDataLoader()
        set_data_loader(self.data_loader)
    def tearDown(self):
        """Unregister the in-memory data loader installed by setUp."""
        super(PendingInstallTests, self).tearDown()
        set_data_loader(None)
def test_add_package(self):
"""Testing PendingInstall.add_package"""
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules1 = PackageRules(channel=channel,
version_range='*',
package_type='rpm',
package_name='TestPackage',
systems=['*'])
rules2 = PackageRules(channel=channel,
version_range='*',
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules1, rules2]
pending_install.add_package(release, 'python')
self.assertEqual(
pending_install._bundle_infos,
[
{
'bundle': bundle,
'release': release,
'package_type': 'python',
'rules': rules2,
}
])
self.assertEqual(
pending_install._bundle_infos_map,
{
'MyPackage': {
'bundle': bundle,
'release': release,
'package_type': 'python',
'rules': rules2,
}
})
def test_add_package_without_available_rules(self):
"""Testing PendingInstall.add_package without available rules"""
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
self.assertRaises(
PackageInstallError,
lambda: pending_install.add_package(release, 'python'))
self.assertEqual(pending_install._bundle_infos, [])
self.assertEqual(pending_install._bundle_infos_map, {})
def test_add_package_without_matching_package_type(self):
"""Testing PendingInstall.add_package without matching package type"""
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(channel=channel,
version_range='*',
package_type='rpm',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
self.assertRaises(
PackageInstallError,
lambda: pending_install.add_package(release, 'python'))
self.assertEqual(pending_install._bundle_infos, [])
self.assertEqual(pending_install._bundle_infos_map, {})
def test_resolve_dependencies(self):
"""Testing PendingInstall.resolve_dependencies"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
recommended_dependencies=[
'DepPackage2>=1.0',
],
optional_dependencies=[
'DepPackage3>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 2)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(len(pending_install._bundle_infos_map), 2)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_recommended_deps(self):
"""Testing PendingInstall.resolve_dependencies with recommended
dependencies
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
'/packages/DepPackage2/index.json': {
'format_version': '1.0',
'name': 'DepPackage2',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage2/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage2',
'systems': ['*'],
},
],
},
})
pending_install = \
PendingInstall(PendingInstall.INSTALL_DEPS_RECOMMENDED)
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
recommended_dependencies=[
'DepPackage2>=1.0',
],
optional_dependencies=[
'DepPackage3>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 3)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(pending_install._bundle_infos[2]['bundle'].name,
'DepPackage2')
self.assertEqual(len(pending_install._bundle_infos_map), 3)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage2' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_all_deps(self):
"""Testing PendingInstall.resolve_dependencies with recommended
and optional dependencies
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
'/packages/DepPackage2/index.json': {
'format_version': '1.0',
'name': 'DepPackage2',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage2/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage2',
'systems': ['*'],
},
],
},
'/packages/DepPackage3/index.json': {
'format_version': '1.0',
'name': 'DepPackage3',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage3/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage3',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall(PendingInstall.INSTALL_DEPS_ALL)
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
recommended_dependencies=[
'DepPackage2>=1.0',
],
optional_dependencies=[
'DepPackage3>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 4)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(pending_install._bundle_infos[2]['bundle'].name,
'DepPackage2')
self.assertEqual(pending_install._bundle_infos[3]['bundle'].name,
'DepPackage3')
self.assertEqual(len(pending_install._bundle_infos_map), 4)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage2' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage3' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_release_only(self):
"""Testing PendingInstall.resolve_dependencies with considering
release packages only
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '2.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '2.0',
'current': False,
'visible': True,
'type': 'prerelease',
'manifest_file': '2.x.json',
},
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'type': 'release',
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/2.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '2.0',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall(PendingInstall.INSTALL_DEPS_ALL)
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 2)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(pending_install._bundle_infos[1]['release'].version,
'1.5')
self.assertEqual(len(pending_install._bundle_infos_map), 2)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_prerelease(self):
"""Testing PendingInstall.resolve_dependencies with considering
pre-release packages
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '2.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '2.0',
'current': False,
'visible': True,
'type': 'prerelease',
'manifest_file': '2.x.json',
},
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'type': 'release',
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/2.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '2.0',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall(PendingInstall.INSTALL_DEPS_ALL)
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(
bundle,
name='1.0.x',
channel_type=PackageChannel.CHANNEL_TYPE_PRERELEASE)
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 2)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(pending_install._bundle_infos[1]['release'].version,
'2.0')
self.assertEqual(len(pending_install._bundle_infos_map), 2)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_nested_deps(self):
"""Testing PendingInstall.resolve_dependencies with nested dependencies
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
'dependencies': {
'required': [
'DepPackage2>=1.0',
],
},
},
],
},
'/packages/DepPackage2/index.json': {
'format_version': '1.0',
'name': 'DepPackage2',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage2/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage2',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
pending_install.resolve_dependencies()
self.assertEqual(len(pending_install._bundle_infos), 3)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(pending_install._bundle_infos[1]['bundle'].name,
'DepPackage1')
self.assertEqual(pending_install._bundle_infos[2]['bundle'].name,
'DepPackage2')
self.assertEqual(len(pending_install._bundle_infos_map), 3)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage1' in pending_install._bundle_infos_map)
self.assertTrue('DepPackage2' in pending_install._bundle_infos_map)
def test_resolve_dependencies_with_version_conflicts(self):
"""Testing PendingInstall.resolve_dependencies with version conflicts
"""
self.data_loader.path_to_content.update({
'/packages/DepPackage1/index.json': {
'format_version': '1.0',
'name': 'DepPackage1',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage1/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
}
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage1',
'systems': ['*'],
'dependencies': {
'required': [
'DepPackage2>=1.5',
],
},
},
],
},
'/packages/DepPackage2/index.json': {
'format_version': '1.0',
'name': 'DepPackage2',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'current_version': '1.5',
'channels': [
{
'name': '1.x',
'created_timestamp': '2015-10-13T08:17:29.958569',
'last_updated_timestamp': '2015-10-14T08:17:29.958569',
'latest_version': '1.5',
'current': True,
'visible': True,
'manifest_file': '1.x.json',
},
],
},
'/packages/DepPackage2/1.x.json': {
'format_version': '1.0',
'created_timestamp': '2015-10-11T08:17:29.958569',
'last_updated_timestamp': '2015-10-12T08:17:29.958569',
'releases': [
{
'version': '1.5',
'type': 'stable',
'visible': True,
},
{
'version': '1.0',
'type': 'stable',
'visible': True,
},
],
'package_rules': [
{
'version_range': '*',
'package_type': 'python',
'package_name': 'DepPackage2',
'systems': ['*'],
},
],
},
})
pending_install = PendingInstall()
bundle = PackageBundle(name='MyPackage')
channel = PackageChannel(bundle, name='1.0.x')
channel._loaded = True
bundle._channels = [channel]
release = PackageRelease(channel=channel, version='1.0')
channel._releases = [release]
rules = PackageRules(
channel=channel,
version_range='*',
required_dependencies=[
'DepPackage1>=1.0',
'DepPackage2>=1.0,<1.5',
],
package_type='python',
package_name='TestPackage',
systems=['*'])
channel._package_rules = [rules]
pending_install.add_package(release, 'python')
self.assertRaises(DependencyConflictError,
pending_install.resolve_dependencies)
self.assertEqual(len(pending_install._bundle_infos), 1)
self.assertEqual(pending_install._bundle_infos[0]['bundle'].name,
'MyPackage')
self.assertEqual(len(pending_install._bundle_infos_map), 1)
self.assertTrue('MyPackage' in pending_install._bundle_infos_map)
    def test_get_install_order(self):
        """Testing PendingInstall.get_install_order

        MyPackage depends on DepPackage1, which depends on DepPackage2, so the
        install order must be leaf-first: DepPackage2, DepPackage1, MyPackage.
        """
        self.data_loader.path_to_content.update({
            '/packages/DepPackage1/index.json': {
                'format_version': '1.0',
                'name': 'DepPackage1',
                'created_timestamp': '2015-10-11T08:17:29.958569',
                'last_updated_timestamp': '2015-10-12T08:17:29.958569',
                'current_version': '1.5',
                'channels': [
                    {
                        'name': '1.x',
                        'created_timestamp': '2015-10-13T08:17:29.958569',
                        'last_updated_timestamp': '2015-10-14T08:17:29.958569',
                        'latest_version': '1.5',
                        'current': True,
                        'visible': True,
                        'manifest_file': '1.x.json',
                    },
                ],
            },
            '/packages/DepPackage1/1.x.json': {
                'format_version': '1.0',
                'created_timestamp': '2015-10-11T08:17:29.958569',
                'last_updated_timestamp': '2015-10-12T08:17:29.958569',
                'releases': [
                    {
                        'version': '1.5',
                        'type': 'stable',
                        'visible': True,
                    }
                ],
                'package_rules': [
                    {
                        'version_range': '*',
                        'package_type': 'python',
                        'package_name': 'DepPackage1',
                        'systems': ['*'],
                        # DepPackage1 -> DepPackage2 creates the chain that
                        # determines the install order below.
                        'dependencies': {
                            'required': [
                                'DepPackage2>=1.5',
                            ],
                        },
                    },
                ],
            },
            '/packages/DepPackage2/index.json': {
                'format_version': '1.0',
                'name': 'DepPackage2',
                'created_timestamp': '2015-10-11T08:17:29.958569',
                'last_updated_timestamp': '2015-10-12T08:17:29.958569',
                'current_version': '1.5',
                'channels': [
                    {
                        'name': '1.x',
                        'created_timestamp': '2015-10-13T08:17:29.958569',
                        'last_updated_timestamp': '2015-10-14T08:17:29.958569',
                        'latest_version': '1.5',
                        'current': True,
                        'visible': True,
                        'manifest_file': '1.x.json',
                    },
                ],
            },
            '/packages/DepPackage2/1.x.json': {
                'format_version': '1.0',
                'created_timestamp': '2015-10-11T08:17:29.958569',
                'last_updated_timestamp': '2015-10-12T08:17:29.958569',
                'releases': [
                    {
                        'version': '1.5',
                        'type': 'stable',
                        'visible': True,
                    },
                ],
                'package_rules': [
                    {
                        'version_range': '*',
                        'package_type': 'python',
                        'package_name': 'DepPackage2',
                        'systems': ['*'],
                    },
                ],
            },
        })
        pending_install = PendingInstall()
        bundle = PackageBundle(name='MyPackage')
        channel = PackageChannel(bundle, name='1.0.x')
        channel._loaded = True
        bundle._channels = [channel]
        release = PackageRelease(channel=channel, version='1.0')
        channel._releases = [release]
        rules = PackageRules(
            channel=channel,
            version_range='*',
            required_dependencies=[
                'DepPackage1>=1.0',
            ],
            package_type='python',
            package_name='TestPackage',
            systems=['*'])
        channel._package_rules = [rules]
        pending_install.add_package(release, 'python')
        pending_install.resolve_dependencies()
        install_order = pending_install.get_install_order()
        # Dependencies install before their dependents.
        self.assertEqual(len(install_order), 3)
        self.assertEqual(install_order[0]['bundle'].name, 'DepPackage2')
        self.assertEqual(install_order[1]['bundle'].name, 'DepPackage1')
        self.assertEqual(install_order[2]['bundle'].name, 'MyPackage')
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for warm_starting_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import six
from tensorflow.python.feature_column import feature_column_lib as fc
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import warm_starting_util as ws_util
# Short aliases for the initializer factories used throughout these tests.
ones = init_ops.ones_initializer
norms = init_ops.truncated_normal_initializer
rand = init_ops.random_uniform_initializer
zeros = init_ops.zeros_initializer
class WarmStartingUtilTest(test.TestCase):
def _write_vocab(self, string_values, file_name):
vocab_file = os.path.join(self.get_temp_dir(), file_name)
with open(vocab_file, "w") as f:
f.write("\n".join(string_values))
return vocab_file
def _write_checkpoint(self, sess):
self.evaluate(variables.global_variables_initializer())
saver = saver_lib.Saver()
ckpt_prefix = os.path.join(self.get_temp_dir(), "model")
saver.save(sess, ckpt_prefix, global_step=0)
def _create_prev_run_var(self,
var_name,
shape=None,
initializer=None,
partitioner=None):
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
var = variable_scope.get_variable(
var_name,
shape=shape,
initializer=initializer,
partitioner=partitioner)
self._write_checkpoint(sess)
if partitioner:
self.assertTrue(isinstance(var, variables.PartitionedVariable))
var = var._get_variable_list()
return var, self.evaluate(var)
def _create_prev_run_vars(self,
var_names,
shapes,
initializers):
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
all_vars = []
for var_name, shape, initializer in zip(var_names, shapes,
initializers):
all_vars.append(variable_scope.get_variable(
var_name,
shape=shape,
initializer=initializer))
self._write_checkpoint(sess)
return [self.evaluate(var) for var in all_vars]
def _create_dummy_inputs(self):
return {
"sc_int": array_ops.sparse_placeholder(dtypes.int32),
"sc_hash": array_ops.sparse_placeholder(dtypes.string),
"sc_keys": array_ops.sparse_placeholder(dtypes.string),
"sc_vocab": array_ops.sparse_placeholder(dtypes.string),
"real": array_ops.placeholder(dtypes.float32)
}
def _create_linear_model(self, feature_cols, partitioner):
cols_to_vars = {}
with variable_scope.variable_scope("", partitioner=partitioner):
# Create the variables.
fc.linear_model(
features=self._create_dummy_inputs(),
feature_columns=feature_cols,
units=1,
cols_to_vars=cols_to_vars)
# Return a dictionary mapping each column to its variable.
return cols_to_vars
def _assert_cols_to_vars(self, cols_to_vars, cols_to_expected_values, sess):
for col, expected_values in six.iteritems(cols_to_expected_values):
for i, var in enumerate(cols_to_vars[col]):
self.assertAllClose(expected_values[i], var.eval(sess))
  def testWarmStartVar(self):
    """Warm-starting a single unpartitioned variable restores its old value."""
    _, prev_val = self._create_prev_run_var(
        "fruit_weights", initializer=[[0.5], [1.], [1.5], [2.]])
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_weights = variable_scope.get_variable(
            "fruit_weights", initializer=[[0.], [0.], [0.], [0.]])
        prev_tensor_name, var = ws_util._get_var_info(fruit_weights)
        checkpoint_utils.init_from_checkpoint(self.get_temp_dir(),
                                              {prev_tensor_name: var})
        self.evaluate(variables.global_variables_initializer())
        # Initialization is overridden by the checkpointed value.
        self.assertAllClose(prev_val, fruit_weights.eval(sess))
  def testWarmStartVarPrevVarPartitioned(self):
    """Warm-start an unpartitioned variable from a partitioned checkpoint."""
    _, weights = self._create_prev_run_var(
        "fruit_weights",
        shape=[4, 1],
        initializer=[[0.5], [1.], [1.5], [2.]],
        partitioner=lambda shape, dtype: [2, 1])
    # Reassemble the two checkpointed partitions into the full value.
    prev_val = np.concatenate([weights[0], weights[1]], axis=0)
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_weights = variable_scope.get_variable(
            "fruit_weights", initializer=[[0.], [0.], [0.], [0.]])
        prev_tensor_name, var = ws_util._get_var_info(fruit_weights)
        checkpoint_utils.init_from_checkpoint(self.get_temp_dir(),
                                              {prev_tensor_name: var})
        self.evaluate(variables.global_variables_initializer())
        self.assertAllClose(prev_val, fruit_weights.eval(sess))
def testWarmStartVarCurrentVarPartitioned(self):
_, prev_val = self._create_prev_run_var(
"fruit_weights", initializer=[[0.5], [1.], [1.5], [2.]])
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_weights = variable_scope.get_variable(
"fruit_weights",
shape=[4, 1],
initializer=[[0.], [0.], [0.], [0.]],
partitioner=lambda shape, dtype: [2, 1])
self.assertTrue(
isinstance(fruit_weights, variables.PartitionedVariable))
prev_tensor_name, var = ws_util._get_var_info(fruit_weights)
checkpoint_utils.init_from_checkpoint(self.get_temp_dir(),
{prev_tensor_name: var})
self.evaluate(variables.global_variables_initializer())
fruit_weights = fruit_weights._get_variable_list()
new_val = np.concatenate(
[fruit_weights[0].eval(sess), fruit_weights[1].eval(sess)], axis=0)
self.assertAllClose(prev_val, new_val)
def testWarmStartVarBothVarsPartitioned(self):
_, weights = self._create_prev_run_var(
"old_scope/fruit_weights",
shape=[4, 1],
initializer=[[0.5], [1.], [1.5], [2.]],
partitioner=lambda shape, dtype: [2, 1])
prev_val = np.concatenate([weights[0], weights[1]], axis=0)
# New session and new graph.
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_weights = variable_scope.get_variable(
"new_scope/fruit_weights",
shape=[4, 1],
initializer=[[0.], [0.], [0.], [0.]],
partitioner=lambda shape, dtype: [2, 1])
self.assertTrue(
isinstance(fruit_weights, variables.PartitionedVariable))
prev_tensor_name, var = ws_util._get_var_info(
fruit_weights, prev_tensor_name="old_scope/fruit_weights")
checkpoint_utils.init_from_checkpoint(self.get_temp_dir(),
{prev_tensor_name: var})
self.evaluate(variables.global_variables_initializer())
fruit_weights = fruit_weights._get_variable_list()
new_val = np.concatenate(
[fruit_weights[0].eval(sess), fruit_weights[1].eval(sess)], axis=0)
self.assertAllClose(prev_val, new_val)
  def testWarmStartVarWithVocab(self):
    """Vocab remapping reorders old weights to match the new vocabulary."""
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    self._create_prev_run_var(
        "fruit_weights", initializer=[[0.5], [1.], [1.5], [2.]])
    # New vocab with elements in reverse order and one new element.
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry"], "new_vocab")
    # New session and new graph.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_weights = variable_scope.get_variable(
            "fruit_weights", initializer=[[0.], [0.], [0.], [0.], [0.]])
        ws_util._warm_start_var_with_vocab(fruit_weights, new_vocab_path, 5,
                                           self.get_temp_dir(), prev_vocab_path)
        self.evaluate(variables.global_variables_initializer())
        # Weights follow their vocab entries; the new entry stays at 0.
        self.assertAllClose([[2.], [1.5], [1.], [0.5], [0.]],
                            fruit_weights.eval(sess))
  def testWarmStartVarWithColumnVocab(self):
    """Vocab remapping along axis=1 reorders the output-layer columns."""
    prev_vocab_path = self._write_vocab(["apple", "orange"], "old_vocab")
    self._create_prev_run_var(
        "fruit_output_layer",
        initializer=[[0.5, 0.3], [1., 0.8], [1.5, 1.2], [2., 2.3]])
    # New vocab with elements in reverse order and one new element.
    new_vocab_path = self._write_vocab(["orange", "apple", "banana"],
                                       "new_vocab")
    # New session and new graph.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_output_layer = variable_scope.get_variable(
            "fruit_output_layer",
            initializer=[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.],
                         [0., 0., 0.]])
        # axis=1 remaps columns (class vocab) instead of rows.
        ws_util._warm_start_var_with_vocab(fruit_output_layer, new_vocab_path,
                                           current_vocab_size=3,
                                           prev_ckpt=self.get_temp_dir(),
                                           prev_vocab_path=prev_vocab_path,
                                           axis=1)
        self.evaluate(variables.global_variables_initializer())
        self.assertAllClose([[0.3, 0.5, 0.], [0.8, 1.0, 0.], [1.2, 1.5, 0.],
                             [2.3, 2., 0.]], fruit_output_layer.eval(sess))
  def testWarmStartVarWithVocabConstrainedOldVocabSize(self):
    """Only the first previous_vocab_size old entries are warm-started."""
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    self._create_prev_run_var(
        "fruit_weights", initializer=[[0.5], [1.], [1.5], [2.]])
    # New vocab with elements in reverse order and one new element.
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry"], "new_vocab")
    # New session and new graph.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_weights = variable_scope.get_variable(
            "fruit_weights", initializer=[[0.], [0.], [0.], [0.], [0.]])
        ws_util._warm_start_var_with_vocab(
            fruit_weights,
            new_vocab_path,
            5,
            self.get_temp_dir(),
            prev_vocab_path,
            previous_vocab_size=2)
        self.evaluate(variables.global_variables_initializer())
        # Old vocabulary limited to ['apple', 'banana'].
        self.assertAllClose([[0.], [0.], [1.], [0.5], [0.]],
                            fruit_weights.eval(sess))
  def testWarmStartVarWithVocabPrevVarPartitioned(self):
    """Vocab remapping works when the checkpointed variable was partitioned."""
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    self._create_prev_run_var(
        "fruit_weights",
        shape=[4, 1],
        initializer=[[0.5], [1.], [1.5], [2.]],
        partitioner=lambda shape, dtype: [2, 1])
    # New vocab with elements in reverse order and one new element.
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry"], "new_vocab")
    # New session and new graph.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_weights = variable_scope.get_variable(
            "fruit_weights", initializer=[[0.], [0.], [0.], [0.], [0.]])
        ws_util._warm_start_var_with_vocab(fruit_weights, new_vocab_path, 5,
                                           self.get_temp_dir(), prev_vocab_path)
        self.evaluate(variables.global_variables_initializer())
        # Same expected remap as the unpartitioned case.
        self.assertAllClose([[2.], [1.5], [1.], [0.5], [0.]],
                            fruit_weights.eval(sess))
  def testWarmStartVarWithColumnVocabPrevVarPartitioned(self):
    """Column (axis=1) vocab remapping from a partitioned checkpoint."""
    prev_vocab_path = self._write_vocab(["apple", "orange"], "old_vocab")
    self._create_prev_run_var(
        "fruit_output_layer",
        shape=[4, 2],
        initializer=[[0.5, 0.3], [1., 0.8], [1.5, 1.2], [2., 2.3]],
        partitioner=lambda shape, dtype: [2, 1])
    # New vocab with elements in reverse order and one new element.
    new_vocab_path = self._write_vocab(["orange", "apple", "banana"],
                                       "new_vocab")
    # New session and new graph.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        fruit_output_layer = variable_scope.get_variable(
            "fruit_output_layer",
            initializer=[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.],
                         [0., 0., 0.]])
        ws_util._warm_start_var_with_vocab(fruit_output_layer, new_vocab_path,
                                           current_vocab_size=3,
                                           prev_ckpt=self.get_temp_dir(),
                                           prev_vocab_path=prev_vocab_path,
                                           axis=1)
        self.evaluate(variables.global_variables_initializer())
        self.assertAllClose([[0.3, 0.5, 0.], [0.8, 1.0, 0.], [1.2, 1.5, 0.],
                             [2.3, 2., 0.]], fruit_output_layer.eval(sess))
def testWarmStartVarWithVocabCurrentVarPartitioned(self):
prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
"old_vocab")
self._create_prev_run_var(
"fruit_weights", initializer=[[0.5], [1.], [1.5], [2.]])
# New vocab with elements in reverse order and one new element.
new_vocab_path = self._write_vocab(
["orange", "guava", "banana", "apple", "raspberry"], "new_vocab")
# New session and new graph.
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_weights = variable_scope.get_variable(
"fruit_weights",
shape=[6, 1],
initializer=[[0.], [0.], [0.], [0.], [0.], [0.]],
partitioner=lambda shape, dtype: [2, 1])
ws_util._warm_start_var_with_vocab(
fruit_weights,
new_vocab_path,
5,
self.get_temp_dir(),
prev_vocab_path,
current_oov_buckets=1)
self.evaluate(variables.global_variables_initializer())
self.assertTrue(
isinstance(fruit_weights, variables.PartitionedVariable))
fruit_weights_vars = fruit_weights._get_variable_list()
self.assertAllClose([[2.], [1.5], [1.]],
fruit_weights_vars[0].eval(sess))
self.assertAllClose([[0.5], [0.], [0.]],
fruit_weights_vars[1].eval(sess))
def testWarmStartVarWithColumnVocabCurrentVarPartitioned(self):
prev_vocab_path = self._write_vocab(["apple", "orange"], "old_vocab")
self._create_prev_run_var(
"fruit_output_layer",
initializer=[[0.5, 0.3], [1., 0.8], [1.5, 1.2], [2., 2.3]])
# New vocab with elements in reverse order and one new element.
new_vocab_path = self._write_vocab(["orange", "apple", "banana"],
"new_vocab")
# New session and new graph.
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_output_layer = variable_scope.get_variable(
"fruit_output_layer",
shape=[4, 3],
initializer=[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.],
[0., 0., 0.]],
partitioner=lambda shape, dtype: [2, 1])
ws_util._warm_start_var_with_vocab(fruit_output_layer, new_vocab_path,
current_vocab_size=3,
prev_ckpt=self.get_temp_dir(),
prev_vocab_path=prev_vocab_path,
axis=1)
self.evaluate(variables.global_variables_initializer())
self.assertTrue(
isinstance(fruit_output_layer, variables.PartitionedVariable))
fruit_output_layer_vars = fruit_output_layer._get_variable_list()
self.assertAllClose([[0.3, 0.5, 0.], [0.8, 1.0, 0.]],
fruit_output_layer_vars[0].eval(sess))
self.assertAllClose([[1.2, 1.5, 0.], [2.3, 2., 0.]],
fruit_output_layer_vars[1].eval(sess))
def testWarmStartVarWithVocabBothVarsPartitioned(self):
prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
"old_vocab")
self._create_prev_run_var(
"fruit_weights",
shape=[4, 1],
initializer=[[0.5], [1.], [1.5], [2.]],
partitioner=lambda shape, dtype: [2, 1])
# New vocab with elements in reverse order and two new elements.
new_vocab_path = self._write_vocab(
["orange", "guava", "banana", "apple", "raspberry",
"blueberry"], "new_vocab")
# New session and new graph.
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_weights = variable_scope.get_variable(
"fruit_weights",
shape=[6, 1],
initializer=[[0.], [0.], [0.], [0.], [0.], [0.]],
partitioner=lambda shape, dtype: [2, 1])
ws_util._warm_start_var_with_vocab(fruit_weights, new_vocab_path, 6,
self.get_temp_dir(), prev_vocab_path)
self.evaluate(variables.global_variables_initializer())
self.assertTrue(
isinstance(fruit_weights, variables.PartitionedVariable))
fruit_weights_vars = fruit_weights._get_variable_list()
self.assertAllClose([[2.], [1.5], [1.]],
fruit_weights_vars[0].eval(sess))
self.assertAllClose([[0.5], [0.], [0.]],
fruit_weights_vars[1].eval(sess))
def testWarmStartVarWithColumnVocabBothVarsPartitioned(self):
prev_vocab_path = self._write_vocab(["apple", "orange"], "old_vocab")
self._create_prev_run_var(
"fruit_output_layer",
shape=[4, 2],
initializer=[[0.5, 0.3], [1., 0.8], [1.5, 1.2], [2., 2.3]],
partitioner=lambda shape, dtype: [2, 1])
# New vocab with elements in reverse order and one new element.
new_vocab_path = self._write_vocab(["orange", "apple", "banana"],
"new_vocab")
# New session and new graph.
with ops.Graph().as_default() as g:
with self.session(graph=g) as sess:
fruit_output_layer = variable_scope.get_variable(
"fruit_output_layer",
shape=[4, 3],
initializer=[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.],
[0., 0., 0.]],
partitioner=lambda shape, dtype: [2, 1])
ws_util._warm_start_var_with_vocab(fruit_output_layer, new_vocab_path,
current_vocab_size=3,
prev_ckpt=self.get_temp_dir(),
prev_vocab_path=prev_vocab_path,
axis=1)
self.evaluate(variables.global_variables_initializer())
self.assertTrue(
isinstance(fruit_output_layer, variables.PartitionedVariable))
fruit_output_layer_vars = fruit_output_layer._get_variable_list()
self.assertAllClose([[0.3, 0.5, 0.], [0.8, 1.0, 0.]],
fruit_output_layer_vars[0].eval(sess))
self.assertAllClose([[1.2, 1.5, 0.], [2.3, 2., 0.]],
fruit_output_layer_vars[1].eval(sess))
  def testWarmStart_ListOfVariables(self):
    """warm_start accepts a list of variable objects."""
    # Save checkpoint from which to warm-start.
    _, prev_int_val = self._create_prev_run_var("v1", shape=[10, 1],
                                                initializer=ones())
    # Verify we initialized the values correctly.
    self.assertAllEqual(np.ones([10, 1]), prev_int_val)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        # Initialize with zeros.
        var = variable_scope.get_variable(
            "v1",
            shape=[10, 1],
            initializer=zeros())
        ws_util.warm_start(self.get_temp_dir(), vars_to_warm_start=[var])
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started (init overridden to ones).
        self.assertAllEqual(var.eval(), prev_int_val)
  def testWarmStart_ListOfStrings(self):
    """warm_start accepts a list of variable-name strings."""
    # Save checkpoint from which to warm-start.
    _, prev_int_val = self._create_prev_run_var("v1", shape=[10, 1],
                                                initializer=ones())
    # Verify we initialized the values correctly.
    self.assertAllEqual(np.ones([10, 1]), prev_int_val)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        # Initialize with zeros.
        var = variable_scope.get_variable(
            "v1",
            shape=[10, 1],
            initializer=zeros())
        ws_util.warm_start(self.get_temp_dir(), vars_to_warm_start=["v1"])
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started (init overridden to ones).
        self.assertAllEqual(var.eval(), prev_int_val)
  def testWarmStart_ListOfRegexes(self):
    """vars_to_warm_start regexes select exactly the matching variables."""
    # Save checkpoint from which to warm-start.
    [prev_v1_val, prev_v1_momentum_val,
     prev_v2_val, _] = self._create_prev_run_vars(
         var_names=["v1", "v1/Momentum", "v2", "v2/Momentum"],
         shapes=[[10, 1]] * 4,
         initializers=[ones()] * 4)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        # Initialize with zeros.
        v1 = variable_scope.get_variable(
            "v1",
            shape=[10, 1],
            initializer=zeros())
        v1_momentum = variable_scope.get_variable(
            "v1/Momentum",
            shape=[10, 1],
            initializer=zeros())
        v2 = variable_scope.get_variable(
            "v2",
            shape=[10, 1],
            initializer=zeros())
        v2_momentum = variable_scope.get_variable(
            "v2/Momentum",
            shape=[10, 1],
            initializer=zeros())
        ws_util.warm_start(self.get_temp_dir(),
                           # This warm-starts both v1 and v1/Momentum, but only
                           # v2 (and not v2/Momentum).
                           vars_to_warm_start=["v1", "v2[^/]"])
        self.evaluate(variables.global_variables_initializer())
        # Verify the selection of weights were correctly warm-started (init
        # overridden to ones).
        self.assertAllEqual(v1.eval(), prev_v1_val)
        self.assertAllEqual(v1_momentum.eval(), prev_v1_momentum_val)
        self.assertAllEqual(v2.eval(), prev_v2_val)
        # v2/Momentum did not match any regex, so it keeps its zeros init.
        self.assertAllEqual(v2_momentum.eval(), np.zeros([10, 1]))
  def testWarmStart_SparseColumnIntegerized(self):
    """Warm-start the weights of an identity categorical column."""
    # Create feature column.
    sc_int = fc.categorical_column_with_identity("sc_int", num_buckets=10)
    # Save checkpoint from which to warm-start.
    _, prev_int_val = self._create_prev_run_var(
        "linear_model/sc_int/weights", shape=[10, 1], initializer=ones())
    # Verify we initialized the values correctly.
    self.assertAllEqual(np.ones([10, 1]), prev_int_val)
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_int], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {sc_int: [np.zeros([10, 1])]},
                                  sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_int], partitioner)
        ws_util.warm_start(self.get_temp_dir(), vars_to_warm_start=".*sc_int.*")
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars, {sc_int: [prev_int_val]}, sess)
  def testWarmStart_SparseColumnHashed(self):
    """Warm-start the weights of a hash-bucket categorical column."""
    # Create feature column.
    sc_hash = fc.categorical_column_with_hash_bucket(
        "sc_hash", hash_bucket_size=15)
    # Save checkpoint from which to warm-start.
    _, prev_hash_val = self._create_prev_run_var(
        "linear_model/sc_hash/weights", shape=[15, 1], initializer=norms())
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_hash], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {sc_hash: [np.zeros([15, 1])]},
                                  sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_hash], partitioner)
        ws_util.warm_start(
            self.get_temp_dir(), vars_to_warm_start=".*sc_hash.*")
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars, {sc_hash: [prev_hash_val]},
                                  sess)
  def testWarmStart_SparseColumnVocabulary(self):
    """Warm-start a vocabulary-file column without explicit VocabInfo."""
    # Create vocab for sparse column "sc_vocab".
    vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                   "vocab")
    # Create feature column.
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=vocab_path, vocabulary_size=4)
    # Save checkpoint from which to warm-start.
    _, prev_vocab_val = self._create_prev_run_var(
        "linear_model/sc_vocab/weights", shape=[4, 1], initializer=ones())
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [np.zeros([4, 1])]},
                                  sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        # Since old vocab is not explicitly set in WarmStartSettings, the old
        # vocab is assumed to be same as new vocab.
        ws_util.warm_start(
            self.get_temp_dir(), vars_to_warm_start=".*sc_vocab.*")
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [prev_vocab_val]},
                                  sess)
  def testWarmStart_ExplicitCheckpointFile(self):
    """warm_start accepts a checkpoint file prefix, not just a directory."""
    # Create vocab for sparse column "sc_vocab".
    vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                   "vocab")
    # Create feature column.
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=vocab_path, vocabulary_size=4)
    # Save checkpoint from which to warm-start.
    _, prev_vocab_val = self._create_prev_run_var(
        "linear_model/sc_vocab/weights", shape=[4, 1], initializer=ones())
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [np.zeros([4, 1])]},
                                  sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        # Since old vocab is not explicitly set in WarmStartSettings, the old
        # vocab is assumed to be same as new vocab.
        ws_util.warm_start(
            # Explicitly provide the file prefix instead of just the dir.
            # "model-0" matches the prefix written by _write_checkpoint.
            os.path.join(self.get_temp_dir(), "model-0"),
            vars_to_warm_start=".*sc_vocab.*")
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [prev_vocab_val]},
                                  sess)
  def testWarmStart_SparseColumnVocabularyConstrainedVocabSizes(self):
    """VocabInfo with constrained old/new vocab sizes remaps only the overlap."""
    # Create old vocabulary, and use a size smaller than the total number of
    # entries.
    old_vocab_path = self._write_vocab(["apple", "guava", "banana"],
                                       "old_vocab")
    old_vocab_size = 2  # ['apple', 'guava']
    # Create new vocab for sparse column "sc_vocab".
    current_vocab_path = self._write_vocab(
        ["apple", "banana", "guava", "orange"], "current_vocab")
    # Create feature column.  Only use 2 of the actual entries, resulting in
    # ['apple', 'banana'] for the new vocabulary.
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=current_vocab_path, vocabulary_size=2)
    # Save checkpoint from which to warm-start.
    self._create_prev_run_var(
        "linear_model/sc_vocab/weights", shape=[2, 1], initializer=ones())
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [np.zeros([2, 1])]},
                                  sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([sc_vocab], partitioner)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=old_vocab_path,
            old_vocab_size=old_vocab_size)
        ws_util.warm_start(
            ckpt_to_initialize_from=self.get_temp_dir(),
            vars_to_warm_start=".*sc_vocab.*",
            var_name_to_vocab_info={
                "linear_model/sc_vocab/weights": vocab_info
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.  'banana' isn't in the
        # first two entries of the old vocabulary, so it's newly initialized.
        self._assert_cols_to_vars(cols_to_vars, {sc_vocab: [[[1], [0]]]}, sess)
  def testWarmStart_BucketizedColumn(self):
    """Warm-starts the linear weights of a bucketized numeric column.

    Writes a checkpoint containing "linear_model/real_bucketized/weights"
    (shape [5, 1]: 4 boundaries yield 5 buckets), then checks that a fresh
    linear model is all zeros without warm-starting and matches the
    checkpointed values with warm-starting.
    """
    # Create feature column.
    real = fc.numeric_column("real")
    real_bucket = fc.bucketized_column(real, boundaries=[0., 1., 2., 3.])
    # Save checkpoint from which to warm-start.
    _, prev_bucket_val = self._create_prev_run_var(
        "linear_model/real_bucketized/weights",
        shape=[5, 1],
        initializer=norms())
    # Identity partitioner: one partition per dimension (no sharding).
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([real_bucket], partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, the weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars,
                                  {real_bucket: [np.zeros([5, 1])]}, sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model([real_bucket], partitioner)
        ws_util.warm_start(
            self.get_temp_dir(), vars_to_warm_start=".*real_bucketized.*")
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars,
                                  {real_bucket: [prev_bucket_val]}, sess)
  def testWarmStart_MultipleCols(self):
    """Warm-starts every weight of a multi-column linear model at once.

    A checkpoint is written with one weight variable per feature column
    (identity, hash, keys, vocab, bucketized, crossed) plus the bias. A
    fresh model is then warm-started with the default (match-everything)
    settings, supplying a VocabInfo only for the vocab column (whose old
    and new vocab files are identical here), and every variable —
    including the bias — must match its checkpointed value.
    """
    # Create vocab for sparse column "sc_vocab".
    vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                   "vocab")
    # Create feature columns.
    sc_int = fc.categorical_column_with_identity("sc_int", num_buckets=10)
    sc_hash = fc.categorical_column_with_hash_bucket(
        "sc_hash", hash_bucket_size=15)
    sc_keys = fc.categorical_column_with_vocabulary_list(
        "sc_keys", vocabulary_list=["a", "b", "c", "e"])
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=vocab_path, vocabulary_size=4)
    real = fc.numeric_column("real")
    real_bucket = fc.bucketized_column(real, boundaries=[0., 1., 2., 3.])
    cross = fc.crossed_column([sc_keys, sc_vocab], hash_bucket_size=20)
    all_linear_cols = [sc_int, sc_hash, sc_keys, sc_vocab, real_bucket, cross]
    # Save checkpoint from which to warm-start. Also create a bias variable,
    # so we can check that it's also warm-started.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        sc_int_weights = variable_scope.get_variable(
            "linear_model/sc_int/weights", shape=[10, 1], initializer=ones())
        sc_hash_weights = variable_scope.get_variable(
            "linear_model/sc_hash/weights", shape=[15, 1], initializer=norms())
        sc_keys_weights = variable_scope.get_variable(
            "linear_model/sc_keys/weights", shape=[4, 1], initializer=rand())
        sc_vocab_weights = variable_scope.get_variable(
            "linear_model/sc_vocab/weights", shape=[4, 1], initializer=ones())
        real_bucket_weights = variable_scope.get_variable(
            "linear_model/real_bucketized/weights",
            shape=[5, 1],
            initializer=norms())
        cross_weights = variable_scope.get_variable(
            "linear_model/sc_keys_X_sc_vocab/weights",
            shape=[20, 1],
            initializer=rand())
        bias = variable_scope.get_variable(
            "linear_model/bias_weights",
            shape=[1],
            initializer=rand())
        self._write_checkpoint(sess)
        # Snapshot the checkpointed values to compare against after
        # warm-starting below.
        (prev_int_val, prev_hash_val, prev_keys_val, prev_vocab_val,
         prev_bucket_val, prev_cross_val, prev_bias_val) = sess.run([
             sc_int_weights, sc_hash_weights, sc_keys_weights, sc_vocab_weights,
             real_bucket_weights, cross_weights, bias
         ])
    # Identity partitioner: one partition per dimension (no sharding).
    partitioner = lambda shape, dtype: [1] * len(shape)
    # New graph, new session WITHOUT warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model(all_linear_cols, partitioner)
        self.evaluate(variables.global_variables_initializer())
        # Without warm-starting, all weights should be initialized using default
        # initializer (which is init_ops.zeros_initializer).
        self._assert_cols_to_vars(cols_to_vars, {
            sc_int: [np.zeros([10, 1])],
            sc_hash: [np.zeros([15, 1])],
            sc_keys: [np.zeros([4, 1])],
            sc_vocab: [np.zeros([4, 1])],
            real_bucket: [np.zeros([5, 1])],
            cross: [np.zeros([20, 1])],
        }, sess)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model(all_linear_cols, partitioner)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=vocab_path)
        ws_util.warm_start(
            self.get_temp_dir(),
            var_name_to_vocab_info={
                "linear_model/sc_vocab/weights": vocab_info
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started.
        self._assert_cols_to_vars(cols_to_vars, {
            sc_int: [prev_int_val],
            sc_hash: [prev_hash_val],
            sc_keys: [prev_keys_val],
            sc_vocab: [prev_vocab_val],
            real_bucket: [prev_bucket_val],
            cross: [prev_cross_val],
            "bias": [prev_bias_val],
        }, sess)
  def testWarmStartMoreSettings(self):
    """Exercises regex filtering, name remapping and vocab remap together.

    With variables partitioned into two slices, sc_keys is warm-started
    from a differently-named checkpoint variable ("some_other_name"),
    sc_vocab is warm-started through an old->new vocab remapping (new
    vocab reverses the old and adds two words, so the remapped weights
    are reversed and padded with zeros), and sc_hash is excluded by the
    vars_to_warm_start regex so it stays at its zero initialization.
    """
    # Create old and new vocabs for sparse column "sc_vocab".
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry",
         "blueberry"], "new_vocab")
    # Create feature columns.
    sc_hash = fc.categorical_column_with_hash_bucket(
        "sc_hash", hash_bucket_size=15)
    sc_keys = fc.categorical_column_with_vocabulary_list(
        "sc_keys", vocabulary_list=["a", "b", "c", "e"])
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=new_vocab_path, vocabulary_size=6)
    all_linear_cols = [sc_hash, sc_keys, sc_vocab]
    # Save checkpoint from which to warm-start.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        variable_scope.get_variable(
            "linear_model/sc_hash/weights", shape=[15, 1], initializer=norms())
        sc_keys_weights = variable_scope.get_variable(
            "some_other_name", shape=[4, 1], initializer=rand())
        variable_scope.get_variable(
            "linear_model/sc_vocab/weights",
            initializer=[[0.5], [1.], [2.], [3.]])
        self._write_checkpoint(sess)
        prev_keys_val = self.evaluate(sc_keys_weights)
    def _partitioner(shape, dtype):  # pylint:disable=unused-argument
      # Partition each var into 2 equal slices.
      partitions = [1] * len(shape)
      partitions[0] = min(2, shape.dims[0].value)
      return partitions
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model(all_linear_cols, _partitioner)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=prev_vocab_path)
        ws_util.warm_start(
            self.get_temp_dir(),
            vars_to_warm_start=".*(sc_keys|sc_vocab).*",
            var_name_to_vocab_info={
                ws_util._infer_var_name(cols_to_vars[sc_vocab]): vocab_info
            },
            var_name_to_prev_var_name={
                ws_util._infer_var_name(cols_to_vars[sc_keys]):
                    "some_other_name"
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started. Var corresponding to
        # sc_hash should not be warm-started. Var corresponding to sc_vocab
        # should be correctly warm-started after vocab remapping.
        self._assert_cols_to_vars(cols_to_vars, {
            sc_keys:
                np.split(prev_keys_val, 2),
            sc_hash: [np.zeros([8, 1]), np.zeros([7, 1])],
            sc_vocab: [
                np.array([[3.], [2.], [1.]]),
                np.array([[0.5], [0.], [0.]])
            ]
        }, sess)
  def testWarmStartMoreSettingsNoPartitioning(self):
    """Same scenario as testWarmStartMoreSettings but without partitioning.

    Variables are created unpartitioned (partitioner=None), so each
    expected value below is a single full array rather than two slices.
    """
    # Create old and new vocabs for sparse column "sc_vocab".
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry",
         "blueberry"], "new_vocab")
    # Create feature columns.
    sc_hash = fc.categorical_column_with_hash_bucket(
        "sc_hash", hash_bucket_size=15)
    sc_keys = fc.categorical_column_with_vocabulary_list(
        "sc_keys", vocabulary_list=["a", "b", "c", "e"])
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=new_vocab_path, vocabulary_size=6)
    all_linear_cols = [sc_hash, sc_keys, sc_vocab]
    # Save checkpoint from which to warm-start.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        variable_scope.get_variable(
            "linear_model/sc_hash/weights", shape=[15, 1], initializer=norms())
        sc_keys_weights = variable_scope.get_variable(
            "some_other_name", shape=[4, 1], initializer=rand())
        variable_scope.get_variable(
            "linear_model/sc_vocab/weights",
            initializer=[[0.5], [1.], [2.], [3.]])
        self._write_checkpoint(sess)
        prev_keys_val = self.evaluate(sc_keys_weights)
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model(all_linear_cols,
                                                 partitioner=None)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=prev_vocab_path)
        ws_util.warm_start(
            self.get_temp_dir(),
            vars_to_warm_start=".*(sc_keys|sc_vocab).*",
            var_name_to_vocab_info={
                ws_util._infer_var_name(cols_to_vars[sc_vocab]): vocab_info
            },
            var_name_to_prev_var_name={
                ws_util._infer_var_name(cols_to_vars[sc_keys]):
                    "some_other_name"
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started. Var corresponding to
        # sc_hash should not be warm-started. Var corresponding to sc_vocab
        # should be correctly warm-started after vocab remapping.
        self._assert_cols_to_vars(cols_to_vars, {
            sc_keys: [prev_keys_val],
            sc_hash: [np.zeros([15, 1])],
            sc_vocab: [np.array([[3.], [2.], [1.], [0.5], [0.], [0.]])]
        }, sess)
  def testWarmStartVarsToWarmstartIsNone(self):
    """vars_to_warm_start=None restricts warm-starting to vocab-info vars.

    With the explicit None, only the variable listed in
    var_name_to_vocab_info (sc_vocab) is warm-started; the
    var_name_to_prev_var_name entry for sc_keys is ignored and both
    sc_keys and sc_hash keep their zero initialization.
    """
    # Create old and new vocabs for sparse column "sc_vocab".
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry",
         "blueberry"], "new_vocab")
    # Create feature columns.
    sc_hash = fc.categorical_column_with_hash_bucket(
        "sc_hash", hash_bucket_size=15)
    sc_keys = fc.categorical_column_with_vocabulary_list(
        "sc_keys", vocabulary_list=["a", "b", "c", "e"])
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=new_vocab_path, vocabulary_size=6)
    all_linear_cols = [sc_hash, sc_keys, sc_vocab]
    # Save checkpoint from which to warm-start.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        variable_scope.get_variable(
            "linear_model/sc_hash/weights", shape=[15, 1], initializer=norms())
        variable_scope.get_variable(
            "some_other_name", shape=[4, 1], initializer=rand())
        variable_scope.get_variable(
            "linear_model/sc_vocab/weights",
            initializer=[[0.5], [1.], [2.], [3.]])
        self._write_checkpoint(sess)
    def _partitioner(shape, dtype):  # pylint:disable=unused-argument
      # Partition each var into 2 equal slices.
      partitions = [1] * len(shape)
      partitions[0] = min(2, shape.dims[0].value)
      return partitions
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = self._create_linear_model(all_linear_cols, _partitioner)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=prev_vocab_path)
        ws_util.warm_start(
            self.get_temp_dir(),
            # The special value of None here will ensure that only the variable
            # specified in var_name_to_vocab_info (sc_vocab embedding) is
            # warm-started.
            vars_to_warm_start=None,
            var_name_to_vocab_info={
                ws_util._infer_var_name(cols_to_vars[sc_vocab]): vocab_info
            },
            # Even though this is provided, the None value for
            # vars_to_warm_start overrides the logic, and this will not be
            # warm-started.
            var_name_to_prev_var_name={
                ws_util._infer_var_name(cols_to_vars[sc_keys]):
                    "some_other_name"
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started. Var corresponding to
        # sc_vocab should be correctly warm-started after vocab remapping,
        # and neither of the other two should be warm-started.
        self._assert_cols_to_vars(cols_to_vars, {
            sc_keys: [np.zeros([2, 1]), np.zeros([2, 1])],
            sc_hash: [np.zeros([8, 1]), np.zeros([7, 1])],
            sc_vocab: [
                np.array([[3.], [2.], [1.]]),
                np.array([[0.5], [0.], [0.]])
            ]
        }, sess)
  def testWarmStartEmbeddingColumn(self):
    """Warm-starts an embedding column's weights through vocab remapping.

    The checkpointed [4, 2] embedding is remapped from the old vocab to a
    reversed, larger new vocab; rows present in both are carried over (in
    the new order) and the two new rows come from the backup initializer,
    a constant-valued uniform (minval == maxval == 0.42) chosen so the
    "random" fill is deterministic and assertable.
    """
    # Create old and new vocabs for embedding column "sc_vocab".
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry", "blueberry"],
        "new_vocab")
    # Save checkpoint from which to warm-start.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        variable_scope.get_variable(
            "input_layer/sc_vocab_embedding/embedding_weights",
            initializer=[[0.5, 0.4], [1., 1.1], [2., 2.2], [3., 3.3]])
        self._write_checkpoint(sess)
    def _partitioner(shape, dtype):  # pylint:disable=unused-argument
      # Partition each var into 2 equal slices.
      partitions = [1] * len(shape)
      partitions[0] = min(2, shape.dims[0].value)
      return partitions
    # Create feature columns.
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=new_vocab_path, vocabulary_size=6)
    emb_vocab_column = fc.embedding_column(
        categorical_column=sc_vocab,
        dimension=2)
    all_deep_cols = [emb_vocab_column]
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = {}
        with variable_scope.variable_scope("", partitioner=_partitioner):
          # Create the variables.
          fc.input_layer(
              features=self._create_dummy_inputs(),
              feature_columns=all_deep_cols,
              cols_to_vars=cols_to_vars)
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=prev_vocab_path,
            # Can't use constant_initializer with load_and_remap. In practice,
            # use a truncated normal initializer.
            backup_initializer=init_ops.random_uniform_initializer(
                minval=0.42, maxval=0.42))
        ws_util.warm_start(
            self.get_temp_dir(),
            var_name_to_vocab_info={
                ws_util._infer_var_name(cols_to_vars[emb_vocab_column]):
                    vocab_info
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started. Var corresponding to
        # emb_vocab_column should be correctly warm-started after vocab
        # remapping. Missing values are filled in with the EmbeddingColumn's
        # initializer.
        self._assert_cols_to_vars(
            cols_to_vars, {
                emb_vocab_column: [
                    np.array([[3., 3.3], [2., 2.2], [1., 1.1]]),
                    np.array([[0.5, 0.4], [0.42, 0.42], [0.42, 0.42]])
                ]
            }, sess)
  def testWarmStartEmbeddingColumnLinearModel(self):
    """Warm-starts an embedding column used inside a linear model.

    Two checkpointed variables are involved: the [4, 2] embedding table
    (which goes through vocab remapping with a deterministic constant
    backup initializer) and the [2, 1] linear weights on top of the
    embedding (matched by the vars_to_warm_start regex and restored
    verbatim, split across the two partitions).
    """
    # Create old and new vocabs for embedding column "sc_vocab".
    prev_vocab_path = self._write_vocab(["apple", "banana", "guava", "orange"],
                                        "old_vocab")
    new_vocab_path = self._write_vocab(
        ["orange", "guava", "banana", "apple", "raspberry", "blueberry"],
        "new_vocab")
    # Save checkpoint from which to warm-start.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        variable_scope.get_variable(
            "linear_model/sc_vocab_embedding/embedding_weights",
            initializer=[[0.5, 0.4], [1., 1.1], [2., 2.2], [3., 3.3]])
        variable_scope.get_variable(
            "linear_model/sc_vocab_embedding/weights",
            initializer=[[0.69], [0.71]])
        self._write_checkpoint(sess)
    def _partitioner(shape, dtype):  # pylint:disable=unused-argument
      # Partition each var into 2 equal slices.
      partitions = [1] * len(shape)
      partitions[0] = min(2, shape.dims[0].value)
      return partitions
    # Create feature columns.
    sc_vocab = fc.categorical_column_with_vocabulary_file(
        "sc_vocab", vocabulary_file=new_vocab_path, vocabulary_size=6)
    emb_vocab = fc.embedding_column(
        categorical_column=sc_vocab,
        dimension=2)
    all_deep_cols = [emb_vocab]
    # New graph, new session with warm-starting.
    with ops.Graph().as_default() as g:
      with self.session(graph=g) as sess:
        cols_to_vars = {}
        with variable_scope.variable_scope("", partitioner=_partitioner):
          # Create the variables.
          fc.linear_model(
              features=self._create_dummy_inputs(),
              feature_columns=all_deep_cols,
              cols_to_vars=cols_to_vars)
        # Construct the vocab_info for the embedding weight.
        vocab_info = ws_util.VocabInfo(
            new_vocab=sc_vocab.vocabulary_file,
            new_vocab_size=sc_vocab.vocabulary_size,
            num_oov_buckets=sc_vocab.num_oov_buckets,
            old_vocab=prev_vocab_path,
            # Can't use constant_initializer with load_and_remap. In practice,
            # use a truncated normal initializer.
            backup_initializer=init_ops.random_uniform_initializer(
                minval=0.42, maxval=0.42))
        ws_util.warm_start(
            self.get_temp_dir(),
            vars_to_warm_start=".*sc_vocab.*",
            var_name_to_vocab_info={
                "linear_model/sc_vocab_embedding/embedding_weights": vocab_info
            })
        self.evaluate(variables.global_variables_initializer())
        # Verify weights were correctly warm-started. Var corresponding to
        # emb_vocab should be correctly warm-started after vocab remapping.
        # Missing values are filled in with the EmbeddingColumn's initializer.
        self._assert_cols_to_vars(
            cols_to_vars,
            {
                emb_vocab: [
                    # linear weights part 0.
                    np.array([[0.69]]),
                    # linear weights part 1.
                    np.array([[0.71]]),
                    # embedding_weights part 0.
                    np.array([[3., 3.3], [2., 2.2], [1., 1.1]]),
                    # embedding_weights part 1.
                    np.array([[0.5, 0.4], [0.42, 0.42], [0.42, 0.42]])
                ]
            },
            sess)
  def testErrorConditions(self):
    """Invalid warm-start inputs raise TypeError/ValueError.

    Covers two failure modes: passing a list of partitioned variables to
    _warm_start_var_with_vocab (TypeError), and naming variables in
    var_name_to_vocab_info / var_name_to_prev_var_name that do not exist
    in the graph (ValueError).
    """
    x = variable_scope.get_variable(
        "x",
        shape=[4, 1],
        initializer=ones(),
        partitioner=lambda shape, dtype: [2, 1])
    # List of PartitionedVariable is invalid type when warm-starting with vocab.
    self.assertRaises(TypeError, ws_util._warm_start_var_with_vocab, [x],
                      "/tmp", 5, "/tmp", "/tmp")
    # Unused variable names raises ValueError.
    with ops.Graph().as_default():
      with self.cached_session() as sess:
        x = variable_scope.get_variable(
            "x",
            shape=[4, 1],
            initializer=ones(),
            partitioner=lambda shape, dtype: [2, 1])
        self._write_checkpoint(sess)
        self.assertRaises(
            ValueError,
            ws_util.warm_start,
            self.get_temp_dir(),
            var_name_to_vocab_info={"y": ws_util.VocabInfo("", 1, 0, "")})
        self.assertRaises(
            ValueError,
            ws_util.warm_start,
            self.get_temp_dir(),
            var_name_to_prev_var_name={"y": "y2"})
# Run the warm-starting test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
|
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
try:
# future >= 0.12
from future.backports.test.support import import_fresh_module
except ImportError:
from future.standard_library.test.support import import_fresh_module
from io import StringIO
import os
import unittest
import warnings
from tempfile import mkstemp
from skbio.io import (DuplicateRegistrationError, UnprovenFormatWarning,
UnrecognizedFormatError, ArgumentOverrideWarning)
from skbio.io._registry import empty_file_sniffer
class TestClass(object):
    """Simple value object used as a target type for registry tests.

    Equality is deliberately strict: two instances compare equal only
    when they are of exactly the same class (not a subclass) and wrap
    equal lists, so tests can detect "knockoff" instances returned by
    readers.
    """
    def __init__(self, l):
        self.list = l
    def __eq__(self, other):
        # They are only equal when the class is EXACTLY the same. We don't want
        # readers to return knockoff instances...
        return self.__class__ is other.__class__ and self.list == other.list
    def __ne__(self, other):
        # Python 2 (which this file supports via ``future``) does not derive
        # ``!=`` from __eq__, so define it explicitly for consistent behavior
        # on both interpreters.
        return not self == other
    def __repr__(self):
        return "%s(%s)" % (str(self.__class__.__name__), str(self.list))
class TestClassA(TestClass):
    """Distinct subclass of TestClass; by TestClass's strict equality it is
    never equal to TestClass or to its sibling TestClassB."""
class TestClassB(TestClass):
    """Second distinct subclass of TestClass, used alongside TestClassA to
    verify per-class registry dispatch."""
class RegistryTest(unittest.TestCase):
    """Base fixture: a freshly imported registry module plus two scratch files.

    Re-importing ``skbio.io._registry`` in setUp gives every test an empty
    registry; the two mkstemp files are available for tests that need real
    paths on disk.
    """
    def setUp(self):
        self.module = import_fresh_module('skbio.io._registry')
        self.fd1, self.fp1 = mkstemp()
        self.fd2, self.fp2 = mkstemp()
    def tearDown(self):
        # Close each descriptor *before* unlinking its file: removing a file
        # that still has an open handle fails on Windows, and the original
        # remove-then-close order would leak the fd if os.remove raised.
        os.close(self.fd1)
        os.remove(self.fp1)
        os.close(self.fd2)
        os.remove(self.fp2)
class TestRegisterAndGetReader(RegistryTest):
    """Tests register_reader/get_reader: lookup misses, generator readers,
    per-class dispatch, and duplicate-registration errors."""
    def test_get_reader_no_match(self):
        # Unknown simple, list-form and comma-form format names all miss.
        self.assertEqual(None, self.module.get_reader('not_a_format',
                                                      TestClass))
        self.assertEqual(None, self.module.get_reader(['not_a_format',
                                                       'still_not'],
                                                      TestClass))
        self.assertEqual(None, self.module.get_reader('Nope, Sorry',
                                                      TestClass))
    def test_register_reader_on_generator(self):
        # Readers registered without a class (or with an explicit None) are
        # generator readers, retrievable with no class or with None.
        @self.module.register_reader('format1')
        def format1_reader_generator(fh):
            yield
        @self.module.register_reader(['compound', 'format'])
        def compound_format_reader(fh1, fh2):
            yield
        self.assertEqual(format1_reader_generator,
                         self.module.get_reader('format1'))
        self.assertEqual(format1_reader_generator,
                         self.module.get_reader('format1', None))
        # Compound formats can be looked up by list or comma-joined string.
        self.assertEqual(compound_format_reader,
                         self.module.get_reader(['compound', 'format']))
        self.assertEqual(compound_format_reader,
                         self.module.get_reader('compound, format'))
        @self.module.register_reader('format2', None)
        def format2_reader_generator(fh):
            yield
        self.assertEqual(format2_reader_generator,
                         self.module.get_reader('format2'))
        self.assertEqual(format2_reader_generator,
                         self.module.get_reader('format2', None))
    def test_get_reader_when_only_writer_exists(self):
        # A registered writer must not satisfy a reader lookup.
        @self.module.register_writer('format', TestClass)
        def format_reader(fh):
            return
        self.assertEqual(None, self.module.get_reader('format', TestClass))
    def test_register_reader_on_many(self):
        # Lookup is keyed on (format, class); same format with different
        # classes resolves to different readers.
        @self.module.register_reader('format1', TestClassA)
        def format1_reader(fh):
            return
        @self.module.register_reader('format1', TestClassB)
        def format1_reader_b(fh):
            return
        @self.module.register_reader('format2', TestClassA)
        def format2_reader(fh):
            return
        @self.module.register_reader('format3', TestClassB)
        def format3_reader(fh):
            return
        self.assertEqual(format1_reader,
                         self.module.get_reader('format1', TestClassA))
        self.assertEqual(format1_reader_b,
                         self.module.get_reader('format1', TestClassB))
        self.assertEqual(format2_reader,
                         self.module.get_reader('format2', TestClassA))
        self.assertEqual(None,
                         self.module.get_reader('format2', TestClassB))
        self.assertEqual(None,
                         self.module.get_reader('format3', TestClassA))
        self.assertEqual(format3_reader,
                         self.module.get_reader('format3', TestClassB))
    def test_register_reader_over_existing(self):
        # Registering a second reader for the same (format, class) raises,
        # and the message names the format, the kind, and the class.
        with self.assertRaises(DuplicateRegistrationError) as cm:
            @self.module.register_reader('format1', TestClassA)
            def format1_reader(fh):
                return
            @self.module.register_reader('format1', TestClassA)
            def duplicate_format1_reader(fh):
                return
        self.assertTrue('format1' in str(cm.exception))
        self.assertTrue('reader' in str(cm.exception))
        self.assertTrue(TestClassA.__name__ in str(cm.exception))
class TestRegisterAndGetWriter(RegistryTest):
    """Tests register_writer/get_writer: mirrors the reader tests, plus a
    duplicate-generator-writer case."""
    def test_get_writer_no_match(self):
        # Unknown simple, list-form and comma-form format names all miss.
        self.assertEqual(None, self.module.get_writer('not_a_format',
                                                      TestClass))
        self.assertEqual(None, self.module.get_writer(['not_a_format',
                                                       'still_not'],
                                                      TestClass))
        self.assertEqual(None, self.module.get_writer('Nope, Sorry',
                                                      TestClass))
    def test_get_writer_when_only_reader_exists(self):
        # A registered reader must not satisfy a writer lookup.
        @self.module.register_reader('format', TestClass)
        def format_reader(fh):
            return
        self.assertEqual(None, self.module.get_writer('format', TestClass))
    def test_register_writer_on_generator(self):
        # Writers registered without a class (or with an explicit None) are
        # generator writers, retrievable with no class or with None.
        @self.module.register_writer('format1')
        def format1_writer_generator(obj, fh):
            yield
        @self.module.register_writer(['compound', 'format'])
        def compound_format_writer(fh1, fh2):
            yield
        self.assertEqual(format1_writer_generator,
                         self.module.get_writer('format1'))
        self.assertEqual(format1_writer_generator,
                         self.module.get_writer('format1', None))
        # Compound formats can be looked up by list or comma-joined string.
        self.assertEqual(compound_format_writer,
                         self.module.get_writer(['compound', 'format']))
        self.assertEqual(compound_format_writer,
                         self.module.get_writer('compound, format'))
        @self.module.register_writer('format2', None)
        def format2_writer_generator(obj, fh):
            yield
        self.assertEqual(format2_writer_generator,
                         self.module.get_writer('format2'))
        self.assertEqual(format2_writer_generator,
                         self.module.get_writer('format2', None))
    def test_register_writer_on_many(self):
        # Lookup is keyed on (format, class); same format with different
        # classes resolves to different writers.
        @self.module.register_writer('format1', TestClassA)
        def format1_writer(obj, fh):
            return
        @self.module.register_writer('format1', TestClassB)
        def format1_writer_b(obj, fh):
            return
        @self.module.register_writer('format2', TestClassA)
        def format2_writer(obj, fh):
            return
        @self.module.register_writer('format3', TestClassB)
        def format3_writer(obj, fh):
            return
        self.assertEqual(format1_writer,
                         self.module.get_writer('format1', TestClassA))
        self.assertEqual(format1_writer_b,
                         self.module.get_writer('format1', TestClassB))
        self.assertEqual(format2_writer,
                         self.module.get_writer('format2', TestClassA))
        self.assertEqual(None,
                         self.module.get_writer('format2', TestClassB))
        self.assertEqual(None,
                         self.module.get_writer('format3', TestClassA))
        self.assertEqual(format3_writer,
                         self.module.get_writer('format3', TestClassB))
    def test_register_writer_over_existing(self):
        # Duplicate (format, class) writer registration raises with a
        # message naming the format, the kind, and the class.
        with self.assertRaises(DuplicateRegistrationError) as cm:
            @self.module.register_writer('format1', TestClassA)
            def format1_writer(obj, fh):
                return
            @self.module.register_writer('format1', TestClassA)
            def duplicate_format1_writer(obj, fh):
                return
        self.assertTrue('format1' in str(cm.exception))
        self.assertTrue('writer' in str(cm.exception))
        self.assertTrue(TestClassA.__name__ in str(cm.exception))
    def test_register_writer_over_existing_generator(self):
        # Duplicate class-less (generator) writer registration also raises;
        # the message mentions 'generator' instead of a class name.
        with self.assertRaises(DuplicateRegistrationError) as cm:
            @self.module.register_writer('format1')
            def format1_writer(obj, fh):
                return
            @self.module.register_writer('format1')
            def duplicate_format1_writer(obj, fh):
                return
        self.assertTrue('format1' in str(cm.exception))
        self.assertTrue('writer' in str(cm.exception))
        self.assertTrue('generator' in str(cm.exception))
class TestRegisterAndGetSniffer(RegistryTest):
    """Tests register_sniffer/get_sniffer, including composed compound
    sniffers built from several single-format sniffers."""
    def test_get_sniffer_no_match(self):
        self.assertEqual(None, self.module.get_sniffer('not_a_format'))
    def test_register_compound_sniffer(self):
        # Sniffers can only be *registered* for a single format; compound
        # registration (list or comma form) is rejected up front.
        with self.assertRaises(ValueError):
            @self.module.register_sniffer(['f1', 'f2'])
            def this_wont_work(fh1, fh2):
                return False, {}
        with self.assertRaises(ValueError):
            @self.module.register_sniffer('f1, f2')
            def this_still_wont_work(fh1, fh2):
                return False, {}
    def test_register_sniffer_on_many(self):
        fh1 = StringIO(u'1')
        fh2 = StringIO(u'2')
        fh3 = StringIO(u'3')
        @self.module.register_sniffer('format1')
        def format1_sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_sniffer('format2')
        def format2_sniffer(fh):
            return '2' in fh.readline(), {}
        @self.module.register_sniffer('format3')
        def format3_sniffer(fh):
            return '3' in fh.readline(), {}
        self.assertEqual(format1_sniffer,
                         self.module.get_sniffer('format1'))
        self.assertEqual(format2_sniffer,
                         self.module.get_sniffer('format2'))
        self.assertEqual(format3_sniffer,
                         self.module.get_sniffer('format3'))
        # get_sniffer composes registered single-format sniffers into a
        # compound sniffer that takes a matching (possibly nested)
        # structure of filehandles — positionally paired with the formats.
        compound_sniffer = self.module.get_sniffer(['format3',
                                                    'format2'])
        compound_sniffer2 = self.module.get_sniffer('format3, format2')
        dont_do_this_ever = self.module.get_sniffer([['format2', 'format1'],
                                                     'format3'])
        self.assertTrue(compound_sniffer([fh3, fh2])[0])
        self.assertTrue(compound_sniffer2([fh3, fh2])[0])
        self.assertTrue(not compound_sniffer2([fh2, fh3])[0])
        self.assertTrue(dont_do_this_ever([[fh2, fh1], fh3])[0])
        # A structure mismatch (wrong nesting / wrong arity) raises.
        with self.assertRaises(ValueError):
            dont_do_this_ever(fh1)
        with self.assertRaises(ValueError):
            dont_do_this_ever([fh1, fh2, fh3])
    def test_register_sniffer_over_existing(self):
        # Duplicate sniffer registration for the same format raises.
        with self.assertRaises(DuplicateRegistrationError) as cm:
            @self.module.register_sniffer('format1')
            def format1_sniffer(fh):
                return False, {}
            @self.module.register_sniffer('format1')
            def duplicate_format1_sniffer(fh):
                return False, {}
        self.assertTrue('format1' in str(cm.exception))
class TestListReadFormats(RegistryTest):
    """Tests list_read_formats: only formats with a reader registered for
    the queried class are listed; compound formats appear comma-joined."""
    def test_no_read_formats(self):
        @self.module.register_reader('format1', TestClassA)
        def this_isnt_on_clsB(fh):
            return
        self.assertEqual([], self.module.list_read_formats(TestClassB))
    def test_one_read_format(self):
        @self.module.register_reader('format1', TestClass)
        def format1_cls(fh):
            return
        self.assertEqual(['format1'], self.module.list_read_formats(TestClass))
    def test_many_read_formats(self):
        @self.module.register_reader('format1', TestClassA)
        def format1_clsA(fh):
            return
        @self.module.register_reader('format2', TestClassA)
        def format2_clsA(fh):
            return
        @self.module.register_reader('format3', TestClassA)
        def format3_clsA(fh):
            return
        @self.module.register_reader('format3', TestClassB)
        def format3_clsB(fh):
            return
        @self.module.register_reader('format4', TestClassB)
        def format4_clsB(fh):
            return
        # A *writer* registration must not show up among read formats.
        @self.module.register_writer('format5', TestClassA)
        def format5_clsA(fh):
            return
        # Compound formats, whether registered as a comma-joined string or
        # as a list, are listed in the comma-joined form.
        @self.module.register_reader('formatB, formatA', TestClassA)
        def formatAB_clsA(fh):
            return
        @self.module.register_reader(['formatX', 'formatY'], TestClassA)
        def formatXY_clsA(fh):
            return
        @self.module.register_reader(['formatM', 'formatN'], TestClassB)
        def formatMN_clsB(fh):
            return
        formats = self.module.list_read_formats(TestClassA)
        self.assertTrue('format1' in formats)
        self.assertTrue('format2' in formats)
        self.assertTrue('format3' in formats)
        self.assertTrue('format4' not in formats)
        self.assertTrue('format5' not in formats)
        self.assertTrue('formatB, formatA' in formats)
        self.assertTrue('formatX, formatY' in formats)
        self.assertTrue('formatM, formatN' not in formats)
class TestListWriteFormats(RegistryTest):
    """Tests list_write_formats: only formats with a writer registered for
    the queried class are listed; compound formats appear comma-joined.

    NOTE: the methods were previously named ``test_*_read_*`` — a
    copy-paste from TestListReadFormats — although they exercise the
    *write* side; they are renamed here to match what they actually test.
    """
    def test_no_write_formats(self):
        @self.module.register_writer('format1', TestClassA)
        def this_isnt_on_clsB(fh):
            return
        self.assertEqual([], self.module.list_write_formats(TestClassB))
    def test_one_write_format(self):
        @self.module.register_writer('format1', TestClass)
        def format1_cls(fh):
            return
        self.assertEqual(['format1'],
                         self.module.list_write_formats(TestClass))
    def test_many_write_formats(self):
        @self.module.register_writer('format1', TestClassA)
        def format1_clsA(fh):
            return
        @self.module.register_writer('format2', TestClassA)
        def format2_clsA(fh):
            return
        @self.module.register_writer('format3', TestClassA)
        def format3_clsA(fh):
            return
        @self.module.register_writer('format3', TestClassB)
        def format3_clsB(fh):
            return
        @self.module.register_writer('format4', TestClassB)
        def format4_clsB(fh):
            return
        # A *reader* registration must not show up among write formats.
        @self.module.register_reader('format5', TestClassA)
        def format5_clsA(fh):
            return
        # Compound formats, whether registered as a comma-joined string or
        # as a list, are listed in the comma-joined form.
        @self.module.register_writer('formatB, formatA', TestClassA)
        def formatAB_clsA(fh):
            return
        @self.module.register_writer(['formatX', 'formatY'], TestClassA)
        def formatXY_clsA(fh):
            return
        @self.module.register_writer(['formatM', 'formatN'], TestClassB)
        def formatMN_clsB(fh):
            return
        formats = self.module.list_write_formats(TestClassA)
        self.assertTrue('format1' in formats)
        self.assertTrue('format2' in formats)
        self.assertTrue('format3' in formats)
        self.assertTrue('format4' not in formats)
        self.assertTrue('format5' not in formats)
        self.assertTrue('formatB, formatA' in formats)
        self.assertTrue('formatX, formatY' in formats)
        self.assertTrue('formatM, formatN' not in formats)
class TestSniff(RegistryTest):
    """Tests for the registry's format-sniffing machinery.

    ``sniff`` should identify which registered format(s) match a file's
    contents, raising ``UnrecognizedFormatError`` on zero or multiple
    matches, honoring an optional ``cls`` filter, and opening real files
    with the requested ``mode``.
    """
    def setUp(self):
        """Register four sniffers, each keyed on the digit the file contains."""
        super(TestSniff, self).setUp()
        @self.module.register_sniffer('format1')
        def format1_sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_sniffer('format2')
        def format2_sniffer(fh):
            return '2' in fh.readline(), {}
        @self.module.register_sniffer('format3')
        def format3_sniffer(fh):
            return '3' in fh.readline(), {}
        @self.module.register_sniffer('format4')
        def format4_sniffer(fh):
            return '4' in fh.readline(), {}
        # Only format3 and format4 get readers for TestClass, so cls-filtered
        # sniffing should be restricted to those two formats.
        @self.module.register_reader('format3', TestClass)
        def reader3(fh):
            return
        @self.module.register_reader('format4', TestClass)
        def reader4(fh):
            return
    def test_no_matches(self):
        """Zero matches raises; the error message names the file handle."""
        fh = StringIO(u"no matches here")
        fh2 = StringIO(u"no matches here")
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh)
        self.assertTrue(str(fh) in str(cm.exception))
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh, cls=TestClass)
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh, cls=TestClassB)
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff([fh, fh2], cls=TestClassB)
        fh.close()
    def test_one_match(self):
        """Exactly one sniffer matching returns that format's name."""
        fh = StringIO(u"contains a 3")
        self.assertEqual('format3', self.module.sniff(fh)[0])
    def test_many_matches(self):
        """Ambiguous input raises; the exception lists every matching format."""
        fh = StringIO(u"1234 will match all")
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh)
        self.assertTrue("format1" in str(cm.exception))
        self.assertTrue("format2" in str(cm.exception))
        self.assertTrue("format3" in str(cm.exception))
        self.assertTrue("format4" in str(cm.exception))
        fh.close()
    def test_compound_matches(self):
        """Sniffing a list of files yields a comma-joined, order-preserving
        compound format name."""
        fh = StringIO(u'1')
        fh2 = StringIO(u'2')
        self.assertEqual('format1, format2', self.module.sniff([fh, fh2])[0])
        self.assertEqual('format2, format1', self.module.sniff([fh2, fh])[0])
    def test_no_matches_w_cls(self):
        fh = StringIO(u"no matches here")
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh, cls=TestClass)
        self.assertTrue(str(fh) in str(cm.exception))
        fh.close()
    def test_one_match_w_cls(self):
        fh = StringIO(u"contains a 3")
        self.assertEqual('format3',
                         self.module.sniff(fh, cls=TestClass)[0])
    def test_many_matches_w_cls(self):
        """With cls=TestClass, formats lacking a TestClass reader are excluded
        from the ambiguity error."""
        fh = StringIO(u"1234 will only format3 and format4 w/ class")
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.sniff(fh, cls=TestClass)
        self.assertTrue("format1" not in str(cm.exception))
        self.assertTrue("format2" not in str(cm.exception))
        # Only format3 and format4 have a definition for the provided class.
        self.assertTrue("format3" in str(cm.exception))
        self.assertTrue("format4" in str(cm.exception))
        fh.close()
    def test_compound_matches_w_cls(self):
        fh = StringIO(u'3')
        fh2 = StringIO(u'4')
        self.assertEqual('format3, format4',
                         self.module.sniff([fh, fh2], cls=TestClass)[0])
        self.assertEqual('format4, format3',
                         self.module.sniff([fh2, fh], cls=TestClass)[0])
    def test_that_mode_is_used(self):
        """When sniffing a filepath, the handle is opened with the requested
        mode ('U' by default); the sniffer asserts it from the inside."""
        fp = self.fp1
        with open(fp, 'w') as fh:
            fh.write('@\n#\n')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            self.assertEqual(self.expected_mode, fh.mode)
            return '@' in fh.readline(), {}
        self.expected_mode = 'U'
        self.module.sniff(fp)
        self.expected_mode = 'r'
        self.module.sniff(fp, mode='r')
    def test_that_mode_is_used_compound(self):
        """Same mode guarantee when sniffing a list of filepaths."""
        fp1 = self.fp1
        with open(fp1, 'w') as fh:
            fh.write('@\n#\n')
        fp2 = self.fp2
        with open(fp2, 'w') as fh:
            fh.write('!\n#\n')
        @self.module.register_sniffer('c1')
        def c1_sniffer(fh):
            self.assertEqual(self.expected_mode, fh.mode)
            return '@' in fh.readline(), {}
        @self.module.register_sniffer('c2')
        def c2_sniffer(fh):
            self.assertEqual(self.expected_mode, fh.mode)
            return '!' in fh.readline(), {}
        self.expected_mode = 'U'
        self.module.sniff([fp1, fp2])
        self.expected_mode = 'r'
        self.module.sniff([fp1, fp2], mode='r')
class TestRead(RegistryTest):
    """Tests for the registry's ``read`` entry point.

    Covers format/into resolution, generator vs. instance readers, compound
    (multi-file) formats, sniffer-driven verification, kwarg plumbing from
    sniffers into readers, and file ``mode`` handling.
    """
    def test_format_and_into_are_none(self):
        """Neither a format nor a target class is enough to dispatch."""
        fh = StringIO()
        with self.assertRaises(ValueError):
            self.module.read(fh)
        fh.close()
    def test_format_is_none(self):
        """Format omitted: read() sniffs, then dispatches to the reader."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        instance = self.module.read(fh, into=TestClass)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        fh.close()
    def test_into_is_none(self):
        """No target class: read() returns a generator over the file."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_reader('format')
        def reader(fh):
            for value in [int(x) for x in fh.read().split('\n')]:
                yield value
        generator = self.module.read(fh, format='format')
        first_run = True
        for a, b in zip(generator, [1, 2, 3, 4]):
            if first_run:
                # Seek the caller's handle mid-iteration; the generator must
                # keep yielding the right values regardless.
                fh.seek(3)
                first_run = False
            self.assertEqual(a, b)
        # The caller's seek position survives exhaustion of the generator.
        self.assertEqual(3, fh.tell())
        fh.close()
    def test_into_is_none_compound_format(self):
        """Generator readers also work with compound (multi-file) formats,
        in either file order, and reject a length-mismatched file list."""
        fh = StringIO(u'1\n3')
        fh2 = StringIO(u'2\n4')
        @self.module.register_reader(['odd', 'even'])
        def reader(odd, even):
            for o, e in zip(odd, even):
                yield int(o.rstrip('\n'))
                yield int(e.rstrip('\n'))
        generator = self.module.read([fh, fh2], format='odd, even')
        first_run = True
        for a, b in zip(generator, [1, 2, 3, 4]):
            if first_run:
                fh.seek(3)
                fh2.seek(2)
                first_run = False
            self.assertEqual(a, b)
        self.assertEqual(3, fh.tell())
        self.assertEqual(2, fh2.tell())
        fh2.seek(0)
        fh.seek(0)
        generator = self.module.read([fh2, fh], format='even, odd')
        first_run = True
        for a, b in zip(generator, [1, 2, 3, 4]):
            if first_run:
                fh.seek(5)
                fh2.seek(1)
                first_run = False
            self.assertEqual(a, b)
        self.assertEqual(5, fh.tell())
        self.assertEqual(1, fh2.tell())
        # One file for a two-part format is a usage error.
        with self.assertRaises(ValueError):
            self.module.read([fh], format='even, odd')
        fh.close()
        fh2.close()
    def test_into_is_none_real_file(self):
        """When given a filepath, read() closes its own handle after the
        generator is exhausted."""
        fp = self.fp1
        with open(fp, 'w') as fh:
            fh.write('1\n2\n3\n4')
        self._test_fh = None
        @self.module.register_reader('format')
        def reader(fh):
            self._test_fh = fh
            for value in [int(x) for x in fh.read().split('\n')]:
                yield value
        generator = self.module.read(fp, format='format')
        for a, b in zip(generator, [1, 2, 3, 4]):
            self.assertEqual(a, b)
        self.assertTrue(self._test_fh.closed)
    def test_reader_does_not_exist(self):
        """Unknown formats raise, naming both the format and the target
        (class name, or 'generator' when into is None)."""
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.read(None, format='not_a_format', into=TestClass)
        self.assertTrue(TestClass.__name__ in str(cm.exception))
        self.assertTrue('not_a_format' in str(cm.exception))
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.read(None, format='not_a_format2')
        self.assertTrue('generator' in str(cm.exception))
        self.assertTrue('not_a_format2' in str(cm.exception))
    def test_reader_exists_with_verify_true(self):
        """verify=True (and the default) runs the sniffer before reading."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            self.was_verified = True
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        self.was_verified = False
        instance = self.module.read(fh, format='format', into=TestClass,
                                    verify=True)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        self.assertTrue(self.was_verified)
        # Verification is the default behavior.
        self.was_verified = False
        instance = self.module.read(fh, format='format', into=TestClass)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        self.assertTrue(self.was_verified)
        fh.close()
    def test_reader_compound_format_w_verify(self):
        """For compound formats every component sniffer runs, and files are
        matched to component formats positionally (any order)."""
        fh = StringIO(u'1\n3')
        fh2 = StringIO(u'2\n4')
        fh3 = StringIO(u'9\n9')
        @self.module.register_reader('odd, even, nines', TestClass)
        def reader(odd, even, nines):
            state = []
            for o, e, n in zip(odd, even, nines):
                state.append(int(o.rstrip('\n')))
                state.append(int(e.rstrip('\n')))
                state.append(int(n.rstrip('\n')))
            return TestClass(state)
        @self.module.register_sniffer('nines')
        def nines_sniffer(fh):
            self.was_verified_n = True
            return '9' in fh.readline(), {}
        @self.module.register_sniffer('odd')
        def odd_sniffer(fh):
            self.was_verified_o = True
            return '1' in fh.readline(), {}
        @self.module.register_sniffer('even')
        def even_sniffer(fh):
            self.was_verified_e = True
            return '2' in fh.readline(), {}
        self.was_verified_e = False
        self.was_verified_o = False
        self.was_verified_n = False
        instance = self.module.read([fh2, fh3, fh], format=['even', 'nines',
                                                            'odd'],
                                    into=TestClass, verify=True)
        self.assertEqual(TestClass([1, 2, 9, 3, 4, 9]), instance)
        self.assertTrue(self.was_verified_e)
        self.assertTrue(self.was_verified_o)
        self.assertTrue(self.was_verified_n)
        self.was_verified_e = False
        self.was_verified_o = False
        self.was_verified_n = False
        instance = self.module.read([fh, fh2, fh3], format=['odd',
                                                            'even', 'nines'],
                                    into=TestClass)
        self.assertEqual(TestClass([1, 2, 9, 3, 4, 9]), instance)
        self.assertTrue(self.was_verified_e)
        self.assertTrue(self.was_verified_o)
        self.assertTrue(self.was_verified_n)
        fh.close()
        fh2.close()
        fh3.close()
    def test_warning_raised(self):
        """A failed sniff with verify on emits UnprovenFormatWarning but the
        read still succeeds (warnings escalated to errors here to detect it)."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            self.was_verified = True
            return False, {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("error")
            with self.assertRaises(UnprovenFormatWarning):
                self.was_verified = False
                instance = self.module.read(fh, format='format',
                                            into=TestClass, verify=True)
                self.assertEqual(TestClass([1, 2, 3, 4]), instance)
                self.assertTrue(self.was_verified)
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("error")
            with self.assertRaises(UnprovenFormatWarning):
                self.was_verified = False
                instance = self.module.read(fh, format='format',
                                            into=TestClass)
                self.assertEqual(TestClass([1, 2, 3, 4]), instance)
                self.assertTrue(self.was_verified)
        fh.close()
    def test_reader_exists_with_verify_false(self):
        """verify=False skips the sniffer entirely."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            self.was_verified = True
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        self.was_verified = False
        instance = self.module.read(fh, format='format', into=TestClass,
                                    verify=False)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        self.assertFalse(self.was_verified)
        fh.close()
    def test_reader_exists_real_file(self):
        """read() also accepts a filesystem path instead of a handle."""
        fp = self.fp1
        with open(fp, 'w') as fh:
            fh.write('1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        instance = self.module.read(fp, format='format', into=TestClass)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
    def test_reader_into_none_w_mutate_fh(self):
        """mutate_fh=True lets the reader leave the handle's position moved
        (generator path)."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_reader('format')
        def reader(fh):
            for x in fh.read().split('\n'):
                yield int(x)
        fh.seek(0)
        generator = self.module.read(fh, format='format', mutate_fh=True)
        for a, b in zip(generator, [1, 2, 3, 4]):
            self.assertEqual(a, b)
        # Position was NOT restored to 0 because mutation was allowed.
        self.assertNotEqual(0, fh.tell())
        fh.close()
    def test_reader_w_mutate_fh(self):
        """mutate_fh=True on the instance path likewise skips position
        restoration."""
        fh = StringIO(u'1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            return TestClass([int(x) for x in fh.read().split('\n')])
        fh.seek(0)
        instance = self.module.read(fh, format='format', into=TestClass,
                                    mutate_fh=True)
        self.assertNotEqual(0, fh.tell())
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        fh.close()
    def test_read_kwargs_passed_generator(self):
        """Sniffer-suggested kwargs merge with caller kwargs for generator
        readers."""
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return True, {'arg1': 15, 'arg2': 'abc'}
        @self.module.register_reader('format')
        def reader(fh, **kwargs):
            self.assertEqual(kwargs['arg1'], 15)
            self.assertEqual(kwargs['arg2'], 'abc')
            self.assertEqual(kwargs['arg3'], [1])
            yield
        next(self.module.read(StringIO(), format='format', arg3=[1]))
    def test_read_kwargs_passed_and_override(self):
        """A caller kwarg equal to the sniffer's suggestion is silent; a
        conflicting value raises ArgumentOverrideWarning (escalated here)."""
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return True, {'arg1': 15, 'arg2': 'abc', 'override': 30}
        @self.module.register_reader('format', TestClass)
        def reader(fh, **kwargs):
            self.assertEqual(kwargs['arg1'], 15)
            self.assertEqual(kwargs['arg2'], 'abc')
            self.assertEqual(kwargs['arg3'], [1])
            return
        self.module.read(StringIO(), into=TestClass, arg3=[1])
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("error")
            # Should raise no warning and thus no error.
            self.module.read(StringIO(), into=TestClass, arg3=[1],
                             override=30)
            # Should raise a warning and thus an error.
            with self.assertRaises(ArgumentOverrideWarning):
                self.module.read(StringIO(), into=TestClass, arg3=[1],
                                 override=100)
    def test_read_kwargs_passed_w_compound_format(self):
        """For compound formats each sniffer's kwargs are collected into
        per-format lists (None where a sniffer did not supply a value),
        while caller kwargs are broadcast to every slot."""
        @self.module.register_sniffer('format1')
        def format1_sniffer(fh):
            return True, {'partial': 1, 'overlap': 1, 'list': 0}
        @self.module.register_sniffer('format2')
        def format2_sniffer(fh):
            return True, {'arg1': 'a', 'overlap': 2, 'list': [1, 2]}
        @self.module.register_sniffer('format3')
        def format3_sniffer(fh):
            return True, {'partial': 3, 'overlap': 3, 'list': [3, 4]}
        @self.module.register_reader(['format1', 'format2', 'format3'])
        def reader(f1, f2, f3, **kwargs):
            self.assertEqual(kwargs['partial'], [1, None, 3])
            self.assertEqual(kwargs['overlap'], [1, 2, 3])
            self.assertEqual(kwargs['arg1'], [None, 'a', None])
            self.assertEqual(kwargs['provided'], [True, True, True])
            self.assertEqual(kwargs['list'], [0, [1, 2], [3, 4]])
            return
        fh1 = StringIO()
        fh2 = StringIO()
        fh3 = StringIO()
        self.module.read([fh1, fh2, fh3],
                         format='format1, format2, format3',
                         provided=True)
        self.module.read([fh3, fh1, fh2],
                         format='format3, format1, format2',
                         provided=True)
        fh1.close()
        fh2.close()
        fh3.close()
    def test_that_mode_is_used(self):
        """A filepath passed to read() is opened with the requested mode
        ('U' by default); the reader asserts it from the inside."""
        fp = self.fp1
        with open(fp, 'w') as fh:
            fh.write('1\n2\n3\n4')
        @self.module.register_sniffer('format')
        def sniffer(fh):
            return '1' in fh.readline(), {}
        @self.module.register_reader('format', TestClass)
        def reader(fh):
            self.assertEqual(self.expected_mode, fh.mode)
            return TestClass([int(x) for x in fh.read().split('\n')])
        self.expected_mode = 'U'
        instance = self.module.read(fp, format='format', into=TestClass)
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
        self.expected_mode = 'r'
        instance = self.module.read(fp, format='format', into=TestClass,
                                    mode='r')
        self.assertEqual(TestClass([1, 2, 3, 4]), instance)
class TestWrite(RegistryTest):
    """Tests for the registry's ``write`` entry point.

    Covers required-argument validation, single and compound formats,
    filepath targets, writer kwargs, and file ``mode`` handling.
    """
    def test_format_is_none(self):
        """A format must be given explicitly when writing."""
        fh = StringIO()
        with self.assertRaises(ValueError):
            self.module.write({}, into=fh)
        fh.close()
    def test_into_is_none(self):
        """A destination must be given explicitly when writing."""
        fh = StringIO()
        with self.assertRaises(ValueError):
            self.module.write({}, format='format')
        fh.close()
    def test_writer_does_not_exist(self):
        """Unknown formats raise, naming the format and the destination."""
        fh = StringIO()
        with self.assertRaises(UnrecognizedFormatError) as cm:
            self.module.write({}, format='not_a_format', into=fh)
        self.assertTrue('not_a_format' in str(cm.exception))
        self.assertTrue(str(fh) in str(cm.exception))
        fh.close()
    def test_writer_exists(self):
        obj = TestClass(['1', '2', '3', '4'])
        fh = StringIO()
        @self.module.register_writer('format', TestClass)
        def writer(obj, fh):
            fh.write(u'\n'.join(obj.list))
        self.module.write(obj, format='format', into=fh)
        fh.seek(0)
        self.assertEqual("1\n2\n3\n4", fh.read())
        fh.close()
    def test_writer_compound_format(self):
        """Compound writers receive one destination per component format,
        matched positionally whether the format is a list or one string."""
        fh = StringIO()
        fh2 = StringIO()
        fh3 = StringIO()
        fhb = StringIO()
        fh2b = StringIO()
        fh3b = StringIO()
        @self.module.register_writer('odd, even, nines', TestClass)
        def writer(obj, odd, even, nines):
            i = 0
            while i+2 < len(obj.list):
                odd.write(str(obj.list[i]) + u"\n")
                even.write(str(obj.list[i+1]) + u"\n")
                nines.write(str(obj.list[i+2]) + u"\n")
                i = i+3
        self.module.write(TestClass([1, 2, 9, 3, 4, 9]), into=[fh2, fh3, fh],
                          format=['even', 'nines', 'odd'])
        self.module.write(TestClass([1, 2, 9, 3, 4, 9]), into=[fhb, fh2b,
                                                               fh3b],
                          format='odd, even, nines')
        fh.seek(0)
        fh2.seek(0)
        fh3.seek(0)
        fhb.seek(0)
        fh2b.seek(0)
        fh3b.seek(0)
        self.assertEqual(u'1\n3\n', fh.read())
        self.assertEqual(u'1\n3\n', fhb.read())
        self.assertEqual(u'2\n4\n', fh2.read())
        self.assertEqual(u'2\n4\n', fh2b.read())
        self.assertEqual(u'9\n9\n', fh3.read())
        self.assertEqual(u'9\n9\n', fh3b.read())
        fh.close()
        fh2.close()
        fh3.close()
        fhb.close()
        fh2b.close()
        fh3b.close()
        # NOTE(review): fh is already closed here; this relies on the
        # format/into length mismatch being detected (and ValueError raised)
        # before any write is attempted — confirm if write() ever changes
        # its validation order.
        with self.assertRaises(ValueError):
            self.module.write(TestClass([1, 2, 9]), into=[fh],
                              format='even, odd, nines')
    def test_writer_exists_real_file(self):
        """write() also accepts a filesystem path as the destination."""
        obj = TestClass(['1', '2', '3', '4'])
        fp = self.fp1
        @self.module.register_writer('format', TestClass)
        def writer(obj, fh):
            fh.write('\n'.join(obj.list))
        self.module.write(obj, format='format', into=fp)
        with open(fp, 'U') as fh:
            self.assertEqual("1\n2\n3\n4", fh.read())
    def test_writer_passed_kwargs(self):
        """Caller kwargs are forwarded to the writer."""
        @self.module.register_reader('format')
        def reader(fh):
            yield
        @self.module.register_writer('format')
        def writer(obj, fh, **kwargs):
            self.assertEqual(kwargs['passed'], True)
        generator = self.module.get_reader('format')(None)
        self.module.write(generator, format='format',
                          into=StringIO(), passed=True)
    def test_that_mode_is_used(self):
        """A filepath destination is opened with the requested mode
        ('w' by default, e.g. 'a' for append); the writer asserts it."""
        obj = TestClass(['1', '2', '3', '4'])
        fp = self.fp1
        @self.module.register_writer('format', TestClass)
        def writer(obj, fh):
            fh.write('\n'.join(obj.list))
            self.assertEqual(self.expected_mode, fh.mode)
        self.expected_mode = 'w'
        self.module.write(obj, format='format', into=fp)
        with open(fp, 'U') as fh:
            self.assertEqual("1\n2\n3\n4", fh.read())
        fp = self.fp2
        self.expected_mode = 'a'
        self.module.write(obj, format='format', into=fp, mode='a')
        with open(fp, 'U') as fh:
            self.assertEqual("1\n2\n3\n4", fh.read())
class TestEmptyFileSniffer(unittest.TestCase):
    """Behavioral tests for ``empty_file_sniffer``."""

    def _sniffs_empty(self, contents):
        """Run the sniffer over *contents* and return its boolean verdict."""
        fh = StringIO(contents)
        try:
            return empty_file_sniffer(fh)[0]
        finally:
            fh.close()

    def test_blank_file(self):
        self.assertTrue(self._sniffs_empty(u''))

    def test_whitespace_file(self):
        # A single whitespace character of any kind still counts as empty.
        for whitespace in (u' ', u'\n', u'\t'):
            self.assertTrue(self._sniffs_empty(whitespace))

    def test_mixed_whitespace_file(self):
        self.assertTrue(self._sniffs_empty(u'\n\n\t\n \t \t \n \n \n\n'))

    def test_not_empty_file(self):
        # One non-whitespace character anywhere makes the file non-empty.
        self.assertFalse(self._sniffs_empty(u'\n\n\t\n a\t \t \n \n \n\n'))
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
|
|
# coding=utf-8
# flake8: noqa E302
"""
Cmd2 testing for argument parsing
"""
import argparse
from typing import (
Optional,
)
import pytest
import cmd2
from .conftest import (
run_cmd,
)
class ArgparseApp(cmd2.Cmd):
    """cmd2 application exercising the argparse-based command decorators:
    ``with_argparser`` (with and without unknown-arg collection, quote
    preservation, and a custom namespace provider) and ``with_argument_list``.
    """
    def __init__(self):
        # Cap on how many times 'say'/'speak' will repeat their output.
        self.maxrepeats = 3
        cmd2.Cmd.__init__(self)
    def namespace_provider(self) -> argparse.Namespace:
        """Supply a pre-populated Namespace for ns_provider-based commands."""
        ns = argparse.Namespace()
        ns.custom_stuff = "custom"
        return ns
    say_parser = cmd2.Cmd2ArgumentParser()
    say_parser.add_argument('-p', '--piglatin', action='store_true', help='atinLay')
    say_parser.add_argument('-s', '--shout', action='store_true', help='N00B EMULATION MODE')
    say_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
    say_parser.add_argument('words', nargs='+', help='words to say')
    @cmd2.with_argparser(say_parser)
    def do_say(self, args, *, keyword_arg: Optional[str] = None):
        """Repeat what you tell me to."""
        words = []
        for word in args.words:
            if word is None:
                word = ''
            if args.piglatin:
                # Move the first letter to the end and append 'ay'.
                word = '%s%say' % (word[1:], word[0])
            if args.shout:
                word = word.upper()
            words.append(word)
        repetitions = args.repeat or 1
        for i in range(min(repetitions, self.maxrepeats)):
            self.stdout.write(' '.join(words))
            self.stdout.write('\n')
        # keyword_arg goes to real stdout, letting tests verify kwarg plumbing.
        if keyword_arg is not None:
            print(keyword_arg)
    tag_parser = cmd2.Cmd2ArgumentParser(description='create a html tag')
    tag_parser.add_argument('tag', help='tag')
    tag_parser.add_argument('content', nargs='+', help='content to surround with tag')
    @cmd2.with_argparser(tag_parser, preserve_quotes=True)
    def do_tag(self, args):
        # preserve_quotes=True: args.content keeps the user's original quoting.
        self.stdout.write('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content)))
        self.stdout.write('\n')
    @cmd2.with_argparser(cmd2.Cmd2ArgumentParser(), ns_provider=namespace_provider)
    def do_test_argparse_ns(self, args):
        # args is built on top of the namespace_provider() Namespace.
        self.stdout.write('{}'.format(args.custom_stuff))
    @cmd2.with_argument_list
    def do_arglist(self, arglist, *, keyword_arg: Optional[str] = None):
        # Echo whether the decorator delivered a real list.
        if isinstance(arglist, list):
            self.stdout.write('True')
        else:
            self.stdout.write('False')
        if keyword_arg is not None:
            print(keyword_arg)
    @cmd2.with_argument_list(preserve_quotes=True)
    def do_preservelist(self, arglist):
        self.stdout.write('{}'.format(arglist))
    known_parser = cmd2.Cmd2ArgumentParser()
    known_parser.add_argument('-p', '--piglatin', action='store_true', help='atinLay')
    known_parser.add_argument('-s', '--shout', action='store_true', help='N00B EMULATION MODE')
    known_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
    @cmd2.with_argparser(known_parser, with_unknown_args=True)
    def do_speak(self, args, extra, *, keyword_arg: Optional[str] = None):
        """Repeat what you tell me to."""
        # Same as do_say, but the words arrive via the unknown-args list.
        words = []
        for word in extra:
            if word is None:
                word = ''
            if args.piglatin:
                word = '%s%say' % (word[1:], word[0])
            if args.shout:
                word = word.upper()
            words.append(word)
        repetitions = args.repeat or 1
        for i in range(min(repetitions, self.maxrepeats)):
            self.stdout.write(' '.join(words))
            self.stdout.write('\n')
        if keyword_arg is not None:
            print(keyword_arg)
    @cmd2.with_argparser(cmd2.Cmd2ArgumentParser(), preserve_quotes=True, with_unknown_args=True)
    def do_test_argparse_with_list_quotes(self, args, extra):
        self.stdout.write('{}'.format(' '.join(extra)))
    @cmd2.with_argparser(cmd2.Cmd2ArgumentParser(), ns_provider=namespace_provider, with_unknown_args=True)
    def do_test_argparse_with_list_ns(self, args, extra):
        self.stdout.write('{}'.format(args.custom_stuff))
@pytest.fixture
def argparse_app():
    """Provide a fresh ArgparseApp instance for each test."""
    return ArgparseApp()
def test_invalid_syntax(argparse_app):
    """An unclosed quote surfaces a syntax error on stderr."""
    _, errors = run_cmd(argparse_app, 'speak "')
    assert errors[0] == "Invalid syntax: No closing quotation"
def test_argparse_basic_command(argparse_app):
    """say echoes its positional words back."""
    output, _ = run_cmd(argparse_app, 'say hello')
    assert output == ['hello']
def test_argparse_remove_quotes(argparse_app):
    """By default quotes are stripped from quoted arguments."""
    output, _ = run_cmd(argparse_app, 'say "hello there"')
    assert output == ['hello there']
def test_argparser_kwargs(argparse_app, capsys):
    """with_argparser forwards extra keyword args to the command function."""
    argparse_app.do_say('word', keyword_arg="foo")
    captured = capsys.readouterr()
    assert captured.out == "foo\n"
def test_argparse_preserve_quotes(argparse_app):
    """preserve_quotes=True keeps the user's quotes in parsed args."""
    output, _ = run_cmd(argparse_app, 'tag mytag "hello"')
    assert output[0] == '<mytag>"hello"</mytag>'
def test_argparse_custom_namespace(argparse_app):
    """ns_provider seeds the parsed namespace with custom attributes."""
    output, _ = run_cmd(argparse_app, 'test_argparse_ns')
    assert output[0] == 'custom'
def test_argparse_with_list(argparse_app):
    """Unknown args are collected and processed by the command."""
    output, _ = run_cmd(argparse_app, 'speak -s hello world!')
    assert output == ['HELLO WORLD!']
def test_argparse_with_list_remove_quotes(argparse_app):
    """Quotes are stripped from unknown args by default."""
    output, _ = run_cmd(argparse_app, 'speak -s hello "world!"')
    assert output == ['HELLO WORLD!']
def test_argparse_with_list_preserve_quotes(argparse_app):
    """preserve_quotes=True keeps quoting in the unknown-args list."""
    output, _ = run_cmd(argparse_app, 'test_argparse_with_list_quotes "hello" person')
    assert output[0] == '"hello" person'
def test_argparse_with_list_custom_namespace(argparse_app):
    """ns_provider works together with unknown-args collection."""
    output, _ = run_cmd(argparse_app, 'test_argparse_with_list_ns')
    assert output[0] == 'custom'
def test_argparse_with_list_and_empty_doc(argparse_app):
    """speak still works regardless of docstring presence."""
    output, _ = run_cmd(argparse_app, 'speak -s hello world!')
    assert output == ['HELLO WORLD!']
def test_argparser_correct_args_with_quotes_and_midline_options(argparse_app):
    """Options appearing after a quoted argument are still parsed."""
    output, _ = run_cmd(argparse_app, "speak 'This is a' -s test of the emergency broadcast system!")
    assert output == ['THIS IS A TEST OF THE EMERGENCY BROADCAST SYSTEM!']
def test_argparser_and_unknown_args_kwargs(argparse_app, capsys):
    """with_argparser (unknown-args mode) forwards extra keyword args too."""
    argparse_app.do_speak('', keyword_arg="foo")
    captured = capsys.readouterr()
    assert captured.out == "foo\n"
def test_argparse_quoted_arguments_multiple(argparse_app):
    """Several quoted arguments are each dequoted and joined."""
    output, _ = run_cmd(argparse_app, 'say "hello there" "rick & morty"')
    assert output == ['hello there rick & morty']
def test_argparse_help_docstring(argparse_app):
    """help output falls back to the command's docstring."""
    output, _ = run_cmd(argparse_app, 'help say')
    assert output[0].startswith('Usage: say')
    assert output[1] == ''
    assert output[2] == 'Repeat what you tell me to.'
def test_argparse_help_description(argparse_app):
    """help output uses the parser's description when present."""
    output, _ = run_cmd(argparse_app, 'help tag')
    assert output[0].startswith('Usage: tag')
    assert output[1] == ''
    assert output[2] == 'create a html tag'
def test_argparse_prog(argparse_app):
    """The parser's prog is set to the bare command name."""
    output, _ = run_cmd(argparse_app, 'help tag')
    assert output[0].split(' ')[1] == 'tag'
def test_arglist(argparse_app):
    """with_argument_list hands the command a real Python list."""
    output, _ = run_cmd(argparse_app, 'arglist "we should" get these')
    assert output[0] == 'True'
def test_arglist_kwargs(argparse_app, capsys):
    """with_argument_list forwards extra keyword args to the command."""
    argparse_app.do_arglist('arg', keyword_arg="foo")
    captured = capsys.readouterr()
    assert captured.out == "foo\n"
def test_preservelist(argparse_app):
    """preserve_quotes keeps the original quoting in the arg list."""
    output, _ = run_cmd(argparse_app, 'preservelist foo "bar baz"')
    assert output[0] == "['foo', '\"bar baz\"']"
class SubcommandApp(cmd2.Cmd):
    """Example cmd2 application with a base command that has a couple of subcommands."""

    def __init__(self):
        cmd2.Cmd.__init__(self)

    # subcommand functions for the base command
    def base_foo(self, args):
        """foo subcommand of base command"""
        self.poutput(args.x * args.y)

    def base_bar(self, args):
        """bar subcommand of base command"""
        self.poutput('((%s))' % args.z)

    def base_helpless(self, args):
        """helpless subcommand of base command"""
        self.poutput('((%s))' % args.z)

    # create the top-level parser for the base command
    base_parser = cmd2.Cmd2ArgumentParser()
    base_subparsers = base_parser.add_subparsers(dest='subcommand', metavar='SUBCOMMAND')
    base_subparsers.required = True

    # create the parser for the "foo" subcommand
    parser_foo = base_subparsers.add_parser('foo', help='foo help')
    parser_foo.add_argument('-x', type=int, default=1, help='integer')
    parser_foo.add_argument('y', type=float, help='float')
    parser_foo.set_defaults(func=base_foo)

    # create the parser for the "bar" subcommand
    parser_bar = base_subparsers.add_parser('bar', help='bar help', aliases=['bar_1', 'bar_2'])
    parser_bar.add_argument('z', help='string')
    parser_bar.set_defaults(func=base_bar)

    # create the parser for the "helpless" subcommand
    # This subcommand has aliases and no help text. It exists to prevent changes to _set_parser_prog() which
    # use an approach which relies on action._choices_actions list. See comment in that function for more
    # details.
    # BUGFIX: this parser previously reused the name ``parser_bar`` (clobbering
    # the "bar" parser attribute) and dispatched to ``base_bar``, leaving
    # ``base_helpless`` dead code. Test output is unchanged because both
    # handlers print the same thing.
    parser_helpless = base_subparsers.add_parser('helpless', aliases=['helpless_1', 'helpless_2'])
    parser_helpless.add_argument('z', help='string')
    parser_helpless.set_defaults(func=base_helpless)

    @cmd2.with_argparser(base_parser)
    def do_base(self, args):
        """Base command help"""
        # Call whatever subcommand function was selected
        func = getattr(args, 'func')
        func(self, args)

    # Add subcommands using as_subcommand_to decorator
    has_subcmds_parser = cmd2.Cmd2ArgumentParser(description="Tests as_subcmd_to decorator")
    has_subcmds_subparsers = has_subcmds_parser.add_subparsers(dest='subcommand', metavar='SUBCOMMAND')
    has_subcmds_subparsers.required = True

    @cmd2.with_argparser(has_subcmds_parser)
    def do_test_subcmd_decorator(self, args: argparse.Namespace):
        # Dispatch to whatever handler as_subcommand_to registered for args.
        handler = args.cmd2_handler.get()
        handler(args)

    subcmd_parser = cmd2.Cmd2ArgumentParser(description="A subcommand")

    @cmd2.as_subcommand_to('test_subcmd_decorator', 'subcmd', subcmd_parser, help=subcmd_parser.description.lower())
    def subcmd_func(self, args: argparse.Namespace):
        # Make sure printing the Namespace works. The way we originally added cmd2_hander to it resulted in a RecursionError.
        self.poutput(args)

    helpless_subcmd_parser = cmd2.Cmd2ArgumentParser(add_help=False, description="A subcommand with no help")

    @cmd2.as_subcommand_to(
        'test_subcmd_decorator', 'helpless_subcmd', helpless_subcmd_parser, help=helpless_subcmd_parser.description.lower()
    )
    def helpless_subcmd_func(self, args: argparse.Namespace):
        # Make sure vars(Namespace) works. The way we originally added cmd2_hander to it resulted in a RecursionError.
        self.poutput(vars(args))
@pytest.fixture
def subcommand_app():
    """Provide a fresh SubcommandApp instance for each test."""
    return SubcommandApp()
def test_subcommand_foo(subcommand_app):
    """foo multiplies -x by the positional float."""
    output, _ = run_cmd(subcommand_app, 'base foo -x2 5.0')
    assert output == ['10.0']
def test_subcommand_bar(subcommand_app):
    """bar wraps its argument in double parentheses."""
    output, _ = run_cmd(subcommand_app, 'base bar baz')
    assert output == ['((baz))']
def test_subcommand_invalid(subcommand_app):
    """An unknown subcommand reports usage plus an invalid-choice error."""
    _, errors = run_cmd(subcommand_app, 'base baz')
    assert errors[0].startswith('Usage: base')
    assert errors[1].startswith("Error: argument SUBCOMMAND: invalid choice: 'baz'")
def test_subcommand_base_help(subcommand_app):
    """help for the base command shows usage, then its docstring."""
    output, _ = run_cmd(subcommand_app, 'help base')
    assert output[0].startswith('Usage: base')
    assert output[1] == ''
    assert output[2] == 'Base command help'
def test_subcommand_help(subcommand_app):
    """Subcommand help usage always shows the canonical name, never an alias.

    Covers: foo (no aliases), bar (aliases, help text), and helpless
    (aliases, no help text).
    """
    cases = [
        ('help base foo', 'foo'),
        ('help base bar', 'bar'),
        ('help base bar_1', 'bar'),
        ('help base bar_2', 'bar'),
        ('help base helpless', 'helpless'),
        ('help base helpless_1', 'helpless'),
        ('help base helpless_2', 'helpless'),
    ]
    for command, canonical in cases:
        output, _ = run_cmd(subcommand_app, command)
        assert output[0].startswith('Usage: base ' + canonical)
        assert output[1] == ''
        assert output[2] == 'positional arguments:'
def test_subcommand_invalid_help(subcommand_app):
    """help for an unknown subcommand still shows base usage."""
    output, _ = run_cmd(subcommand_app, 'help base baz')
    assert output[0].startswith('Usage: base')
def test_add_another_subcommand(subcommand_app):
    """
    This test makes sure _set_parser_prog() sets _prog_prefix on every _SubParsersAction so that all future calls
    to add_parser() write the correct prog value to the parser being added.
    """
    added_parser = subcommand_app.base_subparsers.add_parser('new_sub', help="stuff")
    assert added_parser.prog == "base new_sub"
def test_subcmd_decorator(subcommand_app):
    """Subcommands added via as_subcommand_to work with and without -h."""
    # Test subcommand that has help option
    output, errors = run_cmd(subcommand_app, 'test_subcmd_decorator subcmd')
    assert output[0].startswith('Namespace(')
    for command in ('help test_subcmd_decorator subcmd',
                    'test_subcmd_decorator subcmd -h'):
        output, errors = run_cmd(subcommand_app, command)
        assert output[0] == 'Usage: test_subcmd_decorator subcmd [-h]'
    # Test subcommand that has no help option
    output, errors = run_cmd(subcommand_app, 'test_subcmd_decorator helpless_subcmd')
    assert "'subcommand': 'helpless_subcmd'" in output[0]
    output, errors = run_cmd(subcommand_app, 'help test_subcmd_decorator helpless_subcmd')
    assert output[0] == 'Usage: test_subcmd_decorator helpless_subcmd'
    assert not errors
    # -h is unrecognized by the helpless subcommand and bubbles up as an error.
    output, errors = run_cmd(subcommand_app, 'test_subcmd_decorator helpless_subcmd -h')
    assert not output
    assert errors[0] == 'Usage: test_subcmd_decorator [-h] SUBCOMMAND ...'
    assert errors[1] == 'Error: unrecognized arguments: -h'
def test_unittest_mock():
    """A plain unittest.mock patch breaks CommandSet registration; spec'd
    patches preserve the method signature and do not."""
    from unittest import (
        mock,
    )
    from cmd2 import (
        CommandSetRegistrationError,
    )
    # Unspec'd mock: signature no longer matches, construction must fail.
    with mock.patch.object(ArgparseApp, 'namespace_provider'):
        with pytest.raises(CommandSetRegistrationError):
            ArgparseApp()
    # Each spec'd variant keeps the signature, so construction succeeds.
    for spec_kwargs in ({'spec': True}, {'spec_set': True}, {'autospec': True}):
        with mock.patch.object(ArgparseApp, 'namespace_provider', **spec_kwargs):
            ArgparseApp()
def test_pytest_mock_invalid(mocker):
    """pytest-mock's default (unspec'd) patch breaks registration."""
    from cmd2 import (
        CommandSetRegistrationError,
    )
    mocker.patch.object(ArgparseApp, 'namespace_provider')
    with pytest.raises(CommandSetRegistrationError):
        ArgparseApp()
@pytest.mark.parametrize(
    'spec_param',
    [
        {'spec': True},
        {'spec_set': True},
        {'autospec': True},
    ],
)
def test_pytest_mock_valid(mocker, spec_param):
    """Spec'd pytest-mock patches keep ArgparseApp constructible."""
    mocker.patch.object(ArgparseApp, 'namespace_provider', **spec_param)
    ArgparseApp()
|
|
"""Tests for classes defining properties of ground domains, e.g. ZZ, QQ, ZZ[x] ... """
from sympy.polys.algebratools import (
ZZ, QQ, RR, PolynomialRing, FractionField, EX
)
from sympy.polys.polyerrors import (
UnificationFailed,
GeneratorsNeeded,
DomainError,
)
from sympy import S, sqrt, sin, oo, raises
from sympy.abc import x, y
# Algebraic number field QQ(sqrt(2) + sqrt(3)), shared by the domain tests.
ALG = QQ.algebraic_field(sqrt(2)+sqrt(3))
def test_Algebra__unify():
    """Exercise Algebra.unify() across ground domains, polynomial rings
    and fraction fields.

    NOTE(review): several mixed ZZ/QQ fraction-field combinations unify
    to EX; the trailing ``# QQ.frac_field(...)`` comments record the
    tighter result one might expect instead.
    """
    # Ground domains: unification picks the larger of the two.
    assert ZZ.unify(ZZ) == ZZ
    assert QQ.unify(QQ) == QQ
    assert ZZ.unify(QQ) == QQ
    assert QQ.unify(ZZ) == QQ
    assert EX.unify(EX) == EX
    assert ZZ.unify(EX) == EX
    assert QQ.unify(EX) == EX
    assert EX.unify(ZZ) == EX
    assert EX.unify(QQ) == EX
    # EX absorbs any composite domain, in either argument order.
    assert ZZ.poly_ring('x').unify(EX) == EX
    assert ZZ.frac_field('x').unify(EX) == EX
    assert EX.unify(ZZ.poly_ring('x')) == EX
    assert EX.unify(ZZ.frac_field('x')) == EX
    assert ZZ.poly_ring('x','y').unify(EX) == EX
    assert ZZ.frac_field('x','y').unify(EX) == EX
    assert EX.unify(ZZ.poly_ring('x','y')) == EX
    assert EX.unify(ZZ.frac_field('x','y')) == EX
    assert QQ.poly_ring('x').unify(EX) == EX
    assert QQ.frac_field('x').unify(EX) == EX
    assert EX.unify(QQ.poly_ring('x')) == EX
    assert EX.unify(QQ.frac_field('x')) == EX
    assert QQ.poly_ring('x','y').unify(EX) == EX
    assert QQ.frac_field('x','y').unify(EX) == EX
    assert EX.unify(QQ.poly_ring('x','y')) == EX
    assert EX.unify(QQ.frac_field('x','y')) == EX
    # Polynomial ring vs. ground domain: ground domains are unified.
    assert ZZ.poly_ring('x').unify(ZZ) == ZZ.poly_ring('x')
    assert ZZ.poly_ring('x').unify(QQ) == QQ.poly_ring('x')
    assert QQ.poly_ring('x').unify(ZZ) == QQ.poly_ring('x')
    assert QQ.poly_ring('x').unify(QQ) == QQ.poly_ring('x')
    assert ZZ.unify(ZZ.poly_ring('x')) == ZZ.poly_ring('x')
    assert QQ.unify(ZZ.poly_ring('x')) == QQ.poly_ring('x')
    assert ZZ.unify(QQ.poly_ring('x')) == QQ.poly_ring('x')
    assert QQ.unify(QQ.poly_ring('x')) == QQ.poly_ring('x')
    assert ZZ.poly_ring('x','y').unify(ZZ) == ZZ.poly_ring('x','y')
    assert ZZ.poly_ring('x','y').unify(QQ) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x','y').unify(ZZ) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x','y').unify(QQ) == QQ.poly_ring('x','y')
    assert ZZ.unify(ZZ.poly_ring('x','y')) == ZZ.poly_ring('x','y')
    assert QQ.unify(ZZ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    assert ZZ.unify(QQ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    assert QQ.unify(QQ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    # Fraction field vs. ground domain: mixed ZZ/QQ grounds fall back to EX.
    assert ZZ.frac_field('x').unify(ZZ) == ZZ.frac_field('x')
    assert ZZ.frac_field('x').unify(QQ) == EX # QQ.frac_field('x')
    assert QQ.frac_field('x').unify(ZZ) == EX # QQ.frac_field('x')
    assert QQ.frac_field('x').unify(QQ) == QQ.frac_field('x')
    assert ZZ.unify(ZZ.frac_field('x')) == ZZ.frac_field('x')
    assert QQ.unify(ZZ.frac_field('x')) == EX # QQ.frac_field('x')
    assert ZZ.unify(QQ.frac_field('x')) == EX # QQ.frac_field('x')
    assert QQ.unify(QQ.frac_field('x')) == QQ.frac_field('x')
    assert ZZ.frac_field('x','y').unify(ZZ) == ZZ.frac_field('x','y')
    assert ZZ.frac_field('x','y').unify(QQ) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(ZZ) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(QQ) == QQ.frac_field('x','y')
    assert ZZ.unify(ZZ.frac_field('x','y')) == ZZ.frac_field('x','y')
    assert QQ.unify(ZZ.frac_field('x','y')) == EX # QQ.frac_field('x','y')
    assert ZZ.unify(QQ.frac_field('x','y')) == EX # QQ.frac_field('x','y')
    assert QQ.unify(QQ.frac_field('x','y')) == QQ.frac_field('x','y')
    # Polynomial ring vs. polynomial ring: generators are merged.
    assert ZZ.poly_ring('x').unify(ZZ.poly_ring('x')) == ZZ.poly_ring('x')
    assert ZZ.poly_ring('x').unify(QQ.poly_ring('x')) == QQ.poly_ring('x')
    assert QQ.poly_ring('x').unify(ZZ.poly_ring('x')) == QQ.poly_ring('x')
    assert QQ.poly_ring('x').unify(QQ.poly_ring('x')) == QQ.poly_ring('x')
    assert ZZ.poly_ring('x','y').unify(ZZ.poly_ring('x')) == ZZ.poly_ring('x','y')
    assert ZZ.poly_ring('x','y').unify(QQ.poly_ring('x')) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x','y').unify(ZZ.poly_ring('x')) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x','y').unify(QQ.poly_ring('x')) == QQ.poly_ring('x','y')
    assert ZZ.poly_ring('x').unify(ZZ.poly_ring('x','y')) == ZZ.poly_ring('x','y')
    assert ZZ.poly_ring('x').unify(QQ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x').unify(ZZ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    assert QQ.poly_ring('x').unify(QQ.poly_ring('x','y')) == QQ.poly_ring('x','y')
    assert ZZ.poly_ring('x','y').unify(ZZ.poly_ring('x','z')) == ZZ.poly_ring('x','y','z')
    assert ZZ.poly_ring('x','y').unify(QQ.poly_ring('x','z')) == QQ.poly_ring('x','y','z')
    assert QQ.poly_ring('x','y').unify(ZZ.poly_ring('x','z')) == QQ.poly_ring('x','y','z')
    assert QQ.poly_ring('x','y').unify(QQ.poly_ring('x','z')) == QQ.poly_ring('x','y','z')
    # Fraction field vs. fraction field: generators merged, grounds unified.
    assert ZZ.frac_field('x').unify(ZZ.frac_field('x')) == ZZ.frac_field('x')
    assert ZZ.frac_field('x').unify(QQ.frac_field('x')) == QQ.frac_field('x')
    assert QQ.frac_field('x').unify(ZZ.frac_field('x')) == QQ.frac_field('x')
    assert QQ.frac_field('x').unify(QQ.frac_field('x')) == QQ.frac_field('x')
    assert ZZ.frac_field('x','y').unify(ZZ.frac_field('x')) == ZZ.frac_field('x','y')
    assert ZZ.frac_field('x','y').unify(QQ.frac_field('x')) == QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(ZZ.frac_field('x')) == QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(QQ.frac_field('x')) == QQ.frac_field('x','y')
    assert ZZ.frac_field('x').unify(ZZ.frac_field('x','y')) == ZZ.frac_field('x','y')
    assert ZZ.frac_field('x').unify(QQ.frac_field('x','y')) == QQ.frac_field('x','y')
    assert QQ.frac_field('x').unify(ZZ.frac_field('x','y')) == QQ.frac_field('x','y')
    assert QQ.frac_field('x').unify(QQ.frac_field('x','y')) == QQ.frac_field('x','y')
    assert ZZ.frac_field('x','y').unify(ZZ.frac_field('x','z')) == ZZ.frac_field('x','y','z')
    assert ZZ.frac_field('x','y').unify(QQ.frac_field('x','z')) == QQ.frac_field('x','y','z')
    assert QQ.frac_field('x','y').unify(ZZ.frac_field('x','z')) == QQ.frac_field('x','y','z')
    assert QQ.frac_field('x','y').unify(QQ.frac_field('x','z')) == QQ.frac_field('x','y','z')
    # Polynomial ring vs. fraction field: the field wins; mixed grounds -> EX.
    assert ZZ.poly_ring('x').unify(ZZ.frac_field('x')) == ZZ.frac_field('x')
    assert ZZ.poly_ring('x').unify(QQ.frac_field('x')) == EX # QQ.frac_field('x')
    assert QQ.poly_ring('x').unify(ZZ.frac_field('x')) == EX # QQ.frac_field('x')
    assert QQ.poly_ring('x').unify(QQ.frac_field('x')) == QQ.frac_field('x')
    assert ZZ.poly_ring('x','y').unify(ZZ.frac_field('x')) == ZZ.frac_field('x','y')
    assert ZZ.poly_ring('x','y').unify(QQ.frac_field('x')) == EX # QQ.frac_field('x','y')
    assert QQ.poly_ring('x','y').unify(ZZ.frac_field('x')) == EX # QQ.frac_field('x','y')
    assert QQ.poly_ring('x','y').unify(QQ.frac_field('x')) == QQ.frac_field('x','y')
    assert ZZ.poly_ring('x').unify(ZZ.frac_field('x','y')) == ZZ.frac_field('x','y')
    assert ZZ.poly_ring('x').unify(QQ.frac_field('x','y')) == EX # QQ.frac_field('x','y')
    assert QQ.poly_ring('x').unify(ZZ.frac_field('x','y')) == EX # QQ.frac_field('x','y')
    assert QQ.poly_ring('x').unify(QQ.frac_field('x','y')) == QQ.frac_field('x','y')
    assert ZZ.poly_ring('x','y').unify(ZZ.frac_field('x','z')) == ZZ.frac_field('x','y','z')
    assert ZZ.poly_ring('x','y').unify(QQ.frac_field('x','z')) == EX # QQ.frac_field('x','y','z')
    assert QQ.poly_ring('x','y').unify(ZZ.frac_field('x','z')) == EX # QQ.frac_field('x','y','z')
    assert QQ.poly_ring('x','y').unify(QQ.frac_field('x','z')) == QQ.frac_field('x','y','z')
    # Fraction field vs. polynomial ring: symmetric to the block above.
    assert ZZ.frac_field('x').unify(ZZ.poly_ring('x')) == ZZ.frac_field('x')
    assert ZZ.frac_field('x').unify(QQ.poly_ring('x')) == EX # QQ.frac_field('x')
    assert QQ.frac_field('x').unify(ZZ.poly_ring('x')) == EX # QQ.frac_field('x')
    assert QQ.frac_field('x').unify(QQ.poly_ring('x')) == QQ.frac_field('x')
    assert ZZ.frac_field('x','y').unify(ZZ.poly_ring('x')) == ZZ.frac_field('x','y')
    assert ZZ.frac_field('x','y').unify(QQ.poly_ring('x')) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(ZZ.poly_ring('x')) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x','y').unify(QQ.poly_ring('x')) == QQ.frac_field('x','y')
    assert ZZ.frac_field('x').unify(ZZ.poly_ring('x','y')) == ZZ.frac_field('x','y')
    assert ZZ.frac_field('x').unify(QQ.poly_ring('x','y')) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x').unify(ZZ.poly_ring('x','y')) == EX # QQ.frac_field('x','y')
    assert QQ.frac_field('x').unify(QQ.poly_ring('x','y')) == QQ.frac_field('x','y')
    assert ZZ.frac_field('x','y').unify(ZZ.poly_ring('x','z')) == ZZ.frac_field('x','y','z')
    assert ZZ.frac_field('x','y').unify(QQ.poly_ring('x','z')) == EX # QQ.frac_field('x','y','z')
    assert QQ.frac_field('x','y').unify(ZZ.poly_ring('x','z')) == EX # QQ.frac_field('x','y','z')
    assert QQ.frac_field('x','y').unify(QQ.poly_ring('x','z')) == QQ.frac_field('x','y','z')
    # Explicit gens that don't cover the domain's generators must fail.
    raises(UnificationFailed, "ZZ.poly_ring('x','y').unify(ZZ, gens=('y', 'z'))")
    raises(UnificationFailed, "ZZ.unify(ZZ.poly_ring('x','y'), gens=('y', 'z'))")
def test_Algebra__contains__():
    """Check ``elt in domain`` membership for a spread of elements.

    Fixes the un-Pythonic ``(elt in D) == True/False`` comparisons: an
    ``assert elt in D`` / ``assert elt not in D`` states the same fact
    directly.  The membership table is unchanged from the original,
    expressed as (element, member-domains, non-member-domains) triples.
    """
    def _check(elt, members, non_members):
        # One-line helper: assert membership/non-membership per domain.
        for dom in members:
            assert elt in dom
        for dom in non_members:
            assert elt not in dom

    all_doms = [EX, ZZ, QQ, RR, ALG, ZZ[x,y], QQ[x,y], RR[x,y]]

    # Integers belong everywhere.
    _check(0, all_doms, [])
    _check(-7, all_doms, [])
    _check(17, all_doms, [])

    # Exact rationals: everywhere except the integer-based domains.
    _check(-S(1)/7, [EX, QQ, RR, ALG, QQ[x,y], RR[x,y]], [ZZ, ZZ[x,y]])
    _check(S(3)/5, [EX, QQ, RR, ALG, QQ[x,y], RR[x,y]], [ZZ, ZZ[x,y]])

    # Floats: an integral float is accepted everywhere, a fractional one
    # is rejected by ZZ-based domains.
    _check(3.0, all_doms, [])
    _check(3.14, [EX, QQ, RR, ALG, QQ[x,y], RR[x,y]], [ZZ, ZZ[x,y]])

    # Infinities are only representable in EX.
    _check(oo, [EX], [ZZ, QQ, RR, ALG, ZZ[x,y], QQ[x,y], RR[x,y]])
    _check(-oo, [EX], [ZZ, QQ, RR, ALG, ZZ[x,y], QQ[x,y], RR[x,y]])

    # Irrationals: RR-based domains accept them; ALG only accepts members
    # of QQ(sqrt(2)+sqrt(3)), which contains 2*sqrt(3)+1 but not sqrt(7).
    _check(sqrt(7), [EX, RR, RR[x,y]], [ZZ, QQ, ALG, ZZ[x,y], QQ[x,y]])
    _check(2*sqrt(3)+1, [EX, RR, ALG, RR[x,y]], [ZZ, QQ, ZZ[x,y], QQ[x,y]])
    _check(sin(1), [EX, RR, RR[x,y]], [ZZ, QQ, ALG, ZZ[x,y], QQ[x,y]])

    # Polynomials: need a ring containing all of their generators.
    _check(x**2 + 1,
           [EX, ZZ[x], QQ[x], RR[x], ZZ[x,y], QQ[x,y], RR[x,y]],
           [ZZ, QQ, RR, ALG])
    _check(x**2 + y**2,
           [EX, ZZ[x,y], QQ[x,y], RR[x,y]],
           [ZZ, QQ, RR, ALG, ZZ[x], QQ[x], RR[x]])
def test_Algebra_get_ring():
    """Each domain with an associated ring exposes it via ``get_ring()``.

    Fixes the un-Pythonic ``== True`` / ``== False`` boolean comparisons
    on ``has_assoc_Ring``; assertions now test the flag directly.
    """
    # Domains that do have an associated ring.
    for dom in (ZZ, QQ, ZZ[x], QQ[x], ZZ[x,y], QQ[x,y],
                ZZ.frac_field(x), QQ.frac_field(x),
                ZZ.frac_field(x,y), QQ.frac_field(x,y)):
        assert dom.has_assoc_Ring
    # Domains that do not.
    for dom in (EX, RR, ALG):
        assert not dom.has_assoc_Ring

    # get_ring() drops down to the ring of integers of the ground domain.
    assert ZZ.get_ring() == ZZ
    assert QQ.get_ring() == ZZ
    assert ZZ[x].get_ring() == ZZ[x]
    assert QQ[x].get_ring() == ZZ[x]
    assert ZZ[x,y].get_ring() == ZZ[x,y]
    assert QQ[x,y].get_ring() == ZZ[x,y]
    assert ZZ.frac_field(x).get_ring() == ZZ[x]
    assert QQ.frac_field(x).get_ring() == ZZ[x]
    assert ZZ.frac_field(x,y).get_ring() == ZZ[x,y]
    assert QQ.frac_field(x,y).get_ring() == ZZ[x,y]

    # Domains without an associated ring raise DomainError.
    raises(DomainError, "EX.get_ring()")
    raises(DomainError, "RR.get_ring()")
    raises(DomainError, "ALG.get_ring()")
def test_Algebra_get_field():
    """Each domain with an associated field exposes it via ``get_field()``.

    Fixes the un-Pythonic ``== True`` / ``== False`` boolean comparisons
    on ``has_assoc_Field``; assertions now test the flag directly.
    """
    # Domains that do have an associated field.
    for dom in (EX, ZZ, QQ, ALG, ZZ[x], QQ[x], ZZ[x,y], QQ[x,y]):
        assert dom.has_assoc_Field
    # RR has no exact associated field.
    assert not RR.has_assoc_Field

    assert EX.get_field() == EX
    assert ZZ.get_field() == QQ
    assert QQ.get_field() == QQ
    assert ALG.get_field() == ALG
    # Polynomial rings lift to fraction fields over ZZ.
    assert ZZ[x].get_field() == ZZ.frac_field(x)
    assert QQ[x].get_field() == ZZ.frac_field(x)
    assert ZZ[x,y].get_field() == ZZ.frac_field(x,y)
    assert QQ[x,y].get_field() == ZZ.frac_field(x,y)

    raises(DomainError, "RR.get_field()")
def test_Algebra_get_exact():
    """get_exact() maps every domain to its exact counterpart (RR -> QQ)."""
    expected = [
        (EX, EX),
        (ZZ, ZZ),
        (QQ, QQ),
        (RR, QQ),
        (ALG, ALG),
        (ZZ[x], ZZ[x]),
        (QQ[x], QQ[x]),
        (ZZ[x,y], ZZ[x,y]),
        (QQ[x,y], QQ[x,y]),
        (ZZ.frac_field(x), ZZ.frac_field(x)),
        (QQ.frac_field(x), QQ.frac_field(x)),
        (ZZ.frac_field(x,y), ZZ.frac_field(x,y)),
        (QQ.frac_field(x,y), QQ.frac_field(x,y)),
    ]
    for dom, exact in expected:
        assert dom.get_exact() == exact
def test_PolynomialRing__init():
    """A polynomial ring cannot be constructed without generators."""
    raises(GeneratorsNeeded, "ZZ.poly_ring()")
def test_FractionField__init():
    """A fraction field cannot be constructed without generators."""
    raises(GeneratorsNeeded, "ZZ.frac_field()")
|
|
"""Methods and classes related to executing Z-Wave commands and publishing these to hass."""
from __future__ import annotations
import asyncio
import logging
from typing import Any
import voluptuous as vol
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.const import CommandStatus
from zwave_js_server.exceptions import SetValueFailed
from zwave_js_server.model.node import Node as ZwaveNode
from zwave_js_server.model.value import get_value_id
from zwave_js_server.util.multicast import async_multicast_set_value
from zwave_js_server.util.node import (
async_bulk_set_partial_config_parameters,
async_set_config_parameter,
)
from homeassistant.components.group import expand_entity_ids
from homeassistant.const import ATTR_AREA_ID, ATTR_DEVICE_ID, ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from . import const
from .helpers import (
async_get_node_from_device_id,
async_get_node_from_entity_id,
async_get_nodes_from_area_id,
)
_LOGGER = logging.getLogger(__name__)
def parameter_name_does_not_need_bitmask(
    val: dict[str, int | str | list[str]]
) -> dict[str, int | str | list[str]]:
    """Validate that if a parameter name is provided, bitmask is not as well."""
    # A string config parameter is a parameter *name*; a bitmask only makes
    # sense together with a numeric parameter id.
    param_is_named = isinstance(val[const.ATTR_CONFIG_PARAMETER], str)
    if param_is_named and const.ATTR_CONFIG_PARAMETER_BITMASK in val:
        raise vol.Invalid(
            "Don't include a bitmask when a parameter name is specified",
            path=[const.ATTR_CONFIG_PARAMETER, const.ATTR_CONFIG_PARAMETER_BITMASK],
        )
    return val
def broadcast_command(val: dict[str, Any]) -> dict[str, Any]:
    """Validate that the service call is for a broadcast command."""
    # Guard clause: anything that is not an explicit broadcast is invalid here.
    if not val.get(const.ATTR_BROADCAST):
        raise vol.Invalid(
            "Either `broadcast` must be set to True or multiple devices/entities must be "
            "specified"
        )
    return val
class ZWaveServices:
    """Class that holds our services (Zwave Commands) that should be published to hass."""

    def __init__(
        self,
        hass: HomeAssistant,
        ent_reg: er.EntityRegistry,
        dev_reg: dr.DeviceRegistry,
    ) -> None:
        """Initialize with hass object."""
        self._hass = hass
        self._ent_reg = ent_reg
        self._dev_reg = dev_reg

    @callback
    def async_register(self) -> None:
        """Register all our services."""

        @callback
        def get_nodes_from_service_data(val: dict[str, Any]) -> dict[str, Any]:
            """Get nodes set from service data.

            Consumes (pops) entity/area/device IDs from the validated call data
            and replaces them with a resolved ``const.ATTR_NODES`` set.
            """
            nodes: set[ZwaveNode] = set()
            # Convert all entity IDs to nodes
            for entity_id in expand_entity_ids(self._hass, val.pop(ATTR_ENTITY_ID, [])):
                try:
                    nodes.add(
                        async_get_node_from_entity_id(
                            self._hass, entity_id, self._ent_reg, self._dev_reg
                        )
                    )
                except ValueError as err:
                    # Unresolvable entities are skipped with a warning rather
                    # than failing the whole service call.
                    const.LOGGER.warning(err.args[0])
            # Convert all area IDs to nodes
            for area_id in val.pop(ATTR_AREA_ID, []):
                nodes.update(
                    async_get_nodes_from_area_id(
                        self._hass, area_id, self._ent_reg, self._dev_reg
                    )
                )
            # Convert all device IDs to nodes
            for device_id in val.pop(ATTR_DEVICE_ID, []):
                try:
                    nodes.add(
                        async_get_node_from_device_id(
                            self._hass, device_id, self._dev_reg
                        )
                    )
                except ValueError as err:
                    const.LOGGER.warning(err.args[0])
            val[const.ATTR_NODES] = nodes
            return val

        @callback
        def validate_multicast_nodes(val: dict[str, Any]) -> dict[str, Any]:
            """Validate the input nodes for multicast.

            Must run after ``get_nodes_from_service_data`` so that
            ``const.ATTR_NODES`` is populated.
            """
            nodes: set[ZwaveNode] = val[const.ATTR_NODES]
            broadcast: bool = val[const.ATTR_BROADCAST]
            # User must specify a node if they are attempting a broadcast and have more
            # than one zwave-js network.
            if (
                broadcast
                and not nodes
                and len(self._hass.config_entries.async_entries(const.DOMAIN)) > 1
            ):
                raise vol.Invalid(
                    "You must include at least one entity or device in the service call"
                )
            first_node = next((node for node in nodes), None)
            # If any nodes don't have matching home IDs, we can't run the command because
            # we can't multicast across multiple networks
            if first_node and any(
                node.client.driver.controller.home_id
                != first_node.client.driver.controller.home_id
                for node in nodes
            ):
                raise vol.Invalid(
                    "Multicast commands only work on devices in the same network"
                )
            return val

        @callback
        def validate_entities(val: dict[str, Any]) -> dict[str, Any]:
            """Validate entities exist and are from the zwave_js platform."""
            val[ATTR_ENTITY_ID] = expand_entity_ids(self._hass, val[ATTR_ENTITY_ID])
            for entity_id in val[ATTR_ENTITY_ID]:
                entry = self._ent_reg.async_get(entity_id)
                if entry is None or entry.platform != const.DOMAIN:
                    raise vol.Invalid(
                        f"Entity {entity_id} is not a valid {const.DOMAIN} entity."
                    )
            return val

        # zwave_js.set_config_parameter: parameter may be given by id or name;
        # a bitmask is only allowed alongside a numeric id.
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_SET_CONFIG_PARAMETER,
            self.async_set_config_parameter,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Optional(ATTR_AREA_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_DEVICE_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
                        vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Any(
                            vol.Coerce(int), cv.string
                        ),
                        vol.Optional(const.ATTR_CONFIG_PARAMETER_BITMASK): vol.Any(
                            vol.Coerce(int), const.BITMASK_SCHEMA
                        ),
                        vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(
                            vol.Coerce(int), const.BITMASK_SCHEMA, cv.string
                        ),
                    },
                    cv.has_at_least_one_key(
                        ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID
                    ),
                    parameter_name_does_not_need_bitmask,
                    get_nodes_from_service_data,
                ),
            ),
        )

        # zwave_js.bulk_set_partial_config_parameters: value is either a raw
        # int or a mapping of partial parameter (id/bitmask/name) -> value.
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_BULK_SET_PARTIAL_CONFIG_PARAMETERS,
            self.async_bulk_set_partial_config_parameters,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Optional(ATTR_AREA_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_DEVICE_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
                        vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int),
                        vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(
                            vol.Coerce(int),
                            {
                                vol.Any(
                                    vol.Coerce(int), const.BITMASK_SCHEMA, cv.string
                                ): vol.Any(
                                    vol.Coerce(int), const.BITMASK_SCHEMA, cv.string
                                )
                            },
                        ),
                    },
                    cv.has_at_least_one_key(
                        ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID
                    ),
                    get_nodes_from_service_data,
                ),
            ),
        )

        # zwave_js.refresh_value: entity-based, so it validates entities
        # instead of resolving nodes.
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_REFRESH_VALUE,
            self.async_poll_value,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
                        vol.Optional(
                            const.ATTR_REFRESH_ALL_VALUES, default=False
                        ): cv.boolean,
                    },
                    validate_entities,
                )
            ),
        )

        # zwave_js.set_value: set a single Z-Wave value on each targeted node.
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_SET_VALUE,
            self.async_set_value,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Optional(ATTR_AREA_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_DEVICE_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
                        vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int),
                        vol.Required(const.ATTR_PROPERTY): vol.Any(
                            vol.Coerce(int), str
                        ),
                        vol.Optional(const.ATTR_PROPERTY_KEY): vol.Any(
                            vol.Coerce(int), str
                        ),
                        vol.Optional(const.ATTR_ENDPOINT): vol.Coerce(int),
                        vol.Required(const.ATTR_VALUE): const.VALUE_SCHEMA,
                        vol.Optional(const.ATTR_WAIT_FOR_RESULT): cv.boolean,
                        vol.Optional(const.ATTR_OPTIONS): {
                            cv.string: const.VALUE_SCHEMA
                        },
                    },
                    cv.has_at_least_one_key(
                        ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID
                    ),
                    get_nodes_from_service_data,
                ),
            ),
        )

        # zwave_js.multicast_set_value: targets may be omitted only when
        # `broadcast` is set (see broadcast_command validator).
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_MULTICAST_SET_VALUE,
            self.async_multicast_set_value,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Optional(ATTR_AREA_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_DEVICE_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
                        vol.Optional(const.ATTR_BROADCAST, default=False): cv.boolean,
                        vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int),
                        vol.Required(const.ATTR_PROPERTY): vol.Any(
                            vol.Coerce(int), str
                        ),
                        vol.Optional(const.ATTR_PROPERTY_KEY): vol.Any(
                            vol.Coerce(int), str
                        ),
                        vol.Optional(const.ATTR_ENDPOINT): vol.Coerce(int),
                        vol.Required(const.ATTR_VALUE): const.VALUE_SCHEMA,
                        vol.Optional(const.ATTR_OPTIONS): {
                            cv.string: const.VALUE_SCHEMA
                        },
                    },
                    vol.Any(
                        cv.has_at_least_one_key(
                            ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID
                        ),
                        broadcast_command,
                    ),
                    get_nodes_from_service_data,
                    validate_multicast_nodes,
                ),
            ),
        )

        # zwave_js.ping (deprecated; see async_ping below).
        self._hass.services.async_register(
            const.DOMAIN,
            const.SERVICE_PING,
            self.async_ping,
            schema=vol.Schema(
                vol.All(
                    {
                        vol.Optional(ATTR_AREA_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_DEVICE_ID): vol.All(
                            cv.ensure_list, [cv.string]
                        ),
                        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
                    },
                    cv.has_at_least_one_key(
                        ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID
                    ),
                    get_nodes_from_service_data,
                ),
            ),
        )

    async def async_set_config_parameter(self, service: ServiceCall) -> None:
        """Set a config value on a node."""
        # pylint: disable=no-self-use
        nodes = service.data[const.ATTR_NODES]
        property_or_property_name = service.data[const.ATTR_CONFIG_PARAMETER]
        property_key = service.data.get(const.ATTR_CONFIG_PARAMETER_BITMASK)
        new_value = service.data[const.ATTR_CONFIG_VALUE]
        for node in nodes:
            # Calls the module-level helper imported from zwave_js_server, not
            # this method (the method name is only a class attribute).
            zwave_value, cmd_status = await async_set_config_parameter(
                node,
                new_value,
                property_or_property_name,
                property_key=property_key,
            )
            if cmd_status == CommandStatus.ACCEPTED:
                msg = "Set configuration parameter %s on Node %s with value %s"
            else:
                # Queued for a sleeping (battery) device.
                msg = (
                    "Added command to queue to set configuration parameter %s on Node "
                    "%s with value %s. Parameter will be set when the device wakes up"
                )
            _LOGGER.info(msg, zwave_value, node, new_value)

    async def async_bulk_set_partial_config_parameters(
        self, service: ServiceCall
    ) -> None:
        """Bulk set multiple partial config values on a node."""
        # pylint: disable=no-self-use
        nodes = service.data[const.ATTR_NODES]
        property_ = service.data[const.ATTR_CONFIG_PARAMETER]
        new_value = service.data[const.ATTR_CONFIG_VALUE]
        for node in nodes:
            cmd_status = await async_bulk_set_partial_config_parameters(
                node,
                property_,
                new_value,
            )

            if cmd_status == CommandStatus.ACCEPTED:
                msg = "Bulk set partials for configuration parameter %s on Node %s"
            else:
                msg = (
                    "Added command to queue to bulk set partials for configuration "
                    "parameter %s on Node %s"
                )

            _LOGGER.info(msg, property_, node)

    async def async_poll_value(self, service: ServiceCall) -> None:
        """Poll value on a node."""
        for entity_id in service.data[ATTR_ENTITY_ID]:
            entry = self._ent_reg.async_get(entity_id)
            assert entry  # Schema validation would have failed if we can't do this
            # Hand off to the entity via the per-entity dispatcher signal.
            async_dispatcher_send(
                self._hass,
                f"{const.DOMAIN}_{entry.unique_id}_poll_value",
                service.data[const.ATTR_REFRESH_ALL_VALUES],
            )

    async def async_set_value(self, service: ServiceCall) -> None:
        """Set a value on a node."""
        # pylint: disable=no-self-use
        nodes: set[ZwaveNode] = service.data[const.ATTR_NODES]
        command_class = service.data[const.ATTR_COMMAND_CLASS]
        property_ = service.data[const.ATTR_PROPERTY]
        property_key = service.data.get(const.ATTR_PROPERTY_KEY)
        endpoint = service.data.get(const.ATTR_ENDPOINT)
        new_value = service.data[const.ATTR_VALUE]
        wait_for_result = service.data.get(const.ATTR_WAIT_FOR_RESULT)
        options = service.data.get(const.ATTR_OPTIONS)

        for node in nodes:
            value_id = get_value_id(
                node,
                command_class,
                property_,
                endpoint=endpoint,
                property_key=property_key,
            )
            # If value has a string type but the new value is not a string, we need to
            # convert it to one. We use new variable `new_value_` to convert the data
            # so we can preserve the original `new_value` for every node.
            if (
                value_id in node.values
                and node.values[value_id].metadata.type == "string"
                and not isinstance(new_value, str)
            ):
                new_value_ = str(new_value)
            else:
                new_value_ = new_value
            success = await node.async_set_value(
                value_id,
                new_value_,
                options=options,
                wait_for_result=wait_for_result,
            )

            if success is False:
                raise SetValueFailed(
                    "Unable to set value, refer to "
                    "https://zwave-js.github.io/node-zwave-js/#/api/node?id=setvalue "
                    "for possible reasons"
                )

    async def async_multicast_set_value(self, service: ServiceCall) -> None:
        """Set a value via multicast to multiple nodes."""
        nodes = service.data[const.ATTR_NODES]
        broadcast: bool = service.data[const.ATTR_BROADCAST]
        options = service.data.get(const.ATTR_OPTIONS)

        if not broadcast and len(nodes) == 1:
            const.LOGGER.info(
                "Passing the zwave_js.multicast_set_value service call to the "
                "zwave_js.set_value service since only one node was targeted"
            )
            await self.async_set_value(service)
            return

        command_class = service.data[const.ATTR_COMMAND_CLASS]
        property_ = service.data[const.ATTR_PROPERTY]
        property_key = service.data.get(const.ATTR_PROPERTY_KEY)
        endpoint = service.data.get(const.ATTR_ENDPOINT)

        value = {
            "commandClass": command_class,
            "property": property_,
            "propertyKey": property_key,
            "endpoint": endpoint,
        }
        new_value = service.data[const.ATTR_VALUE]

        # If there are no nodes, we can assume there is only one config entry due to
        # schema validation and can use that to get the client, otherwise we can just
        # get the client from the node.
        client: ZwaveClient | None = None
        first_node: ZwaveNode | None = next((node for node in nodes), None)
        if first_node:
            client = first_node.client
        else:
            entry_id = self._hass.config_entries.async_entries(const.DOMAIN)[0].entry_id
            client = self._hass.data[const.DOMAIN][entry_id][const.DATA_CLIENT]
            # Pick any node on the network that knows this value so we can
            # inspect the value's metadata below.
            first_node = next(
                node
                for node in client.driver.controller.nodes.values()
                if get_value_id(node, command_class, property_, endpoint, property_key)
                in node.values
            )

        # If value has a string type but the new value is not a string, we need to
        # convert it to one
        value_id = get_value_id(
            first_node, command_class, property_, endpoint, property_key
        )
        if (
            value_id in first_node.values
            and first_node.values[value_id].metadata.type == "string"
            and not isinstance(new_value, str)
        ):
            new_value = str(new_value)

        success = await async_multicast_set_value(
            client=client,
            new_value=new_value,
            value_data={k: v for k, v in value.items() if v is not None},
            nodes=None if broadcast else list(nodes),
            options=options,
        )

        if success is False:
            raise SetValueFailed("Unable to set value via multicast")

    async def async_ping(self, service: ServiceCall) -> None:
        """Ping node(s)."""
        # pylint: disable=no-self-use
        const.LOGGER.warning(
            "This service is deprecated in favor of the ping button entity. Service "
            "calls will still work for now but the service will be removed in a "
            "future release"
        )
        nodes: set[ZwaveNode] = service.data[const.ATTR_NODES]
        # Ping all targeted nodes concurrently.
        await asyncio.gather(*(node.async_ping() for node in nodes))
|
|
# -*- coding: utf-8 -*-
from urlparse import urlparse
from nose.tools import * # flake8: noqa
from framework.auth.core import Auth
from website.models import NodeLog
from website.views import find_dashboard
from website.util import permissions
from website.util.sanitize import strip_html
from api.base.settings.defaults import API_BASE
from api_tests import utils as test_utils
from tests.base import ApiTestCase, fake
from tests.factories import (
NodeFactory,
ProjectFactory,
RegistrationFactory,
AuthUserFactory,
FolderFactory,
CommentFactory,
RetractedRegistrationFactory
)
from tests.utils import assert_logs, assert_not_logs
class TestNodeDetail(ApiTestCase):
    def setUp(self):
        # Fixtures shared by the TestNodeDetail cases: one public and one
        # private project owned by self.user, plus a second unrelated user
        # and a public component nested under the public project.
        super(TestNodeDetail, self).setUp()
        self.user = AuthUserFactory()
        self.user_two = AuthUserFactory()

        self.public_project = ProjectFactory(title="Project One", is_public=True, creator=self.user)
        self.private_project = ProjectFactory(title="Project Two", is_public=False, creator=self.user)
        self.public_url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        self.private_url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)

        self.public_component = NodeFactory(parent=self.public_project, creator=self.user, is_public=True)
        self.public_component_url = '/{}nodes/{}/'.format(API_BASE, self.public_component._id)
        # Permission lists used to assert `current_user_permissions` payloads.
        self.read_permissions = ['read']
        self.write_permissions = ['read', 'write']
        self.admin_permissions = ['read', 'admin', 'write']
def test_return_public_project_details_logged_out(self):
res = self.app.get(self.public_url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
assert_equal(res.json['data']['attributes']['title'], self.public_project.title)
assert_equal(res.json['data']['attributes']['description'], self.public_project.description)
assert_equal(res.json['data']['attributes']['category'], self.public_project.category)
assert_items_equal(res.json['data']['attributes']['current_user_permissions'], self.read_permissions)
def test_return_public_project_details_contributor_logged_in(self):
res = self.app.get(self.public_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
assert_equal(res.json['data']['attributes']['title'], self.public_project.title)
assert_equal(res.json['data']['attributes']['description'], self.public_project.description)
assert_equal(res.json['data']['attributes']['category'], self.public_project.category)
assert_items_equal(res.json['data']['attributes']['current_user_permissions'], self.admin_permissions)
def test_return_public_project_details_non_contributor_logged_in(self):
res = self.app.get(self.public_url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
assert_equal(res.json['data']['attributes']['title'], self.public_project.title)
assert_equal(res.json['data']['attributes']['description'], self.public_project.description)
assert_equal(res.json['data']['attributes']['category'], self.public_project.category)
assert_items_equal(res.json['data']['attributes']['current_user_permissions'], self.read_permissions)
def test_return_private_project_details_logged_out(self):
    # Private projects are not visible anonymously: 401, with an error detail.
    response = self.app.get(self.private_url, expect_errors=True)
    assert_equal(response.status_code, 401)
    assert_in('detail', response.json['errors'][0])
def test_return_private_project_details_logged_in_admin_contributor(self):
    # The admin creator can read a private project, with admin permissions.
    res = self.app.get(self.private_url, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    attributes = res.json['data']['attributes']
    assert_equal(attributes['title'], self.private_project.title)
    assert_equal(attributes['description'], self.private_project.description)
    assert_equal(attributes['category'], self.private_project.category)
    assert_items_equal(attributes['current_user_permissions'], self.admin_permissions)
def test_return_private_project_details_logged_in_write_contributor(self):
    # A freshly added (default: write) contributor can read the private project.
    self.private_project.add_contributor(contributor=self.user_two, auth=Auth(self.user), save=True)
    res = self.app.get(self.private_url, auth=self.user_two.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    attributes = res.json['data']['attributes']
    assert_equal(attributes['title'], self.private_project.title)
    assert_equal(attributes['description'], self.private_project.description)
    assert_equal(attributes['category'], self.private_project.category)
    assert_items_equal(attributes['current_user_permissions'], self.write_permissions)
def test_return_private_project_details_logged_in_non_contributor(self):
    # Logged in but not a contributor: access to a private project is forbidden.
    response = self.app.get(self.private_url, auth=self.user_two.auth, expect_errors=True)
    assert_equal(response.status_code, 403)
    assert_in('detail', response.json['errors'][0])
def test_top_level_project_has_no_parent(self):
    # A root project exposes no 'parent' relationship at all.
    response = self.app.get(self.public_url)
    assert_equal(response.status_code, 200)
    assert_equal(response.content_type, 'application/vnd.api+json')
    assert_not_in('parent', response.json['data']['relationships'].keys())
def test_child_project_has_parent(self):
    # A component's 'parent' relationship links back to its parent project.
    component = NodeFactory(parent=self.public_project, creator=self.user, is_public=True)
    component_url = '/{}nodes/{}/'.format(API_BASE, component._id)
    res = self.app.get(component_url)
    assert_equal(res.status_code, 200)
    parent_href = res.json['data']['relationships']['parent']['links']['related']['href']
    assert_equal(urlparse(parent_href).path, self.public_url)
def test_node_has_children_link(self):
    # The node detail payload exposes a related link to its children list.
    res = self.app.get(self.public_url)
    related_href = res.json['data']['relationships']['children']['links']['related']['href']
    assert_equal(urlparse(related_href).path, self.public_url + 'children/')
def test_node_has_contributors_link(self):
    # The node detail payload exposes a related link to its contributors list.
    res = self.app.get(self.public_url)
    related_href = res.json['data']['relationships']['contributors']['links']['related']['href']
    assert_equal(urlparse(related_href).path, self.public_url + 'contributors/')
def test_node_has_node_links_link(self):
    # The node detail payload exposes a related link to its node links.
    res = self.app.get(self.public_url)
    related_href = res.json['data']['relationships']['node_links']['links']['related']['href']
    assert_equal(urlparse(related_href).path, self.public_url + 'node_links/')
def test_node_has_registrations_link(self):
    # The node detail payload exposes a related link to its registrations.
    res = self.app.get(self.public_url)
    related_href = res.json['data']['relationships']['registrations']['links']['related']['href']
    assert_equal(urlparse(related_href).path, self.public_url + 'registrations/')
def test_node_has_files_link(self):
    # The node detail payload exposes a related link to its files.
    res = self.app.get(self.public_url)
    related_href = res.json['data']['relationships']['files']['links']['related']['href']
    assert_equal(urlparse(related_href).path, self.public_url + 'files/')
def test_node_has_comments_link(self):
    # The comments relationship is present on the node detail payload.
    response = self.app.get(self.public_url)
    assert_equal(response.status_code, 200)
    assert_in('comments', response.json['data']['relationships'].keys())
def test_node_has_correct_unread_comments_count(self):
    # Another contributor leaves a comment; the creator should see one unread.
    contributor = AuthUserFactory()
    self.public_project.add_contributor(contributor=contributor, auth=Auth(self.user), save=True)
    comment = CommentFactory(node=self.public_project, user=contributor, page='node')
    res = self.app.get(self.public_url + '?related_counts=True', auth=self.user.auth)
    unread = res.json['data']['relationships']['comments']['links']['related']['meta']['unread']
    assert_equal(unread['node'], 1)
def test_node_properties(self):
    # Sanity-check the boolean flags and tag list of a plain public project.
    res = self.app.get(self.public_url)
    attributes = res.json['data']['attributes']
    assert_equal(attributes['public'], True)
    assert_equal(attributes['registration'], False)
    assert_equal(attributes['collection'], False)
    assert_equal(attributes['dashboard'], False)
    assert_equal(attributes['tags'], [])
def test_requesting_folder_returns_error(self):
    # Folders are not nodes as far as this endpoint is concerned: 404.
    folder = NodeFactory(is_folder=True, creator=self.user)
    folder_url = '/{}nodes/{}/'.format(API_BASE, folder._id)
    res = self.app.get(folder_url, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 404)
def test_can_not_return_registrations_at_node_detail_endpoint(self):
    # Registrations have their own endpoint; node detail must 404 for them.
    registration = RegistrationFactory(project=self.public_project, creator=self.user)
    registration_url = '/{}nodes/{}/'.format(API_BASE, registration._id)
    res = self.app.get(registration_url, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 404)
def test_cannot_return_folder_at_node_detail_endpoint(self):
    # A folder created via FolderFactory is likewise rejected with 404.
    folder = FolderFactory(creator=self.user)
    folder_url = '/{}nodes/{}/'.format(API_BASE, folder._id)
    res = self.app.get(folder_url, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 404)
def test_cannot_return_a_retraction_at_node_detail_endpoint(self):
    """A retracted registration is not served at the node detail endpoint."""
    registration = RegistrationFactory(creator=self.user, project=self.public_project)
    url = '/{}nodes/{}/'.format(API_BASE, registration._id)
    # The factory call is kept for its side effect (retracting the
    # registration); its return value was previously bound to an unused
    # local, so the binding has been dropped.
    RetractedRegistrationFactory(registration=registration, user=registration.creator)
    res = self.app.get(url, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 404)
class NodeCRUDTestCase(ApiTestCase):
    """Shared fixtures for node create/read/update/delete tests.

    Provides two users, one public and one private project (both owned by
    ``self.user``), their detail URLs, and a URL for a nonexistent node.
    """

    def setUp(self):
        super(NodeCRUDTestCase, self).setUp()
        self.user = AuthUserFactory()
        self.user_two = AuthUserFactory()

        # Original vs. updated field values used across the update tests.
        self.title = 'Cool Project'
        self.new_title = 'Super Cool Project'
        self.description = 'A Properly Cool Project'
        self.new_description = 'An even cooler project'
        self.category = 'data'
        self.new_category = 'project'

        self.public_project = ProjectFactory(
            title=self.title,
            description=self.description,
            category=self.category,
            is_public=True,
            creator=self.user,
        )
        self.private_project = ProjectFactory(
            title=self.title,
            description=self.description,
            category=self.category,
            is_public=False,
            creator=self.user,
        )
        self.public_url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        self.private_url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        # Deliberately bogus id for not-found tests.
        self.fake_url = '/{}nodes/{}/'.format(API_BASE, '12345')
def make_node_payload(node, attributes):
    """Build a JSON-API request body targeting *node* with the given attributes."""
    resource = {'id': node._id, 'type': 'nodes', 'attributes': attributes}
    return {'data': resource}
class TestNodeUpdate(NodeCRUDTestCase):

    def test_node_update_invalid_data(self):
        # Non-object payloads (a bare string or a list) are malformed requests.
        for bad_payload in ("Incorrect data", ["Incorrect data"]):
            res = self.app.put_json_api(self.public_url, bad_payload, auth=self.user.auth, expect_errors=True)
            assert_equal(res.status_code, 400)
            assert_equal(res.json['errors'][0]['detail'], "Malformed request.")
@assert_not_logs(NodeLog.MADE_PUBLIC, 'private_project')
def test_cannot_make_project_public_if_non_contributor(self):
    # A user with no relationship to the project cannot make it public.
    non_contrib = AuthUserFactory()
    payload = make_node_payload(self.private_project, {'public': True})
    res = self.app.patch_json(self.private_url, payload, auth=non_contrib.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
def test_cannot_make_project_public_if_non_admin_contributor(self):
    # Read/write (non-admin) contributors may not change visibility.
    non_admin = AuthUserFactory()
    self.private_project.add_contributor(
        non_admin,
        permissions=(permissions.READ, permissions.WRITE),
        auth=Auth(self.private_project.creator)
    )
    self.private_project.save()

    payload = make_node_payload(self.private_project, {'public': True})
    res = self.app.patch_json(self.private_url, payload, auth=non_admin.auth, expect_errors=True)
    assert_equal(res.status_code, 403)

    self.private_project.reload()
    assert_false(self.private_project.is_public)
@assert_logs(NodeLog.MADE_PUBLIC, 'private_project')
def test_can_make_project_public_if_admin_contributor(self):
    # An added admin contributor (not just the creator) can make it public.
    admin_user = AuthUserFactory()
    self.private_project.add_contributor(
        admin_user,
        permissions=(permissions.READ, permissions.WRITE, permissions.ADMIN),
        auth=Auth(self.private_project.creator)
    )
    self.private_project.save()

    payload = make_node_payload(self.private_project, {'public': True})
    res = self.app.patch_json_api(self.private_url, payload, auth=admin_user.auth)  # self.user is creator/admin
    assert_equal(res.status_code, 200)

    self.private_project.reload()
    assert_true(self.private_project.is_public)
def test_update_project_properties_not_nested(self):
    # Attributes at the top level (outside 'data') must be rejected.
    flat_payload = {
        'id': self.public_project._id,
        'type': 'nodes',
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': True,
    }
    res = self.app.put_json_api(self.public_url, flat_payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    error = res.json['errors'][0]
    assert_equal(error['detail'], 'Request must include /data.')
    assert_equal(error['source']['pointer'], '/data')
def test_update_invalid_id(self):
    # A payload id that does not match the URL's node yields 409 Conflict.
    payload = {
        'data': {
            'id': '12345',
            'type': 'nodes',
            'attributes': {
                'title': self.new_title,
                'description': self.new_description,
                'category': self.new_category,
                'public': True,
            }
        }
    }
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 409)
def test_update_invalid_type(self):
    # A wrong resource type ('node' instead of 'nodes') yields 409 Conflict.
    payload = {
        'data': {
            'id': self.public_project._id,
            'type': 'node',
            'attributes': {
                'title': self.new_title,
                'description': self.new_description,
                'category': self.new_category,
                'public': True,
            }
        }
    }
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 409)
def test_update_no_id(self):
    # Omitting the resource id is a validation error pointing at /data/id.
    payload = {
        'data': {
            'type': 'nodes',
            'attributes': {
                'title': self.new_title,
                'description': self.new_description,
                'category': self.new_category,
                'public': True,
            }
        }
    }
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    error = res.json['errors'][0]
    assert_equal(error['detail'], 'This field may not be null.')
    assert_equal(error['source']['pointer'], '/data/id')
def test_update_no_type(self):
    # Omitting the resource type is a validation error pointing at /data/type.
    payload = {
        'data': {
            'id': self.public_project._id,
            'attributes': {
                'title': self.new_title,
                'description': self.new_description,
                'category': self.new_category,
                'public': True,
            }
        }
    }
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    error = res.json['errors'][0]
    assert_equal(error['detail'], 'This field may not be null.')
    assert_equal(error['source']['pointer'], '/data/type')
def test_update_public_project_logged_out(self):
    # Even a public project cannot be updated anonymously.
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': True,
    }
    payload = make_node_payload(self.public_project, attributes)
    res = self.app.put_json_api(self.public_url, payload, expect_errors=True)
    assert_equal(res.status_code, 401)
    assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.UPDATED_FIELDS, 'public_project')
def test_update_public_project_logged_in(self):
    # The admin creator can update title/description/category in one PUT.
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': True,
    }
    payload = make_node_payload(self.public_project, attributes)
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    updated = res.json['data']['attributes']
    assert_equal(updated['title'], self.new_title)
    assert_equal(updated['description'], self.new_description)
    assert_equal(updated['category'], self.new_category)
def test_update_public_project_logged_in_but_unauthorized(self):
    # A logged-in non-contributor may not update a public project.
    # NOTE(review): the payload id is the *private* project's while the URL
    # targets the public project -- confirm this mismatch is intentional
    # (the test expects the permission failure, 403, not an id conflict).
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': True,
    }
    payload = make_node_payload(self.private_project, attributes)
    res = self.app.put_json_api(self.public_url, payload, auth=self.user_two.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
    assert_in('detail', res.json['errors'][0])
def test_cannot_update_a_registration(self):
    # Registrations are immutable through the node endpoint: 404 and no change.
    registration = RegistrationFactory(project=self.public_project, creator=self.user)
    original_title = registration.title
    original_description = registration.description
    url = '/{}nodes/{}/'.format(API_BASE, registration._id)
    attributes = {
        'title': fake.catch_phrase(),
        'description': fake.bs(),
        'category': 'hypothesis',
        'public': True,
    }
    payload = make_node_payload(registration, attributes)
    res = self.app.put_json_api(url, payload, auth=self.user.auth, expect_errors=True)
    registration.reload()
    assert_equal(res.status_code, 404)
    assert_equal(registration.title, original_title)
    assert_equal(registration.description, original_description)
def test_cannot_update_a_retraction(self):
    """PUT against a retracted registration 404s and leaves its fields untouched."""
    registration = RegistrationFactory(creator=self.user, project=self.public_project)
    url = '/{}nodes/{}/'.format(API_BASE, registration._id)
    # Kept for its side effect (retracting the registration); the returned
    # object was previously bound to an unused local.
    RetractedRegistrationFactory(registration=registration, user=registration.creator)
    # Capture pre-request values so the post-request assertions are meaningful.
    original_title = registration.title
    original_description = registration.description
    res = self.app.put_json_api(url, {
        'data': {
            'id': registration._id,
            'type': 'nodes',
            'attributes': {
                'title': fake.catch_phrase(),
                'description': fake.bs(),
                'category': 'hypothesis',
                'public': True
            }
        }
    }, auth=self.user.auth, expect_errors=True)
    registration.reload()
    assert_equal(res.status_code, 404)
    # Bug fix: the old assertions compared registration.title (and
    # description) to themselves after reload, so they could never fail.
    assert_equal(registration.title, original_title)
    assert_equal(registration.description, original_description)
def test_update_private_project_logged_out(self):
    # Anonymous updates to a private project are rejected with 401.
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': False,
    }
    payload = make_node_payload(self.private_project, attributes)
    res = self.app.put_json_api(self.private_url, payload, expect_errors=True)
    assert_equal(res.status_code, 401)
    assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.UPDATED_FIELDS, 'private_project')
def test_update_private_project_logged_in_contributor(self):
    # The admin contributor can update the private project's fields.
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': False,
    }
    payload = make_node_payload(self.private_project, attributes)
    res = self.app.put_json_api(self.private_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    updated = res.json['data']['attributes']
    assert_equal(updated['title'], self.new_title)
    assert_equal(updated['description'], self.new_description)
    assert_equal(updated['category'], self.new_category)
def test_update_private_project_logged_in_non_contributor(self):
    # A non-contributor may not update a private project: 403.
    attributes = {
        'title': self.new_title,
        'description': self.new_description,
        'category': self.new_category,
        'public': False,
    }
    payload = make_node_payload(self.private_project, attributes)
    res = self.app.put_json_api(self.private_url, payload, auth=self.user_two.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
    assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.UPDATED_FIELDS, 'public_project')
def test_update_project_sanitizes_html_properly(self):
    """Post request should update resource, and any HTML in fields should be stripped"""
    new_title = '<strong>Super</strong> Cool Project'
    new_description = 'An <script>alert("even cooler")</script> project'
    attributes = {
        'title': new_title,
        'description': new_description,
        'category': self.new_category,
        'public': True,
    }
    payload = make_node_payload(self.public_project, attributes)
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    updated = res.json['data']['attributes']
    assert_equal(updated['title'], strip_html(new_title))
    assert_equal(updated['description'], strip_html(new_description))
@assert_logs(NodeLog.EDITED_TITLE, 'public_project')
def test_partial_update_project_updates_project_correctly_and_sanitizes_html(self):
    # PATCH the title only; HTML is stripped and other fields stay intact.
    new_title = 'An <script>alert("even cooler")</script> project'
    payload = make_node_payload(self.public_project, {'title': new_title})
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')

    # Re-fetch to confirm what was persisted.
    res = self.app.get(self.public_url)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    attributes = res.json['data']['attributes']
    assert_equal(attributes['title'], strip_html(new_title))
    assert_equal(attributes['description'], self.description)
    assert_equal(attributes['category'], self.category)
def test_write_to_public_field_non_contrib_forbidden(self):
    # A non-contributor may not flip the 'public' flag.
    payload = make_node_payload(self.public_project, {'public': False})
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user_two.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
    assert_in('detail', res.json['errors'][0])
def test_partial_update_public_project_logged_out(self):
    # Anonymous PATCH of a public project is rejected with 401.
    payload = make_node_payload(self.public_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.public_url, payload, expect_errors=True)
    assert_equal(res.status_code, 401)
    assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.EDITED_TITLE, 'public_project')
def test_partial_update_public_project_logged_in(self):
    # PATCHing only the title leaves description and category untouched.
    payload = make_node_payload(self.public_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    attributes = res.json['data']['attributes']
    assert_equal(attributes['title'], self.new_title)
    assert_equal(attributes['description'], self.description)
    assert_equal(attributes['category'], self.category)
def test_partial_update_public_project_logged_in_but_unauthorized(self):
    # Public resource, logged in, unauthorized
    payload = make_node_payload(self.public_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user_two.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
    assert_in('detail', res.json['errors'][0])
def test_partial_update_private_project_logged_out(self):
    # Anonymous PATCH of a private project is rejected with 401.
    payload = make_node_payload(self.private_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.private_url, payload, expect_errors=True)
    assert_equal(res.status_code, 401)
    assert_in('detail', res.json['errors'][0])
@assert_logs(NodeLog.EDITED_TITLE, 'private_project')
def test_partial_update_private_project_logged_in_contributor(self):
    # The admin contributor can PATCH a private project's title.
    payload = make_node_payload(self.private_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.private_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    assert_equal(res.content_type, 'application/vnd.api+json')
    attributes = res.json['data']['attributes']
    assert_equal(attributes['title'], self.new_title)
    assert_equal(attributes['description'], self.description)
    assert_equal(attributes['category'], self.category)
def test_partial_update_private_project_logged_in_non_contributor(self):
    # A non-contributor may not PATCH a private project: 403.
    payload = make_node_payload(self.private_project, {'title': self.new_title})
    res = self.app.patch_json_api(self.private_url, payload, auth=self.user_two.auth, expect_errors=True)
    assert_equal(res.status_code, 403)
    assert_in('detail', res.json['errors'][0])
def test_multiple_patch_requests_with_same_category_generates_one_log(self):
    self.private_project.category = 'project'
    self.private_project.save()

    new_category = 'data'
    payload = make_node_payload(self.private_project, attributes={'category': new_category})
    original_n_logs = len(self.private_project.logs)

    # First request changes the category and writes exactly one log entry.
    res = self.app.patch_json_api(self.private_url, payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    self.private_project.reload()
    assert_equal(self.private_project.category, new_category)
    assert_equal(len(self.private_project.logs), original_n_logs + 1)  # sanity check

    # Repeating the identical request must not add another log entry.
    res = self.app.patch_json_api(self.private_url, payload, auth=self.user.auth)
    self.private_project.reload()
    assert_equal(self.private_project.category, new_category)
    assert_equal(len(self.private_project.logs), original_n_logs + 1)
def test_partial_update_invalid_id(self):
    # A payload id that does not match the URL's node yields 409 Conflict.
    payload = {
        'data': {
            'id': '12345',
            'type': 'nodes',
            'attributes': {
                'title': self.new_title,
            }
        }
    }
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 409)
def test_partial_update_invalid_type(self):
    # A wrong resource type ('node') yields 409 Conflict on PATCH too.
    payload = {
        'data': {
            'id': self.public_project._id,
            'type': 'node',
            'attributes': {
                'title': self.new_title,
            }
        }
    }
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 409)
def test_partial_update_no_id(self):
    # Omitting the id on PATCH is a validation error at /data/id.
    payload = {
        'data': {
            'type': 'nodes',
            'attributes': {
                'title': self.new_title,
            }
        }
    }
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    error = res.json['errors'][0]
    assert_equal(error['detail'], 'This field may not be null.')
    assert_equal(error['source']['pointer'], '/data/id')
def test_partial_update_no_type(self):
    # Omitting the type on PATCH is a validation error at /data/type.
    payload = {
        'data': {
            'id': self.public_project._id,
            'attributes': {
                'title': self.new_title,
            }
        }
    }
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    error = res.json['errors'][0]
    assert_equal(error['detail'], 'This field may not be null.')
    assert_equal(error['source']['pointer'], '/data/type')
# Nothing will be updated here
def test_partial_update_project_properties_not_nested(self):
    # Attributes placed directly under 'data' (not under 'attributes') are rejected.
    payload = {
        'data': {
            'id': self.public_project._id,
            'type': 'nodes',
            'title': self.new_title,
        }
    }
    res = self.app.patch_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
def test_update_project_invalid_title(self):
    # Titles are capped at 200 characters.
    too_long = 'A' * 201
    payload = make_node_payload(self.public_project, {'title': too_long, 'category': 'project'})
    res = self.app.put_json_api(self.public_url, payload, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    assert_equal(res.json['errors'][0]['detail'], 'Title cannot exceed 200 characters.')
class TestNodeDelete(NodeCRUDTestCase):

    def test_deletes_public_node_logged_out(self):
        # Anonymous DELETE of a public project is rejected with 401.
        response = self.app.delete(self.public_url, expect_errors=True)
        assert_equal(response.status_code, 401)
        assert 'detail' in response.json['errors'][0]
def test_requesting_deleted_returns_410(self):
    # A soft-deleted node responds 410 Gone to a GET.
    self.public_project.is_deleted = True
    self.public_project.save()
    response = self.app.get(self.public_url, expect_errors=True)
    assert_equal(response.status_code, 410)
    assert 'detail' in response.json['errors'][0]
def test_deletes_public_node_fails_if_unauthorized(self):
    # A non-contributor cannot delete a public project; it stays undeleted.
    response = self.app.delete_json_api(self.public_url, auth=self.user_two.auth, expect_errors=True)
    self.public_project.reload()
    assert_equal(response.status_code, 403)
    assert_equal(self.public_project.is_deleted, False)
    assert 'detail' in response.json['errors'][0]
@assert_logs(NodeLog.PROJECT_DELETED, 'public_project')
def test_deletes_public_node_succeeds_as_owner(self):
    # The owner may delete a public project: 204 and is_deleted set.
    response = self.app.delete_json_api(self.public_url, auth=self.user.auth, expect_errors=True)
    self.public_project.reload()
    assert_equal(response.status_code, 204)
    assert_equal(self.public_project.is_deleted, True)
def test_deletes_private_node_logged_out(self):
    # Anonymous DELETE of a private project is rejected with 401.
    response = self.app.delete(self.private_url, expect_errors=True)
    assert_equal(response.status_code, 401)
    assert 'detail' in response.json['errors'][0]
@assert_logs(NodeLog.PROJECT_DELETED, 'private_project')
def test_deletes_private_node_logged_in_contributor(self):
    # The admin contributor may delete a private project.
    response = self.app.delete(self.private_url, auth=self.user.auth, expect_errors=True)
    self.private_project.reload()
    assert_equal(response.status_code, 204)
    assert_equal(self.private_project.is_deleted, True)
def test_deletes_private_node_logged_in_non_contributor(self):
    # A non-contributor cannot delete a private project; it stays undeleted.
    response = self.app.delete(self.private_url, auth=self.user_two.auth, expect_errors=True)
    self.private_project.reload()
    assert_equal(response.status_code, 403)
    assert_equal(self.private_project.is_deleted, False)
    assert 'detail' in response.json['errors'][0]
def test_deletes_private_node_logged_in_read_only_contributor(self):
    # Read-only contributors may not delete the project either.
    self.private_project.add_contributor(self.user_two, permissions=[permissions.READ])
    self.private_project.save()
    response = self.app.delete(self.private_url, auth=self.user_two.auth, expect_errors=True)
    self.private_project.reload()
    assert_equal(response.status_code, 403)
    assert_equal(self.private_project.is_deleted, False)
    assert 'detail' in response.json['errors'][0]
def test_deletes_invalid_node(self):
    # Deleting a nonexistent node id returns 404.
    response = self.app.delete(self.fake_url, auth=self.user.auth, expect_errors=True)
    assert_equal(response.status_code, 404)
    assert 'detail' in response.json['errors'][0]
def test_delete_project_with_component_returns_error(self):
    project = ProjectFactory(creator=self.user)
    component = NodeFactory(parent=project, creator=self.user)
    # Return a 400 because component must be deleted before deleting the parent
    project_url = '/{}nodes/{}/'.format(API_BASE, project._id)
    res = self.app.delete_json_api(project_url, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 400)
    errors = res.json['errors']
    assert_equal(len(errors), 1)
    assert_equal(
        errors[0]['detail'],
        'Any child components must be deleted prior to deleting this project.'
    )
def test_delete_dashboard_returns_error(self):
    dashboard_node = find_dashboard(self.user)
    dashboard_url = '/{}nodes/{}/'.format(API_BASE, dashboard_node._id)
    res = self.app.delete_json_api(dashboard_url, auth=self.user.auth, expect_errors=True)
    # Dashboards are a folder, so a 404 is returned
    assert_equal(res.status_code, 404)
def test_cannot_delete_a_retraction(self):
    """DELETE against a retracted registration 404s and changes nothing."""
    registration = RegistrationFactory(creator=self.user, project=self.public_project)
    url = '/{}nodes/{}/'.format(API_BASE, registration._id)
    # Kept for its side effect (retracting the registration); the returned
    # object was previously bound to an unused local.
    RetractedRegistrationFactory(registration=registration, user=registration.creator)
    # Capture pre-request values so the post-request assertions are meaningful.
    original_title = registration.title
    original_description = registration.description
    res = self.app.delete_json_api(url, auth=self.user.auth, expect_errors=True)
    registration.reload()
    assert_equal(res.status_code, 404)
    # Bug fix: the old assertions compared registration.title (and
    # description) to themselves after reload, so they could never fail.
    assert_equal(registration.title, original_title)
    assert_equal(registration.description, original_description)
class TestReturnDeletedNode(ApiTestCase):
    """All verbs against a soft-deleted node should return 410 Gone."""

    def setUp(self):
        super(TestReturnDeletedNode, self).setUp()
        self.user = AuthUserFactory()
        self.non_contrib = AuthUserFactory()

        # One deleted and one live project for each visibility.
        self.public_deleted = ProjectFactory(
            is_deleted=True, creator=self.user,
            title='This public project has been deleted', category='project',
            is_public=True)
        self.private_deleted = ProjectFactory(
            is_deleted=True, creator=self.user,
            title='This private project has been deleted', category='project',
            is_public=False)
        self.private = ProjectFactory(is_public=False, creator=self.user, title='A boring project', category='project')
        self.public = ProjectFactory(is_public=True, creator=self.user, title='A fun project', category='project')

        self.new_title = 'This deleted node has been edited'
        self.public_url = '/{}nodes/{}/'.format(API_BASE, self.public_deleted._id)
        self.private_url = '/{}nodes/{}/'.format(API_BASE, self.private_deleted._id)

    def test_return_deleted_public_node(self):
        response = self.app.get(self.public_url, expect_errors=True)
        assert_equal(response.status_code, 410)

    def test_return_deleted_private_node(self):
        response = self.app.get(self.private_url, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, 410)

    def test_edit_deleted_public_node(self):
        params = {
            'title': self.new_title,
            'node_id': self.public_deleted._id,
            'category': self.public_deleted.category,
        }
        response = self.app.put_json_api(self.public_url, params=params, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, 410)

    def test_edit_deleted_private_node(self):
        params = {
            'title': self.new_title,
            'node_id': self.private_deleted._id,
            'category': self.private_deleted.category,
        }
        response = self.app.put_json_api(self.private_url, params=params, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, 410)

    def test_delete_deleted_public_node(self):
        response = self.app.delete(self.public_url, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, 410)

    def test_delete_deleted_private_node(self):
        response = self.app.delete(self.private_url, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, 410)
class TestNodeTags(ApiTestCase):

    def setUp(self):
        super(TestNodeTags, self).setUp()
        self.user = AuthUserFactory()
        self.admin = AuthUserFactory()
        self.user_two = AuthUserFactory()
        self.read_only_contributor = AuthUserFactory()

        self.public_project = ProjectFactory(title="Project One", is_public=True, creator=self.user)
        self.public_project.add_contributor(self.user, permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS, save=True)
        self.private_project = ProjectFactory(title="Project Two", is_public=False, creator=self.user)
        self.private_project.add_contributor(self.user, permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS, save=True)
        self.private_project.add_contributor(self.admin, permissions=permissions.CREATOR_PERMISSIONS, save=True)
        self.public_url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        self.private_url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)

        # Reusable PATCH bodies that add a single tag to each project.
        self.one_new_tag_json = make_node_payload(self.public_project, {'tags': ['new-tag']})
        self.private_payload = make_node_payload(self.private_project, {'tags': ['new-tag']})
def test_public_project_starts_with_no_tags(self):
    # Fresh projects carry an empty tag list.
    response = self.app.get(self.public_url)
    assert_equal(response.status_code, 200)
    assert_equal(len(response.json['data']['attributes']['tags']), 0)
@assert_logs(NodeLog.TAG_ADDED, 'public_project')
def test_contributor_can_add_tag_to_public_project(self):
    res = self.app.patch_json_api(self.public_url, self.one_new_tag_json, auth=self.user.auth, expect_errors=True)
    assert_equal(res.status_code, 200)
    # The PATCH response reflects the new tag...
    patched_tags = res.json['data']['attributes']['tags']
    assert_equal(len(patched_tags), 1)
    assert_equal(patched_tags[0], 'new-tag')
    # ...the database agrees...
    self.public_project.reload()
    assert_equal(len(self.public_project.tags), 1)
    assert_equal(self.public_project.tags[0]._id, 'new-tag')
    # ...and a fresh GET returns it as well.
    reload_res = self.app.get(self.public_url)
    reloaded_tags = reload_res.json['data']['attributes']['tags']
    assert_equal(len(reloaded_tags), 1)
    assert_equal(reloaded_tags[0], 'new-tag')
@assert_logs(NodeLog.TAG_ADDED, 'private_project')
def test_contributor_can_add_tag_to_private_project(self):
    res = self.app.patch_json_api(self.private_url, self.private_payload, auth=self.user.auth)
    assert_equal(res.status_code, 200)
    # The PATCH response reflects the new tag...
    patched_tags = res.json['data']['attributes']['tags']
    assert_equal(len(patched_tags), 1)
    assert_equal(patched_tags[0], 'new-tag')
    # ...the database agrees...
    self.private_project.reload()
    assert_equal(len(self.private_project.tags), 1)
    assert_equal(self.private_project.tags[0]._id, 'new-tag')
    # ...and a fresh GET returns it as well.
    reload_res = self.app.get(self.private_url, auth=self.user.auth)
    reloaded_tags = reload_res.json['data']['attributes']['tags']
    assert_equal(len(reloaded_tags), 1)
    assert_equal(reloaded_tags[0], 'new-tag')
def test_partial_update_project_does_not_clear_tags(self):
res = self.app.patch_json_api(self.private_url, self.private_payload, auth=self.admin.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']['attributes']['tags']), 1)
new_payload = {
'data': {
'id': self.private_project._id,
'type': 'nodes',
'attributes': {
'public': True
}
}
}
res = self.app.patch_json_api(self.private_url, new_payload, auth=self.admin.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']['attributes']['tags']), 1)
new_payload['data']['attributes']['public'] = False
res = self.app.patch_json_api(self.private_url, new_payload, auth=self.admin.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']['attributes']['tags']), 1)
def test_non_authenticated_user_cannot_add_tag_to_public_project(self):
res = self.app.patch_json_api(self.public_url, self.one_new_tag_json, expect_errors=True, auth=None)
assert_equal(res.status_code, 401)
def test_non_authenticated_user_cannot_add_tag_to_private_project(self):
res = self.app.patch_json_api(self.private_url, self.private_payload, expect_errors=True, auth=None)
assert_equal(res.status_code, 401)
def test_non_contributor_cannot_add_tag_to_public_project(self):
res = self.app.patch_json_api(self.public_url, self.one_new_tag_json, expect_errors=True, auth=self.user_two.auth)
assert_equal(res.status_code, 403)
def test_non_contributor_cannot_add_tag_to_private_project(self):
res = self.app.patch_json_api(self.private_url, self.private_payload, expect_errors=True, auth=self.user_two.auth)
assert_equal(res.status_code, 403)
def test_read_only_contributor_cannot_add_tag_to_public_project(self):
res = self.app.patch_json_api(self.public_url, self.one_new_tag_json, expect_errors=True, auth=self.read_only_contributor.auth)
assert_equal(res.status_code, 403)
def test_read_only_contributor_cannot_add_tag_to_private_project(self):
res = self.app.patch_json_api(self.private_url, self.private_payload, expect_errors=True, auth=self.read_only_contributor.auth)
assert_equal(res.status_code, 403)\
@assert_logs(NodeLog.TAG_ADDED, 'private_project', -4)
@assert_logs(NodeLog.TAG_ADDED, 'private_project', -3)
@assert_logs(NodeLog.TAG_REMOVED, 'private_project', -2)
@assert_logs(NodeLog.TAG_REMOVED, 'private_project')
def test_tags_add_and_remove_properly(self):
res = self.app.patch_json_api(self.private_url, self.private_payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
# Ensure adding tag data is correct from the PATCH response
assert_equal(len(res.json['data']['attributes']['tags']), 1)
assert_equal(res.json['data']['attributes']['tags'][0], 'new-tag')
# Ensure removing and adding tag data is correct from the PATCH response
res = self.app.patch_json_api(self.private_url, {'data': {'id': self.private_project._id, 'type':'nodes', 'attributes': {'tags':['newer-tag']}}}, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']['attributes']['tags']), 1)
assert_equal(res.json['data']['attributes']['tags'][0], 'newer-tag')
# Ensure removing tag data is correct from the PATCH response
res = self.app.patch_json_api(self.private_url, {'data': {'id': self.private_project._id, 'type':'nodes', 'attributes': {'tags': []}}}, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(len(res.json['data']['attributes']['tags']), 0)
def test_tags_post_object_instead_of_list(self):
url = '/{}nodes/'.format(API_BASE)
payload = {'data': {
'type': 'nodes',
'attributes': {
'title': 'new title',
'category': 'project',
'tags': {'foo': 'bar'}
}
}}
res = self.app.post_json_api(url, payload, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')
def test_tags_patch_object_instead_of_list(self):
self.one_new_tag_json['data']['attributes']['tags'] = {'foo': 'bar'}
res = self.app.patch_json_api(self.public_url, self.one_new_tag_json, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')
|
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from dateutil import parser
from ._common_conversion import _to_str
try:
from xml.etree import cElementTree as ETree
except ImportError:
from xml.etree import ElementTree as ETree
from .models import (
ServiceProperties,
Logging,
Metrics,
CorsRule,
AccessPolicy,
_dict,
GeoReplication,
ServiceStats,
DeleteRetentionPolicy,
StaticWebsite,
)
def _to_int(value):
return value if value is None else int(value)
def _bool(value):
return value.lower() == 'true'
def _to_upper_str(value):
    """Upper-cased string form of *value*; ``None`` passes through."""
    if value is None:
        return None
    return _to_str(value).upper()
def _get_download_size(start_range, end_range, resource_size):
if start_range is not None:
end_range = end_range if end_range else (resource_size if resource_size else None)
if end_range is not None:
return end_range - start_range
else:
return None
else:
return resource_size
# Maps a response header name -> (section, attribute, converter[, ignoretz]).
#   section   -- None when the attribute is set directly on the properties
#                object, otherwise the name of a nested settings object
#                (e.g. 'content_settings', 'lease', 'copy', 'smb_properties').
#   attribute -- target attribute name on that object.
#   converter -- callable turning the raw header string into the value.
#   ignoretz  -- optional 4th element; when True and the converter is
#                dateutil's parser.parse, the timestamp is parsed as a
#                naive datetime (time zone information ignored).
GET_PROPERTIES_ATTRIBUTE_MAP = {
    'last-modified': (None, 'last_modified', parser.parse),
    'etag': (None, 'etag', _to_str),
    'x-ms-blob-type': (None, 'blob_type', _to_str),
    'content-length': (None, 'content_length', _to_int),
    'content-range': (None, 'content_range', _to_str),
    'x-ms-blob-sequence-number': (None, 'page_blob_sequence_number', _to_int),
    'x-ms-blob-committed-block-count': (None, 'append_blob_committed_block_count', _to_int),
    'x-ms-blob-public-access': (None, 'public_access', _to_str),
    'x-ms-access-tier': (None, 'blob_tier', _to_str),
    'x-ms-access-tier-change-time': (None, 'blob_tier_change_time', parser.parse),
    'x-ms-access-tier-inferred': (None, 'blob_tier_inferred', _bool),
    'x-ms-archive-status': (None, 'rehydration_status', _to_str),
    'x-ms-share-quota': (None, 'quota', _to_int),
    'x-ms-server-encrypted': (None, 'server_encrypted', _bool),
    'x-ms-encryption-key-sha256': (None, 'encryption_key_sha256', _to_str),
    'x-ms-creation-time': (None, 'creation_time', parser.parse),
    'content-type': ('content_settings', 'content_type', _to_str),
    'cache-control': ('content_settings', 'cache_control', _to_str),
    'content-encoding': ('content_settings', 'content_encoding', _to_str),
    'content-disposition': ('content_settings', 'content_disposition', _to_str),
    'content-language': ('content_settings', 'content_language', _to_str),
    'content-md5': ('content_settings', 'content_md5', _to_str),
    'x-ms-lease-status': ('lease', 'status', _to_str),
    'x-ms-lease-state': ('lease', 'state', _to_str),
    'x-ms-lease-duration': ('lease', 'duration', _to_str),
    'x-ms-copy-id': ('copy', 'id', _to_str),
    'x-ms-copy-source': ('copy', 'source', _to_str),
    'x-ms-copy-status': ('copy', 'status', _to_str),
    'x-ms-copy-progress': ('copy', 'progress', _to_str),
    'x-ms-copy-completion-time': ('copy', 'completion_time', parser.parse),
    'x-ms-copy-destination-snapshot': ('copy', 'destination_snapshot_time', _to_str),
    'x-ms-copy-status-description': ('copy', 'status_description', _to_str),
    'x-ms-has-immutability-policy': (None, 'has_immutability_policy', _bool),
    'x-ms-has-legal-hold': (None, 'has_legal_hold', _bool),
    'x-ms-file-attributes': ('smb_properties', 'ntfs_attributes', _to_str),
    'x-ms-file-creation-time': ('smb_properties', 'creation_time', parser.parse, True),
    'x-ms-file-last-write-time': ('smb_properties', 'last_write_time', parser.parse, True),
    'x-ms-file-change-time': ('smb_properties', 'change_time', parser.parse, True),
    'x-ms-file-permission-key': ('smb_properties', 'permission_key', _to_str),
    'x-ms-file-id': ('smb_properties', 'file_id', _to_str),
    'x-ms-file-parent-id': ('smb_properties', 'parent_id', _to_str),
}
def _parse_metadata(response):
    """Extract ``x-ms-meta-*`` headers from *response* into a metadata dict.

    Returns None when there is no response or it carries no headers.
    """
    if response is None or response.headers is None:
        return None
    prefix = 'x-ms-meta-'
    metadata = _dict()
    for name, raw_value in response.headers.items():
        if name.lower().startswith(prefix):
            # Strip the prefix; the remainder is the user's metadata key.
            metadata[name[len(prefix):]] = _to_str(raw_value)
    return metadata
def _parse_properties(response, result_class):
    """Build a *result_class* instance from the response headers.

    Headers are mapped through GET_PROPERTIES_ATTRIBUTE_MAP; headers not in
    the map (i.e. standard HTTP headers) are ignored. Returns None when
    there is no response or it carries no headers.
    """
    if response is None or response.headers is None:
        return None
    props = result_class()
    for header, raw in response.headers.items():
        mapping = GET_PROPERTIES_ATTRIBUTE_MAP.get(header)
        if not mapping:
            continue
        section, attr_name, converter = mapping[0], mapping[1], mapping[2]
        if converter is parser.parse:
            # Optional 4th element: when True, time zones in the parsed
            # string are ignored and a naive datetime is produced.
            ignoretz = mapping[3] if len(mapping) > 3 else False
            converted = converter(raw, ignoretz=ignoretz)
        else:
            converted = converter(raw)
        if section is None:
            setattr(props, attr_name, converted)
        else:
            setattr(getattr(props, section), attr_name, converted)
    # Page-blob tiers may be reported in mixed case; normalize to upper case.
    if (hasattr(props, 'blob_type') and props.blob_type == 'PageBlob'
            and hasattr(props, 'blob_tier') and props.blob_tier is not None):
        props.blob_tier = _to_upper_str(props.blob_tier)
    return props
def _parse_length_from_content_range(content_range):
'''
Parses the blob length from the content range header: bytes 1-3/65537
'''
if content_range is None:
return None
# First, split in space and take the second half: '1-3/65537'
# Next, split on slash and take the second half: '65537'
# Finally, convert to an int: 65537
return int(content_range.split(' ', 1)[1].split('/', 1)[1])
def _convert_xml_to_signed_identifiers(response):
    '''
    Parse a SignedIdentifiers XML body into a dict of id -> AccessPolicy.

    <?xml version="1.0" encoding="utf-8"?>
    <SignedIdentifiers>
      <SignedIdentifier>
        <Id>unique-value</Id>
        <AccessPolicy>
          <Start>start-time</Start>
          <Expiry>expiry-time</Expiry>
          <Permission>abbreviated-permission-list</Permission>
        </AccessPolicy>
      </SignedIdentifier>
    </SignedIdentifiers>
    '''
    if response is None or response.body is None:
        return None
    root = ETree.fromstring(response.body)
    identifiers = _dict()
    for identifier_element in root.findall('SignedIdentifier'):
        policy = AccessPolicy()
        policy_element = identifier_element.find('AccessPolicy')
        # Start/Expiry/Permission are all optional within the policy.
        if policy_element is not None:
            start_element = policy_element.find('Start')
            if start_element is not None:
                policy.start = parser.parse(start_element.text)
            expiry_element = policy_element.find('Expiry')
            if expiry_element is not None:
                policy.expiry = parser.parse(expiry_element.text)
            policy.permission = policy_element.findtext('Permission')
        identifiers[identifier_element.find('Id').text] = policy
    return identifiers
def _convert_xml_to_service_stats(response):
    '''
    Parse a StorageServiceStats XML body into a ServiceStats object.

    <?xml version="1.0" encoding="utf-8"?>
    <StorageServiceStats>
      <GeoReplication>
          <Status>live|bootstrap|unavailable</Status>
          <LastSyncTime>sync-time|<empty></LastSyncTime>
      </GeoReplication>
    </StorageServiceStats>
    '''
    if response is None or response.body is None:
        return None
    root = ETree.fromstring(response.body)
    geo_element = root.find('GeoReplication')
    geo = GeoReplication()
    geo.status = geo_element.find('Status').text
    # LastSyncTime may be empty (e.g. while bootstrapping).
    sync_text = geo_element.find('LastSyncTime').text
    geo.last_sync_time = parser.parse(sync_text) if sync_text else None
    stats = ServiceStats()
    stats.geo_replication = geo
    return stats
def _convert_xml_to_service_properties(response):
    '''
    Parse a StorageServiceProperties XML body into a ServiceProperties
    object. Every top-level section is optional; the corresponding
    attribute is only populated when its element is present.

    <?xml version="1.0" encoding="utf-8"?>
    <StorageServiceProperties>
        <Logging>
            <Version>version-number</Version>
            <Delete>true|false</Delete>
            <Read>true|false</Read>
            <Write>true|false</Write>
            <RetentionPolicy>
                <Enabled>true|false</Enabled>
                <Days>number-of-days</Days>
            </RetentionPolicy>
        </Logging>
        <HourMetrics>
            <Version>version-number</Version>
            <Enabled>true|false</Enabled>
            <IncludeAPIs>true|false</IncludeAPIs>
            <RetentionPolicy>
                <Enabled>true|false</Enabled>
                <Days>number-of-days</Days>
            </RetentionPolicy>
        </HourMetrics>
        <MinuteMetrics>
            <Version>version-number</Version>
            <Enabled>true|false</Enabled>
            <IncludeAPIs>true|false</IncludeAPIs>
            <RetentionPolicy>
                <Enabled>true|false</Enabled>
                <Days>number-of-days</Days>
            </RetentionPolicy>
        </MinuteMetrics>
        <Cors>
            <CorsRule>
                <AllowedOrigins>comma-separated-list-of-allowed-origins</AllowedOrigins>
                <AllowedMethods>comma-separated-list-of-HTTP-verb</AllowedMethods>
                <MaxAgeInSeconds>max-caching-age-in-seconds</MaxAgeInSeconds>
                <ExposedHeaders>comma-seperated-list-of-response-headers</ExposedHeaders>
                <AllowedHeaders>comma-seperated-list-of-request-headers</AllowedHeaders>
            </CorsRule>
        </Cors>
        <DeleteRetentionPolicy>
            <Enabled>true|false</Enabled>
            <Days>number-of-days</Days>
        </DeleteRetentionPolicy>
        <StaticWebsite>
            <Enabled>true|false</Enabled>
            <IndexDocument></IndexDocument>
            <ErrorDocument404Path></ErrorDocument404Path>
        </StaticWebsite>
    </StorageServiceProperties>
    '''
    if response is None or response.body is None:
        return None
    service_properties_element = ETree.fromstring(response.body)
    service_properties = ServiceProperties()
    # Logging
    logging = service_properties_element.find('Logging')
    if logging is not None:
        service_properties.logging = Logging()
        service_properties.logging.version = logging.find('Version').text
        service_properties.logging.delete = _bool(logging.find('Delete').text)
        service_properties.logging.read = _bool(logging.find('Read').text)
        service_properties.logging.write = _bool(logging.find('Write').text)
        _convert_xml_to_retention_policy(logging.find('RetentionPolicy'),
                                         service_properties.logging.retention_policy)
    # HourMetrics
    hour_metrics_element = service_properties_element.find('HourMetrics')
    if hour_metrics_element is not None:
        service_properties.hour_metrics = Metrics()
        _convert_xml_to_metrics(hour_metrics_element, service_properties.hour_metrics)
    # MinuteMetrics
    minute_metrics_element = service_properties_element.find('MinuteMetrics')
    if minute_metrics_element is not None:
        service_properties.minute_metrics = Metrics()
        _convert_xml_to_metrics(minute_metrics_element, service_properties.minute_metrics)
    # CORS
    cors = service_properties_element.find('Cors')
    if cors is not None:
        service_properties.cors = list()
        for rule in cors.findall('CorsRule'):
            # Origins, methods and max-age are mandatory per rule.
            allowed_origins = rule.find('AllowedOrigins').text.split(',')
            allowed_methods = rule.find('AllowedMethods').text.split(',')
            max_age_in_seconds = int(rule.find('MaxAgeInSeconds').text)
            cors_rule = CorsRule(allowed_origins, allowed_methods, max_age_in_seconds)
            # Exposed/allowed headers are optional (elements may be empty).
            exposed_headers = rule.find('ExposedHeaders').text
            if exposed_headers is not None:
                cors_rule.exposed_headers = exposed_headers.split(',')
            allowed_headers = rule.find('AllowedHeaders').text
            if allowed_headers is not None:
                cors_rule.allowed_headers = allowed_headers.split(',')
            service_properties.cors.append(cors_rule)
    # Target version
    target_version = service_properties_element.find('DefaultServiceVersion')
    if target_version is not None:
        service_properties.target_version = target_version.text
    # DeleteRetentionPolicy
    delete_retention_policy_element = service_properties_element.find('DeleteRetentionPolicy')
    if delete_retention_policy_element is not None:
        service_properties.delete_retention_policy = DeleteRetentionPolicy()
        policy_enabled = _bool(delete_retention_policy_element.find('Enabled').text)
        service_properties.delete_retention_policy.enabled = policy_enabled
        # Days is only parsed when the policy is enabled.
        if policy_enabled:
            service_properties.delete_retention_policy.days = int(delete_retention_policy_element.find('Days').text)
    # StaticWebsite
    static_website_element = service_properties_element.find('StaticWebsite')
    if static_website_element is not None:
        service_properties.static_website = StaticWebsite()
        service_properties.static_website.enabled = _bool(static_website_element.find('Enabled').text)
        index_document_element = static_website_element.find('IndexDocument')
        if index_document_element is not None:
            service_properties.static_website.index_document = index_document_element.text
        error_document_element = static_website_element.find('ErrorDocument404Path')
        if error_document_element is not None:
            service_properties.static_website.error_document_404_path = error_document_element.text
    return service_properties
def _convert_xml_to_metrics(xml, metrics):
    '''
    Populate *metrics* in place from a metrics XML element:

    <Version>version-number</Version>
    <Enabled>true|false</Enabled>
    <IncludeAPIs>true|false</IncludeAPIs>
    <RetentionPolicy>
        <Enabled>true|false</Enabled>
        <Days>number-of-days</Days>
    </RetentionPolicy>
    '''
    metrics.version = xml.find('Version').text
    metrics.enabled = _bool(xml.find('Enabled').text)
    # IncludeAPIs is optional; leave the attribute untouched when absent.
    include_apis_element = xml.find('IncludeAPIs')
    if include_apis_element is not None:
        metrics.include_apis = _bool(include_apis_element.text)
    _convert_xml_to_retention_policy(xml.find('RetentionPolicy'),
                                     metrics.retention_policy)
def _convert_xml_to_retention_policy(xml, retention_policy):
    '''
    Populate *retention_policy* in place from a RetentionPolicy XML element:

    <Enabled>true|false</Enabled>
    <Days>number-of-days</Days>
    '''
    retention_policy.enabled = _bool(xml.find('Enabled').text)
    # Days is optional (typically only present when the policy is enabled).
    days_element = xml.find('Days')
    if days_element is not None:
        retention_policy.days = int(days_element.text)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import unittest
import routes
import webob
from webtest import AppError
from webtest import TestApp
from quantum.api import extensions
from quantum.api.extensions import (
ExtensionManager,
ExtensionMiddleware,
PluginAwareExtensionManager,
)
from quantum.common import config
from quantum.common import exceptions
from quantum.db.db_base_plugin_v2 import QuantumDbPluginV2
from quantum.openstack.common import jsonutils
from quantum.plugins.common import constants
from quantum.tests.unit import BaseTest
from quantum.tests.unit.extension_stubs import (
ExtensionExpectingPluginInterface,
StubBaseAppController,
StubExtension,
StubPlugin,
)
import quantum.tests.unit.extensions
from quantum import wsgi
# Module-level logger for these extension tests.
LOG = logging.getLogger('quantum.tests.test_extensions')
# Root of the test tree (two levels up from this file) and its etc/ dir.
ROOTDIR = os.path.dirname(os.path.dirname(__file__))
ETCDIR = os.path.join(ROOTDIR, 'etc')
def etcdir(*p):
    """Return the path formed by joining *p under the test etc/ directory."""
    return os.path.join(ETCDIR, *p)
# Colon-separated search path containing the stub extension modules.
extensions_path = ':'.join(quantum.tests.unit.extensions.__path__)
class ExtensionsTestApp(wsgi.Router):
    """Minimal WSGI router exposing a stub /dummy_resources resource."""

    def __init__(self, options=None):
        # ``options`` is accepted for interface compatibility but never
        # read; a None default replaces the original mutable default
        # argument ({}), which is shared across calls in Python.
        mapper = routes.Mapper()
        controller = StubBaseAppController()
        mapper.resource("dummy_resource", "/dummy_resources",
                        controller=controller)
        super(ExtensionsTestApp, self).__init__(mapper)
class FakePluginWithExtension(QuantumDbPluginV2):
    """A fake plugin used only for extension testing in this file."""

    # Advertises support for the FOXNSOX stub extension alias.
    supported_extension_aliases = ["FOXNSOX"]

    def method_to_support_foxnsox_extension(self, context):
        # Presumably records the call via the plugin's logging helper; the
        # extension machinery only needs this method to exist on the plugin.
        self._log("method_to_support_foxnsox_extension", context)
class ResourceExtensionTest(unittest.TestCase):
    """Tests for ResourceExtension routing: custom member/collection
    actions, plugin path prefixes, nested (parent) resources and error
    behaviour."""

    class ResourceExtensionController(wsgi.Controller):
        """Stub controller providing index/show plus custom actions."""

        def index(self, request):
            return "resource index"

        def show(self, request, id):
            return {'data': {'id': id}}

        def notimplemented_function(self, request, id):
            # Wrap NotImplementedError so it surfaces as an HTTP error.
            return webob.exc.HTTPClientError(NotImplementedError())

        def custom_member_action(self, request, id):
            return {'member_action': 'value'}

        def custom_collection_action(self, request, **kwargs):
            return {'collection': 'value'}

    class DummySvcPlugin(wsgi.Controller):
        """Stub service plugin used to exercise plugin path prefixes."""

        def get_plugin_type(self):
            return constants.DUMMY

        def index(self, request, **kwargs):
            return "resource index"

        def custom_member_action(self, request, **kwargs):
            return {'member_action': 'value'}

        def collection_action(self, request, **kwargs):
            return {'collection': 'value'}

        def show(self, request, id):
            return {'data': {'id': id}}

    def test_exceptions_notimplemented(self):
        """A NotImplementedError in a member action surfaces as an error."""
        controller = self.ResourceExtensionController()
        member = {'notimplemented_function': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               member_actions=member)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        # Ideally we would check for a 501 code here but webtest doesn't take
        # anything that is below 200 or above 400, so the request surfaces
        # as an AppError. assertRaises replaces the original
        # try/except/assertTrue(False) pattern (and fixes the "thows" typo).
        self.assertRaises(AppError, test_app.get,
                          "/tweedles/some_id/notimplemented_function")

    def test_resource_can_be_added_as_extension(self):
        """index and show of an extension resource are routed correctly."""
        res_ext = extensions.ResourceExtension(
            'tweedles', self.ResourceExtensionController())
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
        index_response = test_app.get("/tweedles")
        self.assertEqual(200, index_response.status_int)
        self.assertEqual("resource index", index_response.body)

        show_response = test_app.get("/tweedles/25266")
        self.assertEqual({'data': {'id': "25266"}}, show_response.json)

    def test_resource_gets_prefix_of_plugin(self):
        """A plugin-backed resource is mounted under its path prefix."""
        class DummySvcPlugin(wsgi.Controller):
            def index(self, request):
                return ""

            def get_plugin_type(self):
                return constants.DUMMY

        res_ext = extensions.ResourceExtension(
            'tweedles', DummySvcPlugin(), path_prefix="/dummy_svc")
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
        index_response = test_app.get("/dummy_svc/tweedles")
        self.assertEqual(200, index_response.status_int)

    def test_resource_extension_with_custom_member_action(self):
        """Custom member actions are routed and return their payload."""
        controller = self.ResourceExtensionController()
        member = {'custom_member_action': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               member_actions=member)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))
        response = test_app.get("/tweedles/some_id/custom_member_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['member_action'],
                         "value")

    def test_resource_ext_with_custom_member_action_gets_plugin_prefix(self):
        """Member and collection actions honour the plugin path prefix."""
        controller = self.DummySvcPlugin()
        member = {'custom_member_action': "GET"}
        collections = {'collection_action': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               path_prefix="/dummy_svc",
                                               member_actions=member,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.get("/dummy_svc/tweedles/1/custom_member_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['member_action'],
                         "value")

        response = test_app.get("/dummy_svc/tweedles/collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'],
                         "value")

    def test_plugin_prefix_with_parent_resource(self):
        """Prefix and parent resource compose in the routed URL."""
        controller = self.DummySvcPlugin()
        parent = dict(member_name="tenant",
                      collection_name="tenants")
        member = {'custom_member_action': "GET"}
        collections = {'collection_action': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller, parent,
                                               path_prefix="/dummy_svc",
                                               member_actions=member,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        index_response = test_app.get("/dummy_svc/tenants/1/tweedles")
        self.assertEqual(200, index_response.status_int)

        response = test_app.get("/dummy_svc/tenants/1/"
                                "tweedles/1/custom_member_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['member_action'],
                         "value")

        response = test_app.get("/dummy_svc/tenants/2/"
                                "tweedles/collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'],
                         "value")

    def test_resource_extension_for_get_custom_collection_action(self):
        """GET custom collection actions are routed."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.get("/tweedles/custom_collection_action")
        self.assertEqual(200, response.status_int)
        LOG.debug(jsonutils.loads(response.body))
        self.assertEqual(jsonutils.loads(response.body)['collection'], "value")

    def test_resource_extension_for_put_custom_collection_action(self):
        """PUT custom collection actions are routed."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "PUT"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.put("/tweedles/custom_collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')

    def test_resource_extension_for_post_custom_collection_action(self):
        """POST custom collection actions are routed."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "POST"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.post("/tweedles/custom_collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')

    def test_resource_extension_for_delete_custom_collection_action(self):
        """DELETE custom collection actions are routed."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "DELETE"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.delete("/tweedles/custom_collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'], 'value')

    def test_resource_ext_for_formatted_req_on_custom_collection_action(self):
        """Format suffixes (.json) work on custom collection actions."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "GET"}
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.get("/tweedles/custom_collection_action.json")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'], "value")

    def test_resource_ext_for_nested_resource_custom_collection_action(self):
        """Collection actions work on resources nested under a parent."""
        controller = self.ResourceExtensionController()
        collections = {'custom_collection_action': "GET"}
        parent = dict(collection_name='beetles', member_name='beetle')
        res_ext = extensions.ResourceExtension('tweedles', controller,
                                               collection_actions=collections,
                                               parent=parent)
        test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext))

        response = test_app.get("/beetles/beetle_id"
                                "/tweedles/custom_collection_action")
        self.assertEqual(200, response.status_int)
        self.assertEqual(jsonutils.loads(response.body)['collection'], "value")

    def test_returns_404_for_non_existent_extension(self):
        """Unknown extension paths produce a 404."""
        test_app = _setup_extensions_test_app(SimpleExtensionManager(None))

        response = test_app.get("/non_extistant_extension", status='*')
        self.assertEqual(404, response.status_int)
class ActionExtensionTest(unittest.TestCase):
    """Tests for custom actions POSTed to /dummy_resources/<id>/action."""

    def setUp(self):
        super(ActionExtensionTest, self).setUp()
        self.extension_app = _setup_extensions_test_app()

    def _post_action(self, path, body, status=None):
        # Small helper: POST a JSON action body, optionally tolerating
        # error statuses via webtest's ``status`` filter.
        kwargs = {'content_type': 'application/json'}
        if status is not None:
            kwargs['status'] = status
        return self.extension_app.post(path, body, **kwargs)

    def test_extended_action_for_adding_extra_data(self):
        """The FOXNSOX add_tweedle action returns its confirmation text."""
        req_body = jsonutils.dumps(
            {'FOXNSOX:add_tweedle': dict(name='Beetle')})
        response = self._post_action('/dummy_resources/1/action', req_body)
        self.assertEqual("Tweedle Beetle Added.", response.body)

    def test_extended_action_for_deleting_extra_data(self):
        """The FOXNSOX delete_tweedle action returns its confirmation text."""
        req_body = jsonutils.dumps(
            {'FOXNSOX:delete_tweedle': dict(name='Bailey')})
        response = self._post_action("/dummy_resources/1/action", req_body)
        self.assertEqual("Tweedle Bailey Deleted.", response.body)

    def test_returns_404_for_non_existent_action(self):
        """Unknown action names yield a 404."""
        req_body = jsonutils.dumps({'blah_action': dict(name="test")})
        response = self._post_action("/dummy_resources/1/action", req_body,
                                     status='*')
        self.assertEqual(404, response.status_int)

    def test_returns_404_for_non_existent_resource(self):
        """Actions on unknown resources yield a 404."""
        req_body = jsonutils.dumps({'add_tweedle': dict(name='Beetle')})
        response = self._post_action("/asdf/1/action", req_body, status='*')
        self.assertEqual(404, response.status_int)
class RequestExtensionTest(BaseTest):
    """Tests for request/response handlers attached via RequestExtension."""

    def test_headers_can_be_extended(self):
        """A handler can read request headers and add response headers."""
        def extend_headers(req, resp):
            assert req.headers['X-NEW-REQUEST-HEADER'] == "sox"
            resp.headers['X-NEW-RESPONSE-HEADER'] = "response_header_data"
            return resp

        ext_app = self._setup_app_with_request_handler(extend_headers, 'GET')
        response = ext_app.get("/dummy_resources/1",
                               headers={'X-NEW-REQUEST-HEADER': "sox"})
        self.assertEqual(response.headers['X-NEW-RESPONSE-HEADER'],
                         "response_header_data")

    def test_extend_get_resource_response(self):
        """A handler can inject extra keys into a GET response body."""
        def extend_response_data(req, resp):
            payload = jsonutils.loads(resp.body)
            payload['FOXNSOX:extended_key'] = req.GET.get('extended_key')
            resp.body = jsonutils.dumps(payload)
            return resp

        ext_app = self._setup_app_with_request_handler(extend_response_data,
                                                       'GET')
        response = ext_app.get("/dummy_resources/1?extended_key=extended_data")
        self.assertEqual(200, response.status_int)

        body = jsonutils.loads(response.body)
        self.assertEqual('extended_data', body['FOXNSOX:extended_key'])
        self.assertEqual('knox', body['fort'])

    def test_get_resources(self):
        """Stock test extensions decorate GET responses with FOXNSOX data."""
        ext_app = _setup_extensions_test_app()
        response = ext_app.get("/dummy_resources/1?chewing=newblue")
        body = jsonutils.loads(response.body)
        self.assertEqual('newblue', body['FOXNSOX:googoose'])
        self.assertEqual("Pig Bands!", body['FOXNSOX:big_bands'])

    def test_edit_previously_uneditable_field(self):
        """A PUT handler can expose a field the base app keeps read-only."""
        def _update_handler(req, resp):
            payload = jsonutils.loads(resp.body)
            payload['uneditable'] = req.params['uneditable']
            resp.body = jsonutils.dumps(payload)
            return resp

        base_app = TestApp(setup_base_app())
        base_response = base_app.put("/dummy_resources/1",
                                     {'uneditable': "new_value"})
        self.assertEqual(base_response.json['uneditable'], "original_value")

        ext_app = self._setup_app_with_request_handler(_update_handler, 'PUT')
        ext_response = ext_app.put("/dummy_resources/1",
                                   {'uneditable': "new_value"})
        self.assertEqual(ext_response.json['uneditable'], "new_value")

    def _setup_app_with_request_handler(self, handler, verb):
        """Build a test app whose extension routes *verb* requests for
        /dummy_resources/:(id) through *handler*."""
        req_ext = extensions.RequestExtension(verb, '/dummy_resources/:(id)',
                                              handler)
        manager = SimpleExtensionManager(None, None, req_ext)
        return _setup_extensions_test_app(manager)
class ExtensionManagerTest(unittest.TestCase):
    """Tests for ExtensionManager registration behaviour."""

    def test_invalid_extensions_are_not_registered(self):
        """Extensions missing the required descriptor methods are skipped."""

        class InvalidExtension(object):
            """
            This Extension doesn't implement extension methods :
            get_name, get_description, get_namespace and get_updated
            """
            def get_alias(self):
                return "invalid_extension"

        ext_mgr = ExtensionManager('')
        ext_mgr.add_extension(InvalidExtension())
        ext_mgr.add_extension(StubExtension("valid_extension"))

        # assertIn/assertNotIn give clearer failure messages than
        # assertTrue/assertFalse wrapped around a containment expression.
        self.assertIn('valid_extension', ext_mgr.extensions)
        self.assertNotIn('invalid_extension', ext_mgr.extensions)
class PluginAwareExtensionManagerTest(unittest.TestCase):

    def test_unsupported_extensions_are_not_loaded(self):
        plugin = StubPlugin(supported_extensions=["e1", "e3"])
        manager = PluginAwareExtensionManager('',
                                              {constants.CORE: plugin})
        for alias in ("e1", "e2", "e3"):
            manager.add_extension(StubExtension(alias))
        # only aliases advertised by the plugin survive registration
        self.assertTrue("e1" in manager.extensions)
        self.assertFalse("e2" in manager.extensions)
        self.assertTrue("e3" in manager.extensions)

    def test_extensions_are_not_loaded_for_plugins_unaware_of_extensions(self):
        class ExtensionUnawarePlugin(object):
            """A plugin without a supports_extension method; no extension
            should be loaded against it."""

        manager = PluginAwareExtensionManager(
            '', {constants.CORE: ExtensionUnawarePlugin()})
        manager.add_extension(StubExtension("e1"))
        self.assertFalse("e1" in manager.extensions)

    def test_extensions_not_loaded_for_plugin_without_expected_interface(self):
        class PluginWithoutExpectedIface(object):
            """Lacks the get_foo method the extension expects."""
            supported_extension_aliases = ["supported_extension"]

        manager = PluginAwareExtensionManager(
            '', {constants.CORE: PluginWithoutExpectedIface()})
        manager.add_extension(
            ExtensionExpectingPluginInterface("supported_extension"))
        self.assertFalse("e1" in manager.extensions)

    def test_extensions_are_loaded_for_plugin_with_expected_interface(self):
        class PluginWithExpectedInterface(object):
            """Provides the get_foo method the extension expects."""
            supported_extension_aliases = ["supported_extension"]

            def get_foo(self, bar=None):
                pass

        manager = PluginAwareExtensionManager(
            '', {constants.CORE: PluginWithExpectedInterface()})
        manager.add_extension(
            ExtensionExpectingPluginInterface("supported_extension"))
        self.assertTrue("supported_extension" in manager.extensions)

    def test_extensions_expecting_quantum_plugin_interface_are_loaded(self):
        class ExtensionForQuamtumPluginInterface(StubExtension):
            """No get_plugin_interface override; works with any plugin
            implementing QuantumPluginBase."""

        plugin = StubPlugin(supported_extensions=["e1"])
        manager = PluginAwareExtensionManager(
            '', {constants.CORE: plugin})
        manager.add_extension(ExtensionForQuamtumPluginInterface("e1"))
        self.assertTrue("e1" in manager.extensions)

    def test_extensions_without_need_for__plugin_interface_are_loaded(self):
        class ExtensionWithNoNeedForPluginInterface(StubExtension):
            """Declares no required plugin interface at all."""

            def get_plugin_interface(self):
                return None

        plugin = StubPlugin(supported_extensions=["e1"])
        manager = PluginAwareExtensionManager(
            '', {constants.CORE: plugin})
        manager.add_extension(ExtensionWithNoNeedForPluginInterface("e1"))
        self.assertTrue("e1" in manager.extensions)

    def test_extension_loaded_for_non_core_plugin(self):
        class NonCorePluginExtenstion(StubExtension):
            def get_plugin_interface(self):
                return None

        plugin = StubPlugin(supported_extensions=["e1"])
        manager = PluginAwareExtensionManager(
            '', {constants.DUMMY: plugin})
        manager.add_extension(NonCorePluginExtenstion("e1"))
        self.assertTrue("e1" in manager.extensions)
class ExtensionControllerTest(unittest.TestCase):

    def setUp(self):
        super(ExtensionControllerTest, self).setUp()
        self.test_app = _setup_extensions_test_app()

    def test_index_gets_all_registerd_extensions(self):
        # the FOXNSOX stub extension is registered by the default manager
        listing = self.test_app.get("/extensions").json["extensions"]
        foxnsox = listing[0]
        self.assertEqual(foxnsox["alias"], "FOXNSOX")
        self.assertEqual(foxnsox["namespace"],
                         "http://www.fox.in.socks/api/ext/pie/v1.0")

    def test_extension_can_be_accessed_by_alias(self):
        body = self.test_app.get("/extensions/FOXNSOX").json
        foxnsox_extension = body['extension']
        self.assertEqual(foxnsox_extension["alias"], "FOXNSOX")
        self.assertEqual(foxnsox_extension["namespace"],
                         "http://www.fox.in.socks/api/ext/pie/v1.0")

    def test_show_returns_not_found_for_non_existent_extension(self):
        response = self.test_app.get("/extensions/non_existent", status="*")
        self.assertEqual(response.status_int, 404)
def app_factory(global_conf, **local_conf):
    """Paste app factory: layer the local config over the global one."""
    merged = global_conf.copy()
    merged.update(local_conf)
    return ExtensionsTestApp(merged)
def setup_base_app():
    """Load the bare (extension-free) test app from the test paste config."""
    args = ['--config-file', etcdir('quantum.conf.test')]
    config.parse(args=args)
    return config.load_paste_app('extensions_test_app')
def setup_extensions_middleware(extension_manager=None):
    """Wrap the test app in ExtensionMiddleware.

    When no manager is supplied (or a falsy one is given), a plugin-aware
    manager backed by FakePluginWithExtension is used.
    """
    if not extension_manager:
        extension_manager = PluginAwareExtensionManager(
            extensions_path,
            {constants.CORE: FakePluginWithExtension()})
    args = ['--config-file', etcdir('quantum.conf.test')]
    config.parse(args=args)
    app = config.load_paste_app('extensions_test_app')
    return ExtensionMiddleware(app, ext_mgr=extension_manager)
def _setup_extensions_test_app(extension_manager=None):
    """Return a TestApp wrapping the extensions middleware stack."""
    middleware = setup_extensions_middleware(extension_manager)
    return TestApp(middleware)
class SimpleExtensionManager(object):
    """A minimal extension manager serving at most one resource, one
    action and one request extension."""

    def __init__(self, resource_ext=None, action_ext=None, request_ext=None):
        self.resource_ext = resource_ext
        self.action_ext = action_ext
        self.request_ext = request_ext

    def get_resources(self):
        """Return the resource extension as a (possibly empty) list."""
        return [self.resource_ext] if self.resource_ext else []

    def get_actions(self):
        """Return the action extension as a (possibly empty) list."""
        return [self.action_ext] if self.action_ext else []

    def get_request_extensions(self):
        """Return the request extension as a (possibly empty) list."""
        return [self.request_ext] if self.request_ext else []
|
|
"""
Integration tests for the vault execution module
"""
import logging
import time
import pytest
import salt.utils.path
from tests.support.case import ModuleCase
from tests.support.runtests import RUNTIME_VARS
from tests.support.sminion import create_sminion
from tests.support.unit import SkipTest, skipIf
log = logging.getLogger(__name__)

# path of the vault CLI binary; falsy when vault is not on PATH (the
# skipIf decorators below rely on that to skip the suites)
VAULT_BINARY_PATH = salt.utils.path.which("vault")
@skipIf(not salt.utils.path.which("dockerd"), "Docker not installed")
@skipIf(not VAULT_BINARY_PATH, "Vault not installed")
@pytest.mark.destructive_test
class VaultTestCase(ModuleCase):
    """
    Test vault module
    """

    @classmethod
    def setUpClass(cls):
        # Spin up a dev-mode Vault 0.9.6 container through the minion's
        # docker states, then log in with the fixed dev root token.
        cls.sminion = sminion = create_sminion()
        config = (
            '{"backend": {"file": {"path": "/vault/file"}}, "default_lease_ttl":'
            ' "168h", "max_lease_ttl": "720h", "disable_mlock": true}'
        )
        sminion.states.docker_image.present(name="vault", tag="0.9.6")
        login_attempts = 1
        container_created = False
        while True:
            if container_created:
                # a previous attempt produced a container we could not log
                # in to -- tear it down and recreate from scratch
                sminion.states.docker_container.stopped(name="vault")
                sminion.states.docker_container.absent(name="vault")
            ret = sminion.states.docker_container.running(
                name="vault",
                image="vault:0.9.6",
                port_bindings="8200:8200",
                environment={
                    "VAULT_DEV_ROOT_TOKEN_ID": "testsecret",
                    "VAULT_LOCAL_CONFIG": config,
                },
            )
            log.debug("docker_container.running return: %s", ret)
            container_created = ret["result"]
            # give the vault server a moment to come up before logging in
            time.sleep(5)
            ret = sminion.functions.cmd.run_all(
                cmd="{} login token=testsecret".format(VAULT_BINARY_PATH),
                env={"VAULT_ADDR": "http://127.0.0.1:8200"},
                hide_output=False,
            )
            if ret["retcode"] == 0:
                break
            log.debug("Vault login failed. Return: %s", ret)
            login_attempts += 1
            if login_attempts >= 3:
                # skip (not fail) the whole class when vault never comes up
                raise SkipTest("unable to login to vault")
        ret = sminion.functions.cmd.retcode(
            cmd="{} policy write testpolicy {}/vault.hcl".format(
                VAULT_BINARY_PATH, RUNTIME_VARS.FILES
            ),
            env={"VAULT_ADDR": "http://127.0.0.1:8200"},
        )
        if ret != 0:
            raise SkipTest("unable to assign policy to vault")

    @classmethod
    def tearDownClass(cls):
        # remove the container and the image so repeated runs start clean
        cls.sminion.states.docker_container.stopped(name="vault")
        cls.sminion.states.docker_container.absent(name="vault")
        cls.sminion.states.docker_image.absent(name="vault", force=True)
        cls.sminion = None

    @pytest.mark.slow_test
    def test_write_read_secret(self):
        # write a secret, then read it back whole and by single key
        write_return = self.run_function(
            "vault.write_secret", path="secret/my/secret", user="foo", password="bar"
        )
        self.assertEqual(write_return, True)
        assert self.run_function("vault.read_secret", arg=["secret/my/secret"]) == {
            "password": "bar",
            "user": "foo",
        }
        assert (
            self.run_function("vault.read_secret", arg=["secret/my/secret", "user"])
            == "foo"
        )

    @pytest.mark.slow_test
    def test_write_raw_read_secret(self):
        # write_raw takes a pre-built dict instead of keyword pairs
        assert (
            self.run_function(
                "vault.write_raw",
                path="secret/my/secret2",
                raw={"user2": "foo2", "password2": "bar2"},
            )
            is True
        )
        assert self.run_function("vault.read_secret", arg=["secret/my/secret2"]) == {
            "password2": "bar2",
            "user2": "foo2",
        }

    @pytest.mark.slow_test
    def test_delete_secret(self):
        assert (
            self.run_function(
                "vault.write_secret",
                path="secret/my/secret",
                user="foo",
                password="bar",
            )
            is True
        )
        assert (
            self.run_function("vault.delete_secret", arg=["secret/my/secret"]) is True
        )

    @pytest.mark.slow_test
    def test_list_secrets(self):
        assert (
            self.run_function(
                "vault.write_secret",
                path="secret/my/secret",
                user="foo",
                password="bar",
            )
            is True
        )
        assert self.run_function("vault.list_secrets", arg=["secret/my/"]) == {
            "keys": ["secret"]
        }
@skipIf(not salt.utils.path.which("dockerd"), "Docker not installed")
@skipIf(not salt.utils.path.which("vault"), "Vault not installed")
@pytest.mark.destructive_test
class VaultTestCaseCurrent(ModuleCase):
    """
    Test vault module against current vault
    """

    @classmethod
    def setUpClass(cls):
        # Same bring-up as VaultTestCase, but against vault 1.3.1 (the
        # "_kv2" tests below exercise versioned-secret responses).
        cls.sminion = sminion = create_sminion()
        config = (
            '{"backend": {"file": {"path": "/vault/file"}}, "default_lease_ttl":'
            ' "168h", "max_lease_ttl": "720h", "disable_mlock": true}'
        )
        sminion.states.docker_image.present(name="vault", tag="1.3.1")
        login_attempts = 1
        container_created = False
        while True:
            if container_created:
                # a previous attempt left a container we could not log in
                # to -- recreate it from scratch
                sminion.states.docker_container.stopped(name="vault")
                sminion.states.docker_container.absent(name="vault")
            ret = sminion.states.docker_container.running(
                name="vault",
                image="vault:1.3.1",
                port_bindings="8200:8200",
                environment={
                    "VAULT_DEV_ROOT_TOKEN_ID": "testsecret",
                    "VAULT_LOCAL_CONFIG": config,
                },
            )
            log.debug("docker_container.running return: %s", ret)
            container_created = ret["result"]
            # give the server a moment to come up before attempting login
            time.sleep(5)
            ret = sminion.functions.cmd.run_all(
                cmd="{} login token=testsecret".format(VAULT_BINARY_PATH),
                env={"VAULT_ADDR": "http://127.0.0.1:8200"},
                hide_output=False,
            )
            if ret["retcode"] == 0:
                break
            log.debug("Vault login failed. Return: %s", ret)
            login_attempts += 1
            if login_attempts >= 3:
                # skip (not fail) the whole class when vault never comes up
                raise SkipTest("unable to login to vault")
        ret = sminion.functions.cmd.retcode(
            cmd="{} policy write testpolicy {}/vault.hcl".format(
                VAULT_BINARY_PATH, RUNTIME_VARS.FILES
            ),
            env={"VAULT_ADDR": "http://127.0.0.1:8200"},
        )
        if ret != 0:
            raise SkipTest("unable to assign policy to vault")

    @classmethod
    def tearDownClass(cls):
        # remove the container and the image so repeated runs start clean
        cls.sminion.states.docker_container.stopped(name="vault")
        cls.sminion.states.docker_container.absent(name="vault")
        cls.sminion.states.docker_image.absent(name="vault", force=True)
        cls.sminion = None

    @pytest.mark.slow_test
    def test_write_read_secret_kv2(self):
        write_return = self.run_function(
            "vault.write_secret", path="secret/my/secret", user="foo", password="bar"
        )
        # write_secret output:
        # {'created_time': '2020-01-12T23:09:34.571294241Z', 'destroyed': False,
        # 'version': 1, 'deletion_time': ''}
        expected_write = {"destroyed": False, "deletion_time": ""}
        self.assertDictContainsSubset(expected_write, write_return)
        read_return = self.run_function(
            "vault.read_secret", arg=["secret/my/secret"], metadata=True
        )
        # read_secret output:
        # {'data': {'password': 'bar', 'user': 'foo'},
        # 'metadata': {'created_time': '2020-01-12T23:07:18.829326918Z', 'destroyed': False,
        # 'version': 1, 'deletion_time': ''}}
        expected_read = {"data": {"password": "bar", "user": "foo"}}
        self.assertDictContainsSubset(expected_read, read_return)
        # without metadata=True only the secret data itself comes back
        expected_read = {"password": "bar", "user": "foo"}
        read_return = self.run_function("vault.read_secret", arg=["secret/my/secret"])
        self.assertDictContainsSubset(expected_read, read_return)
        # a second positional arg selects a single key
        read_return = self.run_function(
            "vault.read_secret", arg=["secret/my/secret", "user"]
        )
        self.assertEqual(read_return, "foo")

    @pytest.mark.slow_test
    def test_list_secrets_kv2(self):
        write_return = self.run_function(
            "vault.write_secret", path="secret/my/secret", user="foo", password="bar"
        )
        expected_write = {"destroyed": False, "deletion_time": ""}
        self.assertDictContainsSubset(expected_write, write_return)
        list_return = self.run_function("vault.list_secrets", arg=["secret/my/"])
        self.assertIn("secret", list_return["keys"])

    @pytest.mark.slow_test
    def test_write_raw_read_secret_kv2(self):
        write_return = self.run_function(
            "vault.write_raw",
            path="secret/my/secret2",
            raw={"user2": "foo2", "password2": "bar2"},
        )
        expected_write = {"destroyed": False, "deletion_time": ""}
        self.assertDictContainsSubset(expected_write, write_return)
        read_return = self.run_function(
            "vault.read_secret", arg=["secret/my/secret2"], metadata=True
        )
        expected_read = {"data": {"password2": "bar2", "user2": "foo2"}}
        self.assertDictContainsSubset(expected_read, read_return)
        read_return = self.run_function("vault.read_secret", arg=["secret/my/secret2"])
        expected_read = {"password2": "bar2", "user2": "foo2"}
        self.assertDictContainsSubset(expected_read, read_return)

    @pytest.mark.slow_test
    def test_delete_secret_kv2(self):
        write_return = self.run_function(
            "vault.write_secret",
            path="secret/my/secret3",
            user3="foo3",
            password3="bar3",
        )
        expected_write = {"destroyed": False, "deletion_time": ""}
        self.assertDictContainsSubset(expected_write, write_return)
        delete_return = self.run_function(
            "vault.delete_secret", arg=["secret/my/secret3"]
        )
        self.assertEqual(delete_return, True)

    @pytest.mark.slow_test
    def test_destroy_secret_kv2(self):
        write_return = self.run_function(
            "vault.write_secret",
            path="secret/my/secret4",
            user3="foo4",
            password4="bar4",
        )
        expected_write = {"destroyed": False, "deletion_time": ""}
        self.assertDictContainsSubset(expected_write, write_return)
        # destroy version 1 of the secret
        destroy_return = self.run_function(
            "vault.destroy_secret", arg=["secret/my/secret4", "1"]
        )
        self.assertEqual(destroy_return, True)
        # self.assertIsNone(self.run_function('vault.read_secret', arg=['secret/my/secret4']))
        # list_return = self.run_function('vault.list_secrets', arg=['secret/my/'])
        # self.assertNotIn('secret4', list_return['keys'])
|
|
# Copyright (C) 2013-2016 Martin Vejmelka, CU Denver
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
# A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import absolute_import
from utils import ensure_dir, symlink_unless_exists, timedelta_hours, readhead, Dict
from .downloader import download_url, DownloadError
from datetime import datetime, timedelta
import pytz
import requests
import os
import os.path as osp
import sys
import logging
import six
from six.moves import zip
class GribError(Exception):
    """Signals that a GribSource failed to retrieve GRIB2 files."""
class GribSource(object):
    """
    The parent class of all GRIB2 sources that implements common functionality, for example

    - local GRIB2 validation (file size check)
    - GRIB2 retrieval with retries (smart check whether server implements http-range)
    - symlinking GRIB2 files for ungrib
    """

    def __init__(self, js):
        """
        Initialize grib source with ingest directory (where GRIB files are stored).

        :param js: job structure with at least ingest_path root of GRIB storage and sys_install_path
        """
        # each concrete source stores its files under <ingest_path>/<source id>
        self.ingest_dir = osp.abspath(osp.join(js.get('ingest_path','ingest'),self.id))
        self.cache_dir = osp.abspath(js.get('cache_path','cache'))
        # NOTE(review): osp.abspath(None) raises if sys_install_path is
        # missing -- presumably the job structure always provides it
        self.sys_dir = osp.abspath(js.get('sys_install_path',None))
        # period_hours is a class attribute set by the concrete subclass
        self.interval_seconds = 3600 * self.period_hours

    def colmet_files(self, colmet_files_utc):
        """
        Compute the names of the list of COLMET files from their UTC datetimes.

        :param colmet_files_utc: list of datetime utc times
        :return: file names in the form <prefix>:YYYY-MM-DD_HH
        """
        return ['%s:%04d-%02d-%02d_%02d' % (self.prefix, x.year, x.month, x.day, x.hour) for x in colmet_files_utc]

    def namelist_wps_keys(self):
        """
        Returns the namelist keys that must be modified for this source.

        :return: a dictionary of namelist entries (empty by default)
        """
        return {}

    def vtables(self):
        """
        Returns the vtables that must be used with this source as a table with keys:
        geogrid_vtable, ungrib_vtable, metgrid_vtable.

        :return: a dictionary mapping vtable keys to specific table files
                 (empty by default; concrete sources override)
        """
        return {}

    def namelist_keys(self):
        """
        Some GRIB2 source files require that in namelist.input, certain parameters have
        particular values. Such keys should be returned here.

        :return: a dictionary mapping section names to keys that must be modified.
        """
        return {}

    def clone_vtables(self, tgt):
        """
        Clone all vtables (build symlink name, ensure directories exist, create the symlink)

        :param tgt: target directory into which WPS is cloned
        """
        # where are the symlink locations for vtable files (name of symlink)
        vtable_locs = {'geogrid_vtable': 'geogrid/GEOGRID.TBL',
                       'ungrib_vtable': 'Vtable',
                       'metgrid_vtable': 'metgrid/METGRID.TBL'}
        vtables = self.vtables()
        # vtables: a dictionary with keys from list ['geogrid_vtable', 'ungrib_vtable', 'metgrid_vtable'],
        # which contain paths of the variable tables relative to 'etc/vtables'
        for vtable_id, vtable_path in six.iteritems(vtables):
            # build path to link location
            symlink_path = osp.join(tgt, vtable_locs[vtable_id])
            if not osp.exists(symlink_path):
                symlink_tgt = osp.join(self.sys_dir, "etc/vtables", vtable_path)
                symlink_unless_exists(symlink_tgt, ensure_dir(symlink_path))

    def retrieve_gribs(self, from_utc, to_utc, ref_utc = None, cycle_start_utc = None, download_all_gribs = False):
        """
        Attempts to retrieve the GRIB files for the forecast time range.

        Abstract method -- concrete sources must implement it.  It should be
        first verified whether the GRIB2 files are available locally.
        For any unavailable files, downloads should be initiated.

        :param from_utc: forecast start time
        :param to_utc: forecast end time
        :param ref_utc: a reference time which defines 'now' for the purpose of
                        retrieval, None means datetime.utcnow().
        :param cycle_start_utc: optional forecast cycle start time (semantics
                                defined by the concrete subclass)
        :param download_all_gribs: presumably forces re-download regardless of
                                   local availability -- defined by subclasses
        :return: a list of paths to local GRIB files
        """
        pass

    def download_grib(self, url_base, rel_path):
        """
        Download a GRIB file from a GRIB service and stream to <rel_path> in ingest_dir.

        :param url_base: the base URL part of the GRIB service
        :param rel_path: the relative path of the file (w.r.t GRIB base url and w.r.t self.ingest_dir)
        :raises GribError: when the download fails
        """
        url = url_base + '/' + rel_path
        logging.info('downloading %s grib from %s' % (self.id, url))
        grib_path = osp.join(self.ingest_dir, rel_path)
        try:
            download_url(url, grib_path)
        except DownloadError as e:
            # info_url / info are expected to be set by the concrete subclass
            logging.error('%s cannot download grib file %s' % (self.id, url))
            logging.warning('Please check %s for %s' % (self.info_url, self.info))
            raise GribError('GribSource: failed to download file %s' % url)

    def grib_available_locally(self, path):
        """
        Check if a GRIB2 file is available locally and if it's file size checks out.

        :param path: the GRIB2 file path
        :return: True when the file exists and its size matches the recorded one
        """
        # the expected content length is recorded in <path>.size
        # (presumably written by the downloader)
        info_path = path + '.size'
        if osp.exists(path) and osp.exists(info_path):
            content_size = int(open(info_path).read())
            return osp.getsize(path) == content_size
        else:
            return False

    def symlink_gribs(self, manifest, wps_dir):
        """
        Make symlinks in the form GRIBFILE.XYZ to all manifest files into wps_dir.

        :param manifest: relative paths (w.r.t. ingest_dir) to GRIB files we want linked
        :param wps_dir: the WPS directory where we want the symlinks to appear
        :return:
        """
        for rel_path, grib_name in zip(manifest, generate_grib_names()):
            logging.info('Linking %s -> %s' % ( osp.join(self.ingest_dir, rel_path), osp.join(wps_dir, grib_name)) )
            symlink_unless_exists(osp.join(self.ingest_dir, rel_path), osp.join(wps_dir, grib_name))

    def colmet_missing(self,colmet_prefix,colmet_files):
        """
        Make list of files missing in the cache

        :param colmet_prefix: the cache subdirectory the files should be in
        :param colmet_files: list of all files needed
        :return: list of all files not in cache
        """
        logging.info('%s: %d COLMET intermediate files needed' % (self.id,len(colmet_files)) )
        for f in colmet_files:
            logging.info('Will need file ' +f)
        # check what colmet files are available locally
        colmet_missing = [f for f in colmet_files if not osp.isfile(osp.join(self.cache_dir, colmet_prefix, f))]
        logging.info('%s: %d COLMET intermediate files not in cache' % (self.id,len(colmet_missing)) )
        for f in colmet_missing:
            logging.info('Missing in cache ' +f)
        return colmet_missing

    def available_online(self,links):
        """
        Find the first link from the list that is available online.

        :param links: list of links relative to self.remote_url
        :return: the first available link
        :raises GribError: when no link responds with HTTP 200
        """
        logging.info('GribSource: Looking for grib links available online')
        # probe each candidate with a HEAD request, keep the responsive ones
        available = [x for x in links if readhead(self.remote_url + '/' + x, msg_level=0).status_code == 200]
        if len(available) > 0:
            return available[0]
        else:
            raise GribError('GribSource: failed to find an available online file')

    # instance variables (overridden by concrete subclasses)
    prefix = 'COLMET'      # prefix of intermediate (COLMET) file names
    id = None              # short identifier of the GRIB source
    period_hours = None    # hours between forecast cycles
## Utility functions
def generate_grib_names():
    """
    Keeps generating gribfile names from GRIBFILE.AAA to GRIBFILE.ZZZ,
    in alphabetical order (the order ungrib expects its inputs in).
    """
    alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
    # enumerate 0 .. 26^3-1 and decompose each index into base-26 digits
    for n in range(26 ** 3):
        first, second, third = n // 676, (n // 26) % 26, n % 26
        yield 'GRIBFILE.' + alphabet[first] + alphabet[second] + alphabet[third]
|
|
from __future__ import division
from iotbx.pdb.multimer_reconstruction import multimer
from iotbx.ncs.ncs_preprocess import ncs_only
import mmtbx.monomer_library.server
import iotbx.reflection_file_utils
from libtbx.utils import null_out
from iotbx.pdb import fetch
from libtbx import easy_run
import cPickle as pickle
from glob import glob
import mmtbx.utils
import mmtbx.masks
import iotbx.ncs
import iotbx.pdb
import iotbx.mtz
import shutil
import sys
import os
import re
"""
Collection of all pdb files with NCS relations, either with only master and
MTRIX records or when NCS is found.
Those files are then filtered for resolution and data requirements as
described in the paper
NCS search if done using the default NCS search parameters
(minimum chains in master copy, not minimum transforms)
"""
__author__ = 'Youval Dar'
class File_records(object):
  """ Information collected on every PDB structure """

  def __init__(self):
    # 4-letter PDB ID
    self.pdb_id = ''
    self.n_ncs_copies = None
    self.n_ncs_groups = None
    self.year = None
    self.resolution = None
    self.data_completeness = None
    self.solvent_fraction = None
    self.experiment_type = None
    # True when the MTRIX records describe pure NCS, i.e. the deposited
    # file contains only the master copy (see get_pdb_file_info)
    self.only_master_in_pdb = None
    self.ncs_reported_in_pdb = None
    self.n_atoms_in_asu = None
    # data_to_param_ratio: f_obs.size / atom_number
    self.data_to_param_ratio = None
    self.data_to_param_ratio_ncs = None
    # refinement_records contains Refinement_results objects
    # for example refinement_records['using cartesian NCS']
    self.refinement_records = {}
    self.r_free_header = None
    self.r_work_header = None
    # list containing issues in data or pdb
    self.issues = []
    # model vs data
    self.r_free_model_vs_data = None
    self.r_work_model_vs_data = None

  def __repr__(self):
    """ prints object's summary, one "name: value" line per attribute """
    s = '{:<35}:{:<10}'
    out_lst = get_dict_as_list(self.__dict__,s)
    return '\n'.join(out_lst)
class Refinement_results(object):
  """
  Collect the results of a particular refinement test
  """

  def __init__(self):
    # R values start at zero; every other metric is unknown until set
    self.r_free_init = 0
    self.r_work_init = 0
    self.r_free_final = 0
    self.r_work_final = 0
    for name in ('refinement_time', 'normalized_sym_nbo', 'clashscore',
                 'clashscore_final', 'c_beta_deviation', 'c_beta_final',
                 'map_cc', 'rmsd', 'rama_outliers', 'rama_final',
                 'rotamer_outliers', 'rotamer_final'):
      setattr(self, name, None)

  def __repr__(self):
    """ prints object's summary, sorted by attribute name """
    fmt = '{:<35}:{:<10}'
    lines = get_dict_as_list(self.__dict__, fmt)
    lines.sort()
    return '\n'.join(lines)
class ncs_paper_data_collection(object):
  """ Locates the NCS-paper data folders and collects, updates and
  aggregates the per-PDB File_records objects. """

  def __init__(self):
    self.files_list = []
    self.pdbs_dict = {}
    # the data root differs between the Windows dev machine and the
    # linux network share
    osType = sys.platform
    if osType.startswith('win'):
      s = r'C:\Phenix\Dev\work\work\work\NCS\ncs_paper\ncs_paper_data_files'
      self.ncs_dir = s
    else:
      s= '/net/cci/youval/work/work/NCS/ncs_paper/ncs_paper_data_files/'
      self.ncs_dir = s
    self.asu_dir = os.path.join(self.ncs_dir,'asu')
    self.mtz_dir = os.path.join(self.ncs_dir,'mtz')
    self.pdb_dir = os.path.join(self.ncs_dir,'pdb')
    self.cif_dir = os.path.join(self.ncs_dir,'cif')
    self.data_dir = os.path.join(self.ncs_dir,'data')
    self.figures_dir = os.path.join(self.ncs_dir,'figures')
    # refinement folders
    self.refine_no_ncs_dir = os.path.join(self.ncs_dir,'refine_no_ncs')
    self.refine_cartesian_ncs = os.path.join(self.ncs_dir,'refine_cartesian_ncs')
    self.refine_torsion_ncs = os.path.join(self.ncs_dir,'refine_torsion_ncs')
    self.refine_ncs_sites_no_oper = os.path.join(self.ncs_dir,'refine_ncs_sites_no_oper')
    self.refine_ncs_sites_oper = os.path.join(self.ncs_dir,'refine_ncs_sites_oper')
    self.refine_ncs_adp_oper = os.path.join(self.ncs_dir,'refine_ncs_adp_oper')
    self.refine_ncs_con_all = os.path.join(self.ncs_dir,'refine_ncs_site_adp_oper')
    #
    self.model_vs_data_dir = os.path.join(self.ncs_dir,'model_vs_data')
    self.pdb_not_used_dir = os.path.join(self.ncs_dir,'pdb_with_ncs_not_used')
    self.current_dir = os.getcwd()

  def get_pdb_file_info(self,pdb_id):
    """
    Collect pdb file NCS and header info if there are NCS relations
    and resolution >= 3

    Args:
      pdb_id (str): collect info for a single pdb file

    Return:
      new_rec (File_records object): object containing the collected info,
      or None when the file has no NCS or its resolution is too high
    """
    new_rec = File_records()
    new_rec.pdb_id = pdb_id
    fetched_file = pdb_id + '.pdb'
    if not os.path.isfile(fetched_file):
      fetched_file = fetch.get_pdb (
        id=pdb_id, data_type='pdb',
        mirror='rcsb',quiet=True,log=null_out())
    else:
      print 'file exist in local folder, did not fetch it..'
    fn = os.path.realpath(fetched_file)
    pdb_inp = iotbx.pdb.input(file_name=fn)
    mtrix_info = pdb_inp.process_mtrix_records(eps=0.01)
    # only_master_in_pdb: True when MTRIX records are present and they
    # describe pure NCS transforms
    t = (mtrix_info.as_pdb_string() == '')
    t |= (not ncs_only(mtrix_info))
    new_rec.only_master_in_pdb = not t
    if len(mtrix_info.r) > 0 and new_rec.only_master_in_pdb:
      # reconstruct the complete ASU in place
      m = multimer(file_name=fn,reconstruction_type='cau')
      m.write(
        pdb_output_file_name=fn,
        crystal_symmetry=pdb_inp.crystal_symmetry())
    # note that now the fn is a complete ASU
    ncs_obj = iotbx.ncs.input(file_name=fn)
    if (ncs_obj.number_of_ncs_groups == 0):
      os.remove(fn)
      return None
    new_rec.n_ncs_groups = ncs_obj.number_of_ncs_groups
    new_rec.n_ncs_copies = len(ncs_obj.ncs_transform)
    pio = pdb_inp.get_r_rfree_sigma()
    new_rec.resolution = pio.resolution
    # the study only keeps structures with resolution >= 3
    if new_rec.resolution and (new_rec.resolution < 3):
      os.remove(fn)
      return None
    new_rec.year = pdb_inp.extract_header_year()
    new_rec.experiment_type = pdb_inp.get_experiment_type()
    new_rec.r_work_header = pio.r_work
    new_rec.r_free_header = pio.r_free
    try:
      shutil.move(fn,self.asu_dir)
    except:
      # avoid error if file already exist
      pass
    return new_rec

  def write_to_file(self,file_name,file_record):
    """
    writes pickled object to file_name

    Args:
      file_name (str): such as log_"pdb id"
      file_record (obj): file record object
    """
    if file_record:
      pickle.dump(file_record,open(file_name,'wb'))

  def read_from_file(self,file_name,path=''):
    """
    reads pickled object from file_name

    Args:
      file_name (str): such as log_"pdb id"

    Returns:
      File_records object, or None when the file does not exist
    """
    fn = os.path.join(path,file_name)
    if os.path.isfile(fn):
      return pickle.load(open(fn,'rb'))
    else:
      return None

  def make_mtz_file(self,file_record):
    """
    get cif file and create mtz and add info to pdb_file_records

    Args:
      file_record (obj): File_records object

    Return:
      updated file_record, or None when structure factors are unavailable
    """
    cif = get_cif_file(file_record.pdb_id)
    pdb = os.path.join(self.asu_dir,file_record.pdb_id + '.pdb')
    if cif:
      f_obs,r_free_flags,completeness,data_size = get_structure_factors(
        pdb,cif,self.mtz_dir)
    else:
      return None
    if f_obs:
      file_record.data_completeness = completeness
      pdb_inp = iotbx.pdb.input(file_name=pdb)
      #
      n_atoms = pdb_inp.atoms().size()
      # reflections per refined coordinate (3 coordinates per atom)
      file_record.data_to_param_ratio = data_size/3.0/n_atoms
      file_record.n_atoms_in_asu = n_atoms
      #
      xrs_asu = pdb_inp.xray_structure_simple()
      file_record.solvent_fraction = mmtbx.masks.asu_mask(
        xray_structure=xrs_asu,
        d_min=f_obs.d_min()).asu_mask.contact_surface_fraction
      return file_record
    else:
      return None

  def get_data_to_ncs_ratio(self,file_record):
    """
    Add data-to-parameters ration for a single ncs copy

    Args:
      file_record (obj): File_records object

    Return:
      file_record (obj): File_records object
    """
    pdb = os.path.join(self.asu_dir,file_record.pdb_id + '.pdb')
    ncs_obj = iotbx.ncs.input(file_name=pdb)
    # recover the reflection count recorded by make_mtz_file
    data_size = file_record.data_to_param_ratio * file_record.n_atoms_in_asu
    # fixme: finish this len(ncs_obj.something)
    n_atoms_in_ncs = ncs_obj.ncs_atom_selection.count(True)
    file_record.data_to_param_ratio_ncs = round(data_size/n_atoms_in_ncs,2)
    return file_record

  def collect_all_file_records(self):
    """
    Collect the information on all files

    Returns a dictionary of all data collected
      dict key: pdb_id
      dict val: File_records object
    """
    files_list = glob(os.path.join(self.data_dir,'log_*'))
    for fn in files_list:
      r = pickle.load(open(fn,'rb'))
      self.pdbs_dict[r.pdb_id] = r
    # keep only the trailing 4-letter pdb id of each log file name
    self.files_list = [x[-4:] for x in files_list]
    return self.pdbs_dict

  def collect_refinement_results(self,pdb_id_str=None):
    """ updates records with refinement results

    Args:
      pdb_id_str (str): When given, collect info only for that PDB ID
    """
    paths = get_refinement_folders()
    refine_test_names = get_refine_test_names()
    records = self.collect_all_file_records()
    # folders and test names must correspond one-to-one
    assert len(refine_test_names) == len(paths)
    for test_name,data_path in zip(refine_test_names,paths):
      # get all folders in directory
      if os.path.isdir(data_path):
        if pdb_id_str:
          pdb_id_dirs = [os.path.join(data_path,pdb_id_str)]
          if not os.path.isdir(pdb_id_dirs[0]): continue
        else:
          pdb_id_dirs = glob(os.path.join(data_path,'*'))
        for pdb_dir in pdb_id_dirs:
          # update relevant record with new data
          pdb_id = pdb_dir[-4:]
          pdb_info = records[pdb_id]
          refine_results = collect_refine_data(pdb_dir)
          if refine_results:
            pdb_info.refinement_records[test_name] = refine_results
            fn = os.path.join(self.data_dir,'log_' + pdb_id)
            self.write_to_file(fn,pdb_info)

  def make_csv_file(self,file_name='',records=None,out_path=''):
    """
    creates a csv file from all data collected

    Args:
      file_name (str): output file name
      records (dict): dictionary containing all records
      out_path (str): output file path

    Return:
      bool: True on success, False when there are no records to write
    """
    # get pdb IDs to collect data on
    if not records:
      records = self.collect_all_file_records()
    if not records: return False
    if not file_name:
      file_name = 'ncs_paper_data.csv'
    if not out_path:
      out_path = self.ncs_dir
    headers, table_pos_map = table_headers()
    l = len(table_pos_map)
    # build the header row, ordered by column position
    h = [(v,k) for k,v in table_pos_map.iteritems()]
    h.sort()
    table = [[k for (v,k) in h]]
    # a fresh File_records supplies the canonical attribute names
    file_rec = File_records()
    for pdb_id in records:
      pdb_info = records[pdb_id]
      new_row = ['',] * l
      for key in file_rec.__dict__.iterkeys():
        # fixme: remove the following test since all keys should be present
        if pdb_info.__dict__.has_key(key):
          v = pdb_info.__dict__[key]
        else:
          v = None
        if not (v is None):
          if key == 'refinement_records':
            # unpack the dictionary containing different refinement tests
            for ref_type in v.iterkeys():
              ref_rec = v[ref_type]
              if not (ref_rec is None):
                # iterate over refinement test results
                for ref_k in ref_rec.__dict__.iterkeys():
                  head = headers.refinement_records[ref_type]
                  h = head.__dict__[ref_k]
                  if not (h is None):
                    i = table_pos_map[h]
                    d = ref_rec.__dict__[ref_k]
                    if d is None: d = ''
                    new_row[i] = str(d)
          elif key != 'issues':
            h = headers.__dict__[key]
            i = table_pos_map[h]
            new_row[i] = str(v)
      table.append(new_row)
    table = [','.join(x) for x in table]
    table = '\n'.join(table)
    fn = os.path.join(out_path,file_name)
    open(fn,'w').write(table)
    return True
def get_cif_file(pdb_id):
  """ Fetch the structure-factor (x-ray data) file for a PDB entry.

  Args:
    pdb_id (str): 4-letter PDB ID

  Return:
    path of the fetched file, or False when the fetch failed
  """
  try:
    fetched_file = fetch.get_pdb(
      id=pdb_id, data_type='xray',
      mirror='rcsb',quiet=True,log=null_out())
    return fetched_file
  except Exception:
    # bug fix: the original bare "except:" also swallowed SystemExit and
    # KeyboardInterrupt; callers treat a False return as "no data"
    # (fetch can raise on network failure, unknown ID, or existing file)
    pass
  return False
def get_4_letters_pdb_id(file_name):
  """(str) -> str
  Extract the 4 letter pdb id from a pdb file name, dropping the path and
  the file extension.

  Args:
    file_name (str): pdb file name that may look like pdb1a37.pdb
  Return:
    pdb_id (str): the 4 letter pdb id
  >>>get_4_letters_pdb_id('pdb1a37.pdb')
  1a37
  >>>get_4_letters_pdb_id('1a37')
  1a37
  """
  stem = os.path.splitext(os.path.basename(file_name))[0]
  stem_length = len(stem)
  if stem_length == 4:
    return stem
  if stem_length < 4:
    # too short to contain a pdb id
    return None
  # longer names: either a 'pdbXXXX...' style name or a structure-factor
  # file name such as 'XXXX-sf'
  marker = stem.find('pdb')
  if marker >= 0:
    return stem[marker + 3:marker + 7]
  return stem.replace('-sf','')
def get_structure_factors(pdb,cif,mtz_folder):
  """
  Get f_obs and r_free_flags from a cif file.

  Args:
    mtz_folder (str): path to the folder mtz file will be saved to
    pdb (str): pdb file path
    cif (str): cif file path

  Returns:
    f_obs, r_free_flags, completeness, data_set_size
    (None, None, 0, 0) when an input file is missing or no amplitude /
    intensity data could be extracted.
    Data completeness: Fraction of unmeasured reflections within the
    [d_min, d_max] resolution range of the data set.
  """
  f_obs = None
  i_obs = None
  r_free_flags = None
  # both input files must exist before conversion is attempted
  if not (os.path.isfile(pdb) and os.path.isfile(cif)):
    return None,None,0,0
  miller_arrays = get_miller_arrays(pdb,cif,mtz_folder)
  try:
    # simple path: let mmtbx pick the data and the free flags automatically
    inputs = mmtbx.utils.process_command_line_args(args = [pdb,cif])
    df = mmtbx.utils.determine_data_and_flags(
      reflection_file_server = inputs.get_reflection_file_server(),
      log = null_out())
    f_obs = df.f_obs
    r_free_flags = df.r_free_flags
  except:
    # if the simple way did not work try the following
    # NOTE(review): bare except — also silences KeyboardInterrupt; consider
    # narrowing to Exception
    # label strings that identify amplitude / intensity columns
    fobs_type = ["FOBS,SIGFOBS",'FOBS','FOBS,PHIM',
                 "F(+),SIGF(+),F(-),SIGF(-)","F(+),F(-)"]
    iobs_type = ["IOBS,SIGIOBS",'IOBS','IOBS,PHIM',
                 'I(+),SIGI(+),I(-),SIGI(-)']
    # print miller_arrays[0].completeness()
    # first pass: match arrays by their exact label string
    if miller_arrays:
      for ma in miller_arrays:
        ls = ma.info().label_string()
        if (ls in fobs_type):
          # Consider using Bijvoet mates
          ma = ma.average_bijvoet_mates()
          f_obs = abs(ma)
        elif ls == "R-free-flags":
          r_free_flags = abs(ma)
        elif not f_obs and (ls in iobs_type):
          # Consider using Bijvoet mates
          ma = ma.average_bijvoet_mates()
          # convert i_obs to f_obs
          i_obs = ma
        elif not r_free_flags and ls == "R-free-flags-1":
          r_free_flags = abs(ma.french_wilson(log=null_out()))
      # When fobs where not found via string look of other fobs forms
      # fall back on the array type instead of the label string
      if (not f_obs) and (not i_obs):
        for ma in miller_arrays:
          if ma.is_xray_amplitude_array():
            ma = ma.average_bijvoet_mates()
            f_obs = abs(ma)
      if (not f_obs) and (not i_obs):
        for ma in miller_arrays:
          if ma.is_xray_intensity_array():
            ma = ma.average_bijvoet_mates()
            i_obs = ma
    # intensities only: convert them to amplitudes (French-Wilson)
    if not f_obs and i_obs:
      f_obs = abs(i_obs.french_wilson(log=null_out()))
  #
  if f_obs:
    # f_obs.show_summary()
    data_set_size = f_obs.size()
    if r_free_flags:
      # keep only reflections present in both arrays, then normalize the
      # free flags to booleans
      f_obs, r_free_flags = f_obs.common_sets(r_free_flags)
      r_free_flags = make_r_free_boolean(r_free_flags)
    else:
      # no flags found in the data: generate a fresh free set
      r_free_flags = f_obs.generate_r_free_flags()
    # Data completeness: Fraction of unmeasured reflections within the
    # [d_min, d_max] range,where d_min and d_max are highest and lowest
    # resolution of data set correspondingly.
    completeness = f_obs.array().completeness()
  else:
    return None,None,0,0
  return f_obs, r_free_flags, completeness,data_set_size
def get_miller_arrays(pdb,cif,mtz_folder):
  """
  Convert cif to mtz (written into mtz_folder) and return its miller arrays.

  Converts cif files of the format 'r_name_sf.ent.gz' to an mtz file by
  running the external phenix.cif_as_mtz program, re-running it with extra
  options when the first run asks for them.

  Args:
    pdb (str): pdb file path (used for the --symmetry option if needed)
    cif (str): cif file path; NOTE: this file is deleted on the way out
    mtz_folder (str): folder the mtz file is written to

  Returns:
    miller arrays read from the produced mtz file, or None when an input
    file is missing or the conversion/read failed
  """
  if not (os.path.isfile(pdb) and os.path.isfile(cif)):
    return None
  pdb_id = get_4_letters_pdb_id(cif)
  mtz_fn = os.path.join(mtz_folder, pdb_id + '.mtz')
  # build the phenix.cif_as_mtz command line
  cmd_list = []
  cmd_list.append('phenix.cif_as_mtz')
  cmd_list.append(cif)
  cmd_list.append('--output-file-name={}'.format(mtz_fn))
  cmd_list.append("--merge")
  cmd_list.append("--remove-systematic-absences")
  cmd_list.append("--map-to-asu")
  cmd_list.append("--ignore-bad-sigmas")
  cmd_list.append("--extend-flags")
  cmd = ' '.join(cmd_list)
  r = easy_run.go(cmd)
  # NOTE !!! in windows r does not returns the errors as expected
  # scan stdout for option names the program suggests, then re-run with them
  tmp = [x for x in r.stdout_lines if '--' in x]
  tmp2 = ''.join(tmp)
  run_cmd_again = False
  if '--incompatible_flags_to_work_set' in tmp2:
    cmd_list.append('--incompatible_flags_to_work_set')
    run_cmd_again = True
  if '--symmetry' in tmp2:
    # take the crystal symmetry from the pdb file
    cmd_list.append('--symmetry={}'.format(pdb))
    run_cmd_again = True
  if run_cmd_again:
    cmd = ' '.join(cmd_list)
    easy_run.go(cmd)
  try:
    # Get miller arrays from mtz file
    mtz_object = iotbx.mtz.object(file_name=mtz_fn)
    miller_arrays = mtz_object.as_miller_arrays()
  except:
    # NOTE(review): bare except — conversion failures of any kind end up here
    miller_arrays = None
  # cleanup
  os.remove(cif)
  if not miller_arrays:
    return None
  return miller_arrays
def make_r_free_boolean(r_free_flags):
  '''
  Convert r_free_flags, given in any of the common conventions, to a
  boolean form (True marking the free set).

  Possible conventions for free and working set flags:
    CCP4  : 0 marks the free set, 1,...n-1 the working set
    XPLOR : TEST flag is 1 for the free set, 0 for the working set
    CNS   : TEST flag is 1 for the free set, 0,2,...n-1 for the working set
    SHELX : -1 marks the free set, 1 the working set
    TNT   : 0 marks the free set

  Returns:
    a copy of r_free_flags whose data is (flag == free-set value), or None
    when the free-set flag value could not be guessed
  '''
  # let iotbx guess which integer value marks the free set
  flag_value = iotbx.reflection_file_utils.guess_r_free_flag_value(
    miller_array=r_free_flags)
  if flag_value is None:
    return None
  # True where the stored flag equals the guessed free-set value
  free_set_selection = (r_free_flags.data() == flag_value)
  return r_free_flags.customized_copy(data=free_set_selection)
def get_dict_as_list(d,template,add_title=False):
  """
  Recursively expand a dictionary into formatted lines for printing.

  Args:
    d (dict): a dictionary
    template (str): a format template applied to each key-value pair
    add_title (bool): add a title (the key, underlined) above the printout
      of nested dictionaries

  Returns:
    out_lst (list): list of strings containing the formatted key-value
      pairs; None values, values printing as '0' and empty list/dict
      values are skipped
  """
  out_lst = []
  # sorted(d) replaces d.keys();keys.sort() — dict views have no sort()
  # on python 3
  for k in sorted(d):
    if add_title:
      out_lst.extend([k,'-'*len(k)])
    v = d[k]
    if type(v) is dict:
      # nested dictionary: expand it with its own title
      out_lst.extend(get_dict_as_list(v,template,add_title=True))
    elif 'refinement_records' in type(v).__name__:
      # refinement records know how to print themselves
      out_lst.append(repr(v))
    elif (v is not None) and (str(v) != '0') and (v != []) and (v != {}):
      out_lst.append(template.format(k,str(v)))
  if add_title:
    # closing separator under a titled (nested) section
    out_lst.append('-'*40)
  return out_lst
def collect_refine_data(pdb_dir):
  """
  Collect refinement results from the refinement log file and the refined
  pdb file found in pdb_dir.

  Args:
    pdb_dir (str): folder expected to contain a single refinement .log file
      and, optionally, a single refined .pdb file

  Returns:
    Refinement_results or None: the collected values, or None when no log
      file exists or several log files are present
  """
  refine_results = Refinement_results()
  log_files_list = glob(os.path.join(pdb_dir,'*.log'))
  pdb_files_list = glob(os.path.join(pdb_dir,'*.pdb'))
  if len(log_files_list) > 1:
    msg = "There are several refinement log files in: \n{}\n"
    msg += "please remove the .log extension from the files you do not collect"
    # print() call form is valid on both python 2 and python 3
    print(msg.format(pdb_dir))
    return None
  elif log_files_list:
    # read the log; close the handle explicitly (the original leaked it)
    with open(log_files_list[0],'r') as log_file:
      data = log_file.read().splitlines()
    i = 0
    for l in data:
      # collect data from file
      start_r_val = re.search(r'(Start R-work =)(.*)(\,.*R-free =)(.*)',l)
      final_r_val = re.search(r'(Final R-work =)(.*)(\,.*R-free =)(.*)',l)
      clashscore = re.search(r'(all-atom clashscore.*:)(.*)',l)
      rotamer_outliers = re.search(r'(rotamer outliers.*:)(.*)(\%)',l)
      c_beta_deviation = re.search(r'(cbeta deviations.*:)(.*)',l)
      cpu_time = re.search(r'(Total CPU time:)(.*)(minutes)',l)
      molprobity_statistics = re.search(r'Molprobity statistics',l)
      # i is advanced before use, so data[i + 2] below is the third line
      # after the line that matched
      i += 1
      # update record
      if start_r_val:
        refine_results.r_work_init = float(start_r_val.group(2))
        refine_results.r_free_init = float(start_r_val.group(4))
      if final_r_val:
        refine_results.r_work_final = float(final_r_val.group(2))
        refine_results.r_free_final = float(final_r_val.group(4))
      if cpu_time:
        # convert cpu time to seconds
        refine_results.refinement_time = round(60*float(cpu_time.group(2)),1)
      if clashscore:
        refine_results.clashscore = float(clashscore.group(2))
      if rotamer_outliers:
        refine_results.rotamer_outliers = float(rotamer_outliers.group(2))
      if c_beta_deviation:
        refine_results.c_beta_deviation = float(c_beta_deviation.group(2))
      if molprobity_statistics:
        d = data[i + 2].split()
        if d[0].lower() == 'outliers':
          refine_results.rama_outliers = float(d[2])
    if len(pdb_files_list) == 1:
      try:
        pdb_inp = iotbx.pdb.input(file_name=pdb_files_list[0])
      except Exception:
        # skip files with refinement issues (narrowed from a bare except)
        pdb_inp = None
      if pdb_inp:
        data = pdb_inp.remark_section().as_1d()
        i = 0
        for l in data:
          # collect data from refined pdb file
          l = l.lower()
          clashscore = re.search(r'(all-atom clashscore.*:)(.*)',l)
          rotamer_outliers = re.search(r'(rotamer outliers.*:)(.*)(\%)',l)
          c_beta_deviation = re.search(r'(cbeta deviations.*:)(.*)',l)
          # bug fix: the line was lowercased above, so the pattern must be
          # lowercase too — r'Molprobity statistics' could never match here
          molprobity_statistics = re.search(r'molprobity statistics',l)
          i += 1
          # update record
          if clashscore:
            refine_results.clashscore_final = float(clashscore.group(2))
          if rotamer_outliers:
            refine_results.rotamer_final = float(rotamer_outliers.group(2))
          if c_beta_deviation:
            refine_results.c_beta_final = float(c_beta_deviation.group(2))
          if molprobity_statistics:
            d = data[i + 2].split()
            if d[0].lower() == 'outliers':
              refine_results.rama_final = float(d[2])
    return refine_results
  else:
    return None
def table_headers():
  """
  Build the csv header records and the header -> column position map.

  Returns:
    headers (File_records): maps each data/experiment attribute to its
      column header string; per-test refinement headers are stored in
      headers.refinement_records[test_name]
    table_pos_map (dict): maps a header string to its column index in
      the table
  """
  headers = File_records()
  headers.pdb_id = 'pdb id'
  headers.n_ncs_copies = 'n copies'
  headers.n_ncs_groups = 'n groups'
  headers.year = 'year'
  headers.resolution = 'resolution'
  headers.data_completeness = 'completeness'
  headers.solvent_fraction = 'solvent fraction'
  headers.experiment_type = 'experiment'
  headers.only_master_in_pdb = 'master only'
  headers.n_atoms_in_asu = 'atoms in asu'
  headers.data_to_param_ratio_ncs = 'p-to-d ratio ncs'
  headers.data_to_param_ratio = 'p-to-d ratio asu'
  headers.r_free_header = 'r-free header'
  headers.r_work_header = 'r-work header'
  headers.r_free_model_vs_data = 'r-free model vs data'
  headers.r_work_model_vs_data = 'r-work model vs data'
  # column order of the general (per-pdb) headers
  headers_list = [
    'pdb id',
    'n copies',
    'n groups',
    'year',
    'resolution',
    'completeness',
    'solvent fraction',
    'experiment',
    'master only',
    'atoms in asu',
    'p-to-d ratio ncs',
    'p-to-d ratio asu',
    'r-free header',
    'r-work header',
    'r-free model vs data',
    'r-work model vs data']
  # (Refinement_results attribute, header label prefix) in column order;
  # the full header string is '<label> : <test name>'
  per_test_fields = [
    ('r_free_init', 'r-free init'),
    ('r_work_init', 'r-work init'),
    ('r_free_final', 'r-free final'),
    ('r_work_final', 'r-work final'),
    ('refinement_time', 'refinement time'),
    ('clashscore', 'all-atom clashscore'),
    ('clashscore_final', 'final clashscore'),
    ('rotamer_outliers', 'rotamer outliers'),
    ('rotamer_final', 'rotamer final'),
    ('c_beta_deviation', 'cbeta deviations'),
    ('c_beta_final', 'cbeta final'),
    ('rama_outliers', 'rama outliers'),
    ('rama_final', 'rama final')]
  # build one Refinement_results header record per refinement test,
  # replacing seven hand-written copies that differed only by the suffix
  for test_name in get_refine_test_names():
    test = Refinement_results()
    for attr, label in per_test_fields:
      header = '{} : {}'.format(label, test_name)
      setattr(test, attr, header)
      headers_list.append(header)
    headers.refinement_records[test_name] = test
  # map each header string to its location (column index) in the table
  table_pos_map = {x:i for i,x in enumerate(headers_list)}
  return headers, table_pos_map
def get_refine_test_names():
  """Return the names of all refinement test variants, in a fixed order."""
  return [
    'no ncs',
    'cartesian ncs restraints',
    'torsion ncs restraints',
    'ncs constraints sites',
    'ncs constraints sites and operators',
    'ncs constraints adp and operators',
    'ncs constraints all']
def get_refinement_folders():
  """Return the refinement output folders, one per refinement test."""
  collection = ncs_paper_data_collection()
  # attribute names on the collection object, in the same fixed order as
  # the original hand-written list
  folder_attrs = [
    'refine_no_ncs_dir',
    'refine_cartesian_ncs',
    'refine_torsion_ncs',
    'refine_ncs_sites_no_oper',
    'refine_ncs_sites_oper',
    'refine_ncs_adp_oper',
    'refine_ncs_con_all']
  return [getattr(collection, name) for name in folder_attrs]