repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
zangree/ryu | ryu/tests/unit/packet/test_lldp.py | 17 | 22983 | # Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import unittest
import logging
import six
import struct
import inspect
from nose.tools import ok_, eq_, nottest
from ryu.ofproto import ether
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import lldp
from ryu.lib import addrconv
LOG = logging.getLogger(__name__)
class TestLLDPMandatoryTLV(unittest.TestCase):
    def setUp(self):
        # sample data is based on:
        # http://wiki.wireshark.org/LinkLayerDiscoveryProtocol
        #
        # mandatory TLV only: Ethernet header (dst 01:80:c2:00:00:0e,
        # ethertype 0x88cc) followed by Chassis ID, Port ID, TTL and
        # End-of-LLDPDU TLVs.
        self.data = b'\x01\x80\xc2\x00\x00\x0e\x00\x04' \
            + b'\x96\x1f\xa7\x26\x88\xcc\x02\x07' \
            + b'\x04\x00\x04\x96\x1f\xa7\x26\x04' \
            + b'\x04\x05\x31\x2f\x33\x06\x02\x00' \
            + b'\x78\x00\x00'
    def tearDown(self):
        # Nothing to clean up; fixtures are plain in-memory byte strings.
        pass
    def test_get_tlv_type(self):
        # get_type() decodes the TLV type from the leading typelen octets
        # of a raw TLV buffer (here a Chassis ID TLV).
        buf = b'\x02\x07\x04\x00\x04\x96\x1f\xa7\x26'
        eq_(lldp.LLDPBasicTLV.get_type(buf), lldp.LLDP_TLV_CHASSIS_ID)
    def test_parse_without_ethernet(self):
        # Parse the LLDP payload directly, skipping the Ethernet header.
        buf = self.data[ethernet.ethernet._MIN_LEN:]
        (lldp_pkt, cls, rest_buf) = lldp.lldp.parser(buf)
        eq_(len(rest_buf), 0)

        tlvs = lldp_pkt.tlvs
        # Chassis ID TLV
        eq_(tlvs[0].tlv_type, lldp.LLDP_TLV_CHASSIS_ID)
        eq_(tlvs[0].len, 7)
        eq_(tlvs[0].subtype, lldp.ChassisID.SUB_MAC_ADDRESS)
        eq_(tlvs[0].chassis_id, b'\x00\x04\x96\x1f\xa7\x26')
        # Port ID TLV
        eq_(tlvs[1].tlv_type, lldp.LLDP_TLV_PORT_ID)
        eq_(tlvs[1].len, 4)
        eq_(tlvs[1].subtype, lldp.PortID.SUB_INTERFACE_NAME)
        eq_(tlvs[1].port_id, b'1/3')
        # TTL TLV
        eq_(tlvs[2].tlv_type, lldp.LLDP_TLV_TTL)
        eq_(tlvs[2].len, 2)
        eq_(tlvs[2].ttl, 120)
        # End-of-LLDPDU TLV
        eq_(tlvs[3].tlv_type, lldp.LLDP_TLV_END)
    def test_parse(self):
        # Full-frame parse: protocol iteration yields ethernet then lldp.
        buf = self.data
        pkt = packet.Packet(buf)
        i = iter(pkt)
        eq_(type(next(i)), ethernet.ethernet)
        eq_(type(next(i)), lldp.lldp)
    def test_tlv(self):
        # Constructing a ChassisID TLV must compute len and typelen.
        tlv = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                             chassis_id=b'\x00\x04\x96\x1f\xa7\x26')
        eq_(tlv.tlv_type, lldp.LLDP_TLV_CHASSIS_ID)
        eq_(tlv.len, 7)
        # typelen packs the 7-bit type and 9-bit length into two octets:
        # 0x02 0x07 == type 1 (chassis id), length 7.
        (typelen, ) = struct.unpack('!H', b'\x02\x07')
        eq_(tlv.typelen, typelen)
    def test_serialize_without_ethernet(self):
        # Serializing the TLV list must reproduce the sample payload
        # (self.data minus the Ethernet header).
        tlv_chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                        chassis_id=b'\x00\x04\x96\x1f\xa7\x26')
        tlv_port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                                  port_id=b'1/3')
        tlv_ttl = lldp.TTL(ttl=120)
        tlv_end = lldp.End()
        tlvs = (tlv_chassis_id, tlv_port_id, tlv_ttl, tlv_end)
        lldp_pkt = lldp.lldp(tlvs)
        eq_(lldp_pkt.serialize(None, None),
            self.data[ethernet.ethernet._MIN_LEN:])
    def test_serialize(self):
        # Build the full frame (Ethernet + LLDP) and compare against the
        # sample capture bytes.
        pkt = packet.Packet()

        dst = lldp.LLDP_MAC_NEAREST_BRIDGE
        src = '00:04:96:1f:a7:26'
        ethertype = ether.ETH_TYPE_LLDP
        eth_pkt = ethernet.ethernet(dst, src, ethertype)
        pkt.add_protocol(eth_pkt)

        tlv_chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                        chassis_id=addrconv.mac.
                                        text_to_bin(src))
        tlv_port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                                  port_id=b'1/3')
        tlv_ttl = lldp.TTL(ttl=120)
        tlv_end = lldp.End()
        tlvs = (tlv_chassis_id, tlv_port_id, tlv_ttl, tlv_end)
        lldp_pkt = lldp.lldp(tlvs)
        pkt.add_protocol(lldp_pkt)

        eq_(len(pkt.protocols), 2)

        pkt.serialize()
        eq_(pkt.data, self.data)
    def test_to_string(self):
        # str()/repr() of an lldp packet lists each TLV as
        # "ClassName(attr=value,...)". The expected string is rebuilt here
        # using inspect.getmembers() so attribute order (alphabetical)
        # matches the implementation's stringify logic.
        chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                    chassis_id=b'\x00\x04\x96\x1f\xa7\x26')
        port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                              port_id=b'1/3')
        ttl = lldp.TTL(ttl=120)
        end = lldp.End()
        tlvs = (chassis_id, port_id, ttl, end)
        lldp_pkt = lldp.lldp(tlvs)

        # expected ChassisID string
        chassis_id_values = {'subtype': lldp.ChassisID.SUB_MAC_ADDRESS,
                             'chassis_id': b'\x00\x04\x96\x1f\xa7\x26',
                             'len': chassis_id.len,
                             'typelen': chassis_id.typelen}
        _ch_id_str = ','.join(['%s=%s' % (k, repr(chassis_id_values[k]))
                               for k, v in inspect.getmembers(chassis_id)
                               if k in chassis_id_values])
        tlv_chassis_id_str = '%s(%s)' % (lldp.ChassisID.__name__, _ch_id_str)

        # expected PortID string
        port_id_values = {'subtype': port_id.subtype,
                          'port_id': port_id.port_id,
                          'len': port_id.len,
                          'typelen': port_id.typelen}
        _port_id_str = ','.join(['%s=%s' % (k, repr(port_id_values[k]))
                                 for k, v in inspect.getmembers(port_id)
                                 if k in port_id_values])
        tlv_port_id_str = '%s(%s)' % (lldp.PortID.__name__, _port_id_str)

        # expected TTL string
        ttl_values = {'ttl': ttl.ttl,
                      'len': ttl.len,
                      'typelen': ttl.typelen}
        _ttl_str = ','.join(['%s=%s' % (k, repr(ttl_values[k]))
                             for k, v in inspect.getmembers(ttl)
                             if k in ttl_values])
        tlv_ttl_str = '%s(%s)' % (lldp.TTL.__name__, _ttl_str)

        # expected End string
        end_values = {'len': end.len,
                      'typelen': end.typelen}
        _end_str = ','.join(['%s=%s' % (k, repr(end_values[k]))
                             for k, v in inspect.getmembers(end)
                             if k in end_values])
        tlv_end_str = '%s(%s)' % (lldp.End.__name__, _end_str)

        # expected tlvs tuple string
        _tlvs_str = '(%s, %s, %s, %s)'
        tlvs_str = _tlvs_str % (tlv_chassis_id_str,
                                tlv_port_id_str,
                                tlv_ttl_str,
                                tlv_end_str)

        # expected lldp string
        _lldp_str = '%s(tlvs=%s)'
        lldp_str = _lldp_str % (lldp.lldp.__name__,
                                tlvs_str)

        eq_(str(lldp_pkt), lldp_str)
        eq_(repr(lldp_pkt), lldp_str)
    def test_json(self):
        # to_jsondict()/from_jsondict() round trip must preserve the packet.
        chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                    chassis_id=b'\x00\x04\x96\x1f\xa7\x26')
        port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                              port_id=b'1/3')
        ttl = lldp.TTL(ttl=120)
        end = lldp.End()
        tlvs = (chassis_id, port_id, ttl, end)
        lldp1 = lldp.lldp(tlvs)
        jsondict = lldp1.to_jsondict()
        lldp2 = lldp.lldp.from_jsondict(jsondict['lldp'])
        eq_(str(lldp1), str(lldp2))
class TestLLDPOptionalTLV(unittest.TestCase):
    def setUp(self):
        # sample data is based on:
        # http://wiki.wireshark.org/LinkLayerDiscoveryProtocol
        #
        # includes optional TLVs: Port Description, System Name,
        # System Description, System Capabilities, Management Address and
        # several Organizationally Specific TLVs, after the mandatory ones.
        self.data = b'\x01\x80\xc2\x00\x00\x0e\x00\x01' \
            + b'\x30\xf9\xad\xa0\x88\xcc\x02\x07' \
            + b'\x04\x00\x01\x30\xf9\xad\xa0\x04' \
            + b'\x04\x05\x31\x2f\x31\x06\x02\x00' \
            + b'\x78\x08\x17\x53\x75\x6d\x6d\x69' \
            + b'\x74\x33\x30\x30\x2d\x34\x38\x2d' \
            + b'\x50\x6f\x72\x74\x20\x31\x30\x30' \
            + b'\x31\x00\x0a\x0d\x53\x75\x6d\x6d' \
            + b'\x69\x74\x33\x30\x30\x2d\x34\x38' \
            + b'\x00\x0c\x4c\x53\x75\x6d\x6d\x69' \
            + b'\x74\x33\x30\x30\x2d\x34\x38\x20' \
            + b'\x2d\x20\x56\x65\x72\x73\x69\x6f' \
            + b'\x6e\x20\x37\x2e\x34\x65\x2e\x31' \
            + b'\x20\x28\x42\x75\x69\x6c\x64\x20' \
            + b'\x35\x29\x20\x62\x79\x20\x52\x65' \
            + b'\x6c\x65\x61\x73\x65\x5f\x4d\x61' \
            + b'\x73\x74\x65\x72\x20\x30\x35\x2f' \
            + b'\x32\x37\x2f\x30\x35\x20\x30\x34' \
            + b'\x3a\x35\x33\x3a\x31\x31\x00\x0e' \
            + b'\x05\x01\x00\x14\x00\x14\x10\x0e' \
            + b'\x07' \
            + b'\x06\x00\x01\x30\xf9\xad\xa0\x02' \
            + b'\x00\x00\x03\xe9\x00\xfe\x07\x00' \
            + b'\x12\x0f\x02\x07\x01\x00\xfe\x09' \
            + b'\x00\x12\x0f\x01\x03\x6c\x00\x00' \
            + b'\x10\xfe\x09\x00\x12\x0f\x03\x01' \
            + b'\x00\x00\x00\x00\xfe\x06\x00\x12' \
            + b'\x0f\x04\x05\xf2\xfe\x06\x00\x80' \
            + b'\xc2\x01\x01\xe8\xfe\x07\x00\x80' \
            + b'\xc2\x02\x01\x00\x00\xfe\x17\x00' \
            + b'\x80\xc2\x03\x01\xe8\x10\x76\x32' \
            + b'\x2d\x30\x34\x38\x38\x2d\x30\x33' \
            + b'\x2d\x30\x35\x30\x35\x00\xfe\x05' \
            + b'\x00\x80\xc2\x04\x00\x00\x00'
    def tearDown(self):
        # Nothing to clean up; fixtures are plain in-memory byte strings.
        pass
    def test_parse(self):
        # Parse the full frame and verify each optional TLV's decoded fields.
        buf = self.data
        pkt = packet.Packet(buf)
        i = iter(pkt)

        eq_(type(next(i)), ethernet.ethernet)
        lldp_pkt = next(i)
        eq_(type(lldp_pkt), lldp.lldp)

        tlvs = lldp_pkt.tlvs

        # Port Description
        eq_(tlvs[3].tlv_type, lldp.LLDP_TLV_PORT_DESCRIPTION)
        eq_(tlvs[3].port_description, b'Summit300-48-Port 1001\x00')

        # System Name
        eq_(tlvs[4].tlv_type, lldp.LLDP_TLV_SYSTEM_NAME)
        eq_(tlvs[4].system_name, b'Summit300-48\x00')

        # System Description
        eq_(tlvs[5].tlv_type, lldp.LLDP_TLV_SYSTEM_DESCRIPTION)
        eq_(tlvs[5].system_description,
            b'Summit300-48 - Version 7.4e.1 (Build 5) '
            + b'by Release_Master 05/27/05 04:53:11\x00')

        # SystemCapabilities
        eq_(tlvs[6].tlv_type, lldp.LLDP_TLV_SYSTEM_CAPABILITIES)
        eq_(tlvs[6].subtype, lldp.ChassisID.SUB_CHASSIS_COMPONENT)
        eq_(tlvs[6].system_cap & lldp.SystemCapabilities.CAP_MAC_BRIDGE,
            lldp.SystemCapabilities.CAP_MAC_BRIDGE)
        eq_(tlvs[6].enabled_cap & lldp.SystemCapabilities.CAP_MAC_BRIDGE,
            lldp.SystemCapabilities.CAP_MAC_BRIDGE)
        eq_(tlvs[6].system_cap & lldp.SystemCapabilities.CAP_TELEPHONE, 0)
        eq_(tlvs[6].enabled_cap & lldp.SystemCapabilities.CAP_TELEPHONE, 0)

        # Management Address
        eq_(tlvs[7].tlv_type, lldp.LLDP_TLV_MANAGEMENT_ADDRESS)
        eq_(tlvs[7].addr_len, 7)
        eq_(tlvs[7].addr, b'\x00\x01\x30\xf9\xad\xa0')
        eq_(tlvs[7].intf_num, 1001)

        # Organizationally Specific
        eq_(tlvs[8].tlv_type, lldp.LLDP_TLV_ORGANIZATIONALLY_SPECIFIC)
        eq_(tlvs[8].oui, b'\x00\x12\x0f')  # IEEE 802.3
        eq_(tlvs[8].subtype, 0x02)  # Power Via MDI

        # End
        eq_(tlvs[16].tlv_type, lldp.LLDP_TLV_END)
    def test_parse_corrupted(self):
        # Parsing a truncated frame must not raise; success here is simply
        # "no exception", so there are deliberately no assertions.
        buf = self.data
        pkt = packet.Packet(buf[:128])
    def test_serialize(self):
        # Serialize a frame with the optional TLVs and compare it with the
        # matching prefix of the sample capture.
        pkt = packet.Packet()

        dst = lldp.LLDP_MAC_NEAREST_BRIDGE
        src = '00:01:30:f9:ad:a0'
        ethertype = ether.ETH_TYPE_LLDP
        eth_pkt = ethernet.ethernet(dst, src, ethertype)
        pkt.add_protocol(eth_pkt)

        tlv_chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                        chassis_id=addrconv.mac.
                                        text_to_bin(src))
        tlv_port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                                  port_id=b'1/1')
        tlv_ttl = lldp.TTL(ttl=120)
        tlv_port_description = lldp.PortDescription(
            port_description=b'Summit300-48-Port 1001\x00')
        tlv_system_name = lldp.SystemName(system_name=b'Summit300-48\x00')
        tlv_system_description = lldp.SystemDescription(
            system_description=b'Summit300-48 - Version 7.4e.1 (Build 5) '
                               + b'by Release_Master 05/27/05 04:53:11\x00')
        tlv_system_capabilities = lldp.SystemCapabilities(
            subtype=lldp.ChassisID.SUB_CHASSIS_COMPONENT,
            system_cap=0x14,
            enabled_cap=0x14)
        tlv_management_address = lldp.ManagementAddress(
            addr_subtype=0x06, addr=b'\x00\x01\x30\xf9\xad\xa0',
            intf_subtype=0x02, intf_num=1001,
            oid=b'')
        tlv_organizationally_specific = lldp.OrganizationallySpecific(
            oui=b'\x00\x12\x0f', subtype=0x02, info=b'\x07\x01\x00')
        tlv_end = lldp.End()
        tlvs = (tlv_chassis_id, tlv_port_id, tlv_ttl, tlv_port_description,
                tlv_system_name, tlv_system_description,
                tlv_system_capabilities, tlv_management_address,
                tlv_organizationally_specific, tlv_end)
        lldp_pkt = lldp.lldp(tlvs)
        pkt.add_protocol(lldp_pkt)

        eq_(len(pkt.protocols), 2)

        pkt.serialize()

        # self.data has many organizationally specific TLVs that are not
        # re-built here, so compare only the serialized prefix (minus the
        # trailing End TLV bytes).
        data = six.binary_type(pkt.data[:-2])
        eq_(data, self.data[:len(data)])
    def test_to_string(self):
        # Build every TLV type, construct the expected
        # "ClassName(attr=value,...)" string for each (attribute order is
        # alphabetical via inspect.getmembers()), then compare with
        # str()/repr() of the whole packet.
        chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                    chassis_id=b'\x00\x01\x30\xf9\xad\xa0')
        port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                              port_id=b'1/1')
        ttl = lldp.TTL(ttl=120)
        port_desc = lldp.PortDescription(
            port_description=b'Summit300-48-Port 1001\x00')
        sys_name = lldp.SystemName(system_name=b'Summit300-48\x00')
        sys_desc = lldp.SystemDescription(
            system_description=b'Summit300-48 - Version 7.4e.1 (Build 5) '
                               + b'by Release_Master 05/27/05 04:53:11\x00')
        sys_cap = lldp.SystemCapabilities(
            subtype=lldp.ChassisID.SUB_CHASSIS_COMPONENT,
            system_cap=0x14,
            enabled_cap=0x14)
        man_addr = lldp.ManagementAddress(
            addr_subtype=0x06, addr=b'\x00\x01\x30\xf9\xad\xa0',
            intf_subtype=0x02, intf_num=1001,
            oid='')
        org_spec = lldp.OrganizationallySpecific(
            oui=b'\x00\x12\x0f', subtype=0x02, info=b'\x07\x01\x00')
        end = lldp.End()
        tlvs = (chassis_id, port_id, ttl, port_desc, sys_name,
                sys_desc, sys_cap, man_addr, org_spec, end)
        lldp_pkt = lldp.lldp(tlvs)

        # ChassisID string
        chassis_id_values = {'subtype': lldp.ChassisID.SUB_MAC_ADDRESS,
                             'chassis_id': b'\x00\x01\x30\xf9\xad\xa0',
                             'len': chassis_id.len,
                             'typelen': chassis_id.typelen}
        _ch_id_str = ','.join(['%s=%s' % (k, repr(chassis_id_values[k]))
                               for k, v in inspect.getmembers(chassis_id)
                               if k in chassis_id_values])
        tlv_chassis_id_str = '%s(%s)' % (lldp.ChassisID.__name__, _ch_id_str)

        # PortID string
        port_id_values = {'subtype': port_id.subtype,
                          'port_id': port_id.port_id,
                          'len': port_id.len,
                          'typelen': port_id.typelen}
        _port_id_str = ','.join(['%s=%s' % (k, repr(port_id_values[k]))
                                 for k, v in inspect.getmembers(port_id)
                                 if k in port_id_values])
        tlv_port_id_str = '%s(%s)' % (lldp.PortID.__name__, _port_id_str)

        # TTL string
        ttl_values = {'ttl': ttl.ttl,
                      'len': ttl.len,
                      'typelen': ttl.typelen}
        _ttl_str = ','.join(['%s=%s' % (k, repr(ttl_values[k]))
                             for k, v in inspect.getmembers(ttl)
                             if k in ttl_values])
        tlv_ttl_str = '%s(%s)' % (lldp.TTL.__name__, _ttl_str)

        # PortDescription string
        port_desc_values = {'tlv_info': port_desc.tlv_info,
                            'len': port_desc.len,
                            'typelen': port_desc.typelen}
        _port_desc_str = ','.join(['%s=%s' % (k, repr(port_desc_values[k]))
                                   for k, v in inspect.getmembers(port_desc)
                                   if k in port_desc_values])
        tlv_port_desc_str = '%s(%s)' % (lldp.PortDescription.__name__,
                                        _port_desc_str)

        # SystemName string
        sys_name_values = {'tlv_info': sys_name.tlv_info,
                           'len': sys_name.len,
                           'typelen': sys_name.typelen}
        _system_name_str = ','.join(['%s=%s' % (k, repr(sys_name_values[k]))
                                     for k, v in inspect.getmembers(sys_name)
                                     if k in sys_name_values])
        tlv_system_name_str = '%s(%s)' % (lldp.SystemName.__name__,
                                          _system_name_str)

        # SystemDescription string
        sys_desc_values = {'tlv_info': sys_desc.tlv_info,
                           'len': sys_desc.len,
                           'typelen': sys_desc.typelen}
        _sys_desc_str = ','.join(['%s=%s' % (k, repr(sys_desc_values[k]))
                                  for k, v in inspect.getmembers(sys_desc)
                                  if k in sys_desc_values])
        tlv_sys_desc_str = '%s(%s)' % (lldp.SystemDescription.__name__,
                                       _sys_desc_str)

        # SystemCapabilities string
        sys_cap_values = {'subtype': lldp.ChassisID.SUB_CHASSIS_COMPONENT,
                          'system_cap': 0x14,
                          'enabled_cap': 0x14,
                          'len': sys_cap.len,
                          'typelen': sys_cap.typelen}
        _sys_cap_str = ','.join(['%s=%s' % (k, repr(sys_cap_values[k]))
                                 for k, v in inspect.getmembers(sys_cap)
                                 if k in sys_cap_values])
        tlv_sys_cap_str = '%s(%s)' % (lldp.SystemCapabilities.__name__,
                                      _sys_cap_str)

        # ManagementAddress string
        man_addr_values = {'addr_subtype': 0x06,
                           'addr': b'\x00\x01\x30\xf9\xad\xa0',
                           'addr_len': man_addr.addr_len,
                           'intf_subtype': 0x02,
                           'intf_num': 1001,
                           'oid': '',
                           'oid_len': man_addr.oid_len,
                           'len': man_addr.len,
                           'typelen': man_addr.typelen}
        _man_addr_str = ','.join(['%s=%s' % (k, repr(man_addr_values[k]))
                                  for k, v in inspect.getmembers(man_addr)
                                  if k in man_addr_values])
        tlv_man_addr_str = '%s(%s)' % (lldp.ManagementAddress.__name__,
                                       _man_addr_str)

        # OrganizationallySpecific string
        org_spec_values = {'oui': b'\x00\x12\x0f',
                           'subtype': 0x02,
                           'info': b'\x07\x01\x00',
                           'len': org_spec.len,
                           'typelen': org_spec.typelen}
        _org_spec_str = ','.join(['%s=%s' % (k, repr(org_spec_values[k]))
                                  for k, v in inspect.getmembers(org_spec)
                                  if k in org_spec_values])
        tlv_org_spec_str = '%s(%s)' % (lldp.OrganizationallySpecific.__name__,
                                       _org_spec_str)

        # End string
        end_values = {'len': end.len,
                      'typelen': end.typelen}
        _end_str = ','.join(['%s=%s' % (k, repr(end_values[k]))
                             for k, v in inspect.getmembers(end)
                             if k in end_values])
        tlv_end_str = '%s(%s)' % (lldp.End.__name__, _end_str)

        # tlvs string
        _tlvs_str = '(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
        tlvs_str = _tlvs_str % (tlv_chassis_id_str,
                                tlv_port_id_str,
                                tlv_ttl_str,
                                tlv_port_desc_str,
                                tlv_system_name_str,
                                tlv_sys_desc_str,
                                tlv_sys_cap_str,
                                tlv_man_addr_str,
                                tlv_org_spec_str,
                                tlv_end_str)

        # lldp string
        _lldp_str = '%s(tlvs=%s)'
        lldp_str = _lldp_str % (lldp.lldp.__name__,
                                tlvs_str)

        eq_(str(lldp_pkt), lldp_str)
        eq_(repr(lldp_pkt), lldp_str)
    def test_json(self):
        # to_jsondict()/from_jsondict() round trip with every TLV type.
        chassis_id = lldp.ChassisID(subtype=lldp.ChassisID.SUB_MAC_ADDRESS,
                                    chassis_id=b'\x00\x01\x30\xf9\xad\xa0')
        port_id = lldp.PortID(subtype=lldp.PortID.SUB_INTERFACE_NAME,
                              port_id=b'1/1')
        ttl = lldp.TTL(ttl=120)
        port_desc = lldp.PortDescription(
            port_description=b'Summit300-48-Port 1001\x00')
        sys_name = lldp.SystemName(system_name=b'Summit300-48\x00')
        sys_desc = lldp.SystemDescription(
            system_description=b'Summit300-48 - Version 7.4e.1 (Build 5) '
                               + b'by Release_Master 05/27/05 04:53:11\x00')
        sys_cap = lldp.SystemCapabilities(
            subtype=lldp.ChassisID.SUB_CHASSIS_COMPONENT,
            system_cap=0x14,
            enabled_cap=0x14)
        man_addr = lldp.ManagementAddress(
            addr_subtype=0x06, addr=b'\x00\x01\x30\xf9\xad\xa0',
            intf_subtype=0x02, intf_num=1001,
            oid='')
        org_spec = lldp.OrganizationallySpecific(
            oui=b'\x00\x12\x0f', subtype=0x02, info=b'\x07\x01\x00')
        end = lldp.End()
        tlvs = (chassis_id, port_id, ttl, port_desc, sys_name,
                sys_desc, sys_cap, man_addr, org_spec, end)
        lldp1 = lldp.lldp(tlvs)
        jsondict = lldp1.to_jsondict()
        lldp2 = lldp.lldp.from_jsondict(jsondict['lldp'])
        eq_(str(lldp1), str(lldp2))
| apache-2.0 |
camradal/ansible | lib/ansible/modules/system/known_hosts.py | 5 | 11771 | #!/usr/bin/python
"""
Ansible module to manage the ssh known_hosts file.
Copyright(c) 2014, Matthew Vernon <mcv21@cam.ac.uk>
This module is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This module is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this module. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: known_hosts
short_description: Add or remove a host from the C(known_hosts) file
description:
- The M(known_hosts) module lets you add or remove a host keys from the C(known_hosts) file.
- Starting at Ansible 2.2, multiple entries per host are allowed, but only one for each key type supported by ssh.
This is useful if you're going to want to use the M(git) module over ssh, for example.
- If you have a very large number of host keys to manage, you will find the M(template) module more useful.
version_added: "1.9"
options:
name:
aliases: [ 'host' ]
description:
- The host to add or remove (must match a host specified in key)
required: true
default: null
key:
description:
- The SSH public host key, as a string (required if state=present, optional when state=absent, in which case all keys for the host are removed). The key must be in the right format for ssh (see sshd(1), section "SSH_KNOWN_HOSTS FILE FORMAT")
required: false
default: null
path:
description:
- The known_hosts file to edit
required: no
default: "(homedir)+/.ssh/known_hosts"
hash_host:
description:
- Hash the hostname in the known_hosts file
required: no
default: no
version_added: "2.3"
state:
description:
- I(present) to add the host key, I(absent) to remove it.
choices: [ "present", "absent" ]
required: no
default: present
requirements: [ ]
author: "Matthew Vernon (@mcv21)"
'''
EXAMPLES = '''
- name: tell the host about our servers it might want to ssh to
known_hosts:
path: /etc/ssh/ssh_known_hosts
name: foo.com.invalid
key: "{{ lookup('file', 'pubkeys/foo.com.invalid') }}"
'''
# Makes sure public host keys are present or absent in the given known_hosts
# file.
#
# Arguments
# =========
# name = hostname whose key should be added (alias: host)
# key = line(s) to add to known_hosts file
# path = the known_hosts file to edit (default: ~/.ssh/known_hosts)
# hash_host = yes|no (default: no) hash the hostname in the known_hosts file
# state = absent|present (default: present)
import os
import os.path
import tempfile
import errno
import re
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.basic import AnsibleModule
def enforce_state(module, params):
    """
    Add or remove a host key in the known_hosts file.

    module: the AnsibleModule instance (used for running ssh-keygen,
            error reporting and atomic file replacement).
    params: the module parameters dict; 'changed' is set on it when the
            file was (or would be) modified, and it is returned.
    """
    host = params["name"]
    key = params.get("key", None)
    port = params.get("port", None)
    path = params.get("path")
    hash_host = params.get("hash_host")
    state = params.get("state")
    # Find the ssh-keygen binary (fail_json if missing).
    sshkeygen = module.get_bin_path("ssh-keygen", True)

    # Trailing newline in files gets lost, so re-add if necessary
    if key and key[-1] != '\n':
        key += '\n'

    if key is None and state != "absent":
        module.fail_json(msg="No key specified when adding a host")

    sanity_check(module, host, key, sshkeygen)

    found, replace_or_add, found_line, key = search_for_host_key(
        module, host, key, hash_host, path, sshkeygen)

    # We will change state if found==True & state!="present"
    # or found==False & state=="present"
    # i.e found XOR (state=="present")
    # Alternatively, if replace_or_add is true (i.e. key present, and we must change it)
    if module.check_mode:
        module.exit_json(changed=replace_or_add or (state == "present") != found)

    # Now do the work.

    # Only remove whole host if found and no key provided
    if found and key is None and state == "absent":
        module.run_command([sshkeygen, '-R', host, '-f', path], check_rc=True)
        params['changed'] = True

    # Next, add a new (or replacing) entry
    if replace_or_add or found != (state == "present"):
        try:
            inf = open(path, "r")
        except IOError:
            e = get_exception()
            if e.errno == errno.ENOENT:
                # Missing file is fine when adding: we create it below.
                inf = None
            else:
                module.fail_json(msg="Failed to read %s: %s" %
                                 (path, str(e)))
        try:
            # Rewrite the file into a temp file in the same directory so
            # atomic_move stays on the same filesystem.
            outf = tempfile.NamedTemporaryFile(dir=os.path.dirname(path))
            if inf is not None:
                for line_number, line in enumerate(inf):
                    if found_line == (line_number + 1) and (replace_or_add or state == 'absent'):
                        continue  # skip this line to replace its key
                    outf.write(line)
                inf.close()
            if state == 'present':
                outf.write(key)
            outf.flush()
            module.atomic_move(outf.name, path)
        except (IOError, OSError):
            e = get_exception()
            module.fail_json(msg="Failed to write to file %s: %s" %
                             (path, str(e)))

        try:
            outf.close()
        # Fixed: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt. Best-effort close only; atomic_move may have
        # already consumed the temp file.
        except Exception:
            pass

        params['changed'] = True

    return params
def sanity_check(module, host, key, sshkeygen):
    '''Check supplied key is sensible

    host and key are parameters provided by the user; If the host
    provided is inconsistent with the key supplied, then this function
    quits, providing an error to the user.
    sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
    '''
    # If no key supplied, we're doing a removal, and have nothing to check here.
    if key is None:
        return

    # Rather than parsing the key ourselves, get ssh-keygen to do it
    # (this is essential for hashed keys, but otherwise useful, as the
    # key question is whether ssh-keygen thinks the key matches the host).

    # The approach is to write the key to a temporary file,
    # and then attempt to look up the specified host in that file.
    try:
        outf = tempfile.NamedTemporaryFile()
        outf.write(key)
        outf.flush()
    except IOError:
        e = get_exception()
        module.fail_json(msg="Failed to write to temporary file %s: %s" %
                         (outf.name, str(e)))
    rc, stdout, stderr = module.run_command([sshkeygen, '-F', host,
                                             '-f', outf.name],
                                            check_rc=True)

    try:
        outf.close()
    # Fixed: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; closing the temp file is best-effort only.
    except Exception:
        pass

    if stdout == '':  # host not found
        module.fail_json(msg="Host parameter does not match hashed host field in supplied key")
def search_for_host_key(module, host, key, hash_host, path, sshkeygen):
    '''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line,key)

    Looks up host and keytype in the known_hosts file path; if it's there, looks to see
    if one of those entries matches key. Returns:
    found (Boolean): is host found in path?
    replace_or_add (Boolean): is the key in path different to that supplied by user?
    found_line (int or None): the line where a key of the same type was found
    if found=False, then replace is always False.
    sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
    '''
    if os.path.exists(path) == False:
        return False, False, None, key

    sshkeygen_command = [sshkeygen, '-F', host, '-f', path]

    # openssh >=6.4 has changed ssh-keygen behaviour such that it returns
    # 1 if no host is found, whereas previously it returned 0
    rc, stdout, stderr = module.run_command(sshkeygen_command,
                                            check_rc=False)
    if stdout == '' and stderr == '' and (rc == 0 or rc == 1):
        return False, False, None, key  # host not found, no other errors
    if rc != 0:  # something went wrong
        module.fail_json(msg="ssh-keygen failed (rc=%d,stdout='%s',stderr='%s')" % (rc, stdout, stderr))

    # If user supplied no key, we don't want to try and replace anything with it
    if key is None:
        return True, False, None, key

    lines = stdout.split('\n')
    new_key = normalize_known_hosts_key(key)

    # Run again with -H to obtain the hashed form of each matching line.
    sshkeygen_command.insert(1, '-H')
    rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=False)
    if rc != 0:  # something went wrong
        module.fail_json(msg="ssh-keygen failed to hash host (rc=%d,stdout='%s',stderr='%s')" % (rc, stdout, stderr))
    hashed_lines = stdout.split('\n')

    for lnum, l in enumerate(lines):
        if l == '':
            continue
        elif l[0] == '#':  # info output from ssh-keygen; contains the line number where key was found
            try:
                # This output format has been hardcoded in ssh-keygen since at least OpenSSH 4.0
                # It always outputs the non-localized comment before the found key
                found_line = int(re.search(r'found: line (\d+)', l).group(1))
            # Fixed: re.search() returns None when the pattern is absent, so
            # .group(1) raises AttributeError, not IndexError; catch both so
            # an unexpected comment format produces the intended error
            # message instead of an unhandled traceback.
            except (IndexError, AttributeError):
                module.fail_json(msg="failed to parse output of ssh-keygen for line number: '%s'" % l)
        else:
            found_key = normalize_known_hosts_key(l)
            if hash_host == True:
                if found_key['host'][:3] == '|1|':
                    new_key['host'] = found_key['host']
                else:
                    hashed_host = normalize_known_hosts_key(hashed_lines[lnum])
                    found_key['host'] = hashed_host['host']
                key = key.replace(host, found_key['host'])
            if new_key == found_key:  # found a match
                return True, False, found_line, key  # found exactly the same key, don't replace
            elif new_key['type'] == found_key['type']:  # found a different key for the same key type
                return True, True, found_line, key

    # No match found, return found and replace, but no line
    return True, True, None, key
def normalize_known_hosts_key(key):
    '''
    Transform a key, either taken from a known_host file or provided by the
    user, into a normalized form.
    The host part (which might include multiple hostnames or be hashed) gets
    replaced by the provided host. Also, any spurious information gets removed
    from the end (like the username@host tag usually present in hostkeys, but
    absent in known_hosts files)

    Returns a dict with 'host', 'type', 'key' and (when an @marker such as
    @cert-authority or @revoked is present) 'options'.
    '''
    # Fixed: the original did `k = key.strip()` and then immediately
    # `k = key.split()`, discarding the stripped value (a dead store).
    # split() without arguments ignores surrounding whitespace anyway, so
    # behaviour is unchanged, but the intent is now explicit.
    k = key.strip().split()
    d = dict()
    # The optional "marker" field, used for @cert-authority or @revoked
    if k[0][0] == '@':
        d['options'] = k[0]
        d['host'] = k[1]
        d['type'] = k[2]
        d['key'] = k[3]
    else:
        d['host'] = k[0]
        d['type'] = k[1]
        d['key'] = k[2]
    return d
def main():
    # Module entry point: declare the argument spec (name/key/path/
    # hash_host/state), then delegate all work to enforce_state().
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, type='str', aliases=['host']),
            key=dict(required=False, type='str'),
            path=dict(default="~/.ssh/known_hosts", type='path'),
            hash_host=dict(required=False, type='bool', default=False),
            state=dict(default='present', choices=['absent', 'present']),
        ),
        supports_check_mode=True
    )

    # enforce_state() sets 'changed' in the params dict it returns.
    results = enforce_state(module, module.params)
    module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
LostItem/roundware-server | tests/roundware/api2/test.py | 2 | 14288 | # Roundware Server is released under the GNU Affero General Public License v3.
# See COPYRIGHT.txt, AUTHORS.txt, and LICENSE.txt in the project root directory.
from __future__ import unicode_literals
import datetime
from model_mommy import mommy
from model_mommy.generators import gen_file_field
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from roundware.rw.models import (ListeningHistoryItem, Asset, Project,
Audiotrack, Session, Envelope,
Speaker, LocalizedString, UIGroup, UIItem,
Language, Tag, TagCategory)
from roundware.settings import DEFAULT_SESSION_ID
from rest_framework import status
from rest_framework.test import APITestCase
TEST_POLYGONS = {
"crazy_shape": "MULTIPOLYGON(((-0.774183051414968 -0.120296667618684,-0.697181433024807 0.197879831012361,-0.52645517133469 0.200040922932489,-0.444333678369823 -0.0571290155627506,-0.468105689491232 -0.245144012613892,-0.774183051414968 -0.120296667618684)),((-1.25042096457759 0.204363106772745,-1.01702303720376 0.504754883670546,-0.599932296619044 0.625776031197718,-0.152586269152534 0.448566493747217,0.0354287278986072 0.00122046628070716,-0.109364430749973 -0.30349349445735,-0.340601266203676 -0.487186307668236,-0.811719304791594 -0.487186307668236,-1.0969834382485 -0.331587689419015,-1.25042096457759 0.204363106772745),(-0.774183051414968 -0.120296667618684,-0.811719304791594 -0.275399299495685,-0.504844252133409 -0.374809527821576,-0.314668163162139 -0.327265505578759,-0.239029945957657 -0.0506457398023664,-0.362212185404957 0.325384254299917,-0.796591661350698 0.35563954118171,-0.880874246235692 0.122241613807879,-0.958673555360303 -0.0917064862847996,-0.889518613916205 -0.0247126367608296,-0.796591661350698 -0.111156313565952,-0.774183051414968 -0.120296667618684)))",
"square": "MULTIPOLYGON(((10 10, 10 20, 20 20, 20 10, 10 10)))"
}
def validated_file_field_gen():
    """Produce a dummy file value for model_mommy's ValidatedFileField."""
    dummy_file = gen_file_field()
    return dummy_file
class TestServer(APITestCase):
""" test api.py methods
"""
    def setUp(self):
        # NOTE(review): calling super this way (explicit class + self) is
        # unusual but equivalent to unittest's setUp chain here.
        super(type(self), TestServer).setUp(self)
        self.maxDiff = None

        generator_dict = {
            'validatedfile.fields.ValidatedFileField':
                validated_file_field_gen
        }
        # can't set this directly in settings: db ENGINE not yet available
        setattr(settings, 'MOMMY_CUSTOM_FIELDS_GEN', generator_dict)

        # setup basics: a default session, two languages and a localized
        # message in each.
        self.default_session = mommy.make(Session)
        self.english = mommy.make(Language, language_code='en')
        self.spanish = mommy.make(Language, language_code='es')
        self.english_msg = mommy.make(LocalizedString, localized_string="One",
                                      language=self.english)
        self.spanish_msg = mommy.make(LocalizedString, localized_string="Uno",
                                      language=self.spanish)

        # create project and session
        self.project1 = mommy.make(
            Project,
            id=1,
            name='Uno',
            recording_radius=10,
            audio_format='ogg',
            demo_stream_message_loc=[self.english_msg, self.spanish_msg],
            out_of_range_url='http://rw.com:8000/outofrange.mp3',
            geo_listen_enabled=True
        )
        self.session = mommy.make(
            Session,
            project=self.project1,
            language=self.english
        )

        # setup tag categories
        self.tagcat1 = mommy.make(TagCategory, name='gender')
        self.tagcat2 = mommy.make(TagCategory, name='age')
        self.tagcat3 = mommy.make(TagCategory, name='color')

        # setup tags
        self.tag1 = mommy.make(
            Tag,
            project=self.project1,
            tag_category=self.tagcat1,
            value='male',
            loc_description=[self.english_msg, self.spanish_msg],
            loc_msg=[self.english_msg, self.spanish_msg],
            data=None,
            filter="",
            location=None
        )
        self.tag2 = mommy.make(
            Tag,
            project=self.project1,
            tag_category=self.tagcat2,
            value='young'
        )
        self.tag3 = mommy.make(
            Tag,
            project=None,  # this tag should not appear in projects_tags_get
            tag_category=self.tagcat3,
            value='red'
        )

        # setup ui_groups: two LISTEN and three SPEAK groups for project1
        self.uigroup1 = mommy.make(
            UIGroup,
            project=self.project1,
            ui_mode=UIGroup.LISTEN,
            tag_category=self.tagcat1
        )
        self.uigroup2 = mommy.make(
            UIGroup,
            project=self.project1,
            ui_mode=UIGroup.LISTEN,
            tag_category=self.tagcat2
        )
        self.uigroup3 = mommy.make(
            UIGroup,
            project=self.project1,
            ui_mode=UIGroup.SPEAK,
            tag_category=self.tagcat1
        )
        self.uigroup4 = mommy.make(
            UIGroup,
            project=self.project1,
            ui_mode=UIGroup.SPEAK,
            tag_category=self.tagcat2
        )
        self.uigroup5 = mommy.make(
            UIGroup,
            project=self.project1,
            ui_mode=UIGroup.SPEAK,
            tag_category=self.tagcat3
        )

        # setup ui_items
        self.uiitem1 = mommy.make(
            UIItem,
            ui_group=self.uigroup1,
            tag=self.tag1,
            active=True
        )
        self.uiitem2 = mommy.make(
            UIItem,
            ui_group=self.uigroup1,
            tag=self.tag2,
            active=True
        )

        # setup assets and envelopes
        self.asset1 = mommy.make(Asset, project=self.project1, id=1,
                                 audiolength=5000000000, volume=0.9,
                                 created=datetime.datetime(
                                     2013, 11, 21, 21, 3, 6, 616402),
                                 latitude='0.1', longitude='0.1',
                                 language=self.english,
                                 tags=(self.tag1,))
        self.asset2 = mommy.make(Asset, project=self.project1, id=2,
                                 audiolength=10000000000,
                                 language=self.english,
                                 tags=(self.tag1,))
        self.envelope1 = mommy.make(Envelope, session=self.session,
                                    assets=[self.asset1, ])
        self.envelope2 = mommy.make(Envelope, session=self.session,
                                    assets=[self.asset2, ])

        # setup audio elements: listening history, an audiotrack and a speaker
        self.history1 = mommy.make(ListeningHistoryItem, asset=self.asset1,
                                   session=self.session,
                                   starttime=datetime.datetime(
                                       2013, 11, 21, 17, 29, 10, 173061),
                                   duration=5000000)
        self.history2 = mommy.make(ListeningHistoryItem, asset=self.asset2,
                                   session=self.session,
                                   starttime=datetime.datetime(
                                       2013, 11, 21, 17, 29, 44, 610672),
                                   duration=6000000)
        self.track1 = mommy.make(Audiotrack, project=self.project1, id=1)
        self.speaker1 = mommy.make(Speaker, project=self.project1,
                                   shape=TEST_POLYGONS["crazy_shape"],
                                   attenuation_distance=100, activeyn=True)
def test_api2_in_order(self):
self.users_post()
self.sessions_post()
self.projects_get()
self.projects_tags_get()
self.projects_assets_get()
self.vote_assets_post()
self.vote_assets_get()
self.assets_random_get()
# some endpoints cannot be tested currently
# self.streams_post()
# self.streams_patch()
# self.streams_heartbeat_post()
# self.streams_next_post()
# self.streams_current_get()
self.ensure_token_required()
def users_post(self):
url = reverse('user-list')
data = {"device_id": "12891038109281",
"client_type": "phone",
"client_system": "iOS"}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# check that a username was generated and token returned
self.assertIsNotNone(response.data["username"])
self.assertIsNotNone(response.data["token"])
# set the token for later requests
self.client.credentials(HTTP_AUTHORIZATION='Token ' + response.data["token"])
def sessions_post(self):
url = reverse('session-list')
# first pass no geo_listen_enabled
data = {"timezone": "-0500",
"project_id": 1,
"client_system": "iOS"}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# language wasn't provided, so should be set default to "en"
self.assertEqual(response.data["language"], "en")
# geo_listen_enabled not provided, so set to project.geo_listen_enabled value
self.assertEqual(response.data["geo_listen_enabled"], True)
# check returned data matches data provided
self.assertEqual(response.data["timezone"], data["timezone"])
self.assertEqual(response.data["client_system"], data["client_system"])
self.assertEqual(response.data["project_id"], data["project_id"])
self.assertIsNotNone(response.data["session_id"])
# save session_id for later requests
self.session_id = response.data["session_id"]
# now pass geo_listen_enabled
data = {"timezone": "-0500",
"project_id": 1,
"client_system": "iOS",
"geo_listen_enabled": False}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# check geo_listen_enabled returns same as passed param, not project value
self.assertEqual(response.data["geo_listen_enabled"], False)
def projects_get(self):
url = "%s?session_id=%s" % (reverse('project-detail', args=[self.project1.id]), self.session_id)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# ensure _loc fields are transformed
self.assertIn("out_of_range_message", response.data)
self.assertNotIn("out_of_range_message_loc", response.data)
self.assertEqual(self.project1.id, response.data["project_id"])
def projects_tags_get(self):
# Strictly speaking, session_id is necessary only for localization purposes
# url = "%s?session_id=%s" % (reverse('project-tags', args=[self.project1.id]), self.session_id)
url = "%s" % (reverse('project-tags', args=[self.project1.id]))
data = {}
response = self.client.get(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1) # { "tags" : [] }
self.assertEqual(len(response.data["tags"]), 2) # one tag excluded
self.assertEqual(response.data["tags"][0]["project"], self.project1.id)
def projects_assets_get(self):
url = reverse('project-assets', args=[self.project1.id])
data = {}
response = self.client.get(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
self.assertEqual(response.data[0]["project"], self.project1.id)
def vote_assets_post(self):
data = {"device_id": "12891038109281",
"session_id": self.session_id,
"vote_type": "rate",
"value": 2}
response = self.client.post('/api/2/assets/1/votes/', data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# check returned data matches data provided
User = get_user_model()
user_id = User.objects.filter(userprofile__device_id=data["device_id"]) \
.values_list('id', flat=True)[0]
self.assertEqual(response.data["voter"], user_id)
self.assertEqual(response.data["session_id"], data["session_id"])
self.assertEqual(response.data["type"], data["vote_type"])
self.assertEqual(response.data["value"], data["value"])
self.assertIsNotNone(response.data["vote_id"])
def vote_assets_get(self):
response = self.client.get('/api/2/assets/1/votes/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
# check returned data matches votes provided
self.assertEqual(response.data[0]["type"], "rate")
self.assertEqual(response.data[0]["avg"], 2)
def assets_random_get(self):
data = {"mediatype": "audio",
"project_id": self.project1.id,
"audiolength__lte": 8,
"limit": 2}
response = self.client.get('/api/2/assets/random/', data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
data = {"mediatype": "audio",
"project_id": self.project1.id,
"audiolength__lte": 12,
"limit": 2}
response = self.client.get('/api/2/assets/random/', data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
def ensure_token_required(self):
self.client.credentials(HTTP_AUTHORIZATION='')
self.assertRaises(AssertionError, self.sessions_post)
self.assertRaises(AssertionError, self.projects_get)
self.assertRaises(AssertionError, self.projects_tags_get)
self.assertRaises(AssertionError, self.projects_assets_get)
self.assertRaises(AssertionError, self.vote_assets_post)
| lgpl-3.0 |
ChenJunor/hue | desktop/core/ext-py/configobj/validate.py | 42 | 46768 | # validate.py
# A Validator object
# Copyright (C) 2005 Michael Foord, Mark Andrews, Nicola Larosa
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# mark AT la-la DOT com
# nico AT tekNico DOT net
# This software is licensed under the terms of the BSD license.
# http://www.voidspace.org.uk/python/license.shtml
# Basically you're free to copy, modify, distribute and relicense it,
# So long as you keep a copy of the license with it.
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# For information about bugfixes, updates and support, please join the
# ConfigObj mailing list:
# http://lists.sourceforge.net/lists/listinfo/configobj-develop
# Comments, suggestions and bug reports welcome.
"""
The Validator object is used to check that supplied values
conform to a specification.
The value can be supplied as a string - e.g. from a config file.
In this case the check will also *convert* the value to
the required type. This allows you to add validation
as a transparent layer to access data stored as strings.
The validation checks that the data is correct *and*
converts it to the expected type.
Some standard checks are provided for basic data types.
Additional checks are easy to write. They can be
provided when the ``Validator`` is instantiated or
added afterwards.
The standard functions work with the following basic data types :
* integers
* floats
* booleans
* strings
* ip_addr
plus lists of these datatypes
Adding additional checks is done through coding simple functions.
The full set of standard checks are :
* 'integer': matches integer values (including negative)
Takes optional 'min' and 'max' arguments : ::
integer()
integer(3, 9) # any value from 3 to 9
integer(min=0) # any positive value
integer(max=9)
* 'float': matches float values
Has the same parameters as the integer check.
* 'boolean': matches boolean values - ``True`` or ``False``
Acceptable string values for True are :
true, on, yes, 1
Acceptable string values for False are :
false, off, no, 0
Any other value raises an error.
* 'ip_addr': matches an Internet Protocol address, v.4, represented
by a dotted-quad string, i.e. '1.2.3.4'.
* 'string': matches any string.
Takes optional keyword args 'min' and 'max'
to specify min and max lengths of the string.
* 'list': matches any list.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the list. (Always returns a list.)
* 'tuple': matches any tuple.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the tuple. (Always returns a tuple.)
* 'int_list': Matches a list of integers.
Takes the same arguments as list.
* 'float_list': Matches a list of floats.
Takes the same arguments as list.
* 'bool_list': Matches a list of boolean values.
Takes the same arguments as list.
* 'ip_addr_list': Matches a list of IP addresses.
Takes the same arguments as list.
* 'string_list': Matches a list of strings.
Takes the same arguments as list.
* 'mixed_list': Matches a list with different types in
specific positions. List size must match
the number of arguments.
Each position can be one of :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So to specify a list with two strings followed
by two integers, you write the check as : ::
mixed_list('string', 'string', 'integer', 'integer')
* 'pass': This check matches everything ! It never fails
and the value is unchanged.
It is also the default if no check is specified.
* 'option': This check matches any from a list of options.
You specify this check with : ::
option('option 1', 'option 2', 'option 3')
You can supply a default value (returned if no value is supplied)
using the default keyword argument.
You specify a list argument for default using a list constructor syntax in
the check : ::
checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3'))
A badly formatted set of arguments will raise a ``VdtParamError``.
"""
# Docstring format and version metadata for introspection tools.
__docformat__ = "restructuredtext en"
__version__ = '1.0.0'
__revision__ = '$Id: validate.py 123 2005-09-08 08:54:28Z fuzzyman $'
# Names exported by ``from validate import *`` -- the module's public API
# (the standard check functions, the error hierarchy and Validator).
__all__ = (
    '__version__',
    'dottedQuadToNum',
    'numToDottedQuad',
    'ValidateError',
    'VdtUnknownCheckError',
    'VdtParamError',
    'VdtTypeError',
    'VdtValueError',
    'VdtValueTooSmallError',
    'VdtValueTooBigError',
    'VdtValueTooShortError',
    'VdtValueTooLongError',
    'VdtMissingValue',
    'Validator',
    'is_integer',
    'is_float',
    'is_boolean',
    'is_list',
    'is_tuple',
    'is_ip_addr',
    'is_string',
    'is_int_list',
    'is_bool_list',
    'is_float_list',
    'is_string_list',
    'is_ip_addr_list',
    'is_mixed_list',
    'is_option',
    '__docformat__',
)
import re
_list_arg = re.compile(r'''
(?:
([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
)
\)
)
''', re.VERBOSE | re.DOTALL) # two groups
_list_members = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
''', re.VERBOSE | re.DOTALL) # one group
_paramstring = r'''
(?:
(
(?:
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
\)
)|
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?)| # unquoted
(?: # keyword argument
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
)
)
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
)
'''
_matchstring = '^%s*' % _paramstring
# Python pre 2.2.1 doesn't have bool
# The shim below deliberately avoids any post-2.2 syntax; on any modern
# interpreter the name resolves and the except branch never runs.
try:
    bool
except NameError:
    def bool(val):
        """Simple boolean equivalent function. """
        if val:
            return 1
        else:
            return 0
def dottedQuadToNum(ip):
    """
    Convert decimal dotted quad string to long integer
    >>> int(dottedQuadToNum('1 '))
    1
    >>> int(dottedQuadToNum(' 1.2'))
    16777218
    >>> int(dottedQuadToNum(' 1.2.3 '))
    16908291
    >>> int(dottedQuadToNum('1.2.3.4'))
    16909060
    >>> dottedQuadToNum('1.2.3. 4')
    16909060
    >>> dottedQuadToNum('255.255.255.255')
    4294967295L
    >>> dottedQuadToNum('255.255.255.256')
    Traceback (most recent call last):
    ValueError: Not a good dotted-quad IP: 255.255.255.256
    """
    # import here to avoid it when ip_addr values are not used
    import socket, struct
    try:
        # inet_aton packs the quad into 4 bytes; '!L' reads them back as
        # an unsigned 32-bit integer
        return struct.unpack('!L',
            socket.inet_aton(ip.strip()))[0]
    except socket.error:
        # bug in inet_aton, corrected in Python 2.3
        if ip.strip() == '255.255.255.255':
            # Python 2-only long literal -- this module targets Python 2
            return 0xFFFFFFFFL
        else:
            raise ValueError('Not a good dotted-quad IP: %s' % ip)
    # NOTE(review): unreachable -- every path above returns or raises
    return
def numToDottedQuad(num):
    """
    Convert long int to dotted quad string
    >>> numToDottedQuad(-1L)
    Traceback (most recent call last):
    ValueError: Not a good numeric IP: -1
    >>> numToDottedQuad(1L)
    '0.0.0.1'
    >>> numToDottedQuad(16777218L)
    '1.0.0.2'
    >>> numToDottedQuad(16908291L)
    '1.2.0.3'
    >>> numToDottedQuad(16909060L)
    '1.2.3.4'
    >>> numToDottedQuad(4294967295L)
    '255.255.255.255'
    >>> numToDottedQuad(4294967296L)
    Traceback (most recent call last):
    ValueError: Not a good numeric IP: 4294967296
    """
    # import here to avoid it when ip_addr values are not used
    import socket, struct
    # no need to intercept here, 4294967295L is fine
    # (range check up front because struct.pack would accept some
    # out-of-range values after truncation on old Pythons)
    if num > 4294967295L or num < 0:
        raise ValueError('Not a good numeric IP: %s' % num)
    try:
        # pack as unsigned 32-bit, then let inet_ntoa render the quad
        return socket.inet_ntoa(
            struct.pack('!L', long(num)))
    except (socket.error, struct.error, OverflowError):
        raise ValueError('Not a good numeric IP: %s' % num)
# Root of the exception hierarchy raised by failing checks.
class ValidateError(Exception):
    """
    This error indicates that the check failed.
    It can be the base class for more specific errors.
    Any check function that fails ought to raise this error.
    (or a subclass)
    >>> raise ValidateError
    Traceback (most recent call last):
    ValidateError
    """
# Raised by Validator.check(..., missing=True) when the check string
# declares no default value.
class VdtMissingValue(ValidateError):
    """No value was supplied to a check that needed one."""
class VdtUnknownCheckError(ValidateError):
    """An unknown check function was requested"""
    def __init__(self, value):
        """
        >>> raise VdtUnknownCheckError('yoda')
        Traceback (most recent call last):
        VdtUnknownCheckError: the check "yoda" is unknown.
        """
        message = 'the check "%s" is unknown.' % (value,)
        ValidateError.__init__(self, message)
class VdtParamError(SyntaxError):
    """An incorrect parameter was passed"""
    def __init__(self, name, value):
        """
        >>> raise VdtParamError('yoda', 'jedi')
        Traceback (most recent call last):
        VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
        """
        message = 'passed an incorrect value "%s" for parameter "%s".' % (value, name)
        SyntaxError.__init__(self, message)
class VdtTypeError(ValidateError):
    """The value supplied was of the wrong type"""
    def __init__(self, value):
        """
        >>> raise VdtTypeError('jedi')
        Traceback (most recent call last):
        VdtTypeError: the value "jedi" is of the wrong type.
        """
        message = 'the value "%s" is of the wrong type.' % (value,)
        ValidateError.__init__(self, message)
class VdtValueError(ValidateError):
    """The value supplied was of the correct type, but was not an allowed value."""
    def __init__(self, value):
        """
        >>> raise VdtValueError('jedi')
        Traceback (most recent call last):
        VdtValueError: the value "jedi" is unacceptable.
        """
        message = 'the value "%s" is unacceptable.' % (value,)
        ValidateError.__init__(self, message)
class VdtValueTooSmallError(VdtValueError):
    """The value supplied was of the correct type, but was too small."""
    def __init__(self, value):
        """
        >>> raise VdtValueTooSmallError('0')
        Traceback (most recent call last):
        VdtValueTooSmallError: the value "0" is too small.
        """
        message = 'the value "%s" is too small.' % (value,)
        ValidateError.__init__(self, message)
class VdtValueTooBigError(VdtValueError):
    """The value supplied was of the correct type, but was too big."""
    def __init__(self, value):
        """
        >>> raise VdtValueTooBigError('1')
        Traceback (most recent call last):
        VdtValueTooBigError: the value "1" is too big.
        """
        message = 'the value "%s" is too big.' % (value,)
        ValidateError.__init__(self, message)
class VdtValueTooShortError(VdtValueError):
    """The value supplied was of the correct type, but was too short."""
    def __init__(self, value):
        """
        >>> raise VdtValueTooShortError('jed')
        Traceback (most recent call last):
        VdtValueTooShortError: the value "jed" is too short.
        """
        message = 'the value "%s" is too short.' % (value,)
        ValidateError.__init__(self, message)
class VdtValueTooLongError(VdtValueError):
    """The value supplied was of the correct type, but was too long."""
    def __init__(self, value):
        """
        >>> raise VdtValueTooLongError('jedie')
        Traceback (most recent call last):
        VdtValueTooLongError: the value "jedie" is too long.
        """
        message = 'the value "%s" is too long.' % (value,)
        ValidateError.__init__(self, message)
class Validator(object):
    """
    Validator is an object that allows you to register a set of 'checks'.
    These checks take input and test that it conforms to the check.
    This can also involve converting the value from a string into
    the correct datatype.
    The ``check`` method takes an input string which configures which
    check is to be used and applies that check to a supplied value.
    An example input string would be:
    'int_range(param1, param2)'
    You would then provide something like:
    >>> def int_range_check(value, min, max):
    ...     # turn min and max from strings to integers
    ...     min = int(min)
    ...     max = int(max)
    ...     # check that value is of the correct type.
    ...     # possible valid inputs are integers or strings
    ...     # that represent integers
    ...     if not isinstance(value, (int, long, basestring)):
    ...         raise VdtTypeError(value)
    ...     elif isinstance(value, basestring):
    ...         # if we are given a string
    ...         # attempt to convert to an integer
    ...         try:
    ...             value = int(value)
    ...         except ValueError:
    ...             raise VdtValueError(value)
    ...     # check the value is between our constraints
    ...     if not min <= value:
    ...         raise VdtValueTooSmallError(value)
    ...     if not value <= max:
    ...         raise VdtValueTooBigError(value)
    ...     return value
    >>> fdict = {'int_range': int_range_check}
    >>> vtr1 = Validator(fdict)
    >>> vtr1.check('int_range(20, 40)', '30')
    30
    >>> vtr1.check('int_range(20, 40)', '60')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "60" is too big.
    New functions can be added with : ::
    >>> vtr2 = Validator()
    >>> vtr2.functions['int_range'] = int_range_check
    Or by passing in a dictionary of functions when Validator
    is instantiated.
    Your functions *can* use keyword arguments,
    but the first argument should always be 'value'.
    If the function doesn't take additional arguments,
    the parentheses are optional in the check.
    It can be written with either of : ::
        keyword = function_name
        keyword = function_name()
    The first program to utilise Validator() was Michael Foord's
    ConfigObj, an alternative to ConfigParser which supports lists and
    can validate a config file using a config schema.
    For more details on using Validator with ConfigObj see:
    http://www.voidspace.org.uk/python/configobj.html
    """
    # this regex does the initial parsing of the checks
    _func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
    # this regex takes apart keyword arguments
    _key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
    # this regex finds keyword=list(....) type values
    _list_arg = _list_arg
    # this regex takes individual values out of lists - in one pass
    _list_members = _list_members
    # These regexes check a set of arguments for validity
    # and then pull the members out
    _paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
    _matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
    def __init__(self, functions=None):
        """
        Build a Validator with the standard checks registered.

        ``functions`` is an optional dict mapping check names to callables;
        it is merged over (and may override) the built-in checks.

        >>> vtri = Validator()
        """
        self.functions = {
            '': self._pass,
            'integer': is_integer,
            'float': is_float,
            'boolean': is_boolean,
            'ip_addr': is_ip_addr,
            'string': is_string,
            'list': is_list,
            'tuple': is_tuple,
            'int_list': is_int_list,
            'float_list': is_float_list,
            'bool_list': is_bool_list,
            'ip_addr_list': is_ip_addr_list,
            'string_list': is_string_list,
            'mixed_list': is_mixed_list,
            'pass': self._pass,
            'option': is_option,
            'force_list': force_list,
        }
        if functions is not None:
            self.functions.update(functions)
        # tekNico: for use by ConfigObj
        self.baseErrorClass = ValidateError
        # memo of parsed check strings: check -> (name, args, kwargs, default)
        self._cache = {}
    def check(self, check, value, missing=False):
        """
        Usage: check(check, value)
        Arguments:
            check: string representing check to apply (including arguments)
            value: object to be checked
        Returns value, converted to correct type if necessary
        If the check fails, raises a ``ValidateError`` subclass.
        >>> vtor.check('yoda', '')
        Traceback (most recent call last):
        VdtUnknownCheckError: the check "yoda" is unknown.
        >>> vtor.check('yoda()', '')
        Traceback (most recent call last):
        VdtUnknownCheckError: the check "yoda" is unknown.
        >>> vtor.check('string(default="")', '', missing=True)
        ''
        """
        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
        if missing:
            if default is None:
                # no information needed here - to be handled by caller
                raise VdtMissingValue()
            # substitute the declared default for the missing value
            value = self._handle_none(default)
        if value is None:
            # None always short-circuits the check
            return None
        return self._check_value(value, fun_name, fun_args, fun_kwargs)
    def _handle_none(self, value):
        # Map the literal string 'None' to None; unquote "'None'" so the
        # genuine string "None" can still be expressed in a check.
        if value == 'None':
            value = None
        elif value in ("'None'", '"None"'):
            # Special case a quoted None
            value = self._unquote(value)
        return value
    def _parse_with_caching(self, check):
        # Parse ``check`` into (name, args, kwargs, default), memoising the
        # result per check string in self._cache.
        if check in self._cache:
            fun_name, fun_args, fun_kwargs, default = self._cache[check]
            # We call list and dict below to work with *copies* of the data
            # rather than the original (which are mutable of course)
            fun_args = list(fun_args)
            fun_kwargs = dict(fun_kwargs)
        else:
            fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
            fun_kwargs = dict((str(key), value) for (key, value) in fun_kwargs.items())
            self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
        return fun_name, fun_args, fun_kwargs, default
    def _check_value(self, value, fun_name, fun_args, fun_kwargs):
        # Look up the registered check function and apply it to ``value``.
        try:
            fun = self.functions[fun_name]
        except KeyError:
            raise VdtUnknownCheckError(fun_name)
        else:
            return fun(value, *fun_args, **fun_kwargs)
    def _parse_check(self, check):
        # Split a check string like ``name(arg, kw=val)`` into
        # (name, positional args, keyword args, default).
        fun_match = self._func_re.match(check)
        if fun_match:
            fun_name = fun_match.group(1)
            arg_string = fun_match.group(2)
            arg_match = self._matchfinder.match(arg_string)
            if arg_match is None:
                # Bad syntax
                # NOTE(review): VdtParamError.__init__ expects (name, value);
                # this one-argument call therefore raises TypeError rather
                # than the intended VdtParamError (bug in validate.py 1.0.0).
                raise VdtParamError('Bad syntax in check "%s".' % check)
            fun_args = []
            fun_kwargs = {}
            # pull out args of group 2
            for arg in self._paramfinder.findall(arg_string):
                # args may need whitespace removing (before removing quotes)
                arg = arg.strip()
                listmatch = self._list_arg.match(arg)
                if listmatch:
                    key, val = self._list_handle(listmatch)
                    fun_kwargs[key] = val
                    continue
                keymatch = self._key_arg.match(arg)
                if keymatch:
                    val = keymatch.group(2)
                    if not val in ("'None'", '"None"'):
                        # Special case a quoted None
                        val = self._unquote(val)
                    fun_kwargs[keymatch.group(1)] = val
                    continue
                fun_args.append(self._unquote(arg))
        else:
            # allows for function names without (args)
            return check, (), {}, None
        # Default must be deleted if the value is specified too,
        # otherwise the check function will get a spurious "default" keyword arg
        try:
            default = fun_kwargs.pop('default', None)
        except AttributeError:
            # Python 2.2 compatibility
            default = None
            try:
                default = fun_kwargs['default']
                del fun_kwargs['default']
            except KeyError:
                pass
        return fun_name, fun_args, fun_kwargs, default
    def _unquote(self, val):
        """Unquote a value if necessary."""
        if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
            val = val[1:-1]
        return val
    def _list_handle(self, listmatch):
        """Take apart a ``keyword=list('val, 'val')`` type string."""
        out = []
        name = listmatch.group(1)
        args = listmatch.group(2)
        for arg in self._list_members.findall(args):
            out.append(self._unquote(arg))
        return name, out
    def _pass(self, value):
        """
        Dummy check that always passes
        >>> vtor.check('', 0)
        0
        >>> vtor.check('', '0')
        '0'
        """
        return value
    def get_default_value(self, check):
        """
        Given a check, return the default value for the check
        (converted to the right type).
        If the check doesn't specify a default value then a
        ``KeyError`` will be raised.
        """
        fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
        if default is None:
            raise KeyError('Check "%s" has no default value.' % check)
        value = self._handle_none(default)
        if value is None:
            return value
        return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _is_num_param(names, values, to_float=False):
    """
    Return numbers from inputs or raise VdtParamError.
    Lets ``None`` pass through.
    Pass in keyword argument ``to_float=True`` to
    use float for the conversion rather than int.
    >>> _is_num_param(('', ''), (0, 1.0))
    [0, 1]
    >>> _is_num_param(('', ''), (0, 1.0), to_float=True)
    [0.0, 1.0]
    >>> _is_num_param(('a'), ('a'))
    Traceback (most recent call last):
    VdtParamError: passed an incorrect value "a" for parameter "a".
    """
    # pre-2.5 spelling of ``float if to_float else int``
    fun = to_float and float or int
    out_params = []
    for (name, val) in zip(names, values):
        if val is None:
            out_params.append(val)
        elif isinstance(val, (int, long, float, basestring)):
            try:
                out_params.append(fun(val))
            # NOTE: Python 2-only except syntax; ``e`` is never used
            except ValueError, e:
                raise VdtParamError(name, val)
        else:
            raise VdtParamError(name, val)
    return out_params
# built in checks
# you can override these by setting the appropriate name
# in Validator.functions
# note: if the params are specified wrongly in your input string,
# you will also raise errors.
def is_integer(value, min=None, max=None):
    """
    A check that tests that a given value is an integer (int, or long)
    and optionally, between bounds. A negative value is accepted, while
    a float will fail.
    If the value is a string, then the conversion is done - if possible.
    Otherwise a VdtError is raised.
    >>> vtor.check('integer', '-1')
    -1
    >>> vtor.check('integer', '0')
    0
    >>> vtor.check('integer', 9)
    9
    >>> vtor.check('integer', 'a')
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    >>> vtor.check('integer', '2.2')
    Traceback (most recent call last):
    VdtTypeError: the value "2.2" is of the wrong type.
    >>> vtor.check('integer(10)', '20')
    20
    >>> vtor.check('integer(max=20)', '15')
    15
    >>> vtor.check('integer(10)', '9')
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9" is too small.
    >>> vtor.check('integer(10)', 9)
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9" is too small.
    >>> vtor.check('integer(max=20)', '35')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35" is too big.
    >>> vtor.check('integer(max=20)', 35)
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35" is too big.
    >>> vtor.check('integer(0, 9)', False)
    0
    """
    min_val, max_val = _is_num_param(('min', 'max'), (min, max))
    if not isinstance(value, (int, long, basestring)):
        raise VdtTypeError(value)
    if isinstance(value, basestring):
        # string input: it must parse as an integer
        try:
            value = int(value)
        except ValueError:
            raise VdtTypeError(value)
    too_small = min_val is not None and value < min_val
    if too_small:
        raise VdtValueTooSmallError(value)
    too_big = max_val is not None and value > max_val
    if too_big:
        raise VdtValueTooBigError(value)
    return value
def is_float(value, min=None, max=None):
    """
    A check that tests that a given value is a float
    (an integer will be accepted), and optionally - that it is between bounds.
    If the value is a string, then the conversion is done - if possible.
    Otherwise a VdtError is raised.
    This can accept negative values.
    >>> vtor.check('float', '2')
    2.0
    From now on we multiply the value to avoid comparing decimals
    >>> vtor.check('float', '-6.8') * 10
    -68.0
    >>> vtor.check('float', '12.2') * 10
    122.0
    >>> vtor.check('float', 8.4) * 10
    84.0
    >>> vtor.check('float', 'a')
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    >>> vtor.check('float(10.1)', '10.2') * 10
    102.0
    >>> vtor.check('float(max=20.2)', '15.1') * 10
    151.0
    >>> vtor.check('float(10.0)', '9.0')
    Traceback (most recent call last):
    VdtValueTooSmallError: the value "9.0" is too small.
    >>> vtor.check('float(max=20.0)', '35.0')
    Traceback (most recent call last):
    VdtValueTooBigError: the value "35.0" is too big.
    """
    min_val, max_val = _is_num_param(('min', 'max'), (min, max), to_float=True)
    if not isinstance(value, (int, long, float, basestring)):
        raise VdtTypeError(value)
    if not isinstance(value, float):
        # ints and strings alike go through float() conversion
        try:
            value = float(value)
        except ValueError:
            raise VdtTypeError(value)
    too_small = min_val is not None and value < min_val
    if too_small:
        raise VdtValueTooSmallError(value)
    too_big = max_val is not None and value > max_val
    if too_big:
        raise VdtValueTooBigError(value)
    return value
# Truth table used by is_boolean: maps the accepted string spellings
# (looked up after lower-casing) and the two bool singletons to True/False.
bool_dict = {
    True: True, 'on': True, '1': True, 'true': True, 'yes': True,
    False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
def is_boolean(value):
    """
    Check if the value represents a boolean.
    >>> vtor.check('boolean', 0)
    0
    >>> vtor.check('boolean', False)
    0
    >>> vtor.check('boolean', '0')
    0
    >>> vtor.check('boolean', 'off')
    0
    >>> vtor.check('boolean', 'false')
    0
    >>> vtor.check('boolean', 'no')
    0
    >>> vtor.check('boolean', 'nO')
    0
    >>> vtor.check('boolean', 'NO')
    0
    >>> vtor.check('boolean', 1)
    1
    >>> vtor.check('boolean', True)
    1
    >>> vtor.check('boolean', '1')
    1
    >>> vtor.check('boolean', 'on')
    1
    >>> vtor.check('boolean', 'true')
    1
    >>> vtor.check('boolean', 'yes')
    1
    >>> vtor.check('boolean', 'Yes')
    1
    >>> vtor.check('boolean', 'YES')
    1
    >>> vtor.check('boolean', '')
    Traceback (most recent call last):
    VdtTypeError: the value "" is of the wrong type.
    >>> vtor.check('boolean', 'up')
    Traceback (most recent call last):
    VdtTypeError: the value "up" is of the wrong type.
    """
    if isinstance(value, basestring):
        key = value.lower()
        if key not in bool_dict:
            raise VdtTypeError(value)
        return bool_dict[key]
    # equality (not identity) comparison: keeps Python 2.2 compatibility
    # and lets the integers 0 and 1 stand in for False and True
    if value == True:
        return True
    if value == False:
        return False
    raise VdtTypeError(value)
def is_ip_addr(value):
    """
    Check that the supplied value is an Internet Protocol address, v.4,
    represented by a dotted-quad string, i.e. '1.2.3.4'.
    >>> vtor.check('ip_addr', '1 ')
    '1'
    >>> vtor.check('ip_addr', ' 1.2')
    '1.2'
    >>> vtor.check('ip_addr', ' 1.2.3 ')
    '1.2.3'
    >>> vtor.check('ip_addr', '1.2.3.4')
    '1.2.3.4'
    >>> vtor.check('ip_addr', '0.0.0.0')
    '0.0.0.0'
    >>> vtor.check('ip_addr', '255.255.255.255')
    '255.255.255.255'
    >>> vtor.check('ip_addr', '255.255.255.256')
    Traceback (most recent call last):
    VdtValueError: the value "255.255.255.256" is unacceptable.
    >>> vtor.check('ip_addr', '1.2.3.4.5')
    Traceback (most recent call last):
    VdtValueError: the value "1.2.3.4.5" is unacceptable.
    >>> vtor.check('ip_addr', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    """
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    stripped = value.strip()
    # dottedQuadToNum does the actual validation; we only want the side
    # effect of the ValueError it raises on a bad address
    try:
        dottedQuadToNum(stripped)
    except ValueError:
        raise VdtValueError(stripped)
    return stripped
def is_list(value, min=None, max=None):
    """
    Check that the value is a list of values.
    You can optionally specify the minimum and maximum number of members.
    It does no check on list members.
    >>> vtor.check('list', ())
    []
    >>> vtor.check('list', [])
    []
    >>> vtor.check('list', (1, 2))
    [1, 2]
    >>> vtor.check('list', [1, 2])
    [1, 2]
    >>> vtor.check('list(3)', (1, 2))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2)" is too short.
    >>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
    >>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
    [1, 2, 3, 4]
    >>> vtor.check('list', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('list', '12')
    Traceback (most recent call last):
    VdtTypeError: the value "12" is of the wrong type.
    """
    min_len, max_len = _is_num_param(('min', 'max'), (min, max))
    # strings are sized sequences too, but are explicitly rejected
    if isinstance(value, basestring):
        raise VdtTypeError(value)
    try:
        size = len(value)
    except TypeError:
        raise VdtTypeError(value)
    too_short = min_len is not None and size < min_len
    if too_short:
        raise VdtValueTooShortError(value)
    too_long = max_len is not None and size > max_len
    if too_long:
        raise VdtValueTooLongError(value)
    return list(value)
def is_tuple(value, min=None, max=None):
    """
    Check that the value is a tuple of values.
    You can optionally specify the minimum and maximum number of members.
    It does no check on members.
    >>> vtor.check('tuple', ())
    ()
    >>> vtor.check('tuple', [])
    ()
    >>> vtor.check('tuple', (1, 2))
    (1, 2)
    >>> vtor.check('tuple', [1, 2])
    (1, 2)
    >>> vtor.check('tuple(3)', (1, 2))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2)" is too short.
    >>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
    >>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
    (1, 2, 3, 4)
    >>> vtor.check('tuple', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('tuple', '12')
    Traceback (most recent call last):
    VdtTypeError: the value "12" is of the wrong type.
    """
    # delegate all validation to is_list; only the container type differs
    members = is_list(value, min, max)
    return tuple(members)
def is_string(value, min=None, max=None):
    """
    Check that the supplied value is a string.
    You can optionally specify the minimum and maximum number of members.
    >>> vtor.check('string', '0')
    '0'
    >>> vtor.check('string', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    >>> vtor.check('string(2)', '12')
    '12'
    >>> vtor.check('string(2)', '1')
    Traceback (most recent call last):
    VdtValueTooShortError: the value "1" is too short.
    >>> vtor.check('string(min=2, max=3)', '123')
    '123'
    >>> vtor.check('string(min=2, max=3)', '1234')
    Traceback (most recent call last):
    VdtValueTooLongError: the value "1234" is too long.
    """
    # Only str/unicode values are acceptable (Python 2 basestring).
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    # Coerce the 'min'/'max' check parameters to numbers (shared helper).
    (min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
    try:
        num_members = len(value)
    except TypeError:
        raise VdtTypeError(value)
    # Enforce the optional length bounds on the string.
    if min_len is not None and num_members < min_len:
        raise VdtValueTooShortError(value)
    if max_len is not None and num_members > max_len:
        raise VdtValueTooLongError(value)
    return value
def is_int_list(value, min=None, max=None):
    """
    Check that the value is a list of integers.
    You can optionally specify the minimum and maximum number of members.
    Each list member is checked that it is an integer.
    >>> vtor.check('int_list', ())
    []
    >>> vtor.check('int_list', [])
    []
    >>> vtor.check('int_list', (1, 2))
    [1, 2]
    >>> vtor.check('int_list', [1, 2])
    [1, 2]
    >>> vtor.check('int_list', [1, 'a'])
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    """
    # Validate the container first (type + length), then every member.
    return [is_integer(mem) for mem in is_list(value, min, max)]
def is_bool_list(value, min=None, max=None):
    """
    Check that the value is a list of booleans.
    You can optionally specify the minimum and maximum number of members.
    Each list member is checked that it is a boolean.
    >>> vtor.check('bool_list', ())
    []
    >>> vtor.check('bool_list', [])
    []
    >>> check_res = vtor.check('bool_list', (True, False))
    >>> check_res == [True, False]
    1
    >>> check_res = vtor.check('bool_list', [True, False])
    >>> check_res == [True, False]
    1
    >>> vtor.check('bool_list', [True, 'a'])
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    """
    # Validate the container first (type + length), then every member.
    return [is_boolean(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
    """
    Check that the value is a list of floats.
    You can optionally specify the minimum and maximum number of members.
    Each list member is checked that it is a float.
    >>> vtor.check('float_list', ())
    []
    >>> vtor.check('float_list', [])
    []
    >>> vtor.check('float_list', (1, 2.0))
    [1.0, 2.0]
    >>> vtor.check('float_list', [1, 2.0])
    [1.0, 2.0]
    >>> vtor.check('float_list', [1, 'a'])
    Traceback (most recent call last):
    VdtTypeError: the value "a" is of the wrong type.
    """
    # Validate the container first (type + length), then coerce members.
    return [is_float(mem) for mem in is_list(value, min, max)]
def is_string_list(value, min=None, max=None):
    """
    Check that the value is a list of strings.
    You can optionally specify the minimum and maximum number of members.
    Each list member is checked that it is a string.
    >>> vtor.check('string_list', ())
    []
    >>> vtor.check('string_list', [])
    []
    >>> vtor.check('string_list', ('a', 'b'))
    ['a', 'b']
    >>> vtor.check('string_list', ['a', 1])
    Traceback (most recent call last):
    VdtTypeError: the value "1" is of the wrong type.
    >>> vtor.check('string_list', 'hello')
    Traceback (most recent call last):
    VdtTypeError: the value "hello" is of the wrong type.
    """
    # Reject a bare string explicitly: without this check is_list would
    # happily treat it as a sequence of single-character strings.
    if isinstance(value, basestring):
        raise VdtTypeError(value)
    return [is_string(mem) for mem in is_list(value, min, max)]
def is_ip_addr_list(value, min=None, max=None):
    """
    Check that the value is a list of IP addresses.
    You can optionally specify the minimum and maximum number of members.
    Each list member is checked that it is an IP address.
    >>> vtor.check('ip_addr_list', ())
    []
    >>> vtor.check('ip_addr_list', [])
    []
    >>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
    ['1.2.3.4', '5.6.7.8']
    >>> vtor.check('ip_addr_list', ['a'])
    Traceback (most recent call last):
    VdtValueError: the value "a" is unacceptable.
    """
    # Validate the container first (type + length), then every member.
    return [is_ip_addr(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
    """
    Check that a value is a list, coercing strings into
    a list with one member. Useful where users forget the
    trailing comma that turns a single value into a list.
    You can optionally specify the minimum and maximum number of members.
    A minimum of greater than one will fail if the user only supplies a
    string.
    >>> vtor.check('force_list', ())
    []
    >>> vtor.check('force_list', [])
    []
    >>> vtor.check('force_list', 'hello')
    ['hello']
    """
    # Wrap any scalar (including a bare string) in a one-element list,
    # then apply the normal list validation.
    if not isinstance(value, (list, tuple)):
        value = [value]
    return is_list(value, min, max)
# Dispatch table used by is_mixed_list: maps a member-type name (as given
# in the check string) to the validator function applied to that member.
fun_dict = {
    'integer': is_integer,
    'float': is_float,
    'ip_addr': is_ip_addr,
    'string': is_string,
    'boolean': is_boolean,
}
def is_mixed_list(value, *args):
    """
    Check that the value is a list.
    Allow specifying the type of each member.
    Work on lists of specific lengths.
    You specify each member as a positional argument specifying type
    Each type should be one of the following strings :
      'integer', 'float', 'ip_addr', 'string', 'boolean'
    So you can specify a list of two strings, followed by
    two integers as :
      mixed_list('string', 'string', 'integer', 'integer')
    The length of the list must match the number of positional
    arguments you supply.
    >>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
    >>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
    >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
    1
    >>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
    >>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
    1
    >>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
    Traceback (most recent call last):
    VdtTypeError: the value "b" is of the wrong type.
    >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
    Traceback (most recent call last):
    VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
    >>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
    Traceback (most recent call last):
    VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
    >>> vtor.check(mix_str, 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    This test requires an elaborate setup, because of a change in error string
    output from the interpreter between Python 2.2 and 2.3 .
    >>> res_seq = (
    ...     'passed an incorrect value "',
    ...     'yoda',
    ...     '" for parameter "mixed_list".',
    ...     )
    >>> res_str = "'".join(res_seq)
    >>> try:
    ...     vtor.check('mixed_list("yoda")', ('a'))
    ... except VdtParamError, err:
    ...     str(err) == res_str
    1
    """
    try:
        length = len(value)
    except TypeError:
        raise VdtTypeError(value)
    # The list length must match the number of declared member types exactly.
    if length < len(args):
        raise VdtValueTooShortError(value)
    elif length > len(args):
        raise VdtValueTooLongError(value)
    try:
        # Apply each positional type name's validator to the member at the
        # same index (via the module-level fun_dict dispatch table).
        return [fun_dict[arg](val) for arg, val in zip(args, value)]
    except KeyError, e:
        # An unknown member-type name was supplied (e.g. 'yoda').
        raise VdtParamError('mixed_list', e)
def is_option(value, *options):
    """
    This check matches the value to any of a set of options.
    >>> vtor.check('option("yoda", "jedi")', 'yoda')
    'yoda'
    >>> vtor.check('option("yoda", "jedi")', 'jed')
    Traceback (most recent call last):
    VdtValueError: the value "jed" is unacceptable.
    >>> vtor.check('option("yoda", "jedi")', 0)
    Traceback (most recent call last):
    VdtTypeError: the value "0" is of the wrong type.
    """
    # Options are strings, so a non-string value can never match.
    if not isinstance(value, basestring):
        raise VdtTypeError(value)
    if not value in options:
        raise VdtValueError(value)
    return value
def _test(value, *args, **keywargs):
    """
    A function that exists for test purposes.
    >>> checks = [
    ...     '3, 6, min=1, max=3, test=list(a, b, c)',
    ...     '3',
    ...     '3, 6',
    ...     '3,',
    ...     'min=1, test="a b c"',
    ...     'min=5, test="a, b, c"',
    ...     'min=1, max=3, test="a, b, c"',
    ...     'min=-100, test=-99',
    ...     'min=1, max=3',
    ...     '3, 6, test="36"',
    ...     '3, 6, test="a, b, c"',
    ...     '3, max=3, test=list("a", "b", "c")',
    ...     '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
    ...     "test='x=fish(3)'",
    ...    ]
    >>> v = Validator({'test': _test})
    >>> for entry in checks:
    ...     print v.check(('test(%s)' % entry), 3)
    (3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'})
    (3, ('3',), {})
    (3, ('3', '6'), {})
    (3, ('3',), {})
    (3, (), {'test': 'a b c', 'min': '1'})
    (3, (), {'test': 'a, b, c', 'min': '5'})
    (3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'})
    (3, (), {'test': '-99', 'min': '-100'})
    (3, (), {'max': '3', 'min': '1'})
    (3, ('3', '6'), {'test': '36'})
    (3, ('3', '6'), {'test': 'a, b, c'})
    (3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'})
    (3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'})
    (3, (), {'test': 'x=fish(3)'})
    >>> v = Validator()
    >>> v.check('integer(default=6)', '3')
    3
    >>> v.check('integer(default=6)', None, True)
    6
    >>> v.get_default_value('integer(default=6)')
    6
    >>> v.get_default_value('float(default=6)')
    6.0
    >>> v.get_default_value('pass(default=None)')
    >>> v.get_default_value("string(default='None')")
    'None'
    >>> v.get_default_value('pass')
    Traceback (most recent call last):
    KeyError: 'Check "pass" has no default value.'
    >>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
    ['1', '2', '3', '4']
    >>> v = Validator()
    >>> v.check("pass(default=None)", None, True)
    >>> v.check("pass(default='None')", None, True)
    'None'
    >>> v.check('pass(default="None")', None, True)
    'None'
    >>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
    ['1', '2', '3', '4']
    Bug test for unicode arguments
    >>> v = Validator()
    >>> v.check(u'string(min=4)', u'test')
    u'test'
    >>> v = Validator()
    >>> v.get_default_value(u'string(min=4, default="1234")')
    u'1234'
    >>> v.check(u'string(min=4, default="1234")', u'test')
    u'test'
    >>> v = Validator()
    >>> default = v.get_default_value('string(default=None)')
    >>> default == None
    1
    """
    # Echo the value plus the parsed positional/keyword check arguments back
    # verbatim, so the doctests above can inspect how Validator parsed them.
    return (value, args, keywargs)
def _test2():
    """
    >>>
    >>> v = Validator()
    >>> v.get_default_value('string(default="#ff00dd")')
    '#ff00dd'
    >>> v.get_default_value('integer(default=3) # comment')
    3
    """
    # Doctest container only (default values containing '#'); no runtime body.
def _test3():
    r"""
    >>> vtor.check('string(default="")', '', missing=True)
    ''
    >>> vtor.check('string(default="\n")', '', missing=True)
    '\n'
    >>> print vtor.check('string(default="\n")', '', missing=True),
    <BLANKLINE>
    >>> vtor.check('string()', '\n')
    '\n'
    >>> vtor.check('string(default="\n\n\n")', '', missing=True)
    '\n\n\n'
    >>> vtor.check('string()', 'random \n text goes here\n\n')
    'random \n text goes here\n\n'
    >>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
    ... '', missing=True)
    ' \nrandom text\ngoes \n here\n\n '
    >>> vtor.check("string(default='\n\n\n')", '', missing=True)
    '\n\n\n'
    >>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
    '\n'
    >>> vtor.check("string_list()", ['foo', '\n', 'bar'])
    ['foo', '\n', 'bar']
    >>> vtor.check("string_list(default=list('\n'))", '', missing=True)
    ['\n']
    """
    # Doctest container only (newline handling in defaults); no runtime body.
if __name__ == '__main__':
    # Run every doctest in this module, exposing a shared Validator
    # instance to the examples under the name `vtor`.
    import doctest
    import sys

    main_module = sys.modules.get('__main__')
    test_globals = dict(main_module.__dict__)
    test_globals['vtor'] = Validator()
    doctest.testmod(main_module, globs=test_globals)
| apache-2.0 |
junhuac/MQUIC | depot_tools/recipe_modules/bot_update/test_api.py | 1 | 2904 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import os
import struct
import sys
from recipe_engine import recipe_test_api
# TODO(phajdan.jr): Clean up this somewhat ugly import.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'resources'))
import bot_update
class BotUpdateTestApi(recipe_test_api.RecipeTestApi):
  """Synthesizes deterministic bot_update JSON output for recipe tests."""

  def output_json(self, master, builder, slave, root, first_sln,
                  revision_mapping, git_mode, force=False, fail_patch=False,
                  output_manifest=False, fixed_revisions=None):
    """Deterministically synthesize json.output test data for gclient's
    --output-json option.
    """
    # The step is considered to have run when the (master, builder, slave)
    # triple is valid per bot_update, or when the caller forces it.
    active = bot_update.check_valid_host(master, builder, slave) or force
    output = {
        'did_run': active,
        'patch_failure': False
    }
    # Add in extra json output if active.
    if active:
      # One "got_*" property per mapped project, with a fake but
      # deterministic revision (see gen_revision below).
      properties = {
          property_name: self.gen_revision(project_name, git_mode)
          for project_name, property_name in revision_mapping.iteritems()
      }
      # Matching "*_cp" commit-position properties (always numeric style).
      properties.update({
          '%s_cp' % property_name: ('refs/heads/master@{#%s}' %
                                    self.gen_revision(project_name, False))
          for project_name, property_name in revision_mapping.iteritems()
      })
      # We also want to simulate outputting "got_revision_git": ...
      # when git mode is off to match what bot_update.py does.
      if not git_mode:
        properties.update({
            '%s_git' % property_name: self.gen_revision(project_name, True)
            for project_name, property_name in revision_mapping.iteritems()
        })
      output.update({
          'patch_root': root or first_sln,
          'root': first_sln,
          'properties': properties,
          'step_text': 'Some step text'
      })
      if output_manifest:
        # Fake per-project repository/revision manifest.
        output.update({
            'manifest': {
                project_name: {
                    'repository': 'https://fake.org/%s.git' % project_name,
                    'revision': self.gen_revision(project_name, git_mode),
                }
                for project_name in revision_mapping
            }
        })
      if fixed_revisions:
        output['fixed_revisions'] = fixed_revisions
      if fail_patch:
        # Simulate a failed patch application (return code 3 means the
        # patch could not even be downloaded).
        output['log_lines'] = [('patch error', 'Patch failed to apply'),]
        output['patch_failure'] = True
        output['patch_apply_return_code'] = 1
        if fail_patch == 'download':
          output['patch_apply_return_code'] = 3
    return self.m.json.output(output)

  @staticmethod
  def gen_revision(project, GIT_MODE):
    """Hash project to bogus deterministic revision values."""
    # Git mode yields a 40-char hex SHA1; otherwise a small integer that
    # looks like an SVN-style revision number.
    h = hashlib.sha1(project)
    if GIT_MODE:
      return h.hexdigest()
    else:
      return struct.unpack('!I', h.digest()[:4])[0] % 300000
| mit |
dxwu/BinderFilter | resources/android-toolchain-16/lib/python2.7/poplib.py | 223 | 12388 | """A POP3 client class.
Based on the J. Myers POP3 draft, Jan. 96
"""
# Author: David Ascher <david_ascher@brown.edu>
# [heavily stealing from nntplib.py]
# Updated: Piers Lauder <piers@cs.su.oz.au> [Jul '97]
# String method conversion and test jig improvements by ESR, February 2001.
# Added the POP3_SSL class. Methods loosely based on IMAP_SSL. Hector Urtubia <urtubia@mrbook.org> Aug 2003
# Example (see the test function at the end of this file)
# Imports
import re, socket
__all__ = ["POP3","error_proto"]
# Exception raised when an error or invalid response is received:
class error_proto(Exception): pass
# Standard Port (RFC 1939 assigns 110 for POP3)
POP3_PORT = 110
# POP SSL PORT (995 is the conventional POP3-over-SSL port)
POP3_SSL_PORT = 995
# Line terminators (we always output CRLF, but accept any of CRLF, LFCR, LF)
CR = '\r'
LF = '\n'
CRLF = CR+LF
class POP3:

    """This class supports both the minimal and optional command sets.
    Arguments can be strings or integers (where appropriate)
    (e.g.: retr(1) and retr('1') both work equally well.

    Minimal Command Set:
            USER name               user(name)
            PASS string             pass_(string)
            STAT                    stat()
            LIST [msg]              list(msg = None)
            RETR msg                retr(msg)
            DELE msg                dele(msg)
            NOOP                    noop()
            RSET                    rset()
            QUIT                    quit()

    Optional Commands (some servers support these):
            RPOP name               rpop(name)
            APOP name digest        apop(name, digest)
            TOP msg n               top(msg, n)
            UIDL [msg]              uidl(msg = None)

    Raises one exception: 'error_proto'.

    Instantiate with:
            POP3(hostname, port=110)

    NB:     the POP protocol locks the mailbox from user
            authorization until QUIT, so be sure to get in, suck
            the messages, and quit, each time you access the
            mailbox.

            POP is a line-based protocol, which means large mail
            messages consume lots of python cycles reading them
            line-by-line.

            If it's available on your mail server, use IMAP4
            instead, it doesn't suffer from the two problems
            above.
    """


    def __init__(self, host, port=POP3_PORT,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        self.host = host
        self.port = port
        self.sock = socket.create_connection((host, port), timeout)
        # Buffered file wrapper over the socket, used by _getline().
        self.file = self.sock.makefile('rb')
        self._debugging = 0
        # Consume the server greeting now; it may carry an APOP timestamp.
        self.welcome = self._getresp()


    # Internal: send one line to the server, appending CRLF.
    def _putline(self, line):
        if self._debugging > 1: print '*put*', repr(line)
        self.sock.sendall('%s%s' % (line, CRLF))


    # Internal: send one command to the server (through _putline())

    def _putcmd(self, line):
        if self._debugging: print '*cmd*', repr(line)
        self._putline(line)


    # Internal: return one line from the server, stripping CRLF.
    # This is where all the CPU time of this module is consumed.
    # Raise error_proto('-ERR EOF') if the connection is closed.

    def _getline(self):
        line = self.file.readline()
        if self._debugging > 1: print '*get*', repr(line)
        if not line: raise error_proto('-ERR EOF')
        octets = len(line)
        # server can send any combination of CR & LF
        # however, 'readline()' returns lines ending in LF
        # so only possibilities are ...LF, ...CRLF, CR...LF
        if line[-2:] == CRLF:
            return line[:-2], octets
        if line[0] == CR:
            return line[1:-1], octets
        return line[:-1], octets


    # Internal: get a response from the server.
    # Raise 'error_proto' if the response doesn't start with '+'.

    def _getresp(self):
        resp, o = self._getline()
        if self._debugging > 1: print '*resp*', repr(resp)
        c = resp[:1]
        if c != '+':
            raise error_proto(resp)
        return resp


    # Internal: get a response plus following text from the server.
    # Multi-line responses are terminated by a lone '.'; leading '..' is
    # byte-stuffing for lines that begin with a dot (RFC 1939 section 3).

    def _getlongresp(self):
        resp = self._getresp()
        list = []; octets = 0
        line, o = self._getline()
        while line != '.':
            if line[:2] == '..':
                o = o-1
                line = line[1:]
            octets = octets + o
            list.append(line)
            line, o = self._getline()
        return resp, list, octets


    # Internal: send a command and get the response

    def _shortcmd(self, line):
        self._putcmd(line)
        return self._getresp()


    # Internal: send a command and get the response plus following text

    def _longcmd(self, line):
        self._putcmd(line)
        return self._getlongresp()


    # These can be useful:

    def getwelcome(self):
        return self.welcome


    def set_debuglevel(self, level):
        self._debugging = level


    # Here are all the POP commands:

    def user(self, user):
        """Send user name, return response

        (should indicate password required).
        """
        return self._shortcmd('USER %s' % user)


    def pass_(self, pswd):
        """Send password, return response

        (response includes message count, mailbox size).

        NB: mailbox is locked by server from here to 'quit()'
        """
        return self._shortcmd('PASS %s' % pswd)


    def stat(self):
        """Get mailbox status.

        Result is tuple of 2 ints (message count, mailbox size)
        """
        retval = self._shortcmd('STAT')
        rets = retval.split()
        if self._debugging: print '*stat*', repr(rets)
        numMessages = int(rets[1])
        sizeMessages = int(rets[2])
        return (numMessages, sizeMessages)


    def list(self, which=None):
        """Request listing, return result.

        Result without a message number argument is in form
        ['response', ['mesg_num octets', ...], octets].

        Result when a message number argument is given is a
        single response: the "scan listing" for that message.
        """
        if which is not None:
            return self._shortcmd('LIST %s' % which)
        return self._longcmd('LIST')


    def retr(self, which):
        """Retrieve whole message number 'which'.

        Result is in form ['response', ['line', ...], octets].
        """
        return self._longcmd('RETR %s' % which)


    def dele(self, which):
        """Delete message number 'which'.

        Result is 'response'.
        """
        return self._shortcmd('DELE %s' % which)


    def noop(self):
        """Does nothing.

        One supposes the response indicates the server is alive.
        """
        return self._shortcmd('NOOP')


    def rset(self):
        """Unmark all messages marked for deletion."""
        return self._shortcmd('RSET')


    def quit(self):
        """Signoff: commit changes on server, unlock mailbox, close connection."""
        try:
            resp = self._shortcmd('QUIT')
        except error_proto, val:
            resp = val
        self.file.close()
        self.sock.close()
        del self.file, self.sock
        return resp

    #__del__ = quit


    # optional commands:

    def rpop(self, user):
        """Not sure what this does."""
        return self._shortcmd('RPOP %s' % user)


    # Matches the APOP timestamp banner (e.g. '<1234.5678@host>') that a
    # server may include in its greeting.
    timestamp = re.compile(r'\+OK.*(<[^>]+>)')

    def apop(self, user, secret):
        """Authorisation

        - only possible if server has supplied a timestamp in initial greeting.

        Args:
                user    - mailbox user;
                secret  - secret shared between client and server.

        NB: mailbox is locked by server from here to 'quit()'
        """
        m = self.timestamp.match(self.welcome)
        if not m:
            raise error_proto('-ERR APOP not supported by server')
        import hashlib
        # APOP digest = md5(timestamp + shared secret), lowercase hex.
        digest = hashlib.md5(m.group(1)+secret).digest()
        digest = ''.join(map(lambda x:'%02x'%ord(x), digest))
        return self._shortcmd('APOP %s %s' % (user, digest))


    def top(self, which, howmuch):
        """Retrieve message header of message number 'which'
        and first 'howmuch' lines of message body.

        Result is in form ['response', ['line', ...], octets].
        """
        return self._longcmd('TOP %s %s' % (which, howmuch))


    def uidl(self, which=None):
        """Return message digest (unique id) list.

        If 'which', result contains unique id for that message
        in the form 'response mesgnum uid', otherwise result is
        the list ['response', ['mesgnum uid', ...], octets]
        """
        if which is not None:
            return self._shortcmd('UIDL %s' % which)
        return self._longcmd('UIDL')
try:
    import ssl
except ImportError:
    # No SSL support compiled into this Python; POP3_SSL is simply absent.
    pass
else:
    class POP3_SSL(POP3):
        """POP3 client class over SSL connection

        Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None)

               hostname - the hostname of the pop3 over ssl server
               port - port number
               keyfile - PEM formatted file that countains your private key
               certfile - PEM formatted certificate chain file

        See the methods of the parent class POP3 for more documentation.
        """

        def __init__(self, host, port = POP3_SSL_PORT, keyfile = None, certfile = None):
            self.host = host
            self.port = port
            self.keyfile = keyfile
            self.certfile = certfile
            # Accumulates decrypted bytes between _getline() calls.
            self.buffer = ""
            msg = "getaddrinfo returns an empty list"
            self.sock = None
            # Try each resolved address until one connects.
            for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
                af, socktype, proto, canonname, sa = res
                try:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.connect(sa)
                except socket.error, msg:
                    if self.sock:
                        self.sock.close()
                    self.sock = None
                    continue
                break
            if not self.sock:
                raise socket.error, msg
            self.file = self.sock.makefile('rb')
            # Wrap the connected socket; all I/O now goes through sslobj.
            self.sslobj = ssl.wrap_socket(self.sock, self.keyfile, self.certfile)
            self._debugging = 0
            self.welcome = self._getresp()

        def _fillBuffer(self):
            # Pull the next chunk of decrypted bytes into self.buffer;
            # an empty read means the peer closed the connection.
            localbuf = self.sslobj.read()
            if len(localbuf) == 0:
                raise error_proto('-ERR EOF')
            self.buffer += localbuf

        def _getline(self):
            # Override: read from the SSL buffer instead of self.file.
            line = ""
            renewline = re.compile(r'.*?\n')
            match = renewline.match(self.buffer)
            while not match:
                self._fillBuffer()
                match = renewline.match(self.buffer)
            line = match.group(0)
            self.buffer = renewline.sub('' ,self.buffer, 1)
            if self._debugging > 1: print '*get*', repr(line)

            octets = len(line)
            if line[-2:] == CRLF:
                return line[:-2], octets
            if line[0] == CR:
                return line[1:-1], octets
            return line[:-1], octets

        def _putline(self, line):
            # Override: ssl write() may send fewer bytes than asked; loop
            # until the whole line has gone out.
            if self._debugging > 1: print '*put*', repr(line)
            line += CRLF
            bytes = len(line)
            while bytes > 0:
                sent = self.sslobj.write(line)
                if sent == bytes:
                    break    # avoid copy
                line = line[sent:]
                bytes = bytes - sent

        def quit(self):
            """Signoff: commit changes on server, unlock mailbox, close connection."""
            try:
                resp = self._shortcmd('QUIT')
            except error_proto, val:
                resp = val
            self.sock.close()
            del self.sslobj, self.sock
            return resp

    __all__.append("POP3_SSL")
if __name__ == "__main__":
    # Manual smoke test: python poplib.py <server> <user> <password>
    # Logs in, lists the mailbox, prints every message, then quits.
    import sys
    a = POP3(sys.argv[1])
    print a.getwelcome()
    a.user(sys.argv[2])
    a.pass_(sys.argv[3])
    a.list()
    (numMsgs, totalSize) = a.stat()
    for i in range(1, numMsgs + 1):
        (header, msg, octets) = a.retr(i)
        print "Message %d:" % i
        for line in msg:
            print '   ' + line
        print '-----------------------'
    a.quit()
| mit |
MicheleDamian/ConnectopicMapping | setup.py | 1 | 1540 | from codecs import open
from os import path
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import numpy
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
# Requirements
# NOTE(review): these minimum versions presumably track the oldest tested
# combination -- confirm before loosening.
install_requires=['cython>=0.24.1',
                  'numpy>=1.6.1',
                  'scipy>=0.16',
                  'matplotlib>=1.5.1',
                  'scikit-learn>=0.17.1',
                  'nibabel>=2.0.2',
                  'nilearn>=0.2.4',
                  'GPy>=1.0.7']
setup(
    name='connectopic_mapping',
    version='0.3.0',
    description='Connectopic mapping',
    long_description=long_description,
    author='Michele Damian',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering :: Medical Science Apps.',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    keywords='neuroscience connectopic mapping research',
    packages=['connectopic_mapping'],
    install_requires=install_requires,
    # Compile the Cython extension (haak.pyx) against NumPy headers.
    cmdclass={'build_ext': build_ext},
    ext_modules=[Extension("connectopic_mapping.haak", ["connectopic_mapping/haak.pyx"], include_dirs=[numpy.get_include()])],
)
| apache-2.0 |
beegieb/mltools | deeplearning/graphrbm.py | 1 | 5949 | from core.generalized import GeneralizedModel
from utils.functions import *
from scipy import *
from numpy.random import normal, permutation, rand, uniform
# Layer type -> mean-field activation function applied to pre-activations.
LAYER_MODEL_FNS = { 'binary': sigmoid,
                    'linear': linear }
# Layer type -> sampling function applied to the activations.
LAYER_SAMPLE_FNS = { 'binary': sample_bernoulli,
                     'linear': linear }
def init_bias(opts):
    """Return one all-zero bias vector per layer descriptor in *opts*.

    Each descriptor is a dict with a 'size' entry giving the layer width.
    """
    return [zeros(layer['size']) for layer in opts]
def init_weights(vopts, hopts, edges, randomize=True):
    """Create one (hid_size x vis_size) weight matrix per graph edge.

    Keys of the returned dict are the (vis_index, hid_index) edge pairs.
    When *randomize* is true, entries are drawn from U(-1, 1) and scaled
    by 1/sqrt(hid_size + vis_size); otherwise the matrices are all zero.
    """
    weights = {}
    for vis_idx, hid_idx in edges:
        n_vis = vopts[vis_idx]['size']
        n_hid = hopts[hid_idx]['size']
        if randomize:
            scale = sqrt(n_hid + n_vis)
            weights[vis_idx, hid_idx] = uniform(-1, 1, size=(n_hid, n_vis)) / scale
        else:
            weights[vis_idx, hid_idx] = zeros((n_hid, n_vis))
    return weights
def init_persist(h_opts, batch_size):
    """Create the initial (all-zero) persistent-chain state.

    Returns one (batch_size x layer_size) matrix per hidden layer
    descriptor in *h_opts*.

    Bug fix: the loop previously iterated over the undefined name
    ``opts`` instead of the ``h_opts`` parameter, so every call raised
    NameError.
    """
    return [zeros((batch_size, o['size'])) for o in h_opts]
class GraphRBM(GeneralizedModel):
    """Restricted Boltzmann Machine over a bipartite layer graph.

    ``vis_opts``/``hid_opts`` are lists of layer descriptors
    ({'size': int, 'type': 'binary'|'linear'}) and ``edges`` is a list of
    (vis_index, hid_index) pairs naming which visible layer connects to
    which hidden layer.  ``trainfn`` selects CD-n ('cdn'), persistent CD
    ('pcd') or fast persistent CD ('fpcd') training.

    Fixes relative to the original:
      * grad(): missing ':' on the visible-bias loop (SyntaxError) and
        iteration over the 3-tuple ``pos_h`` instead of the hidden layers;
        ``np.dot`` replaced with the in-scope ``dot`` (``np`` was never
        imported).
      * propdown(): returned the undefined name ``hid_act`` (NameError);
        now returns ``vis_act``.
      * propup()/propdown(): each layer now uses its OWN activation and
        sampling functions (the old loop overwrote ``non_lin``/``sample``
        so only the last layer's functions were ever applied).
      * update(): referenced the undefined name ``prev_grad`` and
        dereferenced ``self.prevgrad`` while it was still None; the first
        call now simply has no momentum term.
      * cost(): the full gibbs_vhv reconstruction pass is computed once
        instead of once per visible layer.
    """
    attrs_ = ['trainfn', 'n', 'batch_size', 'epochs', 'learn_rate',
              'beta', 'momentum', 'verbose']

    def __init__(self, vis_opts, hid_opts, edges, trainfn='fpcd', n=1,
                 batch_size=100, epochs=1, learn_rate=0.1, beta=0.,
                 momentum=0., verbose=0):
        self.vis_opts = vis_opts
        self.hid_opts = hid_opts
        self.edges = edges
        self.vbiases = init_bias(vis_opts)
        self.hbiases = init_bias(hid_opts)
        self.weights = init_weights(vis_opts, hid_opts, edges)
        # Fast weights are only used by FPCD; they start at zero.
        self.fast_weights = init_weights(vis_opts, hid_opts, edges, False)
        self.persist_chain = init_persist(hid_opts, batch_size)
        self.trainfn = trainfn
        self.n = n
        self.batch_size = batch_size
        self.epochs = epochs
        self.learn_rate = learn_rate
        # Learning rate for the fast weights (scaled by e).
        self.flr = self.learn_rate * exp(1)
        self.beta = beta
        self.momentum = momentum
        self.verbose = verbose
        # Previous update step, used for momentum; None until update() runs.
        self.prevgrad = None

    def propup(self, vis, fw=False):
        """Propagate visible states upward through the edge graph.

        Returns (samples, mean activations, raw pre-activations), each a
        list with one entry per hidden layer.  With fw=True the fast
        weights are added to the regular weights (FPCD).
        """
        hid_act = [h for h in self.hbiases]
        for i, j in self.edges:
            W = self.fast_weights[i, j] + self.weights[i, j] if fw else self.weights[i, j]
            hid_act[j] = hid_act[j] + dot(vis[i], W.T)
        # Apply each hidden layer's own activation/sampling functions.
        non_lin = [LAYER_MODEL_FNS[o['type']](a)
                   for o, a in zip(self.hid_opts, hid_act)]
        sample = [LAYER_SAMPLE_FNS[o['type']](a)
                  for o, a in zip(self.hid_opts, non_lin)]
        return sample, non_lin, hid_act

    def propdown(self, hid, fw=False):
        """Propagate hidden states downward through the edge graph.

        Returns (samples, mean activations, raw pre-activations), each a
        list with one entry per visible layer.
        """
        vis_act = [v for v in self.vbiases]
        for i, j in self.edges:
            W = self.fast_weights[i, j] + self.weights[i, j] if fw else self.weights[i, j]
            vis_act[i] = vis_act[i] + dot(hid[j], W)
        # Apply each visible layer's own activation/sampling functions.
        non_lin = [LAYER_MODEL_FNS[o['type']](a)
                   for o, a in zip(self.vis_opts, vis_act)]
        sample = [LAYER_SAMPLE_FNS[o['type']](a)
                  for o, a in zip(self.vis_opts, non_lin)]
        # Fixed: the original returned the undefined name ``hid_act`` here.
        return sample, non_lin, vis_act

    def gibbs_hvh(self, h, mf=False, **args):
        """One Gibbs step hidden -> visible -> hidden.

        With mf=True the mean-field activations (rather than samples) are
        propagated.  Returns the visible and hidden layer triples.
        """
        v_samples = self.propdown(h, **args)
        v = v_samples[1] if mf else v_samples[0]
        h_samples = self.propup(v, **args)
        return v_samples, h_samples

    def gibbs_vhv(self, v, mf=False, **args):
        """One Gibbs step visible -> hidden -> visible."""
        h_samples = self.propup(v, **args)
        h = h_samples[1] if mf else h_samples[0]
        v_samples = self.propdown(h, **args)
        return v_samples, h_samples

    def cost(self, v):
        """Run one CD/PCD/FPCD step on a batch of visible data.

        *v* is a list of data matrices, one per visible layer.  Returns
        (reconstruction_error, gradients).
        """
        use_fw = self.trainfn == 'fpcd'
        use_persist = use_fw or self.trainfn == 'pcd'
        # Positive phase: hidden statistics driven by the data.
        pos_h_samples = self.propup(v)
        # Negative phase: start from the persistent chain (PCD/FPCD) or
        # from the data-driven hidden sample (plain CD-n).
        nh0 = self.persist_chain if use_persist else pos_h_samples[0]
        for _ in range(self.n):
            neg_v_samples, neg_h_samples = self.gibbs_hvh(nh0, fw=use_fw)
            nh0 = neg_h_samples[0]
        # Compute gradients, then advance the persistent chain.
        grads = self.grad(v, pos_h_samples, neg_v_samples, neg_h_samples)
        self.persist_chain = nh0
        # Reconstruction error (summed over layers, averaged per batch).
        if self.trainfn == 'cdn':
            recon = neg_v_samples[1]
        else:
            # Hoisted out of the per-layer loop: one full pass suffices.
            recon = self.gibbs_vhv(v)[0][1]
        cost = sum([sum(square(v[i] - recon[i])) / self.batch_size
                    for i in range(len(v))])
        return cost, grads

    def grad(self, pv0, pos_h, neg_v, neg_h):
        """Gradients of the log-likelihood w.r.t. weights and biases.

        pv0   -- list of visible data matrices (positive phase)
        pos_h -- (sample, mean, act) triple from propup(pv0)
        neg_v -- (sample, mean, act) triple for the negative visible phase
        neg_h -- (sample, mean, act) triple for the negative hidden phase
        """
        grad = {'W': {}, 'hbias': {}, 'vbias': {}}
        num_points = pv0[0].shape[0]
        E_v = neg_v[1]
        E_h = neg_h[1]
        E_hgv = pos_h[1]
        for i, j in self.edges:
            # Fixed: was ``np.dot`` with ``np`` never imported.
            E_vh = dot(E_h[j].T, E_v[i])
            E_vhgv = dot(E_hgv[j].T, pv0[i])
            grad['W'][i, j] = (E_vhgv - E_vh) / num_points
        # Fixed: missing ':' here in the original (SyntaxError).
        for i in range(len(pv0)):
            grad['vbias'][i] = mean(pv0[i] - E_v[i], 0)
        # Fixed: the original iterated enumerate(pos_h) -- the 3-tuple --
        # instead of the hidden layers.
        for j in range(len(self.hbiases)):
            grad['hbias'][j] = mean(E_hgv[j] - E_h[j], 0)
        return grad

    def update(self, grad):
        """Apply one momentum + weight-decay gradient step.

        Also updates the fast weights when training with FPCD.  Returns
        self for chaining.
        """
        if self.prevgrad is None:
            # First update: no momentum contribution yet.
            self.prevgrad = {'W': {}, 'hbias': {}, 'vbias': {}}
        prev = self.prevgrad
        dW = {}
        dh = {}
        dv = {}
        for i, j in self.edges:
            dW[i, j] = (self.momentum * prev['W'].get((i, j), 0.)
                        + self.learn_rate * (grad['W'][i, j] - self.beta * self.weights[i, j]))
            self.weights[i, j] += dW[i, j]
            # Fast weights decay towards zero while tracking the gradient.
            if self.trainfn == 'fpcd':
                self.fast_weights[i, j] = (49. / 50) * self.fast_weights[i, j] + self.flr * grad['W'][i, j]
        for i in range(len(self.vbiases)):
            dv[i] = self.momentum * prev['vbias'].get(i, 0.) + self.learn_rate * grad['vbias'][i]
            self.vbiases[i] += dv[i]
        for j in range(len(self.hbiases)):
            dh[j] = self.momentum * prev['hbias'].get(j, 0.) + self.learn_rate * grad['hbias'][j]
            self.hbiases[j] += dh[j]
        # Remember this step for the next call's momentum term.
        self.prevgrad = {'W': dW, 'hbias': dh, 'vbias': dv}
        return self
| gpl-2.0 |
luofei98/qgis | python/plugins/processing/algs/grass7/Grass7AlgorithmProvider.py | 3 | 4313 | # -*- coding: utf-8 -*-
"""
***************************************************************************
Grass7AlgorithmProvider.py
---------------------
Date : April 2014
Copyright : (C) 2014 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'April 2014'
__copyright__ = '(C) 2014, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.core.AlgorithmProvider import AlgorithmProvider
from processing.core.ProcessingLog import ProcessingLog
from Grass7Utils import Grass7Utils
from Grass7Algorithm import Grass7Algorithm
from processing.tools.system import *
from nviz7 import nviz7
class Grass7AlgorithmProvider(AlgorithmProvider):
    """Processing provider exposing GRASS GIS 7 commands as algorithms."""

    def __init__(self):
        AlgorithmProvider.__init__(self)
        self.createAlgsList() # Preloading algorithms to speed up

    def initializeSettings(self):
        """Register the provider's configurable settings.

        The GRASS/Msys folder settings only make sense on Windows and Mac;
        on Linux the system installation is used.
        """
        AlgorithmProvider.initializeSettings(self)
        if isWindows() or isMac():
            ProcessingConfig.addSetting(Setting(self.getDescription(),
                Grass7Utils.GRASS_FOLDER, 'GRASS7 folder',
                Grass7Utils.grassPath()))
            ProcessingConfig.addSetting(Setting(self.getDescription(),
                Grass7Utils.GRASS_WIN_SHELL, 'Msys folder',
                Grass7Utils.grassWinShell()))
        ProcessingConfig.addSetting(Setting(self.getDescription(),
            Grass7Utils.GRASS_LOG_COMMANDS,
            'Log execution commands', False))
        ProcessingConfig.addSetting(Setting(self.getDescription(),
            Grass7Utils.GRASS_LOG_CONSOLE,
            'Log console output', False))

    def unload(self):
        # Mirror initializeSettings(): remove everything that was added.
        AlgorithmProvider.unload(self)
        if isWindows() or isMac():
            ProcessingConfig.removeSetting(Grass7Utils.GRASS_FOLDER)
            ProcessingConfig.removeSetting(Grass7Utils.GRASS_WIN_SHELL)
        ProcessingConfig.removeSetting(Grass7Utils.GRASS_LOG_COMMANDS)
        ProcessingConfig.removeSetting(Grass7Utils.GRASS_LOG_CONSOLE)

    def createAlgsList(self):
        """Build the algorithm list from the bundled description files.

        Each .txt file in the description folder defines one GRASS command;
        files that fail to parse are logged and skipped.
        """
        self.preloadedAlgs = []
        folder = Grass7Utils.grassDescriptionPath()
        for descriptionFile in os.listdir(folder):
            if descriptionFile.endswith('txt'):
                try:
                    alg = Grass7Algorithm(os.path.join(folder, descriptionFile))
                    if alg.name.strip() != '':
                        self.preloadedAlgs.append(alg)
                    else:
                        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
                                'Could not open GRASS GIS 7 algorithm: '
                                + descriptionFile)
                except Exception, e:
                    ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
                            'Could not open GRASS GIS 7 algorithm: '
                            + descriptionFile)
        # nviz7 is implemented in code rather than a description file.
        self.preloadedAlgs.append(nviz7())

    def _loadAlgorithms(self):
        # Reuse the list built eagerly in __init__.
        self.algs = self.preloadedAlgs

    def getDescription(self):
        return 'GRASS GIS 7 commands'

    def getName(self):
        return 'grass70'

    def getIcon(self):
        return QIcon(os.path.dirname(__file__) + '/../../images/grass.png')

    def getSupportedOutputVectorLayerExtensions(self):
        return ['shp']

    def getSupportedOutputRasterLayerExtensions(self):
        return ['tif']
| gpl-2.0 |
Workday/OpenFrame | tools/telemetry/third_party/gsutilz/third_party/oauth2client/tests/test_clientsecrets.py | 21 | 4735 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for oauth2client.clientsecrets."""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import os
import unittest
from io import StringIO
import httplib2
from oauth2client import clientsecrets
# Location of the static JSON fixtures used by these tests.
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
# A well-formed client_secrets.json fixture.
VALID_FILE = os.path.join(DATA_DIR, 'client_secrets.json')
# Syntactically valid JSON whose placeholder values were never filled in.
INVALID_FILE = os.path.join(DATA_DIR, 'unfilled_client_secrets.json')
# A path that is guaranteed not to exist on disk.
NONEXISTENT_FILE = os.path.join(__file__, '..', 'afilethatisntthere.json')
class OAuth2CredentialsTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_validate_error(self):
ERRORS = [
('{}', 'Invalid'),
('{"foo": {}}', 'Unknown'),
('{"web": {}}', 'Missing'),
('{"web": {"client_id": "dkkd"}}', 'Missing'),
("""{
"web": {
"client_id": "[[CLIENT ID REQUIRED]]",
"client_secret": "[[CLIENT SECRET REQUIRED]]",
"redirect_uris": ["http://localhost:8080/oauth2callback"],
"auth_uri": "",
"token_uri": ""
}
}
""", 'Property'),
]
for src, match in ERRORS:
# Ensure that it is unicode
try:
src = src.decode('utf-8')
except AttributeError:
pass
# Test load(s)
try:
clientsecrets.loads(src)
self.fail(src + ' should not be a valid client_secrets file.')
except clientsecrets.InvalidClientSecretsError as e:
self.assertTrue(str(e).startswith(match))
# Test loads(fp)
try:
fp = StringIO(src)
clientsecrets.load(fp)
self.fail(src + ' should not be a valid client_secrets file.')
except clientsecrets.InvalidClientSecretsError as e:
self.assertTrue(str(e).startswith(match))
def test_load_by_filename(self):
try:
clientsecrets._loadfile(NONEXISTENT_FILE)
self.fail('should fail to load a missing client_secrets file.')
except clientsecrets.InvalidClientSecretsError as e:
self.assertTrue(str(e).startswith('File'))
class CachedClientsecretsTests(unittest.TestCase):
class CacheMock(object):
def __init__(self):
self.cache = {}
self.last_get_ns = None
self.last_set_ns = None
def get(self, key, namespace=''):
# ignoring namespace for easier testing
self.last_get_ns = namespace
return self.cache.get(key, None)
def set(self, key, value, namespace=''):
# ignoring namespace for easier testing
self.last_set_ns = namespace
self.cache[key] = value
def setUp(self):
self.cache_mock = self.CacheMock()
def test_cache_miss(self):
client_type, client_info = clientsecrets.loadfile(
VALID_FILE, cache=self.cache_mock)
self.assertEqual('web', client_type)
self.assertEqual('foo_client_secret', client_info['client_secret'])
cached = self.cache_mock.cache[VALID_FILE]
self.assertEqual({client_type: client_info}, cached)
# make sure we're using non-empty namespace
ns = self.cache_mock.last_set_ns
self.assertTrue(bool(ns))
# make sure they're equal
self.assertEqual(ns, self.cache_mock.last_get_ns)
def test_cache_hit(self):
self.cache_mock.cache[NONEXISTENT_FILE] = { 'web': 'secret info' }
client_type, client_info = clientsecrets.loadfile(
NONEXISTENT_FILE, cache=self.cache_mock)
self.assertEqual('web', client_type)
self.assertEqual('secret info', client_info)
# make sure we didn't do any set() RPCs
self.assertEqual(None, self.cache_mock.last_set_ns)
def test_validation(self):
try:
clientsecrets.loadfile(INVALID_FILE, cache=self.cache_mock)
self.fail('Expected InvalidClientSecretsError to be raised '
'while loading %s' % INVALID_FILE)
except clientsecrets.InvalidClientSecretsError:
pass
def test_without_cache(self):
# this also ensures loadfile() is backward compatible
client_type, client_info = clientsecrets.loadfile(VALID_FILE)
self.assertEqual('web', client_type)
self.assertEqual('foo_client_secret', client_info['client_secret'])
# Allow this test module to be run directly from the command line.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.1/Lib/encodings/iso8859_9.py | 272 | 13156 | """ Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO 8859-9 codec backed by the module's charmap tables."""

    def encode(self, input, errors='strict'):
        """Encode text to ISO 8859-9 bytes via the encoding table."""
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        """Decode ISO 8859-9 bytes to text via the decoding table."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; the charmap is stateless, so every chunk is
    encoded independently of the previous ones."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; single-byte charmaps never split characters
    across chunks, so no state is needed."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer combining the charmap Codec with codecs.StreamWriter."""
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader combining the charmap Codec with codecs.StreamReader."""
### encodings module API
def getregentry():
    """Return the CodecInfo record that the encodings package registers."""
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-9',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u011e' # 0xD0 -> LATIN CAPITAL LETTER G WITH BREVE
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0130' # 0xDD -> LATIN CAPITAL LETTER I WITH DOT ABOVE
'\u015e' # 0xDE -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\u011f' # 0xF0 -> LATIN SMALL LETTER G WITH BREVE
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0131' # 0xFD -> LATIN SMALL LETTER DOTLESS I
'\u015f' # 0xFE -> LATIN SMALL LETTER S WITH CEDILLA
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table

# Invert the decoding table once at import time; charmap_build() produces
# the fast lookup object consumed by codecs.charmap_encode().
encoding_table = codecs.charmap_build(decoding_table)
| mit |
liorvh/golismero | golismero/api/data/vulnerability/__init__.py | 8 | 46532 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Vulnerability types.
"""
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__all__ = [
"Vulnerability",
"WebVulnerability",
]
from .vuln_utils import convert_vuln_ids_to_references, \
convert_references_to_vuln_ids, _vuln_ref_regex, CVSS, TAXONOMY_NAMES
from .. import Data, identity, merge, keep_newer, keep_true, LocalDataCache
from ..resource import Resource
from ..resource.url import BaseURL, FolderURL, URL
from ...config import Config
from ...plugin import get_plugin_info
from ...text.text_utils import to_utf8
from collections import defaultdict
from inspect import getmro
from textwrap import dedent
from warnings import warn
#------------------------------------------------------------------------------
# Merge strategy for taxonomy IDs.
class merge_vuln_ids(merge):
    """Merge strategy that unions the taxonomy ID tuples of two versions
    of the same vulnerability."""


    #--------------------------------------------------------------------------
    @staticmethod
    def do_merge(old_data, new_data, key):
        """Combine both ID tuples for *key* into one sanitized tuple."""
        ours = getattr(old_data, key, None)
        theirs = getattr(new_data, key, None)
        # Sanitizing also deduplicates and sorts the combined IDs.
        return sanitize_vuln_ids(ours + theirs)
#------------------------------------------------------------------------------
def sanitize_vuln_ids(vid):
    """Normalize a vulnerability ID collection.

    A single string becomes a one-element tuple; any other iterable is
    UTF-8-normalized, deduplicated and sorted. Falsy input yields an
    empty tuple.
    """
    if not vid:
        return ()
    if isinstance(vid, basestring):
        return (str(to_utf8(vid)),)
    return tuple(sorted(set(str(to_utf8(x)) for x in vid)))
#------------------------------------------------------------------------------
# Base class for all vulnerabilities.
class Vulnerability(Data):
"""
Vulnerability Title.
After the title comes the vulnerability description. You can make it as
long as you wish, as long as you don't leave blank lines in between. If
you need a more specific text for each instance of your vulnerability,
override the "title", "description" and/or "solution" values of the
"DEFAULT" class variable. But don't forget to make a copy of the
dictionary first! You don't want your changes to affect the parent class.
The third paragraph is the solution to the vulnerability. Here you want to
include tips on how to avoid this vulnerability in source code, or maybe
some general security recommendations. Links to external resources belong
belong in the "references" section instead, so don't add them here.
"""
data_type = Data.TYPE_VULNERABILITY
data_subtype = "vulnerability/abstract"
max_vulnerabilities = 0
# Vulnerability levels.
VULN_LEVELS = ("informational", "low", "middle", "high", "critical")
# Default vulnerability properties.
# Note: plugin_id and custom_id must NOT be defined here.
DEFAULTS = {
"level": "low",
"impact": 0,
"severity": 0,
"risk": 0,
"title": None, # special value, do not change!
"description": None, # special value, do not change!
"solution": None, # special value, do not change!
"references": (),
"cvss_base": "0.0",
"cvss_score": None,
"cvss_vector": None,
"tool_id": None,
}
DEFAULTS.update( { x: () for x in TAXONOMY_NAMES } )
# Class of the target for the vulnerabilities of this type.
TARGET_CLASS = Resource
    #--------------------------------------------------------------------------
    def __init__(self, target, **kwargs):
        """
        :param target: This is where the vulnerability was found. Must be
            an instance of this class's TARGET_CLASS.
        :type target: Resource

        :keyword title: Title used for vulnerability.
        :type title: str

        :keyword description: Free form text describing the vulnerability.
        :type description: str

        :keyword solution: Free form text describing a possible solution.
        :type solution: str

        :keyword plugin_id: ID of the plugin that found the vulnerability.
            Defaults to the calling plugin ID.
        :type plugin_id: str

        :keyword tool_id: Plugin-defined tool ID. This may be used by
            plugins that run external tools, to track down which tool (or
            which plugin/addon of that tool) has found the vulnerability.
            Other plugins can safely leave this as None (the default).
        :type tool_id: str | None

        :keyword custom_id: Customized vulnerability ID. This advanced
            argument may be used by plugins that know how to uniquely
            identify their own vulnerabilities, in order to detect when
            the same vulnerability was detected multiple times. Most
            plugins will leave this value as None and let GoLismero do
            the vulnerability duplicates matching.
        :type custom_id: str | None

        :keyword level: User-friendly vulnerability level. Must be one of:
            "critical", "high", "middle", "low" or "informational".
            Ignored if a CVSS vector is given.
        :type level: str

        :keyword impact: Impact rating. A number between 0-4.
        :type impact: int

        :keyword severity: Severity rating. A number between 0-4.
        :type severity: int

        :keyword risk: Risk rating. A number between 0-4.
        :type risk: int

        :keyword references: Reference URLs.
        :type references: tuple(str)

        :keyword cvss_base: CVSS base score. Ignored if a vector is given.
        :type cvss_base: str

        :keyword cvss_score: CVSS score. Ignored if a vector is given.
        :type cvss_score: str

        :keyword cvss_vector: CVSS vector. Overrides cvss_base and
            cvss_score, as well as the user-friendly level.
        :type cvss_vector: str

        Additionally, one keyword per supported taxonomy is accepted,
        each taking a tuple of string IDs: bid (Bugtraq), ca (CERT
        Advisory), capec (CAPEC), cisco (Cisco Security Advisory), cve
        (CVE), cwe (CWE), dsa (Debian Security Advisory), edb
        (ExploitDB), glsa (Gentoo Linux Security Advisory), mdvsa
        (Mandriva Security Advisory), ms (Microsoft Advisory), mskb
        (Microsoft Knowledge Base), nessus (Nessus Plugin), osvdb
        (OSVDB), rhsa (RedHat Security Advisory), sa (Secunia Advisory),
        sectrack (Security Tracker), usn (Ubuntu Security Notice), vmsa
        (VMWare Security Advisory), vu (CERT Vulnerability Note), xf
        (ISS X-Force).
        """

        # Do not allow abstract vulnerability types to be instanced.
        if self.data_subtype == "vulnerability/abstract":
            raise TypeError("Class %s is abstract!" % self.__class__.__name__)

        # Check the target parameter.
        if not isinstance(target, self.TARGET_CLASS):
            raise TypeError("Expected %s, got %r instead" %
                            (self.TARGET_CLASS.__name__, type(target)))

        # Store the target ID. Since this is part of our ID, it ensures we'll
        # have different Vulnerability instances for different targets, even
        # if all other parameters are the same.
        self.__target_id = target.identity

        # Store the custom ID, if any.
        custom_id = kwargs.pop("custom_id", None)
        if custom_id is not None and type(custom_id) is not str:
            raise TypeError("Custom ID may only be a string!")
        self.__custom_id = custom_id

        # Newly found vulns are undecided by default.
        # However, if previously marked as false positives and found again,
        # the false positive mark stays put (@keep_true strategy).
        self.__false_positive = None

        # Set the plugin ID. We need to do this before setting the rest of the
        # properties, because some of them use the plugin ID, like for example
        # the vulnerability title.
        self.plugin_id = kwargs.pop("plugin_id", None)

        # Validate the remaining keyword arguments.
        not_found = set(kwargs.iterkeys())
        not_found.difference_update(Vulnerability.DEFAULTS.iterkeys())
        if not_found:
            raise TypeError(
                "Unexpected keyword arguments: %s"
                % ", ".join(sorted(not_found)))
        del not_found

        # If a CVSS vector is given, ignore any other CVSS scores given and
        # recalculate them from the vector instead.
        if "cvss_vector" in kwargs:
            try:
                del kwargs["cvss_base"]
            except KeyError:
                pass
            try:
                del kwargs["cvss_score"]
            except KeyError:
                pass
            try:
                del kwargs["level"]
            except KeyError:
                pass

        # Set the properties, first the ones with a default value defined,
        # then the ones with no default value defined. The None-defaulted
        # properties (title, description, solution) may read the others
        # while computing their default texts, so order matters here.
        # TODO inherit the dicts instead of forcing subclasses to copy() it
        propnames = [
            k for k,v in Vulnerability.DEFAULTS.iteritems() if v is not None]
        propnames.extend(
            k for k,v in Vulnerability.DEFAULTS.iteritems() if v is None)
        for prop in propnames:
            # Explicit keyword wins, then the subclass DEFAULTS, then the
            # base class DEFAULTS.
            value = kwargs.get(prop)
            if value is None:
                value = self.DEFAULTS.get(prop)
                if value is None:
                    value = Vulnerability.DEFAULTS.get(prop)
            setattr(self, prop, value)

        # TODO extract URLs from text and add them to the references.

        # Feed back references to vulnerability IDs.
        vuln_ids = convert_references_to_vuln_ids(self.references)
        for key, value in vuln_ids.iteritems():
            value.extend(getattr(self, key, []))
            setattr(self, key, value)  # auto-sanitizes

        # Feed back vulnerability IDs to references.
        refs = convert_vuln_ids_to_references(self.taxonomies)
        refs.extend(self.references)
        self.references = refs  # auto-sanitizes

        # Call the parent constructor.
        super(Vulnerability, self).__init__()

        # Associate the vulnerability to the target.
        self.add_link(target)
    #--------------------------------------------------------------------------
    @identity
    def target_id(self):
        """
        :returns: Identity hash of the target where the vulnerability was
            found. Because it is part of this object's identity, the same
            finding on two different targets yields two distinct instances.
        :rtype: str
        """
        return self.__target_id
    #--------------------------------------------------------------------------
    @property
    def target(self):
        """
        :returns: Target where the vulnerability was found, resolved from
            the stored identity hash.
        :rtype: Data
        """
        return self.resolve(self.target_id)
#--------------------------------------------------------------------------
def __repr__(self):
return "<%s plugin_id=%r level=%r title=%r>" % (
self.__class__.__name__,
self.plugin_id,
self.level,
self.title,
)
#--------------------------------------------------------------------------
def __set_text(self, name, text):
if text is None:
text = self.__get_default_text(name)
elif isinstance(text, unicode):
text = text.encode("UTF-8")
elif type(text) is not str:
raise TypeError(
"Expected string, got %r instead" % type(text))
setattr(self, "_Vulnerability__%s" % name, text)
    #--------------------------------------------------------------------------
    def __get_default_texts_from_docstring(self, section):
        """
        Retrieves the default title, description and solution texts from the
        class docstring.

        :param section: The docstring should be divided into three paragraphs,
            the first for the title, the second for the description, and the
            third for the solution. This parameter specifies which one to get:
            0 for the title, 1 for the description or 2 for the solution.
        :type section: int

        :returns: The retrieved text on success, or an empty string on error.
            Indentation and extra whitespace are removed. Newline characters
            are also removed. Tabs are converted to spaces.
        :rtype: str
        """
        text = ""
        # Walk the MRO keeping only concrete vulnerability classes, so the
        # docstring of the most derived class that has one wins.
        # NOTE(review): the filter reads "vulnerability_type", while this
        # module defines "data_subtype" -- presumably an alias provided by
        # the Data base class; confirm.
        mro = [
            clazz for clazz in getmro(self.__class__)
            if hasattr(clazz, "vulnerability_type") and
               clazz.vulnerability_type not in ("vulnerability/abstract",
                                                "vulnerability/generic")
        ]
        for clazz in mro:
            text = getattr(clazz, "__doc__", None)
            if text:
                break
        if text:
            try:
                # Paragraphs are separated by blank lines; flatten the
                # requested one into a single line of text.
                # NOTE(review): the docstring historically promised tabs
                # would become four spaces, but the code substitutes one.
                text = dedent(text).strip()
                text = text.split("\n\n", section + 1)[section].strip()
                text = text.replace("\t", " ")
                text = text.replace("\n", " ")
            except Exception:
                # Out-of-range section index or similar: fall back to "".
                text = ""
        return text
#--------------------------------------------------------------------------
@property
def display_name(self):
text = self.__get_default_texts_from_docstring(0)
if text:
text.strip()
if text and text.endswith("."):
text = text[:-1].strip()
if not text:
text = super(Vulnerability, self).display_name
return text
#--------------------------------------------------------------------------
def __get_default_text(self, propname):
text = self.DEFAULTS.get(propname, None)
if text is None:
if propname == "title":
text = self.display_name
if text == "Uncategorized Vulnerability":
if self.level == "informational":
text = "User attention required by"
else:
text = "Vulnerability found by"
if not self.plugin_id or \
self.plugin_id.lower() == "golismero":
text += " GoLismero"
elif self.plugin_id.startswith("ui/"):
text += " the user"
else:
text += ": "
try:
text += get_plugin_info(
self.plugin_id).display_name
except Exception:
text += self.plugin_id
elif propname == "description":
text = self.__get_default_texts_from_docstring(1)
if not text:
if self.references:
if len(self.references) > 1:
text = ("Please visit the reference website"
" for more information")
else:
text = ("Please visit the reference websites"
" for more information")
else:
text = "No additional details are available"
if self.level == "informational":
text += "."
else:
text += " for this vulnerability."
elif propname == "solution":
text = self.__get_default_texts_from_docstring(2)
if not text:
if self.references:
if len(self.references) > 1:
text = ("Please visit the reference websites"
" for more information")
else:
text = ("Please visit the reference website"
" for more information")
else:
text = "No additional details are available"
if self.level == "informational":
text += "."
else:
text += " on how to patch this vulnerability."
else:
text = ""
return text
    #--------------------------------------------------------------------------
    @identity
    def custom_id(self):
        """
        :returns: Customized vulnerability ID. This advanced argument may be
            used by plugins that know how to uniquely identify their own
            vulnerabilities, in order to detect when the same vulnerability
            was found multiple times. Most plugins will leave this value as
            None and let GoLismero do the vulnerability duplicates matching.
        :rtype: str | None
        """
        return self.__custom_id
    #--------------------------------------------------------------------------
    @keep_true
    def false_positive(self):
        """
        :returns: True for false positives, False for real vulnerabilities.
            None means the user hasn't evaluated this vulnerability yet.
        :rtype: bool | None
        """
        return self.__false_positive
    #--------------------------------------------------------------------------
    @false_positive.setter
    def false_positive(self, false_positive):
        """
        :param false_positive:
            True for false positives, False for real vulnerabilities.
            Don't use None here, it will be interpreted as False! Once a
            vulnerability has been marked as false positive or not, you
            can't go back to the undecided state.
        :type false_positive: bool
        """
        # bool() coerces any truthy/falsy value, including None -> False.
        self.__false_positive = bool(false_positive)
    #--------------------------------------------------------------------------
    @keep_newer
    def plugin_id(self):
        """
        :returns: ID of the plugin that found the vulnerability.
        :rtype: str
        """
        return self.__plugin_id
#--------------------------------------------------------------------------
@plugin_id.setter
def plugin_id(self, plugin_id):
"""
:param plugin_id: ID of the plugin that found the vulnerability.
Defaults to the calling plugin ID.
:type plugin_id: str
"""
if not plugin_id:
try:
plugin_id = Config.plugin_id
except Exception:
plugin_id = "GoLismero"
elif type(plugin_id) is not str:
raise TypeError(
"Expected string, got %r instead" % type(plugin_id))
self.__plugin_id = plugin_id
    #--------------------------------------------------------------------------
    @keep_newer
    def tool_id(self):
        """
        :returns: Plugin-defined tool ID. This may be used by plugins
            that run external tools, to track down which tool (or which
            plugin/addon of that tool) has found the vulnerability. Other
            plugins can safely leave this as None (the default).
        :rtype: str | None
        """
        return self.__tool_id
#--------------------------------------------------------------------------
@tool_id.setter
def tool_id(self, tool_id):
"""
:param tool_id: Plugin-defined tool ID. This may be used by plugins
that run external tools, to track down which tool (or which
plugin/addon of that tool) has found the vulnerability. Other
plugins can safely leave this as None (the default).
:type tool_id: str | None
"""
tool_id = to_utf8(tool_id)
if not tool_id:
tool_id = None
elif not isinstance(tool_id, str):
raise TypeError("Expected string, got %r instead" % type(tool_id))
self.__tool_id = tool_id
#--------------------------------------------------------------------------
@keep_newer
def level(self):
    """
    :return: Vulnerability level.
    :rtype: str
    """
    return self.__level

#--------------------------------------------------------------------------
@level.setter
def level(self, level):
    """
    .. note: Setting this property manually deletes the CVSS vector.

    :param level: User-friendly vulnerability level. Must be one of
        the values in VULN_LEVELS (case insensitive).
    :type level: str
    """
    level = to_utf8(level)
    if not isinstance(level, str):
        raise TypeError("Expected str, got %r instead" % type(level))
    elif level.lower() not in self.VULN_LEVELS:
        raise ValueError("Unknown level: %r" % level)
    # Stored lowercased; a manually set level invalidates any CVSS
    # vector the level may previously have been derived from.
    self.__level = level.lower()
    self.__cvss_vector = None
#--------------------------------------------------------------------------
@keep_newer
def impact(self):
    """
    :returns: Impact rating (0 to 4).
    :rtype: int
    """
    return self.__impact

#--------------------------------------------------------------------------
@impact.setter
def impact(self, impact):
    """
    :param impact: Impact rating. Must be an integer from 0 to 4.
    :type impact: int
    """
    value = int(impact)
    if not 0 <= value <= 4:
        raise ValueError("Invalid impact value: %d" % value)
    self.__impact = value
#--------------------------------------------------------------------------
@keep_newer
def severity(self):
    """
    :returns: Severity rating (0 to 4).
    :rtype: int
    """
    return self.__severity

#--------------------------------------------------------------------------
@severity.setter
def severity(self, severity):
    """
    :param severity: Severity rating. Must be an integer from 0 to 4.
    :type severity: int
    """
    value = int(severity)
    if not 0 <= value <= 4:
        raise ValueError("Invalid severity value: %d" % value)
    self.__severity = value
#--------------------------------------------------------------------------
@keep_newer
def risk(self):
    """
    :returns: Risk rating (0 to 4).
    :rtype: int
    """
    return self.__risk

#--------------------------------------------------------------------------
@risk.setter
def risk(self, risk):
    """
    :param risk: Risk rating. Must be an integer from 0 to 4.
    :type risk: int
    """
    value = int(risk)
    if not 0 <= value <= 4:
        raise ValueError("Invalid risk value: %d" % value)
    self.__risk = value
#--------------------------------------------------------------------------
@keep_newer
def title(self):
    """
    :returns: Title of the vulnerability.
    :rtype: str
    """
    return self.__title

#--------------------------------------------------------------------------
@title.setter
def title(self, title):
    """
    :param title: Title of the vulnerability.
        Use None to set the default.
    :type title: str | None
    """
    # __set_text() (defined elsewhere in this class) presumably handles
    # the None-means-default logic shared by title/description/solution.
    self.__set_text("title", title)
#--------------------------------------------------------------------------
@keep_newer
def description(self):
    """
    :returns: Free form text describing the vulnerability.
    :rtype: str
    """
    return self.__description

#--------------------------------------------------------------------------
@description.setter
def description(self, description):
    """
    :param description: Free form text describing the vulnerability.
        Use None to set the default.
    :type description: str
    """
    # Delegates to the shared text setter (defined elsewhere in class).
    self.__set_text("description", description)
#--------------------------------------------------------------------------
@keep_newer
def solution(self):
    """
    :returns: Free form text describing a possible solution.
    :rtype: str
    """
    return self.__solution

#--------------------------------------------------------------------------
@solution.setter
def solution(self, solution):
    """
    :param solution: Free form text describing a possible solution.
        Use None to set the default.
    :type solution: str
    """
    # Delegates to the shared text setter (defined elsewhere in class).
    self.__set_text("solution", solution)
#--------------------------------------------------------------------------
@keep_newer
def references(self):
    """
    :returns: Reference URLs.
    :rtype: tuple(str)
    """
    return self.__references

#--------------------------------------------------------------------------
@references.setter
def references(self, references):
    """
    :param references: Reference URLs. A single URL string may also be
        passed instead of a sequence.
    :type references: tuple(str)
    """
    # Remove the duplicates and convert to list.
    if not references:
        references = []
    elif isinstance(references, basestring):
        references = [str(to_utf8(references))]
    else:
        references = list(set(str(to_utf8(x)) for x in references))

    # Remove the redundant references and sort the list.
    # URLs recognized as vulnerability IDs (via the taxonomy converter
    # helpers, defined elsewhere) are replaced by their canonical URL.
    if references:
        tmp = defaultdict(list)
        for ref in references:
            tmp2 = convert_references_to_vuln_ids([ref])
            for vuln_ids in tmp2.itervalues():
                for vid in vuln_ids:
                    tmp[vid].append(ref)
        # NOTE(review): if one reference ever mapped to more than one
        # vuln ID, references.remove(ref) below would run twice for the
        # same ref and raise ValueError -- confirm the converters make
        # that impossible.
        for vid, refs in tmp.iteritems():
            for ref in refs:
                references.remove(ref)
            tmp3 = convert_vuln_ids_to_references([vid])
            references.append( tmp3[0] )
        references.sort()

    # Save the references as a tuple.
    self.__references = tuple(references)
#--------------------------------------------------------------------------
@keep_newer
def cvss_base(self):
    """
    :returns: CVSS base score, normalized as an "X.Y" string.
    :rtype: str | None
    """
    return self.__cvss_base

#--------------------------------------------------------------------------
@cvss_base.setter
def cvss_base(self, cvss_base):
    """
    .. note: Setting this property manually deletes the CVSS vector.

    :param cvss_base: CVSS base score (0.0 to 10.0).
    :type cvss_base: str
    """
    # Normalize to an "X.Y" string (same logic as the cvss_score setter).
    if not cvss_base:
        cvss_base = None
    elif isinstance(cvss_base, unicode):
        cvss_base = cvss_base.encode("UTF-8")
    # (Removed a dead duplicate "if not cvss_base" check that used to be
    # here: encoding a non-empty unicode string never yields an empty str.)
    if cvss_base:
        value = float(cvss_base)
        if value > 10.0 or value < 0.0:
            raise ValueError("Invalid CVSS base score: %s" % cvss_base)
        cvss_base = "%.1f" % value
    self.__cvss_base = cvss_base
    self.__cvss_vector = None
#--------------------------------------------------------------------------
@keep_newer
def cvss_score(self):
    """
    :returns: CVSS score, normalized as an "X.Y" string.
    :rtype: str | None
    """
    return self.__cvss_score

#--------------------------------------------------------------------------
@cvss_score.setter
def cvss_score(self, cvss_score):
    """
    .. note: Setting this property manually deletes the CVSS vector.

    :param cvss_score: CVSS score (0.0 to 10.0).
    :type cvss_score: str
    """
    if not cvss_score:
        cvss_score = None
    elif isinstance(cvss_score, unicode):
        cvss_score = cvss_score.encode("UTF-8")
    if cvss_score:
        # Validate the range and normalize to an "X.Y" string.
        value = float(cvss_score)
        if value > 10.0 or value < 0.0:
            raise ValueError("Invalid CVSS score: %s" % cvss_score)
        cvss_score = "%.1f" % value
    self.__cvss_score = cvss_score
    self.__cvss_vector = None
#--------------------------------------------------------------------------
@keep_newer
def cvss_vector(self):
    """
    :returns: CVSS vector.
    :rtype: str | None
    """
    return self.__cvss_vector

#--------------------------------------------------------------------------
@cvss_vector.setter
def cvss_vector(self, cvss_vector):
    """
    Setting a vector also recomputes level, cvss_base and cvss_score
    from it (via the CVSS helper class, defined elsewhere).

    :param cvss_vector: CVSS vector.
    :type cvss_vector: str
    """
    if not cvss_vector:
        cvss_vector = None
    elif isinstance(cvss_vector, unicode):
        cvss_vector = cvss_vector.encode("UTF-8")
    elif type(cvss_vector) is not str:
        raise TypeError(
            "Expected string, got %r instead" % type(cvss_vector))
    if cvss_vector:
        # Parse the vector and derive the related scoring properties.
        cvss = CVSS(cvss_vector)
        self.__level = cvss.level.lower()
        self.__cvss_base = cvss.base_score
        self.__cvss_score = cvss.score
        self.__cvss_vector = cvss.vector
    else:
        self.__cvss_vector = None
#--------------------------------------------------------------------------
@property
def taxonomies(self):
    """
    This alias concatenates all vulnerability IDs for all supported
    taxonomies into a single list.

    :returns: All vulnerability IDs for all taxonomies.
    :rtype: list(str)
    """
    # Flatten the per-taxonomy ID tuples into one list, in the same
    # order the taxonomies are iterated.
    return [
        vuln_id
        for vuln_type in TAXONOMY_NAMES
        for vuln_id in getattr(self, vuln_type)
    ]
#--------------------------------------------------------------------------
@merge_vuln_ids
def bid(self):
    """
    :returns: Bugtraq IDs.
    :rtype: tuple( str, ... )
    """
    return self.__bid

#--------------------------------------------------------------------------
@bid.setter
def bid(self, bid):
    """
    :param bid: Bugtraq IDs.
    :type bid: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__bid = sanitize_vuln_ids(bid)
#--------------------------------------------------------------------------
@merge_vuln_ids
def ca(self):
    """
    :returns: CERT Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__ca

#--------------------------------------------------------------------------
@ca.setter
def ca(self, ca):
    """
    :param ca: CERT Advisory IDs.
    :type ca: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__ca = sanitize_vuln_ids(ca)
#--------------------------------------------------------------------------
@merge_vuln_ids
def capec(self):
    """
    :returns: CAPEC IDs.
    :rtype: tuple( str, ... )
    """
    return self.__capec

#--------------------------------------------------------------------------
@capec.setter
def capec(self, capec):
    """
    :param capec: CAPEC IDs.
    :type capec: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__capec = sanitize_vuln_ids(capec)
#--------------------------------------------------------------------------
@merge_vuln_ids
def cisco(self):
    """
    :returns: Cisco Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__cisco

#--------------------------------------------------------------------------
@cisco.setter
def cisco(self, cisco):
    """
    :param cisco: Cisco Security Advisory IDs.
    :type cisco: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__cisco = sanitize_vuln_ids(cisco)
#--------------------------------------------------------------------------
@merge_vuln_ids
def cve(self):
    """
    :returns: CVE IDs.
    :rtype: tuple( str, ... )
    """
    return self.__cve

#--------------------------------------------------------------------------
@cve.setter
def cve(self, cve):
    """
    :param cve: CVE IDs.
    :type cve: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__cve = sanitize_vuln_ids(cve)
#--------------------------------------------------------------------------
@merge_vuln_ids
def cwe(self):
    """
    :returns: CWE IDs.
    :rtype: tuple( str, ... )
    """
    return self.__cwe

#--------------------------------------------------------------------------
@cwe.setter
def cwe(self, cwe):
    """
    :param cwe: CWE IDs.
    :type cwe: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__cwe = sanitize_vuln_ids(cwe)
#--------------------------------------------------------------------------
@merge_vuln_ids
def dsa(self):
    """
    :returns: Debian Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__dsa

#--------------------------------------------------------------------------
@dsa.setter
def dsa(self, dsa):
    """
    :param dsa: Debian Security Advisory IDs.
    :type dsa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__dsa = sanitize_vuln_ids(dsa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def edb(self):
    """
    :returns: ExploitDB IDs.
    :rtype: tuple( str, ... )
    """
    return self.__edb

#--------------------------------------------------------------------------
@edb.setter
def edb(self, edb):
    """
    :param edb: ExploitDB IDs.
    :type edb: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__edb = sanitize_vuln_ids(edb)
#--------------------------------------------------------------------------
@merge_vuln_ids
def glsa(self):
    """
    :returns: Gentoo Linux Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__glsa

#--------------------------------------------------------------------------
@glsa.setter
def glsa(self, glsa):
    """
    :param glsa: Gentoo Linux Security Advisory IDs.
    :type glsa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__glsa = sanitize_vuln_ids(glsa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def mdvsa(self):
    """
    :returns: Mandriva Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__mdvsa

#--------------------------------------------------------------------------
@mdvsa.setter
def mdvsa(self, mdvsa):
    """
    :param mdvsa: Mandriva Security Advisory IDs.
    :type mdvsa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__mdvsa = sanitize_vuln_ids(mdvsa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def ms(self):
    """
    :returns: Microsoft Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__ms

#--------------------------------------------------------------------------
@ms.setter
def ms(self, ms):
    """
    :param ms: Microsoft Advisory IDs.
    :type ms: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__ms = sanitize_vuln_ids(ms)
#--------------------------------------------------------------------------
@merge_vuln_ids
def mskb(self):
    """
    :returns: Microsoft Knowledge Base IDs.
    :rtype: tuple( str, ... )
    """
    return self.__mskb

#--------------------------------------------------------------------------
@mskb.setter
def mskb(self, mskb):
    """
    :param mskb: Microsoft Knowledge Base IDs.
    :type mskb: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__mskb = sanitize_vuln_ids(mskb)
#--------------------------------------------------------------------------
@merge_vuln_ids
def nessus(self):
    """
    :returns: Nessus Plugin IDs.
    :rtype: tuple( str, ... )
    """
    return self.__nessus

#--------------------------------------------------------------------------
@nessus.setter
def nessus(self, nessus):
    """
    :param nessus: Nessus Plugin IDs.
    :type nessus: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__nessus = sanitize_vuln_ids(nessus)
#--------------------------------------------------------------------------
@merge_vuln_ids
def osvdb(self):
    """
    :returns: OSVDB IDs.
    :rtype: tuple( str, ... )
    """
    return self.__osvdb

#--------------------------------------------------------------------------
@osvdb.setter
def osvdb(self, osvdb):
    """
    :param osvdb: OSVDB IDs.
    :type osvdb: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__osvdb = sanitize_vuln_ids(osvdb)
#--------------------------------------------------------------------------
@merge_vuln_ids
def rhsa(self):
    """
    :returns: RedHat Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__rhsa

#--------------------------------------------------------------------------
@rhsa.setter
def rhsa(self, rhsa):
    """
    :param rhsa: RedHat Security Advisory IDs.
    :type rhsa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__rhsa = sanitize_vuln_ids(rhsa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def sa(self):
    """
    :returns: Secunia Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__sa

#--------------------------------------------------------------------------
@sa.setter
def sa(self, sa):
    """
    :param sa: Secunia Advisory IDs.
    :type sa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__sa = sanitize_vuln_ids(sa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def sectrack(self):
    """
    :returns: Security Tracker IDs.
    :rtype: tuple( str, ... )
    """
    return self.__sectrack

#--------------------------------------------------------------------------
@sectrack.setter
def sectrack(self, sectrack):
    """
    :param sectrack: Security Tracker IDs.
    :type sectrack: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__sectrack = sanitize_vuln_ids(sectrack)
#--------------------------------------------------------------------------
@merge_vuln_ids
def usn(self):
    """
    :returns: Ubuntu Security Notice IDs.
    :rtype: tuple( str, ... )
    """
    return self.__usn

#--------------------------------------------------------------------------
@usn.setter
def usn(self, usn):
    """
    :param usn: Ubuntu Security Notice IDs.
    :type usn: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__usn = sanitize_vuln_ids(usn)
#--------------------------------------------------------------------------
@merge_vuln_ids
def vmsa(self):
    """
    :returns: VMWare Security Advisory IDs.
    :rtype: tuple( str, ... )
    """
    return self.__vmsa

#--------------------------------------------------------------------------
@vmsa.setter
def vmsa(self, vmsa):
    """
    :param vmsa: VMWare Security Advisory IDs.
    :type vmsa: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__vmsa = sanitize_vuln_ids(vmsa)
#--------------------------------------------------------------------------
@merge_vuln_ids
def vu(self):
    """
    :returns: Vulnerability Note IDs.
    :rtype: tuple( str, ... )
    """
    return self.__vu

#--------------------------------------------------------------------------
@vu.setter
def vu(self, vu):
    """
    :param vu: Vulnerability Note IDs.
    :type vu: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__vu = sanitize_vuln_ids(vu)
#--------------------------------------------------------------------------
@merge_vuln_ids
def xf(self):
    """
    :returns: ISS X-Force IDs.
    :rtype: tuple( str, ... )
    """
    return self.__xf

#--------------------------------------------------------------------------
@xf.setter
def xf(self, xf):
    """
    :param xf: ISS X-Force IDs.
    :type xf: tuple( str, ... )
    """
    # Cleanup/validation delegated to sanitize_vuln_ids() (defined elsewhere).
    self.__xf = sanitize_vuln_ids(xf)
#------------------------------------------------------------------------------
# Simple checks to make sure we're not missing any taxonomy names.
# These run once, at import time. The expression after each assert's comma
# is the failure *message*: the offending set difference is shown.
# NOTE: asserts are stripped when Python runs with -O.
assert set(TAXONOMY_NAMES.iterkeys()) == \
       set(_vuln_ref_regex.iterkeys()),\
       set(TAXONOMY_NAMES.iterkeys()).symmetric_difference(
           set(_vuln_ref_regex.iterkeys()))
assert len(set(TAXONOMY_NAMES.iterkeys()).difference(
       set(Vulnerability.DEFAULTS.iterkeys()))) == 0, \
       set(TAXONOMY_NAMES.iterkeys()).difference(
           set(Vulnerability.DEFAULTS.iterkeys()))
assert len(set(Vulnerability.DEFAULTS.iterkeys()).difference(
       set(dir(Vulnerability)))) == 0, \
       set(Vulnerability.DEFAULTS.iterkeys()).difference(set(dir(Vulnerability)))
#------------------------------------------------------------------------------
class UncategorizedVulnerability(Vulnerability):
    """
    Generic vulnerability.

    This is useful for plugins that for some reason can't categorize the
    vulnerabilities they find. Avoid using it whenever possible! If you must,
    make sure you set the title, description and solution texts accordingly,
    and *ALWAYS* use the custom_id property.
    """

    # Subtype tag for this kind of data object; presumably consumed by
    # the data layer to classify instances.
    data_subtype = "vulnerability/generic"
#------------------------------------------------------------------------------
class WebVulnerability(Vulnerability):
    """
    Base class for web vulnerabilities - more specifically, those whose target
    is an URL.

    Do not instance this class! Only use it for subclassing.
    """

    data_subtype = "vulnerability/abstract"
    TARGET_CLASS = URL

    #--------------------------------------------------------------------------
    def __init__(self, target, **kwargs):
        """
        :param target: URL where the vulnerability was found.
        :type target: URL
        """
        # Sanitize the "target" argument.
        target = self._sanitize_url(self, target)

        # Save the raw URL.
        self.__url = target.url

        # Parent constructor.
        super(WebVulnerability, self).__init__(target, **kwargs)

    #--------------------------------------------------------------------------
    @staticmethod
    def _sanitize_url(self, url, stacklevel = 2):
        """
        Coerce *url* into an URL object, emitting a RuntimeWarning when a
        conversion was needed.

        Note: although declared @staticmethod, the instance is passed in
        explicitly as the first argument (callers invoke it as
        self._sanitize_url(self, url)); it is only used to name the
        concrete class in the warning messages.

        :param url: Value to sanitize: an URL-like object, a string, or
            an object exposing a ".url" attribute.
        :param stacklevel: Extra stack level for the warning location.
        :returns: The sanitized URL object.
        :raises TypeError: When *url* cannot be converted.
        """
        if (
            not isinstance(url, URL) and
            not isinstance(url, FolderURL) and
            not isinstance(url, BaseURL)
        ):
            if isinstance(url, basestring):
                msg = "You should pass an URL object" \
                      " to %s instead of a string!"
                msg %= self.__class__.__name__
                url = URL(str(url))
                LocalDataCache.on_autogeneration(url)
            elif hasattr(url, "url"):
                try:
                    t = url.__class__.__name__
                except Exception:
                    t = str(type(url))
                msg = "You should pass an URL object to %s instead of %s!"
                msg %= (self.__class__.__name__, t)
                url = url.url
                if isinstance(url, basestring):
                    url = URL(str(url))
                    LocalDataCache.on_autogeneration(url)
                elif not isinstance(url, URL):
                    raise TypeError("Expected URL, got %r instead" % t)
            else:
                # BUGFIX: values that are neither strings nor URL-like used
                # to fall through to warn() with "msg" unbound, raising a
                # NameError; now they fail with the intended TypeError.
                raise TypeError("Expected URL, got %r instead" % type(url))
            warn(msg, RuntimeWarning, stacklevel=stacklevel+1)
        return url

    #--------------------------------------------------------------------------
    def __str__(self):
        return self.url

    #--------------------------------------------------------------------------
    def __repr__(self):
        return "<%s url=%r plugin_id=%r level=%r desc=%r>" % (
            self.__class__.__name__,
            self.url,
            self.plugin_id,
            self.level,
            self.description,
        )

    #--------------------------------------------------------------------------
    @property
    def url(self):
        """
        :return: Raw URL where the vulnerability was found.
        :rtype: str
        """
        return self.__url
| gpl-2.0 |
thresholdsoftware/asylum | openerp/addons/mrp/report/mrp_report.py | 56 | 3807 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class report_workcenter_load(osv.osv):
    """
    Read-only reporting model backed by a PostgreSQL view that aggregates
    planned production hours and cycles per work center and per day.
    """
    _name="report.workcenter.load"
    _description="Work Center Load"
    # Backed by a SQL view (created in init() below): no table is
    # auto-created and no create/write audit columns are kept.
    _auto = False
    _log_access = False
    _columns = {
        'name': fields.char('Week', size=64, required=True),
        'workcenter_id': fields.many2one('mrp.workcenter', 'Work Center', required=True),
        'cycle': fields.float('Number of Cycles'),
        'hour': fields.float('Number of Hours'),
    }

    def init(self, cr):
        """
        Create or replace the SQL view backing this model.

        :param cr: database cursor
        """
        # NOTE(review): the 'name' column is labeled "Week" but the view
        # groups by day ('YYYY:mm:dd') -- confirm which is intended.
        cr.execute("""
            create or replace view report_workcenter_load as (
                SELECT
                    min(wl.id) as id,
                    to_char(p.date_planned,'YYYY:mm:dd') as name,
                    SUM(wl.hour) AS hour,
                    SUM(wl.cycle) AS cycle,
                    wl.workcenter_id as workcenter_id
                FROM
                    mrp_production_workcenter_line wl
                    LEFT JOIN mrp_production p
                        ON p.id = wl.production_id
                GROUP BY
                    wl.workcenter_id,
                    to_char(p.date_planned,'YYYY:mm:dd')
            )""")

# Old-style OpenERP API: instantiating the class registers the model.
report_workcenter_load()
class report_mrp_inout(osv.osv):
    """
    Read-only reporting model backed by a PostgreSQL view: stock value
    variation per ISO week, computed from pending stock moves (moves in
    state waiting/confirmed/assigned), valued at product standard price.
    """
    _name="report.mrp.inout"
    _description="Stock value variation"
    # Backed by a SQL view (created in init() below): no table is
    # auto-created and no create/write audit columns are kept.
    _auto = False
    _log_access = False
    _rec_name = 'date'
    _columns = {
        'date': fields.char('Week', size=64, required=True),
        'value': fields.float('Stock value', required=True, digits=(16,2)),
    }

    def init(self, cr):
        """
        Create or replace the SQL view backing this model.

        :param cr: database cursor
        """
        # Moves into internal locations add value, moves out of internal
        # locations subtract it; grouped by ISO week ('YYYY:IW').
        cr.execute("""
            create or replace view report_mrp_inout as (
                select
                    min(sm.id) as id,
                    to_char(sm.date,'YYYY:IW') as date,
                    sum(case when (sl.usage='internal') then
                        pt.standard_price * sm.product_qty
                    else
                        0.0
                    end - case when (sl2.usage='internal') then
                        pt.standard_price * sm.product_qty
                    else
                        0.0
                    end) as value
                from
                    stock_move sm
                    left join product_product pp
                        on (pp.id = sm.product_id)
                    left join product_template pt
                        on (pt.id = pp.product_tmpl_id)
                    left join stock_location sl
                        on ( sl.id = sm.location_id)
                    left join stock_location sl2
                        on ( sl2.id = sm.location_dest_id)
                where
                    sm.state in ('waiting','confirmed','assigned')
                group by
                    to_char(sm.date,'YYYY:IW')
            )""")

# Old-style OpenERP API: instantiating the class registers the model.
report_mrp_inout()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sql-sith/cdc2015 | battleship_3.py | 1 | 2381 | '''
What could be better:
- what if the guesses are not valid ints?
- let them know if it's a near miss (a la Sub Search)?
- variable-sized grid?
- variable number of ships?
- show location of ship when player loses?
'''
from random import randint
from reportlab.lib.validators import isInt
def getIntFromUser(prompt, _min, _max):
    """
    Keep prompting until the user types an integer within [_min, _max].

    :param prompt: text shown to the user.
    :param _min: smallest accepted value (inclusive).
    :param _max: largest accepted value (inclusive).
    :returns: the validated integer.
    """
    while True:
        the_string = raw_input(prompt)
        # EAFP: let int() decide what a valid integer literal is, instead
        # of depending on reportlab's isInt() validator for this.
        try:
            the_int = int(the_string)
        except ValueError:
            print('{} is not an integer. Please try again.'.format(the_string))
            continue
        if _min <= the_int <= _max:
            return the_int
        print('Error - the integer must be between {} and {}!'
              .format(_min, _max))
def print_board(_board):
    """Print the grid: one line per row, cells separated by single spaces."""
    for grid_row in _board:
        line = " ".join(grid_row)
        print(line)
def random_row(_board):
    """Pick a random row index valid for the given board."""
    max_row = len(_board) - 1
    return randint(0, max_row)
def random_col(_board):
    """Pick a random column index valid for the given board."""
    max_col = len(_board[0]) - 1
    return randint(0, max_col)
# Ask the player for the board size and the number of allowed turns.
_rows = getIntFromUser("How many rows? ", 1, 10)
_cols = getIntFromUser("How many columns? ", 1, 15)
_turns = getIntFromUser("How many turns? ", 1, 20)
_board = []
_debug = True  # when True, the ship location is revealed (for testing)
for x in range(_rows):
    _board.append(["O"] * _cols)
print("Let's play Battleship!")
print_board(_board)

# Hide the ship on a random cell.
ship_row = random_row(_board)
ship_col = random_col(_board)
if _debug:
    print("Ship location: {0}.{1}"
          .format(ship_row, ship_col))

# Everything from here on should go in your for loop!
# Be sure to indent four spaces!
for turn in range(_turns):
    print("")
    print("Turn {0}\n".format(turn + 1))
    # NOTE(review): a non-numeric guess raises ValueError here -- the
    # module docstring already lists guess validation as a TODO.
    _guess_row = int(raw_input("Guess Row: "))
    _guess_col = int(raw_input("Guess Col: "))
    if _guess_row == ship_row and _guess_col == ship_col:
        print("Congratulations! You sunk my battleship!")
        break
    else:
        # Off-board, repeated and missed guesses are reported separately.
        if ((_guess_row < 0 or _guess_row > _rows - 1) or
                (_guess_col < 0 or _guess_col > _cols - 1)):
            print("Oops, that's not even in the ocean.")
        elif(_board[_guess_row][_guess_col] == "X"):
            print("You guessed that one already.")
        else:
            print("You missed my battleship!")
            _board[_guess_row][_guess_col] = "X"
        if turn == _turns - 1:
            print("Game Over")
    print("")
    print_board(_board)
| gpl-3.0 |
dougn/HalfCaff | halfcaff/power.py | 1 | 3449 | import ctypes
import CoreFoundation
import objc
import subprocess
import time
## from http://benden.us/journal/2014/OS-X-Power-Management-No-Sleep-Howto/
## http://alistra.ghost.io/2015/03/15/making-your-os-x-not-sleep-while-running-scripts/
def SetUpIOFramework():
    """
    Load the macOS IOKit framework via ctypes and declare the argument
    types of the two IOPMLib power-assertion functions used here.

    :returns: the loaded IOKit library handle (ctypes CDLL).
    """
    # load the IOKit library
    framework = ctypes.cdll.LoadLibrary(
        '/System/Library/Frameworks/IOKit.framework/IOKit')

    # declare parameters as described in IOPMLib.h
    framework.IOPMAssertionCreateWithName.argtypes = [
        ctypes.c_void_p,  # CFStringRef
        ctypes.c_uint32,  # IOPMAssertionLevel
        ctypes.c_void_p,  # CFStringRef
        ctypes.POINTER(ctypes.c_uint32)]  # IOPMAssertionID
    framework.IOPMAssertionRelease.argtypes = [
        ctypes.c_uint32]  # IOPMAssertionID

    return framework
def StringToCFString(string):
    """
    Convert a Python string into a CFStringRef pointer usable by IOKit.

    :param string: ASCII string to convert.
    :returns: objc id wrapping the created CFString.
    """
    # we'll need to convert our strings before use
    return objc.pyobjc_id(
        CoreFoundation.CFStringCreateWithCString(None, string,
            CoreFoundation.kCFStringEncodingASCII).nsstring())
def AssertionCreateWithName(framework, a_type,
                            a_level, a_reason):
    """
    Create an IOKit power assertion.

    :param framework: IOKit handle returned by SetUpIOFramework().
    :param a_type: assertion type, e.g. 'NoIdleSleepAssertion'.
    :param a_level: assertion level (255 corresponds to
        kIOPMAssertionLevelOn).
    :param a_reason: human-readable reason, shown by `pmset -g assertions`.
    :returns: (error code, assertion id) -- 0 means success; the id is
        needed later to release the assertion.
    """
    # this method will create an assertion using the IOKit library
    # several parameters
    a_id = ctypes.c_uint32(0)
    a_type = StringToCFString(a_type)
    a_reason = StringToCFString(a_reason)
    a_error = framework.IOPMAssertionCreateWithName(
        a_type, a_level, a_reason, ctypes.byref(a_id))

    # we get back a 0 or stderr, along with a unique c_uint
    # representing the assertion ID so we can release it later
    return a_error, a_id
def AssertionRelease(framework, assertion_id):
    """
    Release a previously created power assertion.

    :param framework: IOKit handle returned by SetUpIOFramework().
    :param assertion_id: id returned by AssertionCreateWithName().
    :returns: 0 on success, an IOKit error code otherwise.
    """
    # releasing the assertion is easy, and also returns a 0 on
    # success, or stderr otherwise
    return framework.IOPMAssertionRelease(assertion_id)
class Control(object):
    """
    Owns at most one IOKit power assertion at a time: caffeinate()
    acquires it, decaffeinate() releases it.
    """

    def __init__(self, no_idle='NoIdleSleepAssertion',
                 reason='HalfCaff - VPN live connection'):
        self.no_idle = no_idle
        self.reason = reason
        self.framework = SetUpIOFramework()
        # Active assertion id; None while no assertion is held.
        self.a_id = None

    def caffeinate(self, no_idle=None, reason=None):
        """
        Create the power assertion unless one is already active.

        :param no_idle: assertion type override (defaults to self.no_idle).
        :param reason: reason override (defaults to self.reason).
        :returns: the IOKit status code, or None if already caffeinated.
        """
        if self.a_id:
            return
        ret, self.a_id = AssertionCreateWithName(
            self.framework,
            no_idle or self.no_idle,
            255,
            reason or self.reason)
        return ret

    def decaffeinate(self):
        """Release the active power assertion, if any."""
        if not self.a_id:
            return
        AssertionRelease(self.framework, self.a_id)
        self.a_id = None
def main():
    """
    Manual smoke test (Python 2, macOS only): hold a no-idle power
    assertion for a few seconds and show the system assertion table
    before and after releasing it.
    """
    # let's create a no idle assertion (held for a few seconds below)
    no_idle = 'NoIdleSleepAssertion'
    reason = 'Test of Pythonic power assertions'

    # first, we'll need the IOKit framework
    framework = SetUpIOFramework()

    # next, create the assertion and save the ID!
    ret, a_id = AssertionCreateWithName(framework, no_idle, 255, reason)
    print '\n\nCreating power assertion: status %s, id %s\n\n' % (ret, a_id)

    # subprocess a call to pmset to verify the assertion worked
    subprocess.call(['pmset', '-g', 'assertions'])
    time.sleep(5)

    # finally, release the assertion of the ID we saved earlier
    AssertionRelease(framework, a_id)
    print '\n\nReleasing power assertion: id %s\n\n' % a_id

    # verify the assertion has been removed
    subprocess.call(['pmset', '-g', 'assertions'])
if __name__ == '__main__':
main() | mit |
benthomasson/behave | bin/toxcmd3.py | 17 | 9591 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
Provides a command container for additional tox commands, used in "tox.ini".
COMMANDS:
* copytree
* copy
* py2to3
REQUIRES:
* argparse
"""
from glob import glob
import argparse
import inspect
import os.path
import shutil
import sys
import collections
__author__ = "Jens Engel"
__copyright__ = "(c) 2013 by Jens Engel"
__license__ = "BSD"
# -----------------------------------------------------------------------------
# CONSTANTS:
# -----------------------------------------------------------------------------
VERSION = "0.1.0"
FORMATTER_CLASS = argparse.RawDescriptionHelpFormatter
# -----------------------------------------------------------------------------
# SUBCOMMAND: copytree
# -----------------------------------------------------------------------------
def command_copytree(args):
    """
    Copy one or more source directory(s) below a destination directory.
    Parts of the destination directory path are created if needed.
    An already existing copy below the destination is removed first.

    Similar to the UNIX command: 'cp -R srcdir destdir'

    :param args: argparse namespace with "srcdirs" and "destdir".
    :returns: 0 on success.
    """
    for source_dir in args.srcdirs:
        target = os.path.normpath(
            os.path.join(args.destdir, os.path.basename(source_dir)))
        if os.path.exists(target):
            shutil.rmtree(target)
        sys.stdout.write("copytree: %s => %s\n" % (source_dir, target))
        shutil.copytree(source_dir, target)
    return 0
def setup_parser_copytree(parser):
    """Register the command-line arguments of the "copytree" subcommand."""
    parser.add_argument("srcdirs", nargs="+", help="Source directory(s)")
    parser.add_argument("destdir", help="Destination directory")

# Metadata attributes, presumably consumed by the command discovery and
# help machinery further down in this file.
command_copytree.usage = "%(prog)s srcdir... destdir"
command_copytree.short = "Copy source dir(s) below a destination directory."
command_copytree.setup_parser = setup_parser_copytree
# -----------------------------------------------------------------------------
# SUBCOMMAND: copy
# -----------------------------------------------------------------------------
def command_copy(args):
    """
    Copy one or more source-files(s) to a destpath (destfile or destdir).

    Destdir mode is used if:
      * More than one srcfile is provided
      * Last parameter ends with a slash ("/").
      * Last parameter is an existing directory

    Destination directory path is created if needed. In destfile mode the
    single source file is copied (and renamed) to the exact destination
    path.

    Similar to the UNIX command: 'cp srcfile... destpath'

    :param args: argparse namespace with "sources" and "destpath".
    :returns: 0 on success.
    :raises ValueError: in destfile mode when not exactly one source
        file was matched.
    """
    sources = args.sources
    destpath = args.destpath

    # Collect the actual files: expand glob patterns, keep plain files.
    source_files = []
    for file_ in sources:
        if "*" in file_:
            source_files.extend(glob(file_))
        elif os.path.isfile(file_):
            source_files.append(file_)

    destdir_mode = (destpath.endswith("/") or os.path.isdir(destpath)
                    or len(sources) > 1)
    if destdir_mode:
        # -- DESTDIR-MODE: Last argument is a directory.
        destdir = destpath
    else:
        # -- DESTFILE-MODE: Copy (and rename) one file.
        # Was an assert before; raise a real error so it survives -O.
        if len(source_files) != 1:
            raise ValueError("Expected exactly one source file, got %d"
                             % len(source_files))
        destdir = os.path.dirname(destpath)

    # Create the destination directory if needed. An empty destdir means
    # the current directory and must not be passed to makedirs().
    if destdir and not os.path.isdir(destdir):
        sys.stdout.write("copy: Create dir %s\n" % destdir)
        os.makedirs(destdir)

    # -- WORK-HORSE: Copy one or more files to destpath.
    for source in source_files:
        if destdir_mode:
            destname = os.path.join(destdir, os.path.basename(source))
        else:
            # BUGFIX: the file is now really renamed to destpath; the old
            # code always copied it under its original basename, so the
            # rename promised by the docstring never happened.
            destname = destpath
        sys.stdout.write("copy: %s => %s\n" % (source, destname))
        shutil.copy(source, destname)
    return 0
def setup_parser_copy(parser):
    """Register the command-line arguments of the "copy" subcommand."""
    parser.add_argument("sources", nargs="+", help="Source files.")
    parser.add_argument("destpath", help="Destination path")

# Metadata attributes, presumably consumed by the command discovery and
# help machinery further down in this file.
command_copy.usage = "%(prog)s sources... destpath"
command_copy.short = "Copy one or more source files to a destinition."
command_copy.setup_parser = setup_parser_copy
# -----------------------------------------------------------------------------
# SUBCOMMAND: mkdir
# -----------------------------------------------------------------------------
def command_mkdir(args):
    """
    Create a non-existing directory (or more ...).
    If the directory exists, the step is skipped.

    Similar to the UNIX command: 'mkdir -p dir'

    :param args: argparse namespace with "dirs".
    :returns: number of errors (paths that exist but are not directories).
    """
    errors = 0
    for path in args.dirs:
        if not os.path.exists(path):
            # -- NORMAL CASE: Directory does not exist yet.
            sys.stdout.write("mkdir: %s\n" % path)
            os.makedirs(path)
            continue
        if not os.path.isdir(path):
            # -- SANITY CHECK: path exists, but as a file...
            sys.stdout.write("mkdir: %s\n" % path)
            sys.stdout.write("ERROR: Exists already, but as file...\n")
            errors += 1
        # Existing directories are silently skipped.
    return errors
def setup_parser_mkdir(parser):
    """Register the positional 'dirs' argument for the 'mkdir' subcommand."""
    parser.add_argument("dirs", nargs="+", help="Directory(s)")
# -- COMMAND METADATA: consumed when building the subcommand's help/parser.
command_mkdir.usage = "%(prog)s dir..."
command_mkdir.short = "Create non-existing directory (or more...)."
command_mkdir.setup_parser = setup_parser_mkdir
# -----------------------------------------------------------------------------
# SUBCOMMAND: py2to3
# -----------------------------------------------------------------------------
# Guard flag for the extra 2to3 pass-through options below (kept enabled).
command_py2to4_work_around3k = True
def command_py2to3(args):
    """
    Apply '2to3' tool (Python2 to Python3 conversion tool) to Python sources.
    """
    # NOTE(review): lib2to3 is deprecated since Python 3.9 and removed in
    # 3.13 -- this subcommand only works on interpreters that still ship it.
    from lib2to3.main import main
    args2 = []
    if command_py2to4_work_around3k:
        # Translate our parsed options back into 2to3 command-line flags.
        if args.no_diffs:
            args2.append("--no-diffs")
        if args.write:
            args2.append("-w")
        if args.nobackups:
            args2.append("-n")
    args2.extend(args.sources)
    # main() returns the refactoring tool's exit status; propagate it.
    sys.exit(main("lib2to3.fixes", args=args2))
def setup_parser4py2to3(parser):
    """Register options for the '2to3' subcommand, mirroring the 2to3 tool."""
    if command_py2to4_work_around3k:
        parser.add_argument("--no-diffs", action="store_true",
                        help="Don't show diffs of the refactoring")
        parser.add_argument("-w", "--write", action="store_true",
                        help="Write back modified files")
        parser.add_argument("-n", "--nobackups", action="store_true", default=False,
                        help="Don't write backups for modified files.")
    parser.add_argument("sources", nargs="+", help="Source files.")
# -- COMMAND METADATA: 'name' overrides the derived name ("py2to3" -> "2to3").
command_py2to3.name = "2to3"
command_py2to3.usage = "%(prog)s sources..."
command_py2to3.short = "Apply python's 2to3 tool to Python sources."
command_py2to3.setup_parser = setup_parser4py2to3
# -----------------------------------------------------------------------------
# COMMAND HELPERS/UTILS:
# -----------------------------------------------------------------------------
def discover_commands():
    """Collect all module-level ``command_*`` functions as Command objects.

    The command name is the function name minus the ``command_`` prefix,
    unless the function carries an explicit ``name`` attribute.
    """
    commands = []
    for name, func in inspect.getmembers(inspect.getmodule(toxcmd_main)):
        if name.startswith("__"):
            continue
        # BUGFIX: collections.Callable was removed in Python 3.10 (it moved
        # to collections.abc in 3.3); the builtin callable() is the portable
        # and equivalent test here (it still rejects non-callable module
        # attributes such as the command_py2to4_work_around3k flag).
        if name.startswith("command_") and callable(func):
            command_name0 = name.replace("command_", "")
            command_name = getattr(func, "name", command_name0)
            commands.append(Command(command_name, func))
    return commands
class Command(object):
    """Adapter binding a ``command_*`` function to an argparse sub-parser.

    Reads the optional ``usage``, ``short`` and ``setup_parser`` attributes
    of the wrapped function to build the subcommand's help and arguments.
    """

    def __init__(self, name, func):
        assert isinstance(name, str)
        # BUGFIX: collections.Callable was removed in Python 3.10; use the
        # builtin callable() instead.
        assert callable(func)
        self.name = name
        self.func = func
        self.parser = None

    def setup_parser(self, command_parser):
        """Let the command register its arguments; accept anything otherwise."""
        setup_parser = getattr(self.func, "setup_parser", None)
        if setup_parser and callable(setup_parser):
            setup_parser(command_parser)
        else:
            # Fallback: swallow all positional args so parsing never fails.
            command_parser.add_argument("args", nargs="*")

    @property
    def usage(self):
        """Usage string provided by the command function, or None."""
        usage = getattr(self.func, "usage", None)
        return usage

    @property
    def short_description(self):
        """One-line description for the command overview (may be empty)."""
        short_description = getattr(self.func, "short", "")
        return short_description

    @property
    def description(self):
        """Long description taken from the command function's docstring."""
        return inspect.getdoc(self.func)

    def __call__(self, args):
        return self.func(args)
# -----------------------------------------------------------------------------
# MAIN-COMMAND:
# -----------------------------------------------------------------------------
def toxcmd_main(args=None):
    """Command util with subcommands for tox environments."""
    # NOTE(review): 'usage' is assigned but never used.
    usage = "USAGE: %(prog)s [OPTIONS] COMMAND args..."
    if args is None:
        args = sys.argv[1:]
    # -- STEP: Build command-line parser.
    # FORMATTER_CLASS and VERSION are module-level globals defined elsewhere
    # in this file.
    parser = argparse.ArgumentParser(description=inspect.getdoc(toxcmd_main),
                                     formatter_class=FORMATTER_CLASS)
    common_parser = parser.add_argument_group("Common options")
    common_parser.add_argument("--version", action="version", version=VERSION)
    subparsers = parser.add_subparsers(help="commands")
    for command in discover_commands():
        # Each discovered command contributes its own sub-parser + arguments.
        command_parser = subparsers.add_parser(command.name,
                                               usage=command.usage,
                                               description=command.description,
                                               help=command.short_description,
                                               formatter_class=FORMATTER_CLASS)
        command_parser.set_defaults(func=command)
        command.setup_parser(command_parser)
        command.parser = command_parser
    # -- STEP: Process command-line and run command.
    # NOTE(review): on Python 3, running without a subcommand leaves 'func'
    # unset and raises AttributeError below -- confirm intended behavior.
    options = parser.parse_args(args)
    command_function = options.func
    return command_function(options)
# -----------------------------------------------------------------------------
# MAIN:
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    # Script entry point: exit status is the selected subcommand's return.
    sys.exit(toxcmd_main())
| bsd-2-clause |
azoft-dev-team/imagrium | env/Lib/test/test_userstring.py | 88 | 4850 | #!/usr/bin/env python
# UserString is a wrapper around the native builtin string type.
# UserString instances should behave similar to builtin string objects.
import string
from test import test_support, string_tests
from UserString import UserString, MutableString
import warnings
class UserStringTest(
    string_tests.CommonTest,
    string_tests.MixinStrUnicodeUserStringTest,
    string_tests.MixinStrStringUserStringTest,
    string_tests.MixinStrUserStringTest
    ):
    # Run the shared string-test mixins against the UserString wrapper type.
    type2test = UserString
    # Overwrite the three testing methods, because UserString
    # can't cope with arguments propagated to UserString
    # (and we don't test with subclasses)
    def checkequal(self, result, object, methodname, *args):
        """Assert that object.methodname(*args) equals the expected result."""
        result = self.fixtype(result)
        object = self.fixtype(object)
        # we don't fix the arguments, because UserString can't cope with it
        realresult = getattr(object, methodname)(*args)
        self.assertEqual(
            result,
            realresult
        )
    def checkraises(self, exc, object, methodname, *args):
        """Assert that object.methodname(*args) raises the given exception."""
        object = self.fixtype(object)
        # we don't fix the arguments, because UserString can't cope with it
        self.assertRaises(
            exc,
            getattr(object, methodname),
            *args
        )
    def checkcall(self, object, methodname, *args):
        """Call object.methodname(*args), checking only that it doesn't raise."""
        object = self.fixtype(object)
        # we don't fix the arguments, because UserString can't cope with it
        getattr(object, methodname)(*args)
class MutableStringTest(UserStringTest):
    """Run the UserString suite against MutableString, plus mutation tests."""
    type2test = MutableString
    # MutableString objects are not hashable => deactivate the inherited test
    def test_hash(self):
        pass
    def test_setitem(self):
        # Item assignment replaces one character; out-of-range indexes raise.
        s = self.type2test("foo")
        self.assertRaises(IndexError, s.__setitem__, -4, "bar")
        self.assertRaises(IndexError, s.__setitem__, 3, "bar")
        s[-1] = "bar"
        self.assertEqual(s, "fobar")
        s[0] = "bar"
        self.assertEqual(s, "barobar")
    def test_delitem(self):
        # Item deletion removes one character; out-of-range indexes raise.
        s = self.type2test("foo")
        self.assertRaises(IndexError, s.__delitem__, -4)
        self.assertRaises(IndexError, s.__delitem__, 3)
        del s[-1]
        self.assertEqual(s, "fo")
        del s[0]
        self.assertEqual(s, "o")
        del s[0]
        self.assertEqual(s, "")
    def test_setslice(self):
        # Slice assignment accepts str, UserString, and arbitrary objects
        # (non-strings are stringified, e.g. 42 -> "42").
        s = self.type2test("foo")
        s[:] = "bar"
        self.assertEqual(s, "bar")
        s[1:2] = "foo"
        self.assertEqual(s, "bfoor")
        s[1:-1] = UserString("a")
        self.assertEqual(s, "bar")
        s[0:10] = 42
        self.assertEqual(s, "42")
    def test_delslice(self):
        # Slice deletion clips out-of-range bounds like regular sequences.
        s = self.type2test("foobar")
        del s[3:10]
        self.assertEqual(s, "foo")
        del s[-1:10]
        self.assertEqual(s, "fo")
    def test_extended_set_del_slice(self):
        # Compare extended-slice set/del against the same ops on a list.
        indices = (0, None, 1, 3, 19, 100, -1, -2, -31, -100)
        orig = string.ascii_letters + string.digits
        for start in indices:
            for stop in indices:
                # Use indices[1:] when MutableString can handle real
                # extended slices
                for step in (None, 1, -1):
                    s = self.type2test(orig)
                    L = list(orig)
                    # Make sure we have a slice of exactly the right length,
                    # but with (hopefully) different data.
                    data = L[start:stop:step]
                    data.reverse()
                    L[start:stop:step] = data
                    s[start:stop:step] = "".join(data)
                    self.assertEqual(s, "".join(L))
                    del L[start:stop:step]
                    del s[start:stop:step]
                    self.assertEqual(s, "".join(L))
    def test_immutable(self):
        # immutable() returns an equal, plain (hashable) UserString copy.
        s = self.type2test("foobar")
        s2 = s.immutable()
        self.assertEqual(s, s2)
        self.assertIsInstance(s2, UserString)
    def test_iadd(self):
        # In-place concatenation accepts str, UserString, and other objects.
        s = self.type2test("foo")
        s += "bar"
        self.assertEqual(s, "foobar")
        s += UserString("baz")
        self.assertEqual(s, "foobarbaz")
        s += 42
        self.assertEqual(s, "foobarbaz42")
    def test_imul(self):
        # In-place repetition; non-positive factors empty the string.
        s = self.type2test("foo")
        s *= 1
        self.assertEqual(s, "foo")
        s *= 2
        self.assertEqual(s, "foofoo")
        s *= -1
        self.assertEqual(s, "")
def test_main():
    """Run both test cases, silencing MutableString deprecation warnings."""
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", ".*MutableString has been removed",
                                DeprecationWarning)
        warnings.filterwarnings("ignore",
                                ".*__(get|set|del)slice__ has been removed",
                                DeprecationWarning)
        test_support.run_unittest(UserStringTest, MutableStringTest)
if __name__ == "__main__":
    test_main()
| mit |
supersven/intellij-community | python/lib/Lib/encodings/iso8859_6.py | 593 | 11089 | """ Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec mapping through the module-level charmap tables."""
    def encode(self, input, errors='strict'):
        encoded = codecs.charmap_encode(input, errors, encoding_table)
        return encoded
    def decode(self, input, errors='strict'):
        decoded = codecs.charmap_decode(input, errors, decoding_table)
        return decoded
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder: charmap-encode each chunk, dropping the length."""
    def encode(self, input, final=False):
        data, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder: charmap-decode each chunk, dropping the length."""
    def decode(self, input, final=False):
        text, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return text
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for iso8859-6; encoding comes from the Codec mixin."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for iso8859-6; decoding comes from the Codec mixin."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record consumed by the codecs registry."""
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-6',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u060c' # 0xAC -> ARABIC COMMA
u'\xad' # 0xAD -> SOFT HYPHEN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061b' # 0xBB -> ARABIC SEMICOLON
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061f' # 0xBF -> ARABIC QUESTION MARK
u'\ufffe'
u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
u'\u0627' # 0xC7 -> ARABIC LETTER ALEF
u'\u0628' # 0xC8 -> ARABIC LETTER BEH
u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
u'\u062a' # 0xCA -> ARABIC LETTER TEH
u'\u062b' # 0xCB -> ARABIC LETTER THEH
u'\u062c' # 0xCC -> ARABIC LETTER JEEM
u'\u062d' # 0xCD -> ARABIC LETTER HAH
u'\u062e' # 0xCE -> ARABIC LETTER KHAH
u'\u062f' # 0xCF -> ARABIC LETTER DAL
u'\u0630' # 0xD0 -> ARABIC LETTER THAL
u'\u0631' # 0xD1 -> ARABIC LETTER REH
u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
u'\u0633' # 0xD3 -> ARABIC LETTER SEEN
u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
u'\u0635' # 0xD5 -> ARABIC LETTER SAD
u'\u0636' # 0xD6 -> ARABIC LETTER DAD
u'\u0637' # 0xD7 -> ARABIC LETTER TAH
u'\u0638' # 0xD8 -> ARABIC LETTER ZAH
u'\u0639' # 0xD9 -> ARABIC LETTER AIN
u'\u063a' # 0xDA -> ARABIC LETTER GHAIN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0640' # 0xE0 -> ARABIC TATWEEL
u'\u0641' # 0xE1 -> ARABIC LETTER FEH
u'\u0642' # 0xE2 -> ARABIC LETTER QAF
u'\u0643' # 0xE3 -> ARABIC LETTER KAF
u'\u0644' # 0xE4 -> ARABIC LETTER LAM
u'\u0645' # 0xE5 -> ARABIC LETTER MEEM
u'\u0646' # 0xE6 -> ARABIC LETTER NOON
u'\u0647' # 0xE7 -> ARABIC LETTER HEH
u'\u0648' # 0xE8 -> ARABIC LETTER WAW
u'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA
u'\u064a' # 0xEA -> ARABIC LETTER YEH
u'\u064b' # 0xEB -> ARABIC FATHATAN
u'\u064c' # 0xEC -> ARABIC DAMMATAN
u'\u064d' # 0xED -> ARABIC KASRATAN
u'\u064e' # 0xEE -> ARABIC FATHA
u'\u064f' # 0xEF -> ARABIC DAMMA
u'\u0650' # 0xF0 -> ARABIC KASRA
u'\u0651' # 0xF1 -> ARABIC SHADDA
u'\u0652' # 0xF2 -> ARABIC SUKUN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
# Inverse of decoding_table, built once at import: code point -> byte value.
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
drawks/ansible | lib/ansible/modules/storage/zfs/zfs_delegate_admin.py | 44 | 9524 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Nate Coraor <nate@coraor.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = r'''
---
module: zfs_delegate_admin
short_description: Manage ZFS delegated administration (user admin privileges)
description:
- Manages ZFS file system delegated administration permissions, which allow unprivileged users to perform ZFS
operations normally restricted to the superuser.
- See the C(zfs allow) section of C(zfs(1M)) for detailed explanations of options.
- This module attempts to adhere to the behavior of the command line tool as much as possible.
requirements:
- "A ZFS/OpenZFS implementation that supports delegation with `zfs allow`, including: Solaris >= 10, illumos (all
versions), FreeBSD >= 8.0R, ZFS on Linux >= 0.7.0."
version_added: '2.8'
options:
name:
description:
- File system or volume name e.g. C(rpool/myfs).
required: true
type: str
state:
description:
- Whether to allow (C(present)), or unallow (C(absent)) a permission.
- When set to C(present), at least one "entity" param of I(users), I(groups), or I(everyone) are required.
- When set to C(absent), removes permissions from the specified entities, or removes all permissions if no entity params are specified.
required: true
choices: [ absent, present ]
default: present
users:
description:
- List of users to whom permission(s) should be granted.
type: list
groups:
description:
- List of groups to whom permission(s) should be granted.
type: list
everyone:
description:
- Apply permissions to everyone.
type: bool
default: no
permissions:
description:
- The list of permission(s) to delegate (required if C(state) is C(present)).
type: list
choices: [ allow, clone, create, destroy, mount, promote, readonly, receive, rename, rollback, send, share, snapshot, unallow ]
local:
description:
- Apply permissions to C(name) locally (C(zfs allow -l)).
type: bool
descendents:
description:
- Apply permissions to C(name)'s descendents (C(zfs allow -d)).
type: bool
recursive:
description:
- Unallow permissions recursively (ignored when C(state) is C(present)).
type: bool
default: no
author:
- Nate Coraor (@natefoo)
'''
# BUGFIX: the last example previously put the module under its own list item
# ("- zfs_delegate_admin:") instead of inside the "- name:" task, producing
# an invalid Ansible task pair.
EXAMPLES = r'''
- name: Grant `zfs allow` and `unallow` permission to the `adm` user with the default local+descendents scope
  zfs_delegate_admin:
    name: rpool/myfs
    users: adm
    permissions: allow,unallow
- name: Grant `zfs send` to everyone, plus the group `backup`
  zfs_delegate_admin:
    name: rpool/myvol
    groups: backup
    everyone: yes
    permissions: send
- name: Grant `zfs send,receive` to users `foo` and `bar` with local scope only
  zfs_delegate_admin:
    name: rpool/myfs
    users: foo,bar
    permissions: send,receive
    local: yes
- name: Revoke all permissions from everyone (permissions specifically assigned to users and groups remain)
  zfs_delegate_admin:
    name: rpool/myfs
    everyone: yes
    state: absent
'''
# This module does not return anything other than the standard
# changed/state/msg/stdout
RETURN = '''
'''
from itertools import product
from ansible.module_utils.basic import AnsibleModule
class ZfsDelegateAdmin(object):
    """Driver translating Ansible module params into `zfs allow/unallow` runs."""
    def __init__(self, module):
        self.module = module
        self.name = module.params.get('name')
        self.state = module.params.get('state')
        self.users = module.params.get('users')
        self.groups = module.params.get('groups')
        self.everyone = module.params.get('everyone')
        self.perms = module.params.get('permissions')
        # Scope flag ('l', 'd' or 'ld') for `zfs allow`; decided in setup().
        self.scope = None
        self.changed = False
        # Snapshot of permissions before the operation; filled in by run().
        self.initial_perms = None
        self.subcommand = 'allow'
        self.recursive_opt = []
        self.run_method = self.update
        self.setup(module)
    def setup(self, module):
        """ Validate params and set up for run.
        """
        if self.state == 'absent':
            self.subcommand = 'unallow'
            # -r is only meaningful for unallow (ignored for state=present).
            if module.params.get('recursive'):
                self.recursive_opt = ['-r']
        local = module.params.get('local')
        descendents = module.params.get('descendents')
        # Both set, or both unset, matches zfs's default local+descendent scope.
        if (local and descendents) or (not local and not descendents):
            self.scope = 'ld'
        elif local:
            self.scope = 'l'
        elif descendents:
            self.scope = 'd'
        else:
            self.module.fail_json(msg='Impossible value for local and descendents')
        if not (self.users or self.groups or self.everyone):
            if self.state == 'present':
                self.module.fail_json(msg='One of `users`, `groups`, or `everyone` must be set')
            elif self.state == 'absent':
                # No entities + absent means: strip ALL permissions.
                self.run_method = self.clear
            # ansible ensures the else cannot happen here
        self.zfs_path = module.get_bin_path('zfs', True)
    @property
    def current_perms(self):
        """ Parse the output of `zfs allow <name>` to retrieve current permissions.
        """
        out = self.run_zfs_raw(subcommand='allow')
        # perms[scope] holds per-user ('u'), per-group ('g') and everyone
        # ('e') permission lists for each scope section of the output.
        perms = {
            'l': {'u': {}, 'g': {}, 'e': []},
            'd': {'u': {}, 'g': {}, 'e': []},
            'ld': {'u': {}, 'g': {}, 'e': []},
        }
        linemap = {
            'Local permissions:': 'l',
            'Descendent permissions:': 'd',
            'Local+Descendent permissions:': 'ld',
        }
        scope = None
        for line in out.splitlines():
            # Section headers switch the active scope; other lines keep it.
            scope = linemap.get(line, scope)
            if not scope:
                continue
            try:
                if line.startswith('\tuser ') or line.startswith('\tgroup '):
                    ent_type, ent, cur_perms = line.split()
                    perms[scope][ent_type[0]][ent] = cur_perms.split(',')
                elif line.startswith('\teveryone '):
                    perms[scope]['e'] = line.split()[1].split(',')
            except ValueError:
                self.module.fail_json(msg="Cannot parse user/group permission output by `zfs allow`: '%s'" % line)
        return perms
    def run_zfs_raw(self, subcommand=None, args=None):
        """ Run a raw zfs command, fail on error.
        """
        cmd = [self.zfs_path, subcommand or self.subcommand] + (args or []) + [self.name]
        rc, out, err = self.module.run_command(cmd)
        if rc:
            self.module.fail_json(msg='Command `%s` failed: %s' % (' '.join(cmd), err))
        return out
    def run_zfs(self, args):
        """ Run zfs allow/unallow with appropriate options as per module arguments.
        """
        args = self.recursive_opt + ['-' + self.scope] + args
        if self.perms:
            args.append(','.join(self.perms))
        return self.run_zfs_raw(args=args)
    def clear(self):
        """ Called by run() to clear all permissions.
        """
        changed = False
        stdout = ''
        # Unallow every user/group entity in every scope, then 'everyone'.
        for scope, ent_type in product(('ld', 'l', 'd'), ('u', 'g')):
            for ent in self.initial_perms[scope][ent_type].keys():
                stdout += self.run_zfs(['-%s' % ent_type, ent])
                changed = True
        for scope in ('ld', 'l', 'd'):
            if self.initial_perms[scope]['e']:
                stdout += self.run_zfs(['-e'])
                changed = True
        return (changed, stdout)
    def update(self):
        """ Update permissions as per module arguments.
        """
        stdout = ''
        for ent_type, entities in (('u', self.users), ('g', self.groups)):
            if entities:
                stdout += self.run_zfs(['-%s' % ent_type, ','.join(entities)])
        if self.everyone:
            stdout += self.run_zfs(['-e'])
        # 'changed' is any difference between before/after permission state.
        return (self.initial_perms != self.current_perms, stdout)
    def run(self):
        """ Run an operation, return results for Ansible.
        """
        exit_args = {'state': self.state}
        self.initial_perms = self.current_perms
        exit_args['changed'], stdout = self.run_method()
        if exit_args['changed']:
            exit_args['msg'] = 'ZFS delegated admin permissions updated'
            exit_args['stdout'] = stdout
        self.module.exit_json(**exit_args)
def main():
    """Ansible entry point: declare the argument spec and run the driver."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            users=dict(type='list'),
            groups=dict(type='list'),
            everyone=dict(type='bool', default=False),
            permissions=dict(type='list',
                             choices=['allow', 'clone', 'create', 'destroy', 'mount', 'promote', 'readonly', 'receive',
                                      'rename', 'rollback', 'send', 'share', 'snapshot', 'unallow']),
            local=dict(type='bool'),
            descendents=dict(type='bool'),
            recursive=dict(type='bool', default=False),
        ),
        supports_check_mode=False,
        # `permissions` is mandatory when granting (state=present).
        required_if=[('state', 'present', ['permissions'])],
    )
    zfs_delegate_admin = ZfsDelegateAdmin(module)
    zfs_delegate_admin.run()
if __name__ == '__main__':
    main()
| gpl-3.0 |
marshmallow-code/marshmallow-sqlalchemy | tests/test_sqlalchemy_schema.py | 1 | 16699 | import pytest
from marshmallow import validate, ValidationError, Schema
import marshmallow
import sqlalchemy as sa
from marshmallow_sqlalchemy import SQLAlchemySchema, SQLAlchemyAutoSchema, auto_field
from marshmallow_sqlalchemy.exceptions import IncorrectSchemaTypeError
from marshmallow_sqlalchemy.fields import Related
# -----------------------------------------------------------------------------
@pytest.fixture
def teacher(models, session):
    """A flushed Teacher (id=24) attached to a School (id=42)."""
    school = models.School(id=42, name="Univ. Of Whales")
    teacher_ = models.Teacher(
        id=24, full_name="Teachy McTeachFace", current_school=school
    )
    session.add(teacher_)
    session.flush()
    return teacher_
@pytest.fixture
def school(models, session):
    """A flushed School (id=42) with two enrolled students."""
    school = models.School(id=42, name="Univ. Of Whales")
    students = [
        models.Student(id=35, full_name="Bob Smith", current_school=school),
        models.Student(id=53, full_name="John Johnson", current_school=school),
    ]
    session.add_all(students)
    session.flush()
    return school
class EntityMixin:
    # Shared dump-only primary-key field for the schemas in these tests.
    id = auto_field(dump_only=True)
# Auto schemas with default options
@pytest.fixture
def sqla_auto_model_schema(models, request):
    """Auto-generated schema from the Teacher model (columns only)."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Teacher
        full_name = auto_field(validate=validate.Length(max=20))
    return TeacherSchema()
@pytest.fixture
def sqla_auto_table_schema(models, request):
    """Auto-generated schema from the raw Table object instead of the model."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            table = models.Teacher.__table__
        full_name = auto_field(validate=validate.Length(max=20))
    return TeacherSchema()
# Schemas with relationships
@pytest.fixture
def sqla_schema_with_relationships(models, request):
    """Explicit schema exposing the relationship fields via auto_field()."""
    class TeacherSchema(EntityMixin, SQLAlchemySchema):
        class Meta:
            model = models.Teacher
        full_name = auto_field(validate=validate.Length(max=20))
        current_school = auto_field()
        substitute = auto_field()
    return TeacherSchema()
@pytest.fixture
def sqla_auto_model_schema_with_relationships(models, request):
    """Auto schema with include_relationships=True generating related fields."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Teacher
            include_relationships = True
        full_name = auto_field(validate=validate.Length(max=20))
    return TeacherSchema()
# Schemas with foreign keys
@pytest.fixture
def sqla_schema_with_fks(models, request):
    """Explicit schema exposing the FK column instead of the relationship."""
    class TeacherSchema(EntityMixin, SQLAlchemySchema):
        class Meta:
            model = models.Teacher
        full_name = auto_field(validate=validate.Length(max=20))
        current_school_id = auto_field()
    return TeacherSchema()
@pytest.fixture
def sqla_auto_model_schema_with_fks(models, request):
    """Auto schema with include_fk=True and relationships suppressed."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Teacher
            include_fk = True
            include_relationships = False
        full_name = auto_field(validate=validate.Length(max=20))
    return TeacherSchema()
# -----------------------------------------------------------------------------
@pytest.mark.parametrize(
    "schema",
    (
        pytest.lazy_fixture("sqla_schema_with_relationships"),
        pytest.lazy_fixture("sqla_auto_model_schema_with_relationships"),
    ),
)
def test_dump_with_relationships(teacher, schema):
    # Related objects serialize to their primary keys (42); an unset
    # relationship dumps as None.
    assert schema.dump(teacher) == {
        "id": teacher.id,
        "full_name": teacher.full_name,
        "current_school": 42,
        "substitute": None,
    }
@pytest.mark.parametrize(
    "schema",
    (
        pytest.lazy_fixture("sqla_schema_with_fks"),
        pytest.lazy_fixture("sqla_auto_model_schema_with_fks"),
    ),
)
def test_dump_with_foreign_keys(teacher, schema):
    # FK columns serialize as plain scalar values; no relationship fields.
    assert schema.dump(teacher) == {
        "id": teacher.id,
        "full_name": teacher.full_name,
        "current_school_id": 42,
    }
def test_table_schema_dump(teacher, sqla_auto_table_schema):
    # Table-based schemas see only plain columns, no FKs/relationships.
    assert sqla_auto_table_schema.dump(teacher) == {
        "id": teacher.id,
        "full_name": teacher.full_name,
    }
@pytest.mark.parametrize(
    "schema",
    (
        pytest.lazy_fixture("sqla_schema_with_relationships"),
        pytest.lazy_fixture("sqla_schema_with_fks"),
        pytest.lazy_fixture("sqla_auto_model_schema"),
        pytest.lazy_fixture("sqla_auto_table_schema"),
    ),
)
def test_load(schema):
    # Without load_instance, load() returns a plain dict.
    assert schema.load({"full_name": "Teachy T"}) == {"full_name": "Teachy T"}
class TestLoadInstancePerSchemaInstance:
    """`load_instance` set in Meta can be overridden per schema instance."""
    @pytest.fixture
    def schema_no_load_instance(self, models, session):
        class TeacherSchema(SQLAlchemySchema):
            class Meta:
                model = models.Teacher
                sqla_session = session
                # load_instance = False is the default
            full_name = auto_field(validate=validate.Length(max=20))
            current_school = auto_field()
            substitute = auto_field()
        return TeacherSchema
    @pytest.fixture
    def schema_with_load_instance(self, schema_no_load_instance):
        class TeacherSchema(schema_no_load_instance):
            class Meta(schema_no_load_instance.Meta):
                load_instance = True
        return TeacherSchema
    @pytest.fixture
    def auto_schema_no_load_instance(self, models, session):
        class TeacherSchema(SQLAlchemyAutoSchema):
            class Meta:
                model = models.Teacher
                sqla_session = session
                # load_instance = False is the default
        return TeacherSchema
    @pytest.fixture
    def auto_schema_with_load_instance(self, auto_schema_no_load_instance):
        class TeacherSchema(auto_schema_no_load_instance):
            class Meta(auto_schema_no_load_instance.Meta):
                load_instance = True
        return TeacherSchema
    @pytest.mark.parametrize(
        "Schema",
        (
            pytest.lazy_fixture("schema_no_load_instance"),
            pytest.lazy_fixture("schema_with_load_instance"),
            pytest.lazy_fixture("auto_schema_no_load_instance"),
            pytest.lazy_fixture("auto_schema_with_load_instance"),
        ),
    )
    def test_toggle_load_instance_per_schema(self, models, Schema):
        # load() yields a model instance when load_instance is on, else a dict;
        # the constructor kwarg must win over the Meta default either way.
        tname = "Teachy T"
        source = {"full_name": tname}
        # No per-instance override
        load_instance_default = Schema()
        result = load_instance_default.load(source)
        default = load_instance_default.opts.load_instance
        default_type = models.Teacher if default else dict
        assert isinstance(result, default_type)
        # Override the default
        override = Schema(load_instance=not default)
        result = override.load(source)
        override_type = dict if default else models.Teacher
        assert isinstance(result, override_type)
@pytest.mark.parametrize(
"schema",
(
pytest.lazy_fixture("sqla_schema_with_relationships"),
pytest.lazy_fixture("sqla_schema_with_fks"),
pytest.lazy_fixture("sqla_auto_model_schema"),
pytest.lazy_fixture("sqla_auto_table_schema"),
),
)
def test_load_validation_errors(schema):
with pytest.raises(ValidationError):
schema.load({"full_name": "x" * 21})
def test_auto_field_on_plain_schema_raises_error():
class BadSchema(Schema):
name = auto_field()
with pytest.raises(IncorrectSchemaTypeError):
BadSchema()
def test_cannot_set_both_model_and_table(models):
with pytest.raises(ValueError, match="Cannot set both"):
class BadWidgetSchema(SQLAlchemySchema):
class Meta:
model = models.Teacher
table = models.Teacher
def test_passing_model_to_auto_field(models, teacher):
class TeacherSchema(SQLAlchemySchema):
current_school_id = auto_field(model=models.Teacher)
schema = TeacherSchema()
assert schema.dump(teacher) == {"current_school_id": teacher.current_school_id}
def test_passing_table_to_auto_field(models, teacher):
class TeacherSchema(SQLAlchemySchema):
current_school_id = auto_field(table=models.Teacher.__table__)
schema = TeacherSchema()
assert schema.dump(teacher) == {"current_school_id": teacher.current_school_id}
# https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/190
def test_auto_schema_skips_synonyms(models):
class TeacherSchema(SQLAlchemyAutoSchema):
class Meta:
model = models.Teacher
include_fk = True
schema = TeacherSchema()
assert "current_school_id" in schema.fields
assert "curr_school_id" not in schema.fields
def test_auto_field_works_with_synonym(models):
    """A synonym attribute can still be mapped explicitly via auto_field()."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Teacher
            include_fk = True

        curr_school_id = auto_field()

    generated = TeacherSchema().fields
    # Both the canonical column and the explicitly-declared synonym appear.
    assert "current_school_id" in generated
    assert "curr_school_id" in generated
# Regression test for
# https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/306
def test_auto_field_works_with_ordered_flag(models):
    """With Meta.ordered, declared fields come before auto-generated ones."""
    class StudentSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Student
            ordered = True

        full_name = auto_field()

    field_order = tuple(StudentSchema().fields)
    assert field_order == ("full_name", "id", "dob", "date_created")
class TestAliasing:
    """auto_field can generate from one column while dumping under another key."""

    @pytest.fixture
    def aliased_schema(self, models):
        class TeacherSchema(SQLAlchemySchema):
            class Meta:
                model = models.Teacher

            # Field generated from "full_name", dumped under the "name" key.
            name = auto_field("full_name")

        return TeacherSchema()

    @pytest.fixture
    def aliased_auto_schema(self, models):
        class TeacherSchema(SQLAlchemyAutoSchema):
            class Meta:
                model = models.Teacher
                exclude = ("full_name",)

            # Field generated from "full_name", dumped under the "name" key.
            name = auto_field("full_name")

        return TeacherSchema()

    @pytest.fixture
    def aliased_attribute_schema(self, models):
        class TeacherSchema(SQLAlchemySchema):
            class Meta:
                model = models.Teacher

            # Generated from "full_name" but reading the "fname" attribute.
            name = auto_field("full_name", attribute="fname")

        return TeacherSchema()

    @pytest.mark.parametrize(
        "schema",
        (
            pytest.lazy_fixture("aliased_schema"),
            pytest.lazy_fixture("aliased_auto_schema"),
        ),
    )
    def test_passing_column_name(self, schema, teacher):
        """The aliased field pulls from full_name and dumps under "name"."""
        assert schema.fields["name"].attribute == "full_name"
        assert schema.dump(teacher)["name"] == teacher.full_name

    def test_passing_column_name_and_attribute(self, teacher, aliased_attribute_schema):
        """An explicit attribute= overrides the column-derived attribute."""
        schema = aliased_attribute_schema
        assert schema.fields["name"].attribute == "fname"
        assert schema.dump(teacher)["name"] == teacher.fname
class TestModelInstanceDeserialization:
    """With load_instance=True, load() produces model instances, not dicts."""

    @pytest.fixture
    def sqla_schema_class(self, models, session):
        class TeacherSchema(SQLAlchemySchema):
            class Meta:
                model = models.Teacher
                load_instance = True
                sqla_session = session

            full_name = auto_field(validate=validate.Length(max=20))
            current_school = auto_field()
            substitute = auto_field()

        return TeacherSchema

    @pytest.fixture
    def sqla_auto_schema_class(self, models, session):
        class TeacherSchema(SQLAlchemyAutoSchema):
            class Meta:
                model = models.Teacher
                include_relationships = True
                load_instance = True
                sqla_session = session

        return TeacherSchema

    @pytest.mark.parametrize(
        "SchemaClass",
        (
            pytest.lazy_fixture("sqla_schema_class"),
            pytest.lazy_fixture("sqla_auto_schema_class"),
        ),
    )
    def test_load(self, teacher, SchemaClass, models):
        """dump() then load() round-trips to a Teacher instance."""
        schema = SchemaClass(unknown=marshmallow.INCLUDE)
        round_tripped = schema.load(schema.dump(teacher))
        assert isinstance(round_tripped, models.Teacher)

    def test_load_transient(self, models, teacher):
        """Meta.transient loads an instance without attaching it to a session."""
        class TeacherSchema(SQLAlchemyAutoSchema):
            class Meta:
                model = models.Teacher
                load_instance = True
                transient = True

        schema = TeacherSchema()
        loaded = schema.load(schema.dump(teacher))
        assert isinstance(loaded, models.Teacher)
        assert sa.inspect(loaded).transient

    def test_override_transient(self, models, teacher):
        # marshmallow-code/marshmallow-sqlalchemy#388
        class TeacherSchema(SQLAlchemyAutoSchema):
            class Meta:
                model = models.Teacher
                load_instance = True
                transient = True

        # The constructor argument takes precedence over the Meta option.
        assert TeacherSchema(transient=False).transient is False
def test_related_when_model_attribute_name_distinct_from_column_name(
    models,
    session,
    teacher,
):
    """Related dumps model attribute names, not the underlying column names."""
    class TeacherSchema(SQLAlchemyAutoSchema):
        class Meta:
            model = models.Teacher
            load_instance = True
            sqla_session = session

        current_school = Related(["id", "name"])

    schema = TeacherSchema()
    dumped = schema.dump(teacher)
    related = dumped["current_school"]
    # The attribute is exposed as "id"; the raw column name must not leak.
    assert "school_id" not in related
    assert related["id"] == teacher.current_school.id

    transient_teacher = schema.load(dumped, transient=True)
    assert transient_teacher.current_school.id == teacher.current_school.id
    # A session-bound load resolves back to the same persistent object.
    assert schema.load(dumped) is teacher
# https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/338
def test_auto_field_works_with_assoc_proxy(models):
    """auto_field resolves association-proxy attributes on the model."""
    class StudentSchema(SQLAlchemySchema):
        class Meta:
            model = models.Student

        possible_teachers = auto_field()

    assert "possible_teachers" in StudentSchema().fields
def test_dump_and_load_with_assoc_proxy_multiplicity(models, session, school):
    """A list-valued association proxy round-trips through dump/load."""
    class SchoolSchema(SQLAlchemySchema):
        class Meta:
            model = models.School
            load_instance = True
            sqla_session = session

        student_ids = auto_field()

    schema = SchoolSchema()
    assert "student_ids" in schema.fields

    expected_ids = list(school.student_ids)
    dumped = schema.dump(school)
    assert "student_ids" in dumped
    assert dumped["student_ids"] == expected_ids

    reloaded = schema.load(dumped, transient=True)
    assert list(reloaded.student_ids) == expected_ids
def test_dump_and_load_with_assoc_proxy_multiplicity_dump_only_kwargs(
    models, session, school
):
    """dump_only + data_key on an assoc proxy: dumped under the alias, never loadable."""
    class SchoolSchema(SQLAlchemySchema):
        class Meta:
            model = models.School
            load_instance = True
            sqla_session = session

        student_ids = auto_field(dump_only=True, data_key="student_identifiers")

    schema = SchoolSchema()
    assert "student_ids" in schema.fields
    proxy_field = schema.fields["student_ids"]
    # The field participates in dumping only.
    assert proxy_field not in schema.load_fields.values()
    assert proxy_field in schema.dump_fields.values()

    dumped = schema.dump(school)
    assert "student_ids" not in dumped
    assert "student_identifiers" in dumped
    assert dumped["student_identifiers"] == list(school.student_ids)

    # Feeding the dump back must fail: dump_only keys are rejected on load.
    with pytest.raises(ValidationError):
        schema.load(dumped, transient=True)
def test_dump_and_load_with_assoc_proxy_multiplicity_load_only_only_kwargs(
    models, session, school
):
    """load_only + data_key on an assoc proxy: loadable under the alias, never dumped.

    Fix: the dump-exclusion assertion previously checked the misspelled key
    "student_identifers", which could never be present and so passed vacuously;
    it now checks the real data_key "student_identifiers".
    """
    class SchoolSchema(SQLAlchemySchema):
        class Meta:
            model = models.School
            load_instance = True
            sqla_session = session

        student_ids = auto_field(load_only=True, data_key="student_identifiers")

    schema = SchoolSchema()
    assert "student_ids" in schema.fields
    # The field participates in loading only.
    assert schema.fields["student_ids"] not in schema.dump_fields.values()
    assert schema.fields["student_ids"] in schema.load_fields.values()

    dump_data = schema.dump(school)
    # load_only fields must not appear in dump output, under either name.
    # (Typo fixed: was "student_identifers".)
    assert "student_identifiers" not in dump_data
    assert "student_ids" not in dump_data

    new_school = schema.load(
        {"student_identifiers": list(school.student_ids)}, transient=True
    )
    assert list(new_school.student_ids) == list(school.student_ids)
| mit |
lukedeo/fancy-cnn | prepare_yelp_sentences.py | 2 | 6769 | """
prepare_yelp_sentences.py
description: prepare the yelp data for training in convolutional recurrent architectures over sentences
"""
from nlpdatahandlers import YelpDataHandler
import cPickle as pickle
import logging
import numpy as np
from textclf.wordvectors.glove import GloVeBox
# Format prefix applied to every message emitted through log().
LOGGER_PREFIX = ' %s'

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def log(msg, logger=logger):
    # Thin convenience wrapper: emit *msg* at INFO level with LOGGER_PREFIX applied.
    logger.info(LOGGER_PREFIX % msg)
# Train/dev/test split paths for each Yelp vote category. The numeric suffix
# in each name is presumably the number of reviews in that split — TODO confirm.
YELP_USEFUL_TRAIN = '../yelp-dataset/TrainSet_useful_185292'
YELP_USEFUL_DEV = '../yelp-dataset/DevSet_useful_185292'
YELP_USEFUL_TEST = '../yelp-dataset/TestSet_useful_185292'

YELP_FUNNY_TRAIN = '../yelp-dataset/TrainSet_funny_75064'
YELP_FUNNY_DEV = '../yelp-dataset/DevSet_funny_75064'
YELP_FUNNY_TEST = '../yelp-dataset/TestSet_funny_75064'

YELP_COOL_TRAIN = '../yelp-dataset/TrainSet_cool_88698'
YELP_COOL_DEV = '../yelp-dataset/DevSet_cool_88698'
YELP_COOL_TEST = '../yelp-dataset/TestSet_cool_88698'

# Pre-trained GloVe embedding files: a general-purpose vocabulary and one
# trained on Yelp text.
GLOBAL_WV_FILE = './embeddings/wv/glove.42B.300d.120000.txt'
YELP_WV_FILE = './embeddings/wv/Yelp-GloVe-300dim.txt'

# Fixed sizes used when converting reviews to sentence-level index arrays.
WORDS_PER_SENTENCE = 50
SENTENCES_PER_PARAGRAPH = 50
# NOTE(review): not read anywhere in this script's visible code — presumably
# consumed by the downstream handlers; verify before removing.
PREPEND = False
def _prepare_dataset(name, paths, yelp, global_gb, yelp_gb):
    """Build and save sentence-level index arrays for one Yelp vote category.

    ``name`` is the category tag used in the output .npy filenames
    ('useful'/'funny'/'cool'); ``paths`` is the (train, dev, test) path triple.
    The previous version repeated this whole body three times inline.
    """
    train_path, dev_path, test_path = paths
    log('Creating "{}" reviews sentence-datasets'.format(name))
    (train_reviews, train_labels, test_reviews, test_labels) = \
        yelp.get_data(train_path, dev_path, test_path)

    log('Converting to sentences: global word vectors')
    train_global_wvs_reviews = yelp.to_sentence_level_idx(train_reviews, SENTENCES_PER_PARAGRAPH,
                                                          WORDS_PER_SENTENCE, global_gb)
    test_global_wvs_reviews = yelp.to_sentence_level_idx(test_reviews, SENTENCES_PER_PARAGRAPH,
                                                         WORDS_PER_SENTENCE, global_gb)

    log('Converting to sentences: yelp word vectors')
    train_yelp_wvs_reviews = yelp.to_sentence_level_idx(train_reviews, SENTENCES_PER_PARAGRAPH,
                                                        WORDS_PER_SENTENCE, yelp_gb)
    test_yelp_wvs_reviews = yelp.to_sentence_level_idx(test_reviews, SENTENCES_PER_PARAGRAPH,
                                                       WORDS_PER_SENTENCE, yelp_gb)

    # -- training data save
    np.save('Yelp_{}_sentences_train_yelp_glove_X.npy'.format(name), train_yelp_wvs_reviews)
    np.save('Yelp_{}_sentences_train_global_glove_X.npy'.format(name), train_global_wvs_reviews)
    np.save('Yelp_{}_sentences_train_glove_y.npy'.format(name), train_labels)

    # -- testing data save
    np.save('Yelp_{}_sentences_test_yelp_glove_X.npy'.format(name), test_yelp_wvs_reviews)
    np.save('Yelp_{}_sentences_test_global_glove_X.npy'.format(name), test_global_wvs_reviews)
    np.save('Yelp_{}_sentences_test_glove_y.npy'.format(name), test_labels)


if __name__ == '__main__':
    log('Building word vectors from {}'.format(YELP_WV_FILE))
    yelp_gb = GloVeBox(YELP_WV_FILE)
    yelp_gb.build(zero_token=True, normalize_variance=False, normalize_norm=True)

    log('Building global word vectors from {}'.format(GLOBAL_WV_FILE))
    global_gb = GloVeBox(GLOBAL_WV_FILE)
    global_gb.build(zero_token=True, normalize_variance=False, normalize_norm=True)

    log('writing GloVeBox pickle...')
    # Fix: use context managers so the pickle files are flushed and closed
    # (previously the file objects from open() were never closed).
    with open(YELP_WV_FILE.replace('.txt', '-glovebox.pkl'), 'wb') as fh:
        pickle.dump(yelp_gb, fh, pickle.HIGHEST_PROTOCOL)
    with open(GLOBAL_WV_FILE.replace('.txt', '-glovebox.pkl'), 'wb') as fh:
        pickle.dump(global_gb, fh, pickle.HIGHEST_PROTOCOL)

    yelp = YelpDataHandler()

    # One pass per vote category; the per-category logic lives in
    # _prepare_dataset() instead of being copy-pasted three times.
    for name, paths in (
        ('useful', (YELP_USEFUL_TRAIN, YELP_USEFUL_DEV, YELP_USEFUL_TEST)),
        ('funny', (YELP_FUNNY_TRAIN, YELP_FUNNY_DEV, YELP_FUNNY_TEST)),
        ('cool', (YELP_COOL_TRAIN, YELP_COOL_DEV, YELP_COOL_TEST)),
    ):
        _prepare_dataset(name, paths, yelp, global_gb, yelp_gb)
| mit |
orchidinfosys/odoo | addons/account/__openerp__.py | 2 | 3530 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name' : 'Invoicing',
'version' : '1.1',
'summary': 'Send Invoices and Track Payments',
'sequence': 30,
'description': """
Invoicing & Payments
====================
The specific and easy-to-use Invoicing system in Odoo allows you to keep track of your accounting, even when you are not an accountant. It provides an easy way to follow up on your vendors and customers.
You could use this simplified accounting in case you work with an (external) account to keep your books, and you still want to keep track of payments. This module also offers you an easy method of registering payments, without having to encode complete abstracts of account.
""",
'category' : 'Accounting & Finance',
'website': 'https://www.odoo.com/page/billing',
'images' : ['images/accounts.jpeg','images/bank_statement.jpeg','images/cash_register.jpeg','images/chart_of_accounts.jpeg','images/customer_invoice.jpeg','images/journal_entries.jpeg'],
'depends' : ['base_setup', 'product', 'analytic', 'report', 'web_tip', 'web_planner'],
'data': [
'security/account_security.xml',
'security/ir.model.access.csv',
'data/data_account_type.xml',
'data/account_data.xml',
'views/account_menuitem.xml',
'views/account_payment_view.xml',
'wizard/account_reconcile_view.xml',
'wizard/account_unreconcile_view.xml',
'wizard/account_move_reversal_view.xml',
'views/account_view.xml',
'views/account_report.xml',
'wizard/account_invoice_refund_view.xml',
'wizard/account_validate_move_view.xml',
'wizard/account_invoice_state_view.xml',
'wizard/pos_box.xml',
'views/account_end_fy.xml',
'views/account_invoice_view.xml',
'data/invoice_action_data.xml',
'views/account_invoice_workflow.xml',
'views/partner_view.xml',
'views/product_view.xml',
'views/account_analytic_view.xml',
'views/company_view.xml',
'views/res_config_view.xml',
'views/account_tip_data.xml',
'views/account.xml',
'views/report_invoice.xml',
'report/account_invoice_report_view.xml',
'report/inherited_layouts.xml',
'views/account_journal_dashboard_view.xml',
'views/report_overdue.xml',
'views/web_planner_data.xml',
'views/report_overdue.xml',
'wizard/account_report_common_view.xml',
'wizard/account_report_print_journal_view.xml',
'views/report_journal.xml',
'wizard/account_report_partner_ledger_view.xml',
'views/report_partnerledger.xml',
'wizard/account_report_general_ledger_view.xml',
'views/report_generalledger.xml',
'wizard/account_report_trial_balance_view.xml',
'views/report_trialbalance.xml',
'views/account_financial_report_data.xml',
'wizard/account_financial_report_view.xml',
'views/report_financial.xml',
'wizard/account_report_aged_partner_balance_view.xml',
'views/report_agedpartnerbalance.xml',
],
'demo': [
'demo/account_demo.xml',
],
'qweb': [
"static/src/xml/account_reconciliation.xml",
"static/src/xml/account_payment.xml",
"static/src/xml/account_report_backend.xml",
],
'installable': True,
'application': True,
'auto_install': False,
'post_init_hook': '_auto_install_l10n',
}
| gpl-3.0 |
andreparrish/python-for-android | python3-alpha/python3-src/Lib/test/test_sys.py | 47 | 32005 | import unittest, test.support
import sys, io, os
import struct
import subprocess
import textwrap
import warnings
import operator
import codecs
# count the number of test runs, used to create unique
# strings to intern in test_intern()
numruns = 0

try:
    import threading
except ImportError:
    # threading is optional in this build; tests that need it are
    # skipped via @unittest.skipUnless(threading, ...).
    threading = None
class SysModuleTest(unittest.TestCase):
def setUp(self):
self.orig_stdout = sys.stdout
self.orig_stderr = sys.stderr
self.orig_displayhook = sys.displayhook
def tearDown(self):
sys.stdout = self.orig_stdout
sys.stderr = self.orig_stderr
sys.displayhook = self.orig_displayhook
test.support.reap_children()
def test_original_displayhook(self):
import builtins
out = io.StringIO()
sys.stdout = out
dh = sys.__displayhook__
self.assertRaises(TypeError, dh)
if hasattr(builtins, "_"):
del builtins._
dh(None)
self.assertEqual(out.getvalue(), "")
self.assertTrue(not hasattr(builtins, "_"))
dh(42)
self.assertEqual(out.getvalue(), "42\n")
self.assertEqual(builtins._, 42)
del sys.stdout
self.assertRaises(RuntimeError, dh, 42)
def test_lost_displayhook(self):
del sys.displayhook
code = compile("42", "<string>", "single")
self.assertRaises(RuntimeError, eval, code)
def test_custom_displayhook(self):
def baddisplayhook(obj):
raise ValueError
sys.displayhook = baddisplayhook
code = compile("42", "<string>", "single")
self.assertRaises(ValueError, eval, code)
def test_original_excepthook(self):
err = io.StringIO()
sys.stderr = err
eh = sys.__excepthook__
self.assertRaises(TypeError, eh)
try:
raise ValueError(42)
except ValueError as exc:
eh(*sys.exc_info())
self.assertTrue(err.getvalue().endswith("ValueError: 42\n"))
def test_excepthook(self):
with test.support.captured_output("stderr") as stderr:
sys.excepthook(1, '1', 1)
self.assertTrue("TypeError: print_exception(): Exception expected for " \
"value, str found" in stderr.getvalue())
# FIXME: testing the code for a lost or replaced excepthook in
# Python/pythonrun.c::PyErr_PrintEx() is tricky.
def test_exit(self):
self.assertRaises(TypeError, sys.exit, 42, 42)
# call without argument
try:
sys.exit(0)
except SystemExit as exc:
self.assertEqual(exc.code, 0)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with one entry
# entry will be unpacked
try:
sys.exit(42)
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with integer argument
try:
sys.exit((42,))
except SystemExit as exc:
self.assertEqual(exc.code, 42)
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with string argument
try:
sys.exit("exit")
except SystemExit as exc:
self.assertEqual(exc.code, "exit")
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# call with tuple argument with two entries
try:
sys.exit((17, 23))
except SystemExit as exc:
self.assertEqual(exc.code, (17, 23))
except:
self.fail("wrong exception")
else:
self.fail("no exception")
# test that the exit machinery handles SystemExits properly
rc = subprocess.call([sys.executable, "-c",
"raise SystemExit(47)"])
self.assertEqual(rc, 47)
def check_exit_message(code, expected, env=None):
process = subprocess.Popen([sys.executable, "-c", code],
stderr=subprocess.PIPE, env=env)
stdout, stderr = process.communicate()
self.assertEqual(process.returncode, 1)
self.assertTrue(stderr.startswith(expected),
"%s doesn't start with %s" % (ascii(stderr), ascii(expected)))
# test that stderr buffer if flushed before the exit message is written
# into stderr
check_exit_message(
r'import sys; sys.stderr.write("unflushed,"); sys.exit("message")',
b"unflushed,message")
# test that the exit message is written with backslashreplace error
# handler to stderr
check_exit_message(
r'import sys; sys.exit("surrogates:\uDCFF")',
b"surrogates:\\udcff")
# test that the unicode message is encoded to the stderr encoding
# instead of the default encoding (utf8)
env = os.environ.copy()
env['PYTHONIOENCODING'] = 'latin-1'
check_exit_message(
r'import sys; sys.exit("h\xe9")',
b"h\xe9", env=env)
def test_getdefaultencoding(self):
self.assertRaises(TypeError, sys.getdefaultencoding, 42)
# can't check more than the type, as the user might have changed it
self.assertIsInstance(sys.getdefaultencoding(), str)
# testing sys.settrace() is done in test_sys_settrace.py
# testing sys.setprofile() is done in test_sys_setprofile.py
def test_setcheckinterval(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.assertRaises(TypeError, sys.setcheckinterval)
orig = sys.getcheckinterval()
for n in 0, 100, 120, orig: # orig last to restore starting state
sys.setcheckinterval(n)
self.assertEqual(sys.getcheckinterval(), n)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_switchinterval(self):
self.assertRaises(TypeError, sys.setswitchinterval)
self.assertRaises(TypeError, sys.setswitchinterval, "a")
self.assertRaises(ValueError, sys.setswitchinterval, -1.0)
self.assertRaises(ValueError, sys.setswitchinterval, 0.0)
orig = sys.getswitchinterval()
# sanity check
self.assertTrue(orig < 0.5, orig)
try:
for n in 0.00001, 0.05, 3.0, orig:
sys.setswitchinterval(n)
self.assertAlmostEqual(sys.getswitchinterval(), n)
finally:
sys.setswitchinterval(orig)
def test_recursionlimit(self):
self.assertRaises(TypeError, sys.getrecursionlimit, 42)
oldlimit = sys.getrecursionlimit()
self.assertRaises(TypeError, sys.setrecursionlimit)
self.assertRaises(ValueError, sys.setrecursionlimit, -42)
sys.setrecursionlimit(10000)
self.assertEqual(sys.getrecursionlimit(), 10000)
sys.setrecursionlimit(oldlimit)
@unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
'fatal error if run with a trace function')
def test_recursionlimit_recovery(self):
# NOTE: this test is slightly fragile in that it depends on the current
# recursion count when executing the test being low enough so as to
# trigger the recursion recovery detection in the _Py_MakeEndRecCheck
# macro (see ceval.h).
oldlimit = sys.getrecursionlimit()
def f():
f()
try:
for i in (50, 1000):
# Issue #5392: stack overflow after hitting recursion limit twice
sys.setrecursionlimit(i)
self.assertRaises(RuntimeError, f)
self.assertRaises(RuntimeError, f)
finally:
sys.setrecursionlimit(oldlimit)
def test_recursionlimit_fatalerror(self):
# A fatal error occurs if a second recursion limit is hit when recovering
# from a first one.
if os.name == "nt":
raise unittest.SkipTest(
"under Windows, test would generate a spurious crash dialog")
code = textwrap.dedent("""
import sys
def f():
try:
f()
except RuntimeError:
f()
sys.setrecursionlimit(%d)
f()""")
for i in (50, 1000):
sub = subprocess.Popen([sys.executable, '-c', code % i],
stderr=subprocess.PIPE)
err = sub.communicate()[1]
self.assertTrue(sub.returncode, sub.returncode)
self.assertTrue(
b"Fatal Python error: Cannot recover from stack overflow" in err,
err)
def test_getwindowsversion(self):
# Raise SkipTest if sys doesn't have getwindowsversion attribute
test.support.get_attribute(sys, "getwindowsversion")
v = sys.getwindowsversion()
self.assertEqual(len(v), 5)
self.assertIsInstance(v[0], int)
self.assertIsInstance(v[1], int)
self.assertIsInstance(v[2], int)
self.assertIsInstance(v[3], int)
self.assertIsInstance(v[4], str)
self.assertRaises(IndexError, operator.getitem, v, 5)
self.assertIsInstance(v.major, int)
self.assertIsInstance(v.minor, int)
self.assertIsInstance(v.build, int)
self.assertIsInstance(v.platform, int)
self.assertIsInstance(v.service_pack, str)
self.assertIsInstance(v.service_pack_minor, int)
self.assertIsInstance(v.service_pack_major, int)
self.assertIsInstance(v.suite_mask, int)
self.assertIsInstance(v.product_type, int)
self.assertEqual(v[0], v.major)
self.assertEqual(v[1], v.minor)
self.assertEqual(v[2], v.build)
self.assertEqual(v[3], v.platform)
self.assertEqual(v[4], v.service_pack)
# This is how platform.py calls it. Make sure tuple
# still has 5 elements
maj, min, buildno, plat, csd = sys.getwindowsversion()
def test_call_tracing(self):
self.assertRaises(TypeError, sys.call_tracing, type, 2)
def test_dlopenflags(self):
if hasattr(sys, "setdlopenflags"):
self.assertTrue(hasattr(sys, "getdlopenflags"))
self.assertRaises(TypeError, sys.getdlopenflags, 42)
oldflags = sys.getdlopenflags()
self.assertRaises(TypeError, sys.setdlopenflags)
sys.setdlopenflags(oldflags+1)
self.assertEqual(sys.getdlopenflags(), oldflags+1)
sys.setdlopenflags(oldflags)
def test_refcount(self):
# n here must be a global in order for this test to pass while
# tracing with a python function. Tracing calls PyFrame_FastToLocals
# which will add a copy of any locals to the frame object, causing
# the reference count to increase by 2 instead of 1.
global n
self.assertRaises(TypeError, sys.getrefcount)
c = sys.getrefcount(None)
n = None
self.assertEqual(sys.getrefcount(None), c+1)
del n
self.assertEqual(sys.getrefcount(None), c)
if hasattr(sys, "gettotalrefcount"):
self.assertIsInstance(sys.gettotalrefcount(), int)
def test_getframe(self):
self.assertRaises(TypeError, sys._getframe, 42, 42)
self.assertRaises(ValueError, sys._getframe, 2000000000)
self.assertTrue(
SysModuleTest.test_getframe.__code__ \
is sys._getframe().f_code
)
# sys._current_frames() is a CPython-only gimmick.
def test_current_frames(self):
have_threads = True
try:
import _thread
except ImportError:
have_threads = False
if have_threads:
self.current_frames_with_threads()
else:
self.current_frames_without_threads()
# Test sys._current_frames() in a WITH_THREADS build.
@test.support.reap_threads
def current_frames_with_threads(self):
import threading, _thread
import traceback
# Spawn a thread that blocks at a known place. Then the main
# thread does sys._current_frames(), and verifies that the frames
# returned make sense.
entered_g = threading.Event()
leave_g = threading.Event()
thread_info = [] # the thread's id
def f123():
g456()
def g456():
thread_info.append(_thread.get_ident())
entered_g.set()
leave_g.wait()
t = threading.Thread(target=f123)
t.start()
entered_g.wait()
# At this point, t has finished its entered_g.set(), although it's
# impossible to guess whether it's still on that line or has moved on
# to its leave_g.wait().
self.assertEqual(len(thread_info), 1)
thread_id = thread_info[0]
d = sys._current_frames()
main_id = _thread.get_ident()
self.assertIn(main_id, d)
self.assertIn(thread_id, d)
# Verify that the captured main-thread frame is _this_ frame.
frame = d.pop(main_id)
self.assertTrue(frame is sys._getframe())
# Verify that the captured thread frame is blocked in g456, called
# from f123. This is a litte tricky, since various bits of
# threading.py are also in the thread's call stack.
frame = d.pop(thread_id)
stack = traceback.extract_stack(frame)
for i, (filename, lineno, funcname, sourceline) in enumerate(stack):
if funcname == "f123":
break
else:
self.fail("didn't find f123() on thread's call stack")
self.assertEqual(sourceline, "g456()")
# And the next record must be for g456().
filename, lineno, funcname, sourceline = stack[i+1]
self.assertEqual(funcname, "g456")
self.assertIn(sourceline, ["leave_g.wait()", "entered_g.set()"])
# Reap the spawned thread.
leave_g.set()
t.join()
# Test sys._current_frames() when thread support doesn't exist.
def current_frames_without_threads(self):
# Not much happens here: there is only one thread, with artificial
# "thread id" 0.
d = sys._current_frames()
self.assertEqual(len(d), 1)
self.assertIn(0, d)
self.assertTrue(d[0] is sys._getframe())
def test_attributes(self):
self.assertIsInstance(sys.api_version, int)
self.assertIsInstance(sys.argv, list)
self.assertIn(sys.byteorder, ("little", "big"))
self.assertIsInstance(sys.builtin_module_names, tuple)
self.assertIsInstance(sys.copyright, str)
self.assertIsInstance(sys.exec_prefix, str)
self.assertIsInstance(sys.executable, str)
self.assertEqual(len(sys.float_info), 11)
self.assertEqual(sys.float_info.radix, 2)
self.assertEqual(len(sys.int_info), 2)
self.assertTrue(sys.int_info.bits_per_digit % 5 == 0)
self.assertTrue(sys.int_info.sizeof_digit >= 1)
self.assertEqual(type(sys.int_info.bits_per_digit), int)
self.assertEqual(type(sys.int_info.sizeof_digit), int)
self.assertIsInstance(sys.hexversion, int)
self.assertEqual(len(sys.hash_info), 5)
self.assertLess(sys.hash_info.modulus, 2**sys.hash_info.width)
# sys.hash_info.modulus should be a prime; we do a quick
# probable primality test (doesn't exclude the possibility of
# a Carmichael number)
for x in range(1, 100):
self.assertEqual(
pow(x, sys.hash_info.modulus-1, sys.hash_info.modulus),
1,
"sys.hash_info.modulus {} is a non-prime".format(
sys.hash_info.modulus)
)
self.assertIsInstance(sys.hash_info.inf, int)
self.assertIsInstance(sys.hash_info.nan, int)
self.assertIsInstance(sys.hash_info.imag, int)
self.assertIsInstance(sys.maxsize, int)
self.assertIsInstance(sys.maxunicode, int)
self.assertIsInstance(sys.platform, str)
self.assertIsInstance(sys.prefix, str)
self.assertIsInstance(sys.version, str)
vi = sys.version_info
self.assertIsInstance(vi[:], tuple)
self.assertEqual(len(vi), 5)
self.assertIsInstance(vi[0], int)
self.assertIsInstance(vi[1], int)
self.assertIsInstance(vi[2], int)
self.assertIn(vi[3], ("alpha", "beta", "candidate", "final"))
self.assertIsInstance(vi[4], int)
self.assertIsInstance(vi.major, int)
self.assertIsInstance(vi.minor, int)
self.assertIsInstance(vi.micro, int)
self.assertIn(vi.releaselevel, ("alpha", "beta", "candidate", "final"))
self.assertIsInstance(vi.serial, int)
self.assertEqual(vi[0], vi.major)
self.assertEqual(vi[1], vi.minor)
self.assertEqual(vi[2], vi.micro)
self.assertEqual(vi[3], vi.releaselevel)
self.assertEqual(vi[4], vi.serial)
self.assertTrue(vi > (1,0,0))
self.assertIsInstance(sys.float_repr_style, str)
self.assertIn(sys.float_repr_style, ('short', 'legacy'))
if not sys.platform.startswith('win'):
self.assertIsInstance(sys.abiflags, str)
def test_43581(self):
# Can't use sys.stdout, as this is a StringIO object when
# the test runs under regrtest.
self.assertEqual(sys.__stdout__.encoding, sys.__stderr__.encoding)
def test_intern(self):
global numruns
numruns += 1
self.assertRaises(TypeError, sys.intern)
s = "never interned before" + str(numruns)
self.assertTrue(sys.intern(s) is s)
s2 = s.swapcase().swapcase()
self.assertTrue(sys.intern(s2) is s)
# Subclasses of string can't be interned, because they
# provide too much opportunity for insane things to happen.
# We don't want them in the interned dict and if they aren't
# actually interned, we don't want to create the appearance
# that they are by allowing intern() to succeed.
class S(str):
def __hash__(self):
return 123
self.assertRaises(TypeError, sys.intern, S("abc"))
def test_sys_flags(self):
self.assertTrue(sys.flags)
attrs = ("debug", "division_warning",
"inspect", "interactive", "optimize", "dont_write_bytecode",
"no_user_site", "no_site", "ignore_environment", "verbose",
"bytes_warning", "quiet")
for attr in attrs:
self.assertTrue(hasattr(sys.flags, attr), attr)
self.assertEqual(type(getattr(sys.flags, attr)), int, attr)
self.assertTrue(repr(sys.flags))
self.assertEqual(len(sys.flags), len(attrs))
def test_clear_type_cache(self):
sys._clear_type_cache()
def test_ioencoding(self):
env = dict(os.environ)
# Test character: cent sign, encoded as 0x4A (ASCII J) in CP424,
# not representable in ASCII.
env["PYTHONIOENCODING"] = "cp424"
p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
stdout = subprocess.PIPE, env=env)
out = p.communicate()[0].strip()
self.assertEqual(out, "\xa2\n".encode("cp424"))
env["PYTHONIOENCODING"] = "ascii:replace"
p = subprocess.Popen([sys.executable, "-c", 'print(chr(0xa2))'],
stdout = subprocess.PIPE, env=env)
out = p.communicate()[0].strip()
self.assertEqual(out, b'?')
def test_executable(self):
# Issue #7774: Ensure that sys.executable is an empty string if argv[0]
# has been set to an non existent program name and Python is unable to
# retrieve the real program name
# For a normal installation, it should work without 'cwd'
# argument. For test runs in the build directory, see #7774.
python_dir = os.path.dirname(os.path.realpath(sys.executable))
p = subprocess.Popen(
["nonexistent", "-c",
'import sys; print(sys.executable.encode("ascii", "backslashreplace"))'],
executable=sys.executable, stdout=subprocess.PIPE, cwd=python_dir)
stdout = p.communicate()[0]
executable = stdout.strip().decode("ASCII")
p.wait()
self.assertIn(executable, ["b''", repr(sys.executable.encode("ascii", "backslashreplace"))])
def check_fsencoding(self, fs_encoding, expected=None):
    """Assert *fs_encoding* names a real codec and, if *expected* is
    given, that it matches exactly."""
    self.assertIsNotNone(fs_encoding)
    # Raises LookupError (failing the test) for an unknown codec name.
    codecs.lookup(fs_encoding)
    if expected:
        self.assertEqual(fs_encoding, expected)
def test_getfilesystemencoding(self):
    """The filesystem encoding is platform-determined: utf-8 on macOS,
    mbcs on Windows, locale-dependent elsewhere."""
    fs_encoding = sys.getfilesystemencoding()
    if sys.platform == 'darwin':
        expected = 'utf-8'
    elif sys.platform == 'win32':
        expected = 'mbcs'
    else:
        expected = None  # only validated as a known codec
    self.check_fsencoding(fs_encoding, expected)
class SizeofTest(unittest.TestCase):
TPFLAGS_HAVE_GC = 1<<14
TPFLAGS_HEAPTYPE = 1<<9
def setUp(self):
    """Cache platform-dependent struct sizes used by the sizeof checks."""
    # Sizes of basic C types on this platform.
    self.c = len(struct.pack('c', b' '))
    self.H = len(struct.pack('H', 0))
    self.i = len(struct.pack('i', 0))
    self.l = len(struct.pack('l', 0))
    self.P = len(struct.pack('P', 0))
    # due to missing size_t information from struct, it is assumed that
    # sizeof(Py_ssize_t) = sizeof(void*)
    self.header = 'PP'               # fixed-size object header
    self.vheader = self.header + 'P' # variable-size object header
    if hasattr(sys, "gettotalrefcount"):
        # Debug builds carry two extra pointers per object.
        self.header += '2P'
        self.vheader += '2P'
    self.longdigit = sys.int_info.sizeof_digit
    import _testcapi
    self.gc_headsize = _testcapi.SIZEOF_PYGC_HEAD
    self.file = open(test.support.TESTFN, 'wb')
def tearDown(self):
    # Close and remove the scratch file created in setUp.
    self.file.close()
    test.support.unlink(test.support.TESTFN)
def check_sizeof(self, o, size):
    """Assert sys.getsizeof(o) == size, adjusting for the GC header."""
    result = sys.getsizeof(o)
    # add GC header size: heap types always have it; for instances it
    # depends on the type's HAVE_GC flag.
    if ((type(o) == type) and (o.__flags__ & self.TPFLAGS_HEAPTYPE) or\
        ((type(o) != type) and (type(o).__flags__ & self.TPFLAGS_HAVE_GC))):
        size += self.gc_headsize
    msg = 'wrong size for %s: got %d, expected %d' \
          % (type(o), result, size)
    self.assertEqual(result, size, msg)
def calcsize(self, fmt):
    """Wrapper around struct.calcsize which enforces the alignment of the
    end of a structure to the alignment requirement of pointer.

    Note: This wrapper should only be used if a pointer member is included
    and no member with a size larger than a pointer exists.
    """
    # '0P' adds zero-size pointer padding, forcing pointer alignment.
    return struct.calcsize(fmt + '0P')
def test_gc_head_size(self):
    # Check that the gc header size is added to objects tracked by the gc.
    h = self.header
    vh = self.vheader
    size = self.calcsize
    gc_header_size = self.gc_headsize
    # bool objects are not gc tracked
    self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
    # but lists are
    self.assertEqual(sys.getsizeof([]), size(vh + 'PP') + gc_header_size)
def test_default(self):
    """getsizeof's default argument is ignored when the object reports
    its own size."""
    h = self.header
    vh = self.vheader
    size = self.calcsize
    self.assertEqual(sys.getsizeof(True), size(vh) + self.longdigit)
    self.assertEqual(sys.getsizeof(True, -1), size(vh) + self.longdigit)
def test_objecttypes(self):
# check all types defined in Objects/
h = self.header
vh = self.vheader
size = self.calcsize
check = self.check_sizeof
# bool
check(True, size(vh) + self.longdigit)
# buffer
# XXX
# builtin_function_or_method
check(len, size(h + '3P'))
# bytearray
samples = [b'', b'u'*100000]
for sample in samples:
x = bytearray(sample)
check(x, size(vh + 'iPP') + x.__alloc__() * self.c)
# bytearray_iterator
check(iter(bytearray()), size(h + 'PP'))
# cell
def get_cell():
x = 42
def inner():
return x
return inner
check(get_cell().__closure__[0], size(h + 'P'))
# code
check(get_cell().__code__, size(h + '5i8Pi3P'))
# complex
check(complex(0,1), size(h + '2d'))
# method_descriptor (descriptor object)
check(str.lower, size(h + '2PP'))
# classmethod_descriptor (descriptor object)
# XXX
# member_descriptor (descriptor object)
import datetime
check(datetime.timedelta.days, size(h + '2PP'))
# getset_descriptor (descriptor object)
import collections
check(collections.defaultdict.default_factory, size(h + '2PP'))
# wrapper_descriptor (descriptor object)
check(int.__add__, size(h + '2P2P'))
# method-wrapper (descriptor object)
check({}.__iter__, size(h + '2P'))
# dict
check({}, size(h + '3P2P' + 8*'P2P'))
longdict = {1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8}
check(longdict, size(h + '3P2P' + 8*'P2P') + 16*size('P2P'))
# dictionary-keyiterator
check({}.keys(), size(h + 'P'))
# dictionary-valueiterator
check({}.values(), size(h + 'P'))
# dictionary-itemiterator
check({}.items(), size(h + 'P'))
# dictproxy
class C(object): pass
check(C.__dict__, size(h + 'P'))
# BaseException
check(BaseException(), size(h + '5P'))
# UnicodeEncodeError
check(UnicodeEncodeError("", "", 0, 0, ""), size(h + '5P 2P2PP'))
# UnicodeDecodeError
# XXX
# check(UnicodeDecodeError("", "", 0, 0, ""), size(h + '5P2PP'))
# UnicodeTranslateError
check(UnicodeTranslateError("", 0, 1, ""), size(h + '5P 2P2PP'))
# ellipses
check(Ellipsis, size(h + ''))
# EncodingMap
import codecs, encodings.iso8859_3
x = codecs.charmap_build(encodings.iso8859_3.decoding_table)
check(x, size(h + '32B2iB'))
# enumerate
check(enumerate([]), size(h + 'l3P'))
# reverse
check(reversed(''), size(h + 'PP'))
# float
check(float(0), size(h + 'd'))
# sys.floatinfo
check(sys.float_info, size(vh) + self.P * len(sys.float_info))
# frame
import inspect
CO_MAXBLOCKS = 20
x = inspect.currentframe()
ncells = len(x.f_code.co_cellvars)
nfrees = len(x.f_code.co_freevars)
extras = x.f_code.co_stacksize + x.f_code.co_nlocals +\
ncells + nfrees - 1
check(x, size(vh + '12P3i' + CO_MAXBLOCKS*'3i' + 'P' + extras*'P'))
# function
def func(): pass
check(func, size(h + '11P'))
class c():
@staticmethod
def foo():
pass
@classmethod
def bar(cls):
pass
# staticmethod
check(foo, size(h + 'P'))
# classmethod
check(bar, size(h + 'P'))
# generator
def get_gen(): yield 1
check(get_gen(), size(h + 'Pi2P'))
# iterator
check(iter('abc'), size(h + 'lP'))
# callable-iterator
import re
check(re.finditer('',''), size(h + '2P'))
# list
samples = [[], [1,2,3], ['1', '2', '3']]
for sample in samples:
check(sample, size(vh + 'PP') + len(sample)*self.P)
# sortwrapper (list)
# XXX
# cmpwrapper (list)
# XXX
# listiterator (list)
check(iter([]), size(h + 'lP'))
# listreverseiterator (list)
check(reversed([]), size(h + 'lP'))
# long
check(0, size(vh))
check(1, size(vh) + self.longdigit)
check(-1, size(vh) + self.longdigit)
PyLong_BASE = 2**sys.int_info.bits_per_digit
check(int(PyLong_BASE), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2-1), size(vh) + 2*self.longdigit)
check(int(PyLong_BASE**2), size(vh) + 3*self.longdigit)
# memory
check(memoryview(b''), size(h + 'PP2P2i7P'))
# module
check(unittest, size(h + '3P'))
# None
check(None, size(h + ''))
# NotImplementedType
check(NotImplemented, size(h))
# object
check(object(), size(h + ''))
# property (descriptor object)
class C(object):
def getx(self): return self.__x
def setx(self, value): self.__x = value
def delx(self): del self.__x
x = property(getx, setx, delx, "")
check(x, size(h + '4Pi'))
# PyCapsule
# XXX
# rangeiterator
check(iter(range(1)), size(h + '4l'))
# reverse
check(reversed(''), size(h + 'PP'))
# range
check(range(1), size(h + '4P'))
check(range(66000), size(h + '4P'))
# set
# frozenset
PySet_MINSIZE = 8
samples = [[], range(10), range(50)]
s = size(h + '3P2P' + PySet_MINSIZE*'lP' + 'lP')
for sample in samples:
minused = len(sample)
if minused == 0: tmp = 1
# the computation of minused is actually a bit more complicated
# but this suffices for the sizeof test
minused = minused*2
newsize = PySet_MINSIZE
while newsize <= minused:
newsize = newsize << 1
if newsize <= 8:
check(set(sample), s)
check(frozenset(sample), s)
else:
check(set(sample), s + newsize*struct.calcsize('lP'))
check(frozenset(sample), s + newsize*struct.calcsize('lP'))
# setiterator
check(iter(set()), size(h + 'P3P'))
# slice
check(slice(0), size(h + '3P'))
# super
check(super(int), size(h + '3P'))
# tuple
check((), size(vh))
check((1,2,3), size(vh) + 3*self.P)
# type
# (PyTypeObject + PyNumberMethods + PyMappingMethods +
# PySequenceMethods + PyBufferProcs)
s = size(vh + 'P2P15Pl4PP9PP11PI') + size('16Pi17P 3P 10P 2P 2P')
check(int, s)
# class
class newstyleclass(object): pass
check(newstyleclass, s)
# unicode
usize = len('\0'.encode('unicode-internal'))
samples = ['', '1'*100]
# we need to test for both sizes, because we don't know if the string
# has been cached
for s in samples:
basicsize = size(h + 'PPPiP') + usize * (len(s) + 1)
check(s, basicsize)
# weakref
import weakref
check(weakref.ref(int), size(h + '2Pl2P'))
# weakproxy
# XXX
# weakcallableproxy
check(weakref.proxy(int), size(h + '2Pl2P'))
def test_pythontypes(self):
    # check all types defined in Python/
    h = self.header
    vh = self.vheader
    size = self.calcsize
    check = self.check_sizeof
    # _ast.AST
    import _ast
    check(_ast.AST(), size(h + ''))
    # imp.NullImporter
    import imp
    check(imp.NullImporter(self.file.name), size(h + ''))
    # A live traceback object is only obtainable inside an except block.
    try:
        raise TypeError
    except TypeError:
        tb = sys.exc_info()[2]
        # traceback
        if tb != None:
            check(tb, size(h + '2P2i'))
    # symtable entry
    # XXX
    # sys.flags
    check(sys.flags, size(vh) + self.P * len(sys.flags))
def test_main():
    # Entry point used by regrtest and by direct execution below.
    test.support.run_unittest(SysModuleTest, SizeofTest)

if __name__ == "__main__":
    test_main()
| apache-2.0 |
pchauncey/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_vgw.py | 29 | 20182 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: ec2_vpc_vgw
short_description: Create and delete AWS VPN Virtual Gateways.
description:
- Creates AWS VPN Virtual Gateways
- Deletes AWS VPN Virtual Gateways
- Attaches Virtual Gateways to VPCs
- Detaches Virtual Gateways from VPCs
version_added: "2.2"
requirements: [ boto3 ]
options:
state:
description:
- present to ensure resource is created.
- absent to remove resource
required: false
default: present
choices: [ "present", "absent"]
name:
description:
- name of the vgw to be created or deleted
required: false
type:
description:
- type of the virtual gateway to be created
required: false
choices: [ "ipsec.1" ]
vpn_gateway_id:
description:
- vpn gateway id of an existing virtual gateway
required: false
vpc_id:
description:
- the vpc-id of a vpc to attach or detach
required: false
wait_timeout:
description:
- number of seconds to wait for status during vpc attach and detach
required: false
default: 320
tags:
description:
- dictionary of resource tags
required: false
default: null
aliases: [ "resource_tags" ]
author: Nick Aslanidis (@naslanidis)
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: Create a new vgw attached to a specific VPC
ec2_vpc_vgw:
state: present
region: ap-southeast-2
profile: personal
vpc_id: vpc-12345678
name: personal-testing
type: ipsec.1
register: created_vgw
- name: Create a new unattached vgw
ec2_vpc_vgw:
state: present
region: ap-southeast-2
profile: personal
name: personal-testing
type: ipsec.1
tags:
environment: production
owner: ABC
register: created_vgw
- name: Remove a new vgw using the name
ec2_vpc_vgw:
state: absent
region: ap-southeast-2
profile: personal
name: personal-testing
type: ipsec.1
register: deleted_vgw
- name: Remove a new vgw using the vpn_gateway_id
ec2_vpc_vgw:
state: absent
region: ap-southeast-2
profile: personal
vpn_gateway_id: vgw-3a9aa123
register: deleted_vgw
'''
RETURN = '''
result:
description: The result of the create, or delete action.
returned: success
type: dictionary
'''
import time
import traceback
try:
import botocore
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO3, boto3_conn, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils._text import to_native
def get_vgw_info(vgws):
    """Flatten a boto3 VPN-gateway description into the module's result dict.

    :param vgws: list of VpnGateway dicts as returned by
        ``describe_vpn_gateways``; callers pass a single-element list, and
        when more are supplied the last one wins (unchanged behavior).
    :return: dict with ``id``/``type``/``state``/``vpc_id``/``tags`` keys,
        or None when *vgws* is not a list or is empty.
    """
    if not isinstance(vgws, list):
        return

    # Guard: the original raised UnboundLocalError on an empty list.
    vgw_info = None
    for vgw in vgws:
        vgw_info = {
            'id': vgw['VpnGatewayId'],
            'type': vgw['Type'],
            'state': vgw['State'],
            'vpc_id': None,
            'tags': dict()
        }

        for tag in vgw['Tags']:
            vgw_info['tags'][tag['Key']] = tag['Value']

        # Record the attached VPC only when the attachment is live.
        if len(vgw['VpcAttachments']) != 0 and vgw['VpcAttachments'][0]['State'] == 'attached':
            vgw_info['vpc_id'] = vgw['VpcAttachments'][0]['VpcId']

    return vgw_info
def wait_for_status(client, module, vpn_gateway_id, status):
    """Poll the gateway until its first VPC attachment reaches *status*.

    :param vpn_gateway_id: single-element list holding the gateway id
        (passed through to find_vgw).
    :param status: target attachment state, e.g. 'attached'/'detached'.
    :return: (status_achieved, last_describe_response) tuple.
    """
    polling_increment_secs = 15
    # wait_timeout is divided into 15-second polls.
    max_retries = (module.params.get('wait_timeout') // polling_increment_secs)
    status_achieved = False

    for x in range(0, max_retries):
        try:
            response = find_vgw(client, module, vpn_gateway_id)
            if response[0]['VpcAttachments'][0]['State'] == status:
                status_achieved = True
                break
            else:
                time.sleep(polling_increment_secs)
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    # NOTE(review): if wait_timeout < 15 the loop body never runs and
    # 'response' is unbound here — confirm callers always pass >= 15.
    result = response
    return status_achieved, result
def attach_vgw(client, module, vpn_gateway_id):
    """Attach the gateway to the module's vpc_id and wait for 'attached'.

    Fails the module on API error or wait timeout; returns the raw
    attach_vpn_gateway response.
    """
    params = dict()
    params['VpcId'] = module.params.get('vpc_id')

    try:
        response = client.attach_vpn_gateway(VpnGatewayId=vpn_gateway_id, VpcId=params['VpcId'])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    # Block until AWS reports the attachment as complete.
    status_achieved, vgw = wait_for_status(client, module, [vpn_gateway_id], 'attached')
    if not status_achieved:
        module.fail_json(msg='Error waiting for vpc to attach to vgw - please check the AWS console')

    result = response
    return result
def detach_vgw(client, module, vpn_gateway_id, vpc_id=None):
    """Detach a VPC from the gateway and wait until it reports 'detached'.

    Uses *vpc_id* when given, otherwise falls back to the module's vpc_id
    parameter. Fails the module on API error or wait timeout; returns the
    raw detach_vpn_gateway response.
    """
    target_vpc = vpc_id if vpc_id else module.params.get('vpc_id')

    try:
        response = client.detach_vpn_gateway(VpnGatewayId=vpn_gateway_id, VpcId=target_vpc)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    status_achieved, vgw = wait_for_status(client, module, [vpn_gateway_id], 'detached')
    if not status_achieved:
        module.fail_json(msg='Error waiting for vpc to detach from vgw - please check the AWS console')

    return response
def create_vgw(client, module):
    """Create a new virtual gateway of the module's 'type' parameter.

    Returns the raw create_vpn_gateway response; fails the module on error.
    """
    params = dict()
    params['Type'] = module.params.get('type')

    try:
        response = client.create_vpn_gateway(Type=params['Type'])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    result = response
    return result
def delete_vgw(client, module, vpn_gateway_id):
    """Delete the gateway; fails the module on API error.

    Returns *vpn_gateway_id* itself, since the delete response omits it.
    """
    try:
        response = client.delete_vpn_gateway(VpnGatewayId=vpn_gateway_id)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    # return the deleted VpnGatewayId as this is not included in the above response
    result = vpn_gateway_id
    return result
def create_tags(client, module, vpn_gateway_id):
    """Apply the module's tags (always including a Name tag) to the gateway.

    Returns the raw create_tags response; fails the module on API error.
    """
    # (removed an unused local 'params' dict from the original)
    try:
        response = client.create_tags(Resources=[vpn_gateway_id], Tags=load_tags(module))
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    return response
def delete_tags(client, module, vpn_gateway_id, tags_to_delete=None):
    """Delete tags from the gateway.

    When *tags_to_delete* is given, only those tags are removed; otherwise
    every tag on the resource is removed. Returns the raw delete_tags
    response; fails the module on API error.
    """
    call_kwargs = {'Resources': [vpn_gateway_id]}
    if tags_to_delete:
        call_kwargs['Tags'] = tags_to_delete

    try:
        response = client.delete_tags(**call_kwargs)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    return response
def load_tags(module):
    """Build the EC2 Tags list from the module's 'tags' parameter.

    A 'Name' tag carrying the module's 'name' parameter is always appended
    last, matching the original ordering. Tag values are stringified.
    """
    user_tags = module.params.get('tags') or {}
    tags = [{'Key': name, 'Value': str(value)} for name, value in user_tags.items()]
    tags.append({'Key': "Name", 'Value': module.params.get('name')})
    return tags
def find_tags(client, module, resource_id=None):
    """Describe all tags on *resource_id*.

    Returns the raw describe_tags response, or None when no resource_id
    is supplied (the function silently does nothing in that case).
    """
    if resource_id:
        try:
            response = client.describe_tags(Filters=[
                {'Name': 'resource-id', 'Values': [resource_id]}
            ])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg=to_native(e), exception=traceback.format_exc())

        result = response
        return result
def check_tags(client, module, existing_vgw, vpn_gateway_id):
    """Reconcile the gateway's tags with the module's 'tags' parameter.

    The 'Name' tag is managed separately and always preserved. Returns
    (vgw_description, changed).
    """
    params = dict()
    params['Tags'] = module.params.get('tags')
    vgw = existing_vgw
    changed = False
    tags_list = {}

    # format tags for comparison (Name is excluded from the diff)
    for tags in existing_vgw[0]['Tags']:
        if tags['Key'] != 'Name':
            tags_list[tags['Key']] = tags['Value']

    # if existing tags don't match the tags arg, delete existing and recreate with new list
    if params['Tags'] is not None and tags_list != params['Tags']:
        delete_tags(client, module, vpn_gateway_id)
        create_tags(client, module, vpn_gateway_id)
        vgw = find_vgw(client, module)
        changed = True

    # if no tag args are supplied, delete any existing tags with the exception of the name tag
    if params['Tags'] is None and tags_list != {}:
        tags_to_delete = []
        for tags in existing_vgw[0]['Tags']:
            if tags['Key'] != 'Name':
                tags_to_delete.append(tags)

        delete_tags(client, module, vpn_gateway_id, tags_to_delete)
        vgw = find_vgw(client, module)
        changed = True

    return vgw, changed
def find_vpc(client, module):
    """Describe the module's vpc_id to verify it exists.

    A missing VPC surfaces as a ClientError, which fails the module.
    Returns the raw describe_vpcs response, or None when no vpc_id is set.
    """
    params = dict()
    params['vpc_id'] = module.params.get('vpc_id')

    if params['vpc_id']:
        try:
            response = client.describe_vpcs(VpcIds=[params['vpc_id']])
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg=to_native(e), exception=traceback.format_exc())

        result = response
        return result
def find_vgw(client, module, vpn_gateway_id=None):
    """Locate virtual gateways matching the module arguments.

    When state is not 'present' and an explicit *vpn_gateway_id* list is
    supplied, look it up directly; in every other case filter by the
    module's type and Name tag. Returns the (possibly empty) list of
    VpnGateway dicts.
    """
    gw_name = module.params.get('name')
    gw_type = module.params.get('type')
    gw_state = module.params.get('state')

    by_filter = [
        {'Name': 'type', 'Values': [gw_type]},
        {'Name': 'tag:Name', 'Values': [gw_name]}
    ]

    try:
        if gw_state != 'present' and vpn_gateway_id:
            response = client.describe_vpn_gateways(VpnGatewayIds=vpn_gateway_id)
        else:
            response = client.describe_vpn_gateways(Filters=by_filter)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())

    return response['VpnGateways']
def ensure_vgw_present(client, module):
    """Create/attach the gateway described by the module args (state=present).

    Matching is by name+type. If a live gateway already exists its tags are
    reconciled and its VPC attachment is converged to 'vpc_id' (attaching,
    re-attaching, or detaching as needed); otherwise a new gateway is
    created, tagged, and optionally attached. Returns (changed, info_dict).
    """
    # If an existing vgw name and type matches our args, then a match is considered to have been
    # found and we will not create another vgw.

    changed = False
    params = dict()
    result = dict()
    params['Name'] = module.params.get('name')
    params['VpcId'] = module.params.get('vpc_id')
    params['Type'] = module.params.get('type')
    params['Tags'] = module.params.get('tags')
    params['VpnGatewayIds'] = module.params.get('vpn_gateway_id')

    # Check that a name argument has been supplied.
    if not module.params.get('name'):
        module.fail_json(msg='A name is required when a status of \'present\' is suppled')

    # check if a gateway matching our module args already exists
    existing_vgw = find_vgw(client, module)

    if existing_vgw != [] and existing_vgw[0]['State'] != 'deleted':
        vpn_gateway_id = existing_vgw[0]['VpnGatewayId']
        vgw, changed = check_tags(client, module, existing_vgw, vpn_gateway_id)

        # if a vpc_id was provided, check if it exists and if it's attached
        if params['VpcId']:

            # check that the vpc_id exists. If not, an exception is thrown
            vpc = find_vpc(client, module)
            current_vpc_attachments = existing_vgw[0]['VpcAttachments']

            if current_vpc_attachments != [] and current_vpc_attachments[0]['State'] == 'attached':
                if current_vpc_attachments[0]['VpcId'] == params['VpcId'] and current_vpc_attachments[0]['State'] == 'attached':
                    # already attached to the requested VPC — nothing to do
                    changed = False
                else:
                    # detach the existing vpc from the virtual gateway
                    vpc_to_detach = current_vpc_attachments[0]['VpcId']
                    detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
                    # brief pause so AWS settles before re-attaching
                    time.sleep(5)
                    attached_vgw = attach_vgw(client, module, vpn_gateway_id)
                    vgw = find_vgw(client, module, [vpn_gateway_id])
                    changed = True
            else:
                # attach the vgw to the supplied vpc
                attached_vgw = attach_vgw(client, module, vpn_gateway_id)
                vgw = find_vgw(client, module, [vpn_gateway_id])
                changed = True

        # if params['VpcId'] is not provided, check the vgw is attached to a vpc. if so, detach it.
        else:
            existing_vgw = find_vgw(client, module, [vpn_gateway_id])

            if existing_vgw[0]['VpcAttachments'] != []:
                if existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
                    # detach the vpc from the vgw
                    vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
                    detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
                    changed = True

            vgw = find_vgw(client, module, [vpn_gateway_id])

    else:
        # create a new vgw
        new_vgw = create_vgw(client, module)
        changed = True
        vpn_gateway_id = new_vgw['VpnGateway']['VpnGatewayId']

        # tag the new virtual gateway
        create_tags(client, module, vpn_gateway_id)

        # return current state of the vgw
        vgw = find_vgw(client, module, [vpn_gateway_id])

        # if a vpc-id was supplied, attempt to attach it to the vgw
        if params['VpcId']:
            attached_vgw = attach_vgw(client, module, vpn_gateway_id)
            changed = True
            vgw = find_vgw(client, module, [vpn_gateway_id])

    result = get_vgw_info(vgw)
    return changed, result
def ensure_vgw_absent(client, module):
    """Delete the gateway described by the module args (state=absent).

    The gateway is located either by explicit vpn_gateway_id or by
    name+type. Any VPC attachment is detached first (failing if a supplied
    vpc_id does not match the attached one). Returns (changed, result)
    where result is the deleted gateway id, a no-op marker, or None.
    """
    # If an existing vgw name and type matches our args, then a match is considered to have been
    # found and we will take steps to delete it.

    changed = False
    params = dict()
    result = dict()
    params['Name'] = module.params.get('name')
    params['VpcId'] = module.params.get('vpc_id')
    params['Type'] = module.params.get('type')
    params['Tags'] = module.params.get('tags')
    params['VpnGatewayIds'] = module.params.get('vpn_gateway_id')

    # check if a gateway matching our module args already exists
    if params['VpnGatewayIds']:
        existing_vgw_with_id = find_vgw(client, module, [params['VpnGatewayIds']])
        if existing_vgw_with_id != [] and existing_vgw_with_id[0]['State'] != 'deleted':
            existing_vgw = existing_vgw_with_id
            if existing_vgw[0]['VpcAttachments'] != [] and existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
                if params['VpcId']:
                    if params['VpcId'] != existing_vgw[0]['VpcAttachments'][0]['VpcId']:
                        module.fail_json(msg='The vpc-id provided does not match the vpc-id currently attached - please check the AWS console')

                    else:
                        # detach the vpc from the vgw
                        detach_vgw(client, module, params['VpnGatewayIds'], params['VpcId'])
                        deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
                        changed = True

                else:
                    # attempt to detach any attached vpcs
                    vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
                    detach_vgw(client, module, params['VpnGatewayIds'], vpc_to_detach)
                    deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
                    changed = True

            else:
                # no vpc's are attached so attempt to delete the vgw
                deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
                changed = True

        else:
            changed = False
            deleted_vgw = "Nothing to do"

    else:
        # Check that a name and type argument has been supplied if no vgw-id
        if not module.params.get('name') or not module.params.get('type'):
            module.fail_json(msg='A name and type is required when no vgw-id and a status of \'absent\' is suppled')

        existing_vgw = find_vgw(client, module)
        if existing_vgw != [] and existing_vgw[0]['State'] != 'deleted':
            vpn_gateway_id = existing_vgw[0]['VpnGatewayId']
            if existing_vgw[0]['VpcAttachments'] != [] and existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
                if params['VpcId']:
                    if params['VpcId'] != existing_vgw[0]['VpcAttachments'][0]['VpcId']:
                        module.fail_json(msg='The vpc-id provided does not match the vpc-id currently attached - please check the AWS console')

                    else:
                        # detach the vpc from the vgw
                        detach_vgw(client, module, vpn_gateway_id, params['VpcId'])

                        # now that the vpc has been detached, delete the vgw
                        deleted_vgw = delete_vgw(client, module, vpn_gateway_id)
                        changed = True

                else:
                    # attempt to detach any attached vpcs
                    vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
                    detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
                    changed = True

                    # now that the vpc has been detached, delete the vgw
                    deleted_vgw = delete_vgw(client, module, vpn_gateway_id)

            else:
                # no vpc's are attached so attempt to delete the vgw
                deleted_vgw = delete_vgw(client, module, vpn_gateway_id)
                changed = True

        else:
            changed = False
            deleted_vgw = None

    result = deleted_vgw
    return changed, result
def main():
    """Ansible module entry point: parse args, connect, converge state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(default='present', choices=['present', 'absent']),
        region=dict(required=True),
        name=dict(),
        vpn_gateway_id=dict(),
        vpc_id=dict(),
        wait_timeout=dict(type='int', default=320),
        type=dict(default='ipsec.1', choices=['ipsec.1']),
        tags=dict(default=None, required=False, type='dict', aliases=['resource_tags']),
    )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO3:
        module.fail_json(msg='json and boto3 is required.')

    state = module.params.get('state').lower()

    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        client = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except botocore.exceptions.NoCredentialsError as e:
        module.fail_json(msg="Can't authorize connection - %s" % to_native(e), exception=traceback.format_exc())

    # Dispatch on the requested state.
    if state == 'present':
        (changed, results) = ensure_vgw_present(client, module)
    else:
        (changed, results) = ensure_vgw_absent(client, module)
    module.exit_json(changed=changed, vgw=results)


if __name__ == '__main__':
    main()
| gpl-3.0 |
fuesika/teispice | examples/tlinew.py | 2 | 1090 | from eispice import *
from numpy import matrix
# Bind the module name explicitly: `from eispice import *` above binds the
# package's public names but NOT the name `eispice` itself, so the
# `eispice.plot(...)` call below would otherwise raise NameError.
import eispice

# Coupled two-conductor line parameters (per-unit-length RLGC matrices).
R0 = [[0.861113, 0], [0, 0.861113]]
L0 = [[231.832e-9, 38.1483e-9], [38.1483e-9, 231.819e-9]]
G0 = [[0, 0], [0, 0]]
C0 = [[156.163e-12, -8.60102e-12], [-8.60102e-12, 156.193e-12]]
Rs = [[0.368757e-3, 0], [0, 0.368757e-3]]
Gd = [[0, 0], [0, 0]]

cct = Circuit("TlineW Test")

# Two pulse sources driving the coupled line through 1-ohm series resistors.
cct.Vs1 = device.V('vs1', GND, 1,
                   waveform.Pulse(1, 0, '5n', '1n', '1n', '5n', '10n'))
cct.Vs2 = device.V('vs2', GND, 1,
                   waveform.Pulse(1, 0, '5n', '1n', '1n', '5n', '10n'))
cct.Rs1 = device.R('vs1', 'vi1', 1)
cct.Rs2 = device.R('vs2', 'vi2', 1)
cct.Tg = device.W(('vi1', 'vi2'), GND, ('vo1', 'vo2'), GND,
                  0.0265, R0, L0, C0, G0, Rs, Gd, M=9)
cct.Rl1 = device.R('vo1', GND, '10k')
cct.Rl2 = device.R('vo2', GND, '10k')

#~ cct.Vs1 = device.V('vs1', GND, 1)
#~ cct.Vs2 = device.V('vs2', GND, 1)
#~ cct.Tg = device.W(('vs1','vs2'), GND, ('vo1','vo2'), GND,
#~ 0.0265, R0, L0, C0, G0, Rs, Gd, M=9)
#~ cct.Rl1 = device.R('vo1', GND, 0.022819)
#~ cct.Rl2 = device.R('vo2', GND, 0.022819)

cct.tran('0.5n', '2n')

eispice.plot(cct)

# Parenthesized print works under both Python 2 and Python 3 (the original
# `print x` statements were Python-2-only syntax).
print(cct.v['vo1'](0))
print(cct.v['vo2'](0))
Gogistics/prjGogistics | prjGogisticsWINEVER/src/bs4/tests/test_docs.py | 607 | 1067 | "Test harness for doctests."
# pylint: disable-msg=E0611,W0142
__metaclass__ = type
__all__ = [
'additional_tests',
]
import atexit
import doctest
import os
#from pkg_resources import (
# resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest
# Option flags applied to every doctest run by this harness: allow '...'
# ellipsis, ignore whitespace differences, and show ndiff-style failures.
DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)
# def additional_tests():
# "Run the doc tests (README.txt and docs/*, if any exist)"
# doctest_files = [
# os.path.abspath(resource_filename('bs4', 'README.txt'))]
# if resource_exists('bs4', 'docs'):
# for name in resource_listdir('bs4', 'docs'):
# if name.endswith('.txt'):
# doctest_files.append(
# os.path.abspath(
# resource_filename('bs4', 'docs/%s' % name)))
# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
# atexit.register(cleanup_resources)
# return unittest.TestSuite((
# doctest.DocFileSuite(*doctest_files, **kwargs)))
| mit |
jcrudy/clinvoc | clinvoc/hcpcs.py | 1 | 2356 | from .base import RegexVocabulary, left_pad, NoWildcardsVocabulary, NoRangeFillVocabulary, NoCheckVocabulary,\
ProcedureVocabulary, ModifierVocabulary
import re
from itertools import product
# Leading letters / digits / trailing letters of a full HCPCS-style code.
_hcpcs_split_regex = re.compile('^([A-Z]*)([0-9]+)([A-Z]*)$')


def hcpcs_split(code):
    """Split *code* into its (leading letters, digit string) parts.

    :raises ValueError: when *code* does not match the letters-digits-letters
        shape (the original raised an opaque AttributeError on ``None``).

    NOTE: any trailing letters (regex group 3, e.g. the 'F' in '1234F') are
    matched but discarded, preserving the original two-part return shape.
    """
    match = _hcpcs_split_regex.match(code)
    if match is None:
        raise ValueError('not a valid HCPCS code: %r' % (code,))
    letter_part = match.groups()[0]
    number_part = match.groups()[1]
    return letter_part, number_part
def hcpcs_join(letter_part, number_part):
    """Rebuild a 5-character HCPCS code from its letter prefix and digits.

    The numeric part is zero-padded so the total length is five characters.
    """
    pad_width = 5 - len(letter_part)
    padded_digits = str(int(number_part)).zfill(pad_width)
    return letter_part + padded_digits
class HCPCS(RegexVocabulary, NoCheckVocabulary, ProcedureVocabulary):
    """Vocabulary of HCPCS procedure codes with '*' wildcard and range
    expansion support."""
    vocab_name = 'HCPCS'

    def __init__(self):
        # Accepts full 5-char codes (optional leading category letter,
        # optional trailing F/M/T/U letter), '*' wildcards in any position,
        # and short all-digit forms that _standardize left-pads to 5 chars.
        RegexVocabulary.__init__(self, '([\*ABCDEGHJKLMPQRSTVX\d][\d\*]{3}[FMTU\d\*])|([\d\*]{1,4}[FMTU\d\*])|([\d\*]{1,5})', ignore_case=True)

    def _fill_range(self, lower, upper):
        """Expand an inclusive code range; both endpoints must share the
        same leading and trailing letter affixes."""
        lower_start_letter, lower_number, lower_end_letter = _hcpcs_split_regex.match(lower).groups()
        upper_start_letter, upper_number, upper_end_letter = _hcpcs_split_regex.match(upper).groups()
        assert lower_start_letter == upper_start_letter
        assert lower_end_letter == upper_end_letter
        result = []
        for num in range(int(lower_number), int(upper_number) + 1):
            # digits available after the letter affixes
            n = 5 - len(lower_start_letter) - len(lower_end_letter)
            result.append(lower_start_letter + left_pad(str(num), n) + lower_end_letter)
        return result

    # Allowed characters for each of the five code positions, consulted
    # when expanding a '*' wildcard at that position.
    _places = ['ABCDEGHJKLMPQRSTVX0123456789'] + \
        3 * ['0123456789'] + \
        ['FMTU0123456789']

    def _match_pattern(self, pattern):
        """Expand '*' wildcards via a cartesian product over _places."""
        options = []
        for i, item in enumerate(pattern):
            if item == '*':
                options.append(self._places[i])
            else:
                options.append([item])
        return map(''.join, product(*options))

    def _standardize(self, code):
        # Canonical form: upper-case, zero-left-padded to 5 characters.
        return left_pad(code.strip().upper(), 5)
class HCPCSModifier(RegexVocabulary, NoWildcardsVocabulary, NoRangeFillVocabulary, NoCheckVocabulary, ModifierVocabulary):
    """Vocabulary of 2-character HCPCS modifier codes (no wildcards or
    ranges)."""
    vocab_name = 'HCPCSMOD'

    def __init__(self):
        # Exactly two alphanumeric characters.
        RegexVocabulary.__init__(self, '[A-Za-z\d]{2}')

    def _standardize(self, code):
        result = code.strip().upper()
        # NOTE(review): this assert is stripped under `python -O`; length
        # is otherwise guaranteed by the regex for vocabulary-parsed input.
        assert len(result) == 2
        return result
| mit |
webgeodatavore/django | django/db/backends/mysql/operations.py | 45 | 8598 | from __future__ import unicode_literals
import uuid
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import six, timezone
from django.utils.encoding import force_text
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = "django.db.backends.mysql.compiler"
# MySQL stores positive fields as UNSIGNED ints.
integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges,
PositiveSmallIntegerField=(0, 4294967295),
PositiveIntegerField=(0, 18446744073709551615),
)
def date_extract_sql(self, lookup_type, field_name):
    """Return MySQL SQL extracting *lookup_type* from a date column."""
    # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
    if lookup_type != 'week_day':
        return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
    # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
    # Note: WEEKDAY() returns 0-6, Monday=0.
    return "DAYOFWEEK(%s)" % field_name
def date_trunc_sql(self, lookup_type, field_name):
    """Truncate a date column to *lookup_type* precision via DATE_FORMAT.

    Unknown lookup types return the column untouched.
    """
    fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
    format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
    format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
    if lookup_type not in fields:
        return field_name
    # Keep the leading pieces literal-formatted, pad the rest with defaults.
    i = fields.index(lookup_type) + 1
    format_str = ''.join(format[:i] + format_def[i:])
    return "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
def _convert_field_to_tz(self, field_name, tzname):
    """Return (sql, params) converting *field_name* from UTC to *tzname*.

    Under USE_TZ datetimes are stored in UTC, so they are wrapped in
    CONVERT_TZ before extraction/truncation; otherwise no conversion.
    """
    if settings.USE_TZ:
        field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
        params = [tzname]
    else:
        params = []
    return field_name, params
def datetime_cast_date_sql(self, field_name, tzname):
    """Return (sql, params) casting a datetime column to a local date."""
    converted, params = self._convert_field_to_tz(field_name, tzname)
    return "DATE(%s)" % converted, params
def datetime_extract_sql(self, lookup_type, field_name, tzname):
    """Return (sql, params) extracting *lookup_type* from a datetime column,
    converting to the local timezone first."""
    field_name, params = self._convert_field_to_tz(field_name, tzname)
    sql = self.date_extract_sql(lookup_type, field_name)
    return sql, params
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
    """Return (sql, params) truncating a timezone-converted datetime column
    to *lookup_type* precision; unknown types pass the column through."""
    field_name, params = self._convert_field_to_tz(field_name, tzname)
    fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
    format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
    format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
    if lookup_type in fields:
        i = fields.index(lookup_type) + 1
        format_str = ''.join(format[:i] + format_def[i:])
        sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
    else:
        sql = field_name
    return sql, params
def date_interval_sql(self, timedelta):
    # Express a Python timedelta as a MySQL INTERVAL literal.
    # Returns (sql, params); params is always empty here.
    return "INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND" % (
        timedelta.days, timedelta.seconds, timedelta.microseconds), []
def format_for_duration_arithmetic(self, sql):
    # Wrap a microseconds expression so it can be added to a datetime.
    if self.connection.features.supports_microsecond_precision:
        return 'INTERVAL %s MICROSECOND' % sql
    else:
        # Older MySQL has no sub-second precision: round down to seconds.
        return 'INTERVAL FLOOR(%s / 1000000) SECOND' % sql
def drop_foreignkey_sql(self):
    """Return the SQL fragment used to drop a foreign key constraint."""
    return "DROP FOREIGN KEY"
def force_no_ordering(self):
    """
    "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
    columns. If no ordering would otherwise be applied, we don't want any
    implicit sorting going on.
    """
    null_ordering = (None, ("NULL", [], False))
    return [null_ordering]
def fulltext_search_sql(self, field_name):
    """Return a MATCH ... AGAINST clause for a full-text search lookup."""
    # %%s survives this interpolation as the query-parameter placeholder.
    return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
def last_executed_query(self, cursor, sql, params):
    # With MySQLdb, cursor objects have an (undocumented) "_last_executed"
    # attribute where the exact query sent to the database is saved.
    # See MySQLdb/cursors.py in the source distribution.
    # The sql/params arguments are ignored here; errors='replace' guards
    # against byte sequences that cannot be decoded.
    return force_text(getattr(cursor, '_last_executed', None), errors='replace')
def no_limit_value(self):
    """Value used for LIMIT when only an OFFSET is wanted (no real limit)."""
    # 2**64 - 1 (largest BIGINT UNSIGNED), as recommended by the MySQL docs.
    return 2 ** 64 - 1
def quote_name(self, name):
    """Backtick-quote *name* unless it is already quoted."""
    already_quoted = name.startswith("`") and name.endswith("`")
    if already_quoted:
        # Quoting once is enough.
        return name
    return "`%s`" % name
def random_function_sql(self):
    """SQL function used for random ordering."""
    return 'RAND()'
def sql_flush(self, style, tables, sequences, allow_cascade=False):
    # NB: The generated SQL below is specific to MySQL
    # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
    # to clear all tables of all data
    # Foreign key checks are disabled around the TRUNCATEs so table order
    # does not matter; allow_cascade is effectively subsumed by that.
    if tables:
        sql = ['SET FOREIGN_KEY_CHECKS = 0;']
        for table in tables:
            sql.append('%s %s;' % (
                style.SQL_KEYWORD('TRUNCATE'),
                style.SQL_FIELD(self.quote_name(table)),
            ))
        sql.append('SET FOREIGN_KEY_CHECKS = 1;')
        # reset any AUTO_INCREMENT sequences as well
        sql.extend(self.sequence_reset_by_name_sql(style, sequences))
        return sql
    else:
        return []
def validate_autopk_value(self, value):
    """Reject values MySQL cannot store in an AUTO_INCREMENT column."""
    # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
    if value != 0:
        return value
    raise ValueError('The database backend does not accept 0 as a '
                     'value for AutoField.')
def adapt_datetimefield_value(self, value):
    # Transform a datetime into the string form MySQL expects.
    if value is None:
        return None
    # MySQL doesn't support tz-aware datetimes
    if timezone.is_aware(value):
        if settings.USE_TZ:
            # normalize to the connection's time zone, then strip tzinfo
            value = timezone.make_naive(value, self.connection.timezone)
        else:
            raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
    if not self.connection.features.supports_microsecond_precision:
        # Older MySQL silently discards fractional seconds; drop them
        # explicitly so round-tripping is consistent.
        value = value.replace(microsecond=0)
    return six.text_type(value)
def adapt_timefield_value(self, value):
    # Transform a time into the string form MySQL expects.
    if value is None:
        return None
    # MySQL doesn't support tz-aware times
    if timezone.is_aware(value):
        raise ValueError("MySQL backend does not support timezone-aware times.")
    return six.text_type(value)
def max_name_length(self):
    """MySQL limits identifiers (table/column names) to 64 characters."""
    return 64
def bulk_insert_sql(self, fields, placeholder_rows):
    """Build the VALUES clause for a multi-row INSERT."""
    rows = ["(%s)" % ", ".join(row) for row in placeholder_rows]
    return "VALUES " + ", ".join(rows)
def combine_expression(self, connector, sub_expressions):
    """
    MySQL requires special cases for ^ operators in query expressions
    """
    # MySQL's ^ is bitwise XOR, not exponentiation, so emit POW() instead.
    if connector == '^':
        return 'POW(%s)' % ','.join(sub_expressions)
    return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
def get_db_converters(self, expression):
    # Extend the default converter chain with MySQL-specific converters,
    # chosen by the internal type of the expression's output field.
    converters = super(DatabaseOperations, self).get_db_converters(expression)
    internal_type = expression.output_field.get_internal_type()
    if internal_type == 'TextField':
        converters.append(self.convert_textfield_value)
    elif internal_type in ['BooleanField', 'NullBooleanField']:
        converters.append(self.convert_booleanfield_value)
    elif internal_type == 'DateTimeField':
        converters.append(self.convert_datetimefield_value)
    elif internal_type == 'UUIDField':
        converters.append(self.convert_uuidfield_value)
    return converters
def convert_textfield_value(self, value, expression, connection, context):
    # MySQL may hand text columns back as bytes; decode them to text.
    if value is not None:
        value = force_text(value)
    return value
def convert_booleanfield_value(self, value, expression, connection, context):
    """Map MySQL's 0/1 integers back to Python booleans.

    Any other value (including None) passes through untouched.
    """
    if value not in (0, 1):
        return value
    return bool(value)
def convert_datetimefield_value(self, value, expression, connection, context):
    # Datetimes come back naive from MySQL; attach the connection's
    # time zone when time-zone support is enabled.
    if value is not None:
        if settings.USE_TZ:
            value = timezone.make_aware(value, self.connection.timezone)
    return value
def convert_uuidfield_value(self, value, expression, connection, context):
    """Rehydrate a database string into a uuid.UUID (None passes through)."""
    if value is None:
        return None
    return uuid.UUID(value)
| bsd-3-clause |
napkindrawing/ansible | lib/ansible/parsing/dataloader.py | 7 | 18437 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import json
import tempfile
from yaml import YAMLError
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
from ansible.module_utils.basic import is_executable
from ansible.module_utils.six import text_type, string_types
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.vault import VaultLib, b_HEADER, is_encrypted, is_encrypted_file
from ansible.parsing.quoting import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class DataLoader:
'''
The DataLoader class is used to load and parse YAML or JSON content,
either from a given file name or from a string that was previously
read in through other means. A Vault password can be specified, and
any vault-encrypted files will be decrypted.
Data read from files will also be cached, so the file will never be
read from disk more than once.
Usage:
dl = DataLoader()
# optionally: dl.set_vault_password('foo')
ds = dl.load('...')
ds = dl.load_from_file('/path/to/file')
'''
    def __init__(self):
        # Relative paths are resolved against this directory (see path_dwim).
        self._basedir = '.'
        # Cache of parsed file contents, keyed by resolved file name.
        self._FILE_CACHE = dict()
        # Paths of decrypted temp files created by get_real_file().
        self._tempfiles = set()
        # initialize the vault stuff with an empty password
        self.set_vault_password(None)
    def set_vault_password(self, b_vault_password):
        # Store the (bytes) vault password and build a matching VaultLib.
        self._b_vault_password = b_vault_password
        self._vault = VaultLib(b_password=b_vault_password)
def load(self, data, file_name='<string>', show_content=True):
'''
Creates a python datastructure from the given data, which can be either
a JSON or YAML string.
'''
new_data = None
# YAML parser will take JSON as it is a subset.
if isinstance(data, AnsibleUnicode):
# The PyYAML's libyaml bindings use PyUnicode_CheckExact so
# they are unable to cope with our subclass.
# Unwrap and re-wrap the unicode so we can keep track of line
# numbers
in_data = text_type(data)
else:
in_data = data
try:
# we first try to load this data as JSON
new_data = json.loads(data)
except:
# must not be JSON, let the rest try
if isinstance(data, AnsibleUnicode):
# The PyYAML's libyaml bindings use PyUnicode_CheckExact so
# they are unable to cope with our subclass.
# Unwrap and re-wrap the unicode so we can keep track of line
# numbers
in_data = text_type(data)
else:
in_data = data
try:
new_data = self._safe_load(in_data, file_name=file_name)
except YAMLError as yaml_exc:
self._handle_error(yaml_exc, file_name, show_content)
if isinstance(data, AnsibleUnicode):
new_data = AnsibleUnicode(new_data)
new_data.ansible_pos = data.ansible_pos
return new_data
    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML. '''
        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)
        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)
            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data
        if unsafe:
            # caller promises not to mutate the result: hand back the cached
            # object itself and skip the (potentially expensive) deep copy
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)
    def path_exists(self, path):
        # True if the dwim-resolved path exists on the filesystem.
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))
    def is_file(self, path):
        # True for regular files; os.devnull is special-cased as a "file".
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull
    def is_directory(self, path):
        # True if the dwim-resolved path is a directory.
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))
    def list_directory(self, path):
        # Directory entry names of the dwim-resolved path.
        path = self.path_dwim(path)
        return os.listdir(path)
    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)
    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''
        loader = AnsibleLoader(stream, file_name, self._b_vault_password)
        try:
            return loader.get_single_data()
        finally:
            # always release libyaml resources, even when parsing raised
            try:
                loader.dispose()
            except AttributeError:
                pass # older versions of yaml don't have dispose function, ignore
    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.

        Returns a (bytes, show_content) tuple; show_content is False for
        vault data so callers avoid printing decrypted secrets in errors.
        '''
        if not file_name or not isinstance(file_name, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))
        b_file_name = to_bytes(file_name)
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("the file named '%s' does not exist, or is not readable" % file_name)
        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if is_encrypted(data):
                    # decrypt in place and mark the content as sensitive
                    data = self._vault.decrypt(data, filename=b_file_name)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)), orig_exc=e)
    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''
        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            # problem_mark positions are 0-based; convert to 1-based for display
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)
        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content, orig_exc=yaml_exc)
    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir
    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''
        # None is ignored so callers can pass through an unset value safely
        if basedir is not None:
            self._basedir = to_text(basedir)
    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''
        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')
        if given.startswith(u"/"):
            # already absolute
            return os.path.abspath(given)
        elif given.startswith(u"~"):
            # expand home-directory references
            return os.path.abspath(os.path.expanduser(given))
        else:
            # resolve relative to the loader's basedir
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            return os.path.abspath(os.path.join(basedir, given))
    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o main.yml/yaml/etc '''
        isit = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path), errors='surrogate_or_strict')
        # probe for tasks/main(.yml|.yaml|) in a few likely locations
        for suffix in (b'.yml', b'.yaml', b''):
            b_main = b'main%s' % (suffix)
            b_tasked = b'tasks/%s' % (b_main)
            # NOTE(review): `and` binds tighter than `or`, so the endswith()
            # guard applies only to the first exists() probe. That looks
            # intentional (the other probes use different base dirs), but
            # confirm before restructuring this condition.
            if (
                b_path.endswith(b'tasks') and
                os.path.exists(os.path.join(b_path, b_main)) or
                os.path.exists(os.path.join(b_upath, b_tasked)) or
                os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
            ):
                isit = True
                break
        return isit
    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''
        search = []
        # I have full path, nothing else needs to be looked at
        if source.startswith('~') or source.startswith(os.path.sep):
            search.append(self.path_dwim(source))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path)
            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)
            if is_role and path.endswith('tasks'):
                # roles keep templates/files/vars next to, not inside, tasks/
                basedir = unfrackpath(os.path.dirname(path))
            # temporarily re-point the loader basedir so path_dwim resolves
            # against the role/play dir, then restore it
            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
            self.set_basedir(cur_basedir)
            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))
            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))
            search.append(self.path_dwim(os.path.join(basedir, source)))
            # try to create absolute path for loader basedir + filename
            search.append(self.path_dwim(source))
        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break
        # NOTE: when nothing exists the *last* candidate is returned anyway;
        # callers rely on getting a path back even for a missing file.
        return candidate
def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
'''
find one file in first path in stack taking roles into account and adding play basedir as fallback
:arg paths: A list of text strings which are the paths to look for the filename in.
:arg dirname: A text string representing a directory. The directory
is prepended to the source to form the path to search for.
:arg source: A text string which is the filename to search for
:rtype: A text string
:returns: An absolute path to the filename ``source``
'''
b_dirname = to_bytes(dirname)
b_source = to_bytes(source)
result = None
if source is None:
display.warning('Invalid request to find a file that matches a "null" value')
elif source and (source.startswith('~') or source.startswith(os.path.sep)):
# path is absolute, no relative needed, check existence and return source
test_path = unfrackpath(b_source)
if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
result = test_path
else:
search = []
display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
for path in paths:
upath = unfrackpath(path)
b_upath = to_bytes(upath, errors='surrogate_or_strict')
b_mydir = os.path.dirname(b_upath)
# FIXME: this detection fails with non main.yml roles
# if path is in role and 'tasks' not there already, add it into the search
if is_role or self._is_role(path):
if b_mydir.endswith(b'tasks'):
search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
search.append(os.path.join(b_mydir, b_source))
else:
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
elif b_dirname not in b_source.split(b'/'):
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
# always append basedir as last resort
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != dirname:
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
search.append(os.path.join(to_bytes(self.get_basedir()), b_source))
display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
for b_candidate in search:
display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
if os.path.exists(b_candidate):
result = to_text(b_candidate)
break
return result
def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:
f.write(content)
except Exception as err:
os.remove(content_tempfile)
raise Exception(err)
finally:
f.close()
return content_tempfile
    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleanup in the destructor
        """
        if not file_path or not isinstance(file_path, string_types):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))
        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound("the file named '%s' does not exist, or is not accessible" % to_native(file_path))
        if not self._vault:
            # fall back to a vault with an empty password; decrypting a
            # really-encrypted file then fails with a clear error below
            self._vault = VaultLib(b_password="")
        real_path = self.path_dwim(file_path)
        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._b_vault_password:
                            raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)
                        data = self._vault.decrypt(data, filename=real_path)
                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        # track it so cleanup_tmp_file()/cleanup_all_tmp_files()
                        # can remove it later
                        self._tempfiles.add(real_path)
            return real_path
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)
def cleanup_tmp_file(self, file_path):
"""
Removes any temporary files created from a previous call to
get_real_file. file_path must be the path returned from a
previous call to get_real_file.
"""
if file_path in self._tempfiles:
os.unlink(file_path)
self._tempfiles.remove(file_path)
def cleanup_all_tmp_files(self):
for f in self._tempfiles:
try:
self.cleanup_tmp_file(f)
except Exception as e:
display.warning("Unable to cleanup temp files: %s" % to_native(e))
| gpl-3.0 |
somsak/youtube-dl | youtube_dl/extractor/miomio.py | 95 | 3521 | # coding: utf-8
from __future__ import unicode_literals
import random
from .common import InfoExtractor
from ..utils import (
xpath_text,
int_or_none,
ExtractorError,
)
class MioMioIE(InfoExtractor):
IE_NAME = 'miomio.tv'
_VALID_URL = r'https?://(?:www\.)?miomio\.tv/watch/cc(?P<id>[0-9]+)'
_TESTS = [{
# "type=video" in flashvars
'url': 'http://www.miomio.tv/watch/cc88912/',
'md5': '317a5f7f6b544ce8419b784ca8edae65',
'info_dict': {
'id': '88912',
'ext': 'flv',
'title': '【SKY】字幕 铠武昭和VS平成 假面骑士大战FEAT战队 魔星字幕组 字幕',
'duration': 5923,
},
}, {
'url': 'http://www.miomio.tv/watch/cc184024/',
'info_dict': {
'id': '43729',
'title': '《动漫同人插画绘制》',
},
'playlist_mincount': 86,
'skip': 'This video takes time too long for retrieving the URL',
}, {
'url': 'http://www.miomio.tv/watch/cc173113/',
'info_dict': {
'id': '173113',
'title': 'The New Macbook 2015 上手试玩与简评'
},
'playlist_mincount': 2,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_meta(
'description', webpage, 'title', fatal=True)
mioplayer_path = self._search_regex(
r'src="(/mioplayer/[^"]+)"', webpage, 'ref_path')
xml_config = self._search_regex(
r'flashvars="type=(?:sina|video)&(.+?)&',
webpage, 'xml config')
# skipping the following page causes lags and eventually connection drop-outs
self._request_webpage(
'http://www.miomio.tv/mioplayer/mioplayerconfigfiles/xml.php?id=%s&r=%s' % (id, random.randint(100, 999)),
video_id)
# the following xml contains the actual configuration information on the video file(s)
vid_config = self._download_xml(
'http://www.miomio.tv/mioplayer/mioplayerconfigfiles/sina.php?{0}'.format(xml_config),
video_id)
http_headers = {
'Referer': 'http://www.miomio.tv%s' % mioplayer_path,
}
if not int_or_none(xpath_text(vid_config, 'timelength')):
raise ExtractorError('Unable to load videos!', expected=True)
entries = []
for f in vid_config.findall('./durl'):
segment_url = xpath_text(f, 'url', 'video url')
if not segment_url:
continue
order = xpath_text(f, 'order', 'order')
segment_id = video_id
segment_title = title
if order:
segment_id += '-%s' % order
segment_title += ' part %s' % order
entries.append({
'id': segment_id,
'url': segment_url,
'title': segment_title,
'duration': int_or_none(xpath_text(f, 'length', 'duration'), 1000),
'http_headers': http_headers,
})
if len(entries) == 1:
segment = entries[0]
segment['id'] = video_id
segment['title'] = title
return segment
return {
'_type': 'multi_video',
'id': video_id,
'entries': entries,
'title': title,
'http_headers': http_headers,
}
| unlicense |
jimi-c/ansible | lib/ansible/modules/cloud/vmware/vmware_host_config_manager.py | 3 | 7965 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_host_config_manager
short_description: Manage advance configurations about an ESXi host
description:
- This module can be used to manage advance configuration information about an ESXi host when ESXi hostname or Cluster name is given.
version_added: '2.5'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- Settings are applied to every ESXi host system in given cluster.
- If C(esxi_hostname) is not given, this parameter is required.
esxi_hostname:
description:
- ESXi hostname.
- Settings are applied to this ESXi host system.
- If C(cluster_name) is not given, this parameter is required.
options:
description:
- A dictionary of advance configuration parameters.
- Invalid options will cause module to error.
default: {}
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Manage Log level setting for all ESXi Host in given Cluster
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: cluster_name
options:
'Config.HostAgent.log.level': 'info'
delegate_to: localhost
- name: Manage Log level setting for an ESXi Host
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
options:
'Config.HostAgent.log.level': 'verbose'
delegate_to: localhost
- name: Manage multiple settings for an ESXi Host
vmware_host_config_manager:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
esxi_hostname: '{{ esxi_hostname }}'
options:
'Config.HostAgent.log.level': 'verbose'
'Annotations.WelcomeMessage': 'Hello World'
'Config.HostAgent.plugins.solo.enableMob': false
delegate_to: localhost
'''
RETURN = r'''#
'''
try:
from pyVmomi import vim, vmodl, VmomiSupport
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi
from ansible.module_utils._text import to_native
from ansible.module_utils.six import integer_types, string_types
class VmwareConfigManager(PyVmomi):
    def __init__(self, module):
        super(VmwareConfigManager, self).__init__(module)
        cluster_name = self.params.get('cluster_name', None)
        esxi_host_name = self.params.get('esxi_hostname', None)
        # Mapping of advanced-option key -> desired value, from the task args.
        self.options = self.params.get('options', dict())
        # All ESXi host objects the options will be applied to.
        self.hosts = self.get_all_host_objs(cluster_name=cluster_name, esxi_host_name=esxi_host_name)
@staticmethod
def is_integer(value, type_of='int'):
try:
VmomiSupport.vmodlTypes[type_of](value)
return True
except (TypeError, ValueError):
return False
@staticmethod
def is_boolean(value):
if str(value).lower() in ['true', 'on', 'yes', 'false', 'off', 'no']:
return True
return False
@staticmethod
def is_truthy(value):
if str(value).lower() in ['true', 'on', 'yes']:
return True
return False
    def set_host_configuration_facts(self):
        # Apply self.options to every selected host: convert each desired
        # value to the vmodl type the host reports for that option, fail fast
        # on unknown or read-only options, then push the changes and exit.
        changed = False
        for host in self.hosts:
            option_manager = host.configManager.advancedOption
            # current value (and, where available, type) of every option
            host_facts = {}
            for option in option_manager.QueryOptions():
                host_facts[option.key] = dict(value=option.value)
            for s_option in option_manager.supportedOption:
                host_facts[s_option.key].update(
                    option_type=s_option.optionType,
                )
            change_option_list = []
            for option_key, option_value in self.options.items():
                if option_key in host_facts:
                    # Make sure option_type is defined some values do not have
                    # it defined and appear to be read only.
                    if 'option_type' in host_facts[option_key]:
                        # We handle all supported types here so we can give meaningful errors.
                        option_type = host_facts[option_key]['option_type']
                        if self.is_boolean(option_value) and isinstance(option_type, vim.option.BoolOption):
                            option_value = self.is_truthy(option_value)
                        elif (isinstance(option_value, integer_types) or self.is_integer(option_value))\
                                and isinstance(option_type, vim.option.IntOption):
                            option_value = VmomiSupport.vmodlTypes['int'](option_value)
                        elif (isinstance(option_value, integer_types) or self.is_integer(option_value, 'long'))\
                                and isinstance(option_type, vim.option.LongOption):
                            option_value = VmomiSupport.vmodlTypes['long'](option_value)
                        elif isinstance(option_value, float) and isinstance(option_type, vim.option.FloatOption):
                            pass
                        elif isinstance(option_value, string_types) and isinstance(option_type, (vim.option.StringOption, vim.option.ChoiceOption)):
                            pass
                        else:
                            self.module.fail_json(msg="Provided value is of type %s."
                                                  " Option %s expects: %s" % (type(option_value), option_key, type(option_type)))
                    else:
                        self.module.fail_json(msg="Cannot change read only option %s to %s." % (option_key, option_value))
                    # only queue an update when the value actually differs
                    if option_value != host_facts[option_key]['value']:
                        change_option_list.append(vim.option.OptionValue(key=option_key, value=option_value))
                        changed = True
                else: # Don't silently drop unknown options. This prevents typos from falling through the cracks.
                    self.module.fail_json(msg="Unknown option %s" % option_key)
            if changed:
                try:
                    option_manager.UpdateOptions(changedValue=change_option_list)
                except vmodl.fault.InvalidArgument as e:
                    self.module.fail_json(msg="Failed to update option/s as one or more OptionValue "
                                          "contains an invalid value: %s" % to_native(e.msg))
                except vim.fault.InvalidName as e:
                    self.module.fail_json(msg="Failed to update option/s as one or more OptionValue "
                                          "objects refers to a non-existent option : %s" % to_native(e.msg))
        self.module.exit_json(changed=changed)
def main():
    # Module entry point: extend the shared vmware argument spec with our
    # own parameters, then apply the requested advanced options.
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        cluster_name=dict(type='str', required=False),
        esxi_hostname=dict(type='str', required=False),
        options=dict(type='dict', default=dict(), required=False),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        # at least one way of selecting hosts must be provided
        required_one_of=[
            ['cluster_name', 'esxi_hostname'],
        ]
    )
    vmware_host_config = VmwareConfigManager(module)
    vmware_host_config.set_host_configuration_facts()
if __name__ == "__main__":
main()
| gpl-3.0 |
runt18/mojo | build/android/pylib/android_commands_unittest.py | 94 | 7591 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
import unittest
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib import android_commands
# pylint: disable=W0212,W0702
class TestDeviceTempFile(unittest.TestCase):
  """Integration tests for DeviceTempFile; requires an attached device."""
  def setUp(self):
    # BUILDTYPE is required by the android tooling; default it for local runs.
    if not os.getenv('BUILDTYPE'):
      os.environ['BUILDTYPE'] = 'Debug'
    devices = android_commands.GetAttachedDevices()
    self.assertGreater(len(devices), 0, 'No device attached!')
    self.ac = android_commands.AndroidCommands(device=devices[0])
  def testTempFileDeleted(self):
    """Tests that DeviceTempFile deletes files when closed."""
    temp_file = android_commands.DeviceTempFile(self.ac)
    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
    self.ac.SetFileContents(temp_file.name, "contents")
    self.assertTrue(self.ac.FileExistsOnDevice(temp_file.name))
    temp_file.close()
    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
    # the context-manager form must clean up the same way
    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
      self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
      self.ac.SetFileContents(with_temp_file.name, "contents")
      self.assertTrue(self.ac.FileExistsOnDevice(with_temp_file.name))
    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
  def testTempFileNotWritten(self):
    """Tests that device temp files work successfully even if not written to."""
    temp_file = android_commands.DeviceTempFile(self.ac)
    temp_file.close()
    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
      pass
    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
  def testNaming(self):
    """Tests that returned filenames are as requested."""
    temp_file = android_commands.DeviceTempFile(self.ac, prefix="cat")
    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
    temp_file = android_commands.DeviceTempFile(self.ac, suffix="dog")
    self.assertTrue(temp_file.name.endswith("dog"))
    temp_file = android_commands.DeviceTempFile(
        self.ac, prefix="cat", suffix="dog")
    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
    self.assertTrue(temp_file.name.endswith("dog"))
class TestGetFilesChanged(unittest.TestCase):
  def setUp(self):
    # Requires a real device; creates a 9-file fixture tree on the host and
    # a matching empty directory on the device's external storage.
    if not os.getenv('BUILDTYPE'):
      os.environ['BUILDTYPE'] = 'Debug'
    devices = android_commands.GetAttachedDevices()
    self.assertGreater(len(devices), 0, 'No device attached!')
    self.ac = android_commands.AndroidCommands(device=devices[0])
    self.host_data_dir = os.path.realpath('test_push_data')
    self.device_data_dir = '%s/test_push_data' % (
        self.ac.RunShellCommand('realpath %s' %
                                self.ac.GetExternalStorage())[0])
    os.mkdir(self.host_data_dir)
    for i in xrange(1, 10):
      with open('%s/%d.txt' % (self.host_data_dir, i), 'w') as f:
        f.write('file #%d' % i)
    self.ac.RunShellCommand('mkdir %s' % self.device_data_dir)
  def testGetFilesChangedAllNeeded(self):
    """ Tests GetFilesChanged when none of the files are on the device.
    """
    # every host file should be reported as needing a push
    expected = [('%s/%d.txt' % (self.host_data_dir, i),
                 '%s/%d.txt' % (self.device_data_dir, i))
                for i in xrange(1, 10)]
    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
    self.assertSequenceEqual(expected, actual)
  def testGetFilesChangedSomeIdentical(self):
    """ Tests GetFilesChanged when some of the files are on the device.
    """
    # pre-push files 1-4 so only 5-9 should be reported as changed
    for i in xrange(1, 5):
      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
                        self.device_data_dir)
    expected = [('%s/%d.txt' % (self.host_data_dir, i),
                 '%s/%d.txt' % (self.device_data_dir, i))
                for i in xrange(5, 10)]
    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
    self.assertSequenceEqual(expected, actual)
  def testGetFilesChangedAllIdentical(self):
    """ Tests GetFilesChanged when all of the files are on the device.
    """
    # everything pre-pushed: nothing should be reported as changed
    for i in xrange(1, 10):
      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
                        self.device_data_dir)
    expected = []
    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
    self.assertSequenceEqual(expected, actual)
def testGetFilesChangedRename(self):
""" Tests GetFilesChanged when one of the files has been renamed.
This tests both with and without the ignore_filenames flag set.
"""
for i in xrange(5, 10):
self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
self.device_data_dir)
os.rename('%s/5.txt' % (self.host_data_dir),
'%s/99.txt' % (self.host_data_dir))
expected = [('%s/%d.txt' % (self.host_data_dir, i),
'%s/%d.txt' % (self.device_data_dir, i))
for i in xrange(1, 5)]
actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
ignore_filenames=True)
self.assertSequenceEqual(expected, actual)
expected.append(('%s/99.txt' % self.host_data_dir,
'%s/99.txt' % self.device_data_dir))
actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
self.assertSequenceEqual(expected, actual)
def testGetFilesChangedCopy(self):
""" Tests GetFilesChanged when one of the files has been copied.
This tests both with and without the ignore_filenames flag set.
"""
for i in xrange(5, 10):
self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
self.device_data_dir)
shutil.copy('%s/5.txt' % self.host_data_dir,
'%s/99.txt' % self.host_data_dir)
expected = [('%s/%d.txt' % (self.host_data_dir, i),
'%s/%d.txt' % (self.device_data_dir, i))
for i in xrange(1, 5)]
actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
ignore_filenames=True)
self.assertSequenceEqual(expected, actual)
expected.append(('%s/99.txt' % self.host_data_dir,
'%s/99.txt' % self.device_data_dir))
actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
self.assertSequenceEqual(expected, actual)
def testGetFilesChangedIndividual(self):
""" Tests GetFilesChanged when provided one file.
"""
expected = [('%s/1.txt' % self.host_data_dir,
'%s/1.txt' % self.device_data_dir)]
actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
'%s/1.txt' % self.device_data_dir)
self.assertSequenceEqual(expected, actual)
def testGetFilesChangedFileToDirectory(self):
""" Tests GetFilesChanged when provided a file from the host and a
directory on the device.
"""
expected = [('%s/1.txt' % self.host_data_dir,
'%s' % self.device_data_dir)]
actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
'%s' % self.device_data_dir)
self.assertSequenceEqual(expected, actual)
def tearDown(self):
try:
shutil.rmtree(self.host_data_dir)
self.ac.RunShellCommand('rm -rf %s' % self.device_data_dir)
except:
pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
minhphung171093/GreenERP_V8 | openerp/addons/crm/report/report_businessopp.py | 377 | 6269 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os, time
import random
import StringIO
from openerp.report.render import render
from openerp.report.interface import report_int
from pychart import *
theme.use_color = 1
class external_pdf(render):
    """Minimal ``render`` subclass wrapping an already-generated PDF payload."""

    def __init__(self, pdf):
        render.__init__(self)
        self.output_type = 'pdf'
        self.pdf = pdf

    def _render(self):
        # The PDF bytes were produced up front; just hand them back.
        return self.pdf
class report_custom(report_int):
    """Render the CRM business-opportunity "pipeline" chart as a PDF."""

    def create(self, cr, uid, ids, datas, context=None):
        """Build the pipeline chart for the given opportunities.

        :param cr: database cursor
        :param uid: current user's ID, for security checks
        :param ids: list of ``crm_case`` ids to plot
        :param datas: report payload (unused here)
        :param context: standard contextual-values dictionary
        :return: tuple ``(pdf_bytes, 'pdf')``
        """
        assert len(ids), 'You should provide some ids!'
        responsible_data = {}   # user_id -> [(percent, scaled benefit), ...]
        responsible_names = {}  # user_id -> display name
        data = []               # (percent, cost, benefit) rows for the bar chart
        minbenef = 999999999999999999999
        maxbenef = 0

        cr.execute('select probability, planned_revenue, planned_cost, user_id,\
                res_users.name as name from crm_case left join res_users on \
                (crm_case.user_id=res_users.id) where crm_case.id IN %s order by user_id',(tuple(ids),))
        res = cr.dictfetchall()

        for row in res:
            # BUGFIX: the original expression ``row['probability'] or 0 / 100.0``
            # parsed as ``row['probability'] or 0.0`` (``/`` binds tighter than
            # ``or``), so the 0-100 probability was never rescaled to a 0-1
            # fraction and the later ``proba * 100`` overshot by a factor 100.
            proba = (row['probability'] or 0) / 100.0
            cost = row['planned_cost'] or 0
            revenue = row['planned_revenue'] or 0
            userid = row['user_id'] or 0

            benefit = revenue - cost
            if benefit > maxbenef:
                maxbenef = benefit
            if benefit < minbenef:
                minbenef = benefit

            tuple_benefit = (proba * 100, benefit)
            responsible_data.setdefault(userid, [])
            responsible_data[userid].append(tuple_benefit)

            tuple_benefit = (proba * 100, cost, benefit)
            data.append(tuple_benefit)

            # '/' is pychart's escape character in labels; double it up.
            responsible_names[userid] = (row['name'] or '/').replace('/', '//')

        # Pad the y-range so plotted points do not sit on the chart border.
        minbenef -= maxbenef * 0.05
        maxbenef *= 1.2

        # Squeeze benefit values towards the vertical middle as probability
        # grows, producing the funnel ("pipeline") shape.
        ratio = 0.5
        minmaxdiff2 = (maxbenef - minbenef) / 2
        for l in responsible_data.itervalues():
            for i in range(len(l)):
                percent, benef = l[i]
                proba = percent / 100
                current_ratio = 1 + (ratio - 1) * proba
                newbenef = minmaxdiff2 + ((benef - minbenef - minmaxdiff2) * current_ratio)
                l[i] = (percent, newbenef)

        # TODO:
        # - group by probability category in the top graph
        # - variable scale
        pdf_string = StringIO.StringIO()
        can = canvas.init(fname=pdf_string, format='pdf')
        chart_object.set_defaults(line_plot.T, line_style=None)

        # Main chart (bottom): one line per responsible user.
        xaxis = axis.X(label=None, format="%d%%", tic_interval=20)
        x_range_a, x_range_b = (0, 100)
        y_range_a, y_range_b = (minbenef, maxbenef)
        if y_range_a == 0.0:
            # pychart rejects a range bound of exactly 0.0.
            y_range_a += 0.0001

        ar = area.T(
            size=(300, 200),
            y_grid_interval=10000,
            y_grid_style=None,
            x_range=(x_range_a, x_range_b),
            y_range=(y_range_a, y_range_b),
            x_axis=xaxis,
            y_axis=None,
            legend=legend.T()
        )

        for k, d in responsible_data.iteritems():
            # Random plain colour per responsible user.
            fill = fill_style.Plain(bgcolor=color.T(r=random.random(), g=random.random(), b=random.random()))
            tick = tick_mark.Square(size=6, fill_style=fill)
            ar.add_plot(line_plot.T(label=responsible_names[k], data=d, tick_mark=tick))
        ar.draw(can)

        # Second graph (top right): stacked planned cost/revenue bars.
        ar = area.T(legend=legend.T(),
                    size=(200, 100),
                    loc=(100, 250),
                    x_grid_interval=lambda min, max: [40, 60, 80, 100],
                    x_grid_style=line_style.gray70_dash1,
                    x_range=(33, 100),
                    x_axis=axis.X(label=None,
                                  minor_tic_interval=lambda min, max: [50, 70, 90],
                                  format=lambda x: ""),
                    y_axis=axis.Y(label="Planned amounts"))
        bar_plot.fill_styles.reset()
        plot1 = bar_plot.T(label="Cost", data=data, fill_style=fill_style.red)
        plot2 = bar_plot.T(label="Revenue", data=data, hcol=2, stack_on=plot1, fill_style=fill_style.blue)
        ar.add_plot(plot1, plot2)
        ar.draw(can)

        # Diagonal "pipeline" lines framing the funnel.
        can.line(line_style.black, 0, 200, 300, 150)
        can.line(line_style.black, 0, 0, 300, 50)
        # Vertical dashed guide lines.
        ls = line_style.T(width=0.4, color=color.gray70, dash=(2, 2))
        for x in range(120, 300, 60):
            can.line(ls, x, 0, x, 250)
        # Arrows pointing to the right of the funnel.
        a = arrow.fat1
        for y in range(60, 150, 10):
            a.draw([(285, y), (315, y)], can=can)
        # Close canvas so the PDF is flushed into the string buffer.
        can.close()

        self.obj = external_pdf(pdf_string.getvalue())
        self.obj.render()
        pdf_string.close()
        return (self.obj.pdf, 'pdf')
report_custom('report.crm.case')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
public-ink/public-ink | server/appengine/lib/numpy/distutils/tests/test_fcompiler_gnu.py | 155 | 2242 | from __future__ import division, absolute_import, print_function
from numpy.testing import TestCase, assert_, run_module_suite
import numpy.distutils.fcompiler
# (observed ``--version`` banner, expected parsed version) pairs that the
# 'gnu' (g77) fcompiler must recognise.
g77_version_strings = [
    ('GNU Fortran 0.5.25 20010319 (prerelease)', '0.5.25'),
    ('GNU Fortran (GCC 3.2) 3.2 20020814 (release)', '3.2'),
    ('GNU Fortran (GCC) 3.3.3 20040110 (prerelease) (Debian)', '3.3.3'),
    ('GNU Fortran (GCC) 3.3.3 (Debian 20040401)', '3.3.3'),
    ('GNU Fortran (GCC 3.2.2 20030222 (Red Hat Linux 3.2.2-5)) 3.2.2'
     ' 20030222 (Red Hat Linux 3.2.2-5)', '3.2.2'),
]
# (observed ``--version`` banner, expected parsed version) pairs for the
# 'gnu95' (gfortran) fcompiler; includes multi-line banners with OS X
# kern.osversion warnings preceding the version number.
gfortran_version_strings = [
    ('GNU Fortran 95 (GCC 4.0.3 20051023 (prerelease) (Debian 4.0.2-3))',
     '4.0.3'),
    ('GNU Fortran 95 (GCC) 4.1.0', '4.1.0'),
    ('GNU Fortran 95 (GCC) 4.2.0 20060218 (experimental)', '4.2.0'),
    ('GNU Fortran (GCC) 4.3.0 20070316 (experimental)', '4.3.0'),
    ('GNU Fortran (rubenvb-4.8.0) 4.8.0', '4.8.0'),
    ('4.8.0', '4.8.0'),
    ('4.0.3-7', '4.0.3'),
    ("gfortran: warning: couldn't understand kern.osversion '14.1.0\n4.9.1",
     '4.9.1'),
    ("gfortran: warning: couldn't understand kern.osversion '14.1.0\n"
     "gfortran: warning: yet another warning\n4.9.1",
     '4.9.1')
]
class TestG77Versions(TestCase):
    """The 'gnu' (g77) fcompiler must match g77 banners and reject gfortran's."""

    def test_g77_version(self):
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu')
        for banner, expected in g77_version_strings:
            matched = fc.version_match(banner)
            assert_(matched == expected, (banner, matched))

    def test_not_g77(self):
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu')
        for banner, _ in gfortran_version_strings:
            matched = fc.version_match(banner)
            assert_(matched is None, (banner, matched))
class TestGFortranVersions(TestCase):
    """The 'gnu95' (gfortran) fcompiler must match gfortran banners, not g77's."""

    def test_gfortran_version(self):
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95')
        for banner, expected in gfortran_version_strings:
            matched = fc.version_match(banner)
            assert_(matched == expected, (banner, matched))

    def test_not_gfortran(self):
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='gnu95')
        for banner, _ in g77_version_strings:
            matched = fc.version_match(banner)
            assert_(matched is None, (banner, matched))
# Run via numpy's nose-based test runner when executed directly.
if __name__ == '__main__':
    run_module_suite()
| gpl-3.0 |
dakcarto/QGIS | python/console/console.py | 1 | 35613 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Python Console for QGIS
-------------------
begin : 2012-09-10
copyright : (C) 2012 by Salvatore Larosa
email : lrssvtml (at) gmail (dot) com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
Some portions of code were taken from https://code.google.com/p/pydee/
"""
import os
from PyQt4.QtCore import Qt, QTimer, QSettings, QCoreApplication, QSize, QByteArray, QFileInfo, SIGNAL, QUrl, QDir
from PyQt4.QtGui import QDockWidget, QToolBar, QToolButton, QWidget,\
QSplitter, QTreeWidget, QAction, QFileDialog, QCheckBox, QSizePolicy, QMenu, QGridLayout, QApplication, \
QDesktopServices
from PyQt4.QtGui import QVBoxLayout
from PyQt4 import pyqtconfig
from qgis.utils import iface
from console_sci import ShellScintilla
from console_output import ShellOutputScintilla
from console_editor import EditorTabWidget
from console_settings import optionsDialog
from qgis.core import QgsApplication, QgsContextHelp
from qgis.gui import QgsFilterLineEdit
import sys
_console = None
def show_console():
    """Called from QGIS to open (or toggle) the Python console dock."""
    global _console
    if _console is None:
        # First request: create the dock, parented to the main window when
        # running inside QGIS.
        _console = PythonConsole(iface.mainWindow() if iface else None)
        _console.show()  # force show even if it was restored as hidden
        # Defer focusing so it also works while the console is not yet visible.
        QTimer.singleShot(0, _console.activate)
    else:
        # Subsequent requests toggle visibility.
        _console.setVisible(not _console.isVisible())
        if _console.isVisible():
            # Give the console keyboard focus so the user can start typing.
            _console.activate()

    ## Shows help on first launch of the console
    settings = QSettings()
    if settings.value('pythonConsole/contextHelpOnFirstLaunch', True, type=bool):
        QgsContextHelp.run("PythonConsole")
        settings.setValue('pythonConsole/contextHelpOnFirstLaunch', False)
# Keep a reference to the interpreter's original stdout so it can be
# restored after the console redirects output.
_old_stdout = sys.stdout
# Last value produced by the interactive interpreter (set by the hook below).
_console_output = None

# hook for python console so all output will be redirected
# and then shown in console
def console_displayhook(obj):
    """sys.displayhook replacement: stash *obj* for the console to render."""
    global _console_output
    _console_output = obj
class PythonConsole(QDockWidget):
    """Dock widget hosting the Python console inside the QGIS main window."""

    def __init__(self, parent=None):
        QDockWidget.__init__(self, parent)
        # Object name is required for the main window to save/restore the
        # dock's position in its state.
        self.setObjectName("PythonConsole")
        self.setWindowTitle(QCoreApplication.translate("PythonConsole", "Python Console"))
        #self.setAllowedAreas(Qt.BottomDockWidgetArea)

        self.console = PythonConsoleWidget(self)
        self.setWidget(self.console)
        self.setFocusProxy(self.console)

        # try to restore position from stored main window state
        if iface and not iface.mainWindow().restoreDockWidget(self):
            iface.mainWindow().addDockWidget(Qt.BottomDockWidgetArea, self)

    def activate(self):
        """Bring the dock to the front and give it keyboard focus."""
        self.activateWindow()
        self.raise_()
        QDockWidget.setFocus(self)

    def closeEvent(self, event):
        # Persist console settings before the dock closes.
        self.console.saveSettingsConsole()
        QWidget.closeEvent(self, event)
class PythonConsoleWidget(QWidget):
    def __init__(self, parent=None):
        """Build the console UI: shell, output pane, editor tabs, object
        inspector, find bar, both toolbars, and all signal connections.

        Widgets are created before layouts, and actions before the toolbars
        that host them; the order of the Qt calls below matters for widget
        parenting and must not be rearranged casually.
        """
        QWidget.__init__(self, parent)
        self.setWindowTitle(QCoreApplication.translate("PythonConsole", "Python Console"))
        self.settings = QSettings()

        # Interactive shell (input), its output pane, and the script editor.
        self.shell = ShellScintilla(self)
        self.setFocusProxy(self.shell)
        self.shellOut = ShellOutputScintilla(self)
        self.tabEditorWidget = EditorTabWidget(self)

        ##------------ UI -------------------------------

        self.splitterEditor = QSplitter(self)
        self.splitterEditor.setOrientation(Qt.Horizontal)
        self.splitterEditor.setHandleWidth(6)
        self.splitterEditor.setChildrenCollapsible(True)

        self.shellOutWidget = QWidget(self)
        self.shellOutWidget.setLayout(QVBoxLayout())
        self.shellOutWidget.layout().setContentsMargins(0, 0, 0, 0)
        self.shellOutWidget.layout().addWidget(self.shellOut)

        self.splitter = QSplitter(self.splitterEditor)
        self.splitter.setOrientation(Qt.Vertical)
        self.splitter.setHandleWidth(3)
        self.splitter.setChildrenCollapsible(False)
        self.splitter.addWidget(self.shellOutWidget)
        self.splitter.addWidget(self.shell)

        #self.splitterEditor.addWidget(self.tabEditorWidget)

        self.splitterObj = QSplitter(self.splitterEditor)
        self.splitterObj.setHandleWidth(3)
        self.splitterObj.setOrientation(Qt.Horizontal)
        #self.splitterObj.setSizes([0, 0])
        #self.splitterObj.setStretchFactor(0, 1)

        self.widgetEditor = QWidget(self.splitterObj)
        self.widgetFind = QWidget(self)

        # Two-column tree: visible label column plus a hidden column used to
        # store line numbers for onClickGoToLine.
        self.listClassMethod = QTreeWidget(self.splitterObj)
        self.listClassMethod.setColumnCount(2)
        objInspLabel = QCoreApplication.translate("PythonConsole", "Object Inspector")
        self.listClassMethod.setHeaderLabels([objInspLabel, ''])
        self.listClassMethod.setColumnHidden(1, True)
        self.listClassMethod.setAlternatingRowColors(True)

        #self.splitterEditor.addWidget(self.widgetEditor)
        #self.splitterObj.addWidget(self.listClassMethod)
        #self.splitterObj.addWidget(self.widgetEditor)

        # Hide side editor on start up
        self.splitterObj.hide()
        self.listClassMethod.hide()
        # Hide search widget on start up
        self.widgetFind.hide()

        sizes = self.splitter.sizes()
        self.splitter.setSizes(sizes)

        ##----------------Restore Settings------------------------------------

        self.restoreSettingsConsole()

        ##------------------Toolbar Editor-------------------------------------

        ## Action for Open File
        openFileBt = QCoreApplication.translate("PythonConsole", "Open file")
        self.openFileButton = QAction(self)
        self.openFileButton.setCheckable(False)
        self.openFileButton.setEnabled(True)
        self.openFileButton.setIcon(QgsApplication.getThemeIcon("console/iconOpenConsole.png"))
        self.openFileButton.setMenuRole(QAction.PreferencesRole)
        self.openFileButton.setIconVisibleInMenu(True)
        self.openFileButton.setToolTip(openFileBt)
        self.openFileButton.setText(openFileBt)

        ## Action for Open in external editor
        openExtEditorBt = QCoreApplication.translate("PythonConsole", "Open in external editor")
        self.openInEditorButton = QAction(self)
        self.openInEditorButton.setCheckable(False)
        self.openInEditorButton.setEnabled(True)
        self.openInEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconShowEditorConsole.png"))
        self.openInEditorButton.setMenuRole(QAction.PreferencesRole)
        self.openInEditorButton.setIconVisibleInMenu(True)
        self.openInEditorButton.setToolTip(openExtEditorBt)
        self.openInEditorButton.setText(openExtEditorBt)

        ## Action for Save File
        saveFileBt = QCoreApplication.translate("PythonConsole", "Save")
        self.saveFileButton = QAction(self)
        self.saveFileButton.setCheckable(False)
        # Disabled until the current tab has unsaved changes.
        self.saveFileButton.setEnabled(False)
        self.saveFileButton.setIcon(QgsApplication.getThemeIcon("console/iconSaveConsole.png"))
        self.saveFileButton.setMenuRole(QAction.PreferencesRole)
        self.saveFileButton.setIconVisibleInMenu(True)
        self.saveFileButton.setToolTip(saveFileBt)
        self.saveFileButton.setText(saveFileBt)

        ## Action for Save File As
        saveAsFileBt = QCoreApplication.translate("PythonConsole", "Save As...")
        self.saveAsFileButton = QAction(self)
        self.saveAsFileButton.setCheckable(False)
        self.saveAsFileButton.setEnabled(True)
        self.saveAsFileButton.setIcon(QgsApplication.getThemeIcon("console/iconSaveAsConsole.png"))
        self.saveAsFileButton.setMenuRole(QAction.PreferencesRole)
        self.saveAsFileButton.setIconVisibleInMenu(True)
        self.saveAsFileButton.setToolTip(saveAsFileBt)
        self.saveAsFileButton.setText(saveAsFileBt)

        ## Action Cut
        cutEditorBt = QCoreApplication.translate("PythonConsole", "Cut")
        self.cutEditorButton = QAction(self)
        self.cutEditorButton.setCheckable(False)
        self.cutEditorButton.setEnabled(True)
        self.cutEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconCutEditorConsole.png"))
        self.cutEditorButton.setMenuRole(QAction.PreferencesRole)
        self.cutEditorButton.setIconVisibleInMenu(True)
        self.cutEditorButton.setToolTip(cutEditorBt)
        self.cutEditorButton.setText(cutEditorBt)

        ## Action Copy
        copyEditorBt = QCoreApplication.translate("PythonConsole", "Copy")
        self.copyEditorButton = QAction(self)
        self.copyEditorButton.setCheckable(False)
        self.copyEditorButton.setEnabled(True)
        self.copyEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconCopyEditorConsole.png"))
        self.copyEditorButton.setMenuRole(QAction.PreferencesRole)
        self.copyEditorButton.setIconVisibleInMenu(True)
        self.copyEditorButton.setToolTip(copyEditorBt)
        self.copyEditorButton.setText(copyEditorBt)

        ## Action Paste
        pasteEditorBt = QCoreApplication.translate("PythonConsole", "Paste")
        self.pasteEditorButton = QAction(self)
        self.pasteEditorButton.setCheckable(False)
        self.pasteEditorButton.setEnabled(True)
        self.pasteEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconPasteEditorConsole.png"))
        self.pasteEditorButton.setMenuRole(QAction.PreferencesRole)
        self.pasteEditorButton.setIconVisibleInMenu(True)
        self.pasteEditorButton.setToolTip(pasteEditorBt)
        self.pasteEditorButton.setText(pasteEditorBt)

        ## Action Run Script (subprocess)
        runScriptEditorBt = QCoreApplication.translate("PythonConsole", "Run script")
        self.runScriptEditorButton = QAction(self)
        self.runScriptEditorButton.setCheckable(False)
        self.runScriptEditorButton.setEnabled(True)
        self.runScriptEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconRunScriptConsole.png"))
        self.runScriptEditorButton.setMenuRole(QAction.PreferencesRole)
        self.runScriptEditorButton.setIconVisibleInMenu(True)
        self.runScriptEditorButton.setToolTip(runScriptEditorBt)
        self.runScriptEditorButton.setText(runScriptEditorBt)

        ## Action Comment selected code
        commentEditorBt = QCoreApplication.translate("PythonConsole", "Comment")
        self.commentEditorButton = QAction(self)
        self.commentEditorButton.setCheckable(False)
        self.commentEditorButton.setEnabled(True)
        self.commentEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconCommentEditorConsole.png"))
        self.commentEditorButton.setMenuRole(QAction.PreferencesRole)
        self.commentEditorButton.setIconVisibleInMenu(True)
        self.commentEditorButton.setToolTip(commentEditorBt)
        self.commentEditorButton.setText(commentEditorBt)

        ## Action Uncomment selected code
        uncommentEditorBt = QCoreApplication.translate("PythonConsole", "Uncomment")
        self.uncommentEditorButton = QAction(self)
        self.uncommentEditorButton.setCheckable(False)
        self.uncommentEditorButton.setEnabled(True)
        self.uncommentEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconUncommentEditorConsole.png"))
        self.uncommentEditorButton.setMenuRole(QAction.PreferencesRole)
        self.uncommentEditorButton.setIconVisibleInMenu(True)
        self.uncommentEditorButton.setToolTip(uncommentEditorBt)
        self.uncommentEditorButton.setText(uncommentEditorBt)

        ## Action for Object browser
        objList = QCoreApplication.translate("PythonConsole", "Object Inspector")
        self.objectListButton = QAction(self)
        self.objectListButton.setCheckable(True)
        self.objectListButton.setEnabled(self.settings.value("pythonConsole/enableObjectInsp",
                                                             False, type=bool))
        self.objectListButton.setIcon(QgsApplication.getThemeIcon("console/iconClassBrowserConsole.png"))
        self.objectListButton.setMenuRole(QAction.PreferencesRole)
        self.objectListButton.setIconVisibleInMenu(True)
        self.objectListButton.setToolTip(objList)
        self.objectListButton.setText(objList)

        ## Action for Find text
        findText = QCoreApplication.translate("PythonConsole", "Find Text")
        self.findTextButton = QAction(self)
        self.findTextButton.setCheckable(True)
        self.findTextButton.setEnabled(True)
        self.findTextButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchEditorConsole.png"))
        self.findTextButton.setMenuRole(QAction.PreferencesRole)
        self.findTextButton.setIconVisibleInMenu(True)
        self.findTextButton.setToolTip(findText)
        self.findTextButton.setText(findText)

        ##----------------Toolbar Console-------------------------------------

        ## Action Show Editor
        showEditor = QCoreApplication.translate("PythonConsole", "Show editor")
        self.showEditorButton = QAction(self)
        self.showEditorButton.setEnabled(True)
        self.showEditorButton.setCheckable(True)
        self.showEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconShowEditorConsole.png"))
        self.showEditorButton.setMenuRole(QAction.PreferencesRole)
        self.showEditorButton.setIconVisibleInMenu(True)
        self.showEditorButton.setToolTip(showEditor)
        self.showEditorButton.setText(showEditor)

        ## Action for Clear button
        clearBt = QCoreApplication.translate("PythonConsole", "Clear console")
        self.clearButton = QAction(self)
        self.clearButton.setCheckable(False)
        self.clearButton.setEnabled(True)
        self.clearButton.setIcon(QgsApplication.getThemeIcon("console/iconClearConsole.png"))
        self.clearButton.setMenuRole(QAction.PreferencesRole)
        self.clearButton.setIconVisibleInMenu(True)
        self.clearButton.setToolTip(clearBt)
        self.clearButton.setText(clearBt)

        ## Action for settings
        optionsBt = QCoreApplication.translate("PythonConsole", "Settings")
        self.optionsButton = QAction(self)
        self.optionsButton.setCheckable(False)
        self.optionsButton.setEnabled(True)
        self.optionsButton.setIcon(QgsApplication.getThemeIcon("console/iconSettingsConsole.png"))
        self.optionsButton.setMenuRole(QAction.PreferencesRole)
        self.optionsButton.setIconVisibleInMenu(True)
        self.optionsButton.setToolTip(optionsBt)
        self.optionsButton.setText(optionsBt)

        ## Action menu for class
        actionClassBt = QCoreApplication.translate("PythonConsole", "Import Class")
        self.actionClass = QAction(self)
        self.actionClass.setCheckable(False)
        self.actionClass.setEnabled(True)
        self.actionClass.setIcon(QgsApplication.getThemeIcon("console/iconClassConsole.png"))
        self.actionClass.setMenuRole(QAction.PreferencesRole)
        self.actionClass.setIconVisibleInMenu(True)
        self.actionClass.setToolTip(actionClassBt)
        self.actionClass.setText(actionClassBt)

        ## Import Processing class
        loadProcessingBt = QCoreApplication.translate("PythonConsole", "Import Processing class")
        self.loadProcessingButton = QAction(self)
        self.loadProcessingButton.setCheckable(False)
        self.loadProcessingButton.setEnabled(True)
        self.loadProcessingButton.setIcon(QgsApplication.getThemeIcon("console/iconProcessingConsole.png"))
        self.loadProcessingButton.setMenuRole(QAction.PreferencesRole)
        self.loadProcessingButton.setIconVisibleInMenu(True)
        self.loadProcessingButton.setToolTip(loadProcessingBt)
        self.loadProcessingButton.setText(loadProcessingBt)

        ## Import QtCore class
        loadQtCoreBt = QCoreApplication.translate("PythonConsole", "Import PyQt.QtCore class")
        self.loadQtCoreButton = QAction(self)
        self.loadQtCoreButton.setCheckable(False)
        self.loadQtCoreButton.setEnabled(True)
        self.loadQtCoreButton.setIcon(QgsApplication.getThemeIcon("console/iconQtCoreConsole.png"))
        self.loadQtCoreButton.setMenuRole(QAction.PreferencesRole)
        self.loadQtCoreButton.setIconVisibleInMenu(True)
        self.loadQtCoreButton.setToolTip(loadQtCoreBt)
        self.loadQtCoreButton.setText(loadQtCoreBt)

        ## Import QtGui class
        loadQtGuiBt = QCoreApplication.translate("PythonConsole", "Import PyQt.QtGui class")
        self.loadQtGuiButton = QAction(self)
        self.loadQtGuiButton.setCheckable(False)
        self.loadQtGuiButton.setEnabled(True)
        self.loadQtGuiButton.setIcon(QgsApplication.getThemeIcon("console/iconQtGuiConsole.png"))
        self.loadQtGuiButton.setMenuRole(QAction.PreferencesRole)
        self.loadQtGuiButton.setIconVisibleInMenu(True)
        self.loadQtGuiButton.setToolTip(loadQtGuiBt)
        self.loadQtGuiButton.setText(loadQtGuiBt)

        ## Action for Run script
        runBt = QCoreApplication.translate("PythonConsole", "Run command")
        self.runButton = QAction(self)
        self.runButton.setCheckable(False)
        self.runButton.setEnabled(True)
        self.runButton.setIcon(QgsApplication.getThemeIcon("console/iconRunConsole.png"))
        self.runButton.setMenuRole(QAction.PreferencesRole)
        self.runButton.setIconVisibleInMenu(True)
        self.runButton.setToolTip(runBt)
        self.runButton.setText(runBt)

        ## Help action
        helpBt = QCoreApplication.translate("PythonConsole", "Help")
        self.helpButton = QAction(self)
        self.helpButton.setCheckable(False)
        self.helpButton.setEnabled(True)
        self.helpButton.setIcon(QgsApplication.getThemeIcon("console/iconHelpConsole.png"))
        self.helpButton.setMenuRole(QAction.PreferencesRole)
        self.helpButton.setIconVisibleInMenu(True)
        self.helpButton.setToolTip(helpBt)
        self.helpButton.setText(helpBt)

        # Console toolbar (above the output pane).
        self.toolBar = QToolBar()
        self.toolBar.setEnabled(True)
        self.toolBar.setFocusPolicy(Qt.NoFocus)
        self.toolBar.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.toolBar.setLayoutDirection(Qt.LeftToRight)
        self.toolBar.setIconSize(QSize(16, 16))
        self.toolBar.setMovable(False)
        self.toolBar.setFloatable(False)
        self.toolBar.addAction(self.clearButton)
        self.toolBar.addAction(self.actionClass)
        self.toolBar.addAction(self.runButton)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.showEditorButton)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.optionsButton)
        self.toolBar.addAction(self.helpButton)

        # Editor toolbar; disabled until the editor pane is shown.
        self.toolBarEditor = QToolBar()
        self.toolBarEditor.setEnabled(False)
        self.toolBarEditor.setFocusPolicy(Qt.NoFocus)
        self.toolBarEditor.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.toolBarEditor.setLayoutDirection(Qt.LeftToRight)
        self.toolBarEditor.setIconSize(QSize(16, 16))
        self.toolBarEditor.setMovable(False)
        self.toolBarEditor.setFloatable(False)
        self.toolBarEditor.addAction(self.openFileButton)
        self.toolBarEditor.addAction(self.openInEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.saveFileButton)
        self.toolBarEditor.addAction(self.saveAsFileButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.runScriptEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.findTextButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.cutEditorButton)
        self.toolBarEditor.addAction(self.copyEditorButton)
        self.toolBarEditor.addAction(self.pasteEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.commentEditorButton)
        self.toolBarEditor.addAction(self.uncommentEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.objectListButton)

        ## Menu Import Class
        self.classMenu = QMenu()
        self.classMenu.addAction(self.loadProcessingButton)
        self.classMenu.addAction(self.loadQtCoreButton)
        self.classMenu.addAction(self.loadQtGuiButton)
        cM = self.toolBar.widgetForAction(self.actionClass)
        cM.setMenu(self.classMenu)
        cM.setPopupMode(QToolButton.InstantPopup)

        self.widgetButton = QWidget()
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.widgetButton.sizePolicy().hasHeightForWidth())
        self.widgetButton.setSizePolicy(sizePolicy)

        self.widgetButtonEditor = QWidget(self.widgetEditor)
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.widgetButtonEditor.sizePolicy().hasHeightForWidth())
        self.widgetButtonEditor.setSizePolicy(sizePolicy)

        sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.shellOut.sizePolicy().hasHeightForWidth())
        self.shellOut.setSizePolicy(sizePolicy)

        self.shellOut.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.shell.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

        ##------------ Layout -------------------------------

        self.mainLayout = QGridLayout(self)
        self.mainLayout.setMargin(0)
        self.mainLayout.setSpacing(0)
        self.mainLayout.addWidget(self.widgetButton, 0, 0, 1, 1)
        self.mainLayout.addWidget(self.splitterEditor, 0, 1, 1, 1)

        self.shellOutWidget.layout().insertWidget(0, self.toolBar)

        self.layoutEditor = QGridLayout(self.widgetEditor)
        self.layoutEditor.setMargin(0)
        self.layoutEditor.setSpacing(0)
        self.layoutEditor.addWidget(self.toolBarEditor, 0, 1, 1, 1)
        self.layoutEditor.addWidget(self.widgetButtonEditor, 1, 0, 2, 1)
        self.layoutEditor.addWidget(self.tabEditorWidget, 1, 1, 1, 1)
        self.layoutEditor.addWidget(self.widgetFind, 2, 1, 1, 1)

        ## Layout for the find widget
        self.layoutFind = QGridLayout(self.widgetFind)
        self.layoutFind.setContentsMargins(0, 0, 0, 0)
        self.lineEditFind = QgsFilterLineEdit()
        placeHolderTxt = QCoreApplication.translate("PythonConsole", "Enter text to find...")
        # setPlaceholderText only exists from Qt 4.7; fall back to a tooltip.
        if pyqtconfig.Configuration().qt_version >= 0x40700:
            self.lineEditFind.setPlaceholderText(placeHolderTxt)
        else:
            self.lineEditFind.setToolTip(placeHolderTxt)
        self.findNextButton = QToolButton()
        self.findNextButton.setEnabled(False)
        toolTipfindNext = QCoreApplication.translate("PythonConsole", "Find Next")
        self.findNextButton.setToolTip(toolTipfindNext)
        self.findNextButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchNextEditorConsole.png"))
        self.findNextButton.setIconSize(QSize(24, 24))
        self.findNextButton.setAutoRaise(True)
        self.findPrevButton = QToolButton()
        self.findPrevButton.setEnabled(False)
        toolTipfindPrev = QCoreApplication.translate("PythonConsole", "Find Previous")
        self.findPrevButton.setToolTip(toolTipfindPrev)
        self.findPrevButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchPrevEditorConsole.png"))
        self.findPrevButton.setIconSize(QSize(24, 24))
        self.findPrevButton.setAutoRaise(True)
        self.caseSensitive = QCheckBox()
        caseSensTr = QCoreApplication.translate("PythonConsole", "Case Sensitive")
        self.caseSensitive.setText(caseSensTr)
        self.wholeWord = QCheckBox()
        wholeWordTr = QCoreApplication.translate("PythonConsole", "Whole Word")
        self.wholeWord.setText(wholeWordTr)
        self.wrapAround = QCheckBox()
        self.wrapAround.setChecked(True)
        wrapAroundTr = QCoreApplication.translate("PythonConsole", "Wrap Around")
        self.wrapAround.setText(wrapAroundTr)
        self.layoutFind.addWidget(self.lineEditFind, 0, 1, 1, 1)
        self.layoutFind.addWidget(self.findPrevButton, 0, 2, 1, 1)
        self.layoutFind.addWidget(self.findNextButton, 0, 3, 1, 1)
        self.layoutFind.addWidget(self.caseSensitive, 0, 4, 1, 1)
        self.layoutFind.addWidget(self.wholeWord, 0, 5, 1, 1)
        self.layoutFind.addWidget(self.wrapAround, 0, 6, 1, 1)

        ##------------ Add first Tab in Editor -------------------------------

        #self.tabEditorWidget.newTabEditor(tabName='first', filename=None)

        ##------------ Signal -------------------------------
        self.findTextButton.toggled.connect(self.findTextEditor)
        self.objectListButton.toggled.connect(self.toggleObjectListWidget)
        self.commentEditorButton.triggered.connect(self.commentCode)
        self.uncommentEditorButton.triggered.connect(self.uncommentCode)
        self.runScriptEditorButton.triggered.connect(self.runScriptEditor)
        self.cutEditorButton.triggered.connect(self.cutEditor)
        self.copyEditorButton.triggered.connect(self.copyEditor)
        self.pasteEditorButton.triggered.connect(self.pasteEditor)
        self.showEditorButton.toggled.connect(self.toggleEditor)
        self.clearButton.triggered.connect(self.shellOut.clearConsole)
        self.optionsButton.triggered.connect(self.openSettings)
        self.loadProcessingButton.triggered.connect(self.processing)
        self.loadQtCoreButton.triggered.connect(self.qtCore)
        self.loadQtGuiButton.triggered.connect(self.qtGui)
        self.runButton.triggered.connect(self.shell.entered)
        self.openFileButton.triggered.connect(self.openScriptFile)
        self.openInEditorButton.triggered.connect(self.openScriptFileExtEditor)
        self.saveFileButton.triggered.connect(self.saveScriptFile)
        self.saveAsFileButton.triggered.connect(self.saveAsScriptFile)
        self.helpButton.triggered.connect(self.openHelp)
        # Old-style connect: QTreeWidget itemClicked carries (item, column).
        self.connect(self.listClassMethod, SIGNAL('itemClicked(QTreeWidgetItem*, int)'),
                     self.onClickGoToLine)
        self.lineEditFind.returnPressed.connect(self._findText)
        self.findNextButton.clicked.connect(self._findNext)
        self.findPrevButton.clicked.connect(self._findPrev)
        self.lineEditFind.textChanged.connect(self._textFindChanged)
def _findText(self):
self.tabEditorWidget.currentWidget().newEditor.findText(True)
def _findNext(self):
self.tabEditorWidget.currentWidget().newEditor.findText(True)
def _findPrev(self):
self.tabEditorWidget.currentWidget().newEditor.findText(False)
def _textFindChanged(self):
if self.lineEditFind.text():
self.findNextButton.setEnabled(True)
self.findPrevButton.setEnabled(True)
else:
self.lineEditFind.setStyleSheet('')
self.findNextButton.setEnabled(False)
self.findPrevButton.setEnabled(False)
def onClickGoToLine(self, item, column):
tabEditor = self.tabEditorWidget.currentWidget().newEditor
if item.text(1) == 'syntaxError':
check = tabEditor.syntaxCheck(fromContextMenu=False)
if check and not tabEditor.isReadOnly():
self.tabEditorWidget.currentWidget().save()
return
linenr = int(item.text(1))
itemName = str(item.text(0))
charPos = itemName.find(' ')
if charPos != -1:
objName = itemName[0:charPos]
else:
objName = itemName
tabEditor.goToLine(objName, linenr)
def processing(self):
self.shell.commandConsole('processing')
def qtCore(self):
self.shell.commandConsole('qtCore')
def qtGui(self):
self.shell.commandConsole('qtGui')
def toggleEditor(self, checked):
self.splitterObj.show() if checked else self.splitterObj.hide()
if not self.tabEditorWidget:
self.tabEditorWidget.enableToolBarEditor(checked)
self.tabEditorWidget.restoreTabsOrAddNew()
def toggleObjectListWidget(self, checked):
self.listClassMethod.show() if checked else self.listClassMethod.hide()
def findTextEditor(self, checked):
self.widgetFind.show() if checked else self.widgetFind.hide()
def pasteEditor(self):
self.tabEditorWidget.currentWidget().newEditor.paste()
def cutEditor(self):
self.tabEditorWidget.currentWidget().newEditor.cut()
def copyEditor(self):
self.tabEditorWidget.currentWidget().newEditor.copy()
def runScriptEditor(self):
self.tabEditorWidget.currentWidget().newEditor.runScriptCode()
def commentCode(self):
self.tabEditorWidget.currentWidget().newEditor.commentEditorCode(True)
def uncommentCode(self):
self.tabEditorWidget.currentWidget().newEditor.commentEditorCode(False)
def openScriptFileExtEditor(self):
tabWidget = self.tabEditorWidget.currentWidget()
path = tabWidget.path
import subprocess
try:
subprocess.Popen([os.environ['EDITOR'], path])
except KeyError:
QDesktopServices.openUrl(QUrl.fromLocalFile(path))
def openScriptFile(self):
lastDirPath = self.settings.value("pythonConsole/lastDirPath", QDir.home())
openFileTr = QCoreApplication.translate("PythonConsole", "Open File")
fileList = QFileDialog.getOpenFileNames(
self, openFileTr, lastDirPath, "Script file (*.py)")
if fileList:
for pyFile in fileList:
for i in range(self.tabEditorWidget.count()):
tabWidget = self.tabEditorWidget.widget(i)
if tabWidget.path == pyFile:
self.tabEditorWidget.setCurrentWidget(tabWidget)
break
else:
tabName = QFileInfo(pyFile).fileName()
self.tabEditorWidget.newTabEditor(tabName, pyFile)
lastDirPath = QFileInfo(pyFile).path()
self.settings.setValue("pythonConsole/lastDirPath", pyFile)
self.updateTabListScript(pyFile, action='append')
def saveScriptFile(self):
tabWidget = self.tabEditorWidget.currentWidget()
try:
tabWidget.save()
except (IOError, OSError) as error:
msgText = QCoreApplication.translate('PythonConsole',
'The file <b>{0}</b> could not be saved. Error: {1}').format(tabWidget.path,
error.strerror)
self.callWidgetMessageBarEditor(msgText, 2, False)
def saveAsScriptFile(self, index=None):
tabWidget = self.tabEditorWidget.currentWidget()
if not index:
index = self.tabEditorWidget.currentIndex()
if not tabWidget.path:
fileName = self.tabEditorWidget.tabText(index) + '.py'
folder = self.settings.value("pythonConsole/lastDirPath", QDir.home())
pathFileName = os.path.join(folder, fileName)
fileNone = True
else:
pathFileName = tabWidget.path
fileNone = False
saveAsFileTr = QCoreApplication.translate("PythonConsole", "Save File As")
filename = QFileDialog.getSaveFileName(self,
saveAsFileTr,
pathFileName, "Script file (*.py)")
if filename:
try:
tabWidget.save(filename)
except (IOError, OSError) as error:
msgText = QCoreApplication.translate('PythonConsole',
'The file <b>{0}</b> could not be saved. Error: {1}').format(tabWidget.path,
error.strerror)
self.callWidgetMessageBarEditor(msgText, 2, False)
if fileNone:
tabWidget.path = None
else:
tabWidget.path = pathFileName
return
if not fileNone:
self.updateTabListScript(pathFileName, action='remove')
def openHelp(self):
QgsContextHelp.run("PythonConsole")
def openSettings(self):
if optionsDialog(self).exec_():
self.shell.refreshSettingsShell()
self.shellOut.refreshSettingsOutput()
self.tabEditorWidget.refreshSettingsEditor()
def callWidgetMessageBar(self, text):
self.shellOut.widgetMessageBar(iface, text)
def callWidgetMessageBarEditor(self, text, level, timed):
self.tabEditorWidget.widgetMessageBar(iface, text, level, timed)
def updateTabListScript(self, script, action=None):
if action == 'remove':
self.tabListScript.remove(script)
elif action == 'append':
if not self.tabListScript:
self.tabListScript = []
if script not in self.tabListScript:
self.tabListScript.append(script)
else:
self.tabListScript = []
self.settings.setValue("pythonConsole/tabScripts",
self.tabListScript)
def saveSettingsConsole(self):
self.settings.setValue("pythonConsole/splitterConsole", self.splitter.saveState())
self.settings.setValue("pythonConsole/splitterObj", self.splitterObj.saveState())
self.settings.setValue("pythonConsole/splitterEditor", self.splitterEditor.saveState())
self.shell.writeHistoryFile(True)
def restoreSettingsConsole(self):
storedTabScripts = self.settings.value("pythonConsole/tabScripts", [])
self.tabListScript = storedTabScripts
self.splitter.restoreState(self.settings.value("pythonConsole/splitterConsole", QByteArray()))
self.splitterEditor.restoreState(self.settings.value("pythonConsole/splitterEditor", QByteArray()))
self.splitterObj.restoreState(self.settings.value("pythonConsole/splitterObj", QByteArray()))
if __name__ == '__main__':
a = QApplication(sys.argv)
console = PythonConsoleWidget()
console.show()
a.exec_()
| gpl-2.0 |
KitKatXperience/platform_external_chromium_org | native_client_sdk/src/build_tools/tests/verify_ppapi_test.py | 24 | 7271 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
CHROME_SRC = os.path.dirname(os.path.dirname(os.path.dirname(BUILD_TOOLS_DIR)))
MOCK_DIR = os.path.join(CHROME_SRC, "third_party", "pymock")
# For the mock library
sys.path.append(MOCK_DIR)
import mock
sys.path.append(BUILD_TOOLS_DIR)
import verify_ppapi
class TestPartition(unittest.TestCase):
def testBasic(self):
filenames = [
os.path.join('ppapi', 'c', 'ppb_foo.h'),
os.path.join('ppapi', 'cpp', 'foo.h'),
os.path.join('ppapi', 'cpp', 'foo.cc'),
]
result = verify_ppapi.PartitionFiles(filenames)
self.assertTrue(filenames[0] in result['ppapi'])
self.assertTrue(filenames[1] in result['ppapi_cpp'])
self.assertTrue(filenames[2] in result['ppapi_cpp'])
self.assertEqual(0, len(result['ppapi_cpp_private']))
def testIgnoreDocumentation(self):
filenames = [
os.path.join('ppapi', 'c', 'documentation', 'Doxyfile'),
os.path.join('ppapi', 'c', 'documentation', 'index.dox'),
os.path.join('ppapi', 'cpp', 'documentation', 'footer.html'),
]
result = verify_ppapi.PartitionFiles(filenames)
self.assertEqual(0, len(result['ppapi']))
self.assertEqual(0, len(result['ppapi_cpp']))
self.assertEqual(0, len(result['ppapi_cpp_private']))
def testIgnoreTrusted(self):
filenames = [
os.path.join('ppapi', 'c', 'trusted', 'ppb_broker_trusted.h'),
os.path.join('ppapi', 'c', 'trusted', 'ppb_file_io_trusted.h'),
os.path.join('ppapi', 'cpp', 'trusted', 'file_chooser_trusted.cc'),
]
result = verify_ppapi.PartitionFiles(filenames)
self.assertEqual(0, len(result['ppapi']))
self.assertEqual(0, len(result['ppapi_cpp']))
self.assertEqual(0, len(result['ppapi_cpp_private']))
def testIgnoreIfNotSourceOrHeader(self):
filenames = [
os.path.join('ppapi', 'c', 'DEPS'),
os.path.join('ppapi', 'c', 'blah', 'foo.xml'),
os.path.join('ppapi', 'cpp', 'DEPS'),
os.path.join('ppapi', 'cpp', 'foobar.py'),
]
result = verify_ppapi.PartitionFiles(filenames)
self.assertEqual(0, len(result['ppapi']))
self.assertEqual(0, len(result['ppapi_cpp']))
self.assertEqual(0, len(result['ppapi_cpp_private']))
def testIgnoreOtherDirectories(self):
ignored_directories = ['api', 'examples', 'generators', 'host', 'lib',
'native_client', 'proxy', 'shared_impl', 'tests', 'thunk']
# Generate some random files in the ignored directories.
filenames = []
for dirname in ignored_directories:
filenames = os.path.join('ppapi', dirname, 'foo.cc')
filenames = os.path.join('ppapi', dirname, 'subdir', 'foo.h')
filenames = os.path.join('ppapi', dirname, 'DEPS')
result = verify_ppapi.PartitionFiles(filenames)
self.assertEqual(0, len(result['ppapi']))
self.assertEqual(0, len(result['ppapi_cpp']))
self.assertEqual(0, len(result['ppapi_cpp_private']))
class TestGetChangedAndRemoved(unittest.TestCase):
def testBasic(self):
modified_filenames = [
os.path.join('ppapi', 'cpp', 'audio.cc'),
os.path.join('ppapi', 'cpp', 'graphics_2d.cc'),
os.path.join('ppapi', 'cpp', 'foobar.cc'),
os.path.join('ppapi', 'cpp', 'var.cc'),
]
directory_list = [
os.path.join('ppapi', 'cpp', 'audio.cc'),
os.path.join('ppapi', 'cpp', 'graphics_2d.cc'),
]
changed, removed = verify_ppapi.GetChangedAndRemovedFilenames(
modified_filenames, directory_list)
self.assertTrue(modified_filenames[0] in changed)
self.assertTrue(modified_filenames[1] in changed)
self.assertTrue(modified_filenames[2] in removed)
self.assertTrue(modified_filenames[3] in removed)
class TestVerify(unittest.TestCase):
def testBasic(self):
dsc_filename = 'native_client_sdk/src/libraries/ppapi/library.dsc'
# The .dsc files typically uses basenames, not full paths.
dsc_sources_and_headers = [
'ppb_audio.h',
'ppb_console.h',
'ppb_gamepad.h',
'ppb.h',
'ppp_zoom_dev.h',
]
changed_filenames = [
os.path.join('ppapi', 'c', 'ppb_audio.h'),
os.path.join('ppapi', 'c', 'ppb_console.h'),
]
removed_filenames = []
# Should not raise.
verify_ppapi.Verify(dsc_filename, dsc_sources_and_headers,
changed_filenames, removed_filenames)
# Raise, because we removed ppp_zoom_dev.h.
removed_filenames = [
os.path.join('ppapi', 'c', 'ppb_console.h'),
]
self.assertRaises(verify_ppapi.VerifyException, verify_ppapi.Verify,
dsc_filename, dsc_sources_and_headers, changed_filenames,
removed_filenames)
# Raise, because we added ppb_foo.h.
removed_filenames = []
changed_filenames = [
os.path.join('ppapi', 'c', 'ppb_audio.h'),
os.path.join('ppapi', 'c', 'ppb_console.h'),
os.path.join('ppapi', 'c', 'ppb_foo.h'),
]
self.assertRaises(verify_ppapi.VerifyException, verify_ppapi.Verify,
dsc_filename, dsc_sources_and_headers, changed_filenames,
removed_filenames)
def testVerifyPrivate(self):
dsc_filename = \
'native_client_sdk/src/libraries/ppapi_cpp_private/library.dsc'
# The .dsc files typically uses basenames, not full paths.
dsc_sources_and_headers = [
'ext_crx_file_system_private.cc',
'file_io_private.cc',
'ppb_ext_crx_file_system_private.h',
'ppb_file_io_private.h',
'host_resolver_private.h',
'net_address_private.h',
]
changed_filenames = [
os.path.join('ppapi', 'c', 'private', 'ppb_foo_private.h'),
]
removed_filenames = []
with mock.patch('sys.stderr') as sys_stderr:
# When a new private file is added, just print to stderr, but don't fail.
result = verify_ppapi.VerifyOrPrintError(
dsc_filename, dsc_sources_and_headers, changed_filenames,
removed_filenames, is_private=True)
self.assertTrue(result)
self.assertTrue(sys_stderr.write.called)
# If is_private is False, then adding a new interface without updating the
# .dsc is an error.
sys_stderr.reset_mock()
result = verify_ppapi.VerifyOrPrintError(
dsc_filename, dsc_sources_and_headers, changed_filenames,
removed_filenames, is_private=False)
self.assertFalse(result)
self.assertTrue(sys_stderr.write.called)
# Removing a file without updating the .dsc is always an error.
sys_stderr.reset_mock()
changed_filenames = []
removed_filenames = [
os.path.join('ppapi', 'c', 'private', 'net_address_private.h'),
]
result = verify_ppapi.VerifyOrPrintError(
dsc_filename, dsc_sources_and_headers, changed_filenames,
removed_filenames, is_private=True)
self.assertFalse(result)
self.assertTrue(sys_stderr.write.called)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
byt3smith/CIRTKit | modules/reversing/viper/rats/bluebanana.py | 1 | 1370 | # Originally written by Kevin Breen (@KevTheHermit):
# https://github.com/kevthehermit/RATDecoders/blob/master/BlueBanana.py
import os
import sys
import string
from zipfile import ZipFile
from cStringIO import StringIO
from Crypto.Cipher import AES
from lib.common.out import *
def decrypt_aes(key, data):
cipher = AES.new(key)
return cipher.decrypt(data)
def decrypt_conf(conFile):
key1 = '15af8sd4s1c5s511'
key2 = '4e3f5a4c592b243f'
first = decrypt_aes(key1, conFile.decode('hex'))
second = decrypt_aes(key2, first[:-16].decode('hex'))
return second
def extract_config(raw_conf):
conf = {}
clean = filter(lambda x: x in string.printable, raw_conf)
fields = clean.split('<separator>')
conf['Domain'] = fields[0]
conf['Password'] = fields[1]
conf['Port1'] = fields[2]
conf['Port2'] = fields[3]
if len(fields) > 4:
conf['InstallName'] = fields[4]
conf['JarName'] = fields[5]
return conf
def config(data):
new_zip = StringIO(data)
with ZipFile(new_zip) as zip_handle:
for name in zip_handle.namelist():
# This file contains the encrypted config.
if name == 'config.txt':
conf_data = zip_handle.read(name)
if conf_data:
raw_conf = decrypt_conf(conf_data)
conf = extract_config(raw_conf)
return conf
| mit |
yarikoptic/pystatsmodels | statsmodels/examples/l1_demo/short_demo.py | 3 | 3644 | """
You can fit your LikelihoodModel using l1 regularization by changing
the method argument and adding an argument alpha. See code for
details.
The Story
---------
The maximum likelihood (ML) solution works well when the number of data
points is large and the noise is small. When the ML solution starts
"breaking", the regularized solution should do better.
The l1 Solvers
--------------
The standard l1 solver is fmin_slsqp and is included with scipy. It
sometimes has trouble verifying convergence when the data size is
large.
The l1_cvxopt_cp solver is part of CVXOPT and this package needs to be
installed separately. It works well even for larger data sizes.
"""
import statsmodels.api as sm
import matplotlib.pyplot as plt
import numpy as np
import pdb # pdb.set_trace()
## Load the data from Spector and Mazzeo (1980)
spector_data = sm.datasets.spector.load()
spector_data.exog = sm.add_constant(spector_data.exog)
N = len(spector_data.endog)
K = spector_data.exog.shape[1]
### Logit Model
logit_mod = sm.Logit(spector_data.endog, spector_data.exog)
## Standard logistic regression
logit_res = logit_mod.fit()
## Regularized regression
# Set the reularization parameter to something reasonable
alpha = 0.05 * N * np.ones(K)
# Use l1, which solves via a built-in (scipy.optimize) solver
logit_l1_res = logit_mod.fit_regularized(method='l1', alpha=alpha, acc=1e-6)
# Use l1_cvxopt_cp, which solves with a CVXOPT solver
logit_l1_cvxopt_res = logit_mod.fit_regularized(
method='l1_cvxopt_cp', alpha=alpha)
## Print results
print "============ Results for Logit ================="
print "ML results"
print logit_res.summary()
print "l1 results"
print logit_l1_res.summary()
print logit_l1_cvxopt_res.summary()
### Multinomial Logit Example using American National Election Studies Data
anes_data = sm.datasets.anes96.load()
anes_exog = anes_data.exog
anes_exog = sm.add_constant(anes_exog, prepend=False)
mlogit_mod = sm.MNLogit(anes_data.endog, anes_exog)
mlogit_res = mlogit_mod.fit()
## Set the regularization parameter.
alpha = 10 * np.ones((mlogit_mod.J - 1, mlogit_mod.K))
# Don't regularize the constant
alpha[-1,:] = 0
mlogit_l1_res = mlogit_mod.fit_regularized(method='l1', alpha=alpha)
print mlogit_l1_res.params
#mlogit_l1_res = mlogit_mod.fit_regularized(
# method='l1_cvxopt_cp', alpha=alpha, abstol=1e-10, trim_tol=1e-6)
#print mlogit_l1_res.params
## Print results
print "============ Results for MNLogit ================="
print "ML results"
print mlogit_res.summary()
print "l1 results"
print mlogit_l1_res.summary()
#
#
#### Logit example with many params, sweeping alpha
spector_data = sm.datasets.spector.load()
X = spector_data.exog
Y = spector_data.endog
## Fit
N = 50 # number of points to solve at
K = X.shape[1]
logit_mod = sm.Logit(Y, X)
coeff = np.zeros((N, K)) # Holds the coefficients
alphas = 1 / np.logspace(-0.5, 2, N)
## Sweep alpha and store the coefficients
# QC check doesn't always pass with the default options.
# Use the options QC_verbose=True and disp=True
# to to see what is happening. It just barely doesn't pass, so I decreased
# acc and increased QC_tol to make it pass
for n, alpha in enumerate(alphas):
logit_res = logit_mod.fit_regularized(
method='l1', alpha=alpha, trim_mode='off', QC_tol=0.1, disp=False,
QC_verbose=True, acc=1e-15)
coeff[n,:] = logit_res.params
## Plot
plt.figure(1);plt.clf();plt.grid()
plt.title('Regularization Path');
plt.xlabel('alpha');
plt.ylabel('Parameter value');
for i in xrange(K):
plt.plot(alphas, coeff[:,i], label='X'+str(i), lw=3)
plt.legend(loc='best')
plt.show()
| bsd-3-clause |
bbrieber/emacs-for-python | python-libs/epy-unittest.py | 12 | 1091 | from Pymacs import Let, lisp
from unittest import TestLoader
from itertools import groupby
import os
def symbol(sym):
return lisp[sym]
def discover(root_dir):
if not os.path.exists(root_dir):
return []
loader = TestLoader()
prev_dir = os.curdir
os.chdir(root_dir)
tests = loader.discover(root_dir, top_level_dir=root_dir)
os.chdir(prev_dir)
ret = []
for suite in tests:
for suite2 in suite:
if suite2.__class__.__name__ == 'ModuleImportFailure':
continue
for test in suite2:
name = ".".join((test.__class__.__name__, test._testMethodName))
module = test.__module__
ret.append([symbol(":name"), name,
symbol(":module"), module,
symbol(":root"), root_dir])
modkey = lambda x: x[3]
ret.sort(key=modkey)
return [[k, list(g)] for k,g in groupby(ret, key=modkey)] # Converting to a list of lists
if __name__ == '__main__':
a = discover("/home/galois/workspace/shit/")
| gpl-3.0 |
h0wl/FuzzLabs | engine/requests/file_JPEG_S.py | 7 | 8600 | # =============================================================================
# JPEG Image Descriptor - SMALL
# This file is part of the FuzzLabs Fuzzing Framework
#
# Author: FuzzLabs
# Date: 21/07/2015
#
# Original file MD5 sum: 4dde17f30fee6e6120a58d890a4ec572
# Original file SHA1 sum: 1e1d1c90b4b0dd9ad5719be96dcbfabf32ff9aee
#
# =============================================================================
from sulley import *
JPEG_SOI = "\xFF\xD8" # Start of Image Marker
JPEG_EOI = "\xFF\xD9" # End of Image Marker
JPEG_DQT = "\xFF\xDB" # Quantization Table
JPEG_DHT = "\xFF\xC4" # Huffman Table
JPEG_SOS = "\xFF\xDA" # Start of Scan
JPEG_COM = "\xFF\xFE" # Comment
JPEG_APP0 = "\xFF\xE0" # Application Marker 0
JPEG_APP1 = "\xFF\xE1" # Application Marker 1
JPEG_SOF0 = "\xFF\xC0" # Start of Frame - Baseline DCT
JPEG_SOF2 = "\xFF\xC2" # Start of Frame - Progressive DCT
s_initialize("JPEG")
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_SOI ffd8 0 0x0 0x2
# -----------------------------------------------------------------------------
if s_block_start("JPEG_IMG_HDR"):
s_binary(JPEG_SOI)
s_block_end("JPEG_IMG_HDR")
s_repeat("JPEG_IMG_HDR", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_APP0 ffe0 16 0x2 0x14
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_HDR"):
s_binary(JPEG_APP0)
s_size("JPEG_DATA_HDR", endian=">", inclusive=True, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_HDR"):
s_string("JFIF")
s_string("\x00")
s_byte(0x1) # Major Version
s_byte(0x1) # Minor Version
s_byte(0x1) # Density unit
s_word(0x48, endian=">") # Xdensity
s_word(0x48, endian=">") # Ydensity
s_byte(0x0) # Xthumbnail
s_byte(0x0) # Ythumbnail
s_block_end("JPEG_DATA_HDR")
s_block_end("O_JPEG_DATA_HDR")
s_repeat("O_JPEG_DATA_HDR", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_COM fffe 16 0x14 0x26
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_COM"):
s_binary(JPEG_COM)
s_size("JPEG_DATA_COM", endian=">", inclusive=True, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_COM"):
s_string("NCC_GROUP_TEST")
s_block_end("JPEG_DATA_COM")
s_block_end("O_JPEG_DATA_COM")
s_repeat("O_JPEG_DATA_COM", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_DQT ffdb 67 0x26 0x6b
# -----------------------------------------------------------------------------
s_binary(JPEG_DQT)
s_size("JPEG_DATA_DQT_1", endian=">", inclusive=True, length=2, fuzzable=True)
if s_block_start("JPEG_DATA_DQT_1"):
s_binary("\x00\x03\x02\x02\x03\x02\x02\x03" +\
"\x03\x03\x03\x04\x03\x03\x04\x05" +\
"\x08\x05\x05\x04\x04\x05\x0A\x07" +\
"\x07\x06\x08\x0C\x0A\x0C\x0C\x0B" +\
"\x0A\x0B\x0B\x0D\x0E\x12\x10\x0D" +\
"\x0E\x11\x0E\x0B\x0B\x10\x16\x10" +\
"\x11\x13\x14\x15\x15\x15\x0C\x0F" +\
"\x17\x18\x16\x14\x18\x12\x14\x15" +\
"\x14")
s_block_end("JPEG_DATA_DQT_1")
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_DQT ffdb 67 0x6b 0xb0
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_DQT_2"):
s_binary(JPEG_DQT)
s_size("JPEG_DATA_DQT_2", endian=">", inclusive=True, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_DQT_2"):
s_binary("\x01\x03\x04\x04\x05\x04\x05\x09" +\
"\x05\x05\x09\x14\x0D\x0B\x0D\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14\x14\x14\x14\x14\x14\x14\x14" +\
"\x14")
s_block_end("JPEG_DATA_DQT_2")
s_block_end("O_JPEG_DATA_DQT_2")
s_repeat("O_JPEG_DATA_DQT_2", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_SOF2 ffc2 17 0xb0 0xc3
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_SOF2_1"):
s_binary(JPEG_SOF2)
s_size("JPEG_DATA_SOF2_1", endian=">", inclusive=True, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_SOF2_1"):
s_binary("\x08\x00\x01\x00\x01\x03\x01\x11" +\
"\x00\x02\x11\x01\x03\x11\x01")
s_block_end("JPEG_DATA_SOF2_1")
s_block_end("O_JPEG_DATA_SOF2_1")
s_repeat("O_JPEG_DATA_SOF2_1", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_DHT ffc4 20 0xc3 0xd9
# -----------------------------------------------------------------------------
s_binary(JPEG_DHT)
s_size("JPEG_DATA_DHT_1", endian=">", inclusive=True, length=2, fuzzable=True)
if s_block_start("JPEG_DATA_DHT_1"):
s_binary("\x00\x01\x00\x00\x00\x00\x00\x00" +\
"\x00\x00\x00\x00\x00\x00\x00\x00" +\
"\x00\x08")
s_block_end("JPEG_DATA_DHT_1")
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_DHT ffc4 20 0xd9 0xef
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_DHT_2"):
s_binary(JPEG_DHT)
s_size("JPEG_DATA_DHT_2", endian=">", inclusive=True, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_DHT_2"):
s_binary("\x01\x01\x00\x00\x00\x00\x00\x00" +\
"\x00\x00\x00\x00\x00\x00\x00\x00" +\
"\x00\x00")
s_block_end("JPEG_DATA_DHT_2")
s_block_end("O_JPEG_DATA_DHT_2")
s_repeat("O_JPEG_DATA_SOF2_1", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_SOS ffda 12 0xef 0xff
# -----------------------------------------------------------------------------
if s_block_start("O_JPEG_DATA_SOS_1"):
s_binary(JPEG_SOS)
s_size("JPEG_DATA_SOS_1", endian=">", inclusive=False, length=2,
fuzzable=True)
if s_block_start("JPEG_DATA_SOS_1"):
s_binary("\x03\x01\x00\x02\x10\x03\x10\x00" +\
"\x00\x01\x2A\x9F")
s_block_end("JPEG_DATA_SOS_1")
s_block_end("O_JPEG_DATA_SOS_1")
s_repeat("O_JPEG_DATA_SOS_1", min_reps=0, max_reps=100, step=10)
# -----------------------------------------------------------------------------
# Section Bin name Size Start offset End offset
# JPEG_EOI ffd9 0 0xff 0x101
# -----------------------------------------------------------------------------
s_string(JPEG_EOI)
| gpl-2.0 |
Zhongqilong/mykbengineer | kbe/res/scripts/common/Lib/configparser.py | 84 | 49437 | """Configuration file parser.
A configuration file consists of sections, lead by a "[section]" header,
and followed by "name: value" entries, with continuations and such in
the style of RFC 822.
Intrinsic defaults can be specified by passing them into the
ConfigParser constructor as a dictionary.
class:
ConfigParser -- responsible for parsing a list of
configuration files, and managing the parsed database.
methods:
__init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
delimiters=('=', ':'), comment_prefixes=('#', ';'),
inline_comment_prefixes=None, strict=True,
empty_lines_in_values=True):
Create the parser. When `defaults' is given, it is initialized into the
dictionary or intrinsic defaults. The keys must be strings, the values
must be appropriate for %()s string interpolation.
When `dict_type' is given, it will be used to create the dictionary
objects for the list of sections, for the options within a section, and
for the default values.
When `delimiters' is given, it will be used as the set of substrings
that divide keys from values.
When `comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in empty lines. Comments can be
indented.
When `inline_comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in non-empty lines.
When `strict` is True, the parser won't allow for any section or option
duplicates while reading from a single source (file, string or
dictionary). Default is True.
When `empty_lines_in_values' is False (default: True), each empty line
marks the end of an option. Otherwise, internal empty lines of
a multiline option are kept as part of the value.
When `allow_no_value' is True (default: False), options without
values are accepted; the value presented for these is None.
sections()
Return all the configuration section names, sans DEFAULT.
has_section(section)
Return whether the given section exists.
has_option(section, option)
Return whether the given option exists in the given section.
options(section)
Return list of configuration options for the named section.
read(filenames, encoding=None)
Read and parse the list of named configuration files, given by
name. A single filename is also allowed. Non-existing files
are ignored. Return list of successfully read files.
read_file(f, filename=None)
Read and parse one configuration file, given as a file object.
The filename defaults to f.name; it is only used in error
messages (if f has no `name' attribute, the string `<???>' is used).
read_string(string)
Read configuration from a given string.
read_dict(dictionary)
Read configuration from a dictionary. Keys are section names,
values are dictionaries with keys and values that should be present
in the section. If the used dictionary type preserves order, sections
and their keys will be added in order. Values are automatically
converted to strings.
get(section, option, raw=False, vars=None, fallback=_UNSET)
Return a string value for the named option. All % interpolations are
expanded in the return values, based on the defaults passed into the
constructor and the DEFAULT section. Additional substitutions may be
provided using the `vars' argument, which must be a dictionary whose
contents override any pre-existing defaults. If `option' is a key in
`vars', the value from `vars' is used.
getint(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to an integer.
getfloat(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a float.
getboolean(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a boolean (currently case
insensitively defined as 0, false, no, off for False, and 1, true,
yes, on for True). Returns False or True.
items(section=_UNSET, raw=False, vars=None)
If section is given, return a list of tuples with (name, value) for
each option in the section. Otherwise, return a list of tuples with
(section_name, section_proxy) for each section, including DEFAULTSECT.
remove_section(section)
Remove the given file section and all its options.
remove_option(section, option)
Remove the given option from the given section.
set(section, option, value)
Set the given option.
write(fp, space_around_delimiters=True)
Write the configuration state in .ini format. If
`space_around_delimiters' is True (the default), delimiters
between keys and values are surrounded by spaces.
"""
from collections.abc import MutableMapping
from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
import functools
import io
import itertools
import re
import sys
import warnings
__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
"NoOptionError", "InterpolationError", "InterpolationDepthError",
"InterpolationSyntaxError", "ParsingError",
"MissingSectionHeaderError",
"ConfigParser", "SafeConfigParser", "RawConfigParser",
"DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
DEFAULTSECT = "DEFAULT"
MAX_INTERPOLATION_DEPTH = 10
# exception classes
class Error(Exception):
    """Base class for ConfigParser exceptions."""

    def __init__(self, msg=''):
        # `message` mirrors the exception text for backward compatibility.
        self.message = msg
        Exception.__init__(self, msg)

    def __repr__(self):
        # Both repr() and str() show the bare message, matching the
        # historical ConfigParser behaviour.
        return self.message

    __str__ = __repr__
class NoSectionError(Error):
    """Raised when no section matches a requested option."""

    def __init__(self, section):
        super().__init__('No section: %r' % (section,))
        self.section = section
        self.args = (section, )
class DuplicateSectionError(Error):
    """Raised when a section is repeated in an input source.

    Possible repetitions that raise this exception are: multiple creation
    using the API or in strict parsers when a section is found more than once
    in a single input file, string or dictionary.
    """

    def __init__(self, section, source=None, lineno=None):
        tail = "%r already exists" % (section,)
        if source is None:
            # Duplicate created programmatically via add_section().
            text = "Section " + tail
        else:
            location = "While reading from " + repr(source)
            if lineno is not None:
                location += " [line {0:2d}]".format(lineno)
            text = location + ": section " + tail
        Error.__init__(self, text)
        self.section = section
        self.source = source
        self.lineno = lineno
        self.args = (section, source, lineno)
class DuplicateOptionError(Error):
    """Raised by strict parsers when an option is repeated in an input source.

    Current implementation raises this exception only when an option is found
    more than once in a single file, string or dictionary.
    """

    def __init__(self, section, option, source=None, lineno=None):
        tail = "%r in section %r already exists" % (option, section)
        if source is None:
            # Duplicate created programmatically.
            text = "Option " + tail
        else:
            location = "While reading from " + repr(source)
            if lineno is not None:
                location += " [line {0:2d}]".format(lineno)
            text = location + ": option " + tail
        Error.__init__(self, text)
        self.section = section
        self.option = option
        self.source = source
        self.lineno = lineno
        self.args = (section, option, source, lineno)
class NoOptionError(Error):
    """A requested option was not found."""

    def __init__(self, option, section):
        super().__init__("No option %r in section: %r" %
                         (option, section))
        self.option = option
        self.section = section
        self.args = (option, section)
class InterpolationError(Error):
    """Base class for interpolation-related exceptions."""

    def __init__(self, option, section, msg):
        super().__init__(msg)
        self.option = option
        self.section = section
        self.args = (option, section, msg)
class InterpolationMissingOptionError(InterpolationError):
    """A string substitution required a setting which was not available."""

    def __init__(self, option, section, rawval, reference):
        text = ("Bad value substitution:\n"
                "\tsection: [%s]\n"
                "\toption : %s\n"
                "\tkey : %s\n"
                "\trawval : %s\n"
                % (section, option, reference, rawval))
        super().__init__(option, section, text)
        self.reference = reference
        self.args = (option, section, rawval, reference)
class InterpolationSyntaxError(InterpolationError):
    """Raised when the source text contains invalid syntax.

    Current implementation raises this exception when the source text into
    which substitutions are made does not conform to the required syntax.
    """
class InterpolationDepthError(InterpolationError):
    """Raised when substitutions are nested too deeply."""

    def __init__(self, option, section, rawval):
        text = ("Value interpolation too deeply recursive:\n"
                "\tsection: [%s]\n"
                "\toption : %s\n"
                "\trawval : %s\n"
                % (section, option, rawval))
        super().__init__(option, section, text)
        self.args = (option, section, rawval)
class ParsingError(Error):
    """Raised when a configuration file does not follow legal syntax."""

    def __init__(self, source=None, filename=None):
        # Exactly one of `source'/`filename' has to be given; `filename'
        # is kept only for backward compatibility.
        if filename and source:
            raise ValueError("Cannot specify both `filename' and `source'. "
                             "Use `source'.")
        if not filename and not source:
            raise ValueError("Required argument `source' not given.")
        if filename:
            source = filename
        super().__init__('Source contains parsing errors: %r' % source)
        self.source = source
        self.errors = []
        self.args = (source, )

    @property
    def filename(self):
        """Deprecated, use `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions.  "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        return self.source

    @filename.setter
    def filename(self, value):
        """Deprecated, user `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions.  "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.source = value

    def append(self, lineno, line):
        # Record a bogus line and extend the aggregated message.
        self.errors.append((lineno, line))
        self.message += '\n\t[line %2d]: %s' % (lineno, line)
class MissingSectionHeaderError(ParsingError):
    """Raised when a key-value pair is found before any section header."""

    def __init__(self, filename, lineno, line):
        # Bypass ParsingError.__init__ on purpose: this error carries its
        # own message format and attribute set.
        text = ('File contains no section headers.\nfile: %r, line: %d\n%r' %
                (filename, lineno, line))
        Error.__init__(self, text)
        self.source = filename
        self.lineno = lineno
        self.line = line
        self.args = (filename, lineno, line)
# Used in parser getters to indicate the default behaviour when a specific
# option is not found is to raise an exception. Created to enable `None' as
# a valid fallback value.
# Identity comparison (`x is _UNSET`) distinguishes "argument omitted"
# from an explicit ``fallback=None``.
_UNSET = object()
class Interpolation:
    """Dummy interpolation that passes the value through with no changes."""

    # Each hook simply hands the value back untouched; subclasses override
    # the ones they care about.

    def before_get(self, parser, section, option, value, defaults):
        return value

    def before_set(self, parser, section, option, value):
        return value

    def before_read(self, parser, section, option, value):
        return value

    def before_write(self, parser, section, option, value):
        return value
class BasicInterpolation(Interpolation):
    """Interpolation as implemented in the classic ConfigParser.

    The option values can contain format strings which refer to other values
    in the same section, or values in the special default section.  For
    example:

        something: %(dir)s/whatever

    would resolve the "%(dir)s" to the value of dir.  All reference
    expansions are done late, on demand.  A bare % must be escaped as %%;
    any other % usage raises `InterpolationSyntaxError'."""

    # Matches a %(name)s style variable reference.
    _KEYCRE = re.compile(r"%\(([^)]+)\)s")

    def before_get(self, parser, section, option, value, defaults):
        parts = []
        self._interpolate_some(parser, option, parts, value, section,
                               defaults, 1)
        return ''.join(parts)

    def before_set(self, parser, section, option, value):
        # Strip escaped percents and valid references; anything left over
        # containing '%' is a syntax error.
        leftover = value.replace('%%', '')
        leftover = self._KEYCRE.sub('', leftover)
        if '%' in leftover:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, leftover.find('%')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursive workhorse: append literal pieces and expanded references
        # to `accum`.  `depth` guards against circular references.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            pos = rest.find("%")
            if pos < 0:
                accum.append(rest)
                return
            if pos > 0:
                accum.append(rest[:pos])
                rest = rest[pos:]
            marker = rest[1:2]
            if marker == "%":
                # '%%' is an escaped percent sign.
                accum.append("%")
                rest = rest[2:]
            elif marker == "(":
                match = self._KEYCRE.match(rest)
                if match is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = parser.optionxform(match.group(1))
                rest = rest[match.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # The referenced value itself needs expanding.
                    self._interpolate_some(parser, option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', "
                    "found: %r" % (rest,))
class ExtendedInterpolation(Interpolation):
    """Advanced variant of interpolation, supports the syntax used by
    `zc.buildout'. Enables interpolation between sections."""

    # Matches a ${option} or ${section:option} reference.
    _KEYCRE = re.compile(r"\$\{([^}]+)\}")

    def before_get(self, parser, section, option, value, defaults):
        # Expand all ${...} references (recursively) and join the pieces.
        L = []
        self._interpolate_some(parser, option, L, value, section, defaults, 1)
        return ''.join(L)

    def before_set(self, parser, section, option, value):
        # Validate: every '$' must be part of '$$' or a ${...} reference.
        tmp_value = value.replace('$$', '') # escaped dollar signs
        tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
        if '$' in tmp_value:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, tmp_value.find('$')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursive workhorse: append literal text and expanded references
        # to `accum`.  `depth` guards against circular references.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("$")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "$":
                # '$$' is an escaped dollar sign.
                accum.append("$")
                rest = rest[2:]
            elif c == "{":
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                # A reference is either 'opt' (same section) or 'sect:opt'.
                path = m.group(1).split(':')
                rest = rest[m.end():]
                sect = section
                opt = option
                try:
                    if len(path) == 1:
                        opt = parser.optionxform(path[0])
                        v = map[opt]
                    elif len(path) == 2:
                        sect = path[0]
                        opt = parser.optionxform(path[1])
                        # raw=True: expansion of the target happens below,
                        # not inside parser.get().
                        v = parser.get(sect, opt, raw=True)
                    else:
                        raise InterpolationSyntaxError(
                            option, section,
                            "More than one ':' found: %r" % (rest,))
                except (KeyError, NoSectionError, NoOptionError):
                    raise InterpolationMissingOptionError(
                        option, section, rest, ":".join(path))
                if "$" in v:
                    # The target value itself contains references; recurse
                    # using the target section's raw items as the lookup map.
                    self._interpolate_some(parser, opt, accum, v, sect,
                                           dict(parser.items(sect, raw=True)),
                                           depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'$' must be followed by '$' or '{', "
                    "found: %r" % (rest,))
class LegacyInterpolation(Interpolation):
    """Deprecated interpolation used in old versions of ConfigParser.
    Use BasicInterpolation or ExtendedInterpolation instead."""

    # Matches either a %(name)s reference (group 1 set) or any single
    # character (group 1 is None).
    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")

    def before_get(self, parser, section, option, value, vars):
        rawval = value
        # Substitute repeatedly, bounded by the interpolation depth limit.
        for _ in range(MAX_INTERPOLATION_DEPTH):
            if not (value and "%(" in value):
                break
            replacer = functools.partial(self._interpolation_replace,
                                         parser=parser)
            value = self._KEYCRE.sub(replacer, value)
            try:
                value = value % vars
            except KeyError as err:
                raise InterpolationMissingOptionError(
                    option, section, rawval, err.args[0])
        if value and "%(" in value:
            raise InterpolationDepthError(option, section, rawval)
        return value

    def before_set(self, parser, section, option, value):
        return value

    @staticmethod
    def _interpolation_replace(match, parser):
        # Normalize option names inside %()s references; leave every other
        # character untouched.
        name = match.group(1)
        if name is None:
            return match.group()
        return "%%(%s)s" % parser.optionxform(name)
class RawConfigParser(MutableMapping):
    """ConfigParser that does not do interpolation."""

    # Regular expressions for parsing section headers and options.
    # Both templates are compiled with re.VERBOSE, so whitespace and
    # '#' comments inside them are ignored.
    _SECT_TMPL = r"""
        \[                                 # [
        (?P<header>[^]]+)                  # very permissive!
        \]                                 # ]
        """
    _OPT_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?P<vi>{delim})\s*              # any number of space/tab,
                                           # followed by any of the
                                           # allowed delimiters,
                                           # followed by any space/tab
        (?P<value>.*)$                     # everything up to eol
        """
    _OPT_NV_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?:                             # any number of space/tab,
        (?P<vi>{delim})\s*                 # optionally followed by
                                           # any of the allowed
                                           # delimiters, followed by any
                                           # space/tab
        (?P<value>.*))?$                   # everything up to eol
        """
    # Interpolation algorithm to be used if the user does not specify another
    _DEFAULT_INTERPOLATION = Interpolation()
    # Compiled regular expression for matching sections
    SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
    # Compiled regular expression for matching options with typical separators
    OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching options with optional values
    # delimited using typical separators
    OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching leading whitespace in a line
    NONSPACECRE = re.compile(r"\S")
    # Possible boolean values in the configuration.
    BOOLEAN_STATES = {'1': True, 'yes': True, 'true': True, 'on': True,
                      '0': False, 'no': False, 'false': False, 'off': False}

    def __init__(self, defaults=None, dict_type=_default_dict,
                 allow_no_value=False, *, delimiters=('=', ':'),
                 comment_prefixes=('#', ';'), inline_comment_prefixes=None,
                 strict=True, empty_lines_in_values=True,
                 default_section=DEFAULTSECT,
                 interpolation=_UNSET):
        # `dict_type` is the mapping class used for all internal storage;
        # an order-preserving type gives predictable write-out order.
        self._dict = dict_type
        self._sections = self._dict()
        self._defaults = self._dict()
        # One SectionProxy per section, created eagerly for DEFAULT.
        self._proxies = self._dict()
        self._proxies[default_section] = SectionProxy(self, default_section)
        if defaults:
            for key, value in defaults.items():
                self._defaults[self.optionxform(key)] = value
        self._delimiters = tuple(delimiters)
        if delimiters == ('=', ':'):
            # Fast path: reuse the precompiled class-level patterns.
            self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
        else:
            d = "|".join(re.escape(d) for d in delimiters)
            if allow_no_value:
                self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d),
                                          re.VERBOSE)
            else:
                self._optcre = re.compile(self._OPT_TMPL.format(delim=d),
                                          re.VERBOSE)
        self._comment_prefixes = tuple(comment_prefixes or ())
        self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
        self._strict = strict
        self._allow_no_value = allow_no_value
        self._empty_lines_in_values = empty_lines_in_values
        self.default_section=default_section
        self._interpolation = interpolation
        if self._interpolation is _UNSET:
            self._interpolation = self._DEFAULT_INTERPOLATION
        if self._interpolation is None:
            # interpolation=None explicitly requests the no-op algorithm.
            self._interpolation = Interpolation()

    def defaults(self):
        """Return the dictionary holding the DEFAULT section's options."""
        return self._defaults

    def sections(self):
        """Return a list of section names, excluding [DEFAULT]"""
        # self._sections will never have [DEFAULT] in it
        return list(self._sections.keys())

    def add_section(self, section):
        """Create a new section in the configuration.

        Raise DuplicateSectionError if a section by the specified name
        already exists. Raise ValueError if name is DEFAULT.
        """
        if section == self.default_section:
            raise ValueError('Invalid section name: %r' % section)
        if section in self._sections:
            raise DuplicateSectionError(section)
        self._sections[section] = self._dict()
        self._proxies[section] = SectionProxy(self, section)

    def has_section(self, section):
        """Indicate whether the named section is present in the configuration.

        The DEFAULT section is not acknowledged.
        """
        return section in self._sections

    def options(self, section):
        """Return a list of option names for the given section name."""
        try:
            opts = self._sections[section].copy()
        except KeyError:
            raise NoSectionError(section)
        # DEFAULT options are visible in every section.
        opts.update(self._defaults)
        return list(opts.keys())

    def read(self, filenames, encoding=None):
        """Read and parse a filename or a list of filenames.

        Files that cannot be opened are silently ignored; this is
        designed so that you can specify a list of potential
        configuration file locations (e.g. current directory, user's
        home directory, systemwide directory), and all existing
        configuration files in the list will be read. A single
        filename may also be given.

        Return list of successfully read files.
        """
        if isinstance(filenames, str):
            filenames = [filenames]
        read_ok = []
        for filename in filenames:
            try:
                with open(filename, encoding=encoding) as fp:
                    self._read(fp, filename)
            except OSError:
                # Unreadable/missing files are skipped by design.
                continue
            read_ok.append(filename)
        return read_ok

    def read_file(self, f, source=None):
        """Like read() but the argument must be a file-like object.

        The `f' argument must be iterable, returning one line at a time.
        Optional second argument is the `source' specifying the name of the
        file being read. If not given, it is taken from f.name. If `f' has no
        `name' attribute, `<???>' is used.
        """
        if source is None:
            try:
                source = f.name
            except AttributeError:
                source = '<???>'
        self._read(f, source)

    def read_string(self, string, source='<string>'):
        """Read configuration from a given string."""
        sfile = io.StringIO(string)
        self.read_file(sfile, source)

    def read_dict(self, dictionary, source='<dict>'):
        """Read configuration from a dictionary.

        Keys are section names, values are dictionaries with keys and values
        that should be present in the section. If the used dictionary type
        preserves order, sections and their keys will be added in order.

        All types held in the dictionary are converted to strings during
        reading, including section names, option names and keys.

        Optional second argument is the `source' specifying the name of the
        dictionary being read.
        """
        elements_added = set()
        for section, keys in dictionary.items():
            section = str(section)
            try:
                self.add_section(section)
            except (DuplicateSectionError, ValueError):
                # Re-raise only when strict mode sees the same section twice
                # within this call; pre-existing sections are extended.
                if self._strict and section in elements_added:
                    raise
            elements_added.add(section)
            for key, value in keys.items():
                key = self.optionxform(str(key))
                if value is not None:
                    value = str(value)
                if self._strict and (section, key) in elements_added:
                    raise DuplicateOptionError(section, key, source)
                elements_added.add((section, key))
                self.set(section, key, value)

    def readfp(self, fp, filename=None):
        """Deprecated, use read_file instead."""
        warnings.warn(
            "This method will be removed in future versions.  "
            "Use 'parser.read_file()' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.read_file(fp, source=filename)

    def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
        """Get an option value for a given section.

        If `vars' is provided, it must be a dictionary. The option is looked up
        in `vars' (if provided), `section', and in `DEFAULTSECT' in that order.
        If the key is not found and `fallback' is provided, it is used as
        a fallback value. `None' can be provided as a `fallback' value.

        If interpolation is enabled and the optional argument `raw' is False,
        all interpolations are expanded in the return values.

        Arguments `raw', `vars', and `fallback' are keyword only.

        The section DEFAULT is special.
        """
        try:
            d = self._unify_values(section, vars)
        except NoSectionError:
            if fallback is _UNSET:
                raise
            else:
                return fallback
        option = self.optionxform(option)
        try:
            value = d[option]
        except KeyError:
            if fallback is _UNSET:
                raise NoOptionError(option, section)
            else:
                return fallback
        if raw or value is None:
            return value
        else:
            return self._interpolation.before_get(self, section, option, value,
                                                  d)

    def _get(self, section, conv, option, **kwargs):
        # Shared implementation of the typed getters: fetch, then convert.
        return conv(self.get(section, option, **kwargs))

    def getint(self, section, option, *, raw=False, vars=None,
               fallback=_UNSET):
        """Like get(), but convert the value to an integer."""
        try:
            return self._get(section, int, option, raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def getfloat(self, section, option, *, raw=False, vars=None,
                 fallback=_UNSET):
        """Like get(), but convert the value to a float."""
        try:
            return self._get(section, float, option, raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def getboolean(self, section, option, *, raw=False, vars=None,
                   fallback=_UNSET):
        """Like get(), but convert the value to a boolean.

        See BOOLEAN_STATES for the accepted (case-insensitive) values.
        """
        try:
            return self._get(section, self._convert_to_boolean, option,
                             raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def items(self, section=_UNSET, raw=False, vars=None):
        """Return a list of (name, value) tuples for each option in a section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        if section is _UNSET:
            # Mapping-protocol form: (section_name, section_proxy) pairs.
            return super().items()
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        value_getter = lambda option: self._interpolation.before_get(self,
            section, option, d[option], d)
        if raw:
            value_getter = lambda option: d[option]
        return [(option, value_getter(option)) for option in d.keys()]

    def popitem(self):
        """Remove a section from the parser and return it as
        a (section_name, section_proxy) tuple. If no section is present, raise
        KeyError.

        The section DEFAULT is never returned because it cannot be removed.
        """
        # The loop body always returns on the first iteration; it is only a
        # convenient way to grab "some" section if one exists.
        for key in self.sections():
            value = self[key]
            del self[key]
            return key, value
        raise KeyError

    def optionxform(self, optionstr):
        # Normalization applied to every option name; default folds to
        # lower case.  Subclasses may override (e.g. with `str` for
        # case-sensitive options).
        return optionstr.lower()

    def has_option(self, section, option):
        """Check for the existence of a given option in a given section.
        If the specified `section' is None or an empty string, DEFAULT is
        assumed. If the specified `section' does not exist, returns False."""
        if not section or section == self.default_section:
            option = self.optionxform(option)
            return option in self._defaults
        elif section not in self._sections:
            return False
        else:
            option = self.optionxform(option)
            return (option in self._sections[section]
                    or option in self._defaults)

    def set(self, section, option, value=None):
        """Set an option."""
        if value:
            value = self._interpolation.before_set(self, section, option,
                                                   value)
        if not section or section == self.default_section:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        sectdict[self.optionxform(option)] = value

    def write(self, fp, space_around_delimiters=True):
        """Write an .ini-format representation of the configuration state.

        If `space_around_delimiters' is True (the default), delimiters
        between keys and values are surrounded by spaces.
        """
        if space_around_delimiters:
            d = " {} ".format(self._delimiters[0])
        else:
            d = self._delimiters[0]
        if self._defaults:
            self._write_section(fp, self.default_section,
                                self._defaults.items(), d)
        for section in self._sections:
            self._write_section(fp, section,
                                self._sections[section].items(), d)

    def _write_section(self, fp, section_name, section_items, delimiter):
        """Write a single section to the specified `fp'."""
        fp.write("[{}]\n".format(section_name))
        for key, value in section_items:
            value = self._interpolation.before_write(self, section_name, key,
                                                     value)
            if value is not None or not self._allow_no_value:
                # Indent continuation lines so they parse back as one value.
                value = delimiter + str(value).replace('\n', '\n\t')
            else:
                value = ""
            fp.write("{}{}\n".format(key, value))
        fp.write("\n")

    def remove_option(self, section, option):
        """Remove an option."""
        if not section or section == self.default_section:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        option = self.optionxform(option)
        existed = option in sectdict
        if existed:
            del sectdict[option]
        return existed

    def remove_section(self, section):
        """Remove a file section."""
        existed = section in self._sections
        if existed:
            del self._sections[section]
            del self._proxies[section]
        return existed

    def __getitem__(self, key):
        # Mapping access returns the SectionProxy, not the raw dict.
        if key != self.default_section and not self.has_section(key):
            raise KeyError(key)
        return self._proxies[key]

    def __setitem__(self, key, value):
        # To conform with the mapping protocol, overwrites existing values in
        # the section.

        # XXX this is not atomic if read_dict fails at any point. Then again,
        # no update method in configparser is atomic in this implementation.
        if key == self.default_section:
            self._defaults.clear()
        elif key in self._sections:
            self._sections[key].clear()
        self.read_dict({key: value})

    def __delitem__(self, key):
        if key == self.default_section:
            raise ValueError("Cannot remove the default section.")
        if not self.has_section(key):
            raise KeyError(key)
        self.remove_section(key)

    def __contains__(self, key):
        return key == self.default_section or self.has_section(key)

    def __len__(self):
        return len(self._sections) + 1 # the default section

    def __iter__(self):
        # XXX does it break when underlying container state changed?
        return itertools.chain((self.default_section,), self._sections.keys())

    def _read(self, fp, fpname):
        """Parse a sectioned configuration file.

        Each section in a configuration file contains a header, indicated by
        a name in square brackets (`[]'), plus key/value options, indicated by
        `name' and `value' delimited with a specific substring (`=' or `:' by
        default).

        Values can span multiple lines, as long as they are indented deeper
        than the first line of the value. Depending on the parser's mode, blank
        lines may be treated as parts of multiline values or ignored.

        Configuration files may include comments, prefixed by specific
        characters (`#' and `;' by default). Comments may appear on their own
        in an otherwise empty line or may be entered in lines holding values or
        section names.
        """
        elements_added = set()
        cursect = None # None, or a dictionary
        sectname = None
        optname = None
        lineno = 0
        indent_level = 0
        e = None # None, or an exception
        for lineno, line in enumerate(fp, start=1):
            comment_start = sys.maxsize
            # strip inline comments
            inline_prefixes = {p: -1 for p in self._inline_comment_prefixes}
            while comment_start == sys.maxsize and inline_prefixes:
                next_prefixes = {}
                for prefix, index in inline_prefixes.items():
                    index = line.find(prefix, index+1)
                    if index == -1:
                        continue
                    next_prefixes[prefix] = index
                    # An inline comment prefix only counts when preceded by
                    # whitespace (or at the start of the line).
                    if index == 0 or (index > 0 and line[index-1].isspace()):
                        comment_start = min(comment_start, index)
                inline_prefixes = next_prefixes
            # strip full line comments
            for prefix in self._comment_prefixes:
                if line.strip().startswith(prefix):
                    comment_start = 0
                    break
            if comment_start == sys.maxsize:
                comment_start = None
            value = line[:comment_start].strip()
            if not value:
                if self._empty_lines_in_values:
                    # add empty line to the value, but only if there was no
                    # comment on the line
                    if (comment_start is None and
                            cursect is not None and
                            optname and
                            cursect[optname] is not None):
                        cursect[optname].append('') # newlines added at join
                else:
                    # empty line marks end of value
                    indent_level = sys.maxsize
                continue
            # continuation line?
            first_nonspace = self.NONSPACECRE.search(line)
            cur_indent_level = first_nonspace.start() if first_nonspace else 0
            if (cursect is not None and optname and
                    cur_indent_level > indent_level):
                cursect[optname].append(value)
            # a section header or option header?
            else:
                indent_level = cur_indent_level
                # is it a section header?
                mo = self.SECTCRE.match(value)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        if self._strict and sectname in elements_added:
                            raise DuplicateSectionError(sectname, fpname,
                                                        lineno)
                        cursect = self._sections[sectname]
                        elements_added.add(sectname)
                    elif sectname == self.default_section:
                        cursect = self._defaults
                    else:
                        cursect = self._dict()
                        self._sections[sectname] = cursect
                        self._proxies[sectname] = SectionProxy(self, sectname)
                        elements_added.add(sectname)
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self._optcre.match(value)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        if not optname:
                            e = self._handle_error(e, fpname, lineno, line)
                        optname = self.optionxform(optname.rstrip())
                        if (self._strict and
                                (sectname, optname) in elements_added):
                            raise DuplicateOptionError(sectname, optname,
                                                       fpname, lineno)
                        elements_added.add((sectname, optname))
                        # This check is fine because the OPTCRE cannot
                        # match if it would set optval to None
                        if optval is not None:
                            optval = optval.strip()
                            # Values are collected as lists of physical
                            # lines, joined in _join_multiline_values().
                            cursect[optname] = [optval]
                        else:
                            # valueless option handling
                            cursect[optname] = None
                    else:
                        # a non-fatal parsing error occurred. set up the
                        # exception but keep going. the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        e = self._handle_error(e, fpname, lineno, line)
        # if any parsing errors occurred, raise an exception
        if e:
            raise e
        self._join_multiline_values()

    def _join_multiline_values(self):
        # Collapse the per-line lists built by _read() into single strings
        # and give the interpolation a chance to transform stored values.
        defaults = self.default_section, self._defaults
        all_sections = itertools.chain((defaults,),
                                       self._sections.items())
        for section, options in all_sections:
            for name, val in options.items():
                if isinstance(val, list):
                    val = '\n'.join(val).rstrip()
                options[name] = self._interpolation.before_read(self,
                                                                section,
                                                                name, val)

    def _handle_error(self, exc, fpname, lineno, line):
        # Accumulate bogus lines on a single ParsingError instance.
        if not exc:
            exc = ParsingError(fpname)
        exc.append(lineno, repr(line))
        return exc

    def _unify_values(self, section, vars):
        """Create a sequence of lookups with 'vars' taking priority over
        the 'section' which takes priority over the DEFAULTSECT.

        """
        sectiondict = {}
        try:
            sectiondict = self._sections[section]
        except KeyError:
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        vardict = {}
        if vars:
            for key, value in vars.items():
                if value is not None:
                    value = str(value)
                vardict[self.optionxform(key)] = value
        return _ChainMap(vardict, sectiondict, self._defaults)

    def _convert_to_boolean(self, value):
        """Return a boolean value translating from other types if necessary.
        """
        if value.lower() not in self.BOOLEAN_STATES:
            raise ValueError('Not a boolean: %s' % value)
        return self.BOOLEAN_STATES[value.lower()]

    def _validate_value_types(self, *, section="", option="", value=""):
        """Raises a TypeError for non-string values.

        The only legal non-string value if we allow valueless
        options is None, so we need to check if the value is a
        string if:
        - we do not allow valueless options, or
        - we allow valueless options but the value is not None

        For compatibility reasons this method is not used in classic set()
        for RawConfigParsers. It is invoked in every case for mapping protocol
        access and in ConfigParser.set().
        """
        if not isinstance(section, str):
            raise TypeError("section names must be strings")
        if not isinstance(option, str):
            raise TypeError("option keys must be strings")
        if not self._allow_no_value or value:
            if not isinstance(value, str):
                raise TypeError("option values must be strings")
class ConfigParser(RawConfigParser):
    """ConfigParser implementing interpolation."""

    # Classic %(name)s interpolation is the default for this class.
    _DEFAULT_INTERPOLATION = BasicInterpolation()

    def add_section(self, section):
        """Create a new section in the configuration.  Extends
        RawConfigParser.add_section by validating if the section name is
        a string."""
        self._validate_value_types(section=section)
        super().add_section(section)

    def set(self, section, option, value=None):
        """Set an option.  Extends RawConfigParser.set by validating type and
        interpolation syntax on the value."""
        self._validate_value_types(option=option, value=value)
        super().set(section, option, value)
class SafeConfigParser(ConfigParser):
    """ConfigParser alias for backwards compatibility purposes."""

    def __init__(self, *args, **kwargs):
        # Construct normally, then warn: the class only exists so that
        # pre-3.2 code keeps working.
        super().__init__(*args, **kwargs)
        warnings.warn(
            "The SafeConfigParser class has been renamed to ConfigParser "
            "in Python 3.2. This alias will be removed in future versions."
            " Use ConfigParser directly instead.",
            DeprecationWarning, stacklevel=2
        )
class SectionProxy(MutableMapping):
    """A mutable-mapping view onto a single section of a parser."""

    def __init__(self, parser, name):
        """Creates a view on a section of the specified `name` in `parser`."""
        self._parser = parser
        self._name = name

    def __repr__(self):
        return '<Section: {}>'.format(self._name)

    def __getitem__(self, key):
        if self._parser.has_option(self._name, key):
            return self._parser.get(self._name, key)
        raise KeyError(key)

    def __setitem__(self, key, value):
        # Validate eagerly so mapping access enforces string types even on
        # a RawConfigParser.
        self._parser._validate_value_types(option=key, value=value)
        return self._parser.set(self._name, key, value)

    def __delitem__(self, key):
        removed = (self._parser.has_option(self._name, key) and
                   self._parser.remove_option(self._name, key))
        if not removed:
            raise KeyError(key)

    def __contains__(self, key):
        return self._parser.has_option(self._name, key)

    def __len__(self):
        return len(self._options())

    def __iter__(self):
        return iter(self._options())

    def _options(self):
        # The DEFAULT section is not listed by parser.options(), so it is
        # special-cased here.
        if self._name == self._parser.default_section:
            return self._parser.defaults()
        return self._parser.options(self._name)

    def get(self, option, fallback=None, *, raw=False, vars=None):
        """Like parser.get(), with the section fixed to this proxy."""
        return self._parser.get(self._name, option, raw=raw, vars=vars,
                                fallback=fallback)

    def getint(self, option, fallback=None, *, raw=False, vars=None):
        """Like get(), but converts the value to an integer."""
        return self._parser.getint(self._name, option, raw=raw, vars=vars,
                                   fallback=fallback)

    def getfloat(self, option, fallback=None, *, raw=False, vars=None):
        """Like get(), but converts the value to a float."""
        return self._parser.getfloat(self._name, option, raw=raw, vars=vars,
                                     fallback=fallback)

    def getboolean(self, option, fallback=None, *, raw=False, vars=None):
        """Like get(), but converts the value to a boolean."""
        return self._parser.getboolean(self._name, option, raw=raw,
                                       vars=vars, fallback=fallback)

    @property
    def parser(self):
        # The parser object of the proxy is read-only.
        return self._parser

    @property
    def name(self):
        # The name of the section on a proxy is read-only.
        return self._name
| lgpl-3.0 |
konfabproject/konfab-consumer | ebdata/templatemaker/tests/hole.py | 1 | 3954 | # Copyright 2007,2008,2009,2011 Everyblock LLC, OpenPlans, and contributors
#
# This file is part of ebdata
#
# ebdata is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ebdata is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ebdata. If not, see <http://www.gnu.org/licenses/>.
#
from ebdata.templatemaker.hole import Hole, OrHole, RegexHole, IgnoreHole
import unittest
class HoleEquality(unittest.TestCase):
    """Equality/inequality semantics across the Hole family of classes."""
    # Regex literals use raw strings: the text is identical ('\d' == r'\d')
    # but this avoids invalid-escape-sequence DeprecationWarnings.
    def test_equal_hole(self):
        self.assertEqual(Hole(), Hole())
    def test_nonequal_hole(self):
        self.assertNotEqual(Hole(), OrHole())
    def test_equal_orhole(self):
        self.assertEqual(OrHole('a', 'b'), OrHole('a', 'b'))
    def test_nonequal_orhole1(self):
        self.assertNotEqual(OrHole('a'), OrHole('a', 'b'))
    def test_nonequal_orhole2(self):
        self.assertNotEqual(OrHole('a'), OrHole('b'))
    def test_equal_regexhole1(self):
        self.assertEqual(RegexHole(r'\d\d', False), RegexHole(r'\d\d', False))
    def test_equal_regexhole2(self):
        self.assertEqual(RegexHole(r'(\d\d)', True), RegexHole(r'(\d\d)', True))
    def test_nonequal_regexhole1(self):
        self.assertNotEqual(RegexHole(r'\d\d', False), RegexHole(r'\d', False))
    def test_nonequal_regexhole2(self):
        self.assertNotEqual(RegexHole(r'\d', False), IgnoreHole())
    def test_nonequal_regexhole3(self):
        self.assertNotEqual(RegexHole(r'\d', False), Hole())
    def test_nonequal_regexhole4(self):
        self.assertNotEqual(RegexHole(r'\d\d', False), RegexHole(r'\d\d', True))
    def test_nonequal_regexhole5(self):
        self.assertNotEqual(RegexHole(r'\d\d', False), RegexHole(r'(\d\d)', False))
    def test_equal_ignorehole(self):
        self.assertEqual(IgnoreHole(), IgnoreHole())
    def test_nonequal_ignorehole1(self):
        self.assertNotEqual(IgnoreHole(), Hole())
    def test_nonequal_ignorehole2(self):
        self.assertNotEqual(IgnoreHole(), OrHole('a'))
class HoleRepr(unittest.TestCase):
    """repr() output for each Hole variant."""
    def test_hole(self):
        self.assertEqual(repr(Hole()), '<Hole>')
    def test_orhole(self):
        self.assertEqual(repr(OrHole(1, 2, 3, 4)), '<OrHole: (1, 2, 3, 4)>')
    def test_regexhole(self):
        # Raw strings: same text as before, but no invalid-escape warnings.
        self.assertEqual(repr(RegexHole(r'\d\d-\d\d', False)), r'<RegexHole: \d\d-\d\d>')
    def test_ignorehole(self):
        self.assertEqual(repr(IgnoreHole()), '<IgnoreHole>')
class Regexes(unittest.TestCase):
    """regex() strings produced by each Hole variant."""
    def test_hole(self):
        self.assertEqual(Hole().regex(), '(.*?)')
    def test_orhole1(self):
        self.assertEqual(OrHole('a', 'b').regex(), '(a|b)')
    def test_orhole2(self):
        # Raw strings: identical text, no invalid-escape warnings for \? \. \d.
        self.assertEqual(OrHole('?', '.').regex(), r'(\?|\.)')
    def test_regexhole(self):
        self.assertEqual(RegexHole(r'\d\d-\d\d', False).regex(), r'\d\d-\d\d')
    def test_ignorehole(self):
        self.assertEqual(IgnoreHole().regex(), '.*?')
class HoleCapture(unittest.TestCase):
    """The `capture` flag for each Hole variant."""
    def test_hole(self):
        self.assertEqual(Hole().capture, True)
    def test_orhole(self):
        self.assertEqual(OrHole('a', 'b').capture, True)
    def test_regexhole1(self):
        # Raw strings: identical regex text, no invalid-escape warnings.
        self.assertEqual(RegexHole(r'\d\d-\d\d', False).capture, False)
    def test_regexhole2(self):
        self.assertEqual(RegexHole(r'(\d\d-\d\d)', True).capture, True)
    def test_regexhole3(self):
        self.assertEqual(RegexHole(r'(\d\d-\d\d)', False).capture, False)
    def test_ignorehole(self):
        self.assertEqual(IgnoreHole().capture, False)
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
sserrot/champion_relationships | venv/Lib/site-packages/pip/_vendor/requests/_internal_utils.py | 414 | 1096 | # -*- coding: utf-8 -*-
"""
requests._internal_utils
~~~~~~~~~~~~~~
Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
"""
from .compat import is_py2, builtin_str, str
def to_native_string(string, encoding='ascii'):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    """
    if isinstance(string, builtin_str):
        return string
    # Convert toward the native type: the native type is bytes on Python 2
    # (so encode) and text on Python 3 (so decode).
    if is_py2:
        return string.encode(encoding)
    return string.decode(encoding)
def unicode_is_ascii(u_string):
    """Determine if unicode string only contains ASCII characters.
    :param str u_string: unicode string to check. Must be unicode
    and not Python 2 `str`.
    :rtype: bool
    """
    assert isinstance(u_string, str)
    # Encoding succeeds iff every code point is in the ASCII range.
    try:
        u_string.encode('ascii')
    except UnicodeEncodeError:
        return False
    else:
        return True
| mit |
wangxiangyu/horizon | openstack_dashboard/dashboards/project/databases/forms.py | 34 | 3867 | # Copyright 2014 Tesora Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.forms import ValidationError # noqa
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
class ResizeVolumeForm(forms.SelfHandlingForm):
    """Form for growing the volume attached to a Trove database instance."""
    instance_id = forms.CharField(widget=forms.HiddenInput())
    orig_size = forms.IntegerField(
        label=_("Current Size (GB)"),
        widget=forms.TextInput(attrs={'readonly': 'readonly'}),
        required=False,
    )
    new_size = forms.IntegerField(label=_("New Size (GB)"))
    def clean(self):
        """Validate that the requested size is strictly larger than current.

        ``new_size`` is ``None`` when field-level validation already failed;
        guard against that so the comparison does not blow up (TypeError) or
        raise a spurious error on top of the field error.
        """
        cleaned_data = super(ResizeVolumeForm, self).clean()
        new_size = cleaned_data.get('new_size')
        if new_size is not None and new_size <= self.initial['orig_size']:
            raise ValidationError(
                _("New size for volume must be greater than current size."))
        return cleaned_data
    def handle(self, request, data):
        """Resize the volume via the Trove API; always returns True so the
        modal closes, reporting success or failure through messages."""
        instance = data.get('instance_id')
        try:
            api.trove.instance_resize_volume(request,
                                             instance,
                                             data['new_size'])
            messages.success(request, _('Resizing volume "%s"') % instance)
        except Exception as e:
            redirect = reverse("horizon:project:databases:index")
            # NOTE(review): `e.message` is a Python 2 idiom (gone in py3);
            # kept as-is to match the rest of this module -- confirm before
            # running under Python 3 (use str(e) there).
            exceptions.handle(request, _('Unable to resize volume. %s') %
                              e.message, redirect=redirect)
        return True
class ResizeInstanceForm(forms.SelfHandlingForm):
    """Form that lets a user pick a new flavor for a database instance."""
    instance_id = forms.CharField(widget=forms.HiddenInput())
    old_flavor_name = forms.CharField(label=_("Old Flavor"),
                                      required=False,
                                      widget=forms.TextInput(
                                          attrs={'readonly': 'readonly'}))
    new_flavor = forms.ChoiceField(label=_("New Flavor"),
                                   help_text=_("Choose a new instance "
                                               "flavor."))
    def __init__(self, request, *args, **kwargs):
        super(ResizeInstanceForm, self).__init__(request, *args, **kwargs)
        initial = kwargs.get('initial', {})
        current_flavor = initial.get('old_flavor_id')
        # The current flavor is not a valid resize target, so drop it from
        # the candidate list before building the choices.
        available = [(flavor_id, name)
                     for flavor_id, name in initial.get('flavors')
                     if flavor_id != current_flavor]
        if available:
            available.insert(0, ("", _("Select a new flavor")))
        else:
            available.insert(0, ("", _("No flavors available")))
        self.fields['new_flavor'].choices = available
    def handle(self, request, data):
        """Resize the instance via the Trove API; report outcome via messages."""
        instance = data.get('instance_id')
        flavor = data.get('new_flavor')
        try:
            api.trove.instance_resize(request, instance, flavor)
            messages.success(request, _('Resizing instance "%s"') % instance)
        except Exception as e:
            redirect = reverse("horizon:project:databases:index")
            exceptions.handle(request, _('Unable to resize instance. %s') %
                              e.message, redirect=redirect)
        return True
| apache-2.0 |
StackPointCloud/libcloud | libcloud/dns/drivers/softlayer.py | 28 | 7480 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unexpected-keyword-arg
__all__ = [
'SoftLayerDNSDriver'
]
from libcloud.common.softlayer import SoftLayerConnection
from libcloud.common.softlayer import SoftLayerObjectDoesntExist
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import ZoneDoesNotExistError, RecordDoesNotExistError
from libcloud.dns.base import DNSDriver, Zone, Record
# Keys recognized in the `extra` dict of create_record / update_record.
VALID_RECORD_EXTRA_PARAMS = ['priority', 'ttl']
class SoftLayerDNSDriver(DNSDriver):
    """libcloud DNS driver backed by the SoftLayer API."""
    type = Provider.SOFTLAYER
    name = 'Softlayer DNS'
    website = 'https://www.softlayer.com'
    connectionCls = SoftLayerConnection
    # Map libcloud RecordType constants to SoftLayer's lowercase type names.
    RECORD_TYPE_MAP = {
        RecordType.A: 'a',
        RecordType.AAAA: 'aaaa',
        RecordType.CNAME: 'cname',
        RecordType.MX: 'mx',
        RecordType.NS: 'ns',
        RecordType.PTR: 'ptr',
        RecordType.SOA: 'soa',
        RecordType.SPF: 'spf',
        RecordType.SRV: 'srv',
        RecordType.TXT: 'txt',
    }
    @staticmethod
    def _merge_extra_params(params, extra):
        """
        Copy supported optional record attributes from ``extra`` into
        ``params`` (the dict sent to the SoftLayer API).

        Shared by ``create_record`` and ``update_record`` so both accept the
        same set of keys. Falsy values are skipped, matching the previous
        ``extra.get(key)`` truthiness checks.
        """
        if extra:
            for key in ('ttl', 'refresh', 'retry', 'expire'):
                if extra.get(key):
                    params[key] = extra[key]
            # SoftLayer names the MX priority field "mxPriority".
            if extra.get('priority'):
                params['mxPriority'] = extra['priority']
        return params
    def create_zone(self, domain, ttl=None, extra=None):
        """Create a new (master) zone for ``domain`` and return it."""
        self.connection.set_context({'resource': 'zone', 'id': domain})
        data = {
            'name': domain,
            'resourceRecords': []
        }
        response = self.connection.request(
            'SoftLayer_Dns_Domain', 'createObject', data
        ).object
        # NOTE(review): ``ttl``/``extra`` are accepted for API parity but not
        # forwarded to SoftLayer; the returned Zone reports the default 3600.
        zone = Zone(id=response['id'], domain=domain,
                    type='master', ttl=3600, driver=self)
        return zone
    def get_zone(self, zone_id):
        """Return the zone with ``zone_id`` or raise ZoneDoesNotExistError."""
        self.connection.set_context({'resource': 'zone', 'id': zone_id})
        try:
            response = self.connection.request(
                'SoftLayer_Dns_Domain', 'getObject', id=zone_id
            ).object
        except SoftLayerObjectDoesntExist:
            raise ZoneDoesNotExistError(value='', driver=self,
                                        zone_id=zone_id)
        return self._to_zone(response)
    def delete_zone(self, zone):
        """Delete ``zone``; True on success, ZoneDoesNotExistError if gone."""
        self.connection.set_context({'resource': 'zone', 'id': zone.id})
        try:
            self.connection.request(
                'SoftLayer_Dns_Domain', 'deleteObject', id=zone.id
            ).object
        except SoftLayerObjectDoesntExist:
            raise ZoneDoesNotExistError(value='', driver=self,
                                        zone_id=zone.id)
        else:
            return True
    def iterate_zones(self):
        """Yield all zones visible to this account."""
        # '.' matches every domain name.
        zones_list = self.connection.request(
            'SoftLayer_Dns_Domain', 'getByDomainName', '.'
        ).object
        for item in zones_list:
            yield self._to_zone(item)
    def iterate_records(self, zone):
        """Yield all resource records belonging to ``zone``."""
        self.connection.set_context({'resource': 'zone', 'id': zone.id})
        records_list = self.connection.request(
            'SoftLayer_Dns_Domain', 'getResourceRecords', id=zone.id
        ).object
        for item in records_list:
            yield self._to_record(item, zone=zone)
    def get_record(self, zone_id, record_id):
        """Return a single record or raise RecordDoesNotExistError."""
        try:
            record = self.connection.request(
                'SoftLayer_Dns_Domain_ResourceRecord',
                'getObject',
                id=record_id
            ).object
            return self._to_record(record, zone=self.get_zone(zone_id))
        except SoftLayerObjectDoesntExist:
            raise RecordDoesNotExistError(value='', driver=self,
                                          record_id=record_id)
    def delete_record(self, record):
        """Delete ``record``; True on success, RecordDoesNotExistError if gone."""
        try:
            self.connection.request(
                'SoftLayer_Dns_Domain_ResourceRecord',
                'deleteObject',
                id=record.id
            ).object
        except SoftLayerObjectDoesntExist:
            raise RecordDoesNotExistError(value='', driver=self,
                                          record_id=record.id)
        else:
            return True
    def create_record(self, name, zone, type, data, extra=None):
        """
        Create a record in ``zone``.

        ``extra`` may carry 'ttl', 'refresh', 'retry', 'expire', 'priority'.
        """
        params = {
            'domainId': zone.id,
            'type': self.RECORD_TYPE_MAP[type],
            'host': name,
            'data': data
        }
        self._merge_extra_params(params, extra)
        response = self.connection.request(
            'SoftLayer_Dns_Domain_ResourceRecord',
            'createObject',
            params
        ).object
        return self._to_record(response, zone=zone)
    def update_record(
            self, record, name=None, type=None, data=None, extra=None):
        """
        Update ``record`` in place; only the supplied fields are changed.

        Returns the refreshed Record on success, False otherwise.
        """
        params = {}
        if type:
            params['type'] = self.RECORD_TYPE_MAP[type]
        if name:
            params['host'] = name
        if data:
            params['data'] = data
        self._merge_extra_params(params, extra)
        response = self.connection.request(
            'SoftLayer_Dns_Domain_ResourceRecord',
            'editObject',
            params,
            id=record.id,
        ).object
        if response:
            # Re-fetch so the returned Record reflects the stored state.
            changed_record = self.connection.request(
                'SoftLayer_Dns_Domain_ResourceRecord',
                'getObject',
                id=record.id,
            ).object
            return self._to_record(changed_record, zone=record.zone)
        else:
            return False
    def _to_zone(self, item):
        """Convert a SoftLayer domain dict into a libcloud Zone."""
        ttl = item.get('ttl', 3600)
        zone = Zone(id=item['id'], domain=item['name'],
                    type='master', ttl=ttl, driver=self)
        return zone
    def _to_record(self, item, zone=None):
        """Convert a SoftLayer resource-record dict into a libcloud Record."""
        extra = {
            'ttl': item['ttl'],
            'expire': item['expire'],
            'mxPriority': item['mxPriority'],
            'refresh': item['refresh'],
            'retry': item['retry'],
        }
        record = Record(
            id=item['id'],
            name=item['host'],
            type=self._string_to_record_type(item['type']),
            data=item['data'],
            zone=zone,
            driver=self,
            ttl=item['ttl'],
            extra=extra
        )
        return record
| apache-2.0 |
stuartarchibald/numba | numba/cuda/intrinsic_wrapper.py | 7 | 2240 | from .decorators import jit
import numba
@jit(device=True)
def all_sync(mask, predicate):
    """
    If for all threads in the masked warp the predicate is true, then
    a non-zero value is returned, otherwise 0 is returned.
    """
    # Mode 0 selects the 'all' vote; [1] is the predicate result element.
    return numba.cuda.vote_sync_intrinsic(mask, 0, predicate)[1]
@jit(device=True)
def any_sync(mask, predicate):
    """
    If for any thread in the masked warp the predicate is true, then
    a non-zero value is returned, otherwise 0 is returned.
    """
    # Mode 1 selects the 'any' vote; [1] is the predicate result element.
    return numba.cuda.vote_sync_intrinsic(mask, 1, predicate)[1]
@jit(device=True)
def eq_sync(mask, predicate):
    """
    If for all threads in the masked warp the boolean predicate is the same,
    then a non-zero value is returned, otherwise 0 is returned.
    """
    # Mode 2 selects the 'eq' (uniform) vote; [1] is the predicate result.
    return numba.cuda.vote_sync_intrinsic(mask, 2, predicate)[1]
@jit(device=True)
def ballot_sync(mask, predicate):
    """
    Returns a mask of all threads in the warp whose predicate is true,
    and are within the given mask.
    """
    # Mode 3 selects the 'ballot' vote; [0] is the resulting lane mask.
    return numba.cuda.vote_sync_intrinsic(mask, 3, predicate)[0]
@jit(device=True)
def shfl_sync(mask, value, src_lane):
    """
    Shuffles value across the masked warp and returns the value
    from src_lane. If this is outside the warp, then the
    given value is returned.
    """
    # Mode 0 = direct (idx) shuffle; 0x1f clamps lane indices to a 32-lane
    # warp. [0] is the shuffled value element of the returned tuple.
    return numba.cuda.shfl_sync_intrinsic(mask, 0, value, src_lane, 0x1f)[0]
@jit(device=True)
def shfl_up_sync(mask, value, delta):
    """
    Shuffles value across the masked warp and returns the value
    from (laneid - delta). If this is outside the warp, then the
    given value is returned.
    """
    # Mode 1 = shuffle-up; the final 0 is the lower clamp bound.
    return numba.cuda.shfl_sync_intrinsic(mask, 1, value, delta, 0)[0]
@jit(device=True)
def shfl_down_sync(mask, value, delta):
    """
    Shuffles value across the masked warp and returns the value
    from (laneid + delta). If this is outside the warp, then the
    given value is returned.
    """
    # Mode 2 = shuffle-down; 0x1f bounds lanes to a 32-lane warp.
    return numba.cuda.shfl_sync_intrinsic(mask, 2, value, delta, 0x1f)[0]
@jit(device=True)
def shfl_xor_sync(mask, value, lane_mask):
    """
    Shuffles value across the masked warp and returns the value
    from (laneid ^ lane_mask).
    """
    # Mode 3 = butterfly (xor) shuffle; 0x1f bounds lanes to a 32-lane warp.
    return numba.cuda.shfl_sync_intrinsic(mask, 3, value, lane_mask, 0x1f)[0]
| bsd-2-clause |
hamzehd/edx-platform | common/test/acceptance/pages/lms/video/video.py | 19 | 27462 | """
Video player in the courseware.
"""
import time
import json
import requests
from selenium.webdriver.common.action_chains import ActionChains
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise, Promise
from bok_choy.javascript import wait_for_js, js_defined
import logging
log = logging.getLogger('VideoPage')
# CSS selectors for the video player's clickable controls.
VIDEO_BUTTONS = {
    'transcript': '.lang',
    'transcript_button': '.toggle-transcript',
    'volume': '.volume',
    'play': '.video_control.play',
    'pause': '.video_control.pause',
    'fullscreen': '.add-fullscreen',
    'download_transcript': '.video-tracks > a',
    'speed': '.speeds',
    'quality': '.quality-control',
    'do_not_show_again': '.skip-control',
    'skip_bumper': '.play-skip-control',
}
# CSS selectors for non-interactive player elements and state classes.
CSS_CLASS_NAMES = {
    'closed_captions': '.video.closed',
    'captions_rendered': '.video.is-captions-rendered',
    'captions': '.subtitles',
    'captions_text': '.subtitles > li',
    'error_message': '.video .video-player h3',
    'video_container': '.video',
    'video_sources': '.video-player video source',
    'video_spinner': '.video-wrapper .spinner',
    'video_xmodule': '.xmodule_VideoModule',
    'video_init': '.is-initialized',
    'video_time': '.vidtime',
    'video_display_name': '.vert h2',
    'captions_lang_list': '.langs-list li',
    'video_speed': '.speeds .value',
    'poster': '.poster',
}
# Selectors distinguishing the HTML5 <video> player from the YouTube iframe.
VIDEO_MODES = {
    'html5': '.video video',
    'youtube': '.video iframe'
}
# CSS selectors for the player's drop-down menus.
VIDEO_MENUS = {
    'language': '.lang .menu',
    'speed': '.speed .menu',
    'download_transcript': '.video-tracks .a11y-menu-list',
    'transcript-format': '.video-tracks .a11y-menu-button'
}
@js_defined('window.Video', 'window.RequireJS.require', 'window.jQuery',
'window.MathJax', 'window.MathJax.isReady')
class VideoPage(PageObject):
"""
Video player in the courseware.
"""
url = None
current_video_display_name = None
    @wait_for_js
    def is_browser_on_page(self):
        # The page is considered loaded once the video xmodule wrapper div
        # is present in the DOM.
        return self.q(css='div{0}'.format(CSS_CLASS_NAMES['video_xmodule'])).present
    @wait_for_js
    def wait_for_video_class(self):
        """
        Wait until element with class name `video` appeared in DOM.
        """
        # Let pending AJAX settle first so the container has a chance to render.
        self.wait_for_ajax()
        video_selector = '{0}'.format(CSS_CLASS_NAMES['video_container'])
        self.wait_for_element_presence(video_selector, 'Video is initialized')
    @wait_for_js
    def wait_for_video_player_render(self, autoplay=False):
        """
        Wait until Video Player Rendered Completely.

        Arguments:
            autoplay (bool): if True, expect the pause button (video already
                playing) instead of the play button.
        """
        self.wait_for_video_class()
        self.wait_for_element_presence(CSS_CLASS_NAMES['video_init'], 'Video Player Initialized')
        self.wait_for_element_presence(CSS_CLASS_NAMES['video_time'], 'Video Player Initialized')
        video_player_buttons = ['volume', 'fullscreen', 'speed']
        # An autoplaying video starts in the playing state, so the pause
        # button is visible; otherwise the play button is.
        if autoplay:
            video_player_buttons.append('pause')
        else:
            video_player_buttons.append('play')
        for button in video_player_buttons:
            self.wait_for_element_visibility(VIDEO_BUTTONS[button], '{} button is visible'.format(button))
        def _is_finished_loading():
            """
            Check if video loading completed.
            Returns:
                bool: Tells Video Finished Loading.
            """
            # The buffering spinner disappears once loading is done.
            return not self.q(css=CSS_CLASS_NAMES['video_spinner']).visible
        EmptyPromise(_is_finished_loading, 'Finished loading the video', timeout=200).fulfill()
        self.wait_for_ajax()
    @wait_for_js
    def wait_for_video_bumper_render(self):
        """
        Wait until Poster, Video Pre-Roll and main Video Player are Rendered Completely.
        """
        self.wait_for_video_class()
        self.wait_for_element_presence(CSS_CLASS_NAMES['video_init'], 'Video Player Initialized')
        self.wait_for_element_presence(CSS_CLASS_NAMES['video_time'], 'Video Player Initialized')
        # Bumper mode exposes skip controls instead of the play/pause buttons.
        video_player_buttons = ['do_not_show_again', 'skip_bumper', 'volume']
        for button in video_player_buttons:
            self.wait_for_element_visibility(VIDEO_BUTTONS[button], '{} button is visible'.format(button))
    @property
    def is_poster_shown(self):
        """
        Check whether a poster is shown.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['poster'])
        return self.q(css=selector).visible
    def click_on_poster(self):
        """
        Click on the video poster.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['poster'])
        self.q(css=selector).click()
def get_video_vertical_selector(self, video_display_name=None):
"""
Get selector for a video vertical with display name specified by `video_display_name`.
Arguments:
video_display_name (str or None): Display name of a Video. Default vertical selector if None.
Returns:
str: Vertical Selector for video.
"""
if video_display_name:
video_display_names = self.q(css=CSS_CLASS_NAMES['video_display_name']).text
if video_display_name not in video_display_names:
raise ValueError("Incorrect Video Display Name: '{0}'".format(video_display_name))
return '.vert.vert-{}'.format(video_display_names.index(video_display_name))
else:
return '.vert.vert-0'
def get_element_selector(self, class_name, vertical=True):
"""
Construct unique element selector.
Arguments:
class_name (str): css class name for an element.
vertical (bool): do we need vertical css selector or not. vertical css selector is not present in Studio
Returns:
str: Element Selector.
"""
if vertical:
return '{vertical} {video_element}'.format(
vertical=self.get_video_vertical_selector(self.current_video_display_name),
video_element=class_name)
else:
return class_name
    def use_video(self, video_display_name):
        """
        Set current video display name.
        Arguments:
            video_display_name (str): Display name of a Video.
        """
        # Subsequent selector lookups are scoped to this video's vertical.
        self.current_video_display_name = video_display_name
    def is_video_rendered(self, mode):
        """
        Check if video is rendered in `mode`.
        Arguments:
            mode (str): Video mode, `html5` or `youtube`.
        Returns:
            bool: Tells if video is rendered in `mode`.
        """
        selector = self.get_element_selector(VIDEO_MODES[mode])
        def _is_element_present():
            """
            Check if a web element is present in DOM.
            Returns:
                tuple: (is_satisfied, result)`, where `is_satisfied` is a boolean indicating whether the promise was
                satisfied, and `result` is a value to return from the fulfilled `Promise`.
            """
            is_present = self.q(css=selector).present
            return is_present, is_present
        return Promise(_is_element_present, 'Video Rendering Failed in {0} mode.'.format(mode)).fulfill()
    @property
    def is_autoplay_enabled(self):
        """
        Extract autoplay value of `data-metadata` attribute to check video autoplay is enabled or disabled.
        Returns:
            bool: Tells if autoplay enabled/disabled.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['video_container'])
        # The player serializes its settings as JSON in `data-metadata`.
        auto_play = json.loads(self.q(css=selector).attrs('data-metadata')[0])['autoplay']
        return auto_play
    @property
    def is_error_message_shown(self):
        """
        Checks if video player error message shown.
        Returns:
            bool: Tells about error message visibility.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['error_message'])
        return self.q(css=selector).visible
    @property
    def is_spinner_shown(self):
        """
        Checks if video spinner shown.
        Returns:
            bool: Tells about spinner visibility.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['video_spinner'])
        return self.q(css=selector).visible
    @property
    def error_message_text(self):
        """
        Extract video player error message text.
        Returns:
            str: Error message text.
        """
        selector = self.get_element_selector(CSS_CLASS_NAMES['error_message'])
        return self.q(css=selector).text[0]
    def is_button_shown(self, button_id):
        """
        Check if a video button specified by `button_id` is visible.
        Arguments:
            button_id (str): key in VIDEO_BUTTONS dictionary, its value will give us the css selector for button.
        Returns:
            bool: Tells about a buttons visibility.
        """
        selector = self.get_element_selector(VIDEO_BUTTONS[button_id])
        return self.q(css=selector).visible
    def show_captions(self):
        """
        Make Captions Visible.
        """
        self._captions_visibility(True)
    def hide_captions(self):
        """
        Make Captions Invisible.
        """
        self._captions_visibility(False)
    def is_captions_visible(self):
        """
        Get current visibility state of captions.
        Returns:
            bool: True means captions are visible, False means captions are not visible
        """
        self.wait_for_ajax()
        caption_state_selector = self.get_element_selector(CSS_CLASS_NAMES['closed_captions'])
        # The player adds the "closed" class when captions are hidden.
        return not self.q(css=caption_state_selector).present
    @wait_for_js
    def _captions_visibility(self, captions_new_state):
        """
        Set the video captions visibility state.
        Arguments:
            captions_new_state (bool): True means show captions, False means hide captions
        """
        states = {True: 'Shown', False: 'Hidden'}
        state = states[captions_new_state]
        # Make sure that the transcript button is there
        EmptyPromise(lambda: self.is_button_shown('transcript_button'),
                     "transcript button is shown").fulfill()
        # toggle captions visibility state if needed
        if self.is_captions_visible() != captions_new_state:
            self.click_player_button('transcript_button')
        # Verify that captions state is toggled/changed
        EmptyPromise(lambda: self.is_captions_visible() == captions_new_state,
                     "Captions are {state}".format(state=state)).fulfill()
    @property
    def captions_text(self):
        """
        Extract captions text.
        Returns:
            str: Captions Text.
        """
        self.wait_for_captions()
        captions_selector = self.get_element_selector(CSS_CLASS_NAMES['captions_text'])
        # Each <li> holds one caption line; join them into a single string.
        subs = self.q(css=captions_selector).html
        return ' '.join(subs)
    @property
    def speed(self):
        """
        Get current video speed value.
        Return:
            str: speed value
        """
        speed_selector = self.get_element_selector(CSS_CLASS_NAMES['video_speed'])
        return self.q(css=speed_selector).text[0]
    @speed.setter
    def speed(self, speed):
        """
        Change the video play speed.
        Arguments:
            speed (str): Video speed value
        """
        # mouse over to video speed button -- the speed menu only opens
        # on hover, so hover before clicking the desired entry.
        speed_menu_selector = self.get_element_selector(VIDEO_BUTTONS['speed'])
        element_to_hover_over = self.q(css=speed_menu_selector).results[0]
        hover = ActionChains(self.browser).move_to_element(element_to_hover_over)
        hover.perform()
        speed_selector = self.get_element_selector('li[data-speed="{speed}"] .control'.format(speed=speed))
        self.q(css=speed_selector).first.click()
    def verify_speed_changed(self, expected_speed):
        """
        Wait for the video to change its speed to the expected value. If it does not change,
        the wait call will fail the test.
        """
        self.wait_for(lambda: self.speed == expected_speed, "Video speed changed")
    def click_player_button(self, button):
        """
        Click on `button`.
        Arguments:
            button (str): key in VIDEO_BUTTONS dictionary, its value will give us the css selector for `button`
        """
        button_selector = self.get_element_selector(VIDEO_BUTTONS[button])
        # If we are going to click pause button, Ensure that player is not in buffering state
        if button == 'pause':
            self.wait_for(lambda: self.state != 'buffering', 'Player is Ready for Pause')
        self.q(css=button_selector).first.click()
        self.wait_for_ajax()
    def _get_element_dimensions(self, selector):
        """
        Gets the width and height of element specified by `selector`
        Arguments:
            selector (str): css selector of a web element
        Returns:
            dict: Dimensions of a web element (keys 'width' and 'height').
        """
        element = self.q(css=selector).results[0]
        return element.size
    @property
    def _dimensions(self):
        """
        Gets the video player dimensions.
        Returns:
            tuple: (actual, expected) dimension dicts for the player.
        """
        # The two fragments concatenate into one comma-separated CSS query
        # that matches either the YouTube iframe or the HTML5 <video>.
        iframe_selector = self.get_element_selector('.video-player iframe,')
        video_selector = self.get_element_selector(' .video-player video')
        video = self._get_element_dimensions(iframe_selector + video_selector)
        wrapper = self._get_element_dimensions(self.get_element_selector('.tc-wrapper'))
        controls = self._get_element_dimensions(self.get_element_selector('.video-controls'))
        progress_slider = self._get_element_dimensions(
            self.get_element_selector('.video-controls > .slider'))
        # Expected height excludes the control bar and half the slider, which
        # overlaps the player area.
        expected = dict(wrapper)
        expected['height'] -= controls['height'] + 0.5 * progress_slider['height']
        return video, expected
    def is_aligned(self, is_transcript_visible):
        """
        Check if video is aligned properly.
        Arguments:
            is_transcript_visible (bool): Transcript is visible or not.
        Returns:
            bool: Alignment result.
        """
        # Width of the video container in css equal 75% of window if transcript enabled
        wrapper_width = 75 if is_transcript_visible else 100
        initial = self.browser.get_window_size()
        # Resize twice (portrait then landscape) to validate responsive
        # width and height behavior independently.
        self.browser.set_window_size(300, 600)
        # Wait for browser to resize completely
        # Currently there is no other way to wait instead of explicit wait
        time.sleep(0.2)
        real, expected = self._dimensions
        width = round(100 * real['width'] / expected['width']) == wrapper_width
        self.browser.set_window_size(600, 300)
        # Wait for browser to resize completely
        # Currently there is no other way to wait instead of explicit wait
        time.sleep(0.2)
        real, expected = self._dimensions
        # Allow a small tolerance on height to absorb rounding differences.
        height = abs(expected['height'] - real['height']) <= 5
        # Restore initial window size
        self.browser.set_window_size(
            initial['width'], initial['height']
        )
        return all([width, height])
    def _get_transcript(self, url):
        """
        Download Transcript from `url`.
        Returns:
            tuple: (success, headers, content) where success is True for any
            non-4xx/5xx HTTP status.
        """
        kwargs = dict()
        # Forward the browser's session cookie so the request is authenticated.
        session_id = [{i['name']: i['value']} for i in self.browser.get_cookies() if i['name'] == u'sessionid']
        if session_id:
            kwargs.update({
                'cookies': session_id[0]
            })
        response = requests.get(url, **kwargs)
        return response.status_code < 400, response.headers, response.content
    def downloaded_transcript_contains_text(self, transcript_format, text_to_search):
        """
        Download the transcript in format `transcript_format` and check that it contains the text `text_to_search`
        Arguments:
            transcript_format (str): Transcript file format `srt` or `txt`
            text_to_search (str): Text to search in Transcript.
        Returns:
            bool: Transcript download result.
        """
        transcript_selector = self.get_element_selector(VIDEO_MENUS['transcript-format'])
        # check if we have a transcript with correct format
        if '.' + transcript_format not in self.q(css=transcript_selector).text[0]:
            return False
        # Expected Content-Type per transcript format.
        formats = {
            'srt': 'application/x-subrip',
            'txt': 'text/plain',
        }
        transcript_url_selector = self.get_element_selector(VIDEO_BUTTONS['download_transcript'])
        url = self.q(css=transcript_url_selector).attrs('href')[0]
        result, headers, content = self._get_transcript(url)
        if result is False:
            return False
        if formats[transcript_format] not in headers.get('content-type', ''):
            return False
        if text_to_search not in content.decode('utf-8'):
            return False
        return True
    def current_language(self):
        """
        Get current selected video transcript language (two-letter code).
        """
        selector = self.get_element_selector(VIDEO_MENUS["language"] + ' li.is-active')
        return self.q(css=selector).first.attrs('data-lang-code')[0]
    def select_language(self, code):
        """
        Select captions for language `code`.
        Arguments:
            code (str): two character language code like `en`, `zh`.
        Returns:
            bool: True when the language ended up selected and captions
            rendered; False when the menu state looks inconsistent.
        """
        self.wait_for_ajax()
        # mouse over to transcript button -- the language menu only opens on hover
        cc_button_selector = self.get_element_selector(VIDEO_BUTTONS["transcript"])
        element_to_hover_over = self.q(css=cc_button_selector).results[0]
        ActionChains(self.browser).move_to_element(element_to_hover_over).perform()
        language_selector = VIDEO_MENUS["language"] + ' li[data-lang-code="{code}"]'.format(code=code)
        language_selector = self.get_element_selector(language_selector)
        self.wait_for_element_visibility(language_selector, 'language menu is visible')
        self.q(css=language_selector).first.click()
        # Sometimes language is not clicked correctly. So, if the current language code
        # differs form the expected, we try to change it again.
        if self.current_language() != code:
            self.select_language(code)
        if 'is-active' != self.q(css=language_selector).attrs('class')[0]:
            return False
        # Exactly one language entry may be active at a time.
        active_lang_selector = self.get_element_selector(VIDEO_MENUS["language"] + ' li.is-active')
        if len(self.q(css=active_lang_selector).results) != 1:
            return False
        # Make sure that all ajax requests that affects the display of captions are finished.
        # For example, request to get new translation etc.
        self.wait_for_ajax()
        captions_selector = self.get_element_selector(CSS_CLASS_NAMES['captions'])
        EmptyPromise(lambda: self.q(css=captions_selector).visible, 'Subtitles Visible').fulfill()
        self.wait_for_captions()
        return True
    def is_menu_present(self, menu_name):
        """
        Check if menu `menu_name` exists.
        Arguments:
            menu_name (str): Menu key from VIDEO_MENUS.
        Returns:
            bool: Menu existence result
        """
        selector = self.get_element_selector(VIDEO_MENUS[menu_name])
        return self.q(css=selector).present
def select_transcript_format(self, transcript_format):
    """
    Select transcript with format `transcript_format`.

    Arguments:
        transcript_format (str): Transcript file format, `srt` or `txt`.

    Returns:
        bool: Selection Result.
    """
    button_selector = self.get_element_selector(VIDEO_MENUS['transcript-format'])
    button = self.q(css=button_selector).results[0]
    # Hovering over the format button opens the drop-down menu.
    hover = ActionChains(self.browser).move_to_element(button)
    hover.perform()
    # While the menu is open the button label collapses to '...'; if it does
    # not, the hover failed and the menu never opened.
    if '...' not in self.q(css=button_selector).text[0]:
        return False
    menu_selector = self.get_element_selector(VIDEO_MENUS['download_transcript'])
    menu_items = self.q(css=menu_selector + ' a').results
    for item in menu_items:
        if item.get_attribute('data-value') == transcript_format:
            ActionChains(self.browser).move_to_element(item).click().perform()
            self.wait_for_ajax()
            break
    # Scroll back to the top so later interactions are not affected by the menu.
    self.browser.execute_script("window.scrollTo(0, 0);")
    # Verify both the active menu entry and the button label reflect the choice.
    if self.q(css=menu_selector + ' .active a').attrs('data-value')[0] != transcript_format:
        return False
    if '.' + transcript_format not in self.q(css=button_selector).text[0]:
        return False
    return True
@property
def sources(self):
    """
    Extract all video source urls on current page.

    Returns:
        list: Video source URLs with their query strings stripped.
    """
    css = self.get_element_selector(CSS_CLASS_NAMES['video_sources'])
    strip_query = lambda el: el.get_attribute('src').split('?')[0]
    return self.q(css=css).map(strip_query).results
@property
def caption_languages(self):
    """
    Caption languages available for the video.

    Returns:
        dict: language code ('en', 'zh', ...) -> language name ('English', ...).
    """
    css = self.get_element_selector(CSS_CLASS_NAMES['captions_lang_list'])
    codes = self.q(css=css).attrs('data-lang-code')
    names = self.q(css=css).attrs('textContent')
    return {code: name for code, name in zip(codes, names)}
@property
def position(self):
    """
    Current video slider position.

    Returns:
        str: current seek position in ``min:sec`` format.
    """
    time_css = self.get_element_selector(CSS_CLASS_NAMES['video_time'])
    # Displayed as "elapsed / duration"; keep only the elapsed part.
    full_time = self.q(css=time_css).text[0]
    return full_time.partition('/')[0].strip()
@property
def seconds(self):
    """
    Extract seconds part from current video slider position.

    Returns:
        int: the seconds component of the ``min:sec`` position.
    """
    return int(self.position.split(':')[1])
@property
def state(self):
    """
    Extract the current state (play, pause etc) of video.

    Returns:
        str: current video state, or None when no known state class is present.
    """
    state_selector = self.get_element_selector(CSS_CLASS_NAMES['video_container'])
    css_classes = self.q(css=state_selector).attrs('class')[0]
    # For troubleshooting purposes show what the current state is; the debug
    # output is only displayed in the event of a failure.
    logging.debug("Current state of '{}' element is '{}'".format(state_selector, css_classes))
    # Mirrors the JS video player's onStateChange function. Order matters:
    # checked in the same sequence as the original if/elif chain.
    state_markers = (
        ('is-playing', 'playing'),
        ('is-paused', 'pause'),
        ('is-buffered', 'buffering'),
        ('is-ended', 'finished'),
    )
    for marker, name in state_markers:
        if marker in css_classes:
            return name
def _wait_for(self, check_func, desc, result=False, timeout=200, try_interval=0.2):
    """
    Calls the method provided as an argument until the Promise is satisfied or BrokenPromise is raised.

    Arguments:
        check_func (callable): Function that accepts no arguments and returns a boolean indicating whether the promise is fulfilled.
        desc (str): Description of the Promise, used in log messages.
        result (bool): Indicates whether we need a result from the Promise or not.
        timeout (float): Maximum number of seconds to wait for the Promise to be satisfied before timing out.
        try_interval (float): Number of seconds to wait between successive calls of `check_func`.
    """
    if result:
        # Promise propagates the value produced by check_func.
        return Promise(check_func, desc, timeout=timeout, try_interval=try_interval).fulfill()
    else:
        # EmptyPromise only waits for check_func to become truthy.
        return EmptyPromise(check_func, desc, timeout=timeout, try_interval=try_interval).fulfill()
def wait_for_state(self, state):
    """
    Wait until `state` occurs.

    Arguments:
        state (str): State we wait for ('playing', 'pause', 'buffering', 'finished').
    """
    self._wait_for(
        lambda: self.state == state,
        'State is {state}'.format(state=state)
    )
def seek(self, seek_value):
    """
    Seek the video to the position specified by `seek_value`.

    Arguments:
        seek_value (str): seek value in ``min:sec`` format.
    """
    seek_time = _parse_time_str(seek_value)
    seek_selector = self.get_element_selector(' .video')
    # Drive the player's seek handler directly through JS instead of
    # simulating a slider drag, which is flaky across browsers.
    js_code = "$('{seek_selector}').data('video-player-state').videoPlayer.onSlideSeek({{time: {seek_time}}})".format(
        seek_selector=seek_selector, seek_time=seek_time)
    self.browser.execute_script(js_code)

    # after seek, player goes into `is-buffered` state. we need to get
    # out of this state before doing any further operation/action.
    def _is_buffering_completed():
        """
        Check if buffering completed
        """
        return self.state != 'buffering'

    self._wait_for(_is_buffering_completed, 'Buffering completed after Seek.')
def reload_page(self):
    """
    Reload/Refresh the current video page and wait for the player to render again.
    """
    self.browser.refresh()
    self.wait_for_video_player_render()
@property
def duration(self):
    """
    Extract video duration.

    Returns:
        str: duration in ``min:sec`` format.
    """
    time_css = self.get_element_selector(CSS_CLASS_NAMES['video_time'])
    # The full time has the form "0:32 / 3:14", i.e. elapsed / duration.
    elapsed_and_total = self.q(css=time_css).text[0]
    return elapsed_and_total.split('/')[1].strip()
def wait_for_position(self, position):
    """
    Wait until the current playback position equals `position`.

    Arguments:
        position (str): position we wait for, in ``min:sec`` format.
    """
    self._wait_for(
        lambda: self.position == position,
        'Position is {position}'.format(position=position)
    )
@property
def is_quality_button_visible(self):
    """
    Get the visibility state of the quality button.

    Returns:
        bool: visibility status
    """
    selector = self.get_element_selector(VIDEO_BUTTONS['quality'])
    return self.q(css=selector).visible
@property
def is_quality_button_active(self):
    """
    Check whether the quality button is active.

    Returns:
        bool: active status
    """
    css = self.get_element_selector(VIDEO_BUTTONS['quality'])
    button_classes = self.q(css=css).attrs('class')[0]
    # 'active' must be a whole class token, not a substring of another class.
    return 'active' in button_classes.split()
def wait_for_captions(self):
    """
    Wait until captions are rendered completely (marker element appears in the DOM).
    """
    captions_rendered_selector = self.get_element_selector(CSS_CLASS_NAMES['captions_rendered'])
    self.wait_for_element_presence(captions_rendered_selector, 'Captions Rendered')
def _parse_time_str(time_str):
"""
Parse a string of the form 1:23 into seconds (int).
Arguments:
time_str (str): seek value
Returns:
int: seek value in seconds
"""
time_obj = time.strptime(time_str, '%M:%S')
return time_obj.tm_min * 60 + time_obj.tm_sec
| agpl-3.0 |
robocomp/robocomp-robolab | components/hardware/imu/pyimu/src/specificworker.py | 1 | 2340 | #
# Copyright (C) 2018 by YOUR NAME HERE
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
import sys, os, traceback, time
from genericworker import *
class SpecificWorker(GenericWorker):
def __init__(self, proxy_map):
    """Set up the worker: create the IMU data holder and start the periodic compute timer."""
    super(SpecificWorker, self).__init__(proxy_map)
    self.timer.timeout.connect(self.compute)
    self.imu = DataImu()
    # Poll period in milliseconds.
    self.Period = 100
    self.timer.start(self.Period)
    print("Start with period: ", self.Period)
def setParams(self, params):
    """Open the serial device named by params["device"].

    Returns:
        True on success; exits the process with status -1 when the device
        cannot be opened.
    """
    try:
        self.puerto = open(params["device"], "r")
        # The original `print ("Device opened:",)` printed a one-element
        # tuple under Python 3 (a leftover py2 trailing-comma idiom); print
        # the device path instead.
        print("Device opened:", params["device"])
    except IOError:
        print("Error opening serial port:", params["device"], "check device is connected")
        sys.exit(-1)
    return True
@QtCore.Slot()
def compute(self):
    """Read one line from the serial device, parse yaw/roll/pitch and publish the IMU data."""
    print ('SpecificWorker.compute...')
    try:
        line = self.puerto.readline()
        # Expected wire format: "<yaw> <roll> <pitch>" (space-separated floats).
        values = line.strip().split(' ')
        self.imu.rot.Yaw = float(values[0])
        self.imu.rot.Roll = float(values[1])
        self.imu.rot.Pitch = float(values[2])
        print ("Data(y,r,p):", self.imu.rot.Yaw, self.imu.rot.Roll, self.imu.rot.Pitch)
        self.imupub_proxy.publish(self.imu)
    # NOTE(review): only Ice.Exception is caught here -- a malformed serial
    # line raising ValueError/IndexError would propagate. Confirm intended.
    except Ice.Exception as e:
        traceback.print_exc()
        print(e)
    return True
# IMU implementation
# resetImu
#
def resetImu(self):
    """Reset the IMU device. Stub: not supported by this component."""
    print("ERROR: not implemented yet")
#
# getAngularVel
#
def getAngularVel(self):
    """Return angular velocity. Stub: always a default-constructed Gyroscope."""
    ret = Gyroscope()
    return ret
#
# getOrientation
#
def getOrientation(self):
    """Return orientation. Stub: always a default-constructed Orientation."""
    ret = Orientation()
    return ret
#
# getDataImu
#
def getDataImu(self):
    """Return the full IMU reading. Stub: a fresh, empty DataImu (not self.imu)."""
    return DataImu()
#
# getMagneticFields
#
def getMagneticFields(self):
    """Return magnetic field data. Stub: always a default-constructed Magnetic."""
    ret = Magnetic()
    return ret
#
# getAcceleration
#
def getAcceleration(self):
ret = Acceleration()
| gpl-3.0 |
conrado/ansible-modules-core | database/mysql/mysql_db.py | 82 | 14607 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Mark Theunissen <mark.theunissen@gmail.com>
# Sponsored by Four Kitchens http://fourkitchens.com.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mysql_db
short_description: Add or remove MySQL databases from a remote host.
description:
- Add or remove MySQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
- name=all May only be provided if I(state) is C(dump) or C(import).
- if name=all Works like --all-databases option for mysqldump (Added in 2.0)
required: true
default: null
aliases: [ db ]
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- Host running the database
required: false
default: localhost
login_port:
description:
- Port of the MySQL server. Requires login_host be defined as other then localhost if login_port is used
required: false
default: 3306
login_unix_socket:
description:
- The path to a Unix domain socket for local connections
required: false
default: null
state:
description:
- The database state
required: false
default: present
choices: [ "present", "absent", "dump", "import" ]
collation:
description:
- Collation mode
required: false
default: null
encoding:
description:
- Encoding mode
required: false
default: null
target:
description:
- Location, on the remote host, of the dump file to read from or write to. Uncompressed SQL
files (C(.sql)) as well as bzip2 (C(.bz2)), gzip (C(.gz)) and xz compressed files are supported.
required: false
notes:
- Requires the MySQLdb Python package on the remote host. For Ubuntu, this
is as easy as apt-get install python-mysqldb. (See M(apt).) For CentOS/Fedora, this
is as easy as yum install MySQL-python. (See M(yum).)
- Both I(login_password) and I(login_user) are required when you are
passing credentials. If none are present, the module will attempt to read
the credentials from C(~/.my.cnf), and finally fall back to using the MySQL
default login of C(root) with no password.
requirements: [ ConfigParser ]
author: "Mark Theunissen (@marktheunissen)"
'''
EXAMPLES = '''
# Create a new database with name 'bobdata'
- mysql_db: name=bobdata state=present
# Copy database dump file to remote host and restore it to database 'my_db'
- copy: src=dump.sql.bz2 dest=/tmp
- mysql_db: name=my_db state=import target=/tmp/dump.sql.bz2
# Dumps all databases to hostname.sql
- mysql_db: state=dump name=all target=/tmp/{{ inventory_hostname }}.sql
# Imports file.sql similiar to mysql -u <username> -p <password> < hostname.sql
- mysql_db: state=import name=all target=/tmp/{{ inventory_hostname }}.sql
'''
import ConfigParser
import os
import pipes
import stat
import subprocess
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
# ===========================================
# MySQL module specific support methods.
#
def db_exists(cursor, db):
    """Return True if database `db` exists on the server.

    Underscores are escaped because `_` is a single-character wildcard in
    MySQL's LIKE patterns.
    """
    res = cursor.execute("SHOW DATABASES LIKE %s", (db.replace("_","\_"),))
    return bool(res)
def db_delete(cursor, db):
    """Drop database `db`. The identifier is quoted to prevent SQL injection."""
    query = "DROP DATABASE %s" % mysql_quote_identifier(db, 'database')
    cursor.execute(query)
    return True
def db_dump(module, host, user, password, db_name, target, all_databases, port, socket=None):
    """Run mysqldump for one database (or all databases) into `target`.

    Output is piped through gzip/bzip2/xz when `target` carries the matching
    file extension. Returns the (rc, stdout, stderr) triple of the command.
    """
    cmd = module.get_bin_path('mysqldump', True)
    cmd += " --quick --user=%s --password=%s" % (pipes.quote(user), pipes.quote(password))
    if socket is not None:
        cmd += " --socket=%s" % pipes.quote(socket)
    else:
        cmd += " --host=%s --port=%i" % (pipes.quote(host), port)
    if all_databases:
        cmd += " --all-databases"
    else:
        cmd += " %s" % pipes.quote(db_name)

    # Pick a compressor from the target's extension, if any.
    compressors = {'.gz': 'gzip', '.bz2': 'bzip2', '.xz': 'xz'}
    extension = os.path.splitext(target)[-1]
    compressor = compressors.get(extension)
    if compressor is not None:
        path = module.get_bin_path(compressor, True)
        cmd = '%s | %s > %s' % (cmd, path, pipes.quote(target))
    else:
        cmd += " > %s" % pipes.quote(target)

    return module.run_command(cmd, use_unsafe_shell=True)
def db_import(module, host, user, password, db_name, target, all_databases, port, socket=None):
    """Restore `target` (optionally gzip/bzip2/xz compressed) into the database.

    Returns the (rc, stdout, stderr) triple of the mysql client invocation.
    """
    if not os.path.exists(target):
        return module.fail_json(msg="target %s does not exist on the host" % target)

    cmd = [module.get_bin_path('mysql', True)]
    if user:
        cmd.append("--user=%s" % pipes.quote(user))
    if password:
        cmd.append("--password=%s" % pipes.quote(password))
    if socket is not None:
        cmd.append("--socket=%s" % pipes.quote(socket))
    else:
        cmd.append("--host=%s" % pipes.quote(host))
        cmd.append("--port=%i" % port)
    if not all_databases:
        cmd.append("-D")
        cmd.append(pipes.quote(db_name))

    # Pick a decompressor from the target's extension, if any.
    comp_prog_path = None
    if os.path.splitext(target)[-1] == '.gz':
        comp_prog_path = module.get_bin_path('gzip', required=True)
    elif os.path.splitext(target)[-1] == '.bz2':
        comp_prog_path = module.get_bin_path('bzip2', required=True)
    elif os.path.splitext(target)[-1] == '.xz':
        comp_prog_path = module.get_bin_path('xz', required=True)
    if comp_prog_path:
        # Stream the decompressed dump into mysql: `comp -dc target | mysql ...`
        p1 = subprocess.Popen([comp_prog_path, '-dc', target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        p2 = subprocess.Popen(cmd, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout2, stderr2) = p2.communicate()
        # Close our handle on p1's stdout so p1 sees a broken pipe if p2 died.
        p1.stdout.close()
        p1.wait()
        if p1.returncode != 0:
            # Decompression failed; report its stderr rather than mysql's.
            stderr1 = p1.stderr.read()
            return p1.returncode, '', stderr1
        else:
            return p2.returncode, stdout2, stderr2
    else:
        cmd = ' '.join(cmd)
        cmd += " < %s" % pipes.quote(target)
        rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
        return rc, stdout, stderr
def db_create(cursor, db, encoding, collation):
    """Create database `db`, optionally with an explicit character set / collation.

    The identifier is quoted directly; encoding and collation are passed as
    bound query parameters.
    """
    query_params = dict(enc=encoding, collate=collation)
    query = ['CREATE DATABASE %s' % mysql_quote_identifier(db, 'database')]
    if encoding:
        query.append("CHARACTER SET %(enc)s")
    if collation:
        query.append("COLLATE %(collate)s")
    query = ' '.join(query)
    res = cursor.execute(query, query_params)
    return True
def strip_quotes(s):
    """Remove one pair of surrounding single or double quotes.

    >>> strip_quotes('hello')
    'hello'
    >>> strip_quotes('"hello"')
    'hello'
    >>> strip_quotes("'hello'")
    'hello'
    >>> strip_quotes("'hello")
    "'hello"
    """
    single_quote = "'"
    double_quote = '"'
    if s.startswith(single_quote) and s.endswith(single_quote):
        # Remove exactly one quote from each end. The previous str.strip()
        # removed *runs* of quotes, turning "''x''" into "x" instead of "'x'".
        s = s[1:-1]
    elif s.startswith(double_quote) and s.endswith(double_quote):
        s = s[1:-1]
    return s
def config_get(config, section, option):
    """ Calls ConfigParser.get and strips quotes

    MySQL option files may quote values; see:
    http://dev.mysql.com/doc/refman/5.0/en/option-files.html
    """
    return strip_quotes(config.get(section, option))
def load_mycnf():
    """Read default MySQL credentials from ~/.my.cnf.

    Returns a dict with `user` and `passwd` keys, or False when the file is
    missing, unreadable, or lacks the needed options.
    """
    config = ConfigParser.RawConfigParser()
    mycnf = os.path.expanduser('~/.my.cnf')
    if not os.path.exists(mycnf):
        return False
    try:
        config.readfp(open(mycnf))
    except (IOError):
        return False
    # We support two forms of passwords in .my.cnf, both pass= and password=,
    # as these are both supported by MySQL.
    try:
        passwd = config_get(config, 'client', 'password')
    except (ConfigParser.NoOptionError):
        try:
            passwd = config_get(config, 'client', 'pass')
        except (ConfigParser.NoOptionError):
            return False
    try:
        creds = dict(user=config_get(config, 'client', 'user'),passwd=passwd)
    except (ConfigParser.NoOptionError):
        return False
    return creds
# ===========================================
# Module execution.
#
def main():
    """Module entry point: parse arguments, connect, then create/drop/dump/import the database.

    NOTE: this file is Python 2 (`except Exception, e` syntax below).
    """
    module = AnsibleModule(
        argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default="localhost"),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            name=dict(required=True, aliases=['db']),
            encoding=dict(default=""),
            collation=dict(default=""),
            target=dict(default=None),
            state=dict(default="present", choices=["absent", "present","dump", "import"]),
        )
    )

    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")

    db = module.params["name"]
    encoding = module.params["encoding"]
    collation = module.params["collation"]
    state = module.params["state"]
    target = module.params["target"]
    socket = module.params["login_unix_socket"]
    login_port = module.params["login_port"]
    if login_port < 0 or login_port > 65535:
        module.fail_json(msg="login_port must be a valid unix port number (0-65535)")

    # make sure the target path is expanded for ~ and $HOME
    if target is not None:
        target = os.path.expandvars(os.path.expanduser(target))

    # Either the caller passes both a username and password with which to connect to
    # mysql, or they pass neither and allow this module to read the credentials from
    # ~/.my.cnf, finally falling back to root with no password.
    login_password = module.params["login_password"]
    login_user = module.params["login_user"]
    if login_user is None and login_password is None:
        mycnf_creds = load_mycnf()
        if mycnf_creds is False:
            login_user = "root"
            login_password = ""
        else:
            login_user = mycnf_creds["user"]
            login_password = mycnf_creds["passwd"]
    elif login_password is None or login_user is None:
        module.fail_json(msg="when supplying login arguments, both login_user and login_password must be provided")
    login_host = module.params["login_host"]

    if state in ['dump','import']:
        if target is None:
            module.fail_json(msg="with state=%s target is required" % (state))
        if db == 'all':
            # name=all maps to mysqldump's --all-databases; connect through
            # the built-in 'mysql' database.
            connect_to_db = 'mysql'
            db = 'mysql'
            all_databases = True
        else:
            connect_to_db = db
            all_databases = False
    else:
        if db == 'all':
            module.fail_json(msg="name is not allowed to equal 'all' unless state equals import, or dump.")
        connect_to_db = ''
    try:
        if socket:
            # Validate the socket path before handing it to MySQLdb.
            try:
                socketmode = os.stat(socket).st_mode
                if not stat.S_ISSOCK(socketmode):
                    module.fail_json(msg="%s, is not a socket, unable to connect" % socket)
            except OSError:
                module.fail_json(msg="%s, does not exist, unable to connect" % socket)
            db_connection = MySQLdb.connect(host=module.params["login_host"], unix_socket=socket, user=login_user, passwd=login_password, db=connect_to_db)
        elif login_port != 3306 and module.params["login_host"] == "localhost":
            module.fail_json(msg="login_host is required when login_port is defined, login_host cannot be localhost when login_port is defined")
        else:
            db_connection = MySQLdb.connect(host=module.params["login_host"], port=login_port, user=login_user, passwd=login_password, db=connect_to_db)
        cursor = db_connection.cursor()
    except Exception, e:
        errno, errstr = e.args
        if "Unknown database" in str(e):
            module.fail_json(msg="ERROR: %s %s" % (errno, errstr))
        else:
            module.fail_json(msg="unable to connect, check login credentials (login_user, and login_password, which can be defined in ~/.my.cnf), check that mysql socket exists and mysql server is running (ERROR: %s %s)" % (errno, errstr))

    changed = False
    if db_exists(cursor, db):
        if state == "absent":
            try:
                changed = db_delete(cursor, db)
            except Exception, e:
                module.fail_json(msg="error deleting database: " + str(e))
        elif state == "dump":
            rc, stdout, stderr = db_dump(module, login_host, login_user,
                                        login_password, db, target, all_databases,
                                        port=login_port,
                                        socket=module.params['login_unix_socket'])
            if rc != 0:
                module.fail_json(msg="%s" % stderr)
            else:
                module.exit_json(changed=True, db=db, msg=stdout)
        elif state == "import":
            rc, stdout, stderr = db_import(module, login_host, login_user,
                                          login_password, db, target, all_databases,
                                          port=login_port,
                                          socket=module.params['login_unix_socket'])
            if rc != 0:
                module.fail_json(msg="%s" % stderr)
            else:
                module.exit_json(changed=True, db=db, msg=stdout)
    else:
        if state == "present":
            try:
                changed = db_create(cursor, db, encoding, collation)
            except Exception, e:
                module.fail_json(msg="error creating database: " + str(e))

    module.exit_json(changed=changed, db=db)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
if __name__ == '__main__':
main()
| gpl-3.0 |
chrissmall22/odl-client | odlclient/rest.py | 1 | 4785 | #!/usr/bin/env python
#
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import requests
from odlclient.version import __version__
from odlclient.datatypes import JsonObjectFactory, JSON_MAP, PLURALS
from odlclient.error import raise_errors, NotFound
# Default headers sent with every request: JSON content type plus a
# client-identifying user agent string.
UA = {
    'content-type': 'application/json',
    'user-agent': 'odlnclient/{0} '.format(__version__) +
                  'python-requests/{0}'.format(requests.__version__)
}
class RestClient(object):
    """Thin wrapper around `requests` for talking to an OpenDaylight REST API.

    Handles auth, mapping JSON payloads onto datatype objects, and zip file
    download/upload.
    """

    def __init__(self, auth):
        self.auth = auth
        # Common keyword arguments applied to every request.
        self.args = {"auth": self.auth,
                     "verify": False,
                     "headers": UA,
                     "timeout": 30
                     }

    def _download_args(self):
        """Request kwargs for downloading a zip file (streamed, longer timeout)."""
        args = copy.deepcopy(self.args)
        args["headers"]["content-type"] = 'application/zip'
        args["timeout"] = 60
        args["stream"] = True
        return args

    def _upload_args(self, filename):
        """Request kwargs for uploading a zip file named `filename`."""
        args = copy.deepcopy(self.args)
        args["headers"]["content-type"] = 'application/zip'
        args["headers"]["Filename"] = filename
        args["timeout"] = 60
        return args

    def _get(self, url, is_file=False):
        if is_file:
            args = self._download_args()
        else:
            args = self.args
        r = requests.get(url, **args)
        return r

    def _put(self, url, data):
        r = requests.put(url, data=data, **self.args)
        return r

    def _post(self, url, data, is_file=False):
        if is_file:
            # `data` is a path to a zip file. Open it in binary mode so the
            # payload is streamed as raw bytes -- text mode would try to
            # decode the zip and corrupt the upload.
            args = self._upload_args(data)
            with open(data, 'rb') as f:
                r = requests.post(url, data=f, **args)
        else:
            args = self.args
            r = requests.post(url, data=data, **args)
        return r

    def _delete(self, url, data=None):
        if data is None:
            r = requests.delete(url, **self.args)
        else:
            r = requests.delete(url, data=data, **self.args)
        return r

    def _head(self, url):
        r = requests.head(url, **self.args)
        return r

    def get(self, url, is_file=False):
        """GET `url` and decode the response.

        JSON responses are mapped to datatype objects via JsonObjectFactory,
        text/plain is returned as a string, and zip attachments are saved to
        disk (the saved filename is returned).
        """
        result = []
        if is_file:
            r = self._get(url, is_file=True)
        else:
            r = self._get(url)
        raise_errors(r)
        content = r.headers['Content-Type']
        if content == 'application/json':
            data = r.json()
            # Find the payload key: any top-level key except 'version'.
            # NOTE(review): assumes the payload always carries at least one
            # non-'version' key; otherwise `key` is unbound -- confirm with API.
            for k in list(data):
                if not k == 'version':
                    key = k
            if key not in PLURALS:
                try:
                    datatype = JSON_MAP[key]
                except KeyError:
                    raise NotFound(key)
                if datatype is None:
                    result = data[key]
                else:
                    result = JsonObjectFactory.create(datatype, data[key])
            else:
                # Plural payloads decode into a list of datatype objects.
                datatype = PLURALS[key]
                for d in data[key]:
                    result.append(JsonObjectFactory.create(datatype, d))
        elif content == 'text/plain':
            result = r.text
        elif r.headers['Content-Type'] == 'application/zip':
            # Strip the "attachment; filename='" prefix from Content-Disposition.
            filename = r.headers["Content-Disposition"][21:]
            # Save the streamed data to a file.
            with open(filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:  # filter out keep-alive new chunks
                        if type(chunk) == bytes:
                            f.write(chunk)
                        else:
                            f.write(chunk.encode("UTF-8"))
                f.flush()
            return filename
        else:
            result = None
        return result

    def post(self, url, data, is_file=False):
        r = self._post(url, data, is_file)
        raise_errors(r)
        return r

    def put(self, url, data):
        r = self._put(url, data)
        raise_errors(r)
        return r

    def delete(self, url, data=None):
        r = self._delete(url, data)
        raise_errors(r)
        return r

    def head(self, url):
        r = self._head(url)
        raise_errors(r)
        return r
| apache-2.0 |
equialgo/scikit-learn | examples/cluster/plot_adjusted_for_chance_measures.py | 105 | 4300 | """
==========================================================
Adjustment for chance in clustering performance evaluation
==========================================================
The following plots demonstrate the impact of the number of clusters and
number of samples on various clustering performance evaluation metrics.
Non-adjusted measures such as the V-Measure show a dependency between
the number of clusters and the number of samples: the mean V-Measure
of random labeling increases significantly as the number of clusters is
closer to the total number of samples used to compute the measure.
Adjusted for chance measure such as ARI display some random variations
centered around a mean score of 0.0 for any number of samples and
clusters.
Only adjusted measures can hence safely be used as a consensus index
to evaluate the average stability of clustering algorithms for a given
value of k on various overlapping sub-samples of the dataset.
"""
print(__doc__)
# Author: Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from time import time
from sklearn import metrics
def uniform_labelings_scores(score_func, n_samples, n_clusters_range,
                             fixed_n_classes=None, n_runs=5, seed=42):
    """Score pairs of random uniform cluster labelings.

    Both labelings in a pair share the same number of clusters, for every
    value in ``n_clusters_range``. When ``fixed_n_classes`` is given, the
    first labeling is held fixed and plays the role of a ground-truth class
    assignment with that many classes.

    Returns:
        ndarray of shape (len(n_clusters_range), n_runs).
    """
    draw = np.random.RandomState(seed).randint
    scores = np.zeros((len(n_clusters_range), n_runs))

    if fixed_n_classes is not None:
        labels_a = draw(low=0, high=fixed_n_classes, size=n_samples)

    for row, n_clusters in enumerate(n_clusters_range):
        for run in range(n_runs):
            if fixed_n_classes is None:
                labels_a = draw(low=0, high=n_clusters, size=n_samples)
            labels_b = draw(low=0, high=n_clusters, size=n_samples)
            scores[row, run] = score_func(labels_a, labels_b)
    return scores
score_funcs = [
    metrics.adjusted_rand_score,
    metrics.v_measure_score,
    metrics.adjusted_mutual_info_score,
    metrics.mutual_info_score,
]

# 2 independent random clusterings with equal cluster number

n_samples = 100
# `np.int` was deprecated in NumPy 1.20 and removed in 1.24; the builtin
# `int` is the equivalent dtype here.
n_clusters_range = np.linspace(2, n_samples, 10).astype(int)

plt.figure(1)

plots = []
names = []
for score_func in score_funcs:
    print("Computing %s for %d values of n_clusters and n_samples=%d"
          % (score_func.__name__, len(n_clusters_range), n_samples))

    t0 = time()
    scores = uniform_labelings_scores(score_func, n_samples, n_clusters_range)
    print("done in %0.3fs" % (time() - t0))
    # errorbar returns (line, caps, bars); keep the line for the legend.
    plots.append(plt.errorbar(
        n_clusters_range, np.median(scores, axis=1), scores.std(axis=1))[0])
    names.append(score_func.__name__)

plt.title("Clustering measures for 2 random uniform labelings\n"
          "with equal number of clusters")
plt.xlabel('Number of clusters (Number of samples is fixed to %d)' % n_samples)
plt.ylabel('Score value')
plt.legend(plots, names)
# The ymin/ymax keyword aliases were removed from matplotlib; use bottom/top.
plt.ylim(bottom=-0.05, top=1.05)
# Random labeling with varying n_clusters against ground class labels
# with fixed number of clusters

n_samples = 1000
# `np.int` was removed from NumPy (1.24); use the builtin int dtype.
n_clusters_range = np.linspace(2, 100, 10).astype(int)
n_classes = 10

plt.figure(2)

plots = []
names = []
for score_func in score_funcs:
    print("Computing %s for %d values of n_clusters and n_samples=%d"
          % (score_func.__name__, len(n_clusters_range), n_samples))

    t0 = time()
    scores = uniform_labelings_scores(score_func, n_samples, n_clusters_range,
                                      fixed_n_classes=n_classes)
    print("done in %0.3fs" % (time() - t0))
    plots.append(plt.errorbar(
        n_clusters_range, scores.mean(axis=1), scores.std(axis=1))[0])
    names.append(score_func.__name__)

plt.title("Clustering measures for random uniform labeling\n"
          "against reference assignment with %d classes" % n_classes)
plt.xlabel('Number of clusters (Number of samples is fixed to %d)' % n_samples)
plt.ylabel('Score value')
# The ymin/ymax keyword aliases were removed from matplotlib; use bottom/top.
plt.ylim(bottom=-0.05, top=1.05)
plt.legend(plots, names)
plt.show()
| bsd-3-clause |
laysakura/chainer | cupy/logic/ops.py | 9 | 1033 | from cupy import elementwise
from cupy.logic import ufunc
logical_and = ufunc.create_comparison(
'logical_and', '&&',
'''Computes the logical AND of two arrays.
.. seealso:: :data:`numpy.logical_and`
''')
logical_or = ufunc.create_comparison(
'logical_or', '||',
'''Computes the logical OR of two arrays.
.. seealso:: :data:`numpy.logical_or`
''')
logical_not = elementwise.create_ufunc(
'cupy_logical_not',
('?->?', 'b->?', 'B->?', 'h->?', 'H->?', 'i->?', 'I->?', 'l->?', 'L->?',
'q->?', 'Q->?', 'e->?', 'f->?', 'd->?'),
'out0 = !in0',
doc='''Computes the logical NOT of an array.
.. seealso:: :data:`numpy.logical_not`
''')
logical_xor = elementwise.create_ufunc(
'cupy_logical_xor',
('??->?', 'bb->?', 'BB->?', 'hh->?', 'HH->?', 'ii->?', 'II->?', 'll->?',
'LL->?', 'qq->?', 'QQ->?', 'ee->?', 'ff->?', 'dd->?'),
'out0 = !in0 != !in1',
doc='''Computes the logical XOR of two arrays.
.. seealso:: :data:`numpy.logical_xor`
''')
| mit |
eagleeyetom/android_kernel_oppo_msm8974 | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    # The single optional argument is either a numeric pid or a comm name.
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]

# Nested comm -> pid -> syscall-id -> count mapping with autovivification.
syscalls = autodict()
def trace_begin():
    # Called by perf before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf when tracing stops; emit the aggregated counts.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Per-event handler: count one syscall entry, honouring the comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First hit for this (comm, pid, id): the autodict leaf is not an int yet.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Print the aggregated per-comm/per-pid syscall counts, largest count first.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort by count descending (ties broken by syscall id).
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
MarkTheF4rth/youtube-dl | youtube_dl/extractor/googlesearch.py | 168 | 1699 | from __future__ import unicode_literals
import itertools
import re
from .common import SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
)
class GoogleSearchIE(SearchInfoExtractor):
    """Extractor for Google Video search (``gvsearchN:query`` URLs).

    Scrapes the HTML result pages and yields the outbound video links as a
    playlist of ``url`` entries.
    """
    IE_DESC = 'Google Video search'
    _MAX_RESULTS = 1000
    IE_NAME = 'video.google:search'
    _SEARCH_KEY = 'gvsearch'
    _TEST = {
        'url': 'gvsearch15:python language',
        'info_dict': {
            'id': 'python language',
            'title': 'python language',
        },
        'playlist_count': 15,
    }

    def _get_n_results(self, query, n):
        """Get a specified number of results for a query"""
        entries = []
        res = {
            '_type': 'playlist',
            'id': query,
            'title': query,
        }

        # Walk the paginated results (10 hits per page) until we have
        # enough entries or there is no "next" link.
        for pagenum in itertools.count():
            result_url = (
                'http://www.google.com/search?tbm=vid&q=%s&start=%s&hl=en'
                % (compat_urllib_parse.quote_plus(query), pagenum * 10))

            webpage = self._download_webpage(
                result_url, 'gvsearch:' + query,
                note='Downloading result page ' + str(pagenum + 1))

            for hit_idx, mobj in enumerate(re.finditer(
                    r'<h3 class="r"><a href="([^"]+)"', webpage)):
                # Skip playlists: only real video hits have a matching
                # "vidthumb" anchor (thumbnail ids are 1-based).
                if not re.search(r'id="vidthumb%d"' % (hit_idx + 1), webpage):
                    continue

                entries.append({
                    '_type': 'url',
                    'url': mobj.group(1)
                })

            # Stop when n entries were collected or the last page is reached
            # ("pnnext" is the id of the next-page link).
            if (len(entries) >= n) or not re.search(r'id="pnnext"', webpage):
                res['entries'] = entries[:n]
                return res
| unlicense |
mbauskar/phr-frappe | frappe/model/base_document.py | 9 | 11588 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json, sys
from frappe import _
from frappe.utils import cint, flt, now, cstr, strip_html
from frappe.model import default_fields
from frappe.model.naming import set_new_name
class BaseDocument(object):
    """Dict-like base class for Frappe documents.

    Stores field values directly in ``__dict__``, lazily loads DocType
    metadata, manages child-table rows, and provides DB persistence plus
    field validation helpers.
    """
    # Keys that get() must never auto-initialize to an empty child table.
    ignore_in_getter = ("doctype", "_meta", "meta", "_table_fields", "_valid_columns")

    def __init__(self, d):
        self.update(d)
        self.dont_update_if_missing = []

    @property
    def meta(self):
        # Lazily fetch and cache the DocType metadata.
        if not hasattr(self, "_meta"):
            self._meta = frappe.get_meta(self.doctype)
        return self._meta

    def update(self, d):
        """Set every key of dict *d* on this document and return self."""
        if "doctype" in d:
            self.set("doctype", d.get("doctype"))
        # first set default field values of base document
        for key in default_fields:
            if key in d:
                self.set(key, d.get(key))
        for key, value in d.iteritems():
            self.set(key, value)
        return self

    def update_if_missing(self, d):
        """Like update(), but only fill keys that are currently None."""
        if isinstance(d, BaseDocument):
            d = d.get_valid_dict()
        if "doctype" in d:
            self.set("doctype", d.get("doctype"))
        for key, value in d.iteritems():
            # dont_update_if_missing is a list of fieldnames, for which, you don't want to set default value
            if (self.get(key) is None) and (value is not None) and (key not in self.dont_update_if_missing):
                self.set(key, value)

    def get_db_value(self, key):
        """Read *key* for this document straight from the database."""
        return frappe.db.get_value(self.doctype, self.name, key)

    def get(self, key=None, filters=None, limit=None, default=None):
        """Get a value; with *filters* (dict) filter child rows of *key*.

        A second positional non-dict argument is treated as *default*.
        With no *key*, the raw ``__dict__`` is returned.
        """
        if key:
            if isinstance(key, dict):
                # get({filters}) filters across all child rows.
                return _filter(self.get_all_children(), key, limit=limit)
            if filters:
                if isinstance(filters, dict):
                    value = _filter(self.__dict__.get(key, []), filters, limit=limit)
                else:
                    # get(key, default) called positionally.
                    default = filters
                    filters = None
                    value = self.__dict__.get(key, default)
            else:
                value = self.__dict__.get(key, default)

            # Auto-initialize missing child tables to an empty list.
            if value is None and key not in self.ignore_in_getter \
                and key in (d.fieldname for d in self.meta.get_table_fields()):
                self.set(key, [])
                value = self.__dict__.get(key)

            return value
        else:
            return self.__dict__

    def getone(self, key, filters=None):
        """Return the first child row of *key* matching *filters*."""
        return self.get(key, filters=filters, limit=1)[0]

    def set(self, key, value):
        # Lists are re-wrapped so each element becomes a child document.
        if isinstance(value, list):
            self.__dict__[key] = []
            self.extend(key, value)
        else:
            self.__dict__[key] = value

    def delete_key(self, key):
        if key in self.__dict__:
            del self.__dict__[key]

    def append(self, key, value=None):
        """Append a child row (dict or BaseDocument) to table field *key*."""
        if value==None:
            value={}
        if isinstance(value, (dict, BaseDocument)):
            if not self.__dict__.get(key):
                self.__dict__[key] = []
            value = self._init_child(value, key)
            self.__dict__[key].append(value)
            return value
        else:
            raise ValueError

    def extend(self, key, value):
        """Append each element of list *value* as a child row of *key*."""
        if isinstance(value, list):
            for v in value:
                self.append(key, v)
        else:
            raise ValueError

    def remove(self, doc):
        # Remove a child document from its parent table in memory.
        self.get(doc.parentfield).remove(doc)

    def _init_child(self, value, key):
        """Wrap *value* as a child document and set parent linkage fields."""
        if not self.doctype:
            return value
        if not isinstance(value, BaseDocument):
            if "doctype" not in value:
                value["doctype"] = self.get_table_field_doctype(key)
                if not value["doctype"]:
                    raise AttributeError, key
            value = BaseDocument(value)
            value.init_valid_columns()

        value.parent = self.name
        value.parenttype = self.doctype
        value.parentfield = key
        # idx is 1-based position within the child table.
        if not getattr(value, "idx", None):
            value.idx = len(self.get(key) or []) + 1
        # Rows without a name are new (not yet saved).
        if not getattr(value, "name", None):
            value.__dict__['__islocal'] = 1
        return value

    def get_valid_dict(self):
        """Return {column: value} for every valid DB column."""
        d = {}
        for fieldname in self.meta.get_valid_columns():
            d[fieldname] = self.get(fieldname)
        return d

    def init_valid_columns(self):
        """Ensure every default and valid column exists (as None) on self."""
        for key in default_fields:
            if key not in self.__dict__:
                self.__dict__[key] = None

        # Bootstrapping case: core doctypes read columns from the table
        # itself to avoid recursive metadata loading.
        if self.doctype in ("DocField", "DocPerm") and self.parent in ("DocType", "DocField", "DocPerm"):
            from frappe.model.meta import get_table_columns
            valid = get_table_columns(self.doctype)
        else:
            valid = self.meta.get_valid_columns()

        for key in valid:
            if key not in self.__dict__:
                self.__dict__[key] = None

    def is_new(self):
        return self.get("__islocal")

    def as_dict(self, no_nulls=False):
        """Return a plain dict representation, child tables included."""
        doc = self.get_valid_dict()
        doc["doctype"] = self.doctype
        for df in self.meta.get_table_fields():
            children = self.get(df.fieldname) or []
            doc[df.fieldname] = [d.as_dict(no_nulls=no_nulls) for d in children]

        if no_nulls:
            for k in doc.keys():
                if doc[k] is None:
                    del doc[k]

        # Preserve selected meta keys on the serialized form.
        if self.get("_user_tags"):
            doc["_user_tags"] = self.get("_user_tags")
        if self.get("__islocal"):
            doc["__islocal"] = 1
        elif self.get("__onload"):
            doc["__onload"] = self.get("__onload")

        return doc

    def as_json(self):
        return json.dumps(self.as_dict(), indent=1, sort_keys=True)

    def get_table_field_doctype(self, fieldname):
        # Table fields store the child DocType name in "options".
        return self.meta.get_field(fieldname).options

    def get_parentfield_of_doctype(self, doctype):
        fieldname = [df.fieldname for df in self.meta.get_table_fields() if df.options==doctype]
        return fieldname[0] if fieldname else None

    def db_insert(self):
        """INSERT this document as a new row; raises NameError on duplicates."""
        set_new_name(self)
        d = self.get_valid_dict()
        columns = d.keys()
        try:
            frappe.db.sql("""insert into `tab{doctype}`
                ({columns}) values ({values})""".format(
                    doctype = self.doctype,
                    columns = ", ".join(["`"+c+"`" for c in columns]),
                    values = ", ".join(["%s"] * len(columns))
                ), d.values())
        except Exception, e:
            # MySQL error 1062 = duplicate entry for a unique key.
            if e.args[0]==1062:
                type, value, traceback = sys.exc_info()
                frappe.msgprint(_("Duplicate name {0} {1}").format(self.doctype, self.name))
                raise frappe.NameError, (self.doctype, self.name, e), traceback
            else:
                raise

        self.set("__islocal", False)

    def db_update(self):
        """UPDATE this document's row; inserts instead if it is new."""
        if self.get("__islocal") or not self.name:
            self.db_insert()
            return

        d = self.get_valid_dict()
        columns = d.keys()
        frappe.db.sql("""update `tab{doctype}`
            set {values} where name=%s""".format(
                doctype = self.doctype,
                values = ", ".join(["`"+c+"`=%s" for c in columns])
            ), d.values() + [d.get("name")])

    def db_set(self, fieldname, value):
        """Set one field in memory and in the database, updating modified info."""
        self.set(fieldname, value)
        self.set("modified", now())
        self.set("modified_by", frappe.session.user)
        frappe.db.set_value(self.doctype, self.name, fieldname, value, self.modified, self.modified_by)

    def _fix_numeric_types(self):
        """Coerce Check/Int/Float/Currency/Percent fields to numeric types."""
        for df in self.meta.get("fields"):
            if df.fieldtype == "Check":
                self.set(df.fieldname, cint(self.get(df.fieldname)))
            elif self.get(df.fieldname) is not None:
                if df.fieldtype == "Int":
                    self.set(df.fieldname, cint(self.get(df.fieldname)))
                elif df.fieldtype in ("Float", "Currency", "Percent"):
                    self.set(df.fieldname, flt(self.get(df.fieldname)))

        if self.docstatus is not None:
            self.docstatus = cint(self.docstatus)

    def _get_missing_mandatory_fields(self):
        """Get mandatory fields that do not have any values"""
        def get_msg(df):
            # Build a user-facing error message for the missing field.
            if df.fieldtype == "Table":
                return "{}: {}: {}".format(_("Error"), _("Data missing in table"), _(df.label))
            elif self.parentfield:
                return "{}: {} #{}: {}: {}".format(_("Error"), _("Row"), self.idx,
                    _("Value missing for"), _(df.label))
            else:
                return "{}: {}: {}".format(_("Error"), _("Value missing for"), _(df.label))

        missing = []
        for df in self.meta.get("fields", {"reqd": 1}):
            # Empty after stripping HTML also counts as missing.
            if self.get(df.fieldname) in (None, []) or not strip_html(cstr(self.get(df.fieldname))).strip():
                missing.append((df.fieldname, get_msg(df)))
        return missing

    def get_invalid_links(self, is_submittable=False):
        """Return (invalid_links, cancelled_links) for all Link/Dynamic Link fields."""
        def get_msg(df, docname):
            if self.parentfield:
                return "{} #{}: {}: {}".format(_("Row"), self.idx, _(df.label), docname)
            else:
                return "{}: {}".format(_(df.label), docname)

        invalid_links = []
        cancelled_links = []
        for df in self.meta.get_link_fields() + self.meta.get("fields",
            {"fieldtype":"Dynamic Link"}):
            docname = self.get(df.fieldname)
            if docname:
                if df.fieldtype=="Link":
                    doctype = df.options
                    if not doctype:
                        frappe.throw(_("Options not set for link field {0}").format(df.fieldname))
                else:
                    # Dynamic Link: the target doctype lives in another field.
                    doctype = self.get(df.options)
                    if not doctype:
                        frappe.throw(_("{0} must be set first").format(self.meta.get_label(df.options)))

                # MySQL is case insensitive. Preserve case of the original docname in the Link Field.
                value = frappe.db.get_value(doctype, docname)
                setattr(self, df.fieldname, value)

                if not value:
                    invalid_links.append((df.fieldname, docname, get_msg(df, docname)))
                elif (df.fieldname != "amended_from"
                    and (is_submittable or self.meta.is_submittable) and frappe.get_meta(doctype).is_submittable
                    and cint(frappe.db.get_value(doctype, docname, "docstatus"))==2):
                    # Linked document exists but has been cancelled (docstatus 2).
                    cancelled_links.append((df.fieldname, docname, get_msg(df, docname)))

        return invalid_links, cancelled_links

    def _validate_selects(self):
        """Ensure Select field values are among their declared options."""
        if frappe.flags.in_import:
            return

        for df in self.meta.get_select_fields():
            if df.fieldname=="naming_series" or not (self.get(df.fieldname) and df.options):
                continue

            options = (df.options or "").split("\n")

            # if only empty options
            if not filter(None, options):
                continue

            # strip and set
            self.set(df.fieldname, cstr(self.get(df.fieldname)).strip())
            value = self.get(df.fieldname)

            # Test fixtures may use synthetic "_T-" prefixed values.
            if value not in options and not (frappe.flags.in_test and value.startswith("_T-")):
                # show an elaborate message
                prefix = _("Row #{0}:").format(self.idx) if self.get("parentfield") else ""
                label = _(self.meta.get_label(df.fieldname))
                comma_options = '", "'.join(_(each) for each in options)

                frappe.throw(_('{0} {1} cannot be "{2}". It should be one of "{3}"').format(prefix, label,
                    value, comma_options))

    def _validate_constants(self):
        """Reject changes to fields marked set_only_once after creation."""
        if frappe.flags.in_import or self.is_new():
            return

        constants = [d.fieldname for d in self.meta.get("fields", {"set_only_once": 1})]
        if constants:
            values = frappe.db.get_value(self.doctype, self.name, constants, as_dict=True)

        for fieldname in constants:
            if self.get(fieldname) != values.get(fieldname):
                frappe.throw(_("Value cannot be changed for {0}").format(self.meta.get_label(fieldname)),
                    frappe.CannotChangeConstantError)

    def _validate_update_after_submit(self):
        """Reject changes to non allow_on_submit fields on submitted docs."""
        current = frappe.db.get_value(self.doctype, self.name, "*", as_dict=True)
        for key, value in current.iteritems():
            df = self.meta.get_field(key)
            if df and not df.allow_on_submit and (self.get(key) or value) and self.get(key) != value:
                frappe.throw(_("Not allowed to change {0} after submission").format(df.label),
                    frappe.UpdateAfterSubmitError)

    def get_formatted(self, fieldname, doc=None, currency=None):
        """Return the display-formatted value of *fieldname*."""
        from frappe.utils.formatters import format_value
        return format_value(self.get(fieldname), self.meta.get_field(fieldname),
            doc=doc or self, currency=currency)
def _filter(data, filters, limit=None):
    """Filter a list of documents by *filters*, returning at most *limit*.

    pass filters as:
        {"key": "val", "key": ["!=", "val"],
        "key": ["in", "val"], "key": ["not in", "val"], "key": "^val",
        "key" : True (exists), "key": False (does not exist) }"""
    out = []

    for d in data:
        add = True
        for f in filters:
            fval = filters[f]

            # Normalize shorthand filter values to (operator, operand).
            if fval is True:
                fval = ("not None", fval)
            elif fval is False:
                fval = ("None", fval)
            elif not isinstance(fval, (tuple, list)):
                if isinstance(fval, basestring) and fval.startswith("^"):
                    fval = ("^", fval[1:])
                else:
                    fval = ("=", fval)

            if not frappe.compare(getattr(d, f, None), fval[0], fval[1]):
                add = False
                break

        if add:
            out.append(d)
            # Bug fix: the old check `(len(out)-1)==limit` only broke after
            # collecting limit+1 matches; stop at exactly `limit`.
            if limit and len(out) == limit:
                break

    return out
| mit |
onesfreedom/pybuilder | src/unittest/python/graph_utils_tests.py | 7 | 2674 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from pybuilder.graph_utils import Graph, GraphHasCycles
class GraphUtilsTests(TestCase):
    """Exercise cycle detection in pybuilder's Graph.

    A "trivial" cycle is a node that references itself; nontrivial cycles
    involve two or more nodes.
    """

    def test_should_find_trivial_cycle_in_graph_when_there_is_one(self):
        with self.assertRaises(GraphHasCycles):
            Graph({"a": "a"}).assert_no_trivial_cycles_present()

    def test_should_find_trivial_cycle_in_graph_when_there_are_two(self):
        with self.assertRaises(GraphHasCycles):
            Graph({"a": "a", "b": "b"}).assert_no_trivial_cycles_present()

    def test_should_find_trivial_cycle_in_graph_when_searching_for_cycles(self):
        # The general cycle check must also catch self-loops.
        with self.assertRaises(GraphHasCycles):
            Graph({"a": "a"}).assert_no_cycles_present()

    def test_should_not_find_trivial_cycles_in_graph_when_there_are_none(self):
        acyclic = Graph({"a": "b", "b": "c", "d": "e"})
        acyclic.assert_no_trivial_cycles_present()

    def test_should_not_find_cycles_in_graph_when_there_are_none(self):
        acyclic = Graph({"a": "b", "b": "c", "d": "e"})
        acyclic.assert_no_cycles_present()

    def test_should_find_simple_nontrivial_cycle_in_graph_when_there_is_one(self):
        with self.assertRaises(GraphHasCycles):
            Graph({"a": "b", "b": "a"}).assert_no_cycles_present()

    def test_should_find_long_nontrivial_cycle_in_graph_when_there_is_one(self):
        cyclic = Graph({"a": "b", "b": "c", "c": "d", "d": "b"})
        with self.assertRaises(GraphHasCycles):
            cyclic.assert_no_cycles_present()

    def test_should_find_long_nontrivial_cycle_in_graph_when_there_are_two(self):
        # Two disjoint three-node cycles.
        cyclic = Graph({"a": "b", "b": "c", "c": "a", "d": "e", "e": "f", "f": "d"})
        with self.assertRaises(GraphHasCycles):
            cyclic.assert_no_cycles_present()
| apache-2.0 |
donkirkby/django | tests/auth_tests/test_views.py | 16 | 45028 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import itertools
import os
import re
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.tests.custom_user import CustomUser
from django.contrib.auth.views import login as login_view, redirect_to_login
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy
from django.db import connection
from django.http import HttpRequest, QueryDict
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import (
TestCase, ignore_warnings, modify_settings, override_settings,
)
from django.test.utils import patch_logger
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.http import urlquote
from django.utils.six.moves.urllib.parse import ParseResult, urlparse
from django.utils.translation import LANGUAGE_SESSION_KEY
from .models import UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
    LANGUAGES=[
        ('en', 'English'),
    ],
    LANGUAGE_CODE='en',
    TEMPLATES=AUTH_TEMPLATES,
    USE_TZ=False,
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the following test cases.
    """
    @classmethod
    def setUpTestData(cls):
        # u1-u3 share the SHA1 hash of the literal password "password";
        # u4/u5/u6 exercise empty, unmanageable and unknown password formats.
        cls.u1 = User.objects.create(
            password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
            last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient',
            first_name='Test', last_name='Client', email='testclient@example.com', is_staff=False, is_active=True,
            date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )
        cls.u2 = User.objects.create(
            password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
            last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive',
            first_name='Inactive', last_name='User', email='testclient2@example.com', is_staff=False, is_active=False,
            date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )
        cls.u3 = User.objects.create(
            password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
            last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff',
            first_name='Staff', last_name='Member', email='staffmember@example.com', is_staff=True, is_active=True,
            date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )
        cls.u4 = User.objects.create(
            password='', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
            username='empty_password', first_name='Empty', last_name='Password', email='empty_password@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )
        cls.u5 = User.objects.create(
            password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
            username='unmanageable_password', first_name='Unmanageable', last_name='Password',
            email='unmanageable_password@example.com', is_staff=False, is_active=True,
            date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )
        cls.u6 = User.objects.create(
            password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False,
            username='unknown_password', first_name='Unknown', last_name='Password',
            email='unknown_password@example.com', is_staff=False, is_active=True,
            date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31)
        )

    def login(self, username='testclient', password='password'):
        """Log in via the login view and assert the session was created."""
        response = self.client.post('/login/', {
            'username': username,
            'password': password,
        })
        self.assertIn(SESSION_KEY, self.client.session)
        return response

    def logout(self):
        """Log out via the admin logout view and assert the session is gone."""
        response = self.client.get('/admin/logout/')
        self.assertEqual(response.status_code, 200)
        self.assertNotIn(SESSION_KEY, self.client.session)

    def assertFormError(self, response, error):
        """Assert that error is found in response.context['form'] errors"""
        form_errors = list(itertools.chain(*response.context['form'].errors.values()))
        self.assertIn(force_text(error), form_errors)

    def assertURLEqual(self, url, expected, parse_qs=False):
        """
        Given two URLs, make sure all their components (the ones given by
        urlparse) are equal, only comparing components that are present in both
        URLs.

        If `parse_qs` is True, then the querystrings are parsed with QueryDict.
        This is useful if you don't want the order of parameters to matter.
        Otherwise, the query strings are compared as-is.
        """
        fields = ParseResult._fields

        for attr, x, y in zip(fields, urlparse(url), urlparse(expected)):
            if parse_qs and attr == 'query':
                x, y = QueryDict(x), QueryDict(y)
            # Only compare components present in both URLs.
            if x and y and x != y:
                self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):

    def test_named_urls(self):
        """Every auth URL name shipped by django.contrib.auth is reversible."""
        cases = (
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb64': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        )
        for url_name, url_args, url_kwargs in cases:
            try:
                reverse(url_name, args=url_args, kwargs=url_kwargs)
            except NoReverseMatch:
                self.fail("Reversal of url named '%s' failed with NoReverseMatch" % url_name)
class PasswordResetTest(AuthViewsTestCase):
    """Tests for the password reset views (request, email, confirm, redirect)."""

    def test_email_not_found(self):
        """If the provided email is not registered, don't raise any error but
        also don't send any email."""
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn("http://", mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # optional multipart text/html email has been added. Make sure original,
        # default functionality is 100% the same
        self.assertFalse(mail.outbox[0].message().is_multipart())

    def test_extra_email_context(self):
        """
        extra_email_context should be available in the email template context.
        """
        response = self.client.post(
            '/password_reset_extra_email_context/',
            {'email': 'staffmember@example.com'},
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)

    def test_html_mail_template(self):
        """
        A multipart email with text/plain and text/html is sent
        if the html_email_template parameter is passed to the view
        """
        response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
        self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
        self.assertNotIn('<html>', message.get_payload(0).get_payload())
        self.assertIn('<html>', message.get_payload(1).get_payload())

    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    @ignore_warnings(category=RemovedInDjango110Warning)
    @override_settings(ALLOWED_HOSTS=['adminsite.com'])
    def test_admin_reset(self):
        "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
        response = self.client.post('/admin_password_reset/',
            {'email': 'staffmember@example.com'},
            HTTP_HOST='adminsite.com'
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn("http://adminsite.com", mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails"
        # This attack is based on the way browsers handle URLs. The colon
        # should be used to separate the port, but if the URL contains an @,
        # the colon is interpreted as part of a username for login purposes,
        # making 'evil.com' the request domain. Since HTTP_HOST is used to
        # produce a meaningful reset URL, we need to be certain that the
        # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
        # is invoked, but we check here as a practical consequence.
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
            response = self.client.post(
                '/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)
            self.assertEqual(len(logger_calls), 1)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
            response = self.client.post(
                '/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)
            self.assertEqual(len(logger_calls), 1)

    def _test_confirm_start(self):
        # Start by creating the email
        self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Extract the (full_url, path) of the reset link from the email body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]

        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existent user, not a 404
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]

        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))

        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'x'})
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_reset_redirect_default(self):
        response = self.client.post('/password_reset/',
            {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/done/')

    def test_reset_custom_redirect(self):
        response = self.client.post('/password_reset/custom_redirect/',
            {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_reset_custom_redirect_named(self):
        # success_url given as a named URL pattern should also work.
        response = self.client.post('/password_reset/custom_redirect/named/',
            {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')

    def test_confirm_redirect_default(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/reset/done/')

    def test_confirm_redirect_custom(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/')
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_confirm_redirect_custom_named(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/named/')
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')

    def test_confirm_display_user_from_form(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)

        # #16919 -- The ``password_reset_confirm`` view should pass the user
        # object to the ``SetPasswordForm``, even on GET requests.
        # For this test, we render ``{{ form.user }}`` in the template
        # ``registration/password_reset_confirm.html`` so that we can test this.
        username = User.objects.get(email='staffmember@example.com').username
        self.assertContains(response, "Hello, %s." % username)

        # However, the view should NOT pass any user object on a form if the
        # password reset link was invalid.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "Hello, .")
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """Password reset flow with a custom AUTH_USER_MODEL (email-based)."""
    user_email = 'staffmember@example.com'

    @classmethod
    def setUpTestData(cls):
        # Same SHA1 hash of "password" as the default-User fixtures.
        cls.u1 = CustomUser.custom_objects.create(
            password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
            last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), email='staffmember@example.com', is_active=True,
            is_admin=False, date_of_birth=datetime.date(1976, 11, 8)
        )

    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': self.user_email})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        # Extract the (full_url, path) of the reset link from the email body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
        # then submit a new password
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
    """Same flow as CustomUserPasswordResetTest, but with a UUID-pk user."""

    def _test_confirm_start(self):
        # No fixture exists for UUIDUser: create the account on demand,
        # then delegate the rest of the flow to the parent class.
        UUIDUser.objects.create_user(username='foo', password='foo', email=self.user_email)
        return super(UUIDUserPasswordResetTest, self)._test_confirm_start()
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the /password_change/ view: validation, success and redirects."""

    def fail_login(self, password='password'):
        """Attempt a login that must be rejected and check the form error shown."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        # NOTE(review): relies on a two-argument assertFormError helper,
        # presumably overridden on AuthViewsTestCase -- confirm in base class.
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # Old password no longer works, new one does.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_change/done/')

    @override_settings(LOGIN_URL='/login/')
    def test_password_change_done_fails(self):
        # Anonymous access to the done page redirects to LOGIN_URL.
        response = self.client.get('/password_change/done/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/?next=/password_change/done/')

    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_change/done/')

    def test_password_change_redirect_custom(self):
        self.login()
        response = self.client.post('/password_change/custom/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/custom/')

    def test_password_change_redirect_custom_named(self):
        self.login()
        response = self.client.post('/password_change/custom/named/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')
@modify_settings(MIDDLEWARE_CLASSES={
    'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
})
class SessionAuthenticationTests(AuthViewsTestCase):
    """Tests that run with SessionAuthenticationMiddleware enabled."""

    def test_user_password_change_updates_session(self):
        """
        #21649 - Ensure contrib.auth.views.password_change updates the user's
        session auth hash after a password change so the session isn't logged out.
        """
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # if the hash isn't updated, retrieving the redirection page will fail.
        self.assertRedirects(response, '/password_change/done/')
class LoginTest(AuthViewsTestCase):
    """Tests for the login view: template context, open-redirect protection,
    CSRF token rotation and session-fixation countermeasures."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)
        if apps.is_installed('django.contrib.sites'):
            Site = apps.get_model('sites.Site')
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            # Fallback used when the sites framework is not installed.
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertIsInstance(response.context['form'], AuthenticationForm)

    def test_security_check(self, password='password'):
        """Off-site ?next= targets are rejected; same-host targets pass."""
        login_url = reverse('login')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'http:///example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '///example.com',
                        '//example.com',
                        'javascript:alert("XSS")'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertNotIn(bad_url, response.url,
                             "%s should be blocked" % bad_url)

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https://testserver/',
                         'HTTPS://testserver/',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertIn(good_url, response.url, "%s should be allowed" % good_url)

    def test_login_form_contains_request(self):
        # 15198
        self.client.post('/custom_requestauth_login/', {
            'username': 'testclient',
            'password': 'password',
        }, follow=True)
        # the custom authentication form used by this login asserts
        # that a request is passed to the form successfully.

    def test_login_csrf_rotate(self, password='password'):
        """
        Makes sure that a login rotates the currently-used CSRF token.
        """
        # Do a GET to establish a CSRF token
        # TestClient isn't used here as we're testing middleware, essentially.
        req = HttpRequest()
        CsrfViewMiddleware().process_view(req, login_view, (), {})
        # get_token() triggers CSRF token inclusion in the response
        get_token(req)
        resp = login_view(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value

        # Prepare the POST request
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {'username': 'testclient', 'password': password, 'csrfmiddlewaretoken': token1}

        # Use POST request to log in
        SessionMiddleware().process_request(req)
        CsrfViewMiddleware().process_view(req, login_view, (), {})
        req.META["SERVER_NAME"] = "testserver"  # Required to have redirect work in login view
        req.META["SERVER_PORT"] = 80
        resp = login_view(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)
        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value

        # Check the CSRF token switched
        self.assertNotEqual(token1, token2)

    def test_session_key_flushed_on_login(self):
        """
        To avoid reusing another user's session, ensure a new, empty session is
        created if the existing session corresponds to a different authenticated
        user.
        """
        self.login()
        original_session_key = self.client.session.session_key

        self.login(username='staff')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_session_key_flushed_on_login_after_password_change(self):
        """
        As above, but same user logging in after a password change.
        """
        self.login()
        original_session_key = self.client.session.session_key

        # If no password change, session key should not be flushed.
        self.login()
        self.assertEqual(original_session_key, self.client.session.session_key)

        user = User.objects.get(username='testclient')
        user.set_password('foobar')
        user.save()

        self.login(password='foobar')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_login_session_without_hash_session_key(self):
        """
        Session without django.contrib.auth.HASH_SESSION_KEY should login
        without an exception.
        """
        user = User.objects.get(username='testclient')
        engine = import_module(settings.SESSION_ENGINE)
        session = engine.SessionStore()
        session[SESSION_KEY] = user.id
        session.save()
        original_session_key = session.session_key
        self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key

        self.login()
        self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for settings.LOGIN_URL."""

    def assertLoginURLEquals(self, url, parse_qs=False):
        # A protected page must redirect an anonymous user to *url*.
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, url, parse_qs=parse_qs)

    @override_settings(LOGIN_URL='/login/')
    def test_standard_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='login')
    def test_named_login_url(self):
        # LOGIN_URL may be a URL pattern name; it gets reversed.
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login')
    def test_remote_login_url(self):
        # A fully-qualified LOGIN_URL gets the absolute next URL appended.
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='https:///login/')
    def test_https_login_url(self):
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'https:///login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='/login/?pretty=1')
    def test_login_url_with_querystring(self):
        self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/', parse_qs=True)

    @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
    def test_remote_login_url_with_next_querystring(self):
        # An existing ?next= in LOGIN_URL is replaced with the real next URL.
        quoted_next = urlquote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_lazy_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
    """Tests for settings.LOGIN_REDIRECT_URL."""

    def assertLoginRedirectURLEqual(self, url):
        # A successful login must redirect to *url*.
        response = self.login()
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, url)

    def test_default(self):
        self.assertLoginRedirectURLEqual('/accounts/profile/')

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_custom(self):
        self.assertLoginRedirectURLEqual('/custom/')

    @override_settings(LOGIN_REDIRECT_URL='password_reset')
    def test_named(self):
        # URL pattern names are reversed.
        self.assertLoginRedirectURLEqual('/password_reset/')

    @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
    def test_remote(self):
        self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
    """Tests for the redirect_to_login view"""

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy(self):
        login_redirect_response = redirect_to_login(next='/else/where/')
        expected = '/login/?next=/else/where/'
        self.assertEqual(expected, login_redirect_response.url)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy_and_unicode(self):
        # Non-ASCII next URLs are percent-encoded in the redirect.
        login_redirect_response = redirect_to_login(next='/else/where/झ/')
        expected = '/login/?next=/else/where/%E0%A4%9D/'
        self.assertEqual(expected, login_redirect_response.url)
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: redirects, next_page handling and safety."""

    def confirm_logged_out(self):
        # The auth session key must be gone after logout.
        self.assertNotIn(SESSION_KEY, self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertIn('site', response.context)

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')

        # An explicit ?next= overrides the view's next_page argument.
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/')

        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/login/')
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/somewhere/')
        self.confirm_logged_out()

    def test_logout_with_named_redirect(self):
        "Logout resolves names or URLs passed as next_page."
        self.login()
        response = self.client.get('/logout/next_page/named/')
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(response.url, '/password_reset/')
        self.confirm_logged_out()

    def test_security_check(self, password='password'):
        """Off-site ?next= targets are rejected on logout; safe ones pass."""
        logout_url = reverse('logout')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'http:///example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '///example.com',
                        '//example.com',
                        'javascript:alert("XSS")'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urlquote(bad_url),
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            self.assertNotIn(bad_url, response.url,
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https://testserver/',
                         'HTTPS://testserver/',
                         '//testserver/',
                         '/url%20with%20spaces/'):  # see ticket #12534
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urlquote(good_url),
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
            self.confirm_logged_out()

    def test_logout_preserve_language(self):
        """Check that language stored in session is preserved after logout"""
        # Create a new session with language
        engine = import_module(settings.SESSION_ENGINE)
        session = engine.SessionStore()
        session[LANGUAGE_SESSION_KEY] = 'pl'
        session.save()
        self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key

        self.client.get('/logout/')
        self.assertEqual(self.client.session[LANGUAGE_SESSION_KEY], 'pl')
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@modify_settings(MIDDLEWARE_CLASSES={
    'append': 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
})
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='auth_tests.urls_admin',
)
class ChangelistTests(AuthViewsTestCase):
    """Tests for the admin user changelist/change views."""

    def setUp(self):
        # Make me a superuser before logging in.
        User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
        self.login()
        # self.u1 presumably comes from the AuthViewsTestCase fixture -- confirm.
        self.admin = User.objects.get(pk=self.u1.pk)

    def get_user_data(self, user):
        """Build the POST payload the admin change form expects for *user*."""
        return {
            'username': user.username,
            'password': user.password,
            'email': user.email,
            'is_active': user.is_active,
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
            'last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'last_login_1': user.last_login.strftime('%H:%M:%S'),
            'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
            'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    # #20078 - users shouldn't be allowed to guess password hashes via
    # repeated password__startswith queries.
    def test_changelist_disallows_password_lookups(self):
        # A lookup that tries to filter on password isn't OK
        with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as logger_calls:
            response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(logger_calls), 1)

    def test_user_change_email(self):
        data = self.get_user_data(self.admin)
        data['email'] = 'new_' + data['email']
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            data
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'Changed email.')

    def test_user_not_change(self):
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            self.get_user_data(self.admin)
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'No fields changed.')

    def test_user_change_password(self):
        user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
        password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))

        response = self.client.get(user_change_url)
        # Test the link inside password field help_text.
        rel_link = re.search(
            r'you can change the password using <a href="([^"]*)">this form</a>',
            force_text(response.content)
        ).groups()[0]
        self.assertEqual(
            os.path.normpath(user_change_url + rel_link),
            os.path.normpath(password_change_url)
        )

        response = self.client.post(
            password_change_url,
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.change_message, 'Changed password.')
        self.logout()
        self.login(password='password1')

    def test_user_change_different_user_password(self):
        u = User.objects.get(email='staffmember@example.com')
        response = self.client.post(
            reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, self.admin.pk)
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.change_message, 'Changed password.')

    def test_password_change_bad_url(self):
        response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
        self.assertEqual(response.status_code, 404)
@override_settings(
    AUTH_USER_MODEL='auth.UUIDUser',
    ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
    """Admin password-change flow with a UUID-keyed custom user model."""

    def test_admin_password_change(self):
        u = UUIDUser.objects.create_superuser(username='uuid', email='foo@bar.com', password='test')
        self.assertTrue(self.client.login(username='uuid', password='test'))

        user_change_url = reverse('custom_user_admin:auth_uuiduser_change', args=(u.pk,))
        response = self.client.get(user_change_url)
        self.assertEqual(response.status_code, 200)

        password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
        response = self.client.get(password_change_url)
        self.assertEqual(response.status_code, 200)

        # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
        with connection.constraint_checks_disabled():
            response = self.client.post(password_change_url, {
                'password1': 'password1',
                'password2': 'password1',
            })
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.change_message, 'Changed password.')
| bsd-3-clause |
schwartzmx/ansible-modules-extras | packaging/os/svr4pkg.py | 51 | 7426 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Boyd Adamson <boyd () boydadamson.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: svr4pkg
short_description: Manage Solaris SVR4 packages
description:
- Manages SVR4 packages on Solaris 10 and 11.
- These were the native packages on Solaris <= 10 and are available
as a legacy feature in Solaris 11.
- Note that this is a very basic packaging system. It will not enforce
dependencies on install or remove.
version_added: "0.9"
author: Boyd Adamson
options:
name:
description:
- Package name, e.g. C(SUNWcsr)
required: true
state:
description:
- Whether to install (C(present)), or remove (C(absent)) a package.
- If the package is to be installed, then I(src) is required.
- The SVR4 package system doesn't provide an upgrade operation. You need to uninstall the old, then install the new package.
required: true
choices: ["present", "absent"]
src:
description:
- Specifies the location to install the package from. Required when C(state=present).
- "Can be any path acceptable to the C(pkgadd) command's C(-d) option. e.g.: C(somefile.pkg), C(/dir/with/pkgs), C(http:/server/mypkgs.pkg)."
- If using a file or directory, they must already be accessible by the host. See the M(copy) module for a way to get them there.
proxy:
description:
- HTTP[s] proxy to be used if C(src) is a URL.
response_file:
description:
- Specifies the location of a response file to be used if package expects input on install. (added in Ansible 1.4)
required: false
zone:
description:
- Whether to install the package only in the current zone, or install it into all zones.
- The installation into all zones works only if you are working with the global zone.
required: false
default: "all"
choices: ["current", "all"]
version_added: "1.6"
category:
description:
- Install/Remove category instead of a single package.
required: false
choices: ["true", "false"]
version_added: "1.6"
'''
EXAMPLES = '''
# Install a package from an already copied file
- svr4pkg: name=CSWcommon src=/tmp/cswpkgs.pkg state=present
# Install a package directly from an http site
- svr4pkg: name=CSWpkgutil src=http://get.opencsw.org/now state=present zone=current
# Install a package with a response file
- svr4pkg: name=CSWggrep src=/tmp/third-party.pkg response_file=/tmp/ggrep.response state=present
# Ensure that a package is not installed.
- svr4pkg: name=SUNWgnome-sound-recorder state=absent
# Ensure that a category is not installed.
- svr4pkg: name=FIREFOX state=absent category=true
'''
import os
import tempfile
def package_installed(module, name, category):
    """Return True if the SVR4 package (or, with *category*, any package in
    that category) *name* is installed.

    Uses ``pkginfo -q``, which exits 0 when a matching package is present.
    """
    cmd = [module.get_bin_path('pkginfo', True), '-q']
    if category:
        # -c matches on package category rather than package instance name.
        cmd.append('-c')
    cmd.append(name)
    # Pass the argv list directly instead of ' '.join(...): joining and
    # re-splitting breaks names containing whitespace or shell metacharacters.
    rc, out, err = module.run_command(cmd)
    return rc == 0
def create_admin_file():
    """Write a pkgadd/pkgrm admin file that answers every prompt
    non-interactively.

    Returns the path of the temporary file; callers are responsible for
    deleting it once the pkgadd/pkgrm run has finished.
    """
    (desc, filename) = tempfile.mkstemp(prefix='ansible_svr4pkg', text=True)
    fullauto = '''
mail=
instance=unique
partial=nocheck
runlevel=quit
idepend=nocheck
rdepend=nocheck
space=quit
setuid=nocheck
conflict=nocheck
action=nocheck
networktimeout=60
networkretries=3
authentication=quit
keystore=/var/sadm/security
proxy=
basedir=default
'''
    # os.write() requires bytes on Python 3; .encode('ascii') is a no-op
    # round-trip on Python 2, so this stays compatible with both.
    os.write(desc, fullauto.encode('ascii'))
    os.close(desc)
    return filename
def run_command(module, cmd):
    """Run *cmd* (an argv list) after resolving cmd[0] to its absolute path.

    get_bin_path(..., True) makes the module fail when the program is
    missing, so no extra error handling is needed here.
    """
    cmd[0] = module.get_bin_path(cmd[0], True)
    return module.run_command(cmd)
def package_install(module, name, src, proxy, response_file, zone, category):
    """Install package or category *name* from *src* with pkgadd.

    Builds a non-interactive admin file, runs pkgadd, and always removes
    the temporary admin file afterwards. Returns the (rc, out, err) tuple
    from the pkgadd run.
    """
    adminfile = create_admin_file()
    cmd = ['pkgadd', '-n']
    if zone == 'current':
        cmd.append('-G')  # restrict installation to the current zone
    cmd.extend(['-a', adminfile, '-d', src])
    if proxy is not None:
        cmd.extend(['-x', proxy])
    if response_file is not None:
        cmd.extend(['-r', response_file])
    if category:
        cmd.append('-Y')  # treat *name* as a category, not a package
    cmd.append(name)
    result = run_command(module, cmd)
    os.unlink(adminfile)
    return result
def package_uninstall(module, name, src, category):
    """Remove package (or, with *category*, category) *name* using pkgrm.

    Runs non-interactively via a temporary admin file, which is always
    cleaned up. Returns the (rc, out, err) tuple from the pkgrm run.
    """
    adminfile = create_admin_file()
    cmd = ['pkgrm', '-na', adminfile]
    if category:
        cmd.append('-Y')  # *name* designates a category
    cmd.append(name)
    result = run_command(module, cmd)
    os.unlink(adminfile)
    return result
def main():
    """Module entry point: converge the package to the requested state and
    report changed/failed based on the pkgadd/pkgrm exit code."""
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required = True),
            state = dict(required = True, choices=['present', 'absent']),
            src = dict(default = None),
            proxy = dict(default = None),
            response_file = dict(default = None),
            zone = dict(required=False, default = 'all', choices=['current','all']),
            category = dict(default=False, type='bool')
        ),
        supports_check_mode=True
    )
    state = module.params['state']
    name = module.params['name']
    src = module.params['src']
    proxy = module.params['proxy']
    response_file = module.params['response_file']
    zone = module.params['zone']
    category = module.params['category']
    # rc stays None when no command ran (package already in desired state),
    # which falls through to changed=False / failed=False below.
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = name
    result['state'] = state

    if state == 'present':
        if src is None:
            module.fail_json(name=name,
                             msg="src is required when state=present")
        if not package_installed(module, name, category):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_install(module, name, src, proxy, response_file, zone, category)
            # Stdout is normally empty but for some packages can be
            # very long and is not often useful
            if len(out) > 75:
                out = out[:75] + '...'

    elif state == 'absent':
        if package_installed(module, name, category):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_uninstall(module, name, src, category)
            out = out[:75]

    # Success, Warning, Interruption, Reboot all, Reboot this return codes
    if rc in (0, 2, 3, 10, 20):
        result['changed'] = True
    # no install nor uninstall, or failed
    else:
        result['changed'] = False

    # Fatal error, Administration, Administration Interaction return codes
    if rc in (1, 4 , 5):
        result['failed'] = True
    else:
        result['failed'] = False

    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err

    module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
idrogeno/FusionOE | lib/python/Screens/StreamingClientsInfo.py | 9 | 1536 | from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Converter.ClientsStreaming import ClientsStreaming
import skin
import gettext
from Components.Sources.StaticText import StaticText
class StreamingClientsInfo(Screen):
    """Enigma2 screen listing the clients currently streaming from this STB."""
    skin ="""<screen name="StreamingClientsInfo" position="center,center" size="600,500">
<eLabel position="center,117" zPosition="-2" size="600,500" backgroundColor="#25062748" />
<widget source="Title" render="Label" position="center,126" size="580,44" font="Regular; 35" valign="top" zPosition="0" backgroundColor="#25062748" halign="center" />
<widget source="total" render="Label" position="center,174" size="580,50" zPosition="1" font="Regular; 22" halign="left" backgroundColor="#25062748" valign="center" />
<widget source="liste" render="Label" position="center,234" size="580,370" zPosition="1" noWrap="1" font="Regular; 20" valign="top" />
</screen>"""

    def __init__(self, session):
        Screen.__init__(self, session)
        self.setTitle(_("Streaming clients info"))
        # ClientsStreaming("NUMBER") returns the client count as text.
        if ClientsStreaming("NUMBER").getText() == "0":
            self["total"] = StaticText( _("No streaming Channel from this STB at this moment") )
            text = ""
        else:
            self["total"] = StaticText( _("Total Clients streaming: ") + ClientsStreaming("NUMBER").getText())
            text = ClientsStreaming("EXTRA_INFO").getText()
        self["liste"] = StaticText(text)
        # Both OK and EXIT simply close the screen.
        self["actions"] = ActionMap(["ColorActions", "SetupActions", "DirectionActions"],
        {
            "cancel": self.close,
            "ok": self.close
        })
ryfeus/lambda-packs | Tensorflow_OpenCV_Nightly/source/google/protobuf/internal/unknown_fields_test.py | 32 | 13868 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for preservation of unknown fields in the pure Python implementation."""
__author__ = 'bohdank@google.com (Bohdan Koval)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import encoder
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf.internal import missing_enum_values_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import testing_refleaks
from google.protobuf.internal import type_checkers
BaseTestCase = testing_refleaks.BaseTestCase
# CheckUnknownField() cannot be used by the C++ implementation because it
# accesses protected members. This is not a behavior difference between the
# Python and C++ implementations.
def SkipCheckUnknownFieldIfCppImplementation(func):
  """Decorator: skip *func* when running under the C++ v2 implementation."""
  running_cpp_v2 = (api_implementation.Type() == 'cpp'
                    and api_implementation.Version() == 2)
  skip_decorator = unittest.skipIf(
      running_cpp_v2,
      'Addtional test for pure python involved protect members')
  return skip_decorator(func)
class UnknownFieldsTest(BaseTestCase):
  def setUp(self):
    """Serialize a fully-populated TestAllTypes message, then parse it into
    an empty message type so every field lands in the unknown-field set."""
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)
  def testSerialize(self):
    """Unknown fields must round-trip through serialization unchanged."""
    data = self.empty_message.SerializeToString()

    # Don't use assertEqual because we don't want to dump raw binary data to
    # stdout.
    self.assertTrue(data == self.all_fields_data)
  def expectSerializeProto3(self, preserve):
    """Check that proto3 unknown-field retention matches *preserve*."""
    message = unittest_proto3_arena_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    if preserve:
      self.assertEqual(self.all_fields_data, message.SerializeToString())
    else:
      # Without preservation, all unknown fields are dropped on reserialize.
      self.assertEqual(0, len(message.SerializeToString()))
  def testSerializeProto3(self):
    # Verify that proto3 unknown fields behavior.
    default_preserve = (api_implementation
                        .GetPythonProto3PreserveUnknownsDefault())
    self.expectSerializeProto3(default_preserve)
    # Flip the flag, verify the opposite behavior, then restore the default
    # so other tests are unaffected.
    api_implementation.SetPythonProto3PreserveUnknownsDefault(
        not default_preserve)
    self.expectSerializeProto3(not default_preserve)
    api_implementation.SetPythonProto3PreserveUnknownsDefault(default_preserve)
  def testByteSize(self):
    """Unknown fields count towards ByteSize() just like known ones."""
    self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())
def testListFields(self):
# Make sure ListFields doesn't return unknown fields.
self.assertEqual(0, len(self.empty_message.ListFields()))
def testSerializeMessageSetWireFormatUnknownExtension(self):
# Create a message using the message set wire format with an unknown
# message.
raw = unittest_mset_pb2.RawMessageSet()
# Add an unknown extension.
item = raw.item.add()
item.type_id = 98418603
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
message1.i = 12345
item.message = message1.SerializeToString()
serialized = raw.SerializeToString()
# Parse message using the message set wire format.
proto = message_set_extensions_pb2.TestMessageSet()
proto.MergeFromString(serialized)
# Verify that the unknown extension is serialized unchanged
reserialized = proto.SerializeToString()
new_raw = unittest_mset_pb2.RawMessageSet()
new_raw.MergeFromString(reserialized)
self.assertEqual(raw, new_raw)
def testEquals(self):
message = unittest_pb2.TestEmptyMessage()
message.ParseFromString(self.all_fields_data)
self.assertEqual(self.empty_message, message)
self.all_fields.ClearField('optional_string')
message.ParseFromString(self.all_fields.SerializeToString())
self.assertNotEqual(self.empty_message, message)
def testDiscardUnknownFields(self):
self.empty_message.DiscardUnknownFields()
self.assertEqual(b'', self.empty_message.SerializeToString())
# Test message field and repeated message field.
message = unittest_pb2.TestAllTypes()
other_message = unittest_pb2.TestAllTypes()
other_message.optional_string = 'discard'
message.optional_nested_message.ParseFromString(
other_message.SerializeToString())
message.repeated_nested_message.add().ParseFromString(
other_message.SerializeToString())
self.assertNotEqual(
b'', message.optional_nested_message.SerializeToString())
self.assertNotEqual(
b'', message.repeated_nested_message[0].SerializeToString())
message.DiscardUnknownFields()
self.assertEqual(b'', message.optional_nested_message.SerializeToString())
self.assertEqual(
b'', message.repeated_nested_message[0].SerializeToString())
class UnknownFieldsAccessorsTest(BaseTestCase):
  """Tests reading, copying, merging and clearing unknown fields."""

  def setUp(self):
    # Parse a fully-populated TestAllTypes into TestEmptyMessage so that
    # every field ends up in the unknown-field set.
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)

  # CheckUnknownField() is an additional Pure Python check which checks
  # a detail of unknown fields. It cannot be used by the C++
  # implementation because some protected members are called.
  # The test is added for historical reasons. It is not necessary as
  # serialized string is checked.

  def CheckUnknownField(self, name, expected_value):
    # Re-decode the raw bytes stored for *name* in the (pure-python,
    # protected) _unknown_fields list and compare with the original value.
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.empty_message._unknown_fields:
      if tag_bytes == field_tag:
        decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes][0]
        decoder(value, 0, len(value), self.all_fields, result_dict)
    self.assertEqual(expected_value, result_dict[field_descriptor])

  @SkipCheckUnknownFieldIfCppImplementation
  def testCheckUnknownFieldValue(self):
    # Spot-check one field per wire type.
    # Test enum.
    self.CheckUnknownField('optional_nested_enum',
                           self.all_fields.optional_nested_enum)
    # Test repeated enum.
    self.CheckUnknownField('repeated_nested_enum',
                           self.all_fields.repeated_nested_enum)

    # Test varint.
    self.CheckUnknownField('optional_int32',
                           self.all_fields.optional_int32)
    # Test fixed32.
    self.CheckUnknownField('optional_fixed32',
                           self.all_fields.optional_fixed32)

    # Test fixed64.
    self.CheckUnknownField('optional_fixed64',
                           self.all_fields.optional_fixed64)

    # Test length-delimited.
    self.CheckUnknownField('optional_string',
                           self.all_fields.optional_string)

    # Test group.
    self.CheckUnknownField('optionalgroup',
                           self.all_fields.optionalgroup)

  def testCopyFrom(self):
    # CopyFrom must carry unknown fields along.
    message = unittest_pb2.TestEmptyMessage()
    message.CopyFrom(self.empty_message)
    self.assertEqual(message.SerializeToString(), self.all_fields_data)

  def testMergeFrom(self):
    # MergeFrom must merge unknown fields like known ones: on reparse the
    # merged-in scalar value wins, fields absent from the source are kept.
    message = unittest_pb2.TestAllTypes()
    message.optional_int32 = 1
    message.optional_uint32 = 2
    source = unittest_pb2.TestEmptyMessage()
    source.ParseFromString(message.SerializeToString())

    message.ClearField('optional_int32')
    message.optional_int64 = 3
    message.optional_uint32 = 4
    destination = unittest_pb2.TestEmptyMessage()
    destination.ParseFromString(message.SerializeToString())

    destination.MergeFrom(source)
    # Check that the fields were correctly merged, even stored in the unknown
    # fields set.
    message.ParseFromString(destination.SerializeToString())
    self.assertEqual(message.optional_int32, 1)
    self.assertEqual(message.optional_uint32, 2)
    self.assertEqual(message.optional_int64, 3)

  def testClear(self):
    self.empty_message.Clear()
    # All cleared, even unknown fields.
    self.assertEqual(self.empty_message.SerializeToString(), b'')

  def testUnknownExtensions(self):
    # Unknown data parses identically whether or not the target message
    # declares extension ranges.
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(message.SerializeToString(), self.all_fields_data)
class UnknownEnumValuesTest(BaseTestCase):
  """Tests proto2 behavior for enum wire values missing from the schema."""

  def setUp(self):
    self.descriptor = missing_enum_values_pb2.TestEnumValues.DESCRIPTOR

    self.message = missing_enum_values_pb2.TestEnumValues()
    # TestEnumValues.ZERO = 0, but does not exist in the other NestedEnum.
    self.message.optional_nested_enum = (
        missing_enum_values_pb2.TestEnumValues.ZERO)
    self.message.repeated_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
    ])
    self.message.packed_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
    ])
    self.message_data = self.message.SerializeToString()
    # Parsing into TestMissingEnumValues drops the unrecognized enum wire
    # values into the unknown-field set.
    self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
    self.missing_message.ParseFromString(self.message_data)

  # CheckUnknownField() is an additional Pure Python check which checks
  # a detail of unknown fields. It cannot be used by the C++
  # implementation because some protected members are called.
  # The test is added for historical reasons. It is not necessary as
  # serialized string is checked.

  def CheckUnknownField(self, name, expected_value):
    # Re-decode the raw unknown bytes for *name* with the real decoder and
    # compare against the value originally set on self.message.
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.missing_message._unknown_fields:
      if tag_bytes == field_tag:
        decoder = missing_enum_values_pb2.TestEnumValues._decoders_by_tag[
            tag_bytes][0]
        decoder(value, 0, len(value), self.message, result_dict)
    self.assertEqual(expected_value, result_dict[field_descriptor])

  def testUnknownParseMismatchEnumValue(self):
    just_string = missing_enum_values_pb2.JustString()
    just_string.dummy = 'blah'

    missing = missing_enum_values_pb2.TestEnumValues()
    # The parse is invalid, storing the string proto into the set of
    # unknown fields.
    missing.ParseFromString(just_string.SerializeToString())

    # Fetching the enum field shouldn't crash, instead returning the
    # default value.
    self.assertEqual(missing.optional_nested_enum, 0)

  def testUnknownEnumValue(self):
    self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
    # NOTE(review): 2 is presumably the declared default of
    # optional_nested_enum in TestMissingEnumValues — confirm in the .proto.
    self.assertEqual(self.missing_message.optional_nested_enum, 2)
    # Clear does not do anything.
    serialized = self.missing_message.SerializeToString()
    self.missing_message.ClearField('optional_nested_enum')
    self.assertEqual(self.missing_message.SerializeToString(), serialized)

  def testUnknownRepeatedEnumValue(self):
    # Unrecognized repeated values are not exposed through the field.
    self.assertEqual([], self.missing_message.repeated_nested_enum)

  def testUnknownPackedEnumValue(self):
    # Same for packed repeated enums.
    self.assertEqual([], self.missing_message.packed_nested_enum)

  @SkipCheckUnknownFieldIfCppImplementation
  def testCheckUnknownFieldValueForEnum(self):
    self.CheckUnknownField('optional_nested_enum',
                           self.message.optional_nested_enum)
    self.CheckUnknownField('repeated_nested_enum',
                           self.message.repeated_nested_enum)
    self.CheckUnknownField('packed_nested_enum',
                           self.message.packed_nested_enum)

  def testRoundTrip(self):
    # Serializing the message holding unknowns must reproduce the original.
    new_message = missing_enum_values_pb2.TestEnumValues()
    new_message.ParseFromString(self.missing_message.SerializeToString())
    self.assertEqual(self.message, new_message)
# Standard unittest entry point when the file is run as a script.
if __name__ == '__main__':
  unittest.main()
| mit |
tiagocardosos/stoq | stoqlib/gui/test/test_loandetails.py | 2 | 1257 | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
from stoqlib.gui.test.uitestutils import GUITest
from stoqlib.gui.dialogs.loandetails import LoanDetailsDialog
class TestLoanDetails(GUITest):
    """UI snapshot test for LoanDetailsDialog."""

    def test_create(self):
        # Build a loan carrying one item and compare the rendered dialog
        # against the stored UI snapshot.
        sample_loan = self.create_loan()
        self.create_loan_item(loan=sample_loan)

        details_dialog = LoanDetailsDialog(self.store, sample_loan)
        self.check_dialog(details_dialog, 'dialog-loan-details-create')
| gpl-2.0 |
jeremiahyan/odoo | addons/google_calendar/utils/google_event.py | 1 | 8268 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.api import model
from odoo.tools import email_normalize
from odoo.tools.sql import existing_tables
import pytz
import logging
from typing import Iterator, Mapping
from collections import abc
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from odoo import _
_logger = logging.getLogger(__name__)
class GoogleEvent(abc.Set):
    """This helper class holds the values of a Google event.

    Inspired by Odoo recordset, one instance can be a single Google event or a
    (immutable) set of Google events.
    All usual set operations are supported (union, intersection, etc).

    A list of all attributes can be found in the API documentation.
    https://developers.google.com/calendar/v3/reference/events#resource

    :param iterable: iterable of GoogleCalendar instances or iterable of dictionnaries
    """

    def __init__(self, iterable=()):
        # Internal storage: {google_id: raw event dict}.
        self._events = {}
        for item in iterable:
            if isinstance(item, self.__class__):
                self._events[item.id] = item._events[item.id]
            elif isinstance(item, Mapping):
                self._events[item.get('id')] = item
            else:
                raise ValueError("Only %s or iterable of dict are supported" % self.__class__.__name__)

    def __iter__(self) -> Iterator['GoogleEvent']:
        # Yield one singleton GoogleEvent per stored event dict.
        return iter(GoogleEvent([vals]) for vals in self._events.values())

    def __contains__(self, google_event):
        # Membership is by Google event id.
        return google_event.id in self._events

    def __len__(self):
        return len(self._events)

    def __bool__(self):
        return bool(self._events)

    def __getattr__(self, name):
        """Singleton attribute access: return the raw value stored under
        *name* (None when absent), raising if this is not a singleton."""
        # ensure_one: unpacking fails unless exactly one event is held.
        # Fix: reuse the unpacked key instead of the old dead re-computation
        # via list(self._events.keys())[0].
        try:
            event_id, = self._events.keys()
        except ValueError:
            raise ValueError("Expected singleton: %s" % self)
        return self._events[event_id].get(name)

    def __repr__(self):
        return '%s%s' % (self.__class__.__name__, self.ids)

    @property
    def ids(self):
        """Tuple of the Google ids of all held events."""
        return tuple(e.id for e in self)

    @property
    def rrule(self):
        """Return the RRULE string (without the "RRULE:" prefix) of a
        recurrent singleton event, or None when there is no recurrence."""
        if self.recurrence:
            # Find the rrule in the list
            rrule = next(rr for rr in self.recurrence if 'RRULE:' in rr)
            return rrule[6:]  # skip "RRULE:" in the rrule string

    def odoo_id(self, env):
        """Return the Odoo database id matching this singleton event."""
        self.odoo_ids(env)  # load ids
        return self._odoo_id

    def _meta_odoo_id(self, dbname):
        """Returns the Odoo id stored in the Google Event metadata.
        This id might not actually exists in the database.
        """
        properties = self.extendedProperties and (self.extendedProperties.get('shared', {}) or self.extendedProperties.get('private', {})) or {}
        o_id = properties.get('%s_odoo_id' % dbname)
        if o_id:
            return int(o_id)

    def odoo_ids(self, env):
        """Resolve and cache the Odoo ids of all events, looking up the
        database first, then falling back to the event metadata."""
        ids = tuple(e._odoo_id for e in self if e._odoo_id)
        if len(ids) == len(self):
            return ids
        model = self._get_model(env)
        found = self._load_odoo_ids_from_db(env, model)
        unsure = self - found
        if unsure:
            unsure._load_odoo_ids_from_metadata(env, model)
        return tuple(e._odoo_id for e in self)

    def _load_odoo_ids_from_metadata(self, env, model):
        # Match events to Odoo records through the ids embedded in the
        # Google extended properties.
        unsure_odoo_ids = tuple(e._meta_odoo_id(env.cr.dbname) for e in self)
        odoo_events = model.browse(_id for _id in unsure_odoo_ids if _id)

        # Extended properties are copied when splitting a recurrence Google side.
        # Hence, we may have two Google recurrences linked to the same Odoo id.
        # Therefore, we only consider Odoo records without google id when trying
        # to match events.
        o_ids = odoo_events.exists().filtered(lambda e: not e.google_id).ids
        for e in self:
            odoo_id = e._meta_odoo_id(env.cr.dbname)
            if odoo_id in o_ids:
                e._events[e.id]['_odoo_id'] = odoo_id

    def _load_odoo_ids_from_db(self, env, model):
        # Match events to Odoo records whose google_id is already set.
        # Returns the subset of self that was found in the database.
        odoo_events = model.with_context(active_test=False)._from_google_ids(self.ids)
        mapping = {e.google_id: e.id for e in odoo_events}  # {google_id: odoo_id}
        existing_google_ids = odoo_events.mapped('google_id')
        for e in self:
            odoo_id = mapping.get(e.id)
            if odoo_id:
                e._events[e.id]['_odoo_id'] = odoo_id
        return self.filter(lambda e: e.id in existing_google_ids)

    def owner(self, env):
        """Return the res.users record owning this singleton event (possibly
        an empty recordset when no owner can be determined)."""
        # Owner/organizer could be desynchronised between Google and Odoo.
        # Let userA, userB be two new users (never synced to Google before).
        # UserA creates an event in Odoo (he is the owner) but userB syncs first.
        # There is no way to insert the event into userA's calendar since we don't have
        # any authentication access. The event is therefore inserted into userB's calendar
        # (he is the organizer in Google). The "real" owner (in Odoo) is stored as an
        # extended property. There is currently no support to "transfer" ownership when
        # userA syncs his calendar the first time.
        real_owner_id = self.extendedProperties and self.extendedProperties.get('shared', {}).get('%s_owner_id' % env.cr.dbname)
        try:
            # If we create an event without user_id, the event properties will be 'false'
            # and python will interpret this as a NoneType, that's why we have the 'except TypeError'
            real_owner_id = int(real_owner_id)
        except (ValueError, TypeError):
            real_owner_id = False
        real_owner = real_owner_id and env['res.users'].browse(real_owner_id) or env['res.users']

        if real_owner_id and real_owner.exists():
            return real_owner
        elif self.organizer and self.organizer.get('self'):
            return env.user
        elif self.organizer and self.organizer.get('email'):
            # In Google: 1 email = 1 user; but in Odoo several users might have the same email :/
            org_email = email_normalize(self.organizer.get('email'))
            return env['res.users'].search([('email_normalized', '=', org_email)], limit=1)
        else:
            return env['res.users']

    def filter(self, func) -> 'GoogleEvent':
        """Return the subset of events for which func(event) is truthy."""
        return GoogleEvent(e for e in self if func(e))

    def clear_type_ambiguity(self, env):
        """Resolve cancelled events whose recurrence status is unknown by
        checking which ones exist as recurrences in the database."""
        ambiguous_events = self.filter(GoogleEvent._is_type_ambiguous)
        recurrences = ambiguous_events._load_odoo_ids_from_db(env, env['calendar.recurrence'])
        for recurrence in recurrences:
            self._events[recurrence.id]['recurrence'] = True
        for event in ambiguous_events - recurrences:
            self._events[event.id]['recurrence'] = False

    def is_recurrence(self):
        """True when this singleton describes a recurrence (has RRULE data)."""
        if self._is_type_ambiguous():
            _logger.warning("Ambiguous event type: cannot accurately tell whether a cancelled event is a recurrence or not")
        return bool(self.recurrence)

    def is_recurrent(self):
        """True when the event is a recurrence or belongs to one."""
        return bool(self.recurringEventId or self.is_recurrence())

    def is_cancelled(self):
        return self.status == 'cancelled'

    def is_recurrence_follower(self):
        """True when the event follows its recurrence's schedule (it was not
        moved away from its original start time)."""
        return bool(not self.originalStartTime or self.originalStartTime == self.start)

    def cancelled(self):
        """Return the subset of cancelled events."""
        return self.filter(lambda e: e.status == 'cancelled')

    def exists(self, env) -> 'GoogleEvent':
        """Return the subset of events that exist as Odoo records."""
        recurrences = self.filter(GoogleEvent.is_recurrence)
        events = self - recurrences
        recurrences.odoo_ids(env)
        events.odoo_ids(env)

        return self.filter(lambda e: e._odoo_id)

    def _is_type_ambiguous(self):
        """For cancelled events/recurrences, Google only send the id and
        the cancelled status. There is no way to know if it was a recurrence
        or simple event."""
        return self.is_cancelled() and 'recurrence' not in self._events[self.id]

    def _get_model(self, env):
        # All events must be of a single kind: recurrences map to
        # calendar.recurrence, plain events to calendar.event.
        if all(e.is_recurrence() for e in self):
            return env['calendar.recurrence']
        if all(not e.is_recurrence() for e in self):
            return env['calendar.event']
        raise TypeError("Mixing Google events and Google recurrences")
| gpl-3.0 |
siutanwong/scikit-learn | sklearn/ensemble/tests/test_forest.py | 48 | 35412 | """
Testing for the forest module (sklearn.ensemble.forest).
"""
# Authors: Gilles Louppe,
# Brian Holt,
# Andreas Mueller,
# Arnaud Joly
# License: BSD 3 clause
import pickle
from collections import defaultdict
from itertools import product
import numpy as np
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_false, assert_true
from sklearn.utils.testing import assert_less, assert_greater
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn import datasets
from sklearn.decomposition import TruncatedSVD
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import RandomTreesEmbedding
from sklearn.grid_search import GridSearchCV
from sklearn.svm import LinearSVC
from sklearn.utils.validation import check_random_state
from sklearn.tree.tree import SPARSE_SPLITTERS
# toy sample: linearly separable two-class data plus three probe points
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]

# also load the iris dataset
# and randomly permute it
iris = datasets.load_iris()
rng = check_random_state(0)
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]

# also load the boston dataset
# and randomly permute it
# NOTE: `rng` is shared with the iris shuffle above, so the boston
# permutation depends on these statements running in this order.
boston = datasets.load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]

# Registries mapping estimator names to classes; the check_* helpers below
# are parametrized over these dicts by the nose-style test generators.
FOREST_CLASSIFIERS = {
    "ExtraTreesClassifier": ExtraTreesClassifier,
    "RandomForestClassifier": RandomForestClassifier,
}

FOREST_REGRESSORS = {
    "ExtraTreesRegressor": ExtraTreesRegressor,
    "RandomForestRegressor": RandomForestRegressor,
}

FOREST_TRANSFORMERS = {
    "RandomTreesEmbedding": RandomTreesEmbedding,
}

# Union of all estimator registries.
FOREST_ESTIMATORS = dict()
FOREST_ESTIMATORS.update(FOREST_CLASSIFIERS)
FOREST_ESTIMATORS.update(FOREST_REGRESSORS)
FOREST_ESTIMATORS.update(FOREST_TRANSFORMERS)
def check_classification_toy(name):
    # Classification sanity check on the tiny hand-made dataset, with and
    # without feature subsampling.
    forest_cls = FOREST_CLASSIFIERS[name]

    model = forest_cls(n_estimators=10, random_state=1)
    model.fit(X, y)
    assert_array_equal(model.predict(T), true_result)
    assert_equal(10, len(model))

    model = forest_cls(n_estimators=10, max_features=1, random_state=1)
    model.fit(X, y)
    assert_array_equal(model.predict(T), true_result)
    assert_equal(10, len(model))

    # also test apply
    leaf_ids = model.apply(X)
    assert_equal(leaf_ids.shape, (len(X), model.n_estimators))
def test_classification_toy():
    # nose-style generator: one sub-test per classifier type.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_classification_toy, clf_name
def check_iris_criterion(name, criterion):
    # Check consistency on dataset iris.
    forest_cls = FOREST_CLASSIFIERS[name]

    model = forest_cls(n_estimators=10, criterion=criterion,
                       random_state=1)
    model.fit(iris.data, iris.target)
    score = model.score(iris.data, iris.target)
    assert_greater(score, 0.9, "Failed with criterion %s and score = %f"
                               % (criterion, score))

    # With heavy feature subsampling the bar is lower.
    model = forest_cls(n_estimators=10, criterion=criterion,
                       max_features=2, random_state=1)
    model.fit(iris.data, iris.target)
    score = model.score(iris.data, iris.target)
    assert_greater(score, 0.5, "Failed with criterion %s and score = %f"
                               % (criterion, score))
def test_iris():
    # Every classifier crossed with both split criteria.
    for clf_name, split_criterion in product(FOREST_CLASSIFIERS,
                                             ("gini", "entropy")):
        yield check_iris_criterion, clf_name, split_criterion
def check_boston_criterion(name, criterion):
    # Check consistency on dataset boston house prices.
    regressor_cls = FOREST_REGRESSORS[name]

    model = regressor_cls(n_estimators=5, criterion=criterion, random_state=1)
    model.fit(boston.data, boston.target)
    score = model.score(boston.data, boston.target)
    assert_greater(score, 0.95, "Failed with max_features=None, criterion %s "
                                "and score = %f" % (criterion, score))

    model = regressor_cls(n_estimators=5, criterion=criterion,
                          max_features=6, random_state=1)
    model.fit(boston.data, boston.target)
    score = model.score(boston.data, boston.target)
    assert_greater(score, 0.95, "Failed with max_features=6, criterion %s "
                                "and score = %f" % (criterion, score))
def test_boston():
    # Every regressor with the mse criterion.
    for reg_name, split_criterion in product(FOREST_REGRESSORS, ("mse", )):
        yield check_boston_criterion, reg_name, split_criterion
def check_regressor_attributes(name):
    # Regression models should not have a classes_ attribute,
    # neither before nor after fitting.
    regressor = FOREST_REGRESSORS[name](random_state=0)
    assert_false(hasattr(regressor, "classes_"))
    assert_false(hasattr(regressor, "n_classes_"))

    regressor.fit([[1, 2, 3], [4, 5, 6]], [1, 2])
    assert_false(hasattr(regressor, "classes_"))
    assert_false(hasattr(regressor, "n_classes_"))
def test_regressor_attributes():
    # One sub-test per regressor type.
    for reg_name in FOREST_REGRESSORS:
        yield check_regressor_attributes, reg_name
def check_probability(name):
    # Probability predictions: rows must sum to one, and predict_log_proba
    # must be the log of predict_proba.
    forest_cls = FOREST_CLASSIFIERS[name]
    with np.errstate(divide="ignore"):
        model = forest_cls(n_estimators=10, random_state=1, max_features=1,
                           max_depth=1)
        model.fit(iris.data, iris.target)
        row_sums = np.sum(model.predict_proba(iris.data), axis=1)
        assert_array_almost_equal(row_sums, np.ones(iris.data.shape[0]))
        assert_array_almost_equal(model.predict_proba(iris.data),
                                  np.exp(model.predict_log_proba(iris.data)))
def test_probability():
    # One sub-test per classifier type.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_probability, clf_name
def check_importances(name, X, y):
    # Check variable importances: one value per feature, the 3 informative
    # features dominate, importance-based selection reduces dimensionality,
    # and importances are invariant under sample-weight rescaling.
    ForestClassifier = FOREST_CLASSIFIERS[name]
    for n_jobs in [1, 2]:
        clf = ForestClassifier(n_estimators=10, n_jobs=n_jobs)
        clf.fit(X, y)
        importances = clf.feature_importances_
        n_important = np.sum(importances > 0.1)
        assert_equal(importances.shape[0], 10)
        assert_equal(n_important, 3)

        X_new = clf.transform(X, threshold="mean")
        # Fix: the old `assert_less(0 < X_new.shape[1], X.shape[1])`
        # compared the *boolean* `0 < X_new.shape[1]` against X.shape[1]
        # due to operator precedence. The intent is two separate bounds
        # on the number of selected features.
        assert_greater(X_new.shape[1], 0)
        assert_less(X_new.shape[1], X.shape[1])

        # Check with sample weights
        sample_weight = np.ones(y.shape)
        sample_weight[y == 1] *= 100

        clf = ForestClassifier(n_estimators=50, n_jobs=n_jobs, random_state=0)
        clf.fit(X, y, sample_weight=sample_weight)
        importances = clf.feature_importances_
        assert_true(np.all(importances >= 0.0))

        # Scaling all weights by a constant must not change importances.
        clf = ForestClassifier(n_estimators=50, n_jobs=n_jobs, random_state=0)
        clf.fit(X, y, sample_weight=3 * sample_weight)
        importances_bis = clf.feature_importances_
        assert_almost_equal(importances, importances_bis)
def test_importances():
    # Synthetic data with exactly 3 informative features out of 10.
    X, y = datasets.make_classification(n_samples=1000, n_features=10,
                                        n_informative=3, n_redundant=0,
                                        n_repeated=0, shuffle=False,
                                        random_state=0)

    for clf_name in FOREST_CLASSIFIERS:
        yield check_importances, clf_name, X, y
def check_unfitted_feature_importances(name):
    # Accessing feature_importances_ before fit() must raise ValueError.
    estimator = FOREST_ESTIMATORS[name](random_state=0)
    assert_raises(ValueError, getattr, estimator, "feature_importances_")
def test_unfitted_feature_importances():
    # One sub-test per estimator type (classifiers, regressors, transformers).
    for estimator_name in FOREST_ESTIMATORS:
        yield check_unfitted_feature_importances, estimator_name
def check_oob_score(name, X, y, n_estimators=20):
    # Check that oob prediction is a good estimation of the generalization
    # error.

    # Proper behavior
    est = FOREST_ESTIMATORS[name](oob_score=True, random_state=0,
                                  n_estimators=n_estimators, bootstrap=True)
    n_samples = X.shape[0]
    # Fit on the first half of the samples, score on the second half.
    est.fit(X[:n_samples // 2, :], y[:n_samples // 2])
    test_score = est.score(X[n_samples // 2:, :], y[n_samples // 2:])

    if name in FOREST_CLASSIFIERS:
        # Classifier: OOB accuracy close to held-out accuracy.
        assert_less(abs(test_score - est.oob_score_), 0.1)
    else:
        # Regressor: train R^2 exceeds the OOB estimate, which must still
        # be reasonably high.
        assert_greater(test_score, est.oob_score_)
        assert_greater(est.oob_score_, .8)

    # Check warning if not enough estimators
    with np.errstate(divide="ignore", invalid="ignore"):
        est = FOREST_ESTIMATORS[name](oob_score=True, random_state=0,
                                      n_estimators=1, bootstrap=True)
        assert_warns(UserWarning, est.fit, X, y)
def test_oob_score():
    # Classifiers: dense input, csc-sparse input, and non-contiguous targets.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_oob_score, clf_name, iris.data, iris.target
        # csc matrix
        yield check_oob_score, clf_name, csc_matrix(iris.data), iris.target
        # non-contiguous targets in classification
        yield check_oob_score, clf_name, iris.data, iris.target * 2 + 1

    # Regressors: dense and csc-sparse input with 50 estimators.
    for reg_name in FOREST_REGRESSORS:
        yield check_oob_score, reg_name, boston.data, boston.target, 50
        # csc matrix
        yield check_oob_score, reg_name, csc_matrix(boston.data), \
            boston.target, 50
def check_oob_score_raise_error(name):
    # Invalid oob_score configurations must raise, and the attribute must
    # not appear on unfitted / non-bootstrap estimators.
    ForestEstimator = FOREST_ESTIMATORS[name]

    if name in FOREST_TRANSFORMERS:
        # Transformers do not accept oob_score at all.
        for oob_score in [True, False]:
            assert_raises(TypeError, ForestEstimator, oob_score=oob_score)

        assert_raises(NotImplementedError, ForestEstimator()._set_oob_score,
                      X, y)

    else:
        # Unfitted / no bootstrap / no oob_score
        for oob_score, bootstrap in [(True, False), (False, True),
                                     (False, False)]:
            est = ForestEstimator(oob_score=oob_score, bootstrap=bootstrap,
                                  random_state=0)
            assert_false(hasattr(est, "oob_score_"))

        # No bootstrap
        assert_raises(ValueError, ForestEstimator(oob_score=True,
                                                  bootstrap=False).fit, X, y)
def test_oob_score_raise_error():
    # One sub-test per estimator type.
    for estimator_name in FOREST_ESTIMATORS:
        yield check_oob_score_raise_error, estimator_name
def check_gridsearch(name):
    # Forests must be usable as GridSearchCV base estimators.
    estimator = FOREST_CLASSIFIERS[name]()
    search = GridSearchCV(estimator,
                          {'n_estimators': (1, 2), 'max_depth': (1, 2)})
    search.fit(iris.data, iris.target)
def test_gridsearch():
    # Check that base trees can be grid-searched.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_gridsearch, clf_name
def check_parallel(name, X, y):
    """Check that fit/predict results are independent of n_jobs.

    Fix: the old docstring claimed this only covered classification, but
    test_parallel runs it for both classifiers and regressors.
    """
    ForestEstimator = FOREST_ESTIMATORS[name]
    forest = ForestEstimator(n_estimators=10, n_jobs=3, random_state=0)

    forest.fit(X, y)
    assert_equal(len(forest), 10)

    # Predictions from 1-job and 2-job prediction must match.
    forest.set_params(n_jobs=1)
    y1 = forest.predict(X)
    forest.set_params(n_jobs=2)
    y2 = forest.predict(X)
    assert_array_almost_equal(y1, y2, 3)
def test_parallel():
    # Parallelism check for classifiers (iris) and regressors (boston).
    for clf_name in FOREST_CLASSIFIERS:
        yield check_parallel, clf_name, iris.data, iris.target

    for reg_name in FOREST_REGRESSORS:
        yield check_parallel, reg_name, boston.data, boston.target
def check_pickle(name, X, y):
    # Check pickability: a pickle round trip must preserve the estimator
    # type and its training score.
    estimator = FOREST_ESTIMATORS[name](random_state=0)
    estimator.fit(X, y)
    original_score = estimator.score(X, y)

    restored = pickle.loads(pickle.dumps(estimator))
    assert_equal(type(restored), estimator.__class__)

    assert_equal(original_score, restored.score(X, y))
def test_pickle():
    # Use every other sample to keep the fixture small.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_pickle, clf_name, iris.data[::2], iris.target[::2]

    for reg_name in FOREST_REGRESSORS:
        yield check_pickle, reg_name, boston.data[::2], boston.target[::2]
def check_multioutput(name):
    # Check estimators on multi-output problems.
    # Two targets per sample: a sign label in {-1, 1} and a quadrant label
    # in {0, 1, 2, 3}.
    X_train = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [-2, 1],
               [-1, 1], [-1, 2], [2, -1], [1, -1], [1, -2]]
    y_train = [[-1, 0], [-1, 0], [-1, 0], [1, 1], [1, 1], [1, 1], [-1, 2],
               [-1, 2], [-1, 2], [1, 3], [1, 3], [1, 3]]
    X_test = [[-1, -1], [1, 1], [-1, 1], [1, -1]]
    y_test = [[-1, 0], [1, 1], [-1, 2], [1, 3]]

    est = FOREST_ESTIMATORS[name](random_state=0, bootstrap=False)
    y_pred = est.fit(X_train, y_train).predict(X_test)
    assert_array_almost_equal(y_pred, y_test)

    if name in FOREST_CLASSIFIERS:
        with np.errstate(divide="ignore"):
            # One probability array per output: 2 classes for the first
            # output, 4 classes for the second.
            proba = est.predict_proba(X_test)
            assert_equal(len(proba), 2)
            assert_equal(proba[0].shape, (4, 2))
            assert_equal(proba[1].shape, (4, 4))

            log_proba = est.predict_log_proba(X_test)
            assert_equal(len(log_proba), 2)
            assert_equal(log_proba[0].shape, (4, 2))
            assert_equal(log_proba[1].shape, (4, 4))
def test_multioutput():
    # One sub-test per classifier and per regressor.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_multioutput, clf_name

    for reg_name in FOREST_REGRESSORS:
        yield check_multioutput, reg_name
def check_classes_shape(name):
    # Test that n_classes_ and classes_ have proper shape.
    forest_cls = FOREST_CLASSIFIERS[name]

    # Classification, single output
    model = forest_cls(random_state=0).fit(X, y)
    assert_equal(model.n_classes_, 2)
    assert_array_equal(model.classes_, [-1, 1])

    # Classification, multi-output
    y_multi = np.vstack((y, np.array(y) * 2)).T
    model = forest_cls(random_state=0).fit(X, y_multi)
    assert_array_equal(model.n_classes_, [2, 2])
    assert_array_equal(model.classes_, [[-1, 1], [-2, 2]])
def test_classes_shape():
    # One sub-test per classifier type.
    for clf_name in FOREST_CLASSIFIERS:
        yield check_classes_shape, clf_name
def test_random_trees_dense_type():
    # `sparse_output=False` must make RandomTreesEmbedding return a dense
    # ndarray rather than a sparse matrix.
    embedder = RandomTreesEmbedding(n_estimators=10, sparse_output=False)
    X, y = datasets.make_circles(factor=0.5)
    transformed = embedder.fit_transform(X)

    # Assert that type is ndarray, not scipy.sparse.csr.csr_matrix
    assert_equal(type(transformed), np.ndarray)
def test_random_trees_dense_equal():
    # Dense and sparse outputs of RandomTreesEmbedding must hold identical
    # values when built from the same random state.
    dense_embedder = RandomTreesEmbedding(n_estimators=10, sparse_output=False,
                                          random_state=0)
    sparse_embedder = RandomTreesEmbedding(n_estimators=10, sparse_output=True,
                                           random_state=0)
    X, y = datasets.make_circles(factor=0.5)
    dense_out = dense_embedder.fit_transform(X)
    sparse_out = sparse_embedder.fit_transform(X)

    # Assert that dense and sparse hashers have same array.
    assert_array_equal(sparse_out.toarray(), dense_out)
def test_random_hasher():
    # test random forest hashing on circles dataset
    # make sure that it is linearly separable.
    # even after projected to two SVD dimensions
    # Note: Not all random_states produce perfect results.
    hasher = RandomTreesEmbedding(n_estimators=30, random_state=1)
    X, y = datasets.make_circles(factor=0.5)
    X_transformed = hasher.fit_transform(X)

    # test fit and transform:
    # fit().transform() must equal fit_transform() for the same seed.
    hasher = RandomTreesEmbedding(n_estimators=30, random_state=1)
    assert_array_equal(hasher.fit(X).transform(X).toarray(),
                       X_transformed.toarray())

    # one leaf active per data point per forest
    assert_equal(X_transformed.shape[0], X.shape[0])
    assert_array_equal(X_transformed.sum(axis=1), hasher.n_estimators)

    # Project to 2 SVD components and check linear separability there.
    svd = TruncatedSVD(n_components=2)
    X_reduced = svd.fit_transform(X_transformed)
    linear_clf = LinearSVC()
    linear_clf.fit(X_reduced, y)
    assert_equal(linear_clf.score(X_reduced, y), 1.)
def test_random_hasher_sparse_data():
    # Hashing csc-sparse input must give the same embedding as the
    # equivalent dense input.
    X, y = datasets.make_multilabel_classification(return_indicator=True,
                                                   random_state=0)
    hasher = RandomTreesEmbedding(n_estimators=30, random_state=1)
    dense_out = hasher.fit_transform(X)
    sparse_out = hasher.fit_transform(csc_matrix(X))
    assert_array_equal(sparse_out.toarray(), dense_out.toarray())
def test_parallel_train():
    # Models trained with different n_jobs settings (same random_state)
    # must produce identical probability predictions.
    rng = check_random_state(12321)
    n_samples, n_features = 80, 30
    X_train = rng.randn(n_samples, n_features)
    y_train = rng.randint(0, 2, n_samples)

    clfs = []
    for n_jobs in [1, 2, 3, 8, 16, 32]:
        clf = RandomForestClassifier(n_estimators=20, n_jobs=n_jobs,
                                     random_state=12345)
        clfs.append(clf.fit(X_train, y_train))

    X_test = rng.randn(n_samples, n_features)
    probas = [clf.predict_proba(X_test) for clf in clfs]
    for proba1, proba2 in zip(probas, probas[1:]):
        assert_array_almost_equal(proba1, proba2)
def test_distribution():
    """Check the distribution of tree structures grown by ExtraTrees.

    On small categorical problems the set of possible randomized trees is
    enumerable, so we can assert both which structures appear and (for the
    single-variable case) their approximate frequencies.
    """
    rng = check_random_state(12321)

    def _structure(fitted_tree):
        # Serialize a fitted tree as "feature,threshold/" tokens ("-" marks
        # a leaf) so structurally identical trees map to the same string.
        return "".join(("%d,%d/" % (f, int(t)) if f >= 0 else "-")
                       for f, t in zip(fitted_tree.tree_.feature,
                                       fitted_tree.tree_.threshold))

    # Single variable with 4 values
    X = rng.randint(0, 4, size=(1000, 1))
    y = rng.rand(1000)
    n_trees = 500
    clf = ExtraTreesRegressor(n_estimators=n_trees, random_state=42).fit(X, y)
    uniques = defaultdict(int)
    for est in clf.estimators_:
        uniques[_structure(est)] += 1
    uniques = sorted([(1. * count / n_trees, tree)
                      for tree, count in uniques.items()])
    # On a single variable problem where X_0 has 4 equiprobable values, there
    # are 5 ways to build a random tree. The more compact (0,1/0,0/--0,2/--) of
    # them has probability 1/3 while the 4 others have probability 1/6.
    assert_equal(len(uniques), 5)
    assert_greater(0.20, uniques[0][0])  # Rough approximation of 1/6.
    assert_greater(0.20, uniques[1][0])
    assert_greater(0.20, uniques[2][0])
    assert_greater(0.20, uniques[3][0])
    assert_greater(uniques[4][0], 0.3)
    assert_equal(uniques[4][1], "0,1/0,0/--0,2/--")

    # Two variables, one with 2 values, one with 3 values
    X = np.empty((1000, 2))
    # BUG FIX: this previously drew from the *global* np.random state, which
    # made the data non-reproducible despite seeding `rng` above.
    X[:, 0] = rng.randint(0, 2, 1000)
    X[:, 1] = rng.randint(0, 3, 1000)
    y = rng.rand(1000)
    clf = ExtraTreesRegressor(n_estimators=100, max_features=1,
                              random_state=1).fit(X, y)
    uniques = defaultdict(int)
    for est in clf.estimators_:
        uniques[_structure(est)] += 1
    uniques = [(count, tree) for tree, count in uniques.items()]
    # With a binary and a ternary feature there are exactly 8 possible
    # randomized tree structures; 100 trees should realize all of them.
    assert_equal(len(uniques), 8)
def check_max_leaf_nodes_max_depth(name, X, y):
    """max_leaf_nodes must take precedence over max_depth."""
    est_cls = FOREST_ESTIMATORS[name]
    # With max_leaf_nodes set, the best-first builder is used and the
    # max_depth=1 cap is overridden: the tree grows deeper than 1.
    constrained = est_cls(max_depth=1, max_leaf_nodes=4,
                          n_estimators=1).fit(X, y)
    assert_greater(constrained.estimators_[0].tree_.max_depth, 1)
    # Without max_leaf_nodes, max_depth is honoured exactly.
    depth_only = est_cls(max_depth=1, n_estimators=1).fit(X, y)
    assert_equal(depth_only.estimators_[0].tree_.max_depth, 1)
def test_max_leaf_nodes_max_depth():
    # Run the max_leaf_nodes/max_depth precedence check on every estimator.
    X, y = datasets.make_hastie_10_2(n_samples=100, random_state=1)
    for name in FOREST_ESTIMATORS:
        yield check_max_leaf_nodes_max_depth, name, X, y
def check_min_samples_leaf(name, X, y):
    """Every leaf must contain at least min_samples_leaf training samples."""
    est_cls = FOREST_ESTIMATORS[name]
    # max_leaf_nodes=None exercises the DepthFirstTreeBuilder,
    # max_leaf_nodes=1000 the BestFirstTreeBuilder.
    for max_leaf_nodes in (None, 1000):
        forest = est_cls(min_samples_leaf=5,
                         max_leaf_nodes=max_leaf_nodes,
                         random_state=0).fit(X, y)
        leaf_ids = forest.estimators_[0].tree_.apply(X)
        sample_counts = np.bincount(leaf_ids)
        # Internal nodes receive no samples from apply(); drop them.
        occupied = sample_counts[sample_counts != 0]
        assert_greater(np.min(occupied), 4,
                       "Failed with {0}".format(name))
def test_min_samples_leaf():
    # Run the min_samples_leaf check on every estimator.
    X, y = datasets.make_hastie_10_2(n_samples=100, random_state=1)
    # float32 matches the dtype the trees use internally.
    X = X.astype(np.float32)
    for name in FOREST_ESTIMATORS:
        yield check_min_samples_leaf, name, X, y
def check_min_weight_fraction_leaf(name, X, y):
    # Test if leaves contain at least min_weight_fraction_leaf of the
    # training set
    ForestEstimator = FOREST_ESTIMATORS[name]
    rng = np.random.RandomState(0)
    weights = rng.rand(X.shape[0])
    total_weight = np.sum(weights)
    # test both DepthFirstTreeBuilder and BestFirstTreeBuilder
    # by setting max_leaf_nodes
    for max_leaf_nodes in (None, 1000):
        for frac in np.linspace(0, 0.5, 6):
            est = ForestEstimator(min_weight_fraction_leaf=frac,
                                  max_leaf_nodes=max_leaf_nodes,
                                  random_state=0)
            # Disable bootstrap so each tree sees the full sample (and thus
            # the full weight vector) unchanged.
            if isinstance(est, (RandomForestClassifier,
                                RandomForestRegressor)):
                est.bootstrap = False
            est.fit(X, y, sample_weight=weights)
            out = est.estimators_[0].tree_.apply(X)
            node_weights = np.bincount(out, weights=weights)
            # drop inner nodes
            leaf_weights = node_weights[node_weights != 0]
            # Every leaf must carry at least the requested fraction of the
            # total sample weight.
            assert_greater_equal(
                np.min(leaf_weights),
                total_weight * est.min_weight_fraction_leaf,
                "Failed with {0} "
                "min_weight_fraction_leaf={1}".format(
                    name, est.min_weight_fraction_leaf))
def test_min_weight_fraction_leaf():
    # Run the min_weight_fraction_leaf check on every estimator.
    X, y = datasets.make_hastie_10_2(n_samples=100, random_state=1)
    X = X.astype(np.float32)
    for name in FOREST_ESTIMATORS:
        yield check_min_weight_fraction_leaf, name, X, y
def check_sparse_input(name, X, X_sparse, y):
    """Fitting on sparse input must match fitting on the dense equivalent."""
    est_cls = FOREST_ESTIMATORS[name]
    dense_est = est_cls(random_state=0, max_depth=2).fit(X, y)
    sparse_est = est_cls(random_state=0, max_depth=2).fit(X_sparse, y)

    # Leaf assignments must agree regardless of input format.
    assert_array_almost_equal(sparse_est.apply(X), dense_est.apply(X))

    if name in FOREST_CLASSIFIERS or name in FOREST_REGRESSORS:
        assert_array_almost_equal(sparse_est.predict(X),
                                  dense_est.predict(X))
        assert_array_almost_equal(sparse_est.feature_importances_,
                                  dense_est.feature_importances_)

    if name in FOREST_CLASSIFIERS:
        assert_array_almost_equal(sparse_est.predict_proba(X),
                                  dense_est.predict_proba(X))
        assert_array_almost_equal(sparse_est.predict_log_proba(X),
                                  dense_est.predict_log_proba(X))

    if name in FOREST_TRANSFORMERS:
        assert_array_almost_equal(sparse_est.transform(X).toarray(),
                                  dense_est.transform(X).toarray())
        assert_array_almost_equal(sparse_est.fit_transform(X).toarray(),
                                  dense_est.fit_transform(X).toarray())
def test_sparse_input():
    # Every estimator crossed with every scipy sparse format.
    X, y = datasets.make_multilabel_classification(return_indicator=True,
                                                   random_state=0,
                                                   n_samples=40)
    for name, sparse_matrix in product(FOREST_ESTIMATORS,
                                       (csr_matrix, csc_matrix, coo_matrix)):
        yield check_sparse_input, name, X, sparse_matrix(X), y
def check_memory_layout(name, dtype):
    """The estimator must be insensitive to the memory layout of X."""
    est = FOREST_ESTIMATORS[name](random_state=0, bootstrap=False)
    y = iris.target

    # Dense layouts: as-is, C-contiguous, Fortran-contiguous, and an
    # explicit contiguous copy. Each must train and predict perfectly.
    dense_variants = (
        np.asarray(iris.data, dtype=dtype),
        np.asarray(iris.data, order="C", dtype=dtype),
        np.asarray(iris.data, order="F", dtype=dtype),
        np.ascontiguousarray(iris.data, dtype=dtype),
    )
    for X in dense_variants:
        assert_array_equal(est.fit(X, y).predict(X), y)

    # Sparse layouts, only for splitters that accept sparse input.
    if est.base_estimator.splitter in SPARSE_SPLITTERS:
        for to_sparse in (csr_matrix, csc_matrix, coo_matrix):
            X = to_sparse(iris.data, dtype=dtype)
            assert_array_equal(est.fit(X, y).predict(X), y)

    # Strided (non-contiguous) view of every third sample.
    X = np.asarray(iris.data[::3], dtype=dtype)
    y_strided = iris.target[::3]
    assert_array_equal(est.fit(X, y_strided).predict(X), y_strided)
def test_memory_layout():
    # Exercise every classifier then every regressor, each with both
    # floating-point dtypes.
    for name in list(FOREST_CLASSIFIERS) + list(FOREST_REGRESSORS):
        for dtype in (np.float64, np.float32):
            yield check_memory_layout, name, dtype
def check_1d_input(name, X, X_2d, y):
    """1-D feature arrays must be rejected by both fit and predict."""
    est_cls = FOREST_ESTIMATORS[name]
    # Fitting directly on a 1-D array is invalid.
    assert_raises(ValueError, est_cls(random_state=0).fit, X, y)
    # After fitting on the 2-D version, predicting on 1-D must also fail.
    fitted = est_cls(random_state=0)
    fitted.fit(X_2d, y)
    if name in FOREST_CLASSIFIERS or name in FOREST_REGRESSORS:
        assert_raises(ValueError, fitted.predict, X)
def test_1d_input():
    # Single feature column of iris, flattened (invalid) and 2-D (valid).
    X = iris.data[:, 0].ravel()
    X_2d = iris.data[:, 0].reshape((-1, 1))
    y = iris.target
    for name in FOREST_ESTIMATORS:
        yield check_1d_input, name, X, X_2d, y
def check_class_weights(name):
    # Check class_weights resemble sample_weights behavior.
    ForestClassifier = FOREST_CLASSIFIERS[name]
    # Iris is balanced, so no effect expected for using 'balanced' weights
    clf1 = ForestClassifier(random_state=0)
    clf1.fit(iris.data, iris.target)
    clf2 = ForestClassifier(class_weight='balanced', random_state=0)
    clf2.fit(iris.data, iris.target)
    assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_)
    # Make a multi-output problem with three copies of Iris
    iris_multi = np.vstack((iris.target, iris.target, iris.target)).T
    # Create user-defined weights that should balance over the outputs
    clf3 = ForestClassifier(class_weight=[{0: 2., 1: 2., 2: 1.},
                                          {0: 2., 1: 1., 2: 2.},
                                          {0: 1., 1: 2., 2: 2.}],
                            random_state=0)
    clf3.fit(iris.data, iris_multi)
    assert_almost_equal(clf2.feature_importances_, clf3.feature_importances_)
    # Check against multi-output "balanced" which should also have no effect
    clf4 = ForestClassifier(class_weight='balanced', random_state=0)
    clf4.fit(iris.data, iris_multi)
    assert_almost_equal(clf3.feature_importances_, clf4.feature_importances_)
    # Inflate importance of class 1, check against user-defined weights
    sample_weight = np.ones(iris.target.shape)
    sample_weight[iris.target == 1] *= 100
    class_weight = {0: 1., 1: 100., 2: 1.}
    # Weighting samples of class 1 by 100 must have the same effect as
    # weighting the class itself by 100.
    clf1 = ForestClassifier(random_state=0)
    clf1.fit(iris.data, iris.target, sample_weight)
    clf2 = ForestClassifier(class_weight=class_weight, random_state=0)
    clf2.fit(iris.data, iris.target)
    assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_)
    # Check that sample_weight and class_weight are multiplicative
    clf1 = ForestClassifier(random_state=0)
    clf1.fit(iris.data, iris.target, sample_weight ** 2)
    clf2 = ForestClassifier(class_weight=class_weight, random_state=0)
    clf2.fit(iris.data, iris.target, sample_weight)
    assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_)
def test_class_weights():
    # class_weight semantics only apply to classifiers.
    for name in FOREST_CLASSIFIERS:
        yield check_class_weights, name
def check_class_weight_balanced_and_bootstrap_multi_output(name):
    # Test class_weight works for multi-output
    ForestClassifier = FOREST_CLASSIFIERS[name]
    # Second output is the first one doubled (so -1/1 becomes -2/2).
    _y = np.vstack((y, np.array(y) * 2)).T
    clf = ForestClassifier(class_weight='balanced', random_state=0)
    clf.fit(X, _y)
    # Explicit per-output weight dictionaries.
    clf = ForestClassifier(class_weight=[{-1: 0.5, 1: 1.}, {-2: 1., 2: 1.}],
                           random_state=0)
    clf.fit(X, _y)
    # smoke test for subsample and balanced subsample
    clf = ForestClassifier(class_weight='balanced_subsample', random_state=0)
    clf.fit(X, _y)
    # 'subsample' is deprecated and warns; suppress the warning here.
    clf = ForestClassifier(class_weight='subsample', random_state=0)
    ignore_warnings(clf.fit)(X, _y)
def test_class_weight_balanced_and_bootstrap_multi_output():
    # Smoke-test multi-output class_weight on every classifier.
    for name in FOREST_CLASSIFIERS:
        yield check_class_weight_balanced_and_bootstrap_multi_output, name
def check_class_weight_errors(name):
    # Test if class_weight raises errors and warnings when expected.
    ForestClassifier = FOREST_CLASSIFIERS[name]
    _y = np.vstack((y, np.array(y) * 2)).T
    # Invalid preset string
    clf = ForestClassifier(class_weight='the larch', random_state=0)
    assert_raises(ValueError, clf.fit, X, y)
    assert_raises(ValueError, clf.fit, X, _y)
    # Warning warm_start with preset
    # ('auto'-style presets recompute weights per fit, which interacts
    # poorly with warm_start, hence the warning.)
    clf = ForestClassifier(class_weight='auto', warm_start=True,
                           random_state=0)
    assert_warns(UserWarning, clf.fit, X, y)
    assert_warns(UserWarning, clf.fit, X, _y)
    # Not a list or preset for multi-output
    clf = ForestClassifier(class_weight=1, random_state=0)
    assert_raises(ValueError, clf.fit, X, _y)
    # Incorrect length list for multi-output
    clf = ForestClassifier(class_weight=[{-1: 0.5, 1: 1.}], random_state=0)
    assert_raises(ValueError, clf.fit, X, _y)
def test_class_weight_errors():
    # Error/warning behaviour is the same for every classifier.
    for name in FOREST_CLASSIFIERS:
        yield check_class_weight_errors, name
def check_warm_start(name, random_state=42):
    # Test if fitting incrementally with warm start gives a forest of the
    # right size and the same results as a normal fit.
    X, y = datasets.make_hastie_10_2(n_samples=20, random_state=1)
    ForestEstimator = FOREST_ESTIMATORS[name]
    clf_ws = None
    # Grow the forest in two steps: 5 trees, then 5 more up to 10.
    for n_estimators in [5, 10]:
        if clf_ws is None:
            clf_ws = ForestEstimator(n_estimators=n_estimators,
                                     random_state=random_state,
                                     warm_start=True)
        else:
            clf_ws.set_params(n_estimators=n_estimators)
        clf_ws.fit(X, y)
        assert_equal(len(clf_ws), n_estimators)
    clf_no_ws = ForestEstimator(n_estimators=10, random_state=random_state,
                                warm_start=False)
    clf_no_ws.fit(X, y)
    # The incrementally grown forest must use the same set of per-tree
    # seeds and produce identical leaf assignments as a one-shot fit.
    assert_equal(set([tree.random_state for tree in clf_ws]),
                 set([tree.random_state for tree in clf_no_ws]))
    assert_array_equal(clf_ws.apply(X), clf_no_ws.apply(X),
                       err_msg="Failed with {0}".format(name))
def test_warm_start():
    # Run the warm-start growth check on every estimator.
    for name in FOREST_ESTIMATORS:
        yield check_warm_start, name
def check_warm_start_clear(name):
    """fit() must discard previous state when warm_start is switched off."""
    X, y = datasets.make_hastie_10_2(n_samples=20, random_state=1)
    est_cls = FOREST_ESTIMATORS[name]

    reference = est_cls(n_estimators=5, max_depth=1, warm_start=False,
                        random_state=1)
    reference.fit(X, y)

    warm = est_cls(n_estimators=5, max_depth=1, warm_start=True,
                   random_state=2)
    warm.fit(X, y)  # populate internal state
    # Turning warm_start off must wipe that state, so the next fit with
    # random_state=1 reproduces `reference` exactly.
    warm.set_params(warm_start=False, random_state=1)
    warm.fit(X, y)
    assert_array_almost_equal(warm.apply(X), reference.apply(X))
def test_warm_start_clear():
    # Run the warm-start state-clearing check on every estimator.
    for name in FOREST_ESTIMATORS:
        yield check_warm_start_clear, name
def check_warm_start_smaller_n_estimators(name):
    """A warm-started refit with fewer trees than before must raise."""
    X, y = datasets.make_hastie_10_2(n_samples=20, random_state=1)
    forest = FOREST_ESTIMATORS[name](n_estimators=5, max_depth=1,
                                     warm_start=True)
    forest.fit(X, y)
    # Shrinking an already-grown ensemble is unsupported: expect ValueError.
    forest.set_params(n_estimators=4)
    assert_raises(ValueError, forest.fit, X, y)
def test_warm_start_smaller_n_estimators():
    # Run the shrinking-ensemble error check on every estimator.
    for name in FOREST_ESTIMATORS:
        yield check_warm_start_smaller_n_estimators, name
def check_warm_start_equal_n_estimators(name):
    # Test if warm start with equal n_estimators does nothing and returns the
    # same forest and raises a warning.
    X, y = datasets.make_hastie_10_2(n_samples=20, random_state=1)
    ForestEstimator = FOREST_ESTIMATORS[name]
    clf = ForestEstimator(n_estimators=5, max_depth=3, warm_start=True,
                          random_state=1)
    clf.fit(X, y)
    clf_2 = ForestEstimator(n_estimators=5, max_depth=3, warm_start=True,
                            random_state=1)
    clf_2.fit(X, y)
    # Now clf_2 equals clf.
    clf_2.set_params(random_state=2)
    # Refitting with an unchanged n_estimators should warn and keep the
    # existing trees untouched.
    assert_warns(UserWarning, clf_2.fit, X, y)
    # If we had fit the trees again we would have got a different forest as we
    # changed the random state.
    assert_array_equal(clf.apply(X), clf_2.apply(X))
def test_warm_start_equal_n_estimators():
    # Run the no-op warm-start check on every estimator.
    for name in FOREST_ESTIMATORS:
        yield check_warm_start_equal_n_estimators, name
def check_warm_start_oob(name):
    # Test that the warm start computes oob score when asked.
    X, y = datasets.make_hastie_10_2(n_samples=20, random_state=1)
    ForestEstimator = FOREST_ESTIMATORS[name]
    # Use 15 estimators to avoid 'some inputs do not have OOB scores' warning.
    clf = ForestEstimator(n_estimators=15, max_depth=3, warm_start=False,
                          random_state=1, bootstrap=True, oob_score=True)
    clf.fit(X, y)
    # Grow 5 trees without OOB, then warm-start up to 15 with OOB enabled;
    # the resulting score must equal the one-shot fit above.
    clf_2 = ForestEstimator(n_estimators=5, max_depth=3, warm_start=False,
                            random_state=1, bootstrap=True, oob_score=False)
    clf_2.fit(X, y)
    clf_2.set_params(warm_start=True, oob_score=True, n_estimators=15)
    clf_2.fit(X, y)
    assert_true(hasattr(clf_2, 'oob_score_'))
    assert_equal(clf.oob_score_, clf_2.oob_score_)
    # Test that oob_score is computed even if we don't need to train
    # additional trees.
    clf_3 = ForestEstimator(n_estimators=15, max_depth=3, warm_start=True,
                            random_state=1, bootstrap=True, oob_score=False)
    clf_3.fit(X, y)
    assert_true(not(hasattr(clf_3, 'oob_score_')))
    clf_3.set_params(oob_score=True)
    # The refit warns (same n_estimators) but must still compute the score.
    ignore_warnings(clf_3.fit)(X, y)
    assert_equal(clf.oob_score_, clf_3.oob_score_)
def test_warm_start_oob():
    # OOB scoring applies to classifiers and regressors alike.
    for name in list(FOREST_CLASSIFIERS) + list(FOREST_REGRESSORS):
        yield check_warm_start_oob, name
def test_dtype_convert():
    # Labels given as single-character strings must round-trip through
    # fit/predict without being coerced to a different dtype.
    # (Fixes a corrupted trailing line: a dataset-export artifact had been
    # fused onto the final assertion.)
    classifier = RandomForestClassifier()
    CLASSES = 15
    X = np.eye(CLASSES)
    y = [ch for ch in 'ABCDEFGHIJKLMNOPQRSTU'[:CLASSES]]
    result = classifier.fit(X, y).predict(X)
    assert_array_equal(result, y)
atheed/servo | tests/wpt/web-platform-tests/tools/manifest/tests/test_XMLParser.py | 142 | 1445 | from xml.etree.ElementTree import ParseError
import pytest
from ..XMLParser import XMLParser
@pytest.mark.parametrize("s", [
'<foo> </foo>',
'<!DOCTYPE foo><foo> </foo>',
'<!DOCTYPE foo PUBLIC "fake" "id"><foo> </foo>',
'<!DOCTYPE foo PUBLIC "fake" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>',
'<!DOCTYPE foo PUBLIC "fake-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>'
])
def test_undefined_entity(s):
with pytest.raises(ParseError):
p = XMLParser()
p.feed(s)
p.close()
@pytest.mark.parametrize("s", [
'<!DOCTYPE foo PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><foo> </foo>'
])
def test_defined_entity(s):
p = XMLParser()
p.feed(s)
d = p.close()
assert d.tag == u"foo"
assert d.text == u"\u00A0"
def test_pi():
    """Processing instructions are swallowed and leave no child nodes."""
    parser = XMLParser()
    parser.feed('<foo><?foo bar?></foo>')
    root = parser.close()
    assert root.tag == u"foo"
    assert len(root) == 0
def test_comment():
    """XML comments are swallowed and leave no child nodes."""
    parser = XMLParser()
    parser.feed('<foo><!-- data --></foo>')
    root = parser.close()
    assert root.tag == u"foo"
    assert len(root) == 0
def test_unsupported_encoding():
    # Shift-JIS is not an encoding expat handles natively; the parser is
    # expected to honour the XML declaration and recode the input so the
    # document still parses correctly.
    p = XMLParser()
    p.feed(u"<?xml version='1.0' encoding='Shift-JIS'?><foo>\u3044</foo>".encode("shift-jis"))
    d = p.close()
    assert d.tag == u"foo"
    assert d.text == u"\u3044"
| mpl-2.0 |
ROMFactory/android_external_chromium_org | third_party/protobuf/python/google/protobuf/internal/service_reflection_test.py | 560 | 5127 | #! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.service_reflection."""
__author__ = 'petar@google.com (Petar Petrov)'
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import service_reflection
from google.protobuf import service
class FooUnitTest(unittest.TestCase):
    """Tests for the service/stub classes generated by service_reflection."""

    def testService(self):
        # A channel that records its arguments and fires the callback at once.
        class MockRpcChannel(service.RpcChannel):
            def CallMethod(self, method, controller, request, response, callback):
                self.method = method
                self.controller = controller
                self.request = request
                callback(response)

        # A controller that only captures the failure message.
        class MockRpcController(service.RpcController):
            def SetFailed(self, msg):
                self.failure_message = msg

        self.callback_response = None

        # Service subclass with NO method implementations: every call must
        # report "not implemented" through the controller.
        class MyService(unittest_pb2.TestService):
            pass

        self.callback_response = None

        def MyCallback(response):
            self.callback_response = response

        rpc_controller = MockRpcController()
        channel = MockRpcChannel()
        srvc = MyService()
        srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
        self.assertEqual('Method Foo not implemented.',
                         rpc_controller.failure_message)
        self.assertEqual(None, self.callback_response)

        rpc_controller.failure_message = None
        # Dispatch by descriptor instead of by generated method wrapper.
        service_descriptor = unittest_pb2.TestService.GetDescriptor()
        srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
                        unittest_pb2.BarRequest(), MyCallback)
        self.assertEqual('Method Bar not implemented.',
                         rpc_controller.failure_message)
        self.assertEqual(None, self.callback_response)

        # Service subclass that DOES implement its methods: calls must
        # reach them and report no failure.
        class MyServiceImpl(unittest_pb2.TestService):
            def Foo(self, rpc_controller, request, done):
                self.foo_called = True
            def Bar(self, rpc_controller, request, done):
                self.bar_called = True

        srvc = MyServiceImpl()
        rpc_controller.failure_message = None
        srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
        self.assertEqual(None, rpc_controller.failure_message)
        self.assertEqual(True, srvc.foo_called)

        rpc_controller.failure_message = None
        srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
                        unittest_pb2.BarRequest(), MyCallback)
        self.assertEqual(None, rpc_controller.failure_message)
        self.assertEqual(True, srvc.bar_called)

    def testServiceStub(self):
        # A channel that records its arguments and responds with a default
        # instance of the expected response class.
        class MockRpcChannel(service.RpcChannel):
            def CallMethod(self, method, controller, request,
                           response_class, callback):
                self.method = method
                self.controller = controller
                self.request = request
                callback(response_class())

        self.callback_response = None

        def MyCallback(response):
            self.callback_response = response

        channel = MockRpcChannel()
        stub = unittest_pb2.TestService_Stub(channel)
        rpc_controller = 'controller'
        request = 'request'

        # GetDescriptor now static, still works as instance method for compatability
        self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
                         stub.GetDescriptor())

        # Invoke method.
        stub.Foo(rpc_controller, request, MyCallback)

        # The stub must route everything through the channel unchanged and
        # hand the callback a FooResponse built from the response class.
        self.assertTrue(isinstance(self.callback_response,
                                   unittest_pb2.FooResponse))
        self.assertEqual(request, channel.request)
        self.assertEqual(rpc_controller, channel.controller)
        self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
charlesty/rpi2_xtrinsic | mag3110.py | 1 | 3719 | #!/usr/bin/env python2
# MAG3110 : Three-axis magnetometer
# This script can calibrate the sensor and acquire data.
# It will output magnetic field on the three axis in micro Tesla.
import smbus
import os
import time
# I2C constants
bus = smbus.SMBus(1) # 0 if /dev/i2c-0 exists, 1 if /dev/i2c-1 exists
ADDR = 0x0E
CTRL_REG1 = 0x10
CTRL_REG2 = 0x11
### MAG3110's class
class Mag3110:
def __init__(self):
who_am_i = bus.read_byte_data( ADDR, 0x07 )
if who_am_i == 0xc4:
bus.write_byte_data( ADDR, CTRL_REG1, 0x00 )
bus.write_byte_data( ADDR, CTRL_REG2, 0x80 )
return
else:
print "Device ID is", hex(who_am_i), " instead of 0xC4"
exit(1)
def get_mag(self):
x = 0
y = 1
z = 2
mag=[0,0,0]
# Set TM = 1, and output data rate = 10 Hz (100 ms between each acquisition)
bus.write_byte_data( ADDR, CTRL_REG1, 0x1A )
while 1:
dr_status = bus.read_byte_data( ADDR, 0x00 )
if dr_status & 0x8:
break
# Read data
mag_block_data = bus.read_i2c_block_data( ADDR, 0x01, 6 )
mag[x] = (mag_block_data[0] << 8) + mag_block_data[1]
mag[y] = (mag_block_data[2] << 8) + mag_block_data[3]
mag[z] = (mag_block_data[4] << 8) + mag_block_data[5]
# Convert 2's complement to integer
for i in range (0, 3):
if mag[i] & (1<<15):
mag[i] -= 1<<16
return (mag[x], mag[y], mag[z])
def calibrate(self):
x_max = -3000
x_min = 3000
y_max = -3000
y_min = 3000
z_max = -3000
z_min = 3000
print "Calibration :"
print "Rotate your board for 360 degrees on the three axis."
print "Press Ctrl+C when you finish."
try:
while 1:
(x, y, z) = self.get_mag()
if x>x_max: x_max=x
if x<x_min: x_min=x
if y>y_max: y_max=y
if y<y_min: y_min=y
if z>z_max: z_max=z
if z<z_min: z_min=z
time.sleep(0.1)
except KeyboardInterrupt:
self.standby()
print
x_off = ( x_max + x_min )/2
y_off = ( y_max + y_min )/2
z_off = ( z_max + z_min )/2
print "X_MAX :", x_max, "\tX_MIN :", x_min, "\tX_OFF :", x_off
print "Y_MAX :", y_max, "\tY_MIN :", y_min, "\tY_OFF :", y_off
print "Z_MAX :", z_max, "\tZ_MIN :", z_min, "\tZ_OFF :", z_off
with open("mag_offsets.txt","w") as f:
f.write( str(x_off) + " " + str(y_off) + " " + str(z_off) )
return
def acquisition(self):
try:
x_off = 0
y_off = 0
z_off = 0
x_max = -3000
x_min = 3000
y_max = -3000
y_min = 3000
z_max = -3000
z_min = 3000
if os.path.exists("mag_offsets.txt")==False:
print "WARNING: No calibration was made."
print " Therefore, the acquisition will output incorrect values"
else:
with open("mag_offsets.txt", "r") as f:
s = f.readline()
[x_off, y_off, z_off] = s.split()
x_off = int(x_off)
y_off = int(y_off)
z_off = int(z_off)
print "Press Ctrl+C to stop acquisition and go back to the menu"
time.sleep(1)
while 1:
(x, y, z) = self.get_mag()
print "MAG3110:\tX.", x-x_off, "uT",\
"\tY.", y-y_off, "uT",\
"\tZ.", z-z_off, "uT"
time.sleep(0.1)
except KeyboardInterrupt:
self.standby()
print
finally:
mag.standby()
def standby(self):
bus.write_byte_data(ADDR, CTRL_REG1, 0x00)
return
### Main
choice = 0
mag = Mag3110()
options = { 0:exit,
1:mag.calibrate,
2:mag.acquisition,
}
while 1:
try:
choice = int( raw_input("\nYou should calibrate the sensor before the acquisition.\n"\
"What do you want to do?\n"\
"\t1 : Calibrate the MAG3110 sensor\n"\
"\t2 : Acquisition\n"\
"\t0 : Exit\n"\
"Enter your choice : " ) )
if options.has_key(choice):
options[choice]()
else:
print "\nWrong number. Try again."
except ValueError:
print "\nEnter a number. Try again."
| gpl-2.0 |
jcanizales/grpc | src/python/grpcio/grpc/beta/utilities.py | 34 | 5375 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for the gRPC Python Beta API."""
import threading
import time
# implementations is referenced from specification in this module.
from grpc.beta import implementations # pylint: disable=unused-import
from grpc.beta import interfaces
from grpc.framework.foundation import callable_util
from grpc.framework.foundation import future
_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
'Exception calling connectivity future "done" callback!')
class _ChannelReadyFuture(future.Future):
    """A future.Future that matures once its channel reports READY.

    Subscribes to the channel's connectivity updates; all state is guarded
    by a single condition variable, and done-callbacks are always invoked
    outside the lock to avoid deadlocks with user code.
    """

    def __init__(self, channel):
        self._condition = threading.Condition()
        self._channel = channel
        self._matured = False
        self._cancelled = False
        self._done_callbacks = []

    def _block(self, timeout):
        """Block until matured or cancelled, honoring an optional timeout.

        Raises:
          future.CancelledError: if the future was cancelled while waiting.
          future.TimeoutError: if `timeout` elapses first.
        """
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise future.CancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
                            raise future.TimeoutError()
                        else:
                            self._condition.wait(timeout=remaining)

    def _update(self, connectivity):
        """Connectivity subscription callback: mature on READY."""
        with self._condition:
            if (not self._cancelled and
                    connectivity is interfaces.ChannelConnectivity.READY):
                self._matured = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return
        # Run callbacks outside the lock; exceptions are logged, not raised.
        for done_callback in done_callbacks:
            callable_util.call_logging_exceptions(
                done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)

    def cancel(self):
        """Cancel the future if it has not yet matured.

        Returns:
          True if the future was cancelled by this call, False if it had
          already matured and so could not be cancelled.
        """
        with self._condition:
            if not self._matured:
                self._cancelled = True
                self._channel.unsubscribe(self._update)
                self._condition.notify_all()
                done_callbacks = tuple(self._done_callbacks)
                self._done_callbacks = None
            else:
                return False
        for done_callback in done_callbacks:
            callable_util.call_logging_exceptions(
                done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
        # BUG FIX: previously fell off the end and returned None (falsy)
        # after a *successful* cancellation; the Future contract requires
        # True here.
        return True

    def cancelled(self):
        with self._condition:
            return self._cancelled

    def running(self):
        with self._condition:
            return not self._cancelled and not self._matured

    def done(self):
        with self._condition:
            return self._cancelled or self._matured

    def result(self, timeout=None):
        # A matured connectivity future carries no value.
        self._block(timeout)
        return None

    def exception(self, timeout=None):
        self._block(timeout)
        return None

    def traceback(self, timeout=None):
        self._block(timeout)
        return None

    def add_done_callback(self, fn):
        with self._condition:
            if not self._cancelled and not self._matured:
                self._done_callbacks.append(fn)
                return
        # Already done: invoke immediately, outside the lock.
        fn(self)

    def start(self):
        """Begin watching the channel (and ask it to try to connect)."""
        with self._condition:
            self._channel.subscribe(self._update, try_to_connect=True)

    def __del__(self):
        # Drop the subscription if the future is collected while pending.
        with self._condition:
            if not self._cancelled and not self._matured:
                self._channel.unsubscribe(self._update)
def channel_ready_future(channel):
    """Create a future.Future that matures when a Channel becomes READY.

    Cancelling the returned future merely drops its connectivity
    subscription; it does not stop the channel's own attempts to connect.

    Args:
      channel: An implementations.Channel.

    Returns:
      A future.Future that matures when the given Channel has connectivity
      interfaces.ChannelConnectivity.READY.
    """
    connectivity_future = _ChannelReadyFuture(channel)
    connectivity_future.start()
    return connectivity_future
| bsd-3-clause |
vaginessa/pyload | module/plugins/internal/XFSAccount.py | 2 | 5941 | # -*- coding: utf-8 -*-
import re
import time
import urlparse
from module.plugins.internal.Account import Account
from module.plugins.internal.Plugin import parse_html_form, set_cookie
class XFSAccount(Account):
__name__ = "XFSAccount"
__type__ = "account"
__version__ = "0.42"
__status__ = "testing"
__description__ = """XFileSharing account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg" , "zoidberg@mujmail.cz"),
("Walter Purcaro", "vuolter@gmail.com" )]
HOSTER_DOMAIN = None
HOSTER_URL = None
LOGIN_URL = None
COOKIES = True
PREMIUM_PATTERN = r'\(Premium only\)'
VALID_UNTIL_PATTERN = r'Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
TRAFFIC_LEFT_PATTERN = r'Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
TRAFFIC_LEFT_UNIT = "MB" #: Used only if no group <U> was found
LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
LEECH_TRAFFIC_UNIT = "MB" #: Used only if no group <U> was found
LOGIN_FAIL_PATTERN = r'Incorrect Login or Password|account was banned|Error<'
def parse_info(self, user, password, data, req):
validuntil = None
trafficleft = None
leechtraffic = None
premium = None
if not self.HOSTER_URL: #@TODO: Remove in 0.4.10
return {'validuntil' : validuntil,
'trafficleft' : trafficleft,
'leechtraffic': leechtraffic,
'premium' : premium}
html = self.load(self.HOSTER_URL,
get={'op': "my_account"},
cookies=self.COOKIES)
premium = True if re.search(self.PREMIUM_PATTERN, html) else False
m = re.search(self.VALID_UNTIL_PATTERN, html)
if m:
expiredate = m.group(1).strip()
self.log_debug("Expire date: " + expiredate)
try:
validuntil = time.mktime(time.strptime(expiredate, "%d %B %Y"))
except Exception, e:
self.log_error(e)
else:
self.log_debug("Valid until: %s" % validuntil)
if validuntil > time.mktime(time.gmtime()):
premium = True
trafficleft = -1
else:
premium = False
validuntil = None #: Registered account type (not premium)
else:
self.log_debug("VALID_UNTIL_PATTERN not found")
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
if m:
try:
traffic = m.groupdict()
size = traffic['S']
if "nlimited" in size:
trafficleft = -1
if validuntil is None:
validuntil = -1
else:
if 'U' in traffic:
unit = traffic['U']
elif isinstance(self.TRAFFIC_LEFT_UNIT, basestring):
unit = self.TRAFFIC_LEFT_UNIT
else:
unit = ""
trafficleft = self.parse_traffic(size + unit)
except Exception, e:
self.log_error(e)
else:
self.log_debug("TRAFFIC_LEFT_PATTERN not found")
leech = [m.groupdict() for m in re.finditer(self.LEECH_TRAFFIC_PATTERN, html)]
if leech:
leechtraffic = 0
try:
for traffic in leech:
size = traffic['S']
if "nlimited" in size:
leechtraffic = -1
if validuntil is None:
validuntil = -1
break
else:
if 'U' in traffic:
unit = traffic['U']
elif isinstance(self.LEECH_TRAFFIC_UNIT, basestring):
unit = self.LEECH_TRAFFIC_UNIT
else:
unit = ""
leechtraffic += self.parse_traffic(size + unit)
except Exception, e:
self.log_error(e)
else:
self.log_debug("LEECH_TRAFFIC_PATTERN not found")
return {'validuntil' : validuntil,
'trafficleft' : trafficleft,
'leechtraffic': leechtraffic,
'premium' : premium}
def login(self, user, password, data, req):
if self.HOSTER_DOMAIN:
if not self.HOSTER_URL:
self.HOSTER_URL = "http://www.%s/" % self.HOSTER_DOMAIN
if self.COOKIES:
if isinstance(self.COOKIES, list) and not self.COOKIES.count((self.HOSTER_DOMAIN, "lang", "english")):
self.COOKIES.insert((self.HOSTER_DOMAIN, "lang", "english"))
else:
set_cookie(self.req.cj, self.HOSTER_DOMAIN, "lang", "english")
if not self.HOSTER_URL:
self.login_fail(_("Missing HOSTER_URL"))
if not self.LOGIN_URL:
self.LOGIN_URL = urlparse.urljoin(self.HOSTER_URL, "login.html")
html = self.load(self.LOGIN_URL, cookies=self.COOKIES)
action, inputs = parse_html_form('name="FL"', html)
if not inputs:
inputs = {'op' : "login",
'redirect': self.HOSTER_URL}
inputs.update({'login' : user,
'password': password})
if action:
url = urlparse.urljoin("http://", action)
else:
url = self.HOSTER_URL
html = self.load(url, post=inputs, cookies=self.COOKIES)
if re.search(self.LOGIN_FAIL_PATTERN, html):
self.login_fail()
| gpl-3.0 |
kastriothaliti/techstitution | venv/lib/python2.7/site-packages/pip/pep425tags.py | 340 | 10980 | """Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import
import re
import sys
import warnings
import platform
import logging
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import distutils.util
from pip.compat import OrderedDict
import pip.utils.glibc
logger = logging.getLogger(__name__)

# Parses a macOS platform tag of the form '<name>_<major>_<minor>_<arch>'.
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
def get_config_var(var):
    """Look up *var* in sysconfig, downgrading IOError to a warning.

    Returns None when the variable is unknown or the config data is
    unreadable (pip issue #1074).
    """
    try:
        value = sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        value = None
    return value
def get_abbr_impl():
    """Return the two-letter interpreter abbreviation (cp/pp/jy/ip)."""
    if hasattr(sys, 'pypy_version_info'):
        return 'pp'
    if sys.platform.startswith('java'):
        return 'jy'
    if sys.platform == 'cli':
        return 'ip'
    # Default: CPython.
    return 'cp'
def get_impl_ver():
    """Return the implementation version string, e.g. '27' or '35'."""
    ver = get_config_var("py_version_nodot")
    if ver and get_abbr_impl() != 'pp':
        return ver
    # PyPy (or missing config var): derive the version explicitly.
    return ''.join(map(str, get_impl_version_info()))
def get_impl_version_info():
    """Return a sys.version_info-like tuple for use in decrementing the
    minor version."""
    if get_abbr_impl() == 'pp':
        # PyPy tags carry PyPy's own version, not the CPython compatibility
        # version (https://github.com/pypa/pip/issues/2882).
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    return (sys.version_info[0], sys.version_info[1])
def get_impl_tag():
    """Return the tag for this specific implementation, e.g. 'cp27'."""
    return "%s%s" % (get_abbr_impl(), get_impl_ver())
def get_flag(var, fallback, expected=True, warn=True):
    """Determine an SOABI flag from config var *var*, falling back to the
    callable *fallback* when the variable is unset or unavailable."""
    val = get_config_var(var)
    if val is not None:
        return val == expected
    # Config var missing: the ABI tag we build from it may be wrong.
    if warn:
        logger.debug("Config variable '%s' is unset, Python ABI tag may "
                     "be incorrect", var)
    return fallback()
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI config var: reconstruct the tag from individual
        # build flags (debug / pymalloc / wide-unicode).
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        # Wide-unicode ('u') is only meaningful before Python 3.3, where
        # PEP 393 made the distinction obsolete.
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        # e.g. SOABI 'cpython-35m-x86_64-linux-gnu' -> 'cp35m'
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        # Other implementations: sanitize SOABI into tag-safe characters.
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
def _is_running_32bit():
return sys.maxsize == 2147483647
def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() reports the macOS release Python was
        # *built* against (MACOSX_DEPLOYMENT_TARGET), which may be far older
        # than the machine we are running on -- so ask `platform` instead.
        release, _, machine = platform.mac_ver()
        major, minor = release.split('.')[:2]
        if _is_running_32bit():
            # A 32-bit interpreter can only load 32-bit extensions,
            # even on 64-bit hardware.
            if machine == "x86_64":
                machine = "i386"
            elif machine == "ppc64":
                machine = "ppc"
        return 'macosx_{0}_{1}_{2}'.format(major, minor, machine)
    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"
    return result
def is_manylinux1_compatible():
    """True when manylinux1 wheels may be installed on this system."""
    # manylinux1 is only defined for these two Linux platforms.
    if get_platform() not in ("linux_x86_64", "linux_i686"):
        return False

    # A distro or user may force the answer via an installed _manylinux
    # module exposing a `manylinux1_compatible` attribute.
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        pass  # fall through to the glibc heuristic below

    # Heuristic: the manylinux1 baseline is CentOS 5, i.e. glibc 2.5.
    return pip.utils.glibc.have_compatible_glibc(2, 5)
def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    # Multi-arch ("universal binary") groups, in the order their tags are
    # emitted.  Each maps to the single arches it bundles.
    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    def _supports_arch(major, minor, arch):
        # Derived from the "Application support" column of the macOS version
        # chart (https://en.wikipedia.org/wiki/OS_X#Versions):
        #   10.0 introduces ppc; 10.4 adds i386 (ppc64/x86_64 CLI-only);
        #   10.5 extends ppc64/x86_64 to GUI apps; 10.6 drops ppc64;
        #   10.7 drops ppc.
        # Since we cannot tell whether we are installing a CLI or a GUI
        # application, be conservative and treat ppc64/x86_64 support as
        # starting at 10.5.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            # A group arch is supported when any of its members is.
            return any(_supports_arch(major, minor, garch)
                       for garch in groups[arch])
        return False

    arches = []
    if _supports_arch(major, minor, machine):
        arches.append(machine)
    # Append every group arch that both contains `machine` and is supported
    # on this OS version, preserving the declaration order of `groups`.
    arches.extend(garch for garch in groups
                  if machine in groups[garch]
                  and _supports_arch(major, minor, garch))
    arches.append('universal')
    return arches
def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param noarch: if True, emit only platform-independent ('any') tags.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    :returns: list of (python tag, abi tag, platform tag) triples, ordered
        from most to least specific.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    # Collect stable-ABI ('abi3') suffixes advertised by the interpreter.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                # Enumerate every (older) minor release and every compatible
                # arch/group-arch for it, newest first.
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            # Prefer manylinux1 wheels over plain linux ones when eligible.
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in ('31', '30'):
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
# Tag lists computed once at import time for the running interpreter.
supported_tags = get_supported()
# Same, but ignoring platform/ABI (pure-Python wheels only).
supported_tags_noarch = get_supported(noarch=True)
# e.g. 'cp27': the implementation+version tag for this interpreter.
implementation_tag = get_impl_tag()
| gpl-3.0 |
sup95/zulip | zerver/views/webhooks/pingdom.py | 8 | 2521 | # Webhooks for external integrations.
from __future__ import absolute_import
from typing import Any
from django.utils.translation import ugettext as _
from django.http import HttpRequest, HttpResponse
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import Client, UserProfile
import ujson
import six
# Templates for the subject and body of messages sent to the stream.
PINGDOM_SUBJECT_TEMPLATE = '{name} status.'
PINGDOM_MESSAGE_TEMPLATE = 'Service {service_url} changed its {type} status from {previous_state} to {current_state}.'
PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE = 'Description: {description}.'

# Pingdom check types this webhook knows how to render.
# BUG FIX: a missing comma after 'HTTP_CUSTOM' previously concatenated it
# with 'HTTPS' into the single bogus entry 'HTTP_CUSTOMHTTPS', so both
# HTTP_CUSTOM and HTTPS checks were silently rejected as unsupported.
SUPPORTED_CHECK_TYPES = (
    'HTTP',
    'HTTP_CUSTOM',
    'HTTPS',
    'SMTP',
    'POP3',
    'IMAP',
    'PING',
    'DNS',
    'UDP',
    'PORT_TCP',
)
@api_key_only_webhook_view('Pingdom')
@has_request_variables
def api_pingdom_webhook(request, user_profile, client, payload=REQ(argument_type='body'),
                        stream=REQ(default='pingdom')):
    # type: (HttpRequest, UserProfile, Client, Dict[str, Any], six.text_type) -> HttpResponse
    # NOTE(review): the type comment uses Dict, but only Any is imported from
    # typing at the top of this file -- confirm `Dict` is added to that import.
    """Webhook endpoint for Pingdom alerts: render the JSON payload into a
    stream message, rejecting check types we cannot render."""
    check_type = get_check_type(payload)

    if check_type in SUPPORTED_CHECK_TYPES:
        subject = get_subject_for_http_request(payload)
        body = get_body_for_http_request(payload)
    else:
        # Unknown check type: report the problem back to the caller.
        return json_error(_('Unsupported check_type: {check_type}').format(check_type=check_type))

    check_send_message(user_profile, client, 'stream', [stream], subject, body)
    return json_success()
def get_subject_for_http_request(payload):
    # type: (Dict[str, Any]) -> six.text_type
    """Build the message subject from the Pingdom check name."""
    check_name = payload['check_name']
    return PINGDOM_SUBJECT_TEMPLATE.format(name=check_name)
def get_body_for_http_request(payload):
    # type: (Dict[str, Any]) -> six.text_type
    """Build the message body describing the service's state transition."""
    state_now = payload['current_state']
    state_before = payload['previous_state']
    body = PINGDOM_MESSAGE_TEMPLATE.format(
        service_url=payload['check_params']['hostname'],
        previous_state=state_before,
        current_state=state_now,
        type=get_check_type(payload),
    )
    # Only an UP -> DOWN transition carries a long description to relay.
    if state_now == 'DOWN' and state_before == 'UP':
        description = PINGDOM_MESSAGE_DESCRIPTION_TEMPLATE.format(
            description=payload['long_description'])
        body += '\n{description}'.format(description=description)
    return body
def get_check_type(payload):
    # type: (Dict[str, Any]) -> six.text_type
    """Return the Pingdom check type (e.g. 'HTTP', 'PING') for *payload*."""
    check_type = payload['check_type']
    return check_type
| apache-2.0 |
mzizzi/ansible | lib/ansible/modules/utilities/logic/include_role.py | 13 | 2819 | #!/usr/bin/python
# -*- mode: python -*-
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Plugin metadata consumed by ansible-doc and the module loader.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'core'}

# YAML module documentation rendered by ansible-doc.
DOCUMENTATION = '''
---
author:
- "Ansible Core Team (@ansible)"
module: include_role
short_description: Load and execute a role
description:
- Loads and executes a role as a task, this frees roles from the `role:` directive and allows them to be treated more as tasks.
- This module is also supported for Windows targets.
version_added: "2.2"
options:
name:
description:
- The name of the role to be executed.
required: True
tasks_from:
description:
- "File to load from a Role's tasks/ directory."
required: False
default: 'main'
vars_from:
description:
- "File to load from a Role's vars/ directory."
required: False
default: 'main'
defaults_from:
description:
- "File to load from a Role's defaults/ directory."
required: False
default: 'main'
allow_duplicates:
description:
- Overrides the role's metadata setting to allow using a role more than once with the same parameters.
required: False
default: True
private:
description:
- If True the variables from defaults/ and vars/ in a role will not be made available to the rest of the play.
default: None
notes:
- Handlers are made available to the whole play.
- simple dependencies seem to work fine.
- As with C(include) this task can be static or dynamic, If static it implies that it won't need templating nor loops nor conditionals and will
show included tasks in the --list options. Ansible will try to autodetect what is needed, but you can set `static` to `yes` or `no` at task
level to control this.
- This module is also supported for Windows targets.
'''

# Usage examples shown by ansible-doc.
EXAMPLES = """
- include_role:
name: myrole
- name: Run tasks/other.yml instead of 'main'
include_role:
name: myrole
tasks_from: other
- name: Pass variables to role
include_role:
name: myrole
vars:
rolevar1: 'value from task'
- name: Use role in loop
include_role:
name: myrole
with_items:
- '{{ roleinput1 }}'
- '{{ roleinput2 }}'
loop_control:
loop_var: roleinputvar
- name: conditional role
include_role:
name: myrole
when: not idontwanttorun
"""

# Return-value documentation; this action plugin only emits tasks to run.
RETURN = """
# this module does not return anything except tasks to execute
"""
| gpl-3.0 |
slank/ansible | lib/ansible/module_utils/cloud.py | 119 | 3974 | #
# (c) 2016 Allen Sanabria, <asanabria@linuxdynasty.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
This module adds shared support for generic cloud modules
In order to use this module, include it as part of a custom
module as shown below.
from ansible.module_utils.cloud import *
The 'cloud' module provides the following common classes:
* CloudRetry
- The base class to be used by other cloud providers, in order to
provide a backoff/retry decorator based on status codes.
- Example using the AWSRetry class which inherits from CloudRetry.
@AWSRetry.retry(tries=20, delay=2, backoff=2)
get_ec2_security_group_ids_from_names()
"""
from functools import wraps
import syslog
import time
from ansible.module_utils.pycompat24 import get_exception
class CloudRetry(object):
    """ CloudRetry can be used by any cloud provider, in order to implement a
    backoff algorithm/retry effect based on Status Code from Exceptions.

    Subclasses set ``base_class`` and implement ``status_code_from_exception``
    and ``found``; callers then decorate their API calls with
    ``Subclass.backoff(...)``.
    """
    # This is the base class of the exception.
    # AWS Example botocore.exceptions.ClientError
    base_class = None

    @staticmethod
    def status_code_from_exception(error):
        """ Return the status code from the exception object
        Args:
            error (object): The exception itself.
        """
        pass

    @staticmethod
    def found(response_code):
        """ Return True if the Response Code to retry on was found.
        Args:
            response_code (str): This is the Response Code that is being matched against.
        """
        pass

    @classmethod
    def backoff(cls, tries=10, delay=3, backoff=1.1):
        """ Retry calling the Cloud decorated function using an exponential backoff.
        Kwargs:
            tries (int): Number of times to try (not retry) before giving up
                default=10
            delay (int): Initial delay between retries in seconds
                default=3
            backoff (float): backoff multiplier e.g. value of 2 will double the delay each retry
                default=1.1
        """
        def deco(f):
            @wraps(f)
            def retry_func(*args, **kwargs):
                max_tries, max_delay = tries, delay
                while max_tries > 1:
                    try:
                        return f(*args, **kwargs)
                    except Exception:
                        e = get_exception()
                        if isinstance(e, cls.base_class):
                            response_code = cls.status_code_from_exception(e)
                            if cls.found(response_code):
                                msg = "{0}: Retrying in {1} seconds...".format(str(e), max_delay)
                                syslog.syslog(syslog.LOG_INFO, msg)
                                time.sleep(max_delay)
                                max_tries -= 1
                                max_delay *= backoff
                            else:
                                # Return original exception if exception is not a ClientError
                                raise e
                        else:
                            # Return original exception if exception is not a ClientError
                            raise e
                # Final attempt: any exception raised here propagates to
                # the caller unmodified.
                return f(*args, **kwargs)
            return retry_func  # true decorator
        return deco
| gpl-3.0 |
teoliphant/scipy | scipy/io/matlab/tests/test_streams.py | 4 | 2621 | """ Testing
"""
import os
import sys
if sys.version_info[0] >= 3:
from io import BytesIO
cStringIO = BytesIO
else:
from cStringIO import StringIO as cStringIO
from StringIO import StringIO as BytesIO
from tempfile import mkstemp
import numpy as np
from numpy.compat import asbytes
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal, \
run_module_suite
from scipy.io.matlab.streams import make_stream, \
GenericStream, cStringStream, FileStream, \
_read_into, _read_string
def setup():
    """Create the shared fixtures: the same byte payload exposed through a
    real file handle (fs), a BytesIO (gs) and a cStringIO (cs)."""
    val = asbytes('a\x00string')
    global fs, gs, cs, fname
    fd, fname = mkstemp()
    fs = os.fdopen(fd, 'wb')
    fs.write(val)
    fs.close()
    # Reopen read-only; the tests below only read from the streams.
    fs = open(fname, 'rb')
    gs = BytesIO(val)
    cs = cStringIO(val)
def teardown():
    """Close and remove the temporary file created by setup()."""
    global fname, fs
    fs.close()
    del fs
    os.unlink(fname)
def test_make_stream():
    """make_stream() should pick the specialised wrapper for each input type."""
    global fs, gs, cs
    # test stream initialization
    assert_true(isinstance(make_stream(gs), GenericStream))
    if sys.version_info[0] < 3:
        # cStringIO (and its dedicated wrapper) only exist on Python 2.
        assert_true(isinstance(make_stream(cs), cStringStream))
    assert_true(isinstance(make_stream(fs), FileStream))
def test_tell_seek():
    """Exercise seek()/tell() on every stream wrapper (nose yield-test)."""
    global fs, gs, cs
    for s in (fs, gs, cs):
        st = make_stream(s)
        res = st.seek(0)
        yield assert_equal, res, 0
        yield assert_equal, st.tell(), 0
        res = st.seek(5)
        yield assert_equal, res, 0
        yield assert_equal, st.tell(), 5
        # whence=1: seek relative to the current position.
        res = st.seek(2, 1)
        yield assert_equal, res, 0
        yield assert_equal, st.tell(), 7
        # whence=2: seek relative to the end of the stream.
        res = st.seek(-2, 2)
        yield assert_equal, res, 0
        yield assert_equal, st.tell(), 6
def test_read():
    """Exercise read()/_read_into()/_read_string() on every stream wrapper."""
    global fs, gs, cs
    for s in (fs, gs, cs):
        st = make_stream(s)
        st.seek(0)
        # read(-1) returns the whole remaining payload.
        res = st.read(-1)
        yield assert_equal, res, asbytes('a\x00string')
        st.seek(0)
        res = st.read(4)
        yield assert_equal, res, asbytes('a\x00st')
        # read into
        st.seek(0)
        res = _read_into(st, 4)
        yield assert_equal, res, asbytes('a\x00st')
        res = _read_into(st, 4)
        yield assert_equal, res, asbytes('ring')
        # Only 8 bytes total: asking for 2 more must raise IOError.
        yield assert_raises, IOError, _read_into, st, 2
        # read alloc
        st.seek(0)
        res = _read_string(st, 4)
        yield assert_equal, res, asbytes('a\x00st')
        res = _read_string(st, 4)
        yield assert_equal, res, asbytes('ring')
        yield assert_raises, IOError, _read_string, st, 2
if __name__ == "__main__":
    # Allow running this test module directly (outside the test runner).
    run_module_suite()
| bsd-3-clause |
ionanrozenfeld/networkx | networkx/algorithms/isomorphism/isomorphvf2.py | 18 | 36806 | # -*- coding: utf-8 -*-
"""
*************
VF2 Algorithm
*************
An implementation of the VF2 algorithm for graph isomorphism testing.
The simplest interface to use this module is to call networkx.is_isomorphic().
Introduction
------------
The GraphMatcher and DiGraphMatcher are responsible for matching
graphs or directed graphs in a predetermined manner. This
usually means a check for an isomorphism, though other checks
are also possible. For example, a subgraph of one graph
can be checked for isomorphism to a second graph.
Matching is done via syntactic feasibility. It is also possible
to check for semantic feasibility. Feasibility, then, is defined
as the logical AND of the two functions.
To include a semantic check, the (Di)GraphMatcher class should be
subclassed, and the semantic_feasibility() function should be
redefined. By default, the semantic feasibility function always
returns True. The effect of this is that semantics are not
considered in the matching of G1 and G2.
Examples
--------
Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
>>> from networkx.algorithms import isomorphism
>>> G1 = nx.path_graph(4)
>>> G2 = nx.path_graph(4)
>>> GM = isomorphism.GraphMatcher(G1,G2)
>>> GM.is_isomorphic()
True
GM.mapping stores the isomorphism mapping from G1 to G2.
>>> GM.mapping
{0: 0, 1: 1, 2: 2, 3: 3}
Suppose G1 and G2 are isomorphic directed graphs
graphs. Verification is as follows:
>>> G1 = nx.path_graph(4, create_using=nx.DiGraph())
>>> G2 = nx.path_graph(4, create_using=nx.DiGraph())
>>> DiGM = isomorphism.DiGraphMatcher(G1,G2)
>>> DiGM.is_isomorphic()
True
DiGM.mapping stores the isomorphism mapping from G1 to G2.
>>> DiGM.mapping
{0: 0, 1: 1, 2: 2, 3: 3}
Subgraph Isomorphism
--------------------
Graph theory literature can be ambiguious about the meaning of the
Graph theory literature can be ambiguous about the meaning of the
In the VF2 literature, a mapping M is said to be a graph-subgraph
isomorphism iff M is an isomorphism between G2 and a subgraph of G1.
Thus, to say that G1 and G2 are graph-subgraph isomorphic is to say
that a subgraph of G1 is isomorphic to G2.
Other literature uses the phrase 'subgraph isomorphic' as in 'G1 does
not have a subgraph isomorphic to G2'. Another use is as an in adverb
for isomorphic. Thus, to say that G1 and G2 are subgraph isomorphic
is to say that a subgraph of G1 is isomorphic to G2.
Finally, the term 'subgraph' can have multiple meanings. In this
context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced
subgraph isomorphisms are not directly supported, but one should be
able to perform the check by making use of nx.line_graph(). For
subgraphs which are not induced, the term 'monomorphism' is preferred
over 'isomorphism'. Currently, it is not possible to check for
monomorphisms.
Let G=(N,E) be a graph with a set of nodes N and set of edges E.
If G'=(N',E') is a subgraph, then:
N' is a subset of N
E' is a subset of E
If G'=(N',E') is a node-induced subgraph, then:
N' is a subset of N
E' is the subset of edges in E relating nodes in N'
If G'=(N',E') is an edge-induced subgraph, then:
N' is the subset of nodes in N related by edges in E'
E' is a subset of E
References
----------
[1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento,
"A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs",
IEEE Transactions on Pattern Analysis and Machine Intelligence,
vol. 26, no. 10, pp. 1367-1372, Oct., 2004.
http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf
[2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved
Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
on Graph-based Representations in Pattern Recognition, Cuen,
pp. 149-159, 2001.
http://amalfi.dis.unina.it/graph/db/papers/vf-algorithm.pdf
See Also
--------
syntactic_feasibliity(), semantic_feasibility()
Notes
-----
Modified to handle undirected graphs.
Modified to handle multiple edges.
In general, this problem is NP-Complete.
"""
# Copyright (C) 2007-2009 by the NetworkX maintainers
# All rights reserved.
# BSD license.
# This work was originally coded by Christopher Ellison
# as part of the Computational Mechanics Python (CMPy) project.
# James P. Crutchfield, principal investigator.
# Complexity Sciences Center and Physics Department, UC Davis.
import sys
import networkx as nx
__all__ = ['GraphMatcher',
'DiGraphMatcher']
class GraphMatcher(object):
"""Implementation of VF2 algorithm for matching undirected graphs.
Suitable for Graph and MultiGraph instances.
"""
    def __init__(self, G1, G2):
        """Initialize GraphMatcher.

        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
           The two graphs to check for isomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = isomorphism.GraphMatcher(G1,G2)
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())

        # Set recursion limit: match() recurses once per mapped node, so the
        # depth can reach len(G2).
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))

        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = 'graph'

        # Initialize state
        self.initialize()
    def reset_recursion_limit(self):
        """Restores the recursion limit.

        Must be called manually by the user when done with the matcher;
        see the TODO below for why this is not automatic.
        """
        ### TODO:
        ### Currently, we use recursion and set the recursion level higher.
        ### It would be nice to restore the level, but because the
        ### (Di)GraphMatcher classes make use of cyclic references, garbage
        ### collection will never happen when we define __del__() to
        ### restore the recursion level. The result is a memory leak.
        ### So for now, we do not automatically restore the recursion level,
        ### and instead provide a method to do this manually. Eventually,
        ### we should turn this into a non-recursive implementation.
        sys.setrecursionlimit(self.old_recursion_limit)
    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2.

        The generation order follows the VF2 paper [1]: pairs drawn from the
        "inout" terminal sets when both are non-empty, otherwise from all
        unmapped nodes.  Pairing against a fixed minimal G2 node prunes
        symmetric branches of the search tree.
        """

        # All computations are done using the current state!

        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes

        # First we compute the inout-terminal sets.
        T1_inout = [node for node in G1_nodes if (node in self.inout_1) and (node not in self.core_1)]
        T2_inout = [node for node in G2_nodes if (node in self.inout_2) and (node not in self.core_2)]

        # If T1_inout and T2_inout are both nonempty.
        # P(s) = T1_inout x {min T2_inout}
        if T1_inout and T2_inout:
            for node in T1_inout:
                yield node, min(T2_inout)
        else:
            # If T1_inout and T2_inout were both empty....
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            ##if not (T1_inout or T2_inout):  # as suggested by [2], incorrect
            if 1:  # as inferred from [1], correct
                # First we determine the candidate node for G2
                other_node = min(G2_nodes - set(self.core_2))
                for node in self.G1:
                    if node not in self.core_1:
                        yield node, other_node

        # For all other cases, we don't have any candidate pairs.
    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than GMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # core_1[n] contains the index of the node paired with n, which is m,
        #           provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        #           provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}

        # inout_1[n]  is non-zero if n is in M_1 or in T_1^{inout}
        # inout_2[m]  is non-zero if m is in M_2 or in T_2^{inout}
        #
        # The value stored is the depth of the SSR tree when the node became
        # part of the corresponding set.
        self.inout_1 = {}
        self.inout_2 = {}
        # Practically, these sets simply store the nodes in the subgraph.

        # Creating the state object registers the (empty) initial state.
        self.state = GMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()
def is_isomorphic(self):
"""Returns True if G1 and G2 are isomorphic graphs."""
# Let's do two very quick checks!
# QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)?
# For now, I just copy the code.
# Check global properties
if self.G1.order() != self.G2.order(): return False
# Check local properties
d1 = sorted(d for n,d in self.G1.degree())
d2 = sorted(d for n,d in self.G2.degree())
if d1 != d2: return False
try:
x = next(self.isomorphisms_iter())
return True
except StopIteration:
return False
def isomorphisms_iter(self):
"""Generator over isomorphisms between G1 and G2."""
# Declare that we are looking for a graph-graph isomorphism.
self.test = 'graph'
self.initialize()
for mapping in self.match():
yield mapping
    def match(self):
        """Extends the isomorphism mapping.

        This function is called recursively to determine if a complete
        isomorphism can be found between G1 and G2.  It cleans up the class
        variables after each recursive call. If an isomorphism is found,
        we yield the mapping.
        """
        if len(self.core_1) == len(self.G2):
            # Save the final mapping, otherwise garbage collection deletes it.
            self.mapping = self.core_1.copy()
            # The mapping is complete.
            yield self.mapping
        else:
            for G1_node, G2_node in self.candidate_pairs_iter():
                if self.syntactic_feasibility(G1_node, G2_node):
                    if self.semantic_feasibility(G1_node, G2_node):
                        # Recursive call, adding the feasible state.
                        # Constructing the state object mutates the matcher's
                        # core_/inout_ structures in place.
                        newstate = self.state.__class__(self, G1_node, G2_node)
                        for mapping in self.match():
                            yield mapping

                        # restore data structures
                        newstate.restore()
    def semantic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is semantically feasible.

        The semantic feasibility function should return True if it is
        acceptable to add the candidate pair (G1_node, G2_node) to the current
        partial isomorphism mapping.  The logic should focus on semantic
        information contained in the edge data or a formalized node class.

        By acceptable, we mean that the subsequent mapping can still become a
        complete isomorphism mapping.  Thus, if adding the candidate pair
        definitely makes it so that the subsequent mapping cannot become a
        complete isomorphism mapping, then this function must return False.

        The default semantic feasibility function always returns True.  The
        effect is that semantics are not considered in the matching of G1
        and G2.

        The semantic checks might differ based on what type of test is
        being performed.  A keyword description of the test is stored in
        self.test.  Here is a quick description of the currently implemented
        tests::

          test='graph'
            Indicates that the graph matcher is looking for a graph-graph
            isomorphism.
          test='subgraph'
            Indicates that the graph matcher is looking for a subgraph-graph
            isomorphism such that a subgraph of G1 is isomorphic to G2.

        Any subclass which redefines semantic_feasibility() must maintain
        the above form to keep the match() method functional.  Implementations
        should consider multigraphs.
        """
        return True
def subgraph_is_isomorphic(self):
"""Returns True if a subgraph of G1 is isomorphic to G2."""
try:
x = next(self.subgraph_isomorphisms_iter())
return True
except StopIteration:
return False
# subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
def subgraph_isomorphisms_iter(self):
"""Generator over isomorphisms between a subgraph of G1 and G2."""
# Declare that we are looking for graph-subgraph isomorphism.
self.test = 'subgraph'
self.initialize()
for mapping in self.match():
yield mapping
# subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent)
def syntactic_feasibility(self, G1_node, G2_node):
"""Returns True if adding (G1_node, G2_node) is syntactically feasible.
This function returns True if it is adding the candidate pair
to the current partial isomorphism mapping is allowable. The addition
is allowable if the inclusion of the candidate pair does not make it
impossible for an isomorphism to be found.
"""
# The VF2 algorithm was designed to work with graphs having, at most,
# one edge connecting any two nodes. This is not the case when
# dealing with an MultiGraphs.
#
# Basically, when we test the look-ahead rules R_neighbor, we will
# make sure that the number of edges are checked. We also add
# a R_self check to verify that the number of selfloops is acceptable.
#
# Users might be comparing Graph instances with MultiGraph instances.
# So the generic GraphMatcher class must work with MultiGraphs.
# Care must be taken since the value in the innermost dictionary is a
# singlet for Graph instances. For MultiGraphs, the value in the
# innermost dictionary is a list.
###
### Test at each step to get a return value as soon as possible.
###
### Look ahead 0
# R_self
# The number of selfloops for G1_node must equal the number of
# self-loops for G2_node. Without this check, we would fail on
# R_neighbor at the next recursion level. But it is good to prune the
# search tree now.
if self.G1.number_of_edges(G1_node,G1_node) != self.G2.number_of_edges(G2_node,G2_node):
return False
# R_neighbor
# For each neighbor n' of n in the partial mapping, the corresponding
# node m' is a neighbor of m, and vice versa. Also, the number of
# edges must be equal.
for neighbor in self.G1[G1_node]:
if neighbor in self.core_1:
if not (self.core_1[neighbor] in self.G2[G2_node]):
return False
elif self.G1.number_of_edges(neighbor, G1_node) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
return False
for neighbor in self.G2[G2_node]:
if neighbor in self.core_2:
if not (self.core_2[neighbor] in self.G1[G1_node]):
return False
elif self.G1.number_of_edges(self.core_2[neighbor], G1_node) != self.G2.number_of_edges(neighbor, G2_node):
return False
### Look ahead 1
# R_terminout
# The number of neighbors of n that are in T_1^{inout} is equal to the
# number of neighbors of m that are in T_2^{inout}, and vice versa.
num1 = 0
for neighbor in self.G1[G1_node]:
if (neighbor in self.inout_1) and (neighbor not in self.core_1):
num1 += 1
num2 = 0
for neighbor in self.G2[G2_node]:
if (neighbor in self.inout_2) and (neighbor not in self.core_2):
num2 += 1
if self.test == 'graph':
if not (num1 == num2):
return False
else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
### Look ahead 2
# R_new
# The number of neighbors of n that are neither in the core_1 nor
# T_1^{inout} is equal to the number of neighbors of m
# that are neither in core_2 nor T_2^{inout}.
num1 = 0
for neighbor in self.G1[G1_node]:
if neighbor not in self.inout_1:
num1 += 1
num2 = 0
for neighbor in self.G2[G2_node]:
if neighbor not in self.inout_2:
num2 += 1
if self.test == 'graph':
if not (num1 == num2):
return False
else: # self.test == 'subgraph'
if not (num1 >= num2):
return False
# Otherwise, this node pair is syntactically feasible!
return True
class DiGraphMatcher(GraphMatcher):
    """Implementation of VF2 algorithm for matching directed graphs.

    Suitable for DiGraph and MultiDiGraph instances.
    """

    def __init__(self, G1, G2):
        """Initialize DiGraphMatcher.

        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.

        Examples
        --------
        To create a DiGraphMatcher which checks for syntactic feasibility:

        >>> from networkx.algorithms import isomorphism
        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
        >>> DiGM = isomorphism.DiGraphMatcher(G1,G2)
        """
        super(DiGraphMatcher, self).__init__(G1, G2)

    def candidate_pairs_iter(self):
        """Iterator over candidate pairs of nodes in G1 and G2."""
        # All computations are done using the current state!
        G1_nodes = self.G1_nodes
        G2_nodes = self.G2_nodes

        # First we compute the out-terminal sets.
        T1_out = [node for node in G1_nodes if (node in self.out_1) and (node not in self.core_1)]
        T2_out = [node for node in G2_nodes if (node in self.out_2) and (node not in self.core_2)]

        # If T1_out and T2_out are both nonempty:
        # P(s) = T1_out x {min T2_out}
        if T1_out and T2_out:
            node_2 = min(T2_out)
            for node_1 in T1_out:
                yield node_1, node_2
        # If T1_out and T2_out were both empty, we compute the in-terminal
        # sets instead.
        ##elif not (T1_out or T2_out):  # as suggested by [2], incorrect
        else:                           # as suggested by [1], correct
            T1_in = [node for node in G1_nodes if (node in self.in_1) and (node not in self.core_1)]
            T2_in = [node for node in G2_nodes if (node in self.in_2) and (node not in self.core_2)]

            # If T1_in and T2_in are both nonempty:
            # P(s) = T1_in x {min T2_in}
            if T1_in and T2_in:
                node_2 = min(T2_in)
                for node_1 in T1_in:
                    yield node_1, node_2
            # If all terminal sets are empty:
            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
            ##elif not (T1_in or T2_in):  # as suggested by [2], incorrect
            else:                         # as inferred from [1], correct
                node_2 = min(G2_nodes - set(self.core_2))
                for node_1 in G1_nodes:
                    if node_1 not in self.core_1:
                        yield node_1, node_2
        # For all other cases, we don't have any candidate pairs.

    def initialize(self):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than
        DiGMState.  If only subclassing GraphMatcher, a redefinition is
        not necessary.
        """
        # core_1[n] contains the index of the node paired with n, which is m,
        # provided n is in the mapping.
        # core_2[m] contains the index of the node paired with m, which is n,
        # provided m is in the mapping.
        self.core_1 = {}
        self.core_2 = {}

        # See the paper for definitions of M_x and T_x^{y}.
        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
        # The value stored is the depth of the search tree when the node
        # became part of the corresponding set.
        self.in_1 = {}
        self.in_2 = {}
        self.out_1 = {}
        self.out_2 = {}

        self.state = DiGMState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.core_1.copy()

    def _counts_feasible(self, num1, num2):
        """Compare one pair of look-ahead counts under the current test.

        A graph-graph isomorphism requires the counts to match exactly; a
        subgraph-graph test only requires G1 to offer at least as many
        candidates as G2 needs.
        """
        if self.test == 'graph':
            return num1 == num2
        else:  # self.test == 'subgraph'
            return num1 >= num2

    def syntactic_feasibility(self, G1_node, G2_node):
        """Returns True if adding (G1_node, G2_node) is syntactically feasible.

        The pair is feasible when mapping G1_node onto G2_node cannot be
        ruled out by the VF2 cut rules for directed graphs: matching
        self-loop multiplicities (R_self), consistency with already-mapped
        predecessors and successors (R_pred, R_succ), and six look-ahead
        counts (R_termin, R_termout, R_new), each computed over both the
        predecessors and the successors of the candidate nodes.

        Edge multiplicities are compared explicitly so the same code works
        for both DiGraph and MultiDiGraph instances.
        """
        G1, G2 = self.G1, self.G2

        # R_self: self-loop counts must agree exactly.  Without this check
        # we would only fail at R_pred on the next recursion level.
        if (G1.number_of_edges(G1_node, G1_node)
                != G2.number_of_edges(G2_node, G2_node)):
            return False

        # R_pred: every already-mapped predecessor of G1_node must map to a
        # predecessor of G2_node with equal edge multiplicity, and vice versa.
        for predecessor in G1.pred[G1_node]:
            if predecessor in self.core_1:
                mapped = self.core_1[predecessor]
                if mapped not in G2.pred[G2_node]:
                    return False
                if (G1.number_of_edges(predecessor, G1_node)
                        != G2.number_of_edges(mapped, G2_node)):
                    return False
        for predecessor in G2.pred[G2_node]:
            if predecessor in self.core_2:
                mapped = self.core_2[predecessor]
                if mapped not in G1.pred[G1_node]:
                    return False
                if (G1.number_of_edges(mapped, G1_node)
                        != G2.number_of_edges(predecessor, G2_node)):
                    return False

        # R_succ: the same consistency requirement for successors.
        for successor in G1[G1_node]:
            if successor in self.core_1:
                mapped = self.core_1[successor]
                if mapped not in G2[G2_node]:
                    return False
                if (G1.number_of_edges(G1_node, successor)
                        != G2.number_of_edges(G2_node, mapped)):
                    return False
        for successor in G2[G2_node]:
            if successor in self.core_2:
                mapped = self.core_2[successor]
                if mapped not in G1[G1_node]:
                    return False
                if (G1.number_of_edges(G1_node, mapped)
                        != G2.number_of_edges(G2_node, successor)):
                    return False

        # Look ahead 1 -- R_termin: predecessors/successors already reachable
        # through in-edges (members of T^{in} but not yet mapped).
        num1 = sum(1 for p in G1.pred[G1_node]
                   if p in self.in_1 and p not in self.core_1)
        num2 = sum(1 for p in G2.pred[G2_node]
                   if p in self.in_2 and p not in self.core_2)
        if not self._counts_feasible(num1, num2):
            return False
        num1 = sum(1 for s in G1[G1_node]
                   if s in self.in_1 and s not in self.core_1)
        num2 = sum(1 for s in G2[G2_node]
                   if s in self.in_2 and s not in self.core_2)
        if not self._counts_feasible(num1, num2):
            return False

        # Look ahead 1 -- R_termout: the analogous counts over T^{out}.
        num1 = sum(1 for p in G1.pred[G1_node]
                   if p in self.out_1 and p not in self.core_1)
        num2 = sum(1 for p in G2.pred[G2_node]
                   if p in self.out_2 and p not in self.core_2)
        if not self._counts_feasible(num1, num2):
            return False
        num1 = sum(1 for s in G1[G1_node]
                   if s in self.out_1 and s not in self.core_1)
        num2 = sum(1 for s in G2[G2_node]
                   if s in self.out_2 and s not in self.core_2)
        if not self._counts_feasible(num1, num2):
            return False

        # Look ahead 2 -- R_new: predecessors/successors the search has not
        # touched at all (neither in T^{in} nor T^{out}).
        num1 = sum(1 for p in G1.pred[G1_node]
                   if p not in self.in_1 and p not in self.out_1)
        num2 = sum(1 for p in G2.pred[G2_node]
                   if p not in self.in_2 and p not in self.out_2)
        if not self._counts_feasible(num1, num2):
            return False
        num1 = sum(1 for s in G1[G1_node]
                   if s not in self.in_1 and s not in self.out_1)
        num2 = sum(1 for s in G2[G2_node]
                   if s not in self.in_2 and s not in self.out_2)
        if not self._counts_feasible(num1, num2):
            return False

        # Otherwise, this node pair is syntactically feasible!
        return True
class GMState(object):
    """Internal representation of state for the GraphMatcher class.

    This class is used internally by the GraphMatcher class.  It only
    stores state-specific data.  There will be at most G2.order() of
    these objects in memory at a time, due to the depth-first search
    strategy employed by the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes GMState object.

        Pass in the GraphMatcher to which this GMState belongs and the
        new node pair that will be added to the GraphMatcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # Remember which pair (if any) this state added so restore() can
        # undo exactly that work.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: this is the root state, so reset the
            # matcher's shared search structures.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.inout_1 = {}
            GM.inout_2 = {}

        # Watch out!  G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Record the new pair in both directions of the mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Everything added below is tagged with the new depth so that
            # restore() can delete it again.
            self.depth = len(GM.core_1)

            # The freshly mapped nodes join T^{inout} unless already there.
            GM.inout_1.setdefault(G1_node, self.depth)
            GM.inout_2.setdefault(G2_node, self.depth)

            # Grow T_1^{inout} with every unmapped neighbor of a mapped node.
            frontier = set()
            for mapped in GM.core_1:
                frontier.update(nbr for nbr in GM.G1[mapped]
                                if nbr not in GM.core_1)
            for nbr in frontier:
                GM.inout_1.setdefault(nbr, self.depth)

            # Grow T_2^{inout} the same way.
            frontier = set()
            for mapped in GM.core_2:
                frontier.update(nbr for nbr in GM.G2[mapped]
                                if nbr not in GM.core_2)
            for nbr in frontier:
                GM.inout_2.setdefault(nbr, self.depth)

    def restore(self):
        """Deletes the GMState object and restores the class variables."""
        # Drop the pair this state added, if any.
        # Watch out!  G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Strip every T^{inout} entry tagged with this state's depth.
        for vector in (self.GM.inout_1, self.GM.inout_2):
            stale = [node for node in vector if vector[node] == self.depth]
            for node in stale:
                del vector[node]
class DiGMState(object):
    """Internal representation of state for the DiGraphMatcher class.

    This class is used internally by the DiGraphMatcher class.  It only
    stores state-specific data.  There will be at most G2.order() of
    these objects in memory at a time, due to the depth-first search
    strategy employed by the VF2 algorithm.
    """

    def __init__(self, GM, G1_node=None, G2_node=None):
        """Initializes DiGMState object.

        Pass in the DiGraphMatcher to which this DiGMState belongs and the
        new node pair that will be added to the matcher's current
        isomorphism mapping.
        """
        self.GM = GM

        # Remember which pair (if any) this state added so restore() can
        # undo exactly that work.
        self.G1_node = None
        self.G2_node = None
        self.depth = len(GM.core_1)

        if G1_node is None or G2_node is None:
            # No pair supplied: this is the root state, so reset the
            # matcher's shared search structures.
            GM.core_1 = {}
            GM.core_2 = {}
            GM.in_1 = {}
            GM.in_2 = {}
            GM.out_1 = {}
            GM.out_2 = {}

        # Watch out!  G1_node == 0 should evaluate to True.
        if G1_node is not None and G2_node is not None:
            # Record the new pair in both directions of the mapping.
            GM.core_1[G1_node] = G2_node
            GM.core_2[G2_node] = G1_node
            self.G1_node = G1_node
            self.G2_node = G2_node

            # Everything added below is tagged with the new depth so that
            # restore() can delete it again.
            self.depth = len(GM.core_1)

            # The freshly mapped nodes join both terminal sets on their side.
            GM.in_1.setdefault(G1_node, self.depth)
            GM.out_1.setdefault(G1_node, self.depth)
            GM.in_2.setdefault(G2_node, self.depth)
            GM.out_2.setdefault(G2_node, self.depth)

            # Each terminal set grows by the unmapped nodes adjacent, in the
            # relevant direction, to the mapped core.
            updates = (
                (GM.in_1, GM.core_1, GM.G1.predecessors),
                (GM.in_2, GM.core_2, GM.G2.predecessors),
                (GM.out_1, GM.core_1, GM.G1.successors),
                (GM.out_2, GM.core_2, GM.G2.successors),
            )
            for vector, core, adjacent in updates:
                frontier = set()
                for mapped in core:
                    frontier.update(node for node in adjacent(mapped)
                                    if node not in core)
                for node in frontier:
                    vector.setdefault(node, self.depth)

    def restore(self):
        """Deletes the DiGMState object and restores the class variables."""
        # Drop the pair this state added, if any.
        # Watch out!  G1_node == 0 should evaluate to True.
        if self.G1_node is not None and self.G2_node is not None:
            del self.GM.core_1[self.G1_node]
            del self.GM.core_2[self.G2_node]

        # Strip every terminal-set entry tagged with this state's depth.
        for vector in (self.GM.in_1, self.GM.in_2,
                       self.GM.out_1, self.GM.out_2):
            stale = [node for node in vector if vector[node] == self.depth]
            for node in stale:
                del vector[node]
| bsd-3-clause |
silvermagic/jhbuild | scripts/changecvsroot.py | 5 | 1619 | #!/usr/bin/env python2
# jhbuild - a tool to ease building collections of source packages
# Copyright (C) 2001-2006 James Henstridge
#
# changecvsroot.py: script to alter the CVS root of a working copy
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
def changecvsroot(oldroot, newroot, *dirs):
    """Rewrite CVS/Root administrative files under *dirs*.

    Walks each directory tree looking for ``CVS/Root`` files whose stored
    repository root equals *oldroot* and rewrites them to *newroot*.
    Root files pointing anywhere else are left untouched.

    :param oldroot: the CVS root string to replace
    :param newroot: the replacement CVS root string
    :param dirs: one or more working-copy directories to walk
    """
    for tree in dirs:
        # os.walk replaces the Python-2-only os.path.walk callback API and
        # works on both Python 2 and Python 3.
        for dirpath, dirnames, filenames in os.walk(tree):
            if os.path.basename(dirpath) != 'CVS' or 'Root' not in filenames:
                continue
            rootfile = os.path.join(dirpath, 'Root')
            # Context managers make sure the handles are closed promptly
            # (the old code leaked the read handle).
            with open(rootfile, 'r') as fp:
                current = fp.read().strip()
            if current == oldroot:
                with open(rootfile, 'w') as fp:
                    fp.write('%s\n' % newroot)
if __name__ == '__main__':
    import sys
    # Need at least: script name, oldroot, newroot, and one directory.
    if len(sys.argv) < 4:
        sys.stderr.write('usage: changecvsroot.py oldroot newroot dirs ...\n')
        sys.exit(1)
    # Bug fix: the directory list starts at argv[3].  The old code passed
    # argv[2:], which wrongly included newroot itself as a directory to walk.
    changecvsroot(sys.argv[1], sys.argv[2], *sys.argv[3:])
| gpl-2.0 |
alexschlueter/cern-root | tutorials/pyroot/fillrandom.py | 17 | 1676 | #
# To see the output of this macro, click begin_html <a href="gif/fillrandom.gif">here</a>. end_html
#
# PyROOT tutorial: draw a TF1, fill a histogram from it, and save both.
from ROOT import TCanvas, TPad, TFormula, TF1, TPaveLabel, TH1F, TFile
from ROOT import gROOT, gBenchmark
# Start from a clean ROOT session so reruns do not accumulate objects.
gROOT.Reset()
# One canvas split into two pads: the function on top, the histogram below.
c1 = TCanvas( 'c1', 'The FillRandom example', 200, 10, 700, 900 )
c1.SetFillColor( 18 )
pad1 = TPad( 'pad1', 'The pad with the function', 0.05, 0.50, 0.95, 0.95, 21 )
pad2 = TPad( 'pad2', 'The pad with the histogram', 0.05, 0.05, 0.95, 0.45, 21 )
pad1.Draw()
pad2.Draw()
pad1.cd()
# Time everything from here until the final gBenchmark.Show() call.
gBenchmark.Start( 'fillrandom' )
#
# A function (any dimension) or a formula may reference
# an already defined formula
#
form1 = TFormula( 'form1', 'abs(sin(x)/x)' )
sqroot = TF1( 'sqroot', 'x*gaus(0) + [3]*form1', 0, 10 )
# Parameters [0..2] feed gaus(0) (constant, mean, sigma); [3] scales form1.
sqroot.SetParameters( 10, 4, 1, 20 )
pad1.SetGridx()
pad1.SetGridy()
pad1.GetFrame().SetFillColor( 42 )
pad1.GetFrame().SetBorderMode( -1 )
pad1.GetFrame().SetBorderSize( 5 )
sqroot.SetLineColor( 4 )
sqroot.SetLineWidth( 6 )
sqroot.Draw()
lfunction = TPaveLabel( 5, 39, 9.8, 46, 'The sqroot function' )
lfunction.SetFillColor( 41 )
lfunction.Draw()
c1.Update()
#
# Create a one dimensional histogram (one float per bin)
# and fill it following the distribution in function sqroot.
#
pad2.cd();
pad2.GetFrame().SetFillColor( 42 )
pad2.GetFrame().SetBorderMode( -1 )
pad2.GetFrame().SetBorderSize( 5 )
h1f = TH1F( 'h1f', 'Test random numbers', 200, 0, 10 )
h1f.SetFillColor( 45 )
# Draw 10000 random samples distributed according to the 'sqroot' TF1.
h1f.FillRandom( 'sqroot', 10000 )
h1f.Draw()
c1.Update()
#
# Open a ROOT file and save the formula, function and histogram
#
myfile = TFile( 'fillrandom.root', 'RECREATE' )
form1.Write()
sqroot.Write()
h1f.Write()
myfile.Close()
gBenchmark.Show( 'fillrandom' )
syslo/gaminator | gaminator-src/events.py | 1 | 1959 | # -*- coding: utf-8 -*-
import heapq
from .thing_type import _ThingType
def PTI_decorator__event(name):
    """Decorator factory registering *name* as a gaminator event hook.

    The decorated function accumulates event names in its
    ``_gaminator_events`` list; stacked decorators append to the same list.
    """
    def decorator(f):
        # Lazily create the per-function registry on first use.
        events = getattr(f, "_gaminator_events", None)
        if events is None:
            events = []
            f._gaminator_events = events
        events.append(name)
        return f
    return decorator
class _EventEmitterMixim(object):
    """Mixin giving a world object a time-ordered queue of gaminator events."""
    def __init__(self):
        # Min-heap of (fire_time, insertion_id, event_name, args, kwargs).
        # The monotonically increasing id breaks ties between events queued
        # for the same instant (insertion order wins) and keeps the tuple
        # comparison from ever reaching the possibly-uncomparable args.
        self._events_queue = []
        self._events_queue_id = 0
    def PTI_invoker__timed_event(self, PTI__time, PTI__event, *args, **kwargs):
        """Schedule PTI__event to fire PTI__time units after the current
        time, both here and recursively in every subworld."""
        heapq.heappush(self._events_queue, (
            self.PTI__time + PTI__time, self._events_queue_id,
            PTI__event, args, kwargs,
        ))
        self._events_queue_id += 1
        # Propagate the same relative delay to all subworlds so they
        # schedule the event against their own clocks.
        for subworld in self.PTI_world__subworlds:
            subworld.PTI_invoker__timed_event(
                PTI__time, PTI__event, *args, **kwargs
            )
    def PTI_invoker__event(self, PTI__event, *args, **kwargs):
        """Schedule PTI__event to fire immediately (zero delay)."""
        self.PTI_invoker__timed_event(0, PTI__event, *args, **kwargs)
    def _tick_events(self):
        """Dispatch every queued event that is due, plus one PTI__STEP.

        Handlers are collected first and only invoked after the queue has
        been drained, so a handler that queues a new due event cannot run
        it within the same tick.
        """
        # The -1 id sorts the implicit per-tick STEP event ahead of any
        # regular event queued for the same instant.
        heapq.heappush(self._events_queue, (
            self.PTI__time, -1, 'PTI__STEP', [], {},
        ))
        calls = []
        while self._events_queue and self._events_queue[0][0] <= self.PTI__time:
            (_time, _id, event, args, kwargs) = self._events_queue[0]
            # Handlers registered on thing classes fire once per live thing
            # of that class.  (Classes here are presumably _ThingType
            # instances created by the framework -- see thing_type.py.)
            for cls in self._things_by_class:
                if isinstance(cls, _ThingType):
                    for fname in cls._gaminator_events[event]:
                        for thing in self._things_by_class[cls]:
                            calls.append((getattr(thing, fname), args, kwargs))
            # Handlers registered anywhere in this world's own class
            # hierarchy fire once, bound to the world itself.
            for cls in self.__class__.mro():
                if isinstance(cls, _ThingType):
                    for fname in cls._gaminator_events[event]:
                        calls.append((getattr(self, fname), args, kwargs))
            heapq.heappop(self._events_queue)
        for f, args, kwargs in calls:
            f(*args, **kwargs)
| mit |
zhujzhuo/openstack-trove | trove/db/sqlalchemy/migrate_repo/versions/015_add_service_type.py | 10 | 1315 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.schema import Column
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
def upgrade(migrate_engine):
    """Add a 'service_type' column to instances and backfill it with 'mysql'."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    instances_table = Table('instances', metadata, autoload=True)
    # New 36-char string column; every pre-existing row predates multi-service
    # support, so they are all backfilled as MySQL instances.
    instances_table.create_column(Column('service_type', String(36)))
    instances_table.update().values({'service_type': 'mysql'}).execute()
def downgrade(migrate_engine):
    """Drop the 'service_type' column added by upgrade()."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    instances_table = Table('instances', metadata, autoload=True)
    instances_table.drop_column('service_type')
| apache-2.0 |
windyuuy/opera | chromium/src/v8/tools/testrunner/objects/peer.py | 123 | 3561 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Peer(object):
  """One cooperating machine in the distributed test run.

  Tracks the peer's capacity and relative speed, which shells (test
  binaries) it must have, the tests assigned to it, and the mutual
  public-key trust state between this host and the peer.
  """

  def __init__(self, address, jobs, rel_perf, pubkey):
    self.address = address                # string: IP address
    self.jobs = jobs                      # integer: number of CPUs
    self.relative_performance = rel_perf
    self.pubkey = pubkey                  # string: pubkey's fingerprint
    self.shells = set()                   # set of strings
    self.needed_work = 0
    self.assigned_work = 0
    self.tests = []                       # list of TestCase objects
    self.trusting_me = False              # This peer trusts my public key.
    self.trusted = False                  # I trust this peer's public key.

  def __str__(self):
    return ("Peer at %s, jobs: %d, performance: %.2f, trust I/O: %s/%s" %
            (self.address, self.jobs, self.relative_performance,
             self.trusting_me, self.trusted))

  def _TakeTest(self, shell, test):
    """Moves one test's duration accounting from |shell| onto this peer."""
    self.needed_work -= test.duration
    self.assigned_work += test.duration
    shell.total_duration -= test.duration
    self.tests.append(test)

  def AddTests(self, shell):
    """Adds tests from |shell| to this peer.

    Stops when self.needed_work reaches zero, or when all of shell's tests
    are assigned."""
    assert self.needed_work > 0
    self.shells.add(shell.shell)  # set.add is a no-op if already present.
    while shell.tests and self.needed_work > 0:
      self._TakeTest(shell, shell.tests.pop())

  def ForceAddOneTest(self, test, shell):
    """Forcibly adds another test to this peer, disregarding needed_work."""
    self.shells.add(shell.shell)
    self._TakeTest(shell, test)

  def Pack(self):
    """Creates a JSON serializable representation of this Peer."""
    return [self.address, self.jobs, self.relative_performance]

  @staticmethod
  def Unpack(packed):
    """Creates a Peer object built from a packed representation."""
    pubkey_dummy = ""  # Callers of this don't care (only the server does).
    return Peer(packed[0], packed[1], packed[2], pubkey_dummy)
| bsd-3-clause |
vikas-parashar/zulip | zerver/views/webhooks/helloworld.py | 11 | 1540 | # Webhooks for external integrations.
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.lib.validator import check_dict, check_string
from zerver.models import Client, UserProfile
from django.http import HttpRequest, HttpResponse
from six import text_type
from typing import Dict, Any, Iterable, Optional
@api_key_only_webhook_view('HelloWorld')
@has_request_variables
def api_helloworld_webhook(request, user_profile, client,
                           payload=REQ(argument_type='body'), stream=REQ(default='test'),
                           topic=REQ(default='Hello World')):
    # type: (HttpRequest, UserProfile, Client, Dict[str, Iterable[Dict[str, Any]]], text_type, Optional[text_type]) -> HttpResponse
    """Sample webhook: greet the stream and link today's featured article."""
    # Fixed greeting emitted for every payload.
    body = 'Hello! I am happy to be here! :smile:'

    # Append the Wikipedia featured article of the day.  A payload missing
    # either template key produces a JSON error response instead.
    body_template = '\nThe Wikipedia featured article for today is **[{featured_title}]({featured_url})**'
    try:
        body += body_template.format(**payload)
    except KeyError as e:
        return json_error(_("Missing key {} in JSON").format(str(e)))

    # Deliver the message to the requested stream and topic.
    check_send_message(user_profile, client, 'stream', [stream], topic, body)
    return json_success()
| apache-2.0 |
pymedusa/Medusa | ext/boto/manage/cmdshell.py | 153 | 14739 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
The cmdshell module uses the paramiko package to create SSH connections
to the servers that are represented by instance objects. The module has
functions for running commands, managing files, and opening interactive
shell sessions over those connections.
"""
from boto.mashups.interactive import interactive_shell
import boto
import os
import time
import shutil
import paramiko
import socket
import subprocess
from boto.compat import StringIO
class SSHClient(object):
    """
    This class creates a paramiko.SSHClient() object that represents
    a session with an SSH server. You can use the SSHClient object to send
    commands to the remote host and manipulate files on the remote host.

    :ivar server: A Server object or FakeServer object.
    :ivar host_key_file: The path to the user's .ssh key files.
    :ivar uname: The username for the SSH connection. Default = 'root'.
    :ivar timeout: The optional timeout variable for the TCP connection.
    :ivar ssh_pwd: An optional password to use for authentication or for
                   unlocking the private key.
    """
    def __init__(self, server,
                 host_key_file='~/.ssh/known_hosts',
                 uname='root', timeout=None, ssh_pwd=None):
        self.server = server
        self.host_key_file = host_key_file
        self.uname = uname
        self._timeout = timeout
        # Load the private key eagerly so a bad key file or passphrase
        # fails fast, before any network traffic.
        self._pkey = paramiko.RSAKey.from_private_key_file(server.ssh_key_file,
                                                           password=ssh_pwd)
        self._ssh_client = paramiko.SSHClient()
        self._ssh_client.load_system_host_keys()
        self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
        self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.connect()

    def connect(self, num_retries=5):
        """
        Connect to an SSH server and authenticate with it.

        :type num_retries: int
        :param num_retries: The maximum number of connection attempts.
        """
        retry = 0
        while retry < num_retries:
            try:
                self._ssh_client.connect(self.server.hostname,
                                         username=self.uname,
                                         pkey=self._pkey,
                                         timeout=self._timeout)
                return
            except socket.error as xxx_todo_changeme:
                (value, message) = xxx_todo_changeme.args
                # 51/61/111 are "network unreachable"/"connection refused"
                # style errnos: the instance is probably still booting, so
                # wait and retry rather than give up.
                if value in (51, 61, 111):
                    print('SSH Connection refused, will retry in 5 seconds')
                    time.sleep(5)
                    retry += 1
                else:
                    raise
            except paramiko.BadHostKeyException:
                print("%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname)
                print('Edit that file to remove the entry and then hit return to try again')
                # Bug fix: raw_input() does not exist on Python 3 (the rest
                # of this module is Python 3 compatible); fall back to
                # input(), which is the same call on Python 3.
                try:
                    raw_input('Hit Enter when ready')
                except NameError:
                    input('Hit Enter when ready')
                retry += 1
            except EOFError:
                print('Unexpected Error from SSH Connection, retry in 5 seconds')
                time.sleep(5)
                retry += 1
        # Historical behavior: report failure and fall through returning
        # None instead of raising; callers depend on that.
        print('Could not establish SSH connection')

    def open_sftp(self):
        """
        Open an SFTP session on the SSH server.

        :rtype: :class:`paramiko.sftp_client.SFTPClient`
        :return: An SFTP client object.
        """
        return self._ssh_client.open_sftp()

    def get_file(self, src, dst):
        """
        Open an SFTP session on the remote host, and copy a file from
        the remote host to the specified path on the local host.

        :type src: string
        :param src: The path to the target file on the remote host.

        :type dst: string
        :param dst: The path on your local host where you want to
                    store the file.
        """
        sftp_client = self.open_sftp()
        sftp_client.get(src, dst)

    def put_file(self, src, dst):
        """
        Open an SFTP session on the remote host, and copy a file from
        the local host to the specified path on the remote host.

        :type src: string
        :param src: The path to the target file on your local host.

        :type dst: string
        :param dst: The path on the remote host where you want to store
                    the file.
        """
        sftp_client = self.open_sftp()
        sftp_client.put(src, dst)

    def open(self, filename, mode='r', bufsize=-1):
        """
        Open an SFTP session to the remote host, and open a file on
        that host.

        :type filename: string
        :param filename: The path to the file on the remote host.

        :type mode: string
        :param mode: The file interaction mode.

        :type bufsize: integer
        :param bufsize: The file buffer size.

        :rtype: :class:`paramiko.sftp_file.SFTPFile`
        :return: A paramiko proxy object for a file on the remote server.
        """
        sftp_client = self.open_sftp()
        return sftp_client.open(filename, mode, bufsize)

    def listdir(self, path):
        """
        List all of the files and subdirectories at the specified path
        on the remote host.

        :type path: string
        :param path: The base path from which to obtain the list.

        :rtype: list
        :return: A list of files and subdirectories at the specified path.
        """
        sftp_client = self.open_sftp()
        return sftp_client.listdir(path)

    def isdir(self, path):
        """
        Check the specified path on the remote host to determine if
        it is a directory.

        :type path: string
        :param path: The path to the directory that you want to check.

        :rtype: integer
        :return: If the path is a directory, the function returns 1.
                 If the path is a file or an invalid path, the function
                 returns 0.
        """
        status = self.run('[ -d %s ] || echo "FALSE"' % path)
        if status[1].startswith('FALSE'):
            return 0
        return 1

    def exists(self, path):
        """
        Check the remote host for the specified path, or a file
        at the specified path. This function returns 1 if the
        path or the file exist on the remote host, and returns 0 if
        the path or the file does not exist on the remote host.

        :type path: string
        :param path: The path to the directory or file that you want to check.

        :rtype: integer
        :return: If the path or the file exist, the function returns 1.
                 If the path or the file do not exist on the remote host,
                 the function returns 0.
        """
        status = self.run('[ -a %s ] || echo "FALSE"' % path)
        if status[1].startswith('FALSE'):
            return 0
        return 1

    def shell(self):
        """
        Start an interactive shell session with the remote host.
        """
        channel = self._ssh_client.invoke_shell()
        interactive_shell(channel)

    def run(self, command):
        """
        Run a command on the remote host.

        :type command: string
        :param command: The command that you want to send to the remote host.

        :rtype: tuple
        :return: This function returns a tuple that contains an integer status,
                 the stdout from the command, and the stderr from the command.
        """
        boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
        status = 0
        std_out = ''
        std_err = ''
        try:
            t = self._ssh_client.exec_command(command)
        except paramiko.SSHException:
            # Bug fix: the original code fell through here and then
            # referenced ``t``, which was never assigned, raising NameError.
            # Report the failure through the status code instead.
            status = 1
        else:
            std_out = t[1].read()
            std_err = t[2].read()
            t[0].close()
            t[1].close()
            t[2].close()
        boto.log.debug('stdout: %s' % std_out)
        boto.log.debug('stderr: %s' % std_err)
        return (status, std_out, std_err)

    def run_pty(self, command):
        """
        Request a pseudo-terminal from a server, and execute a command on that
        server.

        :type command: string
        :param command: The command that you want to run on the remote host.

        :rtype: :class:`paramiko.channel.Channel`
        :return: An open channel object.
        """
        boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
        channel = self._ssh_client.get_transport().open_session()
        channel.get_pty()
        channel.exec_command(command)
        return channel

    def close(self):
        """
        Close an SSH session and any open channels that are tied to it.
        """
        transport = self._ssh_client.get_transport()
        transport.close()
        self.server.reset_cmdshell()
class LocalClient(object):
    """
    Client that mirrors the :class:`SSHClient` interface but operates on
    the local filesystem and local subprocesses (used when the managed
    instance is the machine we are running on).

    :ivar server: A Server object or FakeServer object.
    :ivar host_key_file: The path to the user's .ssh key files.
    :ivar uname: The username for the SSH connection. Default = 'root'.
    """
    def __init__(self, server, host_key_file=None, uname='root'):
        self.server = server
        self.host_key_file = host_key_file
        self.uname = uname

    def get_file(self, src, dst):
        """
        Copy a file from one directory to another.
        """
        shutil.copyfile(src, dst)

    def put_file(self, src, dst):
        """
        Copy a file from one directory to another.
        """
        shutil.copyfile(src, dst)

    def listdir(self, path):
        """
        List all of the files and subdirectories at the specified path.

        :rtype: list
        :return: Return a list containing the names of the entries
                 in the directory given by path.
        """
        return os.listdir(path)

    def isdir(self, path):
        """
        Check the specified path to determine if it is a directory.

        :rtype: boolean
        :return: Returns True if the path is an existing directory.
        """
        return os.path.isdir(path)

    def exists(self, path):
        """
        Check for the specified path, or check a file at the specified path.

        :rtype: boolean
        :return: If the path or the file exist, the function returns True.
        """
        return os.path.exists(path)

    def shell(self):
        raise NotImplementedError('shell not supported with LocalClient')

    def run(self, command=None):
        """
        Open a subprocess and run a command on the local host.

        :type command: string
        :param command: The shell command to run.  If omitted, the caller
            must have assigned ``self.command`` beforehand (the historical
            calling convention of this class; the original code *only* read
            ``self.command``, which __init__ never sets, so calling run()
            without either would raise AttributeError).

        :rtype: tuple
        :return: This function returns a tuple that contains an integer status
                 and a string with the combined stdout and stderr output.
        """
        if command is None:
            # Backward compatible with callers that set the attribute.
            command = self.command
        boto.log.info('running:%s' % command)
        log_fp = StringIO()
        process = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Bug fix: the original busy-waited on poll() before calling
        # communicate(), which can deadlock once a pipe buffer fills up.
        # communicate() already waits for process exit while draining
        # both pipes.
        std_out, std_err = process.communicate()
        for output in (std_out, std_err):
            if isinstance(output, bytes):
                # Python 3 pipes yield bytes; decode before writing to the
                # text buffer.
                output = output.decode('utf-8', 'replace')
            log_fp.write(output)
        boto.log.info(log_fp.getvalue())
        boto.log.info('output: %s' % log_fp.getvalue())
        return (process.returncode, log_fp.getvalue())

    def close(self):
        """No persistent connection to tear down for the local host."""
        pass
class FakeServer(object):
    """
    A minimal stand-in for :class:`boto.manage.server.Server`.

    It exposes only the attributes that :class:`boto.manage.SSHClient`
    reads, so an SSH session can be opened against a bare boto Instance
    without constructing a full Server object.

    :ivar instance: A boto Instance object.
    :ivar ssh_key_file: The path to the SSH key file.
    """
    def __init__(self, instance, ssh_key_file):
        self.ssh_key_file = ssh_key_file
        self.instance = instance
        # Mirror the Server attributes SSHClient expects.
        self.hostname = instance.dns_name
        self.instance_id = instance.id
def start(server):
    """
    Connect to the specified server.

    :return: A :class:`boto.manage.cmdshell.LocalClient` when *server* is
        the instance this process is running on (per the boto config's
        ``Instance/instance-id`` entry), otherwise a
        :class:`boto.manage.cmdshell.SSHClient`.
    """
    running_instance = boto.config.get('Instance', 'instance-id', None)
    if running_instance == server.instance_id:
        return LocalClient(server)
    return SSHClient(server)
def sshclient_from_instance(instance, ssh_key_file,
                            host_key_file='~/.ssh/known_hosts',
                            user_name='root', ssh_pwd=None):
    """
    Build and return an :class:`SSHClient` directly from an EC2 instance
    object, without requiring a full Server object.

    :type instance: :class`boto.ec2.instance.Instance` object
    :param instance: The instance object.

    :type ssh_key_file: string
    :param ssh_key_file: A path to the private key file that is
        used to log into the instance.

    :type host_key_file: string
    :param host_key_file: A path to the known_hosts file used
        by the SSH client.  Defaults to ~/.ssh/known_hosts

    :type user_name: string
    :param user_name: The username to use when logging into
        the instance.  Defaults to root.

    :type ssh_pwd: string
    :param ssh_pwd: The passphrase, if any, associated with
        private key.
    """
    fake_server = FakeServer(instance, ssh_key_file)
    return SSHClient(fake_server, host_key_file, user_name, ssh_pwd)
| gpl-3.0 |
hynnet/openwrt-mt7620 | staging_dir/host/lib/python2.7/unittest/__init__.py | 228 | 2719 | """
Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's
Smalltalk testing framework.
This module contains the core framework classes that form the basis of
specific test cases and suites (TestCase, TestSuite etc.), and also a
text-based utility class for running the tests and reporting the results
(TextTestRunner).
Simple usage:
import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
if __name__ == '__main__':
unittest.main()
Further information is available in the bundled documentation, and from
http://docs.python.org/library/unittest.html
Copyright (c) 1999-2003 Steve Purcell
Copyright (c) 2003-2010 Python Software Foundation
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
__all__ = ['TestResult', 'TestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', 'installHandler',
'registerResult', 'removeResult', 'removeHandler']
# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
__unittest = True
from .result import TestResult
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
skipUnless, expectedFailure)
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)
from .main import TestProgram, main
from .runner import TextTestRunner, TextTestResult
from .signals import installHandler, registerResult, removeResult, removeHandler
# deprecated
_TextTestResult = TextTestResult
| gpl-2.0 |
tuxfux-hlp-notes/python-batches | batch-67/12-modules/myenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py | 360 | 2852 | """
urllib3 - Thread-safe connection pooling and re-using.
"""
from __future__ import absolute_import
import warnings
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7, which lacks logging.NullHandler: a
    # handler that silently discards every record handed to it.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.16'
__all__ = (
'HTTPConnectionPool',
'HTTPSConnectionPool',
'PoolManager',
'ProxyManager',
'HTTPResponse',
'Retry',
'Timeout',
'add_stderr_logger',
'connection_from_url',
'disable_warnings',
'encode_multipart_formdata',
'get_host',
'make_headers',
'proxy_from_url',
)
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    stderr_handler = logging.StreamHandler()
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    stderr_handler.setFormatter(fmt)
    logger.addHandler(stderr_handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return stderr_handler
# ... Clean up.
del NullHandler
# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)
def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    # Inserts an 'ignore' filter for *category* (and its subclasses) ahead
    # of the 'always'/'default' filters this module registers at import
    # time, affecting the process-wide warning state.
    warnings.simplefilter('ignore', category)
| gpl-3.0 |
Mic92/ansible | test/units/playbook/test_play_context.py | 17 | 13272 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.play_context import PlayContext
from units.mock.loader import DictDataLoader
class TestPlayContext(unittest.TestCase):
    """Exercises PlayContext construction from CLI options, play objects,
    task objects and inventory variables, and become-command generation."""

    def setUp(self):
        # Build the full ansible CLI option parser so tests can feed
        # realistically parsed options into PlayContext.
        self._parser = CLI.base_parser(
            runas_opts = True,
            meta_opts = True,
            runtask_opts = True,
            vault_opts = True,
            async_opts = True,
            connect_opts = True,
            subset_opts = True,
            check_opts = True,
            inventory_opts = True,
        )

    def tearDown(self):
        pass

    def test_play_context(self):
        (options, args) = self._parser.parse_args(['-vv', '--check'])
        play_context = PlayContext(options=options)

        # Defaults when only CLI options are supplied.
        self.assertEqual(play_context.connection, 'smart')
        self.assertEqual(play_context.remote_addr, None)
        self.assertEqual(play_context.remote_user, None)
        self.assertEqual(play_context.password, '')
        self.assertEqual(play_context.port, None)
        self.assertEqual(play_context.private_key_file, C.DEFAULT_PRIVATE_KEY_FILE)
        self.assertEqual(play_context.timeout, C.DEFAULT_TIMEOUT)
        self.assertEqual(play_context.shell, None)
        self.assertEqual(play_context.verbosity, 2)
        self.assertEqual(play_context.check_mode, True)
        self.assertEqual(play_context.no_log, None)

        # Play-level settings override the CLI defaults.
        mock_play = MagicMock()
        mock_play.connection = 'mock'
        mock_play.remote_user = 'mock'
        mock_play.port = 1234
        mock_play.become = True
        mock_play.become_method = 'mock'
        mock_play.become_user = 'mockroot'
        mock_play.no_log = True

        play_context = PlayContext(play=mock_play, options=options)
        self.assertEqual(play_context.connection, 'mock')
        self.assertEqual(play_context.remote_user, 'mock')
        self.assertEqual(play_context.password, '')
        self.assertEqual(play_context.port, 1234)
        self.assertEqual(play_context.become, True)
        self.assertEqual(play_context.become_method, "mock")
        self.assertEqual(play_context.become_user, "mockroot")

        # Task-level and inventory-variable settings override the play:
        # connection/port come from the inventory vars, the rest from
        # the task object.
        mock_task = MagicMock()
        mock_task.connection = 'mocktask'
        mock_task.remote_user = 'mocktask'
        mock_task.no_log = mock_play.no_log
        mock_task.become = True
        mock_task.become_method = 'mocktask'
        mock_task.become_user = 'mocktaskroot'
        mock_task.become_pass = 'mocktaskpass'
        mock_task._local_action = False
        mock_task.delegate_to = None

        all_vars = dict(
            ansible_connection = 'mock_inventory',
            ansible_ssh_port = 4321,
        )

        mock_templar = MagicMock()

        play_context = PlayContext(play=mock_play, options=options)
        play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
        self.assertEqual(play_context.connection, 'mock_inventory')
        self.assertEqual(play_context.remote_user, 'mocktask')
        self.assertEqual(play_context.port, 4321)
        self.assertEqual(play_context.no_log, True)
        self.assertEqual(play_context.become, True)
        self.assertEqual(play_context.become_method, "mocktask")
        self.assertEqual(play_context.become_user, "mocktaskroot")
        self.assertEqual(play_context.become_pass, "mocktaskpass")

        # no_log flips off when the task disables it.
        mock_task.no_log = False
        play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
        self.assertEqual(play_context.no_log, False)

    def test_play_context_make_become_cmd(self):
        (options, args) = self._parser.parse_args([])
        play_context = PlayContext(options=options)

        default_cmd = "/bin/foo"
        default_exe = "/bin/bash"
        # Expected executables/flags for each privilege-escalation method.
        sudo_exe = C.DEFAULT_SUDO_EXE or 'sudo'
        sudo_flags = C.DEFAULT_SUDO_FLAGS
        su_exe = C.DEFAULT_SU_EXE or 'su'
        su_flags = C.DEFAULT_SU_FLAGS or ''
        pbrun_exe = 'pbrun'
        pbrun_flags = ''
        pfexec_exe = 'pfexec'
        pfexec_flags = ''
        doas_exe = 'doas'
        doas_flags = ' -n -u foo '
        dzdo_exe = 'dzdo'

        # Without become, the command passes through untouched.
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
        self.assertEqual(cmd, default_cmd)

        play_context.become = True
        play_context.become_user = 'foo'

        play_context.become_method = 'sudo'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, """%s %s -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags, play_context.become_user, default_exe, play_context.success_key, default_cmd))
        # With a password, sudo gains a prompt (-p) and drops -n.
        play_context.become_pass = 'testpass'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
        self.assertEqual(cmd, """%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n',''), play_context.prompt, play_context.become_user, default_exe, play_context.success_key, default_cmd))

        play_context.become_pass = None
        play_context.become_method = 'su'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, """%s %s -c '%s -c '"'"'echo %s; %s'"'"''""" % (su_exe, play_context.become_user, default_exe, play_context.success_key, default_cmd))

        play_context.become_method = 'pbrun'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, """%s -b %s -u %s 'echo %s; %s'""" % (pbrun_exe, pbrun_flags, play_context.become_user, play_context.success_key, default_cmd))

        play_context.become_method = 'pfexec'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, '''%s %s "'echo %s; %s'"''' % (pfexec_exe, pfexec_flags, play_context.success_key, default_cmd))

        play_context.become_method = 'doas'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, """%s %s echo %s && %s %s env ANSIBLE=true %s""" % (doas_exe, doas_flags, play_context.success_key, doas_exe, doas_flags, default_cmd))

        # Unknown methods must raise rather than build a bogus command.
        play_context.become_method = 'bad'
        self.assertRaises(AnsibleError, play_context.make_become_cmd, cmd=default_cmd, executable="/bin/bash")

        play_context.become_method = 'dzdo'
        cmd = play_context.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
        self.assertEqual(cmd, """%s -u %s %s -c 'echo %s; %s'""" % (dzdo_exe, play_context.become_user, default_exe, play_context.success_key, default_cmd))
class TestTaskAndVariableOverrride(unittest.TestCase):
    # NOTE(review): the class name carries a typo ("Overrride"); renaming
    # would change the public interface, so it is only flagged here.

    # Each entry: (case name, inventory variables fed in, expected
    # PlayContext attribute values after the override).
    inventory_vars = (
        ('preferred_names',
         dict(ansible_connection='local',
              ansible_user='ansibull',
              ansible_become_user='ansibull',
              ansible_become_method='su',
              ansible_become_pass='ansibullwuzhere',),
         dict(connection='local',
              remote_user='ansibull',
              become_user='ansibull',
              become_method='su',
              become_pass='ansibullwuzhere',)
         ),
        ('alternate_names',
         dict(ansible_become_password='ansibullwuzhere',),
         dict(become_pass='ansibullwuzhere',)
         ),
        ('deprecated_names',
         dict(ansible_ssh_user='ansibull',
              ansible_sudo_user='ansibull',
              ansible_sudo_pass='ansibullwuzhere',),
         dict(remote_user='ansibull',
              become_method='sudo',
              become_user='ansibull',
              become_pass='ansibullwuzhere',)
         ),
        ('deprecated_names2',
         dict(ansible_ssh_user='ansibull',
              ansible_su_user='ansibull',
              ansible_su_pass='ansibullwuzhere',),
         dict(remote_user='ansibull',
              become_method='su',
              become_user='ansibull',
              become_pass='ansibullwuzhere',)
         ),
        ('deprecated_alt_names',
         dict(ansible_sudo_password='ansibullwuzhere',),
         dict(become_method='sudo',
              become_pass='ansibullwuzhere',)
         ),
        ('deprecated_alt_names2',
         dict(ansible_su_password='ansibullwuzhere',),
         dict(become_method='su',
              become_pass='ansibullwuzhere',)
         ),
        ('deprecated_and_preferred_names',
         dict(ansible_user='ansibull',
              ansible_ssh_user='badbull',
              ansible_become_user='ansibull',
              ansible_sudo_user='badbull',
              ansible_become_method='su',
              ansible_become_pass='ansibullwuzhere',
              ansible_sudo_pass='badbull',
              ),
         dict(connection='local',
              remote_user='ansibull',
              become_user='ansibull',
              become_method='su',
              become_pass='ansibullwuzhere',)
         ),
    )

    def setUp(self):
        parser = CLI.base_parser(
            runas_opts = True,
            meta_opts = True,
            runtask_opts = True,
            vault_opts = True,
            async_opts = True,
            connect_opts = True,
            subset_opts = True,
            check_opts = True,
            inventory_opts = True,
        )
        (options, args) = parser.parse_args(['-vv', '--check'])

        mock_play = MagicMock()
        mock_play.connection = 'mock'
        mock_play.remote_user = 'mock'
        mock_play.port = 1234
        mock_play.become = True
        mock_play.become_method = 'mock'
        mock_play.become_user = 'mockroot'
        mock_play.no_log = True

        self.play_context = PlayContext(play=mock_play, options=options)

        mock_task = MagicMock()
        mock_task.connection = mock_play.connection
        mock_task.remote_user = mock_play.remote_user
        mock_task.no_log = mock_play.no_log
        mock_task.become = mock_play.become
        # NOTE(review): "becom_method" is a typo.  MagicMock silently
        # fabricates the attribute, so become_method ends up a fresh
        # MagicMock instead of the intended 'mock' string.
        mock_task.become_method = mock_play.becom_method
        mock_task.become_user = mock_play.become_user
        mock_task.become_pass = 'mocktaskpass'
        mock_task._local_action = False
        mock_task.delegate_to = None
        self.mock_task = mock_task

        self.mock_templar = MagicMock()

    def tearDown(self):
        pass

    def _check_vars_overridden(self):
        # NOTE(review): this helper references play_context, mock_task,
        # all_vars and mock_templar, none of which are defined in this
        # scope -- it raises NameError if ever executed.
        self.assertEqual(play_context.connection, 'mock_inventory')
        self.assertEqual(play_context.remote_user, 'mocktask')
        self.assertEqual(play_context.port, 4321)
        self.assertEqual(play_context.no_log, True)
        self.assertEqual(play_context.become, True)
        self.assertEqual(play_context.become_method, "mocktask")
        self.assertEqual(play_context.become_user, "mocktaskroot")
        self.assertEqual(play_context.become_pass, "mocktaskpass")

        mock_task.no_log = False
        play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
        self.assertEqual(play_context.no_log, False)

    def test_override_magic_variables(self):
        # NOTE(review): play_context is read before it is ever assigned
        # (UnboundLocalError) and all_vars/mock_play/mock_task are not
        # defined here -- this test cannot run as written.
        play_context = play_context.set_task_and_variable_override(task=self.mock_task, variables=all_vars, templar=self.mock_templar)
        mock_play.connection = 'mock'
        mock_play.remote_user = 'mock'
        mock_play.port = 1234
        mock_play.become_method = 'mock'
        mock_play.become_user = 'mockroot'
        mock_task.become_pass = 'mocktaskpass'

        # Inventory vars override things set from cli vars (--become, -user,
        # etc... [notably, not --extravars])
        # NOTE(review): "_check_vars_overriden" is a typo (the helper above
        # is spelled _check_vars_overridden), and unittest.TestCase does
        # not execute yield-style generator tests, so this loop's checks
        # never run under unittest.
        for test_name, all_vars, expected in self.inventory_vars:
            yield self._check_vars_overriden, test_name, all_vars, expected
| gpl-3.0 |
pylbert/upm | examples/python/urm37-uart.py | 6 | 2109 | #!/usr/bin/env python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_urm37 as sensorObj
def main():
    # Instantiate a URM37 sensor on UART 0, with the reset pin on D2
    urm = sensorObj.URM37(0, 2)

    ## Exit handlers ##
    # Suppress the stack trace python would normally print on Ctrl-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # Runs when the interpreter exits
    def exitHandler():
        print("Exiting")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    # Poll the URM37 twice a second, reporting the measured distance in
    # cm and the temperature in degrees C.
    while True:
        print("Detected distance (cm):", urm.getDistance())
        print("Temperature (C):", urm.getTemperature())
        time.sleep(0.5)
if __name__ == '__main__':
main()
| mit |
hujiajie/chromium-crosswalk | tools/export_tarball/export_v8_tarball.py | 118 | 3960 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a tarball with V8 sources, but without .svn directories.
This allows easy packaging of V8, synchronized with browser releases.
Example usage:
export_v8_tarball.py /foo/bar
The above will create file /foo/bar/v8-VERSION.tar.bz2 if it doesn't exist.
"""
import optparse
import os
import re
import subprocess
import sys
import tarfile
_V8_MAJOR_VERSION_PATTERN = re.compile(r'#define\s+MAJOR_VERSION\s+(.*)')
_V8_MINOR_VERSION_PATTERN = re.compile(r'#define\s+MINOR_VERSION\s+(.*)')
_V8_BUILD_NUMBER_PATTERN = re.compile(r'#define\s+BUILD_NUMBER\s+(.*)')
_V8_PATCH_LEVEL_PATTERN = re.compile(r'#define\s+PATCH_LEVEL\s+(.*)')
_V8_PATTERNS = [
_V8_MAJOR_VERSION_PATTERN,
_V8_MINOR_VERSION_PATTERN,
_V8_BUILD_NUMBER_PATTERN,
_V8_PATCH_LEVEL_PATTERN]
_NONESSENTIAL_DIRS = (
'third_party/icu',
)
def GetV8Version(v8_directory):
  """
  Returns version number as string based on the string
  contents of version.cc file.
  """
  version_path = os.path.join(v8_directory, 'src', 'version.cc')
  with open(version_path) as version_file:
    version_contents = version_file.read()

  components = [pattern.search(version_contents).group(1).strip()
                for pattern in _V8_PATTERNS]
  # Drop a trailing zero patch level (e.g. 3.14.5.0 -> 3.14.5).
  if components[-1] == '0':
    components.pop()
  return '.'.join(components)
def GetSourceDirectory():
  """Return the real path of the chromium 'src' directory, assumed to sit
  three levels above the directory holding this script."""
  here = os.path.dirname(__file__)
  return os.path.realpath(os.path.join(here, '..', '..', '..', 'src'))
def GetV8Directory():
  """Return the path of the v8 checkout inside the chromium src directory."""
  return os.path.join(GetSourceDirectory(), 'v8')
# Workaround lack of the exclude parameter in add method in python-2.4.
# TODO(phajdan.jr): remove the workaround when it's not needed on the bot.
class MyTarFile(tarfile.TarFile):
  def set_remove_nonessential_files(self, remove):
    """Toggle stripping of _NONESSENTIAL_DIRS contents during add().

    Must be called before add(); the flag is stored in a name-mangled
    attribute that add() reads unconditionally.
    """
    self.__remove_nonessential_files = remove

  def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
    """Add a file/directory to the archive, skipping VCS metadata and,
    optionally, the contents of non-essential directories.

    NOTE(review): the ``exclude`` and ``filter`` arguments are accepted
    for signature compatibility but deliberately ignored (see the
    python-2.4 workaround comment above).
    """
    head, tail = os.path.split(name)
    # Never package version-control metadata.
    if tail in ('.svn', '.git'):
      return

    if self.__remove_nonessential_files:
      # Remove contents of non-essential directories, but preserve gyp files,
      # so that build/gyp_chromium can work.
      for nonessential_dir in _NONESSENTIAL_DIRS:
        dir_path = os.path.join(GetV8Directory(), nonessential_dir)
        if (name.startswith(dir_path) and
            os.path.isfile(name) and
            'gyp' not in name):
          return

    # TarFile.add recurses through self.add for directory contents, so the
    # filtering above applies to every nested entry as well.
    tarfile.TarFile.add(self, name, arcname=arcname, recursive=recursive)
def main(argv):
  """Package the in-tree V8 checkout as full and "lite" bzip2 tarballs.

  argv: arguments after the program name; exactly one is expected -- the
        output directory.  Returns a process exit code (0 on success).
  """
  parser = optparse.OptionParser()
  options, args = parser.parse_args(argv)

  if len(args) != 1:
    print 'You must provide only one argument: output file directory'
    return 1

  v8_directory = GetV8Directory()
  if not os.path.exists(v8_directory):
    print 'Cannot find the v8 directory.'
    return 1

  v8_version = GetV8Version(v8_directory)
  print 'Packaging V8 version %s...' % v8_version
  # Pull in V8's dependencies first so the packaged tree is buildable.
  subprocess.check_call(["make", "dependencies"], cwd=v8_directory)

  output_basename = 'v8-%s' % v8_version

  # Package full tarball.  An existing archive is never overwritten.
  output_fullname = os.path.join(args[0], output_basename + '.tar.bz2')
  if not os.path.exists(output_fullname):
    archive = MyTarFile.open(output_fullname, 'w:bz2')
    archive.set_remove_nonessential_files(False)
    try:
      archive.add(v8_directory, arcname=output_basename)
    finally:
      archive.close()

  # Package lite tarball (non-essential directories stripped).
  output_fullname = os.path.join(args[0], output_basename + '-lite.tar.bz2')
  if not os.path.exists(output_fullname):
    archive = MyTarFile.open(output_fullname, 'w:bz2')
    archive.set_remove_nonessential_files(True)
    try:
      archive.add(v8_directory, arcname=output_basename)
    finally:
      archive.close()

  return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
LIKAIMO/MissionPlanner | Lib/site-packages/scipy/lib/lapack/tests/common.py | 63 | 1358 | import numpy as np
from scipy.lib.lapack import flapack, clapack
# Map LAPACK routine names to the scalar dtype they operate on:
# single-precision routines (s*) use float32, double-precision (d*) the
# builtin float. The original literal repeated the 'ssyevr'/'dsyevr' keys
# (harmless but confusing -- later entries silently win) and used np.float,
# which was merely an alias of the builtin float and was removed in
# NumPy 1.24.
FUNCS_TP = {'ssygv': np.float32,
            'dsygv': float,
            'ssygvd': np.float32,
            'dsygvd': float,
            'ssyev': np.float32,
            'dsyev': float,
            'ssyevr': np.float32,
            'dsyevr': float,
            'sgehrd': np.float32,
            'dgehrd': float,
            'sgebal': np.float32,
            'dgebal': float}
# Test FLAPACK if not empty. scipy builds an 'empty_module' placeholder when
# a wrapper library was not compiled in.
if hasattr(flapack, 'empty_module'):
    FLAPACK_IS_EMPTY = True
else:
    FLAPACK_IS_EMPTY = False

# Test CLAPACK if not empty and not the same as flapack (scipy aliases
# clapack to flapack when no ATLAS/CLAPACK build is available).
if hasattr(clapack, 'empty_module') or (clapack == flapack):
    CLAPACK_IS_EMPTY = True
else:
    CLAPACK_IS_EMPTY = False

# Routines exercised by the test suite.
funcs = ['ssygv', 'dsygv', 'ssygvd', 'dsygvd', 'ssyev', 'dsyev', 'ssyevr',
         'dsyevr', 'sgehrd', 'dgehrd', 'sgebal', 'dgebal']

# FUNCS_FLAPACK maps each routine name to its flapack callable, or is None
# when flapack itself is empty.
if not FLAPACK_IS_EMPTY:
    FUNCS_FLAPACK = {}
    for f in funcs:
        FUNCS_FLAPACK[f] = getattr(flapack, f)
else:
    FUNCS_FLAPACK = None

# FUNCS_CLAPACK is looked up routine-by-routine: individual entries may be
# None because CLAPACK does not provide every routine.
if not CLAPACK_IS_EMPTY:
    FUNCS_CLAPACK = {}
    for f in funcs:
        try:
            FUNCS_CLAPACK[f] = getattr(clapack, f)
        except AttributeError:
            FUNCS_CLAPACK[f] = None
else:
    FUNCS_CLAPACK = None
PREC = {np.float32: 5, np.float: 12}
| gpl-3.0 |
Harunx9/Transaltors | Lexer/Lexer/env/Lib/site-packages/pip/backwardcompat/__init__.py | 394 | 3756 | """Stuff that differs in different Python versions and platform
distributions."""
import os
import imp
import sys
import site
__all__ = ['WindowsError']

# True on Python 3.2+ where compiled bytecode lives in __pycache__.
uses_pycache = hasattr(imp, 'cache_from_source')


class NeverUsedException(Exception):
    """this exception should never be raised"""

# Make WindowsError catchable on every platform: on non-Windows it is bound
# to a sentinel exception type that is never actually raised.
try:
    WindowsError = WindowsError
except NameError:
    WindowsError = NeverUsedException

# Same trick for PermissionError, which only exists on newer interpreters.
try:
    #new in Python 3.3
    PermissionError = PermissionError
except NameError:
    PermissionError = NeverUsedException

# Encoding of the original stdout; used when decoding console output.
console_encoding = sys.__stdout__.encoding
# Bind one set of names for objects that moved or changed between
# Python 2 and Python 3.
if sys.version_info >= (3,):
    from io import StringIO, BytesIO
    from functools import reduce
    from urllib.error import URLError, HTTPError
    from queue import Queue, Empty
    from urllib.request import url2pathname, urlretrieve, pathname2url
    from email import message as emailmessage
    import urllib.parse as urllib
    import urllib.request as urllib2
    import configparser as ConfigParser
    import xmlrpc.client as xmlrpclib
    import urllib.parse as urlparse
    import http.client as httplib

    def cmp(a, b):
        # Python 3 dropped the builtin cmp(); emulate it.
        return (a > b) - (a < b)

    def b(s):
        # Text -> bytes.
        return s.encode('utf-8')

    def u(s):
        # Bytes -> text.
        return s.decode('utf-8')

    def console_to_str(s):
        # Decode console output with the console's encoding, falling back
        # to UTF-8 when that fails.
        try:
            return s.decode(console_encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def get_http_message_param(http_message, param, default_value):
        # email.message.Message grew get_param() in Python 3.
        return http_message.get_param(param, default_value)

    bytes = bytes
    string_types = (str,)
    raw_input = input
else:
    from cStringIO import StringIO
    from urllib2 import URLError, HTTPError
    from Queue import Queue, Empty
    from urllib import url2pathname, urlretrieve, pathname2url
    from email import Message as emailmessage
    import urllib
    import urllib2
    import urlparse
    import ConfigParser
    import xmlrpclib
    import httplib

    def b(s):
        # Native str is already bytes on Python 2.
        return s

    def u(s):
        return s

    def console_to_str(s):
        return s

    def get_http_message_param(http_message, param, default_value):
        result = http_message.getparam(param)
        return result or default_value

    bytes = str
    string_types = (basestring,)
    reduce = reduce
    cmp = cmp
    raw_input = raw_input
    BytesIO = StringIO
from distutils.sysconfig import get_python_lib, get_python_version
#site.USER_SITE was created in py2.6; None on older interpreters.
user_site = getattr(site, 'USER_SITE', None)
def product(*args, **kwds):
    """Pure-Python equivalent of itertools.product.

    product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111

    Yields tuples forming the cartesian product of the input iterables,
    each repeated `repeat` times (default 1).
    """
    pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
    combos = [[]]
    for pool in pools:
        combos = [combo + [item] for combo in combos for item in pool]
    for combo in combos:
        yield tuple(combo)
def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in backwardcompat due to differences on AIX and Jython,
    that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        # O_NOFOLLOW makes open() fail with ELOOP when path is a symlink,
        # giving a race-free "not a symlink" guarantee.
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        # Bug fix: close the descriptor even if fstat() raises; the original
        # leaked fd on that path.
        try:
            file_uid = os.fstat(fd).st_uid
        finally:
            os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError("%s is a symlink; Will not return uid for symlinks" % path)
    return file_uid
| mit |
Zord13appdesa/python-for-android | python-modules/twisted/twisted/persisted/journal/picklelog.py | 64 | 1199 | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
# -*- test-case-name: twisted.test.test_journal -*-
"""Logging that uses pickles.
TODO: add log that logs to a file.
"""
# twisted imports
from twisted.persisted import dirdbm
from twisted.internet import defer
from zope.interface import implements
# sibling imports
import base
class DirDBMLog:
    """Log pickles to DirDBM directory.

    Each logged command is stored under a monotonically increasing integer
    index (stringified as the DirDBM key), as a pickled (time, command) tuple.
    """

    implements(base.ICommandLog)

    def __init__(self, logPath):
        # Shelf pickles/unpickles values transparently.
        self.db = dirdbm.Shelf(logPath)
        # Resume numbering from the highest index already on disk.
        # NOTE(review): relies on Python 2 semantics -- map() returns a list
        # here; under Python 3 it would be a lazy iterator and `if indexs:`
        # would always be true.
        indexs = map(int, self.db.keys())
        if indexs:
            self.currentIndex = max(indexs)
        else:
            self.currentIndex = 0

    def logCommand(self, command, time):
        """Log a command."""
        self.currentIndex += 1
        self.db[str(self.currentIndex)] = (time, command)
        # Storage is synchronous; return an already-fired Deferred.
        return defer.succeed(1)

    def getCurrentIndex(self):
        """Return index of last command logged."""
        return self.currentIndex

    def getCommandsSince(self, index):
        # Return the (time, command) tuples for entries index..currentIndex,
        # inclusive on both ends.
        result = []
        for i in range(index, self.currentIndex + 1):
            result.append(self.db[str(i)])
        return result
| apache-2.0 |
rogeriofalcone/treeio | knowledge/models.py | 1 | 5381 | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Knowledge base module objects
"""
from django.db import models
from treeio.core.models import Object
from django.core.urlresolvers import reverse
from django.template import defaultfilters
from unidecode import unidecode
# KnowledgeFolder model
class KnowledgeFolder(Object):
    """Hierarchical folder grouping KnowledgeItems; identified by a slugified
    treepath built from its ancestor chain."""
    name = models.CharField(max_length=255)
    details = models.TextField(max_length=255, null=True, blank=True)
    parent = models.ForeignKey('self', blank=True, null=True, related_name='child_set')
    # Slash-separated slug path of all ancestors, e.g. "parent/child/".
    treepath = models.CharField(max_length=800)

    access_inherit = ('parent', '*module', '*user')

    def __unicode__(self):
        return self.name

    class Meta:
        " Type "
        ordering = ['name']

    def get_absolute_url(self):
        "Returns absolute URL of the object"
        try:
            return reverse('knowledge_folder_view', args=[self.treepath])
        except Exception:
            return ""

    def treewalk(self, save=True):
        "Walks up the tree to construct Type treepath"
        # get_tree_path() is presumably provided by Object and yields the
        # ancestor chain root-first -- TODO confirm order.
        treepath = ''
        for folder in self.get_tree_path():
            slug = unicode(folder.name).replace(" ", "-")
            slug = defaultfilters.slugify(unidecode(slug))
            treepath += slug + "/"
        self.treepath = treepath
        if save:
            self.save()
        return self

    def by_path(treePath):
        "Returns a KnowledgeFolder instance matching the given treepath"
        # NOTE(review): filters on unicode(treePath) while the Category and
        # Item variants use unidecode() -- confirm the inconsistency is
        # intentional.
        folder = KnowledgeFolder.objects.filter(treepath=unicode(treePath))
        if folder:
            folder = folder[0]
        else:
            folder = None
        return folder
    by_path = staticmethod(by_path)

    def save(self, *args, **kwargs):
        "Overridden save() method to compute treepath and full names"
        self.treewalk(save=False)
        super(KnowledgeFolder, self).save(*args, **kwargs)
# KnowledgeCategory model
class KnowledgeCategory(Object):
    """ Knowledge Category that contains Knowledge Items"""
    name = models.CharField(max_length=255)
    details = models.TextField(max_length=255, null=True, blank=True)
    # Slugified name followed by "/", e.g. "my-category/".
    treepath = models.CharField(max_length=800)

    def __unicode__(self):
        return self.name

    class Meta:
        " Category "
        ordering = ['name']

    def get_absolute_url(self):
        "Returns absolute URL of the object"
        try:
            return reverse('knowledge_category_view', args=[self.treepath])
        except Exception:
            return ""

    def treewalk(self, save=True):
        "Computes treepath; categories are flat, so it is just the slugified name"
        # Unlike KnowledgeFolder.treewalk there is no ancestor chain to walk.
        slug = unicode(self.name).replace(" ", "-")
        slug = defaultfilters.slugify(unidecode(slug))
        treepath = slug + "/"
        self.treepath = treepath
        if save:
            self.save()
        return self

    def by_path(path):
        "Returns a Knowledge Category instance matching the given treepath"
        category = KnowledgeCategory.objects.filter(treepath=unidecode(path))
        if category:
            category = category[0]
        else:
            category = None
        return category
    by_path = staticmethod(by_path)

    def save(self, *args, **kwargs):
        "Overridden save() method to compute treepath and full names"
        self.treewalk(save=False)
        super(KnowledgeCategory, self).save(*args, **kwargs)
# KnowledgeItem model
class KnowledgeItem(Object):
    """ A readable piece of knowledge """
    name = models.CharField(max_length=255)
    folder = models.ForeignKey(KnowledgeFolder)
    category = models.ForeignKey(KnowledgeCategory, blank=True, null=True, on_delete=models.SET_NULL)
    body = models.TextField(null=True, blank=True)
    # Slugified name followed by "/"; unique only within its folder.
    treepath = models.CharField(max_length=800)

    access_inherit = ('folder', '*module', '*user')

    def __unicode__(self):
        return self.name

    class Meta:
        " Item "
        ordering = ['-last_updated']

    def get_absolute_url(self):
        "Returns absolute URL of the object"
        try:
            return reverse('knowledge_item_view', args=[self.folder.treepath, self.treepath])
        except Exception:
            return ""

    def treewalk(self, save=True):
        "Computes the item's treepath from its slugified name"
        slug = unicode(self.name).replace(" ", "-")
        slug = defaultfilters.slugify(unidecode(slug))
        treepath = slug + "/"
        self.treepath = treepath
        if save:
            self.save()
        return self

    def by_path(treePath, itemPath):
        "Returns a Knowledge Item instance matching the given treepath"
        # Resolve the folder first, then look the item up within that folder.
        folder = KnowledgeFolder.by_path(unidecode(treePath))
        item = KnowledgeItem.objects.filter(treepath=unidecode(itemPath), folder=folder)
        if item:
            item = item[0]
        else:
            item = None
        return item
    by_path = staticmethod(by_path)

    def save(self, *args, **kwargs):
        "Overridden save() method to compute treepath and full names"
        self.treewalk(save=False)
        super(KnowledgeItem, self).save(*args, **kwargs)
| mit |
ronniehedrick/scapeshift | client/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py | 1843 | 1786 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import gyp.generator.ninja as ninja
import unittest
import StringIO
import sys
import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
  """Checks platform-specific output-file naming in ninja.NinjaWriter."""

  def test_BinaryNamesWindows(self):
    # These cannot run on non-Windows as they require a VS installation to
    # correctly handle variable expansion.
    if sys.platform.startswith('win'):
      writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
                                 'build.ninja', 'win')
      spec = { 'target_name': 'wee' }
      self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
          endswith('.exe'))
      self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
          endswith('.dll'))
      self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
          endswith('.lib'))

  def test_BinaryNamesLinux(self):
    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
                               'build.ninja', 'linux')
    spec = { 'target_name': 'wee' }
    # Linux executables get no extension; libraries get the lib prefix plus
    # .so / .a suffixes.
    self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
                                                            'executable'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
        startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
        startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
        endswith('.so'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
        endswith('.a'))


if __name__ == '__main__':
  unittest.main()
| apache-2.0 |
leigh123linux/cinnamon-screensaver | src/dbusdepot/cinnamonClient.py | 2 | 1184 | #!/usr/bin/python3
from gi.repository import Gio, CScreensaver
from dbusdepot.baseClient import BaseClient
class CinnamonClient(BaseClient):
    """
    Simple client to talk to Cinnamon's dbus interface. Currently
    its only use is for attempting to force an exit from overview
    and expo mode (both of which do a fullscreen grab and would prevent
    the screensaver from acquiring one.)
    """
    CINNAMON_SERVICE = "org.Cinnamon"
    CINNAMON_PATH = "/org/Cinnamon"

    def __init__(self):
        # BaseClient wires up self.proxy for the org.Cinnamon name on the
        # session bus.
        super(CinnamonClient, self).__init__(Gio.BusType.SESSION,
                                             CScreensaver.CinnamonProxy,
                                             self.CINNAMON_SERVICE,
                                             self.CINNAMON_PATH)

    def on_client_setup_complete(self):
        # No post-connection setup needed for this client.
        pass

    def exit_expo_and_overview(self):
        # Setting these proxy properties asks Cinnamon to leave both
        # fullscreen modes (presumably mirrored to the matching org.Cinnamon
        # DBus properties by the generated proxy -- TODO confirm).
        if self.ensure_proxy_alive():
            self.proxy.set_property("overview-active", False)
            self.proxy.set_property("expo-active", False)

    def on_failure(self, *args):
        print("Failed to connect to Cinnamon - screensaver will not activate when expo or overview modes are active.")
| gpl-2.0 |
dewtx29/python_ann | project/num/c++/forword.py | 1 | 1567 | from math import cos,sin, factorial
import random
import matplotlib.pyplot as plt
def randominput(n):
    """Return n sample points as [index, value] pairs.

    Index runs 0..n-1; value is a uniform random float in [0, 1).
    """
    return [[index, random.random()] for index in range(0, n)]
def forward(listinput, x):
    """Evaluate the Newton-style series at x.

    For each node i the term is f0(x, i) multiplied by the product of
    (x - x_j) over all earlier nodes j < i, where x_j is listinput[j][0].
    Returns the sum of all terms.
    """
    total = 0.0
    for i in range(len(listinput)):
        coefficient = f0(x, i)
        basis = 1.0
        for j in range(i):
            basis = basis * (x - listinput[j][0])
        total = total + coefficient * basis
    return total
def f0(x, dataSize):
    """Return the dataSize-th derivative of sin evaluated at x.

    The derivatives of sin cycle with period 4: sin, cos, -sin, -cos.
    """
    f = 0.0
    if dataSize % 4 == 0:
        f = sin(x)
    elif dataSize % 4 == 1:
        f = cos(x)
    elif dataSize % 4 == 2:
        f = -sin(x)
    elif dataSize % 4 == 3:
        # Bug fix: the original branch tested `dataSize == 3`, so indices
        # 7, 11, 15, ... fell through and returned 0.0 instead of -cos(x).
        f = -cos(x)
    return f
# --- Script driver: plot the random sample values (green) against the
# --- series evaluated at the same x-coordinates (red).

dotList = []   # y-values of the random nodes
dotList2 = []  # forward() evaluated at the node x-coordinates
xList = []     # tabulated [x, sin(x)] pairs (built but only used in a
               # commented-out debug print)
cur = 0.0
while (cur < 50.0 ) :
    y = sin(cur)
    # dotList.append(y)
    tmpList = [cur, y]
    xList.append(tmpList)
    cur = cur + 1

# 20 random sample points.
listinput = randominput(20)
print listinput

# Collect the raw node values for the green plot.
for i in range(0,len(listinput)):
    dotList.append(listinput[i][1])

# Evaluate the series at each node's x-coordinate for the red plot.
for i in range (0 , len (listinput)):
    #print xList[i]\
    y = forward(listinput , listinput[i][0]);
    #print y
    dotList2.append(y)

# Drop the last three samples before plotting.
dotList = dotList[0:len(dotList)-3]
dotList2 = dotList2[0:len(dotList2)-3]

plt.plot(dotList, 'ro',color="g")
plt.plot(dotList2, 'ro',color="r")
#plt.axis([0, 1000, 0, 1000])
plt.show()

forward(listinput,1.0)
| gpl-3.0 |
Kilhog/odoo | openerp/cli/__init__.py | 185 | 2008 | import logging
import sys
import os
import openerp
from openerp import tools
from openerp.modules import module
_logger = logging.getLogger(__name__)
# Registry of available subcommands, keyed by command name.
commands = {}


class CommandType(type):
    """Metaclass that registers every Command subclass in `commands`."""

    def __init__(cls, name, bases, attrs):
        super(CommandType, cls).__init__(name, bases, attrs)
        # Honor an explicit `name` attribute defined in the class body,
        # defaulting to the lowercased class name. Bug fix: the original
        # used getattr(cls, name, ...) -- i.e. it looked up an attribute
        # named after the class itself, which never exists -- so an explicit
        # `name` was always ignored.
        name = attrs.get('name', cls.__name__.lower())
        cls.name = name
        # The abstract base registers as 'command' and is deliberately
        # excluded from the registry.
        if name != 'command':
            commands[name] = cls
class Command(object):
    """Subclass this class to define new openerp subcommands """
    # Python 2 metaclass declaration: creating any subclass registers it in
    # the module-level `commands` dict via CommandType.
    __metaclass__ = CommandType

    def run(self, args):
        # Default no-op; subclasses override with the command implementation.
        pass
class Help(Command):
    """Display the list of available commands"""
    def run(self, args):
        print "Available commands:\n"
        # Pad names to the longest command so descriptions line up.
        padding = max([len(k) for k in commands.keys()]) + 2
        for k, v in commands.items():
            # Each command's docstring doubles as its one-line description.
            print " %s%s" % (k.ljust(padding, ' '), v.__doc__ or '')
        print "\nUse '%s <command> --help' for individual command help." % sys.argv[0].split(os.path.sep)[-1]
import server
import deploy
import scaffold
import start
def main():
    """CLI entry point: parse the optional leading --addons-path, discover
    subcommands shipped by addon modules, then run the requested command
    (defaulting to 'server' for legacy invocations)."""
    args = sys.argv[1:]

    # The only shared option is '--addons-path=' needed to discover additional
    # commands from modules
    if len(args) > 1 and args[0].startswith('--addons-path=') and not args[1].startswith("-"):
        # parse only the addons-path, do not setup the logger...
        tools.config._parse_config([args[0]])
        args = args[1:]

    # Default legacy command
    command = "server"

    # Subcommand discovery
    if len(args) and not args[0].startswith("-"):
        # Importing an addon package triggers registration of any Command
        # subclasses it defines (via the CommandType metaclass). Logging is
        # silenced so discovery does not spam the console.
        logging.disable(logging.CRITICAL)
        for m in module.get_modules():
            m_path = module.get_module_path(m)
            if os.path.isdir(os.path.join(m_path, 'cli')):
                __import__('openerp.addons.' + m)
        logging.disable(logging.NOTSET)
        command = args[0]
        args = args[1:]

    # Unknown commands are silently ignored.
    if command in commands:
        o = commands[command]()
        o.run(args)
# vim:et:ts=4:sw=4:
| agpl-3.0 |
kaiyuanl/gem5 | src/mem/slicc/ast/CheckAllocateStatementAST.py | 91 | 2239 | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.StatementAST import StatementAST
class CheckAllocateStatementAST(StatementAST):
    """AST node for SLICC's check_allocate(<variable>) statement.

    generate() emits no code; the statement only contributes to resource
    accounting via findResources().
    """

    def __init__(self, slicc, variable):
        # NOTE(review): this calls super *of StatementAST*, i.e. it invokes
        # StatementAST's parent initializer and skips any
        # StatementAST.__init__ -- confirm this is intentional.
        super(StatementAST, self).__init__(slicc)
        self.variable = variable

    def __repr__(self):
        return "[CheckAllocateStatementAst: %r]" % self.variable

    def generate(self, code, return_type):
        # FIXME - check the type of the variable
        # Make sure the variable is valid (attribute access raises if not).
        self.variable.var

    def findResources(self, resources):
        # Bump the required-resource count for this variable by one.
        # Counts are stored as strings in the `resources` dict.
        var = self.variable.var
        res_count = int(resources.get(var, 0))
        resources[var] = str(res_count + 1)
| bsd-3-clause |
zerocool240/plugin.video.amerika | servers/sharpfile.py | 44 | 1445 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para sharpfile
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
    """Always reports the video as available; the sharpfile page is not
    actually probed. Returns (exists, error_message)."""
    logger.info("[sharpfile.py] test_video_exists(page_url='%s')" % page_url)
    return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Stub: media-URL extraction for sharpfile is not implemented, so this
    always returns an empty list."""
    logger.info("[sharpfile.py] get_video_url(page_url='%s')" % page_url)
    video_urls = []
    return video_urls
# Finds this server's videos in the given text.
def find_videos(data):
    """Scan *data* for sharpfile video-page URLs.

    Returns a list of [title, url, server] entries, one per distinct URL,
    preserving first-seen order and skipping duplicates.
    """
    seen = set()
    results = []
    #http://www.sharpfile.com/8fgbj6dtq4xc/house.05x19.pionnerdj.avi.html
    patronvideos = "http://(www.sharpfile.com/.*?\.html)"
    logger.info("[sharpfile.py] find_videos #"+patronvideos+"#")
    for match in re.compile(patronvideos,re.DOTALL).findall(data):
        url = "http://"+match
        if url in seen:
            logger.info(" url duplicada="+url)
            continue
        logger.info(" url="+url)
        results.append( [ "[sharpfile]" , url , 'sharpfile' ] )
        seen.add(url)
    return results
| gpl-2.0 |
snnn/tensorflow | tensorflow/python/ops/distributions/categorical.py | 3 | 12597 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Categorical distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
def _broadcast_cat_event_and_params(event, params, base_dtype):
  """Broadcasts the event or distribution parameters.

  Casts floating-point `event` to int32 and, when their shapes disagree,
  broadcasts `event` and `params` against each other (treating the last
  axis of `params` as the class axis).
  """
  if event.dtype.is_integer:
    pass
  elif event.dtype.is_floating:
    # When `validate_args=True` we've already ensured int/float casting
    # is closed.
    event = math_ops.cast(event, dtype=dtypes.int32)
  else:
    raise TypeError("`value` should have integer `dtype` or "
                    "`self.dtype` ({})".format(base_dtype))
  shape_known_statically = (
      params.shape.ndims is not None and
      params.shape[:-1].is_fully_defined() and
      event.shape.is_fully_defined())
  if not shape_known_statically or params.shape[:-1] != event.shape:
    # Broadcast by multiplying with ones: params picks up event's shape
    # (keeping its trailing class axis), then event picks up params' batch
    # shape.
    params *= array_ops.ones_like(event[..., array_ops.newaxis],
                                  dtype=params.dtype)
    params_shape = array_ops.shape(params)[:-1]
    event *= array_ops.ones(params_shape, dtype=event.dtype)
    if params.shape.ndims is not None:
      event.set_shape(tensor_shape.TensorShape(params.shape[:-1]))
  return event, params
@tf_export("distributions.Categorical")
class Categorical(distribution.Distribution):
  """Categorical distribution.

  The Categorical distribution is parameterized by either probabilities or
  log-probabilities of a set of `K` classes. It is defined over the integers
  `{0, 1, ..., K}`.

  The Categorical distribution is closely related to the `OneHotCategorical` and
  `Multinomial` distributions. The Categorical distribution can be intuited as
  generating samples according to `argmax{ OneHotCategorical(probs) }` itself
  being identical to `argmax{ Multinomial(probs, total_count=1) }`.

  #### Mathematical Details

  The probability mass function (pmf) is,

  ```none
  pmf(k; pi) = prod_j pi_j**[k == j]
  ```

  #### Pitfalls

  The number of classes, `K`, must not exceed:
  - the largest integer representable by `self.dtype`, i.e.,
    `2**(mantissa_bits+1)` (IEEE 754),
  - the maximum `Tensor` index, i.e., `2**31-1`.

  In other words,

  ```python
  K <= min(2**31-1, {
      tf.float16: 2**11,
      tf.float32: 2**24,
      tf.float64: 2**53 }[param.dtype])
  ```

  Note: This condition is validated only when `self.validate_args = True`.

  #### Examples

  Creates a 3-class distribution with the 2nd class being most likely.

  ```python
  dist = Categorical(probs=[0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
          dist.sample(int(n)),
          [0., 2],
          nbins=3),
      dtype=tf.float32) / n
  # ==> array([ 0.1005,  0.5037,  0.3958], dtype=float32)
  ```

  Creates a 3-class distribution with the 2nd class being most likely.
  Parameterized by [logits](https://en.wikipedia.org/wiki/Logit) rather than
  probabilities.

  ```python
  dist = Categorical(logits=np.log([0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
          dist.sample(int(n)),
          [0., 2],
          nbins=3),
      dtype=tf.float32) / n
  # ==> array([0.1045,  0.5047, 0.3908], dtype=float32)
  ```

  Creates a 3-class distribution with the 3rd class being most likely.
  The distribution functions can be evaluated on counts.

  ```python
  # counts is a scalar.
  p = [0.1, 0.4, 0.5]
  dist = Categorical(probs=p)
  dist.prob(0)  # Shape []

  # p will be broadcast to [[0.1, 0.4, 0.5], [0.1, 0.4, 0.5]] to match counts.
  counts = [1, 0]
  dist.prob(counts)  # Shape [2]

  # p will be broadcast to shape [3, 5, 7, 3] to match counts.
  counts = [[...]] # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7, 3]
  ```
  """

  @deprecation.deprecated(
      "2019-01-01",
      "The TensorFlow Distributions library has moved to "
      "TensorFlow Probability "
      "(https://github.com/tensorflow/probability). You "
      "should update all references to use `tfp.distributions` "
      "instead of `tf.distributions`.",
      warn_once=True)
  def __init__(
      self,
      logits=None,
      probs=None,
      dtype=dtypes.int32,
      validate_args=False,
      allow_nan_stats=True,
      name="Categorical"):
    """Initialize Categorical distributions using class log-probabilities.

    Args:
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of logits for each class. Only one of `logits` or
        `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of probabilities for each class. Only one of
        `logits` or `probs` should be passed in.
      dtype: The type of the event samples (default: int32).
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    with ops.name_scope(name, values=[logits, probs]) as name:
      # Normalizes to have both parameterizations available; exactly one of
      # logits/probs may be supplied by the caller.
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          logits=logits,
          probs=probs,
          validate_args=validate_args,
          multidimensional=True,
          name=name)
      if validate_args:
        self._logits = distribution_util.embed_check_categorical_event_shape(
            self._logits)
      # Prefer static (graph-construction-time) shape information where
      # available; otherwise fall back to dynamic shape ops.
      logits_shape_static = self._logits.get_shape().with_rank_at_least(1)
      if logits_shape_static.ndims is not None:
        self._batch_rank = ops.convert_to_tensor(
            logits_shape_static.ndims - 1,
            dtype=dtypes.int32,
            name="batch_rank")
      else:
        with ops.name_scope(name="batch_rank"):
          self._batch_rank = array_ops.rank(self._logits) - 1
      logits_shape = array_ops.shape(self._logits, name="logits_shape")
      if logits_shape_static[-1].value is not None:
        self._event_size = ops.convert_to_tensor(
            logits_shape_static[-1].value,
            dtype=dtypes.int32,
            name="event_size")
      else:
        with ops.name_scope(name="event_size"):
          self._event_size = logits_shape[self._batch_rank]
      if logits_shape_static[:-1].is_fully_defined():
        self._batch_shape_val = constant_op.constant(
            logits_shape_static[:-1].as_list(),
            dtype=dtypes.int32,
            name="batch_shape")
      else:
        with ops.name_scope(name="batch_shape"):
          self._batch_shape_val = logits_shape[:-1]
    super(Categorical, self).__init__(
        dtype=dtype,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._logits,
                       self._probs],
        name=name)

  @property
  def event_size(self):
    """Scalar `int32` tensor: the number of classes."""
    return self._event_size

  @property
  def logits(self):
    """Vector of coordinatewise logits."""
    return self._logits

  @property
  def probs(self):
    """Vector of coordinatewise probabilities."""
    return self._probs

  def _batch_shape_tensor(self):
    return array_ops.identity(self._batch_shape_val)

  def _batch_shape(self):
    # All but the last (class) axis of logits forms the batch shape.
    return self.logits.get_shape()[:-1]

  def _event_shape_tensor(self):
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    # Samples are scalar class indices.
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # multinomial() expects 2-D logits, so flatten the batch dims first.
    if self.logits.get_shape().ndims == 2:
      logits_2d = self.logits
    else:
      logits_2d = array_ops.reshape(self.logits, [-1, self.event_size])
    # Draw as int64 only when self.dtype is wider than 32 bits.
    sample_dtype = dtypes.int64 if self.dtype.size > 4 else dtypes.int32
    draws = random_ops.multinomial(
        logits_2d, n, seed=seed, output_dtype=sample_dtype)
    # Transpose so the sample dimension leads: [n] + batch_shape.
    draws = array_ops.reshape(
        array_ops.transpose(draws),
        array_ops.concat([[n], self.batch_shape_tensor()], 0))
    return math_ops.cast(draws, self.dtype)

  def _cdf(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, probs = _broadcast_cat_event_and_params(
        k, self.probs, base_dtype=self.dtype.base_dtype)

    # batch-flatten everything in order to use `sequence_mask()`.
    batch_flattened_probs = array_ops.reshape(probs,
                                              (-1, self._event_size))
    batch_flattened_k = array_ops.reshape(k, [-1])

    # CDF(k) = sum of class probabilities for classes strictly below k;
    # sequence_mask selects exactly those entries.
    to_sum_over = array_ops.where(
        array_ops.sequence_mask(batch_flattened_k, self._event_size),
        batch_flattened_probs,
        array_ops.zeros_like(batch_flattened_probs))
    batch_flattened_cdf = math_ops.reduce_sum(to_sum_over, axis=-1)
    # Reshape back to the shape of the argument.
    return array_ops.reshape(batch_flattened_cdf, array_ops.shape(k))

  def _log_prob(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, logits = _broadcast_cat_event_and_params(
        k, self.logits, base_dtype=self.dtype.base_dtype)
    # log p(k) is exactly the negative sparse softmax cross entropy.
    return -nn_ops.sparse_softmax_cross_entropy_with_logits(labels=k,
                                                            logits=logits)

  def _entropy(self):
    # H = -sum_i p_i log p_i, computed from logits for numerical stability.
    return -math_ops.reduce_sum(
        nn_ops.log_softmax(self.logits) * self.probs, axis=-1)

  def _mode(self):
    ret = math_ops.argmax(self.logits, axis=self._batch_rank)
    ret = math_ops.cast(ret, self.dtype)
    ret.set_shape(self.batch_shape)
    return ret
@kullback_leibler.RegisterKL(Categorical, Categorical)
def _kl_categorical_categorical(a, b, name=None):
  """Calculate the batched KL divergence KL(a || b) with a and b Categorical.

  Args:
    a: instance of a Categorical distribution object.
    b: instance of a Categorical distribution object.
    name: (optional) Name to use for created operations.
      default is "kl_categorical_categorical".

  Returns:
    Batchwise KL(a || b)
  """
  with ops.name_scope(name, "kl_categorical_categorical",
                      values=[a.logits, b.logits]):
    # KL(a || b) = sum_i p_a(i) * (log p_a(i) - log p_b(i)).
    # (A previous comment here described a different formula; the code below
    # matches the expression above.)
    delta_log_probs1 = (nn_ops.log_softmax(a.logits) -
                        nn_ops.log_softmax(b.logits))
    return math_ops.reduce_sum(nn_ops.softmax(a.logits) * delta_log_probs1,
                               axis=-1)
| apache-2.0 |
bryceliu/ansible | lib/ansible/parsing/utils/jsonify.py | 203 | 1451 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
import json
except ImportError:
import simplejson as json
def jsonify(result, format=False):
    ''' format JSON output (uncompressed or uncompressed) '''
    # Serialize `result` (a dict, or None) to a JSON string. `format=True`
    # pretty-prints with 4-space indentation.
    if result is None:
        return "{}"
    result2 = result.copy()
    for key, value in result2.items():
        # Python 2: re-decode byte strings to unicode so the dump with
        # ensure_ascii=False does not fail on non-ASCII bytes.
        if type(value) is str:
            result2[key] = value.decode('utf-8', 'ignore')
    indent = None
    if format:
        indent = 4
    try:
        return json.dumps(result2, sort_keys=True, indent=indent, ensure_ascii=False)
    except UnicodeDecodeError:
        # Fall back to ASCII-escaped output if a value still cannot be
        # decoded.
        return json.dumps(result2, sort_keys=True, indent=indent)
| gpl-3.0 |
newerthcom/savagerebirth | libs/python-2.72/Lib/copy.py | 253 | 11519 | """Generic (shallow and deep) copying operations.
Interface summary:
import copy
x = copy.copy(y) # make a shallow copy of y
x = copy.deepcopy(y) # make a deep copy of y
For module specific errors, copy.Error is raised.
The difference between shallow and deep copying is only relevant for
compound objects (objects that contain other objects, like lists or
class instances).
- A shallow copy constructs a new compound object and then (to the
extent possible) inserts *the same objects* into it that the
original contains.
- A deep copy constructs a new compound object and then, recursively,
inserts *copies* into it of the objects found in the original.
Two problems often exist with deep copy operations that don't exist
with shallow copy operations:
a) recursive objects (compound objects that, directly or indirectly,
contain a reference to themselves) may cause a recursive loop
b) because deep copy copies *everything* it may copy too much, e.g.
administrative data structures that should be shared even between
copies
Python's deep copy operation avoids these problems by:
a) keeping a table of objects already copied during the current
copying pass
b) letting user-defined classes override the copying operation or the
set of components copied
This version does not copy types like module, class, function, method,
nor stack trace, stack frame, nor file, socket, window, nor array, nor
any similar types.
Classes can use the same interfaces to control copying that they use
to control pickling: they can define methods called __getinitargs__(),
__getstate__() and __setstate__(). See the documentation for module
"pickle" for information on these methods.
"""
import types
import weakref
from copy_reg import dispatch_table
class Error(Exception):
    """Raised when an object cannot be copied (shallowly or deeply)."""
    pass
error = Error # backward compatibility
try:
from org.python.core import PyStringMap
except ImportError:
PyStringMap = None
__all__ = ["Error", "copy", "deepcopy"]
def copy(x):
    """Shallow copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    klass = type(x)

    # Fast path: a type-specific copier registered in the dispatch table.
    handler = _copy_dispatch.get(klass)
    if handler:
        return handler(x)

    # Next, honour a __copy__ method defined on the class.
    handler = getattr(klass, "__copy__", None)
    if handler:
        return handler(x)

    # Fall back to the pickle protocol: copy_reg dispatch table first,
    # then __reduce_ex__ (protocol 2), then plain __reduce__.
    reduce_fn = dispatch_table.get(klass)
    if reduce_fn:
        rv = reduce_fn(x)
    else:
        reduce_fn = getattr(x, "__reduce_ex__", None)
        if reduce_fn:
            rv = reduce_fn(2)
        else:
            reduce_fn = getattr(x, "__reduce__", None)
            if not reduce_fn:
                raise Error("un(shallow)copyable object of type %s" % klass)
            rv = reduce_fn()

    return _reconstruct(x, rv, 0)
_copy_dispatch = d = {}
def _copy_immutable(x):
    # Immutable objects (numbers, strings, tuples, classes, functions, ...)
    # are their own shallow copy: sharing them is safe.
    return x
for t in (type(None), int, long, float, bool, str, tuple,
frozenset, type, xrange, types.ClassType,
types.BuiltinFunctionType, type(Ellipsis),
types.FunctionType, weakref.ref):
d[t] = _copy_immutable
for name in ("ComplexType", "UnicodeType", "CodeType"):
t = getattr(types, name, None)
if t is not None:
d[t] = _copy_immutable
def _copy_with_constructor(x):
    # list/dict/set shallow-copy by passing the original to their constructor.
    return type(x)(x)
for t in (list, dict, set):
d[t] = _copy_with_constructor
def _copy_with_copy_method(x):
    # Types such as PyStringMap expose their own .copy() method; delegate.
    return x.copy()
if PyStringMap is not None:
d[PyStringMap] = _copy_with_copy_method
def _copy_inst(x):
if hasattr(x, '__copy__'):
return x.__copy__()
if hasattr(x, '__getinitargs__'):
args = x.__getinitargs__()
y = x.__class__(*args)
else:
y = _EmptyClass()
y.__class__ = x.__class__
if hasattr(x, '__getstate__'):
state = x.__getstate__()
else:
state = x.__dict__
if hasattr(y, '__setstate__'):
y.__setstate__(state)
else:
y.__dict__.update(state)
return y
d[types.InstanceType] = _copy_inst
del d
def deepcopy(x, memo=None, _nil=[]):
    """Deep copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    if memo is None:
        memo = {}

    # Return the cached copy if x was already copied during this pass.
    # _nil serves as a miss sentinel (None could be a legitimate entry).
    key = id(x)
    hit = memo.get(key, _nil)
    if hit is not _nil:
        return hit

    cls = type(x)

    copier = _deepcopy_dispatch.get(cls)
    if copier:
        y = copier(x, memo)
    else:
        try:
            issc = issubclass(cls, type)
        except TypeError: # cls is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            # Classes themselves are treated as atomic values.
            y = _deepcopy_atomic(x, memo)
        else:
            copier = getattr(x, "__deepcopy__", None)
            if copier:
                y = copier(memo)
            else:
                # Fall back to the pickle protocol, mirroring copy().
                reductor = dispatch_table.get(cls)
                if reductor:
                    rv = reductor(x)
                else:
                    reductor = getattr(x, "__reduce_ex__", None)
                    if reductor:
                        rv = reductor(2)
                    else:
                        reductor = getattr(x, "__reduce__", None)
                        if not reductor:
                            raise Error(
                                "un(deep)copyable object of type %s" % cls)
                        rv = reductor()
                y = _reconstruct(x, rv, 1, memo)

    memo[key] = y
    _keep_alive(x, memo) # Make sure x lives at least as long as d
    return y
_deepcopy_dispatch = d = {}
def _deepcopy_atomic(x, memo):
    # Atomic (immutable, non-container) values are shared, not copied;
    # the memo parameter is intentionally unused.
    return x
d[type(None)] = _deepcopy_atomic
d[type(Ellipsis)] = _deepcopy_atomic
d[int] = _deepcopy_atomic
d[long] = _deepcopy_atomic
d[float] = _deepcopy_atomic
d[bool] = _deepcopy_atomic
try:
d[complex] = _deepcopy_atomic
except NameError:
pass
d[str] = _deepcopy_atomic
try:
d[unicode] = _deepcopy_atomic
except NameError:
pass
try:
d[types.CodeType] = _deepcopy_atomic
except AttributeError:
pass
d[type] = _deepcopy_atomic
d[xrange] = _deepcopy_atomic
d[types.ClassType] = _deepcopy_atomic
d[types.BuiltinFunctionType] = _deepcopy_atomic
d[types.FunctionType] = _deepcopy_atomic
d[weakref.ref] = _deepcopy_atomic
def _deepcopy_list(x, memo):
y = []
memo[id(x)] = y
for a in x:
y.append(deepcopy(a, memo))
return y
d[list] = _deepcopy_list
def _deepcopy_tuple(x, memo):
y = []
for a in x:
y.append(deepcopy(a, memo))
d = id(x)
try:
return memo[d]
except KeyError:
pass
for i in range(len(x)):
if x[i] is not y[i]:
y = tuple(y)
break
else:
y = x
memo[d] = y
return y
d[tuple] = _deepcopy_tuple
def _deepcopy_dict(x, memo):
    # Deep-copy a dict.  The empty copy is registered in the memo *before*
    # the items are copied so that dicts which (indirectly) contain
    # themselves terminate instead of recursing forever.
    y = {}
    memo[id(x)] = y
    for key, value in x.iteritems():
        # Keys are deep-copied too: they may be tuples holding mutable state.
        y[deepcopy(key, memo)] = deepcopy(value, memo)
    return y
d[dict] = _deepcopy_dict
if PyStringMap is not None:
d[PyStringMap] = _deepcopy_dict
def _deepcopy_method(x, memo): # Copy instance methods
    # Rebuild the bound method from its function (shared, immutable),
    # a deep copy of the instance it is bound to, and its defining class.
    return type(x)(x.im_func, deepcopy(x.im_self, memo), x.im_class)
_deepcopy_dispatch[types.MethodType] = _deepcopy_method
def _keep_alive(x, memo):
"""Keeps a reference to the object x in the memo.
Because we remember objects by their id, we have
to assure that possibly temporary objects are kept
alive by referencing them.
We store a reference at the id of the memo, which should
normally not be used unless someone tries to deepcopy
the memo itself...
"""
try:
memo[id(memo)].append(x)
except KeyError:
# aha, this is the first one :-)
memo[id(memo)]=[x]
def _deepcopy_inst(x, memo):
if hasattr(x, '__deepcopy__'):
return x.__deepcopy__(memo)
if hasattr(x, '__getinitargs__'):
args = x.__getinitargs__()
args = deepcopy(args, memo)
y = x.__class__(*args)
else:
y = _EmptyClass()
y.__class__ = x.__class__
memo[id(x)] = y
if hasattr(x, '__getstate__'):
state = x.__getstate__()
else:
state = x.__dict__
state = deepcopy(state, memo)
if hasattr(y, '__setstate__'):
y.__setstate__(state)
else:
y.__dict__.update(state)
return y
d[types.InstanceType] = _deepcopy_inst
def _reconstruct(x, info, deep, memo=None):
    # Rebuild a copy of x from pickle-protocol reduce data.
    #
    # info is either a string (meaning: x may be returned as-is) or a
    # tuple (callable, args[, state[, listiter[, dictiter]]]) as produced
    # by __reduce__/__reduce_ex__.  deep selects deep vs. shallow copying.
    if isinstance(info, str):
        return x
    assert isinstance(info, tuple)
    if memo is None:
        memo = {}
    n = len(info)
    assert n in (2, 3, 4, 5)
    callable, args = info[:2]
    if n > 2:
        state = info[2]
    else:
        state = {}
    if n > 3:
        listiter = info[3]
    else:
        listiter = None
    if n > 4:
        dictiter = info[4]
    else:
        dictiter = None
    if deep:
        args = deepcopy(args, memo)
    y = callable(*args)
    # Register the new object before restoring state so that references
    # back to x inside the state resolve to y.
    memo[id(x)] = y
    if state:
        if deep:
            state = deepcopy(state, memo)
        if hasattr(y, '__setstate__'):
            y.__setstate__(state)
        else:
            # A 2-tuple state means (instance __dict__, slot values).
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            else:
                slotstate = None
            if state is not None:
                y.__dict__.update(state)
            if slotstate is not None:
                for key, value in slotstate.iteritems():
                    setattr(y, key, value)
    if listiter is not None:
        # Pickle-protocol extension: append items (e.g. list subclasses).
        for item in listiter:
            if deep:
                item = deepcopy(item, memo)
            y.append(item)
    if dictiter is not None:
        # Pickle-protocol extension: set items (e.g. dict subclasses).
        for key, value in dictiter:
            if deep:
                key = deepcopy(key, memo)
                value = deepcopy(value, memo)
            y[key] = value
    return y
del d
del types
# Helper for instance creation without calling __init__
class _EmptyClass:
    """Stub class: instances get their __class__ reassigned by the copy
    routines, so the real class's __init__ is never run."""
    pass
def _test():
    # Manual smoke test (Python 2): exercises copy()/deepcopy() on builtin
    # containers, instances with pickle-style hooks, self-referential
    # structures, and a dict subclass.  Results are printed, not asserted.
    l = [None, 1, 2L, 3.14, 'xyzzy', (1, 2L), [3.14, 'abc'],
         {'abc': 'ABC'}, (), [], {}]
    l1 = copy(l)
    print l1==l
    l1 = map(copy, l)
    print l1==l
    l1 = deepcopy(l)
    print l1==l
    # Old-style class exercising __getstate__/__setstate__/__deepcopy__.
    class C:
        def __init__(self, arg=None):
            self.a = 1
            self.arg = arg
            if __name__ == '__main__':
                import sys
                file = sys.argv[0]
            else:
                file = __file__
            self.fp = open(file)
            self.fp.close()
        def __getstate__(self):
            return {'a': self.a, 'arg': self.arg}
        def __setstate__(self, state):
            for key, value in state.iteritems():
                setattr(self, key, value)
        def __deepcopy__(self, memo=None):
            new = self.__class__(deepcopy(self.arg, memo))
            new.a = self.a
            return new
    c = C('argument sketch')
    l.append(c)
    l2 = copy(l)
    print l == l2
    print l
    print l2
    l2 = deepcopy(l)
    print l == l2
    print l
    print l2
    # Self-referential structure: l now contains a dict pointing back at l.
    l.append({l[1]: l, 'xyz': l[2]})
    l3 = copy(l)
    import repr
    print map(repr.repr, l)
    print map(repr.repr, l1)
    print map(repr.repr, l2)
    print map(repr.repr, l3)
    l3 = deepcopy(l)
    import repr
    print map(repr.repr, l)
    print map(repr.repr, l1)
    print map(repr.repr, l2)
    print map(repr.repr, l3)
    # dict subclass: deepcopy must preserve both items and attributes.
    class odict(dict):
        def __init__(self, d = {}):
            self.a = 99
            dict.__init__(self, d)
        def __setitem__(self, k, i):
            dict.__setitem__(self, k, i)
            self.a
    o = odict({"A" : "B"})
    x = deepcopy(o)
    print(o, x)
if __name__ == '__main__':
_test()
| gpl-2.0 |
brunolimawd/talk-nodebots-roll-out | node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | 388 | 91069 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This is all roughly based on the Makefile system used by the Linux
# kernel, but is a non-recursive make -- we put the entire dependency
# graph in front of make and let it figure it out.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level Makefile. This means that all
# variables in .mk-files clobber one another. Be careful to use :=
# where appropriate for immediate evaluation, and similarly to watch
# that you're not relying on a variable value to last beween different
# .mk files.
#
# TODOs:
#
# Global settings and utility functions are currently stuffed in the
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the the files readable.
import os
import re
import sys
import subprocess
import gyp
import gyp.common
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
from gyp.common import GypError
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
'PRODUCT_DIR': '$(builddir)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(abspath $<)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(BUILDTYPE)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Request sorted dependencies in the order from dependents to dependencies.
generator_wants_sorted_dependencies = False
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Fills |default_variables| with flavor-specific defaults (OS name, shared
  library suffix and output directories).  On mac it additionally pulls
  generator settings from the Xcode generator, which shares configuration
  with the Mac Make generator.
  """
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Make generator.
    import gyp.generator.xcode as xcode_generator
    global generator_additional_non_configuration_keys
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    global generator_additional_path_sections
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
    # Objective-C(++) sources are only compilable for the mac flavor.
    COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)')
    default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  flags = params.get('generator_flags', {})

  # Android NDK requires a strict link order.
  if flags.get('android_ndk_version', None):
    global generator_wants_sorted_dependencies
    generator_wants_sorted_dependencies = True

  options = params['options']
  output_dir = options.generator_output or options.toplevel_dir
  builddir_name = flags.get('output_dir', 'out')
  qualified_out_dir = os.path.normpath(
      os.path.join(output_dir, builddir_name, 'gypfiles'))

  global generator_filelist_paths
  generator_filelist_paths = {
      'toplevel': options.toplevel_dir,
      'qualified_out_dir': qualified_out_dir,
  }
# The .d checking code below uses these functions:
# wildcard, sort, foreach, shell, wordlist
# wildcard can handle spaces, the rest can't.
# Since I could find no way to make foreach work with spaces in filenames
# correctly, the .d files have spaces replaced with another character. The .d
# file for
# Chromium\ Framework.framework/foo
# is for example
# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
# This is the replacement character.
SPACE_REPLACEMENT = '?'
LINK_COMMANDS_LINUX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
# into a link line.
# 2) loadable_module, which is generating a module intended for dlopen().
#
# They differ only slightly:
# In the former case, we want to package all dependent code into the .so.
# In the latter case, we want to package just the API exposed by the
# outermost module.
# This means shared_library uses --whole-archive, while loadable_module doesn't.
# (Note that --whole-archive is incompatible with the --start-group used in
# normal linking.)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
"""
LINK_COMMANDS_MAC = """\
quiet_cmd_alink = LIBTOOL-STATIC $@
cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
quiet_cmd_link_host = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_AIX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
# Header of toplevel Makefile.
# This should go into the build tree, but it's easier to keep it here for now.
SHARED_HEADER = ("""\
# We borrow heavily from the kernel build setup, though we are simpler since
# we don't have Kconfig tweaking settings on us.
# The implicit make rules have it looking for RCS files, among other things.
# We instead explicitly write all the rules we care about.
# It's even quicker (saves ~200ms) to pass -r on the command line.
MAKEFLAGS=-r
# The source directory tree.
srcdir := %(srcdir)s
abs_srcdir := $(abspath $(srcdir))
# The name of the builddir.
builddir_name ?= %(builddir)s
# The V=1 flag on command line makes us verbosely print command lines.
ifdef V
quiet=
else
quiet=quiet_
endif
# Specify BUILDTYPE=Release on the command line for a release build.
BUILDTYPE ?= %(default_configuration)s
# Directory all our build output goes into.
# Note that this must be two directories beneath src/ for unit tests to pass,
# as they reach into the src/ directory for data with relative paths.
builddir ?= $(builddir_name)/$(BUILDTYPE)
abs_builddir := $(abspath $(builddir))
depsdir := $(builddir)/.deps
# Object output directory.
obj := $(builddir)/obj
abs_obj := $(abspath $(obj))
# We build up a list of every single one of the targets so we can slurp in the
# generated dependency rule Makefiles in one pass.
all_deps :=
%(make_global_settings)s
CC.target ?= %(CC.target)s
CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
CXX.target ?= %(CXX.target)s
CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# C++ apps need to be linked with g++.
LINK ?= $(CXX.target)
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
CXX.host ?= %(CXX.host)s
CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
LINK.host ?= %(LINK.host)s
LDFLAGS.host ?=
AR.host ?= %(AR.host)s
# Define a dir function that can handle spaces.
# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
# "leading spaces cannot appear in the text of the first argument as written.
# These characters can be put into the argument value by variable substitution."
empty :=
space := $(empty) $(empty)
# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
# Flags to make gcc output dependency info. Note that you need to be
# careful here to use the flags that ccache and distcc can understand.
# We write to a dep file on the side first and then rename at the end
# so we can't end up with a broken dep file.
depfile = $(depsdir)/$(call replace_spaces,$@).d
DEPFLAGS = -MMD -MF $(depfile).raw
# We have to fixup the deps output in a few ways.
# (1) the file output should mention the proper .o file.
# ccache or distcc lose the path to the target, so we convert a rule of
# the form:
# foobar.o: DEP1 DEP2
# into
# path/to/foobar.o: DEP1 DEP2
# (2) we want missing files not to cause us to fail to build.
# We want to rewrite
# foobar.o: DEP1 DEP2 \\
# DEP3
# to
# DEP1:
# DEP2:
# DEP3:
# so if the files are missing, they're just considered phony rules.
# We have to do some pretty insane escaping to get those backslashes
# and dollar signs past make, the shell, and sed at the same time.
# Doesn't work with spaces, but that's fine: .d files have spaces in
# their names replaced with other characters."""
r"""
define fixup_dep
# The depfile may not exist if the input file didn't have any #includes.
touch $(depfile).raw
# Fixup path as in (1).
sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
# Add extra rules as in (2).
# We remove slashes and replace spaces with new lines;
# remove blank lines;
# delete the first line and append a colon to the remaining lines.
sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
grep -v '^$$' |\
sed -e 1d -e 's|$$|:|' \
>> $(depfile)
rm $(depfile).raw
endef
"""
"""
# Command definitions:
# - cmd_foo is the actual command to run;
# - quiet_cmd_foo is the brief-output summary of the command.
quiet_cmd_cc = CC($(TOOLSET)) $@
cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_cxx = CXX($(TOOLSET)) $@
cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
%(extra_commands)s
quiet_cmd_touch = TOUCH $@
cmd_touch = touch $@
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@"
%(link_commands)s
"""
r"""
# Define an escape_quotes function to escape single quotes.
# This allows us to handle quotes properly as long as we always use
# use single quotes and escape_quotes.
escape_quotes = $(subst ','\'',$(1))
# This comment is here just to include a ' to unconfuse syntax highlighting.
# Define an escape_vars function to escape '$' variable syntax.
# This allows us to read/write command lines with shell variables (e.g.
# $LD_LIBRARY_PATH), without triggering make substitution.
escape_vars = $(subst $$,$$$$,$(1))
# Helper that expands to a shell command to echo a string exactly as it is in
# make. This uses printf instead of echo because printf's behaviour with respect
# to escape sequences is more portable than echo's across different shells
# (e.g., dash, bash).
exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
"""
"""
# Helper to compare the command we're about to run against the command
# we logged the last time we ran the command. Produces an empty
# string (false) when the commands match.
# Tricky point: Make has no string-equality test function.
# The kernel uses the following, but it seems like it would have false
# positives, where one string reordered its arguments.
# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
# $(filter-out $(cmd_$@), $(cmd_$(1))))
# We instead substitute each for the empty string into the other, and
# say they're equal if both substitutions produce the empty string.
# .d files contain """ + SPACE_REPLACEMENT + \
""" instead of spaces, take that into account.
command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
$(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
# Helper that is non-empty when a prerequisite changes.
# Normally make does this implicitly, but we force rules to always run
# so we can check their command lines.
# $? -- new prerequisites
# $| -- order-only dependencies
prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# Helper that executes all postbuilds until one fails.
define do_postbuilds
@E=0;\\
for p in $(POSTBUILDS); do\\
eval $$p;\\
E=$$?;\\
if [ $$E -ne 0 ]; then\\
break;\\
fi;\\
done;\\
if [ $$E -ne 0 ]; then\\
rm -rf "$@";\\
exit $$E;\\
fi
endef
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
# Third argument, if non-zero, makes it do POSTBUILDS processing.
# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
SPACE_REPLACEMENT + """ for
# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
""" characters.
define do_cmd
$(if $(or $(command_changed),$(prereq_changed)),
@$(call exact_echo, $($(quiet)cmd_$(1)))
@mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
$(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
@$(cmd_$(1))
@echo " $(quiet_cmd_$(1)): Finished",
@$(cmd_$(1))
)
@$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
@$(if $(2),$(fixup_dep))
$(if $(and $(3), $(POSTBUILDS)),
$(call do_postbuilds)
)
)
endef
# Declare the "%(default_target)s" target first so it is the default,
# even though we don't have the deps yet.
.PHONY: %(default_target)s
%(default_target)s:
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
%%.d: ;
# Use FORCE_DO_CMD to force a target to run. Should be coupled with
# do_cmd.
.PHONY: FORCE_DO_CMD
FORCE_DO_CMD:
""")
SHARED_HEADER_MAC_COMMANDS = """
quiet_cmd_objc = CXX($(TOOLSET)) $@
cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_objcxx = CXX($(TOOLSET)) $@
cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# Commands for precompiled header files.
quiet_cmd_pch_c = CXX($(TOOLSET)) $@
cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_m = CXX($(TOOLSET)) $@
cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# gyp-mac-tool is written next to the root Makefile by gyp.
# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
# already.
quiet_cmd_mac_tool = MACTOOL $(4) $<
cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
"""
def WriteRootHeaderSuffixRules(writer):
  """Emit make suffix rules compiling each known extension into $(obj)."""
  exts = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)

  def emit_rules(rule_pattern):
    # One rule + do_cmd invocation per compilable extension.
    for ext in exts:
      writer.write(rule_pattern % ext)
      writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])

  writer.write('# Suffix rules, putting all outputs into $(obj).\n')
  emit_rules('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n')
  writer.write('\n# Try building from generated source, too.\n')
  emit_rules('$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n')
  writer.write('\n')
  emit_rules('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n')
  writer.write('\n')
SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
# Suffix rules, putting all outputs into $(obj).
""")
SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
# Try building from generated source, too.
""")
SHARED_FOOTER = """\
# "all" is a concatenation of the "all" targets from all the included
# sub-makefiles. This is just here to clarify.
all:
# Add in dependency-tracking rules. $(all_deps) is the list of every single
# target in our tree. Only consider the ones with .d (dependency) info:
d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
ifneq ($(d_files),)
include $(d_files)
endif
"""
header = """\
# This file is generated by gyp; do not edit.
"""
# Maps every compilable file extension to the do_cmd that compiles it.
COMPILABLE_EXTENSIONS = {
'.c': 'cc',
'.cc': 'cxx',
'.cpp': 'cxx',
'.cxx': 'cxx',
'.s': 'cc',
'.S': 'cc',
}
def Compilable(filename):
  """Return true if the file is compilable (should be in OBJS)."""
  return any(filename.endswith(ext) for ext in COMPILABLE_EXTENSIONS)
def Linkable(filename):
  """Return true if the file is linkable (should be on the link line)."""
  # Equivalent to filename.endswith('.o').
  return filename[-2:] == '.o'
def Target(filename):
  """Translate a compilable filename to its .o target."""
  root, _unused_ext = os.path.splitext(filename)
  return root + '.o'
def EscapeShellArgument(s):
  """Quotes an argument so that it will be interpreted literally by a POSIX
  shell. Taken from
  http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
  """
  # Close the quote, emit an escaped literal quote, reopen the quote.
  return "'%s'" % s.replace("'", "'\\''")
def EscapeMakeVariableExpansion(s):
  """Make has its own variable expansion syntax using $. We must escape it for
  string to be interpreted literally."""
  # Doubling each '$' is make's literal-dollar escape.
  return '$$'.join(s.split('$'))
def EscapeCppDefine(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Shell-quote first, then guard against make's $-expansion.
  escaped = EscapeMakeVariableExpansion(EscapeShellArgument(s))
  # '#' characters must be escaped even embedded in a string, else Make will
  # treat it as the start of a comment.
  return escaped.replace('#', r'\#')
def QuoteIfNecessary(string):
  """TODO: Should this ideally be replaced with one or more of the above
  functions?"""
  # Only wrap in double quotes when the string itself contains one.
  if '"' not in string:
    return string
  return '"' + string.replace('"', '\\"') + '"'
def StringToMakefileVariable(string):
  """Convert a string to a value that is acceptable as a make variable name."""
  # Anything outside [A-Za-z0-9_] is collapsed to an underscore.
  sanitized = re.sub('[^a-zA-Z0-9_]', '_', string)
  return sanitized
srcdir_prefix = ''
def Sourceify(path):
  """Convert a path to its source directory form."""
  # Paths containing make-variable expansions, and absolute paths, are
  # already usable as-is.
  if '$(' in path or os.path.isabs(path):
    return path
  return srcdir_prefix + path
def QuoteSpaces(s, quote=r'\ '):
  """Replace each space in *s* with *quote* (default: backslash-space)."""
  return quote.join(s.split(' '))
# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
def _ValidateSourcesForOSX(spec, all_sources):
"""Makes sure if duplicate basenames are not specified in the source list.
Arguments:
spec: The target dictionary containing the properties of the target.
"""
if spec.get('type', None) != 'static_library':
return
basenames = {}
for source in all_sources:
name, ext = os.path.splitext(source)
is_compiled_file = ext in [
'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
if not is_compiled_file:
continue
basename = os.path.basename(name) # Don't include extension.
basenames.setdefault(basename, []).append(source)
error = ''
for basename, files in basenames.iteritems():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
print('static library %s has several files with the same basename:\n' %
spec['target_name'] + error + 'libtool on OS X will generate' +
' warnings for them.')
raise GypError('Duplicate basenames in sources section, see list above')
# Map from qualified target to path to output.
# Populated by MakefileWriter.Write() after each target's .mk is generated.
target_outputs = {}

# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
# Only static_library/shared_library targets are recorded here (see Write()).
target_link_deps = {}
class MakefileWriter(object):
  """MakefileWriter packages up the writing of one target-specific foobar.mk.

  Its only real entry point is Write(), and is mostly used for namespacing.
  """
def __init__(self, generator_flags, flavor):
  # generator_flags: generator-level options; read later via
  # self.generator_flags.get(...) (e.g. 'android_ndk_version' in Write()).
  # flavor: platform flavor string; 'mac' enables the xcode-emulation paths.
  self.generator_flags = generator_flags
  self.flavor = flavor

  # Make suffix rules keyed by source extension, in three variants: sources
  # in the source tree, and generated sources under the two obj locations.
  self.suffix_rules_srcdir = {}
  self.suffix_rules_objdir1 = {}
  self.suffix_rules_objdir2 = {}

  # Generate suffix rules for all compilable extensions.
  for ext in COMPILABLE_EXTENSIONS.keys():
    # Suffix rules for source folder.
    # NOTE(review): a make recipe line must begin with a literal TAB; confirm
    # the '@$(call do_cmd,...)' lines below still carry one — it is easily
    # lost when this file is reformatted.
    self.suffix_rules_srcdir.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
	@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})

    # Suffix rules for generated source files.
    self.suffix_rules_objdir1.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
	@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
    self.suffix_rules_objdir2.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
	@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
def Write(self, qualified_target, base_path, output_filename, spec, configs,
          part_of_all):
  """The main entry point: writes a .mk file for a single target.

  Arguments:
    qualified_target: target we're generating
    base_path: path relative to source root we're building in, used to resolve
               target-relative paths
    output_filename: output .mk file name to write
    spec, configs: gyp info
    part_of_all: flag indicating this target is part of 'all'
  """
  gyp.common.EnsureDirExists(output_filename)
  self.fp = open(output_filename, 'w')
  self.fp.write(header)
  # Cache frequently-used pieces of the spec on self for the helpers below.
  self.qualified_target = qualified_target
  self.path = base_path
  self.target = spec['target_name']
  self.type = spec['type']
  self.toolset = spec['toolset']
  self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
  if self.flavor == 'mac':
    self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
  else:
    self.xcode_settings = None
  deps, link_deps = self.ComputeDeps(spec)
  # Some of the generation below can add extra output, sources, or
  # link dependencies. All of the out params of the functions that
  # follow use names like extra_foo.
  extra_outputs = []
  extra_sources = []
  extra_link_deps = []
  extra_mac_bundle_resources = []
  mac_bundle_deps = []
  if self.is_mac_bundle:
    self.output = self.ComputeMacBundleOutput(spec)
    self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
  else:
    self.output = self.output_binary = self.ComputeOutput(spec)
  self.is_standalone_static_library = bool(
      spec.get('standalone_static_library', 0))
  self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
                               'shared_library')
  # Installable targets (and standalone static libraries) are aliased by
  # basename and installed; everything else is addressed by its output path.
  if (self.is_standalone_static_library or
      self.type in self._INSTALLABLE_TARGETS):
    self.alias = os.path.basename(self.output)
    install_path = self._InstallableTargetInstallPath()
  else:
    self.alias = self.output
    install_path = self.output
  self.WriteLn("TOOLSET := " + self.toolset)
  self.WriteLn("TARGET := " + self.target)
  # Actions must come first, since they can generate more OBJs for use below.
  if 'actions' in spec:
    self.WriteActions(spec['actions'], extra_sources, extra_outputs,
                      extra_mac_bundle_resources, part_of_all)
  # Rules must be early like actions.
  if 'rules' in spec:
    self.WriteRules(spec['rules'], extra_sources, extra_outputs,
                    extra_mac_bundle_resources, part_of_all)
  if 'copies' in spec:
    self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
  # Bundle resources.
  if self.is_mac_bundle:
    all_mac_bundle_resources = (
        spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
    self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
    self.WriteMacInfoPlist(mac_bundle_deps)
  # Sources.
  all_sources = spec.get('sources', []) + extra_sources
  if all_sources:
    if self.flavor == 'mac':
      # libtool on OS X generates warnings for duplicate basenames in the same
      # target.
      _ValidateSourcesForOSX(spec, all_sources)
    self.WriteSources(
        configs, deps, all_sources, extra_outputs,
        extra_link_deps, part_of_all,
        gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
            self.Pchify))
    # Emit per-extension suffix rules only for the extensions actually used.
    sources = filter(Compilable, all_sources)
    if sources:
      self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
      extensions = set([os.path.splitext(s)[1] for s in sources])
      for ext in extensions:
        if ext in self.suffix_rules_srcdir:
          self.WriteLn(self.suffix_rules_srcdir[ext])
      self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
      for ext in extensions:
        if ext in self.suffix_rules_objdir1:
          self.WriteLn(self.suffix_rules_objdir1[ext])
      for ext in extensions:
        if ext in self.suffix_rules_objdir2:
          self.WriteLn(self.suffix_rules_objdir2[ext])
      self.WriteLn('# End of this set of suffix rules')
  # Add dependency from bundle to bundle binary.
  if self.is_mac_bundle:
    mac_bundle_deps.append(self.output_binary)
  self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
                   mac_bundle_deps, extra_outputs, part_of_all)
  # Update global list of target outputs, used in dependency tracking.
  target_outputs[qualified_target] = install_path
  # Update global list of link dependencies.
  if self.type in ('static_library', 'shared_library'):
    target_link_deps[qualified_target] = self.output_binary
  # Currently any versions have the same effect, but in future the behavior
  # could be different.
  if self.generator_flags.get('android_ndk_version', None):
    self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
  self.fp.close()
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
  """Write a "sub-project" Makefile.

  This is a small, wrapper Makefile that calls the top-level Makefile to build
  the targets from a single gyp file (i.e. a sub-project).

  Arguments:
    output_filename: sub-project Makefile name to write
    makefile_path: path to the top-level Makefile
    targets: list of "all" targets for this sub-project
    build_dir: build output directory, relative to the sub-project
  """
  gyp.common.EnsureDirExists(output_filename)
  self.fp = open(output_filename, 'w')
  self.fp.write(header)
  # For consistency with other builders, put sub-project build output in the
  # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
  sub_builddir = os.path.join(os.path.dirname(output_filename), build_dir)
  self.WriteLn('export builddir_name ?= %s' % sub_builddir)
  self.WriteLn('.PHONY: all')
  self.WriteLn('all:')
  # Delegate to the top-level Makefile via 'make -C' when a path was given.
  make_dir_arg = ' -C ' + makefile_path if makefile_path else makefile_path
  self.WriteLn('\t$(MAKE)%s %s' % (make_dir_arg, ' '.join(targets)))
  self.fp.close()
def WriteActions(self, actions, extra_sources, extra_outputs,
                 extra_mac_bundle_resources, part_of_all):
  """Write Makefile code for any 'actions' from the gyp input.

  extra_sources: a list that will be filled in with newly generated source
                 files, if any
  extra_outputs: a list that will be filled in with any outputs of these
                 actions (used to make other pieces dependent on these
                 actions)
  part_of_all: flag indicating this target is part of 'all'
  """
  env = self.GetSortedXcodeEnv()
  for action in actions:
    # One make variable/rule set per action, named after the target + action.
    name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
                                               action['action_name']))
    self.WriteLn('### Rules for action "%s":' % action['action_name'])
    inputs = action['inputs']
    outputs = action['outputs']

    # Build up a list of outputs.
    # Collect the output dirs we'll need.
    dirs = set()
    for out in outputs:
      dir = os.path.split(out)[0]  # (shadows the 'dir' builtin; kept as-is)
      if dir:
        dirs.add(dir)
    if int(action.get('process_outputs_as_sources', False)):
      extra_sources += outputs
    if int(action.get('process_outputs_as_mac_bundle_resources', False)):
      extra_mac_bundle_resources += outputs

    # Write the actual command.
    action_commands = action['action']
    if self.flavor == 'mac':
      action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
                         for command in action_commands]
    command = gyp.common.EncodePOSIXShellList(action_commands)
    if 'message' in action:
      self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
    else:
      self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
    # Create output directories up front, before the command runs.
    if len(dirs) > 0:
      command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

    cd_action = 'cd %s; ' % Sourceify(self.path or '.')

    # command and cd_action get written to a toplevel variable called
    # cmd_foo. Toplevel variables can't handle things that change per
    # makefile like $(TARGET), so hardcode the target.
    command = command.replace('$(TARGET)', self.target)
    cd_action = cd_action.replace('$(TARGET)', self.target)

    # Set LD_LIBRARY_PATH in case the action runs an executable from this
    # build which links to shared libs from this build.
    # actions run on the host, so they should in theory only use host
    # libraries, but until everything is made cross-compile safe, also use
    # target libraries.
    # TODO(piman): when everything is cross-compile safe, remove lib.target
    self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
                 '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
                 'export LD_LIBRARY_PATH; '
                 '%s%s'
                 % (name, cd_action, command))
    self.WriteLn()
    outputs = map(self.Absolutify, outputs)
    # The makefile rules are all relative to the top dir, but the gyp actions
    # are defined relative to their containing dir. This replaces the obj
    # variable for the action rule with an absolute version so that the output
    # goes in the right place.
    # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
    # it's superfluous for the "extra outputs", and this avoids accidentally
    # writing duplicate dummy rules for those outputs.
    # Same for environment.
    self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
    self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
    self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())

    for input in inputs:
      assert ' ' not in input, (
          "Spaces in action input filenames not supported (%s)" % input)
    for output in outputs:
      assert ' ' not in output, (
          "Spaces in action output filenames not supported (%s)" % output)

    # See the comment in WriteCopies about expanding env vars.
    outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
    inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]

    self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
                    part_of_all=part_of_all, command=name)

    # Stuff the outputs in a variable so we can refer to them later.
    outputs_variable = 'action_%s_outputs' % name
    self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
    extra_outputs.append('$(%s)' % outputs_variable)
    self.WriteLn()
  self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs,
               extra_mac_bundle_resources, part_of_all):
  """Write Makefile code for any 'rules' from the gyp input.

  extra_sources: a list that will be filled in with newly generated source
                 files, if any
  extra_outputs: a list that will be filled in with any outputs of these
                 rules (used to make other pieces dependent on these rules)
  part_of_all: flag indicating this target is part of 'all'
  """
  env = self.GetSortedXcodeEnv()
  for rule in rules:
    name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
                                               rule['rule_name']))
    # 'count' distinguishes the per-source cmd_/quiet_cmd_ variables.
    count = 0
    self.WriteLn('### Generated for rule %s:' % name)

    all_outputs = []

    for rule_source in rule.get('rule_sources', []):
      dirs = set()
      (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
      (rule_source_root, rule_source_ext) = \
          os.path.splitext(rule_source_basename)

      # Expand RULE_INPUT_* placeholders in the declared outputs.
      outputs = [self.ExpandInputRoot(out, rule_source_root,
                                      rule_source_dirname)
                 for out in rule['outputs']]

      for out in outputs:
        dir = os.path.dirname(out)  # (shadows the 'dir' builtin; kept as-is)
        if dir:
          dirs.add(dir)
      if int(rule.get('process_outputs_as_sources', False)):
        extra_sources += outputs
      if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += outputs
      inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
                                  rule.get('inputs', [])))
      actions = ['$(call do_cmd,%s_%d)' % (name, count)]

      if name == 'resources_grit':
        # HACK: This is ugly. Grit intentionally doesn't touch the
        # timestamp of its output file when the file doesn't change,
        # which is fine in hash-based dependency systems like scons
        # and forge, but not kosher in the make world. After some
        # discussion, hacking around it here seems like the least
        # amount of pain.
        actions += ['@touch --no-create $@']

      # See the comment in WriteCopies about expanding env vars.
      outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
      inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]

      outputs = map(self.Absolutify, outputs)
      all_outputs += outputs
      # Only write the 'obj' and 'builddir' rules for the "primary" output
      # (:1); it's superfluous for the "extra outputs", and this avoids
      # accidentally writing duplicate dummy rules for those outputs.
      self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
      self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
      self.WriteMakeRule(outputs, inputs, actions,
                         command="%s_%d" % (name, count))
      # Spaces in rule filenames are not supported, but rule variables have
      # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
      # The spaces within the variables are valid, so remove the variables
      # before checking.
      variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
      for output in outputs:
        output = re.sub(variables_with_spaces, '', output)
        assert ' ' not in output, (
            "Spaces in rule filenames not yet supported (%s)" % output)
      self.WriteLn('all_deps += %s' % ' '.join(outputs))

      action = [self.ExpandInputRoot(ac, rule_source_root,
                                     rule_source_dirname)
                for ac in rule['action']]
      mkdirs = ''
      if len(dirs) > 0:
        mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
      cd_action = 'cd %s; ' % Sourceify(self.path or '.')

      # action, cd_action, and mkdirs get written to a toplevel variable
      # called cmd_foo. Toplevel variables can't handle things that change
      # per makefile like $(TARGET), so hardcode the target.
      if self.flavor == 'mac':
        action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
                  for command in action]
      action = gyp.common.EncodePOSIXShellList(action)
      action = action.replace('$(TARGET)', self.target)
      cd_action = cd_action.replace('$(TARGET)', self.target)
      mkdirs = mkdirs.replace('$(TARGET)', self.target)

      # Set LD_LIBRARY_PATH in case the rule runs an executable from this
      # build which links to shared libs from this build.
      # rules run on the host, so they should in theory only use host
      # libraries, but until everything is made cross-compile safe, also use
      # target libraries.
      # TODO(piman): when everything is cross-compile safe, remove lib.target
      self.WriteLn(
          "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
          "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
          "export LD_LIBRARY_PATH; "
          "%(cd_action)s%(mkdirs)s%(action)s" % {
            'action': action,
            'cd_action': cd_action,
            'count': count,
            'mkdirs': mkdirs,
            'name': name,
          })
      self.WriteLn(
          'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
            'count': count,
            'name': name,
          })
      self.WriteLn()
      count += 1

    # Collect all per-source outputs of this rule into one make variable.
    outputs_variable = 'rule_%s_outputs' % name
    self.WriteList(all_outputs, outputs_variable)
    extra_outputs.append('$(%s)' % outputs_variable)

    self.WriteLn('### Finished generating for rule: %s' % name)
    self.WriteLn()
  self.WriteLn('### Finished generating for all rules')
  self.WriteLn('')
def WriteCopies(self, copies, extra_outputs, part_of_all):
  """Write Makefile code for any 'copies' from the gyp input.

  extra_outputs: a list that will be filled in with any outputs of this action
                 (used to make other pieces dependent on this action)
  part_of_all: flag indicating this target is part of 'all'
  """
  self.WriteLn('### Generated for copy rule.')

  variable = StringToMakefileVariable(self.qualified_target + '_copies')
  outputs = []
  for copy in copies:
    for path in copy['files']:
      # Absolutify() may call normpath, and will strip trailing slashes.
      path = Sourceify(self.Absolutify(path))
      filename = os.path.split(path)[1]
      output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
                                                      filename)))

      # If the output path has variables in it, which happens in practice for
      # 'copies', writing the environment as target-local doesn't work,
      # because the variables are already needed for the target name.
      # Copying the environment variables into global make variables doesn't
      # work either, because then the .d files will potentially contain spaces
      # after variable expansion, and .d file handling cannot handle spaces.
      # As a workaround, manually expand variables at gyp time. Since 'copies'
      # can't run scripts, there's no need to write the env then.
      # WriteDoCmd() will escape spaces for .d files.
      # NOTE(review): GetSortedXcodeEnv() is recomputed for every file here;
      # it looks loop-invariant and could be hoisted — confirm before moving.
      env = self.GetSortedXcodeEnv()
      output = gyp.xcode_emulation.ExpandEnvVars(output, env)
      path = gyp.xcode_emulation.ExpandEnvVars(path, env)
      self.WriteDoCmd([output], [path], 'copy', part_of_all)
      outputs.append(output)
  self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
  extra_outputs.append('$(%s)' % variable)
  self.WriteLn()
def WriteMacBundleResources(self, resources, bundle_deps):
  """Writes Makefile code for 'mac_bundle_resources'.

  bundle_deps: list that collects the copied resource outputs so the bundle
               target can depend on them.
  """
  self.WriteLn('### Generated for mac_bundle_resources')

  for output, res in gyp.xcode_emulation.GetMacBundleResources(
      generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
      map(Sourceify, map(self.Absolutify, resources))):
    _, ext = os.path.splitext(output)
    if ext != '.xcassets':
      # Make does not support '.xcassets' emulation.
      self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
                      part_of_all=True)
      bundle_deps.append(output)
def WriteMacInfoPlist(self, bundle_deps):
  """Write Makefile code for bundle Info.plist files.

  bundle_deps: list that collects the generated plist output so the bundle
               target can depend on it.
  """
  info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
      generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
      lambda p: Sourceify(self.Absolutify(p)))
  if not info_plist:
    return
  if defines:
    # Create an intermediate file to store preprocessed results.
    intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
                          os.path.basename(info_plist))
    self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
                   quoter=EscapeCppDefine)
    self.WriteMakeRule([intermediate_plist], [info_plist],
                       ['$(call do_cmd,infoplist)',
                        # "Convert" the plist so that any weird whitespace changes from the
                        # preprocessor do not affect the XML parser in mac_tool.
                        '@plutil -convert xml1 $@ $@'])
    info_plist = intermediate_plist
  # plists can contain envvars and substitute them into the file.
  self.WriteSortedXcodeEnv(
      out, self.GetSortedXcodeEnv(additional_settings=extra_env))
  self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
                  part_of_all=True)
  bundle_deps.append(out)
def WriteSources(self, configs, deps, sources,
                 extra_outputs, extra_link_deps,
                 part_of_all, precompiled_header):
  """Write Makefile code for any 'sources' from the gyp input.
  These are source files necessary to build the current target.

  configs, deps, sources: input from gyp.
  extra_outputs: a list of extra outputs this action should be dependent on;
                 used to serialize action/rules before compilation
  extra_link_deps: a list that will be filled in with any outputs of
                   compilation (to be used in link lines)
  part_of_all: flag indicating this target is part of 'all'
  precompiled_header: prefix-header helper (gyp.xcode_emulation
                      MacPrefixHeader; see caller in Write()).
  """
  # Write configuration-specific variables for CFLAGS, etc.
  for configname in sorted(configs.keys()):
    config = configs[configname]
    self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
                   quoter=EscapeCppDefine)

    if self.flavor == 'mac':
      cflags = self.xcode_settings.GetCflags(configname)
      cflags_c = self.xcode_settings.GetCflagsC(configname)
      cflags_cc = self.xcode_settings.GetCflagsCC(configname)
      cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
      cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
    else:
      cflags = config.get('cflags')
      cflags_c = config.get('cflags_c')
      cflags_cc = config.get('cflags_cc')

    # CLEANUP: removed stray trailing semicolons from the WriteLn calls
    # below (they were no-ops in Python).
    self.WriteLn("# Flags passed to all source files.")
    self.WriteList(cflags, 'CFLAGS_%s' % configname)
    self.WriteLn("# Flags passed to only C files.")
    self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
    self.WriteLn("# Flags passed to only C++ files.")
    self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
    if self.flavor == 'mac':
      self.WriteLn("# Flags passed to only ObjC files.")
      self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
      self.WriteLn("# Flags passed to only ObjC++ files.")
      self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
    includes = config.get('include_dirs')
    if includes:
      includes = map(Sourceify, map(self.Absolutify, includes))
    self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')

  # BUG FIX: use list comprehensions instead of filter/map here —
  # 'compilable' is iterated twice (for objs and for GetObjDependencies),
  # which breaks with the one-shot iterators filter/map return on Python 3.
  # Behavior on Python 2 is identical.
  compilable = [s for s in sources if Compilable(s)]
  objs = [self.Objectify(self.Absolutify(Target(s))) for s in compilable]
  self.WriteList(objs, 'OBJS')

  for obj in objs:
    assert ' ' not in obj, (
        "Spaces in object filenames not supported (%s)" % obj)
  self.WriteLn('# Add to the list of files we specially track '
               'dependencies for.')
  self.WriteLn('all_deps += $(OBJS)')
  self.WriteLn()

  # Make sure our dependencies are built first.
  if deps:
    self.WriteMakeRule(['$(OBJS)'], deps,
                       comment='Make sure our dependencies are built '
                               'before any of us.',
                       order_only=True)

  # Make sure the actions and rules run first.
  # If they generate any extra headers etc., the per-.o file dep tracking
  # will catch the proper rebuilds, so order only is still ok here.
  if extra_outputs:
    self.WriteMakeRule(['$(OBJS)'], extra_outputs,
                       comment='Make sure our actions/rules run '
                               'before any of us.',
                       order_only=True)

  pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
  if pchdeps:
    self.WriteLn('# Dependencies from obj files to their precompiled headers')
    for source, obj, gch in pchdeps:
      self.WriteLn('%s: %s' % (obj, gch))
    self.WriteLn('# End precompiled header dependencies')

  if objs:
    extra_link_deps.append('$(OBJS)')
    self.WriteLn("""\
# CFLAGS et al overrides must be target-local.
# See "Target-specific Variable Values" in the GNU Make manual.""")
    self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
    self.WriteLn("$(OBJS): GYP_CFLAGS := "
                 "$(DEFS_$(BUILDTYPE)) "
                 "$(INCS_$(BUILDTYPE)) "
                 "%s " % precompiled_header.GetInclude('c') +
                 "$(CFLAGS_$(BUILDTYPE)) "
                 "$(CFLAGS_C_$(BUILDTYPE))")
    self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
                 "$(DEFS_$(BUILDTYPE)) "
                 "$(INCS_$(BUILDTYPE)) "
                 "%s " % precompiled_header.GetInclude('cc') +
                 "$(CFLAGS_$(BUILDTYPE)) "
                 "$(CFLAGS_CC_$(BUILDTYPE))")
    if self.flavor == 'mac':
      self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
                   "$(DEFS_$(BUILDTYPE)) "
                   "$(INCS_$(BUILDTYPE)) "
                   "%s " % precompiled_header.GetInclude('m') +
                   "$(CFLAGS_$(BUILDTYPE)) "
                   "$(CFLAGS_C_$(BUILDTYPE)) "
                   "$(CFLAGS_OBJC_$(BUILDTYPE))")
      self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
                   "$(DEFS_$(BUILDTYPE)) "
                   "$(INCS_$(BUILDTYPE)) "
                   "%s " % precompiled_header.GetInclude('mm') +
                   "$(CFLAGS_$(BUILDTYPE)) "
                   "$(CFLAGS_CC_$(BUILDTYPE)) "
                   "$(CFLAGS_OBJCC_$(BUILDTYPE))")

  self.WritePchTargets(precompiled_header.GetPchBuildCommands())

  # If there are any object files in our input file list, link them into our
  # output.
  extra_link_deps += [source for source in sources if Linkable(source)]

  self.WriteLn()
def WritePchTargets(self, pch_commands):
  """Writes make rules to compile prefix headers."""
  if not pch_commands:
    return

  # Per-language extra compile flags, and the target-local variable that
  # each pch rule sets.
  lang_extra_flags = {
      'c': '$(CFLAGS_C_$(BUILDTYPE))',
      'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
      'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
      'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
  }
  lang_var_names = {
      'c': 'GYP_PCH_CFLAGS',
      'cc': 'GYP_PCH_CXXFLAGS',
      'm': 'GYP_PCH_OBJCFLAGS',
      'mm': 'GYP_PCH_OBJCXXFLAGS',
  }

  for gch, lang_flag, lang, pch_input in pch_commands:
    self.WriteLn("%s: %s := %s " % (gch, lang_var_names[lang], lang_flag) +
                 "$(DEFS_$(BUILDTYPE)) "
                 "$(INCS_$(BUILDTYPE)) "
                 "$(CFLAGS_$(BUILDTYPE)) " +
                 lang_extra_flags[lang])

    self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, pch_input))
    self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
    self.WriteLn('')
    assert ' ' not in gch, (
        "Spaces in gch filenames not supported (%s)" % gch)
    self.WriteLn('all_deps += %s' % gch)
    self.WriteLn('')
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
assert not self.is_mac_bundle
if self.flavor == 'mac' and self.type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
return self.xcode_settings.GetExecutablePath()
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
if target[:3] == 'lib':
target = target[3:]
target_prefix = 'lib'
target_ext = '.a'
elif self.type in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = 'lib'
target_ext = '.so'
elif self.type == 'none':
target = '%s.stamp' % target
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
return target_prefix + target + target_ext
def _InstallImmediately(self):
return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
'static_library', 'executable', 'shared_library', 'loadable_module')
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
assert not self.is_mac_bundle
path = os.path.join('$(obj).' + self.toolset, self.path)
if self.type == 'executable' or self._InstallImmediately():
path = '$(builddir)'
path = spec.get('product_dir', path)
return os.path.join(path, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
  """Return the 'output' (full output path) to a bundle output directory."""
  assert self.is_mac_bundle
  # Bundles always live in the product dir, named after the wrapper.
  return os.path.join(generator_default_variables['PRODUCT_DIR'],
                      self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self, spec):
  """Return the 'output' (full output path) to the binary in a bundle."""
  product_dir = generator_default_variables['PRODUCT_DIR']
  binary_path = self.xcode_settings.GetExecutablePath()
  return os.path.join(product_dir, binary_path)
def ComputeDeps(self, spec):
  """Compute the dependencies of a gyp spec.

  Returns a tuple (deps, link_deps), where each is a list of
  filenames that will need to be put in front of make for either
  building (deps) or linking (link_deps).
  """
  deps = []
  link_deps = []
  # Walk the declared dependencies once, collecting their recorded outputs
  # (for build ordering) and any linkable outputs (for the link line).
  for dep in spec.get('dependencies', []):
    dep_output = target_outputs[dep]
    if dep_output:
      deps.append(dep_output)
    if dep in target_link_deps:
      link_deps.append(target_link_deps[dep])
  # Linkable outputs also participate in build ordering.
  deps.extend(link_deps)
  # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
  # This hack makes it work:
  # link_deps.extend(spec.get('libraries', []))
  return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
  # Emit an order-only rule: the binary waits for the extra outputs
  # (action/rule products) but is not rebuilt merely because they changed.
  # NOTE(review): the 'target' parameter is unused; the rule is always
  # written against self.output_binary — confirm before relying on it.
  self.WriteMakeRule([self.output_binary], extra_outputs,
                     comment = 'Build our special outputs first.',
                     order_only = True)
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
extra_outputs, part_of_all):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
extra_outputs: any extra outputs that our target should depend on
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn('### Rules for final target.')
if extra_outputs:
self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
self.WriteMakeRule(extra_outputs, deps,
comment=('Preserve order dependency of '
'special output on deps.'),
order_only = True)
target_postbuilds = {}
if self.type != 'none':
for configname in sorted(configs.keys()):
config = configs[configname]
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(configname,
generator_default_variables['PRODUCT_DIR'],
lambda p: Sourceify(self.Absolutify(p)))
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
gyp_to_build = gyp.common.InvertRelativePath(self.path)
target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
configname,
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output))),
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output_binary))))
if target_postbuild:
target_postbuilds[configname] = target_postbuild
else:
ldflags = config.get('ldflags', [])
# Compute an rpath for this output if needed.
if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
# We want to get the literal string "$ORIGIN" into the link command,
# so we need lots of escaping.
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset)
library_dirs = config.get('library_dirs', [])
ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
if self.flavor == 'mac':
self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
'LIBTOOLFLAGS_%s' % configname)
libraries = spec.get('libraries')
if libraries:
# Remove duplicate entries
libraries = gyp.common.uniquer(libraries)
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries)
self.WriteList(libraries, 'LIBS')
self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
if self.flavor == 'mac':
self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
# Postbuild actions. Like actions, but implicitly depend on the target's
# output.
postbuilds = []
if self.flavor == 'mac':
if target_postbuilds:
postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
postbuilds.extend(
gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
if postbuilds:
# Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
# so we must output its definition first, since we declare variables
# using ":=".
self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
for configname in target_postbuilds:
self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
(QuoteSpaces(self.output),
configname,
gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
for i in xrange(len(postbuilds)):
if not postbuilds[i].startswith('$'):
postbuilds[i] = EscapeShellArgument(postbuilds[i])
self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
self.WriteLn('%s: POSTBUILDS := %s' % (
QuoteSpaces(self.output), ' '.join(postbuilds)))
# A bundle directory depends on its dependencies such as bundle resources
# and bundle binary. When all dependencies have been built, the bundle
# needs to be packaged.
if self.is_mac_bundle:
# If the framework doesn't contain a binary, then nothing depends
# on the actions -- make the framework depend on them directly too.
self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
# Bundle dependencies. Note that the code below adds actions to this
# target, so if you move these two lines, move the lines below as well.
self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
# After the framework is built, package it. Needs to happen before
# postbuilds, since postbuilds depend on this.
if self.type in ('shared_library', 'loadable_module'):
self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
self.xcode_settings.GetFrameworkVersion())
# Bundle postbuilds can depend on the whole bundle, so run them after
# the bundle is packaged, not already after the bundle binary is done.
if postbuilds:
self.WriteLn('\t@$(call do_postbuilds)')
postbuilds = [] # Don't write postbuilds for target's output.
# Needed by test/mac/gyptest-rebuild.py.
self.WriteLn('\t@true # No-op, used by tests')
# Since this target depends on binary and resources which are in
# nested subfolders, the framework directory will be older than
# its dependencies usually. To prevent this rule from executing
# on every build (expensive, especially with postbuilds), expliclity
# update the time on the framework directory.
self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
if postbuilds:
assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
'on the bundle, not the binary (target \'%s\')' % self.target)
assert 'product_dir' not in spec, ('Postbuilds do not work with '
'custom product_dir')
if self.type == 'executable':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
postbuilds=postbuilds)
elif self.type == 'static_library':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'shared_library':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'loadable_module':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in module input filenames not supported (%s)" % link_dep)
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd(
[self.output_binary], link_deps, 'solink_module', part_of_all,
postbuilds=postbuilds)
elif self.type == 'none':
# Write a stamp line.
self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
postbuilds=postbuilds)
else:
print "WARNING: no output for", self.type, target
# Add an alias for each target (if there are any outputs).
# Installable target aliases are created below.
if ((self.output and self.output != self.target) and
(self.type not in self._INSTALLABLE_TARGETS)):
self.WriteMakeRule([self.target], [self.output],
comment='Add target alias', phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [self.target],
comment = 'Add target alias to "all" target.',
phony = True)
# Add special-case rules for our installable targets.
# 1) They need to install to the build dir or "product" dir.
# 2) They get shortcuts for building (e.g. "make chrome").
# 3) They are part of "make all".
if (self.type in self._INSTALLABLE_TARGETS or
self.is_standalone_static_library):
if self.type == 'shared_library':
file_desc = 'shared library'
elif self.type == 'static_library':
file_desc = 'static library'
else:
file_desc = 'executable'
install_path = self._InstallableTargetInstallPath()
installable_deps = [self.output]
if (self.flavor == 'mac' and not 'product_dir' in spec and
self.toolset == 'target'):
# On mac, products are created in install_path immediately.
assert install_path == self.output, '%s != %s' % (
install_path, self.output)
# Point the target alias to the final binary output.
self.WriteMakeRule([self.target], [install_path],
comment='Add target alias', phony = True)
if install_path != self.output:
assert not self.is_mac_bundle # See comment a few lines above.
self.WriteDoCmd([install_path], [self.output], 'copy',
comment = 'Copy this to the %s output path.' %
file_desc, part_of_all=part_of_all)
installable_deps.append(install_path)
if self.output != self.alias and self.alias != self.target:
self.WriteMakeRule([self.alias], installable_deps,
comment = 'Short alias for building this %s.' %
file_desc, phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [install_path],
comment = 'Add %s to "all" target.' % file_desc,
phony = True)
def WriteList(self, value_list, variable=None, prefix='',
              quoter=QuoteIfNecessary):
    """Write a make variable definition holding a list of values.

    E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
         foo = blaha blahb
    but in a pretty-printed style.
    """
    if value_list:
      quoted = [quoter(prefix + item) for item in value_list]
      # One value per line, continued with backslashes.
      values = ' \\\n\t' + ' \\\n\t'.join(quoted)
    else:
      values = ''
    self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
               postbuilds=False):
    """Write a Makefile rule that uses do_cmd.

    This makes the outputs dependent on the command line that was run,
    as well as support the V= make command line flag.
    """
    if postbuilds:
      assert ',' not in command
      suffix = ',,1'  # Tell do_cmd to honor $POSTBUILDS
    else:
      suffix = ''
    self.WriteMakeRule(outputs, inputs,
                       actions=['$(call do_cmd,%s%s)' % (command, suffix)],
                       comment=comment,
                       command=command,
                       force=True)
    # Add our outputs to the list of targets we read depfiles from.
    # all_deps is only used for deps file reading, and for deps files we
    # replace spaces with ? because escaping doesn't work with make's $(sort)
    # and other functions.
    dep_names = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
    self.WriteLn('all_deps += %s' % ' '.join(dep_names))
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
                  order_only=False, force=False, phony=False, command=None):
    """Write a Makefile rule, with some extra tricks.
    outputs: a list of outputs for the rule (note: this is not directly
             supported by make; see comments below)
    inputs: a list of inputs for the rule
    actions: a list of shell commands to run for the rule
    comment: a comment to put in the Makefile above the rule (also useful
             for making this Python script's code self-documenting)
    order_only: if true, makes the dependency order-only
    force: if true, include FORCE_DO_CMD as an order-only dep
    phony: if true, the rule does not actually generate the named output, the
           output is just a name to run the rule
    command: (optional) command name to generate unambiguous labels
    """
    # Escape spaces in all paths. This file targets Python 2 (xrange/print
    # elsewhere), where map() returns a list, so outputs[0] below is safe.
    outputs = map(QuoteSpaces, outputs)
    inputs = map(QuoteSpaces, inputs)
    if comment:
      self.WriteLn('# ' + comment)
    if phony:
      self.WriteLn('.PHONY: ' + ' '.join(outputs))
    if actions:
      # Pin TOOLSET for the first output so do_cmd expands correctly.
      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
    force_append = ' FORCE_DO_CMD' if force else ''
    if order_only:
      # Order only rule: Just write a simple rule.
      # TODO(evanm): just make order_only a list of deps instead of this hack.
      self.WriteLn('%s: | %s%s' %
                   (' '.join(outputs), ' '.join(inputs), force_append))
    elif len(outputs) == 1:
      # Regular rule, one output: Just write a simple rule.
      self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
    else:
      # Regular rule, more than one output: Multiple outputs are tricky in
      # make. We will write three rules:
      # - All outputs depend on an intermediate file.
      # - Make .INTERMEDIATE depend on the intermediate.
      # - The intermediate file depends on the inputs and executes the
      #   actual command.
      # - The intermediate recipe will 'touch' the intermediate file.
      # - The multi-output rule will have an do-nothing recipe.
      intermediate = "%s.intermediate" % (command if command else self.target)
      self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
      self.WriteLn('\t%s' % '@:');
      self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
      self.WriteLn('%s: %s%s' %
                   (intermediate, ' '.join(inputs), force_append))
      # NOTE: this mutates the caller's `actions` list in place.
      actions.insert(0, '$(call do_cmd,touch)')
    if actions:
      for action in actions:
        self.WriteLn('\t%s' % action)
    self.WriteLn()
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
    """Write a set of LOCAL_XXX definitions for Android NDK.
    These variable definitions will be used by Android NDK but do nothing for
    non-Android applications.
    Arguments:
      module_name: Android NDK module name, which must be unique among all
        module names.
      all_sources: A list of source files (will be filtered by Compilable).
      link_deps: A list of link dependencies, which must be sorted in
        the order from dependencies to dependents.
    """
    # Only linkable target types get an NDK module definition.
    if self.type not in ('executable', 'shared_library', 'static_library'):
      return
    self.WriteLn('# Variable definitions for Android applications')
    self.WriteLn('include $(CLEAR_VARS)')
    self.WriteLn('LOCAL_MODULE := ' + module_name)
    self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
                 '$(DEFS_$(BUILDTYPE)) '
                 # LOCAL_CFLAGS is applied to both of C and C++. There is
                 # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
                 # sources.
                 '$(CFLAGS_C_$(BUILDTYPE)) '
                 # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
                 # LOCAL_C_INCLUDES does not expect it. So put it in
                 # LOCAL_CFLAGS.
                 '$(INCS_$(BUILDTYPE))')
    # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
    self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
    self.WriteLn('LOCAL_C_INCLUDES :=')
    self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
    # Detect the C++ extension.
    # Majority vote over the sources; ties keep the earlier leader and
    # '.cpp' wins when no C++ sources are present at all.
    cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
    default_cpp_ext = '.cpp'
    for filename in all_sources:
      ext = os.path.splitext(filename)[1]
      if ext in cpp_ext:
        cpp_ext[ext] += 1
        if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
          default_cpp_ext = ext
    self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
    self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
                   'LOCAL_SRC_FILES')
    # Filter out those which do not match prefix and suffix and produce
    # the resulting list without prefix and suffix.
    def DepsToModules(deps, prefix, suffix):
      # Strip directory, then keep only names shaped <prefix><module><suffix>.
      modules = []
      for filepath in deps:
        filename = os.path.basename(filepath)
        if filename.startswith(prefix) and filename.endswith(suffix):
          modules.append(filename[len(prefix):-len(suffix)])
      return modules
    # Retrieve the default value of 'SHARED_LIB_SUFFIX'
    # (computed for the 'linux' flavor -- presumably so the standard '.so'
    # suffix is used regardless of the generating host; TODO confirm).
    params = {'flavor': 'linux'}
    default_variables = {}
    CalculateVariables(default_variables, params)
    self.WriteList(
        DepsToModules(link_deps,
                      generator_default_variables['SHARED_LIB_PREFIX'],
                      default_variables['SHARED_LIB_SUFFIX']),
        'LOCAL_SHARED_LIBRARIES')
    self.WriteList(
        DepsToModules(link_deps,
                      generator_default_variables['STATIC_LIB_PREFIX'],
                      generator_default_variables['STATIC_LIB_SUFFIX']),
        'LOCAL_STATIC_LIBRARIES')
    if self.type == 'executable':
      self.WriteLn('include $(BUILD_EXECUTABLE)')
    elif self.type == 'shared_library':
      self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
    elif self.type == 'static_library':
      self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
    self.WriteLn()
def WriteLn(self, text=''):
    """Append |text| plus a trailing newline to the makefile being written."""
    line = text + '\n'
    self.fp.write(line)
def GetSortedXcodeEnv(self, additional_settings=None):
    """Return the sorted Xcode build environment for this target."""
    target_srcdir = os.path.join("$(abs_srcdir)", self.path)
    return gyp.xcode_emulation.GetSortedXcodeEnv(
        self.xcode_settings, "$(abs_builddir)", target_srcdir,
        "$(BUILDTYPE)", additional_settings)
def GetSortedXcodePostbuildEnv(self):
    """Return the Xcode env for postbuilds, including the strip-save file."""
    # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
    # TODO(thakis): It would be nice to have some general mechanism instead.
    strip_save_file = self.xcode_settings.GetPerTargetSetting(
        'CHROMIUM_STRIP_SAVE_FILE', '')
    # Even if strip_save_file is empty, explicitly write it. Else a postbuild
    # might pick up an export from an earlier target.
    extra = {'CHROMIUM_STRIP_SAVE_FILE': strip_save_file}
    return self.GetSortedXcodeEnv(additional_settings=extra)
def WriteSortedXcodeEnv(self, target, env):
    """Emit a target-local `export VAR := value` line per env entry."""
    for key, value in env:
      # For
      #   foo := a\ b
      # the escaped space does the right thing. For
      #   export foo := a\ b
      # it does not -- the backslash is written to the env as literal
      # character. So don't escape spaces in the value.
      self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), key, value))
def Objectify(self, path):
    """Convert a path to its output directory form."""
    obj_prefix = '$(obj).%s/$(TARGET)/' % self.toolset
    if '$(' in path:
      path = path.replace('$(obj)/', obj_prefix)
    if '$(obj)' not in path:
      path = obj_prefix + path
    return path
def Pchify(self, path, lang):
    """Convert a prefix header path to its output directory form."""
    abs_path = self.Absolutify(path)
    if '$(' in abs_path:
      # NOTE: the replacement deliberately matches upstream gyp and has no
      # trailing slash after 'pch-<lang>'.
      return abs_path.replace(
          '$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' % (self.toolset, lang))
    return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, abs_path)
def Absolutify(self, path):
    """Convert a subdirectory-relative path into a base-relative path.

    Paths containing make variables are passed through (minus trailing
    slashes) rather than normalized.
    """
    if '$(' in path:
      # normpath might collapse the path too aggressively if it features
      # '..' next to a variable; only strip trailing slashes here.
      return path.rstrip('/')
    joined = os.path.join(self.path, path)
    return os.path.normpath(joined)
def ExpandInputRoot(self, template, expansion, dirname):
    """Substitute INPUT_ROOT / INPUT_DIRNAME placeholders in a rule template.

    Templates without either placeholder are returned untouched (this also
    avoids %-formatting errors on stray '%' characters).
    """
    placeholders = ('%(INPUT_ROOT)s', '%(INPUT_DIRNAME)s')
    if not any(p in template for p in placeholders):
      return template
    return template % {'INPUT_ROOT': expansion, 'INPUT_DIRNAME': dirname}
def _InstallableTargetInstallPath(self):
    """Returns the location of the final output for an installable target."""
    # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
    # rely on this; that emulation was deliberately disabled here (see the
    # XXX(TooTallNate) note in upstream gyp), so shared libraries are NOT
    # routed into a common '$(builddir)/lib.<toolset>/' directory.
    return '$(builddir)/%s' % self.alias
def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
                              build_files):
  """Write the makefile target that re-runs gyp to regenerate the Makefile."""
  options = params['options']
  toplevel = options.toplevel_dir
  # gyp arguments, relativized against the top level directory.
  regen_args = [gyp.common.RelativePath(f, toplevel)
                for f in params['build_files_arg']]
  gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], toplevel)
  if not gyp_binary.startswith(os.sep):
    # Make sure a relative binary is invoked as ./gyp, not via $PATH.
    gyp_binary = os.path.join('.', gyp_binary)
  regen_command = gyp.common.EncodePOSIXShellList(
      [gyp_binary, '-fmake'] +
      gyp.RegenerateFlags(options) +
      regen_args)
  root_makefile.write(
      "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
      "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
      "%(makefile_name)s: %(deps)s\n"
      "\t$(call do_cmd,regen_makefile)\n\n" % {
          'makefile_name': makefile_name,
          'deps': ' '.join(map(Sourceify, build_files)),
          'cmd': regen_command})
def PerformBuild(data, configurations, params):
options = params['options']
for config in configurations:
arguments = ['make']
if options.toplevel_dir and options.toplevel_dir != '.':
arguments += '-C', options.toplevel_dir
arguments.append('BUILDTYPE=' + config)
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the root Makefile, one .mk per target,
  per-gyp sub-Makefiles, and the auto-regeneration rule."""
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')
  android_ndk_version = generator_flags.get('android_ndk_version', None)
  default_target = generator_flags.get('default_target', 'all')
  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    if options.generator_output:
      output_file = os.path.join(
          options.depth, options.generator_output, base_path, base_name)
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file
  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'
  srcdir = '.'
  makefile_name = 'Makefile' + options.suffix
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  if options.generator_output:
    # Output is redirected; srcdir must point back at the real sources.
    global srcdir_prefix
    makefile_path = os.path.join(
        options.toplevel_dir, options.generator_output, makefile_name)
    srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
    srcdir_prefix = '$(srcdir)/'
  flock_command= 'flock'
  copy_archive_arguments = '-af'
  # Parameters substituted into SHARED_HEADER below; the defaults here are
  # for the linux flavor and are overridden per-flavor next.
  header_params = {
      'default_target': default_target,
      'builddir': builddir_name,
      'default_configuration': default_configuration,
      'flock': flock_command,
      'flock_index': 1,
      'link_commands': LINK_COMMANDS_LINUX,
      'extra_commands': '',
      'srcdir': srcdir,
      'copy_archive_args': copy_archive_arguments,
    }
  if flavor == 'mac':
    flock_command = './gyp-mac-tool flock'
    header_params.update({
        'flock': flock_command,
        'flock_index': 2,
        'link_commands': LINK_COMMANDS_MAC,
        'extra_commands': SHARED_HEADER_MAC_COMMANDS,
    })
  elif flavor == 'android':
    header_params.update({
        'link_commands': LINK_COMMANDS_ANDROID,
    })
  elif flavor == 'solaris':
    header_params.update({
        'flock': './gyp-flock-tool flock',
        'flock_index': 2,
    })
  elif flavor == 'freebsd':
    # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
    header_params.update({
        'flock': 'lockf',
    })
  elif flavor == 'openbsd':
    copy_archive_arguments = '-pPRf'
    header_params.update({
        'copy_archive_args': copy_archive_arguments,
    })
  elif flavor == 'aix':
    copy_archive_arguments = '-pPRf'
    header_params.update({
        'copy_archive_args': copy_archive_arguments,
        'link_commands': LINK_COMMANDS_AIX,
        'flock': './gyp-flock-tool flock',
        'flock_index': 2,
    })
  # Toolchain binaries: environment variables win over the defaults.
  header_params.update({
    'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
    'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
    'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
    'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
    'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
    'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
    'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
    'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
  })
  # make_global_settings comes from the first build file and must be the
  # same for every target (asserted in the main loop below).
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_array = data[build_file].get('make_global_settings', [])
  wrappers = {}
  for key, value in make_global_settings_array:
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
  make_global_settings = ''
  for key, value in make_global_settings_array:
    if re.match('.*_wrapper', key):
      continue
    if value[0] != '$':
      value = '$(abspath %s)' % value
    wrapper = wrappers.get(key)
    if wrapper:
      value = '%s %s' % (wrapper, value)
      del wrappers[key]
    if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
      make_global_settings += (
          'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
      # Let gyp-time envvars win over global settings.
      env_key = key.replace('.', '_')  # CC.host -> CC_host
      if env_key in os.environ:
        value = os.environ[env_key]
      make_global_settings += ' %s = %s\n' % (key, value)
      make_global_settings += 'endif\n'
    else:
      make_global_settings += '%s ?= %s\n' % (key, value)
  # TODO(ukai): define cmd when only wrapper is specified in
  # make_global_settings.
  header_params['make_global_settings'] = make_global_settings
  gyp.common.EnsureDirExists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(SHARED_HEADER % header_params)
  # Currently any versions have the same effect, but in future the behavior
  # could be different.
  if android_ndk_version:
    root_makefile.write(
        '# Define LOCAL_PATH for build of Android applications.\n'
        'LOCAL_PATH := $(call my-dir)\n'
        '\n')
  for toolset in toolsets:
    root_makefile.write('TOOLSET := %s\n' % toolset)
  WriteRootHeaderSuffixRules(root_makefile)
  # Put build-time support tools next to the root Makefile.
  dest_path = os.path.dirname(makefile_path)
  gyp.common.CopyTool(flavor, dest_path)
  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)
  # Main loop: write one .mk file per qualified target via MakefileWriter.
  build_files = set()
  include_list = set()
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings_array == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets. %s vs. %s" %
        (this_make_global_settings, make_global_settings))
    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)
    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')
    spec = target_dicts[qualified_target]
    configs = spec['configurations']
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
    writer = MakefileWriter(generator_flags, flavor)
    writer.Write(qualified_target, base_path, output_file, spec, configs,
                 part_of_all=qualified_target in needed_targets)
    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add(mkfile_rel_path)
  # Write out per-gyp (sub-project) Makefiles.
  depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
  for build_file in build_files:
    # The paths in build_files were relativized above, so undo that before
    # testing against the non-relativized items in target_list and before
    # calculating the Makefile path.
    build_file = os.path.join(depth_rel_path, build_file)
    gyp_targets = [target_dicts[target]['target_name'] for target in target_list
                   if target.startswith(build_file) and
                   target in needed_targets]
    # Only generate Makefiles for gyp files with targets.
    if not gyp_targets:
      continue
    base_path, output_file = CalculateMakefilePath(build_file,
        os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
    makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
                                                os.path.dirname(output_file))
    # NOTE(review): uses the 'writer' left over from the last loop iteration
    # above -- presumably WriteSubMake does not depend on per-target state;
    # confirm against MakefileWriter.
    writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
                        builddir_name)
  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include_file in sorted(include_list):
    # We wrap each .mk include in an if statement so users can tell make to
    # not load a file by setting NO_LOAD. The below make code says, only
    # load the .mk file if the .mk filename doesn't start with a token in
    # NO_LOAD.
    root_makefile.write(
        "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
        " $(findstring $(join ^,$(prefix)),\\\n"
        " $(join ^," + include_file + ")))),)\n")
    root_makefile.write(" include " + include_file + "\n")
    root_makefile.write("endif\n")
  root_makefile.write('\n')
  if (not generator_flags.get('standalone')
      and generator_flags.get('auto_regeneration', True)):
    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
  root_makefile.write(SHARED_FOOTER)
  root_makefile.close()
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.