text
stringlengths 4
1.02M
| meta
dict |
|---|---|
"""Bencode plugin related functions and classes for testing."""
from tests.parsers import test_lib
class BencodePluginTestCase(test_lib.ParserTestCase):
    """The unit test case for a bencode plugin."""
    # Shared base class for bencode plugin tests; adds no behavior of its
    # own beyond what test_lib.ParserTestCase provides.
|
{
"content_hash": "32efd2c4389e801cff7b0cba92ed7d94",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 63,
"avg_line_length": 29.285714285714285,
"alnum_prop": 0.7658536585365854,
"repo_name": "8u1a/plaso",
"id": "8f0c04febc0d171b530c7faa249016e02d166110",
"size": "229",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/parsers/bencode_plugins/test_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1276"
},
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Protocol Buffer",
"bytes": "13930"
},
{
"name": "Python",
"bytes": "3179107"
},
{
"name": "Shell",
"bytes": "47305"
}
],
"symlink_target": ""
}
|
import contextlib
import fixtures
import os
import time
import mock
from oslo.config import cfg
from nova import exception
from nova.openstack.common import processutils
from nova.storage import linuxscsi
from nova import test
from nova.tests.virt.libvirt import fake_libvirt_utils
from nova import utils
from nova.virt import fake
from nova.virt.libvirt import utils as libvirt_utils
from nova.virt.libvirt import volume
CONF = cfg.CONF
class LibvirtVolumeTestCase(test.NoDBTestCase):
def setUp(self):
    """Set up shared fixtures: command recording, a fake libvirt
    connection object, and default volume/disk metadata used by the
    individual tests.
    """
    super(LibvirtVolumeTestCase, self).setUp()
    # Every command the drivers try to run is captured here instead of
    # being executed, so tests can assert on the exact command sequence.
    self.executes = []

    def fake_execute(*cmd, **kwargs):
        self.executes.append(cmd)
        return None, None
    self.stubs.Set(utils, 'execute', fake_execute)

    class FakeLibvirtDriver(object):
        def __init__(self, hyperv="QEMU", version=1005001):
            self.hyperv = hyperv
            self.version = version

        def get_hypervisor_version(self):
            return self.version

        def get_hypervisor_type(self):
            return self.hyperv

        def get_all_block_devices(self):
            return []
    # NOTE(review): FakeVirtAPI() is passed positionally into the
    # 'hyperv' parameter of FakeLibvirtDriver — looks unintended since
    # nothing in these tests reads it as a virtapi; confirm.
    self.fake_conn = FakeLibvirtDriver(fake.FakeVirtAPI())
    self.connr = {
        'ip': '127.0.0.1',
        'initiator': 'fake_initiator',
        'host': 'fake_host'
    }
    self.disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    self.name = 'volume-00000001'
    self.location = '10.0.2.15:3260'
    self.iqn = 'iqn.2010-10.org.openstack:%s' % self.name
    self.vol = {'id': 1, 'name': self.name}
    self.uuid = '875a8070-d0b9-4949-8b31-104d125c9a64'
    self.user = 'foo'
def _assertNetworkAndProtocolEquals(self, tree):
    """Assert *tree* describes a network disk served over rbd."""
    self.assertEqual(tree.get('type'), 'network')
    source = tree.find('./source')
    self.assertEqual(source.get('protocol'), 'rbd')
    self.assertEqual(source.get('name'), '%s/%s' % ('rbd', self.name))
def _assertFileTypeEquals(self, tree, file_path):
    """Assert *tree* describes a file-backed disk at *file_path*."""
    self.assertEqual('file', tree.get('type'))
    self.assertEqual(file_path, tree.find('./source').get('file'))
def _assertDiskInfoEquals(self, tree, disk_info):
    """Assert device type, target bus and target dev match *disk_info*."""
    target = tree.find('./target')
    self.assertEqual(tree.get('device'), disk_info['type'])
    self.assertEqual(target.get('bus'), disk_info['bus'])
    self.assertEqual(target.get('dev'), disk_info['dev'])
def _test_libvirt_volume_driver_disk_info(self):
    """Connect a fake volume and verify the generated disk XML matches
    the current self.disk_info."""
    driver = volume.LibvirtVolumeDriver(self.fake_conn)
    connection_info = {
        'driver_volume_type': 'fake',
        'data': {'device_path': '/foo'},
        'serial': 'fake_serial',
    }
    config = driver.connect_volume(connection_info, self.disk_info)
    self._assertDiskInfoEquals(config.format_dom(), self.disk_info)
def test_libvirt_volume_disk_info_type(self):
    """A non-default device type ('cdrom') is reflected in the XML."""
    self.disk_info['type'] = 'cdrom'
    self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_disk_info_dev(self):
    """A non-default target device name ('hdc') is reflected in the XML."""
    self.disk_info['dev'] = 'hdc'
    self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_disk_info_bus(self):
    """A non-default bus ('scsi') is reflected in the XML."""
    self.disk_info['bus'] = 'scsi'
    self._test_libvirt_volume_driver_disk_info()
def test_libvirt_volume_driver_serial(self):
    """The connection's serial ends up in <serial> and no <blockio>
    element is emitted when block sizes are absent."""
    libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
    connection_info = {
        'driver_volume_type': 'fake',
        'data': {
            'device_path': '/foo',
        },
        'serial': 'fake_serial',
    }
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self.assertEqual('block', tree.get('type'))
    self.assertEqual('fake_serial', tree.find('./serial').text)
    self.assertIsNone(tree.find('./blockio'))
def test_libvirt_volume_driver_blockio(self):
    """logical/physical_block_size in the connection data become
    attributes of the <blockio> element."""
    libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
    connection_info = {
        'driver_volume_type': 'fake',
        'data': {
            'device_path': '/foo',
            'logical_block_size': '4096',
            'physical_block_size': '4096',
        },
        'serial': 'fake_serial',
    }
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    blockio = tree.find('./blockio')
    self.assertEqual('4096', blockio.get('logical_block_size'))
    self.assertEqual('4096', blockio.get('physical_block_size'))
def test_libvirt_volume_driver_iotune(self):
    """An invalid (non-dict) qos_specs value is ignored; a valid dict of
    specs becomes child elements of <iotune>."""
    libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
    connection_info = {
        'driver_volume_type': 'fake',
        'data': {
            "device_path": "/foo",
            'qos_specs': 'bar',
        },
    }
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    iotune = tree.find('./iotune')
    # ensure invalid qos_specs is ignored
    self.assertIsNone(iotune)
    specs = {
        'total_bytes_sec': '102400',
        'read_bytes_sec': '51200',
        'write_bytes_sec': '0',
        'total_iops_sec': '0',
        'read_iops_sec': '200',
        'write_iops_sec': '200',
    }
    # Replace the bogus value with a real spec dict and reconnect.
    del connection_info['data']['qos_specs']
    connection_info['data'].update(dict(qos_specs=specs))
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    self.assertEqual('102400', tree.find('./iotune/total_bytes_sec').text)
    self.assertEqual('51200', tree.find('./iotune/read_bytes_sec').text)
    self.assertEqual('0', tree.find('./iotune/write_bytes_sec').text)
    self.assertEqual('0', tree.find('./iotune/total_iops_sec').text)
    self.assertEqual('200', tree.find('./iotune/read_iops_sec').text)
    self.assertEqual('200', tree.find('./iotune/write_iops_sec').text)
def test_libvirt_volume_driver_readonly(self):
    """access_mode handling: an unknown mode raises
    InvalidVolumeAccessMode, 'rw' emits no <readonly/> element and 'ro'
    emits one.
    """
    libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
    connection_info = {
        'driver_volume_type': 'fake',
        'data': {
            "device_path": "/foo",
            'access_mode': 'bar',
        },
    }
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    # 'bar' is not a recognized access mode. Use the local disk_info
    # here (the original inconsistently passed self.disk_info, which at
    # this point holds the same values).
    self.assertRaises(exception.InvalidVolumeAccessMode,
                      libvirt_driver.connect_volume,
                      connection_info, disk_info)
    connection_info['data']['access_mode'] = 'rw'
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    readonly = tree.find('./readonly')
    self.assertIsNone(readonly)
    connection_info['data']['access_mode'] = 'ro'
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    readonly = tree.find('./readonly')
    self.assertIsNotNone(readonly)
def iscsi_connection(self, volume, location, iqn):
    """Build a fake iSCSI connection_info dict for *volume*."""
    data = {
        'volume_id': volume['id'],
        'target_portal': location,
        'target_iqn': iqn,
        'target_lun': 1,
        'qos_specs': {
            'total_bytes_sec': '102400',
            'read_iops_sec': '200',
        },
    }
    return {'driver_volume_type': 'iscsi', 'data': data}
def test_libvirt_iscsi_driver(self):
    """Connecting builds a block disk pointing at the by-path device,
    qos specs land in <iotune>, and the full login/logout iscsiadm
    command sequence is issued."""
    # NOTE(vish) exists is to make driver assume connecting worked
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    connection_info = self.iscsi_connection(self.vol, self.location,
                                            self.iqn)
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    self.assertEqual('qemu', conf.driver_name)
    tree = conf.format_dom()
    dev_str = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (self.location,
                                                          self.iqn)
    self.assertEqual(tree.get('type'), 'block')
    self.assertEqual(tree.find('./source').get('dev'), dev_str)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    # Full expected sequence: discovery, session list, login, startup
    # automatic, rescan, startup manual, logout, node delete.
    expected_commands = [('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location),
                         ('iscsiadm', '-m', 'session'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--login'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--op', 'update',
                          '-n', 'node.startup', '-v', 'automatic'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--rescan'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--op', 'update',
                          '-n', 'node.startup', '-v', 'manual'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--logout'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--op', 'delete')]
    self.assertEqual('102400', tree.find('./iotune/total_bytes_sec').text)
    self.assertEqual(self.executes, expected_commands)
def test_libvirt_iscsi_driver_still_in_use(self):
    """When another LUN on the same target is still attached, disconnect
    deletes only the device instead of logging out of the target."""
    # NOTE(vish) exists is to make driver assume connecting worked
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    # lun-2 on the same target is reported as a block device in use.
    devs = ['/dev/disk/by-path/ip-%s-iscsi-%s-lun-2' % (self.location,
                                                        self.iqn)]
    self.stubs.Set(self.fake_conn, 'get_all_block_devices', lambda: devs)
    vol = {'id': 1, 'name': self.name}
    connection_info = self.iscsi_connection(vol, self.location, self.iqn)
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    dev_name = 'ip-%s-iscsi-%s-lun-1' % (self.location, self.iqn)
    dev_str = '/dev/disk/by-path/%s' % dev_name
    self.assertEqual(tree.get('type'), 'block')
    self.assertEqual(tree.find('./source').get('dev'), dev_str)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    # No --logout / --op delete here: the target stays logged in.
    expected_commands = [('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location),
                         ('iscsiadm', '-m', 'session'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--login'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--op', 'update',
                          '-n', 'node.startup', '-v', 'automatic'),
                         ('iscsiadm', '-m', 'node', '-T', self.iqn,
                          '-p', self.location, '--rescan'),
                         ('cp', '/dev/stdin',
                          '/sys/block/%s/device/delete' % dev_name)]
    self.assertEqual(self.executes, expected_commands)
def test_libvirt_iscsi_driver_disconnect_multipath_error(self):
    """disconnect_volume tolerates a ProcessExecutionError from the
    multipath flush (multipath -f) and still issues the call once."""
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    devs = ['/dev/disk/by-path/ip-%s-iscsi-%s-lun-2' % (self.location,
                                                        self.iqn)]
    # Patch out all external interaction; _run_multipath is forced to
    # fail to exercise the error path.
    with contextlib.nested(
            mock.patch.object(os.path, 'exists', return_value=True),
            mock.patch.object(self.fake_conn, 'get_all_block_devices',
                              return_value=devs),
            mock.patch.object(libvirt_driver, '_rescan_multipath'),
            mock.patch.object(libvirt_driver, '_run_multipath'),
            mock.patch.object(libvirt_driver, '_get_multipath_device_name',
                              return_value='/dev/mapper/fake-multipath-devname'),
            mock.patch.object(libvirt_driver,
                              '_get_target_portals_from_iscsiadm_output',
                              return_value=[('fake-ip', 'fake-portal')]),
            mock.patch.object(libvirt_driver, '_get_multipath_iqn',
                              return_value='fake-portal'),
    ) as (mock_exists, mock_devices, mock_rescan_multipath,
          mock_run_multipath, mock_device_name, mock_get_portals,
          mock_get_iqn):
        mock_run_multipath.side_effect = processutils.ProcessExecutionError
        vol = {'id': 1, 'name': self.name}
        connection_info = self.iscsi_connection(vol, self.location,
                                                self.iqn)
        conf = libvirt_driver.connect_volume(connection_info,
                                             self.disk_info)
        tree = conf.format_dom()
        dev_name = 'ip-%s-iscsi-%s-lun-1' % (self.location, self.iqn)
        dev_str = '/dev/disk/by-path/%s' % dev_name
        self.assertEqual('block', tree.get('type'))
        self.assertEqual(dev_str, tree.find('./source').get('dev'))
        libvirt_driver.use_multipath = True
        # Must not raise despite the forced failure.
        libvirt_driver.disconnect_volume(connection_info, "vde")
        mock_run_multipath.assert_called_once_with(
            ['-f', 'fake-multipath-devname'],
            check_exit_code=[0, 1])
def iser_connection(self, volume, location, iqn):
    """Build a fake iSER connection_info dict for *volume*."""
    data = {
        'volume_id': volume['id'],
        'target_portal': location,
        'target_iqn': iqn,
        'target_lun': 1,
    }
    return {'driver_volume_type': 'iser', 'data': data}
def sheepdog_connection(self, volume):
    """Build a fake sheepdog connection_info dict for *volume*."""
    return {'driver_volume_type': 'sheepdog',
            'data': {'name': volume['name']}}
def test_libvirt_sheepdog_driver(self):
    """Sheepdog volumes yield a network disk with protocol 'sheepdog'."""
    driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.sheepdog_connection(self.vol)
    config = driver.connect_volume(connection_info, self.disk_info)
    tree = config.format_dom()
    source = tree.find('./source')
    self.assertEqual('network', tree.get('type'))
    self.assertEqual('sheepdog', source.get('protocol'))
    self.assertEqual(self.name, source.get('name'))
    driver.disconnect_volume(connection_info, "vde")
def rbd_connection(self, volume):
    """Build a fake rbd connection_info dict for *volume*, deriving the
    auth fields from the configured rbd options."""
    secret_uuid = CONF.libvirt.rbd_secret_uuid
    data = {
        'name': '%s/%s' % ('rbd', volume['name']),
        'auth_enabled': secret_uuid is not None,
        'auth_username': CONF.libvirt.rbd_user,
        'secret_type': 'ceph',
        'secret_uuid': secret_uuid,
        'qos_specs': {
            'total_bytes_sec': '1048576',
            'read_iops_sec': '500',
        },
    }
    return {'driver_volume_type': 'rbd', 'data': data}
def test_libvirt_rbd_driver(self):
    """rbd volumes produce network disks with qos in <iotune> and no
    auth element when auth is not enabled."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertIsNone(tree.find('./source/auth'))
    self.assertEqual('1048576', tree.find('./iotune/total_bytes_sec').text)
    self.assertEqual('500', tree.find('./iotune/read_iops_sec').text)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_rbd_driver_hosts(self):
    """hosts/ports pairs from the connection data become <host> elements
    in order, preserving a None port."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    hosts = ['example.com', '1.2.3.4', '::1']
    ports = [None, '6790', '6791']
    connection_info['data']['hosts'] = hosts
    connection_info['data']['ports'] = ports
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertIsNone(tree.find('./source/auth'))
    found_hosts = tree.findall('./source/host')
    self.assertEqual([host.get('name') for host in found_hosts], hosts)
    self.assertEqual([host.get('port') for host in found_hosts], ports)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_rbd_driver_auth_enabled(self):
    """With auth_enabled the username and secret appear under <auth>."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    secret_type = 'ceph'
    connection_info['data']['auth_enabled'] = True
    connection_info['data']['auth_username'] = self.user
    connection_info['data']['secret_type'] = secret_type
    connection_info['data']['secret_uuid'] = self.uuid
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertEqual(tree.find('./auth').get('username'), self.user)
    self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
    self.assertEqual(tree.find('./auth/secret').get('uuid'), self.uuid)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_rbd_driver_auth_enabled_flags_override(self):
    """Configured rbd_user/rbd_secret_uuid flags take precedence over
    the credentials supplied in the connection data."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    secret_type = 'ceph'
    connection_info['data']['auth_enabled'] = True
    connection_info['data']['auth_username'] = self.user
    connection_info['data']['secret_type'] = secret_type
    connection_info['data']['secret_uuid'] = self.uuid
    flags_uuid = '37152720-1785-11e2-a740-af0c1d8b8e4b'
    flags_user = 'bar'
    self.flags(rbd_user=flags_user,
               rbd_secret_uuid=flags_uuid,
               group='libvirt')
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertEqual(tree.find('./auth').get('username'), flags_user)
    self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
    self.assertEqual(tree.find('./auth/secret').get('uuid'), flags_uuid)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_rbd_driver_auth_disabled(self):
    """With auth_enabled False no <auth> element is produced even though
    credentials are present in the connection data."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    secret_type = 'ceph'
    connection_info['data']['auth_enabled'] = False
    connection_info['data']['auth_username'] = self.user
    connection_info['data']['secret_type'] = secret_type
    connection_info['data']['secret_uuid'] = self.uuid
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertIsNone(tree.find('./auth'))
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_rbd_driver_auth_disabled_flags_override(self):
    """A configured rbd_secret_uuid enables auth locally even when the
    connection data has auth_enabled False."""
    libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
    connection_info = self.rbd_connection(self.vol)
    secret_type = 'ceph'
    connection_info['data']['auth_enabled'] = False
    connection_info['data']['auth_username'] = self.user
    connection_info['data']['secret_type'] = secret_type
    connection_info['data']['secret_uuid'] = self.uuid
    # NOTE: Supplying the rbd_secret_uuid will enable authentication
    # locally in nova-compute even if not enabled in nova-volume/cinder
    flags_uuid = '37152720-1785-11e2-a740-af0c1d8b8e4b'
    flags_user = 'bar'
    self.flags(rbd_user=flags_user,
               rbd_secret_uuid=flags_uuid,
               group='libvirt')
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertNetworkAndProtocolEquals(tree)
    self.assertEqual(tree.find('./auth').get('username'), flags_user)
    self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
    self.assertEqual(tree.find('./auth/secret').get('uuid'), flags_uuid)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_kvm_volume(self):
    """Plain (non-multipath) iSCSI attach yields the by-path device."""
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    connection_info = self.iscsi_connection(self.vol, self.location,
                                            self.iqn)
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    dev_str = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (self.location,
                                                          self.iqn)
    self.assertEqual(tree.get('type'), 'block')
    self.assertEqual(tree.find('./source').get('dev'), dev_str)
    libvirt_driver.disconnect_volume(connection_info, 'vde')
def test_libvirt_kvm_volume_with_multipath(self):
    """With iscsi_use_multipath the mapper device is used as the source
    and disconnect flushes it with 'multipath -f'."""
    self.flags(iscsi_use_multipath=True, group='libvirt')
    self.stubs.Set(os.path, 'exists', lambda x: True)
    devs = ['/dev/mapper/sda', '/dev/mapper/sdb']
    self.stubs.Set(self.fake_conn, 'get_all_block_devices', lambda: devs)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    connection_info = self.iscsi_connection(self.vol, self.location,
                                            self.iqn)
    mpdev_filepath = '/dev/mapper/foo'
    connection_info['data']['device_path'] = mpdev_filepath
    libvirt_driver._get_multipath_device_name = lambda x: mpdev_filepath
    self.stubs.Set(libvirt_driver,
                   '_get_target_portals_from_iscsiadm_output',
                   lambda x: [[self.location, self.iqn]])
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.find('./source').get('dev'), mpdev_filepath)
    libvirt_driver._get_multipath_iqn = lambda x: self.iqn
    libvirt_driver.disconnect_volume(connection_info, 'vde')
    expected_multipath_cmd = ('multipath', '-f', 'foo')
    self.assertIn(expected_multipath_cmd, self.executes)
def test_libvirt_kvm_volume_with_multipath_still_in_use(self):
    """Disconnecting a multipath volume only deletes the devices that
    belong to this LUN; other mapper devices are left alone."""
    name = 'volume-00000001'
    location = '10.0.2.15:3260'
    iqn = 'iqn.2010-10.org.openstack:%s' % name
    mpdev_filepath = '/dev/mapper/foo'

    def _get_multipath_device_name(path):
        # Only lun-1 maps to the device under test.
        if '%s-lun-1' % iqn in path:
            return mpdev_filepath
        return '/dev/mapper/donotdisconnect'
    self.flags(iscsi_use_multipath=True, group='libvirt')
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    libvirt_driver._get_multipath_device_name = lambda x: _get_multipath_device_name(x)
    block_devs = ['/dev/disks/by-path/%s-iscsi-%s-lun-2' % (location, iqn)]
    self.stubs.Set(self.fake_conn, 'get_all_block_devices',
                   lambda: block_devs)
    vol = {'id': 1, 'name': name}
    connection_info = self.iscsi_connection(vol, location, iqn)
    connection_info['data']['device_path'] = mpdev_filepath
    libvirt_driver._get_multipath_iqn = lambda x: iqn
    iscsi_devs = ['1.2.3.4-iscsi-%s-lun-1' % iqn,
                  '%s-iscsi-%s-lun-1' % (location, iqn),
                  '%s-iscsi-%s-lun-2' % (location, iqn)]
    libvirt_driver._get_iscsi_devices = lambda: iscsi_devs
    self.stubs.Set(libvirt_driver,
                   '_get_target_portals_from_iscsiadm_output',
                   lambda x: [[location, iqn]])
    # Set up disconnect volume mock expectations
    self.mox.StubOutWithMock(libvirt_driver, '_delete_device')
    self.mox.StubOutWithMock(libvirt_driver, '_rescan_multipath')
    libvirt_driver._rescan_multipath()
    libvirt_driver._delete_device('/dev/disk/by-path/%s' % iscsi_devs[0])
    libvirt_driver._delete_device('/dev/disk/by-path/%s' % iscsi_devs[1])
    libvirt_driver._rescan_multipath()
    # Ensure that the mpath devices are deleted
    self.mox.ReplayAll()
    libvirt_driver.disconnect_volume(connection_info, 'vde')
def test_libvirt_kvm_volume_with_multipath_getmpdev(self):
    """The multipath device name is resolved during connect even when
    other volumes' devices are present."""
    self.flags(iscsi_use_multipath=True, group='libvirt')
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
    name0 = 'volume-00000000'
    iqn0 = 'iqn.2010-10.org.openstack:%s' % name0
    dev0 = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-0' % (self.location, iqn0)
    dev = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (self.location,
                                                      self.iqn)
    devs = [dev0, dev]
    self.stubs.Set(self.fake_conn, 'get_all_block_devices', lambda: devs)
    connection_info = self.iscsi_connection(self.vol, self.location,
                                            self.iqn)
    mpdev_filepath = '/dev/mapper/foo'
    libvirt_driver._get_multipath_device_name = lambda x: mpdev_filepath
    self.stubs.Set(libvirt_driver,
                   '_get_target_portals_from_iscsiadm_output',
                   lambda x: [['fake_portal1', 'fake_iqn1']])
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.find('./source').get('dev'), mpdev_filepath)
    libvirt_driver.disconnect_volume(connection_info, 'vde')
def test_libvirt_kvm_iser_volume_with_multipath(self):
    """iSER + multipath: the mapper device is the disk source and
    disconnect flushes it with 'multipath -f'."""
    self.flags(iser_use_multipath=True, group='libvirt')
    self.stubs.Set(os.path, 'exists', lambda x: True)
    # Avoid real delays in the driver's retry loops.
    self.stubs.Set(time, 'sleep', lambda x: None)
    devs = ['/dev/mapper/sda', '/dev/mapper/sdb']
    self.stubs.Set(self.fake_conn, 'get_all_block_devices', lambda: devs)
    libvirt_driver = volume.LibvirtISERVolumeDriver(self.fake_conn)
    name = 'volume-00000001'
    location = '10.0.2.15:3260'
    iqn = 'iqn.2010-10.org.iser.openstack:%s' % name
    vol = {'id': 1, 'name': name}
    connection_info = self.iser_connection(vol, location, iqn)
    mpdev_filepath = '/dev/mapper/foo'
    connection_info['data']['device_path'] = mpdev_filepath
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    libvirt_driver._get_multipath_device_name = lambda x: mpdev_filepath
    self.stubs.Set(libvirt_driver,
                   '_get_target_portals_from_iscsiadm_output',
                   lambda x: [[location, iqn]])
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.find('./source').get('dev'), mpdev_filepath)
    libvirt_driver._get_multipath_iqn = lambda x: iqn
    libvirt_driver.disconnect_volume(connection_info, 'vde')
    expected_multipath_cmd = ('multipath', '-f', 'foo')
    self.assertIn(expected_multipath_cmd, self.executes)
def test_libvirt_kvm_iser_volume_with_multipath_getmpdev(self):
    """iSER + multipath: device-name resolution picks the mapper path
    for the right LUN among several block devices."""
    self.flags(iser_use_multipath=True, group='libvirt')
    self.stubs.Set(os.path, 'exists', lambda x: True)
    # Avoid real delays in the driver's retry loops.
    self.stubs.Set(time, 'sleep', lambda x: None)
    libvirt_driver = volume.LibvirtISERVolumeDriver(self.fake_conn)
    name0 = 'volume-00000000'
    location0 = '10.0.2.15:3260'
    iqn0 = 'iqn.2010-10.org.iser.openstack:%s' % name0
    dev0 = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-0' % (location0, iqn0)
    name = 'volume-00000001'
    location = '10.0.2.15:3260'
    iqn = 'iqn.2010-10.org.iser.openstack:%s' % name
    vol = {'id': 1, 'name': name}
    dev = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (location, iqn)
    devs = [dev0, dev]
    self.stubs.Set(self.fake_conn, 'get_all_block_devices', lambda: devs)
    self.stubs.Set(libvirt_driver, '_get_iscsi_devices', lambda: [])
    connection_info = self.iser_connection(vol, location, iqn)
    mpdev_filepath = '/dev/mapper/foo'
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    libvirt_driver._get_multipath_device_name = lambda x: mpdev_filepath
    self.stubs.Set(libvirt_driver,
                   '_get_target_portals_from_iscsiadm_output',
                   lambda x: [['fake_portal1', 'fake_iqn1']])
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.find('./source').get('dev'), mpdev_filepath)
    libvirt_driver.disconnect_volume(connection_info, 'vde')
def test_libvirt_nfs_driver(self):
    """NFS attach mounts the export under a hashed mount point and
    disconnect unmounts it."""
    # NOTE(vish) exists is to make driver assume connecting worked
    mnt_base = '/mnt'
    self.flags(nfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
    # Report the export as not yet mounted so mount commands are issued.
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/nfs/share1'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('mkdir', '-p', export_mnt_base),
        ('mount', '-t', 'nfs', export_string, export_mnt_base),
        ('umount', export_mnt_base)]
    self.assertEqual(expected_commands, self.executes)
def test_libvirt_nfs_driver_already_mounted(self):
    """When the export is already mounted only findmnt and umount run —
    no mkdir/mount."""
    # NOTE(vish) exists is to make driver assume connecting worked
    mnt_base = '/mnt'
    self.flags(nfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
    export_string = '192.168.1.1:/nfs/share1'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('findmnt', '--target', export_mnt_base, '--source',
         export_string),
        ('umount', export_mnt_base)]
    self.assertEqual(self.executes, expected_commands)
def test_libvirt_nfs_driver_with_opts(self):
    """Mount options from the connection data are forwarded to the
    mount command."""
    mnt_base = '/mnt'
    self.flags(nfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/nfs/share1'
    options = '-o intr,nfsvers=3'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name,
                                'options': options}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('mkdir', '-p', export_mnt_base),
        ('mount', '-t', 'nfs', '-o', 'intr,nfsvers=3',
         export_string, export_mnt_base),
        ('umount', export_mnt_base),
    ]
    self.assertEqual(expected_commands, self.executes)
def aoe_connection(self, shelf, lun):
    """Build a fake ATA-over-Ethernet connection_info dict."""
    data = {
        'target_shelf': shelf,
        'target_lun': lun,
    }
    return {'driver_volume_type': 'aoe', 'data': data}
def test_libvirt_aoe_driver(self):
    """AoE attach produces a block disk at /dev/etherd/e<shelf>.<lun>."""
    # NOTE(jbr_) exists is to make driver assume connecting worked
    self.stubs.Set(os.path, 'exists', lambda x: True)
    libvirt_driver = volume.LibvirtAOEVolumeDriver(self.fake_conn)
    shelf = '100'
    lun = '1'
    connection_info = self.aoe_connection(shelf, lun)
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    aoedevpath = '/dev/etherd/e%s.%s' % (shelf, lun)
    self.assertEqual(tree.get('type'), 'block')
    self.assertEqual(tree.find('./source').get('dev'), aoedevpath)
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_glusterfs_driver(self):
    """GlusterFS attach mounts the export under a hashed mount point and
    disconnect unmounts it."""
    mnt_base = '/mnt'
    self.flags(glusterfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtGlusterfsVolumeDriver(self.fake_conn)
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/volume-00001'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('mkdir', '-p', export_mnt_base),
        ('mount', '-t', 'glusterfs', export_string, export_mnt_base),
        ('umount', export_mnt_base)]
    self.assertEqual(expected_commands, self.executes)
def test_libvirt_glusterfs_driver_already_mounted(self):
    """When the export is already mounted only findmnt and umount run."""
    mnt_base = '/mnt'
    self.flags(glusterfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtGlusterfsVolumeDriver(self.fake_conn)
    export_string = '192.168.1.1:/volume-00001'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('findmnt', '--target', export_mnt_base,
         '--source', export_string),
        ('umount', export_mnt_base)]
    self.assertEqual(self.executes, expected_commands)
def test_libvirt_glusterfs_driver_qcow2(self):
    """A 'qcow2' format in the connection data is carried into the
    <driver type=...> attribute of a file-backed disk."""
    libvirt_driver = volume.LibvirtGlusterfsVolumeDriver(self.fake_conn)
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/volume-00001'
    name = 'volume-00001'
    format = 'qcow2'
    connection_info = {'data': {'export': export_string,
                                'name': name,
                                'format': format}}
    disk_info = {
        "bus": "virtio",
        "dev": "vde",
        "type": "disk",
    }
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.get('type'), 'file')
    self.assertEqual(tree.find('./driver').get('type'), 'qcow2')
    libvirt_driver.disconnect_volume(connection_info, "vde")
def test_libvirt_glusterfs_driver_with_opts(self):
    """Mount options from the connection data are forwarded to the
    glusterfs mount command."""
    mnt_base = '/mnt'
    self.flags(glusterfs_mount_point_base=mnt_base, group='libvirt')
    libvirt_driver = volume.LibvirtGlusterfsVolumeDriver(self.fake_conn)
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/volume-00001'
    options = '-o backupvolfile-server=192.168.1.2'
    export_mnt_base = os.path.join(mnt_base,
                                   utils.get_hash_str(export_string))
    file_path = os.path.join(export_mnt_base, self.name)
    connection_info = {'data': {'export': export_string,
                                'name': self.name,
                                'options': options}}
    conf = libvirt_driver.connect_volume(connection_info, self.disk_info)
    tree = conf.format_dom()
    self._assertFileTypeEquals(tree, file_path)
    libvirt_driver.disconnect_volume(connection_info, "vde")
    expected_commands = [
        ('mkdir', '-p', export_mnt_base),
        ('mount', '-t', 'glusterfs',
         '-o', 'backupvolfile-server=192.168.1.2',
         export_string, export_mnt_base),
        ('umount', export_mnt_base),
    ]
    self.assertEqual(self.executes, expected_commands)
def test_libvirt_glusterfs_libgfapi(self):
    """With 'gluster' in qemu_allowed_storage_drivers the volume is
    accessed via libgfapi: a network disk with protocol 'gluster' and
    the gluster host/port, instead of a fuse mount."""
    self.flags(qemu_allowed_storage_drivers=['gluster'], group='libvirt')
    libvirt_driver = volume.LibvirtGlusterfsVolumeDriver(self.fake_conn)
    self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
    export_string = '192.168.1.1:/volume-00001'
    name = 'volume-00001'
    connection_info = {'data': {'export': export_string, 'name': name}}
    disk_info = {
        "dev": "vde",
        "type": "disk",
        "bus": "virtio",
    }
    conf = libvirt_driver.connect_volume(connection_info, disk_info)
    tree = conf.format_dom()
    self.assertEqual(tree.get('type'), 'network')
    self.assertEqual(tree.find('./driver').get('type'), 'raw')
    source = tree.find('./source')
    self.assertEqual(source.get('protocol'), 'gluster')
    self.assertEqual(source.get('name'), 'volume-00001/volume-00001')
    self.assertEqual(source.find('./host').get('name'), '192.168.1.1')
    self.assertEqual(source.find('./host').get('port'), '24007')
    libvirt_driver.disconnect_volume(connection_info, "vde")
def fibrechan_connection(self, volume, location, wwn):
return {
'driver_volume_type': 'fibrechan',
'data': {
'volume_id': volume['id'],
'target_portal': location,
'target_wwn': wwn,
'target_lun': 1,
}
}
    def test_libvirt_fibrechan_driver(self):
        """Fibre channel attach uses the multipath device; bad wwn raises.

        connect_volume must resolve to the multipath device for wwn given
        as str, unicode or list, disconnect must clean up without running
        commands, and non-string/list wwn or missing HBAs must raise
        NovaException.
        """
        self.stubs.Set(libvirt_utils, 'get_fc_hbas',
                       fake_libvirt_utils.get_fc_hbas)
        self.stubs.Set(libvirt_utils, 'get_fc_hbas_info',
                       fake_libvirt_utils.get_fc_hbas_info)
        # NOTE(vish) exists is to make driver assume connecting worked
        self.stubs.Set(os.path, 'exists', lambda x: True)
        self.stubs.Set(os.path, 'realpath', lambda x: '/dev/sdb')
        libvirt_driver = volume.LibvirtFibreChannelVolumeDriver(self.fake_conn)
        multipath_devname = '/dev/md-1'
        devices = {"device": multipath_devname,
                   "id": "1234567890",
                   "devices": [{'device': '/dev/sdb',
                                'address': '1:0:0:1',
                                'host': 1, 'channel': 0,
                                'id': 0, 'lun': 1}]}
        self.stubs.Set(linuxscsi, 'find_multipath_device', lambda x: devices)
        self.stubs.Set(linuxscsi, 'remove_device', lambda x: None)
        # Should work for string, unicode, and list
        wwns = ['1234567890123456', unicode('1234567890123456'),
                ['1234567890123456', '1234567890123457']]
        for wwn in wwns:
            connection_info = self.fibrechan_connection(self.vol,
                                                        self.location, wwn)
            mount_device = "vde"
            conf = libvirt_driver.connect_volume(connection_info,
                                                 self.disk_info)
            tree = conf.format_dom()
            self.assertEqual(tree.get('type'), 'block')
            self.assertEqual(tree.find('./source').get('dev'),
                             multipath_devname)
            connection_info["data"]["devices"] = devices["devices"]
            libvirt_driver.disconnect_volume(connection_info, mount_device)
            # No shell commands should have been executed for FC attach.
            expected_commands = []
            self.assertEqual(self.executes, expected_commands)
        # Should not work for anything other than string, unicode, and list
        connection_info = self.fibrechan_connection(self.vol,
                                                    self.location, 123)
        self.assertRaises(exception.NovaException,
                          libvirt_driver.connect_volume,
                          connection_info, self.disk_info)
        # With no HBAs present the connect must also fail.
        self.stubs.Set(libvirt_utils, 'get_fc_hbas', lambda: [])
        self.stubs.Set(libvirt_utils, 'get_fc_hbas_info', lambda: [])
        self.assertRaises(exception.NovaException,
                          libvirt_driver.connect_volume,
                          connection_info, self.disk_info)
def test_libvirt_fibrechan_getpci_num(self):
libvirt_driver = volume.LibvirtFibreChannelVolumeDriver(self.fake_conn)
hba = {'device_path': "/sys/devices/pci0000:00/0000:00:03.0"
"/0000:05:00.3/host2/fc_host/host2"}
pci_num = libvirt_driver._get_pci_num(hba)
self.assertEqual("0000:05:00.3", pci_num)
hba = {'device_path': "/sys/devices/pci0000:00/0000:00:03.0"
"/0000:05:00.3/0000:06:00.6/host2/fc_host/host2"}
pci_num = libvirt_driver._get_pci_num(hba)
self.assertEqual("0000:06:00.6", pci_num)
    def test_libvirt_scality_driver(self):
        """Scality SOFS volume is exposed as a file under the mount point.

        Creates a fake config and mount tree in a temp dir, stubs
        os.access so the driver believes /sbin/mount.sofs exists, then
        checks connect_volume() produces file-backed disk XML pointing at
        <mount>/<voldir>/<volname>.
        """
        tempdir = self.useFixture(fixtures.TempDir()).path
        TEST_MOUNT = os.path.join(tempdir, 'fake_mount')
        TEST_CONFIG = os.path.join(tempdir, 'fake_config')
        TEST_VOLDIR = 'volumes'
        TEST_VOLNAME = 'volume_name'
        TEST_CONN_INFO = {
            'data': {
                'sofs_path': os.path.join(TEST_VOLDIR, TEST_VOLNAME)
            }
        }
        TEST_VOLPATH = os.path.join(TEST_MOUNT,
                                    TEST_VOLDIR,
                                    TEST_VOLNAME)
        # Empty config file; presence is all the driver checks for.
        open(TEST_CONFIG, "w+").close()
        # The 'sys' subdir makes the mount point look already mounted.
        os.makedirs(os.path.join(TEST_MOUNT, 'sys'))
        def _access_wrapper(path, flags):
            # Only pretend the sofs mount helper exists; defer otherwise.
            if path == '/sbin/mount.sofs':
                return True
            else:
                return os.access(path, flags)
        self.stubs.Set(os, 'access', _access_wrapper)
        self.flags(scality_sofs_config=TEST_CONFIG,
                   scality_sofs_mount_point=TEST_MOUNT,
                   group='libvirt')
        driver = volume.LibvirtScalityVolumeDriver(self.fake_conn)
        conf = driver.connect_volume(TEST_CONN_INFO, self.disk_info)
        tree = conf.format_dom()
        self._assertFileTypeEquals(tree, TEST_VOLPATH)
|
{
"content_hash": "d4f697ed64fff86f203c247ce1ca0e5a",
"timestamp": "",
"source": "github",
"line_count": 984,
"max_line_length": 79,
"avg_line_length": 45.76829268292683,
"alnum_prop": 0.5619282351896261,
"repo_name": "afrolov1/nova",
"id": "3ead953e19e09a1b3eaecef72b978aeeeb1c5c2e",
"size": "45692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/virt/libvirt/test_volume.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "14057622"
},
{
"name": "Shell",
"bytes": "17451"
}
],
"symlink_target": ""
}
|
import eventlet
import eventlet.db_pool
import base64
import hashlib
import os
from psycopg2 import IntegrityError
import logging
class Auth(object):
	"""User authentication/authorization backed by a PostgreSQL pool.

	Stored passwords are urlsafe-base64(sha512(salt + client_hash)) with
	'=' padding stripped; the same transform is applied on every check.
	"""
	def __init__(self,salt):
		# salt is prepended to every hash before storage/comparison
		self.salt = salt
		self.log = logging.getLogger('fairywren.auth')
		self.log.info('Created')
	def _saltPwhash(self,pwHash):
		"""Salt and re-hash a client-supplied password hash for storage.

		pwHash must be exactly 64 bytes (presumably a raw sha512 digest
		rather than its 128-char hex form -- TODO confirm with callers).
		"""
		if len(pwHash) != 64:
			raise ValueError('password hash should be 64 bytes')
		storedHash = hashlib.sha512()
		storedHash.update(self.salt)
		storedHash.update(pwHash)
		# urlsafe base64 with '=' padding stripped, matching DB storage
		return base64.urlsafe_b64encode(storedHash.digest()).replace('=','')
	def setConnectionPool(self,pool):
		"""Set the eventlet connection pool used by all queries."""
		self.connPool = pool
	def isUserMemberOfRole(self,userId,roles):
		"""Return True if the user holds at least one of the role names."""
		with self.connPool.item() as conn:
			cur = conn.cursor()
			cur.execute("SELECT roles.name from rolemember left join roles on roles.id=rolemember.roleid where userid=%s;",(userId,));
			retVal = False
			# iter(fetchone, None) yields rows until fetchone returns None
			for role, in iter(cur.fetchone,None):
				if role in roles:
					retVal = True
			# read-only query: rollback simply ends the transaction
			conn.rollback()
			cur.close()
		return retVal
	def changePassword(self,userId,pwHash):
		"""Set a user's password. Returns True on success, None on error."""
		saltedPw = self._saltPwhash(pwHash)
		with self.connPool.item() as conn:
			cur = conn.cursor()
			try:
				cur.execute("UPDATE users SET password=%s where id=%s;",
				(saltedPw,userId,))
			except StandardError as e :
				self.log.error(e)
				return None
			finally:
				# NOTE(review): commit also runs on the error path -- Postgres
				# treats COMMIT of an aborted transaction as ROLLBACK, but
				# confirm this ordering is intentional.
				conn.commit()
				cur.close()
		return True
	def authenticateSecretKey(self,key):
		"""Return the user id owning this secret key, or None.

		Users with a null password (not yet activated) never match.
		"""
		with self.connPool.item() as conn:
			cur = conn.cursor()
			cur.execute("Select id from users where secretKey=%s and password is not null;",
			(base64.urlsafe_b64encode(key).replace('=','') ,))
			r = cur.fetchone()
			if r != None:
				# unpack the single-column row into the bare id
				r, = r
			cur.close()
			conn.rollback()
			return r
	def authorizeInfoHash(self,info_hash):
		"""Return the torrent id for this info hash, or None if unknown."""
		with self.connPool.item() as conn:
			cur = conn.cursor()
			cur.execute("Select id from torrents where infoHash=%(infoHash)s",
			{'infoHash' : base64.urlsafe_b64encode(info_hash).replace('=','') })
			result = cur.fetchone()
			cur.close()
			conn.rollback()
			if result!= None:
				result, = result
			return result
	def authenticateUser(self,username,password):
		"""Check username/password; return the user id or None.

		The incoming password is salted+hashed the same way as stored
		passwords before comparison in SQL.
		"""
		passwordHash = hashlib.sha512()
		passwordHash.update(self.salt)
		passwordHash.update(password)
		passwordHash = base64.urlsafe_b64encode(passwordHash.digest()).replace('=','')
		with self.connPool.item() as conn:
			cur = conn.cursor()
			cur.execute("Select id from users where name=%s and password=%s ;",
			(username,passwordHash))
			allowed = cur.fetchone()
			cur.close()
			conn.rollback()
			if allowed == None:
				return None
			userId, = allowed
			return userId
|
{
"content_hash": "3e2ea34b69cb0718c3448c187d1f5172",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 125,
"avg_line_length": 22.478991596638654,
"alnum_prop": 0.6631775700934579,
"repo_name": "hydrogen18/fairywren",
"id": "0ee04ceda05f040ee0e1b8af7546a1fb9e0c462b",
"size": "2675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "auth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "67010"
},
{
"name": "Python",
"bytes": "219448"
},
{
"name": "Shell",
"bytes": "389"
}
],
"symlink_target": ""
}
|
import argparse
import sys
import urllib2
from scripts.checkUtil import working_directory
from subprocess import call
from subprocess import check_output
import scripts.CheckList as cl
def main():
    """Entry point: parse CLI args, ensure apktool exists, run checks.

    Uses --dir, --config, --apk (and optional --tasks) from the command
    line, installs apktool on demand, then executes the checklist and
    cleans up the temporary extraction directory.
    """
    args = parse_parameters(sys.argv)
    project = args.dir
    config = args.config
    tasks = ''
    if args.tasks :
        tasks = args.tasks
        print tasks
    apk_location = args.apk
    # apktool exits non-zero (or is missing) when run with no arguments,
    # which raises here and triggers installation.
    try:
        check_output(["apktool"])
    except:
        apktool_loading()
    tester = cl.Checklist(project,apk_location,config)
    tester.executeTests(config)
    # NOTE(review): the rm path is absolute ('/app-external-release') even
    # though we chdir to /tmp first -- confirm whether the relative
    # 'app-external-release' under /tmp was intended.
    with working_directory("/tmp"):
        check_output(["rm","-rf","/app-external-release"])
# Installs apktool into the system.
# TODO: maintenance of the jar version (currently pinned to 2.2.0).
def apktool_loading():
    """Download the apktool launcher script and jar and install them.

    Fetches both files over HTTP(S), stages them in /tmp, moves them into
    /usr/local/bin and marks them executable. Requires network access and
    write permission on /usr/local/bin.
    """
    scriptFile = urllib2.urlopen("https://raw.githubusercontent.com/iBotPeaches/Apktool/master/scripts/osx/apktool")
    jarFile = urllib2.urlopen("https://bitbucket.org/iBotPeaches/apktool/downloads/apktool_2.2.0.jar")
    filename_script = "apktool"
    filename_jar = "apktool.jar"
    with working_directory("/tmp"):
        print "Downloading script..."
        with open(filename_script, "wb") as output_script:
            output_script.write(scriptFile.read())
        print "Downloading jar file..."
        with open(filename_jar, "wb") as output_jar:
            output_jar.write(jarFile.read())
        call(["mv",filename_script,"/usr/local/bin"])
        call(["mv", filename_jar, "/usr/local/bin"])
    with working_directory("/usr/local/bin"):
        print "Changing the mod of the script..."
        call(["chmod","+x",filename_script])
        print "Changing the mod of the jar..."
        call(["chmod", "+x", filename_jar])
def parse_parameters(argv):
    """Parse command line parameters.

    :param argv: full argument vector (argv[0] is the program name),
        typically ``sys.argv``.
    :returns: ``argparse.Namespace`` with ``dir``, ``config``, ``apk``
        and ``tasks`` attributes (each ``None`` when not supplied,
        except ``tasks`` which is a one-element list when given).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--dir',
                        nargs='?',
                        help='Directory location')
    parser.add_argument('-c', '--config',
                        nargs='?',
                        help='Config File Location')
    parser.add_argument('-a', '--apk',
                        nargs='?',
                        help='Apk Location')
    parser.add_argument('-t', '--tasks',
                        nargs=1,
                        help='Optional task file to import check functions')
    # Bug fix: the argv parameter used to be ignored -- parse_args() read
    # sys.argv implicitly. Parsing argv[1:] explicitly keeps identical
    # behavior for the existing call site (main passes sys.argv) while
    # making the function testable with an explicit argument vector.
    return parser.parse_args(argv[1:])
# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()
|
{
"content_hash": "3961a6375e59a04850e8da38344cc786",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 116,
"avg_line_length": 32.58108108108108,
"alnum_prop": 0.6009954375777685,
"repo_name": "wickedoto/ApkParser",
"id": "08737bd24f1fbbe52038103da3cd629e47430e17",
"size": "2437",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "initializer.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "120"
},
{
"name": "Python",
"bytes": "263634"
}
],
"symlink_target": ""
}
|
"""Extend the basic Accessory and Bridge functions."""
from datetime import timedelta
from functools import partial, wraps
from inspect import getmodule
import logging
from pyhap.accessory import Accessory, Bridge
from pyhap.accessory_driver import AccessoryDriver
from pyhap.const import CATEGORY_OTHER
from homeassistant.components import cover, vacuum
from homeassistant.components.cover import DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE
from homeassistant.components.media_player import DEVICE_CLASS_TV
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SERVICE,
ATTR_SUPPORTED_FEATURES,
ATTR_UNIT_OF_MEASUREMENT,
CONF_NAME,
CONF_TYPE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
STATE_ON,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
UNIT_PERCENTAGE,
__version__,
)
from homeassistant.core import Context, callback as ha_callback, split_entity_id
from homeassistant.helpers.event import (
async_track_state_change_event,
track_point_in_utc_time,
)
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from .const import (
ATTR_DISPLAY_NAME,
ATTR_INTERGRATION,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SOFTWARE_VERSION,
ATTR_VALUE,
BRIDGE_MODEL,
BRIDGE_SERIAL_NUMBER,
CHAR_BATTERY_LEVEL,
CHAR_CHARGING_STATE,
CHAR_STATUS_LOW_BATTERY,
CONF_FEATURE_LIST,
CONF_LINKED_BATTERY_CHARGING_SENSOR,
CONF_LINKED_BATTERY_SENSOR,
CONF_LOW_BATTERY_THRESHOLD,
DEBOUNCE_TIMEOUT,
DEFAULT_LOW_BATTERY_THRESHOLD,
DEVICE_CLASS_CO,
DEVICE_CLASS_CO2,
DEVICE_CLASS_PM25,
EVENT_HOMEKIT_CHANGED,
HK_CHARGING,
HK_NOT_CHARGABLE,
HK_NOT_CHARGING,
MANUFACTURER,
SERV_BATTERY_SERVICE,
TYPE_FAUCET,
TYPE_OUTLET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_SWITCH,
TYPE_VALVE,
)
from .util import (
convert_to_float,
dismiss_setup_message,
format_sw_version,
show_setup_message,
validate_media_player_features,
)
_LOGGER = logging.getLogger(__name__)
# Maps the configured switch subtype (CONF_TYPE) to the name of the
# HomeKit accessory class used for entities in the 'switch' domain.
SWITCH_TYPES = {
    TYPE_FAUCET: "Valve",
    TYPE_OUTLET: "Outlet",
    TYPE_SHOWER: "Valve",
    TYPE_SPRINKLER: "Valve",
    TYPE_SWITCH: "Switch",
    TYPE_VALVE: "Valve",
}
# Accessory class registry keyed by type name; get_accessory() looks up
# TYPES[a_type] (presumably populated by decorator registration in the
# accessory type modules -- not visible in this file).
TYPES = Registry()
def debounce(func):
    """Decorate function to debounce callbacks from HomeKit.

    Each call cancels any pending timer for the same method and schedules
    the real call DEBOUNCE_TIMEOUT seconds out, so only the last value in
    a burst reaches Home Assistant.
    """
    @ha_callback
    def call_later_listener(self, *args):
        """Handle call_later callback."""
        debounce_params = self.debounce.pop(func.__name__, None)
        if debounce_params:
            # Index 0 holds the remove-listener callable; replay the rest
            # (the original call args) in the executor.
            self.hass.async_add_executor_job(func, self, *debounce_params[1:])
    @wraps(func)
    def wrapper(self, *args):
        """Start async timer."""
        debounce_params = self.debounce.pop(func.__name__, None)
        if debounce_params:
            debounce_params[0]()  # remove listener
        remove_listener = track_point_in_utc_time(
            self.hass,
            partial(call_later_listener, self),
            dt_util.utcnow() + timedelta(seconds=DEBOUNCE_TIMEOUT),
        )
        self.debounce[func.__name__] = (remove_listener, *args)
        logger.debug(
            "%s: Start %s timeout", self.entity_id, func.__name__.replace("set_", "")
        )
    # 'logger' is assigned after wrapper is defined but before wrapper can
    # run, so the closure resolves it correctly at call time.
    name = getmodule(func).__name__
    logger = logging.getLogger(name)
    return wrapper
def get_accessory(hass, driver, state, aid, config):
    """Take state and return an accessory object if supported.

    Maps a Home Assistant entity (by domain, device class, supported
    features and unit) to the matching HomeKit accessory type registered
    in TYPES, or returns None when the entity cannot be bridged.
    Branch order matters: more specific checks must precede fallbacks.
    """
    if not aid:
        _LOGGER.warning(
            'The entity "%s" is not supported, since it '
            "generates an invalid aid, please change it",
            state.entity_id,
        )
        return None
    a_type = None
    name = config.get(CONF_NAME, state.name)
    if state.domain == "alarm_control_panel":
        a_type = "SecuritySystem"
    elif state.domain in ("binary_sensor", "device_tracker", "person"):
        a_type = "BinarySensor"
    elif state.domain == "climate":
        a_type = "Thermostat"
    elif state.domain == "cover":
        # Garage doors/gates need open+close support; otherwise fall back
        # to positionable or basic open/close window coverings.
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        if device_class in (DEVICE_CLASS_GARAGE, DEVICE_CLASS_GATE) and features & (
            cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE
        ):
            a_type = "GarageDoorOpener"
        elif features & cover.SUPPORT_SET_POSITION:
            a_type = "WindowCovering"
        elif features & (cover.SUPPORT_OPEN | cover.SUPPORT_CLOSE):
            a_type = "WindowCoveringBasic"
    elif state.domain == "fan":
        a_type = "Fan"
    elif state.domain == "humidifier":
        a_type = "HumidifierDehumidifier"
    elif state.domain == "light":
        a_type = "Light"
    elif state.domain == "lock":
        a_type = "Lock"
    elif state.domain == "media_player":
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        feature_list = config.get(CONF_FEATURE_LIST, [])
        if device_class == DEVICE_CLASS_TV:
            a_type = "TelevisionMediaPlayer"
        elif validate_media_player_features(state, feature_list):
            a_type = "MediaPlayer"
    elif state.domain == "sensor":
        # Sensors are classified by device class first, then by unit, and
        # as a last resort by a substring match on the entity id.
        device_class = state.attributes.get(ATTR_DEVICE_CLASS)
        unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        if device_class == DEVICE_CLASS_TEMPERATURE or unit in (
            TEMP_CELSIUS,
            TEMP_FAHRENHEIT,
        ):
            a_type = "TemperatureSensor"
        elif device_class == DEVICE_CLASS_HUMIDITY and unit == UNIT_PERCENTAGE:
            a_type = "HumiditySensor"
        elif device_class == DEVICE_CLASS_PM25 or DEVICE_CLASS_PM25 in state.entity_id:
            a_type = "AirQualitySensor"
        elif device_class == DEVICE_CLASS_CO:
            a_type = "CarbonMonoxideSensor"
        elif device_class == DEVICE_CLASS_CO2 or DEVICE_CLASS_CO2 in state.entity_id:
            a_type = "CarbonDioxideSensor"
        elif device_class == DEVICE_CLASS_ILLUMINANCE or unit in ("lm", "lx"):
            a_type = "LightSensor"
    elif state.domain == "switch":
        switch_type = config.get(CONF_TYPE, TYPE_SWITCH)
        a_type = SWITCH_TYPES[switch_type]
    elif state.domain == "vacuum":
        features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        if features & (vacuum.SUPPORT_START | vacuum.SUPPORT_RETURN_HOME):
            a_type = "DockVacuum"
        else:
            a_type = "Switch"
    elif state.domain in ("automation", "input_boolean", "remote", "scene", "script"):
        a_type = "Switch"
    elif state.domain == "water_heater":
        a_type = "WaterHeater"
    elif state.domain == "camera":
        a_type = "Camera"
    if a_type is None:
        return None
    _LOGGER.debug('Add "%s" as "%s"', state.entity_id, a_type)
    return TYPES[a_type](hass, driver, name, state.entity_id, aid, config)
class HomeAccessory(Accessory):
    """Adapter class for Accessory.

    Bridges one Home Assistant entity to a HomeKit accessory: fills the
    accessory-info service, optionally adds a battery service, and keeps
    HomeKit characteristics in sync with entity state changes.
    """
    def __init__(
        self,
        hass,
        driver,
        name,
        entity_id,
        aid,
        config,
        *args,
        category=CATEGORY_OTHER,
        **kwargs,
    ):
        """Initialize a Accessory object."""
        super().__init__(driver=driver, display_name=name, aid=aid, *args, **kwargs)
        self.config = config or {}
        # Human-readable domain ("binary sensor") for default info fields.
        domain = split_entity_id(entity_id)[0].replace("_", " ")
        if ATTR_MANUFACTURER in self.config:
            manufacturer = self.config[ATTR_MANUFACTURER]
        elif ATTR_INTERGRATION in self.config:
            manufacturer = self.config[ATTR_INTERGRATION].replace("_", " ").title()
        else:
            manufacturer = f"{MANUFACTURER} {domain}".title()
        if ATTR_MODEL in self.config:
            model = self.config[ATTR_MODEL]
        else:
            model = domain.title()
        if ATTR_SOFTWARE_VERSION in self.config:
            sw_version = format_sw_version(self.config[ATTR_SOFTWARE_VERSION])
        else:
            sw_version = __version__
        self.set_info_service(
            manufacturer=manufacturer,
            model=model,
            serial_number=entity_id,
            firmware_revision=sw_version,
        )
        self.category = category
        self.entity_id = entity_id
        self.hass = hass
        # Per-method debounce bookkeeping used by the debounce() decorator.
        self.debounce = {}
        # Listener-remove callables, cancelled in async_stop().
        self._subscriptions = []
        self._char_battery = None
        self._char_charging = None
        self._char_low_battery = None
        self.linked_battery_sensor = self.config.get(CONF_LINKED_BATTERY_SENSOR)
        self.linked_battery_charging_sensor = self.config.get(
            CONF_LINKED_BATTERY_CHARGING_SENSOR
        )
        self.low_battery_threshold = self.config.get(
            CONF_LOW_BATTERY_THRESHOLD, DEFAULT_LOW_BATTERY_THRESHOLD
        )
        # Add a battery service when a battery level is discoverable.
        # (The string literal below is a stray no-op statement, kept
        # verbatim as in-tree documentation.)
        """Add battery service if available"""
        entity_attributes = self.hass.states.get(self.entity_id).attributes
        battery_found = entity_attributes.get(ATTR_BATTERY_LEVEL)
        if self.linked_battery_sensor:
            state = self.hass.states.get(self.linked_battery_sensor)
            if state is not None:
                battery_found = state.state
            else:
                # Linked sensor configured but missing: drop the link.
                self.linked_battery_sensor = None
                _LOGGER.warning(
                    "%s: Battery sensor state missing: %s",
                    self.entity_id,
                    self.linked_battery_sensor,
                )
        if not battery_found:
            return
        _LOGGER.debug("%s: Found battery level", self.entity_id)
        if self.linked_battery_charging_sensor:
            state = self.hass.states.get(self.linked_battery_charging_sensor)
            if state is None:
                self.linked_battery_charging_sensor = None
                _LOGGER.warning(
                    "%s: Battery charging binary_sensor state missing: %s",
                    self.entity_id,
                    self.linked_battery_charging_sensor,
                )
            else:
                _LOGGER.debug("%s: Found battery charging", self.entity_id)
        serv_battery = self.add_preload_service(SERV_BATTERY_SERVICE)
        self._char_battery = serv_battery.configure_char(CHAR_BATTERY_LEVEL, value=0)
        self._char_charging = serv_battery.configure_char(
            CHAR_CHARGING_STATE, value=HK_NOT_CHARGABLE
        )
        self._char_low_battery = serv_battery.configure_char(
            CHAR_STATUS_LOW_BATTERY, value=0
        )
    @property
    def available(self):
        """Return if accessory is available."""
        state = self.hass.states.get(self.entity_id)
        return state is not None and state.state != STATE_UNAVAILABLE
    async def run(self):
        """Handle accessory driver started event.

        Run inside the HAP-python event loop; delegates to run_handler on
        the Home Assistant loop.
        """
        self.hass.add_job(self.run_handler)
    async def run_handler(self):
        """Handle accessory driver started event.

        Run inside the Home Assistant event loop: push the current state
        to HomeKit and subscribe to future state/battery changes.
        """
        state = self.hass.states.get(self.entity_id)
        self.async_update_state_callback(state)
        self._subscriptions.append(
            async_track_state_change_event(
                self.hass, [self.entity_id], self.async_update_event_state_callback
            )
        )
        battery_charging_state = None
        battery_state = None
        if self.linked_battery_sensor:
            linked_battery_sensor_state = self.hass.states.get(
                self.linked_battery_sensor
            )
            battery_state = linked_battery_sensor_state.state
            battery_charging_state = linked_battery_sensor_state.attributes.get(
                ATTR_BATTERY_CHARGING
            )
            self._subscriptions.append(
                async_track_state_change_event(
                    self.hass,
                    [self.linked_battery_sensor],
                    self.async_update_linked_battery_callback,
                )
            )
        elif state is not None:
            battery_state = state.attributes.get(ATTR_BATTERY_LEVEL)
        if self.linked_battery_charging_sensor:
            state = self.hass.states.get(self.linked_battery_charging_sensor)
            battery_charging_state = state and state.state == STATE_ON
            self._subscriptions.append(
                async_track_state_change_event(
                    self.hass,
                    [self.linked_battery_charging_sensor],
                    self.async_update_linked_battery_charging_callback,
                )
            )
        elif battery_charging_state is None and state is not None:
            battery_charging_state = state.attributes.get(ATTR_BATTERY_CHARGING)
        if battery_state is not None or battery_charging_state is not None:
            self.async_update_battery(battery_state, battery_charging_state)
    @ha_callback
    def async_update_event_state_callback(self, event):
        """Handle state change event listener callback."""
        self.async_update_state_callback(event.data.get("new_state"))
    @ha_callback
    def async_update_state_callback(self, new_state):
        """Handle state change listener callback."""
        _LOGGER.debug("New_state: %s", new_state)
        if new_state is None:
            return
        battery_state = None
        battery_charging_state = None
        # Battery attrs on the entity itself only count when no linked
        # battery sensors are configured.
        if (
            not self.linked_battery_sensor
            and ATTR_BATTERY_LEVEL in new_state.attributes
        ):
            battery_state = new_state.attributes.get(ATTR_BATTERY_LEVEL)
        if (
            not self.linked_battery_charging_sensor
            and ATTR_BATTERY_CHARGING in new_state.attributes
        ):
            battery_charging_state = new_state.attributes.get(ATTR_BATTERY_CHARGING)
        if battery_state is not None or battery_charging_state is not None:
            self.async_update_battery(battery_state, battery_charging_state)
        self.async_update_state(new_state)
    @ha_callback
    def async_update_linked_battery_callback(self, event):
        """Handle linked battery sensor state change listener callback."""
        new_state = event.data.get("new_state")
        if new_state is None:
            return
        if self.linked_battery_charging_sensor:
            battery_charging_state = None
        else:
            battery_charging_state = new_state.attributes.get(ATTR_BATTERY_CHARGING)
        self.async_update_battery(new_state.state, battery_charging_state)
    @ha_callback
    def async_update_linked_battery_charging_callback(self, event):
        """Handle linked battery charging sensor state change listener callback."""
        new_state = event.data.get("new_state")
        if new_state is None:
            return
        self.async_update_battery(None, new_state.state == STATE_ON)
    @ha_callback
    def async_update_battery(self, battery_level, battery_charging):
        """Update battery service if available.

        Only call this function if self._support_battery_level is True.
        """
        if not self._char_battery:
            # Battery appeared after homekit was started
            return
        battery_level = convert_to_float(battery_level)
        if battery_level is not None:
            if self._char_battery.value != battery_level:
                self._char_battery.set_value(battery_level)
            is_low_battery = 1 if battery_level < self.low_battery_threshold else 0
            if self._char_low_battery.value != is_low_battery:
                self._char_low_battery.set_value(is_low_battery)
                _LOGGER.debug(
                    "%s: Updated battery level to %d", self.entity_id, battery_level
                )
        # Charging state can appear after homekit was started
        if battery_charging is None or not self._char_charging:
            return
        hk_charging = HK_CHARGING if battery_charging else HK_NOT_CHARGING
        if self._char_charging.value != hk_charging:
            self._char_charging.set_value(hk_charging)
            _LOGGER.debug(
                "%s: Updated battery charging to %d", self.entity_id, hk_charging
            )
    @ha_callback
    def async_update_state(self, new_state):
        """Handle state change to update HomeKit value.

        Overridden by accessory types.
        """
        raise NotImplementedError()
    def call_service(self, domain, service, service_data, value=None):
        """Fire event and call service for changes from HomeKit."""
        self.hass.add_job(self.async_call_service, domain, service, service_data, value)
    async def async_call_service(self, domain, service, service_data, value=None):
        """Fire event and call service for changes from HomeKit.

        This method must be run in the event loop.
        """
        event_data = {
            ATTR_ENTITY_ID: self.entity_id,
            ATTR_DISPLAY_NAME: self.display_name,
            ATTR_SERVICE: service,
            ATTR_VALUE: value,
        }
        context = Context()
        self.hass.bus.async_fire(EVENT_HOMEKIT_CHANGED, event_data, context=context)
        await self.hass.services.async_call(
            domain, service, service_data, context=context
        )
    @ha_callback
    def async_stop(self):
        """Cancel any subscriptions when the bridge is stopped."""
        while self._subscriptions:
            self._subscriptions.pop(0)()
class HomeBridge(Bridge):
    """Adapter class for Bridge."""

    def __init__(self, hass, driver, name):
        """Initialize a Bridge object."""
        super().__init__(driver, name)
        self.set_info_service(
            manufacturer=MANUFACTURER,
            model=BRIDGE_MODEL,
            serial_number=BRIDGE_SERIAL_NUMBER,
            firmware_revision=__version__,
        )
        self.hass = hass

    def setup_message(self):
        """Prevent print of pyhap setup message to terminal."""

    def get_snapshot(self, info):
        """Get snapshot from accessory if supported."""
        accessory = self.accessories.get(info["aid"])
        if accessory is None:
            raise ValueError("Requested snapshot for missing accessory")
        snapshot_method = getattr(accessory, "get_snapshot", None)
        if snapshot_method is None:
            raise ValueError(
                "Got a request for snapshot, but the Accessory "
                'does not define a "get_snapshot" method'
            )
        return snapshot_method(info)
class HomeDriver(AccessoryDriver):
    """Adapter class for AccessoryDriver."""

    def __init__(self, hass, entry_id, bridge_name, **kwargs):
        """Initialize a AccessoryDriver object."""
        super().__init__(**kwargs)
        self.hass = hass
        self._entry_id = entry_id
        self._bridge_name = bridge_name

    def pair(self, client_uuid, client_public):
        """Override super function to dismiss setup message if paired."""
        paired = super().pair(client_uuid, client_public)
        if paired:
            # Pairing succeeded: the setup QR/PIN card is no longer needed.
            dismiss_setup_message(self.hass, self._entry_id)
        return paired

    def unpair(self, client_uuid):
        """Override super function to show setup message if unpaired."""
        super().unpair(client_uuid)
        # Re-display the pairing instructions so the user can pair again.
        show_setup_message(
            self.hass,
            self._entry_id,
            self._bridge_name,
            self.state.pincode,
            self.accessory.xhm_uri(),
        )
|
{
"content_hash": "b70c32121297087806c657767a498cea",
"timestamp": "",
"source": "github",
"line_count": 563,
"max_line_length": 88,
"avg_line_length": 34.62344582593251,
"alnum_prop": 0.6064741189144821,
"repo_name": "titilambert/home-assistant",
"id": "68b61772ce84a2fa69cc33f5ced5f77d92b1d9df",
"size": "19493",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homekit/accessories.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "25849092"
},
{
"name": "Shell",
"bytes": "4410"
}
],
"symlink_target": ""
}
|
import errno
import logging
import re
import time
from net import gorpc
from vtdb import tablet2
from vtdb import dbexceptions
# NOTE(msolomon) this sketchy import allows upstream code to mostly interpret
# our exceptions as if they came from MySQLdb. Good for a cutover, probably
# bad long-term.
import MySQLdb as MySQLErrors
# Matches the "(errno NNNN)" suffix vtocc embeds in error messages.
# NOTE(review): not a raw string; '\(' and '\d' are benign escapes here,
# but r'...' would be clearer.
_errno_pattern = re.compile('\(errno (\d+)\)')
# NOTE(msolomon) This mapping helps us mimic the behavior of mysql errors
# even though the relationship between connections and failures is now quite
# different. In general, we map vtocc errors to DatabaseError, unless there
# is a pressing reason to be more precise. Otherwise, these errors can get
# misinterpreted futher up the call chain.
_mysql_error_map = {
    # 1062 = ER_DUP_ENTRY (duplicate key)
    1062: MySQLErrors.IntegrityError,
}
# Errors fall into three classes based on recovery strategy.
#
# APP_LEVEL is for routine programmer errors (bad input etc) -- nothing can be
# done here, so just propagate the error upstream.
#
# RETRY means a simple reconnect (and immediate) reconnect to the same
# host will likely fix things. This is usually due vtocc restarting. In general
# this can be handled transparently unless the error is within a transaction.
#
# FATAL indicates that retrying an action on the host is likely to fail.
ERROR_APP_LEVEL = 'app_level'
ERROR_RETRY = 'retry'
ERROR_FATAL = 'fatal'
# Seconds to sleep before redialing a restarted vtocc.
RECONNECT_DELAY = 0.002
# Simple class to trap and re-export only the variables referenced from the
# SQL statement -- bind dictionaries can be *very* noisy. This is a
# by-product of converting the MySQL %(name)s syntax to vtocc :name.
class BindVarsProxy(object):
  """Rewrites '%(name)s' placeholders as ':name' and records each key.

  Substituting an instance into a %-format SQL string converts the bind
  style while remembering which keys were referenced, so that
  export_bind_vars() can return only the used subset.
  """
  def __init__(self, bind_vars):
    self.bind_vars = bind_vars
    self.accessed_keys = set()
  def __getitem__(self, name):
    # Look up first so an unknown name raises KeyError before recording.
    self.bind_vars[name]
    self.accessed_keys.add(name)
    return ':' + name
  def export_bind_vars(self):
    """Return only the bind vars that were referenced via __getitem__."""
    return dict((key, self.bind_vars[key]) for key in self.accessed_keys)
# Provide compatibility with the MySQLdb query param style and prune bind_vars
class VtOCCConnection(tablet2.TabletConnection):
  # Total tries (initial + one retry) for retryable begin/execute errors.
  max_attempts = 2
  def dial(self):
    """Dial the tablet and establish a vtocc session id."""
    tablet2.TabletConnection.dial(self)
    try:
      response = self.client.call('OccManager.GetSessionId', self.dbname)
      self.set_session_id(response.reply)
    except gorpc.GoRpcError, e:
      raise dbexceptions.OperationalError(*e.args)
  def _convert_error(self, exception, *error_hints):
    """Classify a vtocc error and map it to a MySQLdb-style exception.

    Returns (error_type, exc) where error_type is ERROR_APP_LEVEL or
    ERROR_RETRY; raises OperationalError(2003, ...) directly for fatal
    errors, and may redial as a side effect when a retryable error
    happens inside a transaction.
    """
    message = str(exception[0]).lower()
    # NOTE(msolomon) extract a mysql error code so we can push this up the code
    # stack. At this point, this is almost exclusively for handling integrity
    # errors from duplicate key inserts.
    match = _errno_pattern.search(message)
    if match:
      err = int(match.group(1))
    elif isinstance(exception[0], IOError):
      err = exception[0].errno
    else:
      err = -1
    if message.startswith('fatal'):
      # Force this error code upstream so MySQL code understands this as a
      # permanent failure on this host. Feels a little dirty, but probably the
      # most consistent way since this correctly communicates the recovery
      # strategy upstream.
      raise MySQLErrors.OperationalError(2003, str(exception), self.addr,
                                         *error_hints)
    elif message.startswith('retry'):
      # Retry means that a trivial redial of this host will fix things. This
      # is frequently due to vtocc being restarted independently of the mysql
      # instance behind it.
      error_type = ERROR_RETRY
    elif 'curl error 7' in message:
      # Client side error - sometimes the listener is unavailable for a few
      # milliseconds during a restart.
      error_type = ERROR_RETRY
    elif err in (errno.ECONNREFUSED, errno.EPIPE):
      error_type = ERROR_RETRY
    else:
      # Everything else is app level - just process the failure and continue
      # to use the existing connection.
      error_type = ERROR_APP_LEVEL
    if error_type == ERROR_RETRY and self.transaction_id:
      # With a transaction, you cannot retry, so just redial. The next action
      # will be successful. Masquerade as commands-out-of-sync - an operational
      # error that can be reattempted at the app level.
      error_type = ERROR_APP_LEVEL
      error_hints += ('cannot retry action within a transaction',)
      try:
        time.sleep(RECONNECT_DELAY)
        self.dial()
      except Exception, e:
        # If this fails now, the code will retry later as the session_id
        # won't be valid until the handshake finishes.
        logging.warning('error dialing vtocc %s (%s)', self.addr, e)
    exc_class = _mysql_error_map.get(err, MySQLErrors.DatabaseError)
    return error_type, exc_class(err, str(exception), self.addr,
                                 *error_hints)
  def begin(self):
    """Begin a transaction, retrying once across a redial if needed."""
    attempt = 0
    while True:
      try:
        return tablet2.TabletConnection.begin(self)
      except dbexceptions.OperationalError, e:
        error_type, e = self._convert_error(e, 'begin')
        if error_type == ERROR_RETRY:
          attempt += 1
          if attempt < self.max_attempts:
            try:
              time.sleep(RECONNECT_DELAY)
              self.dial()
            except dbexceptions.OperationalError, dial_error:
              logging.warning('error dialing vtocc on begin %s (%s)',
                              self.addr, dial_error)
            continue
          logging.warning('Failing with 2003 on begin')
          raise MySQLErrors.OperationalError(2003, str(e), self.addr, 'begin')
        raise e
  def commit(self):
    """Commit the current transaction; errors are converted, not retried."""
    try:
      return tablet2.TabletConnection.commit(self)
    except dbexceptions.OperationalError, e:
      error_type, e = self._convert_error(e, 'commit')
      raise e
  def _execute(self, sql, bind_variables):
    """Convert %(name)s binds to :name, prune unused vars and execute.

    Retries once across a redial for retryable errors outside a
    transaction; otherwise raises the converted exception.
    """
    bind_vars_proxy = BindVarsProxy(bind_variables)
    try:
      # convert bind style from %(name)s to :name
      sql = sql % bind_vars_proxy
    except KeyError, e:
      raise dbexceptions.InterfaceError(e[0], sql, bind_variables)
    sane_bind_vars = bind_vars_proxy.export_bind_vars()
    attempt = 0
    while True:
      try:
        return tablet2.TabletConnection._execute(self, sql, sane_bind_vars)
      except dbexceptions.OperationalError, e:
        error_type, e = self._convert_error(e, sql, sane_bind_vars)
        if error_type == ERROR_RETRY:
          attempt += 1
          if attempt < self.max_attempts:
            try:
              time.sleep(RECONNECT_DELAY)
              self.dial()
            except dbexceptions.OperationalError, dial_error:
              logging.warning('error dialing vtocc on execute %s (%s)',
                              self.addr, dial_error)
            continue
          logging.warning('Failing with 2003 on %s: %s, %s', str(e), sql, sane_bind_vars)
          raise MySQLErrors.OperationalError(2003, str(e), self.addr, sql, sane_bind_vars)
        raise e
def connect(addr, timeout, dbname=None):
  """Create a VtOCCConnection to *addr* and dial it immediately."""
  connection = VtOCCConnection(addr, dbname, timeout)
  connection.dial()
  return connection
|
{
"content_hash": "4f7fa873f5912653a1df19cdf07a1844",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 90,
"avg_line_length": 36.7434554973822,
"alnum_prop": 0.6675691080079795,
"repo_name": "oopos/vitess",
"id": "c387498ec60bf5d2e3cc9dec825d36af511065e6",
"size": "8566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py/vtdb/vt_occ2.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import asyncio
from hello import HelloRemote, HelloRequest
from venom.rpc.comms.grpc import Client

# Client for the HelloRemote gRPC service exposed on localhost:50053.
client = Client(HelloRemote, 'localhost', 50053)


async def request_say_hello(name):
    """Invoke HelloRemote.say_hello with *name* and print the reply message."""
    reply = await client.invoke(HelloRemote.say_hello, HelloRequest(name=name))
    print('response:', reply.message)


loop = asyncio.get_event_loop()
loop.run_until_complete(request_say_hello('world'))
|
{
"content_hash": "7a62fea902ab28de6b5e369bdf29f71c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 82,
"avg_line_length": 30.307692307692307,
"alnum_prop": 0.7690355329949239,
"repo_name": "biosustain/venom",
"id": "dc62771f8ad7dae8b2c69e148e770a9561587cca",
"size": "394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/grpc/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "174784"
}
],
"symlink_target": ""
}
|
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_role
short_description: Manages user roles on Apache CloudStack based clouds.
description:
- Create, update, delete user roles.
version_added: '2.3'
author: René Moser (@resmo)
options:
name:
description:
- Name of the role.
type: str
required: true
id:
description:
- ID of the role.
- If provided, I(id) is used as key.
type: str
aliases: [ uuid ]
role_type:
description:
- Type of the role.
- Only considered for creation.
type: str
default: User
choices: [ User, DomainAdmin, ResourceAdmin, Admin ]
description:
description:
- Description of the role.
type: str
state:
description:
- State of the role.
type: str
default: present
choices: [ present, absent ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure an user role is present
cs_role:
name: myrole_user
delegate_to: localhost
- name: Ensure a role having particular ID is named as myrole_user
cs_role:
name: myrole_user
id: 04589590-ac63-4ffc-93f5-b698b8ac38b6
delegate_to: localhost
- name: Ensure a role is absent
cs_role:
name: myrole_user
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the role.
returned: success
type: str
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the role.
returned: success
type: str
sample: myrole
description:
description: Description of the role.
returned: success
type: str
sample: "This is my role description"
role_type:
description: Type of the role.
returned: success
type: str
sample: User
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackRole(AnsibleCloudStack):
    """Manage CloudStack user roles (create, update, delete)."""

    def __init__(self, module):
        super(AnsibleCloudStackRole, self).__init__(module)
        # Map API result keys onto the module's documented return keys.
        self.returns = {
            'type': 'role_type',
        }

    def get_role(self):
        """Return the existing role, looked up by uuid if given, else by name.

        Returns None when no matching role exists.
        """
        uuid = self.module.params.get('uuid')
        if uuid:
            args = {'id': uuid}
        else:
            args = {'name': self.module.params.get('name')}
        # listRoles returns an empty result when nothing matches.
        roles = self.query_api('listRoles', **args)
        if roles:
            return roles['role'][0]
        return None

    def present_role(self):
        """Ensure the role exists, creating or updating it as needed."""
        role = self.get_role()
        if role:
            role = self._update_role(role)
        else:
            role = self._create_role(role)
        return role

    def _create_role(self, role):
        """Create the role; in check mode nothing is sent and None is returned."""
        self.result['changed'] = True
        args = {
            'name': self.module.params.get('name'),
            'type': self.module.params.get('role_type'),
            'description': self.module.params.get('description'),
        }
        if not self.module.check_mode:
            res = self.query_api('createRole', **args)
            role = res['role']
        return role

    def _update_role(self, role):
        """Update name/description when they differ from the current role."""
        args = {
            'id': role['id'],
            'name': self.module.params.get('name'),
            'description': self.module.params.get('description'),
        }
        if self.has_changed(args, role):
            self.result['changed'] = True
            if not self.module.check_mode:
                res = self.query_api('updateRole', **args)
                # The API as in 4.9 does not return an updated role yet
                if 'role' not in res:
                    role = self.get_role()
                else:
                    role = res['role']
        return role

    def absent_role(self):
        """Ensure the role is deleted; return the role that was removed (or None)."""
        role = self.get_role()
        if role:
            self.result['changed'] = True
            args = {
                'id': role['id'],
            }
            if not self.module.check_mode:
                self.query_api('deleteRole', **args)
        return role
def main():
    """Module entry point: dispatch on state and exit with the result JSON."""
    spec = cs_argument_spec()
    spec.update(dict(
        uuid=dict(aliases=['id']),
        name=dict(required=True),
        description=dict(),
        role_type=dict(choices=['User', 'DomainAdmin', 'ResourceAdmin', 'Admin'], default='User'),
        state=dict(choices=['present', 'absent'], default='present'),
    ))

    module = AnsibleModule(
        argument_spec=spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )

    manager = AnsibleCloudStackRole(module)
    if module.params.get('state') == 'absent':
        role = manager.absent_role()
    else:
        role = manager.present_role()

    module.exit_json(**manager.get_result(role))


if __name__ == '__main__':
    main()
|
{
"content_hash": "34b60c42904671659f128d0ce1a39fdd",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 98,
"avg_line_length": 25.301980198019802,
"alnum_prop": 0.5666210135002935,
"repo_name": "SergeyCherepanov/ansible",
"id": "001cdf924ffa992059f7ec4ce96faf56bf5efb96",
"size": "5295",
"binary": false,
"copies": "26",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/cloud/cloudstack/cs_role.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import re
import os
import xml.etree.ElementTree as ET
import json
import copy
from svtplay_dl.utils.urllib import urlparse, parse_qs, quote_plus, Cookie
from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.utils import get_http_data, is_py2_old, filenamify, CookieJar
from svtplay_dl.log import log
from svtplay_dl.fetcher.hls import hlsparse, HLS
from svtplay_dl.fetcher.rtmp import RTMP
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.subtitle import subtitle
class Tv4play(Service, OpenGraphThumbMixin):
    """Service implementation for TV4 Play (tv4play.se / tv4.se)."""
    supported_domains = ['tv4play.se', 'tv4.se']
    def __init__(self, url):
        Service.__init__(self, url)
        self.subtitle = None
        # Cookie jar shared across the login and asset API requests.
        self.cj = CookieJar()
    def get(self, options):
        """Yield stream fetchers (RTMP/HDS/HLS) and subtitles for self.url."""
        error, data = self.get_urldata()
        if error:
            log.error("Can't get the page")
            return
        vid = findvid(self.url, data)
        if vid is None:
            log.error("Can't find video id for %s", self.url)
            return
        # Log in first when credentials are supplied (premium content).
        if options.username and options.password:
            # Need a dummy cookie to save cookies..
            cc = Cookie(version=0, name='dummy',
                        value="",
                        port=None, port_specified=False,
                        domain='www.tv4play.se',
                        domain_specified=True,
                        domain_initial_dot=True, path='/',
                        path_specified=True, secure=False,
                        expires=None, discard=True, comment=None,
                        comment_url=None, rest={'HttpOnly': None})
            self.cj.set_cookie(cc)
            options.cookies = self.cj
            # Fetch the login form to extract the CSRF authenticity_token.
            error, data = get_http_data("https://www.tv4play.se/session/new?https=", cookiejar=self.cj)
            auth_token = re.search('name="authenticity_token" ([a-z]+="[^"]+" )?value="([^"]+)"', data)
            if not auth_token:
                log.error("Can't find authenticity_token needed for user / passwdord")
                return
            url = "https://www.tv4play.se/session"
            postdata3 = quote_plus("user_name=%s&password=%s&authenticity_token=%s" % (options.username, options.password, auth_token.group(2)), "=&")
            error, data = get_http_data(url, post=postdata3, cookiejar=self.cj)
            fail = re.search("<p class='failed-login'>([^<]+)</p>", data)
            if fail:
                log.error(fail.group(1))
                return
        # Ask the asset API how the video can be played; errors come back
        # as an XML document with a <code> element.
        url = "http://premium.tv4play.se/api/web/asset/%s/play" % vid
        error, data = get_http_data(url, cookiejar=self.cj)
        if error:
            xml = ET.XML(data)
            code = xml.find("code").text
            if code == "SESSION_NOT_AUTHENTICATED":
                log.error("Can't access premium content")
            elif code == "ASSET_PLAYBACK_INVALID_GEO_LOCATION":
                log.error("Can't downoad this video because of geoblocked.")
            else:
                log.error("Can't find any info for that video")
            return
        xml = ET.XML(data)
        ss = xml.find("items")
        # getiterator() was renamed iter() in newer ElementTree versions.
        if is_py2_old:
            sa = list(ss.getiterator("item"))
        else:
            sa = list(ss.iter("item"))
        if xml.find("live").text:
            if xml.find("live").text != "false":
                options.live = True
        if xml.find("drmProtected").text == "true":
            log.error("We cant download DRM protected content from this site.")
            return
        # Derive an output filename from the video id and service name.
        if options.output_auto:
            directory = os.path.dirname(options.output)
            options.service = "tv4play"
            title = "%s-%s-%s" % (options.output, vid, options.service)
            title = filenamify(title)
            if len(directory):
                options.output = "%s/%s" % (directory, title)
            else:
                options.output = title
        if self.exclude(options):
            return
        # First pass: RTMP and HDS (f4m) variants plus SMI subtitles.
        for i in sa:
            if i.find("mediaFormat").text == "mp4":
                base = urlparse(i.find("base").text)
                parse = urlparse(i.find("url").text)
                if "rtmp" in base.scheme:
                    swf = "http://www.tv4play.se/flash/tv4playflashlets.swf"
                    options.other = "-W %s -y %s" % (swf, i.find("url").text)
                    yield RTMP(copy.copy(options), i.find("base").text, i.find("bitrate").text)
                elif parse.path[len(parse.path)-3:len(parse.path)] == "f4m":
                    streams = hdsparse(copy.copy(options), i.find("url").text)
                    if streams:
                        for n in list(streams.keys()):
                            yield streams[n]
            elif i.find("mediaFormat").text == "smi":
                yield subtitle(copy.copy(options), "smi", i.find("url").text)
        # Second pass: query the same asset again for HLS (m3u8) variants.
        url = "http://premium.tv4play.se/api/web/asset/%s/play?protocol=hls" % vid
        error, data = get_http_data(url, cookiejar=self.cj)
        if error:
            return
        xml = ET.XML(data)
        ss = xml.find("items")
        if is_py2_old:
            sa = list(ss.getiterator("item"))
        else:
            sa = list(ss.iter("item"))
        for i in sa:
            if i.find("mediaFormat").text == "mp4":
                parse = urlparse(i.find("url").text)
                if parse.path.endswith("m3u8"):
                    streams = hlsparse(i.find("url").text)
                    for n in list(streams.keys()):
                        yield HLS(copy.copy(options), streams[n], n)
    def find_all_episodes(self, options):
        """Return episode URLs for the show in self.url via the tv4 web API.

        Stops early after options.all_last episodes when that limit is set.
        """
        parse = urlparse(self.url)
        show = parse.path[parse.path.find("/", 1)+1:]
        if not re.search("%", show):
            show = quote_plus(show)
        error, data = get_http_data("http://webapi.tv4play.se/play/video_assets?type=episode&is_live=false&platform=web&node_nids=%s&per_page=99999" % show)
        if error:
            log.error("Can't get api page")
            return
        jsondata = json.loads(data)
        episodes = []
        n = 1
        for i in jsondata["results"]:
            # Skip episodes whose free-availability window has expired;
            # a missing/unparsable value is treated as available.
            try:
                days = int(i["availability"]["availability_group_free"])
            except (ValueError, TypeError):
                days = 999
            if days > 0:
                video_id = i["id"]
                url = "http://www.tv4play.se/program/%s?video_id=%s" % (
                    show, video_id)
                episodes.append(url)
                if n == options.all_last:
                    break
                n += 1
        return episodes
def findvid(url, data):
    """Extract the numeric video id from a tv4play/tv4 URL or page markup.

    Returns the id as a string, or None when it cannot be determined.
    """
    if "tv4play.se" in url:
        # tv4play URLs carry the id in the ?video_id= query parameter.
        query = parse_qs(urlparse(url).query)
        if "video_id" not in query:
            return None
        return query["video_id"][0]

    # Plain tv4.se pages: try the page markup first, then the URL itself.
    vid_match = re.search(r"\"vid\":\"(\d+)\",", data)
    if vid_match:
        return vid_match.group(1)

    tail_match = re.search(r"-(\d+)$", url)
    if tail_match:
        return tail_match.group(1)

    # Last resort: the og:video meta tag carries a vid= parameter.
    meta_match = re.search(r"meta content='([^']+)' property='og:video'", data)
    if not meta_match:
        return None
    og_match = re.search(r"vid=(\d+)&", meta_match.group(1))
    if not og_match:
        log.error("Can't find video id for %s", url)
        return None
    return og_match.group(1)
|
{
"content_hash": "9d243214e4120ddcef6b846f65739ea7",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 156,
"avg_line_length": 39.97872340425532,
"alnum_prop": 0.5119744544970729,
"repo_name": "OakNinja/svtplay-dl",
"id": "3e2d361d52aa2b64b3da006f14cc9eb8c4631e51",
"size": "7605",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/svtplay_dl/service/tv4play.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "3731"
},
{
"name": "Perl",
"bytes": "4077"
},
{
"name": "Python",
"bytes": "154413"
},
{
"name": "Shell",
"bytes": "1995"
}
],
"symlink_target": ""
}
|
"""
Abstraction for different Python Qt bindings.
Supported Python Qt bindings are PyQt4 and PySide.
The Qt modules can be imported like this:
from DTL.qt.QtCore import QObject
from DTL.qt import QtGui, loadUi
The name of the selected binding is available in QT_BINDING.
The version of the selected binding is available in QT_BINDING_VERSION.
All available Qt modules are listed in QT_BINDING_MODULES.
The default binding order ('pyqt', 'pyside') can be overridden with a
SELECT_QT_BINDING_ORDER attribute on sys:
setattr(sys, 'SELECT_QT_BINDING_ORDER', [FIRST_NAME, NEXT_NAME, ..])
A specific binding can be selected with a SELECT_QT_BINDING attribute on sys:
setattr(sys, 'SELECT_QT_BINDING', MY_BINDING_NAME)
"""
import sys
from .binding_helper import loadUi, wrapinstance, QT_BINDING, QT_BINDING_MODULES, QT_BINDING_VERSION # @UnusedImport
# Register all binding modules as sub modules of this package
# (python_qt_binding) for easy importing.
def _register_submodules():
    """Expose each selected Qt binding module as an attribute and submodule."""
    for module_name, module in QT_BINDING_MODULES.items():
        sys.modules[__name__ + '.' + module_name] = module
        setattr(sys.modules[__name__], module_name, module)


_register_submodules()

# Keep the package namespace clean.  (The previous module-level loop left
# `module_name`/`module` behind and raised NameError on `del` when
# QT_BINDING_MODULES was empty.)
del _register_submodules
del sys
|
{
"content_hash": "a115919ba77bad8cc78994daaf385b36",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 117,
"avg_line_length": 36.46875,
"alnum_prop": 0.7532133676092545,
"repo_name": "rocktavious/DevToolsLib",
"id": "eace3df21edd3dc7be7df76c07239274b8ebe681",
"size": "2778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DTL/qt/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "155"
},
{
"name": "Python",
"bytes": "382205"
},
{
"name": "Shell",
"bytes": "1460"
}
],
"symlink_target": ""
}
|
import sys
import libsinan.jsax
class SimpleTaskHandler(object):
    """SAX-style JSON handler that pretty-prints sinan task events.

    libsinan.jsax calls the object_*/key/value callbacks as it parses the
    stream; `self.next` holds the setter selected by the most recent key,
    and a completed object is printed and reset in object_end().
    """
    def __init__(self):
        # Fields of the event object currently being parsed.
        self.event_type = None
        self.type = None
        self.desc = None
        self.task = None
        # Set to True once any fault event is seen; read by handle().
        self.fault = None
    def set_event_type(self, value):
        self.event_type = value
    def set_type(self, value):
        self.type = value
    def set_desc(self, value):
        self.desc = value
    def set_task(self, value):
        self.task = value
    def object_begin(self):
        return True
    def key(self, value):
        # Select which setter the following value callback should invoke.
        if value == "event_type":
            self.next = self.set_event_type
        elif value == "type":
            self.next = self.set_type
        elif value == "desc":
            self.next = self.set_desc
        elif value == "task":
            self.next = self.set_task
        return True
    def value_begin(self):
        return True
    def string(self, value):
        self.next(value)
        return True
    def number(self, value):
        self.next(value)
        return True
    def true(self):
        self.next(True)
        return True
    def false(self):
        self.next(False)
        return True
    def null(self):
        self.next(None)
        return True
    def array_begin(self):
        # Collect array elements; handed to the pending setter in array_end.
        self.array = []
        return True
    def array_end(self):
        self.next(self.array)
        self.array = None
        return True
    def object_end(self):
        """Print the completed event object and reset per-event state."""
        # io/wip task output is streamed verbatim without a newline.
        if (self.type == "task_event" and self.desc and
            (self.event_type == "io"
             or self.event_type == "wip")):
            sys.stdout.write(self.desc)
            sys.stdout.flush()
        elif self.type == "task_event" and self.desc:
            addition = ""
            if self.event_type == "fault":
                addition = " fault!!"
                self.fault = True
            print "[" + self.task + addition + "]", self.desc
        elif self.type == "task_event":
            print "[" + self.task + "]", self.event_type
        elif self.type == "run_event" and self.event_type == "stop" and self.desc:
            print "stopping, " + self.desc
        elif self.type == "run_event" and self.event_type == "fault":
            self.fault = True
            if self.desc:
                print self.desc
            print "run complete with faults"
        # Reset for the next event object.
        self.event_type = None
        self.type = None
        self.desc = None
        self.task = None
        self.next = None
        return True
    def value_end(self):
        return True
def handle(task, conn):
""" Handles output from the server. For the most part this just
parses the default types of event layout and prints it to standard out
in special cases it may do something else """
if conn.status == 200:
try:
handler = SimpleTaskHandler()
libsinan.jsax.parse(conn, SimpleTaskHandler())
if handler.fault:
return 1
return 0
except ValueError, msg:
print "Got an error back from sinan. Check the logs at ~/.sinan/logs/kernel.log"
else:
print conn.read()
return 1
|
{
"content_hash": "14de25dc3fa8cfa4a489d6152ac08e16",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 92,
"avg_line_length": 26.57377049180328,
"alnum_prop": 0.5363972856261567,
"repo_name": "asceth/sinan",
"id": "b08ace63698b09a9cecc4def6314bc6dede42cd0",
"size": "3242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/libsinan/output.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Erlang",
"bytes": "392798"
},
{
"name": "Python",
"bytes": "44935"
},
{
"name": "Ruby",
"bytes": "510"
}
],
"symlink_target": ""
}
|
import os
import numpy as np
from utils import env_paths
from parmesan.datasets import load_mnist_realval
from data_helper import create_semi_supervised, pad_targets
def _download():
"""
Download the MNIST dataset if it is not present.
:return: The train, test and validation set.
"""
data = load_mnist_realval(os.path.join(env_paths.get_data_path("mnist"), "mnist.pkl.gz"))
train_x, train_t, valid_x, valid_t, test_x, test_t = data
return (train_x, train_t), (test_x, test_t), (valid_x, valid_t)
def load_supervised(filter_std=0.1, train_valid_combine=False):
    """
    Load the MNIST dataset with padded (one-hot) targets.
    :param filter_std: The standard deviation threshold for keeping features.
    :param train_valid_combine: If the train set and validation set should be combined.
    :return: The train, test and validation sets.
    """
    train_set, test_set, valid_set = _download()

    if train_valid_combine:
        train_set = (np.append(train_set[0], valid_set[0], axis=0),
                     np.append(train_set[1], valid_set[1], axis=0))

    # Drop near-constant features, measured on the training inputs only.
    if filter_std > .0:
        train_x, train_t = train_set
        keep = np.std(train_x, axis=0) > filter_std
        valid_set = (valid_set[0][:, keep], valid_set[1])
        test_set = (test_set[0][:, keep], test_set[1])
        train_set = (train_x[:, keep], train_t)

    test_set = pad_targets(test_set)
    valid_set = pad_targets(valid_set)
    train_set = pad_targets(train_set)
    return train_set, test_set, valid_set
def load_semi_supervised(n_labeled=100, filter_std=0.1, seed=123456, train_valid_combine=False):
    """
    Load the MNIST dataset where only a fraction of data points are labeled. The amount
    of labeled data will be evenly distributed across classes.
    :param n_labeled: Number of labeled data points.
    :param filter_std: The standard deviation threshold for keeping features.
    :param seed: The seed for the pseudo random shuffle of data points.
    :param train_valid_combine: If the train set and validation set should be combined.
    :return: Train set unlabeled and labeled, test set, validation set.
    """
    train_set, test_set, valid_set = _download()
    # Combine the train set and validation set.
    if train_valid_combine:
        train_set = np.append(train_set[0], valid_set[0], axis=0), np.append(train_set[1], valid_set[1], axis=0)
    rng = np.random.RandomState(seed=seed)
    # Create the labeled and unlabeled data evenly distributed across classes.
    x_l, y_l, x_u, y_u = create_semi_supervised(train_set, n_labeled, rng)
    # Filter out the features with a low standard deviation.
    if filter_std > .0:
        idx_keep = np.std(x_u, axis=0) > filter_std
        x_l, x_u = x_l[:, idx_keep], x_u[:, idx_keep]
        valid_set = (valid_set[0][:, idx_keep], valid_set[1])
        test_set = (test_set[0][:, idx_keep], test_set[1])
    train_set = (x_u, y_u)
    train_set_labeled = (x_l, y_l)
    # Shuffle inputs and targets together so rows stay aligned.
    # NOTE(review): the -10 split assumes 10 one-hot target columns
    # (MNIST classes) — confirm against create_semi_supervised.
    train_x, train_t = train_set
    train_collect = np.append(train_x, train_t, axis=1)
    rng.shuffle(train_collect)
    train_set = (train_collect[:, :-10], train_collect[:, -10:])
    test_set = pad_targets(test_set)
    if valid_set is not None:
        valid_set = pad_targets(valid_set)
    return train_set, train_set_labeled, test_set, valid_set
|
{
"content_hash": "de99f3137a12a21fec3fe54b2029c89c",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 112,
"avg_line_length": 38.68181818181818,
"alnum_prop": 0.6551116333725029,
"repo_name": "JohnReid/auxiliary-deep-generative-models",
"id": "e8087fa2b2053e5eb47aa3b1c371048e01910601",
"size": "3404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "adgm/data_loaders/mnist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "93290"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the StatusPoint model/table."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='StatusPoint',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('address', models.CharField(max_length=255)),
                ('contact', models.CharField(max_length=200)),
                ('phone', models.CharField(max_length=20)),
                # NOTE(review): semantics of `indicator` (presumably a status
                # code) are not visible from the migration itself — confirm.
                ('indicator', models.IntegerField()),
                ('latitude', models.FloatField()),
                ('longitude', models.FloatField()),
            ],
        ),
    ]
|
{
"content_hash": "afc27e178fb69fd9be1f108dee90f0da",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 114,
"avg_line_length": 30.59259259259259,
"alnum_prop": 0.5472154963680388,
"repo_name": "LuckyMagpie/Ubi",
"id": "d871fadf865a28d8295f113461bc085c7ae15212",
"size": "898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Django/Ubi/status_point/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "710"
},
{
"name": "HTML",
"bytes": "755"
},
{
"name": "JavaScript",
"bytes": "858216"
},
{
"name": "Python",
"bytes": "14960"
}
],
"symlink_target": ""
}
|
import netaddr
import testscenarios
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent.linux import utils
from neutron.common import constants as n_const
from neutron.openstack.common import uuidutils
from neutron.tests.functional import base as functional_base
from neutron.tests import sub_base
# Name prefixes for devices/bridges created by the functional tests.
BR_PREFIX = 'test-br'
PORT_PREFIX = 'test-port'
# iptables mark value/mask used by the ICMP marking rules below.
MARK_VALUE = '0x1'
MARK_MASK = '0xffffffff'
ICMP_MARK_RULE = ('-j MARK --set-xmark %(value)s/%(mask)s'
                  % {'value': MARK_VALUE, 'mask': MARK_MASK})
MARKED_BLOCK_RULE = '-m mark --mark %s -j DROP' % MARK_VALUE
ICMP_BLOCK_RULE = '-p icmp -j DROP'
VETH_PREFIX = 'tst-vth'
# TODO(jschwarz): Move these two functions to neutron/tests/common/
get_rand_name = sub_base.get_rand_name
def get_rand_veth_name():
    """Return a random veth device name within the kernel name-length limit."""
    return get_rand_name(prefix=VETH_PREFIX,
                         max_length=n_const.DEVICE_NAME_MAX_LEN)
def get_rand_port_name():
    """Return a random OVS port name within the kernel name-length limit."""
    return get_rand_name(max_length=n_const.DEVICE_NAME_MAX_LEN,
                         prefix=PORT_PREFIX)
class BaseLinuxTestCase(functional_base.BaseSudoTestCase):

    def setUp(self):
        super(BaseLinuxTestCase, self).setUp()

    def check_command(self, cmd, error_text, skip_msg, run_as_root=False):
        """Run *cmd*; skip the test when the expected dependency error appears."""
        try:
            utils.execute(cmd, run_as_root=run_as_root)
        except RuntimeError as e:
            if error_text in str(e) and not self.fail_on_missing_deps:
                self.skipTest(skip_msg)
            raise

    def _create_namespace(self):
        """Create a uniquely-named network namespace, deleted on cleanup."""
        wrapper = ip_lib.IPWrapper()
        ns_name = "func-%s" % uuidutils.generate_uuid()
        namespace = wrapper.ensure_namespace(ns_name)
        self.addCleanup(namespace.netns.delete, namespace.namespace)
        return namespace

    def create_resource(self, name_prefix, creation_func, *args, **kwargs):
        """Create a new resource that does not already exist.

        :param name_prefix: The prefix for a randomly generated name
        :param creation_func: A function taking the name of the resource
            to be created as it's first argument.  An error is assumed
            to indicate a name collision.
        :param *args *kwargs: These will be passed to the create function.
        """
        while True:
            candidate = get_rand_name(max_length=n_const.DEVICE_NAME_MAX_LEN,
                                      prefix=name_prefix)
            try:
                return creation_func(candidate, *args, **kwargs)
            except RuntimeError:
                # Assume a name collision and retry with a new random name.
                continue

    def create_veth(self):
        """Create a randomly-named veth pair; the pair is removed on cleanup."""
        wrapper = ip_lib.IPWrapper()
        left_name = get_rand_veth_name()
        right_name = get_rand_veth_name()
        # Deleting one end of a veth pair removes both devices.
        self.addCleanup(wrapper.del_veth, left_name)
        return wrapper.add_veth(left_name, right_name)

    def set_namespace_gateway(self, port_dev, gateway_ip):
        """Set gateway for the namespace associated to the port."""
        if not port_dev.namespace:
            self.fail('tests should not change test machine gateway')
        port_dev.route.add_gateway(gateway_ip)

    def shift_ip_cidr(self, ip_cidr, offset=1):
        """Shift ip_cidr offset times.

        example: shift_ip_cidr("1.2.3.4/24", 2) ==> "1.2.3.6/24"
        """
        shifted = netaddr.IPNetwork(ip_cidr)
        shifted.value += offset
        return str(shifted)
# Regarding MRO, it goes BaseOVSLinuxTestCase, WithScenarios,
# BaseLinuxTestCase, ..., UnitTest, object. setUp is not defined in
# WithScenarios, so it will correctly be found in BaseLinuxTestCase.
class BaseOVSLinuxTestCase(testscenarios.WithScenarios, BaseLinuxTestCase):
    scenarios = [
        ('vsctl', dict(ovsdb_interface='vsctl')),
    ]

    def setUp(self):
        super(BaseOVSLinuxTestCase, self).setUp()
        self.config(group='OVS', ovsdb_interface=self.ovsdb_interface)
        self.ovs = ovs_lib.BaseOVS()
        self.ip = ip_lib.IPWrapper()

    def create_ovs_bridge(self, br_prefix=BR_PREFIX):
        """Create a randomly-named OVS bridge, destroyed on cleanup."""
        bridge = self.create_resource(br_prefix, self.ovs.add_bridge)
        self.addCleanup(bridge.destroy)
        return bridge

    def get_ovs_bridge(self, br_name):
        return ovs_lib.OVSBridge(br_name)

    def create_ovs_port_in_ns(self, br, ns):
        """Create an internal OVS port on *br* and move it into namespace *ns*."""
        def _make_port(name):
            br.replace_port(name, ('type', 'internal'))
            self.addCleanup(br.delete_port, name)
            return name

        device = self.ip.device(self.create_resource(PORT_PREFIX, _make_port))
        ns.add_device_to_namespace(device)
        device.link.set_up()
        return device

    def bind_namespace_to_cidr(self, namespace, br, ip_cidr):
        """Bind namespace to cidr (on layer2 and 3).

        Bind the namespace to a subnet by creating an ovs port in the
        namespace and configuring port ip.
        """
        subnet = netaddr.IPNetwork(ip_cidr)
        device = self.create_ovs_port_in_ns(br, namespace)
        device.addr.add(subnet.version, str(subnet), subnet.broadcast)
        return device
class BaseIPVethTestCase(BaseLinuxTestCase):
    # Defaults for the /24 used by prepare_veth_pairs.
    SRC_ADDRESS = '192.168.0.1'
    DST_ADDRESS = '192.168.0.2'
    BROADCAST_ADDRESS = '192.168.0.255'

    @staticmethod
    def _set_ip_up(device, cidr, broadcast, ip_version=4):
        """Assign *cidr* to *device* and bring the link up."""
        device.addr.add(ip_version=ip_version, cidr=cidr, broadcast=broadcast)
        device.link.set_up()

    def prepare_veth_pairs(self, src_addr=None,
                           dst_addr=None,
                           broadcast_addr=None,
                           src_ns=None, dst_ns=None,
                           src_veth=None,
                           dst_veth=None):
        """Create two namespaces joined by a /24-addressed veth pair."""
        src_addr = src_addr or self.SRC_ADDRESS
        dst_addr = dst_addr or self.DST_ADDRESS
        broadcast_addr = broadcast_addr or self.BROADCAST_ADDRESS
        src_name = src_veth or get_rand_veth_name()
        dst_name = dst_veth or get_rand_veth_name()
        src_ns = src_ns or self._create_namespace()
        dst_ns = dst_ns or self._create_namespace()

        src_dev, dst_dev = src_ns.add_veth(src_name, dst_name,
                                           dst_ns.namespace)
        self._set_ip_up(src_dev, '%s/24' % src_addr, broadcast_addr)
        self._set_ip_up(dst_dev, '%s/24' % dst_addr, broadcast_addr)
        return src_ns, dst_ns
|
{
"content_hash": "d0bb05f4306c5e3881f8735fa2c73347",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 79,
"avg_line_length": 35.954802259887,
"alnum_prop": 0.6134506599622879,
"repo_name": "cloudbase/neutron-virtualbox",
"id": "e822e1362b9aa7889101bf6eaa175f21839b105c",
"size": "6976",
"binary": false,
"copies": "1",
"ref": "refs/heads/virtualbox_agent",
"path": "neutron/tests/functional/agent/linux/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "Python",
"bytes": "8448838"
},
{
"name": "Shell",
"bytes": "12510"
}
],
"symlink_target": ""
}
|
"""Tests for the Chrome Preferences file parser."""
import unittest
from plaso.parsers import chrome_preferences
from tests.parsers import test_lib
class ChromePreferencesParserTest(test_lib.ParserTestCase):
  """Tests for the Google Chrome Preferences file parser."""

  def testParseFile(self):
    """Tests parsing a default profile Preferences file."""
    parser = chrome_preferences.ChromePreferencesParser()
    storage_writer = self._ParseFile(['Preferences'], parser)

    # Container counts: 30 event data entries, 30 events, no warnings.
    self.assertEqual(
        storage_writer.GetNumberOfAttributeContainers('event_data'), 30)
    self.assertEqual(
        storage_writer.GetNumberOfAttributeContainers('event'), 30)
    self.assertEqual(
        storage_writer.GetNumberOfAttributeContainers('extraction_warning'), 0)
    self.assertEqual(
        storage_writer.GetNumberOfAttributeContainers('recovery_warning'), 0)

    # Spot-check event data containers: (container index, expected values).
    expected_per_index = [
        (0, {'data_type': 'chrome:preferences:extensions_autoupdater',
             'message': 'Chrome extensions autoupdater last run',
             'recorded_time': '2014-11-12T13:01:43.926143+00:00'}),
        (1, {'data_type': 'chrome:preferences:extensions_autoupdater',
             'message': 'Chrome extensions autoupdater next run',
             'recorded_time': '2014-11-12T18:20:21.519200+00:00'}),
        (2, {'data_type': 'chrome:preferences:extensions_autoupdater',
             'message': 'Chrome history was cleared by user',
             'recorded_time': '2016-06-08T16:17:47.453766+00:00'}),
        (17, {'data_type': 'chrome:preferences:extension_installation',
              'extension_identifier': 'mgndgikekgjfcpckkfioiadnlibdjbkf',
              'extension_name': 'Chrome',
              'installation_time': '2014-11-05T18:31:24.154837+00:00',
              'path': (
                  'C:\\Program Files\\Google\\Chrome\\Application\\'
                  '38.0.2125.111\\resources\\chrome_app')}),
        (27, {'data_type': 'chrome:preferences:content_settings:exceptions',
              'last_visited_time': '2016-11-11T16:20:09.866137+00:00',
              'permission': 'midi_sysex',
              'primary_url': 'https://rawgit.com:443'})]

    for index, expected_event_values in expected_per_index:
      event_data = storage_writer.GetAttributeContainerByIndex(
          'event_data', index)
      self.CheckEventData(event_data, expected_event_values)


if __name__ == '__main__':
  unittest.main()
|
{
"content_hash": "5deffb476b44e8ae2b063cf57c0f9604",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 78,
"avg_line_length": 38.55,
"alnum_prop": 0.6987678339818417,
"repo_name": "joachimmetz/plaso",
"id": "47858fa2e9df49f35d8736fac6fdf40b69f2e3f7",
"size": "3131",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/parsers/chrome_preferences.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "4301"
},
{
"name": "Makefile",
"bytes": "122"
},
{
"name": "PowerShell",
"bytes": "1305"
},
{
"name": "Python",
"bytes": "5345755"
},
{
"name": "Shell",
"bytes": "27279"
},
{
"name": "YARA",
"bytes": "507"
}
],
"symlink_target": ""
}
|
import json
from os import makedirs
from os.path import dirname, exists
class Config(object):
    """A small JSON-file-backed configuration object with a fixed key set."""

    # The only keys that are loaded and persisted.
    _keys = (
        'homedir', 'hostname', 'password', 'username',
    )

    @classmethod
    def load(cls, filename):
        """Load a Config from *filename*, creating parent directories as needed.

        A missing file (or missing keys) yields None attribute values.
        """
        cls._ensure_parent(filename)
        if exists(filename):
            with open(filename, 'r') as fp:
                data = json.load(fp)
        else:
            data = {}
        config = cls()
        config._filename = filename
        for k in Config._keys:
            setattr(config, k, data.get(k, None))
        return config

    def save(self, filename=None):
        """Write the configuration as JSON to *filename* (default: load path)."""
        if filename is None:
            filename = self._filename
        data = {k: getattr(self, k) for k in Config._keys}
        self._ensure_parent(filename)
        with open(filename, 'w') as fp:
            json.dump(data, fp)

    @staticmethod
    def _ensure_parent(filename):
        # Bug fix: makedirs('') raises FileNotFoundError when *filename*
        # has no directory component, so only create a parent when the
        # dirname is non-empty.
        parent = dirname(filename)
        if parent:
            makedirs(parent, exist_ok=True)
|
{
"content_hash": "40670a4fc779bcb7e20aa850f4a40460",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 58,
"avg_line_length": 26.181818181818183,
"alnum_prop": 0.5543981481481481,
"repo_name": "MarkusH/django-vault-client",
"id": "1244c44e43c3009070de751088402f039bd40cf4",
"size": "864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vault_client/lib/config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "16397"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from six import text_type
from zerver.decorator import to_non_negative_int
from zerver.lib.actions import do_update_pointer
from zerver.lib.request import has_request_variables, JsonableError, REQ
from zerver.lib.response import json_success
from zerver.models import UserProfile, UserMessage
def get_pointer_backend(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    """Return the requesting user's current pointer as a JSON payload."""
    pointer_payload = {'pointer': user_profile.pointer}
    return json_success(pointer_payload)
@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    """Advance the user's pointer to *pointer*.

    Moving the pointer backwards (or to its current position) is a silent
    no-op; pointing at a message the user never received is an error.
    """
    if pointer <= user_profile.pointer:
        # The pointer only ever moves forward.
        return json_success()
    # The target must be a message this user actually received.
    try:
        UserMessage.objects.get(user_profile=user_profile, message__id=pointer)
    except UserMessage.DoesNotExist:
        raise JsonableError(_("Invalid message ID"))
    request._log_data["extra"] = "[%s]" % (pointer,)
    # Legacy Android clients rely on the server updating read flags for them.
    update_flags = request.client.name.lower() in ('android', "zulipandroid")
    do_update_pointer(user_profile, pointer, update_flags=update_flags)
    return json_success()
|
{
"content_hash": "a504d2eb844d821a199be6bc71adb80f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 79,
"avg_line_length": 36.69444444444444,
"alnum_prop": 0.7093111279333838,
"repo_name": "joyhchen/zulip",
"id": "890efba89520a891847f4cebf226327e06fefc8e",
"size": "1321",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/views/pointer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "249244"
},
{
"name": "Groovy",
"bytes": "5509"
},
{
"name": "HTML",
"bytes": "471134"
},
{
"name": "JavaScript",
"bytes": "1414673"
},
{
"name": "Nginx",
"bytes": "1280"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "82787"
},
{
"name": "Python",
"bytes": "3069570"
},
{
"name": "Ruby",
"bytes": "249748"
},
{
"name": "Shell",
"bytes": "37195"
}
],
"symlink_target": ""
}
|
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
# Django settings for graphite project.
# DO NOT MODIFY THIS FILE DIRECTLY - use local_settings.py instead
import sys, os
from os.path import join, dirname, abspath
WEBAPP_VERSION = '0.9.9'
DEBUG = False
JAVASCRIPT_DEBUG = False
# Filesystem layout (all directories should end in a /)
# NOTE: all paths are derived from this file's own location, so the webapp
# can run from any checkout/install path.
WEB_DIR = dirname( abspath(__file__) ) + '/'
WEBAPP_DIR = dirname( dirname(WEB_DIR) ) + '/'
GRAPHITE_ROOT = dirname( dirname(WEBAPP_DIR) ) + '/'
CONTENT_DIR = WEBAPP_DIR + 'content/'
CSS_DIR = CONTENT_DIR + 'css/'
THIRDPARTY_DIR = WEB_DIR + 'thirdparty/'
# Environment variables override the default conf/storage locations.
CONF_DIR = os.environ.get('GRAPHITE_CONF_DIR', GRAPHITE_ROOT + 'conf/')
STORAGE_DIR = os.environ.get('GRAPHITE_STORAGE_DIR', GRAPHITE_ROOT + 'storage/')
LISTS_DIR = STORAGE_DIR + 'lists/'
INDEX_FILE = STORAGE_DIR + 'index'
WHITELIST_FILE = LISTS_DIR + 'whitelist'
LOG_DIR = STORAGE_DIR + 'log/webapp/'
CLUSTER_SERVERS = []
sys.path.insert(0, WEBAPP_DIR)
# Allow local versions of these libs to take precedence
sys.path.append(THIRDPARTY_DIR)
# Do not override WHISPER_DIR, RRD_DIR, etc directly in
# local_settings.py, instead you should override DATA_DIRS
# to list all directories that should be searched for files
# of a supported format.
WHISPER_DIR = STORAGE_DIR + 'whisper/'
RRD_DIR = STORAGE_DIR + 'rrd/'
# RRD support is optional: only search the RRD data directory when the
# python rrdtool bindings are importable.
try:
  import rrdtool
  DATA_DIRS = [WHISPER_DIR, RRD_DIR]
except ImportError:
  # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
  # and hid unrelated errors raised while importing rrdtool.
  DATA_DIRS = [WHISPER_DIR]
#Memcache settings
MEMCACHE_HOSTS = []
DEFAULT_CACHE_DURATION = 60 #metric data and graphs are cached for one minute by default
LOG_CACHE_PERFORMANCE = False
# Remote store settings
# Timeouts (seconds) for talking to the other webapps in CLUSTER_SERVERS.
REMOTE_STORE_FETCH_TIMEOUT = 6
REMOTE_STORE_FIND_TIMEOUT = 2.5
REMOTE_STORE_RETRY_DELAY = 60
REMOTE_FIND_CACHE_DURATION = 300
#Remote rendering settings
REMOTE_RENDERING = False #if True, rendering is delegated to RENDERING_HOSTS
RENDERING_HOSTS = []
REMOTE_RENDER_CONNECT_TIMEOUT = 1.0
LOG_RENDERING_PERFORMANCE = False
#Miscellaneous settings
CARBONLINK_HOSTS = ["127.0.0.1:7002"]
CARBONLINK_TIMEOUT = 1.0
SMTP_SERVER = "localhost"
DOCUMENTATION_URL = "http://graphite.readthedocs.org/"
ALLOW_ANONYMOUS_CLI = True
LOG_METRIC_ACCESS = False
LEGEND_MAX_ITEMS = 10
#Authentication settings
USE_LDAP_AUTH = False
LDAP_SERVER = "" # "ldapserver.mydomain.com"
LDAP_PORT = 389
LDAP_SEARCH_BASE = "" # "OU=users,DC=mydomain,DC=com"
LDAP_BASE_USER = "" # "CN=some_readonly_account,DC=mydomain,DC=com"
LDAP_BASE_PASS = "" # "my_password"
LDAP_USER_QUERY = "" # "(username=%s)" For Active Directory use "(sAMAccountName=%s)"
LDAP_URI = None
#Set this to True to delegate authentication to the web server
USE_REMOTE_USER_AUTHENTICATION = False
#Additional authentication backends to prepend
ADDITIONAL_AUTHENTICATION_BACKENDS = []
#Database settings, sqlite is intended for single-server setups
DATABASE_ENGINE = 'sqlite3' # 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_NAME = STORAGE_DIR + 'graphite.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
DASHBOARD_CONF = join(CONF_DIR, 'dashboard.conf')
GRAPHTEMPLATES_CONF = join(CONF_DIR, 'graphTemplates.conf')
ADMINS = ()
MANAGERS = ADMINS
TEMPLATE_DIRS = (
  join(WEB_DIR, 'templates'),
)
#Pull in overrides from local_settings.py
try:
  from graphite.local_settings import *
except ImportError:
  print >> sys.stderr, "Could not import graphite.local_settings, using defaults!"
# LDAP_URI is derived from LDAP_SERVER/LDAP_PORT unless the local settings
# supplied one explicitly.
if USE_LDAP_AUTH and LDAP_URI is None:
  LDAP_URI = "ldap://%s:%d/" % (LDAP_SERVER, LDAP_PORT)
#Django settings below, do not touch!
APPEND_SLASH = False
TEMPLATE_DEBUG = DEBUG
# Use memcached when hosts were configured above, otherwise a no-op cache.
if MEMCACHE_HOSTS:
  CACHE_BACKEND = 'memcached://' + ';'.join(MEMCACHE_HOSTS) + ('/?timeout=%d' % DEFAULT_CACHE_DURATION)
else:
  CACHE_BACKEND = "dummy:///"
# Local time zone for this installation. All choices can be found here:
# http://www.postgresql.org/docs/current/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
#TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# Absolute path to the directory that holds media.
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
  'django.template.loaders.filesystem.load_template_source',
  'django.template.loaders.app_directories.load_template_source',
)
MIDDLEWARE_CLASSES = (
  'django.middleware.common.CommonMiddleware',
  'django.middleware.gzip.GZipMiddleware',
  'django.contrib.sessions.middleware.SessionMiddleware',
  'django.contrib.auth.middleware.AuthenticationMiddleware',
)
if USE_REMOTE_USER_AUTHENTICATION:
  MIDDLEWARE_CLASSES += ('django.contrib.auth.middleware.RemoteUserMiddleware',)
ROOT_URLCONF = 'graphite.urls'
INSTALLED_APPS = (
  'graphite.metrics',
  'graphite.render',
  'graphite.cli',
  'graphite.browser',
  'graphite.composer',
  'graphite.account',
  'graphite.dashboard',
  'graphite.whitelist',
  'graphite.events',
  'django.contrib.auth',
  'django.contrib.sessions',
  'django.contrib.admin',
  'django.contrib.contenttypes',
  'tagging',
)
# Backend order matters: later inserts at index 0 take precedence, so the
# final priority is ADDITIONAL backends, then LDAP, then the default.
if USE_REMOTE_USER_AUTHENTICATION:
  AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.RemoteUserBackend']
else:
  AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend']
if USE_LDAP_AUTH:
  AUTHENTICATION_BACKENDS.insert(0,'graphite.account.ldapBackend.LDAPBackend')
if ADDITIONAL_AUTHENTICATION_BACKENDS:
  for backend in ADDITIONAL_AUTHENTICATION_BACKENDS:
    AUTHENTICATION_BACKENDS.insert(0, backend)
|
{
"content_hash": "1636a4727e3f5b4f3c0559511dcf48ad",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 103,
"avg_line_length": 32.32367149758454,
"alnum_prop": 0.7387535495441638,
"repo_name": "crocodoc/graphite",
"id": "c684064d16576f50aef2e3147fa0e93ccbe43cf0",
"size": "6691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/graphite/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""Classify the iris dataset with k-nearest neighbors (Python 2 script)."""
from sklearn import datasets
iris = datasets.load_iris()
# Features (4 measurements per flower) and target class labels.
X = iris.data
y = iris.target
from sklearn.cross_validation import train_test_split
# 150 in iris dataset, 75 for training, 75 for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5)
# # decision tree
# from sklearn import tree
# my_classifier = tree.DecisionTreeClassifier()
# K-nearest neighbors
from sklearn.neighbors import KNeighborsClassifier
my_classifier = KNeighborsClassifier()
my_classifier.fit(X_train, y_train)
predictions = my_classifier.predict(X_test)
print predictions
from sklearn.metrics import accuracy_score
# Fraction of held-out samples predicted correctly.
print accuracy_score(y_test, predictions)
|
{
"content_hash": "a537739fe6559a40b2d3cb19f85feee7",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 27.583333333333332,
"alnum_prop": 0.7764350453172205,
"repo_name": "jacky-ttt/CodingEveryday",
"id": "ec10b3f92c74d3be4260b705bf7e993f49e3eb7b",
"size": "681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Day004-Machine Learning#3-4/#4/pipeline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "30613"
},
{
"name": "CMake",
"bytes": "1978"
},
{
"name": "CSS",
"bytes": "1377"
},
{
"name": "HTML",
"bytes": "25071"
},
{
"name": "Java",
"bytes": "28278"
},
{
"name": "JavaScript",
"bytes": "1392463"
},
{
"name": "Kotlin",
"bytes": "3828"
},
{
"name": "Objective-C",
"bytes": "1473"
},
{
"name": "Python",
"bytes": "3639"
},
{
"name": "Ruby",
"bytes": "1107"
},
{
"name": "Swift",
"bytes": "29392"
},
{
"name": "TeX",
"bytes": "224"
}
],
"symlink_target": ""
}
|
from typing import Dict
from .base import GenericTensor, Pipeline
# Can't use @add_end_docstrings(PIPELINE_INIT_ARGS) here because this one does not accept `binary_output`
class FeatureExtractionPipeline(Pipeline):
    """
    Feature extraction pipeline using no model head. This pipeline extracts the hidden states from the base
    transformer, which can be used as features in downstream tasks.
    Example:
    ```python
    >>> from transformers import pipeline
    >>> extractor = pipeline(model="bert-base-uncased", task="feature-extraction")
    >>> result = extractor("This is a simple test.", return_tensors=True)
    >>> result.shape  # This is a tensor of shape [1, sequence_length, hidden_dimension] representing the input string.
    torch.Size([1, 8, 768])
    ```
    Learn more about the basics of using a pipeline in the [pipeline tutorial](../pipeline_tutorial)
    This feature extraction pipeline can currently be loaded from [`pipeline`] using the task identifier:
    `"feature-extraction"`.
    All models may be used for this pipeline. See a list of all models, including community-contributed models on
    [huggingface.co/models](https://huggingface.co/models).
    Arguments:
        model ([`PreTrainedModel`] or [`TFPreTrainedModel`]):
            The model that will be used by the pipeline to make predictions. This needs to be a model inheriting from
            [`PreTrainedModel`] for PyTorch and [`TFPreTrainedModel`] for TensorFlow.
        tokenizer ([`PreTrainedTokenizer`]):
            The tokenizer that will be used by the pipeline to encode data for the model. This object inherits from
            [`PreTrainedTokenizer`].
        modelcard (`str` or [`ModelCard`], *optional*):
            Model card attributed to the model for this pipeline.
        framework (`str`, *optional*):
            The framework to use, either `"pt"` for PyTorch or `"tf"` for TensorFlow. The specified framework must be
            installed.
            If no framework is specified, will default to the one currently installed. If no framework is specified and
            both frameworks are installed, will default to the framework of the `model`, or to PyTorch if no model is
            provided.
        return_tensor (`bool`, *optional*):
            If `True`, returns a tensor according to the specified framework, otherwise returns a list.
        task (`str`, defaults to `""`):
            A task-identifier for the pipeline.
        args_parser ([`~pipelines.ArgumentHandler`], *optional*):
            Reference to the object in charge of parsing supplied pipeline parameters.
        device (`int`, *optional*, defaults to -1):
            Device ordinal for CPU/GPU supports. Setting this to -1 will leverage CPU, a positive will run the model on
            the associated CUDA device id.
    """

    def _sanitize_parameters(self, truncation=None, tokenize_kwargs=None, return_tensors=None, **kwargs):
        # Split the user-supplied kwargs into (preprocess, forward, postprocess)
        # parameter dicts, as required by the Pipeline base class.
        if tokenize_kwargs is None:
            tokenize_kwargs = {}
        if truncation is not None:
            # Refuse an ambiguous configuration rather than silently picking one.
            if "truncation" in tokenize_kwargs:
                raise ValueError(
                    "truncation parameter defined twice (given as keyword argument as well as in tokenize_kwargs)"
                )
            tokenize_kwargs["truncation"] = truncation
        preprocess_params = tokenize_kwargs
        postprocess_params = {}
        if return_tensors is not None:
            postprocess_params["return_tensors"] = return_tensors
        return preprocess_params, {}, postprocess_params

    def preprocess(self, inputs, **tokenize_kwargs) -> Dict[str, GenericTensor]:
        # Tokenize into tensors of the framework the pipeline was built for.
        return_tensors = self.framework
        model_inputs = self.tokenizer(inputs, return_tensors=return_tensors, **tokenize_kwargs)
        return model_inputs

    def _forward(self, model_inputs):
        # No task head: the raw model output is the feature tensor.
        model_outputs = self.model(**model_inputs)
        return model_outputs

    def postprocess(self, model_outputs, return_tensors=False):
        # [0] is the first available tensor, logits or last_hidden_state.
        if return_tensors:
            return model_outputs[0]
        if self.framework == "pt":
            return model_outputs[0].tolist()
        elif self.framework == "tf":
            return model_outputs[0].numpy().tolist()
        # NOTE(review): any other framework value falls through and returns
        # None implicitly -- unchanged from the original behavior.

    def __call__(self, *args, **kwargs):
        """
        Extract the features of the input(s).
        Args:
            args (`str` or `List[str]`): One or several texts (or one list of texts) to get the features of.
        Return:
            A nested list of `float`: The features computed by the model.
        """
        return super().__call__(*args, **kwargs)
|
{
"content_hash": "ff4393eee198b7ea0e21a96515acb061",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 119,
"avg_line_length": 44.65714285714286,
"alnum_prop": 0.6508850501172958,
"repo_name": "huggingface/transformers",
"id": "212ad0a121972a84657e750f9a0bef88e7b46ac7",
"size": "4689",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/transformers/pipelines/feature_extraction.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6021"
},
{
"name": "C++",
"bytes": "12959"
},
{
"name": "Cuda",
"bytes": "175419"
},
{
"name": "Dockerfile",
"bytes": "18218"
},
{
"name": "Jsonnet",
"bytes": "937"
},
{
"name": "Makefile",
"bytes": "3430"
},
{
"name": "Python",
"bytes": "35742012"
},
{
"name": "Shell",
"bytes": "30374"
}
],
"symlink_target": ""
}
|
# Sphinx build configuration for the tvb-hpc documentation.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.mathjax',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
    'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'tvb-hpc'
copyright = '2017, TVB-HPC Contributers'
author = 'TVB-HPC Contributers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'tvb-hpcdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'tvb-hpc.tex', 'tvb-hpc Documentation',
     'TVB-HPC Contributers', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'tvb-hpc', 'tvb-hpc Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'tvb-hpc', 'tvb-hpc Documentation',
     author, 'tvb-hpc', 'One line description of project.',
     'Miscellaneous'),
]
|
{
"content_hash": "74f6f33de6313bac1b076d2e7bd0edcb",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 78,
"avg_line_length": 29.246153846153845,
"alnum_prop": 0.6601788532351394,
"repo_name": "the-virtual-brain/tvb-hpc",
"id": "d0727ea4bf8bc1e4018a62cbf61a28db51072897",
"size": "4877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "77071"
},
{
"name": "Cuda",
"bytes": "379"
},
{
"name": "Dockerfile",
"bytes": "916"
},
{
"name": "Python",
"bytes": "321186"
},
{
"name": "Shell",
"bytes": "602"
}
],
"symlink_target": ""
}
|
from csc.nl.euro import StemmedEuroNL
class NL(StemmedEuroNL):
    """Language support initialized with code 'it' (presumably Italian),
    built on the shared European stemmed-NL base class."""

    def __init__(self):
        # Equivalent to super(NL, self).__init__('it') under single inheritance.
        StemmedEuroNL.__init__(self, 'it')
{
"content_hash": "18c9e92d790c597637c961cb91fcc2b6",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 25.4,
"alnum_prop": 0.6299212598425197,
"repo_name": "pbarton666/buzz_bot",
"id": "2b559b66060eee35f0b7a8de28271e66a8d46db5",
"size": "127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djangoproj/djangoapp/csc/nl/it/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "10587"
},
{
"name": "CSS",
"bytes": "36405"
},
{
"name": "Genshi",
"bytes": "61664"
},
{
"name": "Groff",
"bytes": "3300"
},
{
"name": "HTML",
"bytes": "201497"
},
{
"name": "JavaScript",
"bytes": "39255"
},
{
"name": "Makefile",
"bytes": "200"
},
{
"name": "Python",
"bytes": "10629713"
},
{
"name": "Ruby",
"bytes": "12049"
}
],
"symlink_target": ""
}
|
'''
EPOW (Emergency POWer off)
--------------------------
This module tests the EPOW feature incase of FSP systems.
1. EPOW3Random.
Simulate random EPOW3 temperature to check whether
OPAL notify EPOW notification to Host OS. Once Host
gets notified Host should do a graceful shutdown.
2. EPOW3LOW
Simualate temperatures less than EPOW3 threshold
and check whether Host OS is alive or not.
'''
import time
import subprocess
import subprocess
import re
import sys
import pexpect
import random
from common.OpTestConstants import OpTestConstants as BMC_CONST
from common.OpTestError import OpTestError
import unittest
import OpTestConfiguration
from common.OpTestSystem import OpSystemState
import logging
import OpTestLogger
log = OpTestLogger.optest_logger_glob.get_logger(__name__)
class EPOWBase(unittest.TestCase):
    """Shared plumbing for the FSP EPOW tests.

    Wires up the test-harness objects (IPMI, FSP, host, system) and provides
    helpers to read the platform EPOW temperature limits and to simulate
    ambient temperatures via the FSP.
    """

    def setUp(self):
        conf = OpTestConfiguration.conf
        self.cv_IPMI = conf.ipmi()
        self.cv_SYSTEM = conf.system()
        self.cv_HOST = conf.host()
        self.cv_FSP = conf.bmc()
        self.platform = conf.platform()
        self.bmc_type = conf.args.bmc_type
        self.util = self.cv_SYSTEM.util
        # EPOW tests need a fully booted, reachable host OS to start from.
        self.cv_SYSTEM.goto_state(OpSystemState.OS)
        self.util.PingFunc(self.cv_HOST.ip, BMC_CONST.PING_RETRY_POWERCYCLE)

    def get_epow_limits(self):
        """Parse the EPOW/CRITICAL temperature limits from the FSP .def file.

        Returns a dict mapping limit names (e.g. 'EPOW3', 'CRITICAL_RESET')
        to their values as strings.
        """
        fsp_MTM = self.cv_FSP.get_raw_mtm()
        # The MTM suffix "-XY..." encodes the enclosure variant as "YsXu".
        matchObj = re.search(r"-\d{2}.", fsp_MTM)
        if matchObj:
            digits = matchObj.group()[1:3]
            var = digits[1] + "s" + digits[0] + "u"
        # NOTE(review): if the MTM does not match, `var` stays undefined and
        # the format below raises NameError -- unchanged from the original.
        self.proc_gen = self.cv_HOST.host_get_proc_gen(console=1)
        print(self.proc_gen)
        if self.proc_gen in ["POWER8", "POWER8E"]:
            file = '/opt/fips/components/engd/power_management_tul_%s.def' % (
                var)
        elif self.proc_gen in ["POWER9", "POWER9P"]:
            file = '/opt/fips/components/engd/power_management_zz_%s.def' % (
                var)
        # NEBS enabled/disabled selects which half of the limit table applies.
        cmd = "registry -l svpd/NebsEnabled | sed -n '2p' | awk {'print $1'}"
        rc = self.cv_FSP.fspc.run_command(cmd)
        if int(rc) == 0:
            cmd = "cat %s | grep -i -e 'EPOW' -e 'CRITICAL' | head -n 6" % file
        else:
            cmd = "cat %s | grep -i -e 'EPOW' -e 'CRITICAL' | tail -n 6" % file
        # Checking for file existence
        rc = self.cv_FSP.fspc.run_command("test -f %s" % file)
        log.debug("The def file for this machine is available")
        limits = self.cv_FSP.fspc.run_command(cmd)
        log.debug(limits)
        # Strip trailing '# ...' comments from each limit line.
        cmd = cmd + "| cut -d '#' -f 1"
        limits = self.cv_FSP.fspc.run_command(cmd)
        # Lines look like "EPOW3 = 38"; normalize whitespace and split on '='.
        dic = {}
        for line in limits.splitlines():
            name, _, value = line.replace(" ", "").replace("\t", "").partition("=")
            dic[name] = value
        return dic

    def get_ambient_temp_ipmi(self):
        """Return the chassis inlet (ambient) temperature read via IPMI SDR."""
        res = self.cv_IPMI.ipmitool.run('sdr list')
        log.debug(res)
        searchObj = re.search(r"Inlet Temp \| (\d{2,})", res)
        if searchObj:
            return searchObj.group(1)
        raise OpTestError("IPMI: failed to read Inlet temperature")

    def get_cmd_for_temp(self, temp):
        """Build the FSP `spif` command that simulates ambient temp *temp*."""
        # The sensor encodes temperature in quarter-degree units, as hex.
        val_h = format(temp * 4, 'x')
        cmd = 'echo "0000D000A0220004000700%s" | spif -' % val_h
        log.debug(cmd)
        return cmd

    def check_graceful_shutdown(self, pty):
        """Watch the console up to 120s for a graceful power-down message."""
        try:
            rc = pty.expect_exact(
                ["reboot: Power down", "Power down"], timeout=120)
            if rc == 0 or rc == 1:
                log.debug(pty.after)
                log.debug("System got graceful shutdown")
        except pexpect.TIMEOUT:
            # No shutdown message seen: the host is still up.
            log.debug("System is in active state")
            log.debug(pty.before)

    def get_epow_list_temps(self):
        """Return the temps in [EPOW3, CRITICAL) and cache the raw limits.

        Also stores the parsed limits on ``self.limits`` for later lookup via
        :meth:`get_temp_for_param`.
        """
        self.limits = self.get_epow_limits()
        log.debug(self.limits)
        # Original had an unreachable `return None` after the loop -- removed.
        return list(range(int(self.limits['EPOW3']),
                          int(self.limits['CRITICAL'])))

    def get_temp_for_param(self, param):
        """Look up a cached limit value (call get_epow_list_temps() first)."""
        return self.limits[param]
class EPOW3Random(EPOWBase):
    '''
    This testcase tests the EPOW feature of the FSP. Thus, it is only applicable
    to FSP based systems and will be skipped on other BMC types.
    This test will:
    1. It will gather EPOW limits
    2. We will choose some random EPOW temp(test_temp) in b/w those limits
    3. Simulate that temperature(test_temp)
    4. Verify graceful shutdown happened or not
    5. Once system reaches standby, simulate the ambient temp
    to EPOW3_RESET temperature(reset_temp) to bring back the system.
    6. Bring back the system again to runtime.
    If user faces any problem in bringing the system UP please run below
    command "smgr toolReset"
    '''

    def runTest(self):
        if "FSP" not in self.bmc_type:
            self.skipTest("FSP specific OPAL EPOW Test.")
        console = self.cv_SYSTEM.console
        console.run_command("uname -a")
        # Range of EPOW temperatures from EPOW3 to CRITICAL
        temp_list = self.get_epow_list_temps()
        temp_num = int(random.choice(temp_list)) if False else None  # placeholder removed below
class EPOW3LOW(EPOWBase):
    '''
    This test case will follow below procedure:
    1. Based on Nebsenabled will get EPOW limits from FSP using def file present
    in `/opt/fips/components/engd/`. Different systems have different EPOW limits.
    2. Test EPOW3_LOW---> Will test temperatures lower than EPOW3 temperature,
    a. From FSP it simulate to lesser ambient temperatures than EPOW3 temperature
    b. In this case system should be alive and it should not cause system shut-down.
    If user faces any problem in bringing the system UP please run below
    command "smgr toolReset" in fsp console
    '''

    def runTest(self):
        if "FSP" not in self.bmc_type:
            self.skipTest("FSP specific OPAL EPOW Test.")
        console = self.cv_SYSTEM.console
        # Range of EPOW temperatures from EPOW3 to CRITICAL
        temp_list = self.get_epow_list_temps()
        log.debug(temp_list)
        # Testing ambient temperatures lower than EPOW3, system should be alive
        EPOW3 = self.get_temp_for_param('EPOW3')
        temp_2 = int(EPOW3)-2
        temp_1 = int(EPOW3)-1
        for test_temp in [temp_1, temp_2]:
            log.debug(
                "========================EPOW3_LOW:%i==========================" % test_temp)
            log.debug(
                "*********Testing ambient temperatures lower than EPOW3, system should be alive***********")
            temp_prev = self.get_ambient_temp_ipmi()
            log.debug("Current ambient temp: %s " % temp_prev)
            cmd = self.get_cmd_for_temp(test_temp)
            log.debug("Simulating the Ambient Temp: %s" % test_temp)
            log.debug("Running the command on FSP: %s" % cmd)
            res = self.cv_FSP.fspc.run_command(cmd)
            log.debug(res)
            # Monitor the system status for any changes from runtime
            # (poll ~10 times over ~60s; any state change fails the test).
            tries = 10
            for i in range(1, tries+1):
                state = self.cv_FSP.get_sys_status()
                log.debug("Current system status: %s" % state)
                self.assertEqual(state, 'runtime',
                                 "EPOW3_LOW is failing at this temp: %s" % test_temp)
                time.sleep(6)
            self.util.PingFunc(
                self.cv_HOST.ip, BMC_CONST.PING_RETRY_POWERCYCLE)
            temp_current = self.get_ambient_temp_ipmi()
            log.debug("Current ambient temp: %s " % temp_current)
            # Confirm the FSP actually applied the simulated temperature.
            self.assertEqual(int(temp_current), int(test_temp),
                             "EPOW3_LOW is working, looks like temp simulated is different")
def suite():
    """Assemble the EPOW regression suite: LOW scenario first, then Random."""
    tests = unittest.TestSuite()
    for case in (EPOW3LOW(), EPOW3Random()):
        tests.addTest(case)
    # TODO: add EPOW3CRITICAL() once implemented
    return tests
|
{
"content_hash": "e06adae400178b8bfccf54f4200c2eda",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 108,
"avg_line_length": 39.19771863117871,
"alnum_prop": 0.5925889999029974,
"repo_name": "open-power/op-test-framework",
"id": "3a217237a86881ecd12347387225985aae875057",
"size": "11151",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testcases/EPOW.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2080"
},
{
"name": "Python",
"bytes": "1311268"
},
{
"name": "Shell",
"bytes": "71724"
},
{
"name": "Tcl",
"bytes": "18813"
}
],
"symlink_target": ""
}
|
class WindowAdapter(object):
    """Adapter with empty implementations of the window-event callbacks.

    Keyword arguments passed to the constructor are installed as instance
    attributes, so individual callbacks can be overridden per instance,
    e.g. ``WindowAdapter(windowClosing=my_handler)``. Positional arguments
    are accepted and ignored.
    """

    def __init__(self, *mth, **kw):
        object.__init__(self)
        for name, value in kw.items():
            setattr(self, name, value)

    # Each callback below is deliberately a no-op; subclasses (or the
    # constructor keywords) override only the events they care about.
    def windowActivated(self, e):
        pass

    def windowClosed(self, e):
        pass

    def windowClosing(self, e):
        pass

    def windowDeactivated(self, e):
        pass

    def windowDeiconified(self, e):
        pass

    def windowIconified(self, e):
        pass

    def windowOpened(self, e):
        pass
|
{
"content_hash": "dc178af588ad5073cf2d160c8562937b",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 37,
"avg_line_length": 19.814814814814813,
"alnum_prop": 0.497196261682243,
"repo_name": "rrader/jpype",
"id": "0826ee0d90884e03e1b83bc516dd0151fc07dd41",
"size": "535",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "jpype/awt/event/WindowAdapter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "C",
"bytes": "37981"
},
{
"name": "C++",
"bytes": "495450"
},
{
"name": "Java",
"bytes": "31200"
},
{
"name": "Objective-C",
"bytes": "387"
},
{
"name": "PowerShell",
"bytes": "9130"
},
{
"name": "Python",
"bytes": "131922"
},
{
"name": "XSLT",
"bytes": "3460"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
    """Factory for the shared_base_vibro_unit_enhancement tangible template.

    The *kernel* argument is part of the engine's factory contract but is
    not used by this template.
    """
    obj = Tangible()
    obj.template = "object/tangible/component/weapon/shared_base_vibro_unit_enhancement.iff"
    obj.attribute_template_id = -1
    obj.stfName("craft_weapon_ingredients_n", "blade_vibro_unit_enhancement")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return obj
|
{
"content_hash": "debc48cafde7c5afb656a8eb0e1a9a4d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 92,
"avg_line_length": 27.53846153846154,
"alnum_prop": 0.723463687150838,
"repo_name": "obi-two/Rebelion",
"id": "a191d343d8a617d057dbef771c6ce40dc302a525",
"size": "503",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/component/weapon/shared_base_vibro_unit_enhancement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the questions app: Answer, GivenAnswer, Question.

    Answer.question is attached via a trailing AddField so that Answer can
    be created before Question without a forward reference.
    """

    dependencies = [
        ('participants', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Answer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('answer', models.CharField(max_length=255)),
                ('correct', models.BooleanField()),
            ],
        ),
        migrations.CreateModel(
            name='GivenAnswer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('given_answer', models.ForeignKey(to='questions.Answer')),
                ('participant', models.ForeignKey(to='participants.Participant')),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('question', models.CharField(max_length=255)),
            ],
        ),
        # Added last so that both Answer and Question already exist.
        migrations.AddField(
            model_name='answer',
            name='question',
            field=models.ForeignKey(to='questions.Question'),
        ),
    ]
|
{
"content_hash": "4f2b8ba5096632e902f064c0275b5761",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 114,
"avg_line_length": 33.90243902439025,
"alnum_prop": 0.5424460431654676,
"repo_name": "kkujawinski/talktoyourapp",
"id": "d57361ebad097d909167f205ab843fc630f6bf49",
"size": "1414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "questions/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9151"
},
{
"name": "HTML",
"bytes": "9381"
},
{
"name": "JavaScript",
"bytes": "672"
},
{
"name": "Python",
"bytes": "26700"
},
{
"name": "Shell",
"bytes": "141"
}
],
"symlink_target": ""
}
|
# Script entry point: import the application object and start its server.
# NOTE(review): `app` looks like a Flask-style application whose run()
# starts a blocking development server -- confirm in the yafblog package.
from yafblog import app
app.run()
|
{
"content_hash": "d3b112a4ba613d0b2426e8b95e2a631e",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 23,
"avg_line_length": 17,
"alnum_prop": 0.7647058823529411,
"repo_name": "suligap/yafblog",
"id": "427d7543222d31b16b0410f169eeb658f1e6de04",
"size": "34",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "28431"
}
],
"symlink_target": ""
}
|
"""Mapping for additional replays for battles
Revision ID: 533b503ed449
Revises: 5a5fa1c1dc8e
Create Date: 2014-03-27 20:41:56.503403
"""
# revision identifiers, used by Alembic.
revision = '533b503ed449'
down_revision = '5a5fa1c1dc8e'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add battle association and uploader name to existing replay rows.
    # Both columns are nullable so the migration is safe on populated tables.
    # NOTE(review): player_name is presumably the in-game name of the replay
    # owner -- confirm against application code.
    op.add_column('replay', sa.Column('associated_battle_id', sa.Integer(), nullable=True))
    op.add_column('replay', sa.Column('player_name', sa.String(length=100), nullable=True))
def downgrade():
    # Drop the columns in reverse order of the upgrade.
    op.drop_column('replay', 'player_name')
    op.drop_column('replay', 'associated_battle_id')
|
{
"content_hash": "fe1192f2e18690daf20149a57a41926a",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 91,
"avg_line_length": 25.166666666666668,
"alnum_prop": 0.7235099337748344,
"repo_name": "ceari/whyattend",
"id": "8c5be93f5f2862728583bdf750d97f01520a0af3",
"size": "604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "whyattend/alembic/versions/533b503ed449_mapping_for_additional_replays_for_.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9584"
},
{
"name": "HTML",
"bytes": "110189"
},
{
"name": "JavaScript",
"bytes": "38939"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "138633"
},
{
"name": "Ruby",
"bytes": "229"
},
{
"name": "Shell",
"bytes": "586"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter Tip.respond_to to an optional EmailField (null and blank allowed)."""

    dependencies = [
        ('bongo', '0003_auto_20141226_1717'),
    ]
    operations = [
        migrations.AlterField(
            model_name='tip',
            name='respond_to',
            field=models.EmailField(max_length=254, null=True, blank=True),
        ),
    ]
|
{
"content_hash": "9aa03bd99e570f7533387f8be001bb16",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 75,
"avg_line_length": 22.333333333333332,
"alnum_prop": 0.5970149253731343,
"repo_name": "BowdoinOrient/bongo",
"id": "8a62d3d6fffb63b275d23df333d0893edea64ce0",
"size": "426",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "bongo/apps/bongo/migrations/0004_auto_20150415_0049.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26609"
},
{
"name": "HTML",
"bytes": "20898"
},
{
"name": "JavaScript",
"bytes": "3005"
},
{
"name": "Python",
"bytes": "169382"
},
{
"name": "Shell",
"bytes": "2173"
}
],
"symlink_target": ""
}
|
import utils
class plugin_inject:
    """Handles the #inject directive.

    Adds a static reference member for the named service to the wrapped
    class and queues the matching out-of-line definition for insertion.
    """

    def __init__(self, parent, content, position, arguments):
        if len(arguments) != 1:
            raise Exception('Invalid arguments number for #inject: ' + str(len(arguments)))
        service = arguments[0]
        # Reuse the generated object name if this service was seen before.
        if service in parent.objects:
            obj_name = parent.objects[service]
        else:
            obj_name = utils.genObjectName(service)
            parent.objects[service] = obj_name
        pos, class_name = utils.getWrapperName(content, position)
        attribute = '\tstatic ' + service + '& ' + service.lower() + ';\n'
        tail = service + '& ' + class_name + '::' + service.lower() + ' = ' + obj_name + ';\n'
        parent.toInsert.append([position, attribute])
        parent.toInsert.append([-1, tail])
|
{
"content_hash": "7317cae674680bfa7352737e59a6fba8",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 100,
"avg_line_length": 39,
"alnum_prop": 0.5571095571095571,
"repo_name": "OlegGS/acpp",
"id": "0e81c32e453d38f21a07860b4a0f88f8249d8af6",
"size": "858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/plugin_inject.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1214"
},
{
"name": "Python",
"bytes": "14125"
}
],
"symlink_target": ""
}
|
"""Broadcast operators"""
from __future__ import absolute_import as _abs
from . import cpp as _cpp
def broadcast_to(data, shape):
    """Broadcast the src to the target shape.
    We follow the numpy broadcasting rule.
    See also https://docs.scipy.org/doc/numpy/user/basics.broadcasting.html
    Parameters
    ----------
    data : tvm.te.Tensor
        The input data
    shape : list or tuple
        The target shape to be broadcasted.
    Returns
    -------
    ret : tvm.te.Tensor
        The broadcasted tensor.
    """
    return _cpp.broadcast_to(data, shape)
def add(lhs, rhs):
    """Addition with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.add(lhs, rhs)
def subtract(lhs, rhs):
    """Subtraction with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.subtract(lhs, rhs)
def multiply(lhs, rhs):
    """Multiplication with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.multiply(lhs, rhs)
def divide(lhs, rhs):
    """Division with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.divide(lhs, rhs)
def floor_divide(lhs, rhs):
    """Floor division with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.floor_divide(lhs, rhs)
def mod(lhs, rhs):
    """Modulus with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.mod(lhs, rhs)
def floor_mod(lhs, rhs):
    """Floor modulus with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.floor_mod(lhs, rhs)
def maximum(lhs, rhs):
    """Take element-wise maximum of two tensors with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.maximum(lhs, rhs)
def minimum(lhs, rhs):
    """Take element-wise minimum of two tensors with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.minimum(lhs, rhs)
def power(lhs, rhs):
    """Power with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.power(lhs, rhs)
def left_shift(lhs, rhs):
    """Left shift with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.left_shift(lhs, rhs)
def right_shift(lhs, rhs):
    """Right shift with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.right_shift(lhs, rhs)
def greater(lhs, rhs):
    """Compute (lhs>rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.greater(lhs, rhs)
def less(lhs, rhs):
    """Compute (lhs<rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.less(lhs, rhs)
def equal(lhs, rhs):
    """Compute (lhs==rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.equal(lhs, rhs)
def not_equal(lhs, rhs):
    """Compute (lhs!=rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.not_equal(lhs, rhs)
def greater_equal(lhs, rhs):
    """Compute (lhs>=rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.greater_equal(lhs, rhs)
def less_equal(lhs, rhs):
    """Compute (lhs<=rhs) with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.less_equal(lhs, rhs)
def logical_and(lhs, rhs):
    """Compute element-wise logical and with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.logical_and(lhs, rhs)
def logical_or(lhs, rhs):
    """Compute element-wise logical or with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.logical_or(lhs, rhs)
def logical_xor(lhs, rhs):
    """Compute element-wise logical xor with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.logical_xor(lhs, rhs)
def bitwise_and(lhs, rhs):
    """Compute element-wise bitwise and with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.bitwise_and(lhs, rhs)
def bitwise_or(lhs, rhs):
    """Compute element-wise bitwise or with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.bitwise_or(lhs, rhs)
def bitwise_xor(lhs, rhs):
    """Compute element-wise bitwise xor with auto-broadcasting.
    Parameters
    ----------
    lhs : tvm.te.Tensor or Expr
        The left operand
    rhs : tvm.te.Tensor or Expr
        The right operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if both operands are Expr.
        Otherwise returns Tensor.
    """
    return _cpp.bitwise_xor(lhs, rhs)
def logical_not(data):
    """Compute element-wise logical not of data.
    Parameters
    ----------
    data : tvm.te.Tensor or Expr
        The input operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if the operand is Expr.
        Otherwise returns Tensor.
    """
    return _cpp.logical_not(data)
def bitwise_not(data):
    """Compute element-wise bitwise not of data.
    Parameters
    ----------
    data : tvm.te.Tensor or Expr
        The input operand
    Returns
    -------
    ret : tvm.te.Tensor or Expr
        Returns Expr if the operand is Expr.
        Otherwise returns Tensor.
    """
    return _cpp.bitwise_not(data)
|
{
"content_hash": "8b78b31bd596b8075a41eca9a1156ac5",
"timestamp": "",
"source": "github",
"line_count": 512,
"max_line_length": 75,
"avg_line_length": 21.265625,
"alnum_prop": 0.5798126377663483,
"repo_name": "Laurawly/tvm-1",
"id": "2b350ff817d993c7649e2adf66ea79644df029ff",
"size": "11673",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "python/tvm/topi/broadcast.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4093"
},
{
"name": "C",
"bytes": "351611"
},
{
"name": "C++",
"bytes": "11660999"
},
{
"name": "CMake",
"bytes": "228510"
},
{
"name": "Cuda",
"bytes": "16902"
},
{
"name": "Cython",
"bytes": "28979"
},
{
"name": "Go",
"bytes": "111527"
},
{
"name": "HTML",
"bytes": "2664"
},
{
"name": "Java",
"bytes": "199950"
},
{
"name": "JavaScript",
"bytes": "15305"
},
{
"name": "Makefile",
"bytes": "67149"
},
{
"name": "Objective-C",
"bytes": "24259"
},
{
"name": "Objective-C++",
"bytes": "87655"
},
{
"name": "Python",
"bytes": "16256580"
},
{
"name": "RenderScript",
"bytes": "1895"
},
{
"name": "Rust",
"bytes": "391076"
},
{
"name": "Shell",
"bytes": "228674"
},
{
"name": "TypeScript",
"bytes": "94385"
}
],
"symlink_target": ""
}
|
import threading
import unittest
import os
import sys
try:
import pydevconsole
except:
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import pydevconsole
from pydev_imports import xmlrpclib, SimpleXMLRPCServer
from pydev_localhost import get_localhost
# Py2 exposes raw_input(); Py3 renamed it to input().  Probe the builtin
# and remember which name to feed to the console under test.
try:
    raw_input
except NameError:
    raw_input_name = 'input'
else:
    raw_input_name = 'raw_input'
#=======================================================================================================================
# Test
#=======================================================================================================================
class Test(unittest.TestCase):
    """Integration test: pydevconsole XML-RPC server <-> client handshake."""

    def startClientThread(self, client_port):
        """Start a daemon thread serving the client-side XML-RPC endpoint.

        The returned thread carries two flags the test polls:
        ``requested_input`` (set when the console asks for stdin) and
        ``notified_finished`` (incremented per NotifyFinished call).
        """
        class ClientThread(threading.Thread):
            def __init__(self, client_port):
                threading.Thread.__init__(self)
                self.client_port = client_port

            def run(self):
                class HandleRequestInput:
                    def RequestInput(self):
                        client_thread.requested_input = True
                        return 'RequestInput: OK'

                    def NotifyFinished(self, *args, **kwargs):
                        client_thread.notified_finished += 1
                        return 1

                handle_request_input = HandleRequestInput()
                import pydev_localhost
                self.client_server = client_server = SimpleXMLRPCServer(
                    (pydev_localhost.get_localhost(), self.client_port), logRequests=False)
                client_server.register_function(handle_request_input.RequestInput)
                client_server.register_function(handle_request_input.NotifyFinished)
                client_server.serve_forever()

            def shutdown(self):
                # Intentionally a no-op: the server thread is a daemon and
                # dies with the process.  (The original body early-returned
                # past an unreachable self.client_server.shutdown() call;
                # the dead statement is removed here.)
                return

        client_thread = ClientThread(client_port)
        client_thread.requested_input = False
        client_thread.notified_finished = 0
        client_thread.setDaemon(True)
        client_thread.start()
        return client_thread

    def getFreeAddresses(self):
        """Grab two currently-free TCP ports.

        NOTE(review): the ports are released before use, so there is an
        inherent (small) race with other processes grabbing them.
        """
        import socket
        s = socket.socket()
        s.bind(('', 0))
        port0 = s.getsockname()[1]
        s1 = socket.socket()
        s1.bind(('', 0))
        port1 = s1.getsockname()[1]
        s.close()
        s1.close()
        return port0, port1

    def testServer(self):
        """End-to-end: start the console server, run code, verify RequestInput."""
        # Just making sure that the singleton is created in this thread.
        # Bare except kept deliberately: any import-time failure means
        # IPython is unusable here and the test is skipped.
        try:
            from pydev_ipython_console_011 import get_pydev_frontend
        except:
            sys.stderr.write('Skipped test because IPython could not be imported.')
            return
        get_pydev_frontend(get_localhost(), 0)

        client_port, server_port = self.getFreeAddresses()

        class ServerThread(threading.Thread):
            def __init__(self, client_port, server_port):
                threading.Thread.__init__(self)
                self.client_port = client_port
                self.server_port = server_port

            def run(self):
                import pydev_localhost
                print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port)
                pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)

        server_thread = ServerThread(client_port, server_port)
        server_thread.setDaemon(True)
        server_thread.start()

        client_thread = self.startClientThread(client_port)  #@UnusedVariable

        try:
            import time
            time.sleep(.3)  # let's give it some time to start the threads

            import pydev_localhost
            server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
            server.execLine("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))")
            server.execLine('class Foo:')
            server.execLine('    pass')
            server.execLine('')
            server.execLine('foo = Foo()')
            server.execLine('a = %s()' % raw_input_name)

            # Poll (with a 2s timeout) until the console asks for input.
            initial = time.time()
            while not client_thread.requested_input:
                if time.time() - initial > 2:
                    raise AssertionError('Did not get the return asked before the timeout.')
                time.sleep(.1)

            frame_xml = server.getFrame()
            # assertTrue replaces the long-deprecated assert_ alias
            # (removed in Python 3.12); message typo 'fid' -> 'find' fixed.
            self.assertTrue('RequestInput' in frame_xml, 'Did not find RequestInput in:\n%s' % (frame_xml,))
        finally:
            client_thread.shutdown()
#=======================================================================================================================
# main
#=======================================================================================================================
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "96b0a40a1b1973d1ba6da3d1567abf5c",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 143,
"avg_line_length": 37.75968992248062,
"alnum_prop": 0.5269965099568877,
"repo_name": "liangazhou/django-rdp",
"id": "10a62260c84e6394925da9f4fd185669309ff1f3",
"size": "4871",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "packages/eclipse/plugins/org.python.pydev_4.4.0.201510052309/pysrc/tests/test_check_pydevconsole.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "22310"
},
{
"name": "CSS",
"bytes": "5463444"
},
{
"name": "CoffeeScript",
"bytes": "83631"
},
{
"name": "Groff",
"bytes": "450"
},
{
"name": "HTML",
"bytes": "439341404"
},
{
"name": "JavaScript",
"bytes": "19561573"
},
{
"name": "PHP",
"bytes": "94083"
},
{
"name": "Perl",
"bytes": "9844"
},
{
"name": "Python",
"bytes": "8069"
},
{
"name": "Shell",
"bytes": "11480"
},
{
"name": "XSLT",
"bytes": "224454"
}
],
"symlink_target": ""
}
|
from rest_framework import serializers
from spiderhandler.models import QueryJob, QQInfo, WeiboInfo
class QueryRecordSerializer(serializers.ModelSerializer):
    """Serialize a QueryJob record: who asked, which accounts, and the outcome."""

    class Meta:
        model = QueryJob
        fields = (
            'createDate',
            'username',
            'forUserQQId',
            'forUserWeiboId',
            'forUserZhihuId',
            'queryState',
            'queryResult',
        )
class QQInfoSerializer(serializers.ModelSerializer):
    """Expose the public profile attributes of a QQInfo record."""

    class Meta:
        model = QQInfo
        # NOTE(review): the disabled field name 'firstQueryTime ' below
        # carries a stray trailing space -- fix before re-enabling.
        fields = ('qq', 'gender', 'age', 'isFamous', 'birthday', 'currentAddress', 'hometownAddress'
                  , 'marriage', 'bloodtype', 'career',)  # 'firstQueryTime ', 'latestQueryTime')
class WeiboInfoSerializer(serializers.ModelSerializer):
    """Expose the public profile attributes of a WeiboInfo record.

    Model fields, for reference (from spiderhandler.models.WeiboInfo):

    weiboId = models.CharField(max_length=20)
    gender = models.CharField(max_length=5, null=True)
    age = models.IntegerField(null=True)
    weiboNum = models.IntegerField(null=True)
    fansNum = models.IntegerField(null=True)
    followNum = models.IntegerField(null=True)
    address = models.CharField(max_length=40, null=True)
    vip = models.IntegerField(True)
    verified = models.IntegerField(null=True, default=-1)
    relationshipStatus = models.CharField(max_length=10, null=True, default=-1)
    tags = models.CharField(max_length=100, null=True)
    nickname = models.CharField(max_length=30, null=True)
    birthday = models.CharField(max_length=30, null=True, blank=True, default="1800-01-01")
    queryCount = models.IntegerField(default=1)
    recordTime = models.CharField(max_length=30, default="0000-00-00")
    """

    class Meta:
        model = WeiboInfo
        # BUG FIX: 'firstQueryTime ' had a trailing space; a name with a
        # trailing space can never match a model field, so DRF would fail
        # to resolve it when building the serializer.
        # NOTE(review): the model listing above shows no firstQueryTime
        # field at all (recordTime looks like the intended one) -- confirm
        # the field actually exists on WeiboInfo.
        fields = ('weiboId', 'gender', 'age', 'weiboNum', 'fansNum', 'followNum', 'birthday',
                  'address', 'vip', 'verified', 'relationshipStatus',
                  'tags', 'nickname', 'queryCount', 'firstQueryTime')  # , 'latestQueryTime')
|
{
"content_hash": "b3279d1ebdf3cf962a2efc2673b200f9",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 100,
"avg_line_length": 42.47727272727273,
"alnum_prop": 0.6655965757089353,
"repo_name": "zzhy1996/chase-or-not",
"id": "6e5daa024f01528b22cb010cf48f60d1858af4f6",
"size": "1869",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chaseornotwebapi/spiderhandler/queryrecords/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1150"
},
{
"name": "HTML",
"bytes": "8547"
},
{
"name": "Java",
"bytes": "1051"
},
{
"name": "Python",
"bytes": "59154"
},
{
"name": "Scala",
"bytes": "46977"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import pyrax
import os
import json
from docopt import docopt
USAGE = """Get some nodes from an application environment

Usage:
  get-nodes.py APP_NAME ENVIRONMENT ROLE
  get-nodes.py (-h | --help)
  get-nodes.py --version

Arguments:
  APP_NAME      The application namespace. Should be unique within a Public Cloud Account.
  ENVIRONMENT   The name of the environment (e.g. stg, prd)
  ROLE          A role name (e.g. web)

Options:
  -h --help     Show this screen.

Environment variables:
  OS_REGION     A Rackspace Public Cloud region [default: LON]
  OS_USERNAME   A Rackspace Public Cloud username
  OS_API_KEY    A Rackspace Public Cloud API key

"""
# Parse our CLI arguments; docopt prints USAGE and exits on invalid input.
arguments = docopt(USAGE, version='1.0.0')
# Set convenience variables from arguments/environment
app_name = arguments['APP_NAME']
environment_name = arguments['ENVIRONMENT']
role_name = arguments['ROLE']
# Authenticate against Rackspace Identity; OS_REGION falls back to LON.
pyrax.set_setting("identity_type", "rackspace")
pyrax.set_setting("region", os.environ.get('OS_REGION', "LON"))
pyrax.set_credentials(os.environ.get('OS_USERNAME'), os.environ.get('OS_API_KEY'))
# Set up some aliases
cs = pyrax.cloudservers
# Lazily filter our nodes on app, environment, and role metadata.
# Each key must be present AND match; nodes missing any key are skipped.
filtered = (node for node in cs.list() if
            "app" in node.metadata and
            node.metadata["app"] == app_name and
            "environment" in node.metadata and
            node.metadata["environment"] == environment_name and
            "role" in node.metadata and
            node.metadata["role"] == role_name)
# Build a JSON-friendly list of nodes
target_nodes = []
for node in filtered:
    target_nodes.append({
        "name": node.name,
        "id": node.id,
        "ip": node.accessIPv4,
        "metadata": node.metadata,
    })
# Pretty-print the selection as JSON on stdout.
print(json.dumps(target_nodes, indent=4, separators=(',', ': ')))
|
{
"content_hash": "a3b1765f55ed44187fc25316a457e1a1",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 98,
"avg_line_length": 30.140625,
"alnum_prop": 0.656298600311042,
"repo_name": "iskandar/windows-automation-demo",
"id": "7c2beb23680b7ed22986a7352b2c7f928679a9e4",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "get-nodes.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "381"
},
{
"name": "Groovy",
"bytes": "1697"
},
{
"name": "PowerShell",
"bytes": "40660"
},
{
"name": "Python",
"bytes": "17888"
},
{
"name": "Ruby",
"bytes": "2949"
}
],
"symlink_target": ""
}
|
"""Registrations for LinearOperator.matmul."""
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_algebra
from tensorflow.python.ops.linalg import linear_operator_block_diag
from tensorflow.python.ops.linalg import linear_operator_circulant
from tensorflow.python.ops.linalg import linear_operator_composition
from tensorflow.python.ops.linalg import linear_operator_diag
from tensorflow.python.ops.linalg import linear_operator_identity
from tensorflow.python.ops.linalg import linear_operator_lower_triangular
from tensorflow.python.ops.linalg import linear_operator_zeros
from tensorflow.python.ops.linalg import registrations_util
# By default, use a LinearOperatorComposition to delay the computation.
@linear_operator_algebra.RegisterMatmul(
    linear_operator.LinearOperator, linear_operator.LinearOperator)
def _matmul_linear_operator(linop_a, linop_b):
  """Generic matmul of two `LinearOperator`s.

  Fallback registration: returns a lazy `LinearOperatorComposition`
  of [linop_a, linop_b], propagating whatever structural hints can be
  deduced from the operands.
  """
  # `is_square` is a three-valued hint (True / False / None); only an
  # explicit False lets us conclude the product is singular etc.
  is_square = registrations_util.is_square(linop_a, linop_b)
  is_non_singular = None
  is_self_adjoint = None
  is_positive_definite = None
  if is_square:
    is_non_singular = registrations_util.combined_non_singular_hint(
        linop_a, linop_b)
  elif is_square is False:  # pylint:disable=g-bool-id-comparison
    is_non_singular = False
    is_self_adjoint = False
    is_positive_definite = False
  return linear_operator_composition.LinearOperatorComposition(
      operators=[linop_a, linop_b],
      is_non_singular=is_non_singular,
      is_self_adjoint=is_self_adjoint,
      is_positive_definite=is_positive_definite,
      is_square=is_square,
  )
# Identity
@linear_operator_algebra.RegisterMatmul(
    linear_operator_identity.LinearOperatorIdentity,
    linear_operator.LinearOperator)
def _matmul_linear_operator_identity_left(identity, linop):
  """I @ A = A: multiplying by the identity returns the other operand."""
  del identity
  return linop
@linear_operator_algebra.RegisterMatmul(
    linear_operator.LinearOperator,
    linear_operator_identity.LinearOperatorIdentity)
def _matmul_linear_operator_identity_right(linop, identity):
  """A @ I = A: multiplying by the identity returns the other operand."""
  del identity
  return linop
@linear_operator_algebra.RegisterMatmul(
    linear_operator_identity.LinearOperatorScaledIdentity,
    linear_operator_identity.LinearOperatorScaledIdentity)
def _matmul_linear_operator_scaled_identity(linop_a, linop_b):
  """Matmul of two ScaledIdentity `LinearOperators`.

  (aI)(bI) = (ab)I: the result is another scaled identity whose
  multiplier is the product of the two multipliers.
  """
  return linear_operator_identity.LinearOperatorScaledIdentity(
      num_rows=linop_a.domain_dimension_tensor(),
      multiplier=linop_a.multiplier * linop_b.multiplier,
      is_non_singular=registrations_util.combined_non_singular_hint(
          linop_a, linop_b),
      is_self_adjoint=registrations_util.combined_commuting_self_adjoint_hint(
          linop_a, linop_b),
      is_positive_definite=(
          registrations_util.combined_commuting_positive_definite_hint(
              linop_a, linop_b)),
      is_square=True)
# Zeros
@linear_operator_algebra.RegisterMatmul(
    linear_operator.LinearOperator,
    linear_operator_zeros.LinearOperatorZeros)
def _matmul_linear_operator_zeros_right(linop, zeros):
  """A @ 0 = 0; only the square case is supported."""
  if not zeros.is_square or not linop.is_square:
    raise ValueError("Matmul with non-square `LinearOperator`s or non-square "
                     "`LinearOperatorZeros` not supported at this time.")
  return zeros
@linear_operator_algebra.RegisterMatmul(
    linear_operator_zeros.LinearOperatorZeros,
    linear_operator.LinearOperator)
def _matmul_linear_operator_zeros_left(zeros, linop):
  """0 @ A = 0; only the square case is supported."""
  if not zeros.is_square or not linop.is_square:
    raise ValueError("Matmul with non-square `LinearOperator`s or non-square "
                     "`LinearOperatorZeros` not supported at this time.")
  return zeros
# Diag.
@linear_operator_algebra.RegisterMatmul(
linear_operator_diag.LinearOperatorDiag,
linear_operator_diag.LinearOperatorDiag)
def _matmul_linear_operator_diag(linop_a, linop_b):
return linear_operator_diag.LinearOperatorDiag(
diag=linop_a.diag * linop_b.diag,
is_non_singular=registrations_util.combined_non_singular_hint(
linop_a, linop_b),
is_self_adjoint=registrations_util.combined_commuting_self_adjoint_hint(
linop_a, linop_b),
is_positive_definite=(
registrations_util.combined_commuting_positive_definite_hint(
linop_a, linop_b)),
is_square=True)
@linear_operator_algebra.RegisterMatmul(
linear_operator_diag.LinearOperatorDiag,
linear_operator_identity.LinearOperatorScaledIdentity)
def _matmul_linear_operator_diag_scaled_identity_right(
linop_diag, linop_scaled_identity):
return linear_operator_diag.LinearOperatorDiag(
diag=linop_diag.diag * linop_scaled_identity.multiplier,
is_non_singular=registrations_util.combined_non_singular_hint(
linop_diag, linop_scaled_identity),
is_self_adjoint=registrations_util.combined_commuting_self_adjoint_hint(
linop_diag, linop_scaled_identity),
is_positive_definite=(
registrations_util.combined_commuting_positive_definite_hint(
linop_diag, linop_scaled_identity)),
is_square=True)
@linear_operator_algebra.RegisterMatmul(
linear_operator_identity.LinearOperatorScaledIdentity,
linear_operator_diag.LinearOperatorDiag)
def _matmul_linear_operator_diag_scaled_identity_left(
linop_scaled_identity, linop_diag):
return linear_operator_diag.LinearOperatorDiag(
diag=linop_diag.diag * linop_scaled_identity.multiplier,
is_non_singular=registrations_util.combined_non_singular_hint(
linop_diag, linop_scaled_identity),
is_self_adjoint=registrations_util.combined_commuting_self_adjoint_hint(
linop_diag, linop_scaled_identity),
is_positive_definite=(
registrations_util.combined_commuting_positive_definite_hint(
linop_diag, linop_scaled_identity)),
is_square=True)
@linear_operator_algebra.RegisterMatmul(
    linear_operator_diag.LinearOperatorDiag,
    linear_operator_lower_triangular.LinearOperatorLowerTriangular)
def _matmul_linear_operator_diag_tril(linop_diag, linop_triangular):
  """Matmul of diag @ lower-triangular as a `LinearOperatorLowerTriangular`.

  Left-multiplication by a diagonal matrix scales each row of the dense
  triangular matrix by the corresponding diagonal entry.
  """
  scaled_tril = linop_diag.diag[..., None] * linop_triangular.to_dense()
  # A triangular matrix is self-adjoint only when it is diagonal, in which
  # case it commutes with the diagonal factor, so the commuting hint is safe.
  self_adjoint = registrations_util.combined_commuting_self_adjoint_hint(
      linop_diag, linop_triangular)
  return linear_operator_lower_triangular.LinearOperatorLowerTriangular(
      tril=scaled_tril,
      is_non_singular=registrations_util.combined_non_singular_hint(
          linop_diag, linop_triangular),
      is_self_adjoint=self_adjoint,
      is_positive_definite=None,
      is_square=True)
@linear_operator_algebra.RegisterMatmul(
    linear_operator_lower_triangular.LinearOperatorLowerTriangular,
    linear_operator_diag.LinearOperatorDiag)
def _matmul_linear_operator_tril_diag(linop_triangular, linop_diag):
  """Matmul of lower-triangular @ diag as a `LinearOperatorLowerTriangular`.

  Right-multiplication by a diagonal matrix scales each column of the dense
  triangular matrix by the corresponding diagonal entry (broadcast over the
  last axis).
  """
  scaled_tril = linop_triangular.to_dense() * linop_diag.diag
  # A triangular matrix is self-adjoint only when it is diagonal, in which
  # case it commutes with the diagonal factor, so the commuting hint is safe.
  self_adjoint = registrations_util.combined_commuting_self_adjoint_hint(
      linop_diag, linop_triangular)
  return linear_operator_lower_triangular.LinearOperatorLowerTriangular(
      tril=scaled_tril,
      is_non_singular=registrations_util.combined_non_singular_hint(
          linop_diag, linop_triangular),
      is_self_adjoint=self_adjoint,
      is_positive_definite=None,
      is_square=True)
# Circulant.
# pylint: disable=protected-access
@linear_operator_algebra.RegisterMatmul(
    linear_operator_circulant._BaseLinearOperatorCirculant,
    linear_operator_circulant._BaseLinearOperatorCirculant)
def _matmul_linear_operator_circulant_circulant(linop_a, linop_b):
  """Matmul of two circulant operators via their spectra.

  When both operands are the same concrete circulant class, the product is
  again circulant with spectrum equal to the elementwise spectrum product.
  Otherwise we fall back to the generic matmul registration.
  """
  if not isinstance(linop_a, linop_b.__class__):
    # Different circulant flavors: defer to the generic rule.
    return _matmul_linear_operator(linop_a, linop_b)
  combined_spectrum = linop_a.spectrum * linop_b.spectrum
  non_singular = registrations_util.combined_non_singular_hint(
      linop_a, linop_b)
  self_adjoint = registrations_util.combined_commuting_self_adjoint_hint(
      linop_a, linop_b)
  positive_definite = (
      registrations_util.combined_commuting_positive_definite_hint(
          linop_a, linop_b))
  return linop_a.__class__(
      spectrum=combined_spectrum,
      is_non_singular=non_singular,
      is_self_adjoint=self_adjoint,
      is_positive_definite=positive_definite,
      is_square=True)
# pylint: enable=protected-access
# Block Diag
@linear_operator_algebra.RegisterMatmul(
    linear_operator_block_diag.LinearOperatorBlockDiag,
    linear_operator_block_diag.LinearOperatorBlockDiag)
def _matmul_linear_operator_block_diag_block_diag(linop_a, linop_b):
  """Blockwise matmul of two `LinearOperatorBlockDiag` operators.

  Multiplies corresponding blocks pairwise; the result is block diagonal
  with one product block per input block pair.
  """
  block_products = [
      left.matmul(right)
      for left, right in zip(linop_a.operators, linop_b.operators)]
  return linear_operator_block_diag.LinearOperatorBlockDiag(
      operators=block_products,
      is_non_singular=registrations_util.combined_non_singular_hint(
          linop_a, linop_b),
      # A product of self-adjoint block diagonal matrices is not, in general,
      # self-adjoint, and a product of positive-definite ones is not, in
      # general, positive-definite, so leave both hints unset.
      is_self_adjoint=None,
      is_positive_definite=None,
      is_square=True)
|
{
"content_hash": "8da99ea8b93e066f89a6da4e3c34c742",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 78,
"avg_line_length": 40.31277533039648,
"alnum_prop": 0.7378428587039668,
"repo_name": "tensorflow/tensorflow-experimental_link_static_libraries_once",
"id": "662a5b5e6f06d23ca0ee01b6ee675ba76f4f7a51",
"size": "9840",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/linalg/matmul_registrations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "36962"
},
{
"name": "C",
"bytes": "1343737"
},
{
"name": "C#",
"bytes": "13584"
},
{
"name": "C++",
"bytes": "123969891"
},
{
"name": "CMake",
"bytes": "182027"
},
{
"name": "Cython",
"bytes": "5003"
},
{
"name": "Dockerfile",
"bytes": "416070"
},
{
"name": "Go",
"bytes": "2095490"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1074471"
},
{
"name": "Jupyter Notebook",
"bytes": "789401"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "11067751"
},
{
"name": "Makefile",
"bytes": "2760"
},
{
"name": "Objective-C",
"bytes": "169288"
},
{
"name": "Objective-C++",
"bytes": "294177"
},
{
"name": "Pawn",
"bytes": "5552"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "42585406"
},
{
"name": "Roff",
"bytes": "5034"
},
{
"name": "Ruby",
"bytes": "9199"
},
{
"name": "Shell",
"bytes": "620507"
},
{
"name": "Smarty",
"bytes": "89545"
},
{
"name": "SourcePawn",
"bytes": "14577"
},
{
"name": "Starlark",
"bytes": "7486225"
},
{
"name": "Swift",
"bytes": "78435"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
}
|
"""
clint.textui.prompt
~~~~~~~~~~~~~~~~~~~
Module for simple interactive prompts handling
"""
from __future__ import absolute_import, print_function
from re import match, I
from .core import puts
from .colored import yellow
from .validators import RegexValidator, OptionValidator
# Python 3 removed raw_input in favor of input; alias it so the rest of
# this module can call raw_input on both Python 2 and Python 3.
try:
    raw_input
except NameError:
    raw_input = input
def yn(prompt, default='y', batch=False):
    """Ask a yes/no question and report whether the answer matches *default*.

    Returns True when the user accepts the default (empty reply) or answers
    with the default choice; False when the answer is the opposite choice.
    In batch mode the prompt is printed and the default is auto-accepted.
    """
    # Sanity check: anything other than 'y'/'n' falls back to 'y'.
    if default not in ('y', 'n'):
        default = 'y'
    # The capitalized letter in the choice box marks the default.
    choicebox = '[Y/n]' if default == 'y' else '[y/N]'
    full_prompt = '{0} {1} '.format(prompt, choicebox)
    # Keep asking until we get an empty reply or a yes/no variant.
    while True:
        if batch:
            # Auto-reply with the default input.
            print(full_prompt)
            answer = ''
        else:
            answer = raw_input(full_prompt).strip()
        if answer == '':
            # Empty reply means the default choice was accepted.
            return True
        if match('y(?:es)?', answer, I):
            return default == 'y'
        if match('n(?:o)?', answer, I):
            return default == 'n'
def query(prompt, default='', validators=None, batch=False):
    """Prompt for free-form input and run it through *validators*.

    :param prompt: text shown to the user; a trailing space is appended
        when missing, and the default (if any) is shown in brackets.
    :param default: value used when the user enters nothing.
    :param validators: list of callables applied in order to the input;
        each may transform the value or raise to reject it. Defaults to a
        single non-empty-input validator.
    :param batch: when True, print the prompt and use '' as the input
        instead of reading from stdin.
    :return: the (possibly transformed) validated input.
    """
    # Set the nonempty validator as default.
    if validators is None:
        validators = [RegexValidator(r'.+')]
    # Ensure the prompt ends with a space. The original used
    # `prompt[-1] is not ' '`, which is an identity comparison on a string
    # (unreliable, a SyntaxWarning on modern CPython) and raised IndexError
    # for an empty prompt; endswith handles both correctly.
    if not prompt.endswith(' '):
        prompt += ' '
    if default:
        prompt += '[' + default + '] '
    # Keep asking until the input validates.
    while True:
        if not batch:
            user_input = raw_input(prompt).strip() or default
        else:
            # Batch mode: auto-reply with empty input.
            print(prompt)
            user_input = ''
        try:
            for validator in validators:
                user_input = validator(user_input)
            return user_input
        except Exception as e:
            # Exception.message was removed in Python 3; str(e) is portable.
            puts(yellow(str(e)))
def options(prompt, options, default=None, batch=False):
    '''Prompt the user to pick one entry from a list of options.

    :param prompt: text displayed above the option list.
    :param options: either a list of strings, presented as a numbered menu::

            prompt:
            [1] this is the first string
            [2] this is the second string
            [3] this is the third string

        or a list of dicts with keys::

            {'selector': 'what the user enters to select the option',
             'prompt': 'the string displayed for the option',
             'return': 'value returned to the caller (optional; defaults '
                       'to the selector)'}

    :param default: the default selector (if desired).
    :param batch: when True, print the prompt and auto-reply with the
        default instead of reading from stdin.
    :return: the 'return' value of the chosen option.
    '''
    # Normalize the options into selector/prompt/return dicts, collecting
    # the valid selectors and the selector -> return-value mapping.
    validator_list = []
    return_dict = {}
    if isinstance(options[0], dict):
        for item in options:
            item['selector'] = str(item['selector'])
            item['prompt'] = str(item['prompt'])
            if 'return' not in item:
                item['return'] = item['selector']
            validator_list.append(item['selector'])
            return_dict[item['selector']] = item['return']
    else:
        # Plain strings: number them 1..N and return the 1-based index.
        options_strings = options
        options = []
        for key, opt in enumerate(options_strings):
            item = {
                'selector': str(key + 1),
                'prompt': str(opt),
                'return': key + 1,
            }
            return_dict[item['selector']] = item['return']
            validator_list.append(item['selector'])
            options.append(item)
    validators = [OptionValidator(validator_list)]
    # Build the prompt: one line per option, then the default in brackets.
    prompt += '\n'
    for o in options:
        prompt += '[{selector}] {prompt}\n'.format(**o)
    prompt += '\n'
    if default:
        prompt += '[' + default + '] '
    # Keep asking until the input validates.
    while True:
        if not batch:
            user_input = raw_input(prompt).strip() or default
        else:
            # Batch mode: auto-reply with the default input.
            print(prompt)
            user_input = ''
        try:
            for validator in validators:
                user_input = validator(user_input)
            # Map the validated selector to its configured return value.
            return return_dict[user_input]
        except Exception as e:
            # Exception.message was removed in Python 3; str(e) is portable.
            puts(yellow(str(e)))
|
{
"content_hash": "7766939be6e1c14db5f5a032902b1ef2",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 125,
"avg_line_length": 29.261111111111113,
"alnum_prop": 0.5657869755078793,
"repo_name": "dataworkshop/prerequisite",
"id": "7995e972f8152155068209b46e194a5dac74e420",
"size": "5291",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "clint/textui/prompt.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "81030"
}
],
"symlink_target": ""
}
|
from openerp import fields, models
class MailMessageSubtype(models.Model):
    """ Class holding subtype definition for messages. Subtypes allow to tune
        the follower subscription, allowing only some subtypes to be pushed
        on the Wall. """
    _name = 'mail.message.subtype'
    _description = 'Message subtypes'
    # Subtypes are listed by sequence first, then by creation order.
    _order = 'sequence, id'

    # Human-readable label of the subtype.
    name = fields.Char(
        'Message Type', required=True, translate=True,
        help='Message subtype gives a more precise type on the message, '
             'especially for system notifications. For example, it can be '
             'a notification related to a new record (New), or to a stage '
             'change in a process (Stage change). Message subtypes allow to '
             'precisely tune the notifications the user want to receive on its wall.')
    # Text appended to messages posted with this subtype (falls back to name).
    description = fields.Text(
        'Description', translate=True,
        help='Description that will be added in the message posted for this '
             'subtype. If void, the name will be added instead.')
    # Restricts visibility of messages with this subtype to employees.
    internal = fields.Boolean(
        'Internal Only',
        help='Messages with internal subtypes will be visible only by employees, aka members of base_user group')
    # Parent subtype used for automatic subscription propagation; note the
    # help text itself says the field name is misleading.
    parent_id = fields.Many2one(
        'mail.message.subtype', string='Parent', ondelete='set null',
        help='Parent subtype, used for automatic subscription. This field is not '
             'correctly named. For example on a project, the parent_id of project '
             'subtypes refers to task-related subtypes.')
    # Name of the field linking the related model for auto-subscription.
    relation_field = fields.Char(
        'Relation field',
        help='Field used to link the related model to the subtype model when '
             'using automatic subscription on a related document. The field '
             'is used to compute getattr(related_document.relation_field).')
    res_model = fields.Char('Model', help="Model the subtype applies to. If False, this subtype applies to all models.")
    # Whether new followers get this subtype activated automatically.
    default = fields.Boolean('Default', default=True, help="Activated by default when subscribing.")
    sequence = fields.Integer('Sequence', default=1, help="Used to order subtypes.")
    # Hidden subtypes do not appear in the follower subscription options.
    hidden = fields.Boolean('Hidden', help="Hide the subtype in the follower options")
|
{
"content_hash": "03793dd7e26d09762821beb460064a52",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 120,
"avg_line_length": 57.12820512820513,
"alnum_prop": 0.6741472172351886,
"repo_name": "vileopratama/vitech",
"id": "0f2316a19b710262d2d0a6049b99e0c0d4794689",
"size": "2253",
"binary": false,
"copies": "44",
"ref": "refs/heads/master",
"path": "src/addons/mail/models/mail_message_subtype.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "9611"
},
{
"name": "CSS",
"bytes": "2125999"
},
{
"name": "HTML",
"bytes": "252393"
},
{
"name": "Java",
"bytes": "1840167"
},
{
"name": "JavaScript",
"bytes": "6176224"
},
{
"name": "Makefile",
"bytes": "19072"
},
{
"name": "Mako",
"bytes": "7659"
},
{
"name": "NSIS",
"bytes": "16782"
},
{
"name": "Python",
"bytes": "9438805"
},
{
"name": "Ruby",
"bytes": "220"
},
{
"name": "Shell",
"bytes": "22312"
},
{
"name": "Vim script",
"bytes": "406"
},
{
"name": "XSLT",
"bytes": "11489"
}
],
"symlink_target": ""
}
|
import logging
import sys
# Separated from util to allow test-config tool to run in under 2 seconds
def get_logger(name):
    """Return an INFO-level logger named *name* that writes to stderr.

    The handler formats records as ``HH:MM:SS [LEVEL] [name] message`` and
    propagation is disabled so messages are not duplicated by ancestor
    loggers.

    :param name: logger name passed to ``logging.getLogger``.
    :return: the configured ``logging.Logger`` instance.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    # logging.getLogger returns the same instance for the same name, so
    # guard against attaching a duplicate handler (which would print every
    # record twice) when this function is called repeatedly.
    if not logger.handlers:
        ch = logging.StreamHandler(sys.stderr)
        ch.setLevel(logging.INFO)
        formatter = logging.Formatter(fmt='%(asctime)s [%(levelname)s] '
                                          '[%(name)s] %(message)s',
                                      datefmt='%H:%M:%S')
        ch.setFormatter(formatter)
        logger.addHandler(ch)
    logger.propagate = False
    return logger
|
{
"content_hash": "0919155ca2653ebf23fbb187d56aa213",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 73,
"avg_line_length": 32.88235294117647,
"alnum_prop": 0.6100178890876565,
"repo_name": "cloudify-cosmo/cloudify-system-tests",
"id": "0fa1d174eb9e4e266d31f23e81154721d1ac242f",
"size": "559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cosmo_tester/framework/logger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "487590"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
}
|
import sys
import os.path
# Compatibility shims so the test module runs unchanged on py26 through py3:
# pick the right unittest implementation and backfill the py32+ assertion
# method names onto older TestCase classes.
if sys.version_info[0] == 2 and sys.version_info[1] == 6:
    # py26 needs backport unittest2
    import unittest2 as unittest
else:
    import unittest
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
    # py27 and py31 assertRaisesRegexp was renamed in py32 to assertRaisesRegex
    unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
    # py27 and py31 assertRegexpMatches was renamed in py32 to assertRegex
    unittest.TestCase.assertRegex = unittest.TestCase.assertRegexpMatches
from testlink import TestlinkAPIGeneric, TestLinkHelper
from testlink.testlinkerrors import TLResponseError
# example text file attachment = this python file
# why not using os.path.realpath(__file__)
# -> cause __file__ could be compiled python file *.pyc, if the test run is
# repeated without changing the test code
ATTACHMENT_EXAMPLE_TEXT= os.path.join(os.path.dirname(__file__),
'testlinkapi_generic_online_test.py')
class TestLinkAPIOnlineTestCase(unittest.TestCase):
    """ TestCases for TestlinkAPIClient - interacts with a TestLink Server.
    works with the example project NEW_PROJECT_API (see TestLinkExample.py)

    Improvements over the original version:
    - attachment files are opened with context managers so the file handles
      are always closed, even when the assertion fails;
    - the version regex uses a raw string (non-raw '\\d' is an invalid
      escape and a DeprecationWarning on modern Python).
    """

    def setUp(self):
        self.client = TestLinkHelper().connect(TestlinkAPIGeneric)

#    def tearDown(self):
#        pass

    def test_checkDevKey(self):
        response = self.client.checkDevKey()
        self.assertEqual(True, response)

    def test_checkDevKey_unknownKey(self):
        with self.assertRaisesRegex(TLResponseError, '2000.*invalid'):
            self.client.checkDevKey(devKey='unknownKey')

    def test_sayHello(self):
        response = self.client.sayHello()
        self.assertEqual('Hello!', response)

    def test_repeat(self):
        response = self.client.repeat('Yellow Submarine')
        self.assertEqual('You said: Yellow Submarine', response)

    def test_about(self):
        response = self.client.about()
        self.assertIn('Testlink API', response)

    def test_doesUserExist_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '10000.*Big Bird'):
            self.client.doesUserExist('Big Bird')

    def test_createTestProject_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7001.*Empty name'):
            self.client.createTestProject(testprojectname='',
                                          testcaseprefix='P40000711')

    def test_getProjects(self):
        response = self.client.getProjects()
        self.assertIsNotNone(response)

    def test_createTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7011.*40000712'):
            self.client.createTestPlan('plan 40000711', 'project 40000712')

    def test_createTestSuite_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.createTestSuite(40000711, 'suite 40000712', 'detail 40000713')

    def test_createTestCase_unknownID(self):
        tc_steps = []
        with self.assertRaisesRegex(TLResponseError, '7000.*40000713'):
            self.client.createTestCase('case 40000711', 40000712, 40000713,
                                       'Big Bird', 'summary 40000714', tc_steps)

    def test_getBuildsForTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getBuildsForTestPlan(40000711)

    def test_getFirstLevelTestSuitesForTestProject_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getFirstLevelTestSuitesForTestProject(40000711)

    def test_getFullPath_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, 'getFullPath.*234'):
            self.client.getFullPath('40000711')

    def test_getLastExecutionResult_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getLastExecutionResult(40000711, testcaseid=40000712)

    def test_getLatestBuildForTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getLatestBuildForTestPlan(40000711)

    def test_getProjectTestPlans_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getProjectTestPlans(40000711)

    def test_getProjectPlatforms_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getProjectPlatforms(40000711)

    def test_getTestCase_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5000.*40000711'):
            self.client.getTestCase(testcaseid=40000711)

    def test_getTestCase_unknownExternalID(self):
        with self.assertRaisesRegex(TLResponseError, '5040.*GPROAPI-40000711'):
            self.client.getTestCase(testcaseexternalid='GPROAPI-40000711')

    def test_getTestCaseAttachments_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5000.*40000711'):
            self.client.getTestCaseAttachments(testcaseid=40000711)

    def test_getTestCaseCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getTestCaseCustomFieldDesignValue(
                'TC-40000712', 1, 40000711, 'a_field', details='full')

    def test_getTestCaseIDByName_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5030.*Cannot find'):
            self.client.getTestCaseIDByName('Big Bird')

    def test_getTestCasesForTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTestCasesForTestPlan(40000711)

    def test_getTestCasesForTestSuite_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '8000.*40000711'):
            self.client.getTestCasesForTestSuite(40000711)

    def test_getTestPlanByName_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7011.*40000711'):
            self.client.getTestPlanByName('project 40000711', 'plan 40000712')

    def test_getTestPlanPlatforms_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTestPlanPlatforms(40000711)

    def test_getTestProjectByName_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7011.*40000711'):
            self.client.getTestProjectByName('project 40000711')

    def test_getTestSuiteByID_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '8000.*40000711'):
            self.client.getTestSuiteByID(40000711)

    def test_getTestSuitesForTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTestSuitesForTestPlan(40000711)

    def test_getTestSuitesForTestSuite_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '8000.*40000711'):
            self.client.getTestSuitesForTestSuite(40000711)

    def test_getTotalsForTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTotalsForTestPlan(40000711)

    def test_createBuild_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.createBuild(40000711, 'Build 40000712', buildnotes='note 40000713')

    def test_reportTCResult_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5000.*40000711'):
            self.client.reportTCResult(40000712, 'p', testcaseid=40000711,
                                       buildname='build 40000713', notes='note 40000714')

    def test_uploadExecutionAttachment_unknownID(self):
        # Context manager guarantees the attachment file is closed even
        # when the expected TLResponseError is raised.
        with open(os.path.realpath(__file__), 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '6004.*40000712'):
                self.client.uploadExecutionAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_createPlatform_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7011.*40000711'):
            self.client.createPlatform('Project 40000711', 'Platform 40000712',
                                       notes='note 40000713')

    def test_addPlatformToTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.addPlatformToTestPlan(40000711, 'Platform 40000712')

    def test_removePlatformFromTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.removePlatformFromTestPlan(40000711, 'Platform 40000712')

    def test_addTestCaseToTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.addTestCaseToTestPlan(40000711, 40000712, 'N-40000713', 1)

    def test_updateTestCase_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5040.*N-40000711'):
            self.client.updateTestCase('N-40000711', version=1)

    def test_createTestCaseSteps_unknownID(self):
        steps = [{'actions' : "Step action 6 -b added by updateTestCase" ,
                  'expected_results' : "Step result 6 - b added",
                  'step_number' : 6, 'execution_type' : 1}]
        with self.assertRaisesRegex(TLResponseError, '5040.*N-40000711'):
            self.client.createTestCaseSteps('update', steps,
                                            testcaseexternalid='N-40000711', version=1)

    def test_deleteTestCaseSteps_unknownID(self):
        steps = [2, 8]
        with self.assertRaisesRegex(TLResponseError, '5040.*N-40000711'):
            self.client.deleteTestCaseSteps('N-40000711', steps, version=1)

    def test_uploadRequirementSpecificationAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '6004.*40000712'):
                self.client.uploadRequirementSpecificationAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_uploadRequirementAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '6004.*40000712'):
                self.client.uploadRequirementAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_uploadTestProjectAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '7000.*40000712'):
                self.client.uploadTestProjectAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_uploadTestSuiteAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '8000.*40000712'):
                self.client.uploadTestSuiteAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_uploadTestCaseAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '5000.*testcaseid'):
                self.client.uploadTestCaseAttachment(attachment_file, 40000712,
                            title='title 40000713', description='descr. 40000714')

    def test_uploadAttachment_unknownID(self):
        with open(ATTACHMENT_EXAMPLE_TEXT, 'r') as attachment_file:
            with self.assertRaisesRegex(TLResponseError, '6004.*Invalid Foreign Key ID'):
                self.client.uploadAttachment(attachment_file, '0000', 'nodes_hierarchy',
                            title='title 40000713', description='descr. 40000714')

    def test_testLinkVersion(self):
        response = self.client.testLinkVersion()
        # Raw string: '\d' in a non-raw literal is an invalid escape.
        self.assertRegex(response, r'\d*\.\d*\.\d*')

    def test_getUserByLogin_unknownKey(self):
        with self.assertRaisesRegex(TLResponseError, '10000.*User Login'):
            self.client.getUserByLogin('unknownUser')

    def test_getUserByID_unknownKey(self):
        with self.assertRaisesRegex(TLResponseError, 'NO_USER_BY_ID_LOGIN.*User with DB ID'):
            self.client.getUserByID(40000711)

#    def test_setTestMode(self):
#        response = self.client.setTestMode(True)
#        self.assertTrue(response)
#        response = self.client.setTestMode(False)
#        self.assertTrue(response)

    def test_deleteExecution_unknownKey(self):
        try:
            response = self.client.deleteExecution(40000711)
            # case: TL configuration allows deletion of executions
            # response returns Success, even if executionID is unkown
            self.assertEqual([{'status': True, 'message': 'Success!', 'id': 40000711,
                               'operation': 'deleteExecution'}], response)
        except TLResponseError as tl_err:
            # case: TL configuration does not allow deletion of executions
            # Expects: 232: Configuration does not allow delete executions
            self.assertEqual(232, tl_err.code)

    def test_setTestCaseExecutionType_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000712'):
            self.client.setTestCaseExecutionType('N-40000711', 1, 40000712, 1)

    def test_assignRequirements_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000712'):
            self.client.assignRequirements('N-40000711', 40000712,
                    [{'req_spec' : 40000713, 'requirements' : [40000714, 40000717]},
                     {'req_spec' : 4723, 'requirements' : [4725]}])

    def test_getExecCountersByBuild_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getExecCountersByBuild(40000711)

    def test_getTestCaseCustomFieldExecutionValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getTestCaseCustomFieldExecutionValue(
                'cf_full', '40000711', 1, '715', '40000713')

    def test_getTestCaseCustomFieldTestPlanDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getTestCaseCustomFieldTestPlanDesignValue(
                'cf_full', '40000711', 1, '40000713', '615')

    def test_updateTestCaseCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.updateTestCaseCustomFieldDesignValue(
                'TC-40000712', 1, 40000711, {'cf_field1' : 'value1',
                                             'cf_field2' : 'value2'})

    def test_getTestSuiteCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getTestSuiteCustomFieldDesignValue(
                'cf_full', 40000711, 40000713)

    def test_getTestPlanCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getTestPlanCustomFieldDesignValue(
                'cf_full', 40000711, 40000712)

    def test_getReqSpecCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getReqSpecCustomFieldDesignValue(
                'cf_full', 40000711, 4732)

    def test_getRequirementCustomFieldDesignValue_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getRequirementCustomFieldDesignValue(
                'cf_full', 40000711, 4734)

    def test_assignTestCaseExecutionTask_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.assignTestCaseExecutionTask('username', 40000711, 'TC-40000712',
                                                    buildname='build 40000713',
                                                    platformname='platform 40000714')

    def test_getTestCaseBugs_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTestCaseBugs(40000711, testcaseexternalid='TC-40000712',
                                        buildname='build 40000713',
                                        platformname='platform 40000714')

    def test_getTestCaseAssignedTester_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.getTestCaseAssignedTester(40000711, 'TC-40000712',
                                                  buildname='build 40000713',
                                                  platformname='platform 40000714')

    def test_unassignTestCaseExecutionTask_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.unassignTestCaseExecutionTask(40000711, 'TC-40000712',
                                                      buildname='build 40000713',
                                                      platformname='platform 40000714',
                                                      user='username', action='unassignOne')

    def test_getProjectKeywords_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '7000.*40000711'):
            self.client.getProjectKeywords(40000711)

    def test_getTestCaseKeywords_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5000.*40000712'):
            self.client.getTestCaseKeywords(testcaseid=40000712)

    def test_getTestCaseKeywords_unknownID_external(self):
        with self.assertRaisesRegex(TLResponseError, '5040.*TC-40000712'):
            self.client.getTestCaseKeywords(testcaseexternalid='TC-40000712')

    def test_deleteTestPlan_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '3000.*40000711'):
            self.client.deleteTestPlan(40000711)

    # test might fail during Travis test, cause used TestLink demo application
    # represents still a 1.9.13 dev state from 26/12/14
    # the keyword add method are added later and will be changed with 1.9.14
    # the interface (see TL Mantis Task 6934)
    @unittest.expectedFailure
    def test_addTestCaseKeywords_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5040.*TC-40000712'):
            self.client.addTestCaseKeywords('TC-40000712',
                                            ['KeyWord01', 'KeyWord03'])

    # test might fail during Travis test, cause used TestLink demo application
    # represents still a 1.9.13 dev state from 26/12/14
    # the keyword remove method are added later and will be changed with 1.9.14
    # the interface (see TL Mantis Task 6907)
    @unittest.expectedFailure
    def test_removeTestCaseKeywords_unknownID(self):
        with self.assertRaisesRegex(TLResponseError, '5040.*TC-40000712'):
            self.client.removeTestCaseKeywords('TC-40000712',
                                               ['KeyWord01'])
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
{
"content_hash": "f72a09dce26d768d56ce1c4c78c8d7d6",
"timestamp": "",
"source": "github",
"line_count": 398,
"max_line_length": 98,
"avg_line_length": 50.46733668341709,
"alnum_prop": 0.6519466294931793,
"repo_name": "savagecm/TestLink-API-Python-client",
"id": "4c030fba425bf057ebb71be46eb315ccd2e339a4",
"size": "21410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/utest-online/testlinkapi_generic_online_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "351302"
},
{
"name": "RobotFramework",
"bytes": "7384"
},
{
"name": "Shell",
"bytes": "7609"
}
],
"symlink_target": ""
}
|
"""Implement Gerrit Code Review API"""
|
{
"content_hash": "c1432d55ca7ced737f44ea4c8aff4bcc",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 38,
"avg_line_length": 39,
"alnum_prop": 0.717948717948718,
"repo_name": "JcDelay/pycr",
"id": "4ebeb8109abce34cc592f12fb3da146a8bcdd514",
"size": "39",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libpycr/gerrit/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "136454"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.core.management import call_command
from django.db import connection
from django.test import TestCase
from tenant_schemas.utils import get_public_schema_name, get_tenant_model
ALLOWED_TEST_DOMAIN = '.test.com'
class TenantTestCase(TestCase):
    """Django TestCase that runs every test inside a dedicated tenant schema.

    setUpClass migrates the shared (public) schema, creates a tenant with
    schema_name 'test' at domain 'tenant.test.com', and switches the DB
    connection to it; tearDownClass switches back, deletes the tenant and
    drops its schema.
    """

    @classmethod
    def add_allowed_test_domain(cls):
        # ALLOWED_HOSTS is a special setting of Django setup_test_environment so we can't modify it with helpers
        if ALLOWED_TEST_DOMAIN not in settings.ALLOWED_HOSTS:
            settings.ALLOWED_HOSTS += [ALLOWED_TEST_DOMAIN]

    @classmethod
    def remove_allowed_test_domain(cls):
        # Undo add_allowed_test_domain (no-op if the domain is absent).
        if ALLOWED_TEST_DOMAIN in settings.ALLOWED_HOSTS:
            settings.ALLOWED_HOSTS.remove(ALLOWED_TEST_DOMAIN)

    @classmethod
    def setUpClass(cls):
        # Shared apps must be migrated before any tenant can be created.
        cls.sync_shared()
        cls.add_allowed_test_domain()
        tenant_domain = 'tenant.test.com'
        cls.tenant = get_tenant_model()(domain_url=tenant_domain, schema_name='test')
        cls.tenant.save(verbosity=0)  # todo: is there any way to get the verbosity from the test command here?
        # Route all subsequent DB access through the test tenant's schema.
        connection.set_tenant(cls.tenant)

    @classmethod
    def tearDownClass(cls):
        connection.set_schema_to_public()
        cls.tenant.delete()
        cls.remove_allowed_test_domain()
        # Drop the tenant schema itself; tenant.delete() does not remove it.
        cursor = connection.cursor()
        cursor.execute('DROP SCHEMA IF EXISTS test CASCADE')

    @classmethod
    def sync_shared(cls):
        # Apply migrations to the public schema only, quietly.
        call_command('migrate_schemas',
                     schema_name=get_public_schema_name(),
                     interactive=False,
                     verbosity=0)
class FastTenantTestCase(TenantTestCase):
    """Faster variant of TenantTestCase that reuses an existing test tenant.

    Instead of creating and destroying the tenant (and its schema) around
    every test class, the tenant is looked up first and only created on the
    first run; tearDownClass intentionally does not delete it.
    """

    @classmethod
    def setUpClass(cls):
        cls.sync_shared()
        cls.add_allowed_test_domain()
        tenant_domain = 'tenant.test.com'
        TenantModel = get_tenant_model()
        try:
            cls.tenant = TenantModel.objects.get(domain_url=tenant_domain,
                                                 schema_name='test')
        except TenantModel.DoesNotExist:
            # First run: create the shared test tenant. Catching the specific
            # DoesNotExist (instead of a bare except) lets real errors such
            # as database failures propagate instead of being masked.
            cls.tenant = TenantModel(domain_url=tenant_domain, schema_name='test')
            cls.tenant.save(verbosity=0)
        connection.set_tenant(cls.tenant)

    @classmethod
    def tearDownClass(cls):
        # Keep the tenant and its schema for the next run; only restore the
        # connection and the ALLOWED_HOSTS modification.
        connection.set_schema_to_public()
        cls.remove_allowed_test_domain()
|
{
"content_hash": "11f876ea00dd2f97de11f9d690124937",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 112,
"avg_line_length": 33.01449275362319,
"alnum_prop": 0.6575943810359964,
"repo_name": "mcanaves/django-tenant-schemas",
"id": "d7a92f8e3238d5e9956a23cf7a6f9844347ac280",
"size": "2278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tenant_schemas/test/cases.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "106800"
}
],
"symlink_target": ""
}
|
import sys
import os
import shutil
import glob
# Debian control file skeleton, filled in per package via str.format().
control_skeleton = '''
Maintainer: Albert Casals <skarbat@gmail.com>
Section: others
Package: {pkg_name}
Version: {pkg_version}
Conflicts: libqt5all-dev (<= 5.7-1)
Architecture: armhf
Depends: debconf (>= 0.5.00), {pkg_depends}
Priority: optional
Description: {pkg_description}
'''

# postinst: create the cross-toolchain directory layout and symlink its
# expected tool names to the distro's native compilers.
# Fixed: the mkdir line previously read "mkdir -p mkdir -p /opt/...", which
# also created a spurious "mkdir" directory in the current working directory.
postinst_script = '''
#!/bin/bash
case "$1" in
configure)
mkdir -p /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin/
ln -sf /usr/bin/gcc /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin/arm-linux-gnueabihf-gcc
ln -sf /usr/bin/g++ /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin/arm-linux-gnueabihf-g++
ln -sf /usr/bin/objcopy /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin/arm-linux-gnueabihf-objcopy
ln -sf /usr/bin/strip /opt/rpi-tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin/arm-linux-gnueabihf-strip
;;
esac
#DEBHELPER#
exit 0
'''

# postrm: undo what postinst created when the package is removed.
postrm_script = '''
#!/bin/bash
case "$1" in
remove)
rm -f /usr/bin/qmake
rm -rf /opt/rpi-tools
;;
esac
#DEBHELPER#
exit 0
'''

# qmake wrapper so build environments need not set PATH themselves.
qmake_link = '''#!/bin/bash
PATH=$PATH:/usr/local/qt5/bin /usr/local/qt5/bin/qmake
'''

extra_deps = ''

# These are the packages we are building.
# For the moment everything is collected in one single Debian pkg;
# pkg_version and fileset are filled in by pack_tools() at run time.
packages = [
    { 'fileset': '',
      'pkg_name': 'libqt5all-native-tools',
      'pkg_version': 0,
      'pkg_depends': 'libqt5all-dev (>= 5.9-0)',
      'pkg_description': 'QT5 Native compilation tools for the RaspberryPI' }
]
def pack_tools(root_directory, source_directory, qt5_version, tools_directory, dry_run=False):
    """Build the Debian package(s) described in ``packages``.

    Stages ``tools_directory`` (found under ``root_directory/source_directory``)
    into a versioned directory below pkgs/, writes the DEBIAN control,
    postinst and postrm files plus a /usr/bin/qmake wrapper, then invokes
    dpkg-deb.  With ``dry_run`` nothing is written and no commands are run.
    NOTE(review): Python 2 source (print statements).
    """
    complete_source='{}/{}'.format(root_directory, source_directory)
    # Sanity check
    if not os.path.exists(complete_source):
        print 'error: path not found', complete_source
        sys.exit(1)
    for pkg in packages:
        # Stamp the runtime fields of the package description.
        pkg['pkg_version'] = qt5_version
        pkg['fileset'] = [ tools_directory ]
        # allocate a versioned directory name for the package
        versioned_pkg_name = 'pkgs/{}_{}'.format(pkg['pkg_name'], qt5_version)
        print 'Processing package {}...'.format(versioned_pkg_name)
        # extract the files from the root file system preparing them for packaging
        target_directory = '{}/{}'.format (versioned_pkg_name, source_directory)
        for files in pkg['fileset']:
            # Complete the pathname to the target directory
            last_path = os.path.dirname(files)
            target_files_path='{}/{}'.format(target_directory, last_path)
            print 'Extracting {} into {}...'.format(os.path.join(complete_source, files), target_files_path)
            if not os.path.exists(target_files_path) and not dry_run:
                os.makedirs(target_files_path)
            if not dry_run:
                # cp -P copies symlinks as links rather than following them.
                os.system('cp -rvP {} {}'.format(os.path.join(complete_source, files), target_files_path))
        # create the Debian control file for "dpkg-deb" tool to know what to pack
        if not dry_run:
            debian_dir=os.path.join(versioned_pkg_name, 'DEBIAN')
            if not os.path.exists(debian_dir):
                os.makedirs(debian_dir)
            with open(os.path.join(debian_dir, 'control'), 'w') as control_file:
                control_file.writelines(control_skeleton.format(**pkg))
        # package postinst & postrm scripts - resolve qmake PATH on native builds
        if not dry_run:
            postinst_filename='{}/postinst'.format(debian_dir)
            postrm_filename='{}/postrm'.format(debian_dir)
            qmake_filename='{}/usr/bin/qmake'.format(os.path.join(versioned_pkg_name))
            with open(postinst_filename, 'w') as f:
                f.write(postinst_script)
            os.system('chmod ugo+rx {}'.format(postinst_filename))
            with open(postrm_filename, 'w') as f:
                f.write(postrm_script)
            os.system('chmod ugo+rx {}'.format(postrm_filename))
            os.makedirs(os.path.dirname(qmake_filename))
            with open(qmake_filename, 'w') as f:
                f.write(qmake_link)
            os.system('chmod ugo+rx {}'.format(qmake_filename))
        # finally call dpkg-deb and generate a debian package
        if not dry_run:
            rc=os.system('dpkg-deb --build {}'.format(versioned_pkg_name))
        else:
            rc=0
        if not rc:
            print 'Package {} created correctly'.format(versioned_pkg_name)
        else:
            print 'WARNING: Error creating package {}'.format(versioned_pkg_name)
|
{
"content_hash": "50777742125a620e931cf4b00e11baf3",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 134,
"avg_line_length": 33.6013986013986,
"alnum_prop": 0.6297606659729449,
"repo_name": "pipaos/qt5-rpi",
"id": "f4a2bf901e9996a90d3ae025a36dbad28b684277",
"size": "4851",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pack/native_tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "41175"
},
{
"name": "Shell",
"bytes": "5655"
}
],
"symlink_target": ""
}
|
import rospy
from map_labelling.srv import GoToLocation, GoToLocationResponse, MotionStatus,MotionStatusResponse
import random
# Named map locations the torture test cycles through at random.  Unknown
# names make the goto_location service raise (handled in Location.goto).
locations = [ "grasp",
              "charitys_office",
              "vending_machines",
              "levine_far_corner",
              "towne_311",
              "towne_321",
              "to_skirkanich",
              "empty_spot",
              "bio_lab" ]
#====================================================
# node exectution
def runNode():
    """Drive the robot to randomly chosen labelled locations forever.

    Initial localization is hard coded into the launch file; this node only
    issues navigation goals and logs whether each one succeeded.
    """
    rospy.init_node('torture_test')
    rospy.loginfo("torture_test node initialized")
    # Block until both navigation services are reachable before issuing goals.
    rospy.wait_for_service('goto_location')
    rospy.wait_for_service('motion_status')
    rospy.loginfo("Navigation services are available")
    targets = [Location(name) for name in locations]
    rospy.sleep(10)
    rospy.loginfo("Beginning tests")
    # Loop until the node is shut down, picking a random destination each time.
    while not rospy.is_shutdown():
        destination = random.choice(targets)
        destination.goto()
        destination.print_status()
#======================================
# Location class
#Information and methods pertaining to a location
class Location:
    """A named map location plus the ROS service proxies needed to reach it."""
    def __init__(self,name):
        self.name = name
        self.goToService = rospy.ServiceProxy('goto_location', GoToLocation)
        self.motionStatus = rospy.ServiceProxy('motion_status',MotionStatus)
        # True iff the most recent goto() reached the goal successfully.
        self.status = False
    #Navigate to this location
    def goto(self):
        # Blocks until the motion goal finishes; sets self.status accordingly.
        rospy.loginfo("Navigating to "+self.name)
        try:
            self.goToService(self.name)
        except rospy.service.ServiceException:
            # The service rejects location names it does not know about.
            rospy.logerr("Location " + self.name+ " does not exist.")
            return
        #wait for arrival
        done = False
        while not done:
            # Poll the motion status once per second.
            rospy.sleep(1)
            motionStatus = self.motionStatus().status
            #0=PENDING,1=ACTIVE so these are considered running, anything else means the goal is no longer running
            if not(motionStatus == 0 or motionStatus == 1):
                done = True
                if motionStatus == 3: #3 is succeeded motion
                    self.status = True
                else:
                    self.status = False
    def print_status(self):
        # Report the outcome of the most recent goto() attempt.
        if self.status:
            rospy.loginfo("Navigating to "+self.name+" succeeded")
        else:
            rospy.loginfo("Navigating to "+self.name+" failed")
#=================================
# Run main
if __name__ == '__main__':
    try:
        runNode()
    except rospy.ROSInterruptException:
        # Raised by rospy sleep/wait calls when the node is shut down mid-run.
        pass
|
{
"content_hash": "136505bec8df725a77a4e10c5d4c1890",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 105,
"avg_line_length": 24.302083333333332,
"alnum_prop": 0.6318045435062152,
"repo_name": "GRASP-ML/ServiceRobots",
"id": "fe3cd442ab88bb56a247095c7954987063059e8b",
"size": "2423",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "navigation/robust_navigation/src/torture_test_controller.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "125339"
},
{
"name": "CMake",
"bytes": "41673"
},
{
"name": "Lua",
"bytes": "2537"
},
{
"name": "Python",
"bytes": "47682"
}
],
"symlink_target": ""
}
|
""" Example TAL program
Copyright (c) 2009 Colin Stewart (http://www.owlfish.com/)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
If you make any bug fixes or feature enhancements please let me know!
As simple as it gets:
1 - Create a context
2 - Compile a template
3 - Expand the template
Module Dependencies: simpleTAL, simpleTALES
"""
from simpletal import simpleTAL, simpleTALES
import sys
# Create the context that is used by the template
# (allowPythonPath=1 presumably enables python: path expressions in the
# template — verify against the simpleTALES documentation)
context = simpleTALES.Context(allowPythonPath=1)
# Add a string to the context under the variable title
context.addGlobal ("title", "Colours of the rainbow")
# A list of strings
colours = ["red", "orange", "yellow", "green", "blue", "indigo", "violet"]
# Add the list to the context under the variable rainbow
context.addGlobal ("rainbow", colours)
# Open the template file
templateFile = open ("basic.html", 'rt', encoding='utf-8')
# Compile a template
template = simpleTAL.compileHTMLTemplate (templateFile)
# Close the template file
templateFile.close()
# Expand the template as HTML using this context
template.expand (context, sys.stdout, outputEncoding="utf-8")
|
{
"content_hash": "15f4ab92c0162fbd6a297df3cddd15e8",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 75,
"avg_line_length": 39.80952380952381,
"alnum_prop": 0.7599681020733652,
"repo_name": "g2p/SimpleTAL",
"id": "2563fd3458e6d5785ddfb743e1c917ba592230ab",
"size": "2526",
"binary": false,
"copies": "1",
"ref": "refs/heads/python3",
"path": "examples/basic/basic-example.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "405219"
}
],
"symlink_target": ""
}
|
from paver.easy import *
from paver import doctools
from paver.setuputils import setup
# Paver build configuration consumed by the tasks below: Sphinx output is
# written under docs/.build (see sphinx_builddir()).
options(
    sphinx=Bunch(builddir=".build"),
)
def sphinx_builddir(options):
    """Return the directory holding Sphinx's rendered HTML output."""
    docs_root = path("docs")
    return docs_root / options.sphinx.builddir / "html"
@task
def clean_docs(options):
    # Delete previously built HTML docs so the next build starts clean.
    sphinx_builddir(options).rmtree()
@task
@needs("clean_docs", "paver.doctools.html")
def html(options):
    """Rebuild the Sphinx HTML docs and move them into Documentation/."""
    target = path("Documentation")
    target.rmtree()
    sphinx_builddir(options).move(target)
@task
@needs("paver.doctools.html")
def qhtml(options):
    # "Quick" html: rsync the build output into Documentation/ without the
    # clean step, so unchanged files are left alone.
    destdir = path("Documentation")
    builtdocs = sphinx_builddir(options)
    sh("rsync -az %s/ %s" % (builtdocs, destdir))
@task
@needs("clean_docs", "paver.doctools.html")
def ghdocs(options):
    # Rebuild the docs, post-process them with sphinx-to-github, then commit
    # and push the rendered pages on the gh-pages branch.
    builtdocs = sphinx_builddir(options)
    sh("sphinx-to-github", cwd=builtdocs)
    sh("git checkout gh-pages && \
       cp -r %s/* . && \
       git commit . -m 'Rendered documentation for Github Pages.' && \
       git push origin gh-pages && \
       git checkout master" % builtdocs)
@task
@needs("clean_docs", "paver.doctools.html")
def upload_pypi_docs(options):
    """Upload the freshly built Sphinx HTML docs to PyPI.

    Uses the shared sphinx_builddir() helper so the docs path is resolved the
    same way as in the other doc tasks (docs/<sphinx.builddir>/html).  The
    previous ``options.builddir`` lookup bypassed the ``sphinx`` option
    section set up at the top of this file.
    """
    builtdocs = sphinx_builddir(options)
    sh("python setup.py upload_sphinx --upload-dir='%s'" % (builtdocs))
@task
@needs("upload_pypi_docs", "ghdocs")
def upload_docs(options):
    # Aggregate task: publish the docs to both PyPI and GitHub Pages.
    pass
@task
def autodoc(options):
    # Run the doc4allmods helper over the pyes package (presumably checks
    # that every module has reference documentation — verify the script).
    sh("contrib/release/doc4allmods pyes")
@task
def verifyindex(options):
    # Run the release helper that verifies the reference index.
    sh("contrib/release/verify-reference-index.sh")
@task
def flakes(options):
    # Static-analyse all package sources with pyflakes.
    sh("find pyes -name '*.py' | xargs pyflakes")
@task
def clean_readme(options):
    # Remove the generated README files so readme() can rebuild them.
    # NOTE(review): unlink() on a missing file may raise — confirm the paver
    # path type's behavior if this task must be idempotent.
    path("README").unlink()
    path("README.rst").unlink()
@task
@needs("clean_readme")
def readme(options):
    # Regenerate README.rst from the Sphinx readme template and keep the
    # plain README as a symlink to it.
    sh("python contrib/release/sphinx-to-rst.py docs/templates/readme.txt \
    > README.rst")
    sh("ln -sf README.rst README")
@task
def bump(options):
    # Bump the pyes version number via the external 'bump' tool.
    sh("bump -c pyes")
@task
@cmdopts([
    ("coverage", "c", "Enable coverage"),
    ("quick", "q", "Quick test"),
    ("verbose", "V", "Make more noise"),
])
def test(options):
    """Run the nosetests suite, honouring the coverage/quick/verbose flags."""
    command = "CELERY_LOADER=default nosetests"
    if getattr(options, "coverage", False):
        command += " --with-coverage3"
    if getattr(options, "quick", False):
        # Quick mode prefixes env vars that skip the slow parts of the suite.
        command = "QUICKTEST=1 SKIP_RLIMITS=1 %s" % command
    if getattr(options, "verbose", False):
        command += " --verbosity=2"
    sh(command)
@task
@cmdopts([
    ("noerror", "E", "Ignore errors"),
])
def pep8(options):
    # Run pep8 over every .py file.  The perl one-liner echoes the report and
    # exits non-zero iff any output line was produced, failing the task on
    # findings unless --noerror was given.
    noerror = getattr(options, "noerror", False)
    return sh("""find . -name "*.py" | xargs pep8 | perl -nle'\
    print; $a=1 if $_}{exit($a)'""", ignore_error=noerror)
@task
def removepyc(options):
    # Delete stale *.pyc byte-code files across the tree.
    sh("find . -name '*.pyc' | xargs rm")
@task
@needs("removepyc")
def gitclean(options):
    # Dry run (-n): list what a full git clean would remove, without deleting.
    sh("git clean -xdn")
@task
@needs("removepyc")
def gitcleanforce(options):
    # Destructive: actually remove all untracked and ignored files (-f).
    sh("git clean -xdf")
@task
@needs("pep8", "autodoc", "verifyindex", "test", "gitclean")
def releaseok(options):
    # Gate task: all checks that must pass before cutting a release.
    pass
@task
@needs("releaseok", "removepyc", "upload_docs")
def release(options):
    # Full release pipeline: run the checks, clean up, publish the docs.
    pass
|
{
"content_hash": "13900d7da54dd50646140fc426df6c81",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 75,
"avg_line_length": 20.931972789115648,
"alnum_prop": 0.631459213519662,
"repo_name": "dbbhattacharya/kitsune",
"id": "1b7e4a6f2a1f786ca8faa9b0f1e3f503ef493063",
"size": "3077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/packages/pyes/pavement.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "276585"
},
{
"name": "HTML",
"bytes": "600145"
},
{
"name": "JavaScript",
"bytes": "800276"
},
{
"name": "Python",
"bytes": "2762831"
},
{
"name": "Shell",
"bytes": "6720"
},
{
"name": "Smarty",
"bytes": "1752"
}
],
"symlink_target": ""
}
|
"""Philips Hue sensors platform tests."""
import asyncio
from unittest.mock import Mock
import aiohue
from homeassistant.components import hue
from homeassistant.components.hue.const import ATTR_HUE_EVENT
from homeassistant.components.hue.v1 import sensor_base
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_registry import async_get
from homeassistant.util import dt as dt_util
from .conftest import create_mock_bridge, setup_platform
from tests.common import async_capture_events, async_fire_time_changed
PRESENCE_SENSOR_1_PRESENT = {
"state": {"presence": True, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Living room sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_1 = {
"state": {
"lightlevel": 1,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 1",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_1 = {
"state": {"temperature": 1775, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 1",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:77-02-0402",
"capabilities": {"certified": True},
}
PRESENCE_SENSOR_2_NOT_PRESENT = {
"state": {"presence": False, "lastupdated": "2019-01-01T00:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Kitchen sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_2 = {
"state": {
"lightlevel": 10001,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 2",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_2 = {
"state": {"temperature": 1875, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 2",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:88-02-0402",
"capabilities": {"certified": True},
}
PRESENCE_SENSOR_3_PRESENT = {
"state": {"presence": True, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"sensitivity": 2,
"sensitivitymax": 2,
"pending": [],
},
"name": "Bedroom sensor",
"type": "ZLLPresence",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue motion sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0406",
"capabilities": {"certified": True},
}
LIGHT_LEVEL_SENSOR_3 = {
"state": {
"lightlevel": 1,
"dark": True,
"daylight": True,
"lastupdated": "2019-01-01T01:00:00",
},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T00:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"tholddark": 12467,
"tholdoffset": 7000,
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue ambient light sensor 3",
"type": "ZLLLightLevel",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue ambient light sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0400",
"capabilities": {"certified": True},
}
TEMPERATURE_SENSOR_3 = {
"state": {"temperature": 1775, "lastupdated": "2019-01-01T01:00:00"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-01-01T01:00:00"},
"config": {
"on": True,
"battery": 100,
"reachable": True,
"alert": "none",
"ledindication": False,
"usertest": False,
"pending": [],
},
"name": "Hue temperature sensor 3",
"type": "ZLLTemperature",
"modelid": "SML001",
"manufacturername": "Philips",
"productname": "Hue temperature sensor",
"swversion": "6.1.1.27575",
"uniqueid": "00:11:22:33:44:55:66:99-02-0402",
"capabilities": {"certified": True},
}
UNSUPPORTED_SENSOR = {
"state": {"status": 0, "lastupdated": "2019-01-01T01:00:00"},
"config": {"on": True, "reachable": True},
"name": "Unsupported sensor",
"type": "CLIPGenericStatus",
"modelid": "PHWA01",
"manufacturername": "Philips",
"swversion": "1.0",
"uniqueid": "arbitrary",
"recycle": True,
}
HUE_TAP_REMOTE_1 = {
"state": {"buttonevent": 17, "lastupdated": "2019-06-22T14:43:50"},
"swupdate": {"state": "notupdatable", "lastinstall": None},
"config": {"on": True},
"name": "Hue Tap",
"type": "ZGPSwitch",
"modelid": "ZGPSWITCH",
"manufacturername": "Philips",
"productname": "Hue tap switch",
"diversityid": "d8cde5d5-0eef-4b95-b0f0-71ddd2952af4",
"uniqueid": "00:00:00:00:00:44:23:08-f2",
"capabilities": {"certified": True, "primary": True, "inputs": []},
}
HUE_DIMMER_REMOTE_1 = {
"state": {"buttonevent": 4002, "lastupdated": "2019-12-28T21:58:02"},
"swupdate": {"state": "noupdates", "lastinstall": "2019-10-13T13:16:15"},
"config": {"on": True, "battery": 100, "reachable": True, "pending": []},
"name": "Hue dimmer switch 1",
"type": "ZLLSwitch",
"modelid": "RWL021",
"manufacturername": "Philips",
"productname": "Hue dimmer switch",
"diversityid": "73bbabea-3420-499a-9856-46bf437e119b",
"swversion": "6.1.1.28573",
"uniqueid": "00:17:88:01:10:3e:3a:dc-02-fc00",
"capabilities": {"certified": True, "primary": True, "inputs": []},
}
# Default bridge payload shared by most tests: two motion sensors ("1", "4"),
# each with companion light-level and temperature resources, plus a Hue tap
# ("7") and a Hue dimmer ("8") remote.
SENSOR_RESPONSE = {
    "1": PRESENCE_SENSOR_1_PRESENT,
    "2": LIGHT_LEVEL_SENSOR_1,
    "3": TEMPERATURE_SENSOR_1,
    "4": PRESENCE_SENSOR_2_NOT_PRESENT,
    "5": LIGHT_LEVEL_SENSOR_2,
    "6": TEMPERATURE_SENSOR_2,
    "7": HUE_TAP_REMOTE_1,
    "8": HUE_DIMMER_REMOTE_1,
}
async def test_no_sensors(hass, mock_bridge_v1):
    """Test the update_items function when no sensors are found."""
    # An empty sensor payload must produce zero entities and exactly one
    # request against the (mock) bridge.
    mock_bridge_v1.mock_sensor_responses.append({})
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    assert len(hass.states.async_all()) == 0
async def test_sensors_with_multiple_bridges(hass, mock_bridge_v1):
    """Test the update_items function with some sensors."""
    # Second bridge carries one extra physical sensor (three virtual entities).
    mock_bridge_2 = create_mock_bridge(hass, api_version=1)
    mock_bridge_2.mock_sensor_responses.append(
        {
            "1": PRESENCE_SENSOR_3_PRESENT,
            "2": LIGHT_LEVEL_SENSOR_3,
            "3": TEMPERATURE_SENSOR_3,
        }
    )
    mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    await setup_platform(
        hass, mock_bridge_2, ["binary_sensor", "sensor"], "mock-bridge-2"
    )
    # Each bridge should have been polled exactly once.
    assert len(mock_bridge_v1.mock_requests) == 1
    assert len(mock_bridge_2.mock_requests) == 1
    # 3 "physical" sensors with 3 virtual sensors each + 1 battery sensor
    assert len(hass.states.async_all()) == 10
async def test_sensors(hass, mock_bridge_v1):
    """Test the update_items function with some sensors."""
    mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    # 2 "physical" sensors with 3 virtual sensors each
    assert len(hass.states.async_all()) == 7
    presence_sensor_1 = hass.states.get("binary_sensor.living_room_sensor_motion")
    light_level_sensor_1 = hass.states.get("sensor.living_room_sensor_light_level")
    temperature_sensor_1 = hass.states.get("sensor.living_room_sensor_temperature")
    assert presence_sensor_1 is not None
    assert presence_sensor_1.state == "on"
    assert light_level_sensor_1 is not None
    assert light_level_sensor_1.state == "1.0"
    assert light_level_sensor_1.name == "Living room sensor light level"
    assert temperature_sensor_1 is not None
    # Fixture value 1775 is exposed as "17.75" in the entity state.
    assert temperature_sensor_1.state == "17.75"
    assert temperature_sensor_1.name == "Living room sensor temperature"
    presence_sensor_2 = hass.states.get("binary_sensor.kitchen_sensor_motion")
    light_level_sensor_2 = hass.states.get("sensor.kitchen_sensor_light_level")
    temperature_sensor_2 = hass.states.get("sensor.kitchen_sensor_temperature")
    assert presence_sensor_2 is not None
    assert presence_sensor_2.state == "off"
    assert light_level_sensor_2 is not None
    assert light_level_sensor_2.state == "10.0"
    assert light_level_sensor_2.name == "Kitchen sensor light level"
    assert temperature_sensor_2 is not None
    assert temperature_sensor_2.state == "18.75"
    assert temperature_sensor_2.name == "Kitchen sensor temperature"
    # The dimmer remote contributes only a diagnostic battery-level entity.
    battery_remote_1 = hass.states.get("sensor.hue_dimmer_switch_1_battery_level")
    assert battery_remote_1 is not None
    assert battery_remote_1.state == "100"
    assert battery_remote_1.name == "Hue dimmer switch 1 battery level"
    ent_reg = async_get(hass)
    assert (
        ent_reg.async_get("sensor.hue_dimmer_switch_1_battery_level").entity_category
        == EntityCategory.DIAGNOSTIC
    )
async def test_unsupported_sensors(hass, mock_bridge_v1):
    """Test that unsupported sensors don't get added and don't fail."""
    # Replace slot "7" (the tap remote) with a CLIPGenericStatus sensor; the
    # entity count must stay the same as without it.
    response_with_unsupported = dict(SENSOR_RESPONSE)
    response_with_unsupported["7"] = UNSUPPORTED_SENSOR
    mock_bridge_v1.mock_sensor_responses.append(response_with_unsupported)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    # 2 "physical" sensors with 3 virtual sensors each + 1 battery sensor
    assert len(hass.states.async_all()) == 7
async def test_new_sensor_discovered(hass, mock_bridge_v1):
    """Test if 2nd update has a new sensor."""
    mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    assert len(hass.states.async_all()) == 7
    # Second poll returns the original payload plus one more physical sensor
    # (presence + light level + temperature), i.e. three new entities.
    new_sensor_response = dict(SENSOR_RESPONSE)
    new_sensor_response.update(
        {
            "9": PRESENCE_SENSOR_3_PRESENT,
            "10": LIGHT_LEVEL_SENSOR_3,
            "11": TEMPERATURE_SENSOR_3,
        }
    )
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    await mock_bridge_v1.sensor_manager.coordinator.async_refresh()
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 2
    assert len(hass.states.async_all()) == 10
    presence = hass.states.get("binary_sensor.bedroom_sensor_motion")
    assert presence is not None
    assert presence.state == "on"
    temperature = hass.states.get("sensor.bedroom_sensor_temperature")
    assert temperature is not None
    assert temperature.state == "17.75"
async def test_sensor_removed(hass, mock_bridge_v1):
    """Test if 2nd update has removed sensor."""
    mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    assert len(hass.states.async_all()) == 7
    # Second poll only reports the first physical sensor's three resources;
    # every other entity should disappear from the state machine.
    mock_bridge_v1.mock_sensor_responses.clear()
    keys = ("1", "2", "3")
    mock_bridge_v1.mock_sensor_responses.append({k: SENSOR_RESPONSE[k] for k in keys})
    # Force updates to run again
    await mock_bridge_v1.sensor_manager.coordinator.async_refresh()
    # To flush out the service call to update the group
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 2
    assert len(hass.states.async_all()) == 3
    sensor = hass.states.get("binary_sensor.living_room_sensor_motion")
    assert sensor is not None
    removed_sensor = hass.states.get("binary_sensor.kitchen_sensor_motion")
    assert removed_sensor is None
async def test_update_timeout(hass, mock_bridge_v1):
    """Test bridge marked as not available if timeout error during update."""
    # A timeout from aiohue's sensors.update must abort setup: no requests
    # recorded, no entities created.
    mock_bridge_v1.api.sensors.update = Mock(side_effect=asyncio.TimeoutError)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 0
    assert len(hass.states.async_all()) == 0
async def test_update_unauthorized(hass, mock_bridge_v1):
    """Test bridge marked as not authorized if unauthorized during update."""
    # aiohue.Unauthorized must route through the bridge's dedicated
    # unauthorized-error handler exactly once and create no entities.
    mock_bridge_v1.api.sensors.update = Mock(side_effect=aiohue.Unauthorized)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 0
    assert len(hass.states.async_all()) == 0
    assert len(mock_bridge_v1.handle_unauthorized_error.mock_calls) == 1
async def test_hue_events(hass, mock_bridge_v1, device_reg):
    """Test that hue remotes fire events when pressed."""
    mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE)
    events = async_capture_events(hass, ATTR_HUE_EVENT)
    await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"])
    assert len(mock_bridge_v1.mock_requests) == 1
    assert len(hass.states.async_all()) == 7
    # Initial discovery registers the remotes but fires no events.
    assert len(events) == 0
    hue_tap_device = device_reg.async_get_device(
        {(hue.DOMAIN, "00:00:00:00:00:44:23:08")}
    )
    mock_bridge_v1.api.sensors["7"].last_event = {"type": "button"}
    mock_bridge_v1.api.sensors["8"].last_event = {"type": "button"}
    # Simulate a Hue tap press on sensor "7" via a shallow copy-on-write
    # of the fixture, so other tests' view of SENSOR_RESPONSE is untouched.
    new_sensor_response = dict(SENSOR_RESPONSE)
    new_sensor_response["7"] = dict(new_sensor_response["7"])
    new_sensor_response["7"]["state"] = {
        "buttonevent": 18,
        "lastupdated": "2019-12-28T22:58:03",
    }
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    async_fire_time_changed(
        hass, dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL
    )
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 2
    assert len(hass.states.async_all()) == 7
    assert len(events) == 1
    assert events[-1].data == {
        "device_id": hue_tap_device.id,
        "id": "hue_tap",
        "unique_id": "00:00:00:00:00:44:23:08-f2",
        "event": 18,
        "last_updated": "2019-12-28T22:58:03",
    }
    # Simulate a dimmer-switch press on sensor "8".
    hue_dimmer_device = device_reg.async_get_device(
        {(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")}
    )
    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["8"] = dict(new_sensor_response["8"])
    new_sensor_response["8"]["state"] = {
        "buttonevent": 3002,
        "lastupdated": "2019-12-28T22:58:03",
    }
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    async_fire_time_changed(
        hass, dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL
    )
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 3
    assert len(hass.states.async_all()) == 7
    assert len(events) == 2
    assert events[-1].data == {
        "device_id": hue_dimmer_device.id,
        "id": "hue_dimmer_switch_1",
        "unique_id": "00:17:88:01:10:3e:3a:dc-02-fc00",
        "event": 3002,
        "last_updated": "2019-12-28T22:58:03",
    }
    # Fire old event, it should be ignored
    # (lastupdated is earlier than the event already seen above).
    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["8"] = dict(new_sensor_response["8"])
    new_sensor_response["8"]["state"] = {
        "buttonevent": 18,
        "lastupdated": "2019-12-28T22:58:02",
    }
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    async_fire_time_changed(
        hass, dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL
    )
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 4
    assert len(hass.states.async_all()) == 7
    # Event count unchanged: the stale press was ignored.
    assert len(events) == 2
    # Add a new remote. In discovery the new event is registered **but not fired**
    new_sensor_response = dict(new_sensor_response)
    new_sensor_response["21"] = {
        "state": {
            "rotaryevent": 2,
            "expectedrotation": 208,
            "expectedeventduration": 400,
            "lastupdated": "2020-01-31T15:56:19",
        },
        "swupdate": {"state": "noupdates", "lastinstall": "2019-11-26T03:35:21"},
        "config": {"on": True, "battery": 100, "reachable": True, "pending": []},
        "name": "Lutron Aurora 1",
        "type": "ZLLRelativeRotary",
        "modelid": "Z3-1BRL",
        "manufacturername": "Lutron",
        "productname": "Lutron Aurora",
        "diversityid": "2c3a75ff-55c4-4e4d-8c44-82d330b8eb9b",
        "swversion": "3.4",
        "uniqueid": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
        "capabilities": {
            "certified": True,
            "primary": True,
            "inputs": [
                {
                    "repeatintervals": [400],
                    "events": [
                        {"rotaryevent": 1, "eventtype": "start"},
                        {"rotaryevent": 2, "eventtype": "repeat"},
                    ],
                }
            ],
        },
    }
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    async_fire_time_changed(
        hass, dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL
    )
    await hass.async_block_till_done()
    assert len(mock_bridge_v1.mock_requests) == 5
    # One new entity appeared (8 total), but still no new events fired.
    assert len(hass.states.async_all()) == 8
    assert len(events) == 2
    # A new press fires the event
    new_sensor_response["21"]["state"]["lastupdated"] = "2020-01-31T15:57:19"
    mock_bridge_v1.mock_sensor_responses.append(new_sensor_response)
    # Force updates to run again
    async_fire_time_changed(
        hass, dt_util.utcnow() + sensor_base.SensorManager.SCAN_INTERVAL
    )
    await hass.async_block_till_done()
    hue_aurora_device = device_reg.async_get_device(
        {(hue.DOMAIN, "ff:ff:00:0f:e7:fd:bc:b7")}
    )
    assert len(mock_bridge_v1.mock_requests) == 6
    assert len(hass.states.async_all()) == 8
    assert len(events) == 3
    assert events[-1].data == {
        "device_id": hue_aurora_device.id,
        "id": "lutron_aurora_1",
        "unique_id": "ff:ff:00:0f:e7:fd:bc:b7-01-fc00-0014",
        "event": 2,
        "last_updated": "2020-01-31T15:57:19",
    }
|
{
"content_hash": "9a71e8491dc1eb341cc81faf66c02497",
"timestamp": "",
"source": "github",
"line_count": 607,
"max_line_length": 86,
"avg_line_length": 35.34761120263592,
"alnum_prop": 0.613627889634601,
"repo_name": "w1ll1am23/home-assistant",
"id": "acefbdd07fec974b62d7f6b130a534fa2e41ac89",
"size": "21456",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/hue/test_sensor_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52277012"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
from ztag.annotation import Annotation
from ztag.annotation import Manufacturer
from ztag import protocols
import ztag.test
import re
class FtpNetApp(Annotation):
    """Tags FTP banners that advertise a NetApp FTP server.

    When the banner matches, sets the manufacturer and, where the
    banner carries one, extracts the dotted release version
    (e.g. "8.0.5") and its trailing revision suffix (e.g. "P1").
    """

    name = "NetApp FTP Server"
    protocol = protocols.FTP
    subprotocol = protocols.FTP.BANNER
    port = None

    # Raw strings so "\(" and "\d" are real regex escapes, not Python
    # string escapes (the originals triggered DeprecationWarnings).
    manufact_re = re.compile(
        r"^220 .+ FTP server \(NetApp Release",
        re.IGNORECASE
    )
    # Group 1: dotted version; group 2: optional revision suffix (may be "").
    version_re = re.compile(
        r"^220 .+ FTP server \(NetApp Release (\d+\.\d+\.\d+)([-_a-zA-Z0-9]*)",
        re.IGNORECASE
    )

    tests = {
        "FtpNetApp_1": {
            "global_metadata": {
                "manufacturer": Manufacturer.NETAPP
            },
            "local_metadata": {
                "version": "8.0.5",
                "revision": "P1"
            }
        }
    }

    def process(self, obj, meta):
        """Annotate *meta* from the FTP banner in ``obj["banner"]``.

        Fixes two defects of the original implementation: the version
        regex was searched up to three times per banner, and a banner
        matching the manufacturer pattern without a parseable version
        string raised AttributeError (``None.group``).
        """
        banner = obj["banner"]
        if self.manufact_re.search(banner):
            meta.global_metadata.manufacturer = Manufacturer.NETAPP
            match = self.version_re.search(banner)
            if match:  # version info is optional; tag manufacturer regardless
                meta.local_metadata.version = match.group(1)
                meta.local_metadata.revision = match.group(2)
            return meta
|
{
"content_hash": "35d5a11542ca041ad048bf15f5556452",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 78,
"avg_line_length": 25.520833333333332,
"alnum_prop": 0.5551020408163265,
"repo_name": "zmap/ztag",
"id": "0bbdfe4c511220fa530b873fe59c2a70a3f19a84",
"size": "1225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ztag/annotations/FtpNetApp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "604209"
}
],
"symlink_target": ""
}
|
"""Module for outputting test record to JSON-formatted files."""
import base64
from json import JSONEncoder
from openhtf.output import callbacks
from openhtf.util import data
class OutputToJSON(callbacks.OutputToFile):
  """Output callback that writes JSON Test Records.

  Example filename_patterns might be:
    '/data/test_records/{dut_id}.{metadata[test_name]}.json', indent=4)) or
    '/data/test_records/%(dut_id)s.%(start_time_millis)s'

  To use this output mechanism:
    test = openhtf.Test(PhaseOne, PhaseTwo)
    test.add_output_callback(openhtf.output.callbacks.OutputToJson(
        '/data/test_records/{dut_id}.{metadata[test_name]}.json'))

  Args:
    filename_pattern: Format string naming the file to write, formatted
      with the Test Record as a dictionary; may also be a file-like
      object to write to directly.
    inline_attachments: If True (the default), attachment payloads are
      base64 encoded and embedded in the JSON output so binary data
      survives serialization. Set False when attachments are large.
  """

  def __init__(self, filename_pattern=None, inline_attachments=True, **kwargs):
    super(OutputToJSON, self).__init__(filename_pattern)
    self.inline_attachments = inline_attachments
    # Any remaining kwargs (indent, sort_keys, ...) configure the encoder.
    self.json_encoder = JSONEncoder(**kwargs)

  def serialize_test_record(self, test_record):
    """Return *test_record* encoded as a JSON string."""
    return self.json_encoder.encode(self.convert_to_dict(test_record))

  def convert_to_dict(self, test_record):
    """Convert *test_record* to base types, optionally inlining attachments."""
    if not self.inline_attachments:
      return data.convert_to_base_types(test_record,
                                        ignore_keys=('attachments',))
    record_dict = data.convert_to_base_types(test_record)
    for phase in record_dict['phases']:
      for attachment in phase['attachments'].itervalues():
        attachment['data'] = base64.standard_b64encode(attachment['data'])
    return record_dict
|
{
"content_hash": "148dc39d2a6a4725eabf5137a55e5e84",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 41.0625,
"alnum_prop": 0.6996448503297819,
"repo_name": "fahhem/openhtf",
"id": "41f5e4e3b504cdb9cdd5fb7d8b128952372b42fb",
"size": "1971",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openhtf/output/callbacks/json_factory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15703"
},
{
"name": "HTML",
"bytes": "27431"
},
{
"name": "JavaScript",
"bytes": "6024387"
},
{
"name": "Protocol Buffer",
"bytes": "12143"
},
{
"name": "Python",
"bytes": "759239"
},
{
"name": "TypeScript",
"bytes": "38232"
}
],
"symlink_target": ""
}
|
"""
Script to format python requests responses as
HTTP text.
"""
import six
import benchline.args
def format_response(protocol, status_code, reason, headers_dict, body):
    """Format the response items as raw HTTP response text.

    :param protocol: protocol/version string, e.g. "HTTP/1.1"
    :param status_code: status code (str or int)
    :param reason: reason phrase, e.g. "OK"
    :param headers_dict: mapping of header names to values
    :param body: response body; coerced with str()
    :return: status line, headers and body joined with CRLF separators

    >>> format_response("HTTP/1.1", "200", "OK", {"Content-type": "text/plain"}, "this is the response")
    'HTTP/1.1 200 OK\\r\\nContent-type: text/plain\\r\\n\\r\\nthis is the response'
    """
    formatted_response = "%s %s %s\r\n" % (protocol, status_code, reason)
    # dict.items() behaves identically on Python 2 and 3 here, so the
    # third-party six.iteritems dependency is unnecessary.
    formatted_response += "\r\n".join(
        "%s: %s" % (key, value) for key, value in headers_dict.items())
    formatted_response += "\r\n\r\n"
    formatted_response += str(body)
    return formatted_response
def format_requests_response(requests_response_obj):
    """Format a requests module response object as raw HTTP text.

    >>> import requests
    >>> r = requests.get("http://www.byu.edu")
    >>> format_requests_response(r) == format_response("HTTP/1.1", r.status_code, r.reason, r.headers, r.content)
    True

    :param requests_response_obj: requests module response object
    :return: string
    """
    response = requests_response_obj
    return format_response(
        "HTTP/1.1",
        response.status_code,
        response.reason,
        response.headers,
        response.content,
    )
def main():
    """Entry point: hand the module docstring to benchline's arg parser."""
    benchline.args.go(__doc__, validate_args=None)


if __name__ == "__main__":
    main()
|
{
"content_hash": "33dc23f04e06a774c47c4a1d250b968c",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 113,
"avg_line_length": 34.21951219512195,
"alnum_prop": 0.6557377049180327,
"repo_name": "pauldeden/benchline",
"id": "e0f900d1d0f95f9d20e7b9ca64d9f6520d446d5a",
"size": "1493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchline/http_format.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38819"
}
],
"symlink_target": ""
}
|
from urllib.parse import unquote
from azure.core.paging import PageIterator, ItemPaged
from azure.core.exceptions import HttpResponseError
from ._deserialize import (
get_blob_properties_from_generated_code,
load_many_xml_nodes,
load_xml_int,
load_xml_string,
parse_tags,
)
from ._generated.models import BlobItemInternal, BlobPrefix as GenBlobPrefix, FilterBlobItem
from ._generated._serialization import Deserializer
from ._models import BlobProperties, FilteredBlob
from ._shared.models import DictMixin
from ._shared.response_handlers import (
return_context_and_deserialized,
return_raw_deserialized,
process_storage_error,
)
class IgnoreListBlobsDeserializer(Deserializer):
    """Deserializer that skips the ListBlobsFlatSegmentResponse model.

    Used where the caller reads the raw listing XML directly and
    deserializing the full generated model would be wasted work; all
    other target types are deserialized normally.
    """

    def __call__(self, target_obj, response_data, content_type=None):
        if target_obj == "ListBlobsFlatSegmentResponse":
            return None
        # Bug fix: the original dropped the base-class result, so every
        # non-skipped target type implicitly deserialized to None.
        return super().__call__(target_obj, response_data, content_type)
class BlobPropertiesPaged(PageIterator):
    """An Iterable of Blob properties.
    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A blob name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(~azure.storage.blob.BlobProperties)
    :ivar str container: The container that the blobs are listed from.
    :ivar str delimiter: A delimiting character used for hierarchy listing.
    :param callable command: Function to retrieve the next page of items.
    :param str container: The name of the container.
    :param str prefix: Filters the results to return only blobs whose names
        begin with the specified prefix.
    :param int results_per_page: The maximum number of blobs to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    :param str delimiter:
        Used to capture blobs whose names begin with the same substring up to
        the appearance of the delimiter character. The delimiter may be a single
        character or a string.
    :param location_mode: Specifies the location the request should be sent to.
        This mode only applies for RA-GRS accounts which allow secondary read access.
        Options include 'primary' or 'secondary'.
    """
    def __init__(
            self, command,
            container=None,
            prefix=None,
            results_per_page=None,
            continuation_token=None,
            delimiter=None,
            location_mode=None):
        # "" means "start at the beginning"; PageIterator treats a falsy
        # continuation token returned later as "no more pages".
        super(BlobPropertiesPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        # Per-listing state; service_endpoint/marker/current_page are
        # filled in by _extract_data_cb as pages are fetched.
        self._command = command
        self.service_endpoint = None
        self.prefix = prefix
        self.marker = None
        self.results_per_page = results_per_page
        self.container = container
        self.delimiter = delimiter
        self.current_page = None
        self.location_mode = location_mode
    def _get_next_cb(self, continuation_token):
        # Fetch one page; storage errors are translated into azure-storage
        # exception types by process_storage_error (which raises).
        try:
            return self._command(
                prefix=self.prefix,
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except HttpResponseError as error:
            process_storage_error(error)
    def _extract_data_cb(self, get_next_return):
        # get_next_return is a (location_mode, deserialized response) pair
        # produced by the return_context_and_deserialized response hook.
        self.location_mode, self._response = get_next_return
        self.service_endpoint = self._response.service_endpoint
        self.prefix = self._response.prefix
        self.marker = self._response.marker
        self.results_per_page = self._response.max_results
        self.container = self._response.container_name
        self.current_page = [self._build_item(item) for item in self._response.segment.blob_items]
        # A falsy next_marker signals the final page.
        return self._response.next_marker or None, self.current_page
    def _build_item(self, item):
        # Convert generated-model items into public BlobProperties;
        # pass through anything already converted or unrecognized.
        if isinstance(item, BlobProperties):
            return item
        if isinstance(item, BlobItemInternal):
            blob = get_blob_properties_from_generated_code(item)  # pylint: disable=protected-access
            blob.container = self.container
            return blob
        return item
class BlobNamesPaged(PageIterator):
    """An Iterable of Blob names.
    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A blob name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(str)
    :ivar str container: The container that the blobs are listed from.
    :ivar str delimiter: A delimiting character used for hierarchy listing.
    :param callable command: Function to retrieve the next page of items.
    :param str container: The name of the container.
    :param str prefix: Filters the results to return only blobs whose names
        begin with the specified prefix.
    :param int results_per_page: The maximum number of blobs to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    :param location_mode: Specifies the location the request should be sent to.
        This mode only applies for RA-GRS accounts which allow secondary read access.
        Options include 'primary' or 'secondary'.
    """
    def __init__(
            self, command,
            container=None,
            prefix=None,
            results_per_page=None,
            continuation_token=None,
            location_mode=None):
        # "" means "start at the beginning" for the base PageIterator.
        super(BlobNamesPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        # Listing state; populated page-by-page in _extract_data_cb.
        self._command = command
        self.service_endpoint = None
        self.prefix = prefix
        self.marker = None
        self.results_per_page = results_per_page
        self.container = container
        self.current_page = None
        self.location_mode = location_mode
    def _get_next_cb(self, continuation_token):
        # cls=return_raw_deserialized keeps the raw listing payload so only
        # blob names are extracted below, skipping full model deserialization.
        try:
            return self._command(
                prefix=self.prefix,
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_raw_deserialized,
                use_location=self.location_mode)
        except HttpResponseError as error:
            process_storage_error(error)
    def _extract_data_cb(self, get_next_return):
        # Pull listing metadata and the page of names straight out of the
        # raw XML response via the lightweight load_xml_* helpers.
        self.location_mode, self._response = get_next_return
        self.service_endpoint = self._response.get('ServiceEndpoint')
        self.prefix = load_xml_string(self._response, 'Prefix')
        self.marker = load_xml_string(self._response, 'Marker')
        self.results_per_page = load_xml_int(self._response, 'MaxResults')
        self.container = self._response.get('ContainerName')
        blobs = load_many_xml_nodes(self._response, 'Blob', wrapper='Blobs')
        self.current_page = [load_xml_string(blob, 'Name') for blob in blobs]
        # A falsy NextMarker signals the final page.
        next_marker = load_xml_string(self._response, 'NextMarker')
        return next_marker or None, self.current_page
class BlobPrefixPaged(BlobPropertiesPaged):
    """Page iterator for hierarchical (delimiter-based) blob listings.

    Each page holds BlobPrefix entries ("virtual directories") followed
    by the blob items at the current hierarchy level.
    """

    def __init__(self, *args, **kwargs):
        super(BlobPrefixPaged, self).__init__(*args, **kwargs)
        # A prefix page is named after the prefix it represents.
        self.name = self.prefix

    def _extract_data_cb(self, get_next_return):
        continuation_token, _ = super(BlobPrefixPaged, self)._extract_data_cb(get_next_return)
        segment = self._response.segment
        raw_entries = segment.blob_prefixes + segment.blob_items
        self.current_page = [self._build_item(entry) for entry in raw_entries]
        self.delimiter = self._response.delimiter
        return continuation_token, self.current_page

    def _build_item(self, item):
        built = super(BlobPrefixPaged, self)._build_item(item)
        if not isinstance(built, GenBlobPrefix):
            return built
        # Percent-decode the name only when the service marked it encoded.
        raw_name = built.name.content
        prefix_name = unquote(raw_name) if built.name.encoded else raw_name
        return BlobPrefix(
            self._command,
            container=self.container,
            prefix=prefix_name,
            results_per_page=self.results_per_page,
            location_mode=self.location_mode)
class BlobPrefix(ItemPaged, DictMixin):
    """An Iterable of Blob properties.
    Returned from walk_blobs when a delimiter is used.
    Can be thought of as a virtual blob directory.
    :ivar str name: The prefix, or "directory name" of the blob.
    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A blob name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str next_marker: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(~azure.storage.blob.BlobProperties)
    :ivar str container: The container that the blobs are listed from.
    :ivar str delimiter: A delimiting character used for hierarchy listing.
    :param callable command: Function to retrieve the next page of items.
    :param str prefix: Filters the results to return only blobs whose names
        begin with the specified prefix.
    :param int results_per_page: The maximum number of blobs to retrieve per
        call.
    :param str marker: An opaque continuation token.
    :param str delimiter:
        Used to capture blobs whose names begin with the same substring up to
        the appearance of the delimiter character. The delimiter may be a single
        character or a string.
    :param location_mode: Specifies the location the request should be sent to.
        This mode only applies for RA-GRS accounts which allow secondary read access.
        Options include 'primary' or 'secondary'.
    """

    def __init__(self, *args, **kwargs):
        super(BlobPrefix, self).__init__(*args, page_iterator_class=BlobPrefixPaged, **kwargs)
        # The "directory name" of this virtual directory is its prefix.
        self.name = kwargs.get('prefix')
        # Mirror the paging options onto the prefix object itself so it
        # can be inspected like a container listing.
        for option in ('prefix', 'results_per_page', 'container',
                       'delimiter', 'location_mode'):
            setattr(self, option, kwargs.get(option))
class FilteredBlobPaged(PageIterator):
    """An Iterable of Blob properties.
    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A blob name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(~azure.storage.blob.FilteredBlob)
    :ivar str container: The container that the blobs are listed from.
    :param callable command: Function to retrieve the next page of items.
    :param str container: The name of the container.
    :param int results_per_page: The maximum number of blobs to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    :param location_mode: Specifies the location the request should be sent to.
        This mode only applies for RA-GRS accounts which allow secondary read access.
        Options include 'primary' or 'secondary'.
    """
    def __init__(
            self, command,
            container=None,
            results_per_page=None,
            continuation_token=None,
            location_mode=None):
        # "" means "start at the beginning" for the base PageIterator.
        super(FilteredBlobPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        # Listing state; marker/current_page advance in _extract_data_cb.
        self._command = command
        self.service_endpoint = None
        self.marker = continuation_token
        self.results_per_page = results_per_page
        self.container = container
        self.current_page = None
        self.location_mode = location_mode
    def _get_next_cb(self, continuation_token):
        # Fetch one page of filter results; storage errors are translated
        # (and re-raised) by process_storage_error.
        try:
            return self._command(
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except HttpResponseError as error:
            process_storage_error(error)
    def _extract_data_cb(self, get_next_return):
        # get_next_return is a (location_mode, deserialized response) pair.
        self.location_mode, self._response = get_next_return
        self.service_endpoint = self._response.service_endpoint
        self.marker = self._response.next_marker
        self.current_page = [self._build_item(item) for item in self._response.blobs]
        # A falsy next_marker signals the final page.
        return self._response.next_marker or None, self.current_page
    @staticmethod
    def _build_item(item):
        # Convert generated FilterBlobItem models (tags arrive as a raw
        # tag-set model and are parsed into a dict); pass through others.
        if isinstance(item, FilterBlobItem):
            tags = parse_tags(item.tags)
            blob = FilteredBlob(name=item.name, container_name=item.container_name, tags=tags)
            return blob
        return item
|
{
"content_hash": "e1ce5398239326e324e48b13a6ba5713",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 100,
"avg_line_length": 44.027522935779814,
"alnum_prop": 0.6675696325623394,
"repo_name": "Azure/azure-sdk-for-python",
"id": "6bbaa71c404f80ea571b283184b522c2eab0afb6",
"size": "14741",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azure-storage-blob/azure/storage/blob/_list_blobs_helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
# Package metadata for the multienum distribution (setuptools).
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
    long_description = f.read()
# Single-source the version: read it from the VERSION file so setup.py
# and the package never disagree.
with open('VERSION', 'rt') as version_file:
    version = version_file.read().strip()
setup(
    name='multienum',
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version=version,
    description='Enumerator type supporting multiple equivalent names',
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/sorreltree/multienum',
    # Author details
    author='Michael T Bacon',
    author_email='michael@sorreltree.com',
    # Choose your license
    license='Apache License 2.0',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 4 - Beta',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: Apache Software License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        # 'Programming Language :: Python :: 2',
        # 'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    # What does your project relate to?
    keywords='enumeration enum',
    # Single-module distribution: ships multienum.py, not a package dir.
    py_modules = ['multienum'],
    # Tests are collected by nose; 'pip install multienum[test]' pulls it in.
    test_suite = 'nose.collector',
    extras_require = {
        'test': ['nose'],
    },
)
|
{
"content_hash": "fa74b6f7956672ee0cf89ea99e540450",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 86,
"avg_line_length": 32.50704225352113,
"alnum_prop": 0.6451473136915078,
"repo_name": "sorreltree/multienum",
"id": "7408d3f2b90b40919ccac60215b57cc84b84cf9e",
"size": "2892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10719"
}
],
"symlink_target": ""
}
|
from tempest.api.identity import base
from tempest import test
class ExtensionTestJSON(base.BaseIdentityV2AdminTest):
    """Verify identity extensions are listable via the non-admin client."""
    _interface = 'json'

    @test.attr(type='gate')
    def test_list_extensions(self):
        # List all the extensions
        resp, body = self.non_admin_client.list_extensions()
        self.assertEqual(200, resp.status)
        self.assertNotEmpty(body)
        # Every extension entry must expose the standard descriptor fields.
        expected_keys = ('name', 'updated', 'alias', 'links',
                         'namespace', 'description')
        for extension in body:
            for expected_key in expected_keys:
                self.assertIn(expected_key, extension)
class ExtensionTestXML(ExtensionTestJSON):
    # Re-run the inherited test cases over the XML interface.
    _interface = 'xml'
|
{
"content_hash": "fd72e2e23b855c1d0f313824e7b36a2b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 60,
"avg_line_length": 29.272727272727273,
"alnum_prop": 0.6319875776397516,
"repo_name": "vedujoshi/os_tempest",
"id": "67f20f43ee4031ad3725fd2436cd27f3cdc91fdf",
"size": "1275",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/identity/test_extension.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3096313"
},
{
"name": "Shell",
"bytes": "8664"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from datetime import datetime
from django.core.urlresolvers import reverse
from sentry.models import Release, ReleaseCommit, ReleaseProject
from sentry.testutils import APITestCase
class ProjectReleaseListTest(APITestCase):
    # Exercises GET /projects/{org}/{project}/releases/ listing + filtering.
    def test_simple(self):
        """Releases are scoped to the project and carry newGroups counts."""
        self.login_as(user=self.user)
        team = self.create_team()
        project1 = self.create_project(team=team, name='foo')
        project2 = self.create_project(team=team, name='bar')
        release1 = Release.objects.create(
            organization_id=project1.organization_id,
            version='1',
            date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
        )
        release1.add_project(project1)
        ReleaseProject.objects.filter(project=project1, release=release1).update(new_groups=5)
        release2 = Release.objects.create(
            organization_id=project1.organization_id,
            version='2',
            date_added=datetime(2013, 8, 14, 3, 8, 24, 880386),
        )
        release2.add_project(project1)
        release3 = Release.objects.create(
            organization_id=project1.organization_id,
            version='3',
            date_added=datetime(2013, 8, 12, 3, 8, 24, 880386),
            date_released=datetime(2013, 8, 15, 3, 8, 24, 880386),
        )
        release3.add_project(project1)
        # release4 belongs only to project2 and must not appear in results.
        release4 = Release.objects.create(
            organization_id=project2.organization_id,
            version='4',
        )
        release4.add_project(project2)
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project1.organization.slug,
                'project_slug': project1.slug,
            }
        )
        response = self.client.get(url, format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        # NOTE(review): ordering puts release3 (explicit, latest
        # date_released) first, then by date_added descending — confirm
        # against the endpoint's sort implementation.
        assert response.data[0]['version'] == release3.version
        assert response.data[1]['version'] == release2.version
        assert response.data[2]['version'] == release1.version
        assert response.data[2]['newGroups'] == 5
    def test_query_filter(self):
        """The ?query= parameter matches by version prefix."""
        self.login_as(user=self.user)
        team = self.create_team()
        project = self.create_project(team=team, name='foo')
        release = Release.objects.create(
            organization_id=project.organization_id,
            version='foobar',
            date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
        )
        release.add_project(project)
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        # 'foo' is a prefix of 'foobar' and matches...
        response = self.client.get(url + '?query=foo', format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 1
        assert response.data[0]['version'] == release.version
        # ...while 'bar', a mere substring, does not.
        response = self.client.get(url + '?query=bar', format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 0
class ProjectReleaseCreateTest(APITestCase):
    """Exercises POST on the project releases endpoint."""

    def test_minimal(self):
        """Posting only a version creates an unowned release bound to the project."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.1',
            }
        )
        assert response.status_code == 201, response.content
        assert response.data['version']
        release = Release.objects.get(
            version=response.data['version'],
        )
        assert not release.owner
        assert release.organization == project.organization
        assert release.projects.first() == project

    def test_ios_release(self):
        """iOS-style version strings with a build number in parentheses are accepted."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.1 (123)',
            }
        )
        assert response.status_code == 201, response.content
        assert response.data['version']
        release = Release.objects.get(
            version=response.data['version'],
        )
        assert not release.owner
        assert release.organization == project.organization
        assert release.projects.first() == project

    def test_duplicate(self):
        """Re-posting an existing version for the same project responds 208."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        release = Release.objects.create(version='1.2.1', organization_id=project.organization_id)
        release.add_project(project)
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.1',
            }
        )
        assert response.status_code == 208, response.content

    def test_duplicate_accross_org(self):
        """Posting an existing org version from a second project links that project (201)."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        release = Release.objects.create(version='1.2.1', organization_id=project.organization_id)
        release.add_project(project)
        project2 = self.create_project(name='bar', organization=project.organization)
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project2.organization.slug,
                'project_slug': project2.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.1',
            }
        )
        # since project2 was added, should be 201
        assert response.status_code == 201, response.content
        # Still a single release row for the organization, linked to both projects.
        assert Release.objects.filter(
            version='1.2.1', organization_id=project.organization_id
        ).count() == 1
        assert ReleaseProject.objects.get(release=release, project=project)
        assert ReleaseProject.objects.get(release=release, project=project2)

    def test_version_whitespace(self):
        """Versions containing newline, form feed or tab are rejected with 400."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.3\n',
            }
        )
        assert response.status_code == 400, response.content
        response = self.client.post(
            url, data={
                'version': '\n1.2.3',
            }
        )
        assert response.status_code == 400, response.content
        response = self.client.post(
            url, data={
                'version': '1.\n2.3',
            }
        )
        assert response.status_code == 400, response.content
        response = self.client.post(
            url, data={
                'version': '1.2.3\f',
            }
        )
        assert response.status_code == 400, response.content
        response = self.client.post(
            url, data={
                'version': '1.2.3\t',
            }
        )
        assert response.status_code == 400, response.content
        # A clean version is accepted and stored verbatim.
        response = self.client.post(
            url, data={
                'version': '1.2.3',
            }
        )
        assert response.status_code == 201, response.content
        assert response.data['version'] == '1.2.3'
        release = Release.objects.get(
            organization_id=project.organization_id,
            version=response.data['version'],
        )
        assert not release.owner

    def test_features(self):
        """An ``owner`` email in the payload is resolved to the release owner."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={
                'version': '1.2.1',
                'owner': self.user.email,
            }
        )
        assert response.status_code == 201, response.content
        assert response.data['version']
        release = Release.objects.get(
            organization_id=project.organization_id,
            version=response.data['version'],
        )
        assert release.owner == self.user

    def test_commits(self):
        """A ``commits`` list in the payload creates ordered ReleaseCommit rows."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse(
            'sentry-api-0-project-releases',
            kwargs={
                'organization_slug': project.organization.slug,
                'project_slug': project.slug,
            }
        )
        response = self.client.post(
            url, data={'version': '1.2.1',
                       'commits': [
                           {
                               'id': 'a' * 40
                           },
                           {
                               'id': 'b' * 40
                           },
                       ]}
        )
        assert response.status_code == 201, (response.status_code, response.content)
        assert response.data['version']
        release = Release.objects.get(
            organization_id=project.organization_id,
            version=response.data['version'],
        )
        rc_list = list(
            ReleaseCommit.objects.filter(
                release=release,
            ).select_related('commit', 'commit__author').order_by('order')
        )
        assert len(rc_list) == 2
        for rc in rc_list:
            assert rc.organization_id
|
{
"content_hash": "7d955a39b0709537e103422f9bb594a1",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 98,
"avg_line_length": 30.783382789317507,
"alnum_prop": 0.5380759591285907,
"repo_name": "gencer/sentry",
"id": "c4dc74eb0c500d07fd342336f1abf2b3b71372ad",
"size": "10374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/api/endpoints/test_project_releases.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "318167"
},
{
"name": "HTML",
"bytes": "281885"
},
{
"name": "JavaScript",
"bytes": "2342569"
},
{
"name": "Lua",
"bytes": "65795"
},
{
"name": "Makefile",
"bytes": "8393"
},
{
"name": "Python",
"bytes": "28161647"
},
{
"name": "Ruby",
"bytes": "4233"
},
{
"name": "Shell",
"bytes": "2149"
}
],
"symlink_target": ""
}
|
"""SCons.Tool.mslink
Tool-specific initialization for the Microsoft linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mslink.py 5357 2011/09/09 21:31:03 bdeegan"
import os.path
import SCons.Action
import SCons.Defaults
import SCons.Errors
import SCons.Platform.win32
import SCons.Tool
import SCons.Tool.msvc
import SCons.Tool.msvs
import SCons.Util
from MSCommon import msvc_setup_env_once, msvc_exists
def pdbGenerator(env, target, source, for_signature):
    """Return ['/PDB:<path>', '/DEBUG'] when a PDB node is attached to the
    first target, or None when there is no target or no pdb attribute."""
    try:
        pdb_node = target[0].attributes.pdb
    except (AttributeError, IndexError):
        return None
    return ['/PDB:%s' % pdb_node, '/DEBUG']
def _dllTargets(target, source, env, for_signature, paramtp):
    """Build the /out: and /implib: linker arguments for a DLL-style link.

    *paramtp* ('SHLIB' or 'LDMODULE') selects which prefix/suffix
    construction variables identify the DLL node among *target*.
    """
    args = []
    dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
    if dll:
        args.append("/out:%s" % dll.get_string(for_signature))
    implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
    if implib:
        args.append("/implib:%s" % implib.get_string(for_signature))
    return args
def _dllSources(target, source, env, for_signature, paramtp):
    """Build the linker source arguments, turning a .def source into /def:."""
    deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")
    args = []
    for src in source:
        # Compare only against an explicitly non-None deffile so that the
        # __cmp__ method of SCons.Util.Proxy-based node proxies is never
        # asked to touch a non-existent __dict__ attribute.
        if deffile and src == deffile:
            # This source is the module-definition file.
            args.append("/def:%s" % src.get_string(for_signature))
        else:
            # Any other source is passed through unchanged.
            args.append(src)
    return args
def windowsShlinkTargets(target, source, env, for_signature):
    """Generator for $_SHLINK_TARGETS: /out: and /implib: flags for a shared library."""
    return _dllTargets(target, source, env, for_signature, 'SHLIB')
def windowsShlinkSources(target, source, env, for_signature):
    """Generator for $_SHLINK_SOURCES: source list with .def files as /def: flags."""
    return _dllSources(target, source, env, for_signature, 'SHLIB')
def _windowsLdmodTargets(target, source, env, for_signature):
    """Get targets for loadable modules (LDMODULE* variable family)."""
    return _dllTargets(target, source, env, for_signature, 'LDMODULE')
def _windowsLdmodSources(target, source, env, for_signature):
    """Get sources for loadable modules (LDMODULE* variable family)."""
    return _dllSources(target, source, env, for_signature, 'LDMODULE')
def _dllEmitter(target, source, env, paramtp):
    """Common implementation of dll emitter.

    Adds the extra nodes of a Windows DLL link — an optional generated
    .def source, plus manifest, PDB, import library and .exp targets —
    to the target/source lists.  *paramtp* ('SHLIB' or 'LDMODULE')
    selects the construction-variable family naming the DLL affixes.
    """
    SCons.Tool.msvc.validate_vars(env)

    extratargets = []
    extrasources = []

    dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
    no_import_lib = env.get('no_import_lib', 0)

    if not dll:
        raise SCons.Errors.UserError('A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp))

    insert_def = env.subst("$WINDOWS_INSERT_DEF")
    if not insert_def in ['', '0', 0] and \
       not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
        # append a def file to the list of sources
        extrasources.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))

    version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
    if version_num >= 8.0 and \
       (env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)):
        # MSVC 8 and above automatically generate .manifest files that must be installed
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))

    if 'PDB' in env and env['PDB']:
        pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
        extratargets.append(pdb)
        # Remember the PDB node on the dll node so pdbGenerator can emit /PDB:.
        target[0].attributes.pdb = pdb

    if not no_import_lib and \
       not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
        # Append an import library to the list of targets.
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "LIBPREFIX", "LIBSUFFIX"))
        # and .exp file is created if there are exports from a DLL
        extratargets.append(
            env.ReplaceIxes(dll,
                            '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
                            "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))

    return (target+extratargets, source+extrasources)
def windowsLibEmitter(target, source, env):
    """Shared-library emitter: delegate to the common DLL emitter with SHLIB variables."""
    return _dllEmitter(target, source, env, 'SHLIB')
def ldmodEmitter(target, source, env):
    """Emitter for loadable modules.

    Loadable modules are identical to shared libraries on Windows, but building
    them is subject to different parameters (LDMODULE*).
    """
    return _dllEmitter(target, source, env, 'LDMODULE')
def prog_emitter(target, source, env):
    """Program emitter: adds manifest and PDB side-effect targets to the exe."""
    SCons.Tool.msvc.validate_vars(env)

    extratargets = []

    exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX")
    if not exe:
        raise SCons.Errors.UserError("An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX"))

    version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
    if version_num >= 8.0 and \
       (env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)):
        # MSVC 8 and above automatically generate .manifest files that have to be installed
        extratargets.append(
            env.ReplaceIxes(exe,
                            "PROGPREFIX", "PROGSUFFIX",
                            "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX"))

    if 'PDB' in env and env['PDB']:
        pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
        extratargets.append(pdb)
        # Stash the PDB node so pdbGenerator can emit /PDB: for this link.
        target[0].attributes.pdb = pdb

    return (target+extratargets,source)
def RegServerFunc(target, source, env):
if 'register' in env and env['register']:
ret = regServerAction([target[0]], [source[0]], env)
if ret:
raise SCons.Errors.UserError("Unable to register %s" % target[0])
else:
print "Registered %s sucessfully" % target[0]
return ret
return 0
# These are the actual actions run to embed the manifest.
# They are only called from the Check versions below.
# $MTEXECOM / $MTSHLIBCOM expand to mt.exe command lines (set in generate()).
embedManifestExeAction = SCons.Action.Action('$MTEXECOM')
embedManifestDllAction = SCons.Action.Action('$MTSHLIBCOM')
def embedManifestDllCheck(target, source, env):
    """Function run by embedManifestDllCheckAction to check for existence of manifest
    and other conditions, and embed the manifest by calling embedManifestDllAction if so."""
    if env.get('WINDOWS_EMBED_MANIFEST', 0):
        # The linker writes the manifest next to the DLL as <dll>.manifest.
        manifestSrc = target[0].abspath + '.manifest'
        if os.path.exists(manifestSrc):
            ret = (embedManifestDllAction) ([target[0]],None,env)
            if ret:
                # Python 2 raise syntax, consistent with the rest of this module.
                raise SCons.Errors.UserError, "Unable to embed manifest into %s" % (target[0])
            return ret
        else:
            # Missing manifest is not an error; just report and continue.
            print '(embed: no %s.manifest found; not embedding.)'%str(target[0])
    return 0
def embedManifestExeCheck(target, source, env):
    """Function run by embedManifestExeCheckAction to check for existence of manifest
    and other conditions, and embed the manifest by calling embedManifestExeAction if so."""
    if env.get('WINDOWS_EMBED_MANIFEST', 0):
        # The linker writes the manifest next to the exe as <exe>.manifest.
        manifestSrc = target[0].abspath + '.manifest'
        if os.path.exists(manifestSrc):
            ret = (embedManifestExeAction) ([target[0]],None,env)
            if ret:
                # Python 2 raise syntax, consistent with the rest of this module.
                raise SCons.Errors.UserError, "Unable to embed manifest into %s" % (target[0])
            return ret
        else:
            # Missing manifest is not an error; just report and continue.
            print '(embed: no %s.manifest found; not embedding.)'%str(target[0])
    return 0
# Wrap the manifest-embedding checks as Actions so they can be chained
# onto the link actions below.
embedManifestDllCheckAction = SCons.Action.Action(embedManifestDllCheck, None)
embedManifestExeCheckAction = SCons.Action.Action(embedManifestExeCheck, None)

regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR")
regServerCheck = SCons.Action.Action(RegServerFunc, None)

# Shared libraries: link, optional COM registration, then embed manifest.
shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}')
compositeShLinkAction = shlibLinkAction + regServerCheck + embedManifestDllCheckAction

# Loadable modules: same pipeline driven by the LDMODULE* variables.
ldmodLinkAction = SCons.Action.Action('${TEMPFILE("$LDMODULE $LDMODULEFLAGS $_LDMODULE_TARGETS $_LIBDIRFLAGS $_LIBFLAGS $_PDB $_LDMODULE_SOURCES")}')
compositeLdmodAction = ldmodLinkAction + regServerCheck + embedManifestDllCheckAction

# Programs: link, then embed manifest (no registration step).
exeLinkAction = SCons.Action.Action('${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $_LIBDIRFLAGS $_LIBFLAGS $_PDB $SOURCES.windows")}')
compositeLinkAction = exeLinkAction + embedManifestExeCheckAction
def generate(env):
    """Add Builders and construction variables for mslink to an Environment.

    Sets up shared-library, program and loadable-module builders around
    the Microsoft linker, plus mt.exe manifest embedding and regsvr32
    COM-registration support.
    """
    SCons.Tool.createSharedLibBuilder(env)
    SCons.Tool.createProgBuilder(env)

    # Shared libraries link through $LINK with /dll.
    env['SHLINK'] = '$LINK'
    env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll')
    env['_SHLINK_TARGETS'] = windowsShlinkTargets
    env['_SHLINK_SOURCES'] = windowsShlinkSources
    env['SHLINKCOM'] = compositeShLinkAction
    env.Append(SHLIBEMITTER = [windowsLibEmitter])

    # Programs (executables).
    env['LINK'] = 'link'
    env['LINKFLAGS'] = SCons.Util.CLVar('/nologo')
    env['_PDB'] = pdbGenerator
    env['LINKCOM'] = compositeLinkAction
    env.Append(PROGEMITTER = [prog_emitter])

    # Library search-path and link-name affixes.
    env['LIBDIRPREFIX']='/LIBPATH:'
    env['LIBDIRSUFFIX']=''
    env['LIBLINKPREFIX']=''
    env['LIBLINKSUFFIX']='$LIBSUFFIX'

    # WIN32* values feed the corresponding WINDOWS* variables below.
    env['WIN32DEFPREFIX'] = ''
    env['WIN32DEFSUFFIX'] = '.def'
    env['WIN32_INSERT_DEF'] = 0
    env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
    env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
    env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}'

    env['WIN32EXPPREFIX'] = ''
    env['WIN32EXPSUFFIX'] = '.exp'
    env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}'
    env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'

    env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
    env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
    env['WINDOWSPROGMANIFESTPREFIX'] = ''
    env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'

    # COM server registration via regsvr32 (triggered by the 'register' var).
    env['REGSVRACTION'] = regServerCheck
    env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
    env['REGSVRFLAGS'] = '/s '
    env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}'

    # Manifest embedding with mt.exe (off by default).
    env['WINDOWS_EMBED_MANIFEST'] = 0
    env['MT'] = 'mt'
    #env['MTFLAGS'] = ['-hashupdate']
    env['MTFLAGS'] = SCons.Util.CLVar('/nologo')
    # Note: use - here to prevent build failure if no manifest produced.
    # This seems much simpler than a fancy system using a function action to see
    # if the manifest actually exists before trying to run mt with it.
    env['MTEXECOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;1'
    env['MTSHLIBCOM'] = '-$MT $MTFLAGS -manifest ${TARGET}.manifest $_MANIFEST_SOURCES -outputresource:$TARGET;2'
    # Future work garyo 27-Feb-11
    env['_MANIFEST_SOURCES'] = None # _windowsManifestSources

    # Set-up ms tools paths
    msvc_setup_env_once(env)

    # Loadable modules are on Windows the same as shared libraries, but they
    # are subject to different build parameters (LDMODULE* variables).
    # Therefore LDMODULE* variables correspond as much as possible to
    # SHLINK*/SHLIB* ones.
    SCons.Tool.createLoadableModuleBuilder(env)
    env['LDMODULE'] = '$SHLINK'
    env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
    env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
    env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
    env['_LDMODULE_TARGETS'] = _windowsLdmodTargets
    env['_LDMODULE_SOURCES'] = _windowsLdmodSources
    env['LDMODULEEMITTER'] = [ldmodEmitter]
    env['LDMODULECOM'] = compositeLdmodAction
def exists(env):
    """Tool availability hook: true when an MSVC installation can be found."""
    return msvc_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
{
"content_hash": "1a178b250beb35dad71183e8f936d652",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 149,
"avg_line_length": 42.30503144654088,
"alnum_prop": 0.6708540845907975,
"repo_name": "Dwii/Master-Thesis",
"id": "ac533c8dcae6d87507c1d985913133221987b4e2",
"size": "13453",
"binary": false,
"copies": "21",
"ref": "refs/heads/master",
"path": "implementation/Palabos/palabos-master/scons/scons-local-2.1.0/SCons/Tool/mslink.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "192440"
},
{
"name": "C++",
"bytes": "18502050"
},
{
"name": "CMake",
"bytes": "9101"
},
{
"name": "Cuda",
"bytes": "388105"
},
{
"name": "Java",
"bytes": "83712"
},
{
"name": "Makefile",
"bytes": "256963"
},
{
"name": "Matlab",
"bytes": "11973"
},
{
"name": "Python",
"bytes": "1894038"
},
{
"name": "R",
"bytes": "1015"
},
{
"name": "Shell",
"bytes": "17047"
},
{
"name": "TeX",
"bytes": "243040"
}
],
"symlink_target": ""
}
|
# Example: using a sqlite3 connection as a context manager to scope a
# transaction (Python 2 print syntax).
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("create table person (id integer primary key, firstname varchar unique)")

# Successful, con.commit() is called automatically afterwards
with con:
    con.execute("insert into person(firstname) values (?)", ("Joe",))

# con.rollback() is called after the with block finishes with an exception, the
# exception is still raised and must be caught
try:
    with con:
        # Second insert of the same name violates the UNIQUE constraint.
        con.execute("insert into person(firstname) values (?)", ("Joe",))
except sqlite3.IntegrityError:
    print "couldn't add Joe twice"
|
{
"content_hash": "7f6c49f511c1c8cd72b7cb4bdca98ddb",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 85,
"avg_line_length": 35.375,
"alnum_prop": 0.7208480565371025,
"repo_name": "mollstam/UnrealPy",
"id": "d6f27e6b2282adaf1909925ea8043a79ffa8f793",
"size": "566",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Doc/includes/sqlite3/ctx_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "APL",
"bytes": "587"
},
{
"name": "ASP",
"bytes": "2753"
},
{
"name": "ActionScript",
"bytes": "5686"
},
{
"name": "Ada",
"bytes": "94225"
},
{
"name": "Agda",
"bytes": "3154"
},
{
"name": "Alloy",
"bytes": "6579"
},
{
"name": "ApacheConf",
"bytes": "12482"
},
{
"name": "AppleScript",
"bytes": "421"
},
{
"name": "Assembly",
"bytes": "1093261"
},
{
"name": "AutoHotkey",
"bytes": "3733"
},
{
"name": "AutoIt",
"bytes": "667"
},
{
"name": "Awk",
"bytes": "63276"
},
{
"name": "Batchfile",
"bytes": "147828"
},
{
"name": "BlitzBasic",
"bytes": "185102"
},
{
"name": "BlitzMax",
"bytes": "2387"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "Bro",
"bytes": "7337"
},
{
"name": "C",
"bytes": "108397183"
},
{
"name": "C#",
"bytes": "156749"
},
{
"name": "C++",
"bytes": "13535833"
},
{
"name": "CLIPS",
"bytes": "6933"
},
{
"name": "CMake",
"bytes": "12441"
},
{
"name": "COBOL",
"bytes": "114812"
},
{
"name": "CSS",
"bytes": "430375"
},
{
"name": "Ceylon",
"bytes": "1387"
},
{
"name": "Chapel",
"bytes": "4366"
},
{
"name": "Cirru",
"bytes": "2574"
},
{
"name": "Clean",
"bytes": "9679"
},
{
"name": "Clojure",
"bytes": "23871"
},
{
"name": "CoffeeScript",
"bytes": "20149"
},
{
"name": "ColdFusion",
"bytes": "9006"
},
{
"name": "Common Lisp",
"bytes": "49017"
},
{
"name": "Coq",
"bytes": "66"
},
{
"name": "Cucumber",
"bytes": "390"
},
{
"name": "Cuda",
"bytes": "776"
},
{
"name": "D",
"bytes": "7556"
},
{
"name": "DIGITAL Command Language",
"bytes": "425938"
},
{
"name": "DTrace",
"bytes": "6706"
},
{
"name": "Dart",
"bytes": "591"
},
{
"name": "Dylan",
"bytes": "6343"
},
{
"name": "Ecl",
"bytes": "2599"
},
{
"name": "Eiffel",
"bytes": "2145"
},
{
"name": "Elixir",
"bytes": "4340"
},
{
"name": "Emacs Lisp",
"bytes": "18303"
},
{
"name": "Erlang",
"bytes": "5746"
},
{
"name": "F#",
"bytes": "19156"
},
{
"name": "FORTRAN",
"bytes": "38458"
},
{
"name": "Factor",
"bytes": "10194"
},
{
"name": "Fancy",
"bytes": "2581"
},
{
"name": "Fantom",
"bytes": "25331"
},
{
"name": "GAP",
"bytes": "29880"
},
{
"name": "GLSL",
"bytes": "450"
},
{
"name": "Gnuplot",
"bytes": "11501"
},
{
"name": "Go",
"bytes": "5444"
},
{
"name": "Golo",
"bytes": "1649"
},
{
"name": "Gosu",
"bytes": "2853"
},
{
"name": "Groff",
"bytes": "3458639"
},
{
"name": "Groovy",
"bytes": "2586"
},
{
"name": "HTML",
"bytes": "92126540"
},
{
"name": "Haskell",
"bytes": "49593"
},
{
"name": "Haxe",
"bytes": "16812"
},
{
"name": "Hy",
"bytes": "7237"
},
{
"name": "IDL",
"bytes": "2098"
},
{
"name": "Idris",
"bytes": "2771"
},
{
"name": "Inform 7",
"bytes": "1944"
},
{
"name": "Inno Setup",
"bytes": "18796"
},
{
"name": "Ioke",
"bytes": "469"
},
{
"name": "Isabelle",
"bytes": "21392"
},
{
"name": "Jasmin",
"bytes": "9428"
},
{
"name": "Java",
"bytes": "4040623"
},
{
"name": "JavaScript",
"bytes": "223927"
},
{
"name": "Julia",
"bytes": "27687"
},
{
"name": "KiCad",
"bytes": "475"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LSL",
"bytes": "160"
},
{
"name": "Lasso",
"bytes": "18650"
},
{
"name": "Lean",
"bytes": "6921"
},
{
"name": "Limbo",
"bytes": "9891"
},
{
"name": "Liquid",
"bytes": "862"
},
{
"name": "LiveScript",
"bytes": "972"
},
{
"name": "Logos",
"bytes": "19509"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Makefile",
"bytes": "2053844"
},
{
"name": "Mask",
"bytes": "815"
},
{
"name": "Mathematica",
"bytes": "191"
},
{
"name": "Max",
"bytes": "296"
},
{
"name": "Modelica",
"bytes": "6213"
},
{
"name": "Modula-2",
"bytes": "23838"
},
{
"name": "Module Management System",
"bytes": "14798"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "3343"
},
{
"name": "MoonScript",
"bytes": "14862"
},
{
"name": "Myghty",
"bytes": "3939"
},
{
"name": "NSIS",
"bytes": "7663"
},
{
"name": "Nemerle",
"bytes": "1517"
},
{
"name": "NewLisp",
"bytes": "42726"
},
{
"name": "Nimrod",
"bytes": "37191"
},
{
"name": "Nit",
"bytes": "55581"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "104883"
},
{
"name": "Objective-J",
"bytes": "15340"
},
{
"name": "Opa",
"bytes": "172"
},
{
"name": "OpenEdge ABL",
"bytes": "49943"
},
{
"name": "PAWN",
"bytes": "6555"
},
{
"name": "PHP",
"bytes": "68611"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Pascal",
"bytes": "349743"
},
{
"name": "Perl",
"bytes": "5931502"
},
{
"name": "Perl6",
"bytes": "113623"
},
{
"name": "PigLatin",
"bytes": "6657"
},
{
"name": "Pike",
"bytes": "8479"
},
{
"name": "PostScript",
"bytes": "18216"
},
{
"name": "PowerShell",
"bytes": "14236"
},
{
"name": "Prolog",
"bytes": "43750"
},
{
"name": "Protocol Buffer",
"bytes": "3401"
},
{
"name": "Puppet",
"bytes": "130"
},
{
"name": "Python",
"bytes": "122886305"
},
{
"name": "QML",
"bytes": "3912"
},
{
"name": "R",
"bytes": "49247"
},
{
"name": "Racket",
"bytes": "11341"
},
{
"name": "Rebol",
"bytes": "17708"
},
{
"name": "Red",
"bytes": "10536"
},
{
"name": "Redcode",
"bytes": "830"
},
{
"name": "Ruby",
"bytes": "91403"
},
{
"name": "Rust",
"bytes": "6788"
},
{
"name": "SAS",
"bytes": "15603"
},
{
"name": "SaltStack",
"bytes": "1040"
},
{
"name": "Scala",
"bytes": "730"
},
{
"name": "Scheme",
"bytes": "50346"
},
{
"name": "Scilab",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "2925518"
},
{
"name": "ShellSession",
"bytes": "320"
},
{
"name": "Smali",
"bytes": "832"
},
{
"name": "Smalltalk",
"bytes": "158636"
},
{
"name": "Smarty",
"bytes": "523"
},
{
"name": "SourcePawn",
"bytes": "130"
},
{
"name": "Standard ML",
"bytes": "36869"
},
{
"name": "Swift",
"bytes": "2035"
},
{
"name": "SystemVerilog",
"bytes": "265"
},
{
"name": "Tcl",
"bytes": "6077233"
},
{
"name": "TeX",
"bytes": "487999"
},
{
"name": "Tea",
"bytes": "391"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "VHDL",
"bytes": "4446"
},
{
"name": "VimL",
"bytes": "32053"
},
{
"name": "Visual Basic",
"bytes": "19441"
},
{
"name": "XQuery",
"bytes": "4289"
},
{
"name": "XS",
"bytes": "178055"
},
{
"name": "XSLT",
"bytes": "1995174"
},
{
"name": "Xtend",
"bytes": "727"
},
{
"name": "Yacc",
"bytes": "25665"
},
{
"name": "Zephir",
"bytes": "485"
},
{
"name": "eC",
"bytes": "31545"
},
{
"name": "mupad",
"bytes": "2442"
},
{
"name": "nesC",
"bytes": "23697"
},
{
"name": "xBase",
"bytes": "3349"
}
],
"symlink_target": ""
}
|
import os
import pytest
from pymt import models
# Collect every model class exported by pymt, sorted by class name so the
# parametrized test IDs below are deterministic.
MODELS = [models.__dict__[name] for name in models.__all__]
MODELS.sort(key=lambda item: item.__name__)
@pytest.mark.parametrize("cls", MODELS)
def test_model_setup(cls):
    """setup() returns a (file name, directory) pair pointing at an existing file."""
    model = cls()
    args = model.setup()
    # args[0] is treated as a file name inside directory args[1].
    assert os.path.isfile(os.path.join(args[1], args[0]))
@pytest.mark.parametrize("cls", MODELS)
def test_model_initialize(cls):
    """initialize() records the init directory and sets the initialized flag."""
    model = cls()
    args = model.setup()
    model.initialize(*args)
    assert model.initdir == args[1]
    assert model._initialized
@pytest.mark.parametrize("cls", MODELS)
def test_model_update(cls):
    """update() runs without raising on a freshly initialized model."""
    model = cls()
    model.initialize(*model.setup())
    model.update()
    # NOTE(review): the time-advance assertion is disabled, so this test only
    # checks that update() does not raise — confirm whether model.time should
    # be expected to advance here.
    # assert model.time > model.start_time
@pytest.mark.parametrize("cls", MODELS)
def test_model_finalize(cls):
    """finalize() clears the initialized flag after a full setup/update cycle."""
    model = cls()
    model.initialize(*model.setup())
    model.update()
    model.finalize()
    assert not model._initialized
|
{
"content_hash": "4530b06586e4f4f6f820a620995d9772",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 59,
"avg_line_length": 21.53488372093023,
"alnum_prop": 0.6684665226781857,
"repo_name": "csdms/coupling",
"id": "eb4d9551632711e1a96e7ae7e71acfe096ce0c02",
"size": "949",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "469855"
},
{
"name": "Shell",
"bytes": "1528"
}
],
"symlink_target": ""
}
|
import os
import sys
from django.utils.importlib import import_module
def setup_environ(dunder_file=None, project_path=None, relative_project_path=None, settings_path=None):
    """Prepare sys.path and DJANGO_SETTINGS_MODULE for a Pinax project.

    Exactly one of *dunder_file* (the calling module's ``__file__``) or
    *project_path* identifies the project directory; with *dunder_file*,
    *relative_project_path* (a sequence of path segments) may locate the
    project relative to that file.  *settings_path*, when given, overrides
    the derived ``"<project>.settings"`` module path.

    Side effects: may set ``os.environ["DJANGO_SETTINGS_MODULE"]``,
    imports the project package, and prepends the project's ``apps``
    directory to ``sys.path``.
    """
    assert not (dunder_file and project_path), ("You must not specify both "
        "__file__ and project_path")

    if dunder_file is not None:
        file_path = os.path.abspath(os.path.dirname(dunder_file))
        if relative_project_path is not None:
            project_path = os.path.abspath(os.path.join(file_path, *relative_project_path))
        else:
            project_path = file_path

    # the basename must be the project name and importable.
    project_name = os.path.basename(project_path)

    # setup Django correctly (the hard-coding of settings is only temporary.
    # carljm's proposal will remove that)
    if settings_path is None:
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ["DJANGO_SETTINGS_MODULE"] = "%s.settings" % project_name
    else:
        os.environ["DJANGO_SETTINGS_MODULE"] = settings_path

    # ensure the importability of the project; pop the parent dir again even
    # when the import fails, so a failed call does not leave sys.path
    # polluted (the original code skipped the pop on an import error).
    sys.path.append(os.path.join(project_path, os.pardir))
    try:
        import_module(project_name)
    finally:
        sys.path.pop()

    # Pinax adds an app directory for users as a reliable location for
    # Django apps
    sys.path.insert(0, os.path.join(project_path, "apps"))
|
{
"content_hash": "5f122c23b23d3149dc3012a14449fb02",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 103,
"avg_line_length": 37.97222222222222,
"alnum_prop": 0.6708119970738844,
"repo_name": "espenak/pinax-oldversion-backup",
"id": "89841027a30a9bd0de47d673fef8948d6a506ef1",
"size": "1367",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pinax/env.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Tag model and add a many-to-many ``tags`` field to Recipe."""

    dependencies = [
        ('recipes', '0002_recipecollection_title'),
    ]

    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, primary_key=True, auto_created=True)),
                ('title', models.CharField(max_length=255)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Recipes gain a reverse accessor 'recipes' from Tag.
        migrations.AddField(
            model_name='recipe',
            name='tags',
            field=models.ManyToManyField(to='recipes.Tag', related_name='recipes'),
            preserve_default=True,
        ),
    ]
|
{
"content_hash": "50759aa2b0d7cebab0d25a72bbfeaa0b",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 114,
"avg_line_length": 28.428571428571427,
"alnum_prop": 0.5402010050251256,
"repo_name": "agnethesoraa/recipemaster",
"id": "1bbbd969c967d9d32547d814938484f498aacb8b",
"size": "820",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipemaster/recipes/migrations/0003_auto_20150325_2130.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1562"
},
{
"name": "HTML",
"bytes": "11795"
},
{
"name": "Python",
"bytes": "33132"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.http import Http404
from rest_framework import serializers
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import AllowAny
from rest_registration.decorators import (
api_view_serializer_class,
api_view_serializer_class_getter
)
from rest_registration.exceptions import UserNotFound
from rest_registration.notifications.email import (
send_verification_notification
)
from rest_registration.notifications.enums import NotificationType
from rest_registration.settings import registration_settings
from rest_registration.utils.responses import get_ok_response
from rest_registration.utils.users import (
get_user_by_verification_id,
get_user_verification_id
)
from rest_registration.utils.verification import verify_signer_or_bad_request
from rest_registration.verification import URLParamsSigner
class ResetPasswordSigner(URLParamsSigner):
    """Signs and validates the URL parameters of a reset-password link.

    The signature is timestamped and valid for
    RESET_PASSWORD_VERIFICATION_PERIOD.
    """

    SALT_BASE = 'reset-password'
    USE_TIMESTAMP = True

    def get_base_url(self):
        # URL of the view that will consume the signed parameters.
        return registration_settings.RESET_PASSWORD_VERIFICATION_URL

    def get_valid_period(self):
        return registration_settings.RESET_PASSWORD_VERIFICATION_PERIOD

    def _calculate_salt(self, data):
        if registration_settings.RESET_PASSWORD_VERIFICATION_ONE_TIME_USE:
            user = get_user_by_verification_id(
                data['user_id'], require_verified=False)
            user_password_hash = user.password
            # Use current user password hash as a part of the salt.
            # If the password gets changed, then assume that the change
            # was caused by previous password reset and the signature
            # is not valid anymore because changed password hash implies
            # changed salt used when verifying the input data.
            salt = '{self.SALT_BASE}:{user_password_hash}'.format(
                self=self, user_password_hash=user_password_hash)
        else:
            salt = self.SALT_BASE
        return salt
@api_view_serializer_class_getter(
    lambda: registration_settings.SEND_RESET_PASSWORD_LINK_SERIALIZER_CLASS)
@api_view(['POST'])
@permission_classes([AllowAny])
def send_reset_password_link(request):
    '''
    Send email with reset password link.

    Responds 404 when the reset-password feature is disabled, raises
    UserNotFound when the serializer cannot resolve a user, and otherwise
    sends a notification carrying a signed reset link and returns OK.
    '''
    if not registration_settings.RESET_PASSWORD_VERIFICATION_ENABLED:
        raise Http404()
    serializer_class = registration_settings.SEND_RESET_PASSWORD_LINK_SERIALIZER_CLASS  # noqa: E501
    serializer = serializer_class(
        data=request.data,
        context={'request': request},
    )
    serializer.is_valid(raise_exception=True)
    user = serializer.get_user_or_none()
    if not user:
        raise UserNotFound()
    # The signed parameters identify the user; the signature is timestamped
    # (and may incorporate the password hash — see ResetPasswordSigner).
    signer = ResetPasswordSigner({
        'user_id': get_user_verification_id(user),
    }, request=request)
    template_config_data = registration_settings.RESET_PASSWORD_VERIFICATION_EMAIL_TEMPLATES  # noqa: E501
    notification_data = {
        'params_signer': signer,
    }
    send_verification_notification(
        NotificationType.RESET_PASSWORD_VERIFICATION, user, notification_data,
        template_config_data)
    return get_ok_response('Reset link sent')
class ResetPasswordSerializer(serializers.Serializer):  # noqa: E501 pylint: disable=abstract-method
    """Input payload for the reset-password endpoint: the signed link
    parameters (``user_id``, ``timestamp``, ``signature``) plus the new
    ``password`` to set."""
    user_id = serializers.CharField(required=True)
    timestamp = serializers.IntegerField(required=True)
    signature = serializers.CharField(required=True)
    password = serializers.CharField(required=True)
@api_view_serializer_class(ResetPasswordSerializer)
@api_view(['POST'])
@permission_classes([AllowAny])
def reset_password(request):
    '''
    Reset password, given the signature and timestamp from the link.
    '''
    # Thin HTTP wrapper; validation and persistence live in the helper.
    serializer_context = {'request': request}
    process_reset_password_data(request.data,
                                serializer_context=serializer_context)
    return get_ok_response('Reset password successful')
def process_reset_password_data(input_data, serializer_context=None):
    """Validate a reset-password payload, verify its signature, and set
    the user's new password.

    Raises ``Http404`` when the feature is disabled, a bad-request error
    when the signature does not verify, and a serializer
    ``ValidationError`` when the new password fails validation.
    """
    context = serializer_context if serializer_context is not None else {}
    if not registration_settings.RESET_PASSWORD_VERIFICATION_ENABLED:
        raise Http404()
    reset_serializer = ResetPasswordSerializer(
        data=input_data,
        context=context,
    )
    reset_serializer.is_valid(raise_exception=True)
    # Split the payload: the password is set, the rest is the signed data.
    signed_data = reset_serializer.validated_data.copy()
    new_password = signed_data.pop('password')
    verify_signer_or_bad_request(ResetPasswordSigner(signed_data))
    user = get_user_by_verification_id(
        signed_data['user_id'], require_verified=False)
    try:
        validate_password(new_password, user=user)
    except ValidationError as exc:
        raise serializers.ValidationError(exc.messages[0])
    user.set_password(new_password)
    user.save()
|
{
"content_hash": "265d4ac7dd65e32ca7d55220d5d4df38",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 106,
"avg_line_length": 37.24031007751938,
"alnum_prop": 0.7214820982514571,
"repo_name": "szopu/django-rest-registration",
"id": "6fe46a9312cab4ae6edd430682a44ba6025e4ae7",
"size": "4804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest_registration/api/views/reset_password.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "106105"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
}
|
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
from unicore.webhooks import models
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = models.Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.
    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.
    Calls to context.execute() here emit the given string to the
    script output.
    """
    # No Engine/DBAPI needed: configure straight from the configured URL.
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.
    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # Build the Engine from the [sqlalchemy.*] ini section; NullPool since
    # migrations need exactly one short-lived connection.
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
# Alembic executes this module directly; choose the migration strategy
# based on whether a live database connection is available.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
{
"content_hash": "63d8d78203f7dc28ad5123023b891103",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 69,
"avg_line_length": 28.757142857142856,
"alnum_prop": 0.7098857426726279,
"repo_name": "universalcore/unicore.distribute",
"id": "cc8d634e0c14cac6907f1fe9949678609c88a972",
"size": "2013",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "unicore/distribute/alembic/env.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Mako",
"bytes": "518"
},
{
"name": "Python",
"bytes": "86148"
},
{
"name": "Shell",
"bytes": "124"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from utils import get_random_entry
from models import ExampleModel
class ExampleAppTest(TestCase):
    """Tests for the example app's ``get_random_entry`` helper."""

    def test_get_random_entry_test(self):
        """A random entry is an ExampleModel with non-empty fields and a
        positive number."""
        entry = get_random_entry()
        # Fix: assertEquals is a deprecated alias of assertEqual
        # (removed from unittest in Python 3.12).
        self.assertEqual(type(entry), ExampleModel)
        self.assertGreater(len(entry.name), 0)
        self.assertGreater(len(entry.description), 0)
        self.assertGreater(entry.number, 0)
|
{
"content_hash": "ca3da1625fe92ce64d22215c750ee452",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 47,
"avg_line_length": 29.307692307692307,
"alnum_prop": 0.7690288713910761,
"repo_name": "tnajdek/django-kendoui-backend",
"id": "67c56f37681e46e0ba6a864a47800bb8dc1d912c",
"size": "381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/app/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2266"
},
{
"name": "Python",
"bytes": "60861"
}
],
"symlink_target": ""
}
|
import base64
import copy
import datetime
import functools
import iso8601
import os
import string
import tempfile
import fixtures
import mock
from oslo.config import cfg
from nova.api.ec2 import cloud
from nova.api.ec2 import ec2utils
from nova.api.ec2 import inst_state
from nova.api.metadata import password
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.image import s3
from nova.network import api as network_api
from nova.network import base_api as base_network_api
from nova.network import model
from nova.network import neutronv2
from nova.objects import instance as instance_obj
from nova.objects import instance_info_cache as instance_info_cache_obj
from nova.objects import security_group as security_group_obj
from nova.openstack.common import log as logging
from nova.openstack.common import policy as common_policy
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.api.openstack.compute.contrib import (
test_neutron_security_groups as test_neutron)
from nova.tests import cast_as_call
from nova.tests import fake_block_device
from nova.tests import fake_network
from nova.tests import fake_utils
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils
from nova.virt import fake as fake_virt
from nova import volume
CONF = cfg.CONF
CONF.import_opt('compute_driver', 'nova.virt.driver')
CONF.import_opt('default_flavor', 'nova.compute.flavors')
CONF.import_opt('use_ipv6', 'nova.netconf')
LOG = logging.getLogger(__name__)
HOST = "testhost"
def get_fake_cache(get_floating):
    """Build a fake hydrated NetworkInfo for tests.

    When *get_floating* is true, the first fixed IP also carries two
    floating IPs; an IPv6 subnet is appended when CONF.use_ipv6 is set.
    """
    def _ip(address, fixed=True, floats=None):
        entry = {
            'address': address,
            'type': 'fixed' if fixed else 'floating',
        }
        if fixed and floats:
            entry['floating_ips'] = [_ip(f, fixed=False) for f in floats]
        return entry

    if get_floating:
        ip_info = [
            _ip('192.168.0.3', floats=['1.2.3.4', '5.6.7.8']),
            _ip('192.168.0.4'),
        ]
    else:
        ip_info = [_ip('192.168.0.3'), _ip('192.168.0.4')]
    vif = {'address': 'aa:bb:cc:dd:ee:ff',
           'id': 1,
           'network': {'bridge': 'br0',
                       'id': 1,
                       'label': 'private',
                       'subnets': [{'cidr': '192.168.0.0/24',
                                    'ips': ip_info}]}}
    info = [vif]
    if CONF.use_ipv6:
        vif['network']['subnets'].append(
            {'cidr': 'fe80:b33f::/64',
             'ips': [_ip('fe80:b33f::a8bb:ccff:fedd:eeff')]})
    return model.NetworkInfo.hydrate(info)
def get_instances_with_cached_ips(orig_func, get_floating,
                                  *args, **kwargs):
    """Kludge the cache into instance(s) without having to create DB
    entries
    """
    fetched = orig_func(*args, **kwargs)
    # Objects path gets a real InstanceInfoCache; dict path gets a plain
    # mapping with the same key.
    if kwargs.get('want_objects', False):
        cache = instance_info_cache_obj.InstanceInfoCache()
        cache.network_info = get_fake_cache(get_floating)
        cache.obj_reset_changes()
    else:
        cache = {'network_info': get_fake_cache(get_floating)}
    if isinstance(fetched, (list, instance_obj.InstanceList)):
        for inst in fetched:
            inst['info_cache'] = cache
    else:
        fetched['info_cache'] = cache
    return fetched
class CloudTestCase(test.TestCase):
def setUp(self):
super(CloudTestCase, self).setUp()
self.useFixture(test.SampleNetworks())
ec2utils.reset_cache()
self.flags(compute_driver='nova.virt.fake.FakeDriver',
volume_api_class='nova.tests.fake_volume.API')
self.useFixture(fixtures.FakeLogger('boto'))
fake_utils.stub_out_utils_spawn_n(self.stubs)
def fake_show(meh, context, id):
return {'id': id,
'name': 'fake_name',
'container_format': 'ami',
'status': 'active',
'properties': {
'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'type': 'machine',
'image_state': 'available'}}
def fake_detail(_self, context, **kwargs):
image = fake_show(None, context, None)
image['name'] = kwargs.get('filters', {}).get('name')
return [image]
self.stubs.Set(fake._FakeImageService, 'show', fake_show)
self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
fake.stub_out_image_service(self.stubs)
def dumb(*args, **kwargs):
pass
self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
fake_network.set_stub_network_methods(self.stubs)
# set up our cloud
self.cloud = cloud.CloudController()
self.flags(scheduler_driver='nova.scheduler.chance.ChanceScheduler')
# Short-circuit the conductor service
self.flags(use_local=True, group='conductor')
# set up services
self.conductor = self.start_service('conductor',
manager=CONF.conductor.manager)
self.compute = self.start_service('compute')
self.scheduler = self.start_service('scheduler')
self.network = self.start_service('network')
self.consoleauth = self.start_service('consoleauth')
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id,
self.project_id,
is_admin=True)
self.volume_api = volume.API()
self.useFixture(cast_as_call.CastAsCall(self.stubs))
# make sure we can map ami-00000001/2 to a uuid in FakeImageService
db.s3_image_create(self.context,
'cedef40a-ed67-4d10-800e-17455edce175')
db.s3_image_create(self.context,
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6')
def tearDown(self):
self.volume_api.reset_fake_api(self.context)
super(CloudTestCase, self).tearDown()
fake.FakeImageService_reset()
def fake_get_target(obj, iqn):
return 1
def fake_remove_iscsi_target(obj, tid, lun, vol_id, **kwargs):
pass
def _stub_instance_get_with_fixed_ips(self,
func_name, get_floating=True):
orig_func = getattr(self.cloud.compute_api, func_name)
def fake_get(*args, **kwargs):
return get_instances_with_cached_ips(orig_func, get_floating,
*args, **kwargs)
self.stubs.Set(self.cloud.compute_api, func_name, fake_get)
def _create_key(self, name):
# NOTE(vish): create depends on pool, so just call helper directly
keypair_api = compute_api.KeypairAPI()
return keypair_api.create_key_pair(self.context, self.context.user_id,
name)
def test_describe_regions(self):
# Makes sure describe regions runs without raising an exception.
result = self.cloud.describe_regions(self.context)
self.assertEqual(len(result['regionInfo']), 1)
self.flags(region_list=["one=test_host1", "two=test_host2"])
result = self.cloud.describe_regions(self.context)
self.assertEqual(len(result['regionInfo']), 2)
def test_describe_addresses(self):
# Makes sure describe addresses runs without raising an exception.
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.flags(network_api_class='nova.network.api.API')
self.cloud.allocate_address(self.context)
self.cloud.describe_addresses(self.context)
self.cloud.release_address(self.context,
public_ip=address)
db.floating_ip_destroy(self.context, address)
def test_describe_addresses_in_neutron(self):
# Makes sure describe addresses runs without raising an exception.
address = "10.10.10.10"
self.flags(network_api_class='nova.network.neutronv2.api.API')
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
self.cloud.describe_addresses(self.context)
self.cloud.release_address(self.context,
public_ip=address)
db.floating_ip_destroy(self.context, address)
def test_describe_specific_address(self):
# Makes sure describe specific address works.
addresses = ["10.10.10.10", "10.10.10.11"]
for address in addresses:
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
result = self.cloud.describe_addresses(self.context)
self.assertEqual(len(result['addressesSet']), 2)
result = self.cloud.describe_addresses(self.context,
public_ip=['10.10.10.10'])
self.assertEqual(len(result['addressesSet']), 1)
for address in addresses:
self.cloud.release_address(self.context,
public_ip=address)
db.floating_ip_destroy(self.context, address)
def test_allocate_address(self):
address = "10.10.10.10"
allocate = self.cloud.allocate_address
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.assertEqual(allocate(self.context)['publicIp'], address)
db.floating_ip_destroy(self.context, address)
self.assertRaises(exception.NoMoreFloatingIps,
allocate,
self.context)
def test_release_address(self):
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova',
'project_id': self.project_id})
result = self.cloud.release_address(self.context, address)
self.assertEqual(result.get('return', None), 'true')
def test_associate_disassociate_address(self):
# Verifies associate runs cleanly without raising an exception.
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
# TODO(jkoelker) Probably need to query for instance_type_id and
# make sure we get a valid one
inst = db.instance_create(self.context, {'host': self.compute.host,
'display_name': HOST,
'instance_type_id': 1})
networks = db.network_get_all(self.context)
for network in networks:
db.network_update(self.context, network['id'],
{'host': self.network.host})
project_id = self.context.project_id
nw_info = self.network.allocate_for_instance(self.context,
instance_id=inst['id'],
instance_uuid=inst['uuid'],
host=inst['host'],
vpn=None,
rxtx_factor=3,
project_id=project_id,
macs=None)
fixed_ips = nw_info.fixed_ips()
ec2_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
self.stubs.Set(ec2utils, 'get_ip_info_for_instance',
lambda *args: {'fixed_ips': ['10.0.0.1'],
'fixed_ip6s': [],
'floating_ips': []})
self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
lambda *args: 1)
def fake_update_instance_cache_with_nw_info(api, context, instance,
nw_info=None,
update_cells=True):
return
self.stubs.Set(base_network_api, "update_instance_cache_with_nw_info",
fake_update_instance_cache_with_nw_info)
self.cloud.associate_address(self.context,
instance_id=ec2_id,
public_ip=address)
self.cloud.disassociate_address(self.context,
public_ip=address)
self.cloud.release_address(self.context,
public_ip=address)
self.network.deallocate_fixed_ip(self.context, fixed_ips[0]['address'],
inst['host'])
db.instance_destroy(self.context, inst['uuid'])
db.floating_ip_destroy(self.context, address)
def test_disassociate_auto_assigned_address(self):
"""Verifies disassociating auto assigned floating IP
raises an exception
"""
address = "10.10.10.10"
def fake_get(*args, **kwargs):
pass
def fake_disassociate_floating_ip(*args, **kwargs):
raise exception.CannotDisassociateAutoAssignedFloatingIP()
self.stubs.Set(network_api.API, 'get_instance_id_by_floating_address',
lambda *args: 1)
self.stubs.Set(self.cloud.compute_api, 'get', fake_get)
self.stubs.Set(network_api.API, 'disassociate_floating_ip',
fake_disassociate_floating_ip)
self.assertRaises(exception.CannotDisassociateAutoAssignedFloatingIP,
self.cloud.disassociate_address,
self.context, public_ip=address)
def test_disassociate_unassociated_address(self):
address = "10.10.10.10"
db.floating_ip_create(self.context,
{'address': address,
'pool': 'nova'})
self.cloud.allocate_address(self.context)
self.cloud.describe_addresses(self.context)
result = self.cloud.disassociate_address(self.context,
public_ip=address)
self.assertEqual(result['return'], 'true')
db.floating_ip_destroy(self.context, address)
def test_describe_security_groups(self):
# Makes sure describe_security_groups works and filters results.
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
result = self.cloud.describe_security_groups(self.context)
# NOTE(vish): should have the default group as well
self.assertEqual(len(result['securityGroupInfo']), 2)
result = self.cloud.describe_security_groups(self.context,
group_name=[sec['name']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
sec['name'])
db.security_group_destroy(self.context, sec['id'])
def test_describe_security_groups_all_tenants(self):
# Makes sure describe_security_groups works and filters results.
sec = db.security_group_create(self.context,
{'project_id': 'foobar',
'name': 'test'})
def _check_name(result, i, expected):
self.assertEqual(result['securityGroupInfo'][i]['groupName'],
expected)
# include all tenants
filter = [{'name': 'all-tenants', 'value': {'1': 1}}]
result = self.cloud.describe_security_groups(self.context,
filter=filter)
self.assertEqual(len(result['securityGroupInfo']), 2)
_check_name(result, 0, 'default')
_check_name(result, 1, sec['name'])
# exclude all tenants
filter = [{'name': 'all-tenants', 'value': {'1': 0}}]
result = self.cloud.describe_security_groups(self.context,
filter=filter)
self.assertEqual(len(result['securityGroupInfo']), 1)
_check_name(result, 0, 'default')
# default all tenants
result = self.cloud.describe_security_groups(self.context)
self.assertEqual(len(result['securityGroupInfo']), 1)
_check_name(result, 0, 'default')
db.security_group_destroy(self.context, sec['id'])
def test_describe_security_groups_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
result = self.cloud.describe_security_groups(self.context,
group_id=[sec['id']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
sec['name'])
default = db.security_group_get_by_name(self.context,
self.context.project_id,
'default')
result = self.cloud.describe_security_groups(self.context,
group_id=[default['id']])
self.assertEqual(len(result['securityGroupInfo']), 1)
self.assertEqual(
result['securityGroupInfo'][0]['groupName'],
'default')
db.security_group_destroy(self.context, sec['id'])
def test_create_delete_security_group(self):
descript = 'test description'
create = self.cloud.create_security_group
result = create(self.context, 'testgrp', descript)
group_descript = result['securityGroupSet'][0]['groupDescription']
self.assertEqual(descript, group_descript)
delete = self.cloud.delete_security_group
self.assertTrue(delete(self.context, 'testgrp'))
def test_security_group_quota_limit(self):
self.flags(quota_security_groups=10)
for i in range(1, CONF.quota_security_groups):
name = 'test name %i' % i
descript = 'test description %i' % i
create = self.cloud.create_security_group
result = create(self.context, name, descript)
# 11'th group should fail
self.assertRaises(exception.SecurityGroupLimitExceeded,
create, self.context, 'foo', 'bar')
def test_delete_security_group_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
delete = self.cloud.delete_security_group
self.assertTrue(delete(self.context, group_id=sec['id']))
def test_delete_security_group_with_bad_name(self):
delete = self.cloud.delete_security_group
notfound = exception.SecurityGroupNotFound
self.assertRaises(notfound, delete, self.context, 'badname')
def test_delete_security_group_with_bad_group_id(self):
delete = self.cloud.delete_security_group
notfound = exception.SecurityGroupNotFound
self.assertRaises(notfound, delete, self.context, group_id=999)
def test_delete_security_group_no_params(self):
delete = self.cloud.delete_security_group
self.assertRaises(exception.MissingParameter, delete, self.context)
def test_delete_security_group_policy_not_allowed(self):
rules = common_policy.Rules(
{'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')})
common_policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.delete_security_group, self.context,
'fake-name', 'fake-id')
def test_authorize_security_group_ingress_policy_not_allowed(self):
rules = common_policy.Rules(
{'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')})
common_policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.authorize_security_group_ingress, self.context,
'fake-name', 'fake-id')
def test_authorize_security_group_ingress(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_authorize_security_group_ingress_ip_permissions_ip_ranges(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'ip_ranges':
{'1': {'cidr_ip': u'0.0.0.0/0'},
'2': {'cidr_ip': u'10.10.10.10/32'}},
'ip_protocol': u'tcp'}]}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_authorize_security_group_fail_missing_source_group(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'ip_ranges': {'1': {'cidr_ip': u'0.0.0.0/0'},
'2': {'cidr_ip': u'10.10.10.10/32'}},
'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'}},
'ip_protocol': u'tcp'}]}
self.assertRaises(exception.SecurityGroupNotFound, authz,
self.context, group_name=sec['name'], **kwargs)
def test_authorize_security_group_ingress_ip_permissions_groups(self):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'name': 'test'
}
sec = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'somegroup1'})
sec = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'othergroup2'})
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [{'to_port': 81, 'from_port': 81,
'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'},
'2': {'user_id': u'someuser',
'group_name': u'othergroup2'}},
'ip_protocol': u'tcp'}]}
self.assertTrue(authz(self.context, group_name=sec['name'], **kwargs))
def test_describe_security_group_ingress_groups(self):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'name': 'test'
}
sec1 = db.security_group_create(self.context, kwargs)
sec2 = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'somegroup1'})
sec3 = db.security_group_create(self.context,
{'project_id': 'someuser',
'user_id': 'someuser',
'description': '',
'name': 'othergroup2'})
authz = self.cloud.authorize_security_group_ingress
kwargs = {'ip_permissions': [
{'groups': {'1': {'user_id': u'someuser',
'group_name': u'somegroup1'}}},
{'ip_protocol': 'tcp',
'from_port': 80,
'to_port': 80,
'groups': {'1': {'user_id': u'someuser',
'group_name': u'othergroup2'}}}]}
self.assertTrue(authz(self.context, group_name=sec1['name'], **kwargs))
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
self.assertEqual(len(groups['securityGroupInfo']), 1)
actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
self.assertEqual(len(actual_rules), 4)
expected_rules = [{'fromPort': -1,
'groups': [{'groupName': 'somegroup1',
'userId': 'someuser'}],
'ipProtocol': 'icmp',
'ipRanges': [],
'toPort': -1},
{'fromPort': 1,
'groups': [{'groupName': u'somegroup1',
'userId': u'someuser'}],
'ipProtocol': 'tcp',
'ipRanges': [],
'toPort': 65535},
{'fromPort': 1,
'groups': [{'groupName': u'somegroup1',
'userId': u'someuser'}],
'ipProtocol': 'udp',
'ipRanges': [],
'toPort': 65535},
{'fromPort': 80,
'groups': [{'groupName': u'othergroup2',
'userId': u'someuser'}],
'ipProtocol': u'tcp',
'ipRanges': [],
'toPort': 80}]
for rule in expected_rules:
self.assertIn(rule, actual_rules)
db.security_group_destroy(self.context, sec3['id'])
db.security_group_destroy(self.context, sec2['id'])
db.security_group_destroy(self.context, sec1['id'])
def test_revoke_security_group_ingress_policy_not_allowed(self):
rules = common_policy.Rules(
{'compute_extension:security_groups':
common_policy.parse_rule('project_id:%(project_id)s')})
common_policy.set_rules(rules)
with mock.patch.object(self.cloud.security_group_api,
'get') as get:
get.return_value = {'project_id': 'invalid'}
self.assertRaises(exception.PolicyNotAuthorized,
self.cloud.revoke_security_group_ingress, self.context,
'fake-name', 'fake-id')
def test_revoke_security_group_ingress(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_name=sec['name'], **kwargs))
def test_authorize_revoke_security_group_ingress_by_id(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec['id'], **kwargs)
revoke = self.cloud.revoke_security_group_ingress
self.assertTrue(revoke(self.context, group_id=sec['id'], **kwargs))
def test_authorize_security_group_ingress_missing_protocol_params(self):
sec = db.security_group_create(self.context,
{'project_id': self.context.project_id,
'name': 'test'})
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
'test')
def test_authorize_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
authz = self.cloud.authorize_security_group_ingress
self.assertRaises(exception.MissingParameter, authz, self.context,
**kwargs)
def test_authorize_security_group_ingress_already_exists(self):
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
authz(self.context, group_name=sec['name'], **kwargs)
self.assertRaises(exception.SecurityGroupRuleExists, authz,
self.context, group_name=sec['name'], **kwargs)
def test_security_group_ingress_quota_limit(self):
self.flags(quota_security_group_rules=20)
kwargs = {'project_id': self.context.project_id, 'name': 'test'}
sec_group = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
for i in range(100, 120):
kwargs = {'to_port': i, 'from_port': i, 'ip_protocol': 'tcp'}
authz(self.context, group_id=sec_group['id'], **kwargs)
kwargs = {'to_port': 121, 'from_port': 121, 'ip_protocol': 'tcp'}
self.assertRaises(exception.SecurityGroupLimitExceeded, authz,
self.context, group_id=sec_group['id'], **kwargs)
def _test_authorize_security_group_no_ports_with_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto,
'groups': {'1': {'user_id': self.context.user_id,
'group_name': u'test'}}}
self.assertTrue(authz(self.context, group_name=sec['name'],
**auth_kwargs))
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
self.assertEqual(len(groups['securityGroupInfo']), 1)
actual_rules = groups['securityGroupInfo'][0]['ipPermissions']
expected_rules = [{'groups': [{'groupName': 'test',
'userId': self.context.user_id}],
'ipProtocol': proto,
'ipRanges': []}]
if proto == 'icmp':
expected_rules[0]['fromPort'] = -1
expected_rules[0]['toPort'] = -1
else:
expected_rules[0]['fromPort'] = 1
expected_rules[0]['toPort'] = 65535
self.assertTrue(expected_rules == actual_rules)
describe = self.cloud.describe_security_groups
groups = describe(self.context, group_name=['test'])
db.security_group_destroy(self.context, sec['id'])
def _test_authorize_security_group_no_ports_no_source_group(self, proto):
kwargs = {
'project_id': self.context.project_id,
'user_id': self.context.user_id,
'description': '',
'name': 'test'
}
sec = db.security_group_create(self.context, kwargs)
authz = self.cloud.authorize_security_group_ingress
auth_kwargs = {'ip_protocol': proto}
self.assertRaises(exception.MissingParameter, authz, self.context,
group_name=sec['name'], **auth_kwargs)
db.security_group_destroy(self.context, sec['id'])
def test_authorize_security_group_no_ports_icmp(self):
self._test_authorize_security_group_no_ports_with_source_group('icmp')
self._test_authorize_security_group_no_ports_no_source_group('icmp')
def test_authorize_security_group_no_ports_tcp(self):
self._test_authorize_security_group_no_ports_with_source_group('tcp')
self._test_authorize_security_group_no_ports_no_source_group('tcp')
def test_authorize_security_group_no_ports_udp(self):
self._test_authorize_security_group_no_ports_with_source_group('udp')
self._test_authorize_security_group_no_ports_no_source_group('udp')
def test_revoke_security_group_ingress_missing_group_name_or_id(self):
kwargs = {'to_port': '999', 'from_port': '999', 'ip_protocol': 'tcp'}
revoke = self.cloud.revoke_security_group_ingress
self.assertRaises(exception.MissingParameter, revoke,
self.context, **kwargs)
def test_delete_security_group_in_use_by_group(self):
group1 = self.cloud.create_security_group(self.context, 'testgrp1',
"test group 1")
group2 = self.cloud.create_security_group(self.context, 'testgrp2',
"test group 2")
kwargs = {'groups': {'1': {'user_id': u'%s' % self.context.user_id,
'group_name': u'testgrp2'}},
}
self.cloud.authorize_security_group_ingress(self.context,
group_name='testgrp1', **kwargs)
group1 = db.security_group_get_by_name(self.context,
self.project_id, 'testgrp1')
get_rules = db.security_group_rule_get_by_security_group
self.assertTrue(get_rules(self.context, group1['id']))
self.cloud.delete_security_group(self.context, 'testgrp2')
self.assertFalse(get_rules(self.context, group1['id']))
def test_delete_security_group_in_use_by_instance(self):
    """A security group attached to an instance cannot be deleted until
    the instance is gone.
    """
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    instance = db.instance_create(self.context,
                                  {'reservation_id': 'a',
                                   'image_ref': image_uuid,
                                   'instance_type_id': 1,
                                   'host': 'host1',
                                   'vm_state': 'active'})
    group = db.security_group_create(self.context,
                                     {'user_id': self.context.user_id,
                                      'project_id': self.context.project_id,
                                      'name': 'testgrp',
                                      'description': 'Test group'})
    db.instance_add_security_group(self.context, instance['uuid'],
                                   group['id'])
    # Deletion is refused while the instance still references the group.
    self.assertRaises(exception.InvalidGroup,
                      self.cloud.delete_security_group,
                      self.context, 'testgrp')
    # Once the instance is destroyed the deletion succeeds.
    db.instance_destroy(self.context, instance['uuid'])
    self.cloud.delete_security_group(self.context, 'testgrp')
def test_describe_availability_zones(self):
    # Makes sure describe_availability_zones works and filters results.
    service1 = db.service_create(self.context, {'host': 'host1_zones',
                                     'binary': "nova-compute",
                                     'topic': 'compute',
                                     'report_count': 0})
    service2 = db.service_create(self.context, {'host': 'host2_zones',
                                     'binary': "nova-compute",
                                     'topic': 'compute',
                                     'report_count': 0})
    # Aggregate based zones
    agg = db.aggregate_create(self.context,
            {'name': 'agg1'}, {'availability_zone': 'zone1'})
    db.aggregate_host_add(self.context, agg['id'], 'host1_zones')
    agg = db.aggregate_create(self.context,
            {'name': 'agg2'}, {'availability_zone': 'zone2'})
    db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
    result = self.cloud.describe_availability_zones(self.context)
    # NOTE(review): 3 = zone1 + zone2 + one pre-existing zone,
    # presumably created by the test base class setUp -- confirm.
    self.assertEqual(len(result['availabilityZoneInfo']), 3)
    admin_ctxt = context.get_admin_context(read_deleted="no")
    result = self.cloud.describe_availability_zones(admin_ctxt,
                                                    zone_name='verbose')
    # NOTE(review): the verbose listing adds per-host/per-service rows;
    # the magic 18 depends on fixture services outside this test -- confirm.
    self.assertEqual(len(result['availabilityZoneInfo']), 18)
    db.service_destroy(self.context, service1['id'])
    db.service_destroy(self.context, service2['id'])
def test_describe_availability_zones_verbose(self):
    # Makes sure describe_availability_zones works and filters results.
    service1 = db.service_create(self.context, {'host': 'host1_zones',
                                     'binary': "nova-compute",
                                     'topic': 'compute',
                                     'report_count': 0})
    service2 = db.service_create(self.context, {'host': 'host2_zones',
                                     'binary': "nova-compute",
                                     'topic': 'compute',
                                     'report_count': 0})
    # Only host2_zones is placed in an aggregate-backed zone.
    agg = db.aggregate_create(self.context,
            {'name': 'agg1'}, {'availability_zone': 'second_zone'})
    db.aggregate_host_add(self.context, agg['id'], 'host2_zones')
    admin_ctxt = context.get_admin_context(read_deleted="no")
    result = self.cloud.describe_availability_zones(admin_ctxt,
                                                    zone_name='verbose')
    # NOTE(review): the magic 17 depends on services/zones created by the
    # shared test fixture outside this method -- confirm before changing.
    self.assertEqual(len(result['availabilityZoneInfo']), 17)
    db.service_destroy(self.context, service1['id'])
    db.service_destroy(self.context, service2['id'])
def assertEqualSorted(self, x, y):
    """Assert that two iterables hold the same items, ignoring order."""
    left, right = sorted(x), sorted(y)
    self.assertEqual(left, right)
def test_describe_instances(self):
    # Makes sure describe_instances works and filters results.
    self.flags(use_ipv6=True)
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    # NOTE(review): sys_meta is mutated and reused for both instances;
    # this presumably works because db.instance_create copies it -- confirm.
    sys_meta['EC2_client_token'] = "client-token-1"
    inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                              'image_ref': image_uuid,
                                              'instance_type_id': 1,
                                              'host': 'host1',
                                              'hostname': 'server-1234',
                                              'vm_state': 'active',
                                              'system_metadata': sys_meta})
    sys_meta['EC2_client_token'] = "client-token-2"
    inst2 = db.instance_create(self.context, {'reservation_id': 'a',
                                              'image_ref': image_uuid,
                                              'instance_type_id': 1,
                                              'host': 'host2',
                                              'hostname': 'server-4321',
                                              'vm_state': 'active',
                                              'system_metadata': sys_meta})
    # Each host is registered as a compute service and placed in its own
    # aggregate-backed availability zone (host1->zone1, host2->zone2).
    comp1 = db.service_create(self.context, {'host': 'host1',
                                             'topic': "compute"})
    agg = db.aggregate_create(self.context,
            {'name': 'agg1'}, {'availability_zone': 'zone1'})
    db.aggregate_host_add(self.context, agg['id'], 'host1')
    comp2 = db.service_create(self.context, {'host': 'host2',
                                             'topic': "compute"})
    agg2 = db.aggregate_create(self.context,
            {'name': 'agg2'}, {'availability_zone': 'zone2'})
    db.aggregate_host_add(self.context, agg2['id'], 'host2')
    # Unfiltered: both instances share reservation 'a'.
    result = self.cloud.describe_instances(self.context)
    result = result['reservationSet'][0]
    self.assertEqual(len(result['instancesSet']), 2)
    # Now try filtering.
    instance_id = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
    result = self.cloud.describe_instances(self.context,
                                           instance_id=[instance_id])
    result = result['reservationSet'][0]
    self.assertEqual(len(result['instancesSet']), 1)
    instance = result['instancesSet'][0]
    self.assertEqual(instance['instanceId'], instance_id)
    self.assertEqual(instance['placement']['availabilityZone'], 'zone2')
    self.assertEqual(instance['ipAddress'], '1.2.3.4')
    self.assertEqual(instance['dnsName'], '1.2.3.4')
    self.assertEqual(instance['tagSet'], [])
    self.assertEqual(instance['privateDnsName'], 'server-4321')
    self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
    self.assertEqual(instance['dnsNameV6'],
                     'fe80:b33f::a8bb:ccff:fedd:eeff')
    self.assertEqual(instance['clientToken'], 'client-token-2')
    # A filter with even one invalid id should cause an exception to be
    # raised
    self.assertRaises(exception.InstanceNotFound,
                      self.cloud.describe_instances, self.context,
                      instance_id=[instance_id, '435679'])
    db.instance_destroy(self.context, inst1['uuid'])
    db.instance_destroy(self.context, inst2['uuid'])
    db.service_destroy(self.context, comp1['id'])
    db.service_destroy(self.context, comp2['id'])
def test_describe_instances_all_invalid(self):
    """describe_instances with only a bogus id raises InstanceNotFound."""
    self.flags(use_ipv6=True)
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    bogus_id = ec2utils.id_to_ec2_inst_id('435679')
    self.assertRaises(exception.InstanceNotFound,
                      self.cloud.describe_instances, self.context,
                      instance_id=[bogus_id])
def test_describe_instances_with_filters(self):
    """Unknown filters match nothing and yield an empty reservation set."""
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    filters = {'filter': [{'name': 'test', 'value': ['a', 'b']},
                          {'name': 'another_test', 'value': 'a string'}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': []})
def test_describe_instances_with_filters_tags(self):
    # Makes sure describe_instances works and filters tag results.
    # We need to stub network calls
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    # We need to stub out the MQ call - it won't succeed. We do want
    # to check that the method is called, though
    meta_changes = [None]
    def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                      instance_uuid=None):
        meta_changes[0] = diff
    # NOTE(review): meta_changes is captured by the stub but never
    # asserted on in this test.
    self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                   fake_change_instance_metadata)
    utc = iso8601.iso8601.Utc()
    # Create some test images
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    inst1_kwargs = {
        'reservation_id': 'a',
        'image_ref': image_uuid,
        'instance_type_id': 1,
        'host': 'host1',
        'vm_state': 'active',
        'launched_at': timeutils.utcnow(),
        'hostname': 'server-1111',
        'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1,
                                        tzinfo=utc),
        'system_metadata': sys_meta
    }
    inst2_kwargs = {
        'reservation_id': 'b',
        'image_ref': image_uuid,
        'instance_type_id': 1,
        'host': 'host2',
        'vm_state': 'active',
        'launched_at': timeutils.utcnow(),
        'hostname': 'server-1112',
        'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2,
                                        tzinfo=utc),
        'system_metadata': sys_meta
    }
    inst1 = db.instance_create(self.context, inst1_kwargs)
    ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
    inst2 = db.instance_create(self.context, inst2_kwargs)
    ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
    # Create some tags
    # We get one overlapping pair, one overlapping key, and a
    # disparate pair
    # inst1 : {'foo': 'bar', 'baz': 'wibble', 'bax': 'wobble'}
    # inst2 : {'foo': 'bar', 'baz': 'quux', 'zog': 'bobble'}
    md = {'key': 'foo', 'value': 'bar'}
    self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                           tag=[md])
    md2 = {'key': 'baz', 'value': 'wibble'}
    md3 = {'key': 'bax', 'value': 'wobble'}
    self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                           tag=[md2, md3])
    md4 = {'key': 'baz', 'value': 'quux'}
    md5 = {'key': 'zog', 'value': 'bobble'}
    self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                           tag=[md4, md5])
    # We should be able to search by:
    # Full EC2-formatted expectations for each reservation, compared
    # verbatim against describe_instances output below.
    inst1_ret = {
        'groupSet': None,
        'instancesSet': [{'amiLaunchIndex': None,
                          'dnsName': '1.2.3.4',
                          'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                          'imageId': 'ami-00000001',
                          'instanceId': 'i-00000001',
                          'instanceState': {'code': 16,
                                            'name': 'running'},
                          'instanceType': u'm1.medium',
                          'ipAddress': '1.2.3.4',
                          'keyName': 'None (None, host1)',
                          'launchTime':
                              datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                tzinfo=utc),
                          'placement': {
                              'availabilityZone': 'nova'},
                          'privateDnsName': u'server-1111',
                          'privateIpAddress': '192.168.0.3',
                          'productCodesSet': None,
                          'rootDeviceName': '/dev/sda1',
                          'rootDeviceType': 'instance-store',
                          'tagSet': [{'key': u'foo',
                                      'value': u'bar'},
                                     {'key': u'baz',
                                      'value': u'wibble'},
                                     {'key': u'bax',
                                      'value': u'wobble'}]}],
        'ownerId': None,
        'reservationId': u'a'}
    inst2_ret = {
        'groupSet': None,
        'instancesSet': [{'amiLaunchIndex': None,
                          'dnsName': '1.2.3.4',
                          'dnsNameV6': 'fe80:b33f::a8bb:ccff:fedd:eeff',
                          'imageId': 'ami-00000001',
                          'instanceId': 'i-00000002',
                          'instanceState': {'code': 16,
                                            'name': 'running'},
                          'instanceType': u'm1.medium',
                          'ipAddress': '1.2.3.4',
                          'keyName': u'None (None, host2)',
                          'launchTime':
                              datetime.datetime(2012, 5, 1, 1, 1, 2,
                                                tzinfo=utc),
                          'placement': {
                              'availabilityZone': 'nova'},
                          'privateDnsName': u'server-1112',
                          'privateIpAddress': '192.168.0.3',
                          'productCodesSet': None,
                          'rootDeviceName': '/dev/sda1',
                          'rootDeviceType': 'instance-store',
                          'tagSet': [{'key': u'foo',
                                      'value': u'bar'},
                                     {'key': u'baz',
                                      'value': u'quux'},
                                     {'key': u'zog',
                                      'value': u'bobble'}]}],
        'ownerId': None,
        'reservationId': u'b'}
    # No filter
    result = self.cloud.describe_instances(self.context)
    self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})
    # Key search
    # Both should have tags with key 'foo' and value 'bar'
    filters = {'filter': [{'name': 'tag:foo',
                           'value': ['bar']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})
    # Both should have tags with key 'foo'
    filters = {'filter': [{'name': 'tag-key',
                           'value': ['foo']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})
    # Value search
    # Only inst2 should have tags with key 'baz' and value 'quux'
    filters = {'filter': [{'name': 'tag:baz',
                           'value': ['quux']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst2_ret]})
    # Only inst2 should have tags with value 'quux'
    filters = {'filter': [{'name': 'tag-value',
                           'value': ['quux']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst2_ret]})
    # Multiple values
    # Both should have tags with key 'baz' and values in the set
    # ['quux', 'wibble']
    filters = {'filter': [{'name': 'tag:baz',
                           'value': ['quux', 'wibble']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})
    # Both should have tags with key 'baz' or tags with value 'bar'
    filters = {'filter': [{'name': 'tag-key',
                           'value': ['baz']},
                          {'name': 'tag-value',
                           'value': ['bar']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst1_ret, inst2_ret]})
    # Confirm deletion of tags
    # Check for format 'tag:'
    self.cloud.delete_tags(self.context, resource_id=[ec2_id1], tag=[md])
    filters = {'filter': [{'name': 'tag:foo',
                           'value': ['bar']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst2_ret]})
    # Check for format 'tag-'
    filters = {'filter': [{'name': 'tag-key',
                           'value': ['foo']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst2_ret]})
    filters = {'filter': [{'name': 'tag-value',
                           'value': ['bar']}]}
    result = self.cloud.describe_instances(self.context, **filters)
    self.assertEqual(result, {'reservationSet': [inst2_ret]})
    # destroy the test instances
    db.instance_destroy(self.context, inst1['uuid'])
    db.instance_destroy(self.context, inst2['uuid'])
def test_describe_instances_sorting(self):
    # Makes sure describe_instances works and is sorted as expected.
    self.flags(use_ipv6=True)
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    inst_base = {
        'reservation_id': 'a',
        'image_ref': image_uuid,
        'instance_type_id': 1,
        'vm_state': 'active',
        'system_metadata': sys_meta,
    }
    utc = iso8601.iso8601.Utc()
    # Three instances whose created_at timestamps deliberately do not
    # match creation order: inst2 (Feb 1) < inst3 (Feb 5) < inst1 (May 1).
    inst1_kwargs = {}
    inst1_kwargs.update(inst_base)
    inst1_kwargs['host'] = 'host1'
    inst1_kwargs['hostname'] = 'server-1111'
    inst1_kwargs['created_at'] = datetime.datetime(2012, 5, 1, 1, 1, 1,
                                                   tzinfo=utc)
    inst1 = db.instance_create(self.context, inst1_kwargs)
    inst2_kwargs = {}
    inst2_kwargs.update(inst_base)
    inst2_kwargs['host'] = 'host2'
    inst2_kwargs['hostname'] = 'server-2222'
    inst2_kwargs['created_at'] = datetime.datetime(2012, 2, 1, 1, 1, 1,
                                                   tzinfo=utc)
    inst2 = db.instance_create(self.context, inst2_kwargs)
    inst3_kwargs = {}
    inst3_kwargs.update(inst_base)
    inst3_kwargs['host'] = 'host3'
    inst3_kwargs['hostname'] = 'server-3333'
    inst3_kwargs['created_at'] = datetime.datetime(2012, 2, 5, 1, 1, 1,
                                                   tzinfo=utc)
    inst3 = db.instance_create(self.context, inst3_kwargs)
    comp1 = db.service_create(self.context, {'host': 'host1',
                                             'topic': "compute"})
    comp2 = db.service_create(self.context, {'host': 'host2',
                                             'topic': "compute"})
    result = self.cloud.describe_instances(self.context)
    result = result['reservationSet'][0]['instancesSet']
    # Output must be ordered by created_at ascending (oldest first).
    self.assertEqual(result[0]['launchTime'], inst2_kwargs['created_at'])
    self.assertEqual(result[1]['launchTime'], inst3_kwargs['created_at'])
    self.assertEqual(result[2]['launchTime'], inst1_kwargs['created_at'])
    db.instance_destroy(self.context, inst1['uuid'])
    db.instance_destroy(self.context, inst2['uuid'])
    db.instance_destroy(self.context, inst3['uuid'])
    db.service_destroy(self.context, comp1['id'])
    db.service_destroy(self.context, comp2['id'])
def test_describe_instance_state(self):
    # Makes sure describe_instances for instanceState works.
    def test_instance_state(expected_code, expected_name,
                            power_state_, vm_state_, values=None):
        # Create an instance in the given power/vm state, describe it,
        # and check the EC2 instanceState code/name mapping.
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        sys_meta = flavors.save_flavor_info(
            {}, flavors.get_flavor(1))
        values = values or {}
        values.update({'image_ref': image_uuid, 'instance_type_id': 1,
                       'power_state': power_state_, 'vm_state': vm_state_,
                       'system_metadata': sys_meta})
        inst = db.instance_create(self.context, values)
        instance_id = ec2utils.id_to_ec2_inst_id(inst['uuid'])
        result = self.cloud.describe_instances(self.context,
                                               instance_id=[instance_id])
        result = result['reservationSet'][0]
        result = result['instancesSet'][0]['instanceState']
        name = result['name']
        code = result['code']
        self.assertEqual(code, expected_code)
        self.assertEqual(name, expected_name)
        # Remove the instance so the next sub-case starts clean.
        db.instance_destroy(self.context, inst['uuid'])
    test_instance_state(inst_state.RUNNING_CODE, inst_state.RUNNING,
                        power_state.RUNNING, vm_states.ACTIVE)
    test_instance_state(inst_state.STOPPED_CODE, inst_state.STOPPED,
                        power_state.NOSTATE, vm_states.STOPPED,
                        {'shutdown_terminate': False})
def test_describe_instances_no_ipv6(self):
    # Makes sure describe_instances w/ no ipv6 works.
    self.flags(use_ipv6=False)
    self._stub_instance_get_with_fixed_ips('get_all')
    self._stub_instance_get_with_fixed_ips('get')
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    inst1 = db.instance_create(self.context, {'reservation_id': 'a',
                                              'image_ref': image_uuid,
                                              'instance_type_id': 1,
                                              'hostname': 'server-1234',
                                              'vm_state': 'active',
                                              'system_metadata': sys_meta})
    comp1 = db.service_create(self.context, {'host': 'host1',
                                             'topic': "compute"})
    result = self.cloud.describe_instances(self.context)
    result = result['reservationSet'][0]
    self.assertEqual(len(result['instancesSet']), 1)
    instance = result['instancesSet'][0]
    instance_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
    self.assertEqual(instance['instanceId'], instance_id)
    self.assertEqual(instance['ipAddress'], '1.2.3.4')
    self.assertEqual(instance['dnsName'], '1.2.3.4')
    self.assertEqual(instance['privateDnsName'], 'server-1234')
    self.assertEqual(instance['privateIpAddress'], '192.168.0.3')
    # With use_ipv6=False the v6 DNS key must be absent entirely,
    # not merely empty.
    self.assertNotIn('dnsNameV6', instance)
    db.instance_destroy(self.context, inst1['uuid'])
    db.service_destroy(self.context, comp1['id'])
def test_describe_instances_deleted(self):
    """Destroyed instances must not show up in describe_instances."""
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    base = {'image_ref': image_uuid,
            'instance_type_id': 1,
            'host': 'host1',
            'vm_state': 'active',
            'system_metadata': sys_meta}
    inst1 = db.instance_create(self.context,
                               dict(base, reservation_id='a'))
    inst2 = db.instance_create(self.context,
                               dict(base, reservation_id='b'))
    # Destroy the first instance; only the second should be reported.
    db.instance_destroy(self.context, inst1['uuid'])
    result = self.cloud.describe_instances(self.context)
    self.assertEqual(len(result['reservationSet']), 1)
    survivors = result['reservationSet'][0]['instancesSet']
    self.assertEqual(survivors[0]['instanceId'],
                     ec2utils.id_to_ec2_inst_id(inst2['uuid']))
def test_describe_instances_with_image_deleted(self):
    """Instances are still described even when their image is missing.

    Fix: the instance records returned by ``db.instance_create`` were
    bound to unused locals (``inst1``/``inst2``); the bindings are
    dropped.
    """
    # NOTE(review): this uuid differs from the one registered with the
    # fake image service in sibling tests -- presumably the image lookup
    # fails during formatting; confirm against the image service stub.
    image_uuid = 'aebef54a-ed67-4d10-912f-14455edce176'
    sys_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    args1 = {'reservation_id': 'a',
             'image_ref': image_uuid,
             'instance_type_id': 1,
             'host': 'host1',
             'vm_state': 'active',
             'system_metadata': sys_meta}
    db.instance_create(self.context, args1)
    args2 = {'reservation_id': 'b',
             'image_ref': image_uuid,
             'instance_type_id': 1,
             'host': 'host1',
             'vm_state': 'active',
             'system_metadata': sys_meta}
    db.instance_create(self.context, args2)
    result = self.cloud.describe_instances(self.context)
    # Both reservations are reported despite the unknown image.
    self.assertEqual(len(result['reservationSet']), 2)
def test_describe_instances_dnsName_set(self):
    """dnsName is None when the instance has no floating IP assigned."""
    self._stub_instance_get_with_fixed_ips('get_all', get_floating=False)
    self._stub_instance_get_with_fixed_ips('get', get_floating=False)
    image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
    flavor_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    db.instance_create(self.context, {'reservation_id': 'a',
                                      'image_ref': image_uuid,
                                      'instance_type_id': 1,
                                      'host': 'host1',
                                      'hostname': 'server-1234',
                                      'vm_state': 'active',
                                      'system_metadata': flavor_meta})
    reservations = self.cloud.describe_instances(self.context)
    described = reservations['reservationSet'][0]['instancesSet'][0]
    self.assertIsNone(described['dnsName'])
def test_describe_instances_booting_from_a_volume(self):
    """An instance with an empty image_ref reports imageId as None."""
    flavor_meta = flavors.save_flavor_info(
        {}, flavors.get_flavor(1))
    instance = instance_obj.Instance()
    instance.reservation_id = 'a'
    instance.image_ref = ''
    instance.root_device_name = '/dev/sdh'
    instance.instance_type_id = 1
    instance.vm_state = vm_states.ACTIVE
    instance.host = 'host1'
    instance.system_metadata = flavor_meta
    instance.create(self.context)
    reservations = self.cloud.describe_instances(self.context)
    described = reservations['reservationSet'][0]['instancesSet'][0]
    self.assertIsNone(described['imageId'])
def test_describe_images(self):
    """describe_images lists all images, filters by id, and raises
    ImageNotFound for unknown ids.
    """
    describe_images = self.cloud.describe_images
    def fake_detail(meh, context, **kwargs):
        # One registered machine image with kernel/ramdisk properties.
        return [{'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                 'name': 'fake_name',
                 'container_format': 'ami',
                 'status': 'active',
                 'properties': {
                     'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                     'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                     'type': 'machine'}}]
    def fake_show_none(meh, context, id):
        raise exception.ImageNotFound(image_id='bad_image_id')
    def fake_detail_none(self, context, **kwargs):
        return []
    self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
    # list all
    result1 = describe_images(self.context)
    result1 = result1['imagesSet'][0]
    self.assertEqual(result1['imageId'], 'ami-00000001')
    # provided a valid image_id
    result2 = describe_images(self.context, ['ami-00000001'])
    self.assertEqual(1, len(result2['imagesSet']))
    # provide more than 1 valid image_id
    result3 = describe_images(self.context, ['ami-00000001',
                                             'ami-00000002'])
    self.assertEqual(2, len(result3['imagesSet']))
    # provide a non-existing image_id
    self.stubs.UnsetAll()
    self.stubs.Set(fake._FakeImageService, 'show', fake_show_none)
    self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_none)
    self.assertRaises(exception.ImageNotFound, describe_images,
                      self.context, ['ami-fake'])
def assertDictListUnorderedMatch(self, L1, L2, key):
    """Assert two lists of dicts are equal, pairing entries by *key*.

    Lists must be the same length; every dict in ``L1`` is matched
    against the dict(s) in ``L2`` sharing the same value for *key* and
    must compare equal to each of them.

    Fix: previously an entry in ``L1`` whose *key* value had no
    counterpart in ``L2`` was silently skipped, so mismatched lists of
    equal length could pass; such an entry now fails the assertion.
    """
    self.assertEqual(len(L1), len(L2))
    for d1 in L1:
        self.assertIn(key, d1)
        matched = False
        for d2 in L2:
            self.assertIn(key, d2)
            if d1[key] == d2[key]:
                self.assertThat(d1, matchers.DictMatches(d2))
                matched = True
        if not matched:
            self.fail('no dict in L2 with %s == %r' % (key, d1[key]))
def _setUpImageSet(self, create_volumes_and_snapshots=False):
    """Register two fake images (with mappings/BDMs) in the image service.

    image1 has an implicit root device plus ephemeral/swap mappings and
    snapshot/volume block device mappings; image2 is an EBS-rooted image
    with a single snapshot mapping.

    :param create_volumes_and_snapshots: when True, also create the
        volumes/snapshots referenced by image1's block device mapping.
    :returns: tuple of (volume ids, snapshot ids) created (empty lists
        when create_volumes_and_snapshots is False).
    """
    self.flags(max_local_block_devices=-1)
    mappings1 = [
        {'device': '/dev/sda1', 'virtual': 'root'},
        {'device': 'sdb0', 'virtual': 'ephemeral0'},
        {'device': 'sdb1', 'virtual': 'ephemeral1'},
        {'device': 'sdb2', 'virtual': 'ephemeral2'},
        {'device': 'sdb3', 'virtual': 'ephemeral3'},
        {'device': 'sdb4', 'virtual': 'ephemeral4'},
        {'device': 'sdc0', 'virtual': 'swap'},
        {'device': 'sdc1', 'virtual': 'swap'},
        {'device': 'sdc2', 'virtual': 'swap'},
        {'device': 'sdc3', 'virtual': 'swap'},
        {'device': 'sdc4', 'virtual': 'swap'}]
    block_device_mapping1 = [
        {'device_name': '/dev/sdb1',
         'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e3'},
        {'device_name': '/dev/sdb2',
         'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4'},
        {'device_name': '/dev/sdb3', 'virtual_name': 'ephemeral5'},
        {'device_name': '/dev/sdb4', 'no_device': True},
        {'device_name': '/dev/sdc1',
         'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e5'},
        {'device_name': '/dev/sdc2',
         'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e6'},
        {'device_name': '/dev/sdc3', 'virtual_name': 'ephemeral6'},
        {'device_name': '/dev/sdc4', 'no_device': True}]
    image1 = {
        'id': 'cedef40a-ed67-4d10-800e-17455edce175',
        'name': 'fake_name',
        'status': 'active',
        'properties': {
            'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'type': 'machine',
            'image_state': 'available',
            'mappings': mappings1,
            'block_device_mapping': block_device_mapping1,
        }
    }
    mappings2 = [{'device': '/dev/sda1', 'virtual': 'root'}]
    block_device_mapping2 = [{'device_name': '/dev/sdb1',
        'snapshot_id': 'ccec42a2-c220-4806-b762-6b12fbb592e7'}]
    image2 = {
        'id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
        'name': 'fake_name',
        'status': 'active',
        'properties': {
            'kernel_id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'type': 'machine',
            'root_device_name': '/dev/sdb1',
            'mappings': mappings2,
            'block_device_mapping': block_device_mapping2}}
    def fake_show(meh, context, image_id):
        # Deep-copy so callers cannot mutate the shared fixtures.
        _images = [copy.deepcopy(image1), copy.deepcopy(image2)]
        for i in _images:
            if str(i['id']) == str(image_id):
                return i
        raise exception.ImageNotFound(image_id=image_id)
    def fake_detail(meh, context, **kwargs):
        return [copy.deepcopy(image1), copy.deepcopy(image2)]
    self.stubs.Set(fake._FakeImageService, 'show', fake_show)
    self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
    volumes = []
    snapshots = []
    if create_volumes_and_snapshots:
        for bdm in block_device_mapping1:
            if 'volume_id' in bdm:
                vol = self._volume_create(bdm['volume_id'])
                volumes.append(vol['id'])
            if 'snapshot_id' in bdm:
                snap = self._snapshot_create(bdm['snapshot_id'])
                snapshots.append(snap['id'])
    return (volumes, snapshots)
def _assertImageSet(self, result, root_device_type, root_device_name):
    """Check that *result* describes exactly one image with the given
    root device type and name, and return that image description.
    """
    images = result['imagesSet']
    self.assertEqual(1, len(images))
    image = images[0]
    for attr, expected in (('rootDeviceType', root_device_type),
                           ('rootDeviceName', root_device_name)):
        self.assertIn(attr, image)
        self.assertEqual(image[attr], expected)
    self.assertIn('blockDeviceMapping', image)
    return image
# Expected EC2-formatted root devices and block device mappings for the
# two fake images registered by _setUpImageSet(); shared by the
# describe_image* tests below.
_expected_root_device_name1 = '/dev/sda1'
# NOTE(yamahata): noDevice doesn't make sense when returning mapping
#                 It makes sense only when user overriding existing
#                 mapping.
_expected_bdms1 = [
    {'deviceName': '/dev/sdb0', 'virtualName': 'ephemeral0'},
    {'deviceName': '/dev/sdb1', 'ebs': {'snapshotId':
                                        'snap-00000001'}},
    {'deviceName': '/dev/sdb2', 'ebs': {'snapshotId':
                                        'vol-00000001'}},
    {'deviceName': '/dev/sdb3', 'virtualName': 'ephemeral5'},
    # {'deviceName': '/dev/sdb4', 'noDevice': True},
    {'deviceName': '/dev/sdc0', 'virtualName': 'swap'},
    {'deviceName': '/dev/sdc1', 'ebs': {'snapshotId':
                                        'snap-00000002'}},
    {'deviceName': '/dev/sdc2', 'ebs': {'snapshotId':
                                        'vol-00000002'}},
    {'deviceName': '/dev/sdc3', 'virtualName': 'ephemeral6'},
    # {'deviceName': '/dev/sdc4', 'noDevice': True}
]
_expected_root_device_name2 = '/dev/sdb1'
_expected_bdms2 = [{'deviceName': '/dev/sdb1',
                    'ebs': {'snapshotId': 'snap-00000003'}}]
# NOTE(yamahata):
# InstanceBlockDeviceMappingItemType
# rootDeviceType
# rootDeviceName
# blockDeviceMapping
#  deviceName
#  virtualName
#  ebs
#   snapshotId
#   volumeSize
#   deleteOnTermination
#  noDevice
def test_describe_image_mapping(self):
    # test for rootDeviceName and blockDeviceMapping.
    self._setUpImageSet()
    cases = [('ami-00000001', 'instance-store',
              self._expected_root_device_name1, self._expected_bdms1),
             ('ami-00000002', 'ebs',
              self._expected_root_device_name2, self._expected_bdms2)]
    for ec2_id, dev_type, root_name, bdms in cases:
        result = self.cloud.describe_images(self.context, [ec2_id])
        image = self._assertImageSet(result, dev_type, root_name)
        self.assertDictListUnorderedMatch(image['blockDeviceMapping'],
                                          bdms, 'deviceName')
def test_describe_image_attribute(self):
    """describe_image_attribute reports launchPermission, kernel and
    ramdisk for a public machine image.
    """
    describe_image_attribute = self.cloud.describe_image_attribute
    def fake_show(meh, context, id):
        return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                'name': 'fake_name',
                'status': 'active',
                'properties': {
                    'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'type': 'machine'},
                'container_format': 'ami',
                'is_public': True}
    def fake_detail(self, context, **kwargs):
        image = fake_show(None, context, None)
        image['name'] = kwargs.get('filters', {}).get('name')
        return [image]
    self.stubs.Set(fake._FakeImageService, 'show', fake_show)
    self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
    # is_public=True shows up as launch permission for group 'all'.
    result = describe_image_attribute(self.context, 'ami-00000001',
                                      'launchPermission')
    self.assertEqual([{'group': 'all'}], result['launchPermission'])
    result = describe_image_attribute(self.context, 'ami-00000001',
                                      'kernel')
    self.assertEqual('aki-00000001', result['kernel']['value'])
    result = describe_image_attribute(self.context, 'ami-00000001',
                                      'ramdisk')
    self.assertEqual('ari-00000001', result['ramdisk']['value'])
def test_describe_image_attribute_root_device_name(self):
    """rootDeviceName attribute matches the registered image metadata."""
    self._setUpImageSet()
    for ec2_id, expected in (
            ('ami-00000001', self._expected_root_device_name1),
            ('ami-00000002', self._expected_root_device_name2)):
        result = self.cloud.describe_image_attribute(
            self.context, ec2_id, 'rootDeviceName')
        self.assertEqual(result['rootDeviceName'], expected)
def test_describe_image_attribute_block_device_mapping(self):
    """blockDeviceMapping attribute matches the registered metadata."""
    self._setUpImageSet()
    for ec2_id, expected in (('ami-00000001', self._expected_bdms1),
                             ('ami-00000002', self._expected_bdms2)):
        result = self.cloud.describe_image_attribute(
            self.context, ec2_id, 'blockDeviceMapping')
        self.assertDictListUnorderedMatch(result['blockDeviceMapping'],
                                          expected, 'deviceName')
def test_modify_image_attribute(self):
    """Adding launchPermission for group 'all' makes the image public."""
    modify_image_attribute = self.cloud.modify_image_attribute
    fake_metadata = {
        'id': 'cedef40a-ed67-4d10-800e-17455edce175',
        'name': 'fake_name',
        'container_format': 'ami',
        'status': 'active',
        'properties': {
            'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
            'type': 'machine'},
        'is_public': False}
    def fake_show(meh, context, id):
        return copy.deepcopy(fake_metadata)
    def fake_detail(self, context, **kwargs):
        image = fake_show(None, context, None)
        image['name'] = kwargs.get('filters', {}).get('name')
        return [image]
    def fake_update(meh, context, image_id, metadata, data=None):
        # The update must preserve kernel/ramdisk and flip is_public.
        self.assertEqual(metadata['properties']['kernel_id'],
                         fake_metadata['properties']['kernel_id'])
        self.assertEqual(metadata['properties']['ramdisk_id'],
                         fake_metadata['properties']['ramdisk_id'])
        self.assertTrue(metadata['is_public'])
        image = copy.deepcopy(fake_metadata)
        image.update(metadata)
        return image
    self.stubs.Set(fake._FakeImageService, 'show', fake_show)
    self.stubs.Set(fake._FakeImageService, 'detail', fake_detail)
    self.stubs.Set(fake._FakeImageService, 'update', fake_update)
    result = modify_image_attribute(self.context, 'ami-00000001',
                                    'launchPermission', 'add',
                                    user_group=['all'])
    self.assertTrue(result['is_public'])
def test_register_image(self):
    """register_image returns the EC2 id of the image created via S3."""
    register_image = self.cloud.register_image
    def fake_create(*args, **kwargs):
        # NOTE(vish): We are mocking s3 so make sure we have converted
        #             to ids instead of uuids.
        return {'id': 1,
                'name': 'fake_name',
                'container_format': 'ami',
                'properties': {'kernel_id': 1,
                               'ramdisk_id': 1,
                               'type': 'machine'
                               },
                'is_public': False
                }
    self.stubs.Set(s3.S3ImageService, 'create', fake_create)
    image_location = 'fake_bucket/fake.img.manifest.xml'
    result = register_image(self.context, image_location)
    # Integer id 1 maps to EC2 id 'ami-00000001'.
    self.assertEqual(result['imageId'], 'ami-00000001')
def test_register_image_empty(self):
    """register_image without an image_location raises MissingParameter."""
    self.assertRaises(exception.MissingParameter,
                      self.cloud.register_image,
                      self.context, image_location=None)
def test_register_image_name(self):
    """The image name defaults to image_location when no name is given.

    Fix: the return value of register_image was bound to an unused
    local (``result``) twice; the bindings are dropped.
    """
    register_image = self.cloud.register_image

    def fake_create(_self, context, metadata, data=None):
        # The S3 service must receive the name this test expects.
        self.assertEqual(metadata['name'], self.expected_name)
        metadata['id'] = 1
        metadata['container_format'] = 'ami'
        metadata['is_public'] = False
        return metadata

    self.stubs.Set(s3.S3ImageService, 'create', fake_create)
    # With name=None the location is used as the name.
    self.expected_name = 'fake_bucket/fake.img.manifest.xml'
    register_image(self.context,
                   image_location=self.expected_name,
                   name=None)
    # An explicit name takes precedence over the location.
    self.expected_name = 'an image name'
    register_image(self.context,
                   image_location='some_location',
                   name=self.expected_name)
def test_format_image(self):
    """_format_image maps glance metadata to EC2 image fields, including
    the fallbacks for a missing image_location or name.
    """
    image = {
        'id': 1,
        'container_format': 'ami',
        'name': 'name',
        'owner': 'someone',
        'properties': {
            'image_location': 'location',
            'kernel_id': 1,
            'ramdisk_id': 1,
            'type': 'machine'},
        'is_public': False}
    expected = {'name': 'name',
                'imageOwnerId': 'someone',
                'isPublic': False,
                'imageId': 'ami-00000001',
                'imageState': None,
                'rootDeviceType': 'instance-store',
                'architecture': None,
                'imageLocation': 'location',
                'kernelId': 'aki-00000001',
                'ramdiskId': 'ari-00000001',
                'rootDeviceName': '/dev/sda1',
                'imageType': 'machine',
                'description': None}
    result = self.cloud._format_image(image)
    self.assertThat(result, matchers.DictMatches(expected))
    # Missing image_location falls back to 'None (<name>)'.
    image['properties']['image_location'] = None
    expected['imageLocation'] = 'None (name)'
    result = self.cloud._format_image(image)
    self.assertThat(result, matchers.DictMatches(expected))
    # Missing name falls back to the image_location.
    image['name'] = None
    image['properties']['image_location'] = 'location'
    expected['imageLocation'] = 'location'
    expected['name'] = 'location'
    result = self.cloud._format_image(image)
    self.assertThat(result, matchers.DictMatches(expected))
    def test_deregister_image(self):
        """deregister_image succeeds for a known image, 404s for unknown."""
        deregister_image = self.cloud.deregister_image
        def fake_delete(self, context, id):
            return None
        self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
        # valid image
        result = deregister_image(self.context, 'ami-00000001')
        self.assertTrue(result)
        # invalid image
        self.stubs.UnsetAll()
        def fake_detail_empty(self, context, **kwargs):
            # An empty detail listing makes every lookup miss.
            return []
        self.stubs.Set(fake._FakeImageService, 'detail', fake_detail_empty)
        self.assertRaises(exception.ImageNotFound, deregister_image,
                          self.context, 'ami-bad001')
def test_deregister_image_wrong_container_type(self):
deregister_image = self.cloud.deregister_image
def fake_delete(self, context, id):
return None
self.stubs.Set(fake._FakeImageService, 'delete', fake_delete)
self.assertRaises(exception.NotFound, deregister_image, self.context,
'aki-00000001')
def _run_instance(self, **kwargs):
rv = self.cloud.run_instances(self.context, **kwargs)
instance_id = rv['instancesSet'][0]['instanceId']
return instance_id
def test_get_password_data(self):
instance_id = self._run_instance(
image_id='ami-1',
instance_type=CONF.default_flavor,
max_count=1)
self.stubs.Set(password, 'extract_password', lambda i: 'fakepass')
output = self.cloud.get_password_data(context=self.context,
instance_id=[instance_id])
self.assertEqual(output['passwordData'], 'fakepass')
rv = self.cloud.terminate_instances(self.context, [instance_id])
def test_console_output(self):
instance_id = self._run_instance(
image_id='ami-1',
instance_type=CONF.default_flavor,
max_count=1)
output = self.cloud.get_console_output(context=self.context,
instance_id=[instance_id])
self.assertEqual(base64.b64decode(output['output']),
'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE')
# TODO(soren): We need this until we can stop polling in the rpc code
# for unit tests.
rv = self.cloud.terminate_instances(self.context, [instance_id])
def test_key_generation(self):
result, private_key = self._create_key('test')
expected = db.key_pair_get(self.context,
self.context.user_id,
'test')['public_key']
(fd, fname) = tempfile.mkstemp()
os.write(fd, private_key)
public_key, err = utils.execute('ssh-keygen', '-e', '-f', fname)
os.unlink(fname)
# assert key fields are equal
self.assertEqual(''.join(public_key.split("\n")[2:-2]),
expected.split(" ")[1].strip())
def test_describe_key_pairs(self):
self._create_key('test1')
self._create_key('test2')
result = self.cloud.describe_key_pairs(self.context)
keys = result["keySet"]
self.assertTrue(filter(lambda k: k['keyName'] == 'test1', keys))
self.assertTrue(filter(lambda k: k['keyName'] == 'test2', keys))
def test_describe_bad_key_pairs(self):
self.assertRaises(exception.KeypairNotFound,
self.cloud.describe_key_pairs, self.context,
key_name=['DoesNotExist'])
def test_import_key_pair(self):
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
f = open(pubkey_path + '/dummy.pub', 'r')
dummypub = f.readline().rstrip()
f.close
f = open(pubkey_path + '/dummy.fingerprint', 'r')
dummyfprint = f.readline().rstrip()
f.close
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
result = self.cloud.import_key_pair(self.context,
key_name,
public_key_material)
self.assertEqual(result['keyName'], key_name)
self.assertEqual(result['keyFingerprint'], dummyfprint)
keydata = db.key_pair_get(self.context,
self.context.user_id,
key_name)
self.assertEqual(dummypub, keydata['public_key'])
self.assertEqual(dummyfprint, keydata['fingerprint'])
def test_import_key_pair_quota_limit(self):
self.flags(quota_key_pairs=0)
pubkey_path = os.path.join(os.path.dirname(__file__), 'public_key')
f = open(pubkey_path + '/dummy.pub', 'r')
dummypub = f.readline().rstrip()
f.close
f = open(pubkey_path + '/dummy.fingerprint', 'r')
dummyfprint = f.readline().rstrip()
f.close
key_name = 'testimportkey'
public_key_material = base64.b64encode(dummypub)
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.import_key_pair, self.context, key_name,
public_key_material)
def test_create_key_pair(self):
good_names = ('a', 'a' * 255, string.ascii_letters + ' -_')
bad_names = ('', 'a' * 256, '*', '/')
for key_name in good_names:
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
for key_name in bad_names:
self.assertRaises(exception.InvalidKeypair,
self.cloud.create_key_pair,
self.context,
key_name)
def test_create_key_pair_quota_limit(self):
self.flags(quota_key_pairs=10)
for i in range(0, 10):
key_name = 'key_%i' % i
result = self.cloud.create_key_pair(self.context,
key_name)
self.assertEqual(result['keyName'], key_name)
# 11'th group should fail
self.assertRaises(exception.KeypairLimitExceeded,
self.cloud.create_key_pair,
self.context,
'foo')
def test_delete_key_pair(self):
self._create_key('test')
self.cloud.delete_key_pair(self.context, 'test')
    def test_run_instances(self):
        """run_instances boots an instance and reports it as running."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id):
            # Minimal active machine image so the boot can proceed.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def dumb(*args, **kwargs):
            pass
        # Notifications are irrelevant here; silence them.
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        # Run RPC casts synchronously so the boot finishes in-test.
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['imageId'], 'ami-00000001')
        self.assertEqual(instance['instanceId'], 'i-00000001')
        self.assertEqual(instance['instanceState']['name'], 'running')
        self.assertEqual(instance['instanceType'], 'm1.small')
def test_run_instances_invalid_maxcount(self):
kwargs = {'image_id': 'ami-00000001',
'instance_type': CONF.default_flavor,
'max_count': 0}
run_instances = self.cloud.run_instances
def fake_show(self, context, id):
return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
'name': 'fake_name',
'container_format': 'ami',
'status': 'active',
'properties': {
'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'type': 'machine'},
'status': 'active'}
self.stubs.UnsetAll()
self.stubs.Set(fake._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InvalidInput, run_instances,
self.context, **kwargs)
def test_run_instances_invalid_mincount(self):
kwargs = {'image_id': 'ami-00000001',
'instance_type': CONF.default_flavor,
'min_count': 0}
run_instances = self.cloud.run_instances
def fake_show(self, context, id):
return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
'name': 'fake_name',
'container_format': 'ami',
'status': 'active',
'properties': {
'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'type': 'machine'},
'status': 'active'}
self.stubs.UnsetAll()
self.stubs.Set(fake._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InvalidInput, run_instances,
self.context, **kwargs)
def test_run_instances_invalid_count(self):
kwargs = {'image_id': 'ami-00000001',
'instance_type': CONF.default_flavor,
'max_count': 1,
'min_count': 2}
run_instances = self.cloud.run_instances
def fake_show(self, context, id):
return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
'name': 'fake_name',
'container_format': 'ami',
'status': 'active',
'properties': {
'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'type': 'machine'},
'status': 'active'}
self.stubs.UnsetAll()
self.stubs.Set(fake._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InvalidInput, run_instances,
self.context, **kwargs)
    def test_run_instances_availability_zone(self):
        """The requested placement zone is forwarded to compute_api.create."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1,
                  'placement': {'availability_zone': 'fake'},
                  }
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id):
            # Minimal active machine image so the boot can proceed.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        def fake_format(*args, **kwargs):
            # Response formatting is irrelevant to this test.
            pass
        self.stubs.Set(self.cloud, '_format_run_instances', fake_format)
        def fake_create(*args, **kwargs):
            self.assertEqual(kwargs['availability_zone'], 'fake')
            return ({'id': 'fake-instance'}, 'fake-res-id')
        self.stubs.Set(self.cloud.compute_api, 'create', fake_create)
        # NOTE(vish) the assert for this call is in the fake_create method.
        run_instances(self.context, **kwargs)
    def test_empty_reservation_id_from_token(self):
        """_resv_id_from_token returns None for an unknown client token."""
        client_token = 'client-token-1'
        def fake_get_all_system_metadata(context, search_filts):
            # Verify the lookup filters on the EC2 client token key/value.
            reference = [{'key': ['EC2_client_token']},
                         {'value': ['client-token-1']}]
            self.assertEqual(search_filts, reference)
            return []
        self.stubs.Set(self.cloud.compute_api, 'get_all_system_metadata',
                       fake_get_all_system_metadata)
        resv_id = self.cloud._resv_id_from_token(self.context, client_token)
        self.assertIsNone(resv_id)
    def test_run_instances_idempotent(self):
        """Repeated run_instances with the same client token is idempotent."""
        # Ensure subsequent run_instances calls with same client token
        # are idempotent and that ones with different client_token are not
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show(self, context, id):
            # Minimal active machine image so boots can proceed.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'},
                    'container_format': 'ami',
                    'status': 'active'}
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def dumb(*args, **kwargs):
            pass
        self.stubs.Set(compute_utils, 'notify_about_instance_usage', dumb)
        self.useFixture(cast_as_call.CastAsCall(self.stubs))
        # A fresh token boots a fresh instance.
        kwargs['client_token'] = 'client-token-1'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = 'client-token-2'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        # Reusing a token returns the already-booted instance.
        kwargs['client_token'] = 'client-token-2'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000002')
        kwargs['client_token'] = 'client-token-1'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000001')
        kwargs['client_token'] = 'client-token-3'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000003')
        # make sure terminated instances lose their client tokens
        self.cloud.stop_instances(self.context,
                                  instance_id=[instance['instanceId']])
        self.cloud.terminate_instances(self.context,
                                       instance_id=[instance['instanceId']])
        kwargs['client_token'] = 'client-token-3'
        result = run_instances(self.context, **kwargs)
        instance = result['instancesSet'][0]
        self.assertEqual(instance['instanceId'], 'i-00000004')
    def test_run_instances_image_state_none(self):
        """An image without a status field is rejected as not active."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show_no_state(self, context, id):
            # Deliberately omits 'status' so the image looks inactive.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine'}, 'container_format': 'ami'}
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_no_state)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
    def test_run_instances_image_state_invalid(self):
        """An image still in 'decrypting' state is rejected as not active."""
        kwargs = {'image_id': 'ami-00000001',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        run_instances = self.cloud.run_instances
        def fake_show_decrypt(self, context, id):
            # 'image_state': 'decrypting' marks an image mid-import.
            return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                    'name': 'fake_name',
                    'container_format': 'ami',
                    'status': 'active',
                    'properties': {
                        'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                        'type': 'machine', 'image_state': 'decrypting'}}
        self.stubs.UnsetAll()
        self.stubs.Set(fake._FakeImageService, 'show', fake_show_decrypt)
        self.assertRaises(exception.ImageNotActive, run_instances,
                          self.context, **kwargs)
def test_run_instances_image_status_active(self):
kwargs = {'image_id': 'ami-00000001',
'instance_type': CONF.default_flavor,
'max_count': 1}
run_instances = self.cloud.run_instances
def fake_show_stat_active(self, context, id):
return {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
'name': 'fake_name',
'container_format': 'ami',
'status': 'active',
'properties': {
'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'ramdisk_id': 'cedef40a-ed67-4d10-800e-17455edce175',
'type': 'machine'},
'status': 'active'}
def fake_id_to_glance_id(context, id):
return 'cedef40a-ed67-4d10-800e-17455edce175'
self.stubs.Set(fake._FakeImageService, 'show', fake_show_stat_active)
self.stubs.Set(ec2utils, 'id_to_glance_id', fake_id_to_glance_id)
result = run_instances(self.context, **kwargs)
self.assertEqual(len(result['instancesSet']), 1)
def _restart_compute_service(self, periodic_interval_max=None):
"""restart compute service. NOTE: fake driver forgets all instances."""
self.compute.kill()
if periodic_interval_max:
self.compute = self.start_service(
'compute', periodic_interval_max=periodic_interval_max)
else:
self.compute = self.start_service('compute')
    def test_stop_start_instance(self):
        """An instance can be stopped, started again, and terminated."""
        # Makes sure stop/start instance works.
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertTrue(result)
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        # Terminating from 'stopped' reports the stopped->terminated
        # state transition (EC2 codes: 80=stopped, 48=terminated).
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
    def test_start_instances(self):
        """start_instances brings a stopped instance back to running."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        result = self.cloud.start_instances(self.context, [instance_id])
        self.assertTrue(result)
        # Terminating from 'running' reports the running->terminated
        # state transition (EC2 codes: 16=running, 48=terminated).
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_start_instances_policy_failed(self):
        """A policy denying compute:start makes start_instances raise."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # Restrict 'compute:start' to a project this context is not in.
        rules = {
            "compute:start":
                common_policy.parse_rule("project_id:non_fake"),
        }
        common_policy.set_rules(common_policy.Rules(rules))
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.cloud.start_instances,
                                self.context, [instance_id])
        self.assertIn("compute:start", exc.format_message())
        self._restart_compute_service()
    def test_stop_instances(self):
        """stop_instances stops a running instance."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        result = self.cloud.stop_instances(self.context, [instance_id])
        self.assertTrue(result)
        # Terminating from 'stopped' reports the stopped->terminated
        # state transition (EC2 codes: 80=stopped, 48=terminated).
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
    def test_stop_instances_policy_failed(self):
        """A policy denying compute:stop makes stop_instances raise."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # Restrict 'compute:stop' to a project this context is not in.
        rules = {
            "compute:stop":
                common_policy.parse_rule("project_id:non_fake")
        }
        common_policy.set_rules(common_policy.Rules(rules))
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.cloud.stop_instances,
                                self.context, [instance_id])
        self.assertIn("compute:stop", exc.format_message())
        self._restart_compute_service()
    def test_terminate_instances(self):
        """terminate_instances reports the running->terminated transition."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        instance_id = self._run_instance(**kwargs)
        # a running instance can't be started.
        self.assertRaises(exception.InstanceInvalidState,
                          self.cloud.start_instances,
                          self.context, [instance_id])
        # EC2 state codes: 16=running, 48=terminated.
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [instance_id])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_terminate_instances_invalid_instance_id(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
self.assertRaises(exception.InstanceNotFound,
self.cloud.terminate_instances,
self.context, ['i-2'])
self._restart_compute_service()
def test_terminate_instances_disable_terminate(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
internal_uuid = db.get_instance_uuid_by_ec2_id(self.context,
ec2utils.ec2_id_to_id(instance_id))
instance = db.instance_update(self.context, internal_uuid,
{'disable_terminate': True})
expected = {'instancesSet': [
{'instanceId': 'i-00000001',
'previousState': {'code': 16,
'name': 'running'},
'currentState': {'code': 16,
'name': 'running'}}]}
result = self.cloud.terminate_instances(self.context, [instance_id])
self.assertEqual(result, expected)
instance = db.instance_update(self.context, internal_uuid,
{'disable_terminate': False})
expected = {'instancesSet': [
{'instanceId': 'i-00000001',
'previousState': {'code': 16,
'name': 'running'},
'currentState': {'code': 48,
'name': 'terminated'}}]}
result = self.cloud.terminate_instances(self.context, [instance_id])
self.assertEqual(result, expected)
self._restart_compute_service()
    def test_terminate_instances_two_instances(self):
        """Terminating two instances reports each one's own prior state."""
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1, }
        inst1 = self._run_instance(**kwargs)
        inst2 = self._run_instance(**kwargs)
        # Stop only the first instance so the two entries differ.
        result = self.cloud.stop_instances(self.context, [inst1])
        self.assertTrue(result)
        # EC2 state codes: 80=stopped, 16=running, 48=terminated.
        expected = {'instancesSet': [
                        {'instanceId': 'i-00000001',
                         'previousState': {'code': 80,
                                           'name': 'stopped'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}},
                        {'instanceId': 'i-00000002',
                         'previousState': {'code': 16,
                                           'name': 'running'},
                         'currentState': {'code': 48,
                                          'name': 'terminated'}}]}
        result = self.cloud.terminate_instances(self.context, [inst1, inst2])
        self.assertEqual(result, expected)
        self._restart_compute_service()
def test_reboot_instances(self):
kwargs = {'image_id': 'ami-1',
'instance_type': CONF.default_flavor,
'max_count': 1, }
instance_id = self._run_instance(**kwargs)
# a running instance can't be started.
self.assertRaises(exception.InstanceInvalidState,
self.cloud.start_instances,
self.context, [instance_id])
result = self.cloud.reboot_instances(self.context, [instance_id])
self.assertTrue(result)
def _volume_create(self, volume_id=None):
kwargs = {'name': 'test-volume',
'description': 'test volume description',
'status': 'available',
'host': 'fake',
'size': 1,
'attach_status': 'detached'}
if volume_id:
kwargs['volume_id'] = volume_id
return self.volume_api.create_with_kwargs(self.context, **kwargs)
def _snapshot_create(self, snapshot_id=None):
kwargs = {'volume_id': 'ccec42a2-c220-4806-b762-6b12fbb592e4',
'status': "available",
'volume_size': 1}
if snapshot_id:
kwargs['snap_id'] = snapshot_id
return self.volume_api.create_snapshot_with_kwargs(self.context,
**kwargs)
def _create_snapshot(self, ec2_volume_id):
result = self.cloud.create_snapshot(self.context,
volume_id=ec2_volume_id)
return result['snapshotId']
    def _do_test_create_image(self, no_reboot):
        """Make sure that CreateImage works.

        Boots an EBS-backed instance, calls create_image with the given
        no_reboot flag, and checks both the registered image metadata and
        whether the fake driver was powered off/on (it should be iff
        no_reboot is False).
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)
        def fake_show(meh, context, id):
            # Image metadata with an EBS root device so the created
            # image is volume-backed.
            bdm = [dict(snapshot_id=snapshots[0],
                        volume_size=1,
                        device_name='sda1',
                        delete_on_termination=False)]
            props = dict(kernel_id='cedef40a-ed67-4d10-800e-17455edce175',
                         ramdisk_id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                         root_device_name='/dev/sda1',
                         block_device_mapping=bdm)
            return dict(id=id,
                        properties=props,
                        container_format='ami',
                        status='active',
                        is_public=True)
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            # The instance's root disk is a volume created from the
            # first test snapshot.
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'sda1',
                         'boot_index': 0,
                         'delete_on_termination': False,
                         'connection_info': '{"foo":"bar"}',
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        # Record whether the driver power-cycled the instance.
        virt_driver = {}
        def fake_power_on(self, context, instance, network_info,
                          block_device_info):
            virt_driver['powered_on'] = True
        self.stubs.Set(fake_virt.FakeDriver, 'power_on', fake_power_on)
        def fake_power_off(self, instance):
            virt_driver['powered_off'] = True
        self.stubs.Set(fake_virt.FakeDriver, 'power_off', fake_power_off)
        result = self.cloud.create_image(self.context, ec2_instance_id,
                                         no_reboot=no_reboot)
        ec2_ids = [result['imageId']]
        created_image = self.cloud.describe_images(self.context,
                                                   ec2_ids)['imagesSet'][0]
        # The registered image must describe the EBS root device.
        self.assertIn('blockDeviceMapping', created_image)
        bdm = created_image['blockDeviceMapping'][0]
        self.assertEqual(bdm.get('deviceName'), 'sda1')
        self.assertIn('ebs', bdm)
        self.assertEqual(bdm['ebs'].get('snapshotId'),
                         ec2utils.id_to_ec2_snap_id(snapshots[0]))
        self.assertEqual(created_image.get('kernelId'), 'aki-00000001')
        self.assertEqual(created_image.get('ramdiskId'), 'ari-00000002')
        self.assertEqual(created_image.get('rootDeviceType'), 'ebs')
        # Power-cycling happens exactly when no_reboot is False.
        self.assertNotEqual(virt_driver.get('powered_on'), no_reboot)
        self.assertNotEqual(virt_driver.get('powered_off'), no_reboot)
        self.cloud.terminate_instances(self.context, [ec2_instance_id])
        self._restart_compute_service()
def test_create_image_no_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(True)
def test_create_image_with_reboot(self):
# Make sure that CreateImage works.
self._do_test_create_image(False)
    def test_create_image_instance_store(self):
        """Ensure CreateImage fails as expected for an instance-store-backed
        instance
        """
        # enforce periodic tasks run in short time to avoid wait for 60s.
        self._restart_compute_service(periodic_interval_max=0.3)
        (volumes, snapshots) = self._setUpImageSet(
            create_volumes_and_snapshots=True)
        kwargs = {'image_id': 'ami-1',
                  'instance_type': CONF.default_flavor,
                  'max_count': 1}
        ec2_instance_id = self._run_instance(**kwargs)
        def fake_block_device_mapping_get_all_by_instance(context, inst_id,
                                                          use_slave=False):
            # 'vda' is not the instance's root device, so the instance
            # counts as instance-store backed.
            return [fake_block_device.FakeDbBlockDeviceDict(
                        {'volume_id': volumes[0],
                         'snapshot_id': snapshots[0],
                         'source_type': 'snapshot',
                         'destination_type': 'volume',
                         'volume_size': 1,
                         'device_name': 'vda',
                         'delete_on_termination': False,
                         'no_device': None})]
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_block_device_mapping_get_all_by_instance)
        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.create_image,
                          self.context,
                          ec2_instance_id,
                          no_reboot=True)
    @staticmethod
    def _fake_bdm_get(ctxt, id, use_slave=False):
        """Return fake DB block-device-mapping rows for one instance.

        Covers a volume-backed device, a snapshot-backed device, a
        no-device marker, and several local (ephemeral/swap) devices.
        """
        blockdms = [{'volume_id': 87654321,
                     'source_type': 'volume',
                     'destination_type': 'volume',
                     'snapshot_id': None,
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdh'},
                    {'volume_id': None,
                     'snapshot_id': 98765432,
                     'source_type': 'snapshot',
                     'destination_type': 'volume',
                     'no_device': None,
                     'delete_on_termination': True,
                     'device_name': '/dev/sdi'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': True,
                     'source_type': 'blank',
                     'destination_type': None,
                     'delete_on_termination': None,
                     'device_name': None},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdb'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': 'swap',
                     'delete_on_termination': None,
                     'device_name': '/dev/sdc'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sdd'},
                    {'volume_id': None,
                     'snapshot_id': None,
                     'no_device': None,
                     'source_type': 'blank',
                     'destination_type': 'local',
                     'guest_format': None,
                     'delete_on_termination': None,
                     'device_name': '/dev/sd3'},
                    ]
        # Columns every real BDM row carries; merged into each fake row.
        extra = {
            'created_at': None,
            'updated_at': None,
            'deleted_at': None,
            'deleted': 0,
            'id': 0,
            'device_type': None,
            'disk_bus': None,
            'instance_uuid': '',
            'image_id': None,
            'volume_size': None,
            'connection_info': None,
            'boot_index': None,
            'guest_format': None,
        }
        # NOTE(review): update(extra) overwrites the '/dev/sdc' row's
        # guest_format 'swap' with None -- confirm that is intended;
        # the consuming test only inspects '/dev/sdh'.
        for bdm in blockdms:
            bdm.update(extra)
        return blockdms
    def test_describe_instance_attribute(self):
        # Make sure that describe_instance_attribute works.
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       self._fake_bdm_get)
        def fake_get(ctxt, instance_id, want_objects=False):
            # Build a fully-populated fake Instance object so every
            # attribute queried below has a known value.
            self.assertTrue(want_objects)
            inst_type = flavors.get_default_flavor()
            inst_type['name'] = 'fake_type'
            sys_meta = flavors.save_flavor_info({}, inst_type)
            secgroups = security_group_obj.SecurityGroupList()
            secgroups.objects.append(
                security_group_obj.SecurityGroup(name='fake0'))
            secgroups.objects.append(
                security_group_obj.SecurityGroup(name='fake1'))
            instance = instance_obj.Instance()
            instance.id = 0
            instance.uuid = 'e5fe5518-0288-4fa3-b0c4-c79764101b85'
            instance.root_device_name = '/dev/sdh'
            instance.security_groups = secgroups
            instance.vm_state = vm_states.STOPPED
            instance.kernel_id = 'cedef40a-ed67-4d10-800e-17455edce175'
            instance.ramdisk_id = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            instance.user_data = 'fake-user data'
            instance.shutdown_terminate = False
            instance.disable_terminate = False
            instance.system_metadata = sys_meta
            return instance
        self.stubs.Set(self.cloud.compute_api, 'get', fake_get)
        def fake_get_instance_uuid_by_ec2_id(ctxt, int_id):
            # 305419896 is the integer form of EC2 id 'i-12345678'.
            if int_id == 305419896:
                return 'e5fe5518-0288-4fa3-b0c4-c79764101b85'
            raise exception.InstanceNotFound(instance_id=int_id)
        self.stubs.Set(db, 'get_instance_uuid_by_ec2_id',
                       fake_get_instance_uuid_by_ec2_id)
        get_attribute = functools.partial(
            self.cloud.describe_instance_attribute,
            self.context, 'i-12345678')
        bdm = get_attribute('blockDeviceMapping')
        bdm['blockDeviceMapping'].sort()
        # Only the volume-backed root device shows up as an EBS mapping.
        expected_bdm = {'instance_id': 'i-12345678',
                        'rootDeviceType': 'ebs',
                        'blockDeviceMapping': [
                            {'deviceName': '/dev/sdh',
                             'ebs': {'status': 'attached',
                                     'deleteOnTermination': True,
                                     'volumeId': 'vol-05397fb1',
                                     'attachTime': '13:56:24'}}]}
        expected_bdm['blockDeviceMapping'].sort()
        self.assertEqual(bdm, expected_bdm)
        groupSet = get_attribute('groupSet')
        groupSet['groupSet'].sort()
        expected_groupSet = {'instance_id': 'i-12345678',
                             'groupSet': [{'groupId': 'fake0'},
                                          {'groupId': 'fake1'}]}
        expected_groupSet['groupSet'].sort()
        self.assertEqual(groupSet, expected_groupSet)
        self.assertEqual(get_attribute('instanceInitiatedShutdownBehavior'),
                         {'instance_id': 'i-12345678',
                          'instanceInitiatedShutdownBehavior': 'stop'})
        self.assertEqual(get_attribute('disableApiTermination'),
                         {'instance_id': 'i-12345678',
                          'disableApiTermination': False})
        self.assertEqual(get_attribute('instanceType'),
                         {'instance_id': 'i-12345678',
                          'instanceType': 'fake_type'})
        self.assertEqual(get_attribute('kernel'),
                         {'instance_id': 'i-12345678',
                          'kernel': 'aki-00000001'})
        self.assertEqual(get_attribute('ramdisk'),
                         {'instance_id': 'i-12345678',
                          'ramdisk': 'ari-00000002'})
        self.assertEqual(get_attribute('rootDeviceName'),
                         {'instance_id': 'i-12345678',
                          'rootDeviceName': '/dev/sdh'})
        # NOTE(yamahata): this isn't supported
        # get_attribute('sourceDestCheck')
        self.assertEqual(get_attribute('userData'),
                         {'instance_id': 'i-12345678',
                          'userData': '}\xa9\x1e\xba\xc7\xabu\xabZ'})
    def test_instance_initiated_shutdown_behavior(self):
        """Images with various BDM layouts all report 'stop' behavior."""
        def test_dia_iisb(expected_result, **kwargs):
            """test describe_instance_attribute
            attribute instance_initiated_shutdown_behavior
            """
            kwargs.update({'instance_type': CONF.default_flavor,
                           'max_count': 1})
            instance_id = self._run_instance(**kwargs)
            result = self.cloud.describe_instance_attribute(self.context,
                    instance_id, 'instanceInitiatedShutdownBehavior')
            self.assertEqual(result['instanceInitiatedShutdownBehavior'],
                             expected_result)
            # Tear the instance down again so each sub-case starts clean.
            expected = {'instancesSet': [
                            {'instanceId': instance_id,
                             'previousState': {'code': 16,
                                               'name': 'running'},
                             'currentState': {'code': 48,
                                              'name': 'terminated'}}]}
            result = self.cloud.terminate_instances(self.context,
                                                    [instance_id])
            self.assertEqual(result, expected)
            self._restart_compute_service()
        test_dia_iisb('stop', image_id='ami-1')
        block_device_mapping = [{'device_name': '/dev/vdb',
                                 'virtual_name': 'ephemeral0'}]
        test_dia_iisb('stop', image_id='ami-2',
                      block_device_mapping=block_device_mapping)
        def fake_show(self, context, id_):
            # Each fake image id exercises a different combination of
            # 'mappings' and 'block_device_mapping' properties.
            LOG.debug("id_ %s", id_)
            prop = {}
            if id_ == 'ami-3':
                pass
            elif id_ == 'ami-4':
                prop = {'mappings': [{'device': 'sdb0',
                                          'virtual': 'ephemeral0'}]}
            elif id_ == 'ami-5':
                prop = {'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}
            elif id_ == 'ami-6':
                prop = {'mappings': [{'device': 'sdb0',
                                          'virtual': 'ephemeral0'}],
                        'block_device_mapping':
                        [{'device_name': '/dev/sdb0',
                          'virtual_name': 'ephemeral0'}]}
            prop_base = {'kernel_id': 'cedef40a-ed67-4d10-800e-17455edce175',
                         'type': 'machine'}
            prop_base.update(prop)
            return {
                'id': id_,
                'name': 'fake_name',
                'properties': prop_base,
                'container_format': 'ami',
                'status': 'active'}
        # NOTE(yamahata): create ami-3 ... ami-6
        # ami-1 and ami-2 is already created by setUp()
        for i in range(3, 7):
            db.s3_image_create(self.context, 'ami-%d' % i)
        self.stubs.Set(fake._FakeImageService, 'show', fake_show)
        test_dia_iisb('stop', image_id='ami-3')
        test_dia_iisb('stop', image_id='ami-4')
        test_dia_iisb('stop', image_id='ami-5')
        test_dia_iisb('stop', image_id='ami-6')
    def test_create_delete_tags(self):
        """create_tags/delete_tags update instance metadata and notify."""
        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        # We need to stub out the MQ call - it won't succeed. We do want
        # to check that the method is called, though
        meta_changes = [None]
        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Capture the metadata diff that would be cast over RPC.
            meta_changes[0] = diff
        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)
        # Create a test image
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
        }
        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        # Create some tags
        md = {'key': 'foo', 'value': 'bar'}
        md_result = {'foo': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id],
                               tag=[md])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, md_result)
        # '+' marks an added key in the metadata diff.
        self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])
        # Delete them
        self.cloud.delete_tags(self.context, resource_id=[ec2_id],
                               tag=[{'key': 'foo', 'value': 'bar'}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, {})
        # '-' marks a removed key in the metadata diff.
        self.assertEqual(meta_changes, [{'foo': ['-']}])
    def test_describe_tags(self):
        """describe_tags filtering: by resource id/type, key, and value,
        including '?' / '*' wildcards and ANDed multi-filter queries."""
        # We need to stub network calls
        self._stub_instance_get_with_fixed_ips('get_all')
        self._stub_instance_get_with_fixed_ips('get')
        # We need to stub out the MQ call - it won't succeed. We do want
        # to check that the method is called, though
        meta_changes = [None]

        def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
                                          instance_uuid=None):
            # Capture the metadata diff the compute RPC layer would receive.
            meta_changes[0] = diff

        self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
                       fake_change_instance_metadata)
        # Create some test images
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst1_kwargs = {
                'reservation_id': 'a',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1111',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 1)
        }
        inst2_kwargs = {
                'reservation_id': 'b',
                'image_ref': image_uuid,
                'instance_type_id': 1,
                'vm_state': 'active',
                'launched_at': timeutils.utcnow(),
                'hostname': 'server-1112',
                'created_at': datetime.datetime(2012, 5, 1, 1, 1, 2)
        }
        inst1 = db.instance_create(self.context, inst1_kwargs)
        ec2_id1 = ec2utils.id_to_ec2_inst_id(inst1['uuid'])
        inst2 = db.instance_create(self.context, inst2_kwargs)
        ec2_id2 = ec2utils.id_to_ec2_inst_id(inst2['uuid'])
        # Create some tags
        # We get one overlapping pair, and each has a different key value pair
        # inst1 : {'foo': 'bar', 'bax': 'wibble'}
        # inst2 : {'foo': 'bar', 'baz': 'quux'}
        md = {'key': 'foo', 'value': 'bar'}
        md_result = {'foo': 'bar'}
        self.cloud.create_tags(self.context, resource_id=[ec2_id1, ec2_id2],
                               tag=[md])
        self.assertEqual(meta_changes, [{'foo': ['+', 'bar']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, md_result)
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst2)
        self.assertEqual(metadata, md_result)
        md2 = {'key': 'baz', 'value': 'quux'}
        md2_result = {'baz': 'quux'}
        md2_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id2],
                               tag=[md2])
        self.assertEqual(meta_changes, [{'baz': ['+', 'quux']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst2)
        self.assertEqual(metadata, md2_result)
        md3 = {'key': 'bax', 'value': 'wibble'}
        md3_result = {'bax': 'wibble'}
        md3_result.update(md_result)
        self.cloud.create_tags(self.context, resource_id=[ec2_id1],
                               tag=[md3])
        self.assertEqual(meta_changes, [{'bax': ['+', 'wibble']}])
        metadata = self.cloud.compute_api.get_instance_metadata(self.context,
                                                                inst1)
        self.assertEqual(metadata, md3_result)
        # Expected tagSet entries for each (instance, key) pair above.
        inst1_key_foo = {'key': u'foo', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'bar'}
        inst1_key_bax = {'key': u'bax', 'resource_id': 'i-00000001',
                         'resource_type': 'instance', 'value': u'wibble'}
        inst2_key_foo = {'key': u'foo', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'bar'}
        inst2_key_baz = {'key': u'baz', 'resource_id': 'i-00000002',
                         'resource_type': 'instance', 'value': u'quux'}
        # We should be able to search by:
        # No filter
        tags = self.cloud.describe_tags(self.context)['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                      inst2_key_baz, inst1_key_bax])
        # Resource ID
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'resource-id',
                                                 'value': [ec2_id1]}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst1_key_bax])
        # Resource Type
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'resource-type',
                                                 'value': ['instance']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo,
                                      inst2_key_baz, inst1_key_bax])
        # Key, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['foo']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['baz']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])
        # '?' matches a single character, '*' any run of characters.
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['ba?']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['b*']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_bax, inst2_key_baz])
        # Value, either bare or with wildcards
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'value',
                                                 'value': ['bar']}])['tagSet']
        self.assertEqualSorted(tags, [inst1_key_foo, inst2_key_foo])
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'value',
                                                 'value': ['wi*']}])['tagSet']
        self.assertEqual(tags, [inst1_key_bax])
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'value',
                                                 'value': ['quu?']}])['tagSet']
        self.assertEqual(tags, [inst2_key_baz])
        # Multiple values
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['baz', 'bax']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz, inst1_key_bax])
        # Multiple filters (AND): no match
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['baz']},
                                                {'name': 'value',
                                                 'value': ['wibble']}])['tagSet']
        self.assertEqual(tags, [])
        # Multiple filters (AND): match
        tags = self.cloud.describe_tags(self.context,
                                        filter=[{'name': 'key',
                                                 'value': ['baz']},
                                                {'name': 'value',
                                                 'value': ['quux']}])['tagSet']
        self.assertEqualSorted(tags, [inst2_key_baz])
        # And we should fail on supported resource types
        self.assertRaises(exception.InvalidParameterValue,
                          self.cloud.describe_tags,
                          self.context,
                          filter=[{'name': 'resource-type',
                                   'value': ['instance', 'volume']}])
def test_resource_type_from_id(self):
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'i-12345'),
'instance')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'r-12345'),
'reservation')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'vol-12345'),
'volume')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'snap-12345'),
'snapshot')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ami-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'ari-12345'),
'image')
self.assertEqual(
ec2utils.resource_type_from_id(self.context, 'aki-12345'),
'image')
self.assertIsNone(
ec2utils.resource_type_from_id(self.context, 'x-12345'))
    @mock.patch.object(ec2utils, 'ec2_vol_id_to_uuid',
                       side_effect=lambda
                       ec2_volume_id: uuidutils.generate_uuid())
    def test_detach_volume_unattched_error(self, mock_ec2_vol_id_to_uuid):
        # Validates that VolumeUnattached is raised if the volume doesn't
        # have an instance_uuid value.
        ec2_volume_id = 'vol-987654321'
        # volume_api.get returns only {'id': ...} - crucially no
        # 'instance_uuid' key - which is what should trigger the error.
        with mock.patch.object(self.cloud.volume_api, 'get',
                               side_effect=lambda context, volume_id:
                               {'id': volume_id}) as mock_get:
            self.assertRaises(exception.VolumeUnattached,
                              self.cloud.detach_volume,
                              self.context,
                              ec2_volume_id)
            # The volume lookup happened exactly once, with a translated uuid.
            mock_get.assert_called_once_with(self.context, mock.ANY)
            mock_ec2_vol_id_to_uuid.assert_called_once_with(ec2_volume_id)
class CloudTestCaseNeutronProxy(test.TestCase):
    """EC2 security-group API tests run against the neutron proxy backend."""

    def setUp(self):
        super(CloudTestCaseNeutronProxy, self).setUp()
        # Route security-group calls through neutron instead of nova-network.
        cfg.CONF.set_override('security_group_api', 'neutron')
        self.cloud = cloud.CloudController()
        # Swap in the fake neutron client; restored in tearDown().
        self.original_client = neutronv2.get_client
        neutronv2.get_client = test_neutron.get_client
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)

    def tearDown(self):
        # Undo the client monkey-patch and wipe the fake client's state.
        neutronv2.get_client = self.original_client
        test_neutron.get_client()._reset()
        super(CloudTestCaseNeutronProxy, self).tearDown()

    def test_describe_security_groups(self):
        """describe_security_groups lists groups and honours name filters."""
        name = 'test'
        desc = 'test'
        self.cloud.create_security_group(self.context, name, desc)
        unfiltered = self.cloud.describe_security_groups(self.context)
        # NOTE(vish): should have the default group as well
        self.assertEqual(2, len(unfiltered['securityGroupInfo']))
        filtered = self.cloud.describe_security_groups(self.context,
                                                       group_name=[name])
        info = filtered['securityGroupInfo']
        self.assertEqual(1, len(info))
        self.assertEqual(name, info[0]['groupName'])
        self.cloud.delete_security_group(self.context, name)

    def test_describe_security_groups_by_id(self):
        name = 'test'
        desc = 'test'
        self.cloud.create_security_group(self.context, name, desc)
        neutron = test_neutron.get_client()
        # cloud.create_security_group does not expose the new group's id,
        # so look it up directly through the (fake) neutron client.
        matches = neutron.list_security_groups(
            **{'name': name})['security_groups']
        result = self.cloud.describe_security_groups(
            self.context, group_id=[matches[0]['id']])
        info = result['securityGroupInfo']
        self.assertEqual(1, len(info))
        self.assertEqual(name, info[0]['groupName'])
        self.cloud.delete_security_group(self.context, name)

    def test_create_delete_security_group(self):
        descript = 'test description'
        created = self.cloud.create_security_group(
            self.context, 'testgrp', descript)
        self.assertEqual(
            descript,
            created['securityGroupSet'][0]['groupDescription'])
        self.assertTrue(
            self.cloud.delete_security_group(self.context, 'testgrp'))
|
{
"content_hash": "93cd05e4505b1b0e96fe81c0871cfcce",
"timestamp": "",
"source": "github",
"line_count": 3138,
"max_line_length": 79,
"avg_line_length": 44.80752071383046,
"alnum_prop": 0.5221256560886449,
"repo_name": "CiscoSystems/nova",
"id": "f94d9b351ec1510e9f78bd8403181cdc1bd913de",
"size": "141431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/api/ec2/test_cloud.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13926229"
},
{
"name": "Shell",
"bytes": "17451"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render,redirect
from django.http import HttpResponse
from PIL import Image
from StringIO import StringIO
import ImageFilter
import random, math
# NOTE(review): the block below is an abandoned "explode" prototype kept in a
# module-level string literal; it is never executed or referenced. The literal
# is left byte-for-byte intact.
'''
def is_point_inside(p, im):
    return p[0] >= 0 and p[1] >= 0 and p[0] < im.size[0] and p[1] < im.size[1]
def getgray(rgb):
    return (rgb[0] + rgb[1] + rgb[2]) / 3.0
def explode(im, origin, particles = 100):
    impacts = []
    im_edges = im.filter(ImageFilter.FIND_EDGES)
    im_edges.show()
    #im_edges = im_edges.filter(ImageFilter.MedianFilter())
    #im_edges.show()
    im.putpixel(origin, (255, 0, 0)) # debug
    for i in range(particles):
        p = origin
        angle = random.uniform(0, 2*math.pi)
        direction = (math.cos(angle), math.sin(angle))
        length = 0.0
        trace = []
        max_diff = 0.0
        # Keep within the bounds.
        while is_point_inside(p, im):
            last_value = getgray(im_edges.getpixel(p))
            # Next ..
            length += 1.0
            offset = (direction[0]*length, direction[1]*length)
            p = (int(round(origin[0] + offset[0])), int(round(origin[1] + offset[1])))
            if is_point_inside(p, im):
                grayness = getgray(im_edges.getpixel(p))
                diff = math.fabs(last_value - grayness)
                if diff > max_diff:
                    max_diff = diff
                trace.append( (p, length, grayness, diff) )
            else:
                break
        # Now the max_diff is known.
        for (p, length, grayness, diff) in trace:
            diff = diff / max_diff # Normalize
            im.putpixel(p, (0, 255, 0)) # debug
            if diff > 0.08:
                break
            #print length, diff
    return impacts
'''
def get_board(filename="temp6.jpg"):
    """Load the raw whiteboard photo.

    filename -- path of the image to open; defaults to the previously
    hard-coded sample image, so existing no-argument callers are unaffected.
    """
    return Image.open(filename)
def original_board(request):
    """Serve the untransformed board photo as a PNG response."""
    buf = StringIO()
    get_board().save(buf, 'PNG')
    return HttpResponse(buf.getvalue(), content_type="image/png")
def default_board(request):
    """Redirect to the board view using the stock corner coordinates."""
    corners = (957, 880, 904, 1609, 1668, 1641, 1669, 903)
    bounds_param = ",".join(str(c) for c in corners)
    return redirect('board', size=500, bounds=bounds_param)
# Create your views here.
def board(request, size, bounds):
    """Serve a perspective-corrected, square crop of the whiteboard.

    size -- edge length in pixels of the square output image
    bounds -- comma-separated x,y coordinates of the four source corners
        (exactly 8 integers), fed to Image.transform as the QUAD data

    Returns the warped image as a PNG, or a 400 response when ``bounds``
    does not contain exactly 8 values.
    """
    im = get_board()
    bounds = list(map(int, bounds.split(",")))
    size = int(size)
    if len(bounds) != 8:
        # BUG FIX: previously this fell through with ``whiteboard`` unbound,
        # raising NameError (HTTP 500); report the malformed input instead.
        return HttpResponse("bounds must contain exactly 8 integers",
                            status=400)
    whiteboard = im.transform((size, size), Image.QUAD, bounds, Image.BICUBIC)
    # Save and return
    out = StringIO()
    whiteboard.save(out, 'PNG')
    return HttpResponse(out.getvalue(), content_type="image/png")
|
{
"content_hash": "efa7426974f99a7715d7635138565e8e",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 28.556962025316455,
"alnum_prop": 0.6591312056737588,
"repo_name": "Socialsquare/WhiteboardCamera",
"id": "22f9bd27dcf9cd329e25676cb0cc5af30d233e49",
"size": "2256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BoardExtractor/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5561"
}
],
"symlink_target": ""
}
|
class RecursionError( OverflowError, ValueError ):
    # NOTE: on Python 3 this shadows the builtin RecursionError; kept for
    # backwards compatibility with callers catching this module's class.
    '''Unable to calculate result because of recursive structure'''
def sort(nodes, routes, noRecursion=1):
    '''Stage-based topological sort.

    Passed a list of node IDs and a list of (source, dest) ID routes,
    attempt to create a list of stages where each sub-list is one stage
    in a process: a node appears in the first stage after all of its
    parents. With noRecursion true, a cycle raises RecursionError;
    otherwise the remaining nodes are emitted as one final stage.
    '''
    children, parents = _buildChildrenLists(routes)
    # first stage is those nodes
    # having no incoming routes...
    stage = []
    stages = [stage]
    taken = []
    for node in nodes:
        if (not parents.get(node)):
            stage.append (node)
    if nodes and not stage:
        # there is no element which does not depend on
        # some other element!!!
        # (pure cycle: seed the first stage arbitrarily to make progress)
        stage.append( nodes[0])
    taken.extend( stage )
    nodes = list(filter(lambda x, l=stage: x not in l, nodes))
    while nodes:
        previousStageChildren = []
        nodelen = len(nodes)
        # second stage are those nodes
        # which are direct children of the first stage
        for node in stage:
            for child in children.get (node, []):
                if child not in previousStageChildren and child not in taken:
                    previousStageChildren.append(child)
                elif child in taken and noRecursion:
                    # a child already placed earlier means a back-edge
                    raise RecursionError( (child, node) )
        # unless they are children of other direct children...
        # TODO, actually do that...
        stage = previousStageChildren
        removes = []
        for current in stage:
            # Defer any node that also has a parent in this same stage,
            # unless the two are mutually dependent on each other.
            currentParents = parents.get( current, [] )
            for parent in currentParents:
                if parent in stage and parent != current:
                    # might wind up removing current...
                    if not current in parents.get(parent, []):
                        # is not mutually dependent...
                        removes.append( current )
        for remove in removes:
            while remove in stage:
                stage.remove( remove )
        stages.append( stage)
        taken.extend( stage )
        nodes = list(filter(lambda x, l=stage: x not in l, nodes))
        if nodelen == len(nodes):
            # no progress this pass: the remainder is cyclic
            if noRecursion:
                raise RecursionError( nodes )
            else:
                stages.append( nodes[:] )
                nodes = []
    return stages
def _buildChildrenLists (routes):
childrenTable = {}
parentTable = {}
for sourceID,destinationID in routes:
currentChildren = childrenTable.get( sourceID, [])
currentParents = parentTable.get( destinationID, [])
if not destinationID in currentChildren:
currentChildren.append ( destinationID)
if not sourceID in currentParents:
currentParents.append ( sourceID)
childrenTable[sourceID] = currentChildren
parentTable[destinationID] = currentParents
return childrenTable, parentTable
def toposort (nodes, routes, noRecursion=1):
    '''Topological sort from Tim Peters, fairly efficient
    in comparison (it seems).

    nodes -- sequence of hashable, mutually comparable node IDs
    routes -- sequence of (depended, depends) edges: ``depends`` cannot
        appear in an earlier generation than ``depended``
    noRecursion -- when true, raise RecursionError if a cycle blocks
        progress; when false, break the cycle arbitrarily and continue

    Returns a list of "generations"; each generation is a list of nodes
    whose prerequisites were all emitted in earlier generations.
    '''
    # dependencies: node -> (count of unsatisfied prerequisites, node)
    # inversedependencies: node -> list of nodes waiting on it
    dependencies = {}
    inversedependencies = {}
    if not nodes:
        return []
    if not routes:
        return [nodes]
    for node in nodes:
        dependencies[node] = (0, node)
        inversedependencies[node] = []
    for depended, depends in routes:
        # Bump the prerequisite count of the dependent node; nodes that
        # only appear in routes get a default entry instead of a KeyError.
        try:
            newdependencylevel, _ignored = dependencies.get(
                depends, (0, depends))
        except TypeError:
            print(depends)
            raise
        dependencies[depends] = (newdependencylevel + 1, depends)
        # "dependency (existence) of depended-on"
        newdependencylevel, _ignored = dependencies.get(
            depended, (0, depended))
        dependencies[depended] = (newdependencylevel, depended)
        # Inverse dependency set up
        dependencieslist = inversedependencies.get(depended, [])
        dependencieslist.append(depends)
        inversedependencies[depended] = dependencieslist
    ### Now we do the actual sorting
    # BUG FIX: dict.values() is a view on Python 3 and has no .sort();
    # materialize and sort it in one step (counts sort to the front).
    sortinglist = sorted(dependencies.values())
    output = []
    while sortinglist:
        deletelist = []
        generation = []
        output.append(generation)
        # Harvest every node whose prerequisite count has reached zero.
        while sortinglist and sortinglist[0][0] == 0:
            number, node = sortinglist[0]
            generation.append(node)
            deletelist.append(node)
            for inverse in inversedependencies.get(node, ()):
                try:
                    oldcount, inverse = dependencies[inverse]
                    if oldcount > 0:
                        # will be dealt with on later pass
                        dependencies[inverse] = (oldcount - 1, inverse)
                    else:
                        # will be dealt with on this pass,
                        # so needs not to be in the sorting list next time
                        deletelist.append(inverse)
                    # just in case a loop comes through
                    inversedependencies[node] = []
                except KeyError:
                    # dealing with a recursion-breaking run...
                    pass
            del sortinglist[0]
        # if no elements could be deleted, then
        # there is something which depends upon itself
        if not deletelist:
            if noRecursion:
                raise RecursionError(sortinglist)
            else:
                # hack so that something gets deleted...
                dependencies[sortinglist[0][1]] = (0, sortinglist[0][1])
        # delete the items that were dealt with
        for item in deletelist:
            try:
                del dependencies[item]
            except KeyError:
                pass
        # need to recreate the sortinglist
        sortinglist = sorted(dependencies.values())
        if not generation:
            # recursion-breaking pass produced nothing visible
            output.remove(generation)
    return output
if __name__ == "__main__":
    # Ad-hoc smoke test: print the stages for a small DAG.
    nodes = ['a', 'b', 'c', 'd', 'e', 'f']
    route = [('a', 'b'), ('b', 'c'), ('b', 'd'), ('e','f')]
    for x in toposort( nodes, route):
        for a in x:
            print(a)
    # NOTE(review): this early exit makes everything below unreachable;
    # it looks like leftover debug scaffolding.
    raise SystemExit
    import pprint, traceback
    nodes= [ 0,1,2,3,4,5 ]
    # Edge lists exercising chains, diamonds, duplicate edges, cycles and
    # self-loops; each is run through both sorters, with and without
    # recursion allowed.
    testingValues = [
        [ (0,1),(1,2),(2,3),(3,4),(4,5)],
        [ (0,1),(0,2),(1,2),(3,4),(4,5)],
        [
        (0,1),
        (0,2),
        (0,2),
        (2,4),
        (2,5),
        (3,2),
        (0,3)],
        [
        (0,1), # 3-element cycle test, no orphan nodes
        (1,2),
        (2,0),
        (2,4),
        (2,5),
        (3,2),
        (0,3)],
        [
        (0,1),
        (1,1),
        (1,1),
        (1,4),
        (1,5),
        (1,2),
        (3,1),
        (2,1),
        (2,0)],
        [
        (0,1),
        (1,0),
        (0,2),
        (0,3),
        ],
        [
        (0,1),
        (1,0),
        (0,2),
        (3,1),
        ],
    ]
    print('sort, no recursion allowed')
    for index in range(len(testingValues)):
##        print '  %s -- %s'%( index, testingValues[index])
        try:
            # NOTE(review): bare except hides real errors; fine for a demo.
            print('  %s' % sort( nodes, testingValues[index] ))
        except:
            print('exception raised')
    print('toposort, no recursion allowed')
    for index in range(len(testingValues)):
##        print '  %s -- %s'%( index, testingValues[index])
        try:
            print('  %s' % toposort( nodes, testingValues[index] ))
        except:
            print('exception raised')
    print('sort, recursion allowed')
    for index in range(len(testingValues)):
##        print '  %s -- %s'%( index, testingValues[index])
        try:
            print('  %s' % sort( nodes, testingValues[index],0 ))
        except:
            print('exception raised')
    print('toposort, recursion allowed')
    for index in range(len(testingValues)):
##        print '  %s -- %s'%( index, testingValues[index])
        try:
            print('  %s' % toposort( nodes, testingValues[index],0 ))
        except:
            print('exception raised')
|
{
"content_hash": "13779d4cf654d096cce48d2bd8d5507e",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 82,
"avg_line_length": 33.21825396825397,
"alnum_prop": 0.5203679369250985,
"repo_name": "flavour/eden",
"id": "7ce233717ce16f5db6be9106e6dabba67e602c9e",
"size": "8622",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "static/scripts/tools/toposortmf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "727"
},
{
"name": "CSS",
"bytes": "3351335"
},
{
"name": "HTML",
"bytes": "1367727"
},
{
"name": "JavaScript",
"bytes": "20109418"
},
{
"name": "NSIS",
"bytes": "3934"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "31407527"
},
{
"name": "Ruby",
"bytes": "8291"
},
{
"name": "Shell",
"bytes": "5059"
},
{
"name": "XSLT",
"bytes": "3274119"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from .app_settings import (
USER_EMAIL_FIELD, USER_FIRST_NAME_FIELD, USER_LAST_NAME_FIELD
)
# Not available in django 1.4 yet
def format_html(format_string, *args, **kwargs):
    """
    Similar to str.format, but passes all arguments through conditional_escape,
    and calls 'mark_safe' on the result. This function should be used instead
    of str.format or % interpolation to build up small HTML fragments.

    (Backport: django.utils.html.format_html only exists from Django 1.5.)
    """
    args_safe = map(conditional_escape, args)
    # BUG FIX: kwargs.iteritems() is Python-2-only; items() behaves
    # identically here and also works on Python 3.
    kwargs_safe = dict((k, conditional_escape(v))
                       for (k, v) in kwargs.items())
    return mark_safe(format_string.format(*args_safe, **kwargs_safe))
def get_object_or_none(klass, *args, **kwargs):
    """
    Modelled after get_object_or_404, but returns None instead of raising.

    ``klass`` may be a model class, a Manager, or a QuerySet; the lookup
    arguments are passed straight to ``QuerySet.get``.
    """
    if isinstance(klass, models.query.QuerySet):
        qs = klass
    elif isinstance(klass, models.manager.Manager):
        qs = klass.all()
    else:
        qs = klass._default_manager.all()
    try:
        return qs.get(*args, **kwargs)
    except (qs.model.DoesNotExist, ValueError):
        # ValueError covers e.g. a pk lookup with a value of the wrong type.
        return None
def get_user_search_fields():
    '''Get names of searchable fields on the user model.

    Because of custom user models, admin ``search_fields`` values for the
    user model cannot be set statically; this returns only those of the
    configured email / first-name / last-name fields that actually exist
    on the active user model.
    '''
    user_model = get_user_model()
    candidates = (USER_EMAIL_FIELD, USER_FIRST_NAME_FIELD,
                  USER_LAST_NAME_FIELD)
    fields = []
    for field_name in candidates:
        try:
            user_model._meta.get_field_by_name(field_name)
        except FieldDoesNotExist:
            continue
        fields.append(field_name)
    return fields
|
{
"content_hash": "c6fc3a75ad4e7f335df713ef0359ddff",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 79,
"avg_line_length": 32.98461538461538,
"alnum_prop": 0.6884328358208955,
"repo_name": "feinheit/zipfelchappe",
"id": "a54ef3ffde41757f2c572375451a5278afe1eb38",
"size": "2144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zipfelchappe/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "13748"
},
{
"name": "HTML",
"bytes": "54873"
},
{
"name": "JavaScript",
"bytes": "12651"
},
{
"name": "Python",
"bytes": "183855"
},
{
"name": "Shell",
"bytes": "150"
}
],
"symlink_target": ""
}
|
from django.views.generic import DetailView, ListView
from core.mixins import ConveneTimeMixin
from explainers.models import Explainer
class ExplainerListDetail(ConveneTimeMixin, ListView):
    # Landing page listing every published explainer.
    queryset = Explainer.objects.all().published()
    template_name = 'pages/explainer-landing.html'
class ExplainerDetail(ConveneTimeMixin, DetailView):
    # Detail page for a single explainer.
    model = Explainer
    template_name = 'pages/explainer.html'
|
{
"content_hash": "8544950c82a90fc8671e1bd38904c59e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 54,
"avg_line_length": 27.733333333333334,
"alnum_prop": 0.7884615384615384,
"repo_name": "texastribune/txlege84",
"id": "f875a1032717bccc89a931342b24bbea137e81f5",
"size": "416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "txlege84/explainers/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "75832"
},
{
"name": "HTML",
"bytes": "437694"
},
{
"name": "JavaScript",
"bytes": "107508"
},
{
"name": "Makefile",
"bytes": "3270"
},
{
"name": "PHP",
"bytes": "2199"
},
{
"name": "Python",
"bytes": "124863"
}
],
"symlink_target": ""
}
|
from django.db.models import Q
from .models import UserProfile
class TestUserBackend(object):
    """Authentication backend to easily log in a user while testing."""

    def authenticate(self, request=None, username=None, email=None,
                     password=None):
        """Find a profile by username or email; no password is checked."""
        # This needs to explicitly throw when there is a password since django
        # will skip this backend if a user passes a password.
        # http://bit.ly/2duYr93
        if password is not None:
            raise TypeError('password is not allowed')
        lookup = Q(email=email) | Q(username=username)
        try:
            return UserProfile.objects.get(lookup)
        except UserProfile.DoesNotExist:
            return None

    def get_user(self, user_id):
        """Return the profile with primary key ``user_id``, or None."""
        try:
            return UserProfile.objects.get(pk=user_id)
        except UserProfile.DoesNotExist:
            return None
class NoAuthForYou(object):
    """An authentication backend for read-only mode.

    Every capability flag is off and both hooks unconditionally return
    None, so no login can succeed while this backend is active.
    """
    supports_anonymous_user = False
    supports_inactive_user = False
    supports_object_permissions = False

    def authenticate(self, *args, **kwargs):
        """Refuse to authenticate anyone."""
        return None

    def get_user(self, *args, **kwargs):
        """Never resolve a user id."""
        return None
|
{
"content_hash": "c73f5b99c94740a731c0f615f8e6fcad",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 78,
"avg_line_length": 30.692307692307693,
"alnum_prop": 0.6349206349206349,
"repo_name": "psiinon/addons-server",
"id": "8c74798a9d1f442c38c089c16500b21c4343de67",
"size": "1197",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/olympia/users/backends.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "752741"
},
{
"name": "Dockerfile",
"bytes": "4089"
},
{
"name": "HTML",
"bytes": "314894"
},
{
"name": "JavaScript",
"bytes": "947557"
},
{
"name": "Makefile",
"bytes": "564"
},
{
"name": "Python",
"bytes": "5192809"
},
{
"name": "Shell",
"bytes": "6712"
},
{
"name": "Smarty",
"bytes": "1418"
},
{
"name": "TSQL",
"bytes": "6926"
}
],
"symlink_target": ""
}
|
from juriscraper.opinions.united_states.state import tex
class Site(tex.Site):
    # Scraper variant for one Texas Court of Appeals; all scraping logic is
    # inherited from the shared tex.Site base.
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The module path doubles as the juriscraper court id.
        self.court_id = self.__module__
        # NOTE(review): internal name "capp_5" differs from the module's
        # "texapp_5"; presumably the base class keys on this value - confirm.
        self.court_name = "capp_5"
|
{
"content_hash": "8e5dad56299ecc90f8c16b83ba86e101",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 56,
"avg_line_length": 29.875,
"alnum_prop": 0.602510460251046,
"repo_name": "freelawproject/juriscraper",
"id": "c1a9d27619eebca920c2912dadc41dd7f2e95ffc",
"size": "384",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "juriscraper/opinions/united_states/state/texapp_5.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "63242956"
},
{
"name": "Jinja",
"bytes": "2201"
},
{
"name": "Makefile",
"bytes": "75"
},
{
"name": "Python",
"bytes": "1059228"
}
],
"symlink_target": ""
}
|
import logging
import time
from typing import Any, Dict
from datetime import timedelta
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import transaction
from django.utils.timezone import now as timezone_now
from zerver.lib.context_managers import lockfile
from zerver.lib.logging_util import log_to_file
from zerver.lib.management import sleep_forever
from zerver.models import ScheduledMessage, Message, get_user_by_delivery_email
from zerver.lib.actions import do_send_messages
from zerver.lib.addressee import Addressee
## Setup ##
# Module-level logger, mirrored to the scheduled-message deliverer log file.
logger = logging.getLogger(__name__)
log_to_file(logger, settings.SCHEDULED_MESSAGE_DELIVERER_LOG_PATH)
class Command(BaseCommand):
    help = """Deliver scheduled messages from the ScheduledMessage table.
Run this command under supervisor.
This management command is run via supervisor. Do not run on multiple
machines, as you may encounter multiple sends in a specific race
condition. (Alternatively, you can set `EMAIL_DELIVERER_DISABLED=True`
on all but one machine to make the command have no effect.)
Usage: ./manage.py deliver_scheduled_messages
"""

    def construct_message(self, scheduled_message: ScheduledMessage) -> Dict[str, Any]:
        # Build an (unsaved) Message from the scheduled row; the returned
        # dict is the shape do_send_messages() consumes.
        message = Message()
        original_sender = scheduled_message.sender
        message.content = scheduled_message.content
        message.recipient = scheduled_message.recipient
        message.subject = scheduled_message.subject
        message.pub_date = timezone_now()
        message.sending_client = scheduled_message.sending_client
        delivery_type = scheduled_message.delivery_type
        if delivery_type == ScheduledMessage.SEND_LATER:
            # "Send later": the message goes out as the original author.
            message.sender = original_sender
        elif delivery_type == ScheduledMessage.REMIND:
            # Reminders are delivered by the reminder bot in the author's realm.
            message.sender = get_user_by_delivery_email(settings.REMINDER_BOT, original_sender.realm)
        return {'message': message, 'stream': scheduled_message.stream,
                'realm': scheduled_message.realm}

    def handle(self, *args: Any, **options: Any) -> None:
        if settings.EMAIL_DELIVERER_DISABLED:
            # Here doing a check and sleeping indefinitely on this setting might
            # not sound right. Actually we do this check to avoid running this
            # process on every server that might be in service to a realm. See
            # the comment in zproject/settings.py file about renaming this setting.
            sleep_forever()
        # The lockfile guards against a second deliverer double-sending.
        with lockfile("/tmp/zulip_scheduled_message_deliverer.lockfile"):
            while True:
                messages_to_deliver = ScheduledMessage.objects.filter(
                    scheduled_timestamp__lte=timezone_now(),
                    delivered=False)
                if messages_to_deliver:
                    for message in messages_to_deliver:
                        # Send and flag as delivered atomically, so a crash
                        # between the two cannot cause a duplicate send.
                        with transaction.atomic():
                            do_send_messages([self.construct_message(message)])
                            message.delivered = True
                            message.save(update_fields=['delivered'])
                # Sleep until the top of the next minute, then poll again.
                cur_time = timezone_now()
                time_next_min = (cur_time + timedelta(minutes=1)).replace(second=0, microsecond=0)
                sleep_time = (time_next_min - cur_time).total_seconds()
                time.sleep(sleep_time)
|
{
"content_hash": "225b6e1b95b52181ab62acf855015a3d",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 101,
"avg_line_length": 44.1578947368421,
"alnum_prop": 0.6769964243146603,
"repo_name": "dhcrzf/zulip",
"id": "9c04e8526c4494f50cd4123372fa3e992da061cb",
"size": "3356",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/management/commands/deliver_scheduled_messages.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "436713"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "673974"
},
{
"name": "JavaScript",
"bytes": "2951950"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "72908"
},
{
"name": "Python",
"bytes": "6188005"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "118284"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import unittest
import mock
import pykka
from mopidy import backend, core
from mopidy.internal import deprecation
from mopidy.models import Track
from tests import dummy_audio
class TestPlaybackProvider(backend.PlaybackProvider):
    """Playback provider whose URI translation is scripted for tests.

    URIs containing 'error' raise, 'unplayable' resolve to nothing,
    'limit*' drive the bounded-retry machinery, and anything else
    translates to itself.
    """

    def __init__(self, audio, backend):
        super(TestPlaybackProvider, self).__init__(audio, backend)
        self._call_limit = 10
        self._call_count = 0
        self._call_onetime = False

    def reset_call_limit(self):
        """Forget any previous translate_uri calls."""
        self._call_count = 0
        self._call_onetime = False

    def is_call_limit_reached(self):
        return self._call_count > self._call_limit

    def _translate_uri_call_limit(self, uri):
        """Scripted behaviour for URIs containing 'limit'."""
        self._call_count += 1
        if self._call_count > self._call_limit:
            # Return any URL (not None) so the retry loop terminates.
            return 'assert: call limit reached'
        if 'limit_never' in uri:
            return None  # permanently unplayable
        if 'limit_one' in uri:
            # Playable exactly once; unplayable on every later attempt.
            if self._call_onetime:
                return None
            self._call_onetime = True
            return uri
        return None  # other 'limit' URIs: unplayable (implicit before)

    def translate_uri(self, uri):
        if 'error' in uri:
            raise Exception(uri)
        if 'unplayable' in uri:
            return None
        if 'limit' in uri:
            return self._translate_uri_call_limit(uri)
        return uri
# TODO: Replace this with dummy_backend now that it uses a real
# playbackprovider Since we rely on our DummyAudio to actually emit events we
# need a "real" backend and not a mock so the right calls make it through to
# audio.
class TestBackend(pykka.ThreadingActor, backend.Backend):
    """Minimal real backend actor wired to a :class:`TestPlaybackProvider`.

    Used instead of a mock so audio events propagate end to end.
    """
    uri_schemes = ['dummy']
    def __init__(self, config, audio):
        super(TestBackend, self).__init__()
        self.playback = TestPlaybackProvider(audio=audio, backend=self)
class BaseTest(unittest.TestCase):
    """Shared fixture: dummy audio actor + :class:`TestBackend` + a real
    ``core.Core``, with AudioListener events captured so each test decides
    when they reach the core (see :meth:`replay_events`)."""
    config = {'core': {'max_tracklist_length': 10000}}
    tracks = [Track(uri='dummy:a', length=1234),
              Track(uri='dummy:b', length=1234),
              Track(uri='dummy:c', length=1234)]
    def setUp(self):  # noqa: N802
        # TODO: use create_proxy helpers.
        self.audio = dummy_audio.DummyAudio.start().proxy()
        self.backend = TestBackend.start(
            audio=self.audio, config=self.config).proxy()
        self.core = core.Core(
            audio=self.audio, backends=[self.backend], config=self.config)
        self.playback = self.core.playback
        # We don't have a core actor running, so call about to finish directly.
        self.audio.set_about_to_finish_callback(
            self.playback._on_about_to_finish)
        with deprecation.ignore('core.tracklist.add:tracks_arg'):
            self.core.tracklist.add(self.tracks)
        # Intercept audio events into self.events instead of delivering them,
        # so tests control delivery order/timing via replay_events().
        self.events = []
        self.patcher = mock.patch('mopidy.audio.listener.AudioListener.send')
        self.send_mock = self.patcher.start()
        def send(event, **kwargs):
            self.events.append((event, kwargs))
        self.send_mock.side_effect = send
    def tearDown(self):  # noqa: N802
        pykka.ActorRegistry.stop_all()
        self.patcher.stop()
    def replay_events(self, until=None):
        """Deliver captured audio events to core, in order, stopping
        *before* the first event named ``until`` (which stays queued)."""
        while self.events:
            if self.events[0][0] == until:
                break
            event, kwargs = self.events.pop(0)
            self.core.on_event(event, **kwargs)
    def trigger_about_to_finish(self, replay_until=None):
        """Simulate a gapless track transition by invoking the audio
        layer's about-to-finish callback, then replaying resulting events
        up to (but not including) ``replay_until``."""
        self.replay_events()
        callback = self.audio.get_about_to_finish_callback().get()
        callback()
        self.replay_events(until=replay_until)
class TestPlayHandling(BaseTest):
    """``play()``: current-track bookkeeping, play-by-tlid, and skipping
    past tracks the backend fails to change to."""
    def test_get_current_tl_track_play(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.assertEqual(
            self.core.playback.get_current_tl_track(), tl_tracks[0])
    def test_get_current_track_play(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.assertEqual(
            self.core.playback.get_current_track(), self.tracks[0])
    def test_get_current_tlid_play(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.assertEqual(
            self.core.playback.get_current_tlid(), tl_tracks[0].tlid)
    def test_play_skips_to_next_on_unplayable_track(self):
        """Checks that we handle backend.change_track failing."""
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.audio.trigger_fake_playback_failure(tl_tracks[0].track.uri)
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        current_tl_track = self.core.playback.get_current_tl_track()
        self.assertEqual(tl_tracks[1], current_tl_track)
    def test_resume_skips_to_next_on_unplayable_track(self):
        """Checks that we handle backend.change_track failing when
        resuming playback."""
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.core.playback.pause()
        self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri)
        self.core.playback.next()
        self.core.playback.resume()
        self.replay_events()
        current_tl_track = self.core.playback.get_current_tl_track()
        self.assertEqual(tl_tracks[2], current_tl_track)
    def test_play_tlid(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tlid=tl_tracks[1].tlid)
        self.replay_events()
        current_tl_track = self.core.playback.get_current_tl_track()
        self.assertEqual(tl_tracks[1], current_tl_track)
class TestNextHandling(BaseTest):
    """``next()``: track advancement, the pending-track intermediate state,
    and skipping over unplayable/erroring tracks."""
    def test_get_current_tl_track_next(self):
        self.core.playback.play()
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        tl_tracks = self.core.tracklist.get_tl_tracks()
        current_tl_track = self.core.playback.get_current_tl_track()
        self.assertEqual(current_tl_track, tl_tracks[1])
    def test_get_pending_tl_track_next(self):
        # No replay after next(): the change is still pending.
        self.core.playback.play()
        self.replay_events()
        self.core.playback.next()
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.assertEqual(self.core.playback._pending_tl_track, tl_tracks[1])
    def test_get_current_track_next(self):
        self.core.playback.play()
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        current_track = self.core.playback.get_current_track()
        self.assertEqual(current_track, self.tracks[1])
    def test_next_keeps_finished_track_in_tracklist(self):
        tl_track = self.core.tracklist.get_tl_tracks()[0]
        self.core.playback.play(tl_track)
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        self.assertIn(tl_track, self.core.tracklist.tl_tracks)
    def test_next_skips_over_unplayable_track(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri)
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[2]
    def test_next_skips_over_change_track_error(self):
        # Trigger an exception in translate_uri.
        track = Track(uri='dummy:error', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play()
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[2]
    def test_next_skips_over_change_track_unplayable(self):
        # Make translate_uri return None.
        track = Track(uri='dummy:unplayable', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play()
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[2]
class TestPreviousHandling(BaseTest):
    """``previous()``: track regression, tracklist retention (also in
    consume mode), and skipping over unplayable/erroring tracks."""
    # TODO Test previous() more
    def test_get_current_tl_track_prev(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.core.playback.previous()
        self.replay_events()
        self.assertEqual(
            self.core.playback.get_current_tl_track(), tl_tracks[0])
    def test_get_current_track_prev(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.core.playback.previous()
        self.replay_events()
        self.assertEqual(
            self.core.playback.get_current_track(), self.tracks[0])
    def test_previous_keeps_finished_track_in_tracklist(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.core.playback.previous()
        self.replay_events()
        self.assertIn(tl_tracks[1], self.core.tracklist.tl_tracks)
    def test_previous_keeps_finished_track_even_in_consume_mode(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        # Use set_consume() like the rest of this module; direct property
        # assignment is the deprecated core API form.
        self.core.tracklist.set_consume(True)
        self.core.playback.previous()
        self.replay_events()
        self.assertIn(tl_tracks[1], self.core.tracklist.tl_tracks)
    def test_previous_skips_over_unplayable_track(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri)
        self.core.playback.play(tl_tracks[2])
        self.replay_events()
        self.core.playback.previous()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[0]
    def test_previous_skips_over_change_track_error(self):
        # Trigger an exception in translate_uri.
        track = Track(uri='dummy:error', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[2])
        self.replay_events()
        self.core.playback.previous()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[0]
    def test_previous_skips_over_change_track_unplayable(self):
        # Makes translate_uri return None.
        track = Track(uri='dummy:unplayable', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[2])
        self.replay_events()
        self.core.playback.previous()
        self.replay_events()
        assert self.core.playback.get_current_tl_track() == tl_tracks[0]
class TestOnAboutToFinish(BaseTest):
    """Gapless transitions via the about-to-finish callback: tracklist
    retention and skipping over bad next tracks."""
    def test_on_about_to_finish_keeps_finished_track_in_tracklist(self):
        tl_track = self.core.tracklist.get_tl_tracks()[0]
        self.core.playback.play(tl_track)
        self.trigger_about_to_finish()
        self.assertIn(tl_track, self.core.tracklist.tl_tracks)
    def test_on_about_to_finish_skips_over_change_track_error(self):
        # Trigger an exception in translate_uri.
        track = Track(uri='dummy:error', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.trigger_about_to_finish()
        assert self.core.playback.get_current_tl_track() == tl_tracks[2]
    def test_on_about_to_finish_skips_over_change_track_unplayable(self):
        # Makes translate_uri return None.
        track = Track(uri='dummy:unplayable', length=1234)
        self.core.tracklist.add(tracks=[track], at_position=1)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.trigger_about_to_finish()
        assert self.core.playback.get_current_tl_track() == tl_tracks[2]
class TestConsumeHandling(BaseTest):
    """Consume mode: finished and unplayable tracks are removed from the
    tracklist after track changes."""
    def test_next_in_consume_mode_removes_finished_track(self):
        tl_track = self.core.tracklist.get_tl_tracks()[0]
        self.core.playback.play(tl_track)
        self.core.tracklist.set_consume(True)
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        self.assertNotIn(tl_track, self.core.tracklist.get_tl_tracks())
    def test_next_in_consume_mode_removes_unplayable_track(self):
        last_playable_tl_track = self.core.tracklist.get_tl_tracks()[-2]
        unplayable_tl_track = self.core.tracklist.get_tl_tracks()[-1]
        self.audio.trigger_fake_playback_failure(unplayable_tl_track.track.uri)
        self.core.playback.play(last_playable_tl_track)
        self.core.tracklist.set_consume(True)
        self.core.playback.next()
        self.replay_events()
        self.assertNotIn(
            unplayable_tl_track, self.core.tracklist.get_tl_tracks())
    def test_on_about_to_finish_in_consume_mode_removes_finished_track(self):
        tl_track = self.core.tracklist.get_tl_tracks()[0]
        self.core.playback.play(tl_track)
        # Use set_consume() like the other tests in this class; direct
        # property assignment is the deprecated core API form.
        self.core.tracklist.set_consume(True)
        self.trigger_about_to_finish()
        self.assertNotIn(tl_track, self.core.tracklist.get_tl_tracks())
class TestCurrentAndPendingTlTrack(BaseTest):
    """State of ``current_tl_track`` vs ``_pending_tl_track`` across the
    about-to-finish -> stream_changed transition window."""
    def test_get_current_tl_track_none(self):
        self.assertEqual(
            self.core.playback.get_current_tl_track(), None)
    def test_get_current_tlid_none(self):
        self.assertEqual(self.core.playback.get_current_tlid(), None)
    def test_pending_tl_track_is_none(self):
        self.core.playback.play()
        self.replay_events()
        self.assertEqual(self.playback._pending_tl_track, None)
    def test_pending_tl_track_after_about_to_finish(self):
        # Stop replay before 'stream_changed': next track is still pending.
        self.core.playback.play()
        self.replay_events()
        self.trigger_about_to_finish(replay_until='stream_changed')
        self.assertEqual(self.playback._pending_tl_track.track.uri, 'dummy:b')
    def test_pending_tl_track_after_stream_changed(self):
        self.trigger_about_to_finish()
        self.assertEqual(self.playback._pending_tl_track, None)
    def test_current_tl_track_after_about_to_finish(self):
        self.core.playback.play()
        self.replay_events()
        self.trigger_about_to_finish(replay_until='stream_changed')
        self.assertEqual(self.playback.current_tl_track.track.uri, 'dummy:a')
    def test_current_tl_track_after_stream_changed(self):
        self.core.playback.play()
        self.replay_events()
        self.trigger_about_to_finish()
        self.assertEqual(self.playback.current_tl_track.track.uri, 'dummy:b')
    def test_current_tl_track_after_end_of_stream(self):
        self.core.playback.play()
        self.replay_events()
        self.trigger_about_to_finish()
        self.trigger_about_to_finish()
        self.trigger_about_to_finish()  # EOS
        self.assertEqual(self.playback.current_tl_track, None)
@mock.patch(
    'mopidy.core.playback.listener.CoreListener', spec=core.CoreListener)
class EventEmissionTest(BaseTest):
    """Asserts the exact CoreListener event sequence (names, payloads and
    order) emitted by each playback operation."""
    maxDiff = None
    def test_play_when_stopped_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'playback_state_changed',
                    old_state='stopped', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[0]),
            ],
            listener_mock.send.mock_calls)
    def test_play_when_paused_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.pause()
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.play(tl_tracks[1])
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='paused', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[1]),
            ],
            listener_mock.send.mock_calls)
    def test_play_when_playing_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.play(tl_tracks[2])
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed', old_state='playing',
                    new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[2]),
            ],
            listener_mock.send.mock_calls)
    def test_pause_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(1000)
        listener_mock.reset_mock()
        self.core.playback.pause()
        self.assertListEqual(
            [
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='paused'),
                mock.call(
                    'track_playback_paused',
                    tl_track=tl_tracks[0], time_position=1000),
            ],
            listener_mock.send.mock_calls)
    def test_resume_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.pause()
        self.core.playback.seek(1000)
        listener_mock.reset_mock()
        self.core.playback.resume()
        self.assertListEqual(
            [
                mock.call(
                    'playback_state_changed',
                    old_state='paused', new_state='playing'),
                mock.call(
                    'track_playback_resumed',
                    tl_track=tl_tracks[0], time_position=1000),
            ],
            listener_mock.send.mock_calls)
    def test_stop_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(1000)
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.stop()
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='stopped'),
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=1000),
            ],
            listener_mock.send.mock_calls)
    def test_next_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(1000)
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.next()
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[1]),
            ],
            listener_mock.send.mock_calls)
    def test_next_emits_events_when_consume_mode_is_enabled(
            self,
            listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.tracklist.set_consume(True)
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(1000)
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.next()
        self.replay_events()
        # Consume mode additionally removes the finished track, hence the
        # leading 'tracklist_changed' event.
        self.assertListEqual(
            [
                mock.call(
                    'tracklist_changed'),
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[1]),
            ],
            listener_mock.send.mock_calls)
    def test_gapless_track_change_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        listener_mock.reset_mock()
        self.trigger_about_to_finish()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[1]),
            ],
            listener_mock.send.mock_calls)
    def test_seek_emits_seeked_event(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.seek(1000)
        self.replay_events()
        listener_mock.send.assert_called_once_with(
            'seeked', time_position=1000)
    def test_seek_past_end_of_track_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.seek(self.tracks[0].length * 5)
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[0], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[1]),
            ],
            listener_mock.send.mock_calls)
    def test_seek_race_condition_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.trigger_about_to_finish(replay_until='stream_changed')
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.seek(1000)
        self.replay_events()
        # When we trigger seek after an about to finish the other code that
        # emits track stopped/started and playback state changed events gets
        # triggered as we have to switch back to the previous track.
        # The correct behavior would be to only emit seeked.
        self.assertListEqual(
            [mock.call('seeked', time_position=1000)],
            listener_mock.send.mock_calls)
    def test_previous_emits_events(self, listener_mock):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.replay_events()
        listener_mock.reset_mock()
        self.core.playback.previous()
        self.replay_events()
        self.assertListEqual(
            [
                mock.call(
                    'track_playback_ended',
                    tl_track=tl_tracks[1], time_position=mock.ANY),
                mock.call(
                    'playback_state_changed',
                    old_state='playing', new_state='playing'),
                mock.call(
                    'track_playback_started', tl_track=tl_tracks[0]),
            ],
            listener_mock.send.mock_calls)
class TestUnplayableURI(BaseTest):
    """Behaviour when the current track's URI has no supporting backend
    (scheme 'unplayable' is not served by TestBackend)."""
    tracks = [
        Track(uri='unplayable://'),
        Track(uri='dummy:b'),
    ]
    def setUp(self):  # noqa: N802
        super(TestUnplayableURI, self).setUp()
        # Force the unplayable track to be the current one.
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback._set_current_tl_track(tl_tracks[0])
    def test_play_skips_to_next_if_track_is_unplayable(self):
        self.core.playback.play()
        self.replay_events()
        current_track = self.core.playback.get_current_track()
        self.assertEqual(current_track, self.tracks[1])
    def test_pause_changes_state_even_if_track_is_unplayable(self):
        self.core.playback.pause()
        self.assertEqual(self.core.playback.state, core.PlaybackState.PAUSED)
    def test_resume_does_nothing_if_track_is_unplayable(self):
        self.core.playback.state = core.PlaybackState.PAUSED
        self.core.playback.resume()
        self.assertEqual(self.core.playback.state, core.PlaybackState.PAUSED)
    def test_stop_changes_state_even_if_track_is_unplayable(self):
        self.core.playback.state = core.PlaybackState.PAUSED
        self.core.playback.stop()
        self.assertEqual(self.core.playback.state, core.PlaybackState.STOPPED)
    def test_time_position_returns_0_if_track_is_unplayable(self):
        result = self.core.playback.time_position
        self.assertEqual(result, 0)
    def test_seek_fails_for_unplayable_track(self):
        self.core.playback.state = core.PlaybackState.PLAYING
        success = self.core.playback.seek(1000)
        self.assertFalse(success)
class SeekTest(BaseTest):
    """``seek()``: input normalization, failure modes, state preservation,
    and the about-to-finish race condition."""
    def test_seek_normalizes_negative_positions_to_zero(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(-100)  # Dummy audio doesn't progress time.
        self.assertEqual(0, self.core.playback.get_time_position())
    def test_seek_fails_for_track_without_duration(self):
        track = self.tracks[0].replace(length=None)
        self.core.tracklist.clear()
        self.core.tracklist.add([track])
        self.core.playback.play()
        self.replay_events()
        self.assertFalse(self.core.playback.seek(1000))
        self.assertEqual(0, self.core.playback.get_time_position())
    def test_seek_play_stay_playing(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.seek(1000)
        self.assertEqual(self.core.playback.state, core.PlaybackState.PLAYING)
    def test_seek_paused_stay_paused(self):
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.core.playback.pause()
        self.replay_events()
        self.core.playback.seek(1000)
        self.assertEqual(self.core.playback.state, core.PlaybackState.PAUSED)
    def test_seek_race_condition_after_about_to_finish(self):
        # Seeking while a gapless change is pending must revert to the
        # still-current track instead of completing the change.
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.trigger_about_to_finish(replay_until='stream_changed')
        self.core.playback.seek(1000)
        self.replay_events()
        current_tl_track = self.core.playback.get_current_tl_track()
        self.assertEqual(current_tl_track, tl_tracks[0])
class TestStream(BaseTest):
    """``get_stream_title()``: derived from 'title' tag changes and reset
    on track change or stop."""
    def test_get_stream_title_before_playback(self):
        self.assertEqual(self.playback.get_stream_title(), None)
    def test_get_stream_title_during_playback(self):
        self.core.playback.play()
        self.replay_events()
        self.assertEqual(self.playback.get_stream_title(), None)
    def test_get_stream_title_during_playback_with_tags_change(self):
        self.core.playback.play()
        self.audio.trigger_fake_tags_changed({'organization': ['baz']})
        self.audio.trigger_fake_tags_changed({'title': ['foobar']}).get()
        self.replay_events()
        self.assertEqual(self.playback.get_stream_title(), 'foobar')
    def test_get_stream_title_after_next(self):
        self.core.playback.play()
        self.audio.trigger_fake_tags_changed({'organization': ['baz']})
        self.audio.trigger_fake_tags_changed({'title': ['foobar']}).get()
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        self.assertEqual(self.playback.get_stream_title(), None)
    def test_get_stream_title_after_next_with_tags_change(self):
        self.core.playback.play()
        self.audio.trigger_fake_tags_changed({'organization': ['baz']})
        self.audio.trigger_fake_tags_changed({'title': ['foo']}).get()
        self.replay_events()
        self.core.playback.next()
        self.audio.trigger_fake_tags_changed({'organization': ['baz']})
        self.audio.trigger_fake_tags_changed({'title': ['bar']}).get()
        self.replay_events()
        self.assertEqual(self.playback.get_stream_title(), 'bar')
    def test_get_stream_title_after_stop(self):
        self.core.playback.play()
        self.audio.trigger_fake_tags_changed({'organization': ['baz']})
        self.audio.trigger_fake_tags_changed({'title': ['foobar']}).get()
        self.replay_events()
        self.core.playback.stop()
        self.replay_events()
        self.assertEqual(self.playback.get_stream_title(), None)
class TestBackendSelection(unittest.TestCase):
    """Each playback operation must be dispatched to the backend whose
    URI scheme matches the current track, and only that backend."""
    def setUp(self):  # noqa: N802
        config = {
            'core': {
                'max_tracklist_length': 10000,
            }
        }
        # Two mocked backends serving distinct URI schemes.
        self.backend1 = mock.Mock()
        self.backend1.uri_schemes.get.return_value = ['dummy1']
        self.playback1 = mock.Mock(spec=backend.PlaybackProvider)
        self.backend1.playback = self.playback1
        self.backend2 = mock.Mock()
        self.backend2.uri_schemes.get.return_value = ['dummy2']
        self.playback2 = mock.Mock(spec=backend.PlaybackProvider)
        self.backend2.playback = self.playback2
        self.tracks = [
            Track(uri='dummy1:a', length=40000),
            Track(uri='dummy2:a', length=40000),
        ]
        self.core = core.Core(config, mixer=None, backends=[
            self.backend1, self.backend2])
        self.tl_tracks = self.core.tracklist.add(self.tracks)
    def trigger_stream_changed(self):
        # Mocked backends emit no real audio events, so fake the
        # stream_changed notification for the pending track by hand.
        pending = self.core.playback._pending_tl_track
        if pending:
            self.core.stream_changed(uri=pending.track.uri)
        else:
            self.core.stream_changed(uri=None)
    def test_play_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.playback1.prepare_change.assert_called_once_with()
        self.playback1.change_track.assert_called_once_with(self.tracks[0])
        self.playback1.play.assert_called_once_with()
        self.assertFalse(self.playback2.play.called)
    def test_play_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.assertFalse(self.playback1.play.called)
        self.playback2.prepare_change.assert_called_once_with()
        self.playback2.change_track.assert_called_once_with(self.tracks[1])
        self.playback2.play.assert_called_once_with()
    def test_pause_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.core.playback.pause()
        self.playback1.pause.assert_called_once_with()
        self.assertFalse(self.playback2.pause.called)
    def test_pause_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.core.playback.pause()
        self.assertFalse(self.playback1.pause.called)
        self.playback2.pause.assert_called_once_with()
    def test_resume_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.core.playback.pause()
        self.core.playback.resume()
        self.playback1.resume.assert_called_once_with()
        self.assertFalse(self.playback2.resume.called)
    def test_resume_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.core.playback.pause()
        self.core.playback.resume()
        self.assertFalse(self.playback1.resume.called)
        self.playback2.resume.assert_called_once_with()
    def test_stop_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.core.playback.stop()
        self.trigger_stream_changed()
        self.playback1.stop.assert_called_once_with()
        self.assertFalse(self.playback2.stop.called)
    def test_stop_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.core.playback.stop()
        self.trigger_stream_changed()
        self.assertFalse(self.playback1.stop.called)
        self.playback2.stop.assert_called_once_with()
    def test_seek_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.core.playback.seek(10000)
        self.playback1.seek.assert_called_once_with(10000)
        self.assertFalse(self.playback2.seek.called)
    def test_seek_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.core.playback.seek(10000)
        self.assertFalse(self.playback1.seek.called)
        self.playback2.seek.assert_called_once_with(10000)
    def test_time_position_selects_dummy1_backend(self):
        self.core.playback.play(self.tl_tracks[0])
        self.trigger_stream_changed()
        self.core.playback.time_position
        self.playback1.get_time_position.assert_called_once_with()
        self.assertFalse(self.playback2.get_time_position.called)
    def test_time_position_selects_dummy2_backend(self):
        self.core.playback.play(self.tl_tracks[1])
        self.trigger_stream_changed()
        self.core.playback.time_position
        self.assertFalse(self.playback1.get_time_position.called)
        self.playback2.get_time_position.assert_called_once_with()
class TestCorePlaybackWithOldBackend(unittest.TestCase):
    """A backend raising TypeError from play() (old API signature) must
    not crash the core."""
    def test_type_error_from_old_backend_does_not_crash_core(self):
        config = {
            'core': {
                'max_tracklist_length': 10000,
            }
        }
        b = mock.Mock()
        b.actor_ref.actor_class.__name__ = 'DummyBackend'
        b.uri_schemes.get.return_value = ['dummy1']
        b.playback = mock.Mock(spec=backend.PlaybackProvider)
        b.playback.play.side_effect = TypeError
        b.library.lookup.return_value.get.return_value = [
            Track(uri='dummy1:a', length=40000)]
        c = core.Core(config, mixer=None, backends=[b])
        c.tracklist.add(uris=['dummy1:a'])
        c.playback.play()  # No TypeError == test passed.
        b.playback.play.assert_called_once_with()
class TestBug1177Regression(unittest.TestCase):
    """Regression: next() after pause() must change to the next track
    (mopidy issue #1177)."""
    def test(self):
        config = {
            'core': {
                'max_tracklist_length': 10000,
            }
        }
        b = mock.Mock()
        b.uri_schemes.get.return_value = ['dummy']
        b.playback = mock.Mock(spec=backend.PlaybackProvider)
        b.playback.change_track.return_value.get.return_value = True
        b.playback.play.return_value.get.return_value = True
        track1 = Track(uri='dummy:a', length=40000)
        track2 = Track(uri='dummy:b', length=40000)
        c = core.Core(config, mixer=None, backends=[b])
        c.tracklist.add([track1, track2])
        c.playback.play()
        b.playback.change_track.assert_called_once_with(track1)
        b.playback.change_track.reset_mock()
        c.playback.pause()
        c.playback.next()
        b.playback.change_track.assert_called_once_with(track2)
class TestBug1352Regression(BaseTest):
    """Regression: next() while paused must still update history and mark
    the new track playing (mopidy issue #1352)."""
    tracks = [
        Track(uri='dummy:a', length=40000),
        Track(uri='dummy:b', length=40000),
    ]
    def test_next_when_paused_updates_history(self):
        self.core.history._add_track = mock.Mock()
        self.core.tracklist._mark_playing = mock.Mock()
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.playback.play()
        self.replay_events()
        self.core.history._add_track.assert_called_once_with(self.tracks[0])
        self.core.tracklist._mark_playing.assert_called_once_with(tl_tracks[0])
        self.core.history._add_track.reset_mock()
        self.core.tracklist._mark_playing.reset_mock()
        self.playback.pause()
        self.playback.next()
        self.replay_events()
        self.core.history._add_track.assert_called_once_with(self.tracks[1])
        self.core.tracklist._mark_playing.assert_called_once_with(tl_tracks[1])
class TestEndlessLoop(BaseTest):
    """With repeat enabled and all tracks unplayable, the core must not
    loop forever trying to find a playable track.

    Detection relies on TestPlaybackProvider's call counter: the test
    fails if ``translate_uri`` was invoked more than the call limit.
    """
    # All tracks permanently unplayable.
    tracks_play = [
        Track(uri='dummy:limit_never:a'),
        Track(uri='dummy:limit_never:b')
    ]
    # One track playable exactly once, the rest unplayable.
    tracks_other = [
        Track(uri='dummy:limit_never:a'),
        Track(uri='dummy:limit_one'),
        Track(uri='dummy:limit_never:b')
    ]
    def test_play(self):
        self.core.tracklist.clear()
        self.core.tracklist.add(self.tracks_play)
        self.backend.playback.reset_call_limit().get()
        self.core.tracklist.set_repeat(True)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[0])
        self.replay_events()
        self.assertFalse(self.backend.playback.is_call_limit_reached().get())
    def test_next(self):
        self.core.tracklist.clear()
        self.core.tracklist.add(self.tracks_other)
        self.backend.playback.reset_call_limit().get()
        self.core.tracklist.set_repeat(True)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.replay_events()
        self.core.playback.next()
        self.replay_events()
        self.assertFalse(self.backend.playback.is_call_limit_reached().get())
    def test_previous(self):
        self.core.tracklist.clear()
        self.core.tracklist.add(self.tracks_other)
        self.backend.playback.reset_call_limit().get()
        self.core.tracklist.set_repeat(True)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.replay_events()
        self.core.playback.previous()
        self.replay_events()
        self.assertFalse(self.backend.playback.is_call_limit_reached().get())
    def test_on_about_to_finish(self):
        self.core.tracklist.clear()
        self.core.tracklist.add(self.tracks_other)
        self.backend.playback.reset_call_limit().get()
        self.core.tracklist.set_repeat(True)
        tl_tracks = self.core.tracklist.get_tl_tracks()
        self.core.playback.play(tl_tracks[1])
        self.replay_events()
        self.trigger_about_to_finish()
        self.assertFalse(self.backend.playback.is_call_limit_reached().get())
|
{
"content_hash": "6fbafea088360044f0e5906d6f16196c",
"timestamp": "",
"source": "github",
"line_count": 1233,
"max_line_length": 79,
"avg_line_length": 32.96918085969181,
"alnum_prop": 0.617057390962092,
"repo_name": "vrs01/mopidy",
"id": "34c9d367600a791155b13c741b46c45e82004336",
"size": "40651",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tests/core/test_playback.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "610"
},
{
"name": "Groff",
"bytes": "573"
},
{
"name": "HTML",
"bytes": "805"
},
{
"name": "JavaScript",
"bytes": "82060"
},
{
"name": "Python",
"bytes": "1192583"
},
{
"name": "Shell",
"bytes": "556"
}
],
"symlink_target": ""
}
|
"""Test helper functions"""
import pathlib
import tempfile
import warnings
import zipfile
import numpy as np
from scipy.ndimage import rotate
def create_test_sino_2d(A=9, N=22, max_phase=5.0,
                        ampl_range=(1.0, 1.0)):
    """Create a 2D test sinogram for optical diffraction tomography.

    The sinogram is generated from a Gaussian that is shifted according
    to the rotational position of a non-centered object.

    Parameters
    ----------
    A : int
        Number of angles of the sinogram.
    N : int
        Size of one acquisition.
    max_phase : float
        Phase normalization. If this is greater than 2PI, then it also
        tests the unwrapping capabilities of the reconstruction
        algorithm.
    ampl_range : tuple of floats
        Determines the min/max range of the amplitude values.
        Equal values means constant amplitude.

    Returns
    -------
    sino : 2D ndarray, complex, shape (A, N)
        The simulated sinogram.
    angles : 1D ndarray, float, shape (A,)
        The rotation angle for each row of `sino`.
    """
    sino = np.zeros((A, N), dtype=np.complex128)
    # full 2*pi rotation coverage
    angles = np.linspace(0, 2*np.pi, A, endpoint=False)
    # lateral coordinates of the detector line
    coords = np.linspace(-N/2, N/2, N, endpoint=True)
    # standard deviation of the Gaussian
    sigma = np.sqrt(N/2)
    # distance of the object from the rotation center
    offset = N/7
    for idx, ang in enumerate(angles):
        # projected lateral position of the rotating object
        center = np.cos(ang) * offset
        phase = normalize(np.exp(-(coords - center)**2 / sigma**2),
                          vmax=max_phase)
        lo, hi = ampl_range
        if lo == hi:
            # constant amplitude
            ampl = lo
        else:
            # two shifted Gaussians forming a ring-like amplitude
            asig = sigma / 5
            aoff = offset * .3
            ampl = (np.exp(-(coords - center - aoff)**2 / asig**2)
                    + np.exp(-(coords - center + aoff)**2 / asig**2))
            ampl = normalize(ampl, vmin=lo, vmax=hi)
        sino[idx] = ampl * np.exp(1j * phase)
    return sino, angles
def create_test_sino_3d(A=9, Nx=22, Ny=22, max_phase=5.0,
                        ampl_range=(1.0, 1.0)):
    """Create a 3D test sinogram for optical diffraction tomography.

    The sinogram is generated from a Gaussian that is shifted according
    to the rotational position of a non-centered object. The simulated
    rotation is about the second (y)/[1] axis.

    Parameters
    ----------
    A : int
        Number of angles of the sinogram.
    Nx : int
        Size of the first axis.
    Ny : int
        Size of the second axis.
    max_phase : float
        Phase normalization. If this is greater than 2PI, then it also
        tests the unwrapping capabilities of the reconstruction
        algorithm.
    ampl_range : tuple of floats
        Determines the min/max range of the amplitude values.
        Equal values means constant amplitude.

    Returns
    -------
    sino : 3D ndarray, complex, shape (A, Ny, Nx)
        The simulated sinogram.
    angles : 1D ndarray, float, shape (A,)
        The rotation angle for each frame of `sino`.
    """
    sino = np.zeros((A, Ny, Nx), dtype=np.complex128)
    # full 2*pi rotation coverage
    angles = np.linspace(0, 2*np.pi, A, endpoint=False)
    # coordinates: x as a row vector, y as a column vector, so that
    # products broadcast to shape (Ny, Nx)
    xcoords = np.linspace(-Nx/2, Nx/2, Nx, endpoint=True).reshape(1, -1)
    ycoords = np.linspace(-Ny/2, Ny/2, Ny, endpoint=True).reshape(-1, 1)
    # standard deviation of the Gaussian
    sigma = min(np.sqrt(Nx/2), np.sqrt(Ny/2))
    # off-centered rotation about the second axis
    offset = Nx/7
    for idx, ang in enumerate(angles):
        # projected lateral position of the rotating object
        center = np.cos(ang) * offset
        gauss = (np.exp(-(xcoords - center)**2 / sigma**2)
                 * np.exp(-(ycoords)**2 / sigma**2))
        phase = normalize(gauss, vmax=max_phase)
        lo, hi = ampl_range
        if lo == hi:
            # constant amplitude
            ampl = lo
        else:
            # two shifted Gaussians forming a ring-like amplitude
            asig = sigma / 5
            aoff = offset * .3
            ampl = (np.exp(-(xcoords - center - aoff)**2 / asig**2)
                    + np.exp(-(xcoords - center + aoff)**2 / asig**2))
            ampl = normalize(ampl, vmin=lo, vmax=hi)
        sino[idx] = ampl * np.exp(1j * phase)
    return sino, angles
def create_test_sino_3d_tilted(A=9, Nx=22, Ny=22, max_phase=5.0,
                               ampl_range=(1.0, 1.0),
                               tilt_plane=0.0):
    """Create a tilted 3D test sinogram for optical diffraction tomography.

    The sinogram is generated from a Gaussian that is shifted
    according to the rotational position of a non-centered
    object. The simulated rotation is about the second (y)/[1]
    axis, with an additional in-plane tilt of each frame.

    Parameters
    ----------
    A : int
        Number of angles of the sinogram.
    Nx : int
        Size of the first axis.
    Ny : int
        Size of the second axis.
    max_phase : float
        Phase normalization. If this is greater than
        2PI, then it also tests the unwrapping
        capabilities of the reconstruction algorithm.
    ampl_range : tuple of floats
        Determines the min/max range of the amplitude values.
        Equal values means constant amplitude.
    tilt_plane : float
        Rotation tilt offset [rad].

    Returns
    -------
    resar : 3D ndarray, complex, shape (A, Ny, Nx)
        The simulated sinogram.
    angles : 1D ndarray, float, shape (A,)
        The rotation angle for each frame of `resar`.
    """
    # initiate array
    resar = np.zeros((A, Ny, Nx), dtype=np.complex128)
    # 2pi coverage
    angles = np.linspace(0, 2*np.pi, A, endpoint=False)
    # x-values of Gaussian (row vector); y-values (column vector) so
    # products broadcast to shape (Ny, Nx)
    x = np.linspace(-Nx/2, Nx/2, Nx, endpoint=True).reshape(1, -1)
    y = np.linspace(-Ny/2, Ny/2, Ny, endpoint=True).reshape(-1, 1)
    # SD of Gaussian
    dev = min(np.sqrt(Nx/2), np.sqrt(Ny/2))
    # Off-centered rotation about second axis:
    off = Nx/7
    for ii in range(A):
        # Gaussian distribution sinogram
        x0 = np.cos(angles[ii])*off
        phase = np.exp(-(x-x0)**2/dev**2) * np.exp(-(y)**2/dev**2)
        phase = normalize(phase, vmax=max_phase)
        if ampl_range[0] == ampl_range[1]:
            # constant amplitude; build a (Ny, Nx) array so that the
            # in-plane rotation below and the multiplication with
            # `phase` also work when Nx != Ny
            # (bug fix: previously np.ones((Nx, Ny)))
            ampl = np.ones((Ny, Nx))*ampl_range[0]
        else:
            # ring
            ampldev = dev/5
            amploff = off*.3
            ampl1 = np.exp(-(x-x0-amploff)**2/ampldev**2)
            ampl2 = np.exp(-(x-x0+amploff)**2/ampldev**2)
            ampl = ampl1+ampl2
            ampl = normalize(ampl, vmin=ampl_range[0], vmax=ampl_range[1])
        # perform in-plane rotation; amplitude pads with 1 (transparent),
        # phase pads with 0 (no phase shift)
        ampl = rotate(ampl, np.rad2deg(tilt_plane), reshape=False, cval=1)
        phase = rotate(phase, np.rad2deg(tilt_plane), reshape=False, cval=0)
        resar[ii] = ampl*np.exp(1j*phase)
    return resar, angles
def cutout(a):
    """Cut out a circle/sphere from a 2D/3D square/cubic array.

    Elements outside the inscribed circle (2D) or sphere (3D) are set
    to zero in place.

    Parameters
    ----------
    a : 2D or 3D ndarray
        Square (NxN) or cubic (NxNxN) array; modified in place.

    Returns
    -------
    a : ndarray
        The masked input array (same object as `a`).

    Raises
    ------
    ValueError
        If `a` is neither 2- nor 3-dimensional.
    """
    x = np.arange(a.shape[0])
    # center of the circle/sphere
    c = a.shape[0] / 2
    if len(a.shape) == 2:
        x = x.reshape(-1, 1)
        y = x.reshape(1, -1)
        zero = ((x-c)**2 + (y-c)**2) < c**2
    elif len(a.shape) == 3:
        x = x.reshape(-1, 1, 1)
        y = x.reshape(1, -1, 1)
        # bug fix: z must vary along the third axis; previously it
        # duplicated the y axis (x.reshape(1, -1, 1)), producing a
        # cylinder-like mask instead of a sphere
        z = x.reshape(1, 1, -1)
        zero = ((x-c)**2 + (y-c)**2 + (z-c)**2) < c**2
    else:
        raise ValueError("Cutout array must have dimension 2 or 3!")
    a *= zero
    return a
def get_results(frame):
    """Load the stored reference data for the calling test function.

    Parameters
    ----------
    frame : frame object
        The caller's stack frame; its ``__file__`` and function name
        identify which reference archive in the ``data`` directory
        next to this module is loaded.

    Returns
    -------
    data : 1D ndarray
        Flat float array extracted from the archive's ``data.txt``.

    Raises
    ------
    ValueError
        If no reference archive exists for the calling test.
    """
    filen = frame.f_globals["__file__"]
    funcname = frame.f_code.co_name
    # e.g. file "test_foo.py" + function "test_bar" -> "foo__test_bar"
    identifier = "{}__{}".format(filen.split("test_", 1)[1][:-3],
                                 funcname)
    wdir = pathlib.Path(__file__).parent / "data"
    zipf = wdir / (identifier + ".zip")
    text = "data.txt"
    tdir = tempfile.gettempdir()
    if zipf.exists():
        with zipfile.ZipFile(str(zipf)) as arc:
            arc.extract(text, tdir)
    else:
        # bug fix: report the missing archive identifier; the previous
        # message always printed the constant member name "data.txt",
        # which identifies nothing
        raise ValueError("No reference found for test: {}".format(identifier))
    tfile = pathlib.Path(tdir) / text
    data = np.loadtxt(str(tfile))
    tfile.unlink()
    return data
def get_test_parameter_set(set_number=1):
    """Return `set_number` parameter dictionaries for reconstruction tests.

    Each dictionary holds the keys "res", "lD", and "nm"; successive
    entries increase "res" by 0.1, "lD" by pi, and "nm" by 1 %.
    """
    res, lD, nm = 2.1, 0, 1.333
    pars = []
    for _ in range(set_number):
        pars.append({"res": res,
                     "lD": lD,
                     "nm": nm})
        # advance the parameters for the next set
        res, lD, nm = res + .1, lD + np.pi, nm * 1.01
    return pars
def normalize(av, vmin=0., vmax=1.):
    """Linearly rescale an array into the interval [vmin, vmax].

    If ``vmin == vmax`` a constant array of that value is returned;
    if ``vmax < vmin`` the bounds are swapped with a warning.
    """
    if vmin == vmax:
        return np.ones_like(av) * vmin
    if vmax < vmin:
        warnings.warn("swapping vmin and vmax, because vmax < vmin.")
        vmin, vmax = vmax, vmin
    # map onto [0, 1] first, then stretch/shift to [vmin, vmax]
    unit = (av - np.min(av)) / (np.max(av) - np.min(av))
    return unit * (vmax - vmin) + vmin
def write_results(frame, r):
    """Write the result array `r` to a zip file in the current directory.

    If put in the directory "data", these files will be used for tests.
    The archive name is derived from the calling test module and
    function (taken from `frame`).
    """
    # cast single precision to double precision
    if np.iscomplexobj(r):
        result = np.array(r, dtype=complex)
    else:
        result = np.array(r, dtype=float)
    # store the (possibly complex) data as a flat sequence of floats
    flat = np.array(result).flatten().view(float)
    filen = frame.f_globals["__file__"]
    funcname = frame.f_code.co_name
    identifier = "{}__{}".format(filen.split("test_", 1)[1][:-3],
                                 funcname)
    txt_path = pathlib.Path("data.txt")
    zip_path = pathlib.Path(identifier + ".zip")
    # remove stale files from previous runs
    for stale in (txt_path, zip_path):
        if stale.exists():
            stale.unlink()
    # save text
    np.savetxt(str(txt_path), flat, fmt="%.10f")
    # pack the text file into the archive, then drop the loose copy
    with zipfile.ZipFile(str(zip_path),
                         "w",
                         compression=zipfile.ZIP_DEFLATED) as arc:
        arc.write(str(txt_path))
    txt_path.unlink()
|
{
"content_hash": "6508ecb0048d0e907d4d008c385c39ce",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 76,
"avg_line_length": 32.173010380622834,
"alnum_prop": 0.5672187567218757,
"repo_name": "RI-imaging/ODTbrain",
"id": "94f326964f2cda1da4d2b6abb2dffca3ccc6d134",
"size": "9298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/common_methods.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "14950"
},
{
"name": "Python",
"bytes": "170001"
}
],
"symlink_target": ""
}
|
"""
Gaussian Mixture Models.
This implementation corresponds to frequentist (non-Bayesian) formulation
of Gaussian Mixture Models.
"""
# Author: Ron Weiss <ronweiss@gmail.com>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Bertrand Thirion <bertrand.thirion@inria.fr>
import numpy as np
from scipy import linalg
from ..base import BaseEstimator
from ..utils import check_random_state
from ..utils.extmath import logsumexp, pinvh
from ..utils.validation import check_is_fitted
from .. import cluster
from sklearn.externals.six.moves import zip
# Machine epsilon for double precision; used throughout this module to
# regularize divisions and keep log arguments strictly positive.
EPS = np.finfo(float).eps
def log_multivariate_normal_density(X, means, covars, covariance_type='diag'):
    """Compute the log probability under a multivariate Gaussian distribution.

    Parameters
    ----------
    X : array_like, shape (n_samples, n_features)
        List of n_features-dimensional data points. Each row corresponds to a
        single data point.
    means : array_like, shape (n_components, n_features)
        List of n_features-dimensional mean vectors for n_components Gaussians.
        Each row corresponds to a single mean vector.
    covars : array_like
        List of n_components covariance parameters for each Gaussian. The shape
        depends on `covariance_type`:
            (n_components, n_features)              if 'spherical',
            (n_features, n_features)                if 'tied',
            (n_components, n_features)              if 'diag',
            (n_components, n_features, n_features)  if 'full'
    covariance_type : string
        Type of the covariance parameters. Must be one of
        'spherical', 'tied', 'diag', 'full'. Defaults to 'diag'.

    Returns
    -------
    lpr : array_like, shape (n_samples, n_components)
        Array containing the log probabilities of each data point in
        X under each of the n_components multivariate Gaussian distributions.
    """
    # Dispatch on the covariance parameterization; an unknown
    # covariance_type raises KeyError, as before.
    density_funcs = {
        'spherical': _log_multivariate_normal_density_spherical,
        'tied': _log_multivariate_normal_density_tied,
        'diag': _log_multivariate_normal_density_diag,
        'full': _log_multivariate_normal_density_full,
    }
    return density_funcs[covariance_type](X, means, covars)
def sample_gaussian(mean, covar, covariance_type='diag', n_samples=1,
                    random_state=None):
    """Generate random samples from a Gaussian distribution.

    Parameters
    ----------
    mean : array_like, shape (n_features,)
        Mean of the distribution.
    covar : array_like, optional
        Covariance of the distribution. The shape depends on
        `covariance_type`:
            scalar                        if 'spherical',
            (n_features)                  if 'diag',
            (n_features, n_features)      if 'tied', or 'full'
    covariance_type : string, optional
        Type of the covariance parameters. Must be one of
        'spherical', 'tied', 'diag', 'full'. Defaults to 'diag'.
    n_samples : int, optional
        Number of samples to generate. Defaults to 1.

    Returns
    -------
    X : array, shape (n_features, n_samples)
        Randomly generated sample
    """
    rng = check_random_state(random_state)
    n_dim = len(mean)
    # start from standard-normal noise and color it with the covariance
    noise = rng.randn(n_dim, n_samples)
    if n_samples == 1:
        noise.shape = (n_dim,)
    if covariance_type == 'spherical':
        noise *= np.sqrt(covar)
    elif covariance_type == 'diag':
        noise = np.dot(np.diag(np.sqrt(covar)), noise)
    else:
        # 'tied'/'full': multiply by a symmetric square root of covar
        eigvals, eigvecs = linalg.eigh(covar)
        eigvals.clip(0, out=eigvals)  # get rid of tiny negatives
        np.sqrt(eigvals, out=eigvals)
        eigvecs *= eigvals
        noise = np.dot(eigvecs, noise)
    return (noise.T + mean).T
class GMM(BaseEstimator):
    """Gaussian Mixture Model
    Representation of a Gaussian mixture model probability distribution.
    This class allows for easy evaluation of, sampling from, and
    maximum-likelihood estimation of the parameters of a GMM distribution.
    Initializes parameters such that every mixture component has zero
    mean and identity covariance.
    Parameters
    ----------
    n_components : int, optional
        Number of mixture components. Defaults to 1.
    covariance_type : string, optional
        String describing the type of covariance parameters to
        use. Must be one of 'spherical', 'tied', 'diag', 'full'.
        Defaults to 'diag'.
    random_state: RandomState or an int seed (None by default)
        A random number generator instance
    min_covar : float, optional
        Floor on the diagonal of the covariance matrix to prevent
        overfitting. Defaults to 1e-3.
    thresh : float, optional
        Convergence threshold.
    n_iter : int, optional
        Number of EM iterations to perform.
    n_init : int, optional
        Number of initializations to perform. the best results is kept
    params : string, optional
        Controls which parameters are updated in the training
        process. Can contain any combination of 'w' for weights,
        'm' for means, and 'c' for covars. Defaults to 'wmc'.
    init_params : string, optional
        Controls which parameters are updated in the initialization
        process. Can contain any combination of 'w' for weights,
        'm' for means, and 'c' for covars. Defaults to 'wmc'.
    Attributes
    ----------
    weights_ : array, shape (`n_components`,)
        This attribute stores the mixing weights for each mixture component.
    means_ : array, shape (`n_components`, `n_features`)
        Mean parameters for each mixture component.
    covars_ : array
        Covariance parameters for each mixture component. The shape
        depends on `covariance_type`::
            (n_components, n_features)             if 'spherical',
            (n_features, n_features)               if 'tied',
            (n_components, n_features)             if 'diag',
            (n_components, n_features, n_features) if 'full'
    converged_ : bool
        True when convergence was reached in fit(), False otherwise.
    See Also
    --------
    DPGMM : Infinite gaussian mixture model, using the dirichlet
        process, fit with a variational algorithm
    VBGMM : Finite gaussian mixture model fit with a variational
        algorithm, better for situations where there might be too little
        data to get a good estimate of the covariance matrix.
    Examples
    --------
    >>> import numpy as np
    >>> from sklearn import mixture
    >>> np.random.seed(1)
    >>> g = mixture.GMM(n_components=2)
    >>> # Generate random observations with two modes centered on 0
    >>> # and 10 to use for training.
    >>> obs = np.concatenate((np.random.randn(100, 1),
    ...                       10 + np.random.randn(300, 1)))
    >>> g.fit(obs) # doctest: +NORMALIZE_WHITESPACE
    GMM(covariance_type='diag', init_params='wmc', min_covar=0.001,
            n_components=2, n_init=1, n_iter=100, params='wmc',
            random_state=None, thresh=0.01)
    >>> np.round(g.weights_, 2)
    array([ 0.75,  0.25])
    >>> np.round(g.means_, 2)
    array([[ 10.05],
           [  0.06]])
    >>> np.round(g.covars_, 2) #doctest: +SKIP
    array([[[ 1.02]],
           [[ 0.96]]])
    >>> g.predict([[0], [2], [9], [10]]) #doctest: +ELLIPSIS
    array([1, 1, 0, 0]...)
    >>> np.round(g.score([[0], [2], [9], [10]]), 2)
    array([-2.19, -4.58, -1.75, -1.21])
    >>> # Refit the model on new data (initial parameters remain the
    >>> # same), this time with an even split between the two modes.
    >>> g.fit(20 * [[0]] +  20 * [[10]]) # doctest: +NORMALIZE_WHITESPACE
    GMM(covariance_type='diag', init_params='wmc', min_covar=0.001,
            n_components=2, n_init=1, n_iter=100, params='wmc',
            random_state=None, thresh=0.01)
    >>> np.round(g.weights_, 2)
    array([ 0.5,  0.5])
    """
    def __init__(self, n_components=1, covariance_type='diag',
                 random_state=None, thresh=1e-2, min_covar=1e-3,
                 n_iter=100, n_init=1, params='wmc', init_params='wmc'):
        self.n_components = n_components
        self.covariance_type = covariance_type
        self.thresh = thresh
        self.min_covar = min_covar
        self.random_state = random_state
        self.n_iter = n_iter
        self.n_init = n_init
        self.params = params
        self.init_params = init_params
        # Validate eagerly so misconfiguration fails at construction time.
        if not covariance_type in ['spherical', 'tied', 'diag', 'full']:
            raise ValueError('Invalid value for covariance_type: %s' %
                             covariance_type)
        if n_init < 1:
            raise ValueError('GMM estimation requires at least one run')
        # Start from uniform mixing weights.
        self.weights_ = np.ones(self.n_components) / self.n_components
        # flag to indicate exit status of fit() method: converged (True) or
        # n_iter reached (False)
        self.converged_ = False
    def _get_covars(self):
        """Covariance parameters for each mixture component.
        Always returns full (n_features, n_features) matrices per
        component, expanding 'spherical'/'diag'/'tied' storage.
        The shape depends on `cvtype`::
            (`n_states`, 'n_features')                if 'spherical',
            (`n_features`, `n_features`)              if 'tied',
            (`n_states`, `n_features`)                if 'diag',
            (`n_states`, `n_features`, `n_features`)  if 'full'
        """
        if self.covariance_type == 'full':
            return self.covars_
        elif self.covariance_type == 'diag':
            return [np.diag(cov) for cov in self.covars_]
        elif self.covariance_type == 'tied':
            return [self.covars_] * self.n_components
        elif self.covariance_type == 'spherical':
            return [np.diag(cov) for cov in self.covars_]
    def _set_covars(self, covars):
        """Provide values for covariance"""
        covars = np.asarray(covars)
        _validate_covars(covars, self.covariance_type, self.n_components)
        self.covars_ = covars
    def score_samples(self, X):
        """Return the per-sample likelihood of the data under the model.
        Compute the log probability of X under the model and
        return the posterior distribution (responsibilities) of each
        mixture component for each element of X.
        Parameters
        ----------
        X: array_like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.
        Returns
        -------
        logprob : array_like, shape (n_samples,)
            Log probabilities of each data point in X.
        responsibilities : array_like, shape (n_samples, n_components)
            Posterior probabilities of each mixture component for each
            observation
        """
        check_is_fitted(self, 'means_')
        X = np.asarray(X)
        if X.ndim == 1:
            X = X[:, np.newaxis]
        if X.size == 0:
            return np.array([]), np.empty((0, self.n_components))
        if X.shape[1] != self.means_.shape[1]:
            raise ValueError('The shape of X is not compatible with self')
        # Joint log-likelihood per sample and component:
        # log p(x | component k) + log(weight_k)
        lpr = (log_multivariate_normal_density(X, self.means_, self.covars_,
                                               self.covariance_type)
               + np.log(self.weights_))
        # Marginalize over components in a numerically stable way.
        logprob = logsumexp(lpr, axis=1)
        responsibilities = np.exp(lpr - logprob[:, np.newaxis])
        return logprob, responsibilities
    def score(self, X):
        """Compute the log probability under the model.
        Parameters
        ----------
        X : array_like, shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.
        Returns
        -------
        logprob : array_like, shape (n_samples,)
            Log probabilities of each data point in X
        """
        logprob, _ = self.score_samples(X)
        return logprob
    def predict(self, X):
        """Predict label for data.
        Returns the index of the most probable component for each sample.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
        Returns
        -------
        C : array, shape = (n_samples,)
        """
        logprob, responsibilities = self.score_samples(X)
        return responsibilities.argmax(axis=1)
    def predict_proba(self, X):
        """Predict posterior probability of data under each Gaussian
        in the model.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
        Returns
        -------
        responsibilities : array-like, shape = (n_samples, n_components)
            Returns the probability of the sample for each Gaussian
            (state) in the model.
        """
        logprob, responsibilities = self.score_samples(X)
        return responsibilities
    def sample(self, n_samples=1, random_state=None):
        """Generate random samples from the model.
        Parameters
        ----------
        n_samples : int, optional
            Number of samples to generate. Defaults to 1.
        Returns
        -------
        X : array_like, shape (n_samples, n_features)
            List of samples
        """
        check_is_fitted(self, 'means_')
        if random_state is None:
            random_state = self.random_state
        random_state = check_random_state(random_state)
        # Inverse-CDF sampling of the component index from the weights.
        weight_cdf = np.cumsum(self.weights_)
        X = np.empty((n_samples, self.means_.shape[1]))
        rand = random_state.rand(n_samples)
        # decide which component to use for each sample
        comps = weight_cdf.searchsorted(rand)
        # for each component, generate all needed samples
        for comp in range(self.n_components):
            # occurrences of current component in X
            comp_in_X = (comp == comps)
            # number of those occurrences
            num_comp_in_X = comp_in_X.sum()
            if num_comp_in_X > 0:
                if self.covariance_type == 'tied':
                    cv = self.covars_
                elif self.covariance_type == 'spherical':
                    cv = self.covars_[comp][0]
                else:
                    cv = self.covars_[comp]
                X[comp_in_X] = sample_gaussian(
                    self.means_[comp], cv, self.covariance_type,
                    num_comp_in_X, random_state=random_state).T
        return X
    def fit(self, X):
        """Estimate model parameters with the expectation-maximization
        algorithm.
        A initialization step is performed before entering the em
        algorithm. If you want to avoid this step, set the keyword
        argument init_params to the empty string '' when creating the
        GMM object. Likewise, if you would like just to do an
        initialization, set n_iter=0.
        Parameters
        ----------
        X : array_like, shape (n, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.
        """
        ## initialization step
        X = np.asarray(X, dtype=np.float)
        if X.ndim == 1:
            X = X[:, np.newaxis]
        if X.shape[0] < self.n_components:
            raise ValueError(
                'GMM estimation with %s components, but got only %s samples' %
                (self.n_components, X.shape[0]))
        # Run EM n_init times and keep the parameters of the best run.
        max_log_prob = -np.infty
        for _ in range(self.n_init):
            # (Re)initialize means via k-means, weights uniformly, and
            # covariances from the data covariance, as requested by
            # init_params (or when the attribute does not exist yet).
            if 'm' in self.init_params or not hasattr(self, 'means_'):
                self.means_ = cluster.KMeans(
                    n_clusters=self.n_components,
                    random_state=self.random_state).fit(X).cluster_centers_
            if 'w' in self.init_params or not hasattr(self, 'weights_'):
                self.weights_ = np.tile(1.0 / self.n_components,
                                        self.n_components)
            if 'c' in self.init_params or not hasattr(self, 'covars_'):
                cv = np.cov(X.T) + self.min_covar * np.eye(X.shape[1])
                if not cv.shape:
                    cv.shape = (1, 1)
                self.covars_ = \
                    distribute_covar_matrix_to_match_covariance_type(
                        cv, self.covariance_type, self.n_components)
            # EM algorithms
            log_likelihood = []
            # reset self.converged_ to False
            self.converged_ = False
            for i in range(self.n_iter):
                # Expectation step
                curr_log_likelihood, responsibilities = self.score_samples(X)
                log_likelihood.append(curr_log_likelihood.sum())
                # Check for convergence.
                if i > 0 and abs(log_likelihood[-1] - log_likelihood[-2]) < \
                        self.thresh:
                    self.converged_ = True
                    break
                # Maximization step
                self._do_mstep(X, responsibilities, self.params,
                               self.min_covar)
            # if the results are better, keep it
            if self.n_iter:
                if log_likelihood[-1] > max_log_prob:
                    max_log_prob = log_likelihood[-1]
                    best_params = {'weights': self.weights_,
                                   'means': self.means_,
                                   'covars': self.covars_}
        # check the existence of an init param that was not subject to
        # likelihood computation issue.
        if np.isneginf(max_log_prob) and self.n_iter:
            raise RuntimeError(
                "EM algorithm was never able to compute a valid likelihood " +
                "given initial parameters. Try different init parameters " +
                "(or increasing n_init) or check for degenerate data.")
        # self.n_iter == 0 occurs when using GMM within HMM
        # (best_params is always bound here: a run with a finite
        # likelihood must have assigned it, otherwise the RuntimeError
        # above fires first)
        if self.n_iter:
            self.covars_ = best_params['covars']
            self.means_ = best_params['means']
            self.weights_ = best_params['weights']
        return self
    def _do_mstep(self, X, responsibilities, params, min_covar=0):
        """ Perform the Mstep of the EM algorithm and return the class weights.
        Only the parameters named in `params` ('w', 'm', 'c') are updated.
        """
        weights = responsibilities.sum(axis=0)
        weighted_X_sum = np.dot(responsibilities.T, X)
        # 10 * EPS keeps the division safe for empty components
        inverse_weights = 1.0 / (weights[:, np.newaxis] + 10 * EPS)
        if 'w' in params:
            self.weights_ = (weights / (weights.sum() + 10 * EPS) + EPS)
        if 'm' in params:
            self.means_ = weighted_X_sum * inverse_weights
        if 'c' in params:
            covar_mstep_func = _covar_mstep_funcs[self.covariance_type]
            self.covars_ = covar_mstep_func(
                self, X, responsibilities, weighted_X_sum, inverse_weights,
                min_covar)
        return weights
    def _n_parameters(self):
        """Return the number of free parameters in the model."""
        ndim = self.means_.shape[1]
        if self.covariance_type == 'full':
            cov_params = self.n_components * ndim * (ndim + 1) / 2.
        elif self.covariance_type == 'diag':
            cov_params = self.n_components * ndim
        elif self.covariance_type == 'tied':
            cov_params = ndim * (ndim + 1) / 2.
        elif self.covariance_type == 'spherical':
            cov_params = self.n_components
        mean_params = ndim * self.n_components
        # "- 1" because the mixing weights sum to one
        return int(cov_params + mean_params + self.n_components - 1)
    def bic(self, X):
        """Bayesian information criterion for the current model fit
        and the proposed data
        Parameters
        ----------
        X : array of shape(n_samples, n_dimensions)
        Returns
        -------
        bic: float (the lower the better)
        """
        return (-2 * self.score(X).sum() +
                self._n_parameters() * np.log(X.shape[0]))
    def aic(self, X):
        """Akaike information criterion for the current model fit
        and the proposed data
        Parameters
        ----------
        X : array of shape(n_samples, n_dimensions)
        Returns
        -------
        aic: float (the lower the better)
        """
        return - 2 * self.score(X).sum() + 2 * self._n_parameters()
#########################################################################
## some helper routines
#########################################################################
def _log_multivariate_normal_density_diag(X, means, covars):
"""Compute Gaussian log-density at X for a diagonal model"""
n_samples, n_dim = X.shape
lpr = -0.5 * (n_dim * np.log(2 * np.pi) + np.sum(np.log(covars), 1)
+ np.sum((means ** 2) / covars, 1)
- 2 * np.dot(X, (means / covars).T)
+ np.dot(X ** 2, (1.0 / covars).T))
return lpr
def _log_multivariate_normal_density_spherical(X, means, covars):
    """Gaussian log-density at X for a spherical (isotropic) model."""
    # Broadcast the per-component scalar variances to full diagonals,
    # then defer to the diagonal implementation.
    expanded = covars.copy()
    if covars.ndim == 1:
        expanded = expanded[:, np.newaxis]
    if covars.shape[1] == 1:
        expanded = np.tile(expanded, (1, X.shape[-1]))
    return _log_multivariate_normal_density_diag(X, means, expanded)
def _log_multivariate_normal_density_tied(X, means, covars):
    """Compute Gaussian log-density at X for a tied model.

    All components share the single covariance matrix `covars`
    (shape (n_features, n_features)); returns shape
    (n_samples, n_components).
    """
    n_samples, n_dim = X.shape
    icv = pinvh(covars)
    # bug fix: the normalization term must be log(det(covars)); the
    # spurious "+ 0.1" offset inside the log biased every density.
    lpr = -0.5 * (n_dim * np.log(2 * np.pi) + np.log(linalg.det(covars))
                  + np.sum(X * np.dot(X, icv), 1)[:, np.newaxis]
                  - 2 * np.dot(np.dot(X, icv), means.T)
                  + np.sum(means * np.dot(means, icv), 1))
    return lpr
def _log_multivariate_normal_density_full(X, means, covars, min_covar=1.e-7):
"""Log probability for full covariance matrices.
"""
n_samples, n_dim = X.shape
nmix = len(means)
log_prob = np.empty((n_samples, nmix))
for c, (mu, cv) in enumerate(zip(means, covars)):
try:
cv_chol = linalg.cholesky(cv, lower=True)
except linalg.LinAlgError:
# The model is most probably stuck in a component with too
# few observations, we need to reinitialize this components
cv_chol = linalg.cholesky(cv + min_covar * np.eye(n_dim),
lower=True)
cv_log_det = 2 * np.sum(np.log(np.diagonal(cv_chol)))
cv_sol = linalg.solve_triangular(cv_chol, (X - mu).T, lower=True).T
log_prob[:, c] = - .5 * (np.sum(cv_sol ** 2, axis=1) +
n_dim * np.log(2 * np.pi) + cv_log_det)
return log_prob
def _validate_covars(covars, covariance_type, n_components):
"""Do basic checks on matrix covariance sizes and values
"""
from scipy import linalg
if covariance_type == 'spherical':
if len(covars) != n_components:
raise ValueError("'spherical' covars have length n_components")
elif np.any(covars <= 0):
raise ValueError("'spherical' covars must be non-negative")
elif covariance_type == 'tied':
if covars.shape[0] != covars.shape[1]:
raise ValueError("'tied' covars must have shape (n_dim, n_dim)")
elif (not np.allclose(covars, covars.T)
or np.any(linalg.eigvalsh(covars) <= 0)):
raise ValueError("'tied' covars must be symmetric, "
"positive-definite")
elif covariance_type == 'diag':
if len(covars.shape) != 2:
raise ValueError("'diag' covars must have shape "
"(n_components, n_dim)")
elif np.any(covars <= 0):
raise ValueError("'diag' covars must be non-negative")
elif covariance_type == 'full':
if len(covars.shape) != 3:
raise ValueError("'full' covars must have shape "
"(n_components, n_dim, n_dim)")
elif covars.shape[1] != covars.shape[2]:
raise ValueError("'full' covars must have shape "
"(n_components, n_dim, n_dim)")
for n, cv in enumerate(covars):
if (not np.allclose(cv, cv.T)
or np.any(linalg.eigvalsh(cv) <= 0)):
raise ValueError("component %d of 'full' covars must be "
"symmetric, positive-definite" % n)
else:
raise ValueError("covariance_type must be one of " +
"'spherical', 'tied', 'diag', 'full'")
def distribute_covar_matrix_to_match_covariance_type(
        tied_cv, covariance_type, n_components):
    """Create all the covariance matrices from a given template.

    Expands the single (n_dim, n_dim) template `tied_cv` into the
    storage layout used for `covariance_type`.
    """
    if covariance_type == 'spherical':
        # one scalar variance per component, replicated across features
        return np.tile(tied_cv.mean() * np.ones(tied_cv.shape[1]),
                       (n_components, 1))
    if covariance_type == 'tied':
        return tied_cv
    if covariance_type == 'diag':
        # keep only the template's diagonal, once per component
        return np.tile(np.diag(tied_cv), (n_components, 1))
    if covariance_type == 'full':
        return np.tile(tied_cv, (n_components, 1, 1))
    raise ValueError("covariance_type must be one of " +
                     "'spherical', 'tied', 'diag', 'full'")
def _covar_mstep_diag(gmm, X, responsibilities, weighted_X_sum, norm,
min_covar):
"""Performing the covariance M step for diagonal cases"""
avg_X2 = np.dot(responsibilities.T, X * X) * norm
avg_means2 = gmm.means_ ** 2
avg_X_means = gmm.means_ * weighted_X_sum * norm
return avg_X2 - 2 * avg_X_means + avg_means2 + min_covar
def _covar_mstep_spherical(*args):
    """Covariance M step for the spherical case."""
    # Average the per-feature diagonal variances of each component,
    # then broadcast the scalar back across all features.
    diag_cv = _covar_mstep_diag(*args)
    return np.tile(diag_cv.mean(axis=1)[:, np.newaxis], (1, diag_cv.shape[1]))
def _covar_mstep_full(gmm, X, responsibilities, weighted_X_sum, norm,
                      min_covar):
    """Covariance M step for the full case."""
    # Eq. 12 from K. Murphy, "Fitting a Conditional Linear Gaussian
    # Distribution"
    n_features = X.shape[1]
    cv = np.empty((gmm.n_components, n_features, n_features))
    for comp in range(gmm.n_components):
        post = responsibilities[:, comp]
        # Underflow Errors in doing post * X.T are not important
        np.seterr(under='ignore')
        # responsibility-weighted second moment of the data
        avg_cv = np.dot(post * X.T, X) / (post.sum() + 10 * EPS)
        mu = gmm.means_[comp][np.newaxis]
        # subtract the outer product of the mean; floor with min_covar
        cv[comp] = avg_cv - np.dot(mu.T, mu) + min_covar * np.eye(n_features)
    return cv
def _covar_mstep_tied(gmm, X, responsibilities, weighted_X_sum, norm,
min_covar):
# Eq. 15 from K. Murphy, "Fitting a Conditional Linear Gaussian
n_features = X.shape[1]
avg_X2 = np.dot(X.T, X)
avg_means2 = np.dot(gmm.means_.T, weighted_X_sum)
return (avg_X2 - avg_means2 + min_covar * np.eye(n_features)) / X.shape[0]
# Dispatch table used by GMM._do_mstep: covariance_type -> M-step routine.
_covar_mstep_funcs = {'spherical': _covar_mstep_spherical,
                      'diag': _covar_mstep_diag,
                      'tied': _covar_mstep_tied,
                      'full': _covar_mstep_full,
                      }
|
{
"content_hash": "49c50ad90c39a31b906eb7fd8f6754cd",
"timestamp": "",
"source": "github",
"line_count": 715,
"max_line_length": 79,
"avg_line_length": 37.62937062937063,
"alnum_prop": 0.5661401226537818,
"repo_name": "bigdataelephants/scikit-learn",
"id": "c9f252dc7ffe9e6557bc3777b79273e1bb98b800",
"size": "26905",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sklearn/mixture/gmm.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "18594358"
},
{
"name": "C++",
"bytes": "1810938"
},
{
"name": "Makefile",
"bytes": "1364"
},
{
"name": "PowerShell",
"bytes": "13427"
},
{
"name": "Python",
"bytes": "5616877"
},
{
"name": "Shell",
"bytes": "5647"
}
],
"symlink_target": ""
}
|
import hashlib
import os
import tempfile
from attic.hashindex import NSIndex, ChunkIndex
from attic.testsuite import AtticTestCase
class HashIndexTestCase(AtticTestCase):
    """Exercise the hash index classes (NSIndex, ChunkIndex)."""
    def _generic_test(self, cls, make_value, sha):
        # Shared set/update/delete/serialize scenario for both index
        # types. `make_value` builds the stored value tuple for an
        # integer key; `sha` is the expected SHA-256 hex digest of the
        # serialized index file (pins the on-disk format).
        idx = cls()
        self.assert_equal(len(idx), 0)
        # Test set
        for x in range(100):
            idx[bytes('%-32d' % x, 'ascii')] = make_value(x)
        self.assert_equal(len(idx), 100)
        for x in range(100):
            self.assert_equal(idx[bytes('%-32d' % x, 'ascii')], make_value(x))
        # Test update
        for x in range(100):
            idx[bytes('%-32d' % x, 'ascii')] = make_value(x * 2)
        self.assert_equal(len(idx), 100)
        for x in range(100):
            self.assert_equal(idx[bytes('%-32d' % x, 'ascii')], make_value(x * 2))
        # Test delete
        for x in range(50):
            del idx[bytes('%-32d' % x, 'ascii')]
        self.assert_equal(len(idx), 50)
        idx_name = tempfile.NamedTemporaryFile()
        idx.write(idx_name.name)
        del idx
        # Verify file contents
        with open(idx_name.name, 'rb') as fd:
            self.assert_equal(hashlib.sha256(fd.read()).hexdigest(), sha)
        # Make sure we can open the file
        idx = cls.read(idx_name.name)
        self.assert_equal(len(idx), 50)
        for x in range(50, 100):
            self.assert_equal(idx[bytes('%-32d' % x, 'ascii')], make_value(x * 2))
        idx.clear()
        self.assert_equal(len(idx), 0)
        idx.write(idx_name.name)
        del idx
        # A cleared, re-serialized index must read back as empty.
        self.assert_equal(len(cls.read(idx_name.name)), 0)
    def test_nsindex(self):
        # NSIndex maps keys to 2-tuples of ints.
        self._generic_test(NSIndex, lambda x: (x, x), '369a18ae6a52524eb2884a3c0fdc2824947edd017a2688c5d4d7b3510c245ab9')
    def test_chunkindex(self):
        # ChunkIndex maps keys to 3-tuples of ints.
        self._generic_test(ChunkIndex, lambda x: (x, x, x), 'ed22e8a883400453c0ee79a06c54df72c994a54eeefdc6c0989efdc5ee6d07b7')
    def test_resize(self):
        # The on-disk file must grow when entries are added and shrink
        # back to its initial size after all entries are removed.
        n = 2000  # Must be >= MIN_BUCKETS
        idx_name = tempfile.NamedTemporaryFile()
        idx = NSIndex()
        idx.write(idx_name.name)
        initial_size = os.path.getsize(idx_name.name)
        self.assert_equal(len(idx), 0)
        for x in range(n):
            idx[bytes('%-32d' % x, 'ascii')] = x, x
        idx.write(idx_name.name)
        self.assert_true(initial_size < os.path.getsize(idx_name.name))
        for x in range(n):
            del idx[bytes('%-32d' % x, 'ascii')]
        self.assert_equal(len(idx), 0)
        idx.write(idx_name.name)
        self.assert_equal(initial_size, os.path.getsize(idx_name.name))
    def test_iteritems(self):
        # iteritems() must yield all entries, and the `marker` argument
        # must resume iteration right after the given key.
        idx = NSIndex()
        for x in range(100):
            idx[bytes('%-0.32d' % x, 'ascii')] = x, x
        all = list(idx.iteritems())
        self.assert_equal(len(all), 100)
        second_half = list(idx.iteritems(marker=all[49][0]))
        self.assert_equal(len(second_half), 50)
        self.assert_equal(second_half, all[50:])
|
{
"content_hash": "3a1499b98b122f98faa23a8770907a0d",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 127,
"avg_line_length": 38.42857142857143,
"alnum_prop": 0.5809395065900642,
"repo_name": "pombredanne/attic",
"id": "1f6aa22aa8a09c0817aea494ecb1a0feac1e017a",
"size": "2959",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "attic/testsuite/hashindex.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "18912"
},
{
"name": "Python",
"bytes": "296164"
}
],
"symlink_target": ""
}
|
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import ConfigParser, os, platform, subprocess, sys, json
from copy_reg import add_extension
# globals
prefs = {}
main_wnd = None
# register module path
mod_path = os.path.normpath(os.path.dirname(os.path.abspath(__file__)) + '/../pymodules')
sys.path.append(mod_path)
from dhdlg import about
from dhutil import util
import dheng, clipedit, engine, modelprev
def add_extension(filepath, ext):
    """Return *filepath* with ".ext" appended unless it already has exactly
    that extension.

    filepath -- path string to normalize.
    ext      -- extension without the leading dot (e.g. "h3dp").

    Bug fix: the original used a substring test (``"." + ext in fileext``),
    which wrongly treated e.g. ".h3dpx" as already carrying the "h3dp"
    extension; the split-off extension is now compared exactly.
    """
    fileext = os.path.splitext(filepath)[1]
    if fileext != "." + ext:
        filepath = filepath + "." + ext
    return filepath
###############################################################################################
class w_prefs(QDialog):
    """Modal preferences dialog: importer binary path, asset root, verbosity.

    Values are read from and written to the global ``prefs`` dict; the
    ConfigParser passed to save_config is only used to ensure the
    [general] section exists (w_main.save_prefs fills it in later).
    """
    def __init__(self, parent):
        super(w_prefs, self).__init__(parent)
        self.init_ui()
    def init_ui(self):
        """Create and lay out all dialog widgets."""
        self.setMinimumWidth(500)
        self.setWindowTitle('Preferences')
        self.setWindowFlags(self.windowFlags() & (~Qt.WindowContextHelpButtonHint))
        layout = QVBoxLayout(self)
        # importer binary row
        self.edit_bin = QLineEdit(self)
        self.check_verbose = QCheckBox("Verbose mode", self)
        btn_browse = QPushButton("Browse ...", self)
        btn_browse.setFixedWidth(80)
        btn_browse.clicked.connect(self.browse_clicked)
        layout2 = QHBoxLayout()
        layout2.addWidget(QLabel("Importer binary :"))
        layout2.addWidget(self.edit_bin)
        layout2.addWidget(btn_browse)
        layout.addLayout(layout2)
        # asset directory row
        layout4 = QHBoxLayout()
        ed_assetdir = QLineEdit(self)
        btn_browse = QPushButton('Browse', self)
        btn_browse.setFixedWidth(80)
        btn_browse.clicked.connect(self.btn_assetdir_clicked)
        layout4.addWidget(QLabel('Asset Directory :'))
        layout4.addWidget(ed_assetdir)
        layout4.addWidget(btn_browse)
        layout.addLayout(layout4)
        self.ed_assetdir = ed_assetdir
        layout.addWidget(self.check_verbose)
        # Ok / Cancel row
        btn_ok = QPushButton("Ok", self)
        btn_cancel = QPushButton("Cancel", self)
        btn_ok.clicked.connect(self.ok_clicked)
        btn_cancel.clicked.connect(self.cancel_clicked)
        layout3 = QHBoxLayout()
        layout3.addWidget(btn_ok)
        layout3.addWidget(btn_cancel)
        layout3.addStretch()
        layout.addLayout(layout3)
        layout.addStretch()
        self.setLayout(layout)
    def btn_assetdir_clicked(self, checked):
        """Pick the asset root directory."""
        dlg = QFileDialog(self, "Choose asset directory", self.ed_assetdir.text())
        dlg.setFileMode(QFileDialog.Directory)
        if dlg.exec_():
            dirs = dlg.selectedFiles()
            self.ed_assetdir.setText(os.path.abspath(str(dirs[0])))
    def browse_clicked(self, checked):
        """Pick the h3dimport executable.

        Bug fix: the empty-string (dialog canceled) check now runs *before*
        os.path.abspath -- abspath("") returns the current working
        directory, so the old order silently filled the field with the cwd
        whenever the dialog was canceled.
        """
        if platform.system() == "Windows":
            filters = "Executables (*.exe)"
        else:
            filters = "Executables (*)"
        binfile = str(QFileDialog.getOpenFileName(self, "Open h3dimport binary", "", filters))
        if binfile != "":
            self.edit_bin.setText(os.path.abspath(binfile))
    def ok_clicked(self, checked):
        self.accept()
    def cancel_clicked(self, checked):
        self.reject()
    def load_config(self, cfg):
        """Fill the widgets from the global prefs dict (cfg is unused here)."""
        global prefs
        self.edit_bin.setText(prefs['binpath'])
        self.check_verbose.setChecked(prefs['verbose'])
        self.ed_assetdir.setText(prefs['assetdir'])
    def save_config(self, cfg):
        """Store widget values back into the global prefs dict; only the
        [general] section's existence is touched in cfg itself."""
        global prefs
        if not "general" in cfg.sections():
            cfg.add_section("general")
        prefs['verbose'] = self.check_verbose.isChecked()
        prefs['binpath'] = str(self.edit_bin.text())
        prefs['assetdir'] = str(self.ed_assetdir.text())
###############################################################################################
class w_phx(QWidget):
    """Tab page driving physics imports (PhysX *.RepX -> *.h3dp).

    Shells out to the external h3dimport binary; in "Auto" mode a
    QFileSystemWatcher re-runs the import whenever the input file changes.
    """
    def __init__(self, parent):
        super(w_phx, self).__init__(parent)
        # last-used directories for the open/save dialogs
        self.infiledir = ""
        self.outfiledir = ""
        # widgets enabled/disabled as a group (see enable_controls)
        self.ctrls = []
        # True while Auto mode is active: suppress modal error boxes
        self.quiet_mode = False
        self.watcher = QFileSystemWatcher(self)
        self.watcher.fileChanged.connect(self.filemon_onfilechange)
        self.init_ui()
    def init_ui(self):
        """Create and lay out the tab's widgets and wire their signals."""
        layout = QFormLayout(self)
        self.setLayout(layout)
        # make controls
        self.edit_infilepath = QLineEdit(self)
        self.edit_infilepath.setReadOnly(True)
        btn_browse_infile = QPushButton("Browse", self)
        btn_browse_infile.setFixedWidth(60)
        self.combo_names = QComboBox(self)
        self.edit_outfilepath = QLineEdit(self)
        self.edit_outfilepath.setReadOnly(True)
        btn_browse_outfile = QPushButton("Browse", self)
        btn_browse_outfile.setFixedWidth(60)
        self.check_zup = QCheckBox(self)
        btn_auto = QCheckBox("Auto", self)
        btn_auto.setMaximumWidth(50)
        btn_import = QPushButton("Import", self)
        self.btn_auto = btn_auto
        self.btn_import = btn_import
        # layouts
        layout_infile = QHBoxLayout()
        layout_infile.addWidget(self.edit_infilepath)
        layout_infile.addWidget(btn_browse_infile)
        layout.addRow("Input file:", layout_infile)
        layout.addRow("Object name:", self.combo_names)
        layout_outfile = QHBoxLayout()
        layout_outfile.addWidget(self.edit_outfilepath)
        layout_outfile.addWidget(btn_browse_outfile)
        layout.addRow("Output file:", layout_outfile)
        layout.addRow("Up is Z (3dsmax):", self.check_zup)
        layout_import = QHBoxLayout()
        layout_import.addWidget(btn_import)
        layout_import.addWidget(btn_auto)
        layout.addRow(layout_import)
        #events
        btn_browse_infile.clicked.connect(self.btn_browseinfile_click)
        btn_browse_outfile.clicked.connect(self.btn_browseoutfile_click)
        btn_auto.stateChanged.connect(self.btn_auto_checkstate)
        btn_import.clicked.connect(self.btn_import_click)
        # group controls for enable/disable
        self.ctrls.append(btn_browse_infile)
        self.ctrls.append(self.edit_infilepath)
        self.ctrls.append(self.combo_names)
        self.ctrls.append(self.edit_outfilepath)
        self.ctrls.append(self.check_zup)
        self.ctrls.append(btn_browse_outfile)
        self.ctrls.append(btn_import)
    def filemon_onfilechange(self, qfilepath):
        """Watcher callback: re-import when the watched input file changed."""
        filepath = str(qfilepath)
        if filepath == str(self.edit_infilepath.text()):
            self.btn_import_click(True)
    def enable_controls(self, enable):
        """Enable/disable all grouped controls at once (used by Auto mode)."""
        for c in self.ctrls:
            c.setEnabled(enable)
    def btn_auto_checkstate(self, state):
        """Toggle Auto mode: lock the UI and (un)register the file watcher."""
        if state == Qt.Checked:
            self.btn_import.setCheckable(True)
            self.btn_import_click(True)
            self.btn_import.setChecked(True)
            self.enable_controls(False)
            self.quiet_mode = True
            if len(self.edit_infilepath.text()) > 0:
                self.watcher.addPath(self.edit_infilepath.text())
        else:
            self.quiet_mode = False
            self.btn_import.setChecked(False)
            self.btn_import.setCheckable(False)
            self.enable_controls(True)
            if len(self.edit_infilepath.text()) > 0:
                self.watcher.removePath(self.edit_infilepath.text())
    def btn_browseinfile_click(self):
        """Pick the RepX input; derive the default output name from it."""
        dlg = QFileDialog(self, "Open physics", self.infiledir, \
            "Physx3 files (*.RepX)")
        dlg.setFileMode(QFileDialog.ExistingFile)
        if dlg.exec_():
            files = dlg.selectedFiles()
            self.edit_infilepath.setText(os.path.normpath(str(files[0])))
            self.infiledir = os.path.normpath(str(dlg.directory().path()))
            in_filepath = str(self.edit_infilepath.text())
            out_filepath = str(self.edit_outfilepath.text())
            self.edit_outfilepath.setText(util.make_samefname(out_filepath, in_filepath, "h3dp"))
            self.enum_phxobjects()
    def btn_browseoutfile_click(self):
        """Pick the .h3dp output path; must be inside the asset directory."""
        dlg = QFileDialog(self, "Save h3dp file", self.outfiledir, \
            "dark-hammer physics (*.h3dp)")
        dlg.setFileMode(QFileDialog.AnyFile)
        dlg.setAcceptMode(QFileDialog.AcceptSave)
        if dlg.exec_():
            # output paths are stored relative to the asset root
            relative_path = util.get_rel_path(str(dlg.selectedFiles()[0]), prefs['assetdir'])
            if not relative_path:
                QMessageBox.warning(self, 'h3dimport', \
                    'Path must be under asset directory tree')
                return
            self.edit_outfilepath.setText(add_extension(relative_path, "h3dp"))
            self.outfiledir = os.path.normcase(str(dlg.directory().path()))
    def btn_import_click(self, checked):
        """Run h3dimport (-p) on the selected physics object."""
        global prefs
        name = str(self.combo_names.itemText(self.combo_names.currentIndex()))
        args = [str(prefs['binpath']),
            "-p", str(self.edit_infilepath.text()),
            "-o", os.path.normcase(prefs['assetdir'] + '/' + str(self.edit_outfilepath.text())),
            "-n", name]
        if self.check_zup.isChecked():
            args.extend(["-zup"])
        if prefs['verbose']:
            args.extend(["-v"])
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        r = subprocess.call(args)
        QApplication.restoreOverrideCursor()
        # NOTE(review): subprocess.call returns the child's exit status; the
        # -1 comparison assumes h3dimport reports failure that way on this
        # platform -- verify the tool's actual exit codes
        if r == -1 and not self.quiet_mode:
            QMessageBox.critical(self, "h3dimport", """Failed to import file, see the"""\
                """ terminal for more info""")
    def save_config(self, cfg):
        """Write this tab's state into the [physics] section of cfg."""
        if not "physics" in cfg.sections():
            cfg.add_section("physics")
        cfg.set("physics", "in_filepath", str(self.edit_infilepath.text()))
        cfg.set("physics", "out_filepath", str(self.edit_outfilepath.text()))
        cfg.set("physics", "in_filedir", str(self.infiledir))
        cfg.set("physics", "out_filedir", str(self.outfiledir))
        cfg.set("physics", "zup", str(self.check_zup.isChecked()))
    def load_config(self, cfg):
        """Restore this tab's state from the [physics] section of cfg."""
        if not "physics" in cfg.sections():
            return
        self.edit_infilepath.setText(cfg.get("physics", "in_filepath"))
        self.edit_outfilepath.setText(cfg.get("physics", "out_filepath"))
        self.infiledir = cfg.get("physics", "in_filedir")
        self.outfiledir = cfg.get("physics", "out_filedir")
        self.check_zup.setChecked(cfg.getboolean("physics", "zup"))
    def enum_phxobjects(self):
        """Query h3dimport (-l -p) for object names and fill the combo box."""
        global prefs
        self.combo_names.clear()
        if len(prefs['binpath']) == 0 or len(self.edit_infilepath.text()) == 0:
            return
        phx_filepath = str(self.edit_infilepath.text())
        args = [prefs['binpath'], "-l", "-p", phx_filepath]
        try:
            r = subprocess.check_output(args)
        except subprocess.CalledProcessError as ce:
            print ce.output
            QMessageBox.critical(self, "h3dimport", "h3dimport raised error!")
        else:
            # one object name per non-empty output line
            objs = str(r).replace("\r", "").split("\n")
            for ln in objs:
                if len(ln) > 0:
                    self.combo_names.addItem(ln)
            # skip the first entry by default when more than one is listed
            if self.combo_names.count() > 1:
                self.combo_names.setCurrentIndex(1)
###############################################################################################
class w_anim(QWidget):
    """Tab page driving animation imports (*.dae etc. -> *.h3da).

    Owns the clip-editor dialog and a JSON "clips" side file that lives
    next to the input animation; supports auto re-import on file change.
    """
    def __init__(self, parent):
        super(w_anim, self).__init__(parent)
        global prefs
        # last-used directories for the open/save dialogs
        self.infiledir = ""
        self.outfiledir = ""
        self.init_ui()
        self.dlg_clipedit = clipedit.qClipEditDlg(self)
        self.watcher = QFileSystemWatcher(self)
        self.watcher.fileChanged.connect(self.monitor_onfilechange)
        # True while Auto mode is active: suppress modal error boxes
        self.quiet_mode = False
    def init_ui(self):
        """Create and lay out the tab's widgets and wire their signals."""
        layout = QFormLayout(self)
        self.setLayout(layout)
        # make controls
        self.edit_infilepath = QLineEdit(self)
        self.edit_infilepath.setReadOnly(True)
        btn_browse_infile = QPushButton("Browse", self)
        btn_browse_infile.setFixedWidth(60)
        self.edit_outfilepath = QLineEdit(self)
        self.edit_outfilepath.setReadOnly(True)
        btn_browse_outfile = QPushButton("Browse", self)
        btn_browse_outfile.setFixedWidth(60)
        btn_import = QPushButton("Import", self)
        self.edit_fps = QLineEdit("30", self)
        self.edit_fps.setMaximumWidth(40)
        btn_clipedit = QPushButton('Edit clips', self)
        # add to layout
        layout_infile = QHBoxLayout()
        layout_infile.addWidget(self.edit_infilepath)
        layout_infile.addWidget(btn_browse_infile)
        layout.addRow("Input file:", layout_infile)
        layout_outfile = QHBoxLayout()
        layout_outfile.addWidget(self.edit_outfilepath)
        layout_outfile.addWidget(btn_browse_outfile)
        layout.addRow("Output file:", layout_outfile)
        layout.addRow("Fps:", self.edit_fps)
        layout2 = QHBoxLayout()
        btn_auto = QCheckBox("Auto", self)
        btn_auto.setMaximumWidth(50)
        layout2.addWidget(btn_clipedit)
        layout2.addWidget(btn_import)
        layout2.addWidget(btn_auto)
        layout.addRow(layout2)
        # vars
        self.btn_auto = btn_auto
        self.btn_import = btn_import
        self.ctrls = [btn_browse_infile, btn_browse_outfile, btn_import, self.edit_infilepath,
            self.edit_outfilepath, self.edit_fps, btn_clipedit]
        # events
        btn_browse_infile.clicked.connect(self.browse_infile_clicked)
        btn_browse_outfile.clicked.connect(self.browse_outfile_clicked)
        btn_import.clicked.connect(self.btn_import_clicked)
        btn_clipedit.clicked.connect(self.btn_clipedit_clicked)
        btn_auto.stateChanged.connect(self.btn_auto_checkstate)
    def monitor_onfilechange(self, qfilepath):
        """Watcher callback: re-import when the watched input file changed."""
        filepath = str(qfilepath)
        if filepath == str(self.edit_infilepath.text()):
            self.btn_import_clicked(True)
    def enable_controls(self, enable):
        """Enable/disable all grouped controls at once (used by Auto mode)."""
        for c in self.ctrls:
            c.setEnabled(enable)
    def btn_auto_checkstate(self, state):
        """Toggle Auto mode: lock the UI and (un)register the file watcher."""
        if state == Qt.Checked:
            self.btn_import.setCheckable(True)
            self.btn_import_clicked(True)
            self.btn_import.setChecked(True)
            self.enable_controls(False)
            self.quiet_mode = True
            if len(self.edit_infilepath.text()) > 0:
                self.watcher.addPath(self.edit_infilepath.text())
        else:
            self.quiet_mode = False
            self.btn_import.setChecked(False)
            self.btn_import.setCheckable(False)
            self.enable_controls(True)
            if len(self.edit_infilepath.text()) > 0:
                self.watcher.removePath(self.edit_infilepath.text())
    def btn_clipedit_clicked(self, checked):
        """Import the animation, then open the clip editor on the result."""
        global prefs
        if not engine.initialize(prefs['assetdir'], self.dlg_clipedit.eng_view):
            print 'could not initialize dark-hammer engine'
        else:
            # get model file from the current imported model
            global main_wnd
            # before anything, do the animation import process
            self.btn_import_clicked(True)
            model_file = str(main_wnd.wnds['model'].edit_outfilepath.text())
            anim_file = str(self.edit_outfilepath.text())
            self.dlg_clipedit.load_props(model_file, anim_file, self.in_jsonfile)
            self.dlg_clipedit.exec_()
    def init_clips_jsonfile(self):
        """Locate (or create) the JSON clips file next to the input file."""
        global prefs
        in_filepath = str(self.edit_infilepath.text())
        in_jsonfile = util.make_samefname(in_filepath, in_filepath, 'json')
        if not os.path.isfile(in_jsonfile):
            # create an empty json clips file
            open(in_jsonfile, 'w').write('[{"name":"all"}]')
        self.in_jsonfile = in_jsonfile
    def browse_infile_clicked(self):
        """Pick the animation input; derive default output/clips from it."""
        dlg = QFileDialog(self, "Open animation", self.infiledir, \
            "Animation files (*.dae *.obj *.x *.ase *.ms3d)")
        dlg.setFileMode(QFileDialog.ExistingFile)
        if dlg.exec_():
            files = dlg.selectedFiles()
            self.edit_infilepath.setText(os.path.normpath(str(files[0])))
            self.infiledir = os.path.normpath(str(dlg.directory().path()))
            in_filepath = str(self.edit_infilepath.text())
            out_filepath = str(self.edit_outfilepath.text())
            self.edit_outfilepath.setText(util.make_samefname(out_filepath, in_filepath, "h3da"))
            # try to locate clip (json) file in the same directory and same name as input file,
            # if not found, create an empty
            self.init_clips_jsonfile()
    def browse_outfile_clicked(self):
        """Pick the .h3da output path; must be inside the asset directory."""
        dlg = QFileDialog(self, "Save h3da file", self.outfiledir, \
            "dark-hammer anims (*.h3da)")
        dlg.setFileMode(QFileDialog.AnyFile)
        dlg.setAcceptMode(QFileDialog.AcceptSave)
        if dlg.exec_():
            relative_path = util.get_rel_path(str(dlg.selectedFiles()[0]), prefs['assetdir'])
            if not relative_path:
                QMessageBox.warning(self, 'h3dimport', 'Path must be under asset directory tree')
                return
            self.edit_outfilepath.setText(add_extension(relative_path, "h3da"))
            self.outfiledir = os.path.normpath(str(dlg.directory().path()))
    def btn_import_clicked(self, checked):
        """Run h3dimport (-a) on the animation with the clips file and fps."""
        global prefs
        # NOTE(review): self.in_jsonfile is only assigned by
        # init_clips_jsonfile(); triggering an import before an input file
        # or config was loaded would raise AttributeError -- confirm flow
        args = [prefs['binpath'],
            "-a", str(self.edit_infilepath.text()),
            "-o", os.path.normcase(prefs['assetdir'] + '/' + str(self.edit_outfilepath.text())),
            "-fps", str(self.edit_fps.text()),
            '-clips', self.in_jsonfile]
        if prefs['verbose']:
            args.extend(["-v"])
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        r = subprocess.call(args)
        QApplication.restoreOverrideCursor()
        # NOTE(review): assumes h3dimport signals failure with exit code -1
        if r == -1 and not self.quiet_mode:
            QMessageBox.critical(self, "h3dimport", """Failed to import file, see the"""\
                """ terminal for more info""")
    def save_config(self, cfg):
        """Write this tab's state into the [anim] section of cfg."""
        if not "anim" in cfg.sections():
            cfg.add_section("anim")
        cfg.set("anim", "in_filepath", str(self.edit_infilepath.text()))
        cfg.set("anim", "out_filepath", str(self.edit_outfilepath.text()))
        cfg.set("anim", "in_filedir", str(self.infiledir))
        cfg.set("anim", "out_filedir", str(self.outfiledir))
        cfg.set("anim", "fps", str(self.edit_fps.text()))
    def load_config(self, cfg):
        """Restore this tab's state from the [anim] section of cfg."""
        if not "anim" in cfg.sections():
            return
        self.edit_infilepath.setText(cfg.get("anim", "in_filepath"))
        self.edit_outfilepath.setText(cfg.get("anim", "out_filepath"))
        self.infiledir = cfg.get("anim", "in_filedir")
        self.outfiledir = cfg.get("anim", "out_filedir")
        self.edit_fps.setText(cfg.get("anim", "fps"))
        self.init_clips_jsonfile()
###############################################################################################
class w_model(QWidget):
    """Tab page driving model imports (*.dae etc. -> *.h3dm).

    Tracks the textures referenced by the selected model's materials and,
    in "Auto" mode, re-imports the model or individual textures whenever
    they change on disk.
    """
    def __init__(self, parent):
        super(w_model, self).__init__(parent)
        # last-used directories for the open/save/texture dialogs
        self.infile_dir = ""
        self.outfile_dir = ""
        self.texdir_dir = ""
        self.watcher = QFileSystemWatcher(self)
        self.watcher.fileChanged.connect(self.monitor_onfilechange)
        # True while Auto mode is active: suppress modal error boxes
        self.quiet_mode = False
        # widgets enabled/disabled as a group (see enable_controls)
        self.ctrls = []
        # texture filepath -> h3dimport texture type string (e.g. "diffuse-tex")
        self.textures = {}
        self.init_ui()
    def init_ui(self):
        """Create and lay out the tab's widgets and wire their signals."""
        layout = QFormLayout(self)
        self.setLayout(layout)
        self.edit_infilepath = QLineEdit(self)
        self.edit_infilepath.setReadOnly(True)
        self.combo_names = QComboBox(self)
        self.combo_occ = QComboBox(self)
        self.edit_outfilepath = QLineEdit(self)
        self.edit_outfilepath.setReadOnly(True)
        self.edit_texdir = QLineEdit(self)
        self.edit_texdir.setReadOnly(True)
        self.check_calctng = QCheckBox(self)
        self.check_fastcompress = QCheckBox(self)
        self.check_dxt3 = QCheckBox(self)
        self.edit_scale = QLineEdit(self)
        self.edit_scale.setMaximumWidth(40)
        self.edit_scale.setValidator(QDoubleValidator())
        btn_browse_infile = QPushButton("Browse", self)
        btn_browse_outfile = QPushButton("Browse", self)
        btn_choose_texdir = QPushButton("Choose", self)
        btn_browse_infile.setFixedWidth(60)
        btn_browse_outfile.setFixedWidth(60)
        btn_choose_texdir.setFixedWidth(60)
        layout_infile = QHBoxLayout()
        layout_infile.addWidget(self.edit_infilepath)
        layout_infile.addWidget(btn_browse_infile)
        layout_outfile = QHBoxLayout()
        layout_outfile.addWidget(self.edit_outfilepath)
        layout_outfile.addWidget(btn_browse_outfile)
        layout_texdir = QHBoxLayout()
        layout_texdir.addWidget(self.edit_texdir)
        layout_texdir.addWidget(btn_choose_texdir)
        btn_browse_infile.clicked.connect(self.browse_infile_clicked)
        btn_browse_outfile.clicked.connect(self.browse_outfile_clicked)
        btn_choose_texdir.clicked.connect(self.choose_texdir_clicked)
        # guard flag: suppresses cmbo_names_changed while the combo is refilled
        self.cmbo_names_update = True
        self.combo_names.currentIndexChanged.connect(self.cmbo_names_changed)
        btn_prev = QPushButton('Preview', self)
        btn_prev.clicked.connect(self.btn_prev_clicked)
        layout_import = QHBoxLayout()
        btn_auto = QCheckBox("Auto", self)
        btn_auto.stateChanged.connect(self.btn_auto_checkstate)
        btn_auto.setMaximumWidth(50)
        self.btn_auto = btn_auto
        btn_import = QPushButton("Import", self)
        btn_import.clicked.connect(self.import_clicked)
        layout_import.addWidget(btn_prev)
        layout_import.addWidget(btn_import)
        layout_import.addWidget(btn_auto)
        self.btn_import = btn_import
        layout.addRow("Input file:", layout_infile)
        layout.addRow("Model name:", self.combo_names)
        layout.addRow("Occluder:", self.combo_occ)
        layout.addRow("Output file:", layout_outfile)
        layout.addRow("Texture dir:", layout_texdir)
        layout.addRow("Scale:", self.edit_scale)
        layout.addRow("Calculate tangents:", self.check_calctng)
        layout.addRow("Fast texture compress:", self.check_fastcompress)
        layout.addRow("Force DXT3 for alpha:", self.check_dxt3)
        layout.addRow(layout_import)
        # add main controls to array for group enable/disable
        self.ctrls = [btn_browse_infile, self.edit_infilepath, self.combo_names, self.combo_occ,
                      self.edit_outfilepath, self.edit_texdir, self.check_calctng, self.check_dxt3,
                      self.check_fastcompress, btn_browse_outfile, btn_choose_texdir, btn_import]
        self.dlg_prev = modelprev.qModelPrev(self)
    def btn_prev_clicked(self, checked):
        """Import the model, then open it in the engine preview dialog."""
        global prefs
        if not engine.initialize(prefs['assetdir'], self.dlg_prev.eng_view):
            print('could not initialize dark-hammer engine')
        else:
            # re-import first so the preview shows the latest data
            self.import_clicked(True)
            model_file = str(self.edit_outfilepath.text())
            self.dlg_prev.load_props(model_file)
            self.dlg_prev.exec_()
    def import_texture(self, tex_filepath, tex_type):
        """Re-import a single texture file with h3dimport.

        tex_filepath -- source texture path (a key of self.textures).
        tex_type     -- texture type switch value (a value of self.textures).
        """
        global prefs
        texdir = os.path.normcase(prefs['assetdir'] + '/' + str(self.edit_texdir.text()))
        fastcompress = self.check_fastcompress.isChecked()
        forcedxt3 = self.check_dxt3.isChecked()
        args = [prefs['binpath']]
        if tex_filepath != "": args.extend(["-t", tex_filepath])
        if texdir != "": args.extend(["-tdir", texdir])
        if fastcompress: args.extend(["-tfast"])
        if forcedxt3: args.extend(["-tdxt3"])
        args.extend(["-ttype", str(tex_type)])
        # call h3dimport command
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        r = subprocess.call(args)
        QApplication.restoreOverrideCursor()
        # NOTE(review): assumes h3dimport signals failure with exit code -1
        if r == -1 and not self.quiet_mode:
            QMessageBox.critical(self, "h3dimport", """Failed to import file, see the"""
                """ terminal for more info""")
        # re-register the texture paths with the watcher (skip when empty:
        # QFileSystemWatcher warns on an empty path list)
        if self.textures:
            self.watcher.addPaths(self.textures.keys())
    def cmbo_names_changed(self, idx):
        """Refresh the material/texture map when another model is selected."""
        global prefs
        if len(prefs['binpath']) == 0 or len(self.edit_infilepath.text()) == 0:
            return
        if not self.cmbo_names_update:
            return  # combo is being refilled programmatically
        model_filepath = str(self.edit_infilepath.text())
        name = str(self.combo_names.itemText(self.combo_names.currentIndex()))
        args = [prefs['binpath'], "-lm", "-m", model_filepath, "-n", name]
        try:
            r = subprocess.check_output(args)
        except subprocess.CalledProcessError as ce:
            print(ce.output)
            QMessageBox.critical(self, "h3dimport", "h3dimport raised error!")
        else:
            self.textures = {}
            objs = str(r).replace("\r", "").split("\n")
            for ln in objs:
                if len(ln) > 0 and ("Error:" not in ln) and ("material:" in ln):
                    # take everything after the "material: " prefix.
                    # (the old str.lstrip("material: ") was wrong: lstrip
                    # strips a *character set*, mangling names that start
                    # with any of those letters)
                    self.read_material_textures(ln.split("material: ", 1)[-1])
    def monitor_reg_files(self):
        """Register the model file and all known textures with the watcher."""
        if len(self.edit_infilepath.text()) > 0:
            self.watcher.addPath(self.edit_infilepath.text())
        if len(self.textures) > 0:
            self.watcher.addPaths(self.textures.keys())
    def monitor_unreg_files(self):
        """Unregister the model file and all known textures from the watcher."""
        if len(self.edit_infilepath.text()) > 0:
            self.watcher.removePath(self.edit_infilepath.text())
        if len(self.textures) > 0:
            self.watcher.removePaths(self.textures.keys())
    def monitor_onfilechange(self, qfilepath):
        """Watcher callback: re-import the model or just a changed texture."""
        filepath = str(qfilepath)
        if filepath == str(self.edit_infilepath.text()):
            self.import_clicked(True)
        elif filepath in self.textures:
            self.import_texture(filepath, self.textures[filepath])
    def enable_controls(self, enable):
        """Enable/disable all grouped controls at once (used by Auto mode)."""
        for c in self.ctrls:
            c.setEnabled(enable)
    def btn_auto_checkstate(self, state):
        """Toggle Auto mode: lock the UI and (un)register watched files."""
        if state == Qt.Checked:
            self.btn_import.setCheckable(True)
            self.import_clicked(True)
            self.btn_import.setChecked(True)
            self.monitor_reg_files()
            self.enable_controls(False)
            self.quiet_mode = True
        else:
            self.quiet_mode = False
            self.btn_import.setChecked(False)
            self.btn_import.setCheckable(False)
            self.monitor_unreg_files()
            self.enable_controls(True)
    def read_material_textures(self, jstr):
        """Collect texture-file -> type mappings from a material JSON blob."""
        jdata = json.loads(jstr)
        for (k, v) in jdata.items():
            if "-tex" in k:
                v = os.path.normpath(v)
                if v not in self.textures:
                    self.textures[v] = k  # key=texture-file, value=type-string
    def enum_models(self):
        """Query h3dimport (-l -lm) for model names and material textures."""
        global prefs
        # block cmbo_names_changed while the combos are refilled
        self.cmbo_names_update = False
        self.combo_names.clear()
        self.combo_occ.clear()
        self.combo_occ.addItem("[None]")
        if len(prefs['binpath']) == 0 or len(self.edit_infilepath.text()) == 0:
            # restore the guard flag (the old code leaked it as False here,
            # permanently disabling cmbo_names_changed)
            self.cmbo_names_update = True
            return
        model_filepath = str(self.edit_infilepath.text())
        args = [prefs['binpath'], "-l", "-lm", "-m", model_filepath]
        try:
            r = subprocess.check_output(args)
        except subprocess.CalledProcessError as ce:
            print(ce.output)
            QMessageBox.critical(self, "h3dimport", "h3dimport raised error!")
            self.cmbo_names_update = True
        else:
            self.textures = {}
            objs = str(r).replace("\r", "").split("\n")
            for ln in objs:
                if len(ln) > 0 and ("Error:" not in ln):
                    if "model:" in ln:
                        # take everything after the "model: " prefix (the old
                        # str.lstrip("model: ") stripped a character set and
                        # ate leading letters of model names)
                        name = ln.split("model: ", 1)[-1]
                        self.combo_names.addItem(name)
                        self.combo_occ.addItem(name)
                    elif "material:" in ln:
                        self.read_material_textures(ln.split("material: ", 1)[-1])
            self.cmbo_names_update = True
    def browse_infile_clicked(self, checked):
        """Pick the model input file and refresh all dependent fields."""
        global prefs
        dlg = QFileDialog(self, "Open model", self.infile_dir,
                          "Models (*.dae *.obj *.x *.ase *.ms3d *.fbx)")
        dlg.setFileMode(QFileDialog.ExistingFile)
        if dlg.exec_():
            filepath = os.path.normcase(str(dlg.selectedFiles()[0]))
            self.edit_infilepath.setText(filepath)
            self.infile_dir = os.path.abspath(str(dlg.directory().path()))
            self.enum_models()
            # automatically set the name of the output file to the name of the input file
            in_filepath = str(self.edit_infilepath.text())
            out_filepath = str(self.edit_outfilepath.text())
            self.edit_outfilepath.setText(util.make_samefname(out_filepath, in_filepath, "h3dm"))
    def browse_outfile_clicked(self, checked):
        """Pick the .h3dm output path; must be inside the asset directory."""
        dlg = QFileDialog(self, "Save h3dm file", self.outfile_dir,
                          "dark-hammer models (*.h3dm)")
        dlg.setFileMode(QFileDialog.AnyFile)
        dlg.setAcceptMode(QFileDialog.AcceptSave)
        if dlg.exec_():
            relative_path = util.get_rel_path(str(dlg.selectedFiles()[0]), prefs['assetdir'])
            if not relative_path:
                QMessageBox.warning(self, 'h3dimport',
                                    'Path must be under asset directory tree')
                return
            self.edit_outfilepath.setText(add_extension(relative_path, "h3dm"))
            self.outfile_dir = os.path.normpath(str(dlg.directory().path()))
    def choose_texdir_clicked(self, checked):
        """Pick the texture output directory (relative to the asset root)."""
        dlg = QFileDialog(self, "Choose texture output directory", self.texdir_dir)
        dlg.setFileMode(QFileDialog.Directory)
        if dlg.exec_():
            relative_path = util.get_rel_path(str(dlg.selectedFiles()[0]), prefs['assetdir'])
            if not relative_path:
                QMessageBox.warning(self, 'h3dimport',
                                    'Path must be under asset directory tree')
                return
            self.edit_texdir.setText(relative_path)
    def load_config(self, cfg):
        """Restore this tab's state from the [model] section of cfg."""
        if not "model" in cfg.sections():
            return
        self.edit_infilepath.setText(cfg.get("model", "in_filepath"))
        self.edit_outfilepath.setText(cfg.get("model", "out_filepath"))
        self.edit_texdir.setText(cfg.get("model", "texdir"))
        self.check_calctng.setChecked(cfg.getboolean("model", "calctng"))
        self.check_fastcompress.setChecked(cfg.getboolean("model", "fastcompress"))
        self.check_dxt3.setChecked(cfg.getboolean("model", "forcedxt3"))
        self.infile_dir = cfg.get("model", "infile_dir")
        self.outfile_dir = cfg.get("model", "outfile_dir")
        self.texdir_dir = cfg.get("model", "texdir")
        self.edit_scale.setText(cfg.get("model", "scale"))
    def save_config(self, cfg):
        """Write this tab's state into the [model] section of cfg."""
        if not "model" in cfg.sections():
            cfg.add_section("model")
        cfg.set("model", "in_filepath", str(self.edit_infilepath.text()))
        cfg.set("model", "out_filepath", str(self.edit_outfilepath.text()))
        cfg.set("model", "texdir", str(self.edit_texdir.text()))
        cfg.set("model", "calctng", str(self.check_calctng.isChecked()))
        cfg.set("model", "fastcompress", str(self.check_fastcompress.isChecked()))
        cfg.set("model", "forcedxt3", str(self.check_dxt3.isChecked()))
        cfg.set("model", "infile_dir", str(self.infile_dir))
        cfg.set("model", "outfile_dir", str(self.outfile_dir))
        cfg.set("model", "scale", str(self.edit_scale.text()))
    def import_clicked(self, checked):
        """Run h3dimport on the selected model with all current options."""
        global prefs
        name = str(self.combo_names.itemText(self.combo_names.currentIndex()))
        occname = ""
        if self.combo_occ.currentIndex() != 0:
            # index 0 is the "[None]" placeholder
            occname = str(self.combo_occ.itemText(self.combo_occ.currentIndex()))
        infilepath = str(self.edit_infilepath.text())
        outfilepath = os.path.normcase(prefs['assetdir'] + '/' + str(self.edit_outfilepath.text()))
        texdir = os.path.normcase(prefs['assetdir'] + '/' + str(self.edit_texdir.text()))
        texdir_alias = util.valid_engine_path(str(self.edit_texdir.text()))
        calctng = self.check_calctng.isChecked()
        fastcompress = self.check_fastcompress.isChecked()
        forcedxt3 = self.check_dxt3.isChecked()
        # QString.toFloat() returns (value, ok); invalid input yields 0.0,
        # which falls through to the neutral scale of 1
        scale = self.edit_scale.text().toFloat()[0]
        if scale <= 0: scale = 1
        args = [prefs['binpath']]
        if prefs['verbose']: args.extend(["-v"])
        if name != "": args.extend(["-n", name])
        if infilepath != "": args.extend(["-m", infilepath])
        if outfilepath != "": args.extend(["-o", outfilepath])
        if texdir != "": args.extend(["-tdir", texdir])
        if texdir_alias != "": args.extend(["-talias", texdir_alias])
        if calctng: args.extend(["-calctng"])
        if fastcompress: args.extend(["-tfast"])
        if forcedxt3: args.extend(["-tdxt3"])
        if len(occname) > 0: args.extend(["-occ", occname])
        # auto (quiet) mode skips bulk texture conversion; textures are
        # watched and converted individually by import_texture
        if self.quiet_mode: args.extend(["-toff"])
        if scale != 1: args.extend(["-scale", str(scale)])
        print(args)
        # call h3dimport command
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        r = subprocess.call(args)
        QApplication.restoreOverrideCursor()
        # NOTE(review): assumes h3dimport signals failure with exit code -1
        if r == -1 and not self.quiet_mode:
            QMessageBox.critical(self, "h3dimport", """Failed to import file, see the"""
                """ terminal for more info""")
###############################################################################################
class w_main(QMainWindow):
def __init__(self):
super(w_main, self).__init__()
self.init_ui()
def __del__(self):
engine.release()
def init_ui(self):
app_icon_path = os.path.normcase(util.get_exec_dir(__file__) + '/img/icon.ico')
self.setMinimumWidth(600)
self.setWindowTitle("darkHAMMER: h3dimport")
self.setWindowFlags(self.windowFlags() & (~Qt.WindowMaximizeButtonHint))
self.setWindowIcon(QIcon(app_icon_path))
main_menu = QMenuBar(self)
main_menu.addAction("Preferences", self.pref_clicked)
mnu_help = QMenu("Help", self)
mnu_help.addAction("About", self.mnu_help_click)
main_menu.addMenu(mnu_help)
self.setMenuBar(main_menu)
self.main_tab = QTabWidget(self)
self.setCentralWidget(self.main_tab)
# child tabs
self.wnds = dict()
model_wnd = w_model(self)
self.wnds["model"] = model_wnd
self.main_tab.addTab(model_wnd, "Model")
anim_wnd = w_anim(self)
self.wnds["anim"] = anim_wnd
self.main_tab.addTab(anim_wnd, "Anim")
phx_wnd = w_phx(self)
self.wnds["phx"] = phx_wnd
self.main_tab.addTab(phx_wnd, "Physics")
self.show()
self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.main_tab.currentChanged.connect(self.tab_change)
# load ui state
self.load_config()
def tab_change(self, index):
# turn off auto import on all panels
for (wnd_name, wnd) in self.wnds.iteritems():
if wnd_name == "model":
wnd.btn_auto.setChecked(False)
elif wnd_name == "phx":
wnd.btn_auto.setChecked(False)
if index == 0:
self.wnds["model"].enum_models()
elif index == 2:
self.wnds["phx"].enum_phxobjects()
def mnu_help_click(self):
wnd_about = about.qAboutDlg(self, "h3dimport-gui", "GUI front-end for h3dimport tool")
wnd_about.exec_()
def pref_clicked(self):
pref_dlg = w_prefs(self)
pref_dlg.load_config(self.config)
if pref_dlg.exec_():
pref_dlg.save_config(self.config)
def load_prefs(self, cfg):
global prefs
if 'general' in cfg.sections():
prefs['binpath'] = cfg.get('general', 'binary_path')
prefs['assetdir'] = cfg.get('general', 'asset_dir')
prefs['verbose'] = cfg.getboolean('general', 'verbose')
else:
prefs['binpath'] = os.path.abspath(util.get_exec_dir(__file__) + '/../../bin/h3dimport')
prefs['assetdir'] = os.path.abspath(util.get_exec_dir(__file__) + '/../..')
prefs['verbose'] = False
if platform.system() == "Windows":
prefs['binpath'] += ".exe"
def save_prefs(self, cfg):
if not 'general' in cfg.sections():
cfg.add_section('general')
cfg.set('general', 'binary_path', prefs['binpath'])
cfg.set('general', 'verbose', str(prefs['verbose']))
cfg.set('general', 'asset_dir', prefs['assetdir'])
def load_config(self):
try:
self.config = ConfigParser.SafeConfigParser()
f = self.config.read(os.path.normpath(util.get_exec_dir(__file__) + \
"/h3dimport-gui.ini"))
if len(f) == 0:
raise BaseException()
except:
print "Warning: could not load program config, reset to defaults"
self.load_prefs(self.config)
else:
self.load_prefs(self.config)
if "ui" in self.config.sections():
tab_idx = int(self.config.get("ui", "tab_idx"))
else:
tab_idx = 0
self.wnds["model"].load_config(self.config)
self.wnds["anim"].load_config(self.config)
self.wnds["phx"].load_config(self.config)
self.main_tab.setCurrentIndex(tab_idx)
self.tab_change(tab_idx)
def save_config(self):
# save config
self.save_prefs(self.config)
if not "ui" in self.config.sections():
self.config.add_section("ui")
self.config.set("ui", "tab_idx", str(self.main_tab.currentIndex()))
self.wnds["model"].save_config(self.config)
self.wnds["anim"].save_config(self.config)
self.wnds["phx"].save_config(self.config)
self.config.write(open(os.path.normpath(util.get_exec_dir(__file__) + '/h3dimport-gui.ini'),
'w'))
    def closeEvent(self, e):
        """Qt close hook: save the configuration before the main window closes."""
        self.save_config()
###############################################################################################
def main():
    """Application entry point: create the Qt app and main window, run the loop."""
    global main_wnd
    app = QApplication(sys.argv)
    main_wnd = w_main()
    ret = app.exec_()
    # Drop the window before exiting so its Qt objects are torn down first.
    del main_wnd
    sys.exit(ret)
# Run the GUI only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
{
"content_hash": "be625f2a7c3760e276845e263de12a00",
"timestamp": "",
"source": "github",
"line_count": 956,
"max_line_length": 100,
"avg_line_length": 40.65481171548117,
"alnum_prop": 0.5885092368651264,
"repo_name": "UPO33/darkhammer",
"id": "57c44200994f6301180438ae597d2428a0a3b0d9",
"size": "38866",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/h3dimport-gui/h3dimport-gui.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "119"
},
{
"name": "C",
"bytes": "1509849"
},
{
"name": "C++",
"bytes": "682318"
},
{
"name": "CSS",
"bytes": "91"
},
{
"name": "GLSL",
"bytes": "77443"
},
{
"name": "HTML",
"bytes": "183677"
},
{
"name": "JavaScript",
"bytes": "254826"
},
{
"name": "Lua",
"bytes": "1028"
},
{
"name": "Makefile",
"bytes": "234"
},
{
"name": "Objective-C",
"bytes": "9548"
},
{
"name": "Python",
"bytes": "199409"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_command
short_description: Run TMSH and BASH commands on F5 devices
description:
  - Sends a TMSH or BASH command to a BIG-IP node and returns the results
read from the device. This module includes an argument that will cause
the module to wait for a specific condition before returning or timing
out if the condition is not met.
- This module is B(not) idempotent, nor will it ever be. It is intended as
a stop-gap measure to satisfy automation requirements until such a time as
a real module has been developed to configure in the way you need.
- If you are using this module, you should probably also be filing an issue
to have a B(real) module created for your needs.
version_added: 2.4
options:
commands:
description:
- The commands to send to the remote BIG-IP device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
        the number of retries has expired.
- Only C(tmsh) commands are supported. If you are piping or adding additional
        logic that is outside of C(tmsh) (such as grep'ing, awk'ing or other shell
        related things that are not C(tmsh)), this behavior is not supported.
required: True
wait_for:
description:
- Specifies what to evaluate from the output of the command
and what conditionals to apply. This argument will cause
the task to wait for a particular conditional to be true
before moving forward. If the conditional is not true
by the configured retries, the task fails. See examples.
aliases: ['waitfor']
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the I(wait_for) must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
choices:
- any
- all
default: all
retries:
description:
      - Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the I(wait_for)
conditionals.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
        conditional, the interval indicates how long to wait before
trying the command again.
default: 1
transport:
description:
- Configures the transport connection to use when connecting to the
remote device. The transport argument supports connectivity to the
device over cli (ssh) or rest.
required: true
choices:
- rest
- cli
default: rest
version_added: 2.5
warn:
description:
- Whether the module should raise warnings related to command idempotency
or not.
- Note that the F5 Ansible developers specifically leave this on to make you
aware that your usage of this module may be better served by official F5
Ansible modules. This module should always be used as a last resort.
default: True
type: bool
version_added: 2.6
chdir:
description:
- Change into this directory before running the command.
version_added: 2.6
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: run show version on remote devices
bigip_command:
commands: show sys version
provider:
server: lb.mydomain.com
password: secret
user: admin
delegate_to: localhost
- name: run show version and check to see if output contains BIG-IP
bigip_command:
commands: show sys version
wait_for: result[0] contains BIG-IP
provider:
server: lb.mydomain.com
password: secret
user: admin
register: result
delegate_to: localhost
- name: run multiple commands on remote nodes
bigip_command:
commands:
- show sys version
- list ltm virtual
provider:
server: lb.mydomain.com
password: secret
user: admin
delegate_to: localhost
- name: run multiple commands and evaluate the output
bigip_command:
commands:
- show sys version
- list ltm virtual
wait_for:
- result[0] contains BIG-IP
- result[1] contains my-vs
provider:
server: lb.mydomain.com
password: secret
user: admin
register: result
delegate_to: localhost
- name: tmsh prefixes will automatically be handled
bigip_command:
commands:
- show sys version
- tmsh list ltm virtual
provider:
server: lb.mydomain.com
password: secret
user: admin
delegate_to: localhost
- name: Delete all LTM nodes in Partition1, assuming no dependencies exist
bigip_command:
commands:
- delete ltm node all
chdir: Partition1
provider:
server: lb.mydomain.com
password: secret
user: admin
delegate_to: localhost
'''
RETURN = r'''
stdout:
description: The set of responses from the commands.
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list.
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed.
returned: failed
type: list
sample: ['...', '...']
warn:
description: Whether or not to raise warnings about modification commands.
returned: changed
type: bool
sample: True
'''
import re
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.parsing import FailedConditionsError
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.network.common.utils import ComplexList
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.six import string_types
from collections import deque
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import is_cli
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import is_cli
try:
from ansible.module_utils.network.f5.common import run_commands
HAS_CLI_TRANSPORT = True
except ImportError:
HAS_CLI_TRANSPORT = False
class NoChangeReporter(object):
    """Detects command output indicating that nothing changed on the device."""

    # Patterns BIG-IP prints when a modifying command was effectively a no-op.
    stdout_re = [
        # A general error when a resource already exists
        re.compile(r"The requested.*already exists"),
        # Returned when creating a duplicate cli alias
        re.compile(r"Data Input Error: shared.*already exists"),
    ]

    def find_no_change(self, responses):
        """Search command responses for output that signals a no-op.

        This method borrows heavily from Ansible's ``_find_prompt`` method
        defined in the ``lib/ansible/plugins/connection/network_cli.py::Connection``
        class.

        Arguments:
            responses (list of str): The output of each executed command.
                (The original docstring wrongly documented a single string
                and inverted the return semantics.)

        Returns:
            bool: True when any response matches a known "no change"
                pattern. False otherwise.
        """
        return any(
            regex.search(response)
            for response in responses
            for regex in self.stdout_re
        )
class Parameters(AnsibleF5Parameters):
    """Module parameters plus the derived REST/CLI command forms."""

    # Keys copied into the module result by to_return().
    returnables = ['stdout', 'stdout_lines', 'warnings', 'executed_commands']

    def to_return(self):
        """Build the result dict from the returnable attributes.

        NOTE(review): the broad ``except Exception`` silently returns
        whatever was collected before the failure -- confirm intentional.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
            return result
        except Exception:
            return result

    @property
    def raw_commands(self):
        """The user-supplied commands, always as a list (possibly empty)."""
        if self._values['commands'] is None:
            return []
        if isinstance(self._values['commands'], string_types):
            # A single string becomes a one-element list.
            result = [self._values['commands']]
        else:
            result = self._values['commands']
        return result

    def convert_commands(self, commands):
        """Split each command into command and pipeline ('|' tail) parts.

        Single quotes are escaped here because the REST transport later
        wraps the command in quotes for bash.
        """
        result = []
        for command in commands:
            tmp = dict(
                command='',
                pipeline=''
            )
            command = command.replace("'", "\\'")
            # Split only on the first '|'; everything after is the pipeline.
            pipeline = command.split('|', 1)
            tmp['command'] = pipeline[0]
            try:
                tmp['pipeline'] = pipeline[1]
            except IndexError:
                # No pipeline portion present.
                pass
            result.append(tmp)
        return result

    def convert_commands_cli(self, commands):
        """Like convert_commands(), but without quote escaping (CLI/SSH path)."""
        result = []
        for command in commands:
            tmp = dict(
                command='',
                pipeline=''
            )
            pipeline = command.split('|', 1)
            tmp['command'] = pipeline[0]
            try:
                tmp['pipeline'] = pipeline[1]
            except IndexError:
                pass
            result.append(tmp)
        return result

    def merge_command_dict(self, command):
        """Re-join command and pipeline, escaping '$' and '"' for REST/bash."""
        if command['pipeline'] != '':
            escape_patterns = r'([$"])'
            command['pipeline'] = re.sub(escape_patterns, r'\\\1', command['pipeline'])
            command['command'] = '{0} | {1}'.format(command['command'], command['pipeline']).strip()

    def merge_command_dict_cli(self, command):
        """Re-join command and pipeline verbatim (CLI transport)."""
        if command['pipeline'] != '':
            command['command'] = '{0} | {1}'.format(command['command'], command['pipeline']).strip()

    @property
    def rest_commands(self):
        """Commands rewritten for REST execution: chdir + tmsh wrap + escaping."""
        # ['list ltm virtual']
        commands = self.normalized_commands
        commands = self.convert_commands(commands)
        if self.chdir:
            # ['cd /Common; list ltm virtual']
            for command in commands:
                self.addon_chdir(command)
        # ['tmsh -c "cd /Common; list ltm virtual"']
        for command in commands:
            self.addon_tmsh(command)
        for command in commands:
            self.merge_command_dict(command)
        result = [x['command'] for x in commands]
        return result

    @property
    def cli_commands(self):
        """Commands rewritten for CLI execution; tmsh wrap only when needed."""
        # ['list ltm virtual']
        commands = self.normalized_commands
        commands = self.convert_commands_cli(commands)
        if self.chdir:
            # ['cd /Common; list ltm virtual']
            for command in commands:
                self.addon_chdir(command)
        if not self.is_tmsh:
            # ['tmsh -c "cd /Common; list ltm virtual"']
            for command in commands:
                self.addon_tmsh_cli(command)
        for command in commands:
            self.merge_command_dict_cli(command)
        result = [x['command'] for x in commands]
        return result

    @property
    def normalized_commands(self):
        """The normalized commands as a deque, or None when not yet set."""
        if self._values['normalized_commands'] is None:
            return None
        return deque(self._values['normalized_commands'])

    @property
    def chdir(self):
        """The target partition path, always with a leading slash."""
        if self._values['chdir'] is None:
            return None
        if self._values['chdir'].startswith('/'):
            return self._values['chdir']
        return '/{0}'.format(self._values['chdir'])

    @property
    def user_commands(self):
        # NOTE(review): _ensure_tmsh_prefix is not defined in this class;
        # presumably inherited from AnsibleF5Parameters -- verify, this
        # property appears unused in this file.
        commands = self.raw_commands
        return map(self._ensure_tmsh_prefix, commands)

    @property
    def wait_for(self):
        """The wait_for conditionals, or an empty list when unset."""
        return self._values['wait_for'] or list()

    def addon_tmsh(self, command):
        """Wrap a REST command in ``tmsh -c "..."`` with heavy escaping.

        Raises:
            Exception: when the command contains unbalanced double quotes.
        """
        escape_patterns = r'([$"])'
        if command['command'].count('"') % 2 != 0:
            raise Exception('Double quotes are unbalanced')
        # Triple-escape so the quoting survives both JSON and bash -c.
        command['command'] = re.sub(escape_patterns, r'\\\\\\\1', command['command'])
        command['command'] = 'tmsh -c \\\"{0}\\\"'.format(command['command'])

    def addon_tmsh_cli(self, command):
        """Wrap a CLI command in ``tmsh -c "..."``.

        Raises:
            Exception: when the command contains unbalanced double quotes.
        """
        if command['command'].count('"') % 2 != 0:
            raise Exception('Double quotes are unbalanced')
        command['command'] = 'tmsh -c "{0}"'.format(command['command'])

    def addon_chdir(self, command):
        """Prefix the command with a ``cd`` into the target partition."""
        command['command'] = "cd {0}; {1}".format(self.chdir, command['command'])
class BaseManager(object):
    """Shared execution logic for both the CLI and REST managers."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = Parameters(params=self.module.params)
        self.want.update({'module': self.module})
        self.changes = Parameters(module=self.module)
        # Command prefixes considered read-only (idempotent).
        self.valid_configs = [
            'list', 'show', 'modify cli preference pager disabled'
        ]
        # Prefixes that indicate a command changes device state.
        self.changed_command_prefixes = ('modify', 'create', 'delete')
        self.warnings = list()

    def _to_lines(self, stdout):
        """Split each string response into a list of its lines."""
        lines = list()
        for item in stdout:
            if isinstance(item, string_types):
                item = item.split('\n')
            lines.append(item)
        return lines

    def exec_module(self):
        """Run the commands and assemble the module result dict."""
        result = dict()
        changed = self.execute()
        result.update(**self.changes.to_return())
        result.update(dict(changed=changed))
        self._announce_warnings(result)
        return result

    def _announce_warnings(self, result):
        """Move collected warnings out of the result and into Ansible warnings."""
        warnings = result.pop('warnings', [])
        for warning in warnings:
            self.module.warn(warning)

    def notify_non_idempotent_commands(self, commands):
        """Warn for each non-read-only command.

        NOTE(review): the early ``return`` exits at the first read-only
        command, so commands listed after it are never inspected --
        confirm this short-circuit is intended.
        """
        for index, item in enumerate(commands):
            if any(item.startswith(x) for x in self.valid_configs):
                return
            else:
                self.warnings.append(
                    'Using "write" commands is not idempotent. You should use '
                    'a module that is specifically made for that. If such a '
                    'module does not exist, then please file a bug. The command '
                    'in question is "{0}..."'.format(item[0:40])
                )

    @staticmethod
    def normalize_commands(raw_commands):
        """Strip a leading ``tmsh `` prefix from each command string."""
        if not raw_commands:
            return None
        result = []
        for command in raw_commands:
            command = command.strip()
            if command[0:5] == 'tmsh ':
                command = command[4:].strip()
            result.append(command)
        return result

    def parse_commands(self):
        """Fold the ``output`` hint into the command strings themselves."""
        results = []
        commands = self._transform_to_complex_commands(self.commands)

        for index, item in enumerate(commands):
            # This needs to be removed so that the ComplexList used in to_commands
            # will work correctly.
            output = item.pop('output', None)

            if output == 'one-line' and 'one-line' not in item['command']:
                item['command'] += ' one-line'
            elif output == 'text' and 'one-line' in item['command']:
                item['command'] = item['command'].replace('one-line', '')

            results.append(item)
        return results

    def execute(self):
        """Run the commands, retrying until the wait_for conditionals pass.

        Returns:
            bool: whether the run is considered to have changed the device
                (None in check mode).

        Raises:
            FailedConditionsError: when conditionals remain unmet after the
                configured number of retries.
        """
        if self.want.normalized_commands:
            result = self.want.normalized_commands
        else:
            result = self.normalize_commands(self.want.raw_commands)
            self.want.update({'normalized_commands': result})
        if not result:
            return False
        self.notify_non_idempotent_commands(self.want.normalized_commands)

        commands = self.parse_commands()
        retries = self.want.retries
        conditionals = [Conditional(c) for c in self.want.wait_for]

        if self.module.check_mode:
            return

        # while/else: the else branch runs only when retries are exhausted
        # without a ``break`` (i.e. some conditionals never matched).
        while retries > 0:
            responses = self._execute(commands)
            self._check_known_errors(responses)
            for item in list(conditionals):
                if item(responses):
                    if self.want.match == 'any':
                        # One match is enough in 'any' mode.
                        conditionals = list()
                        break
                    conditionals.remove(item)
            if not conditionals:
                break

            time.sleep(self.want.interval)
            retries -= 1
        else:
            failed_conditions = [item.raw for item in conditionals]
            errmsg = 'One or more conditional statements have not been satisfied.'
            raise FailedConditionsError(errmsg, failed_conditions)

        stdout_lines = self._to_lines(responses)
        changes = {
            'stdout': responses,
            'stdout_lines': stdout_lines,
            'executed_commands': self.commands
        }
        if self.want.warn:
            changes['warnings'] = self.warnings
        self.changes = Parameters(params=changes, module=self.module)
        return self.determine_change(responses)

    def determine_change(self, responses):
        """Decide the ``changed`` flag from responses and command prefixes."""
        changer = NoChangeReporter()
        if changer.find_no_change(responses):
            # Device reported the resource already existed: treat as no-op.
            return False
        if any(x for x in self.want.normalized_commands if x.startswith(self.changed_command_prefixes)):
            return True
        return False

    def _check_known_errors(self, responses):
        # A regex to match the error IDs used in the F5 v2 logging framework.
        # pattern = r'^[0-9A-Fa-f]+:?\d+?:'
        for resp in responses:
            if 'usage: tmsh' in resp:
                raise F5ModuleError(
                    "tmsh command printed its 'help' message instead of running your command. "
                    "This usually indicates unbalanced quotes."
                )

    def _transform_to_complex_commands(self, commands):
        """Convert plain command strings into {command, output} dicts."""
        spec = dict(
            command=dict(key=True),
            output=dict(
                default='text',
                choices=['text', 'one-line']
            ),
        )
        transform = ComplexList(spec, self.module)
        result = transform(commands)
        return result
class V1Manager(BaseManager):
    """Runs commands over the CLI (SSH) transport."""

    def _execute(self, commands):
        # When the login shell is already tmsh the "tmsh" prefix must be
        # omitted; otherwise it is required.
        if self.want.is_tmsh:
            pager_off = "modify cli preference pager disabled"
        else:
            pager_off = "tmsh modify cli preference pager disabled"
        # Disable paging first so long output is not truncated/interactive.
        self.execute_on_device(dict(command=pager_off))
        return self.execute_on_device(commands)

    @property
    def commands(self):
        """Commands normalized for CLI execution."""
        return self.want.cli_commands

    def is_tmsh(self):
        """Probe whether the remote login shell is tmsh.

        Running ``tmsh -v`` from inside tmsh produces a syntax error,
        which is how the shell type is detected here.
        """
        try:
            self.execute_on_device('tmsh -v')
        except Exception as ex:
            if 'Syntax Error:' in str(ex):
                return True
            raise
        return False

    def execute(self):
        # Record the shell type before the base class builds the commands.
        self.want.update({'is_tmsh': self.is_tmsh()})
        return super(V1Manager, self).execute()

    def execute_on_device(self, commands):
        return run_commands(self.module, commands)
class V2Manager(BaseManager):
    """Supports REST communication with the remote device
    """
    def _execute(self, commands):
        # Disable paging first so long output comes back in one piece.
        command = dict(
            command="tmsh modify cli preference pager disabled"
        )
        self.execute_on_device(command)
        return self.execute_on_device(commands)

    @property
    def commands(self):
        """Commands rewritten/escaped for REST (bash util) execution."""
        return self.want.rest_commands

    def execute_on_device(self, commands):
        """POST each command to /mgmt/tm/util/bash and collect its output.

        Returns:
            list of str: the stripped commandResult of each command.

        Raises:
            F5ModuleError: when a response body cannot be parsed as JSON,
                or when the device returns a code-400 payload.
        """
        responses = []
        uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        for item in to_list(commands):
            try:
                args = dict(
                    command='run',
                    utilCmdArgs='-c "{0}"'.format(item['command'])
                )
                resp = self.client.api.post(uri, json=args)
                response = resp.json()
                if 'commandResult' in response:
                    output = u'{0}'.format(response['commandResult'])
                    responses.append(output.strip())
            except ValueError as ex:
                # resp.json() failed to parse the body.
                raise F5ModuleError(str(ex))
            # A JSON error payload still parses; surface it explicitly.
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
        return responses
class ModuleManager(object):
    """Chooses the transport-specific manager and delegates execution to it."""

    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs
        self.module = kwargs.get('module', None)

    def exec_module(self):
        # Use the CLI manager only when the play asked for cli transport
        # AND the cli transport support could be imported.
        use_cli = is_cli(self.module) and HAS_CLI_TRANSPORT
        manager = self.get_manager('v1' if use_cli else 'v2')
        return manager.exec_module()

    def get_manager(self, type):
        """Instantiate the manager for the given transport generation."""
        managers = {'v1': V1Manager, 'v2': V2Manager}
        if type in managers:
            return managers[type](**self.kwargs)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for bigip_command."""

    def __init__(self):
        self.supports_check_mode = True
        # Module-specific arguments; see DOCUMENTATION for their meaning.
        argument_spec = dict(
            commands=dict(
                type='raw',
                required=True
            ),
            wait_for=dict(
                type='list',
                aliases=['waitfor']
            ),
            match=dict(
                default='all',
                choices=['any', 'all']
            ),
            retries=dict(
                default=10,
                type='int'
            ),
            interval=dict(
                default=1,
                type='int'
            ),
            transport=dict(
                type='str',
                default='rest',
                choices=['cli', 'rest']
            ),
            warn=dict(
                type='bool',
                default='yes'
            ),
            chdir=dict()
        )
        # Start from the common F5 arguments, then layer ours on top.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the AnsibleModule and run the manager."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    try:
        results = ModuleManager(module=module).exec_module()
        module.exit_json(**results)
    except F5ModuleError as ex:
        # Report module errors through Ansible's failure channel.
        module.fail_json(msg=str(ex))
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
{
"content_hash": "eb8ac32062b43bbd351c8bc7cad84a2d",
"timestamp": "",
"source": "github",
"line_count": 714,
"max_line_length": 104,
"avg_line_length": 32.76890756302521,
"alnum_prop": 0.593623114074454,
"repo_name": "SergeyCherepanov/ansible",
"id": "e312eabf504b4ce8029a2dba6b73373f0c30a4e1",
"size": "23574",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/network/f5/bigip_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
}
|
""" S3 Framework Tables
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3HierarchyModel",)
from gluon import *
from ..s3 import *
# =============================================================================
class S3HierarchyModel(S3Model):
    """Model for stored object hierarchies (experimental)."""

    names = ("s3_hierarchy",)

    def model(self):
        """Define the s3_hierarchy table used to cache object hierarchies."""

        # ---------------------------------------------------------------------
        # Stored Object Hierarchy: one row per hierarchical table, holding
        # the serialized hierarchy plus a dirty-flag for invalidation.
        #
        self.define_table("s3_hierarchy",
                          Field("tablename",
                                length=64),
                          Field("dirty", "boolean",
                                default=False),
                          Field("hierarchy", "json"),
                          *s3_timestamp())

        # ---------------------------------------------------------------------
        # No global names to export to s3.*
        #
        return {}

    # -------------------------------------------------------------------------
    def defaults(self):
        """ Safe defaults if module is disabled """

        return {}
# END =========================================================================
|
{
"content_hash": "5d67925edc0c41f5ab736396e496a62b",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 83,
"avg_line_length": 36.484848484848484,
"alnum_prop": 0.5348837209302325,
"repo_name": "flavour/Turkey",
"id": "09e4042fad51bb4844d5e91dcab0fb8ee1ebeaf8",
"size": "2433",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "modules/s3db/s3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2021594"
},
{
"name": "HTML",
"bytes": "1310585"
},
{
"name": "JavaScript",
"bytes": "19245058"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Perl",
"bytes": "500"
},
{
"name": "Python",
"bytes": "28627483"
},
{
"name": "Ruby",
"bytes": "2051"
},
{
"name": "Shell",
"bytes": "4860"
},
{
"name": "XSLT",
"bytes": "2678742"
}
],
"symlink_target": ""
}
|
from calendar import month_name
from collections import defaultdict
from django.http import Http404
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponseForbidden, HttpResponseNotFound
from django.contrib.auth.decorators import login_required
from django import VERSION
from django.utils.translation import ugettext as _
from mezzanine_wiki.models import WikiPage, WikiCategory, WikiPageRevision
from mezzanine.conf import settings
from mezzanine.generic.models import AssignedKeyword, Keyword
from mezzanine.utils.views import render, paginate
from mezzanine_wiki.forms import WikiPageForm
from mezzanine_wiki.utils import urlize_title, deurlize_title
from mezzanine_wiki import defaults as wiki_settings
from diff_match_patch import diff_match_patch
from urllib import urlencode, quote
def wiki_index(request, template_name='mezawiki/wiki_page_detail.html'):
    """
    Redirect to the wiki's landing page.

    Closed wikis go to the page list; otherwise the configured default
    index page is shown.
    """
    if settings.WIKI_PRIVACY == wiki_settings.WIKI_PRIVACY_CLOSED:
        target = reverse('wiki_page_list')
    else:
        target = reverse('wiki_page_detail',
                         args=[settings.WIKI_DEFAULT_INDEX])
    return HttpResponseRedirect(target)
def wiki_page_list(request, tag=None, username=None,
                   category=None, template="mezawiki/wiki_page_list.html"):
    """
    Display a list of wiki pages that are filtered by tag,
    author or category.

    Custom templates are checked for using the name
    ``mezawiki/wiki_page_list_XXX.html`` where ``XXX`` is either the
    category slug or author's username if given.
    """
    settings.use_editable()
    templates = []
    # Only pages visible to the current user.
    wiki_pages = WikiPage.objects.published(for_user=request.user)
    if tag is not None:
        tag = get_object_or_404(Keyword, slug=tag)
        wiki_pages = wiki_pages.filter(keywords__in=tag.assignments.all())
    if category is not None:
        category = get_object_or_404(WikiCategory, slug=category)
        wiki_pages = wiki_pages.filter(categories=category)
        # NOTE(review): prefix says "wiki_post_list" while every other
        # branch uses "wiki_page_list" -- confirm which name is intended.
        templates.append(u"mezawiki/wiki_post_list_%s.html" %
                         unicode(category.slug))
    author = None
    if username is not None:
        author = get_object_or_404(User, username=username)
        wiki_pages = wiki_pages.filter(user=author)
        templates.append(u"mezawiki/wiki_page_list_%s.html" % username)

    # We want to iterate keywords and categories for each wiki page
    # without triggering "num posts x 2" queries.
    #
    # For Django 1.3 we create dicts mapping wiki page IDs to lists of
    # categories and keywords, and assign these to each wiki page
    #
    # For Django 1.4 we just use prefetch related.
    if VERSION >= (1, 4):
        rel = ("categories", "keywords__keyword")
        wiki_pages = wiki_pages.select_related("user").prefetch_related(*rel)
    else:
        wiki_pages = list(wiki_pages.select_related("user"))
        categories = defaultdict(list)
        if wiki_pages:
            ids = ",".join([str(p.id) for p in wiki_pages])
            # One raw SQL join fetches all page->category links at once.
            for cat in WikiCategory.objects.raw(
                    "SELECT * FROM mezzanine_wiki_wikicategory "
                    "JOIN mezzanine_wiki_wikipage_categories "
                    "ON mezzanine_wiki_wikicategory.id = wikicategory_id "
                    "WHERE wikipage_id IN (%s)" % ids):
                categories[cat.wikipage_id].append(cat)
        keywords = defaultdict(list)
        wikipage_type = ContentType.objects.get(app_label="mezzanine_wiki",
                                                model="wikipage")
        assigned = AssignedKeyword.objects.filter(wikipage__in=wiki_pages,
                content_type=wikipage_type).select_related("keyword")
        for a in assigned:
            keywords[a.object_pk].append(a.keyword)
    # Attach the per-page category/keyword lists the templates iterate.
    for i, page in enumerate(wiki_pages):
        if VERSION < (1, 4):
            setattr(wiki_pages[i], "category_list", categories[page.id])
            setattr(wiki_pages[i], "keyword_list", keywords[page.id])
        else:
            setattr(wiki_pages[i], "category_list",
                    page.categories.all())
            setattr(wiki_pages[i], "keyword_list",
                    [k.keyword for k in page.keywords.all()])
    wiki_pages = paginate(wiki_pages,
                          request.GET.get("page", 1),
                          settings.WIKI_PAGES_PER_PAGE,
                          settings.MAX_PAGING_LINKS)
    context = {"wiki_pages": wiki_pages,
               "tag": tag, "category": category, "author": author}
    templates.append(template)
    return render(request, templates, context)
def wiki_page_detail(request, slug, year=None, month=None,
                     template="mezawiki/wiki_page_detail.html"):
    """
    Displays a wiki page.

    Redirects to the edit view if the page doesn't exist.

    Custom templates are checked for using the name
    ``mezawiki/wiki_page_detail_XXX.html``
    where ``XXX`` is the wiki pages's slug.
    """
    # Canonicalize the slug; redirect when the URL form differs.
    slug_original = slug
    slug = urlize_title(slug)
    if slug != slug_original:
        return HttpResponseRedirect(
            reverse('wiki_page_detail', args=[slug])
        )
    try:
        wiki_page_exist = WikiPage.objects.get(slug=slug)
        # NOTE(review): status == 1 presumably means "draft" (only editors
        # may view) -- confirm the status constant's meaning.
        if wiki_page_exist.status == 1 and not wiki_page_exist.can_edit_wikipage(request.user):
            return HttpResponseForbidden(
                _("You don't have permission to view this wiki page."))
        wiki_pages = WikiPage.objects.published(for_user=request.user)
        wiki_page = wiki_pages.get(slug=slug)
    except WikiPage.DoesNotExist:
        # Missing page: offer to create it when the user may add pages.
        # NOTE(review): can_add_wikipage is not among this module's visible
        # imports -- verify it is available at runtime.
        if can_add_wikipage(request.user):
            return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
        else:
            return HttpResponseForbidden(
                _("You don't have permission to add new wiki page."))
    context = {"wiki_page": wiki_page}
    templates = [u"mezawiki/wiki_page_detail_%s.html" % unicode(slug), template]
    return render(request, templates, context)
def wiki_page_history(request, slug,
                      template="mezawiki/wiki_page_history.html"):
    """
    Display the revision history of a wiki page.

    Non-canonical slugs are redirected to their canonical form, a missing
    page redirects to the edit view, and unauthorized users receive a 403.

    Custom templates are checked for using the name
    ``mezawiki/wiki_page_history_XXX.html``
    where ``XXX`` is the wiki page's slug.
    """
    canonical = urlize_title(slug)
    if canonical != slug:
        # Canonicalize the URL before doing anything else.
        return HttpResponseRedirect(
            reverse('wiki_page_history', args=[canonical])
        )
    slug = canonical
    try:
        page = WikiPage.objects.published(for_user=request.user).get(slug=slug)
        revisions = WikiPageRevision.objects.filter(page=page)
    except WikiPage.DoesNotExist:
        return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
    if not page.can_view_wikipage(request.user):
        return HttpResponseForbidden(
            _("You don't have permission to view this wiki page."))
    context = {"wiki_page": page, "revisions": revisions}
    templates = [u"mezawiki/wiki_page_history_%s.html" % unicode(slug),
                 template]
    return render(request, templates, context)
def wiki_page_revision(request, slug, rev_id,
                       template="mezawiki/wiki_page_revision.html"):
    """
    Display a single revision of a wiki page.

    Redirects to the edit view if the page doesn't exist, and returns
    404 when the revision is missing or belongs to another page.

    Custom templates are checked for using the name
    ``mezawiki/wiki_page_detail_XXX.html``
    where ``XXX`` is the wiki pages's slug.
    """
    slug_original = slug
    slug = urlize_title(slug)
    if slug != slug_original:
        # BUG FIX: the redirect must carry both URL arguments; with only
        # the slug, reverse() raises NoReverseMatch for this URL pattern.
        return HttpResponseRedirect(
            reverse('wiki_page_revision', args=[slug, rev_id])
        )
    try:
        wiki_pages = WikiPage.objects.published(for_user=request.user)
        wiki_page = wiki_pages.get(slug=slug)
    except WikiPage.DoesNotExist:
        return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
    # BUG FIX: scope the revision to this page and return 404 when missing.
    # The original fetched by id alone (serving any page's revision) and
    # let WikiPageRevision.DoesNotExist escape as a 500.
    revision = get_object_or_404(WikiPageRevision, page=wiki_page, pk=rev_id)
    if not wiki_page.can_view_wikipage(request.user):
        return HttpResponseForbidden(
            _("You don't have permission to view this wiki page revision."))
    context = {"wiki_page": wiki_page, "revision": revision}
    # NOTE(review): the custom-template prefix reuses "wiki_page_detail_";
    # kept as-is for compatibility with existing template overrides.
    templates = [u"mezawiki/wiki_page_detail_%s.html" % unicode(slug), template]
    return render(request, templates, context)
def wiki_page_diff(request, slug,
                   template="mezawiki/wiki_page_diff.html"):
    """
    Display the difference between two revisions of a wiki page.

    Expects ``from_revision_pk`` and ``to_revision_pk`` in the request
    parameters; returns 404 when either revision is missing.
    """
    slug_original = slug
    slug = urlize_title(slug)
    if slug != slug_original:
        # Canonicalize the slug in the URL first.
        return HttpResponseRedirect(
            reverse('wiki_page_diff', args=[slug])
        )
    try:
        wiki_pages = WikiPage.objects.published(for_user=request.user)
        wiki_page = wiki_pages.get(slug=slug)
    except WikiPage.DoesNotExist:
        return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
    try:
        from_rev = wiki_page.wikipagerevision_set.get(
            pk=request.REQUEST['from_revision_pk'])
        to_rev = wiki_page.wikipagerevision_set.get(
            pk=request.REQUEST['to_revision_pk'])
    except (KeyError, WikiPageRevision.DoesNotExist):
        # BUG FIX: a missing revision raises WikiPageRevision.DoesNotExist,
        # not WikiPage.DoesNotExist -- the original never caught it and the
        # view crashed with a 500 instead of returning 404.
        return HttpResponseNotFound()
    dmp = diff_match_patch()
    diff = dmp.diff_compute(from_rev.content, to_rev.content, True, 2)
    # The undo view redirects here with ?undo=error when a patch fails.
    undo_error = ('undo' in request.REQUEST
                  and request.REQUEST['undo'] == 'error')
    # BUG FIX: honor the ``template`` parameter instead of the hard-coded
    # literal (the default value is identical, so behavior only changes
    # for callers that pass a custom template).
    return render(request, template,
                  {'wiki_page': wiki_page, 'from_revision': from_rev,
                   'to_revision': to_rev, 'diff': diff,
                   'undo_error': undo_error})
def wiki_page_revert(request, slug, revision_pk):
    """
    Reverts a wiki page to the content of an earlier revision.

    Normalizes the slug (redirecting when it changes), pre-fills the edit
    form with the selected revision's content on GET, and records a new
    revision when the form is submitted. Anonymous editors are attributed
    to the sentinel user with id -1.
    """
    canonical_slug = urlize_title(slug)
    if canonical_slug != slug:
        return HttpResponseRedirect(
            reverse('wiki_page_revert', args=[canonical_slug, revision_pk]))
    slug = canonical_slug
    try:
        visible_pages = WikiPage.objects.published(for_user=request.user)
        wiki_page = visible_pages.get(slug=slug)
    except WikiPage.DoesNotExist:
        # Unknown page: send the user to the edit view to create it.
        return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
    src_revision = get_object_or_404(
        WikiPageRevision, page=wiki_page, pk=revision_pk)
    if request.user.is_authenticated():
        author = request.user
    else:
        author = User.objects.get(id=-1)
    new_revision = WikiPageRevision(page=wiki_page, user=author)
    if request.method == 'POST':
        form = WikiPageForm(data=request.POST or None, instance=wiki_page)
        if form.is_valid():
            form.save()
            new_revision.content = form.cleaned_data["content"]
            new_revision.description = form.cleaned_data["summary"]
            new_revision.save()
            return HttpResponseRedirect(
                reverse('wiki_page_detail', kwargs={'slug': slug}))
    else:
        # Build a default summary describing what is being reverted to.
        if src_revision.user:
            description = _("Reverted to revision of %(time)s by %(user)s.") % {
                'time': src_revision.created,
                'user': src_revision.user.username,
            }
        else:
            description = _("Reverted to anonymous revision of %(time)s.") % {
                'time': src_revision.created,
            }
        form = WikiPageForm(
            data=request.POST or None, instance=wiki_page,
            initial={'content': src_revision.content, 'summary': description})
    return render(request, 'mezawiki/wiki_page_edit.html',
                  {'wiki_page': wiki_page, 'form': form,
                   'src_revision': src_revision})
def wiki_page_undo(request, slug, revision_pk):
    """
    Undoes a single revision of a wiki page.

    Computes the reverse patch from the target revision back to its
    predecessor and applies it to the current page content, so that later
    edits are preserved where possible. If the patch cannot be applied
    cleanly, redirects to the diff view with ``undo=error``. Submitting
    the pre-filled form records the result as a new revision.
    """
    slug_original = slug
    slug = urlize_title(slug)
    if slug != slug_original:
        # Canonicalize the slug in the URL before doing any work.
        return HttpResponseRedirect(
            reverse('wiki_page_undo', args=[slug, revision_pk])
        )
    try:
        wiki_pages = WikiPage.objects.published(for_user=request.user)
        wiki_page = wiki_pages.get(slug=slug)
    except WikiPage.DoesNotExist:
        return HttpResponseRedirect(reverse('wiki_page_edit', args=[slug]))
    src_revision = get_object_or_404(WikiPageRevision, page=wiki_page, pk=revision_pk)
    # Anonymous editors are attributed to the sentinel user with id -1.
    new_revision = WikiPageRevision(page=wiki_page,
        user=request.user if request.user.is_authenticated() else User.objects.get(id=-1))
    if request.method == 'POST':
        form = WikiPageForm(data=request.POST or None, instance=wiki_page)
        if form.is_valid():
            form.save()
            new_revision.content = form.cleaned_data["content"]
            new_revision.description = form.cleaned_data["summary"]
            new_revision.save()
            return HttpResponseRedirect(reverse('wiki_page_detail', kwargs={'slug': slug}))
    else:
        # Default edit summary for the undo.
        if src_revision.user:
            description = _("Undid revision of %(time)s by %(user)s.") % \
                {'time': src_revision.created, 'user': src_revision.user.username}
        else:
            description = _("Undid anonymous revision of %(time)s.") % {'time': src_revision.created}
        # Find the revision immediately preceding the one being undone;
        # if there is none, undoing means removing its content entirely.
        prev_revision = None
        try:
            prev_revision = WikiPageRevision.objects\
                .filter(page=wiki_page, created__lt=src_revision.created)\
                .order_by('-created')[0]
            prev_content = prev_revision.content
        except IndexError:
            prev_content = ''
        # Build a patch that transforms src_revision's content back into
        # its predecessor's, then apply it to the *current* page content so
        # that edits made after src_revision survive the undo.
        dmp = diff_match_patch()
        rdiff = dmp.patch_make(src_revision.content, prev_content)
        content, results = dmp.patch_apply(rdiff, wiki_page.content)
        if False in results:
            # At least one patch hunk failed; show the diff with an error
            # flag instead of silently producing a broken page.
            urldata = {'to_revision_pk': src_revision.pk}
            if prev_revision:
                urldata['from_revision_pk'] = prev_revision.pk
            urldata['undo'] = 'error'
            return HttpResponseRedirect("%s?%s" % (
                reverse('wiki_page_diff', kwargs={'slug': slug}),
                urlencode(urldata)))
        form = WikiPageForm(data=request.POST or None, instance=wiki_page,
            initial={'content': content, 'summary': description})
    return render(request, 'mezawiki/wiki_page_edit.html', {'wiki_page': wiki_page, 'form': form})
def wiki_page_changes(request,
                      template="mezawiki/wiki_page_changes.html"):
    """
    Displays recent changes across all wiki pages visible to the user.
    """
    visible_pages = WikiPage.objects.published(for_user=request.user)
    revisions = WikiPageRevision.objects.filter(page__in=visible_pages)
    return render(request, template, {"wiki_revisions": revisions})
def wiki_page_edit(request, slug,
                   template="mezawiki/wiki_page_edit.html"):
    """
    Displays the form for editing a page, creating it on first edit.

    Returns 403 if the user may not edit the page. On a valid POST, saves
    the page and — when the content actually changed — records a new
    revision, then redirects to the page's detail view.
    """
    try:
        wiki_page = WikiPage.objects.get(slug=slug)
        wiki_page.is_initial = False
        initial = {}
    except WikiPage.DoesNotExist:
        # First edit of a page that does not exist yet.
        wiki_page = WikiPage(slug=slug)
        wiki_page.is_initial = True
        initial = {'status': 1}
    if not wiki_page.can_edit_wikipage(request.user):
        return HttpResponseForbidden(
            _("You don't have permission to edit this wiki page."))
    if request.method == 'POST':
        form = WikiPageForm(request.POST, instance=wiki_page)
        if form.is_valid():
            page = form.save(commit=False)
            if wiki_page.is_initial:
                # BUG FIX: replaced the bare ``except`` (which silently
                # swallowed any error) with an explicit authentication
                # check, consistent with wiki_page_revert(). Anonymous
                # authors are attributed to the sentinel user id -1.
                if request.user.is_authenticated():
                    page.user = request.user
                else:
                    page.user_id = -1
                page.title = deurlize_title(slug)
            page.save()
            if 'content' in form.changed_data:
                revision = WikiPageRevision()
                revision.content = page.content
                revision.description = form.cleaned_data["summary"]
                revision.page = page
                if request.user.is_authenticated():
                    revision.user = request.user
                else:
                    # anonymous
                    revision.user_id = -1
                revision.save()
            return HttpResponseRedirect(
                reverse('wiki_page_detail', args=[slug]))
    else:
        form = WikiPageForm(initial=initial, instance=wiki_page)
    context = {'wiki_page': wiki_page, 'form': form,
               'title': deurlize_title(slug)}
    return render(request, template, context)
def can_add_wikipage(user):
    """
    Returns True if ``user`` may create wiki pages under the current
    WIKI_PRIVACY setting: everyone when opened, registered users when
    registered, permission holders when moderated, nobody otherwise.
    """
    privacy = settings.WIKI_PRIVACY
    if privacy == wiki_settings.WIKI_PRIVACY_OPENED:
        # Everyone.
        return True
    if privacy == wiki_settings.WIKI_PRIVACY_REGISTERED:
        # Registered users only (AnonymousUser is not a User instance).
        return isinstance(user, User)
    if privacy == wiki_settings.WIKI_PRIVACY_MODERATED:
        # Moderated: requires the explicit add permission.
        return user.has_perm('mezzanine_wiki.add_wikipage')
    # TODO closed.
    # Fallback to closed page.
    return False
def wiki_page_new(request, template="mezawiki/wiki_page_new.html"):
    """
    Displays the form for creating a page.

    Returns 403 when the current user may not add pages. On a valid POST,
    saves the page (slug derived from the title), records the initial
    revision and redirects to the new page's detail view.
    """
    if not can_add_wikipage(request.user):
        return HttpResponseForbidden(
            _("You don't have permission to create wiki page."))
    if request.method == 'POST':
        form = WikiPageForm(request.POST)
        if form.is_valid():
            page = form.save(commit=False)
            # BUG FIX: replaced the bare ``except`` (which hid unrelated
            # errors) with an explicit authentication check; anonymous
            # authors are attributed to the sentinel user id -1,
            # consistent with wiki_page_revert().
            if request.user.is_authenticated():
                page.user = request.user
            else:
                # anonymous
                page.user_id = -1
            page.slug = urlize_title(form.cleaned_data["title"])
            # TODO Check slug, it is not a unique field
            page.save()
            revision = WikiPageRevision()
            revision.content = page.content
            revision.description = form.cleaned_data["summary"]
            revision.page = page
            if request.user.is_authenticated():
                revision.user = request.user
            else:
                # anonymous
                revision.user_id = -1
            revision.save()
            return HttpResponseRedirect(
                reverse('wiki_page_detail', args=[page.slug]))
    else:
        form = WikiPageForm(initial={'status': 1})
    context = {'form': form}
    return render(request, template, context)
|
{
"content_hash": "241703dd1ecfbb5c486a64d81162f659",
"timestamp": "",
"source": "github",
"line_count": 447,
"max_line_length": 133,
"avg_line_length": 41.214765100671144,
"alnum_prop": 0.621885686370298,
"repo_name": "dfalk/mezzanine-wiki",
"id": "83006cf982fbf1c93dd606c06d3f8ae23b251db2",
"size": "18423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mezzanine_wiki/views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "113"
},
{
"name": "Python",
"bytes": "125218"
}
],
"symlink_target": ""
}
|
"""Implementation of Transformer networks.
Size glossary:
* Batch size (B).
* Sequence length (N).
* Memory size (M). The size of the optional memory, passed in via `state`.
* Number of heads (H): the number of attention heads.
* Value size (V): the size of each value embedding per head.
* Key size (K): the size of each key embedding per head. Equally, the size
of each query embedding per head. Typically K <= V.
* Embedding size (HV). The size of the activation or embedding relating to
each input between layers. Equal to value_size * num_heads.
* All attention size (F). The size of all attention activations over every
head.
* QKV size (F / H): The size of the query, key and value per head. Equal to
2K + V or equivalently F / H.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from sonnet.python.modules import base
from sonnet.python.modules import basic
from sonnet.python.modules import layer_norm as snt_ln
from sonnet.python.modules import util
from sonnet.python.modules.nets import mlp as snt_mlp
import tensorflow.compat.v1 as tf
# Per-layer attention byproducts returned by MultiheadAttention._build:
# projected queries/keys/values, pre-mask content logits, post-softmax
# weights, the layer's input embeddings, and the attended output.
AttentionState = collections.namedtuple('AttentionState',
                                        ('queries', 'keys', 'values', 'logits',
                                         'weights', 'embeddings', 'read_words'))
# Two-part memory state: a recent episodic memory plus an older compressed
# memory, both attended to alongside the input sequence.
# NOTE(review): the semantics of `index` are not visible in this chunk —
# presumably a position counter; confirm against the memory-update code.
CompressedMemoryState = collections.namedtuple(
    'CompressedMemoryState', ('episodic_memory', 'compressed_memory', 'index'))
def rel_shift(position_logits):
  """Shifts logits for relative attention via the pad-and-reshape trick.

  Args:
    position_logits: A tensor of shape [B, H, N, N + M].

  Returns:
    The shifted logits. Example, for input (H=1, B=1):
      [5, 4, 3, 2, 1]
      [5, 4, 3, 2, 1]
      [5, 4, 3, 2, 1]
      [5, 4, 3, 2, 1]
      [5, 4, 3, 2, 1]
    the function outputs:
      [1, 0, 5, 4, 3]
      [2, 1, 0, 5, 4]
      [3, 2, 1, 0, 5]
      [4, 3, 2, 1, 0]
      [5, 4, 3, 2, 1]

  Raises:
    ValueError if position_logits is not 4D.

  Note: this is not an exact shift as the upper triangle is non-zero. This
  works as intended in the causally-masked case. If this is used with
  un-masked attention, we'd want these to also be zero.
  """
  if position_logits.get_shape().ndims != 4:
    raise ValueError('Expected 4D position logits.')
  original_shape = position_logits.shape
  batch = original_shape[0]
  heads = original_shape[1]
  q_len = original_shape[2]
  kv_len = original_shape[3]
  # Prepend a zero column on the final (timescale) dimension.
  zero_column = tf.zeros([batch, heads, q_len, 1])
  padded = tf.concat([zero_column, position_logits], -1)
  # Re-reading the padded buffer with swapped trailing dims shifts each
  # row by one position relative to the previous row.
  shifted = tf.reshape(padded, [batch, heads, kv_len + 1, q_len])
  # Drop the extra time step and restore the original shape.
  shifted = shifted[:, :, 1:]
  return tf.reshape(shifted, original_shape)
def _layer_norm(inputs):
  """Applies layer norm, batch-applying over leading dims when rank > 2."""
  norm = snt_ln.LayerNorm()
  if inputs.get_shape().ndims > 2:
    return basic.BatchApply(norm)(inputs)
  return norm(inputs)
def _concat_and_slice(prev_memory, new_memory):
  """Appends new_memory and keeps only the most recent memory slots.

  Returns a (memory, concat_memory) pair: the trailing slice with the
  original memory length, and the full concatenation.
  """
  capacity = prev_memory.get_shape().as_list()[1]
  full_memory = tf.concat([prev_memory, new_memory], 1)
  return full_memory[:, -capacity:], full_memory
def simple_attention(queries, keys, values):
  """Unmasked, unscaled dot-product attention."""
  scores = tf.matmul(queries, keys, transpose_b=True)
  return tf.matmul(tf.nn.softmax(scores), values)
class ResidualDropoutWrapper(base.AbstractModule):
  """Wraps a module with layer norm, dropout and a residual connection.

  Depending on ``layer_norm``, layer normalization is applied to the
  wrapped module's input ('input'), to the residual sum ('output'), or to
  both ('both'). Dropout is applied to the module output during training.
  """

  def __init__(self,
               layer,
               dropout_rate,
               layer_norm='input',
               name='residual_dropout_wrapper'):
    self._module = layer
    self._dropout_rate = dropout_rate
    self._layer_norm = layer_norm
    super(ResidualDropoutWrapper, self).__init__(name=name)

  def _build(self, inputs, *args, **kwargs):
    """Applies norm, module, dropout and the residual sum.

    ``kwargs`` must contain ``is_training``.
    """
    if self._layer_norm in ('both', 'input'):
      module_inputs = _layer_norm(inputs)
    else:
      module_inputs = inputs
    result = self._module(module_inputs, *args, **kwargs)
    # A tuple result is interpreted as (output, state).
    extra_state = None
    if isinstance(result, tuple):
      result, extra_state = result
    if kwargs['is_training']:  # Callers are required to pass is_training.
      result = tf.nn.dropout(result, rate=self._dropout_rate)
    combined = inputs + result
    if self._layer_norm in ('both', 'output'):
      combined = _layer_norm(combined)
    if extra_state is None:
      return combined
    return combined, extra_state
def future_mask(chunk_size, dtype):
  """Creates attention mask to ensure an element i cannot attend to j > i."""
  ones = tf.ones([chunk_size, chunk_size], dtype=dtype)
  # Strictly-upper-triangular: upper triangle minus the diagonal, so
  # self-attention stays allowed.
  strict_upper = tf.matrix_band_part(ones, 0, -1) - tf.matrix_band_part(
      ones, 0, 0)
  # Scale by -1e6 and reshape to broadcast against [B, H, N, N] logits.
  return -1e6 * tf.reshape(strict_upper, [1, 1, chunk_size, chunk_size])
def _memory_size(state):
  """Returns the total number of memory slots held by `state`."""
  if not isinstance(state, CompressedMemoryState):
    return state.get_shape().as_list()[1]
  # Compressed states contribute both memory components.
  return (state.episodic_memory.get_shape().as_list()[1] +
          state.compressed_memory.get_shape().as_list()[1])
def create_mask(inputs, state, equal_window):
  """Creates the causal mask for a sequence with optional memory.

  Args:
    inputs: inputs tensor of shape [B, N, D]
    state: optional tensor of shape [B, M, D], CompressedMemoryState or a list
      where the ith entry corresponds to the ith layer's state.
    equal_window: if True, then each activation has an equally-sized attention
      window of length 'M'. This only makes sense if a state is given.

  Returns:
    Float tensor of shape [1, 1, N, N + M], to be summed with logits.
  """
  seq_len = inputs.get_shape().as_list()[1]
  dtype = inputs.dtype
  mask = future_mask(seq_len, dtype)
  if state is not None:
    if isinstance(state, (tuple, list)):
      # Size the mask for the layer holding the largest memory.
      state = state[np.argmax([_memory_size(s) for s in state])]
    memory_len = _memory_size(state)
    # Memory slots are all in the past, so they are never masked out.
    mask = tf.concat(
        [tf.zeros([1, 1, seq_len, memory_len], dtype=dtype), mask], 3)
  if equal_window:
    # Additionally mask the lower triangle (excluding the diagonal) of the
    # sequence portion, so every position sees exactly M entries.
    window = tf.ones([seq_len, seq_len], dtype=dtype)
    diagonal = tf.cast(tf.matrix_band_part(window, 0, 0), dtype=dtype)
    lower = tf.cast(tf.matrix_band_part(window, -1, 0), dtype=dtype)
    start_mask = tf.reshape(lower - diagonal,
                            [1, 1, seq_len, seq_len]) * -1e6
    mask = tf.concat(
        [mask[:, :, :, :seq_len] + start_mask, mask[:, :, :, seq_len:]], 3)
  return mask
def default_mlp(hidden_sizes, activate_final=False, init_std=2., **kwargs):
  """Standard batch-applied MLP for transformer modules."""
  initializers = {
      'w': tf.variance_scaling_initializer(init_std, distribution='normal')
  }
  return basic.BatchApply(
      snt_mlp.MLP(
          hidden_sizes,
          activate_final=activate_final,
          use_dropout=True,
          initializers=initializers,
          **kwargs))
def get_position_encodings(sequence_length,
                           hidden_size,
                           clamp_value,
                           max_timescale=10000.,
                           min_timescale=2.0):
  """Creates sinusoidal encodings of shape [1, N + M, D]."""
  # NOTE: when not using relative position encodings, min_timescale must be 2.0
  # and hidden_size must be an even number. Otherwise, the dimensions do not
  # match.
  # Positions count down so the most recent element has position 0.
  positions = tf.range(sequence_length - 1, -1, -1.0)
  if clamp_value > 0:
    positions = tf.minimum(positions, clamp_value)
  frequency_indices = tf.range(0, hidden_size, min_timescale)
  inverse_frequencies = 1 / (max_timescale**(frequency_indices / hidden_size))
  angles = tf.einsum('i,j->ij', positions, inverse_frequencies)
  encodings = tf.expand_dims(
      tf.concat([tf.sin(angles), tf.cos(angles)], -1), 0)
  actual_dim = encodings.get_shape().as_list()[-1]
  if actual_dim != hidden_size:
    raise ValueError(
        'position embedding dimension ({}) does not match that of the input ({}).'
        .format(actual_dim, hidden_size))
  return encodings
class MultiheadAttention(base.AbstractModule):
  """Implements multi-head attention with optional state context."""
  def __init__(self,
               value_size,
               key_size,
               num_heads,
               mask=None,
               scaling=True,
               positional_encodings=None,
               use_relative_positions=False,
               init_std=2.,
               name='multihead_attention'):
    """Creates a MultiheadAttention module.
    Args:
      value_size: V parameter. See size glossary in class docstring.
      key_size: K parameter. See size glossary in class docstring.
      num_heads: The number of independent queries per timestep.
      mask: Optional mask to attention logits. This can prevent attending to
        future positions or unused memory slots.
      scaling: Whether to scale the attention logits.
      positional_encodings: Either None (none given), or an iterable of
        `(key_positional_encodings, query_positional_encodings)` tuples, where
        the first encodings in the list indicate the oldest entries in memory
        and the final encodings indicate the newest entries in memory and the
        sequence.
      use_relative_positions: If True then relative positions are incorporated,
        vs absolute, into the attention logits. This is done exactly as
        described in the TransformerXL, Dai et al. 2019.
      init_std: scaling of standard deviation for weight matrices init.
      name: Name of module.
    """
    super(MultiheadAttention, self).__init__(name=name)
    self._value_size = value_size
    self._key_size = key_size
    # Projection widths keyed by the name passed to multihead_linear().
    # Queries and (relative) keys share the key size.
    self._sizes = {
        'value': self._value_size,
        'key': self._key_size,
        'query': self._key_size,
        'relative_keys': self._key_size,
        'relative_keys_0': self._key_size,
    }
    self._num_heads = num_heads
    self._mask = mask
    self._scaling = scaling
    self._positional_encodings = positional_encodings
    self._use_relative_positions = use_relative_positions
    self._init = {'w': tf.variance_scaling_initializer(init_std)}
  @util.reuse_variables
  def multihead_linear(self, inputs, name):
    """Applies the per-head linear projection registered under `name`.

    Maps [B, T, D] inputs to a [B, H, T, size] tensor, where `size` is
    looked up in `self._sizes` and weights are reused across calls with
    the same `name`.
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
      hidden_size = self._sizes[name]
      input_size = inputs.shape[-1].value
      w = tf.get_variable(
          'linear/w',
          shape=[input_size, self._num_heads * hidden_size],
          initializer=self._init['w'])
      w = tf.reshape(w, [input_size, self._num_heads, hidden_size])
      out = tf.einsum('bij,jhk->bhik', inputs, w)
      return out
  def _build(self,
             inputs,
             query_inputs=None,
             state=None,
             is_training=False,
             dropout_keep_prob=0.5,
             key_value_inputs=None):
    """Calculates multi-layer self attention.
    Args:
      inputs: Tensor of shape [batch_size, num_steps, output_dim_size]. Inputs
        used as the query, key, and value to the attention layer.
      query_inputs: optional Tensor of shape [batch_size, num_steps,
        output_dim_size]. Query inputs to the attention layer. Set when
        query_inputs is different from the inputs argument.
      state: optional CompressedMemoryState or a Tensor of shape [batch_size,
        memory_size, dim_size] concatenated to the inputs. Set when attend to
        the memory from previous steps.
      is_training: if currently training.
      dropout_keep_prob: probability of keeping each attention weight when
        dropout is applied (passed as keep_prob to tf.nn.dropout; 1.0
        means no dropout).
      key_value_inputs: optional Tensor of shape [batch_size, num_steps,
        output_dim_size]. It is used as the key and value of the multihead
        attention. Set when the key and value are different from the inputs
        argument.
    Returns:
      output: the result Tensor of shape
        [batch_size, num_steps, output_dim_size].
      attention_state: named tuple of AttentionState.
    """
    if key_value_inputs is not None and state is not None:
      raise ValueError('Only one of the key_value_input and state is needed.')
    embedding_size = self._value_size * self._num_heads
    q_inputs = inputs if query_inputs is None else query_inputs
    # Denoted by L. If query_inputs is None, L = N.
    _, query_size = q_inputs.get_shape().as_list()[:2]
    # Select the key/value source: explicit override, memory + inputs, or
    # plain self-attention on the inputs.
    if key_value_inputs is not None:
      k_inputs = key_value_inputs
      v_inputs = k_inputs
    elif state is not None:
      if isinstance(state, CompressedMemoryState):
        # Oldest (compressed) memory first so positions run old -> new.
        state_memory_list = [state.compressed_memory, state.episodic_memory]
      else:
        state_memory_list = [state]
      k_inputs = tf.concat(state_memory_list + [inputs], 1)
      v_inputs = k_inputs
    else:
      k_inputs = inputs
      v_inputs = inputs
    # Batch size denoted by B
    batch_size = tf.shape(inputs)[0]
    # Chunk_size denoted by N
    chunk_size = inputs.get_shape().as_list()[1]
    # Denoted by N + M
    att_size = k_inputs.get_shape().as_list()[1]
    if self._positional_encodings and not self._use_relative_positions:
      if len(self._positional_encodings) != 1:
        raise ValueError(
            'Absolute positional encodings only supported for 1 memory. '
            'Found %i.' % len(self._positional_encodings))
      key_positions, query_positions = self._positional_encodings[0]
      k_inputs += key_positions
      q_inputs += query_positions
    # [B, H, L, K]
    q = self.multihead_linear(q_inputs, 'query')
    # [B, H, N + M, K]
    k = self.multihead_linear(k_inputs, 'key')
    # [B, H, N + M, V]
    v = self.multihead_linear(v_inputs, 'value')
    # Scaling the dot-product
    if self._scaling:
      q *= self._key_size**-0.5
    # [B, H, L, N + M]
    if self._use_relative_positions:
      # TransformerXL-style: content logits use a learned global bias on
      # the queries; position logits get their own bias per encoding.
      r_w_bias = tf.get_variable(
          'r_w_bias', [1, self._num_heads, 1, self._key_size],
          dtype=inputs.dtype)
      content_logits = tf.matmul(q + r_w_bias, k, transpose_b=True)
      all_relative_logits = []
      # Loop over multiple positional encodings, for the case of multiple
      # memory types.
      for i, positional_encodings in enumerate(self._positional_encodings):
        key_positions, query_positions = positional_encodings
        if key_positions.get_shape().as_list()[-1] != att_size:
          key_positions = key_positions[:, -att_size:]  # Crop to layer mem size
        is_final = i == len(self._positional_encodings) - 1
        suffix = '' if is_final else '_%d' % i
        relative_keys = self.multihead_linear(
            key_positions, name='relative_keys' + suffix)
        # [B, H, N, D]
        r_r_bias = tf.get_variable(
            'r_r_bias' + suffix, [1, self._num_heads, 1, self._key_size],
            dtype=inputs.dtype)
        relative_keys = tf.tile(relative_keys, [batch_size, 1, 1, 1])
        relative_logits = tf.matmul(
            q + r_r_bias, relative_keys, transpose_b=True)
        relative_logits = rel_shift(relative_logits)
        if not is_final:  # Include relative positions for input sequence.
          relative_logits = relative_logits[:, :, :, :-chunk_size]
        all_relative_logits.append(relative_logits)
      all_relative_logits = tf.concat(all_relative_logits, 3)
      logits = content_logits + all_relative_logits
    else:
      # [B, H, N, N + M]
      logits = tf.matmul(q, k, transpose_b=True)
      content_logits = logits
    if self._mask is not None:
      # Crop the mask if it covers more positions than this layer attends.
      if self._mask.get_shape().as_list()[-1] != att_size:
        mask = self._mask[:, :, :, -att_size:]
      else:
        mask = self._mask
      logits += mask
    weights = tf.nn.softmax(logits)
    if is_training:
      weights = tf.nn.dropout(weights, dropout_keep_prob)
    # [B, L, H, V], where V is value_size
    output_transpose = tf.einsum('bhij,bhjk->bihk', weights, v)
    # [B, L, H, V] -> [B, L, HV]
    attended_inputs = basic.BatchReshape([query_size, embedding_size])(
        output_transpose)
    # Apply final mlp to mix information between heads.
    output = basic.BatchApply(basic.Linear(embedding_size))(attended_inputs)
    attention_state = AttentionState(
        queries=q,
        keys=k,
        values=v,
        weights=weights,
        logits=content_logits,
        embeddings=inputs,
        read_words=output)
    return output, attention_state
class TransformerTower(base.AbstractModule):
  """Transformer tower.
  Deep residual network using blocks of attention and MLPs, specified in
  Vaswani et al. 2017.
  """
  def __init__(self,
               value_size,
               num_heads,
               num_layers,
               causal=True,
               key_size=None,
               shared_attention=False,
               output_size=None,
               mlp_hidden_sizes=tuple([1024]),
               dropout_rate=0.1,
               use_relative_positions=True,
               clamp_time_range=0,
               same_attention_length=False,
               layer_norm='input',
               name='transformer_tower'):
    """Initializes TransformerTower.
    Args:
      value_size: dimensionality of values per-head.
      num_heads: number of attention heads.
      num_layers: number of transformer blocks, where each block contains a
        multi-head attention layer and an MLP.
      causal: if True, applies a causal mask.
      key_size: optional dimensionality of key size. If unspecified then it is
        set to `value_size`.
      shared_attention: if True, attention params are shared across all layers.
      output_size: if set, the desired output dimensionality. By default the
        output size is `value_size` x `num_heads`.
      mlp_hidden_sizes: tuple containing dimensionality of mlp layer(s). If
        multiple values are specified, the mlp contains multiple layers for each
        transformer block.
      dropout_rate: dropout rate applied to hidden activations, attention, and
        positional encodings.
      use_relative_positions: if False, applies absolute positional encodings.
        If true, uses relative positional encodings from Dai et al. 2019.
      clamp_time_range: clamps max temporal positional encoding if specified.
      same_attention_length: if True, attention is masked to ensure each
        position in the sequence contains the same length of attention.
      layer_norm: Where to apply layer-norm in Transformer block. Can be one of
        'input' (Vaswani et al. 2017), 'output', or 'both'.
      name: name of variable scope.
    """
    super(TransformerTower, self).__init__(name=name)
    self._causal = causal
    self._mask = None
    if key_size is None:
      key_size = value_size
    self._key_size = key_size
    self._value_size = value_size
    self._shared_attention = shared_attention
    self._num_heads = num_heads
    self._num_layers = num_layers
    self._output_size = output_size
    self._embedding_size = self._value_size * self._num_heads
    # Each block's MLP always projects back to the embedding size.
    self._mlp_hidden_sizes = list(mlp_hidden_sizes) + [self._embedding_size]
    self._multihead_attention = None
    self._object_embeddings = None
    self._dropout_rate = dropout_rate
    self._positional_encodings = None
    self._use_relative_positions = use_relative_positions
    self._clamp_time_range = clamp_time_range
    self._same_attention_length = same_attention_length
    self._layer_norm = layer_norm
    self._attention_modules = []
    self._object_mlps = []
  def get_sublayers(self, is_training):
    """Returns the (attention, mlp) sublayers for one transformer block.

    A fresh attention module is built per block unless `shared_attention`
    is set, in which case the first one is cached and reused. A fresh MLP
    is built every call.
    """
    if self._multihead_attention is None or not self._shared_attention:
      attention_module = MultiheadAttention(
          value_size=self._value_size,
          key_size=self._key_size,
          num_heads=self._num_heads,
          mask=self._mask,
          positional_encodings=self._positional_encodings,
          use_relative_positions=self._use_relative_positions,
          init_std=2. / np.sqrt(self._num_layers),
      )
      self._multihead_attention = ResidualDropoutWrapper(
          attention_module, self._dropout_rate, layer_norm=self._layer_norm)
    mlp = default_mlp(
        self._mlp_hidden_sizes, init_std=2. / np.sqrt(self._num_layers))
    object_mlp = ResidualDropoutWrapper(
        mlp, self._dropout_rate, layer_norm=self._layer_norm)
    self._attention_modules.append(attention_module)
    self._object_mlps.append(mlp)
    return self._multihead_attention, object_mlp
  def _build(self,
             inputs,
             state=None,
             condition=None,
             is_training=True,
             final_layer_key_value_inputs=None):
    """Calculates multi-layer self attention and mlp transformation.
    Args:
      inputs: Tensor of shape [batch_size, num_steps, dim_size].
      state: optional list of length num_layers of tensors of shape
        [batch_size, memory_size, dim_size].
      condition: optional tensor to condition on. The shape is shape
        [batch_size, dim_size].
      is_training: If true, dropout is applied.
      final_layer_key_value_inputs: optional Tensor to be used as the key and
        value for the final multi-head attention layer of shape
        [batch_size, num_steps, dim_size]. Useful when the tower is a Seq2Seq
        decoder and it can attend to encoder outputs.
    Returns:
      output: tensor of shape [batch_size, num_steps, output_dim_size].
      state: list of length `num_layers` containing AttentionState tuples.
    """
    # inputs: [B, N, F]
    if final_layer_key_value_inputs is not None and state is not None and len(
        state) == (self._num_layers - 1):
      raise ValueError('When the final_layer_key_value_input is set, exclude'
                       'the state of the last layer.')
    if condition is not None:
      # Broadcast the conditioning vector across all timesteps and append
      # it to every input embedding.
      condition_tile = tf.tile(
          tf.expand_dims(condition, 1), [1, tf.shape(inputs)[1], 1])
      inputs = tf.concat([inputs, condition_tile], -1)
    # Map inputs to be of `embedding_size` dimension.
    if inputs.get_shape().as_list()[-1] != self._embedding_size:
      inputs = default_mlp([self._embedding_size], activate_final=True)(
          inputs,
          is_training=is_training,
          dropout_keep_prob=1 - self._dropout_rate)
    # Determine memory sizes so positional encodings can cover N + M slots.
    if state is None:
      memory_sizes = [0]
    elif isinstance(state[0], CompressedMemoryState):
      cm_mem_size = max(_memory_size(s.compressed_memory) for s in state)
      em_mem_size = max(_memory_size(s.episodic_memory) for s in state)
      memory_sizes = [cm_mem_size, em_mem_size]
    else:
      memory_sizes = [max([_memory_size(s) for s in state])]
    chunk_size = inputs.get_shape().as_list()[1]
    self._positional_encodings = []
    # Creates positional encodings for different memory types.
    for i, memory_size in enumerate(memory_sizes):
      seq_len = chunk_size + memory_size
      key_positions = get_position_encodings(
          sequence_length=seq_len,
          hidden_size=inputs.get_shape().as_list()[2],
          clamp_value=self._clamp_time_range,
      )
      if is_training:
        key_positions = tf.nn.dropout(key_positions, rate=self._dropout_rate)
      key_positions = tf.cast(key_positions, dtype=inputs.dtype)
      query_positions = key_positions[:, -chunk_size:, :]
      self._positional_encodings.append((key_positions, query_positions))
    if self._causal:
      self._mask = create_mask(inputs, state, self._same_attention_length)
    layer_i_inputs = inputs
    attention_states = []
    key_value_inputs = None
    for i in range(self._num_layers):
      with tf.variable_scope('layer_%d' % i, reuse=tf.AUTO_REUSE):
        multihead_attention, object_mlp = self.get_sublayers(is_training)
        # Multihead attention with residuals.
        state_i = None if state is None else state[i]
        if i == (self._num_layers -
                 1) and final_layer_key_value_inputs is not None:
          # When the final_layer_key_value_inputs is set, the final layer
          # of attention will use it as the key & value, thus no need for state.
          key_value_inputs = final_layer_key_value_inputs
          state_i = None
        attention_outputs, attention_state = multihead_attention(
            layer_i_inputs,
            state=state_i,
            is_training=is_training,
            dropout_keep_prob=1. - self._dropout_rate,
            key_value_inputs=key_value_inputs)
        attention_states.append(attention_state)
        # Feed-forward with residuals.
        output = object_mlp(
            attention_outputs,
            is_training=is_training,
            dropout_keep_prob=1 - self._dropout_rate)
        layer_i_inputs = output
    if self._output_size is not None:
      # Optional final projection to the requested output dimensionality.
      output = basic.BatchApply(
          basic.Linear(self._output_size, use_bias=False))(
              output)
    return output, attention_states
  def attention_module(self, i):
    """Returns the i-th layer attention module."""
    return self._attention_modules[i]
|
{
"content_hash": "27aeb116b4ef448818ac9a33a331c21b",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 82,
"avg_line_length": 38.93855606758832,
"alnum_prop": 0.6366326087814115,
"repo_name": "deepmind/deepmind-research",
"id": "c1b547420df635b8415f7a1a216f1ec1f7e4d027",
"size": "26094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "object_attention_for_reasoning/transformer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1002"
},
{
"name": "C++",
"bytes": "5765"
},
{
"name": "Jupyter Notebook",
"bytes": "12330730"
},
{
"name": "Lua",
"bytes": "76186"
},
{
"name": "OpenEdge ABL",
"bytes": "15630"
},
{
"name": "PureBasic",
"bytes": "8"
},
{
"name": "Python",
"bytes": "3419119"
},
{
"name": "Racket",
"bytes": "226692"
},
{
"name": "Shell",
"bytes": "84450"
},
{
"name": "Starlark",
"bytes": "3463"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds ``previous_status`` to StatusUpdate (nullable, mirroring the
    # status choice set) and widens the language choices on
    # MailPreferences. Auto-generated; do not edit the operations by hand.
    dependencies = [
        ('notifications', '0008_vacuum_content'),
    ]
    operations = [
        migrations.AddField(
            model_name='statusupdate',
            name='previous_status',
            field=models.PositiveIntegerField(blank=True, choices=[(1, 'Draft'), (2, 'Submitted'), (3, 'Reviewed'), (4, 'Public'), (5, 'Rejected'), (6, 'Inactive')], null=True),
        ),
        migrations.AlterField(
            model_name='mailpreferences',
            name='language',
            field=models.CharField(choices=[('en', 'English'), ('fr', 'French'), ('es', 'Spanish'), ('ru', 'Russian'), ('km', 'Khmer'), ('lo', 'Lao'), ('ar', 'Arabic'), ('pt', 'Portuguese'), ('af', 'Afrikaans'), ('th', 'Thai')], default='en', max_length=2),
        ),
    ]
|
{
"content_hash": "decb8cc5352434883452e8a54d004512",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 257,
"avg_line_length": 38.73913043478261,
"alnum_prop": 0.5589225589225589,
"repo_name": "CDE-UNIBE/qcat",
"id": "7e41466ac5b46c2223e130438554ed7f0809ee67",
"size": "965",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "apps/notifications/migrations/0009_auto_20181011_1340.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1098"
},
{
"name": "HTML",
"bytes": "823938"
},
{
"name": "Handlebars",
"bytes": "224139"
},
{
"name": "JavaScript",
"bytes": "153067"
},
{
"name": "Python",
"bytes": "3515948"
},
{
"name": "SCSS",
"bytes": "165400"
},
{
"name": "Shell",
"bytes": "1943"
}
],
"symlink_target": ""
}
|
"""
Kubeflow Pipelines API
This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
Contact: kubeflow-pipelines@google.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kfp_server_api
from kfp_server_api.models.api_experiment import ApiExperiment # noqa: E501
from kfp_server_api.rest import ApiException
class TestApiExperiment(unittest.TestCase):
    """Unit tests for the generated ApiExperiment model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build an ApiExperiment instance.

        When ``include_optional`` is False only the required parameters are
        set; when True both required and optional parameters are populated.
        """
        # model = kfp_server_api.models.api_experiment.ApiExperiment() # noqa: E501
        if not include_optional:
            return ApiExperiment(
            )
        reference = kfp_server_api.models.api_resource_reference.apiResourceReference(
            key = kfp_server_api.models.api_resource_key.apiResourceKey(
                type = 'UNKNOWN_RESOURCE_TYPE',
                id = '0', ),
            name = '0',
            relationship = 'UNKNOWN_RELATIONSHIP', )
        return ApiExperiment(
            id = '0',
            name = '0',
            description = '0',
            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
            resource_references = [reference],
            storage_state = 'STORAGESTATE_UNSPECIFIED'
        )

    def testApiExperiment(self):
        """Construct ApiExperiment both with and without optional fields."""
        self.make_instance(include_optional=False)
        self.make_instance(include_optional=True)
# Allow running this test module directly: ``python test_api_experiment.py``.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "3bb2f0c2fc5c85969cce38230e59ed31",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 124,
"avg_line_length": 33.225806451612904,
"alnum_prop": 0.5839805825242719,
"repo_name": "kubeflow/pipelines",
"id": "fceb87dc323cfbebcae8f8c3ab6908fc368776a0",
"size": "2077",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/api/v1beta1/python_http_client/test/test_api_experiment.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "799"
},
{
"name": "CSS",
"bytes": "2171"
},
{
"name": "Dockerfile",
"bytes": "49331"
},
{
"name": "Go",
"bytes": "1903937"
},
{
"name": "HTML",
"bytes": "3656"
},
{
"name": "JavaScript",
"bytes": "544297"
},
{
"name": "Jinja",
"bytes": "938"
},
{
"name": "Jupyter Notebook",
"bytes": "359548"
},
{
"name": "Makefile",
"bytes": "22164"
},
{
"name": "Mustache",
"bytes": "23652"
},
{
"name": "PowerShell",
"bytes": "3194"
},
{
"name": "Python",
"bytes": "5684887"
},
{
"name": "Shell",
"bytes": "264595"
},
{
"name": "Smarty",
"bytes": "8295"
},
{
"name": "Starlark",
"bytes": "553"
},
{
"name": "TypeScript",
"bytes": "4294958"
}
],
"symlink_target": ""
}
|
from apps.account import views as account_views
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework import routers
# DRF router is instantiated but currently has no registered viewsets;
# kept so future API endpoints can be attached to it.
router = routers.DefaultRouter(trailing_slash=False)

urlpatterns = [
    # account endpoints
    # BUG FIX: these patterns previously lacked the leading '^' anchor.
    # Django matches URL regexes with re.search, so an unanchored pattern
    # such as r'api/auth$' also matched any path *ending* in "api/auth".
    url(r'^api/auth$', account_views.AuthenticateView.as_view()),
    url(r'^api/me$', account_views.MeView.as_view()),
    url(r'^api/users$', account_views.UsersView.as_view()),
    url(r'^admin/', admin.site.urls),
]

# Enable local images and debug toolbar in dev
if settings.DEBUG:
    import debug_toolbar
    urlpatterns += [url(r'^__debug__/', include(debug_toolbar.urls))]
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{
"content_hash": "28b4402c57f658f1930da197f4e19244",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 80,
"avg_line_length": 32.541666666666664,
"alnum_prop": 0.7323943661971831,
"repo_name": "RonquilloAeon/django-golden-image",
"id": "825407408df741b1d4f236fbe53aa5f836bc406a",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/backend/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "324"
},
{
"name": "Python",
"bytes": "27259"
},
{
"name": "Shell",
"bytes": "128"
}
],
"symlink_target": ""
}
|
import ddt
import mock
from nova import exception
from nova.network import model
from os_win import constants as os_win_const
import compute_hyperv.nova.conf
from compute_hyperv.nova import vif
from compute_hyperv.tests.unit import test_base
# Module-wide configuration handle shared by the test cases below.
CONF = compute_hyperv.nova.conf.CONF
class HyperVNovaNetworkVIFPluginTestCase(test_base.HyperVBaseTestCase):
    """Tests for the nova-network Hyper-V VIF plugin."""

    def setUp(self):
        super(HyperVNovaNetworkVIFPluginTestCase, self).setUp()
        self.vif_driver = vif.HyperVNovaNetworkVIFPlugin()

    def test_plug(self):
        """Plugging a VIF connects its vNIC to the configured vSwitch."""
        self.flags(vswitch_name='fake_vswitch_name', group='hyperv')
        fake_vif = {'id': mock.sentinel.fake_id}

        self.vif_driver.plug(mock.sentinel.instance, fake_vif)

        self.vif_driver._netutils.connect_vnic_to_vswitch.assert_called_once_with(
            'fake_vswitch_name', mock.sentinel.fake_id)
@ddt.ddt
class HyperVVIFDriverTestCase(test_base.HyperVBaseTestCase):
    """Tests for HyperVVIFDriver, which dispatches plug/unplug by VIF type."""
    def setUp(self):
        super(HyperVVIFDriverTestCase, self).setUp()
        self.vif_driver = vif.HyperVVIFDriver()
        # Replace the real plugin with a mock so dispatching can be asserted.
        self.vif_driver._vif_plugin = mock.MagicMock()
        self._netutils = self.vif_driver._netutils
        self._vmutils = self.vif_driver._vmutils
        self._metricsutils = self.vif_driver._metricsutils
    @mock.patch.object(vif.nova.network, 'is_neutron')
    def test_init_neutron(self, mock_is_neutron):
        # With Neutron as the network backend the Neutron plugin is chosen.
        mock_is_neutron.return_value = True
        driver = vif.HyperVVIFDriver()
        self.assertIsInstance(driver._vif_plugin, vif.HyperVNeutronVIFPlugin)
    @mock.patch.object(vif.nova.network, 'is_neutron')
    def test_init_nova(self, mock_is_neutron):
        # Otherwise the legacy nova-network plugin is chosen.
        mock_is_neutron.return_value = False
        driver = vif.HyperVVIFDriver()
        self.assertIsInstance(driver._vif_plugin,
                              vif.HyperVNovaNetworkVIFPlugin)
    def test_plug(self):
        # NOTE: the local name 'vif' shadows the imported module inside this
        # method body; the decorators elsewhere still see the module object.
        vif = {'type': model.VIF_TYPE_HYPERV}
        self.vif_driver.plug(mock.sentinel.instance, vif)
        self.vif_driver._vif_plugin.plug.assert_called_once_with(
            mock.sentinel.instance, vif)
    @mock.patch.object(vif, 'os_vif')
    @mock.patch.object(vif.HyperVVIFDriver, 'enable_metrics')
    @mock.patch.object(vif.os_vif_util, 'nova_to_osvif_instance')
    @mock.patch.object(vif.os_vif_util, 'nova_to_osvif_vif')
    def test_plug_ovs(self, mock_nova_to_osvif_vif,
                      mock_nova_to_osvif_instance,
                      mock_enable_metrics, mock_os_vif):
        # OVS VIFs go through os-vif: the converted port is connected to the
        # configured vSwitch and metrics ACLs are added when enabled.
        self.flags(enable_instance_metrics_collection=True,
                   group='hyperv')
        vif = {'type': model.VIF_TYPE_OVS}
        osvif_instance = mock_nova_to_osvif_instance.return_value
        vif_obj = mock_nova_to_osvif_vif.return_value
        self.vif_driver.plug(mock.sentinel.instance, vif)
        mock_nova_to_osvif_vif.assert_called_once_with(vif)
        mock_nova_to_osvif_instance.assert_called_once_with(
            mock.sentinel.instance)
        connect_vnic = self.vif_driver._netutils.connect_vnic_to_vswitch
        connect_vnic.assert_called_once_with(
            CONF.hyperv.vswitch_name, vif_obj.id)
        mock_os_vif.plug.assert_called_once_with(
            vif_obj, osvif_instance)
        self._netutils.add_metrics_collection_acls.assert_called_once_with(
            vif_obj.id)
        mock_enable_metrics.assert_called_once_with(
            osvif_instance.name, vif_obj.id)
    @ddt.data(True, False)
    def test_enable_metrics(self, vm_running):
        # Port metrics are only enabled when the VM is in the running state.
        state = (os_win_const.HYPERV_VM_STATE_ENABLED if vm_running
                 else os_win_const.HYPERV_VM_STATE_DISABLED)
        self._vmutils.get_vm_state.return_value = state
        enable_metrics = self._metricsutils.enable_port_metrics_collection
        self.vif_driver.enable_metrics(mock.sentinel.instance_name,
                                       mock.sentinel.vif_id)
        self._vmutils.get_vm_state.assert_called_once_with(
            mock.sentinel.instance_name)
        if vm_running:
            enable_metrics.assert_called_once_with(mock.sentinel.vif_id)
        else:
            enable_metrics.assert_not_called()
    def test_plug_type_unknown(self):
        # Unsupported VIF types raise VirtualInterfacePlugException.
        vif = {'type': mock.sentinel.vif_type}
        self.assertRaises(exception.VirtualInterfacePlugException,
                          self.vif_driver.plug,
                          mock.sentinel.instance, vif)
    def test_unplug(self):
        # Hyper-V VIFs are delegated to the configured plugin.
        vif = {'type': model.VIF_TYPE_HYPERV}
        self.vif_driver.unplug(mock.sentinel.instance, vif)
        self.vif_driver._vif_plugin.unplug.assert_called_once_with(
            mock.sentinel.instance, vif)
    @mock.patch.object(vif, 'os_vif')
    @mock.patch.object(vif.os_vif_util, 'nova_to_osvif_instance')
    @mock.patch.object(vif.os_vif_util, 'nova_to_osvif_vif')
    def test_unplug_ovs(self, mock_nova_to_osvif_vif,
                        mock_nova_to_osvif_instance, mock_os_vif):
        # OVS VIFs are unplugged through os-vif using converted objects.
        vif = {'type': model.VIF_TYPE_OVS}
        self.vif_driver.unplug(mock.sentinel.instance, vif)
        mock_nova_to_osvif_vif.assert_called_once_with(vif)
        mock_nova_to_osvif_instance.assert_called_once_with(
            mock.sentinel.instance)
        mock_os_vif.unplug.assert_called_once_with(
            mock_nova_to_osvif_vif.return_value,
            mock_nova_to_osvif_instance.return_value)
    def test_unplug_type_unknown(self):
        # Unsupported VIF types raise VirtualInterfaceUnplugException.
        vif = {'type': mock.sentinel.vif_type}
        self.assertRaises(exception.VirtualInterfaceUnplugException,
                          self.vif_driver.unplug,
                          mock.sentinel.instance, vif)
|
{
"content_hash": "77a2fa3f30df8e17126d7ff65d364219",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 77,
"avg_line_length": 39.15384615384615,
"alnum_prop": 0.6467226290409002,
"repo_name": "stackforge/compute-hyperv",
"id": "385161c405eec26c2d4a2b72825374d09d31e797",
"size": "6240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compute_hyperv/tests/unit/test_vif.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "482444"
}
],
"symlink_target": ""
}
|
"""This component provides basic support for Foscam IP cameras."""
from __future__ import annotations
import asyncio
from libpyfoscam import FoscamCamera
import voluptuous as vol
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import CONF_RTSP_PORT, CONF_STREAM, LOGGER, SERVICE_PTZ, SERVICE_PTZ_PRESET
# Symbolic names for the eight PTZ movement directions accepted by the
# "ptz" entity service.
DIR_UP = "up"
DIR_DOWN = "down"
DIR_LEFT = "left"
DIR_RIGHT = "right"
DIR_TOPLEFT = "top_left"
DIR_TOPRIGHT = "top_right"
DIR_BOTTOMLEFT = "bottom_left"
DIR_BOTTOMRIGHT = "bottom_right"
# Maps each service direction to the FoscamCamera method name invoked via
# getattr in async_perform_ptz.
MOVEMENT_ATTRS = {
    DIR_UP: "ptz_move_up",
    DIR_DOWN: "ptz_move_down",
    DIR_LEFT: "ptz_move_left",
    DIR_RIGHT: "ptz_move_right",
    DIR_TOPLEFT: "ptz_move_top_left",
    DIR_TOPRIGHT: "ptz_move_top_right",
    DIR_BOTTOMLEFT: "ptz_move_bottom_left",
    DIR_BOTTOMRIGHT: "ptz_move_bottom_right",
}
# Default movement duration (seconds, fed to asyncio.sleep) when the service
# call omits ATTR_TRAVELTIME.
DEFAULT_TRAVELTIME = 0.125
# Attribute names accepted by the PTZ service calls.
ATTR_MOVEMENT = "movement"
ATTR_TRAVELTIME = "travel_time"
ATTR_PRESET_NAME = "preset_name"
# FoscamCamera method used to move the camera to a stored preset.
PTZ_GOTO_PRESET_COMMAND = "ptz_goto_preset"
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Add a Foscam IP camera from a config entry."""
    platform = entity_platform.async_get_current_platform()

    # All PTZ movement directions accepted by the "ptz" service.
    movement_choices = [
        DIR_UP,
        DIR_DOWN,
        DIR_LEFT,
        DIR_RIGHT,
        DIR_TOPLEFT,
        DIR_TOPRIGHT,
        DIR_BOTTOMLEFT,
        DIR_BOTTOMRIGHT,
    ]
    ptz_schema = {
        vol.Required(ATTR_MOVEMENT): vol.In(movement_choices),
        vol.Optional(ATTR_TRAVELTIME, default=DEFAULT_TRAVELTIME): cv.small_float,
    }
    platform.async_register_entity_service(SERVICE_PTZ, ptz_schema, "async_perform_ptz")

    preset_schema = {
        vol.Required(ATTR_PRESET_NAME): cv.string,
    }
    platform.async_register_entity_service(
        SERVICE_PTZ_PRESET, preset_schema, "async_perform_ptz_preset"
    )

    camera = FoscamCamera(
        config_entry.data[CONF_HOST],
        config_entry.data[CONF_PORT],
        config_entry.data[CONF_USERNAME],
        config_entry.data[CONF_PASSWORD],
        verbose=False,
    )
    async_add_entities([HassFoscamCamera(camera, config_entry)])
class HassFoscamCamera(Camera):
    """An implementation of a Foscam IP camera."""
    def __init__(self, camera, config_entry):
        """Initialize a Foscam camera from a FoscamCamera session and entry."""
        super().__init__()
        self._foscam_session = camera
        self._name = config_entry.title
        self._username = config_entry.data[CONF_USERNAME]
        self._password = config_entry.data[CONF_PASSWORD]
        self._stream = config_entry.data[CONF_STREAM]
        self._unique_id = config_entry.entry_id
        self._rtsp_port = config_entry.data[CONF_RTSP_PORT]
        # Last known motion-detection state; refreshed in async_added_to_hass.
        self._motion_status = False
    async def async_added_to_hass(self):
        """Handle entity addition to hass."""
        # Get motion detection status
        ret, response = await self.hass.async_add_executor_job(
            self._foscam_session.get_motion_detect_config
        )
        # ret == -3 is treated as "non-admin user" per the branches below.
        if ret == -3:
            LOGGER.info(
                "Can't get motion detection status, camera %s configured with non-admin user",
                self._name,
            )
        elif ret != 0:
            LOGGER.error(
                "Error getting motion detection status of %s: %s", self._name, ret
            )
        else:
            self._motion_status = response == 1
    @property
    def unique_id(self):
        """Return the entity unique ID (the config entry id)."""
        return self._unique_id
    def camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return a still image response from the camera.

        Returns None when the snapshot call reports a non-zero result code.
        """
        # Send the request to snap a picture and return raw jpg data
        # Handle exception if host is not reachable or url failed
        result, response = self._foscam_session.snap_picture_2()
        if result != 0:
            return None
        return response
    @property
    def supported_features(self):
        """Return supported features (streaming only with an RTSP port)."""
        if self._rtsp_port:
            return SUPPORT_STREAM
        return None
    async def stream_source(self):
        """Return the RTSP stream source, or None without an RTSP port."""
        if self._rtsp_port:
            return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
        return None
    @property
    def motion_detection_enabled(self):
        """Camera Motion Detection Status."""
        return self._motion_status
    def enable_motion_detection(self):
        """Enable motion detection in camera."""
        try:
            ret = self._foscam_session.enable_motion_detection()
            if ret != 0:
                if ret == -3:
                    LOGGER.info(
                        "Can't set motion detection status, camera %s configured with non-admin user",
                        self._name,
                    )
                return
            self._motion_status = True
        except TypeError:
            # NOTE(review): presumably the library raises TypeError when the
            # device lacks motion-detection support — confirm vs libpyfoscam.
            LOGGER.debug(
                "Failed enabling motion detection on '%s'. Is it supported by the device?",
                self._name,
            )
    def disable_motion_detection(self):
        """Disable motion detection."""
        try:
            ret = self._foscam_session.disable_motion_detection()
            if ret != 0:
                if ret == -3:
                    LOGGER.info(
                        "Can't set motion detection status, camera %s configured with non-admin user",
                        self._name,
                    )
                return
            self._motion_status = False
        except TypeError:
            LOGGER.debug(
                "Failed disabling motion detection on '%s'. Is it supported by the device?",
                self._name,
            )
    async def async_perform_ptz(self, movement, travel_time):
        """Perform a PTZ action on the camera.

        Starts the movement, waits travel_time seconds, then stops it.
        """
        LOGGER.debug("PTZ action '%s' on %s", movement, self._name)
        movement_function = getattr(self._foscam_session, MOVEMENT_ATTRS[movement])
        ret, _ = await self.hass.async_add_executor_job(movement_function)
        if ret != 0:
            LOGGER.error("Error moving %s '%s': %s", movement, self._name, ret)
            return
        # Let the camera move for the requested time, then stop it.
        await asyncio.sleep(travel_time)
        ret, _ = await self.hass.async_add_executor_job(
            self._foscam_session.ptz_stop_run
        )
        if ret != 0:
            LOGGER.error("Error stopping movement on '%s': %s", self._name, ret)
            return
    async def async_perform_ptz_preset(self, preset_name):
        """Perform a PTZ preset action on the camera."""
        LOGGER.debug("PTZ preset '%s' on %s", preset_name, self._name)
        preset_function = getattr(self._foscam_session, PTZ_GOTO_PRESET_COMMAND)
        ret, _ = await self.hass.async_add_executor_job(preset_function, preset_name)
        if ret != 0:
            LOGGER.error(
                "Error moving to preset %s on '%s': %s", preset_name, self._name, ret
            )
            return
    @property
    def name(self):
        """Return the name of this camera."""
        return self._name
|
{
"content_hash": "89e4606021e477d028f0c9c21eef16df",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 128,
"avg_line_length": 31.161943319838056,
"alnum_prop": 0.5799662206054307,
"repo_name": "GenericStudent/home-assistant",
"id": "eee1e136af060445b2650a63d7c2c4f08c79b0fb",
"size": "7697",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/foscam/camera.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import rules
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import reverse
from django.utils.encoding import force_str
from rest_framework.test import APITestCase
from testapp.models import Book
from testapp import views
class PermissionRequiredDecoratedAPIViewTests(APITestCase):
    """Tests the behavior of the permission decorator used on an APIView.

    Per the fixtures referenced below, ``anton`` holds every required
    (object) permission, ``beatrix`` only part of them, and ``carlos`` none.

    BUG FIX: ``test_user_with_partial_permissions_gets_no_access`` and
    ``test_user_without_permissions_gets_no_access`` were each defined twice
    (once for PATCH, once for POST). Python keeps only the last definition
    of a name inside a class body, so the PATCH variants silently never ran.
    The duplicated methods are renamed below so all scenarios execute.
    """

    def test_user_with_permission_gets_access(self):
        # sanity-check the fixture user exists (raises DoesNotExist otherwise)
        User.objects.get(username='anton')
        self.assertTrue(self.client.login(username='anton', password='secr3t'))
        response = self.client.get(reverse('decorated_view'))
        self.assertEqual(200, response.status_code)

    def test_user_without_permission_gets_no_access(self):
        User.objects.get(username='beatrix')
        self.assertTrue(self.client.login(username='beatrix', password='secr3t'))
        response = self.client.get(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)

    def test_user_with_object_permission_gets_access(self):
        User.objects.get(username='anton')
        self.assertTrue(self.client.login(username='anton', password='secr3t'))
        response = self.client.head(reverse('decorated_view'))
        self.assertEqual(200, response.status_code)

    def test_user_without_object_permission_permission_gets_no_access(self):
        User.objects.get(username='beatrix')
        self.assertTrue(self.client.login(username='beatrix', password='secr3t'))
        response = self.client.head(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)

    def test_user_with_permissions_gets_access(self):
        User.objects.get(username='anton')
        self.assertTrue(self.client.login(username='anton', password='secr3t'))
        response = self.client.patch(reverse('decorated_view'))
        self.assertEqual(200, response.status_code)

    def test_user_with_partial_permissions_gets_no_patch_access(self):
        # renamed from test_user_with_partial_permissions_gets_no_access,
        # which was shadowed by the POST variant of the same name
        User.objects.get(username='beatrix')
        self.assertTrue(self.client.login(username='beatrix', password='secr3t'))
        response = self.client.patch(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)

    def test_user_without_permissions_gets_no_patch_access(self):
        # renamed from test_user_without_permissions_gets_no_access,
        # which was shadowed by the POST variant of the same name
        User.objects.get(username='carlos')
        self.assertTrue(self.client.login(username='carlos', password='secr3t'))
        response = self.client.patch(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)

    def test_user_with_object_permissions_gets_access(self):
        User.objects.get(username='anton')
        self.assertTrue(self.client.login(username='anton', password='secr3t'))
        response = self.client.post(reverse('decorated_view'))
        self.assertEqual(200, response.status_code)

    def test_user_with_partial_permissions_gets_no_post_access(self):
        User.objects.get(username='beatrix')
        self.assertTrue(self.client.login(username='beatrix', password='secr3t'))
        response = self.client.post(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)

    def test_user_without_permissions_gets_no_post_access(self):
        User.objects.get(username='carlos')
        self.assertTrue(self.client.login(username='carlos', password='secr3t'))
        response = self.client.post(reverse('decorated_view'))
        self.assertEqual(403, response.status_code)
|
{
"content_hash": "c7fd24e6284e5818ce20b60b79fe9895",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 81,
"avg_line_length": 42.979166666666664,
"alnum_prop": 0.6970431410567135,
"repo_name": "escodebar/django-rest-framework-rules",
"id": "907a5636d25698e226e6a64bea51212836d0e83e",
"size": "4126",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/testsuite/test_views/test_decorator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46208"
},
{
"name": "Shell",
"bytes": "149"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import unittest
from guardian.conf import settings as guardian_settings
from django.conf import settings
from django.conf import UserSettingsHolder
from django.utils.functional import wraps
def skipUnlessTestApp(obj):
    """Skip the decorated test unless 'guardian.testapp' is installed."""
    required_app = 'guardian.testapp'
    decorator = unittest.skipUnless(
        required_app in settings.INSTALLED_APPS,
        'app %r must be installed to run this test' % required_app)
    return decorator(obj)
class TestDataMixin(object):
    """Mixin that seeds the test database with baseline groups and users."""

    def setUp(self):
        super(TestDataMixin, self).setUp()
        from django.contrib.auth import get_user_model
        from django.contrib.auth.models import Group

        user_model = get_user_model()
        Group.objects.create(pk=1, name='admins')
        group_for_jack = Group.objects.create(pk=2, name='jackGroup')
        user_model.objects.get_or_create(
            username=guardian_settings.ANONYMOUS_USER_NAME)
        jack = user_model.objects.create(username='jack', is_active=True,
                                         is_superuser=False, is_staff=False)
        jack.groups.add(group_for_jack)
class override_settings(object):
    """
    Acts as either a decorator, or a context manager. If it's a decorator it
    takes a function and returns a wrapped function. If it's a contextmanager
    it's used with the ``with`` statement. In either event entering/exiting
    are called before and after, respectively, the function/block is executed.
    """
    def __init__(self, **kwargs):
        # Settings to override, plus the pristine settings object captured
        # now so disable() can restore it later.
        self.options = kwargs
        self.wrapped = settings._wrapped
    def __enter__(self):
        self.enable()
    def __exit__(self, exc_type, exc_value, traceback):
        self.disable()
    def __call__(self, test_func):
        from django.test import TransactionTestCase
        if isinstance(test_func, type) and issubclass(test_func, TransactionTestCase):
            # Decorating a test-case class: wrap the per-test setup/teardown
            # hooks so the overrides are active for each test method.
            original_pre_setup = test_func._pre_setup
            original_post_teardown = test_func._post_teardown
            def _pre_setup(innerself):
                self.enable()
                original_pre_setup(innerself)
            def _post_teardown(innerself):
                original_post_teardown(innerself)
                self.disable()
            test_func._pre_setup = _pre_setup
            test_func._post_teardown = _post_teardown
            return test_func
        else:
            # Decorating a plain function: run it inside this context manager.
            @wraps(test_func)
            def inner(*args, **kwargs):
                with self:
                    return test_func(*args, **kwargs)
            return inner
    def enable(self):
        # Layer the overrides on top of the currently active settings.
        override = UserSettingsHolder(settings._wrapped)
        for key, new_value in self.options.items():
            setattr(override, key, new_value)
        settings._wrapped = override
    def disable(self):
        # Restore the settings object captured at construction time.
        settings._wrapped = self.wrapped
|
{
"content_hash": "7431ba814e4a645548ce4f0014984ae1",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 86,
"avg_line_length": 35.52564102564103,
"alnum_prop": 0.6261277517141826,
"repo_name": "rmgorman/django-guardian",
"id": "3d17951dcc8dc6f4021b8e4b5efa4614e0232619",
"size": "2771",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guardian/testapp/tests/conf.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "498"
},
{
"name": "Python",
"bytes": "220609"
},
{
"name": "Shell",
"bytes": "235"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module before dispatching the
    # management command given on the command line.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bustime.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
{
"content_hash": "aff72bd99b22ffea8c63430593777077",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 71,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.7105263157894737,
"repo_name": "norn/bustime",
"id": "23a757828be3a9393976eb4870e56bf0e1e3738e",
"size": "250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56066"
},
{
"name": "HTML",
"bytes": "158846"
},
{
"name": "JavaScript",
"bytes": "166643"
},
{
"name": "PLpgSQL",
"bytes": "2143"
},
{
"name": "Python",
"bytes": "161770"
},
{
"name": "Shell",
"bytes": "2282"
}
],
"symlink_target": ""
}
|
from typing import List
from RULEngine.Game.OurPlayer import OurPlayer
from RULEngine.Util.Pose import Pose
from RULEngine.Util.Position import Position
from RULEngine.Util.geometry import get_distance, conv_position_2_list, remove_duplicates
from ai.Algorithm.IntelligentModule import Pathfinder
from ai.states.world_state import WorldState
import numpy as np
class Path:
    """Piecewise-linear path from ``start`` to ``goal``.

    Way-points are stored in ``points`` with a parallel list of target
    ``speeds`` (one entry per way-point).
    """

    def __init__(self, start=None, end=None):
        # BUG FIX: the original signature was ``start=Position(),
        # end=Position()``.  Default values are evaluated once at function
        # definition time, so every Path() created without arguments shared
        # the *same* two Position objects (mutable-default pitfall).  Using
        # None sentinels builds fresh instances per call.
        if start is None:
            start = Position()
        if end is None:
            end = Position()
        self.start = start
        self.goal = end
        self.points = [start, end]
        self.speeds = [0, 0]

    def join_segments(self, other):
        """Return a new Path running through ``self`` then ``other``.

        ``other`` is assumed to begin where ``self`` ends, so its first
        way-point is dropped to avoid a duplicate.
        """
        new_path = Path(self.start, other.points[-1])
        new_path.points = self.points + other.points[1:]
        return new_path

    def split_path(self, idx):
        """Split at way-point ``idx`` and return ``(head, tail)``.

        For ``idx < 1`` nothing is split off: the head is an empty default
        path and the tail is ``self`` unchanged (original behavior kept).
        """
        if idx < 1:
            return Path(), self
        path_1 = Path(self.start, self.points[idx])
        path_1.points = self.points[:idx + 1]
        path_2 = Path(self.points[idx], self.goal)
        path_2.points = self.points[idx:]
        return path_1, path_2

    @staticmethod
    def generate_path_from_points(points_list, speed_list=None, threshold=None):
        """Build a Path from an explicit list of positions.

        When ``threshold`` is given and there are at least 3 points and more
        than 2 speeds, the second way-point is dropped *in place* (the input
        lists are mutated) if it lies closer than ``threshold`` to the first.
        """
        if speed_list is None:
            speed_list = [0, 0]
        if (len(points_list) >= 3 and threshold is not None
                and len(speed_list) > 2
                and np.linalg.norm(points_list[0] - points_list[1]) < threshold):
            del points_list[1]
            del speed_list[1]
        # points_list is a list of positions
        new_path = Path(points_list[0], points_list[-1])
        new_path.points = points_list
        new_path.speeds = speed_list
        return new_path

    def get_path_length(self):
        """Return the summed Euclidean length of all segments."""
        length = 0
        for idx, point in enumerate(self.points[:-1]):
            length += np.linalg.norm(point - self.points[idx + 1])
        return length

    def quick_update_path(self, player):
        """Re-anchor the first way-point on the player's current position."""
        self.points[0] = player.pose.position
        return self.generate_path_from_points(self.points, self.speeds, 80)
class PathPartitionner(Pathfinder):
def __init__(self, p_worldstate: WorldState):
super().__init__(p_worldstate)
self.p_worldstate = p_worldstate
self.game_state = self.p_worldstate.game_state
self.path = Path(Position(0, 0), Position(0, 0))
self.raw_path = Path(Position(0, 0), Position(0, 0))
self.res = 100
self.gap_proxy = 200
self.max_recurs = 5
self.players_obstacles = []
self.pose_obstacle = None
self.reshaper = PathReshaper(self.path)
self.cruise_speed = 1
self.player = None
self.closest_obs_speed = np.array([0, 0])
def fastpathplanner(self, path, depth=0, avoid_dir=None):
if self.is_path_collide(path) and depth < self.max_recurs:
sub_target, avoid_dir = self.search_point(path, avoid_dir)
#print(sub_target)
path_1 = Path(path.start, sub_target)
path_1 = self.fastpathplanner(path_1, depth + 1, avoid_dir)
path_2 = Path(sub_target, path.goal)
path_2 = self.fastpathplanner(path_2, depth + 1, avoid_dir)
path = path_1.join_segments(path_2)
return path
def get_path(self, player: OurPlayer, pose_target: Pose=Pose(), cruise_speed: [int, float]=1,
old_path=None, old_raw_path=Path(Position(99999, 99999), Position(99999, -99999))):
self.cruise_speed = cruise_speed
self.player = player
i = 0
self.pose_obstacle = np.zeros((len(self.game_state.my_team.available_players) +
len(self.game_state.other_team.available_players) - 1, 2))
for player in self.game_state.my_team.available_players.values():
if player.id != self.player.id:
self.pose_obstacle[i, :] = player.pose.position
self.players_obstacles.append(player)
i += 1
for player in self.game_state.other_team.available_players.values():
self.pose_obstacle[i, :] = player.pose.position
self.players_obstacles.append(player)
i += 1
# tentative de code pour ne pas recalculer le path a toutes les ittérations (marche un peu mais pas parfait)
#if (old_path is not None) and (np.linalg.norm(self.path.goal - old_raw_path.goal) < 20):
if (old_path is not None) and (not self.is_path_collide(old_raw_path, tolerance=self.gap_proxy-50)) and \
(np.linalg.norm(self.path.goal - old_raw_path.goal) < 20):
old_raw_path.quick_update_path(self.player)
old_path.quick_update_path(self.player)
self.path = old_path
self.raw_path = old_raw_path
self.path = self.remove_redundant_points()
else:
self.path = Path(self.player.pose.position, pose_target.position)
if self.path.get_path_length() < 1:
"""
hack shady pour eviter une erreur shady (trop fatiguer pour dealer ak ste shit la)
File "/home/phil/robocup/StrategyIA/RULEngine/Util/Position.py", line 68, in __eq__
min_abs_tol = min(self.abs_tol, other.position.abs_tol)
AttributeError: 'numpy.ndarray' object has no attribute 'position'
"""
return self.path , self.path
self.closest_obs_speed = self.find_closest_obstacle(self.player.pose.position, self.path)
self.path = self.fastpathplanner(self.path)
self.raw_path = self.path
self.path = self.reshaper.reshape_path(self.path, self.player, self.cruise_speed)
self.path = self.remove_redundant_points()
# print("points", self.path.points)
# print("speeds", self.path.speeds)
return self.path, self.raw_path
def get_raw_path(self, pose_target=Pose()):
# sans path_reshaper
i = 0
self.pose_obstacle = np.zeros((len(self.game_state.my_team.available_players) +
len(self.game_state.other_team.available_players) - 1, 2))
for player in self.game_state.my_team.available_players.values():
if player.id != self.player.id:
self.pose_obstacle[i, :] = player.pose.position
self.players_obstacles.append(player)
i += 1
for player in self.game_state.other_team.available_players.values():
self.pose_obstacle[i, :] = player.pose.position
self.players_obstacles.append(player)
i += 1
self.path = Path(self.player.pose.position, pose_target.position)
return self.fastpathplanner(self.path)
def is_path_collide(self, path, obstacles=None, tolerance=None):
if obstacles is None:
obstacles = self.pose_obstacle
if tolerance is None:
tolerance = self.gap_proxy
for idx, points in enumerate(path.points[:-1]):
pose_start = path.points[idx]
pose_target = path.points[idx + 1]
direction = (pose_target - pose_start)
if np.linalg.norm(direction) < 0.00001:
return False
else:
direction = direction / np.linalg.norm(direction)
distance_sub_path = np.linalg.norm(pose_start - pose_target)
if distance_sub_path > 0.01:
for pose_obs in obstacles:
vec_robot_2_obs = pose_obs - pose_start
dist_from_path = np.linalg.norm(np.cross(direction, vec_robot_2_obs))
projection_obs_on_direction = \
np.dot(direction, vec_robot_2_obs / np.linalg.norm(vec_robot_2_obs))
if projection_obs_on_direction < 0.00001 or projection_obs_on_direction > 1:
#le vecteur entre l'obstacle et la ligne n'est pas perpendiculaire
dist_from_path_temp = np.linalg.norm(pose_start - pose_obs)
if dist_from_path_temp > np.linalg.norm(pose_target - pose_obs):
dist_from_path = np.linalg.norm(pose_target - pose_obs)
else:
dist_from_path = dist_from_path_temp
if tolerance > dist_from_path:
return True
return False
def find_closest_obstacle(self, point, path):
dist_point_obs = np.inf
closest_obs = None
closest_player = self.players_obstacles[0].pose.position
#print(get_distance(path.start, path.goal))
if np.linalg.norm(path.start - path.goal) < 0.001:
return [closest_obs, dist_point_obs, closest_player]
if point == path.start:
return [closest_obs, dist_point_obs, closest_player]
pose_start = path.start
direction = (point - pose_start) / np.linalg.norm(point - pose_start)
for idx, pose_obs in enumerate(self.pose_obstacle):
vec_robot_2_obs_temp = pose_obs - pose_start
dist_from_path_temp = np.linalg.norm(np.cross(direction, vec_robot_2_obs_temp))
if self.gap_proxy > dist_from_path_temp and self.is_path_collide(path, [pose_obs]):
obstacle_pos = Position(pose_obs)
dist = (path.start - obstacle_pos).norm()
if dist < dist_point_obs:
dist_point_obs = dist
closest_obs = obstacle_pos
closest_player = self.players_obstacles[idx]
return [closest_obs, dist_point_obs, closest_player]
def verify_sub_target(self, sub_target):
for pose_obs in self.pose_obstacle:
dist_sub_2_obs = (Position(pose_obs) - sub_target).norm()
if dist_sub_2_obs < self.gap_proxy:
return True
return False
def search_point(self, path, avoid_dir=None):
    """Compute an intermediate sub-target that steers around the obstacle
    closest to *path*.

    Returns [sub_target, avoid_dir] in the normal case.
    NOTE(review): the early exit below returns a bare sub_target instead of
    the [sub_target, avoid_dir] pair — confirm callers handle both shapes.
    """
    pose_robot = path.start
    #print(pose_robot)
    pose_target = path.goal
    pose_obstacle_closest, dist_point_obs, closest_player = self.find_closest_obstacle(pose_target, path)
    if pose_obstacle_closest is None:
        # No blocking obstacle: head straight to the goal.
        sub_target = pose_target
        return sub_target
    # Unit direction from the robot to the target.
    direction = (pose_target - pose_robot) / np.linalg.norm(pose_target - pose_robot)
    vec_robot_2_obs = np.array(conv_position_2_list(pose_obstacle_closest - pose_robot))
    # Scalar projection of the obstacle vector onto the travel direction.
    len_along_path = np.dot(vec_robot_2_obs, direction)
    dist_from_path = np.linalg.norm(np.cross(direction, vec_robot_2_obs))
    projection_obs_on_direction = np.dot(direction, vec_robot_2_obs / np.linalg.norm(vec_robot_2_obs))
    if 0 < len_along_path < (pose_target - pose_robot).norm():
        # Obstacle sits between robot and target: build a perpendicular
        # (in-plane) unit vector to sidestep it.
        vec_perp = np.cross(np.append(direction, [0]), np.array([0, 0, 1]))
        vec_perp = vec_perp[0:2] / np.linalg.norm(vec_perp)
        # print(self.player.velocity)
        cruise_speed = self.player.velocity.position
        self.closest_obs_speed = closest_player.velocity.position
        avoid_dir = -vec_perp
        # NOTE(review): avoid_dir was just assigned above, so this condition
        # looks always False and the else branch appears unreachable — confirm
        # the intended structure.
        if avoid_dir is None:
            avoid_dir = -vec_perp
            # Candidate sub-targets on either side of the obstacle.
            sub_target_1 = np.array(conv_position_2_list(pose_robot)) + \
                direction * len_along_path + vec_perp * self.res
            sub_target_2 = np.array(conv_position_2_list(pose_robot)) + \
                direction * len_along_path - vec_perp * self.res
            bool_sub_target_1 = self.verify_sub_target(Position(sub_target_1[0], sub_target_1[1]))
            bool_sub_target_2 = self.verify_sub_target(Position(sub_target_2[0], sub_target_2[1]))
            # Push each candidate outward until it clears all obstacles.
            while bool_sub_target_1:
                sub_target_1 += vec_perp * self.res
                bool_sub_target_1 = self.verify_sub_target(Position(sub_target_1[0], sub_target_1[1]))
            sub_target_1 += vec_perp * 0.01 * self.res
            while bool_sub_target_2:
                sub_target_2 -= vec_perp * self.res
                bool_sub_target_2 = self.verify_sub_target(Position(sub_target_2[0], sub_target_2[1]))
            sub_target_2 -= vec_perp * 0.01 * self.res
            if np.linalg.norm(cruise_speed) < 0.1:
                sub_target = sub_target_1
            elif np.abs(np.dot(direction, (sub_target_1 - path.start) /
                               np.linalg.norm(sub_target_1 - path.start))) > \
                    np.abs(np.dot(direction, (sub_target_2 - path.start) /
                                  np.linalg.norm(sub_target_2 - path.start))):
                # Prefer the side that deviates least from the travel direction.
                sub_target = sub_target_1
            else:
                sub_target = sub_target_2
        else:
            # if np.dot(avoid_dir, np.transpose(vec_perp)) < 0:
            #     vec_perp = -vec_perp
            # Reuse the avoidance direction from the previous iteration.
            if np.linalg.norm(avoid_dir) > 0.001:
                avoid_dir /= np.linalg.norm(avoid_dir)
            elif np.dot(avoid_dir, np.transpose(vec_perp)) < 0:
                avoid_dir = -vec_perp
            else:
                avoid_dir = vec_perp
            sub_target = np.array(conv_position_2_list(pose_robot)) +\
                direction * len_along_path + vec_perp * self.res
            bool_sub_target = self.verify_sub_target(Position(sub_target[0], sub_target[1]))
            while bool_sub_target:
                sub_target -= avoid_dir * self.res
                bool_sub_target = self.verify_sub_target(Position(sub_target[0], sub_target[1]))
            sub_target -= avoid_dir * 0.01 * self.res
            avoid_dir = vec_perp
        sub_target = Position(sub_target[0], sub_target[1])
    else:
        # Obstacle is behind the robot or beyond the target: go straight.
        sub_target = pose_target
    return [sub_target, avoid_dir]
def get_next_point(self, robot_id=None):
    """Hook for subclasses: produce the next path point for *robot_id*. No-op here."""
    pass
def update(self):
    """Hook for subclasses: refresh internal state. No-op here."""
    pass
def remove_redundant_points(self):
    """Return a new Path built from this path's points with near-duplicates merged."""
    points, speeds = self.path.points, self.path.speeds
    if len(points) > 2:
        # Merge consecutive points closer than the 5-unit threshold.
        points, speeds = remove_duplicates(points, speeds, 5)
    return Path().generate_path_from_points(points, speeds)
class PathReshaper:
    """Smooth a piecewise-linear Path by replacing sharp corners with pairs of
    cut points and per-point target speeds compatible with the robot's
    maximum acceleration."""

    def __init__(self, path: Path):
        self.path = path
        self.dist_from_path = 25  # mm, max allowed deviation from the raw path
        self.player_id = None
        self.player = None
        self.vel_max = None

    def reshape_path(self, path: Path, player: OurPlayer, vel_cruise: float = 1000):
        """Return a new Path for *player* with corner cut points and speeds.

        vel_cruise is in mm/s; it is overridden by the player's ai_command
        cruise_speed (given in m/s, hence the *1000) when set.
        """
        self.path = path
        self.player = player
        cmd = self.player.ai_command
        if cmd.cruise_speed:
            vel_cruise = cmd.cruise_speed * 1000
        # print(vel_cruise)
        self.vel_max = vel_cruise
        # First pass: drop points closer than 10 mm to their successor.
        positions_list = [path.points[0]]
        for idx, point in enumerate(path.points[1:-1]):
            i = idx + 1
            if np.linalg.norm(path.points[i] - path.points[i+1]) < 10:
                continue
            positions_list += [path.points[i]]
        positions_list += [path.points[-1]]
        self.path.points = positions_list
        # Second pass: replace each interior corner with two cut points.
        p1 = self.path.points[0]
        point_list = [p1]
        speed_list = [0]
        for idx, point in enumerate(self.path.points[1:-1]):
            self.dist_from_path = 50  # mm
            i = idx + 1
            p2 = point
            p3 = self.path.points[i+1]
            # Turning radius achievable at cruise speed given max acceleration.
            radius_at_const_speed = vel_cruise ** 2 / (OurPlayer.max_acc * 1000)
            # Corner angle at p2 between segments p2->p3 and p2->p1.
            theta = abs(np.math.atan2(p3[1]-p2[1], p3[0]-p2[0]) - np.math.atan2(p1[1]-p2[1], p1[0]-p2[0]))
            try:
                dist_deviation = (radius_at_const_speed/(np.math.sin(theta/2)))-radius_at_const_speed
            except ZeroDivisionError:
                dist_deviation = 0
            speed = vel_cruise
            radius = radius_at_const_speed
            # Slow down until the arc stays within the allowed deviation.
            while dist_deviation > self.dist_from_path:
                speed *= 0.4
                radius = speed ** 2 / (OurPlayer.max_acc * 1000)
                dist_deviation = (radius / (np.math.sin(theta / 2))) - radius
            # print(radius, radius_at_const_speed)
            if np.linalg.norm(p1-p2) < 0.001 or np.linalg.norm(p2-p3) < 0.001 or np.linalg.norm(p1-p3) < 0.001:
                # Degenerate corner (coincident points): keep the raw point.
                point_list += [p2]
                speed_list += [vel_cruise]
            else:
                # p4/p5: cut points on the incoming/outgoing segments where the
                # smoothing arc is tangent.
                p4 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) *\
                    (p1 - p2) / np.linalg.norm(p1 - p2)
                p5 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) *\
                    (p3 - p2) / np.linalg.norm(p3 - p2)
                if np.linalg.norm(p4-p5) > np.linalg.norm(p3-p1):
                    # Arc would span more than the corner itself: keep raw point.
                    point_list += [p2]
                    speed_list += [vel_cruise]
                elif np.linalg.norm(p1 - p2) < np.linalg.norm(p4 - p2):
                    # Cut point falls before p1: shrink the radius to fit.
                    radius *= np.linalg.norm(p1 - p2) / np.linalg.norm(p4 - p2)
                    dist_deviation = (radius / (np.math.sin(theta / 2))) - radius
                    p4 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) * (p1 - p2) / np.linalg.norm(
                        p1 - p2)
                    p5 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) * (p3 - p2) / np.linalg.norm(
                        p3 - p2)
                    point_list += [p4, p5]
                    speed_list += [speed, speed]
                elif np.linalg.norm(p3 - p2) < np.linalg.norm(p5 - p2):
                    # Cut point falls past p3: shrink the radius to fit.
                    radius *= np.linalg.norm(p3 - p2) / np.linalg.norm(p5 - p2)
                    dist_deviation = (radius / (np.math.sin(theta / 2))) - radius
                    p4 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) * (p1 - p2) / np.linalg.norm(
                        p1 - p2)
                    p5 = p2 + np.sqrt(np.square(dist_deviation + radius) - radius ** 2) * (p3 - p2) / np.linalg.norm(
                        p3 - p2)
                    point_list += [p4, p5]
                    speed_list += [speed, speed]
                else:
                    point_list += [p4, p5]
                    speed_list += [speed, speed]
            p1 = point_list[-1]
        speed_list += [0]
        point_list += [self.path.goal]
        # Final pass: make sure the path is actually feasible for a robot and
        # merge points that ended up too close to each other.
        position_list = [point_list[0]]
        new_speed_list = [speed_list[0]]
        for idx, point in enumerate(point_list[1:-1]):
            i = idx + 1
            if np.linalg.norm(point_list[i] - point_list[i+1]) < 10:
                continue
            # NOTE(review): braking-distance speed clamp is disabled (if False);
            # confirm whether it should be re-enabled.
            if False:
                min_dist = abs(0.5 * (np.square(speed_list[i]) - np.square(speed_list[i + 1])) / (OurPlayer.max_acc * 1000))
                if min_dist > np.linalg.norm(point_list[i] - point_list[i+1]):
                    if speed_list[i] > speed_list[i + 1]:
                        speed_list[i] *= np.linalg.norm(point_list[i] - point_list[i+1]) / min_dist
            position_list += [point_list[i]]
            new_speed_list += [speed_list[i]]
        position_list += [point_list[-1]]
        new_speed_list += [speed_list[-1]]
        return Path().generate_path_from_points(position_list, new_speed_list)
|
{
"content_hash": "0f27cb4e198446e79f71ffdfe3753e48",
"timestamp": "",
"source": "github",
"line_count": 465,
"max_line_length": 124,
"avg_line_length": 46.1247311827957,
"alnum_prop": 0.5413558373741142,
"repo_name": "MaximeGLegault/StrategyIA",
"id": "d1452358fc4949f7b552cfdbdcfb85b6a430ee50",
"size": "21457",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "ai/Algorithm/path_partitionner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "207240"
},
{
"name": "Protocol Buffer",
"bytes": "30229"
},
{
"name": "Python",
"bytes": "1445233"
}
],
"symlink_target": ""
}
|
import unittest
from test import test_support
import string
import StringIO
mimetools = test_support.import_module("mimetools", deprecated=True)
msgtext1 = mimetools.Message(StringIO.StringIO(
"""Content-Type: text/plain; charset=iso-8859-1; format=flowed
Content-Transfer-Encoding: 8bit
Foo!
"""))
class MimeToolsTest(unittest.TestCase):
    """Exercise the (deprecated, Python 2 only) mimetools module."""

    def test_decodeencode(self):
        """encode() followed by decode() must round-trip for every codec."""
        start = string.ascii_letters + "=" + string.digits + "\n"
        for enc in ['7bit','8bit','base64','quoted-printable',
                    'uuencode', 'x-uuencode', 'uue', 'x-uue']:
            i = StringIO.StringIO(start)
            o = StringIO.StringIO()
            mimetools.encode(i, o, enc)
            i = StringIO.StringIO(o.getvalue())
            o = StringIO.StringIO()
            mimetools.decode(i, o, enc)
            self.assertEqual(o.getvalue(), start)

    def test_boundary(self):
        """choose_boundary() must return a fresh boundary on every call."""
        s = set([""])
        for i in xrange(100):
            nb = mimetools.choose_boundary()
            self.assert_(nb not in s)
            s.add(nb)

    def test_message(self):
        """Header accessors of Message must parse msgtext1 correctly."""
        msg = mimetools.Message(StringIO.StringIO(msgtext1))
        self.assertEqual(msg.gettype(), "text/plain")
        self.assertEqual(msg.getmaintype(), "text")
        self.assertEqual(msg.getsubtype(), "plain")
        self.assertEqual(msg.getplist(), ["charset=iso-8859-1", "format=flowed"])
        self.assertEqual(msg.getparamnames(), ["charset", "format"])
        self.assertEqual(msg.getparam("charset"), "iso-8859-1")
        self.assertEqual(msg.getparam("format"), "flowed")
        self.assertEqual(msg.getparam("spam"), None)
        self.assertEqual(msg.getencoding(), "8bit")
def test_main():
    """Entry point used by the regrtest framework."""
    test_support.run_unittest(MimeToolsTest)

if __name__=="__main__":
    test_main()
|
{
"content_hash": "35ee3f8090cf15c3fc631ddf5d99bbda",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 81,
"avg_line_length": 34.58490566037736,
"alnum_prop": 0.5984724495362793,
"repo_name": "babyliynfg/cross",
"id": "f754b78b35458e3b36c142e7d8fd6d42b2ba75e3",
"size": "1833",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/project-creator/Python2.6.6/Lib/test/test_mimetools.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "36722"
},
{
"name": "C",
"bytes": "6345646"
},
{
"name": "C++",
"bytes": "15980000"
},
{
"name": "CMake",
"bytes": "1238"
},
{
"name": "GLSL",
"bytes": "64406"
},
{
"name": "HTML",
"bytes": "147661"
},
{
"name": "Java",
"bytes": "574078"
},
{
"name": "JavaScript",
"bytes": "503327"
},
{
"name": "Makefile",
"bytes": "18778"
},
{
"name": "Objective-C",
"bytes": "396703"
},
{
"name": "Objective-C++",
"bytes": "378740"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Python",
"bytes": "15265548"
},
{
"name": "Roff",
"bytes": "23"
},
{
"name": "Shell",
"bytes": "61021"
},
{
"name": "Visual Basic",
"bytes": "19200"
}
],
"symlink_target": ""
}
|
# Example: authenticate against MediaFire and download two files as one zip.
import os
import pprint

from mediafire import MediaFireApi

APP_ID = '42511'

# Credentials are taken from the environment; KeyError if unset.
MEDIAFIRE_EMAIL = os.environ["MEDIAFIRE_EMAIL"]
MEDIAFIRE_PASSWORD = os.environ["MEDIAFIRE_PASSWORD"]

pp = pprint.PrettyPrinter(indent=2)

api = MediaFireApi()
session = api.user_get_session_token(
    app_id=APP_ID, email=MEDIAFIRE_EMAIL, password=MEDIAFIRE_PASSWORD)
api.session = session

# Request a zip of two files identified by their quick keys.
response = api.file_zip(keys="49v457pmu1wacb1,2c16gp40ad8orca")
pp.pprint(response.headers)

# Stream the body to disk and verify the byte count against Content-Length.
written_bytes = 0
with open("/tmp/mediafire.zip", "wb") as out_fd:
    for line in response.iter_content(chunk_size=4096):
        written_bytes += out_fd.write(line)

assert written_bytes == int(response.headers['Content-Length'])
response.close()
|
{
"content_hash": "f0f5757c088a8d195b9e835d8f916272",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 70,
"avg_line_length": 23.9,
"alnum_prop": 0.7433751743375174,
"repo_name": "roman-yepishev/mediafire-python-open-sdk",
"id": "1faef46e62a23a9df16a86a44d4720aa89610102",
"size": "821",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/api-file-zip.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "122947"
}
],
"symlink_target": ""
}
|
"""Tests of features related to GDAL RFC 49
See https://trac.osgeo.org/gdal/wiki/rfc49_curve_geometries.
"""
import fiona
from .conftest import requires_gdal2
@requires_gdal2
def test_line_curve_conversion(path_curves_line_csv):
    """Curved geometries are converted to linear approximations (GDAL RFC 49)."""
    with fiona.open(path_curves_line_csv) as col:
        # Mixed curve types surface as an 'Unknown' geometry schema.
        assert col.schema['geometry'] == 'Unknown'
        features = list(col)
        assert len(features) == 9
|
{
"content_hash": "aff196a705a8deb6150f0979c7c0ba0b",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 60,
"avg_line_length": 27,
"alnum_prop": 0.7015250544662309,
"repo_name": "rbuffat/Fiona",
"id": "50e6634a905b5b79b71ca34338743b6e3086a5ab",
"size": "459",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/test_curve_geometries.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Python",
"bytes": "536189"
},
{
"name": "Shell",
"bytes": "4951"
}
],
"symlink_target": ""
}
|
"""
Django settings for cheeseshop project.
Generated by 'django-admin startproject' using Django 1.8.14.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from datetime import date

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

SITE_ID = 1

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'hdx64#m+lnc_0ffoyehbk&7gk1&*9uar$pcfcm-%$km#p0$k=6'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'cheeseshop.apps.catalog',
    'cheeseshop.apps.storage',
    'constance',
    'constance.backends.database',
)

MIDDLEWARE = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'cheeseshop.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'cheeseshop.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        #'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        'NAME': '/tmp/cheeseshop.db',
    }
}

# django-constance demo configuration: dynamic settings stored in the
# database backend (CONSTANCE_BACKEND below); the redis connection is the
# alternative backend's config.
CONSTANCE_REDIS_CONNECTION = {
    'host': 'localhost',
    'port': 6379,
    'db': 0,
}

# Custom form fields available to CONSTANCE_CONFIG entries.
CONSTANCE_ADDITIONAL_FIELDS = {
    'yes_no_null_select': [
        'django.forms.fields.ChoiceField',
        {
            'widget': 'django.forms.Select',
            'choices': ((None, "-----"), ("yes", "Yes"), ("no", "No"))
        }
    ],
    'email': ('django.forms.fields.EmailField',),
    'json_field': ['cheeseshop.fields.JsonField']
}

# Each entry: (default value, help text[, field type]).
CONSTANCE_CONFIG = {
    'BANNER': ('The National Cheese Emporium', 'name of the shop'),
    'OWNER': ('Mr. Henry Wensleydale', 'owner of the shop'),
    'OWNER_EMAIL': ('henry@example.com', 'contact email for owner', 'email'),
    'MUSICIANS': (4, 'number of musicians inside the shop'),
    'DATE_ESTABLISHED': (date(1972, 11, 30), "the shop's first opening"),
    'MY_SELECT_KEY': ('yes', 'select yes or no', 'yes_no_null_select'),
    'MULTILINE': ('Line one\nLine two', 'multiline string'),
    'JSON_DATA': (
        {'a': 1_000, 'b': 'test', 'max': 30_000_000},
        'Some test data for json',
        'json_field',
    ),
}

CONSTANCE_BACKEND = 'constance.backends.database.DatabaseBackend'

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': '127.0.0.1:11211',
    }
}

CONSTANCE_DATABASE_CACHE_BACKEND = 'default'


# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'America/Chicago'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/

STATIC_URL = '/static/'
{
"content_hash": "6144cf96a4091ffa4c432d8b4bd899aa",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 77,
"avg_line_length": 26.66887417218543,
"alnum_prop": 0.6471318599453688,
"repo_name": "jezdez/django-constance",
"id": "76308e6dd2433b2eacfc734cd1f1cc8bf7ca2a21",
"size": "4027",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "example/cheeseshop/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3418"
},
{
"name": "Python",
"bytes": "44386"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals

import time

import gym

# default dummy environment execution over 20 episodes

# load the environment
env = gym.make('CartPole-v0')

for i_episode in range(20):
    # reset the environment at the beginning of each episode
    observation = env.reset()
    # up to a 100 steps
    for t in range(100):
        env.render()  # render the environment
        print(observation)
        action = env.action_space.sample()  # sample a random action
        # take action, get back the reward and the observations
        observation, reward, done, info = env.step(action)
        if done:  # the episode is terminated (we 'lost'/'won')
            print("Episode finished after {} timesteps".format(t + 1))
            time.sleep(1)
            break

# Using the following line, gym can record the execution of the environment
# env.monitor.start('/tmp/experiment-name-1')
{
"content_hash": "1a422f27a993c3b48dbc8f08d85dd797",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 75,
"avg_line_length": 33.7037037037037,
"alnum_prop": 0.6648351648351648,
"repo_name": "Hiestaa/RLViz",
"id": "1e1fb49edd8c50ae697e2ba6fd1fbb33013a6c38",
"size": "953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "experiments/gym_test_1.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48689"
},
{
"name": "HTML",
"bytes": "12613"
},
{
"name": "JavaScript",
"bytes": "1690208"
},
{
"name": "Python",
"bytes": "115208"
},
{
"name": "Shell",
"bytes": "34"
}
],
"symlink_target": ""
}
|
import sys
is_3 = sys.version_info >= (3, 0)
if is_3:
import io
else:
import StringIO
try:
import cStringIO
except ImportError:
cStringIO = None
__all__ = ['jsmin', 'JavascriptMinify']
__version__ = '2.0.9'
def jsmin(js):
    """Return a minified version of the JavaScript string *js*."""
    if is_3:
        stream_factory = io.StringIO
    elif cStringIO and not isinstance(js, unicode):
        # Byte strings can use cStringIO for a ~3x performance
        # improvement, but unicode (in Python 2) cannot.
        stream_factory = cStringIO.StringIO
    else:
        stream_factory = StringIO.StringIO
    source = stream_factory(js)
    sink = stream_factory()
    JavascriptMinify(source, sink).minify()
    return sink.getvalue()
class JavascriptMinify(object):
    """
    Minify an input stream of javascript, writing
    to an output stream.

    Implements a single-pass character state machine that strips comments
    and redundant whitespace while preserving string literals and literal
    regular expressions.
    """

    def __init__(self, instream=None, outstream=None):
        self.ins = instream
        self.outs = outstream

    def minify(self, instream=None, outstream=None):
        """Run the minifier, optionally rebinding the input/output streams."""
        if instream and outstream:
            self.ins, self.outs = instream, outstream

        # Track whether the last written token was the keyword 'return';
        # needed to tell a regex literal from a division after 'return'.
        self.is_return = False
        self.return_buf = ''

        def write(char):
            # all of this is to support literal regular expressions.
            # sigh
            if char in 'return':
                self.return_buf += char
                self.is_return = self.return_buf == 'return'
            self.outs.write(char)
            if self.is_return:
                self.return_buf = ''

        read = self.ins.read

        # Characters that require a separating space/newline around them.
        space_strings = "abcdefghijklmnopqrstuvwxyz"\
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$\\"
        starters, enders = '{[(+-', '}])+-"\''
        newlinestart_strings = starters + space_strings
        newlineend_strings = enders + space_strings
        do_newline = False
        do_space = False
        escape_slash_count = 0
        doing_single_comment = False
        previous_before_comment = ''
        doing_multi_comment = False
        in_re = False
        in_quote = ''
        quote_buf = []

        # Prime the two-character lookahead window (previous / next1).
        previous = read(1)
        if previous == '\\':
            escape_slash_count += 1
        next1 = read(1)
        if previous == '/':
            if next1 == '/':
                doing_single_comment = True
            elif next1 == '*':
                doing_multi_comment = True
                previous = next1
                next1 = read(1)
            else:
                write(previous)
        elif not previous:
            return
        elif previous >= '!':
            if previous in "'\"":
                in_quote = previous
            write(previous)
            previous_non_space = previous
        else:
            previous_non_space = ' '
        if not next1:
            return

        # Main loop: examine next1 with next2 as lookahead.
        while 1:
            next2 = read(1)
            if not next2:
                # End of input: flush any pending significant character.
                last = next1.strip()
                if not (doing_single_comment or doing_multi_comment)\
                    and last not in ('', '/'):
                    if in_quote:
                        write(''.join(quote_buf))
                    write(last)
                break

            if doing_multi_comment:
                if next1 == '*' and next2 == '/':
                    doing_multi_comment = False
                    next2 = read(1)
            elif doing_single_comment:
                if next1 in '\r\n':
                    doing_single_comment = False
                    while next2 in '\r\n':
                        next2 = read(1)
                        if not next2:
                            break
                    if previous_before_comment in ')}]':
                        do_newline = True
                    elif previous_before_comment in space_strings:
                        write('\n')
            elif in_quote:
                # Inside a string literal: buffer verbatim until the closing
                # quote (counting backslashes to detect an escaped quote).
                quote_buf.append(next1)

                if next1 == in_quote:
                    numslashes = 0
                    for c in reversed(quote_buf[:-1]):
                        if c != '\\':
                            break
                        else:
                            numslashes += 1
                    if numslashes % 2 == 0:
                        in_quote = ''
                        write(''.join(quote_buf))
            elif next1 in '\r\n':
                # Decide whether this newline is syntactically significant.
                if previous_non_space in newlineend_strings \
                    or previous_non_space > '~':
                    while 1:
                        if next2 < '!':
                            next2 = read(1)
                            if not next2:
                                break
                        else:
                            if next2 in newlinestart_strings \
                                or next2 > '~' or next2 == '/':
                                do_newline = True
                            break
            elif next1 < '!' and not in_re:
                # Whitespace: keep a single space only where required.
                if (previous_non_space in space_strings \
                    or previous_non_space > '~') \
                    and (next2 in space_strings or next2 > '~'):
                    do_space = True
                elif previous_non_space in '-+' and next2 == previous_non_space:
                    # protect against + ++ or - -- sequences
                    do_space = True
                elif self.is_return and next2 == '/':
                    # returning a regex...
                    write(' ')
            elif next1 == '/':
                if do_space:
                    write(' ')
                if in_re:
                    if previous != '\\' or (not escape_slash_count % 2) or next2 in 'gimy':
                        in_re = False
                    write('/')
                elif next2 == '/':
                    doing_single_comment = True
                    previous_before_comment = previous_non_space
                elif next2 == '*':
                    doing_multi_comment = True
                    previous = next1
                    next1 = next2
                    next2 = read(1)
                else:
                    in_re = previous_non_space in '(,=:[?!&|' or self.is_return  # literal regular expression
                    write('/')
            else:
                if do_space:
                    do_space = False
                    write(' ')
                if do_newline:
                    write('\n')
                    do_newline = False

                write(next1)
                if not in_re and next1 in "'\"":
                    in_quote = next1
                    quote_buf = []

            previous = next1
            next1 = next2

            if previous >= '!':
                previous_non_space = previous

            if previous == '\\':
                escape_slash_count += 1
            else:
                escape_slash_count = 0
|
{
"content_hash": "49823084b026bc8c45152260abe9e457",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 109,
"avg_line_length": 33.45893719806763,
"alnum_prop": 0.42030031764366155,
"repo_name": "diderson/couchapp",
"id": "9ba31aaac56e4d0951cca1500234fcdf0230c800",
"size": "8204",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "couchapp/hooks/compress/jsmin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "262"
},
{
"name": "CSS",
"bytes": "887"
},
{
"name": "HTML",
"bytes": "4849"
},
{
"name": "Inno Setup",
"bytes": "1834"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Python",
"bytes": "145970"
},
{
"name": "Shell",
"bytes": "2716"
}
],
"symlink_target": ""
}
|
from ycm import vimsupport
from ycmd.responses import UnknownExtraConf
from ycm.client.base_request import ( BaseRequest, BuildRequestData,
JsonFromFuture )
class EventNotification( BaseRequest ):
  """Async request that notifies the ycmd server of an editor event."""

  def __init__( self, event_name, extra_data = None ):
    super( EventNotification, self ).__init__()
    self._event_name = event_name
    self._extra_data = extra_data
    self._cached_response = None


  def Start( self ):
    """Fire the notification to the 'event_notification' handler (async)."""
    request_data = BuildRequestData()
    if self._extra_data:
      request_data.update( self._extra_data )
    request_data[ 'event_name' ] = self._event_name
    self._response_future = self.PostDataToHandlerAsync( request_data,
                                                         'event_notification' )


  def Done( self ):
    """True once the server has answered. Requires Start() to have been called."""
    return self._response_future.done()


  def Response( self ):
    """Return the (cached) server response.

    Only FileReadyToParse events produce a meaningful response; other events
    (and any failure) yield []. An UnknownExtraConf reply prompts the user to
    load or ignore the offending .ycm_extra_conf.py file.
    """
    if self._cached_response:
      return self._cached_response

    if not self._response_future or self._event_name != 'FileReadyToParse':
      return []

    try:
      try:
        self._cached_response = JsonFromFuture( self._response_future )
      except UnknownExtraConf as e:
        if vimsupport.Confirm( str( e ) ):
          _LoadExtraConfFile( e.extra_conf_file )
        else:
          _IgnoreExtraConfFile( e.extra_conf_file )
    except Exception as e:
      # Surface any server-side error to the user instead of raising.
      vimsupport.PostVimMessage( str( e ) )

    return self._cached_response if self._cached_response else []
def SendEventNotificationAsync( event_name, extra_data = None ):
  """Fire-and-forget helper: post an event notification without waiting."""
  EventNotification( event_name, extra_data ).Start()
def _LoadExtraConfFile( filepath ):
  """Tell the ycmd server to trust and load the given .ycm_extra_conf.py."""
  request_data = { 'filepath': filepath }
  BaseRequest.PostDataToHandler( request_data, 'load_extra_conf_file' )
def _IgnoreExtraConfFile( filepath ):
  """Tell the ycmd server to permanently ignore the given .ycm_extra_conf.py."""
  request_data = { 'filepath': filepath }
  BaseRequest.PostDataToHandler( request_data, 'ignore_extra_conf_file' )
|
{
"content_hash": "c2adc60c59cb3d4359a199c60dee63f8",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 79,
"avg_line_length": 31.75,
"alnum_prop": 0.6251968503937008,
"repo_name": "korbenzhang/vim-ycm-win",
"id": "a5978ad0529703cc5b5b3de018e62fcefac05d2b",
"size": "2643",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "python/ycm/client/event_notification.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "93345"
},
{
"name": "VimL",
"bytes": "30287"
}
],
"symlink_target": ""
}
|
import typing
from urllib.parse import ParseResult, urlparse
from jinja2 import Undefined, contextfunction, escape
from starlette.templating import Jinja2Templates
def render_ql_nickname(nickname):
    """Convert Quake-style ^0..^7 color codes in *nickname* to HTML spans."""
    html = str(escape(nickname))
    for color in range(8):
        token = "^" + str(color)
        # Each color code closes the previous span and opens a new one.
        html = html.replace(token, '</span><span class="qc' + str(color) + '">')
    return '<span class="qc7">' + html + "</span>"
def seconds_to_mmss(value):
    """Format a number of seconds as zero-padded MM:SS."""
    total_seconds = int(escape(value))
    minutes, remaining = divmod(total_seconds, 60)
    return "%02d:%02d" % (minutes, remaining)
class Templates(Jinja2Templates):
    """Jinja2Templates subclass wired with the app's custom filters and a
    url_for() that yields relative (path-only) URLs."""

    def __init__(self, directory: str) -> None:
        @contextfunction
        def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
            request = context["request"]
            # Drop params Jinja left undefined or that are None.
            path_params = {
                k: v
                for k, v in path_params.items()
                if not isinstance(v, Undefined) and v is not None
            }
            # NOTE: take this stupid hack away, when url_for returns relative path
            absolute_url = request.url_for(name, **path_params)
            parsed_absolute_url = urlparse(absolute_url)
            # Strip scheme and netloc, keeping path/query/fragment only.
            return ParseResult("", "", *parsed_absolute_url[2:]).geturl()

        super().__init__(directory)
        self.env.filters["ql_nickname"] = render_ql_nickname
        self.env.filters["seconds_to_mmss"] = seconds_to_mmss
        self.env.globals["url_for"] = url_for
# Module-level singleton used by the request handlers.
templates = Templates(directory="templates")
|
{
"content_hash": "4385b7e05ee006e705b789c121eaa188",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 82,
"avg_line_length": 33.97727272727273,
"alnum_prop": 0.6006688963210702,
"repo_name": "em92/pickup-rating",
"id": "1faee861c99bd7b77746d1cd225749693e680538",
"size": "1495",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "qllr/templating.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1311"
},
{
"name": "HTML",
"bytes": "1106"
},
{
"name": "JavaScript",
"bytes": "22578"
},
{
"name": "Python",
"bytes": "6412"
}
],
"symlink_target": ""
}
|
'''Web-based file differ.
For usage, see README.md.
'''
from __future__ import print_function
from binaryornot.check import is_binary
import logging
import mimetypes
import os
import platform
import requests
import socket
import sys
from threading import Timer
import time
import webbrowser
from flask import (Flask, render_template, send_from_directory, send_file,
request, jsonify, Response)
from webdiff import diff
from webdiff import util
from webdiff import argparser
VERSION = '0.14.0'
def determine_path():
    """Return the absolute directory containing this module.

    Resolves a symlinked module file so data files are located next to the
    real file. Borrowed from wxglade.py.
    """
    try:
        root = __file__
        if os.path.islink(root):
            root = os.path.realpath(root)
        return os.path.dirname(os.path.abspath(root))
    except NameError:
        # __file__ is undefined (e.g. frozen or interactive execution).
        # A bare `except:` here used to swallow SystemExit/KeyboardInterrupt
        # as well; NameError is the only failure this message describes.
        print("I'm sorry, but something is wrong.")
        print("There is no __file__ variable. Please contact the author.")
        sys.exit()
def is_hot_reload():
    """In debug mode, Werkzeug reloads the app on any changes.

    Returns the value of WERKZEUG_RUN_MAIN (set by the reloader child
    process), or None when not running under the reloader.
    """
    return os.getenv('WERKZEUG_RUN_MAIN')
class Config:
    """Default (empty) Flask configuration; overridden via WEBDIFF_CONFIG."""
    pass
    #TESTING=True # not exactly sure what this does...
app = Flask(__name__)
app.config.from_object(Config)
app.config.from_envvar('WEBDIFF_CONFIG', silent=True)

# Module-level state shared by the request handlers below.
DIFF = None           # list of file-pair diff records, set by the launcher
PORT = None           # port the server listens on
HOSTNAME = 'localhost'

if app.config['TESTING'] or app.config['DEBUG']:
    # Verbose logging for development: mirror everything to stderr.
    handler = logging.StreamHandler()
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    app.logger.addHandler(handler)
    for logname in ['']:
        log = logging.getLogger(logname)
        log.setLevel(logging.DEBUG)
        log.addHandler(handler)
    logging.getLogger('github').setLevel(logging.ERROR)
else:
    # quiet down werkzeug -- no need to log every request.
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

# Timestamp (ms) of the most recent request; used by /kill to decide
# whether the user is still interacting with the UI.
LAST_REQUEST_MS = 0
@app.before_request
def update_last_request_ms():
    """Record the wall-clock time (ms) of every incoming request."""
    global LAST_REQUEST_MS
    LAST_REQUEST_MS = time.time() * 1000
def error(code, message):
    """Build a 400 JSON error response carrying *code* and *message*."""
    response = jsonify({"code": code, "message": message})
    response.status_code = 400
    return response
@app.route("/<side>/get_contents", methods=['POST'])
def get_contents(side):
    """Return the plain-text contents of the file at 'path' on the given side.

    Binary files are summarized ("Binary file (N bytes)") rather than dumped.
    """
    if side not in ('a', 'b'):
        return error('invalid side', 'Side must be "a" or "b", got %s' % side)

    # TODO: switch to index? might be simpler
    path = request.form.get('path', '')
    if not path:
        return error('incomplete', 'Incomplete request (need path)')

    idx = diff.find_diff_index(DIFF, side, path)
    if idx is None:
        return error('not found', 'Invalid path on side %s: %s' % (side, path))

    d = DIFF[idx]
    abs_path = d.a_path if side == 'a' else d.b_path

    try:
        if is_binary(abs_path):
            size = os.path.getsize(abs_path)
            contents = "Binary file (%d bytes)" % size
        else:
            # NOTE(review): file handle relies on GC for closing; consider
            # `with open(...)`.
            contents = open(abs_path).read()
        return Response(contents, mimetype='text/plain')
    except Exception:
        return error('read-error', 'Unable to read %s' % abs_path)
@app.route("/<side>/image/<path:path>")
def get_image(side, path):
    """Serve the image file at *path* on the given side with its guessed MIME type."""
    if side not in ('a', 'b'):
        return error('invalid side', 'Side must be "a" or "b", got %s' % side)

    # TODO: switch to index? might be simpler
    if not path:
        return error('incomplete', 'Incomplete request (need path)')

    # Only serve files that look like (unencoded) images.
    mime_type, enc = mimetypes.guess_type(path)
    if not mime_type or not mime_type.startswith('image/') or enc is not None:
        return error('wrongtype', 'Requested file of type (%s, %s) as image' % (
            mime_type, enc))

    idx = diff.find_diff_index(DIFF, side, path)
    if idx is None:
        return error('not found', 'Invalid path on side %s: %s' % (side, path))
    d = DIFF[idx]
    abs_path = d.a_path if side == 'a' else d.b_path

    try:
        contents = open(abs_path, mode='rb').read()
        return Response(contents, mimetype=mime_type)
    except Exception:
        return error('read-error', 'Unable to read %s' % abs_path)
@app.route("/pdiff/<int:idx>")
def get_pdiff(idx):
    """Serve the dilated perceptual-diff image for diff entry `idx`."""
    pair = DIFF[int(idx)]
    try:
        _, pdiff_image = util.generate_pdiff_image(pair.a_path, pair.b_path)
        dilated = util.generate_dilated_pdiff_image(pdiff_image)
    except util.ImageMagickNotAvailableError:
        return 'ImageMagick is not available', 501
    except util.ImageMagickError as e:
        return 'ImageMagick error %s' % e, 501
    return send_file(dilated)
@app.route("/pdiffbbox/<int:idx>")
def get_pdiff_bbox(idx):
    """Return, as JSON, the bounding box of the perceptual diff for `idx`."""
    pair = DIFF[int(idx)]
    try:
        _, pdiff_image = util.generate_pdiff_image(pair.a_path, pair.b_path)
        bbox = util.get_pdiff_bbox(pdiff_image)
    except util.ImageMagickNotAvailableError:
        return 'ImageMagick is not available', 501
    except util.ImageMagickError as e:
        return 'ImageMagick error %s' % e, 501
    return jsonify(bbox)
# Show the first diff by default
@app.route("/")
def index():
    """Render the diff view for the first file pair."""
    return file_diff('0')
@app.route("/<int:idx>")
def file_diff(idx):
    """Render the single-file diff page for entry `idx`."""
    pairs = diff.get_thin_list(DIFF)
    return render_template(
        'file_diff.html',
        idx=int(idx),
        has_magick=util.is_imagemagick_available(),
        pairs=pairs)
@app.route('/thick/<int:idx>')
def thick_diff(idx):
    """Return the full ("thick") metadata for diff entry `idx` as JSON."""
    return jsonify(diff.get_thick_dict(DIFF[int(idx)]))
@app.route('/favicon.ico')
def favicon():
    """Serve the favicon from the bundled static image directory."""
    img_dir = os.path.join(app.root_path, 'static/img')
    return send_from_directory(
        img_dir, 'favicon.ico', mimetype='image/vnd.microsoft.icon')
@app.route('/seriouslykill', methods=['POST'])
def seriouslykill():
    """Immediately shut down the werkzeug development server."""
    shutdown_func = request.environ.get('werkzeug.server.shutdown')
    if shutdown_func is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    shutdown_func()
    return "Shutting down..."
@app.route('/kill', methods=['POST'])
def kill():
    """Schedule a shutdown unless another request arrives within 0.5s."""
    global PORT
    if 'STAY_RUNNING' in app.config:
        return 'Will stay running.'
    last_ms = LAST_REQUEST_MS

    def shutdown():
        # Any request after /kill bumps LAST_REQUEST_MS and aborts this.
        if LAST_REQUEST_MS <= last_ms:
            requests.post('http://%s:%d/seriouslykill' % (HOSTNAME, PORT))

    Timer(0.5, shutdown).start()
    return 'Shutting down...'
def open_browser():
    """Open a browser tab at the server URL unless disabled by config."""
    global PORT
    global HOSTNAME
    if 'NO_OPEN_BROWSER' in app.config:
        return
    if is_hot_reload():
        log.debug('Skipping browser open on reload')
        return
    webbrowser.open_new_tab('http://%s:%s' % (HOSTNAME, PORT))
def usage_and_die():
    """Print the usage string to stderr and exit with a nonzero status."""
    sys.stderr.write(argparser.USAGE)
    sys.exit(1)
def pick_a_port(args):
    """Return the TCP port to serve on.

    Priority: an explicit port argument, then the WEBDIFF_PORT environment
    variable, then an ephemeral port assigned by the OS.
    """
    # BUG FIX: the original `if 'port' in args != -1` was a chained
    # comparison -- ('port' in args) and (args != -1) -- so the -1 "unset"
    # sentinel was returned as an actual port. Compare the value instead.
    if args.get('port', -1) != -1:
        return args['port']
    if os.environ.get('WEBDIFF_PORT'):
        return int(os.environ.get('WEBDIFF_PORT'))
    # Bind port 0 so the kernel picks a free ephemeral port for us.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('localhost', 0))
    port = sock.getsockname()[1]
    sock.close()
    return port
def abs_path_from_rel(path):
    '''Changes relative paths to be abs w/r/t/ the original cwd.'''
    return path if os.path.isabs(path) else os.path.join(os.getcwd(), path)
def is_webdiff_from_head():
    '''Was webdiff invoked as `git webdiff` with no other non-flag args?'''
    return 'WEBDIFF_FROM_HEAD' in os.environ
def run():
    """Entry point: parse argv, compute the diff, and serve it over HTTP.

    Side effects: populates the module globals DIFF, PORT and (optionally)
    HOSTNAME, writes status to stderr, schedules a browser open, and blocks
    in app.run() until the server shuts down.
    """
    global DIFF, PORT, HOSTNAME
    try:
        parsed_args = argparser.parse(sys.argv[1:], VERSION)
    except argparser.UsageError as e:
        sys.stderr.write('Error: %s\n\n' % e)
        usage_and_die()
    DIFF = argparser.diff_for_args(parsed_args)
    if app.config['TESTING'] or app.config['DEBUG']:
        sys.stderr.write('Diff:\n%s' % DIFF)
    PORT = pick_a_port(parsed_args)
    if app.config.get('USE_HOSTNAME'):
        _hostname = platform.node()
        # platform.node will return empty string if it can't find the hostname
        if not _hostname:
            sys.stderr.write('Warning: hostname could not be determined')
        else:
            HOSTNAME = _hostname
    sys.stderr.write('''Serving diffs on http://%s:%s
Close the browser tab or hit Ctrl-C when you're done.
''' % (HOSTNAME, PORT))
    # Open the browser shortly after app.run() has had a chance to bind.
    Timer(0.1, open_browser).start()
    app.run(host=HOSTNAME, port=PORT)
if __name__ == '__main__':
    run()
|
{
"content_hash": "17b66866f591310425d7c89fb573a091",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 89,
"avg_line_length": 27.86092715231788,
"alnum_prop": 0.620513429997623,
"repo_name": "daytonb/webdiff",
"id": "2bf0f44ea27c4722a01da8b02b13f52546f5c402",
"size": "8436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webdiff/app.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8339"
},
{
"name": "HTML",
"bytes": "15052"
},
{
"name": "JavaScript",
"bytes": "136276"
},
{
"name": "Python",
"bytes": "34855"
},
{
"name": "Shell",
"bytes": "364"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial wagtailredirects schema: creates the Redirect model."""
    dependencies = [
        ('wagtailcore', '0002_initial_data'),
    ]
    operations = [
        migrations.CreateModel(
            name='Redirect',
            fields=[
                ('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
                # Unique + indexed: each source path can redirect only once
                # and is looked up on every 404.
                ('old_path', models.CharField(
                    verbose_name='Redirect from', max_length=255, unique=True, db_index=True
                )),
                ('is_permanent', models.BooleanField(
                    verbose_name='Permanent', default=True, help_text="""Recommended. Permanent redirects \
ensure search engines forget the old page (the 'Redirect from') and index the new page instead."""
                )),
                # Exactly one of redirect_link / redirect_page is expected to
                # be used; both are optional at the schema level.
                ('redirect_link', models.URLField(blank=True, verbose_name='Redirect to any URL')),
                ('redirect_page', models.ForeignKey(
                    blank=True, null=True, verbose_name='Redirect to a page', to='wagtailcore.Page'
                )),
                ('site', models.ForeignKey(
                    blank=True, to='wagtailcore.Site', editable=False, null=True, related_name='redirects'
                )),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
{
"content_hash": "0e2893fbcbdd31d4dd585fca1191f991",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 118,
"avg_line_length": 39.52777777777778,
"alnum_prop": 0.5397048489107519,
"repo_name": "inonit/wagtail",
"id": "d3ab5d3d2cb722c561c80102cacf928961cbe57b",
"size": "1447",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wagtail/wagtailredirects/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "152389"
},
{
"name": "HTML",
"bytes": "253521"
},
{
"name": "JavaScript",
"bytes": "96567"
},
{
"name": "Makefile",
"bytes": "548"
},
{
"name": "Python",
"bytes": "1843138"
},
{
"name": "Shell",
"bytes": "7388"
}
],
"symlink_target": ""
}
|
from collections import namedtuple
from .common import get_debian_package_name
# Lightweight immutable (name, email) record for a package maintainer.
MaintainerDescriptor = namedtuple('Maintainer', 'name email')
class RosPackage(object):
    """Mutable record describing one released ROS package.

    Only `name` is set at construction; the remaining slots are filled in
    by get_rosdistro_info().
    """

    # Fixed attribute set: saves memory across many instances and catches
    # typo'd attribute assignments.
    __slots__ = [
        'name',
        'debian_name',
        'version',
        'url',
        'repository_name',
        'repository_url',
        'status',
        'status_description',
        'maintainers',
    ]

    def __init__(self, name):
        self.name = name
def get_rosdistro_info(dist, build_file):
    """Build a dict of package name -> RosPackage for released packages.

    `dist` is a rosdistro distribution object; `build_file` filters which
    release packages are included. Packages whose release repository has no
    version are skipped.
    """
    all_pkg_names = dist.release_packages.keys()
    pkg_names = build_file.filter_packages(all_pkg_names)
    packages = {}
    for pkg_name in pkg_names:
        # package name
        ros_pkg = RosPackage(pkg_name)
        ros_pkg.debian_name = get_debian_package_name(dist.name, pkg_name)
        pkg = dist.release_packages[pkg_name]
        repo = dist.repositories[pkg.repository_name].release_repository
        # package version
        if not repo.version:
            continue
        ros_pkg.version = repo.version
        # repository name and url
        ros_pkg.repository_name = pkg.repository_name
        repo_url = repo.url
        # Prefer the source repo's URL, then the doc repo's, over the
        # release repo's; for GitHub repos, deep-link to the branch/tag.
        other_repos = [
            dist.repositories[pkg.repository_name].source_repository,
            dist.repositories[pkg.repository_name].doc_repository]
        for other_repo in other_repos:
            if other_repo:
                repo_url = other_repo.url
                if repo_url.startswith('https://github.com/') and \
                        repo_url.endswith('.git'):
                    if other_repo.version:
                        repo_url = '%s/tree/%s' % \
                            (repo_url[:-4], other_repo.version)
                break
        ros_pkg.repository_url = repo_url
        # package status and description
        ros_pkg.status = 'unknown'
        ros_pkg.status_description = ''
        if dist.repositories[pkg.repository_name].status:
            ros_pkg.status = dist.repositories[pkg.repository_name].status
        if dist.repositories[pkg.repository_name].status_description:
            ros_pkg.status_description = \
                dist.repositories[pkg.repository_name].status_description
        # maintainers and package url from manifest
        ros_pkg.maintainers = []
        ros_pkg.url = None
        pkg_xml = dist.get_release_package_xml(pkg_name)
        if pkg_xml is not None:
            from catkin_pkg.package import InvalidPackage, parse_package_string
            try:
                pkg_manifest = parse_package_string(pkg_xml)
                for m in pkg_manifest.maintainers:
                    ros_pkg.maintainers.append(
                        MaintainerDescriptor(m.name, m.email))
                # NOTE(review): item access (pkg_manifest['urls']) relies on
                # catkin_pkg Package supporting __getitem__; attribute access
                # (.urls) is the documented API -- confirm before refactoring.
                for u in pkg_manifest['urls']:
                    if u.type == 'website':
                        ros_pkg.url = u.url
                        break
            except InvalidPackage:
                pass
        packages[pkg_name] = ros_pkg
    return packages
|
{
"content_hash": "e652ceae2b44d55335243a6f2088b6e2",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 79,
"avg_line_length": 33.95454545454545,
"alnum_prop": 0.5642570281124498,
"repo_name": "130s/ros_buildfarm",
"id": "10e3d10e6ae93d460189ca8e260d43a454104c11",
"size": "2988",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ros_buildfarm/status_page_input.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4148"
},
{
"name": "EmberScript",
"bytes": "175039"
},
{
"name": "JavaScript",
"bytes": "10890"
},
{
"name": "Python",
"bytes": "347184"
},
{
"name": "Shell",
"bytes": "5932"
}
],
"symlink_target": ""
}
|
import django_filters
from django.contrib.auth.models import User, Group
from rest_framework import viewsets, mixins
from rest_framework.response import Response
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from api.pagination import LargeResultsSetPagination
from api.permissions import IsUser
from api.serializers import FinalGoalSerializer
from api.models import FinalGoal
class FinalGoalFilter(django_filters.FilterSet):
    """Declarative filter set exposing exact-match query params for FinalGoal."""
    class Meta:
        model = FinalGoal
        fields = ['id', 'user', 'created', 'is_locked', 'unlocks', 'is_closed', 'was_accomplished', 'earned_xp', 'amount', 'for_want', 'for_other_want',]
class FinalGoalViewSet(viewsets.ModelViewSet):
    """CRUD API for FinalGoal: token-authenticated, owner-restricted,
    filterable via FinalGoalFilter."""
    queryset = FinalGoal.objects.all()
    serializer_class = FinalGoalSerializer
    pagination_class = LargeResultsSetPagination
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsUser,)
    filter_class = FinalGoalFilter
|
{
"content_hash": "29e73ad8b804c9d53ad18cd84e0e75c9",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 153,
"avg_line_length": 39.08,
"alnum_prop": 0.7758444216990789,
"repo_name": "Oinweb/py-fly",
"id": "2f0c64dc86a7f4f74d4f66c8d55730fbae7ac25d",
"size": "977",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "api/views/final_goal.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "187803"
},
{
"name": "HTML",
"bytes": "331298"
},
{
"name": "JavaScript",
"bytes": "136579"
},
{
"name": "Python",
"bytes": "214171"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: ec2_vpc_nacl
short_description: create and delete Network ACLs.
description:
- Read the AWS documentation for Network ACLS
U(http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_ACLs.html)
version_added: "2.2"
options:
name:
description:
- Tagged name identifying a network ACL.
- One and only one of the I(name) or I(nacl_id) is required.
required: false
nacl_id:
description:
- NACL id identifying a network ACL.
- One and only one of the I(name) or I(nacl_id) is required.
required: false
version_added: "2.4"
vpc_id:
description:
- VPC id of the requesting VPC.
- Required when state present.
required: false
subnets:
description:
- The list of subnets that should be associated with the network ACL.
- Must be specified as a list
- Each subnet can be specified as subnet ID, or its tagged name.
required: false
egress:
description:
- A list of rules for outgoing traffic.
- Each rule must be specified as a list.
required: false
ingress:
description:
- List of rules for incoming traffic.
- Each rule must be specified as a list.
required: false
tags:
description:
- Dictionary of tags to look for and apply when creating a network ACL.
required: false
state:
description:
- Creates or modifies an existing NACL
- Deletes a NACL and reassociates subnets to the default NACL
required: false
choices: ['present', 'absent']
default: present
author: Mike Mochan(@mmochan)
extends_documentation_fragment: aws
requirements: [ botocore, boto3, json ]
'''
EXAMPLES = '''
# Complete example to create and delete a network ACL
# that allows SSH, HTTP and ICMP in, and all traffic out.
- name: "Create and associate production DMZ network ACL with DMZ subnets"
ec2_vpc_nacl:
vpc_id: vpc-12345678
name: prod-dmz-nacl
region: ap-southeast-2
subnets: ['prod-dmz-1', 'prod-dmz-2']
tags:
CostCode: CC1234
Project: phoenix
Description: production DMZ
ingress: [
# rule no, protocol, allow/deny, cidr, icmp_code, icmp_type,
# port from, port to
[100, 'tcp', 'allow', '0.0.0.0/0', null, null, 22, 22],
[200, 'tcp', 'allow', '0.0.0.0/0', null, null, 80, 80],
[300, 'icmp', 'allow', '0.0.0.0/0', 0, 8],
]
egress: [
[100, 'all', 'allow', '0.0.0.0/0', null, null, null, null]
]
state: 'present'
- name: "Remove the ingress and egress rules - defaults to deny all"
ec2_vpc_nacl:
vpc_id: vpc-12345678
name: prod-dmz-nacl
region: ap-southeast-2
subnets:
- prod-dmz-1
- prod-dmz-2
tags:
CostCode: CC1234
Project: phoenix
Description: production DMZ
state: present
- name: "Remove the NACL subnet associations and tags"
ec2_vpc_nacl:
vpc_id: 'vpc-12345678'
name: prod-dmz-nacl
region: ap-southeast-2
state: present
- name: "Delete nacl and subnet associations"
ec2_vpc_nacl:
vpc_id: vpc-12345678
name: prod-dmz-nacl
state: absent
- name: "Delete nacl by its id"
ec2_vpc_nacl:
nacl_id: acl-33b4ee5b
state: absent
'''
RETURN = '''
task:
description: The result of the create, or delete action.
returned: success
type: dictionary
'''
try:
import botocore
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import boto3_conn, ec2_argument_spec, get_aws_connection_info
# Common fields for the default rule that is contained within every VPC NACL.
DEFAULT_RULE_FIELDS = {
    'RuleNumber': 32767,
    'RuleAction': 'deny',
    'CidrBlock': '0.0.0.0/0',
    'Protocol': '-1'
}
# AWS reports the implicit catch-all deny rule in both directions; these two
# dicts let the comparison logic skip it when diffing rule sets.
DEFAULT_INGRESS = dict(list(DEFAULT_RULE_FIELDS.items()) + [('Egress', False)])
DEFAULT_EGRESS = dict(list(DEFAULT_RULE_FIELDS.items()) + [('Egress', True)])
# VPC-supported IANA protocol numbers
# http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml
PROTOCOL_NUMBERS = {'all': -1, 'icmp': 1, 'tcp': 6, 'udp': 17, }
# Utility methods
def icmp_present(entry):
    """Return True if the rule entry describes an ICMP rule.

    An ICMP rule has exactly 6 fields (rule no, protocol, action, cidr,
    icmp type, icmp code) and names its protocol 'icmp' (or IANA number 1).
    """
    # BUG FIX: the original mixed `and`/`or` without parentheses, so any
    # entry whose second field equaled 1 matched regardless of length, and
    # non-matches returned None. Group the protocol test and return a bool.
    return len(entry) == 6 and entry[1] in ('icmp', 1)
def load_tags(module):
    """Build the EC2 tag list from module params, always appending Name."""
    user_tags = module.params.get('tags') or {}
    tags = [{'Key': key, 'Value': str(value)}
            for key, value in user_tags.items()]
    tags.append({'Key': "Name", 'Value': module.params.get('name')})
    return tags
def subnets_removed(nacl_id, subnets, client, module):
    """Subnet ids currently attached to the NACL but absent from `subnets`."""
    acl = find_acl_by_id(nacl_id, client, module)
    attached = [assoc['SubnetId']
                for assoc in acl['NetworkAcls'][0]['Associations']]
    wanted = set(subnets)
    return [subnet_id for subnet_id in attached if subnet_id not in wanted]
def subnets_added(nacl_id, subnets, client, module):
    """Subnet ids requested in `subnets` but not yet attached to the NACL."""
    acl = find_acl_by_id(nacl_id, client, module)
    attached = {assoc['SubnetId']
                for assoc in acl['NetworkAcls'][0]['Associations']}
    return [subnet_id for subnet_id in subnets if subnet_id not in attached]
def subnets_changed(nacl, client, module):
    """Sync subnet associations with the requested list; True on change."""
    vpc_id = module.params.get('vpc_id')
    nacl_id = nacl['NetworkAcls'][0]['NetworkAclId']
    subnets = subnets_to_associate(nacl, client, module)
    if not subnets:
        # Nothing requested: hand any current associations back to the
        # VPC's default NACL.
        default_nacl_id = find_default_vpc_nacl(vpc_id, client, module)[0]
        current = find_subnet_ids_by_nacl_id(nacl_id, client, module)
        if not current:
            return False
        replace_network_acl_association(default_nacl_id, current, client, module)
        return True
    changed = False
    to_add = subnets_added(nacl_id, subnets, client, module)
    if to_add:
        replace_network_acl_association(nacl_id, to_add, client, module)
        changed = True
    to_remove = subnets_removed(nacl_id, subnets, client, module)
    if to_remove:
        default_nacl_id = find_default_vpc_nacl(vpc_id, client, module)[0]
        replace_network_acl_association(default_nacl_id, to_remove, client, module)
        changed = True
    return changed
def nacls_changed(nacl, client, module):
    """Reconcile ingress/egress rules against AWS; True if either changed."""
    desired_egress = module.params.get('egress')
    desired_ingress = module.params.get('ingress')
    nacl_id = nacl['NetworkAcls'][0]['NetworkAclId']
    current = describe_network_acl(client, module)
    entries = current['NetworkAcls'][0]['Entries']
    # Skip the implicit catch-all deny rule AWS adds in each direction.
    egress = [e for e in entries if e['Egress'] and e != DEFAULT_EGRESS]
    ingress = [e for e in entries if not e['Egress'] and e != DEFAULT_INGRESS]
    changed = rules_changed(egress, desired_egress, True, nacl_id, client, module)
    changed = rules_changed(ingress, desired_ingress, False, nacl_id, client, module) or changed
    return changed
def tags_changed(nacl_id, client, module):
    """Re-tag the NACL when its current tags differ; return True on change."""
    changed = False
    tags = dict()
    if module.params.get('tags'):
        tags = module.params.get('tags')
    # NOTE(review): when a tags param was supplied, `tags` aliases
    # module.params['tags'], so this line inserts the Name key into the
    # module params themselves -- later load_tags() calls will see it.
    # Confirm that is intended before refactoring.
    tags['Name'] = module.params.get('name')
    nacl = find_acl_by_id(nacl_id, client, module)
    if nacl['NetworkAcls']:
        # Flatten AWS's [{'Key': k, 'Value': v}, ...] and the requested dict
        # into flat key/value lists so they can be compared order-free.
        nacl_values = [t.values() for t in nacl['NetworkAcls'][0]['Tags']]
        nacl_tags = [item for sublist in nacl_values for item in sublist]
        tag_values = [[key, str(value)] for key, value in tags.items()]
        tags = [item for sublist in tag_values for item in sublist]
        if sorted(nacl_tags) == sorted(tags):
            changed = False
            return changed
        else:
            # Tags differ: wipe and re-create the full set.
            delete_tags(nacl_id, client, module)
            create_tags(nacl_id, client, module)
            changed = True
            return changed
    return changed
def rules_changed(aws_rules, param_rules, Egress, nacl_id, client, module):
    """Reconcile the AWS rule entries with the requested ones; True on change."""
    desired = [process_rule_entry(entry, Egress) for entry in param_rules]
    if desired == aws_rules:
        return False
    changed = False
    # Delete entries AWS has but the user no longer wants.
    stale = [rule for rule in aws_rules if rule not in desired]
    for rule in stale:
        delete_network_acl_entry({
            'NetworkAclId': nacl_id,
            'RuleNumber': rule['RuleNumber'],
            'Egress': Egress,
        }, client, module)
        changed = True
    # Create entries the user wants but AWS lacks.
    missing = [rule for rule in desired if rule not in aws_rules]
    for rule in missing:
        rule['NetworkAclId'] = nacl_id
        create_network_acl_entry(rule, client, module)
        changed = True
    return changed
def process_rule_entry(entry, Egress):
    """Translate one rule list from module params into boto3 entry kwargs."""
    params = {
        'RuleNumber': entry[0],
        'Protocol': str(PROTOCOL_NUMBERS[entry[1]]),
        'RuleAction': entry[2],
        'Egress': Egress,
        'CidrBlock': entry[3],
    }
    if icmp_present(entry):
        params['IcmpTypeCode'] = {"Type": int(entry[4]), "Code": int(entry[5])}
    elif entry[6] or entry[7]:
        params['PortRange'] = {"From": entry[6], 'To': entry[7]}
    return params
def restore_default_associations(assoc_ids, default_nacl_id, client, module):
    """Point each association back at the VPC's default NACL.

    Returns True when associations were moved; None (falsy) otherwise.
    """
    if not assoc_ids:
        return None
    params = {'NetworkAclId': default_nacl_id[0]}
    for assoc_id in assoc_ids:
        params['AssociationId'] = assoc_id
        restore_default_acl_association(params, client, module)
    return True
def construct_acl_entries(nacl, client, module):
    """Create every requested ingress then egress rule on a fresh NACL."""
    nacl_id = nacl['NetworkAcl']['NetworkAclId']
    for is_egress, key in ((False, 'ingress'), (True, 'egress')):
        for entry in module.params.get(key):
            params = process_rule_entry(entry, Egress=is_egress)
            params['NetworkAclId'] = nacl_id
            create_network_acl_entry(params, client, module)
## Module invocations
def setup_network_acl(client, module):
    """Create the NACL if missing, otherwise sync subnets, rules and tags.

    Returns (changed, nacl_id).
    """
    nacl = describe_network_acl(client, module)
    if not nacl['NetworkAcls']:
        # No matching NACL: create it and apply tags, subnets and rules.
        nacl = create_network_acl(module.params.get('vpc_id'), client, module)
        nacl_id = nacl['NetworkAcl']['NetworkAclId']
        create_tags(nacl_id, client, module)
        subnets = subnets_to_associate(nacl, client, module)
        replace_network_acl_association(nacl_id, subnets, client, module)
        construct_acl_entries(nacl, client, module)
        return (True, nacl_id)
    # NACL exists: reconcile each aspect and report whether anything moved.
    nacl_id = nacl['NetworkAcls'][0]['NetworkAclId']
    subnet_result = subnets_changed(nacl, client, module)
    nacl_result = nacls_changed(nacl, client, module)
    tag_result = tags_changed(nacl_id, client, module)
    changed = subnet_result or nacl_result or tag_result
    return (changed, nacl_id)
def remove_network_acl(client, module):
    """Delete the NACL, re-pointing its subnet associations at the default.

    Returns (changed, result_dict).
    """
    result = dict()
    nacl = describe_network_acl(client, module)
    if not nacl['NetworkAcls']:
        return False, result
    described = nacl['NetworkAcls'][0]
    nacl_id = described['NetworkAclId']
    vpc_id = described['VpcId']
    assoc_ids = [a['NetworkAclAssociationId'] for a in described['Associations']]
    default_nacl_id = find_default_vpc_nacl(vpc_id, client, module)
    if not default_nacl_id:
        result = {vpc_id: "Default NACL ID not found - Check the VPC ID"}
        return False, result
    if restore_default_associations(assoc_ids, default_nacl_id, client, module):
        delete_network_acl(nacl_id, client, module)
        result[nacl_id] = "Successfully deleted"
        return True, result
    if not assoc_ids:
        delete_network_acl(nacl_id, client, module)
        result[nacl_id] = "Successfully deleted"
        return True, result
    return False, result
# Boto3 client methods
def create_network_acl(vpc_id, client, module):
    """Create a NACL in the VPC; in check mode return a placeholder id."""
    if module.check_mode:
        return dict(NetworkAcl=dict(NetworkAclId="nacl-00000000"))
    try:
        nacl = client.create_network_acl(VpcId=vpc_id)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    return nacl
def create_network_acl_entry(params, client, module):
    """Add a single rule entry to a NACL (no-op in check mode)."""
    if module.check_mode:
        return
    try:
        client.create_network_acl_entry(**params)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def create_tags(nacl_id, client, module):
    """Replace the NACL's tags with those built from the module params."""
    try:
        # Clear existing tags first so removed keys do not linger.
        delete_tags(nacl_id, client, module)
        if not module.check_mode:
            client.create_tags(Resources=[nacl_id], Tags=load_tags(module))
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def delete_network_acl(nacl_id, client, module):
    """Delete the NACL itself (no-op in check mode)."""
    if module.check_mode:
        return
    try:
        client.delete_network_acl(NetworkAclId=nacl_id)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def delete_network_acl_entry(params, client, module):
    """Remove a single rule entry from a NACL (no-op in check mode)."""
    if module.check_mode:
        return
    try:
        client.delete_network_acl_entry(**params)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def delete_tags(nacl_id, client, module):
    """Remove all tags from the NACL (no-op in check mode)."""
    if module.check_mode:
        return
    try:
        client.delete_tags(Resources=[nacl_id])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def describe_acl_associations(subnets, client, module):
    """Association ids of the given subnets on their current NACL."""
    if not subnets:
        return []
    try:
        results = client.describe_network_acls(
            Filters=[{'Name': 'association.subnet-id', 'Values': subnets}])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    return [a['NetworkAclAssociationId']
            for a in results['NetworkAcls'][0]['Associations']
            if a['SubnetId'] in subnets]
def describe_network_acl(client, module):
    """Look up the NACL by id when given, otherwise by its Name tag."""
    nacl_id = module.params.get('nacl_id')
    if nacl_id:
        filters = [{'Name': 'network-acl-id', 'Values': [nacl_id]}]
    else:
        filters = [{'Name': 'tag:Name', 'Values': [module.params.get('name')]}]
    try:
        return client.describe_network_acls(Filters=filters)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def find_acl_by_id(nacl_id, client, module):
    """Describe a NACL directly by its id; fail the module on API error."""
    try:
        return client.describe_network_acls(NetworkAclIds=[nacl_id])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def find_default_vpc_nacl(vpc_id, client, module):
    """Return the id(s) of the VPC's default NACL as a (possibly empty) list."""
    try:
        response = client.describe_network_acls(
            Filters=[{'Name': 'vpc-id', 'Values': [vpc_id]}])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    return [acl['NetworkAclId']
            for acl in response['NetworkAcls'] if acl['IsDefault']]
def find_subnet_ids_by_nacl_id(nacl_id, client, module):
    """Subnet ids currently associated with the given NACL."""
    try:
        results = client.describe_network_acls(Filters=[
            {'Name': 'association.network-acl-id', 'Values': [nacl_id]}
        ])
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    if not results['NetworkAcls']:
        return []
    return [assoc['SubnetId']
            for assoc in results['NetworkAcls'][0]['Associations']
            if assoc['SubnetId']]
def replace_network_acl_association(nacl_id, subnets, client, module):
    """Re-point each subnet's NACL association at `nacl_id` (skips in check mode)."""
    for association in describe_acl_associations(subnets, client, module):
        try:
            if not module.check_mode:
                client.replace_network_acl_association(
                    NetworkAclId=nacl_id, AssociationId=association)
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg=str(e))
def replace_network_acl_entry(entries, Egress, nacl_id, client, module):
    """Replace each given rule entry on the NACL (skips in check mode).

    Note: the entry dicts are mutated in place (NetworkAclId is added),
    matching the original behavior.
    """
    for entry in entries:
        params = entry
        params['NetworkAclId'] = nacl_id
        try:
            if not module.check_mode:
                client.replace_network_acl_entry(**params)
        except botocore.exceptions.ClientError as e:
            module.fail_json(msg=str(e))
def restore_default_acl_association(params, client, module):
    """Issue one replace_network_acl_association call (no-op in check mode)."""
    if module.check_mode:
        return
    try:
        client.replace_network_acl_association(**params)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
def subnets_to_associate(nacl, client, module):
    """Resolve the `subnets` param (ids or Name tags) to actual subnet ids."""
    params = list(module.params.get('subnets'))
    if not params:
        return []
    # Heuristic: if the first entry looks like a subnet id, treat them all
    # as ids; otherwise match them against the Name tag.
    if params[0].startswith("subnet-"):
        filters = [{'Name': 'subnet-id', 'Values': params}]
    else:
        filters = [{'Name': 'tag:Name', 'Values': params}]
    try:
        subnets = client.describe_subnets(Filters=filters)
    except botocore.exceptions.ClientError as e:
        module.fail_json(msg=str(e))
    return [s['SubnetId'] for s in subnets['Subnets'] if s['SubnetId']]
def main():
    """Module entry point: parse args, connect to EC2, dispatch on state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        vpc_id=dict(),
        name=dict(),
        nacl_id=dict(),
        subnets=dict(required=False, type='list', default=list()),
        tags=dict(required=False, type='dict'),
        ingress=dict(required=False, type='list', default=list()),
        egress=dict(required=False, type='list', default=list(),),
        state=dict(default='present', choices=['present', 'absent']),
    ),
    )
    # Exactly one of name/nacl_id identifies the ACL; vpc_id is required
    # only when creating or updating (state=present).
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True,
                           required_one_of=[['name', 'nacl_id']],
                           required_if=[['state', 'present', ['vpc_id']]])
    if not HAS_BOTO3:
        module.fail_json(msg='json, botocore and boto3 are required.')
    state = module.params.get('state').lower()
    try:
        region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        client = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
    except botocore.exceptions.NoCredentialsError as e:
        module.fail_json(msg="Can't authorize connection - %s" % str(e))
    # Dispatch table: state -> handler returning (changed, result).
    invocations = {
        "present": setup_network_acl,
        "absent": remove_network_acl
    }
    (changed, results) = invocations[state](client, module)
    module.exit_json(changed=changed, nacl_id=results)
if __name__ == '__main__':
    main()
|
{
"content_hash": "9f208893dd30551103d6dd87cf1d15de",
"timestamp": "",
"source": "github",
"line_count": 570,
"max_line_length": 126,
"avg_line_length": 34.598245614035086,
"alnum_prop": 0.6263881141929922,
"repo_name": "e-gob/plataforma-kioscos-autoatencion",
"id": "0f26aac7f9767b46534967889f07a39674095eaf",
"size": "19862",
"binary": false,
"copies": "29",
"ref": "refs/heads/master",
"path": "scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/cloud/amazon/ec2_vpc_nacl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "41110"
},
{
"name": "C++",
"bytes": "3804"
},
{
"name": "CSS",
"bytes": "34823"
},
{
"name": "CoffeeScript",
"bytes": "8521"
},
{
"name": "HTML",
"bytes": "61168"
},
{
"name": "JavaScript",
"bytes": "7206"
},
{
"name": "Makefile",
"bytes": "1347"
},
{
"name": "PowerShell",
"bytes": "584344"
},
{
"name": "Python",
"bytes": "25506593"
},
{
"name": "Ruby",
"bytes": "245726"
},
{
"name": "Shell",
"bytes": "5075"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.