#
# Copyright 2013 NTT Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mox
from neutronclient.common import exceptions as qe
from neutronclient.neutron import v2_0 as neutronV20
from neutronclient.v2_0 import client as neutronclient
import six
from heat.common import exception
from heat.common import template_format
from heat.engine.resources.openstack.neutron import network_gateway
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
gw_template_deprecated = '''
heat_template_version: 2015-04-30
description: Template to test Network Gateway resource
resources:
NetworkGateway:
type: OS::Neutron::NetworkGateway
properties:
name: NetworkGateway
devices:
- id: e52148ca-7db9-4ec3-abe6-2c7c0ff316eb
interface_name: breth1
connections:
- network_id: 6af055d3-26f6-48dd-a597-7611d7e58d35
segmentation_type: vlan
segmentation_id: 10
'''
gw_template = '''
heat_template_version: 2015-04-30
description: Template to test Network Gateway resource
resources:
NetworkGateway:
type: OS::Neutron::NetworkGateway
properties:
name: NetworkGateway
devices:
- id: e52148ca-7db9-4ec3-abe6-2c7c0ff316eb
interface_name: breth1
connections:
- network: 6af055d3-26f6-48dd-a597-7611d7e58d35
segmentation_type: vlan
segmentation_id: 10
'''
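# gw_template_deprecated wires the connection through the deprecated
# ``network_id`` property, while gw_template uses its replacement,
# ``network``; the resolve_neutron flag below selects between the two.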
sng = {
'network_gateway': {
'id': 'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'name': 'NetworkGateway',
'default': False,
'tenant_id': '96ba52dc-c5c5-44c6-9a9d-d3ba1a03f77f',
'devices': [{
'id': 'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': 'breth1'}],
'ports': [{
'segmentation_type': 'vlan',
'port_id': '32acc49c-899e-44ea-8177-6f4157e12eb4',
'segmentation_id': 10}]
}
}
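# sng is a canned show_network_gateway() payload, reused as the mocked
# response in the delete and attribute tests below.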
class NeutronNetworkGatewayTest(common.HeatTestCase):
def setUp(self):
super(NeutronNetworkGatewayTest, self).setUp()
self.m.StubOutWithMock(neutronclient.Client, 'create_network_gateway')
self.m.StubOutWithMock(neutronclient.Client, 'show_network_gateway')
self.m.StubOutWithMock(neutronclient.Client, 'delete_network_gateway')
self.m.StubOutWithMock(neutronclient.Client, 'connect_network_gateway')
self.m.StubOutWithMock(neutronclient.Client, 'update_network_gateway')
self.m.StubOutWithMock(neutronclient.Client,
'disconnect_network_gateway')
self.m.StubOutWithMock(neutronclient.Client, 'list_networks')
self.m.StubOutWithMock(neutronV20, 'find_resourceid_by_name_or_id')
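    # The tests below follow mox's record/replay pattern: expectations are
    # recorded on the stubs above, self.m.ReplayAll() switches to replay
    # mode, and self.m.VerifyAll() asserts that every recorded call was made.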
def mock_create_fail_network_not_found_delete_success(self):
neutronclient.Client.create_network_gateway({
'network_gateway': {
'name': u'NetworkGateway',
'devices': [{'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth1'}]
}
}
).AndReturn({
'network_gateway': {
'id': 'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'name': 'NetworkGateway',
'default': False,
'tenant_id': '96ba52dc-c5c5-44c6-9a9d-d3ba1a03f77f',
'devices': [{
'id': 'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': 'breth1'}]
}
}
)
neutronV20.find_resourceid_by_name_or_id(
mox.IsA(neutronclient.Client),
'network',
'6af055d3-26f6-48dd-a597-7611d7e58d35'
).MultipleTimes().AndRaise(qe.NeutronClientException(status_code=404))
        # mock a successful deletion of the network_gateway
neutronclient.Client.delete_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndReturn(None)
neutronclient.Client.show_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndRaise(qe.NeutronClientException(status_code=404))
t = template_format.parse(gw_template)
self.stack = utils.parse_stack(t)
resource_defns = self.stack.t.resource_definitions(self.stack)
rsrc = network_gateway.NetworkGateway(
'test_network_gateway',
resource_defns['NetworkGateway'], self.stack)
return rsrc
def prepare_create_network_gateway(self, resolve_neutron=True):
neutronclient.Client.create_network_gateway({
'network_gateway': {
'name': u'NetworkGateway',
'devices': [{'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth1'}]
}
}
).AndReturn({
'network_gateway': {
'id': 'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'name': 'NetworkGateway',
'default': False,
'tenant_id': '96ba52dc-c5c5-44c6-9a9d-d3ba1a03f77f',
'devices': [{
'id': 'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': 'breth1'}]
}
}
)
neutronclient.Client.connect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndReturn({
'connection_info': {
'network_gateway_id': u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'port_id': u'32acc49c-899e-44ea-8177-6f4157e12eb4'
}
})
self.stub_NetworkConstraint_validate()
if resolve_neutron:
neutronV20.find_resourceid_by_name_or_id(
mox.IsA(neutronclient.Client),
'network',
'6af055d3-26f6-48dd-a597-7611d7e58d35'
).AndReturn('6af055d3-26f6-48dd-a597-7611d7e58d35')
t = template_format.parse(gw_template)
else:
t = template_format.parse(gw_template_deprecated)
self.stack = utils.parse_stack(t)
resource_defns = self.stack.t.resource_definitions(self.stack)
rsrc = network_gateway.NetworkGateway(
'test_network_gateway',
resource_defns['NetworkGateway'], self.stack)
return rsrc
def _test_network_gateway_create(self, resolve_neutron=True):
rsrc = self.prepare_create_network_gateway(resolve_neutron)
neutronV20.find_resourceid_by_name_or_id(
mox.IsA(neutronclient.Client),
'network',
'6af055d3-26f6-48dd-a597-7611d7e58d35'
).MultipleTimes().AndReturn(
'6af055d3-26f6-48dd-a597-7611d7e58d35')
neutronclient.Client.disconnect_network_gateway(
'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndReturn(None)
neutronclient.Client.disconnect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
        ).AndRaise(qe.NeutronClientException(status_code=404))
neutronclient.Client.delete_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndReturn(None)
neutronclient.Client.show_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndReturn(sng)
neutronclient.Client.show_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndRaise(qe.NeutronClientException(status_code=404))
neutronclient.Client.delete_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndRaise(qe.NeutronClientException(status_code=404))
self.m.ReplayAll()
rsrc.validate()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
ref_id = rsrc.FnGetRefId()
self.assertEqual(u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', ref_id)
self.assertRaises(
exception.InvalidTemplateAttribute, rsrc.FnGetAtt, 'Foo')
self.assertIsNone(scheduler.TaskRunner(rsrc.delete)())
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
rsrc.state_set(rsrc.CREATE, rsrc.COMPLETE, 'to delete again')
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_network_gateway_create_deprecated(self):
self._test_network_gateway_create(resolve_neutron=False)
def test_network_gateway_create(self):
self._test_network_gateway_create()
def test_network_gateway_create_fail_delete_success(self):
        # If the network gateway is created successfully but fails to
        # connect to the network, it should still be deletable, leaving no
        # stale network_gateway behind.
rsrc = self.mock_create_fail_network_not_found_delete_success()
self.stub_NetworkConstraint_validate()
self.m.ReplayAll()
rsrc.validate()
self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.create))
self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
ref_id = rsrc.FnGetRefId()
self.assertEqual(u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', ref_id)
self.assertIsNone(scheduler.TaskRunner(rsrc.delete)())
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_network_gateway_update(self):
rsrc = self.prepare_create_network_gateway()
        # The update/delete flow below resolves the same network six times;
        # record six identical expectations (mox replays them in order).
        for _ in range(6):
            neutronV20.find_resourceid_by_name_or_id(
                mox.IsA(neutronclient.Client),
                'network',
                '6af055d3-26f6-48dd-a597-7611d7e58d35'
            ).AndReturn('6af055d3-26f6-48dd-a597-7611d7e58d35')
neutronclient.Client.update_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_gateway': {
'name': u'NetworkGatewayUpdate'
}
}
).AndReturn(None)
neutronclient.Client.disconnect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndReturn(None)
neutronclient.Client.connect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 0,
'segmentation_type': u'flat'
}
).AndReturn({
'connection_info': {
'network_gateway_id': u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'port_id': u'aa800972-f6be-4c65-8453-9ab31834bf80'
}
})
neutronclient.Client.disconnect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndRaise(qe.NeutronClientException(status_code=404))
neutronclient.Client.connect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 0,
'segmentation_type': u'flat'
}
).AndReturn({
'connection_info': {
'network_gateway_id': u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'port_id': u'aa800972-f6be-4c65-8453-9ab31834bf80'
}
})
neutronclient.Client.disconnect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndReturn(None)
neutronclient.Client.delete_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).AndReturn(None)
neutronclient.Client.create_network_gateway({
'network_gateway': {
'name': u'NetworkGateway',
'devices': [{'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth2'}]
}
}
).AndReturn({
'network_gateway': {
'id': 'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'name': 'NetworkGateway',
'default': False,
'tenant_id': '96ba52dc-c5c5-44c6-9a9d-d3ba1a03f77f',
'devices': [{
'id': 'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': 'breth2'}]
}
}
)
neutronclient.Client.connect_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37', {
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_id': 10,
'segmentation_type': u'vlan'
}
).AndReturn({
'connection_info': {
'network_gateway_id': u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'port_id': u'aa800972-f6be-4c65-8453-9ab31834bf80'
}
})
self.m.ReplayAll()
rsrc.validate()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
# update name
snippet_for_update = rsrc_defn.ResourceDefinition(
rsrc.name,
rsrc.type(),
{
'name': u'NetworkGatewayUpdate',
'devices': [{
'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth1'}],
'connections': [{
'network': '6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_type': 'vlan',
'segmentation_id': 10}]
})
prop_diff = {'name': u'NetworkGatewayUpdate'}
self.assertIsNone(rsrc.handle_update(snippet_for_update,
mox.IgnoreArg(),
prop_diff))
# update connections
snippet_for_update = rsrc_defn.ResourceDefinition(
rsrc.name,
rsrc.type(),
{
'name': u'NetworkGateway',
'devices': [{
'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth1'}],
'connections': [{
'network': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_type': u'flat',
'segmentation_id': 0}]
})
prop_diff = {
'connections': [{
'network': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_type': u'flat',
'segmentation_id': 0}]
}
self.assertIsNone(rsrc.handle_update(snippet_for_update,
mox.IgnoreArg(),
prop_diff))
# update connections once more
self.assertIsNone(rsrc.handle_update(snippet_for_update,
mox.IgnoreArg(),
prop_diff))
# update devices
snippet_for_update = rsrc_defn.ResourceDefinition(
rsrc.name,
rsrc.type(),
{
'name': u'NetworkGateway',
'devices': [{
'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth2'}],
'connections': [{
'network_id': u'6af055d3-26f6-48dd-a597-7611d7e58d35',
'segmentation_type': u'vlan',
'segmentation_id': 10}]
})
prop_diff = {
'devices': [{
'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth2'}]
}
self.assertIsNone(rsrc.handle_update(snippet_for_update,
mox.IgnoreArg(),
prop_diff))
self.m.VerifyAll()
    def test_network_gateway_create_failed(self):
neutronclient.Client.create_network_gateway({
'network_gateway': {
'name': u'NetworkGateway',
'devices': [{
'id': u'e52148ca-7db9-4ec3-abe6-2c7c0ff316eb',
'interface_name': u'breth1'}]
}
}
).AndRaise(qe.NeutronClientException)
self.stub_NetworkConstraint_validate()
self.m.ReplayAll()
t = template_format.parse(gw_template)
stack = utils.parse_stack(t)
resource_defns = stack.t.resource_definitions(stack)
rsrc = network_gateway.NetworkGateway(
'network_gateway', resource_defns['NetworkGateway'], stack)
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.create))
self.assertEqual(
'NeutronClientException: resources.network_gateway: '
'An unknown exception occurred.',
six.text_type(error))
self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
self.assertIsNone(scheduler.TaskRunner(rsrc.delete)())
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_gateway_validate_failed_with_vlan(self):
t = template_format.parse(gw_template)
del t['resources']['NetworkGateway']['properties'][
'connections'][0]['segmentation_id']
stack = utils.parse_stack(t)
resource_defns = stack.t.resource_definitions(stack)
rsrc = network_gateway.NetworkGateway(
'test_network_gateway',
resource_defns['NetworkGateway'], stack)
self.stub_NetworkConstraint_validate()
self.m.ReplayAll()
error = self.assertRaises(exception.StackValidationFailed,
scheduler.TaskRunner(rsrc.validate))
self.assertEqual(
'segmentation_id must be specified for using vlan',
six.text_type(error))
self.m.VerifyAll()
def test_gateway_validate_failed_with_flat(self):
t = template_format.parse(gw_template)
t['resources']['NetworkGateway']['properties'][
'connections'][0]['segmentation_type'] = 'flat'
stack = utils.parse_stack(t)
resource_defns = stack.t.resource_definitions(stack)
rsrc = network_gateway.NetworkGateway(
'test_network_gateway',
resource_defns['NetworkGateway'], stack)
self.stub_NetworkConstraint_validate()
self.m.ReplayAll()
error = self.assertRaises(exception.StackValidationFailed,
scheduler.TaskRunner(rsrc.validate))
self.assertEqual(
'segmentation_id cannot be specified except 0 for using flat',
six.text_type(error))
self.m.VerifyAll()
def test_network_gateway_attribute(self):
neutronclient.Client.show_network_gateway(
u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37'
).MultipleTimes().AndReturn(sng)
rsrc = self.prepare_create_network_gateway()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual(u'ed4c03b9-8251-4c09-acc4-e59ee9e6aa37',
rsrc.FnGetRefId())
self.assertEqual(False, rsrc.FnGetAtt('default'))
error = self.assertRaises(exception.InvalidTemplateAttribute,
rsrc.FnGetAtt, 'hoge')
self.assertEqual(
'The Referenced Attribute (test_network_gateway hoge) is '
'incorrect.', six.text_type(error))
self.m.VerifyAll()
# ---------------------------------------------------------------------------
# repo_name: cryptickp/heat
# path: heat/tests/neutron/test_neutron_network_gateway.py
# language: Python | license: apache-2.0 | size: 22,563
# ---------------------------------------------------------------------------
from __future__ import unicode_literals
from . import responses
url_bases = [
"https://route53.amazonaws.com/201.-..-../hostedzone",
]
url_paths = {
'{0}$': responses.list_or_create_hostzone_response,
'{0}/[^/]+$': responses.get_or_delete_hostzone_response,
'{0}/[^/]+/rrset$': responses.rrset_response,
}
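# In moto's URL-dispatch convention, the '{0}' placeholder in each url_paths
# key is filled with every entry of url_bases; since the base already ends in
# '/201.-..-../hostedzone' (a regex fragment for the dated API version, e.g.
# 2013-04-01), '{0}/[^/]+$' matches requests like GET /hostedzone/<zone-id>.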
# ---------------------------------------------------------------------------
# repo_name: djmitche/moto
# path: moto/route53/urls.py
# language: Python | license: apache-2.0 | size: 324
# ---------------------------------------------------------------------------
# Copyright (c) $today.year. Mark E. Madsen <mark@madsenlab.org>
#
# This work is licensed under the terms of the Creative Commons-GNU General Public License 2.0, as "non-commercial/sharealike". You may use, modify, and distribute this software for non-commercial purposes, and you must distribute any modifications under the same license.
#
# For detailed license terms, see:
# http://creativecommons.org/licenses/GPL/2.0/
"""
.. module:: allele_distribution
:platform: Unix, Windows
:synopsis: Module for creating various initial distributions of traits/alleles, for initializing a simuPOP Population.
.. moduleauthor:: Mark E. Madsen <mark@madsenlab.org>
"""
def constructUniformAllelicDistribution(numalleles):
"""Constructs a uniform distribution of N alleles in the form of a frequency list.
Args:
numalleles (int): Number of alleles present in the initial population.
Returns:
(list): Array of floats, giving the initial frequency of N alleles.
"""
divisor = 100.0 / numalleles
frac = divisor / 100.0
distribution = [frac] * numalleles
return distribution
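# A minimal usage sketch (not part of the original module); the assertion
# below is illustrative only:
if __name__ == '__main__':
    # Four alleles split the total frequency evenly: 100/4 = 25%, i.e. 0.25
    # each, and the list sums to 1.0 as a simuPOP initializer expects.
    assert constructUniformAllelicDistribution(4) == [0.25, 0.25, 0.25, 0.25]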
# ---------------------------------------------------------------------------
# repo_name: mmadsen/CTPy
# path: ctpy/utils/allele_distribution.py
# language: Python | license: apache-2.0 | size: 1,152
# ---------------------------------------------------------------------------
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Check that Spack complies with minimum supported python versions.
We ensure that all Spack files work with Python2 >= 2.6 and Python3 >= 3.0.
We'd like to drop 2.6 support at some point, but there are still many HPC
systems that ship with RHEL6/CentOS 6, which have Python 2.6 as the
default version. Once those go away, we can likely drop 2.6 and increase
the minimum supported Python 3 version, as well.
"""
from __future__ import print_function
import os
import sys
import re
import llnl.util.tty as tty
import spack
#
# This test uses pyqver, by Greg Hewgill, which is a dual-source module.
# That means we need to do different checks depending on whether we're
# running Python 2 or Python 3.
#
if sys.version_info[0] < 3:
import pyqver2 as pyqver
spack_min_supported = (2, 6)
# Exclude Python 3 versions of dual-source modules when using Python 2
exclude_paths = [
# Jinja 2 has some 'async def' functions that are not treated correctly
# by pyqver.py
os.path.join(spack.lib_path, 'external', 'jinja2', 'asyncfilters.py'),
os.path.join(spack.lib_path, 'external', 'jinja2', 'asyncsupport.py'),
os.path.join(spack.lib_path, 'external', 'yaml', 'lib3'),
os.path.join(spack.lib_path, 'external', 'pyqver3.py')]
else:
import pyqver3 as pyqver
spack_min_supported = (3, 0)
# Exclude Python 2 versions of dual-source modules when using Python 3
exclude_paths = [
# Jinja 2 has some 'async def' functions that are not treated correctly
# by pyqver.py
os.path.join(spack.lib_path, 'external', 'jinja2', 'asyncfilters.py'),
os.path.join(spack.lib_path, 'external', 'jinja2', 'asyncsupport.py'),
os.path.join(spack.lib_path, 'external', 'yaml', 'lib'),
os.path.join(spack.lib_path, 'external', 'pyqver2.py')]
def pyfiles(search_paths, exclude=()):
"""Generator that yields all the python files in the search paths.
Args:
search_paths (list of str): list of paths to search for python files
exclude (list of str): file paths to exclude from search
Yields:
python files in the search path.
"""
# first file is the spack script.
yield spack.spack_file
# Iterate through the whole spack source tree.
for path in search_paths:
for root, dirnames, filenames in os.walk(path):
for filename in filenames:
realpath = os.path.realpath(os.path.join(root, filename))
if any(realpath.startswith(p) for p in exclude):
continue
if re.match(r'^[^.#].*\.py$', filename):
yield os.path.join(root, filename)
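# Illustrative usage sketch (hypothetical paths, not from the original file):
#
#     for f in pyfiles(['/opt/spack/lib'], exclude=('/opt/spack/lib/external',)):
#         print(f)
#
# Note that the spack entry-point script is always yielded first.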
def check_python_versions(files):
"""Check that a set of Python files works with supported Ptyhon versions"""
    # This is a nested dict mapping:
# version -> filename -> reasons
#
# Reasons are tuples of (lineno, string), where the string is the
# cause for a version incompatibility.
all_issues = {}
# Parse files and run pyqver on each file.
for path in files:
with open(path) as pyfile:
full_text = pyfile.read()
versions = pyqver.get_versions(full_text, path)
for ver, reasons in versions.items():
if ver <= spack_min_supported:
continue
# Record issues. Mark exceptions with '# nopyqver' comment
for lineno, cause in reasons:
lines = full_text.split('\n')
if not re.search(r'#\s*nopyqver\s*$', lines[lineno - 1]):
all_issues.setdefault(ver, {})[path] = reasons
    # Print a message if there are issues
if all_issues:
tty.msg("Spack must remain compatible with Python version %d.%d"
% spack_min_supported)
# Print out a table showing which files/linenos require which
# python version, and a string describing why.
for v in sorted(all_issues.keys(), reverse=True):
messages = []
for path in sorted(all_issues[v].keys()):
short_path = path
if path.startswith(spack.prefix):
short_path = path[len(spack.prefix):]
reasons = [r for r in set(all_issues[v][path]) if r]
for lineno, cause in reasons:
file_line = "%s:%s" % (short_path.lstrip('/'), lineno)
messages.append((file_line, cause))
print()
tty.msg("These files require version %d.%d:" % v)
maxlen = max(len(f) for f, prob in messages)
fmt = "%%-%ds%%s" % (maxlen + 3)
print(fmt % ('File', 'Reason'))
print(fmt % ('-' * (maxlen), '-' * 20))
for msg in messages:
print(fmt % msg)
# Fail this test if there were issues.
assert not all_issues
def test_core_module_compatibility():
"""Test that all core spack modules work with supported Python versions."""
check_python_versions(pyfiles([spack.lib_path], exclude=exclude_paths))
def test_package_module_compatibility():
"""Test that all spack packages work with supported Python versions."""
check_python_versions(pyfiles([spack.packages_path]))
# ---------------------------------------------------------------------------
# repo_name: TheTimmy/spack
# path: lib/spack/spack/test/python_version.py
# language: Python | license: lgpl-2.1 | size: 6,415
# ---------------------------------------------------------------------------
# Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import math
import numpy
import numpy as np
from numpy import fft
from numpy.testing import assert_, assert_equal, assert_array_equal, \
TestCase, run_module_suite, \
assert_array_almost_equal, assert_almost_equal
import scipy.ndimage as ndimage
eps = 1e-12
def sumsq(a, b):
return math.sqrt(((a - b)**2).sum())
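# sumsq(a, b) is the Euclidean (L2) distance between the flattened arrays;
# the gaussian filter tests below use "sumsq(input, output) > 1.0" to check
# that filtering actually changed the data.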
class TestNdimage:
def setUp(self):
# list of numarray data types
self.types = [numpy.int8, numpy.uint8, numpy.int16,
numpy.uint16, numpy.int32, numpy.uint32,
numpy.int64, numpy.uint64,
numpy.float32, numpy.float64]
# list of boundary modes:
self.modes = ['nearest', 'wrap', 'reflect', 'mirror', 'constant']
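    # Convolution is correlation with the kernel reversed, which is why many
    # tests below keep separate expected outputs (tcor vs tcov) for the
    # ndimage.correlate* and ndimage.convolve* calls.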
def test_correlate01(self):
"correlation 1"
array = numpy.array([1, 2])
weights = numpy.array([2])
expected = [2, 4]
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, expected)
def test_correlate02(self):
"correlation 2"
array = numpy.array([1, 2, 3])
kernel = numpy.array([1])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.correlate1d(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.convolve1d(array, kernel)
assert_array_almost_equal(array, output)
def test_correlate03(self):
"correlation 3"
array = numpy.array([1])
weights = numpy.array([1, 1])
expected = [2]
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, expected)
def test_correlate04(self):
"correlation 4"
array = numpy.array([1, 2])
tcor = [2, 3]
tcov = [3, 4]
weights = numpy.array([1, 1])
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, tcov)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, tcov)
def test_correlate05(self):
"correlation 5"
array = numpy.array([1, 2, 3])
tcor = [2, 3, 5]
tcov = [3, 5, 6]
kernel = numpy.array([1, 1])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal(tcor, output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal(tcov, output)
output = ndimage.correlate1d(array, kernel)
assert_array_almost_equal(tcor, output)
output = ndimage.convolve1d(array, kernel)
assert_array_almost_equal(tcov, output)
def test_correlate06(self):
"correlation 6"
array = numpy.array([1, 2, 3])
tcor = [9, 14, 17]
tcov = [7, 10, 15]
weights = numpy.array([1, 2, 3])
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, tcov)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, tcov)
def test_correlate07(self):
"correlation 7"
array = numpy.array([1, 2, 3])
expected = [5, 8, 11]
weights = numpy.array([1, 2, 1])
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, expected)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, expected)
def test_correlate08(self):
"correlation 8"
array = numpy.array([1, 2, 3])
tcor = [1, 2, 5]
tcov = [3, 6, 7]
weights = numpy.array([1, 2, -1])
output = ndimage.correlate(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve(array, weights)
assert_array_almost_equal(output, tcov)
output = ndimage.correlate1d(array, weights)
assert_array_almost_equal(output, tcor)
output = ndimage.convolve1d(array, weights)
assert_array_almost_equal(output, tcov)
def test_correlate09(self):
"correlation 9"
array = []
kernel = numpy.array([1, 1])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.correlate1d(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.convolve1d(array, kernel)
assert_array_almost_equal(array, output)
def test_correlate10(self):
"correlation 10"
array = [[]]
kernel = numpy.array([[1, 1]])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal(array, output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal(array, output)
def test_correlate11(self):
"correlation 11"
array = numpy.array([[1, 2, 3],
[4, 5, 6]])
kernel = numpy.array([[1, 1],
[1, 1]])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal([[4, 6, 10], [10, 12, 16]], output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal([[12, 16, 18], [18, 22, 24]], output)
def test_correlate12(self):
"correlation 12"
array = numpy.array([[1, 2, 3],
[4, 5, 6]])
kernel = numpy.array([[1, 0],
[0, 1]])
output = ndimage.correlate(array, kernel)
assert_array_almost_equal([[2, 3, 5], [5, 6, 8]], output)
output = ndimage.convolve(array, kernel)
assert_array_almost_equal([[6, 8, 9], [9, 11, 12]], output)
def test_correlate13(self):
"correlation 13"
kernel = numpy.array([[1, 0],
[0, 1]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
for type2 in self.types:
output = ndimage.correlate(array, kernel,
output=type2)
assert_array_almost_equal([[2, 3, 5], [5, 6, 8]], output)
assert_equal(output.dtype.type, type2)
output = ndimage.convolve(array, kernel,
output=type2)
assert_array_almost_equal([[6, 8, 9], [9, 11, 12]], output)
assert_equal(output.dtype.type, type2)
def test_correlate14(self):
"correlation 14"
kernel = numpy.array([[1, 0],
[0, 1]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
for type2 in self.types:
output = numpy.zeros(array.shape, type2)
ndimage.correlate(array, kernel,
output=output)
assert_array_almost_equal([[2, 3, 5], [5, 6, 8]], output)
assert_equal(output.dtype.type, type2)
ndimage.convolve(array, kernel, output=output)
assert_array_almost_equal([[6, 8, 9], [9, 11, 12]], output)
assert_equal(output.dtype.type, type2)
def test_correlate15(self):
"correlation 15"
kernel = numpy.array([[1, 0],
[0, 1]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
output = ndimage.correlate(array, kernel,
output=numpy.float32)
assert_array_almost_equal([[2, 3, 5], [5, 6, 8]], output)
assert_equal(output.dtype.type, numpy.float32)
output = ndimage.convolve(array, kernel,
output=numpy.float32)
assert_array_almost_equal([[6, 8, 9], [9, 11, 12]], output)
assert_equal(output.dtype.type, numpy.float32)
def test_correlate16(self):
"correlation 16"
        kernel = numpy.array([[0.5, 0],
[0, 0.5]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
output = ndimage.correlate(array, kernel,
output=numpy.float32)
assert_array_almost_equal([[1, 1.5, 2.5], [2.5, 3, 4]], output)
assert_equal(output.dtype.type, numpy.float32)
output = ndimage.convolve(array, kernel,
output=numpy.float32)
assert_array_almost_equal([[3, 4, 4.5], [4.5, 5.5, 6]], output)
assert_equal(output.dtype.type, numpy.float32)
def test_correlate17(self):
"correlation 17"
array = numpy.array([1, 2, 3])
tcor = [3, 5, 6]
tcov = [2, 3, 5]
kernel = numpy.array([1, 1])
output = ndimage.correlate(array, kernel, origin=-1)
assert_array_almost_equal(tcor, output)
output = ndimage.convolve(array, kernel, origin=-1)
assert_array_almost_equal(tcov, output)
output = ndimage.correlate1d(array, kernel, origin=-1)
assert_array_almost_equal(tcor, output)
output = ndimage.convolve1d(array, kernel, origin=-1)
assert_array_almost_equal(tcov, output)
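    # The origin argument offsets where the filter window sits relative to
    # each output element; with the size-2 kernel above, origin=-1 simply
    # swaps the correlate and convolve results of test_correlate05.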
def test_correlate18(self):
"correlation 18"
kernel = numpy.array([[1, 0],
[0, 1]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
output = ndimage.correlate(array, kernel,
output=numpy.float32,
mode='nearest', origin=-1)
assert_array_almost_equal([[6, 8, 9], [9, 11, 12]], output)
assert_equal(output.dtype.type, numpy.float32)
output = ndimage.convolve(array, kernel,
output=numpy.float32,
mode='nearest', origin=-1)
assert_array_almost_equal([[2, 3, 5], [5, 6, 8]], output)
assert_equal(output.dtype.type, numpy.float32)
def test_correlate19(self):
"correlation 19"
kernel = numpy.array([[1, 0],
[0, 1]])
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[4, 5, 6]], type1)
output = ndimage.correlate(array, kernel,
output=numpy.float32,
mode='nearest', origin=[-1, 0])
assert_array_almost_equal([[5, 6, 8], [8, 9, 11]], output)
assert_equal(output.dtype.type, numpy.float32)
output = ndimage.convolve(array, kernel,
output=numpy.float32,
mode='nearest', origin=[-1, 0])
assert_array_almost_equal([[3, 5, 6], [6, 8, 9]], output)
assert_equal(output.dtype.type, numpy.float32)
def test_correlate20(self):
"correlation 20"
weights = numpy.array([1, 2, 1])
expected = [[5, 10, 15], [7, 14, 21]]
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[2, 4, 6]], type1)
for type2 in self.types:
output = numpy.zeros((2, 3), type2)
ndimage.correlate1d(array, weights, axis=0,
output=output)
assert_array_almost_equal(output, expected)
ndimage.convolve1d(array, weights, axis=0,
output=output)
assert_array_almost_equal(output, expected)
def test_correlate21(self):
"correlation 21"
array = numpy.array([[1, 2, 3],
[2, 4, 6]])
expected = [[5, 10, 15], [7, 14, 21]]
weights = numpy.array([1, 2, 1])
output = ndimage.correlate1d(array, weights, axis=0)
assert_array_almost_equal(output, expected)
output = ndimage.convolve1d(array, weights, axis=0)
assert_array_almost_equal(output, expected)
def test_correlate22(self):
"correlation 22"
weights = numpy.array([1, 2, 1])
expected = [[6, 12, 18], [6, 12, 18]]
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[2, 4, 6]], type1)
for type2 in self.types:
output = numpy.zeros((2, 3), type2)
ndimage.correlate1d(array, weights, axis=0,
mode='wrap', output=output)
assert_array_almost_equal(output, expected)
ndimage.convolve1d(array, weights, axis=0,
mode='wrap', output=output)
assert_array_almost_equal(output, expected)
def test_correlate23(self):
"correlation 23"
weights = numpy.array([1, 2, 1])
expected = [[5, 10, 15], [7, 14, 21]]
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[2, 4, 6]], type1)
for type2 in self.types:
output = numpy.zeros((2, 3), type2)
ndimage.correlate1d(array, weights, axis=0,
mode='nearest', output=output)
assert_array_almost_equal(output, expected)
ndimage.convolve1d(array, weights, axis=0,
mode='nearest', output=output)
assert_array_almost_equal(output, expected)
def test_correlate24(self):
"correlation 24"
weights = numpy.array([1, 2, 1])
tcor = [[7, 14, 21], [8, 16, 24]]
tcov = [[4, 8, 12], [5, 10, 15]]
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[2, 4, 6]], type1)
for type2 in self.types:
output = numpy.zeros((2, 3), type2)
ndimage.correlate1d(array, weights, axis=0,
mode='nearest', output=output, origin=-1)
assert_array_almost_equal(output, tcor)
ndimage.convolve1d(array, weights, axis=0,
mode='nearest', output=output, origin=-1)
assert_array_almost_equal(output, tcov)
def test_correlate25(self):
"correlation 25"
weights = numpy.array([1, 2, 1])
tcor = [[4, 8, 12], [5, 10, 15]]
tcov = [[7, 14, 21], [8, 16, 24]]
for type1 in self.types:
array = numpy.array([[1, 2, 3],
[2, 4, 6]], type1)
for type2 in self.types:
output = numpy.zeros((2, 3), type2)
ndimage.correlate1d(array, weights, axis=0,
mode='nearest', output=output, origin=1)
assert_array_almost_equal(output, tcor)
ndimage.convolve1d(array, weights, axis=0,
mode='nearest', output=output, origin=1)
assert_array_almost_equal(output, tcov)
def test_gauss01(self):
"gaussian filter 1"
input = numpy.array([[1, 2, 3],
[2, 4, 6]], numpy.float32)
output = ndimage.gaussian_filter(input, 0)
assert_array_almost_equal(output, input)
def test_gauss02(self):
"gaussian filter 2"
input = numpy.array([[1, 2, 3],
[2, 4, 6]], numpy.float32)
output = ndimage.gaussian_filter(input, 1.0)
assert_equal(input.dtype, output.dtype)
assert_equal(input.shape, output.shape)
def test_gauss03(self):
"gaussian filter 3 - single precision data"
input = numpy.arange(100 * 100).astype(numpy.float32)
input.shape = (100, 100)
output = ndimage.gaussian_filter(input, [1.0, 1.0])
assert_equal(input.dtype, output.dtype)
assert_equal(input.shape, output.shape)
# input.sum() is 49995000.0. With single precision floats, we can't
# expect more than 8 digits of accuracy, so use decimal=0 in this test.
assert_almost_equal(output.sum(dtype='d'), input.sum(dtype='d'), decimal=0)
assert_(sumsq(input, output) > 1.0)
def test_gauss04(self):
"gaussian filter 4"
input = numpy.arange(100 * 100).astype(numpy.float32)
input.shape = (100, 100)
otype = numpy.float64
output = ndimage.gaussian_filter(input, [1.0, 1.0],
output=otype)
assert_equal(output.dtype.type, numpy.float64)
assert_equal(input.shape, output.shape)
assert_(sumsq(input, output) > 1.0)
def test_gauss05(self):
"gaussian filter 5"
input = numpy.arange(100 * 100).astype(numpy.float32)
input.shape = (100, 100)
otype = numpy.float64
output = ndimage.gaussian_filter(input, [1.0, 1.0],
order=1, output=otype)
assert_equal(output.dtype.type, numpy.float64)
assert_equal(input.shape, output.shape)
assert_(sumsq(input, output) > 1.0)
def test_gauss06(self):
"gaussian filter 6"
input = numpy.arange(100 * 100).astype(numpy.float32)
input.shape = (100, 100)
otype = numpy.float64
output1 = ndimage.gaussian_filter(input, [1.0, 1.0],
output=otype)
output2 = ndimage.gaussian_filter(input, 1.0,
output=otype)
assert_array_almost_equal(output1, output2)
def test_prewitt01(self):
"prewitt filter 1"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 0)
t = ndimage.correlate1d(t, [1.0, 1.0, 1.0], 1)
output = ndimage.prewitt(array, 0)
assert_array_almost_equal(t, output)
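    # The Prewitt filter along axis 0 is separable: a [-1, 0, 1] derivative
    # on axis 0 followed by [1, 1, 1] smoothing on axis 1; the Sobel tests
    # below use [1, 2, 1] for the smoothing step instead.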
def test_prewitt02(self):
"prewitt filter 2"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 0)
t = ndimage.correlate1d(t, [1.0, 1.0, 1.0], 1)
output = numpy.zeros(array.shape, type)
ndimage.prewitt(array, 0, output)
assert_array_almost_equal(t, output)
def test_prewitt03(self):
"prewitt filter 3"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 1)
t = ndimage.correlate1d(t, [1.0, 1.0, 1.0], 0)
output = ndimage.prewitt(array, 1)
assert_array_almost_equal(t, output)
def test_prewitt04(self):
"prewitt filter 4"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.prewitt(array, -1)
output = ndimage.prewitt(array, 1)
assert_array_almost_equal(t, output)
def test_sobel01(self):
"sobel filter 1"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 0)
t = ndimage.correlate1d(t, [1.0, 2.0, 1.0], 1)
output = ndimage.sobel(array, 0)
assert_array_almost_equal(t, output)
def test_sobel02(self):
"sobel filter 2"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 0)
t = ndimage.correlate1d(t, [1.0, 2.0, 1.0], 1)
output = numpy.zeros(array.shape, type)
ndimage.sobel(array, 0, output)
assert_array_almost_equal(t, output)
def test_sobel03(self):
"sobel filter 3"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.correlate1d(array, [-1.0, 0.0, 1.0], 1)
t = ndimage.correlate1d(t, [1.0, 2.0, 1.0], 0)
output = numpy.zeros(array.shape, type)
output = ndimage.sobel(array, 1)
assert_array_almost_equal(t, output)
def test_sobel04(self):
"sobel filter 4"
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
t = ndimage.sobel(array, -1)
output = ndimage.sobel(array, 1)
assert_array_almost_equal(t, output)
def test_laplace01(self):
"laplace filter 1"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.correlate1d(array, [1, -2, 1], 0)
tmp2 = ndimage.correlate1d(array, [1, -2, 1], 1)
output = ndimage.laplace(array)
assert_array_almost_equal(tmp1 + tmp2, output)
def test_laplace02(self):
"laplace filter 2"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.correlate1d(array, [1, -2, 1], 0)
tmp2 = ndimage.correlate1d(array, [1, -2, 1], 1)
output = numpy.zeros(array.shape, type)
ndimage.laplace(array, output=output)
assert_array_almost_equal(tmp1 + tmp2, output)
def test_gaussian_laplace01(self):
"gaussian laplace filter 1"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.gaussian_filter(array, 1.0, [2, 0])
tmp2 = ndimage.gaussian_filter(array, 1.0, [0, 2])
output = ndimage.gaussian_laplace(array, 1.0)
assert_array_almost_equal(tmp1 + tmp2, output)
def test_gaussian_laplace02(self):
"gaussian laplace filter 2"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.gaussian_filter(array, 1.0, [2, 0])
tmp2 = ndimage.gaussian_filter(array, 1.0, [0, 2])
output = numpy.zeros(array.shape, type)
ndimage.gaussian_laplace(array, 1.0, output)
assert_array_almost_equal(tmp1 + tmp2, output)
def test_generic_laplace01(self):
"generic laplace filter 1"
def derivative2(input, axis, output, mode, cval, a, b):
sigma = [a, b / 2.0]
input = numpy.asarray(input)
order = [0] * input.ndim
order[axis] = 2
return ndimage.gaussian_filter(input, sigma, order,
output, mode, cval)
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
output = numpy.zeros(array.shape, type)
tmp = ndimage.generic_laplace(array, derivative2,
extra_arguments=(1.0,), extra_keywords={'b': 2.0})
ndimage.gaussian_laplace(array, 1.0, output)
assert_array_almost_equal(tmp, output)
def test_gaussian_gradient_magnitude01(self):
"gaussian gradient magnitude filter 1"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.gaussian_filter(array, 1.0, [1, 0])
tmp2 = ndimage.gaussian_filter(array, 1.0, [0, 1])
output = ndimage.gaussian_gradient_magnitude(array,
1.0)
expected = tmp1 * tmp1 + tmp2 * tmp2
numpy.sqrt(expected, expected)
assert_array_almost_equal(expected, output)
def test_gaussian_gradient_magnitude02(self):
"gaussian gradient magnitude filter 2"
for type in [numpy.int32, numpy.float32, numpy.float64]:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type) * 100
tmp1 = ndimage.gaussian_filter(array, 1.0, [1, 0])
tmp2 = ndimage.gaussian_filter(array, 1.0, [0, 1])
output = numpy.zeros(array.shape, type)
ndimage.gaussian_gradient_magnitude(array, 1.0,
output)
expected = tmp1 * tmp1 + tmp2 * tmp2
numpy.sqrt(expected, expected)
assert_array_almost_equal(expected, output)
def test_generic_gradient_magnitude01(self):
"generic gradient magnitude 1"
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], numpy.float64)
def derivative(input, axis, output, mode, cval, a, b):
sigma = [a, b / 2.0]
input = numpy.asarray(input)
order = [0] * input.ndim
order[axis] = 1
return ndimage.gaussian_filter(input, sigma, order,
output, mode, cval)
tmp1 = ndimage.gaussian_gradient_magnitude(array, 1.0)
tmp2 = ndimage.generic_gradient_magnitude(array,
derivative, extra_arguments=(1.0,),
extra_keywords={'b': 2.0})
assert_array_almost_equal(tmp1, tmp2)
def test_uniform01(self):
"uniform filter 1"
array = numpy.array([2, 4, 6])
size = 2
output = ndimage.uniform_filter1d(array, size,
origin=-1)
assert_array_almost_equal([3, 5, 6], output)
def test_uniform02(self):
"uniform filter 2"
array = numpy.array([1, 2, 3])
filter_shape = [0]
output = ndimage.uniform_filter(array, filter_shape)
assert_array_almost_equal(array, output)
def test_uniform03(self):
"uniform filter 3"
array = numpy.array([1, 2, 3])
filter_shape = [1]
output = ndimage.uniform_filter(array, filter_shape)
assert_array_almost_equal(array, output)
def test_uniform04(self):
"uniform filter 4"
array = numpy.array([2, 4, 6])
filter_shape = [2]
output = ndimage.uniform_filter(array, filter_shape)
assert_array_almost_equal([2, 3, 5], output)
def test_uniform05(self):
"uniform filter 5"
array = []
filter_shape = [1]
output = ndimage.uniform_filter(array, filter_shape)
assert_array_almost_equal([], output)
def test_uniform06(self):
"uniform filter 6"
filter_shape = [2, 2]
for type1 in self.types:
array = numpy.array([[4, 8, 12],
[16, 20, 24]], type1)
for type2 in self.types:
output = ndimage.uniform_filter(array,
filter_shape, output=type2)
assert_array_almost_equal([[4, 6, 10], [10, 12, 16]], output)
assert_equal(output.dtype.type, type2)
def test_minimum_filter01(self):
"minimum filter 1"
array = numpy.array([1, 2, 3, 4, 5])
filter_shape = numpy.array([2])
output = ndimage.minimum_filter(array, filter_shape)
assert_array_almost_equal([1, 1, 2, 3, 4], output)
def test_minimum_filter02(self):
"minimum filter 2"
array = numpy.array([1, 2, 3, 4, 5])
filter_shape = numpy.array([3])
output = ndimage.minimum_filter(array, filter_shape)
assert_array_almost_equal([1, 1, 2, 3, 4], output)
def test_minimum_filter03(self):
"minimum filter 3"
array = numpy.array([3, 2, 5, 1, 4])
filter_shape = numpy.array([2])
output = ndimage.minimum_filter(array, filter_shape)
assert_array_almost_equal([3, 2, 2, 1, 1], output)
def test_minimum_filter04(self):
"minimum filter 4"
array = numpy.array([3, 2, 5, 1, 4])
filter_shape = numpy.array([3])
output = ndimage.minimum_filter(array, filter_shape)
assert_array_almost_equal([2, 2, 1, 1, 1], output)
def test_minimum_filter05(self):
"minimum filter 5"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
filter_shape = numpy.array([2, 3])
output = ndimage.minimum_filter(array, filter_shape)
assert_array_almost_equal([[2, 2, 1, 1, 1],
[2, 2, 1, 1, 1],
[5, 3, 3, 1, 1]], output)
def test_minimum_filter06(self):
"minimum filter 6"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 1, 1], [1, 1, 1]]
output = ndimage.minimum_filter(array,
footprint=footprint)
assert_array_almost_equal([[2, 2, 1, 1, 1],
[2, 2, 1, 1, 1],
[5, 3, 3, 1, 1]], output)
def test_minimum_filter07(self):
"minimum filter 7"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.minimum_filter(array,
footprint=footprint)
assert_array_almost_equal([[2, 2, 1, 1, 1],
[2, 3, 1, 3, 1],
[5, 5, 3, 3, 1]], output)
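    # A footprint of ones and zeros selects which positions inside the
    # window take part in the minimum, rather than the full rectangular
    # neighbourhood implied by a plain size argument.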
def test_minimum_filter08(self):
"minimum filter 8"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.minimum_filter(array,
footprint=footprint, origin=-1)
assert_array_almost_equal([[3, 1, 3, 1, 1],
[5, 3, 3, 1, 1],
[3, 3, 1, 1, 1]], output)
def test_minimum_filter09(self):
"minimum filter 9"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.minimum_filter(array,
footprint=footprint, origin=[-1, 0])
assert_array_almost_equal([[2, 3, 1, 3, 1],
[5, 5, 3, 3, 1],
[5, 3, 3, 1, 1]], output)
def test_maximum_filter01(self):
"maximum filter 1"
array = numpy.array([1, 2, 3, 4, 5])
filter_shape = numpy.array([2])
output = ndimage.maximum_filter(array, filter_shape)
assert_array_almost_equal([1, 2, 3, 4, 5], output)
def test_maximum_filter02(self):
"maximum filter 2"
array = numpy.array([1, 2, 3, 4, 5])
filter_shape = numpy.array([3])
output = ndimage.maximum_filter(array, filter_shape)
assert_array_almost_equal([2, 3, 4, 5, 5], output)
def test_maximum_filter03(self):
"maximum filter 3"
array = numpy.array([3, 2, 5, 1, 4])
filter_shape = numpy.array([2])
output = ndimage.maximum_filter(array, filter_shape)
assert_array_almost_equal([3, 3, 5, 5, 4], output)
def test_maximum_filter04(self):
"maximum filter 4"
array = numpy.array([3, 2, 5, 1, 4])
filter_shape = numpy.array([3])
output = ndimage.maximum_filter(array, filter_shape)
assert_array_almost_equal([3, 5, 5, 5, 4], output)
def test_maximum_filter05(self):
"maximum filter 5"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
filter_shape = numpy.array([2, 3])
output = ndimage.maximum_filter(array, filter_shape)
assert_array_almost_equal([[3, 5, 5, 5, 4],
[7, 9, 9, 9, 5],
[8, 9, 9, 9, 7]], output)
def test_maximum_filter06(self):
"maximum filter 6"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 1, 1], [1, 1, 1]]
output = ndimage.maximum_filter(array,
footprint=footprint)
assert_array_almost_equal([[3, 5, 5, 5, 4],
[7, 9, 9, 9, 5],
[8, 9, 9, 9, 7]], output)
def test_maximum_filter07(self):
"maximum filter 7"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.maximum_filter(array,
footprint=footprint)
assert_array_almost_equal([[3, 5, 5, 5, 4],
[7, 7, 9, 9, 5],
[7, 9, 8, 9, 7]], output)
def test_maximum_filter08(self):
"maximum filter 8"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.maximum_filter(array,
footprint=footprint, origin=-1)
assert_array_almost_equal([[7, 9, 9, 5, 5],
[9, 8, 9, 7, 5],
[8, 8, 7, 7, 7]], output)
def test_maximum_filter09(self):
"maximum filter 9"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.maximum_filter(array,
footprint=footprint, origin=[-1, 0])
assert_array_almost_equal([[7, 7, 9, 9, 5],
[7, 9, 8, 9, 7],
[8, 8, 8, 7, 7]], output)
def test_rank01(self):
"rank filter 1"
array = numpy.array([1, 2, 3, 4, 5])
output = ndimage.rank_filter(array, 1, size=2)
assert_array_almost_equal(array, output)
output = ndimage.percentile_filter(array, 100, size=2)
assert_array_almost_equal(array, output)
output = ndimage.median_filter(array, 2)
assert_array_almost_equal(array, output)
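    # rank_filter sorts each window and picks the element at the given rank
    # (negative ranks count back from the maximum); percentile_filter and
    # median_filter are the percentile and 50th-percentile views of the same
    # operation.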
def test_rank02(self):
"rank filter 2"
array = numpy.array([1, 2, 3, 4, 5])
output = ndimage.rank_filter(array, 1, size=[3])
assert_array_almost_equal(array, output)
output = ndimage.percentile_filter(array, 50, size=3)
assert_array_almost_equal(array, output)
output = ndimage.median_filter(array, (3,))
assert_array_almost_equal(array, output)
def test_rank03(self):
"rank filter 3"
array = numpy.array([3, 2, 5, 1, 4])
output = ndimage.rank_filter(array, 1, size=[2])
assert_array_almost_equal([3, 3, 5, 5, 4], output)
output = ndimage.percentile_filter(array, 100, size=2)
assert_array_almost_equal([3, 3, 5, 5, 4], output)
def test_rank04(self):
"rank filter 4"
array = numpy.array([3, 2, 5, 1, 4])
expected = [3, 3, 2, 4, 4]
output = ndimage.rank_filter(array, 1, size=3)
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 50, size=3)
assert_array_almost_equal(expected, output)
output = ndimage.median_filter(array, size=3)
assert_array_almost_equal(expected, output)
def test_rank05(self):
"rank filter 5"
array = numpy.array([3, 2, 5, 1, 4])
expected = [3, 3, 2, 4, 4]
output = ndimage.rank_filter(array, -2, size=3)
assert_array_almost_equal(expected, output)
def test_rank06(self):
"rank filter 6"
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]])
expected = [[2, 2, 1, 1, 1],
[3, 3, 2, 1, 1],
[5, 5, 3, 3, 1]]
output = ndimage.rank_filter(array, 1, size=[2, 3])
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 17,
size=(2, 3))
assert_array_almost_equal(expected, output)
def test_rank07(self):
"rank filter 7"
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]])
expected = [[3, 5, 5, 5, 4],
[5, 5, 7, 5, 4],
[6, 8, 8, 7, 5]]
output = ndimage.rank_filter(array, -2, size=[2, 3])
assert_array_almost_equal(expected, output)
def test_rank08(self):
"median filter 8"
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]])
expected = [[3, 3, 2, 4, 4],
[5, 5, 5, 4, 4],
[5, 6, 7, 5, 5]]
kernel = numpy.array([2, 3])
output = ndimage.percentile_filter(array, 50.0,
size=(2, 3))
assert_array_almost_equal(expected, output)
output = ndimage.rank_filter(array, 3, size=(2, 3))
assert_array_almost_equal(expected, output)
output = ndimage.median_filter(array, size=(2, 3))
assert_array_almost_equal(expected, output)
def test_rank09(self):
"rank filter 9"
expected = [[3, 3, 2, 4, 4],
[3, 5, 2, 5, 1],
[5, 5, 8, 3, 5]]
footprint = [[1, 0, 1], [0, 1, 0]]
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
output = ndimage.rank_filter(array, 1,
footprint=footprint)
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 35,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_rank10(self):
"rank filter 10"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
expected = [[2, 2, 1, 1, 1],
[2, 3, 1, 3, 1],
[5, 5, 3, 3, 1]]
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.rank_filter(array, 0,
footprint=footprint)
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 0.0,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_rank11(self):
"rank filter 11"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
expected = [[3, 5, 5, 5, 4],
[7, 7, 9, 9, 5],
[7, 9, 8, 9, 7]]
footprint = [[1, 0, 1], [1, 1, 0]]
output = ndimage.rank_filter(array, -1,
footprint=footprint)
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 100.0,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_rank12(self):
"rank filter 12"
expected = [[3, 3, 2, 4, 4],
[3, 5, 2, 5, 1],
[5, 5, 8, 3, 5]]
footprint = [[1, 0, 1], [0, 1, 0]]
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
output = ndimage.rank_filter(array, 1,
footprint=footprint)
assert_array_almost_equal(expected, output)
output = ndimage.percentile_filter(array, 50.0,
footprint=footprint)
assert_array_almost_equal(expected, output)
output = ndimage.median_filter(array,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_rank13(self):
"rank filter 13"
expected = [[5, 2, 5, 1, 1],
[5, 8, 3, 5, 5],
[6, 6, 5, 5, 5]]
footprint = [[1, 0, 1], [0, 1, 0]]
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
output = ndimage.rank_filter(array, 1,
footprint=footprint, origin=-1)
assert_array_almost_equal(expected, output)
def test_rank14(self):
"rank filter 14"
expected = [[3, 5, 2, 5, 1],
[5, 5, 8, 3, 5],
[5, 6, 6, 5, 5]]
footprint = [[1, 0, 1], [0, 1, 0]]
for type in self.types:
array = numpy.array([[3, 2, 5, 1, 4],
[5, 8, 3, 7, 1],
[5, 6, 9, 3, 5]], type)
output = ndimage.rank_filter(array, 1,
footprint=footprint, origin=[-1, 0])
assert_array_almost_equal(expected, output)
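# The generic_filter1d tests below exercise the callback contract: the
# callback receives the boundary-extended input line plus a pre-allocated
# output line that it must fill in place, and extra_arguments /
# extra_keywords are forwarded to it verbatim. A minimal sketch of such a
# callback (hypothetical name, assuming a 3-tap moving average):
#
#     def _three_tap(input_line, output_line):
#         # input_line is already extended, so i..i+2 is always in range
#         for i in range(output_line.shape[0]):
#             output_line[i] = input_line[i:i + 3].sum() / 3.0
#
# generic_filter (without the "1d") differs: its callback receives the
# flattened neighborhood buffer and returns one scalar per position.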
def test_generic_filter1d01(self):
"generic 1d filter 1"
weights = numpy.array([1.1, 2.2, 3.3])
def _filter_func(input, output, fltr, total):
fltr = fltr / total
for ii in range(input.shape[0] - 2):
output[ii] = input[ii] * fltr[0]
output[ii] += input[ii + 1] * fltr[1]
output[ii] += input[ii + 2] * fltr[2]
for type in self.types:
a = numpy.arange(12, dtype=type)
a.shape = (3,4)
r1 = ndimage.correlate1d(a, weights / weights.sum(), 0,
origin=-1)
r2 = ndimage.generic_filter1d(a, _filter_func, 3,
axis=0, origin=-1, extra_arguments=(weights,),
extra_keywords={'total': weights.sum()})
assert_array_almost_equal(r1, r2)
def test_generic_filter01(self):
"generic filter 1"
filter_ = numpy.array([[1.0, 2.0], [3.0, 4.0]])
footprint = numpy.array([[1, 0], [0, 1]])
cf = numpy.array([1., 4.])
def _filter_func(buffer, weights, total=1.0):
weights = cf / total
return (buffer * weights).sum()
for type in self.types:
a = numpy.arange(12, dtype=type)
a.shape = (3,4)
r1 = ndimage.correlate(a, filter_ * footprint)
r1 /= 5
r2 = ndimage.generic_filter(a, _filter_func,
footprint=footprint, extra_arguments=(cf,),
extra_keywords={'total': cf.sum()})
assert_array_almost_equal(r1, r2)
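# The test_extend* cases probe the boundary-extension modes of correlate1d
# and correlate. The expected outputs are consistent with self.modes
# (defined earlier in the file) being ['nearest', 'wrap', 'reflect',
# 'mirror', 'constant']: e.g. in test_extend01 the left neighbour of
# element 0 of [1, 2, 3] is 1 (nearest), 3 (wrap), 1 (reflect), 2 (mirror)
# or the cval 0 (constant).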
def test_extend01(self):
"line extension 1"
array = numpy.array([1, 2, 3])
weights = numpy.array([1, 0])
expected_values = [[1, 1, 2],
[3, 1, 2],
[1, 1, 2],
[2, 1, 2],
[0, 1, 2]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate1d(array, weights, 0,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend02(self):
"line extension 2"
array = numpy.array([1, 2, 3])
weights = numpy.array([1, 0, 0, 0, 0, 0, 0, 0])
expected_values = [[1, 1, 1],
[3, 1, 2],
[3, 3, 2],
[1, 2, 3],
[0, 0, 0]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate1d(array, weights, 0,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend03(self):
"line extension 3"
array = numpy.array([1, 2, 3])
weights = numpy.array([0, 0, 1])
expected_values = [[2, 3, 3],
[2, 3, 1],
[2, 3, 3],
[2, 3, 2],
[2, 3, 0]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate1d(array, weights, 0,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend04(self):
"line extension 4"
array = numpy.array([1, 2, 3])
weights = numpy.array([0, 0, 0, 0, 0, 0, 0, 0, 1])
expected_values = [[3, 3, 3],
[2, 3, 1],
[2, 1, 1],
[1, 2, 3],
[0, 0, 0]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate1d(array, weights, 0,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend05(self):
"line extension 5"
array = numpy.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
weights = numpy.array([[1, 0], [0, 0]])
expected_values = [[[1, 1, 2], [1, 1, 2], [4, 4, 5]],
[[9, 7, 8], [3, 1, 2], [6, 4, 5]],
[[1, 1, 2], [1, 1, 2], [4, 4, 5]],
[[5, 4, 5], [2, 1, 2], [5, 4, 5]],
[[0, 0, 0], [0, 1, 2], [0, 4, 5]]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend06(self):
"line extension 6"
array = numpy.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
weights = numpy.array([[0, 0, 0], [0, 0, 0], [0, 0, 1]])
expected_values = [[[5, 6, 6], [8, 9, 9], [8, 9, 9]],
[[5, 6, 4], [8, 9, 7], [2, 3, 1]],
[[5, 6, 6], [8, 9, 9], [8, 9, 9]],
[[5, 6, 5], [8, 9, 8], [5, 6, 5]],
[[5, 6, 0], [8, 9, 0], [0, 0, 0]]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend07(self):
"line extension 7"
array = numpy.array([1, 2, 3])
weights = numpy.array([0, 0, 0, 0, 0, 0, 0, 0, 1])
expected_values = [[3, 3, 3],
[2, 3, 1],
[2, 1, 1],
[1, 2, 3],
[0, 0, 0]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend08(self):
"line extension 8"
array = numpy.array([[1], [2], [3]])
weights = numpy.array([[0], [0], [0], [0], [0], [0], [0],
[0], [1]])
expected_values = [[[3], [3], [3]],
[[2], [3], [1]],
[[2], [1], [1]],
[[1], [2], [3]],
[[0], [0], [0]]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend09(self):
"line extension 9"
array = numpy.array([1, 2, 3])
weights = numpy.array([0, 0, 0, 0, 0, 0, 0, 0, 1])
expected_values = [[3, 3, 3],
[2, 3, 1],
[2, 1, 1],
[1, 2, 3],
[0, 0, 0]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_extend10(self):
"line extension 10"
array = numpy.array([[1], [2], [3]])
weights = numpy.array([[0], [0], [0], [0], [0], [0], [0],
[0], [1]])
expected_values = [[[3], [3], [3]],
[[2], [3], [1]],
[[2], [1], [1]],
[[1], [2], [3]],
[[0], [0], [0]]]
for mode, expected_value in zip(self.modes, expected_values):
output = ndimage.correlate(array, weights,
mode=mode, cval=0)
assert_array_equal(output, expected_value)
def test_boundaries(self):
"boundary modes"
def shift(x):
return (x[0] + 0.5,)
data = numpy.array([1, 2, 3, 4.])
expected = {'constant': [1.5, 2.5, 3.5, -1, -1, -1, -1],
            'wrap': [1.5, 2.5, 3.5, 1.5, 2.5, 3.5, 1.5],
            'mirror': [1.5, 2.5, 3.5, 3.5, 2.5, 1.5, 1.5],
            'nearest': [1.5, 2.5, 3.5, 4, 4, 4, 4]}
for mode in expected.keys():
assert_array_equal(expected[mode],
                   ndimage.geometric_transform(data, shift,
                                               cval=-1, mode=mode,
                                               output_shape=(7,),
                                               order=1))
def test_boundaries2(self):
"boundary modes 2"
def shift(x):
return (x[0] - 0.9,)
data = numpy.array([1, 2, 3, 4])
expected = {'constant': [-1, 1, 2, 3],
            'wrap': [3, 1, 2, 3],
            'mirror': [2, 1, 2, 3],
            'nearest': [1, 1, 2, 3]}
for mode in expected.keys():
assert_array_equal(expected[mode],
                   ndimage.geometric_transform(data, shift,
                                               cval=-1, mode=mode,
                                               output_shape=(4,)))
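# The Fourier filter tests share one pattern: transform a unit impulse (or
# a ramp, for fourier_shift) with the fft module imported near the top of
# the file (presumably numpy.fft), apply the ndimage frequency-domain
# filter, transform back, and check that the total mass is preserved --
# these filters leave the DC component at 1. The third argument is the
# pre-transform length of the axis that went through the real transform,
# or -1 when the input came from a full complex transform.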
def test_fourier_gaussian_real01(self):
"gaussian fourier filter for real transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.float32, numpy.float64]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.rfft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_gaussian(a, [5.0, 2.5],
shape[0], 0)
a = fft.ifft(a, shape[1], 1)
a = fft.irfft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a), 1)
def test_fourier_gaussian_complex01(self):
"gaussian fourier filter for complex transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.complex64, numpy.complex128]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.fft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_gaussian(a, [5.0, 2.5], -1,
0)
a = fft.ifft(a, shape[1], 1)
a = fft.ifft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a.real), 1.0)
def test_fourier_uniform_real01(self):
"uniform fourier filter for real transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.float32, numpy.float64]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.rfft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_uniform(a, [5.0, 2.5],
shape[0], 0)
a = fft.ifft(a, shape[1], 1)
a = fft.irfft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a), 1.0)
def test_fourier_uniform_complex01(self):
"uniform fourier filter for complex transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.complex64, numpy.complex128]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.fft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_uniform(a, [5.0, 2.5], -1, 0)
a = fft.ifft(a, shape[1], 1)
a = fft.ifft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a.real), 1.0)
def test_fourier_shift_real01(self):
"shift filter for real transforms 1"
for shape in [(32, 16), (31, 15)]:
for dtype in [numpy.float32, numpy.float64]:
expected = numpy.arange(shape[0] * shape[1], dtype=dtype)
expected.shape = shape
a = fft.rfft(expected, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_shift(a, [1, 1], shape[0], 0)
a = fft.ifft(a, shape[1], 1)
a = fft.irfft(a, shape[0], 0)
assert_array_almost_equal(a[1:, 1:], expected[:-1, :-1])
assert_array_almost_equal(a.imag, numpy.zeros(shape))
def test_fourier_shift_complex01(self):
"shift filter for complex transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.complex64, numpy.complex128]:
expected = numpy.arange(shape[0] * shape[1],
dtype=type)
expected.shape = shape
a = fft.fft(expected, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_shift(a, [1, 1], -1, 0)
a = fft.ifft(a, shape[1], 1)
a = fft.ifft(a, shape[0], 0)
assert_array_almost_equal(a.real[1:, 1:], expected[:-1, :-1])
assert_array_almost_equal(a.imag, numpy.zeros(shape))
def test_fourier_ellipsoid_real01(self):
"ellipsoid fourier filter for real transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.float32, numpy.float64]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.rfft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_ellipsoid(a, [5.0, 2.5],
shape[0], 0)
a = fft.ifft(a, shape[1], 1)
a = fft.irfft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a), 1.0)
def test_fourier_ellipsoid_complex01(self):
"ellipsoid fourier filter for complex transforms 1"
for shape in [(32, 16), (31, 15)]:
for type in [numpy.complex64, numpy.complex128]:
a = numpy.zeros(shape, type)
a[0, 0] = 1.0
a = fft.fft(a, shape[0], 0)
a = fft.fft(a, shape[1], 1)
a = ndimage.fourier_ellipsoid(a, [5.0, 2.5], -1,
0)
a = fft.ifft(a, shape[1], 1)
a = fft.ifft(a, shape[0], 0)
assert_almost_equal(ndimage.sum(a.real), 1.0)
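# spline_filter computes the B-spline prefilter coefficients that the
# interpolation routines (geometric_transform, affine_transform, shift,
# zoom, rotate) apply implicitly when prefilter=True and order > 1. The
# tests below only check the invariant that a constant input yields the
# same constant coefficients, which holds exactly for every order.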
def test_spline01(self):
"spline filter 1"
for type in self.types:
data = numpy.ones([], type)
for order in range(2, 6):
out = ndimage.spline_filter(data, order=order)
assert_array_almost_equal(out, 1)
def test_spline02(self):
"spline filter 2"
for type in self.types:
data = numpy.array([1], type)
for order in range(2, 6):
out = ndimage.spline_filter(data, order=order)
assert_array_almost_equal(out, [1])
def test_spline03(self):
"spline filter 3"
for type in self.types:
data = numpy.ones([], type)
for order in range(2, 6):
out = ndimage.spline_filter(data, order,
output=type)
assert_array_almost_equal(out, 1)
def test_spline04(self):
"spline filter 4"
for type in self.types:
data = numpy.ones([4], type)
for order in range(2, 6):
out = ndimage.spline_filter(data, order)
assert_array_almost_equal(out, [1, 1, 1, 1])
def test_spline05(self):
"spline filter 5"
for type in self.types:
data = numpy.ones([4, 4], type)
for order in range(2, 6):
out = ndimage.spline_filter(data, order=order)
assert_array_almost_equal(out, [[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]])
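# geometric_transform pulls values: for every output coordinate it calls
# mapping(output_coord) to obtain the *input* coordinate to sample, so a
# mapping that subtracts 1 shifts content towards higher indices and a
# mapping that halves coordinates zooms the data up by 2. Points that map
# outside the input are filled with cval (default 0), which is why the
# shifted results below gain a row or column of zeros.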
def test_geometric_transform01(self):
"geometric transform 1"
data = numpy.array([1])
def mapping(x):
return x
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape,
order=order)
assert_array_almost_equal(out, [1])
def test_geometric_transform02(self):
"geometric transform 2"
data = numpy.ones([4])
def mapping(x):
return x
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [1, 1, 1, 1])
def test_geometric_transform03(self):
"geometric transform 3"
data = numpy.ones([4])
def mapping(x):
return (x[0] - 1,)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [0, 1, 1, 1])
def test_geometric_transform04(self):
"geometric transform 4"
data = numpy.array([4, 1, 3, 2])
def mapping(x):
return (x[0] - 1,)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [0, 4, 1, 3])
def test_geometric_transform05(self):
"geometric transform 5"
data = numpy.array([[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]])
def mapping(x):
return (x[0], x[1] - 1)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [[0, 1, 1, 1],
[0, 1, 1, 1],
[0, 1, 1, 1]])
def test_geometric_transform06(self):
"geometric transform 6"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
def mapping(x):
return (x[0], x[1] - 1)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [[0, 4, 1, 3],
[0, 7, 6, 8],
[0, 3, 5, 3]])
def test_geometric_transform07(self):
"geometric transform 7"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
def mapping(x):
return (x[0] - 1, x[1])
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[4, 1, 3, 2],
[7, 6, 8, 5]])
def test_geometric_transform08(self):
"geometric transform 8"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
def mapping(x):
return (x[0] - 1, x[1] - 1)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
data.shape, order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_geometric_transform10(self):
"geometric transform 10"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
def mapping(x):
return (x[0] - 1, x[1] - 1)
for order in range(0, 6):
if order > 1:
filtered = ndimage.spline_filter(data,
order=order)
else:
filtered = data
out = ndimage.geometric_transform(filtered, mapping,
data.shape, order=order, prefilter=False)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_geometric_transform13(self):
"geometric transform 13"
data = numpy.ones([2], numpy.float64)
def mapping(x):
return (x[0] // 2,)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
[4], order=order)
assert_array_almost_equal(out, [1, 1, 1, 1])
def test_geometric_transform14(self):
"geometric transform 14"
data = [1, 5, 2, 6, 3, 7, 4, 4]
def mapping(x):
return (2 * x[0],)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
[4], order=order)
assert_array_almost_equal(out, [1, 2, 3, 4])
def test_geometric_transform15(self):
"geometric transform 15"
data = [1, 2, 3, 4]
def mapping(x):
return (x[0] / 2,)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
[8], order=order)
assert_array_almost_equal(out[::2], [1, 2, 3, 4])
def test_geometric_transform16(self):
"geometric transform 16"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9.0, 10, 11, 12]]
def mapping(x):
return (x[0], x[1] * 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(3, 2), order=order)
assert_array_almost_equal(out, [[1, 3], [5, 7], [9, 11]])
def test_geometric_transform17(self):
"geometric transform 17"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (x[0] * 2, x[1])
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(1, 4), order=order)
assert_array_almost_equal(out, [[1, 2, 3, 4]])
def test_geometric_transform18(self):
"geometric transform 18"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (x[0] * 2, x[1] * 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(1, 2), order=order)
assert_array_almost_equal(out, [[1, 3]])
def test_geometric_transform19(self):
"geometric transform 19"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (x[0], x[1] / 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(3, 8), order=order)
assert_array_almost_equal(out[..., ::2], data)
def test_geometric_transform20(self):
"geometric transform 20"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (x[0] / 2, x[1])
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(6, 4), order=order)
assert_array_almost_equal(out[::2, ...], data)
def test_geometric_transform21(self):
"geometric transform 21"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (x[0] / 2, x[1] / 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(6, 8), order=order)
assert_array_almost_equal(out[::2, ::2], data)
def test_geometric_transform22(self):
"geometric transform 22"
data = numpy.array([[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]], numpy.float64)
def mapping1(x):
return (x[0] / 2, x[1] / 2)
def mapping2(x):
return (x[0] * 2, x[1] * 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping1,
(6, 8), order=order)
out = ndimage.geometric_transform(out, mapping2,
(3, 4), order=order)
assert_array_almost_equal(out, data)
def test_geometric_transform23(self):
"geometric transform 23"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x):
return (1, x[0] * 2)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(2,), order=order)
out = out.astype(numpy.int32)
assert_array_almost_equal(out, [5, 7])
def test_geometric_transform24(self):
"geometric transform 24"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
def mapping(x, a, b):
return (a, x[0] * b)
for order in range(0, 6):
out = ndimage.geometric_transform(data, mapping,
(2,), order=order, extra_arguments=(1,),
extra_keywords={'b': 2})
assert_array_almost_equal(out, [5, 7])
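# map_coordinates is the array-based counterpart of geometric_transform:
# instead of a Python callback it takes a coordinate stack of shape
# (ndim, ...) and returns an array of shape coordinates.shape[1:]. The two
# tests below check it against an explicit integer index shift and against
# ndimage.shift with a half-pixel offset.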
def test_map_coordinates01(self):
"map coordinates 1"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
idx = numpy.indices(data.shape)
idx -= 1
for order in range(0, 6):
out = ndimage.map_coordinates(data, idx, order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_map_coordinates02(self):
"map coordinates 2"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
idx = numpy.indices(data.shape, numpy.float64)
idx -= 0.5
for order in range(0, 6):
out1 = ndimage.shift(data, 0.5, order=order)
out2 = ndimage.map_coordinates(data, idx,
order=order)
assert_array_almost_equal(out1, out2)
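# affine_transform uses the same pull convention: the matrix and offset
# map an *output* coordinate o to the input coordinate matrix @ o + offset.
# Hence [[0.5]] doubles the length of a 1-d signal (tests 10 and 12) while
# [[2]] halves it (test 11), and a negative offset shifts content towards
# higher indices, mirroring the geometric_transform cases above.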
def test_affine_transform01(self):
"affine_transform 1"
data = numpy.array([1])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1]],
order=order)
assert_array_almost_equal(out, [1])
def test_affine_transform02(self):
"affine transform 2"
data = numpy.ones([4])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1]],
order=order)
assert_array_almost_equal(out, [1, 1, 1, 1])
def test_affine_transform03(self):
"affine transform 3"
data = numpy.ones([4])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1]], -1,
order=order)
assert_array_almost_equal(out, [0, 1, 1, 1])
def test_affine_transform04(self):
"affine transform 4"
data = numpy.array([4, 1, 3, 2])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1]], -1,
order=order)
assert_array_almost_equal(out, [0, 4, 1, 3])
def test_affine_transform05(self):
"affine transform 5"
data = numpy.array([[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0],
[0, 1]],
[0, -1], order=order)
assert_array_almost_equal(out, [[0, 1, 1, 1],
[0, 1, 1, 1],
[0, 1, 1, 1]])
def test_affine_transform06(self):
"affine transform 6"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0],
[0, 1]],
[0, -1], order=order)
assert_array_almost_equal(out, [[0, 4, 1, 3],
[0, 7, 6, 8],
[0, 3, 5, 3]])
def test_affine_transform07(self):
"affine transform 7"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0],
[0, 1]],
[-1, 0], order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[4, 1, 3, 2],
[7, 6, 8, 5]])
def test_affine_transform08(self):
"affine transform 8"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0],
[0, 1]],
[-1, -1], order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_affine_transform09(self):
"affine transform 9"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
if order > 1:
filtered = ndimage.spline_filter(data,
order=order)
else:
filtered = data
out = ndimage.affine_transform(filtered,[[1, 0],
[0, 1]],
[-1, -1], order=order, prefilter=False)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_affine_transform10(self):
"affine transform 10"
data = numpy.ones([2], numpy.float64)
for order in range(0, 6):
out = ndimage.affine_transform(data, [[0.5]],
output_shape=(4,), order=order)
assert_array_almost_equal(out, [1, 1, 1, 0])
def test_affine_transform11(self):
"affine transform 11"
data = [1, 5, 2, 6, 3, 7, 4, 4]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[2]], 0, (4,),
order=order)
assert_array_almost_equal(out, [1, 2, 3, 4])
def test_affine_transform12(self):
"affine transform 12"
data = [1, 2, 3, 4]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[0.5]], 0,
(8,), order=order)
assert_array_almost_equal(out[::2], [1, 2, 3, 4])
def test_affine_transform13(self):
"affine transform 13"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9.0, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0],
[0, 2]], 0,
(3, 2), order=order)
assert_array_almost_equal(out, [[1, 3], [5, 7], [9, 11]])
def test_affine_transform14(self):
"affine transform 14"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[2, 0],
[0, 1]], 0,
(1, 4), order=order)
assert_array_almost_equal(out, [[1, 2, 3, 4]])
def test_affine_transform15(self):
"affine transform 15"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[2, 0],
[0, 2]], 0,
(1, 2), order=order)
assert_array_almost_equal(out, [[1, 3]])
def test_affine_transform16(self):
"affine transform 16"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[1, 0.0],
[0, 0.5]], 0,
(3, 8), order=order)
assert_array_almost_equal(out[..., ::2], data)
def test_affine_transform17(self):
"affine transform 17"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[0.5, 0],
[0, 1]], 0,
(6, 4), order=order)
assert_array_almost_equal(out[::2, ...], data)
def test_affine_transform18(self):
"affine transform 18"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data,
[[0.5, 0],
[0, 0.5]], 0,
(6, 8), order=order)
assert_array_almost_equal(out[::2, ::2], data)
def test_affine_transform19(self):
"affine transform 19"
data = numpy.array([[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]], numpy.float64)
for order in range(0, 6):
out = ndimage.affine_transform(data,
[[0.5, 0],
[0, 0.5]], 0,
(6, 8), order=order)
out = ndimage.affine_transform(out,
[[2.0, 0],
[0, 2.0]], 0,
(3, 4), order=order)
assert_array_almost_equal(out, data)
def test_affine_transform20(self):
"affine transform 20"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[0], [2]], 0,
(2,), order=order)
assert_array_almost_equal(out, [1, 3])
def test_affine_transform21(self):
"affine transform 21"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [[2], [0]], 0,
(2,), order=order)
assert_array_almost_equal(out, [1, 9])
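# ndimage.shift(input, s) moves content by s towards higher indices for
# positive s, filling the vacated edge according to mode/cval (zeros in
# these tests); it is effectively affine_transform with an identity matrix
# and offset -s.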
def test_shift01(self):
"shift 1"
data = numpy.array([1])
for order in range(0, 6):
out = ndimage.shift(data, [1], order=order)
assert_array_almost_equal(out, [0])
def test_shift02(self):
"shift 2"
data = numpy.ones([4])
for order in range(0, 6):
out = ndimage.shift(data, [1], order=order)
assert_array_almost_equal(out, [0, 1, 1, 1])
def test_shift03(self):
"shift 3"
data = numpy.ones([4])
for order in range(0, 6):
out = ndimage.shift(data, -1, order=order)
assert_array_almost_equal(out, [1, 1, 1, 0])
def test_shift04(self):
"shift 4"
data = numpy.array([4, 1, 3, 2])
for order in range(0, 6):
out = ndimage.shift(data, 1, order=order)
assert_array_almost_equal(out, [0, 4, 1, 3])
def test_shift05(self):
"shift 5"
data = numpy.array([[1, 1, 1, 1],
[1, 1, 1, 1],
[1, 1, 1, 1]])
for order in range(0, 6):
out = ndimage.shift(data, [0, 1], order=order)
assert_array_almost_equal(out, [[0, 1, 1, 1],
[0, 1, 1, 1],
[0, 1, 1, 1]])
def test_shift06(self):
"shift 6"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.shift(data, [0, 1], order=order)
assert_array_almost_equal(out, [[0, 4, 1, 3],
[0, 7, 6, 8],
[0, 3, 5, 3]])
def test_shift07(self):
"shift 7"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.shift(data, [1, 0], order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[4, 1, 3, 2],
[7, 6, 8, 5]])
def test_shift08(self):
"shift 8"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
out = ndimage.shift(data, [1, 1], order=order)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
def test_shift09(self):
"shift 9"
data = numpy.array([[4, 1, 3, 2],
[7, 6, 8, 5],
[3, 5, 3, 6]])
for order in range(0, 6):
if order > 1:
filtered = ndimage.spline_filter(data,
order=order)
else:
filtered = data
out = ndimage.shift(filtered, [1, 1], order=order,
prefilter=False)
assert_array_almost_equal(out, [[0, 0, 0, 0],
[0, 4, 1, 3],
[0, 7, 6, 8]])
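# zoom resamples to a shape of roughly input_shape * zoom_factor. The
# bounds checks below (using the eps tolerance defined earlier in the
# file) apparently guard against a regression where the last row/column of
# a zoomed array came out all zero, which is what the arr[-1, :] != 0
# assertion suggests.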
def test_zoom1(self):
"zoom 1"
for order in range(0, 6):
for z in [2, [2, 2]]:
arr = numpy.array(range(25)).reshape((5, 5)).astype(float)
arr = ndimage.zoom(arr, z, order=order)
assert_equal(arr.shape, (10, 10))
assert_(numpy.all(arr[-1, :] != 0))
assert_(numpy.all(arr[-1, :] >= (20 - eps)))
assert_(numpy.all(arr[0, :] <= (5 + eps)))
assert_(numpy.all(arr >= (0 - eps)))
assert_(numpy.all(arr <= (24 + eps)))
def test_zoom2(self):
"zoom 2"
arr = numpy.arange(12).reshape((3, 4))
out = ndimage.zoom(ndimage.zoom(arr, 2), 0.5)
assert_array_equal(out, arr)
def test_zoom_affine01(self):
"zoom by affine transformation 1"
data = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12]]
for order in range(0, 6):
out = ndimage.affine_transform(data, [0.5, 0.5], 0,
(6, 8), order=order)
assert_array_almost_equal(out[::2, ::2], data)
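# rotate is affine_transform in the plane selected by axes (default
# (1, 0)), rotating about the centre of the array. With reshape=True (the
# default) the output is enlarged so the whole rotated input fits, which
# is why a 3x4 input becomes 4x3 after 90 degrees; reshape=False keeps the
# input shape and clips.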
def test_rotate01(self):
"rotate 1"
data = numpy.array([[0, 0, 0, 0],
[0, 1, 1, 0],
[0, 0, 0, 0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 0, order=order)
assert_array_almost_equal(out, data)
def test_rotate02(self):
"rotate 2"
data = numpy.array([[0, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 0]], dtype=numpy.float64)
expected = numpy.array([[0, 0, 0],
[0, 0, 0],
[0, 1, 0],
[0, 0, 0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 90, order=order)
assert_array_almost_equal(out, expected)
def test_rotate03(self):
"rotate 3"
data = numpy.array([[0, 0, 0, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0]], dtype=numpy.float64)
expected = numpy.array([[0, 0, 0],
[0, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 90, order=order)
assert_array_almost_equal(out, expected)
def test_rotate04(self):
"rotate 4"
data = numpy.array([[0, 0, 0, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0]], dtype=numpy.float64)
expected = numpy.array([[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 90, reshape=False, order=order)
assert_array_almost_equal(out, expected)
def test_rotate05(self):
"rotate 5"
data = numpy.empty((4,3,3))
for i in range(3):
data[:,:,i] = numpy.array([[0,0,0],
[0,1,0],
[0,1,0],
[0,0,0]], dtype=numpy.float64)
expected = numpy.array([[0,0,0,0],
[0,1,1,0],
[0,0,0,0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 90, order=order)
for i in range(3):
assert_array_almost_equal(out[:,:,i], expected)
def test_rotate06(self):
"rotate 6"
data = numpy.empty((3,4,3))
for i in range(3):
data[:,:,i] = numpy.array([[0,0,0,0],
[0,1,1,0],
[0,0,0,0]], dtype=numpy.float64)
expected = numpy.array([[0,0,0],
[0,1,0],
[0,1,0],
[0,0,0]], dtype=numpy.float64)
for order in range(0, 6):
out = ndimage.rotate(data, 90, order=order)
for i in range(3):
assert_array_almost_equal(out[:,:,i], expected)
def test_rotate07(self):
"rotate 7"
data = numpy.array([[[0, 0, 0, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0]]] * 2,
dtype=numpy.float64)
data = data.transpose()
expected = numpy.array([[[0, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 0],
[0, 0, 0]]] * 2, dtype=numpy.float64)
expected = expected.transpose([2,1,0])
for order in range(0, 6):
out = ndimage.rotate(data, 90, axes=(0, 1), order=order)
assert_array_almost_equal(out, expected)
def test_rotate08(self):
"rotate 8"
data = numpy.array([[[0, 0, 0, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0]]] * 2,
dtype=numpy.float64)
data = data.transpose()
expected = numpy.array([[[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 0, 0, 0]]] * 2,
dtype=numpy.float64)
expected = expected.transpose()
for order in range(0, 6):
out = ndimage.rotate(data, 90, axes=(0, 1),
                     reshape=False, order=order)
assert_array_almost_equal(out, expected)
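# watershed_ift floods the uint8 input from the given markers using the
# image foresting transform: positive marker values are competing seeds,
# negative values seed the background, and the optional structure sets the
# connectivity. The default is the 4-connected cross, which is why
# test_watershed_ift02 produces a diamond rather than the full square of
# test_watershed_ift01 (which passes an 8-connected structure).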
def test_watershed_ift01(self):
"watershed_ift 1"
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ -1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0]],
numpy.int8)
out = ndimage.watershed_ift(data, markers,
structure=[[1,1,1],
[1,1,1],
[1,1,1]])
expected = [[-1, -1, -1, -1, -1, -1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift02(self):
"watershed_ift 2"
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ -1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0]],
numpy.int8)
out = ndimage.watershed_ift(data, markers)
expected = [[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, 1, 1, 1, -1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, -1, 1, 1, 1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift03(self):
"watershed_ift 3"
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 2, 0, 3, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, -1]],
numpy.int8)
out = ndimage.watershed_ift(data, markers)
expected = [[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, 2, -1, 3, -1, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, -1, 2, -1, 3, -1, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift04(self):
"watershed_ift 4"
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 2, 0, 3, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, -1]],
numpy.int8)
out = ndimage.watershed_ift(data, markers,
structure=[[1,1,1],
[1,1,1],
[1,1,1]])
expected = [[-1, -1, -1, -1, -1, -1, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, 2, 2, 3, 3, 3, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift05(self):
"watershed_ift 5"
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 3, 0, 2, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, -1]],
numpy.int8)
out = ndimage.watershed_ift(data, markers,
structure=[[1,1,1],
[1,1,1],
[1,1,1]])
expected = [[-1, -1, -1, -1, -1, -1, -1],
[-1, 3, 3, 2, 2, 2, -1],
[-1, 3, 3, 2, 2, 2, -1],
[-1, 3, 3, 2, 2, 2, -1],
[-1, 3, 3, 2, 2, 2, -1],
[-1, 3, 3, 2, 2, 2, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift06(self):
"watershed_ift 6"
data = numpy.array([[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[ -1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0]],
numpy.int8)
out = ndimage.watershed_ift(data, markers,
structure=[[1,1,1],
[1,1,1],
[1,1,1]])
expected = [[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
def test_watershed_ift07(self):
"watershed_ift 7"
shape = (7, 6)
data = numpy.zeros(shape, dtype=numpy.uint8)
data = data.transpose()
data[...] = numpy.array([[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], numpy.uint8)
markers = numpy.array([[-1, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 1, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0]],
numpy.int8)
out = numpy.zeros(shape, dtype=numpy.int16)
out = out.transpose()
ndimage.watershed_ift(data, markers,
structure=[[1,1,1],
[1,1,1],
[1,1,1]],
output=out)
expected = [[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, 1, 1, 1, 1, 1, -1],
[-1, -1, -1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1, -1, -1]]
assert_array_almost_equal(out, expected)
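# The distance-transform tests use distance_transform_bf (brute force) as
# the reference implementation for the 'euclidean', 'cityblock' and
# 'chessboard' metrics. return_indices additionally yields the feature
# transform ft: per axis, the index of the closest background element.
# That is why the euclidean cases can be checked exactly against integer
# tables via out * out, and why test_distance_transform_edt01 rebuilds the
# distances from ft. The cdt (chamfer) and edt (exact euclidean) variants
# are validated against bf.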
def test_distance_transform_bf01(self):
"brute force distance transform 1"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_bf(data, 'euclidean',
return_indices=True)
expected = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 2, 4, 2, 1, 0, 0],
[0, 0, 1, 4, 8, 4, 1, 0, 0],
[0, 0, 1, 2, 4, 2, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]]
assert_array_almost_equal(out * out, expected)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 1, 2, 2, 2, 2],
[3, 3, 3, 2, 1, 2, 3, 3, 3],
[4, 4, 4, 4, 6, 4, 4, 4, 4],
[5, 5, 6, 6, 7, 6, 6, 5, 5],
[6, 6, 6, 7, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 1, 2, 4, 6, 7, 7, 8],
[0, 1, 1, 1, 6, 7, 7, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_bf02(self):
"brute force distance transform 2"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_bf(data, 'cityblock',
return_indices=True)
expected = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 2, 2, 2, 1, 0, 0],
[0, 0, 1, 2, 3, 2, 1, 0, 0],
[0, 0, 1, 2, 2, 2, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]]
assert_array_almost_equal(out, expected)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 1, 2, 2, 2, 2],
[3, 3, 3, 3, 1, 3, 3, 3, 3],
[4, 4, 4, 4, 7, 4, 4, 4, 4],
[5, 5, 6, 7, 7, 7, 6, 5, 5],
[6, 6, 6, 7, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 1, 1, 4, 7, 7, 7, 8],
[0, 1, 1, 1, 4, 7, 7, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(expected, ft)
def test_distance_transform_bf03(self):
"brute force distance transform 3"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_bf(data, 'chessboard',
return_indices=True)
expected = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 2, 1, 1, 0, 0],
[0, 0, 1, 2, 2, 2, 1, 0, 0],
[0, 0, 1, 1, 2, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]]
assert_array_almost_equal(out, expected)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 1, 2, 2, 2, 2],
[3, 3, 4, 2, 2, 2, 4, 3, 3],
[4, 4, 5, 6, 6, 6, 5, 4, 4],
[5, 5, 6, 6, 7, 6, 6, 5, 5],
[6, 6, 6, 7, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 5, 6, 6, 7, 8],
[0, 1, 1, 2, 6, 6, 7, 7, 8],
[0, 1, 1, 2, 6, 7, 7, 7, 8],
[0, 1, 2, 2, 6, 6, 7, 7, 8],
[0, 1, 2, 4, 5, 6, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_bf04(self):
"brute force distance transform 4"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
tdt, tft = ndimage.distance_transform_bf(data,
                                         return_indices=True)
dts = []
fts = []
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ndimage.distance_transform_bf(data, distances=dt)
dts.append(dt)
ft = ndimage.distance_transform_bf(data,
    return_distances=False, return_indices=True)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_bf(data,
return_distances=False, return_indices=True, indices=ft)
fts.append(ft)
dt, ft = ndimage.distance_transform_bf(data,
                                       return_indices=True)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ft = ndimage.distance_transform_bf(data, distances=dt,
return_indices=True)
dts.append(dt)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
dt = ndimage.distance_transform_bf(data,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_bf(data, distances=dt,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
for dt in dts:
assert_array_almost_equal(tdt, dt)
for ft in fts:
assert_array_almost_equal(tft, ft)
def test_distance_transform_bf05(self):
"brute force distance transform 5"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_bf(data,
'euclidean', return_indices=True, sampling=[2, 2])
expected = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 4, 0, 0, 0],
[0, 0, 4, 8, 16, 8, 4, 0, 0],
[0, 0, 4, 16, 32, 16, 4, 0, 0],
[0, 0, 4, 8, 16, 8, 4, 0, 0],
[0, 0, 0, 4, 4, 4, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]]
assert_array_almost_equal(out * out, expected)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 1, 2, 2, 2, 2],
[3, 3, 3, 2, 1, 2, 3, 3, 3],
[4, 4, 4, 4, 6, 4, 4, 4, 4],
[5, 5, 6, 6, 7, 6, 6, 5, 5],
[6, 6, 6, 7, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 1, 2, 4, 6, 7, 7, 8],
[0, 1, 1, 1, 6, 7, 7, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_bf06(self):
"brute force distance transform 6"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_bf(data,
'euclidean', return_indices=True, sampling=[2, 1])
expected = [[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 4, 1, 0, 0, 0],
[0, 0, 1, 4, 8, 4, 1, 0, 0],
[0, 0, 1, 4, 9, 4, 1, 0, 0],
[0, 0, 1, 4, 8, 4, 1, 0, 0],
[0, 0, 0, 1, 4, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]]
assert_array_almost_equal(out * out, expected)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 2, 2, 2, 2, 2],
[3, 3, 3, 3, 2, 3, 3, 3, 3],
[4, 4, 4, 4, 4, 4, 4, 4, 4],
[5, 5, 5, 5, 6, 5, 5, 5, 5],
[6, 6, 6, 6, 7, 6, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 6, 6, 6, 7, 8],
[0, 1, 1, 1, 6, 7, 7, 7, 8],
[0, 1, 1, 1, 7, 7, 7, 7, 8],
[0, 1, 1, 1, 6, 7, 7, 7, 8],
[0, 1, 2, 2, 4, 6, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_cdt01(self):
"chamfer type distance transform 1"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_cdt(data,
'cityblock', return_indices=True)
bf = ndimage.distance_transform_bf(data, 'cityblock')
assert_array_almost_equal(bf, out)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 1, 1, 1, 2, 2, 2],
[3, 3, 2, 1, 1, 1, 2, 3, 3],
[4, 4, 4, 4, 1, 4, 4, 4, 4],
[5, 5, 5, 5, 7, 7, 6, 5, 5],
[6, 6, 6, 6, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 1, 1, 4, 7, 7, 7, 8],
[0, 1, 1, 1, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_cdt02(self):
"chamfer type distance transform 2"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_cdt(data, 'chessboard',
return_indices=True)
bf = ndimage.distance_transform_bf(data, 'chessboard')
assert_array_almost_equal(bf, out)
expected = [[[0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 1, 1, 1, 2, 2, 2],
[3, 3, 2, 2, 1, 2, 2, 3, 3],
[4, 4, 3, 2, 2, 2, 3, 4, 4],
[5, 5, 4, 6, 7, 6, 4, 5, 5],
[6, 6, 6, 6, 7, 7, 6, 6, 6],
[7, 7, 7, 7, 7, 7, 7, 7, 7],
[8, 8, 8, 8, 8, 8, 8, 8, 8]],
[[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 2, 3, 4, 6, 7, 8],
[0, 1, 1, 2, 2, 6, 6, 7, 8],
[0, 1, 1, 1, 2, 6, 7, 7, 8],
[0, 1, 1, 2, 6, 6, 7, 7, 8],
[0, 1, 2, 2, 5, 6, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8],
[0, 1, 2, 3, 4, 5, 6, 7, 8]]]
assert_array_almost_equal(ft, expected)
def test_distance_transform_cdt03(self):
"chamfer type distance transform 3"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
tdt, tft = ndimage.distance_transform_cdt(data,
return_indices=True)
dts = []
fts = []
dt = numpy.zeros(data.shape, dtype=numpy.int32)
ndimage.distance_transform_cdt(data, distances=dt)
dts.append(dt)
ft = ndimage.distance_transform_cdt(data,
return_distances=False, return_indices=True)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_cdt(data,
return_distances=False, return_indices=True, indices=ft)
fts.append(ft)
dt, ft = ndimage.distance_transform_cdt(data,
return_indices=True)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.int32)
ft = ndimage.distance_transform_cdt(data, distances=dt,
                                    return_indices=True)
dts.append(dt)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
dt = ndimage.distance_transform_cdt(data,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.int32)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_cdt(data, distances=dt,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
for dt in dts:
assert_array_almost_equal(tdt, dt)
for ft in fts:
assert_array_almost_equal(tft, ft)
def test_distance_transform_edt01(self):
"euclidean distance transform 1"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
out, ft = ndimage.distance_transform_edt(data,
return_indices=True)
bf = ndimage.distance_transform_bf(data, 'euclidean')
assert_array_almost_equal(bf, out)
dt = ft - numpy.indices(ft.shape[1:], dtype=ft.dtype)
dt = dt.astype(numpy.float64)
numpy.multiply(dt, dt, dt)
dt = numpy.add.reduce(dt, axis=0)
numpy.sqrt(dt, dt)
assert_array_almost_equal(bf, dt)
def test_distance_transform_edt02(self):
"euclidean distance transform 2"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
tdt, tft = ndimage.distance_transform_edt(data,
return_indices=True)
dts = []
fts = []
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ndimage.distance_transform_edt(data, distances=dt)
dts.append(dt)
ft = ndimage.distance_transform_edt(data,
    return_distances=False, return_indices=True)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_edt(data,
    return_distances=False, return_indices=True, indices=ft)
fts.append(ft)
dt, ft = ndimage.distance_transform_edt(data,
return_indices=True)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ft = ndimage.distance_transform_edt(data, distances=dt,
return_indices=True)
dts.append(dt)
fts.append(ft)
ft = numpy.indices(data.shape, dtype=numpy.int32)
dt = ndimage.distance_transform_edt(data,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
dt = numpy.zeros(data.shape, dtype=numpy.float64)
ft = numpy.indices(data.shape, dtype=numpy.int32)
ndimage.distance_transform_edt(data, distances=dt,
return_indices=True, indices=ft)
dts.append(dt)
fts.append(ft)
for dt in dts:
assert_array_almost_equal(tdt, dt)
for ft in fts:
assert_array_almost_equal(tft, ft)
def test_distance_transform_edt03(self):
"euclidean distance transform 3"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
ref = ndimage.distance_transform_bf(data, 'euclidean',
sampling=[2, 2])
out = ndimage.distance_transform_edt(data,
sampling=[2, 2])
assert_array_almost_equal(ref, out)
def test_distance_transform_edt04(self):
"euclidean distance transform 4"
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0]], type)
ref = ndimage.distance_transform_bf(data, 'euclidean',
sampling=[2, 1])
out = ndimage.distance_transform_edt(data,
sampling=[2, 1])
assert_array_almost_equal(ref, out)
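# generate_binary_structure(rank, connectivity) builds the standard
# morphological structuring elements: connectivity 1 gives the cross, and
# connectivity equal to the rank gives the full hypercube of ones.
# iterate_structure dilates a structure with itself the given number of
# times, and returns a (structure, origin) tuple when an origin argument
# is supplied, as test_iterate_structure03 checks.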
def test_generate_structure01(self):
"generation of a binary structure 1"
struct = ndimage.generate_binary_structure(0, 1)
assert_array_almost_equal(struct, 1)
def test_generate_structure02(self):
"generation of a binary structure 2"
struct = ndimage.generate_binary_structure(1, 1)
assert_array_almost_equal(struct, [1, 1, 1])
def test_generate_structure03(self):
"generation of a binary structure 3"
struct = ndimage.generate_binary_structure(2, 1)
assert_array_almost_equal(struct, [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]])
def test_generate_structure04(self):
"generation of a binary structure 4"
struct = ndimage.generate_binary_structure(2, 2)
assert_array_almost_equal(struct, [[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
def test_iterate_structure01(self):
"iterating a structure 1"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
out = ndimage.iterate_structure(struct, 2)
assert_array_almost_equal(out, [[0, 0, 1, 0, 0],
[0, 1, 1, 1, 0],
[1, 1, 1, 1, 1],
[0, 1, 1, 1, 0],
[0, 0, 1, 0, 0]])
def test_iterate_structure02(self):
"iterating a structure 2"
struct = [[0, 1],
[1, 1],
[0, 1]]
out = ndimage.iterate_structure(struct, 2)
assert_array_almost_equal(out, [[0, 0, 1],
[0, 1, 1],
[1, 1, 1],
[0, 1, 1],
[0, 0, 1]])
def test_iterate_structure03(self):
"iterating a structure 3"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
out = ndimage.iterate_structure(struct, 2, 1)
expected = [[0, 0, 1, 0, 0],
[0, 1, 1, 1, 0],
[1, 1, 1, 1, 1],
[0, 1, 1, 1, 0],
[0, 0, 1, 0, 0]]
assert_array_almost_equal(out[0], expected)
assert_equal(out[1], [2, 2])
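# binary_erosion keeps an element only if the structuring element fits
# entirely inside the foreground. border_value controls how points beyond
# the array edge count: 0 erodes the border away, 1 treats the outside as
# foreground. The default structure is the connectivity-1 cross, and the
# origin parameter shifts the structure relative to each element.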
def test_binary_erosion01(self):
"binary erosion 1"
for type in self.types:
data = numpy.ones([], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, 1)
def test_binary_erosion02(self):
"binary erosion 2"
for type in self.types:
data = numpy.ones([], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, 1)
def test_binary_erosion03(self):
"binary erosion 3"
for type in self.types:
data = numpy.ones([1], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [0])
def test_binary_erosion04(self):
"binary erosion 4"
for type in self.types:
data = numpy.ones([1], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [1])
def test_binary_erosion05(self):
"binary erosion 5"
for type in self.types:
data = numpy.ones([3], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [0, 1, 0])
def test_binary_erosion06(self):
"binary erosion 6"
for type in self.types:
data = numpy.ones([3], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [1, 1, 1])
def test_binary_erosion07(self):
"binary erosion 7"
for type in self.types:
data = numpy.ones([5], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [0, 1, 1, 1, 0])
def test_binary_erosion08(self):
"binary erosion 8"
for type in self.types:
data = numpy.ones([5], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [1, 1, 1, 1, 1])
def test_binary_erosion09(self):
"binary erosion 9"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [0, 0, 0, 0, 0])
def test_binary_erosion10(self):
"binary erosion 10"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [1, 0, 0, 0, 1])
def test_binary_erosion11(self):
"binary erosion 11"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
struct = [1, 0, 1]
out = ndimage.binary_erosion(data, struct,
border_value=1)
assert_array_almost_equal(out, [1, 0, 1, 0, 1])
def test_binary_erosion12(self):
"binary erosion 12"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
struct = [1, 0, 1]
out = ndimage.binary_erosion(data, struct,
border_value=1,
origin=-1)
assert_array_almost_equal(out, [0, 1, 0, 1, 1])
def test_binary_erosion13(self):
"binary erosion 13"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
struct = [1, 0, 1]
out = ndimage.binary_erosion(data, struct,
border_value=1,
origin=1)
assert_array_almost_equal(out, [1, 1, 0, 1, 0])
def test_binary_erosion14(self):
"binary erosion 14"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
struct = [1, 1]
out = ndimage.binary_erosion(data, struct,
border_value=1)
assert_array_almost_equal(out, [1, 1, 0, 0, 1])
def test_binary_erosion15(self):
"binary erosion 15"
for type in self.types:
data = numpy.ones([5], type)
data[2] = 0
struct = [1, 1]
out = ndimage.binary_erosion(data, struct,
border_value=1,
origin=-1)
assert_array_almost_equal(out, [1, 0, 0, 1, 1])
def test_binary_erosion16(self):
"binary erosion 16"
for type in self.types:
data = numpy.ones([1, 1], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [[1]])
def test_binary_erosion17(self):
"binary erosion 17"
for type in self.types:
data = numpy.ones([1, 1], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [[0]])
def test_binary_erosion18(self):
"binary erosion 18"
for type in self.types:
data = numpy.ones([1, 3], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [[0, 0, 0]])
def test_binary_erosion19(self):
"binary erosion 19"
for type in self.types:
data = numpy.ones([1, 3], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [[1, 1, 1]])
def test_binary_erosion20(self):
"binary erosion 20"
for type in self.types:
data = numpy.ones([3, 3], type)
out = ndimage.binary_erosion(data)
assert_array_almost_equal(out, [[0, 0, 0],
[0, 1, 0],
[0, 0, 0]])
def test_binary_erosion21(self):
"binary erosion 21"
for type in self.types:
data = numpy.ones([3, 3], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, [[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
def test_binary_erosion22(self):
"binary erosion 22"
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_erosion(data, border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_erosion23(self):
"binary erosion 23"
struct = ndimage.generate_binary_structure(2, 2)
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_erosion(data, struct,
border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_erosion24(self):
"binary erosion 24"
struct = [[0, 1],
[1, 1]]
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 1, 1],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_erosion(data, struct,
border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_erosion25(self):
"binary erosion 25"
struct = [[0, 1, 0],
[1, 0, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 1, 0, 0],
[0, 1, 0, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_erosion(data, struct,
border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_erosion26(self):
"binary erosion 26"
struct = [[0, 1, 0],
[1, 0, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 0, 1],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 1, 0, 0],
[0, 1, 0, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_erosion(data, struct,
border_value=1, origin=(-1, -1))
assert_array_almost_equal(out, expected)
def test_binary_erosion27(self):
"binary erosion 27"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], bool)
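        # iterations=2 applies the same erosion twice in succession.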
out = ndimage.binary_erosion(data, struct,
border_value=1, iterations=2)
assert_array_almost_equal(out, expected)
def test_binary_erosion28(self):
"binary erosion 28"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], bool)
out = numpy.zeros(data.shape, bool)
ndimage.binary_erosion(data, struct, border_value=1,
iterations=2, output=out)
assert_array_almost_equal(out, expected)
def test_binary_erosion29(self):
"binary erosion 29"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0]], bool)
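        # Three iterations erode the radius-3 diamond down to its single centre pixel.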
out = ndimage.binary_erosion(data, struct,
border_value=1, iterations=3)
assert_array_almost_equal(out, expected)
def test_binary_erosion30(self):
"binary erosion 30"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0]], bool)
out = numpy.zeros(data.shape, bool)
ndimage.binary_erosion(data, struct, border_value=1,
iterations=3, output=out)
assert_array_almost_equal(out, expected)
def test_binary_erosion31(self):
"binary erosion 31"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 1, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 0, 1],
[0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 1]]
data = numpy.array([[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0]], bool)
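        # With origin=(-1, -1) the structuring element is offset, shifting the
        # eroded pattern one pixel up and to the left and changing which
        # border pixels survive.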
out = numpy.zeros(data.shape, bool)
ndimage.binary_erosion(data, struct, border_value=1,
iterations=1, output=out, origin=(-1, -1))
assert_array_almost_equal(out, expected)
def test_binary_erosion32(self):
"binary erosion 32"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], bool)
out = ndimage.binary_erosion(data, struct,
border_value=1, iterations=2)
assert_array_almost_equal(out, expected)
def test_binary_erosion33(self):
"binary erosion 33"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
mask = [[1, 1, 1, 1, 1, 0, 0],
[1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1]]
data = numpy.array([[0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 1, 0, 0, 1],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], bool)
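        # iterations=-1 repeats the erosion until the result stops changing;
        # pixels outside the mask keep their original values.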
out = ndimage.binary_erosion(data, struct,
border_value=1, mask=mask, iterations=-1)
assert_array_almost_equal(out, expected)
def test_binary_erosion34(self):
"binary erosion 34"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
mask = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]], bool)
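        # Only pixels inside the mask may change; the arms of the plus shape
        # lie outside the mask and are preserved.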
out = ndimage.binary_erosion(data, struct,
border_value=1, mask=mask)
assert_array_almost_equal(out, expected)
def test_binary_erosion35(self):
"binary erosion 35"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
mask = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0]], bool)
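        # Expected output: the unmasked erosion result (tmp) where the mask is
        # set, and the original data elsewhere.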
tmp = [[0, 0, 1, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 0, 1],
[0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 1]]
expected = numpy.logical_and(tmp, mask)
tmp = numpy.logical_and(data, numpy.logical_not(mask))
expected = numpy.logical_or(expected, tmp)
out = numpy.zeros(data.shape, bool)
ndimage.binary_erosion(data, struct, border_value=1,
iterations=1, output=out,
origin=(-1, -1), mask=mask)
assert_array_almost_equal(out, expected)
def test_binary_erosion36(self):
"binary erosion 36"
struct = [[0, 1, 0],
[1, 0, 1],
[0, 1, 0]]
mask = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
tmp = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 1, 0, 0, 1],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1],
[0, 0, 1, 1, 1, 0, 1, 1],
[0, 0, 1, 0, 1, 1, 0, 0],
[0, 1, 0, 1, 1, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]])
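        # Expected output: eroded values inside the mask, original data outside it.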
expected = numpy.logical_and(tmp, mask)
tmp = numpy.logical_and(data, numpy.logical_not(mask))
expected = numpy.logical_or(expected, tmp)
out = ndimage.binary_erosion(data, struct, mask=mask,
border_value=1, origin=(-1, -1))
assert_array_almost_equal(out, expected)
def test_binary_dilation01(self):
"binary dilation 1"
for type in self.types:
data = numpy.ones([], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, 1)
def test_binary_dilation02(self):
"binary dilation 2"
for type in self.types:
data = numpy.zeros([], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, 0)
def test_binary_dilation03(self):
"binary dilation 3"
for type in self.types:
data = numpy.ones([1], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [1])
def test_binary_dilation04(self):
"binary dilation 4"
for type in self.types:
data = numpy.zeros([1], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [0])
def test_binary_dilation05(self):
"binary dilation 5"
for type in self.types:
data = numpy.ones([3], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [1, 1, 1])
def test_binary_dilation06(self):
"binary dilation 6"
for type in self.types:
data = numpy.zeros([3], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [0, 0, 0])
def test_binary_dilation07(self):
"binary dilation 7"
struct = ndimage.generate_binary_structure(1, 1)
for type in self.types:
data = numpy.zeros([3], type)
data[1] = 1
            out = ndimage.binary_dilation(data, struct)
assert_array_almost_equal(out, [1, 1, 1])
def test_binary_dilation08(self):
"binary dilation 8"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
data[3] = 1
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [1, 1, 1, 1, 1])
def test_binary_dilation09(self):
"binary dilation 9"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [1, 1, 1, 0, 0])
def test_binary_dilation10(self):
"binary dilation 10"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
out = ndimage.binary_dilation(data, origin=-1)
assert_array_almost_equal(out, [0, 1, 1, 1, 0])
def test_binary_dilation11(self):
"binary dilation 11"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
out = ndimage.binary_dilation(data, origin=1)
assert_array_almost_equal(out, [1, 1, 0, 0, 0])
def test_binary_dilation12(self):
"binary dilation 12"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
struct = [1, 0, 1]
out = ndimage.binary_dilation(data, struct)
assert_array_almost_equal(out, [1, 0, 1, 0, 0])
def test_binary_dilation13(self):
"binary dilation 13"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
struct = [1, 0, 1]
out = ndimage.binary_dilation(data, struct,
border_value=1)
assert_array_almost_equal(out, [1, 0, 1, 0, 1])
def test_binary_dilation14(self):
"binary dilation 14"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
struct = [1, 0, 1]
out = ndimage.binary_dilation(data, struct,
origin=-1)
assert_array_almost_equal(out, [0, 1, 0, 1, 0])
def test_binary_dilation15(self):
"binary dilation 15"
for type in self.types:
data = numpy.zeros([5], type)
data[1] = 1
struct = [1, 0, 1]
out = ndimage.binary_dilation(data, struct,
origin=-1, border_value=1)
assert_array_almost_equal(out, [1, 1, 0, 1, 0])
def test_binary_dilation16(self):
"binary dilation 16"
for type in self.types:
data = numpy.ones([1, 1], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [[1]])
def test_binary_dilation17(self):
"binary dilation 17"
for type in self.types:
data = numpy.zeros([1, 1], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [[0]])
def test_binary_dilation18(self):
"binary dilation 18"
for type in self.types:
data = numpy.ones([1, 3], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [[1, 1, 1]])
def test_binary_dilation19(self):
"binary dilation 19"
for type in self.types:
data = numpy.ones([3, 3], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
def test_binary_dilation20(self):
"binary dilation 20"
for type in self.types:
data = numpy.zeros([3, 3], type)
data[1, 1] = 1
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]])
def test_binary_dilation21(self):
"binary dilation 21"
struct = ndimage.generate_binary_structure(2, 2)
for type in self.types:
data = numpy.zeros([3, 3], type)
data[1, 1] = 1
out = ndimage.binary_dilation(data, struct)
assert_array_almost_equal(out, [[1, 1, 1],
[1, 1, 1],
[1, 1, 1]])
def test_binary_dilation22(self):
"binary dilation 22"
expected = [[0, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data)
assert_array_almost_equal(out, expected)
def test_binary_dilation23(self):
"binary dilation 23"
expected = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 1, 0, 1],
[1, 0, 0, 1, 1, 1, 1, 1],
[1, 0, 1, 1, 1, 1, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 0, 1, 0, 0, 1, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_dilation24(self):
"binary dilation 24"
expected = [[1, 1, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, origin=(1, 1))
assert_array_almost_equal(out, expected)
def test_binary_dilation25(self):
"binary dilation 25"
expected = [[1, 1, 0, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 0, 0, 1, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, origin=(1, 1),
border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_dilation26(self):
"binary dilation 26"
struct = ndimage.generate_binary_structure(2, 2)
expected = [[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, struct)
assert_array_almost_equal(out, expected)
def test_binary_dilation27(self):
"binary dilation 27"
struct = [[0, 1],
[1, 1]]
expected = [[0, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, struct)
assert_array_almost_equal(out, expected)
def test_binary_dilation28(self):
"binary dilation 28"
expected = [[1, 1, 1, 1],
[1, 0, 0, 1],
[1, 0, 0, 1],
[1, 1, 1, 1]]
for type in self.types:
data = numpy.array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_dilation29(self):
"binary dilation 29"
struct = [[0, 1],
[1, 1]]
expected = [[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 1, 1, 0],
[0, 1, 1, 1, 0],
[0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]], bool)
out = ndimage.binary_dilation(data, struct,
iterations=2)
assert_array_almost_equal(out, expected)
def test_binary_dilation30(self):
"binary dilation 30"
struct = [[0, 1],
[1, 1]]
expected = [[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 1, 1, 0],
[0, 1, 1, 1, 0],
[0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]], bool)
out = numpy.zeros(data.shape, bool)
ndimage.binary_dilation(data, struct, iterations=2,
output=out)
assert_array_almost_equal(out, expected)
def test_binary_dilation31(self):
"binary dilation 31"
struct = [[0, 1],
[1, 1]]
expected = [[0, 0, 0, 1, 0],
[0, 0, 1, 1, 0],
[0, 1, 1, 1, 0],
[1, 1, 1, 1, 0],
[0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]], bool)
out = ndimage.binary_dilation(data, struct,
iterations=3)
assert_array_almost_equal(out, expected)
def test_binary_dilation32(self):
"binary dilation 32"
struct = [[0, 1],
[1, 1]]
expected = [[0, 0, 0, 1, 0],
[0, 0, 1, 1, 0],
[0, 1, 1, 1, 0],
[1, 1, 1, 1, 0],
[0, 0, 0, 0, 0]]
data = numpy.array([[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]], bool)
out = numpy.zeros(data.shape, bool)
ndimage.binary_dilation(data, struct, iterations=3,
output=out)
assert_array_almost_equal(out, expected)
def test_binary_dilation33(self):
"binary dilation 33"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
mask = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
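        # iterations=-1 dilates until convergence: each seed grows to fill the
        # connected part of the mask it touches.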
out = ndimage.binary_dilation(data, struct,
iterations=-1, mask=mask, border_value=0)
assert_array_almost_equal(out, expected)
def test_binary_dilation34(self):
"binary dilation 34"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
mask = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.zeros(mask.shape, bool)
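        # An all-zero input with border_value=1 seeds the dilation from the
        # image border; growth is confined to the mask.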
out = ndimage.binary_dilation(data, struct,
iterations=-1, mask=mask, border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_dilation35(self):
"binary dilation 35"
tmp = [[1, 1, 0, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 0, 0, 1, 0, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]])
mask = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
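        # Expected output: dilated values inside the mask, untouched data outside it.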
expected = numpy.logical_and(tmp, mask)
tmp = numpy.logical_and(data, numpy.logical_not(mask))
expected = numpy.logical_or(expected, tmp)
for type in self.types:
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_dilation(data, mask=mask,
origin=(1, 1), border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_propagation01(self):
"binary propagation 1"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
mask = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
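        # binary_propagation with a mask is equivalent to binary_dilation with
        # iterations=-1 (compare test_binary_dilation33: same inputs, same result).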
out = ndimage.binary_propagation(data, struct,
mask=mask, border_value=0)
assert_array_almost_equal(out, expected)
def test_binary_propagation02(self):
"binary propagation 2"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
mask = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.zeros(mask.shape, bool)
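        # Propagation seeded from the border (border_value=1) within the mask;
        # mirrors test_binary_dilation34.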
out = ndimage.binary_propagation(data, struct,
mask=mask, border_value=1)
assert_array_almost_equal(out, expected)
def test_binary_opening01(self):
"binary opening 1"
expected = [[0, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 0, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_opening(data)
assert_array_almost_equal(out, expected)
def test_binary_opening02(self):
"binary opening 2"
struct = ndimage.generate_binary_structure(2, 2)
expected = [[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 0, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_opening(data, struct)
assert_array_almost_equal(out, expected)
def test_binary_closing01(self):
"binary closing 1"
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 0, 1, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_closing(data)
assert_array_almost_equal(out, expected)
def test_binary_closing02(self):
"binary closing 2"
struct = ndimage.generate_binary_structure(2, 2)
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 0, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_closing(data, struct)
assert_array_almost_equal(out, expected)
def test_binary_fill_holes01(self):
"binary fill holes 1"
expected = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
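        # The interior zeros have no path to the border, so binary_fill_holes
        # fills them.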
out = ndimage.binary_fill_holes(data)
assert_array_almost_equal(out, expected)
def test_binary_fill_holes02(self):
"binary fill holes 2"
expected = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
out = ndimage.binary_fill_holes(data)
assert_array_almost_equal(out, expected)
def test_binary_fill_holes03(self):
"binary fill holes 3"
expected = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 0, 1, 1, 1],
[0, 1, 1, 1, 0, 1, 1, 1],
[0, 1, 1, 1, 0, 1, 1, 1],
[0, 0, 1, 0, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
data = numpy.array([[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 0, 1, 1, 1],
[0, 1, 0, 1, 0, 1, 0, 1],
[0, 1, 0, 1, 0, 1, 0, 1],
[0, 0, 1, 0, 0, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0]], bool)
out = ndimage.binary_fill_holes(data)
assert_array_almost_equal(out, expected)
def test_grey_erosion01(self):
"grey erosion 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
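        # Without an explicit structure, grey erosion is a moving minimum over
        # the footprint.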
output = ndimage.grey_erosion(array,
footprint=footprint)
assert_array_almost_equal([[2, 2, 1, 1, 1],
[2, 3, 1, 3, 1],
[5, 5, 3, 3, 1]], output)
def test_grey_erosion02(self):
"grey erosion 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
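        # An all-zero structure is flat, so the result matches the
        # footprint-only erosion in test_grey_erosion01.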
output = ndimage.grey_erosion(array,
footprint=footprint, structure=structure)
assert_array_almost_equal([[2, 2, 1, 1, 1],
[2, 3, 1, 3, 1],
[5, 5, 3, 3, 1]], output)
def test_grey_erosion03(self):
"grey erosion 3"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[1, 1, 1], [1, 1, 1]]
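        # Erosion takes min(input - structure) over the footprint, so a
        # structure of ones lowers each output by 1 relative to test_grey_erosion01.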
output = ndimage.grey_erosion(array,
footprint=footprint, structure=structure)
assert_array_almost_equal([[1, 1, 0, 0, 0],
[1, 2, 0, 2, 0],
[4, 4, 2, 2, 0]], output)
def test_grey_dilation01(self):
"grey dilation 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[0, 1, 1], [1, 0, 1]]
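        # Without an explicit structure, grey dilation is a moving maximum over
        # the footprint.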
output = ndimage.grey_dilation(array,
footprint=footprint)
assert_array_almost_equal([[7, 7, 9, 9, 5],
[7, 9, 8, 9, 7],
[8, 8, 8, 7, 7]], output)
def test_grey_dilation02(self):
"grey dilation 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[0, 1, 1], [1, 0, 1]]
structure = [[0, 0, 0], [0, 0, 0]]
output = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
assert_array_almost_equal([[7, 7, 9, 9, 5],
[7, 9, 8, 9, 7],
[8, 8, 8, 7, 7]], output)
def test_grey_dilation03(self):
"grey dilation 3"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[0, 1, 1], [1, 0, 1]]
structure = [[1, 1, 1], [1, 1, 1]]
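        # Dilation takes max(input + structure) over the footprint, so a
        # structure of ones raises each output by 1 relative to test_grey_dilation01.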
output = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
assert_array_almost_equal([[8, 8, 10, 10, 6],
[8, 10, 9, 10, 8],
[9, 9, 9, 8, 8]], output)
def test_grey_opening01(self):
"grey opening 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
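        # Grey opening is grey erosion followed by grey dilation with the same
        # footprint, as the expected value below is constructed.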
tmp = ndimage.grey_erosion(array, footprint=footprint)
expected = ndimage.grey_dilation(tmp, footprint=footprint)
output = ndimage.grey_opening(array,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_grey_opening02(self):
"grey opening 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp = ndimage.grey_erosion(array, footprint=footprint,
structure=structure)
expected = ndimage.grey_dilation(tmp, footprint=footprint,
structure=structure)
output = ndimage.grey_opening(array,
footprint=footprint, structure=structure)
assert_array_almost_equal(expected, output)
def test_grey_closing01(self):
"grey closing 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
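        # Grey closing is grey dilation followed by grey erosion with the same
        # footprint, as the expected value below is constructed.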
tmp = ndimage.grey_dilation(array, footprint=footprint)
expected = ndimage.grey_erosion(tmp, footprint=footprint)
output = ndimage.grey_closing(array,
footprint=footprint)
assert_array_almost_equal(expected, output)
def test_grey_closing02(self):
"grey closing 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp = ndimage.grey_dilation(array, footprint=footprint,
structure=structure)
expected = ndimage.grey_erosion(tmp, footprint=footprint,
structure=structure)
output = ndimage.grey_closing(array,
footprint=footprint, structure=structure)
assert_array_almost_equal(expected, output)
def test_morphological_gradient01(self):
"morphological gradient 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
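        # The morphological gradient is the difference between dilation and erosion.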
tmp1 = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
tmp2 = ndimage.grey_erosion(array, footprint=footprint,
structure=structure)
expected = tmp1 - tmp2
output = numpy.zeros(array.shape, array.dtype)
ndimage.morphological_gradient(array,
footprint=footprint, structure=structure, output=output)
assert_array_almost_equal(expected, output)
def test_morphological_gradient02(self):
"morphological gradient 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp1 = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
tmp2 = ndimage.grey_erosion(array, footprint=footprint,
structure=structure)
expected = tmp1 - tmp2
        output = ndimage.morphological_gradient(array,
footprint=footprint, structure=structure)
assert_array_almost_equal(expected, output)
def test_morphological_laplace01(self):
"morphological laplace 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
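        # The morphological Laplace is dilation + erosion - 2 * input.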
tmp1 = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
tmp2 = ndimage.grey_erosion(array, footprint=footprint,
structure=structure)
expected = tmp1 + tmp2 - 2 * array
output = numpy.zeros(array.shape, array.dtype)
ndimage.morphological_laplace(array, footprint=footprint,
structure=structure, output=output)
assert_array_almost_equal(expected, output)
def test_morphological_laplace02(self):
"morphological laplace 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp1 = ndimage.grey_dilation(array,
footprint=footprint, structure=structure)
tmp2 = ndimage.grey_erosion(array, footprint=footprint,
structure=structure)
expected = tmp1 + tmp2 - 2 * array
output = ndimage.morphological_laplace(array,
footprint=footprint, structure=structure)
assert_array_almost_equal(expected, output)
def test_white_tophat01(self):
"white tophat 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
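        # The white tophat is the input minus its grey opening.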
tmp = ndimage.grey_opening(array, footprint=footprint,
structure=structure)
expected = array - tmp
output = numpy.zeros(array.shape, array.dtype)
ndimage.white_tophat(array, footprint=footprint,
structure=structure, output=output)
assert_array_almost_equal(expected, output)
def test_white_tophat02(self):
"white tophat 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp = ndimage.grey_opening(array, footprint=footprint,
structure=structure)
expected = array - tmp
output = ndimage.white_tophat(array, footprint=footprint,
structure=structure)
assert_array_almost_equal(expected, output)
def test_black_tophat01(self):
"black tophat 1"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
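        # The black tophat is the grey closing minus the input.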
tmp = ndimage.grey_closing(array, footprint=footprint,
structure=structure)
expected = tmp - array
output = numpy.zeros(array.shape, array.dtype)
ndimage.black_tophat(array, footprint=footprint,
structure=structure, output=output)
assert_array_almost_equal(expected, output)
def test_black_tophat02(self):
"black tophat 2"
array = numpy.array([[3, 2, 5, 1, 4],
[7, 6, 9, 3, 5],
[5, 8, 3, 7, 1]])
footprint = [[1, 0, 1], [1, 1, 0]]
structure = [[0, 0, 0], [0, 0, 0]]
tmp = ndimage.grey_closing(array, footprint=footprint,
structure=structure)
expected = tmp - array
output = ndimage.black_tophat(array, footprint=footprint,
structure=structure)
assert_array_almost_equal(expected, output)
def test_hit_or_miss01(self):
"binary hit-or-miss transform 1"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0],
[0, 1, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 1, 0, 0, 0],
[1, 1, 1, 0, 0],
[0, 1, 0, 1, 1],
[0, 0, 1, 1, 1],
[0, 1, 1, 1, 0],
[0, 1, 1, 1, 1],
[0, 1, 1, 1, 1],
[0, 0, 0, 0, 0]], type)
out = numpy.zeros(data.shape, bool)
ndimage.binary_hit_or_miss(data, struct,
output=out)
assert_array_almost_equal(expected, out)
def test_hit_or_miss02(self):
"binary hit-or-miss transform 2"
struct = [[0, 1, 0],
[1, 1, 1],
[0, 1, 0]]
expected = [[0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 1, 0, 0, 1, 1, 1, 0],
[1, 1, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_hit_or_miss(data, struct)
assert_array_almost_equal(expected, out)
def test_hit_or_miss03(self):
"binary hit-or-miss transform 3"
struct1 = [[0, 0, 0],
[1, 1, 1],
[0, 0, 0]]
struct2 = [[1, 1, 1],
[0, 0, 0],
[1, 1, 1]]
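        # A hit requires struct1 to match the foreground and struct2 to match
        # the background at the same position.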
expected = [[0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0]]
for type in self.types:
data = numpy.array([[0, 1, 0, 0, 1, 1, 1, 0],
[1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 0, 1, 1, 0],
[0, 0, 0, 0, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0]], type)
out = ndimage.binary_hit_or_miss(data, struct1,
struct2)
assert_array_almost_equal(expected, out)
if __name__ == "__main__":
run_module_suite()
lesserwhirls/scipy-cwt | scipy/ndimage/tests/test_ndimage.py | Python | bsd-3-clause | 203,773
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'ModelFieldData.foreign'
db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['blogs.ModelData']))
def backwards(self, orm):
# Changing field 'ModelFieldData.foreign'
db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.ModelFieldData'], null=True))
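    # Both directions keep the column nullable; only the ForeignKey target
    # changes (ModelData forwards, the self-referential ModelFieldData backwards).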
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'blogs.blog': {
'Meta': {'object_name': 'Blog'},
'analytics_account': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'contributors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogcontributor'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'custom_domain': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'draft_notice': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'exclusion_end': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'exclusion_start': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'facebook_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'fb_page_access_token': ('django.db.models.fields.CharField', [], {'max_length': '260', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'has_artists': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'header_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_bootblog': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_online': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
'main_color': ('django.db.models.fields.CharField', [], {'default': "'#C4BDB2'", 'max_length': '10', 'blank': 'True'}),
'moderator_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'pinterest_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Template']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True', 'blank': 'True'}),
'twitter_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'twitter_oauth_token': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'twitter_oauth_token_secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'blogs.category': {
'Meta': {'object_name': 'Category'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_category'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'cat_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_caret': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_close': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_email': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_fb': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_left': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_pint': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_right': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_tw': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'default': "'#000000'", 'max_length': '10'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'parent_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_category'", 'null': 'True', 'to': "orm['blogs.Category']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'Comment_author'", 'null': 'True', 'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'comment_status': ('django.db.models.fields.CharField', [], {'default': "'pe'", 'max_length': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'notify_me': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Post']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'})
},
'blogs.info_email': {
'Meta': {'object_name': 'Info_email'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'We'", 'max_length': '2', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'subject': ('django.db.models.fields.TextField', [], {'max_length': '100', 'blank': 'True'}),
'subscribers': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '2', 'null': 'True'})
},
'blogs.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'language_name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'blogs.menu': {
'Meta': {'object_name': 'Menu'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_menu'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_main': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'blogs.menuitem': {
'Meta': {'object_name': 'MenuItem'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
'external_link': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'menu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Menu']", 'null': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Page']", 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'})
},
'blogs.model': {
'Meta': {'object_name': 'Model'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Custom_post'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
'blogs.modeldata': {
'Meta': {'object_name': 'ModelData'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '140'})
},
'blogs.modelfield': {
'Meta': {'object_name': 'ModelField'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'post_type': ('django.db.models.fields.CharField', [], {'default': "'Text'", 'max_length': '40'}),
'rank': ('django.db.models.fields.CharField', [], {'default': "'1'", 'max_length': '2'})
},
'blogs.modelfielddata': {
'Meta': {'object_name': 'ModelFieldData'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100', 'blank': 'True'}),
'foreign': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'foreign'", 'null': 'True', 'to': "orm['blogs.ModelData']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'longtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'model_data': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelData']", 'null': 'True'}),
'model_field': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelField']", 'null': 'True'}),
'nullboolean': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'onetofive': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'positiveinteger': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'relation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'relation'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['blogs.ModelData']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.page': {
'Meta': {'object_name': 'Page'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_page'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.post': {
'Meta': {'object_name': 'Post'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'base62id': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_0': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_01': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_1': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_2': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_3': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_4': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_5': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_6': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_video': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discarded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_ready': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_top': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'karma': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'layout_type': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'pic': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_0': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_04': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_1': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_10': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_11': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_12': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_13': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_14': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_15': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_16': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_17': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_18': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_19': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_2': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_20': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_21': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_22': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_23': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_24': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_25': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_26': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_27': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_28': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_29': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_3': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_30': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_31': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_32': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_33': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_4': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_5': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_6': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_7': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_8': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_9': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'publish_on_facebook': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
'soundcloud_id': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'soundcloud_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'source': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '2', 'null': 'True'}),
'tag': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Tag']", 'null': 'True', 'blank': 'True'}),
'temp_tag_field': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'translated_content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'translated_title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'video': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'video_ogg': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'video_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'vimeo_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'vimeo_thumb_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'youtube_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'blogs.rss': {
'Meta': {'object_name': 'Rss'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_rss'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'feed_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'blogs.subscription': {
'Meta': {'object_name': 'Subscription'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_new': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'blogs.subuser': {
'Meta': {'object_name': 'Subuser'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_user'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
'blogs.tag': {
'Meta': {'object_name': 'Tag'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_tag'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.template': {
'Meta': {'object_name': 'Template'},
'archives': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'base': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'category': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'single': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'})
},
'blogs.translation': {
'Meta': {'object_name': 'Translation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'origin_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_origin_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'translated_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_translated_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['blogs']
|
carquois/blobon
|
blobon/blogs/migrations/0125_auto__chg_field_modelfielddata_foreign.py
|
Python
|
mit
| 31,918
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.base.globals import cfg
def extract_notes(mapper, connection, target):
if cfg['ANNOTATIONS_NOTES_ENABLED'] and target.star_score == 0:
from .noteutils import extract_notes_from_comment
revs = extract_notes_from_comment(target)
if len(revs) > 0:
from invenio.modules.annotations.api import add_annotation
for rev in revs:
add_annotation(model='annotation_note', **rev)
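# Hedged usage sketch, not part of the original file: extract_notes has the
# (mapper, connection, target) signature of an SQLAlchemy mapper-event
# receiver, so it would typically be registered along these lines, where
# CmtRECORDCOMMENT stands in for whichever comment model fires the event:
#
#   from sqlalchemy import event
#   event.listen(CmtRECORDCOMMENT, 'after_insert', extract_notes)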
|
chokribr/invenio
|
invenio/modules/annotations/receivers.py
|
Python
|
gpl-2.0
| 1,224
|
"""
Django settings for pronomen project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from .parser import parse_pronouns
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1234'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'pronomen_app.apps.PronomenAppConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pronomen.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pronomen.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'de-DE'
TIME_ZONE = 'CET'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, STATIC_URL[1:])
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, MEDIA_URL[1:])
# Load JSON files
PRONOUNS = parse_pronouns()
|
jh0ker/pronomen
|
site/pronomen/settings.py
|
Python
|
agpl-3.0
| 3,308
|
##################################################################
# Copyright 2018 Open Source Geospatial Foundation and others #
# licensed under MIT, Please consult LICENSE.txt for details #
##################################################################
from pywps._compat import text_type, StringIO
import os
import shutil
import requests
import tempfile
import logging
import pywps.configuration as config
from pywps.inout.literaltypes import (LITERAL_DATA_TYPES, convert,
make_allowedvalues, is_anyvalue)
from pywps import get_ElementMakerForVersion, OGCUNIT, NAMESPACES
from pywps.validator.mode import MODE
from pywps.validator.base import emptyvalidator
from pywps.validator import get_validator
from pywps.validator.literalvalidator import (validate_anyvalue,
validate_allowed_values)
from pywps.exceptions import NoApplicableCode, InvalidParameterValue, FileSizeExceeded, \
FileURLNotSupported
from pywps._compat import PY2, urlparse
import base64
from collections import namedtuple
from io import BytesIO
import six
_SOURCE_TYPE = namedtuple('SOURCE_TYPE', 'MEMORY, FILE, STREAM, DATA, URL')
SOURCE_TYPE = _SOURCE_TYPE(0, 1, 2, 3, 4)
LOGGER = logging.getLogger("PYWPS")
def _is_textfile(filename):
try:
# use python-magic if available
import magic
is_text = 'text/' in magic.from_file(filename, mime=True)
except ImportError:
# read the first part of the file to check for a binary indicator.
# This method won't detect all binary files.
blocksize = 512
fh = open(filename, 'rb')
is_text = b'\x00' not in fh.read(blocksize)
fh.close()
return is_text
def extend_instance(obj, cls):
"""Apply mixins to a class instance after creation."""
base_cls = obj.__class__
base_cls_name = obj.__class__.__name__
obj.__class__ = type(base_cls_name, (cls, base_cls), {})
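# Hedged illustration of extend_instance; the Greeter class below is invented
# for this sketch and is not part of pywps:
#
#   class Greeter(object):
#       def hello(self):
#           return 'hello'
#
#   obj = UOM()
#   extend_instance(obj, Greeter)
#   assert obj.hello() == 'hello'  # the instance now inherits Greeter methods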
class UOM(object):
"""
:param uom: unit of measure
"""
def __init__(self, uom=''):
self.uom = uom
@property
def json(self):
return {"reference": OGCUNIT[self.uom],
"uom": self.uom}
class IOHandler(object):
"""Base IO handling class subclassed by specialized versions: FileHandler, UrlHandler, DataHandler, etc.
If the specialized handling class is not known when the object is created, instantiate the object with IOHandler.
The first time the `file`, `url` or `data` attribute is set, the associated subclass will be automatically
registered. Once set, the specialized subclass cannot be switched.
:param workdir: working directory, to save temporal file objects in.
:param mode: ``MODE`` validation mode.
`file` : str
Filename on the local disk.
`url` : str
Link to an online resource.
`stream` : FileIO
A readable object.
`data` : object
A native python object (integer, string, float, etc)
`base64` : str
A base 64 encoding of the data.
>>> # setting up
>>> import os
>>> from io import RawIOBase
>>> from io import FileIO
>>>
>>> ioh_file = IOHandler(workdir=tmp)
>>> assert isinstance(ioh_file, IOHandler)
>>>
>>> # Create test file input
>>> fileobj = open(os.path.join(tmp, 'myfile.txt'), 'w')
>>> fileobj.write('ASDF ASFADSF ASF ASF ASDF ASFASF')
>>> fileobj.close()
>>>
>>> # testing file object on input
>>> ioh_file.file = fileobj.name
    >>> assert isinstance(ioh_file, FileHandler)
>>> assert ioh_file.file == fileobj.name
>>> assert isinstance(ioh_file.stream, RawIOBase)
>>> # skipped assert isinstance(ioh_file.memory_object, POSH)
>>>
>>> # testing stream object on input
>>> ioh_stream = IOHandler(workdir=tmp)
>>> assert ioh_stream.workdir == tmp
>>> ioh_stream.stream = FileIO(fileobj.name,'r')
>>> assert isinstance(ioh_stream, StreamHandler)
>>> assert open(ioh_stream.file).read() == ioh_file.stream.read()
>>> assert isinstance(ioh_stream.stream, RawIOBase)
"""
prop = None
def __init__(self, workdir=None, mode=MODE.NONE):
# Internal defaults for class and subclass properties.
self._workdir = None
self._reset_cache()
# Set public defaults
self.workdir = workdir
self.valid_mode = mode
# TODO: Clarify intent
self.as_reference = False
self.inpt = {}
self.uuid = None # request identifier
self.data_set = False
# This creates dummy property setters and getters for `file`, `data`, `url`, `stream` that
# 1. register the subclass methods according to the given property
# 2. replace the property setter by the subclass property setter
# 3. set the property
self._create_fset_properties()
def _reset_cache(self):
"""Sets all internal objects to None."""
self._file = None
self._data = None
self._post_data = None
self._stream = None
self._url = None
self.data_set = False
def _check_valid(self):
"""Validate this input using given validator
"""
validate = self.validator
_valid = validate(self, self.valid_mode)
if not _valid:
self.data_set = False
raise InvalidParameterValue('Input data not valid using '
'mode {}'.format(self.valid_mode))
self.data_set = True
@property
def workdir(self):
return self._workdir
@workdir.setter
def workdir(self, path):
"""Set working temporary directory for files to be stored in."""
if path is not None:
if not os.path.exists(path):
os.makedirs(path)
self._workdir = path
@property
def validator(self):
"""Return the function suitable for validation
This method should be overridden by class children
:return: validating function
"""
return emptyvalidator
@property
def source_type(self):
"""Return the source type."""
# For backward compatibility only. source_type checks could be replaced by `isinstance`.
return getattr(SOURCE_TYPE, self.prop.upper())
def _set_default_value(self, value=None, value_type=None):
"""Set default value based on input data type."""
value = value or getattr(self, '_default')
value_type = value_type or getattr(self, '_default_type')
if value:
if value_type == SOURCE_TYPE.DATA:
self.data = value
elif value_type == SOURCE_TYPE.MEMORY:
raise NotImplementedError
elif value_type == SOURCE_TYPE.FILE:
self.file = value
elif value_type == SOURCE_TYPE.STREAM:
self.stream = value
elif value_type == SOURCE_TYPE.URL:
self.url = value
def _build_file_name(self, href=''):
"""Return a file name for the local system."""
url_path = urlparse(href).path or ''
file_name = os.path.basename(url_path).strip() or 'input'
(prefix, suffix) = os.path.splitext(file_name)
suffix = suffix or self.extension
if prefix and suffix:
file_name = prefix + suffix
input_file_name = os.path.join(self.workdir, file_name)
# build tempfile in case of duplicates
if os.path.exists(input_file_name):
input_file_name = tempfile.mkstemp(
suffix=suffix, prefix=prefix + '_',
dir=self.workdir)[1]
return input_file_name
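    # Hedged example of _build_file_name: with workdir='/tmp/wps' (an invented
    # path) and href='http://example.org/data/points.csv', the result is
    # '/tmp/wps/points.csv'; if that file already exists, a tempfile such as
    # '/tmp/wps/points_XXXX.csv' is returned instead.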
@property
def extension(self):
"""Return the file extension for the data format, if set."""
if getattr(self, 'data_format', None):
return self.data_format.extension
else:
return ''
@staticmethod
def _create_fset_properties():
"""Create properties that when set for the first time, will determine
the instance's handler class.
Example
-------
>>> h = IOHandler()
>>> isinstance(h, DataHandler)
False
>>> h.data = 1 # Mixes the DataHandler class to IOHandler. h inherits DataHandler methods.
>>> isinstance(h, DataHandler)
True
Note that trying to set another attribute (e.g. `h.file = 'a.txt'`) will raise an AttributeError.
"""
for cls in (FileHandler, DataHandler, StreamHandler, UrlHandler):
def fset(s, value, kls=cls):
"""Assign the handler class and set the value to the attribute.
This function will only be called once. The next `fset` will
use the subclass' property.
"""
# Add cls methods to this instance.
extend_instance(s, kls)
# Set the attribute value through the associated cls property.
setattr(s, kls.prop, value)
setattr(IOHandler, cls.prop, property(fget=lambda x: None, fset=fset))
class FileHandler(IOHandler):
prop = 'file'
@property
def file(self):
"""Return filename."""
return self._file
@file.setter
def file(self, value):
"""Set file name"""
self._reset_cache()
self._file = os.path.abspath(value)
self._check_valid()
@property
def data(self):
"""Read file and return content."""
if self._data is None:
with open(self.file, mode=self._openmode()) as fh:
self._data = fh.read()
return self._data
@property
def base64(self):
"""Return base64 encoding of data."""
data = self.data.encode() if not isinstance(self.data, bytes) else self.data
return base64.b64encode(data)
@property
def stream(self):
"""Return stream object."""
from io import FileIO
if getattr(self, '_stream', None) and not self._stream.closed:
self._stream.close()
self._stream = FileIO(self.file, mode='r', closefd=True)
return self._stream
@property
def mem(self):
"""Return memory object."""
raise NotImplementedError
@property
def url(self):
"""Return url to file."""
import pathlib
return pathlib.PurePosixPath(self.file).as_uri()
def _openmode(self, data=None):
openmode = 'r'
if not PY2:
# in Python 3 we need to open binary files in binary mode.
checked = False
if hasattr(self, 'data_format'):
if self.data_format.encoding == 'base64':
# binary, when the data is to be encoded to base64
openmode += 'b'
checked = True
elif 'text/' in self.data_format.mime_type:
# not binary, when mime_type is 'text/'
checked = True
# when we can't guess it from the mime_type, we need to check the file.
# mimetypes like application/xml and application/json are text files too.
if not checked and not _is_textfile(self.file):
openmode += 'b'
return openmode
class DataHandler(FileHandler):
prop = 'data'
def _openmode(self, data=None):
openmode = 'w'
if not PY2 and isinstance(data, bytes):
# on Python 3 open the file in binary mode if the source is
# bytes, which happens when the data was base64-decoded
openmode += 'b'
return openmode
@property
def data(self):
"""Return data."""
return getattr(self, '_data', None)
@data.setter
def data(self, value):
self._reset_cache()
self._data = value
self._check_valid()
@property
def file(self):
"""Return file name storing the data.
        Requesting the file attribute writes the data to a temporary file on disk.
"""
if self._file is None:
self._file = self._build_file_name()
with open(self._file, self._openmode(self.data)) as fh:
fh.write(self.data)
return self._file
@property
def stream(self):
"""Return a stream representation of the data."""
if not PY2 and isinstance(self.data, bytes):
return BytesIO(self.data)
else:
return StringIO(text_type(self.data))
class StreamHandler(DataHandler):
prop = 'stream'
@property
def stream(self):
"""Return the stream."""
return self._stream
@stream.setter
def stream(self, value):
"""Set the stream."""
self._reset_cache()
self._stream = value
self._check_valid()
@property
def data(self):
"""Return the data from the stream."""
if self._data is None:
self._data = self.stream.read()
return self._data
class UrlHandler(FileHandler):
prop = 'url'
@property
def url(self):
"""Return the URL."""
return self._url
@url.setter
def url(self, value):
"""Set the URL value."""
self._reset_cache()
self._url = value
self._check_valid()
@property
def file(self):
if self._file is not None:
return self._file
self._file = self._build_file_name(href=self.url)
max_byte_size = self.max_input_size()
# Create request
try:
reference_file = self._openurl(self.url, self.post_data)
data_size = reference_file.headers.get('Content-Length', 0)
except Exception as e:
raise NoApplicableCode('File reference error: {}'.format(e))
FSEE = FileSizeExceeded(
'File size for input {} exceeded. Maximum allowed: {} megabytes'.
            format(self.inpt.get('identifier', '?'), max_byte_size))
if int(data_size) > int(max_byte_size):
raise FSEE
try:
with open(self._file, 'wb') as f:
data_size = 0
for chunk in reference_file.iter_content(chunk_size=1024):
data_size += len(chunk)
if int(data_size) > int(max_byte_size):
raise FSEE
f.write(chunk)
except Exception as e:
raise NoApplicableCode(e)
return self._file
@property
def post_data(self):
return self._post_data
@post_data.setter
def post_data(self, value):
self._post_data = value
@staticmethod
def _openurl(href, data=None):
"""Open given href.
"""
LOGGER.debug('Fetching URL {}'.format(href))
if data is not None:
req = requests.post(url=href, data=data, stream=True)
else:
req = requests.get(url=href, stream=True)
return req
@staticmethod
def max_input_size():
"""Calculates maximal size for input file based on configuration
and units.
:return: maximum file size in bytes
"""
ms = config.get_config_value('server', 'maxsingleinputsize')
return config.get_size_mb(ms) * 1024**2
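    # Hedged example: assuming a configuration entry like
    #   [server]
    #   maxsingleinputsize = 3mb
    # max_input_size() would return 3 * 1024**2 bytes. The exact option syntax
    # is an assumption based on get_size_mb above.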
class SimpleHandler(DataHandler):
"""Data handler for Literal In- and Outputs
>>> class Int_type(object):
... @staticmethod
... def convert(value): return int(value)
>>>
>>> class MyValidator(object):
... @staticmethod
... def validate(inpt): return 0 < inpt.data < 3
>>>
>>> inpt = SimpleHandler(data_type = Int_type)
>>> inpt.validator = MyValidator
>>>
>>> inpt.data = 1
>>> inpt.validator.validate(inpt)
True
>>> inpt.data = 5
>>> inpt.validator.validate(inpt)
False
"""
def __init__(self, workdir=None, data_type=None, mode=MODE.NONE):
DataHandler.__init__(self, workdir=workdir, mode=mode)
if data_type not in LITERAL_DATA_TYPES:
raise ValueError('data_type {} not in {}'.format(data_type, LITERAL_DATA_TYPES))
self.data_type = data_type
@DataHandler.data.setter
def data(self, value):
"""Set data value. Inputs are converted into target format.
"""
if self.data_type and value is not None:
value = convert(self.data_type, value)
DataHandler.data.fset(self, value)
class BasicIO:
"""Basic Input/Output class
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None,
min_occurs=1, max_occurs=1, metadata=[]):
self.identifier = identifier
self.title = title
self.abstract = abstract
self.keywords = keywords
self.min_occurs = int(min_occurs)
self.max_occurs = int(max_occurs)
self.metadata = metadata
class BasicLiteral:
"""Basic literal Input/Output class
"""
def __init__(self, data_type="integer", uoms=None):
assert data_type in LITERAL_DATA_TYPES
self.data_type = data_type
# list of uoms
self.uoms = []
# current uom
self._uom = None
# add all uoms (upcasting to UOM)
if uoms is not None:
for uom in uoms:
if not isinstance(uom, UOM):
uom = UOM(uom)
self.uoms.append(uom)
if self.uoms:
# default/current uom
self.uom = self.uoms[0]
@property
def uom(self):
return self._uom
@uom.setter
def uom(self, uom):
if uom is not None:
self._uom = uom
class BasicComplex(object):
"""Basic complex input/output class
"""
def __init__(self, data_format=None, supported_formats=None):
self._data_format = data_format
self._supported_formats = ()
if supported_formats:
self.supported_formats = supported_formats
if data_format:
self.data_format = data_format
elif self.supported_formats:
# not an empty list, set the default/current format to the first
self.data_format = supported_formats[0]
def get_format(self, mime_type):
"""
:param mime_type: given mimetype
:return: Format
"""
for frmt in self.supported_formats:
if frmt.mime_type == mime_type:
return frmt
else:
return None
@property
def validator(self):
"""Return the proper validator for given data_format
"""
return self.data_format.validate
@property
def supported_formats(self):
return self._supported_formats
@supported_formats.setter
def supported_formats(self, supported_formats):
"""Setter of supported formats
"""
def set_format_validator(supported_format):
if not supported_format.validate or \
supported_format.validate == emptyvalidator:
supported_format.validate =\
get_validator(supported_format.mime_type)
return supported_format
self._supported_formats = tuple(map(set_format_validator, supported_formats))
@property
def data_format(self):
return self._data_format
@data_format.setter
def data_format(self, data_format):
"""self data_format setter
"""
if self._is_supported(data_format):
self._data_format = data_format
if not data_format.validate or data_format.validate == emptyvalidator:
data_format.validate = get_validator(data_format.mime_type)
else:
raise InvalidParameterValue("Requested format {}, {}, {} not supported".format(
data_format.mime_type,
data_format.encoding,
data_format.schema),
'mimeType')
def _is_supported(self, data_format):
if self.supported_formats:
for frmt in self.supported_formats:
if frmt.same_as(data_format):
return True
return False
class BasicBoundingBox(object):
"""Basic BoundingBox input/output class
"""
def __init__(self, crss=None, dimensions=2):
self.crss = crss or ['epsg:4326']
self.crs = self.crss[0]
self.dimensions = dimensions
self.ll = []
self.ur = []
class LiteralInput(BasicIO, BasicLiteral, SimpleHandler):
"""LiteralInput input abstract class
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None,
data_type="integer", workdir=None, allowed_values=None,
uoms=None, mode=MODE.NONE,
min_occurs=1, max_occurs=1, metadata=[],
default=None, default_type=SOURCE_TYPE.DATA):
BasicIO.__init__(self, identifier, title, abstract, keywords,
min_occurs, max_occurs, metadata)
BasicLiteral.__init__(self, data_type, uoms)
SimpleHandler.__init__(self, workdir, data_type, mode=mode)
if default_type != SOURCE_TYPE.DATA:
raise InvalidParameterValue("Source types other than data are not supported.")
self.any_value = is_anyvalue(allowed_values)
self.allowed_values = []
if not self.any_value:
self.allowed_values = make_allowedvalues(allowed_values)
self._default = default
self._default_type = default_type
if default is not None:
self.data = default
@property
def validator(self):
"""Get validator for any value as well as allowed_values
:rtype: function
"""
if self.any_value:
return validate_anyvalue
else:
return validate_allowed_values
@property
def json(self):
"""Get JSON representation of the input
"""
data = {
'identifier': self.identifier,
'title': self.title,
'abstract': self.abstract,
'keywords': self.keywords,
'type': 'literal',
'data_type': self.data_type,
'workdir': self.workdir,
'any_value': self.any_value,
'allowed_values': [value.json for value in self.allowed_values],
'mode': self.valid_mode,
'data': self.data,
'min_occurs': self.min_occurs,
'max_occurs': self.max_occurs
}
if self.uoms:
data["uoms"] = [uom.json for uom in self.uoms],
if self.uom:
data["uom"] = self.uom.json
return data
class LiteralOutput(BasicIO, BasicLiteral, SimpleHandler):
"""Basic LiteralOutput class
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None,
data_type=None, workdir=None, uoms=None, validate=None,
mode=MODE.NONE):
BasicIO.__init__(self, identifier, title, abstract, keywords)
BasicLiteral.__init__(self, data_type, uoms)
SimpleHandler.__init__(self, workdir=None, data_type=data_type,
mode=mode)
self._storage = None
@property
def storage(self):
return self._storage
@storage.setter
def storage(self, storage):
self._storage = storage
@property
def validator(self):
"""Get validator for any value as well as allowed_values
"""
return validate_anyvalue
class BBoxInput(BasicIO, BasicBoundingBox, DataHandler):
"""Basic Bounding box input abstract class
"""
def __init__(self, identifier, title=None, abstract=None, keywords=[], crss=None,
dimensions=None, workdir=None,
mode=MODE.SIMPLE,
min_occurs=1, max_occurs=1, metadata=[],
default=None, default_type=SOURCE_TYPE.DATA):
BasicIO.__init__(self, identifier, title, abstract, keywords,
min_occurs, max_occurs, metadata)
BasicBoundingBox.__init__(self, crss, dimensions)
DataHandler.__init__(self, workdir=workdir, mode=mode)
if default_type != SOURCE_TYPE.DATA:
raise InvalidParameterValue("Source types other than data are not supported.")
self._default = default
self._default_type = default_type
self._set_default_value(default, default_type)
@property
def json(self):
"""Get JSON representation of the input. It returns following keys in
the JSON object:
* identifier
* title
* abstract
* type
* crs
* bbox
* dimensions
* workdir
* mode
"""
return {
'identifier': self.identifier,
'title': self.title,
'abstract': self.abstract,
'keywords': self.keywords,
'type': 'bbox',
'crs': self.crs,
'crss': self.crss,
'bbox': (self.ll, self.ur),
'dimensions': self.dimensions,
'workdir': self.workdir,
'mode': self.valid_mode,
'min_occurs': self.min_occurs,
'max_occurs': self.max_occurs
}
class BBoxOutput(BasicIO, BasicBoundingBox, DataHandler):
"""Basic BoundingBox output class
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None, crss=None,
dimensions=None, workdir=None, mode=MODE.NONE):
BasicIO.__init__(self, identifier, title, abstract, keywords)
BasicBoundingBox.__init__(self, crss, dimensions)
DataHandler.__init__(self, workdir=None, mode=mode)
self._storage = None
@property
def storage(self):
return self._storage
@storage.setter
def storage(self, storage):
self._storage = storage
class ComplexInput(BasicIO, BasicComplex, IOHandler):
"""Complex input abstract class
>>> ci = ComplexInput()
>>> ci.validator = 1
>>> ci.validator
1
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None,
workdir=None, data_format=None, supported_formats=None,
mode=MODE.NONE,
min_occurs=1, max_occurs=1, metadata=[],
default=None, default_type=SOURCE_TYPE.DATA):
BasicIO.__init__(self, identifier, title, abstract, keywords,
min_occurs, max_occurs, metadata)
IOHandler.__init__(self, workdir=workdir, mode=mode)
BasicComplex.__init__(self, data_format, supported_formats)
self._default = default
self._default_type = default_type
def file_handler(self, inpt):
"""<wps:Reference /> handler.
Used when href is a file url."""
extend_instance(self, FileHandler)
# check if file url is allowed
self._validate_file_input(href=inpt.get('href'))
# save the file reference input in workdir
tmp_file = self._build_file_name(href=inpt.get('href'))
try:
inpt_file = urlparse(inpt.get('href')).path
inpt_file = os.path.abspath(inpt_file)
os.symlink(inpt_file, tmp_file)
LOGGER.debug("Linked input file {} to {}.".format(inpt_file, tmp_file))
except Exception:
# TODO: handle os.symlink on windows
# raise NoApplicableCode("Could not link file reference: {}".format(e))
LOGGER.warn("Could not link file reference")
shutil.copy2(inpt_file, tmp_file)
return tmp_file
def url_handler(self, inpt):
# That could possibly go into the data property...
if inpt.get('method') == 'POST':
if 'body' in inpt:
self.post_data = inpt.get('body')
elif 'bodyreference' in inpt:
self.post_data = requests.get(url=inpt.get('bodyreference')).text
else:
raise AttributeError("Missing post data content.")
return inpt.get('href')
def process(self, inpt):
"""Subclass with the appropriate handler given the data input."""
href = inpt.get('href', None)
self.inpt = inpt
if href:
if urlparse(href).scheme == 'file':
self.file = self.file_handler(inpt)
else:
# No file download occurs here. The file content will
# only be retrieved when the file property is accessed.
self.url = self.url_handler(inpt)
else:
self.data = inpt.get('data')
@staticmethod
def _validate_file_input(href):
href = href or ''
parsed_url = urlparse(href)
if parsed_url.scheme != 'file':
raise FileURLNotSupported('Invalid URL scheme')
file_path = parsed_url.path
if not file_path:
raise FileURLNotSupported('Invalid URL path')
file_path = os.path.abspath(file_path)
# build allowed paths list
inputpaths = config.get_config_value('server', 'allowedinputpaths')
allowed_paths = [os.path.abspath(p.strip()) for p in inputpaths.split(':') if p.strip()]
for allowed_path in allowed_paths:
if file_path.startswith(allowed_path):
LOGGER.debug("Accepted file url as input.")
return
raise FileURLNotSupported()
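    # Hedged configuration sketch for _validate_file_input: file:// references
    # are accepted only under paths listed (colon-separated) in the server
    # config, e.g. with invented paths:
    #
    #   [server]
    #   allowedinputpaths = /tmp/wps_inputs:/data/shared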
class ComplexOutput(BasicIO, BasicComplex, IOHandler):
"""Complex output abstract class
>>> # temporary configuration
>>> import ConfigParser
>>> from pywps.storage import *
>>> config = ConfigParser.RawConfigParser()
>>> config.add_section('FileStorage')
>>> config.set('FileStorage', 'target', './')
>>> config.add_section('server')
>>> config.set('server', 'outputurl', 'http://foo/bar/filestorage')
>>>
>>> # create temporary file
>>> tiff_file = open('file.tiff', 'w')
>>> tiff_file.write("AA")
>>> tiff_file.close()
>>>
>>> co = ComplexOutput()
>>> co.file ='file.tiff'
>>> fs = FileStorage(config)
>>> co.storage = fs
>>>
>>> url = co.url # get url, data are stored
>>>
>>> co.stream.read() # get data - nothing is stored
'AA'
"""
def __init__(self, identifier, title=None, abstract=None, keywords=None,
workdir=None, data_format=None, supported_formats=None,
mode=MODE.NONE):
BasicIO.__init__(self, identifier, title, abstract, keywords)
IOHandler.__init__(self, workdir=workdir, mode=mode)
BasicComplex.__init__(self, data_format, supported_formats)
self._storage = None
@property
def storage(self):
return self._storage
@storage.setter
def storage(self, storage):
# don't set storage twice
if self._storage is None:
self._storage = storage
# TODO: refactor ?
def get_url(self):
"""Return URL pointing to data
"""
(outtype, storage, url) = self.storage.store(self)
return url
if __name__ == "__main__":
import doctest
from pywps.wpsserver import temp_dir
with temp_dir() as tmp:
os.chdir(tmp)
doctest.testmod()
|
tomkralidis/pywps
|
pywps/inout/basic.py
|
Python
|
mit
| 31,420
|
# encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from mo_dots import Data
d = Data(a=44)
de = Data(a=42)
De = Data(a=43)
|
klahnakoski/mo-dots
|
tests/ambiguous_test.py
|
Python
|
mpl-2.0
| 346
|
#Slic3r command line wrapper and FAB UI synchronization tool
#sudo /var/www/slic3r/slic3r -load /var/www/slic3r/config.ini -o /var/www/slic3r/output.gcode /var/www/slic3r/cube.stl
from subprocess import Popen, PIPE, STDOUT
import time
import os, sys, getopt
from subprocess import call
times=[]
percents=[]
names=[]
process=[23.56, 5.53, 13.30, 1.14, 0.07, 1.17, 2.33, 0.004 , 23.67, 0.09, 27.05,0]
percent=0.01
completed=0
config=""
myPID=os.getpid()
name="none"
basetime=endtime=0
log_file=""
trace_file=""
input=""
output=""
config=""
task_id=0
perc_estimate=0
eta=0
elapsed=0
estimate=0
log_time=0
log_elapsed=0
started=False
usage= 'Usage: slic3r_wrapper.py -t<trace> -l<log> -i<input STL> -o<Output Gcode> -c<config> -k<task id>\n'
#python /var/www/fabui/python/slic3r_wrapper.py -t -l -i -o<Output Gcode> -c/var/www/slic3r/config.ini\n'
try:
opts, args = getopt.getopt(sys.argv[1:],"ht:l:i:o:c:k:",["help","trace=","log=","input=","output=","config="])
except getopt.GetoptError:
#Error handling for unknown or incorrect number of options
print "\n\nERROR!\n Correct usage:\n\n",usage
sys.exit(2)
for opt, arg in opts:
    if opt in ('-h', '--help'):
print usage
sys.exit()
elif opt in ("-t", "--trace"):
trace_file = arg
elif opt in ("-l", "--log"):
log_file = arg
elif opt in ("-i", "--input"):
input = arg
elif opt in ("-o", "--output"):
output = arg
elif opt in ("-c", "--config"):
config = arg
elif opt in ("-k", "--key"):
task_id = int(arg)
else:
print usage
sys.exit(2)
def printlog(percent,status):
str_log='{"slicing":{"id": "'+str(task_id)+'","pid": "'+str(myPID)+'","started": "'+str(basetime)+'","completed": "'+str(completed)+'","completed_time": "'+str(endtime)+'","stats":{"percent":"'+str(percent)+'","time_left":"'+str(eta)+'","time_elapsed":"'+str(elapsed)+'","time_total":"'+str(estimate)+'"}}}'
handle=open(log_file,'w')
print>>handle, str_log
return
#track trace
def trace(string):
out_file = open(trace_file,"a+")
out_file.write(str(string) + "<br>")
out_file.close()
print string
return
printlog(0,"initializing...")
trace("Slic3r is being initialized")
names.append("Slic3r is being initialized")
cmd = 'sudo /var/www/fabui/slic3r/slic3r -load '+config+' -o '+ output + ' ' +input
p = Popen(cmd, stdout = PIPE, stderr = STDOUT, shell = True)
slicer_pid=p.pid
#update PID info for the task attributes
call (['sudo php /var/www/fabui/script/update_slicer_task.php '+str(task_id)+ " " +str(slicer_pid)], shell=True)
i=0
basetime=time.time()
while True:
passed=time.time()-basetime
line = p.stdout.readline()
if line[:31]=="=> Processing triangulated mesh":
#avoid warnings and messages
started=True
if line!="" and started:
names.append(line)
times.append(time.time()-basetime)
#print str(i) +" - "+ line + " passed " + str(times[i])
percent+=process[i]
trace(line)
i+=1
if not line: break
growt=percent/passed
log_elapsed=time.time()-log_time
if log_elapsed>2:
#estimated time of completion and progress
perc_estimate=growt*passed #current percentage
estimate=(passed/perc_estimate)*100 #estimated total time to completion in seconds.
eta=int(abs(estimate-passed))
printlog(perc_estimate,line)
log_time=time.time() #reset counter.
#print str(i)+ " - " + str(perc_estimate) + " left: " + str(eta) +" Passed: "+str(passed)
#done!
#print "started at" + str(basetime)
#print times
endtime=time.time()
#print "ended" + str(endtime)
#print "total" + str(endtime-basetime)
completed=1
eta=0
printlog(100,"Slicing Completed!")
#finalize PHP (in case no client is connected)
call (['sudo php /var/www/fabui/script/finalize.php '+str(task_id)+ " slice"], shell=True)
sys.exit()
|
FABtotum/FAB-UI
|
fabui/python/slic3r_wrapper.py
|
Python
|
gpl-2.0
| 3,836
|
import numpy as np
import numpy.linalg
import scipy as sp
import scipy.linalg
from numpy import dot
def solve_tv_lqr(A, B, Q, R):
"""
(K, P) = solve_tv_lqr(A, B, Q, R)
Solve the time-varying discrete LQR problem.
Inputs:
A - Sequence of N matrices (system dynamics)
B - Sequence of N matrices (input matrix)
Q - Function Q(k) returning state cost at time k
R - Function R(k) returning input cost at time k
Returns:
K - Sequence of N matrices (optimal feedback gain)
    P - matrix (P(0) solution to Riccati equation)
"""
kf = len(A)
K = [None]*(kf)
P = Q(kf)
for k in reversed(range(kf)):
gamma = R(k) + dot(dot(B[k].T, P),B[k])
K_part = dot(B[k].T,dot(P,A[k])) # See lq function for explanation of this
K[k] = np.linalg.solve(gamma, K_part)
P = Q(k) + dot(dot(A[k].T,P),A[k]) - dot(K_part.T,K[k])
P = (P + P.T)/2.0 # Note: absolutely necessary for stability
return (K, P)
def solve_tv_lq(A, B, q, r, Q, S, R):
"""
(K, P) = solve_tv_lq(A, B, q, r, Q, S, R)
Solve the time-varying discrete LQ problem.
Inputs:
A - Sequence of N matrices (system dynamics)
B - Sequence of N matrices (input matrix)
q - Sequence of N+1 matrices of state linear cost
r - Sequence of N matrices of input linear cost
Q - Function Q(k) returning state cost at time k
R - Function R(k) returning input cost at time k
S - Function S(k) returning cross term cost at time k
Returns:
K - Sequence of N matrices (optimal feedback gain)
C - Sequence of N matrices (affine component of optimal control)
    P - matrix (P(0) solution to Riccati equation)
    b - matrix (b(0) solution to affine Riccati equation)
"""
kf = len(A)
K = [None]*kf
C = [None]*kf
P = Q(kf)
b = q[kf]
for k in reversed(range(kf)):
gamma = R(k) + dot(dot(B[k].T,P),B[k])
gamma_lu = sp.linalg.lu_factor(gamma, True)
# Pull K_part out so later we can replace K.T*gamma*K with
# K_part.T*K to avoid numeric instabilities from gamma *
# inverse(gamma)
K_part = dot(dot(B[k].T, P),A[k]) + S(k).T
C[k] = sp.linalg.lu_solve(gamma_lu, dot(B[k].T,b) + r[k])
K[k] = sp.linalg.lu_solve(gamma_lu, K_part)
b = q[k] - dot(K[k].T,r[k]) + dot(A[k].T - dot(K[k].T,B[k].T),b)
P = Q(k) + dot(dot(A[k].T,P),A[k]) - dot(K_part.T,K[k])
P = (P + P.T)/2.0 # Note: absolutely necessary for stability
return (K, C, P, b)
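# --- Hedged demo, not part of the original module ---
# A minimal sketch exercising solve_tv_lqr on a discrete double integrator.
# The horizon length, cost weights, and dynamics below are illustrative
# choices, not values taken from this package.
if __name__ == '__main__':
    N = 50
    dt = 0.1
    A_k = np.array([[1.0, dt], [0.0, 1.0]])  # position/velocity dynamics
    B_k = np.array([[0.0], [dt]])            # force enters through velocity
    A_seq = [A_k] * N
    B_seq = [B_k] * N
    Q = lambda k: np.eye(2)        # constant state cost
    R = lambda k: 0.1 * np.eye(1)  # constant input cost
    (K, P) = solve_tv_lqr(A_seq, B_seq, Q, R)
    # Roll out the closed loop x[k+1] = (A - B*K[k]) x[k] from x0 = (1, 0).
    x = np.array([[1.0], [0.0]])
    for k in range(N):
        x = dot(A_seq[k], x) - dot(B_seq[k], dot(K[k], x))
    print('final state:', x.ravel())  # should be driven close to the origin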
|
hilario/trep
|
src/discopt/dlqr.py
|
Python
|
gpl-3.0
| 2,583
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from mock import Mock
from boxsdk.auth import remote_managed_oauth2
def test_remote_managed_oauth2_calls_retrieve_tokens_during_refresh(access_token):
retrieve_access_token = Mock()
oauth2 = remote_managed_oauth2.RemoteOAuth2(
retrieve_access_token=retrieve_access_token,
client_id=None,
client_secret=None,
access_token=access_token,
)
retrieve_access_token.return_value = access_token
assert oauth2.refresh(access_token) == (access_token, None)
retrieve_access_token.assert_called_once_with(access_token)
|
Frencil/box-python-sdk
|
test/unit/auth/test_remote_managed_oauth2.py
|
Python
|
apache-2.0
| 641
|
'''
Created on 11-Jun-2016
@author: prakash.selvam
'''
import hashlib
import random
from Maintenance.models import RegisteredApartUser, UnmatchedRegistrations, PreRegistrations
class ApartUserUtil(object):
def model_to_dict(self, instance, include=None, exclude=None):
fields = instance._meta.concrete_fields
if include is not None:
return {f.attname: getattr(instance, f.attname) for f in fields if f.name in include.split(',')}
if exclude is not None:
return {f.attname: getattr(instance, f.attname) for f in fields if f.name not in exclude.split(',')}
return {f.attname: getattr(instance, f.attname) for f in fields}
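    # Hedged usage sketch for model_to_dict; the `user` instance and field
    # names below are invented for illustration:
    #
    #   util = ApartUserUtil()
    #   util.model_to_dict(user, include='first_name,last_name')
    #   util.model_to_dict(user, exclude='passwordHash,otp_hash')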
def verifyOTP(self, apartment_id, mobile_number, otp):
try:
otp_hash_object = hashlib.sha1(otp)
otp_hash = otp_hash_object.hexdigest()
regResultSet = RegisteredApartUser.objects.filter(mobile_number = mobile_number, apartment_id = apartment_id,
otp_hash = otp_hash)
if regResultSet.count()>0:
regResultSet.update(verified_mobile = True,
otp_hash = '')
result = {'status': 'success', 'msg':'mobile number successfully verified'}
return result
else:
result = {'status': 'failure', 'msg':'wrong otp'}
except:
raise
return result
def registerUserAccount(self,first_name,last_name, block_name, flat_number,mobile_number,
email_id,type_occupancy, have_car, apartment_id, password):
try:
type_occupancy = 1 if type_occupancy.lower() == 'owner' else 0
password_hash_object = hashlib.sha1(password)
passwordHash = password_hash_object.hexdigest()
resultSet = PreRegistrations.objects.filter(block_name = block_name, flat_number = flat_number,
mobile_number = mobile_number, apartment_id = apartment_id)
regResultSet = RegisteredApartUser.objects.filter(block_name = block_name, flat_number = flat_number,
mobile_number = mobile_number, apartment_id = apartment_id)
if regResultSet.count()>0:
result = {'status': 'failure', 'msg':'Account already registered try to login or reset password'}
return result
if (resultSet.count()>0):
otp = str(random.randrange(1111, 9999))
otp_hash_object = hashlib.sha1(otp)
otp_hash = otp_hash_object.hexdigest()
RegisteredApartUser.objects.create(first_name = first_name,
last_name = last_name,
block_name = block_name,
flat_number = flat_number,
mobile_number = mobile_number,
email_id = email_id,
type_occupancy = type_occupancy,
have_car = have_car,
apartment_id = apartment_id,
passwordHash = passwordHash,
otp_hash = otp_hash,
verified_mobile = False)
                result = {'status': 'success', 'msg':'Account successfully created', 'otp': otp}
else:
unregResultset = UnmatchedRegistrations.objects.filter(mobile_number = mobile_number)
if unregResultset.count()>0:
unregResultset.update(first_name = first_name,
last_name = last_name,
block_name = block_name,
flat_number = flat_number,
mobile_number = mobile_number,
email_id = email_id,
type_occupancy = type_occupancy,
have_car = have_car,
apartment_id = apartment_id,
passwordHash = passwordHash)
else:
UnmatchedRegistrations.objects.create(first_name = first_name,
last_name = last_name,
block_name = block_name,
flat_number = flat_number,
mobile_number = mobile_number,
email_id = email_id,
type_occupancy = type_occupancy,
have_car = have_car,
apartment_id = apartment_id,
passwordHash = passwordHash)
result = {'status': 'notmatched', 'msg':'mobile number not in database. Contact your apartment admin'}
except:
raise
return result
def getpreregistrations(self, block_name, flat_number, apartment_id, type_occupancy):
try:
type_occupancy = 1 if type_occupancy.lower() == 'owner' else 0
unregResultset = PreRegistrations.objects.filter(block_name=block_name, flat_number=flat_number, apartment_id=apartment_id,type_occupancy=type_occupancy)
if unregResultset.count()>0:
result = self.model_to_dict(unregResultset[0],None,'passwordHash,have_car')
else:
result = {'status': 'notmatched', 'msg':'registration details not in database.'}
except:
raise
return result
def getunmatchreg(self, block_name, flat_number, apartment_id, type_occupancy):
try:
type_occupancy = 1 if type_occupancy.lower() == 'owner' else 0
unregResultset = UnmatchedRegistrations.objects.filter(block_name=block_name, flat_number=flat_number, apartment_id=apartment_id,type_occupancy=type_occupancy)
if unregResultset.count()>0:
result = self.model_to_dict(unregResultset[0],None,'passwordHash,have_car')
else:
result = {'status': 'notmatched', 'msg':'registration details not in database.'}
except:
raise
return result
def updatePreRegUser(self,first_name,last_name, block_name, flat_number,mobile_number,
email_id,type_occupancy, apartment_id):
try:
type_occupancy = 1 if type_occupancy.lower() == 'owner' else 0
resultSet = PreRegistrations.objects.filter(block_name = block_name, flat_number = flat_number,
apartment_id = apartment_id, type_occupancy = type_occupancy)
if (resultSet.count()>0):
resultSet.update(
first_name = first_name,
last_name = last_name,
mobile_number = mobile_number,
email_id = email_id)
result = {'status': 'success', 'msg':'registration details updated. ask customer to try registering again'}
else:
result = {'status': 'notmatched', 'msg':'registration details not in database.'}
except:
raise
return result
|
prakashselvam/Apart
|
Maintenance/Maintenance_utils/ApartmentUserUtils.py
|
Python
|
gpl-3.0
| 7,764
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('tusken_berserker')
mobileTemplate.setLevel(31)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(True)
mobileTemplate.setScale(1)
mobileTemplate.setSocialGroup("tusken raider")
mobileTemplate.setAssistRange(6)
mobileTemplate.setStalker(True)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_tusken_raider.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/ranged/rifle/shared_rifle_tusken_generic.iff', WeaponType.RIFLE, 1.0, 24, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
mobileTemplate.setDefaultAttack('rangedShotrifle')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('tusken_berserker', mobileTemplate)
return
|
agry/NGECore2
|
scripts/mobiles/tatooine/tusken_berserker.py
|
Python
|
lgpl-3.0
| 1,347
|
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
def quote_value(self, value):
return self.connection.escape(value)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
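# Hedged illustration of the one-off default emulation above: adding a
# models.TextField(default='') to a model maps to MySQL longtext, so
# skip_default() returns True, the column is added without a DEFAULT clause,
# and add_field() then backfills existing rows with roughly (table and
# column names invented for the sketch):
#
#   UPDATE `app_article` SET `body` = ''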
|
cyaninc/django-mysql-pymysql
|
src/mysql_pymysql/schema.py
|
Python
|
bsd-3-clause
| 2,073
|
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockPicking(models.Model):
_inherit = 'stock.picking'
stock_request_ids = fields.One2many(comodel_name='stock.request',
string='Stock Requests',
compute='_compute_stock_request_ids')
stock_request_count = fields.Integer('Stock Request #',
compute='_compute_stock_request_ids')
@api.depends('move_lines')
def _compute_stock_request_ids(self):
for rec in self:
rec.stock_request_ids = rec.move_lines.mapped('stock_request_ids')
rec.stock_request_count = len(rec.stock_request_ids)
def action_view_stock_request(self):
"""
:return dict: dictionary value for created view
"""
action = self.env.ref(
'stock_request.action_stock_request_form').read()[0]
requests = self.mapped('stock_request_ids')
if len(requests) > 1:
action['domain'] = [('id', 'in', requests.ids)]
elif requests:
action['views'] = [
(self.env.ref('stock_request.view_stock_request_form').id,
'form')]
action['res_id'] = requests.id
return action
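
# Illustrative note (not part of the original module): the method above returns
# the stock_request form action reshaped for two cases (ids made up):
#   multiple requests -> action['domain'] = [('id', 'in', [7, 9])]
#   single request    -> action['views'] = [(form_view_id, 'form')]
#                        action['res_id'] = 7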
|
Vauxoo/stock-logistics-warehouse
|
stock_request/models/stock_picking.py
|
Python
|
agpl-3.0
| 1,420
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import re
import urlparse
import pymongo
from pcnile.resource import atom_download_resource
from bson import ObjectId
if __name__ == '__main__':
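    # Note: pymongo.Connection is the legacy (pre-3.0) entry point; on modern
    # pymongo the equivalent would be pymongo.MongoClient().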
db = pymongo.Connection().server
for item in db.server.find():
item['resource']['download'] = atom_download_resource(item['resource']['download'])
db.server.save(item)
|
pczhaoyun/obtainfo
|
tools/atom.py
|
Python
|
apache-2.0
| 417
|
import _plotly_utils.basevalidators
class FontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="font", parent_name="barpolar.hoverlabel", **kwargs):
super(FontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Font"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
""",
),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/barpolar/hoverlabel/_font.py
|
Python
|
mit
| 1,860
|
import datetime
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse
import reversion
from users.models import Lageruser
from devicetypes.models import Type
from devicegroups.models import Devicegroup
from locations.models import Section
from Lagerregal import utils
from users.models import Department
@reversion.register()
class Building(models.Model):
name = models.CharField(_('Name'), max_length=200, unique=True)
street = models.CharField(_('Street'), max_length=100, blank=True)
number = models.CharField(_('Number'), max_length=30, blank=True)
zipcode = models.CharField(_('ZIP code'), max_length=5, blank=True)
city = models.CharField(_('City'), max_length=100, blank=True)
state = models.CharField(_('State'), max_length=100, blank=True)
country = models.CharField(_('Country'), max_length=100, blank=True)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Building')
verbose_name_plural = _('Buildings')
permissions = (
("read_building", _("Can read Building")),
)
def get_absolute_url(self):
return reverse('building-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('building-edit', kwargs={'pk': self.pk})
@reversion.register()
class Room(models.Model):
name = models.CharField(_('Name'), max_length=200)
building = models.ForeignKey(Building, null=True, on_delete=models.SET_NULL)
section = models.ForeignKey(Section, null=True, on_delete=models.SET_NULL, related_name="rooms", blank=True)
def __str__(self):
if self.building:
return self.name + " (" + str(self.building) + ")"
else:
return self.name
class Meta:
verbose_name = _('Room')
verbose_name_plural = _('Rooms')
permissions = (
("read_room", _("Can read Room")),
)
def get_absolute_url(self):
return reverse('room-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('room-edit', kwargs={'pk': self.pk})
@reversion.register()
class Manufacturer(models.Model):
name = models.CharField(_('Manufacturer'), max_length=200, unique=True)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Manufacturer')
verbose_name_plural = _('Manufacturers')
permissions = (
("read_manufacturer", _("Can read Manufacturer")),
)
def get_absolute_url(self):
return reverse('manufacturer-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('manufacturer-edit', kwargs={'pk': self.pk})
class Bookmark(models.Model):
device = models.ForeignKey("Device", on_delete=models.CASCADE)
user = models.ForeignKey(Lageruser, on_delete=models.CASCADE)
@reversion.register(follow=["typeattributevalue_set", ], exclude=[
"archived", "currentlending", "inventoried", "bookmarks", "trashed",
], ignore_duplicates=True)
class Device(models.Model):
created_at = models.DateTimeField(auto_now_add=True, blank=True, null=True)
creator = models.ForeignKey(Lageruser, on_delete=models.SET_NULL, null=True)
name = models.CharField(_('Name'), max_length=200)
inventorynumber = models.CharField(_('Inventorynumber'), max_length=50, blank=True)
serialnumber = models.CharField(_('Serialnumber'), max_length=50, blank=True)
manufacturer = models.ForeignKey(Manufacturer, blank=True, null=True, on_delete=models.SET_NULL)
hostname = models.CharField(_('Hostname'), max_length=40, blank=True)
description = models.CharField(_('Description'), max_length=10000, blank=True)
devicetype = models.ForeignKey(Type, blank=True, null=True, on_delete=models.SET_NULL)
room = models.ForeignKey(Room, blank=True, null=True, on_delete=models.SET_NULL)
group = models.ForeignKey(Devicegroup, blank=True, null=True, related_name="devices", on_delete=models.SET_NULL)
webinterface = models.CharField(_('Webinterface'), max_length=60, blank=True)
templending = models.BooleanField(default=False, verbose_name=_("For short term lending"))
currentlending = models.ForeignKey("Lending", related_name="currentdevice", null=True, blank=True,
on_delete=models.SET_NULL)
manual = models.FileField(upload_to=utils.get_file_location, null=True, blank=True)
contact = models.ForeignKey(Lageruser, related_name="as_contact",
help_text=_("Person to contact about using this device"), blank=True,
null=True, on_delete=models.SET_NULL)
archived = models.DateTimeField(null=True, blank=True)
trashed = models.DateTimeField(null=True, blank=True)
inventoried = models.DateTimeField(null=True, blank=True)
bookmarkers = models.ManyToManyField(Lageruser, through=Bookmark, related_name="bookmarks", blank=True)
department = models.ForeignKey(Department, null=True, blank=True, related_name="devices", on_delete=models.SET_NULL)
is_private = models.BooleanField(default=False)
used_in = models.ForeignKey('self', null=True, blank=True, on_delete=models.SET_NULL,)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Device')
verbose_name_plural = _('Devices')
permissions = (
("boss_mails", _("Emails for bosses")),
("managment_mails", _("Emails for managment")),
("support_mails", _("Emails for support")),
("read_device", _("Can read Device")),
("lend_device", _("Can lend Device")),
("read_puppetdetails", _("Read Puppet Details"))
)
def get_absolute_url(self):
return reverse('device-detail', kwargs={'pk': self.pk})
def get_edit_url(self):
return reverse('device-edit', kwargs={'pk': self.pk})
def get_as_dict(self):
        data = {}
        data["name"] = self.name
        data["description"] = self.description
        data["manufacturer"] = self.manufacturer
        data["devicetype"] = self.devicetype
        data["room"] = self.room
        return data
def is_overdue(self):
if self.currentlending is None:
return False
if self.currentlending.duedate < datetime.date.today():
return True
return False
@staticmethod
def active():
return Device.objects.filter(archived=None, trashed=None)
@staticmethod
    def devices_for_departments(departments=()):
        # An immutable default avoids the shared mutable default-argument
        # pitfall; an empty tuple behaves the same in the __in lookups below.
return Device.objects.filter(department__in=departments).exclude(
~Q(department__in=departments), is_private=True)
class DeviceInformationType(models.Model):
keyname = models.CharField(_('Name'), max_length=200)
humanname = models.CharField(_('Human readable name'), max_length=200)
def __str__(self):
return self.humanname
class Meta:
verbose_name = _('Information Type')
        verbose_name_plural = _('Information Types')
class DeviceInformation(models.Model):
information = models.CharField(_('Information'), max_length=200)
device = models.ForeignKey(Device, related_name="information", on_delete=models.CASCADE)
infotype = models.ForeignKey(DeviceInformationType, on_delete=models.CASCADE)
def __str__(self):
return str(self.infotype) + ": " + self.information
class Meta:
verbose_name = _('Information')
verbose_name_plural = _('Information')
@reversion.register(ignore_duplicates=True)
class Lending(models.Model):
owner = models.ForeignKey(Lageruser, verbose_name=_("Lent to"), on_delete=models.SET_NULL, null=True)
lenddate = models.DateField(auto_now_add=True)
duedate = models.DateField(blank=True, null=True)
duedate_email = models.DateField(blank=True, null=True)
returndate = models.DateField(blank=True, null=True)
device = models.ForeignKey(Device, null=True, blank=True, on_delete=models.CASCADE)
smalldevice = models.CharField(_("Small Device"), max_length=200, null=True, blank=True)
class Meta:
verbose_name = _('Lending')
verbose_name_plural = _('Lendings')
class Template(models.Model):
templatename = models.CharField(_('Templatename'), max_length=200)
name = models.CharField(_('Name'), max_length=200)
manufacturer = models.ForeignKey(Manufacturer, blank=True, null=True, on_delete=models.CASCADE)
description = models.CharField(_('Description'), max_length=1000, blank=True)
devicetype = models.ForeignKey(Type, blank=True, null=True, on_delete=models.CASCADE)
def __str__(self):
return self.templatename
class Meta:
ordering = ['name']
verbose_name = _('Template')
verbose_name_plural = _('Templates')
permissions = (
("read_template", _("Can read Template")),
)
def get_absolute_url(self):
return reverse('device-list')
def get_as_dict(self):
        data = {}
        data["name"] = self.name
        data["description"] = self.description
        data["manufacturer"] = self.manufacturer
        data["devicetype"] = self.devicetype
        return data
class Note(models.Model):
device = models.ForeignKey(Device, related_name="notes", on_delete=models.CASCADE)
note = models.CharField(max_length=5000)
creator = models.ForeignKey(Lageruser, on_delete=models.SET_NULL, null=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
verbose_name = _("Note")
verbose_name_plural = _("Notes")
def get_absolute_url(self):
return reverse("device-detail", kwargs={'pk': self.device.pk})
class Picture(models.Model):
device = models.ForeignKey(Device, related_name="pictures", on_delete=models.CASCADE)
image = models.ImageField(upload_to=utils.get_file_location)
caption = models.CharField(max_length=200, null=True, blank=True)
class Meta:
verbose_name = _("Picture")
verbose_name_plural = _("Pictures")
def get_absolute_url(self):
return reverse("device-detail", kwargs={'pk': self.device.pk})
|
vIiRuS/Lagerregal
|
devices/models.py
|
Python
|
bsd-3-clause
| 10,243
|
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
class ResampleError(Exception):
pass
class OverlapError(ResampleError):
pass
class NoOverlapError(OverlapError):
pass
class SmallOverlapError(OverlapError):
pass
def resample_with_wcs(targetwcs, wcs, Limages=[], L=3, spline=True,
splineFallback = True,
splineStep = 25,
splineMargin = 12,
table=True,
cinterp = True):
'''
Returns (Yo,Xo, Yi,Xi, ims)
Use the results like:
target[Yo,Xo] = nearest_neighbour[Yi,Xi]
# or
target[Yo,Xo] = ims[i]
raises NoOverlapError if the target and input WCSes do not
overlap. Raises SmallOverlapError if they do not overlap "enough"
(as described below).
targetwcs, wcs: duck-typed WCS objects that must have:
- properties "imagew", "imageh"
- methods "r,d = pixelxy2radec(x, y)"
- "ok,x,y = radec2pixelxy(ra, dec)"
The WCS functions are expected to operate in FITS pixel-indexing.
The WCS function must support 1-d, broadcasting, vectorized
pixel<->radec calls.
Limages: list of images to Lanczos-interpolate at the given Lanczos order.
If empty, just returns nearest-neighbour indices.
L: int, lanczos order
spline: bool: use a spline interpolator to reduce the number of
WCS calls.
splineFallback: bool: the spline requires a certain amount of
spatial overlap. With splineFallback = True, fall back to
non-spline version. With splineFallback = False, just raises
SmallOverlapError.
splineStep: approximate grid size
table: use Lanczos3 look-up table?
'''
### DEBUG
#ps = PlotSequence('resample')
ps = None
H,W = int(targetwcs.imageh), int(targetwcs.imagew)
h,w = int( wcs.imageh), int( wcs.imagew)
for im in Limages:
assert(im.shape == (h,w))
#print 'Target size', W, H
#print 'Input size', w, h
# First find the approximate bbox of the input image in
# the target image so that we don't ask for way too
# many out-of-bounds pixels...
XY = []
for x,y in [(0,0), (w-1,0), (w-1,h-1), (0, h-1)]:
# [-2:]: handle ok,ra,dec or ra,dec
ok,xw,yw = targetwcs.radec2pixelxy(
*(wcs.pixelxy2radec(float(x + 1), float(y + 1))[-2:]))
XY.append((xw - 1, yw - 1))
XY = np.array(XY)
x0,y0 = np.round(XY.min(axis=0)).astype(int)
x1,y1 = np.round(XY.max(axis=0)).astype(int)
if spline:
# Now we build a spline that maps "target" pixels to "input" pixels
# spline inputs: pixel coords in the 'target' image
margin = splineMargin
step = splineStep
xlo = max(0, x0-margin)
xhi = min(W-1, x1+margin)
ylo = max(0, y0-margin)
yhi = min(H-1, y1+margin)
if xlo > xhi or ylo > yhi:
raise NoOverlapError()
        nx = int(np.ceil(float(xhi - xlo) / step)) + 1
        xx = np.linspace(xlo, xhi, nx)
        ny = int(np.ceil(float(yhi - ylo) / step)) + 1
        yy = np.linspace(ylo, yhi, ny)
if ps:
def expand_axes():
M = 100
ax = plt.axis()
plt.axis([ax[0]-M, ax[1]+M, ax[2]-M, ax[3]+M])
plt.axis('scaled')
plt.clf()
plt.plot(XY[:,0], XY[:,1], 'ro')
plt.plot(xx, np.zeros_like(xx), 'b.')
plt.plot(np.zeros_like(yy), yy, 'c.')
plt.plot(xx, np.zeros_like(xx)+max(yy), 'b.')
plt.plot(max(xx) + np.zeros_like(yy), yy, 'c.')
plt.plot([0,W,W,0,0], [0,0,H,H,0], 'k-')
plt.title('A: Target image: bbox')
expand_axes()
ps.savefig()
if (len(xx) == 0) or (len(yy) == 0):
#print 'No overlap between input and target WCSes'
raise NoOverlapError()
if (len(xx) <= 3) or (len(yy) <= 3):
#print 'Not enough overlap between input and target WCSes'
if splineFallback:
spline = False
else:
raise SmallOverlapError()
if spline:
# spline inputs -- pixel coords in the 'target' image
# (xx, yy)
# spline outputs -- pixel coords in the 'input' image
# (XX, YY)
# We use vectorized radec <-> pixelxy functions here
R = targetwcs.pixelxy2radec(xx[np.newaxis,:] + 1,
yy[:,np.newaxis] + 1)
if len(R) == 3:
ok = R[0]
assert(np.all(ok))
ok,XX,YY = wcs.radec2pixelxy(*(R[-2:]))
del R
XX -= 1.
YY -= 1.
assert(np.all(ok))
del ok
# ok,XX,YY = wcs.radec2pixelxy(
# *(targetwcs.pixelxy2radec(
# xx[np.newaxis,:] + 1,
# yy[:,np.newaxis] + 1)[-2:]))
# XX -= 1.
# YY -= 1.
# del ok
# print 'Spline inputs:'
# print xx
# print yy
# print 'Spline outputs:'
# print XX
# print YY
if ps:
plt.clf()
            plt.plot(XX.ravel(), YY.ravel(), 'b.')  # spline outputs (input-image coords)
plt.plot([0,w,w,0,0], [0,0,h,h,0], 'k-')
plt.title('B: Input image')
expand_axes()
ps.savefig()
import scipy.interpolate as interp
xspline = interp.RectBivariateSpline(xx, yy, XX.T)
yspline = interp.RectBivariateSpline(xx, yy, YY.T)
del XX
del YY
else:
margin = 0
# Now, build the full pixel grid (in the ouput image) we want to
# interpolate...
ixo = np.arange(max(0, x0-margin), min(W, x1+margin+1), dtype=int)
iyo = np.arange(max(0, y0-margin), min(H, y1+margin+1), dtype=int)
if len(ixo) == 0 or len(iyo) == 0:
raise NoOverlapError()
if spline:
# And run the interpolator.
# [xy]spline() does a meshgrid-like broadcast, so fxi,fyi have
# shape n(iyo),n(ixo)
#
# f[xy]i: floating-point pixel coords in the input image
fxi = xspline(ixo, iyo).T.astype(np.float32)
fyi = yspline(ixo, iyo).T.astype(np.float32)
if ps:
plt.clf()
plt.plot(ixo, np.zeros_like(ixo), 'r,')
plt.plot(np.zeros_like(iyo), iyo, 'm,')
plt.plot(ixo, max(iyo) + np.zeros_like(ixo), 'r,')
plt.plot(max(ixo) + np.zeros_like(iyo), iyo, 'm,')
plt.plot([0,W,W,0,0], [0,0,H,H,0], 'k-')
plt.title('C: Target image; i*o')
expand_axes()
ps.savefig()
plt.clf()
plt.plot(fxi, fyi, 'r,')
plt.plot([0,w,w,0,0], [0,0,h,h,0], 'k-')
plt.title('D: Input image, f*i')
expand_axes()
ps.savefig()
else:
# Use 2-d broadcasting pixel <-> radec functions here.
# This can be rather expensive, with lots of WCS calls!
R = targetwcs.pixelxy2radec(ixo[np.newaxis,:] + 1.,
iyo[:,np.newaxis] + 1.)
if len(R) == 3:
# ok,ra,dec
R = R[1:]
ok,fxi,fyi = wcs.radec2pixelxy(*R)
assert(np.all(ok))
# ok,fxi,fyi = wcs.radec2pixelxy(
# *targetwcs.pixelxy2radec(ixo[np.newaxis,:] + 1.,
# iyo[:,np.newaxis] + 1.))
del ok
fxi -= 1.
fyi -= 1.
# print 'ixo', ixo.shape
# print 'iyo', iyo.shape
# print 'fxi', fxi.shape
# print 'fyi', fyi.shape
# Keep only in-bounds pixels.
## HACK -- 0.51
I = np.flatnonzero((fxi >= -0.5) * (fyi >= -0.5) *
(fxi < w-0.51) * (fyi < h-0.51))
fxi = fxi.flat[I]
fyi = fyi.flat[I]
# i[xy]i: int coords in the input image
ixi = np.round(fxi).astype(np.int32)
iyi = np.round(fyi).astype(np.int32)
#print 'dims', (len(iyo),len(ixo))
iy,ix = np.unravel_index(I, (len(iyo),len(ixo)))
iyo = iyo[0] + iy
ixo = ixo[0] + ix
# i[xy]o: int coords in the target image
if spline and ps:
plt.clf()
plt.plot(ixo, iyo, 'r,')
plt.plot([0,W,W,0,0], [0,0,H,H,0], 'k-')
plt.title('E: Target image; i*o')
expand_axes()
ps.savefig()
plt.clf()
plt.plot(fxi, fyi, 'r,')
plt.plot([0,w,w,0,0], [0,0,h,h,0], 'k-')
plt.title('F: Input image, f*i')
expand_axes()
ps.savefig()
assert(np.all(ixo >= 0))
assert(np.all(iyo >= 0))
assert(np.all(ixo < W))
assert(np.all(iyo < H))
assert(np.all(ixi >= 0))
assert(np.all(iyi >= 0))
assert(np.all(ixi < w))
assert(np.all(iyi < h))
if len(Limages):
dx = (fxi - ixi).astype(np.float32)
dy = (fyi - iyi).astype(np.float32)
del fxi
del fyi
# print 'dx', dx.min(), dx.max()
# print 'dy', dy.min(), dy.max()
# Lanczos interpolation.
# number of pixels
nn = len(ixo)
NL = 2*L+1
# accumulators for each input image
laccs = [np.zeros(nn, np.float32) for im in Limages]
if cinterp:
from .util import lanczos3_interpolate
# ixi = ixi.astype(np.int)
# iyi = iyi.astype(np.int)
# print 'ixi/iyi', ixi.shape, ixi.dtype, iyi.shape, iyi.dtype
# print 'dx/dy', dx.shape, dx.dtype, dy.shape, dy.dtype
rtn = lanczos3_interpolate(ixi, iyi, dx, dy, laccs,
[lim.astype(np.float32)
for lim in Limages])
# print 'rtn:', rtn
else:
_lanczos_interpolate(L, ixi, iyi, dx, dy, laccs, Limages,
table=table)
rims = laccs
else:
rims = []
return (iyo,ixo, iyi,ixi, rims)
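# --- Illustrative sketch (not part of the original API) ----------------------
# A hypothetical duck-typed WCS in which "RA,Dec" simply equal pixel coords,
# used to demonstrate the (Yo,Xo, Yi,Xi, ims) return convention documented
# above. Nearest-neighbour only (no Limages), spline disabled; numpy only.
class _FlatWCS(object):
    def __init__(self, w, h):
        self.imagew, self.imageh = float(w), float(h)
    def pixelxy2radec(self, x, y):
        return x, y
    def radec2pixelxy(self, r, d):
        r, d = np.broadcast_arrays(r, d)
        # "+ 0." makes writable copies; resample_with_wcs modifies in place.
        return np.ones(r.shape, bool), r + 0., d + 0.

def _demo_resample_indices():
    # Call by hand to try it; not executed on import.
    Yo, Xo, Yi, Xi, ims = resample_with_wcs(_FlatWCS(8, 8), _FlatWCS(8, 8),
                                            spline=False)
    # ims is [] here because no Limages were passed.
    source = np.arange(64.).reshape(8, 8)
    target = np.zeros((8, 8))
    target[Yo, Xo] = source[Yi, Xi]   # as described in the docstring
    return target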
def _lanczos_interpolate(L, ixi, iyi, dx, dy, laccs, limages,
table=True):
'''
L: int, Lanczos order
ixi: int, 1-d numpy array, len n, x coord in input images
iyi: ----""---- y
dx: float, 1-d numpy array, len n, fractional x coord
dy: ----""---- y
laccs: list of [float, 1-d numpy array, len n]: outputs
limages list of [float, 2-d numpy array, shape h,w]: inputs
'''
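    # For reference: the Lanczos kernel is L(x) = sinc(x) * sinc(x/L) for
    # |x| < L and 0 otherwise; each output pixel below is a kernel-weighted
    # sum over the (2L+1)^2 neighbouring input pixels, normalised by fsum.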
from .miscutils import lanczos_filter
lfunc = lanczos_filter
if L == 3:
try:
from .util import lanczos3_filter, lanczos3_filter_table
# 0: no rangecheck
if table:
#lfunc = lambda nil,x,y: lanczos3_filter_table(x,y, 0)
lfunc = lambda nil,x,y: lanczos3_filter_table(x,y, 1)
else:
lfunc = lambda nil,x,y: lanczos3_filter(x,y)
        except ImportError:
pass
h,w = limages[0].shape
n = len(ixi)
# sum of lanczos terms
fsum = np.zeros(n)
off = np.arange(-L, L+1)
#fx = np.zeros(n)
#fy = np.zeros(n)
fx = np.zeros(n, np.float32)
fy = np.zeros(n, np.float32)
for oy in off:
#print 'dy range:', min(-oy + dy), max(-oy + dy)
lfunc(L, -oy + dy, fy)
for ox in off:
lfunc(L, -ox + dx, fx)
#print 'dx range:', min(-ox + dx), max(-ox + dx)
for lacc,im in zip(laccs, limages):
lacc += fx * fy * im[np.clip(iyi + oy, 0, h-1),
np.clip(ixi + ox, 0, w-1)]
fsum += fx*fy
for lacc in laccs:
lacc /= fsum
if __name__ == '__main__':
import fitsio
from astrometry.util.util import Sip,Tan
import time
import sys
import pylab as plt
from astrometry.util.util import lanczos3_filter, lanczos3_filter_table
# x = np.linspace(-4, 4, 500)
# L = np.zeros_like(x)
# L2 = np.zeros(len(x), np.float32)
# lanczos3_filter(x, L)
# lanczos3_filter_table(x.astype(np.float32), L2, 1)
# plt.clf()
# plt.plot(x, L, 'r-')
# plt.plot(x, L2, 'b-')
# plt.savefig('l1.png')
x = np.linspace(-3.5, 4.5, 8192).astype(np.float32)
L1 = np.zeros_like(x)
L2 = np.zeros_like(x)
lanczos3_filter(x, L1)
lanczos3_filter_table(x, L2, 1)
print('L2 - L1 RMS:', np.sqrt(np.mean((L2-L1)**2)))
if True:
ra,dec = 0.,0.,
pixscale = 1e-3
W,H = 10,1
cowcs = Tan(ra, dec, (W+1)/2., (H+1)/2.,
-pixscale, 0., 0., pixscale, W, H)
dx,dy = 0.25, 0.
wcs = Tan(ra, dec, (W+1)/2. + dx, (H+1)/2. + dy,
-pixscale, 0., 0., pixscale, W, H)
pix = np.zeros((H,W), np.float32)
        pix[0, W//2] = 1.
Yo,Xo,Yi,Xi,(cpix,) = resample_with_wcs(cowcs, wcs, [pix], 3)
print('C', cpix)
Yo2,Xo2,Yi2,Xi2,(pypix,) = resample_with_wcs(cowcs, wcs, [pix], 3, cinterp=False, table=False)
print('Py', pypix)
print('RMS', np.sqrt(np.mean((cpix - pypix)**2)))
sys.exit(0)
if True:
ra,dec = 219.577111, 54.52
pixscale = 2.75 / 3600.
W,H = 10,10
cowcs = Tan(ra, dec, (W+1)/2., (H+1)/2.,
-pixscale, 0., 0., pixscale, W, H)
for i,(dx,dy) in enumerate([(0.01, 0.02),
(0.1, 0.0),
(0.2, 0.0),
(0.3, 0.0),
(0.4, 0.0),
(0.5, 0.0),
(0.6, 0.0),
(0.7, 0.0),
(0.8, 0.0),
]):
wcs = Tan(ra, dec, (W+1)/2. + dx, (H+1)/2. + dy,
-pixscale, 0., 0., pixscale, W, H)
pix = np.zeros((H,W), np.float32)
            pix[H//2, :] = 1.
            pix[:, W//2] = 1.
Yo,Xo,Yi,Xi,(cpix,) = resample_with_wcs(cowcs, wcs, [pix], 3)
Yo2,Xo2,Yi2,Xi2,(pypix,) = resample_with_wcs(cowcs, wcs, [pix], 3, cinterp=False)
cim = np.zeros((H,W))
cim[Yo,Xo] = cpix
pyim = np.zeros((H,W))
pyim[Yo2,Xo2] = pypix
plt.clf()
plt.plot(cim[0,:], 'b-', alpha=0.5)
            plt.plot(cim[H//4, :], 'c-', alpha=0.5)
            plt.plot(pyim[0,:], 'r-', alpha=0.5)
            plt.plot(pyim[H//4, :], 'm-', alpha=0.5)
plt.plot(1000. * (cim[0,:] - pyim[0,:]), 'k-', alpha=0.5)
plt.savefig('p2-%02i.png' % i)
sys.exit(0)
ra,dec = 219.577111, 54.52
pixscale = 2.75 / 3600.
#W,H = 2048, 2048
W,H = 512, 512
#W,H = 100,100
cowcs = Tan(ra, dec, (W+1)/2., (H+1)/2.,
-pixscale, 0., 0., pixscale, W, H)
cowcs.write_to('co.wcs')
if True:
#intfn = '05579a167-w1-int-1b.fits'
intfn = 'wise-frames/9a/05579a/167/05579a167-w1-int-1b.fits'
wcs = Sip(intfn)
pix = fitsio.read(intfn)
pix[np.logical_not(np.isfinite(pix))] = 0.
print('pix', pix.shape, pix.dtype)
for i in range(5):
t0 = time.clock()
Yo,Xo,Yi,Xi,ims = resample_with_wcs(cowcs, wcs, [pix], 3)
t1 = time.clock() - t0
print('C resampling took', t1)
t0 = time.clock()
Yo2,Xo2,Yi2,Xi2,ims2 = resample_with_wcs(cowcs, wcs, [pix], 3, cinterp=False, table=False)
t2 = time.clock() - t0
print('py resampling took', t2)
out = np.zeros((H,W))
out[Yo,Xo] = ims[0]
fitsio.write('resampled-c.fits', out, clobber=True)
cout = out
out = np.zeros((H,W))
out[Yo,Xo] = ims2[0]
fitsio.write('resampled-py.fits', out, clobber=True)
pyout = out
plt.clf()
plt.imshow(cout, interpolation='nearest', origin='lower')
plt.colorbar()
plt.savefig('c.png')
plt.clf()
plt.imshow(pyout, interpolation='nearest', origin='lower')
plt.colorbar()
plt.savefig('py.png')
plt.clf()
plt.imshow(cout - pyout, interpolation='nearest', origin='lower')
plt.colorbar()
plt.savefig('diff.png')
print('Max diff:', np.abs(cout - pyout).max())
|
olebole/astrometry.net
|
util/resample.py
|
Python
|
bsd-3-clause
| 16,293
|
# -*- coding: utf-8 -*-
import datetime as dt
import itertools
import logging
import re
import urlparse
from copy import deepcopy
import bson
import pytz
import itsdangerous
from modularodm import fields, Q
from modularodm.exceptions import NoResultsFound, ValidationError, ValidationValueError, QueryException
from modularodm.validators import URLValidator
import framework
from framework import analytics
from framework.addons import AddonModelMixin
from framework.auth import signals, utils
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, InvalidTokenError,
MergeConfirmedRequiredError, MergeConflictError)
from framework.bcrypt import generate_password_hash, check_password_hash
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.mongo.validators import string_required
from framework.sentry import log_exception
from framework.sessions import session
from framework.sessions.model import Session
from framework.sessions.utils import remove_sessions_for_user
from website import mails, settings, filters, security
name_formatters = {
'long': lambda user: user.fullname,
'surname': lambda user: user.family_name if user.family_name else user.fullname,
'initials': lambda user: u'{surname}, {initial}.'.format(
surname=user.family_name,
initial=user.given_name_initial,
),
}
logger = logging.getLogger(__name__)
# Hide implementation of token generation
def generate_confirm_token():
return security.random_string(30)
def generate_claim_token():
return security.random_string(30)
def generate_verification_key():
return security.random_string(30)
def validate_history_item(item):
string_required(item.get('institution'))
startMonth = item.get('startMonth')
startYear = item.get('startYear')
endMonth = item.get('endMonth')
endYear = item.get('endYear')
validate_year(startYear)
validate_year(endYear)
if startYear and endYear:
if endYear < startYear:
raise ValidationValueError('End date must be later than start date.')
elif endYear == startYear:
if endMonth and startMonth and endMonth < startMonth:
raise ValidationValueError('End date must be later than start date.')
def validate_year(item):
if item:
try:
int(item)
except ValueError:
raise ValidationValueError('Please enter a valid year.')
else:
if len(item) != 4:
raise ValidationValueError('Please enter a valid year.')
validate_url = URLValidator()
def validate_profile_websites(profile_websites):
for value in profile_websites or []:
try:
validate_url(value)
except ValidationError:
# Reraise with a better message
raise ValidationError('Invalid personal URL.')
def validate_social(value):
validate_profile_websites(value.get('profileWebsites'))
# TODO - rename to _get_current_user_from_session /HRYBACKI
def _get_current_user():
uid = session._get_current_object() and session.data.get('auth_user_id')
return User.load(uid)
# TODO: This should be a class method of User?
def get_user(email=None, password=None, verification_key=None):
"""Get an instance of User matching the provided params.
:return: The instance of User requested
:rtype: User or None
"""
# tag: database
if password and not email:
raise AssertionError('If a password is provided, an email must also '
'be provided.')
query_list = []
if email:
email = email.strip().lower()
query_list.append(Q('emails', 'eq', email) | Q('username', 'eq', email))
if password:
password = password.strip()
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
except Exception as err:
logger.error(err)
user = None
if user and not user.check_password(password):
return False
return user
if verification_key:
query_list.append(Q('verification_key', 'eq', verification_key))
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
return user
except Exception as err:
logger.error(err)
return None
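# Illustrative usage of get_user() (address and password made up):
#   user = get_user(email='alice@example.com', password='hunter2')
#   -> a User on success, False on a wrong password for an existing
#      account, and None when no account matches.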
class Auth(object):
def __init__(self, user=None, api_node=None,
private_key=None):
self.user = user
self.api_node = api_node
self.private_key = private_key
def __repr__(self):
return ('<Auth(user="{self.user}", '
'private_key={self.private_key})>').format(self=self)
@property
def logged_in(self):
return self.user is not None
@property
def private_link(self):
if not self.private_key:
return None
try:
# Avoid circular import
from website.project.model import PrivateLink
private_link = PrivateLink.find_one(
Q('key', 'eq', self.private_key)
)
if private_link.is_deleted:
return None
except QueryException:
return None
return private_link
@classmethod
def from_kwargs(cls, request_args, kwargs):
user = request_args.get('user') or kwargs.get('user') or _get_current_user()
private_key = request_args.get('view_only')
return cls(
user=user,
private_key=private_key,
)
class User(GuidStoredObject, AddonModelMixin):
# Node fields that trigger an update to the search engine on save
SEARCH_UPDATE_FIELDS = {
'fullname',
'given_name',
'middle_names',
'family_name',
'suffix',
'merged_by',
'date_disabled',
'date_confirmed',
'jobs',
'schools',
'social',
}
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
SOCIAL_FIELDS = {
'orcid': u'http://orcid.org/{}',
'github': u'http://github.com/{}',
'scholar': u'http://scholar.google.com/citations?user={}',
'twitter': u'http://twitter.com/{}',
'profileWebsites': [],
'linkedIn': u'https://www.linkedin.com/{}',
'impactStory': u'https://impactstory.org/{}',
'researcherId': u'http://researcherid.com/rid/{}',
'researchGate': u'https://researchgate.net/profile/{}',
'academiaInstitution': u'https://{}',
'academiaProfileID': u'.academia.edu/{}',
'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}'
}
# This is a GuidStoredObject, so this will be a GUID.
_id = fields.StringField(primary=True)
# The primary email address for the account.
# This value is unique, but multiple "None" records exist for:
# * unregistered contributors where an email address was not provided.
# TODO: Update mailchimp subscription on username change in user.save()
username = fields.StringField(required=False, unique=True, index=True)
# Hashed. Use `User.set_password` and `User.check_password`
password = fields.StringField()
fullname = fields.StringField(required=True, validate=string_required)
# user has taken action to register the account
is_registered = fields.BooleanField(index=True)
# user has claimed the account
# TODO: This should be retired - it always reflects is_registered.
# While a few entries exist where this is not the case, they appear to be
# the result of a bug, as they were all created over a small time span.
is_claimed = fields.BooleanField(default=False, index=True)
# a list of strings - for internal use
system_tags = fields.StringField(list=True)
# security emails that have been sent
# TODO: This should be removed and/or merged with system_tags
security_messages = fields.DictionaryField()
# Format: {
# <message label>: <datetime>
# ...
# }
# user was invited (as opposed to registered unprompted)
is_invited = fields.BooleanField(default=False, index=True)
# Per-project unclaimed user data:
# TODO: add validation
unclaimed_records = fields.DictionaryField(required=False)
# Format: {
# <project_id>: {
# 'name': <name that referrer provided>,
# 'referrer_id': <user ID of referrer>,
# 'token': <token used for verification urls>,
# 'email': <email the referrer provided or None>,
# 'claimer_email': <email the claimer entered or None>,
# 'last_sent': <timestamp of last email sent to referrer or None>
# }
# ...
# }
# Time of last sent notification email to newly added contributors
# Format : {
# <project_id>: {
# 'last_sent': time.time()
# }
# ...
# }
contributor_added_email_records = fields.DictionaryField(default=dict)
# The user into which this account was merged
merged_by = fields.ForeignField('user', default=None, index=True)
# verification key used for resetting password
verification_key = fields.StringField()
email_last_sent = fields.DateTimeField()
# confirmed emails
# emails should be stripped of whitespace and lower-cased before appending
# TODO: Add validator to ensure an email address only exists once across
# all User's email lists
emails = fields.StringField(list=True)
# email verification tokens
# see also ``unconfirmed_emails``
email_verifications = fields.DictionaryField(default=dict)
# Format: {
# <token> : {'email': <email address>,
# 'expiration': <datetime>}
# }
# TODO remove this field once migration (scripts/migration/migrate_mailing_lists_to_mailchimp_fields.py)
# has been run. This field is deprecated and replaced with mailchimp_mailing_lists
mailing_lists = fields.DictionaryField()
# email lists to which the user has chosen a subscription setting
mailchimp_mailing_lists = fields.DictionaryField()
# Format: {
# 'list1': True,
    #   'list2': False,
# ...
# }
# email lists to which the user has chosen a subscription setting, being sent from osf, rather than mailchimp
osf_mailing_lists = fields.DictionaryField(default=lambda: {settings.OSF_HELP_LIST: True})
# Format: {
# 'list1': True,
    #   'list2': False,
# ...
# }
# the date this user was registered
# TODO: consider removal - this can be derived from date_registered
date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow,
index=True)
# watched nodes are stored via a list of WatchConfigs
watched = fields.ForeignField('WatchConfig', list=True)
# list of collaborators that this user recently added to nodes as a contributor
recently_added = fields.ForeignField('user', list=True)
# Attached external accounts (OAuth)
external_accounts = fields.ForeignField('externalaccount', list=True)
# CSL names
given_name = fields.StringField()
middle_names = fields.StringField()
family_name = fields.StringField()
suffix = fields.StringField()
# Employment history
jobs = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'title': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
    #     'ongoing': <boolean>
# }
# Educational history
schools = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'degree': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
    #     'ongoing': <boolean>
# }
# Social links
social = fields.DictionaryField(validate=validate_social)
# Format: {
# 'profileWebsites': <list of profile websites>
# 'twitter': <twitter id>,
# }
# date the user last sent a request
date_last_login = fields.DateTimeField()
# date the user first successfully confirmed an email address
date_confirmed = fields.DateTimeField(index=True)
# When the user was disabled.
date_disabled = fields.DateTimeField(index=True)
# when comments were last viewed
comments_viewed_timestamp = fields.DictionaryField()
# Format: {
# 'Comment.root_target._id': 'timestamp',
# ...
# }
# timezone for user's locale (e.g. 'America/New_York')
timezone = fields.StringField(default='Etc/UTC')
# user language and locale data (e.g. 'en_US')
locale = fields.StringField(default='en_US')
# whether the user has requested to deactivate their account
requested_deactivation = fields.BooleanField(default=False)
# dictionary of projects a user has changed the setting on
notifications_configured = fields.DictionaryField()
# Format: {
# <node.id>: True
# ...
# }
# If this user was created through the API,
# keep track of who added them.
registered_by = fields.ForeignField('user', default=None, index=True)
_meta = {'optimistic': True}
def __repr__(self):
return '<User({0!r}) with id {1!r}>'.format(self.username, self._id)
def __str__(self):
return self.fullname.encode('ascii', 'replace')
__unicode__ = __str__
# For compatibility with Django auth
@property
def pk(self):
return self._id
@property
def email(self):
return self.username
def is_authenticated(self): # Needed for django compat
return True
def is_anonymous(self):
return False
@property
def absolute_api_v2_url(self):
from website import util
return util.api_v2_url('users/{}/'.format(self.pk))
# used by django and DRF
def get_absolute_url(self):
if not self.is_registered:
return None
return self.absolute_api_v2_url
@classmethod
def create_unregistered(cls, fullname, email=None):
"""Create a new unregistered user.
"""
user = cls(
username=email,
fullname=fullname,
is_invited=True,
is_registered=False,
)
user.update_guessed_names()
return user
@classmethod
def create(cls, username, password, fullname):
user = cls(
username=username,
fullname=fullname,
)
user.update_guessed_names()
user.set_password(password)
return user
@classmethod
def create_unconfirmed(cls, username, password, fullname, do_confirm=True,
campaign=None):
"""Create a new user who has begun registration but needs to verify
their primary email address (username).
"""
user = cls.create(username, password, fullname)
user.add_unconfirmed_email(username)
user.is_registered = False
if campaign:
            # needed to prevent circular import
from framework.auth.campaigns import system_tag_for_campaign # skipci
user.system_tags.append(system_tag_for_campaign(campaign))
return user
@classmethod
def create_confirmed(cls, username, password, fullname):
user = cls.create(username, password, fullname)
user.is_registered = True
user.is_claimed = True
user.date_confirmed = user.date_registered
user.emails.append(username)
return user
@classmethod
def from_cookie(cls, cookie, secret=None):
"""Attempt to load a user from their signed cookie
:returns: None if a user cannot be loaded else User
"""
if not cookie:
return None
secret = secret or settings.SECRET_KEY
try:
token = itsdangerous.Signer(secret).unsign(cookie)
except itsdangerous.BadSignature:
return None
user_session = Session.load(token)
if user_session is None:
return None
return cls.load(user_session.data.get('auth_user_id'))
def get_or_create_cookie(self, secret=None):
"""Find the cookie for the given user
Create a new session if no cookie is found
:param str secret: The key to sign the cookie with
:returns: The signed cookie
"""
secret = secret or settings.SECRET_KEY
sessions = Session.find(
Q('data.auth_user_id', 'eq', self._id)
).sort(
'-date_modified'
).limit(1)
if sessions.count() > 0:
user_session = sessions[0]
else:
user_session = Session(data={
'auth_user_id': self._id,
'auth_user_username': self.username,
'auth_user_fullname': self.fullname,
})
user_session.save()
signer = itsdangerous.Signer(secret)
return signer.sign(user_session._id)
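    # Illustrative round trip of the signing used above (secret made up;
    # the unsign step mirrors from_cookie()):
    #   signer = itsdangerous.Signer('some-secret')
    #   cookie = signer.sign(user_session._id)
    #   assert signer.unsign(cookie) == user_session._id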
def update_guessed_names(self):
"""Updates the CSL name fields inferred from the the full name.
"""
parsed = utils.impute_names(self.fullname)
self.given_name = parsed['given']
self.middle_names = parsed['middle']
self.family_name = parsed['family']
self.suffix = parsed['suffix']
def register(self, username, password=None):
"""Registers the user.
"""
self.username = username
if password:
self.set_password(password)
if username not in self.emails:
self.emails.append(username)
self.is_registered = True
self.is_claimed = True
self.date_confirmed = dt.datetime.utcnow()
self.update_search()
self.update_search_nodes()
from website.project import new_public_files_collection # Avoids circular import
new_public_files_collection(self)
# Emit signal that a user has confirmed
signals.user_confirmed.send(self)
return self
def add_unclaimed_record(self, node, referrer, given_name, email=None):
"""Add a new project entry in the unclaimed records dictionary.
:param Node node: Node this unclaimed user was added to.
:param User referrer: User who referred this user.
:param str given_name: The full name that the referrer gave for this user.
:param str email: The given email address.
:returns: The added record
"""
if not node.can_edit(user=referrer):
raise PermissionsError('Referrer does not have permission to add a contributor '
'to project {0}'.format(node._primary_key))
project_id = node._primary_key
referrer_id = referrer._primary_key
if email:
clean_email = email.lower().strip()
else:
clean_email = None
record = {
'name': given_name,
'referrer_id': referrer_id,
'token': generate_confirm_token(),
'email': clean_email
}
self.unclaimed_records[project_id] = record
return record
def display_full_name(self, node=None):
"""Return the full name , as it would display in a contributor list for a
given node.
NOTE: Unclaimed users may have a different name for different nodes.
"""
if node:
unclaimed_data = self.unclaimed_records.get(node._primary_key, None)
if unclaimed_data:
return unclaimed_data['name']
return self.fullname
@property
def is_active(self):
"""Returns True if the user is active. The user must have activated
their account, must not be deleted, suspended, etc.
:return: bool
"""
return (self.is_registered and
self.password is not None and
not self.is_merged and
not self.is_disabled and
self.is_confirmed)
def get_unclaimed_record(self, project_id):
"""Get an unclaimed record for a given project_id.
:raises: ValueError if there is no record for the given project.
"""
try:
return self.unclaimed_records[project_id]
except KeyError: # reraise as ValueError
raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
.format(**locals()))
def get_claim_url(self, project_id, external=False):
"""Return the URL that an unclaimed user should use to claim their
account. Return ``None`` if there is no unclaimed_record for the given
project ID.
:param project_id: The project ID for the unclaimed record
:raises: ValueError if a record doesn't exist for the given project ID
:rtype: dict
:returns: The unclaimed record for the project
"""
uid = self._primary_key
base_url = settings.DOMAIN if external else '/'
unclaimed_record = self.get_unclaimed_record(project_id)
token = unclaimed_record['token']
return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
.format(**locals())
def set_password(self, raw_password, notify=True):
"""Set the password for this user to the hash of ``raw_password``.
If this is a new user, we're done. If this is a password change,
then email the user about the change and clear all the old sessions
so that users will have to log in again with the new password.
:param raw_password: the plaintext value of the new password
:param notify: Only meant for unit tests to keep extra notifications from being sent
:rtype: list
:returns: Changed fields from the user save
"""
        had_existing_password = bool(self.password)
        # Validate before hashing so a rejected password is never stored.
        if self.username == raw_password:
            raise ChangePasswordError(['Password cannot be the same as your email address'])
        self.password = generate_password_hash(raw_password)
if had_existing_password and notify:
mails.send_mail(
to_addr=self.username,
mail=mails.PASSWORD_RESET,
mimetype='plain',
user=self
)
remove_sessions_for_user(self)
def check_password(self, raw_password):
"""Return a boolean of whether ``raw_password`` was correct."""
if not self.password or not raw_password:
return False
return check_password_hash(self.password, raw_password)
@property
def csl_given_name(self):
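        # e.g. given_name='Robert', middle_names='Hugh Paul' -> 'Robert H P'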
parts = [self.given_name]
if self.middle_names:
parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
return ' '.join(parts)
@property
def csl_name(self):
return {
'family': self.family_name,
'given': self.csl_given_name,
}
@property
def public_files_node(self):
from website.project.model import Node # avoids import error
return Node.find_one(Q('is_public_files_collection', 'eq', True) & Q('contributors', 'eq', self._id))
@property
def created(self):
from website.project.model import Node
return Node.find(Q('creator', 'eq', self._id))
# TODO: This should not be on the User object.
def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
"""Change the password for this user to the hash of ``raw_new_password``."""
raw_old_password = (raw_old_password or '').strip()
raw_new_password = (raw_new_password or '').strip()
raw_confirm_password = (raw_confirm_password or '').strip()
# TODO: Move validation to set_password
issues = []
if not self.check_password(raw_old_password):
issues.append('Old password is invalid')
elif raw_old_password == raw_new_password:
issues.append('Password cannot be the same')
elif raw_new_password == self.username:
issues.append('Password cannot be the same as your email address')
if not raw_old_password or not raw_new_password or not raw_confirm_password:
issues.append('Passwords cannot be blank')
elif len(raw_new_password) < 6:
issues.append('Password should be at least six characters')
elif len(raw_new_password) > 256:
issues.append('Password should not be longer than 256 characters')
if raw_new_password != raw_confirm_password:
issues.append('Password does not match the confirmation')
if issues:
raise ChangePasswordError(issues)
self.set_password(raw_new_password)
def _set_email_token_expiration(self, token, expiration=None):
"""Set the expiration date for given email token.
:param str token: The email token to set the expiration for.
:param datetime expiration: Datetime at which to expire the token. If ``None``, the
token will expire after ``settings.EMAIL_TOKEN_EXPIRATION`` hours. This is only
used for testing purposes.
"""
expiration = expiration or (dt.datetime.utcnow() + dt.timedelta(hours=settings.EMAIL_TOKEN_EXPIRATION))
self.email_verifications[token]['expiration'] = expiration
return expiration
def add_unconfirmed_email(self, email, expiration=None):
"""Add an email verification token for a given email."""
# TODO: This is technically not compliant with RFC 822, which requires
# that case be preserved in the "local-part" of an address. From
# a practical standpoint, the vast majority of email servers do
# not preserve case.
# ref: https://tools.ietf.org/html/rfc822#section-6
email = email.lower().strip()
if email in self.emails:
raise ValueError('Email already confirmed to this user.')
utils.validate_email(email)
# If the unconfirmed email is already present, refresh the token
if email in self.unconfirmed_emails:
self.remove_unconfirmed_email(email)
token = generate_confirm_token()
# handle when email_verifications is None
if not self.email_verifications:
self.email_verifications = {}
# confirmed used to check if link has been clicked
self.email_verifications[token] = {'email': email,
'confirmed': False}
self._set_email_token_expiration(token, expiration=expiration)
return token
def remove_unconfirmed_email(self, email):
"""Remove an unconfirmed email addresses and their tokens."""
for token, value in self.email_verifications.iteritems():
if value.get('email') == email:
del self.email_verifications[token]
return True
return False
def remove_email(self, email):
"""Remove a confirmed email"""
if email == self.username:
raise PermissionsError("Can't remove primary email")
if email in self.emails:
self.emails.remove(email)
signals.user_email_removed.send(self, email=email)
@signals.user_email_removed.connect
def _send_email_removal_confirmations(self, email):
mails.send_mail(to_addr=self.username,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='alternate email address ({})'.format(email))
mails.send_mail(to_addr=email,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='primary email address ({})'.format(self.username))
def get_confirmation_token(self, email, force=False):
"""Return the confirmation token for a given email.
:param str email: Email to get the token for.
:param bool force: If an expired token exists for the given email, generate a new
token and return that token.
:raises: ExpiredTokenError if trying to access a token that is expired and force=False.
        :raises: KeyError if there is no token for the email.
"""
# TODO: Refactor "force" flag into User.get_or_add_confirmation_token
for token, info in self.email_verifications.items():
if info['email'].lower() == email.lower():
# Old records will not have an expiration key. If it's missing,
# assume the token is expired
expiration = info.get('expiration')
                if not expiration or expiration < dt.datetime.utcnow():
if not force:
raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
else:
new_token = self.add_unconfirmed_email(email)
self.save()
return new_token
return token
raise KeyError('No confirmation token for email "{0}"'.format(email))
def get_confirmation_url(self, email, external=True, force=False):
"""Return the confirmation url for a given email.
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: KeyError if there is no token for the email.
"""
base = settings.DOMAIN if external else '/'
token = self.get_confirmation_token(email, force=force)
return '{0}confirm/{1}/{2}/'.format(base, self._primary_key, token)
def get_unconfirmed_email_for_token(self, token):
"""Return email if valid.
:rtype: bool
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: InvalidTokenError if trying to access a token that is invalid.
"""
if token not in self.email_verifications:
raise InvalidTokenError
verification = self.email_verifications[token]
# Not all tokens are guaranteed to have expiration dates
if (
'expiration' in verification and
verification['expiration'] < dt.datetime.utcnow()
):
raise ExpiredTokenError
return verification['email']
def clean_email_verifications(self, given_token=None):
email_verifications = deepcopy(self.email_verifications or {})
for token in self.email_verifications or {}:
try:
self.get_unconfirmed_email_for_token(token)
except (KeyError, ExpiredTokenError):
email_verifications.pop(token)
continue
if token == given_token:
email_verifications.pop(token)
self.email_verifications = email_verifications
def verify_claim_token(self, token, project_id):
"""Return whether or not a claim token is valid for this user for
a given node which they were added as a unregistered contributor for.
"""
try:
record = self.get_unclaimed_record(project_id)
except ValueError: # No unclaimed record for given pid
return False
return record['token'] == token
def confirm_email(self, token, merge=False):
"""Confirm the email address associated with the token"""
email = self.get_unconfirmed_email_for_token(token)
# If this email is confirmed on another account, abort
try:
user_to_merge = User.find_one(Q('emails', 'iexact', email))
except NoResultsFound:
user_to_merge = None
if user_to_merge and merge:
self.merge_user(user_to_merge)
elif user_to_merge:
raise MergeConfirmedRequiredError(
'Merge requires confirmation',
user=self,
user_to_merge=user_to_merge,
)
# If another user has this email as its username, get it
try:
unregistered_user = User.find_one(Q('username', 'eq', email) &
Q('_id', 'ne', self._id))
except NoResultsFound:
unregistered_user = None
if unregistered_user:
self.merge_user(unregistered_user)
self.save()
unregistered_user.username = None
if email not in self.emails:
self.emails.append(email)
# Complete registration if primary email
if email.lower() == self.username.lower():
self.register(self.username)
self.date_confirmed = dt.datetime.utcnow()
# Revoke token
del self.email_verifications[token]
# TODO: We can't assume that all unclaimed records are now claimed.
# Clear unclaimed records, so user's name shows up correctly on
# all projects
self.unclaimed_records = {}
self.save()
self.update_search_nodes()
return True
@property
def unconfirmed_emails(self):
# Handle when email_verifications field is None
email_verifications = self.email_verifications or {}
return [
each['email']
for each
in email_verifications.values()
]
def update_search_nodes(self):
"""Call `update_search` on all nodes on which the user is a
contributor. Needed to add self to contributor lists in search upon
registration or claiming.
"""
for node in self.contributed:
node.update_search()
def update_search_nodes_contributors(self):
"""
Bulk update contributor name on all nodes on which the user is
a contributor.
:return:
"""
from website.search import search
search.update_contributors(self.visible_contributor_to)
def update_affiliated_institutions_by_email_domain(self):
"""
Append affiliated_institutions by email domain.
:return:
"""
# Avoid circular import
from website.project.model import Institution
try:
email_domains = [email.split('@')[1] for email in self.emails]
insts = Institution.find(Q('email_domains', 'in', email_domains))
for inst in insts:
if inst not in self.affiliated_institutions:
self.affiliated_institutions.append(inst)
except (IndexError, NoResultsFound):
pass
@property
def is_confirmed(self):
return bool(self.date_confirmed)
@property
def social_links(self):
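        # Maps stored handles through SOCIAL_FIELDS templates, e.g.
        # {'twitter': 'jane'} -> {'twitter': u'http://twitter.com/jane'};
        # list values such as profileWebsites pass through unchanged.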
social_user_fields = {}
for key, val in self.social.items():
if val and key in self.SOCIAL_FIELDS:
if not isinstance(val, basestring):
social_user_fields[key] = val
else:
social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val)
return social_user_fields
@property
def biblio_name(self):
        # middle_names may be empty or missing; join only the non-empty parts.
        given_names = ' '.join(part for part in (self.given_name, self.middle_names) if part)
surname = self.family_name
if surname != given_names:
initials = [
name[0].upper() + '.'
for name in given_names.split(' ')
if name and re.search(r'\w', name[0], re.I)
]
return u'{0}, {1}'.format(surname, ' '.join(initials))
return surname
@property
def given_name_initial(self):
"""
The user's preferred initialization of their given name.
Some users with common names may choose to distinguish themselves from
their colleagues in this way. For instance, there could be two
well-known researchers in a single field named "Robert Walker".
"Walker, R" could then refer to either of them. "Walker, R.H." could
provide easy disambiguation.
NOTE: The internal representation for this should never end with a
period. "R" and "R.H" would be correct in the prior case, but
"R.H." would not.
"""
return self.given_name[0]
@property
def url(self):
return '/{}/'.format(self._primary_key)
@property
def api_url(self):
return '/api/v1/profile/{0}/'.format(self._primary_key)
@property
def absolute_url(self):
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def deep_url(self):
return '/profile/{}/'.format(self._primary_key)
@property
def unconfirmed_email_info(self):
"""Return a list of dictionaries containing information about each of this
user's unconfirmed emails.
"""
unconfirmed_emails = []
        email_verifications = self.email_verifications or {}
for token in email_verifications:
if self.email_verifications[token].get('confirmed', False):
try:
user_merge = User.find_one(Q('emails', 'eq', self.email_verifications[token]['email'].lower()))
except NoResultsFound:
user_merge = False
unconfirmed_emails.append({'address': self.email_verifications[token]['email'],
'token': token,
'confirmed': self.email_verifications[token]['confirmed'],
'user_merge': user_merge.email if user_merge else False})
return unconfirmed_emails
def profile_image_url(self, size=None):
"""A generalized method for getting a user's profile picture urls.
We may choose to use some service other than gravatar in the future,
and should not commit ourselves to using a specific service (mostly
an API concern).
As long as we use gravatar, this is just a proxy to User.gravatar_url
"""
return self._gravatar_url(size)
def _gravatar_url(self, size):
return filters.gravatar(
self,
use_ssl=True,
size=size
)
def get_activity_points(self, db=None):
db = db or framework.mongo.database
return analytics.get_total_activity_count(self._primary_key, db=db)
def disable_account(self):
"""
        Disable the user account: set ``is_disabled`` to True and unsubscribe
        the user from mailchimp emails.
"""
from website import mailchimp_utils
try:
mailchimp_utils.unsubscribe_mailchimp(
list_name=settings.MAILCHIMP_GENERAL_LIST,
user_id=self._id,
username=self.username
)
except mailchimp_utils.mailchimp.ListNotSubscribedError:
pass
except mailchimp_utils.mailchimp.InvalidApiKeyError:
if not settings.ENABLE_EMAIL_SUBSCRIPTIONS:
pass
else:
raise
except mailchimp_utils.mailchimp.EmailNotExistsError:
pass
self.is_disabled = True
@property
def is_disabled(self):
"""Whether or not this account has been disabled.
Abstracts ``User.date_disabled``.
:return: bool
"""
return self.date_disabled is not None
@is_disabled.setter
def is_disabled(self, val):
"""Set whether or not this account has been disabled."""
if val and not self.date_disabled:
self.date_disabled = dt.datetime.utcnow()
elif val is False:
self.date_disabled = None
@property
def is_merged(self):
'''Whether or not this account has been merged into another account.
'''
return self.merged_by is not None
@property
def profile_url(self):
return '/{}/'.format(self._id)
@property
def contributed(self):
from website.project.model import Node
return Node.find(Q('contributors', 'eq', self._id))
@property
def contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True)
)
@property
def visible_contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True) &
Q('visible_contributor_ids', 'eq', self._id)
)
def get_summary(self, formatter='long'):
return {
'user_fullname': self.fullname,
'user_profile_url': self.profile_url,
'user_display_name': name_formatters[formatter](self),
'user_is_claimed': self.is_claimed
}
def save(self, *args, **kwargs):
# TODO: Update mailchimp subscription on username change
# Avoid circular import
self.username = self.username.lower().strip() if self.username else None
ret = super(User, self).save(*args, **kwargs)
if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed:
self.update_search()
self.update_search_nodes_contributors()
return ret
def update_search(self):
from website import search
try:
search.search.update_user(self)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
@classmethod
def find_by_email(cls, email):
try:
user = cls.find_one(
Q('emails', 'eq', email)
)
return [user]
except Exception:  # lookup failed; treat as no match
return []
def serialize(self, anonymous=False):
return {
'id': utils.privacy_info_handle(self._primary_key, anonymous),
'fullname': utils.privacy_info_handle(self.fullname, anonymous, name=True),
'registered': self.is_registered,
'url': utils.privacy_info_handle(self.url, anonymous),
'api_url': utils.privacy_info_handle(self.api_url, anonymous),
}
###### OSF-Specific methods ######
def watch(self, watch_config):
"""Watch a node by adding its WatchConfig to this user's ``watched``
list. Raises ``ValueError`` if the node is already watched.
:param watch_config: The WatchConfig to add.
"""
watched_nodes = [each.node for each in self.watched]
if watch_config.node in watched_nodes:
raise ValueError('Node is already being watched.')
watch_config.save()
self.watched.append(watch_config)
return None
def unwatch(self, watch_config):
"""Unwatch a node by removing its WatchConfig from this user's ``watched``
list. Raises ``ValueError`` if the node is not already being watched.
:param watch_config: The WatchConfig to remove.
"""
for each in self.watched:
if watch_config.node._id == each.node._id:
from framework.transactions.context import TokuTransaction # Avoid circular import
with TokuTransaction():
# Ensure that both sides of the relationship are removed
each.__class__.remove_one(each)
self.watched.remove(each)
self.save()
return None
raise ValueError('Node not being watched.')
def is_watching(self, node):
'''Return whether or not a user is watching a Node.'''
watched_node_ids = set([config.node._id for config in self.watched])
return node._id in watched_node_ids
def get_recent_log_ids(self, since=None):
'''Return a generator of recent logs' ids.
:param since: A datetime specifying the oldest time to retrieve logs
from. If ``None``, defaults to 60 days before today. Must be a tz-aware
datetime because PyMongo's generation times are tz-aware.
:rtype: generator of log ids (strings)
'''
log_ids = []
# Default since to 60 days before today if since is None
# timezone aware utcnow
utcnow = dt.datetime.utcnow().replace(tzinfo=pytz.utc)
since_date = since or (utcnow - dt.timedelta(days=60))
for config in self.watched:
# Extract the timestamps for each log from the log_id (fast!)
# The first 4 bytes of Mongo's ObjectId encodes time
# This prevents having to load each Log Object and access their
# date fields
node_log_ids = [log.pk for log in config.node.logs
if bson.ObjectId(log.pk).generation_time > since_date and
log.pk not in log_ids]
# Log ids in reverse chronological order
log_ids = _merge_into_reversed(log_ids, node_log_ids)
return (l_id for l_id in log_ids)
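# Sketch of the ObjectId trick used above (illustrative; bson is the
# module already used in this method, and generation_time is a tz-aware
# UTC datetime, so it compares directly against ``since_date``):
#
#   >>> oid = bson.ObjectId()
#   >>> oid.generation_time.tzinfo is not None
#   True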
def get_daily_digest_log_ids(self):
'''Return a generator of log ids generated in the past day
(starting at UTC 00:00).
'''
utcnow = dt.datetime.utcnow()
midnight = dt.datetime(
utcnow.year, utcnow.month, utcnow.day,
0, 0, 0, tzinfo=pytz.utc
)
return self.get_recent_log_ids(since=midnight)
@property
def can_be_merged(self):
"""The ability of the `merge_user` method to fully merge the user"""
return all((addon.can_be_merged for addon in self.get_addons()))
def merge_user(self, user):
"""Merge a registered user into this account. This user will be
a contributor on any project. if the registered user and this account
are both contributors of the same project. Then it will remove the
registered user and set this account to the highest permission of the two
and set this account to be visible if either of the two are visible on
the project.
:param user: A User object to be merged.
"""
# Fail if the other user has conflicts.
if not user.can_be_merged:
raise MergeConflictError('Users cannot be merged')
# Move over the other user's attributes
# TODO: confirm
for system_tag in user.system_tags:
if system_tag not in self.system_tags:
self.system_tags.append(system_tag)
self.is_claimed = self.is_claimed or user.is_claimed
self.is_invited = self.is_invited or user.is_invited
# copy over profile only if this user has no profile info
if user.jobs and not self.jobs:
self.jobs = user.jobs
if user.schools and not self.schools:
self.schools = user.schools
if user.social and not self.social:
self.social = user.social
unclaimed = user.unclaimed_records.copy()
unclaimed.update(self.unclaimed_records)
self.unclaimed_records = unclaimed
# - unclaimed records should be connected to only one user
user.unclaimed_records = {}
security_messages = user.security_messages.copy()
security_messages.update(self.security_messages)
self.security_messages = security_messages
notifications_configured = user.notifications_configured.copy()
notifications_configured.update(self.notifications_configured)
self.notifications_configured = notifications_configured
for key, value in user.mailchimp_mailing_lists.iteritems():
# subscribe to each list if either user was subscribed
subscription = value or self.mailchimp_mailing_lists.get(key)
signals.user_merged.send(self, list_name=key, subscription=subscription)
# clear subscriptions for merged user
signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)
for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
if not self.comments_viewed_timestamp.get(target_id):
self.comments_viewed_timestamp[target_id] = timestamp
elif timestamp > self.comments_viewed_timestamp[target_id]:
self.comments_viewed_timestamp[target_id] = timestamp
self.emails.extend(user.emails)
user.emails = []
for k, v in user.email_verifications.iteritems():
email_to_confirm = v['email']
if k not in self.email_verifications and email_to_confirm != user.username:
self.email_verifications[k] = v
user.email_verifications = {}
for institution in user.affiliated_institutions:
self.affiliated_institutions.append(institution)
user._affiliated_institutions = []
# FOREIGN FIELDS
for watched in user.watched:
if watched not in self.watched:
self.watched.append(watched)
user.watched = []
for account in user.external_accounts:
if account not in self.external_accounts:
self.external_accounts.append(account)
user.external_accounts = []
# - addons
# Note: This must occur before the merged user is removed as a
# contributor on the nodes, as an event hook is otherwise fired
# which removes the credentials.
for addon in user.get_addons():
user_settings = self.get_or_add_addon(addon.config.short_name)
user_settings.merge(addon)
user_settings.save()
# Disconnect signal to prevent emails being sent about being a new contributor when merging users
# be sure to reconnect it at the end of this code block. Import done here to prevent circular import error.
from website.addons.osfstorage.listeners import checkin_files_by_user
from website.project.signals import contributor_added, contributor_removed
from website.project.views.contributor import notify_added_contributor
from website.util import disconnected_from
# - projects where the user was a contributor
with disconnected_from(signal=contributor_added, listener=notify_added_contributor):
for node in user.contributed:
# Skip bookmark collection node
if node.is_bookmark_collection:
continue
if node.is_public_files_collection:
node.merge_public_files(self.public_files_node)
continue
# if both accounts are contributor of the same project
if node.is_contributor(self) and node.is_contributor(user):
if node.permissions[user._id] > node.permissions[self._id]:
permissions = node.permissions[user._id]
else:
permissions = node.permissions[self._id]
node.set_permissions(user=self, permissions=permissions)
visible1 = self._id in node.visible_contributor_ids
visible2 = user._id in node.visible_contributor_ids
if visible1 != visible2:
node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
else:
node.add_contributor(
contributor=self,
permissions=node.get_permissions(user),
visible=node.get_visible(user),
log=False,
)
with disconnected_from(signal=contributor_removed, listener=checkin_files_by_user):
try:
node.remove_contributor(
contributor=user,
auth=Auth(user=self),
log=False,
)
except ValueError:
logger.error('Contributor {0} not in list on node {1}'.format(
user._id, node._id
))
node.save()
# - projects where the user was the creator
for node in user.created:
node.creator = self
node.save()
# - file that the user has checked_out, import done here to prevent import error
from website.files.models.base import FileNode
for file_node in FileNode.files_checked_out(user=user):
file_node.checkout = self
file_node.save()
# finalize the merge
remove_sessions_for_user(user)
# - username is set to None so the resultant user can set it primary
# in the future.
user.username = None
user.password = None
user.verification_key = None
user.osf_mailing_lists = {}
user.merged_by = self
user.save()
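# Permission-merge sketch (illustrative): node.permissions values are
# assumed to be ordered permission lists, so the list comparison in the
# merge loop above selects the more privileged of the two:
#
#   >>> max(['read'], ['read', 'write'])
#   ['read', 'write']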
def get_projects_in_common(self, other_user, primary_keys=True):
"""Returns either a collection of "shared projects" (projects that both users are contributors for)
or just their primary keys
"""
if primary_keys:
projects_contributed_to = set(self.contributed.get_keys())
other_projects_primary_keys = set(other_user.contributed.get_keys())
return projects_contributed_to.intersection(other_projects_primary_keys)
else:
projects_contributed_to = set(self.contributed)
return projects_contributed_to.intersection(other_user.contributed)
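# With primary_keys=True this reduces to a plain set intersection of
# project ids (illustrative values):
#
#   >>> set(['abc12', 'def34']) & set(['def34', 'xyz99'])
#   set(['def34'])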
def n_projects_in_common(self, other_user):
"""Returns number of "shared projects" (projects that both users are contributors for)"""
return len(self.get_projects_in_common(other_user, primary_keys=True))
def is_affiliated_with_institution(self, inst):
return inst in self.affiliated_institutions
def remove_institution(self, inst_id):
removed = False
for inst in self.affiliated_institutions:
if inst._id == inst_id:
self.affiliated_institutions.remove(inst)
removed = True
return removed
_affiliated_institutions = fields.ForeignField('node', list=True)
@property
def affiliated_institutions(self):
from website.institutions.model import Institution, AffiliatedInstitutionsList
return AffiliatedInstitutionsList([Institution(inst) for inst in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions')
def get_node_comment_timestamps(self, target_id):
""" Returns the timestamp for when comments were last viewed on a node, file or wiki.
"""
default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0)
return self.comments_viewed_timestamp.get(target_id, default_timestamp)
def _merge_into_reversed(*iterables):
'''Merge multiple sorted inputs into a single output in reverse order.
'''
return sorted(itertools.chain(*iterables), reverse=True)
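# Usage sketch for the helper above: merging per-node id lists into one
# reverse-chronological stream, as get_recent_log_ids() does:
#
#   >>> _merge_into_reversed([5, 3, 1], [4, 2])
#   [5, 4, 3, 2, 1]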
|
SSJohns/osf.io
|
framework/auth/core.py
|
Python
|
apache-2.0
| 56,122
|
#########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import uno
import unohelper  # required by unohelper.Base below
from com.sun.star.task import XJobExecutor
if __name__ != 'package':
from lib.gui import *
class About(unohelper.Base, XJobExecutor):
def __init__(self, ctx):
self.ctx = ctx
self.module = "openerp_report"
self.version = "0.1"
self.win = DBModalDialog(60, 50, 175, 115, "About OpenERP Report Designer")
fdBigFont = createUnoStruct("com.sun.star.awt.FontDescriptor")
fdBigFont.Width = 20
fdBigFont.Height = 25
fdBigFont.Weight = 120
fdBigFont.Family= 3
oLabelTitle1 = self.win.addFixedText("lblTitle1", 1, 1, 35, 30)
oLabelTitle1.Model.TextColor = 16056320
oLabelTitle1.Model.FontDescriptor = fdBigFont
oLabelTitle1.Model.FontRelief = 1
oLabelTitle1.Text = "Open"
oLabelTitle2 = self.win.addFixedText("lblTitle2", 35, 1, 30, 30)
oLabelTitle2.Model.TextColor = 1
oLabelTitle2.Model.FontDescriptor = fdBigFont
oLabelTitle2.Model.FontRelief = 1
oLabelTitle2.Text = "ERP"
oLabelProdDesc = self.win.addFixedText("lblProdDesc", 1, 30, 173, 75)
oLabelProdDesc.Model.TextColor = 1
fdBigFont.Width = 10
fdBigFont.Height = 11
fdBigFont.Weight = 76
oLabelProdDesc.Model.FontDescriptor = fdBigFont
oLabelProdDesc.Model.Align = 1
oLabelProdDesc.Model.FontRelief = 1
oLabelProdDesc.Model.MultiLine = True
oLabelProdDesc.Text = "This package helps you to create or modify\nreports in OpenERP. Once connected to the\nserver, you can design your template of reports\nusing fields and expressions and browsing the\ncomplete structure of OpenERP object database."
oLabelFooter = self.win.addFixedText("lblFooter", -1, -1, 173, 25)
oLabelFooter.Model.TextColor = 255
#oLabelFooter.Model.BackgroundColor = 1
oLabelFooter.Model.Border = 2
oLabelFooter.Model.BorderColor = 255
fdBigFont.Width = 8
fdBigFont.Height = 9
fdBigFont.Weight = 100
oLabelFooter.Model.FontDescriptor = fdBigFont
oLabelFooter.Model.Align = 1
oLabelFooter.Model.FontRelief = 1
oLabelFooter.Model.MultiLine = True
sMessage = "OpenERP Report Designer v1.0 \nCopyright 2007-TODAY Tiny sprl \nThis product is free software, under the GNU Affero General Public License."
oLabelFooter.Text = sMessage
self.win.doModalDialog("",None)
if __name__<>"package" and __name__=="__main__":
About(None)
elif __name__=="package":
g_ImplementationHelper.addImplementation( About, "org.openoffice.openerp.report.about", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Johnzero/OE7
|
openerp/addons-modules/base_report_designer/plugin/openerp_report_designer/bin/script/About.py
|
Python
|
agpl-3.0
| 3,821
|
#!/usr/bin/kivy
# -*- coding: utf-8 -*-
import os
from kivy.app import App
from kivy.properties import NumericProperty, StringProperty, BooleanProperty, ListProperty, ObjectProperty
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.core.window import Window
from kivy.metrics import dp
from kivy.animation import Animation
from kivy.event import EventDispatcher
from kivy.uix.togglebutton import ToggleButton
from kivy.clock import Clock
from kivy.core.clipboard import Clipboard
from kivy import platform
import re
from collections import deque
import sqlite3
#import numpy as np # Needed for pyxdameraulevenshtein
#from pyxdameraulevenshtein import normalized_damerau_levenshtein_distance_withNPArray # Add module TODO
from difflib import SequenceMatcher
import learnkanji_k_alg
#from __future__ import unicode_literals # TODO test if works
#font_kanji = os.path.join('data', 'fonts', 'TakaoPMincho.ttf')
from kivy.core.text import LabelBase
KIVY_FONTS = [
{
"name": "TakaoPMincho",
"fn_regular": os.path.join('data', 'fonts', 'TakaoPMincho_transformed_full.ttf'),
"fn_bold": os.path.join('data', 'fonts', 'TakaoPMincho_transformed_full_bold2.ttf')
}
]
for font in KIVY_FONTS:
LabelBase.register(**font)
class SKanjiToggleButton(ToggleButton):
lfunc = ObjectProperty(None)
def on_lfunc(self, obj, lfunc):
if lfunc:
self.font_name = lfunc.font_kanji
#self.text = self.text
class AnswerTextInput(TextInput):
#focus = BooleanProperty(False)
# redefine insert_text
#def insert_text(self, substring, from_undo=False):
# return super(AnswerTextInput, self).insert_text(substring, from_undo=from_undo)
def resettext(self, next_kanji):
if next_kanji:
self.text = ""
self.focus = True
def _on_focus(self, instance, value, *largs):
super(AnswerTextInput, self)._on_focus(instance, value, *largs)
print("TextInput focused: {}".format(value))
# Probably goes wrong with virtual keyboard
# if platform != 'android' and platform != 'ios':
# if not value:
# Clock.schedule_once(lambda dt: self.focus_on())
# keyb_height = Window.keyboard_height
# #print(App.get_running_app().root.ids.story_box.height)
# #App.get_running_app().root.ids.story_box.height = keyb_height
# def focus_on(self):
# print("focus set to True")
# self.focus = True
# Handles everything related to shown Kanji
class MasterKanji(EventDispatcher):
cur_framenum = NumericProperty()
cur_framekanji = StringProperty()
current = StringProperty()
cur_answer = ListProperty()
story = StringProperty()
story_show = BooleanProperty(False)
fix_answer = StringProperty("")
def __init__(self, **kwargs):
super(MasterKanji, self).__init__(**kwargs)
self.db_name = os.path.join("data", "db", "Kanji-story.db") # path from main.py
self.alg = learnkanji_k_alg.LearnAlg()
self.alg_count = learnkanji_k_alg.LearnCount()
self.cur_framekanji = self.dbcurrent("framekanji")
#print("Init with current framekanji: {}".format(self.cur_framekanji))
self.cur_framenum, self.cur_answer, self.story = self.dbkanji_info()
print(self.cur_answer)
if self.cur_framekanji == '-1' or self.cur_framekanji == '0':
print("-1 or 0: {}".format(self.cur_framekanji))
self.cur_framekanji = self.dbspecial(self.cur_framekanji)
self.upcoming = deque() # deque(self.alg.retrieveKanji()) gives error
self.story_hidden = "The answer is hidden, please provide an answer in the text-bar above and press 'check'." \
"\nYou cannot advance to the next Kanji until you have typed the right response."
self.radicals_list = []
self.radicalDict()
self.sKanji_list = []
self.sKanjiDict()
print("--- INIT MasterKanji COMPLETE ---\n")
def dbspecial(self, item):
print("Trying to connect to DB table Kanji (special)...")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
print("DB connected with special: {}".format(item))
c.execute("SELECT character FROM Kanji WHERE framenum = ?", (item,))
result = c.fetchone()
#print(result[0])
resulty = result[0]
# To prevent UnicodeEncodeError: 'ascii'
if item == '0':
resulty = resulty.encode('utf-8')
# Close connection
conn.close()
print("DB current connection closed")
return(resulty)
def dbcurrent(self, col):
# Connect Database
print("Trying to connect to DB table current...")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
print("DB connected with column: {}".format(col))
c.execute("SELECT {} FROM current".format(col))
result = c.fetchone()
print(result)
resulty = result[0]
# To prevent UnicodeEncodeError: 'ascii'
if col == "framekanji":
resulty = resulty.encode('utf-8')
#print(resulty)
# Close connection
conn.close()
print("DB current connection closed")
return(resulty)
def dbkanji_info(self, item=''):
# If no framekanji is given, use current framekanji
if item == '':
item = self.cur_framekanji
item = item.decode('utf-8')
# Connect Database
print("Trying to connect to DB table Kanji (info)...")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
#print("DB connected with item: {}".format(item.encode('utf-8')))
if item == '-1' or item == '0':
c.execute("SELECT framenum, meanings, story FROM Kanji WHERE framenum = ?", (item,))
else:
c.execute("SELECT framenum, meanings, story FROM Kanji WHERE character = ?", (item,))
result = c.fetchall()
returny = list(result[0])
returny[1] = result[0][1].split('/')
print(returny)
# Replace | for / in meanings
returny[1] = [x.replace('|', '/') for x in returny[1]]
# Close connection
conn.close()
print("DB current connection closed")
return(returny)
def dbupdate(self, nxt_cur):
# Connect Database
print("Trying to connect to DB table Kanji (update)...")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
print("DB connected with:")
#print(self.cur_framenum, self.cur_framekanji.decode('utf-8'), nxt_cur)
# Finish and tutorial
if self.cur_framenum == -1 or self.cur_framenum == 0:
update_framekanji = str(self.cur_framenum)
else:
update_framekanji = self.cur_framekanji
c.execute("UPDATE current SET framenum = ?, framekanji = ?, nextKanji = ?",
(self.cur_framenum, update_framekanji.decode('utf-8'), nxt_cur))
# Close connection
conn.commit()
conn.close()
print("DB current connection closed")
# Handles database
# def conndb(self, tabl, action, item, req=""):
# # tabl = Table, action = SELECT/UPDATE, item = what should be selected/*
#
# # Connect Database
# print("Trying to connect to DB")
# conn = sqlite3.connect(self.db_name)
# c = conn.cursor()
# print("DB connected with Table: {}, Action: {} and item: {}".format(tabl, action, item))
#
# if action == "SELECT":
# c.execute("{} {} FROM {}{}".format(action, item, tabl, req))
# result = c.fetchall()
# print(result)
# if item != "nextKanji" and item != "framekanji":
# print("I have no nextKanji or framekanji, because item = {}".format(item))
# if tabl == "Kanji":
# returny = result[0]
# #c.execute("UPDATE current SET framenum = {}".format(req[-1])) # Uncomment when should not start love
# else:
# c.execute("SELECT character, meanings, story FROM KANJI WHERE character={}".format(result[0][0]))
# returny = c.fetchone()
# returny = list(returny)
# answers = returny[1].split('/')
# # Makes the answers lower case
# returny[1] = answers #[x.lower() for x in answers]
# returny[0] = returny[0].encode('utf-8') # TODO fix this for Kanji Koohii (maybe fixed)
# else:
# print("I do have {}".format(item))
# returny = result[0][0]
# print(returny)
#
# else:
# c.execute("{} {} SET {}".format(action, tabl, req))
#
# # Save change to database
# conn.commit()
#
# # Close connection
# conn.close()
# print("DB connection closed")
#
# if action == "SELECT":
# return returny
# #return #list
# Creates list for radicals of current Kanji
def radicalDict(self):
# Connect Database
print("\nTrying to connect to DB with table Radical and RadicalMeaning")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
c.execute("SELECT radicals FROM Radical WHERE framekanji = ?", (self.cur_framekanji.decode('utf-8'),))
result = c.fetchone()
if result:
print(result)
# Split radical
r_list = result[0].split('/')
self.radicals_list = [x.encode('utf-8') for x in r_list]
#radical_num = result[0].split('/')
#print(radical_num)
# Get referred radicals
# for rnum in radical_num:
# c.execute("SELECT radical FROM RadicalMeaning WHERE number = ?", (rnum,))
# result = c.fetchone()
# print(result)
# self.radicals_list.append(result[0]) # .encode('utf-8')
print(self.radicals_list)
else:
print("No radicals")
self.radicals_list = []
# Creates list for small Kanji of current Kanji
def sKanjiDict(self):
# Connect Database
print("\nTrying to connect to DB with table sKanji")
conn = sqlite3.connect(self.db_name)
c = conn.cursor()
c.execute("SELECT smallKanji FROM sKanji WHERE framekanji = ?", (self.cur_framekanji.decode('utf-8'),))
result = c.fetchone()
if result:
print(result)
# Split number reference to Kanji to a list
s_list = result[0].split('/')
self.sKanji_list = [x.encode('utf-8') for x in s_list]
print(self.sKanji_list)
# Get referred Kanji
# for snum in sKanji_num:
# # Get highest Kanji shown to not have small Kanji higher than so far shown.
# c.execute("SELECT MAX(framenum) FROM learnAlg")
# max = c.fetchone()
# print("Max framenum seen: {}".format(max))
#
# c.execute("SELECT character FROM Kanji WHERE framenum = ? AND framenum <= ?", (snum, max[0]))
# result = c.fetchone()
# print(result)
# if result:
# self.sKanji_list.append(result[0].encode('utf-8'))
#
# print(self.sKanji_list)
else:
print("No small Kanji")
self.sKanji_list = []
# Next Kanji
def nextkanji(self):
# Get next list of to be learned Kanji
if not self.upcoming:
self.upcoming = deque(self.alg.retrieveKanji())
# All Kanji's are learned
if not self.upcoming:
self.cur_framekanji = '-1'
self.cur_framenum, self.cur_answer, self.story = self.dbkanji_info(self.cur_framekanji)
self.cur_framekanji = self.dbspecial(self.cur_framekanji)
#self.conndb("Kanji", "SELECT", "character, meanings, story", " WHERE framenum = -1")
# Get next Kanji character, answer, story and also update current table
else:
# Get new Kanji and answer
self.cur_framekanji = self.upcoming.popleft()
#print(self.cur_framekanji)
self.cur_framenum, self.cur_answer, self.story = self.dbkanji_info()
#self.conndb("Kanji", "SELECT", "character, meanings, story", " WHERE framenum = {}".format(self.cur_framenum))
# Update status learning
print("Updating actionbar_status")
App.get_running_app().actionbar_status = self.alg_count.countlearned()
# Update sKanji_dict for sKanji buttons
self.radicalDict()
self.sKanjiDict()
# App.get_running_app().root.ids.lbl1.text = "Now: {}".format(status[0])
# App.get_running_app().root.ids.lbl2.text = "Forgot: {}".format(status[1])
# App.get_running_app().root.ids.lbl3.text = "Left: {}".format(status[2])
# Update current Kanji table
def updateCurrent(self, nxt, correct=-1):
# nxt_cur: 0: no answer, 1: correct answer, 2: wrong answer
print("Updating current with nxt: {} and correct: {}".format(nxt, correct))
if nxt:
nxt_cur = 1
else:
if correct == 0:
nxt_cur = 2
else:
nxt_cur = 0
self.dbupdate(nxt_cur)
# Update database with current knowledge of answered Kanji
def updateKanji(self, correct):
# Don't update when showing finished screen
if self.cur_framenum != -1:
# Change test
if self.cur_framenum == 0:
updatek = '0'
else:
updatek = self.cur_framekanji
self.alg.answerKanji(updatek, correct)
def story_bold(self):
print("Making answers bold...")
# Don't update GUI all the time
story = self.story
for x in self.cur_answer:
print("Search with: {}".format(x))
# Find all words with answer x in it
search = [(m.group(),m.start(),m.end()) for m in re.finditer(x, story, flags=re.IGNORECASE)]
print(search)
# Start from end of story to not screw over indexes when adding bold [b] [/b]
i = len(search)-1
while i >= 0:
s = search[i][1]
e = search[i][2]
#print("Start: {}\tEnd: {}".format(s, e))
#print("Start: {}\tEnd: {}".format(story[s], story[e]))
story = story[:s] +"[b]"+ story[s:e] +"[/b]"+ story[e:]
i -= 1
self.story = story
#print(self.story)
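# Sketch of the reverse-order insertion used in story_bold() (walking
# matches from the end keeps earlier offsets valid; example values only):
#
#   >>> s = 'cat and cat'
#   >>> for start, end in reversed([(0, 3), (8, 11)]):
#   ...     s = s[:start] + '[b]' + s[start:end] + '[/b]' + s[end:]
#   >>> s
#   '[b]cat[/b] and [b]cat[/b]'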
#Formats the answer of the user
def textFormat(self, answer):
#Set answer to lower case and clean the answer of strange symbols
answer = answer.lower()
pattern = '[a-z0-9 .-]'
answer = ''.join(re.findall(pattern, answer))
return answer
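# Quick sketch of what textFormat() keeps -- lower-case letters, digits,
# space, dot and dash; everything else is dropped (example input only):
#
#   >>> ''.join(re.findall('[a-z0-9 .-]', 'Mt. Fuji!?'.lower()))
#   'mt. fuji'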
# Check whether the typed answer is close enough to a correct answer using
# difflib's SequenceMatcher (a Damerau-Levenshtein version is kept commented out below)
def check(self, answer):
print("Answers: {} , given: {}".format(self.cur_answer, answer))
# Convert list to numpy
#cur_answer_np = np.asarray(self.cur_answer, dtype='S')
# Calculate Damerau-Levenshtein distance between given answer and correct answers
#dam_lev = normalized_damerau_levenshtein_distance_withNPArray(answer, cur_answer_np)
#print("dam-lev distance: {}".format(dam_lev))
# Get lowest (and therefore least difference) score
#dam_lev_ind = np.argmin(dam_lev)
# If distance is small enough TODO module support
#if dam_lev[dam_lev_ind] < 0.34:
# self.fix_answer = self.cur_answer[dam_lev_ind]
# return 1
#else:
# return 0
# SequenceMatcher(None, a, b).ratio()
sm_list = []
print(self.cur_answer)
cur_ans_lower = [self.textFormat(x) for x in self.cur_answer]
for a in cur_ans_lower:
sm_list.append(SequenceMatcher(None, answer, a).ratio())
print(sm_list)
sm_ind = sm_list.index(max(sm_list))
# If distance is small enough (higher is closer)
if sm_list[sm_ind] > 0.7:
self.fix_answer = self.cur_answer[sm_ind]
print("Fix: {}".format(self.cur_answer[sm_ind]))
return 1
else:
return 0
#if answer in self.cur_answer:
# return 1
#else:
# return 0
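# Fuzzy-match sketch for check() above (threshold 0.7 as used there;
# illustrative words, SequenceMatcher imported at the top of this file):
#
#   >>> SequenceMatcher(None, 'montain', 'mountain').ratio() > 0.7
#   True
#   >>> SequenceMatcher(None, 'river', 'mountain').ratio() > 0.7
#   False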
class LayoutFunctioning(BoxLayout):
if platform != 'android' and platform != 'ios':
keyb_height = NumericProperty(dp(300))
else:
# 280 dp, because otherwise maybe keyboard over inputfield
keyb_height = NumericProperty(dp(260)) #280 #254
print("Keyboard height: {}".format(keyb_height))
font_kanji = "TakaoPMincho" # os.path.join('data', 'fonts', 'TakaoPMincho.ttf') # TakaoPMincho.ttf
#Kanji_s = ["爪", "冖", "心", "夂"]
next_kanji = BooleanProperty(False)
answered = BooleanProperty(False)
master_kanji = MasterKanji()
txt_field_focus_i = BooleanProperty(True)
if master_kanji.cur_framenum == 0:
txt_field_focus_i = BooleanProperty(False)
#asw_ti = AnswerTextInput()
# Send button disabled with no text
send_disabled = BooleanProperty(True)
# Enable send button timer in sec
send_disabled_t = 3
# Link button to website
ww_link = StringProperty()
# TODO Enter on keyboard
#Window.bind(on_key_enter = btnPressed(answer))
def __init__(self, **kwargs):
super(LayoutFunctioning, self).__init__(**kwargs)
# Enable send_btn after some seconds
self.cb_disablesend = lambda dt: self.disablesend(False)
Clock.schedule_once(self.cb_disablesend, self.send_disabled_t)
# When previously saw finish screen
if self.master_kanji.cur_framenum == -1:
self.master_kanji.nextkanji()
self.reinitscreen(0)
else:
self.reinitscreen(self.master_kanji.dbcurrent("nextKanji"))
# First time opening app show explanation story
if self.master_kanji.cur_framenum == 0:
print("First time app")
self.master_kanji.story_show = True
# Link button to website
print("!!! DEBUG UTF-8 !!!")
#print(self.master_kanji.cur_framekanji)
print(type(self.master_kanji.cur_framekanji))
self.ww_link = "http://kanji.koohii.com/study/kanji/{}".format(self.master_kanji.cur_framekanji) # "TODO")
# Keyboard height
#Clock.schedule_once(lambda dt: self.storykeybheight())
print("Keyboard binding")
Window.bind(on_keyboard=self.storykeybheight)
print("--- INIT LayoutFunctioning COMPLETED ---\n")
def reinitscreen(self, nxt_cur):
# No answer given yet
if nxt_cur == 0:
print("Reinit: no answer")
self.next_kanji = False
self.answered = 0
self.master_kanji.story_show = False
# Correct answer
elif nxt_cur == 1:
print("Reinit: correct")
self.next_kanji = True
self.answered = 1
self.master_kanji.story_show = True
Clock.unschedule(self.cb_disablesend)
self.disablesend(False)
# Wrong answer
else:
print("Reinit: wrong")
self.next_kanji = False
self.answered = 1
self.master_kanji.story_show = True
# Answers bold in self.story
#self.master_kanji.story_bold()
# sKanji buttons (Clock.schedule to call this function after .kv initialisation)
Clock.schedule_once(lambda dt: self.changeSKanji())
#self.changeSKanji()
def disablesend(self, dis):
print("Disable send button: {}".format(dis))
self.send_disabled = dis
Clock.unschedule(self.cb_disablesend)
# Change the BoxLayout height with the story inside to match user's keyboard height
def storykeybheight(self, window, keycode1, keycode2, text, modifiers):
keyb_height_change = Window.keyboard_height
print("storykeybheight size: {}, current: {}".format(keyb_height_change, self.keyb_height))
# Keyboard open
if keyb_height_change > dp(50):
# remove padding + vague value
keyb_height_change -= dp(33)
if self.keyb_height != keyb_height_change:
print("Keyb before: {}".format(self.keyb_height))
self.keyb_height = keyb_height_change
print("Keyb after: {}".format(self.keyb_height))
# TODO accept enter as button press
if keycode1 == 13:
#print("Enter pressed")
# Replaces text_input enter
if not self.ids.send_btn.disabled and platform != 'android' and platform != 'ios':
self.ids.send_btn.dispatch('on_release')
#self.asw_ti.text = 'test'
#self.txt_field_focus_i = BooleanProperty(False)
# def storykeybheight(self, *args):
# print("storykeybheight")
# print("keyb_height before: {}".format(self.keyb_height))
# keyb_height_change = Window.keyboard_height
# print("Window.keyboard_height: {}".format(keyb_height_change))
#
# # TODO Window.keyboard_height returns 50 ???
# if keyb_height_change <= dp(50):
# #keyb_height_change = dp(287)
# print("Keyb fix: {}".format(keyb_height_change))
# #Clock.schedule_once(lambda dt: self.storykeybheight()) # call this function again if too fast
# else:
# # remove padding
# keyb_height_change -= dp(35)
# print("Keyb - padding: {}".format(keyb_height_change))
# self.keyb_height = keyb_height_change
# remove padding
#keyb_height_change -= dp(35)
#print("Keyb - padding: {}".format(keyb_height_change))
#self.keyb_height = keyb_height_change
# Copy current Kanji to clipboard
def cpClipboard(self):
print("Copy current Kanji to clipboard")
Clipboard.copy("{}".format(self.master_kanji.cur_framekanji)) # TODO fix UTF8
print(Clipboard.get_types())
#print(Clipboard.paste())
# Flash the TextInput red when wrong answer is given
def flashred(self):
print("Flashing textinput red")
self.ids.txt_field.background_color = (1, 0.4, 0.4, 1)
Animation(background_color=(1, 0.75, 0.75, 1), duration=.5).start(self.ids.txt_field)
# Changes the shown story to the selected radical
def changeStory(self, skanji, radical):
print("Doing changeStory")
#print("Change story to {} and radical: {}".format(skanji, radical)) # Doesn't work on Android
#print("Change story to "+skanji+"and radical: {}".format(radical))
# Connect database
conn = sqlite3.connect(self.master_kanji.db_name)
#conn.row_factory = lambda cursor, row: row[0]
c = conn.cursor()
# Radical or not
if radical:
print("story change radical")
c.execute("SELECT Rjapanese, alt, strokes, meanings FROM RadicalMeaning WHERE radical = ?"
, (skanji.decode('utf-8'),))
story = c.fetchone()
print(story)
if story[1] != '':
self.master_kanji.story = skanji.decode('utf-8')+"Radical: "+story[0]+"\nAlt: "+story[1]\
+"\nStrokes: "+str(story[2])+"\n\nMeanings: [b]"+story[3]+"[/b]"
else:
self.master_kanji.story = skanji.decode('utf-8')+"\nRadical: "+story[0]+\
"\nStrokes: "+str(story[2])+"\n\nMeanings: [b]"+story[3]+"[/b]"
else:
print("story change skanji")
#print(skanji.decode('utf-8'))
if self.master_kanji.cur_framenum == -1 or self.master_kanji.cur_framenum == 0:
c.execute("SELECT meanings, story FROM Kanji WHERE framenum = ?", (self.master_kanji.cur_framenum,))
else:
c.execute("SELECT meanings, story FROM Kanji WHERE character = ?", (skanji.decode('utf-8'),))
story = c.fetchone()
print(story)
if skanji != self.master_kanji.cur_framekanji:
self.master_kanji.story = skanji.decode('utf-8')+"\n[b]"+story[0]+"[/b]\n\n"+story[1]
else:
self.master_kanji.story = story[1]
# Try making answers in self.story bold
try:
self.master_kanji.story_bold()
except Exception:
print("Bolding story failed 0.o")
self.master_kanji.story += "\n\n[b]Bolding[/b] went wrong!!! Please report Kanji + framenumber " \
"(top-left) to App developer. Screenshot is appreciated :)"
print("Story changed")
def addsKanji(self, stext, r):
print("addsKanji")
#print(App.get_running_app().root.ids.learn_kanji.text)
skanj = SKanjiToggleButton(lfunc=self, text=stext, radical=r) # , lfunc = self.ids.lfunc
self.ids.sKanjibox.add_widget(skanj)
#self.ids.foo.text = "T"
def delsKanji(self):
print("delsKanji")
#print(getattr(self.ids, 'togglemain').text)
# TODO set togglemain to down
# children = self.children[:]
#
# while children:
# child = children.pop()
#
# print(child)
# Remove all buttons except with main Kanji
for child in [child for child in self.ids.sKanjibox.children if child.text != self.master_kanji.cur_framekanji]:
print("Remove: {}".format(child))
self.ids.sKanjibox.remove_widget(child)
# Set main Kanji button state to down
for child in self.ids.sKanjibox.children:
child.state = 'down'
def changeSKanji(self):
# Delete the current sKanji buttons first so radicals identical to the main Kanji are not duplicated
self.delsKanji()
# Add new sKanji buttons
for s in self.master_kanji.radicals_list:
self.addsKanji(s, True)
# Add new sKanji buttons
for s in self.master_kanji.sKanji_list:
self.addsKanji(s, False)
# Set radicals_list and sKanji_list to empty after adding buttons
self.master_kanji.radicals_list = []
self.master_kanji.sKanji_list = []
# Function when the check/next button is pressed
def btnPressed(self, answer):
print("\n- - - - -")
Clock.unschedule(self.cb_disablesend)
#Only do something when user actually typed or answer has been correct
#if self.send_disabled == False: # len(answer) > 0 or self.next_kanji == True:
#self.ids.story_box.height = Window.keyboard_height - dp(8)
#print("New keyb height: {}".format(self.ids.story_box.height))
# TODO bind()
# Get next Kanji after answered correctly
if self.next_kanji:
print("Next Kanji...")
# Delete current sKanji buttons
self.delsKanji()
# Next Kanji
self.master_kanji.nextkanji()
# Next Story although hidden
#self.ids.story.text = self.master_kanji.story
#re-init
print("Reinit button: no answer")
self.next_kanji = False
print(self.master_kanji.cur_answer)
self.master_kanji.story_show = False
self.answered = 0
# Disable and then Enable send_btn after some seconds
self.send_disabled = True
Clock.schedule_once(self.cb_disablesend, self.send_disabled_t)
# update current in DB
self.master_kanji.updateCurrent(self.next_kanji)
# Change sKanji buttons
self.changeSKanji()
# Change story button
self.ww_link = "http://kanji.koohii.com/study/kanji/{}".format(self.master_kanji.cur_framekanji)
# Answers bold in self.story
self.master_kanji.story_bold()
# Check given answer
else:
print("Checking answer...")
answer = self.master_kanji.textFormat(answer)
# Correct answer
if self.master_kanji.check(answer) or self.master_kanji.cur_framenum == -1:
print("Correct answer")
self.next_kanji = True
# TODO Change button color
# Only update when first time answering
if self.answered == 0:
self.master_kanji.updateKanji(True)
self.master_kanji.updateCurrent(self.next_kanji, 1)
# Wrong answer
else:
print("Wrong answer")
# Only update when first time answering
if self.answered == 0:
self.master_kanji.updateKanji(False)
self.master_kanji.updateCurrent(self.next_kanji, 0)
self.flashred()
# An answer is given changes
self.master_kanji.story_show = True
self.answered = 1
if __name__ == '__main__':
print("This code needs to be run with KanjiOrigin")
|
NumesSanguis/KanjiOrigin
|
data/screens/learnkanji_k.py
|
Python
|
gpl-3.0
| 29,549
|
# Django settings for benchmarking project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
import os
os.environ['PGCONNECT_TIMEOUT'] = '1'
MANAGERS = ADMINS
DATABASES = {
'default': {
# 'ENGINE' : 'django.db.backends.sqlite3',
'ENGINE' : 'django.db.backends.postgresql_psycopg2',
'HOST' : '/tmp',
# 'PORT' : '5433',
'PORT' : '64000',
'NAME' : 'benchapp',
# 'CONN_TIMEOUT' : 1
}
}
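# Connection sketch equivalent to the settings above (illustrative, not
# part of this project): with HOST starting with '/', psycopg2/libpq
# connects over a unix-domain socket, here /tmp/.s.PGSQL.64000.
#
#   import psycopg2
#   conn = psycopg2.connect(host='/tmp', port=64000, dbname='benchapp')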
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '1q9qw$xd0-99wlw$6e8m59!tdz$pde$6mk&a9de3f6xnlt1uzl'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'benchmarking.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'benchmarking.benchapp'
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
kantai/passe-framework-prototype
|
passe-sample-apps/benchmarking/settings.py
|
Python
|
bsd-3-clause
| 4,770
|
import uptime
import storage
import settings
import time
import pymongo
from pymongo import MongoClient
import sqlite3
while True:
conn = sqlite3.connect(settings.DB)
cursor = conn.cursor()
connection = MongoClient('localhost', 27017)
collection = connection[settings.DBS_NAME][settings.FARMERS_COLLECTION]
uptime.update_farmers_table(conn, cursor, collection)
collection = connection[settings.DBS_NAME][settings.STORAGE_COLLECTION]
storage.update_stats_table(conn, cursor, collection)
cursor.close()
conn.close()
connection.close()
time.sleep(30)
|
littleskunk/driveshare-graph
|
driveshare_graph/updateSQL.py
|
Python
|
mit
| 593
|
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from b2.build import type
def register ():
type.register_type ('MARKDOWN', ['markdown', 'md'])
register ()
|
mzdb/pwiz-mzdb
|
libraries/boost-build/src/tools/types/markdown.py
|
Python
|
apache-2.0
| 298
|
##############################################################################
#
# Copyright (c) 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Locale and LocaleProvider Implementation.
$Id: __init__.py 38178 2005-08-30 21:50:19Z mj $
"""
import os
from datetime import datetime, date
from time import strptime
from zope.interface import implements
from zope.i18n.interfaces.locales import ILocale
from zope.i18n.interfaces.locales import ILocaleDisplayNames, ILocaleDates
from zope.i18n.interfaces.locales import ILocaleVersion, ILocaleIdentity
from zope.i18n.interfaces.locales import ILocaleTimeZone, ILocaleCalendar
from zope.i18n.interfaces.locales import ILocaleCurrency, ILocaleNumbers
from zope.i18n.interfaces.locales import ILocaleFormat, ILocaleFormatLength
from zope.i18n.interfaces.locales import ILocaleOrientation
from zope.i18n.format import NumberFormat, DateTimeFormat
from zope.i18n.locales.inheritance import \
AttributeInheritance, InheritingDictionary, NoParentException
from zope.i18n.locales.provider import LocaleProvider, LoadLocaleError
# Setup the locale directory
from zope import i18n
LOCALEDIR = os.path.join(os.path.dirname(i18n.__file__), "locales", "data")
# Global LocaleProvider. We really just need this single one.
locales = LocaleProvider(LOCALEDIR)
# Define some constants that can be used
JANUARY = 1
FEBRUARY = 2
MARCH = 3
APRIL = 4
MAY = 5
JUNE = 6
JULY = 7
AUGUST = 8
SEPTEMBER = 9
OCTOBER = 10
NOVEMBER = 11
DECEMBER = 12
MONDAY = 1
TUESDAY = 2
WEDNESDAY = 3
THURSDAY = 4
FRIDAY = 5
SATURDAY = 6
SUNDAY = 7
dayMapping = {'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4,
'fri': 5, 'sat': 6, 'sun': 7}
BC = 1
AD = 2
class LocaleIdentity(object):
"""Represents a unique identification of the locale
This class does not have to deal with inheritance.
Examples::
>>> id = LocaleIdentity('en')
>>> id
<LocaleIdentity (en, None, None, None)>
>>> id = LocaleIdentity('en', 'latin')
>>> id
<LocaleIdentity (en, latin, None, None)>
>>> id = LocaleIdentity('en', 'latin', 'US')
>>> id
<LocaleIdentity (en, latin, US, None)>
>>> id = LocaleIdentity('en', 'latin', 'US', 'POSIX')
>>> id
<LocaleIdentity (en, latin, US, POSIX)>
>>> id = LocaleIdentity('en', None, 'US', 'POSIX')
>>> id
<LocaleIdentity (en, None, US, POSIX)>
"""
implements(ILocaleIdentity)
def __init__(self, language=None, script=None, territory=None, variant=None):
"""Initialize object."""
self.language = language
self.script = script
self.territory = territory
self.variant = variant
def __repr__(self):
"""See zope.i18n.interfaces.ILocaleIdentity
"""
return "<LocaleIdentity (%s, %s, %s, %s)>" %(
self.language, self.script, self.territory, self.variant)
class LocaleVersion(object):
"""Represents a particular version of a locale
This class does not have to deal with inheritance.
Examples::
>>> cmp(LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes'),
... LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes again'))
0
>>> cmp(LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes'),
... LocaleVersion('1.0', datetime(2004, 1, 2), 'no notes again'))
-1
>>> cmp(LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes'),
... LocaleVersion('0.9', datetime(2004, 1, 2), 'no notes again'))
-1
>>> cmp(LocaleVersion('1.0', datetime(2004, 1, 1), 'no notes'),
... LocaleVersion('0.9', datetime(2004, 1, 1), 'no notes again'))
1
"""
implements(ILocaleVersion)
def __init__(self, number, generationDate, notes):
"""Initialize object."""
self.number = number
assert(isinstance(generationDate, (date, type(None))))
self.generationDate = generationDate
self.notes = notes
def __cmp__(self, other):
"See zope.i18n.interfaces.ILocaleVersion"
return cmp((self.generationDate, self.number),
(other.generationDate, other.number))
class LocaleDisplayNames(AttributeInheritance):
"""Locale display names with inheritable data.
Examples::
>>> from zope.i18n.locales.tests.test_docstrings import \\
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.displayNames = LocaleDisplayNames()
>>> root.displayNames.languages = ['en', 'de']
>>> root.displayNames.keys = ['foo', 'bar']
>>> locale = LocaleInheritanceStub(nextLocale=root)
>>> locale.displayNames = LocaleDisplayNames()
>>> locale.displayNames.keys = ['fu', 'bahr']
Here you can see the inheritance in action
>>> locale.displayNames.languages
['en', 'de']
>>> locale.displayNames.keys
['fu', 'bahr']
"""
implements(ILocaleDisplayNames)
class LocaleTimeZone(object):
"""Specifies one of the timezones of a specific locale.
The attributes of this class are not inherited, since all timezone
information is always provided together.
Example::
>>> tz = LocaleTimeZone('Europe/Berlin')
>>> tz.cities = ['Berlin']
>>> tz.names = {'standard': ('Mitteleuropaeische Zeit', 'MEZ'),
... 'daylight': ('Mitteleuropaeische Sommerzeit', 'MESZ')}
>>> tz.type
'Europe/Berlin'
>>> tz.cities
['Berlin']
"""
implements(ILocaleTimeZone)
def __init__(self, type):
"""Initialize the object."""
self.type = type
self.cities = []
self.names = {}
class LocaleFormat(object):
"""Specifies one of the format of a specific format length.
The attributes of this class are not inherited, since all format
information is always provided together. Note that this information by
itself is often not useful, since other calendar data is required to use
the specified pattern for formatting and parsing.
"""
implements(ILocaleFormat)
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.displayName = u''
self.pattern = u''
class LocaleFormatLength(AttributeInheritance):
"""Specifies one of the format lengths of a specific quantity, like
numbers, dates, times and datetimes."""
implements(ILocaleFormatLength)
def __init__(self, type=None):
"""Initialize the object."""
self.type = type
self.default = None
class LocaleCalendar(AttributeInheritance):
"""Represents locale data for a calendar, like 'gregorian'.
This object is particularly tricky, since the calendar not only inherits
from higher-up locales, but also from the specified gregorian calendar
available for this locale. This was done, since most other calendars have
different year and era data, but everything else remains the same.
Example::
Even though the 'Locale' object has no 'calendar' attribute for real, it
helps us here to make the example simpler.
>>> from zope.i18n.locales.tests.test_docstrings import \\
... LocaleInheritanceStub
>>> root = LocaleInheritanceStub()
>>> root.calendar = LocaleCalendar('gregorian')
>>> locale = LocaleInheritanceStub(nextLocale=root)
>>> locale.calendar = LocaleCalendar('gregorian')
>>> root.calendar.months = InheritingDictionary(
... {1: (u'January', u'Jan'), 2: (u'February', u'Feb')})
>>> locale.calendar.months = InheritingDictionary(
... {2: (u'Februar', u'Feb'), 3: (u'Maerz', u'Mrz')})
>>> locale.calendar.getMonthNames()[:4]
[u'January', u'Februar', u'Maerz', None]
>>> locale.calendar.getMonthTypeFromName(u'January')
1
>>> locale.calendar.getMonthTypeFromName(u'Februar')
2
>>> locale.calendar.getMonthAbbreviations()[:4]
[u'Jan', u'Feb', u'Mrz', None]
>>> locale.calendar.getMonthTypeFromAbbreviation(u'Jan')
1
>>> locale.calendar.getMonthTypeFromAbbreviation(u'Mrz')
3
>>> root.calendar.days = InheritingDictionary(
... {1: (u'Monday', u'Mon'), 2: (u'Tuesday', u'Tue')})
>>> locale.calendar.days = InheritingDictionary(
... {2: (u'Dienstag', u'Die'), 3: (u'Mittwoch', u'Mit')})
>>> locale.calendar.getDayNames()[:4]
[u'Monday', u'Dienstag', u'Mittwoch', None]
>>> locale.calendar.getDayTypeFromName(u'Monday')
1
>>> locale.calendar.getDayTypeFromName(u'Dienstag')
2
>>> locale.calendar.getDayAbbreviations()[:4]
[u'Mon', u'Die', u'Mit', None]
>>> locale.calendar.getDayTypeFromAbbreviation(u'Mon')
1
>>> locale.calendar.getDayTypeFromAbbreviation(u'Die')
2
Let's test the direct attribute access as well.
>>> root.am = u'AM'
>>> root.pm = u'PM'
>>> locale.pm = u'nachm.'
>>> locale.pm
u'nachm.'
>>> locale.am
u'AM'
"""
implements(ILocaleCalendar)
def __init__(self, type):
"""Initialize the object."""
self.type = type
def getMonthNames(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.months.get(type, (None, None))[0] for type in range(1, 13)]
def getMonthTypeFromName(self, name):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.months.items():
if item[1][0] == name:
return item[0]
def getMonthAbbreviations(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.months.get(type, (None, None))[1] for type in range(1, 13)]
def getMonthTypeFromAbbreviation(self, abbr):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.months.items():
if item[1][1] == abbr:
return item[0]
def getDayNames(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.days.get(type, (None, None))[0] for type in range(1, 8)]
def getDayTypeFromName(self, name):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.days.items():
if item[1][0] == name:
return item[0]
def getDayAbbreviations(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return [self.days.get(type, (None, None))[1] for type in range(1, 8)]
def getDayTypeFromAbbreviation(self, abbr):
"""See zope.i18n.interfaces.ILocaleCalendar"""
for item in self.days.items():
if item[1][1] == abbr:
return item[0]
def isWeekend(self, datetime):
"""See zope.i18n.interfaces.ILocaleCalendar"""
day = datetime.weekday()
time = datetime.time()
# TODO: Implement this method
return False
def getFirstWeekDayName(self):
"""See zope.i18n.interfaces.ILocaleCalendar"""
return self.days[dayMapping[self.week['firstDay']]][0]
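# Minimal doctest-style sketch for getFirstWeekDayName() (illustrative
# data; 'firstDay' is assumed to hold a dayMapping key such as 'mon'):
#
#   >>> cal = LocaleCalendar('gregorian')
#   >>> cal.days = {1: (u'Monday', u'Mon')}
#   >>> cal.week = {'firstDay': 'mon'}
#   >>> cal.getFirstWeekDayName()
#   u'Monday'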
class LocaleDates(AttributeInheritance):
"""Simple ILocaleDates implementation that can inherit data from other
locales.
Examples::
>>> from zope.i18n.tests.test_formats import LocaleCalendarStub as Stub
>>> from datetime import datetime, date, time
>>> dates = LocaleDates()
>>> cal = LocaleCalendar('gregorian')
>>> cal.months = Stub.months
>>> cal.days = Stub.days
>>> cal.am = Stub.am
>>> cal.pm = Stub.pm
>>> cal.eras = Stub.eras
>>> cal.week = {'firstDay': 1, 'minDays': 1}
>>> dates.calendars = {'gregorian': cal}
Setting up and accessing date format through a specific length
(very common scenario)
>>> fulllength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u'EEEE, d. MMMM yyyy'
>>> fulllength.formats = {None: format}
>>> mediumlength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u'dd.MM.yyyy'
>>> mediumlength.formats = {None: format}
>>> cal.dateFormats = {'full': fulllength, 'medium': mediumlength}
>>> cal.defaultDateFormat = 'medium'
>>> formatter = dates.getFormatter('date')
>>> formatter.format(date(2004, 02, 04))
u'04.02.2004'
>>> formatter = dates.getFormatter('date', length='full')
>>> formatter.format(date(2004, 02, 04))
u'Mittwoch, 4. Februar 2004'
Let's also test the time formatter
>>> fulllength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u"H:mm' Uhr 'z"
>>> fulllength.formats = {None: format}
>>> mediumlength = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u'HH:mm:ss'
>>> mediumlength.formats = {None: format}
>>> cal.timeFormats = {'full': fulllength, 'medium': mediumlength}
>>> cal.defaultTimeFormat = 'medium'
>>> formatter = dates.getFormatter('time')
>>> formatter.format(time(12, 15, 00))
u'12:15:00'
>>> formatter = dates.getFormatter('time', length='full')
>>> formatter.format(time(12, 15, 00))
u'12:15 Uhr +000'
The datetime formatter is a bit special, since it is constructed from
the other two:
>>> length = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u'{1} {0}'
>>> length.formats = {None: format}
>>> cal.dateTimeFormats = {None: length}
>>> formatter = dates.getFormatter('dateTime')
>>> formatter.format(datetime(2004, 02, 04, 12, 15, 00))
u'04.02.2004 12:15:00'
>>> formatter = dates.getFormatter('dateTime', length='full')
>>> formatter.format(datetime(2004, 02, 04, 12, 15, 00))
u'Mittwoch, 4. Februar 2004 12:15 Uhr +000'
Finally, we'll test some invalid input:
>>> dates.getFormatter('timeDate')
Traceback (most recent call last):
ValueError: Invalid category: timeDate
>>> dates.getFormatter('date', length='superlong')
Traceback (most recent call last):
ValueError: Invalid format length: superlong
>>> dates.getFormatter('date', calendar='irish-catholic')
Traceback (most recent call last):
ValueError: Invalid calendar: irish-catholic
"""
implements(ILocaleDates)
def getFormatter(self, category, length=None, name=None,
calendar=u'gregorian'):
"""See zope.i18n.interfaces.locales.ILocaleDates"""
if category not in (u'date', u'time', u'dateTime'):
raise ValueError('Invalid category: %s' % category)
if calendar not in (u'gregorian', u'arabic', u'chinese',
u'civil-arabic', u'hebrew', u'japanese',
u'thai-buddhist'):
raise ValueError('Invalid calendar: %s' % calendar)
if length not in (u'short', u'medium', u'long', u'full', None):
raise ValueError('Invalid format length: %s' % length)
cal = self.calendars[calendar]
formats = getattr(cal, category+'Formats')
if length is None:
length = getattr(
cal,
'default'+category[0].upper()+category[1:]+'Format',
formats.keys()[0])
# 'datetime' is always a bit special; we often do not have a length
# specification, but we need it for looking up the date and time
# formatters
if category == 'dateTime':
formatLength = formats.get(length, formats[None])
else:
formatLength = formats[length]
if name is None:
name = formatLength.default
format = formatLength.formats[name]
pattern = format.pattern
if category == 'dateTime':
date_pat = self.getFormatter(
'date', length, name, calendar).getPattern()
time_pat = self.getFormatter(
'time', length, name, calendar).getPattern()
pattern = pattern.replace('{1}', date_pat)
pattern = pattern.replace('{0}', time_pat)
return DateTimeFormat(pattern, cal)
class LocaleCurrency(object):
"""Simple implementation of ILocaleCurrency without inheritance support,
since it is not needed for a single currency."""
implements(ILocaleCurrency)
def __init__(self, type):
"""Initialize object."""
self.type = type
self.symbol = None
self.symbolChoice = False
self.displayName = None
class LocaleNumbers(AttributeInheritance):
"""Implementation of ILocaleCurrency including inheritance support.
` Examples::
>>> numbers = LocaleNumbers()
>>> numbers.symbols = {
... 'decimal': ',', 'group': '.', 'list': ';', 'percentSign': '%',
... 'nativeZeroDigit': '0', 'patternDigit': '#', 'plusSign': '+',
... 'minusSign': '-', 'exponential': 'E', 'perMille': 'o/oo',
... 'infinity': 'oo', 'nan': 'N/A'}
Setting up and accessing totally unnamed decimal format
(very common scenario)
>>> length = LocaleFormatLength()
>>> format = LocaleFormat()
>>> format.pattern = u'#,##0.###;-#,##0.###'
>>> length.formats = {None: format}
>>> numbers.decimalFormats = {None: length}
>>> formatter = numbers.getFormatter('decimal')
>>> formatter.format(3.4)
u'3,4'
>>> formatter.format(-3.4567)
u'-3,457'
>>> formatter.format(3210.4)
u'3.210,4'
Setting up and accessing scientific formats with named format lengths
>>> longlength = LocaleFormatLength('long')
>>> format = LocaleFormat()
>>> format.pattern = u'0.000###E+00'
>>> longlength.formats = {None: format}
>>> mediumlength = LocaleFormatLength('long')
>>> format = LocaleFormat()
>>> format.pattern = u'0.00##E+00'
>>> mediumlength.formats = {None: format}
>>> numbers.scientificFormats = {'long': longlength,
... 'medium': mediumlength}
>>> numbers.defaultScientificFormat = 'long'
>>> formatter = numbers.getFormatter('scientific')
>>> formatter.format(1234.5678)
u'1,234568E+03'
>>> formatter = numbers.getFormatter('scientific', 'medium')
>>> formatter.format(1234.5678)
u'1,2346E+03'
Setting up and accessing percent formats with named format lengths
and format names
>>> longlength = LocaleFormatLength('long')
>>> fooformat = LocaleFormat()
>>> fooformat.pattern = u'0.##0%'
>>> barformat = LocaleFormat()
>>> barformat.pattern = u'0%'
>>> longlength.formats = {None: fooformat, 'bar': barformat}
>>> numbers.percentFormats = {'long': longlength}
>>> numbers.defaultPercentFormat = 'long'
>>> formatter = numbers.getFormatter('percent')
>>> formatter.format(123.45678)
u'123,457%'
>>> formatter = numbers.getFormatter('percent', name='bar')
>>> formatter.format(123.45678)
u'123%'
...using a default name
>>> numbers.percentFormats['long'].default = 'bar'
>>> formatter = numbers.getFormatter('percent')
>>> formatter.format(123.45678)
u'123%'
"""
implements(ILocaleNumbers)
def getFormatter(self, category, length=None, name=None):
"""See zope.i18n.interfaces.locales.ILocaleNumbers"""
assert category in (u'decimal', u'percent', u'scientific', u'currency')
assert length in (u'short', u'medium', u'long', u'full', None)
formats = getattr(self, category+'Formats')
if length is None:
length = getattr(
self,
'default'+category[0].upper()+category[1:]+'Format',
formats.keys()[0])
formatLength = formats[length]
if name is None:
name = formatLength.default
format = formatLength.formats[name]
return NumberFormat(format.pattern, self.symbols)
class LocaleOrientation(AttributeInheritance):
"""Implementation of ILocaleOrientation
"""
implements(ILocaleOrientation)
class Locale(AttributeInheritance):
"""Implementation of the ILocale interface."""
implements(ILocale)
def __init__(self, id):
self.id = id
def getLocaleID(self):
"""Return the locale id."""
id = self.id
pieces = filter(None,
(id.language, id.script, id.territory, id.variant))
id_string = '_'.join(pieces)
# TODO: What about keys??? Where do I get this info from?
pieces = [key+'='+type for key, type in ()]
if pieces:
id_string += '@' + ','.join(pieces)
return id_string
def getInheritedSelf(self):
"""See zope.i18n.interfaces.locales.ILocaleInheritance
This is the really interesting method that looks up the next (more
general) Locale object. This is used in case this locale object does
not have the required information.
        This method works closely with LocaleProvider.
"""
language = self.id.language
territory = self.id.territory
variant = self.id.variant
if variant is not None:
return locales.getLocale(language, territory, None)
elif territory is not None:
return locales.getLocale(language, None, None)
elif language is not None:
return locales.getLocale(None, None, None)
else:
# Well, this is bad; we are already at the root locale
raise NoParentException('Cannot find a more general locale.')
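# A minimal sketch (not part of the original module) of how the fallback in
# getInheritedSelf() is meant to resolve: a fully qualified locale walks up
# to ever more general locales until the root is reached. The locale names
# are illustrative.
#
#   locale = locales.getLocale('de', 'DE', 'PREEURO')
#   locale.getInheritedSelf()                      # -> the 'de_DE' locale
#   locale.getInheritedSelf().getInheritedSelf()   # -> the 'de' locale
#   # ...and the root locale finally raises NoParentException.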
|
Donkyhotay/MoonPy
|
zope/i18n/locales/__init__.py
|
Python
|
gpl-3.0
| 22,122
|
import os
import os.path
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import random
try:
import pytest
except ImportError:
print >> sys.stderr, "Integ tests require pytests!"
sys.exit(1)
def pytest_funcarg__servers(request):
"Returns a new APIHandler with a filter manager"
# Create tmpdir and delete after
tmpdir = tempfile.mkdtemp()
# Make the command
output = "%s/output" % tmpdir
cmd = "cat >> %s" % output
# Write the configuration
port = random.randrange(10000, 65000)
config_path = os.path.join(tmpdir, "config.cfg")
conf = """[statsite]
flush_interval = 1
port = %d
udp_port = %d
timers_include = MEAN,STDEV,SUM,SUM_SQ,LOWER,UPPER,SAMPLE_RATE
[sink_stream_default]
command = %s
""" % (port, port, cmd)
open(config_path, "w").write(conf)
# Start the process
proc = subprocess.Popen(['./statsite', '-f', config_path])
proc.poll()
assert proc.returncode is None
# Define a cleanup handler
def cleanup():
try:
proc.kill()
proc.wait()
shutil.rmtree(tmpdir)
except:
print proc
pass
request.addfinalizer(cleanup)
# Make a connection to the server
connected = False
for x in xrange(3):
try:
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.settimeout(1)
conn.connect(("localhost", port))
connected = True
break
except Exception, e:
print e
time.sleep(0.5)
# Die now
if not connected:
raise EnvironmentError("Failed to connect!")
# Make a second connection
conn2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
conn2.connect(("localhost", port))
# Return the connection
return conn, conn2, output
def wait_file(path, timeout=5):
"Waits on a file to be make"
start = time.time()
while not os.path.isfile(path) and time.time() - start < timeout:
time.sleep(0.1)
if not os.path.isfile(path):
raise Exception("Timed out waiting for file %s" % path)
while os.path.getsize(path) == 0 and time.time() - start < timeout:
time.sleep(0.1)
class TestInteg(object):
def test_counters(self, servers):
"Tests adding kv pairs"
server, _, output = servers
server.sendall("foobar:100|ms\n")
server.sendall("foobar:200|ms\n")
server.sendall("foobar:300|ms\n")
wait_file(output)
out = open(output).read()
assert "timers.foobar.count" not in out
assert "timers.foobar.rate" not in out
assert "timers.foobar.mean|200" in out
assert "timers.foobar.stdev|100" in out
assert "timers.foobar.sum|600" in out
assert "timers.foobar.sum_sq|140000" in out
assert "timers.foobar.lower|100" in out
assert "timers.foobar.upper|300" in out
assert "timers.foobar.median" not in out
assert "timers.foobar.p50|200" in out
assert "timers.foobar.sample_rate|3" in out
|
twitter-forks/statsite
|
integ/test_timers_include.py
|
Python
|
bsd-3-clause
| 3,103
|
# coding: utf-8
from datetime import datetime
import random
from django.http import HttpResponse, HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.http import HttpResponseServerError
from django.shortcuts import render
from django.urls import reverse
from django.views import View
from leancloud import Object
from leancloud import Query
from leancloud.errors import LeanCloudError
# weixin (WeChat)
import hashlib
from django.views.decorators.csrf import csrf_exempt
import time
from django.template import loader, Context
from xml.etree import ElementTree as ET
from wechat_sdk.basic import WechatBasic
from wechat_sdk.exceptions import ParseError
from wechat_sdk.messages import TextMessage
# pachong (web crawler)
from pachong.newsSpider import page_info, getQiuShi
import requests
# Chinese encoding support
import sys
reload(sys)
sys.setdefaultencoding('utf8')
WECHAT_TOKEN = '1db18532c43ec91f39b6448a865f4096'
# Instantiate WechatBasic
wechat_instance = WechatBasic(token=WECHAT_TOKEN)
class Todo(Object):
pass
def index(request):
return render(request, 'index.html', {})
def current_time(request):
return HttpResponse(datetime.now())
class TodoView(View):
def get(self, request):
try:
todos = Query(Todo).descending('createdAt').find()
except LeanCloudError as e:
            if e.code == 101:  # the corresponding Class has not been created on the server yet
todos = []
else:
raise e
return render(request, 'todos.html', {
'todos': [x.get('content') for x in todos],
})
def post(self, request):
content = request.POST.get('content')
todo = Todo(content=content)
try:
todo.save()
except LeanCloudError as e:
return HttpResponseServerError(e.error)
return HttpResponseRedirect(reverse('todo_list'))
class hiWechat(View):
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(hiWechat, self).dispatch(*args, **kwargs)
def get(self, request):
        # Receive the WeChat server verification parameters
signature = request.GET.get('signature', None)
timestamp = request.GET.get('timestamp', None)
nonce = request.GET.get('nonce', None)
echostr = request.GET.get('echostr', None)
token = '1db18532c43ec91f39b6448a865f4096'
        # Sort the parameters
hashlist = [token, timestamp, nonce]
hashlist.sort()
        # Join them into a single string and hash it
hashstr = ''.join([s for s in hashlist])
hashstr = hashlib.sha1(hashstr).hexdigest()
        # Compare with the signature
if hashstr == signature:
return HttpResponse(echostr)
else:
return ""
# def post(self, request):
# str_xml = ET.fromstring(request.body)
# fromUser = str_xml.find('ToUserName').text
# toUser = str_xml.find('FromUserName').text
# content = str_xml.find('Content').text
# createTime = str_xml.find('CreateTime').text
    # # Get the current time
    # nowtime = str(int(time.time()))
# t = loader.get_template('text.xml')
# c = Context({'toUser': toUser, 'fromUser': fromUser,
# 'nowtime': 1503562682, 'content': 'hi'})
# return HttpResponse(t.render(c))
def post(self, request):
try:
wechat_instance.parse_data(data = request.body)
except ParseError:
return HttpResponseBadRequest('Invalid XML Data')
        # Get the parsed WeChat request message
message = wechat_instance.get_message()
if isinstance(message, TextMessage):
            # Content of the current text message
content = message.content.strip()
print "**content: " + content
#content = getwangyi("http://news.163.com/rank/")
#wangyiLink = self.getwangyi(url="http://news.163.com/rank/")
#print "**wangyiLink: " + wangyiLink
qsContent = getQiuShi("https://www.qiushibaike.com/text/")
try:
if content.isdigit():
content = int(content)
maxlen = len(qsContent)
if content > maxlen:
res = "请输入小于 %d 的整数." % maxlen
else:
res = qsContent[content] + qsContent[content + 1] + qsContent[content + 2]
else:
res = "请输入数字"
except Exception as e:
res = e
response = wechat_instance.response_text(content=res)
else:
response = wechat_instance.response_text(content="功能升级中")
return HttpResponse(response, content_type="application/xml")
def getwangyi(self, url):
content = '没获取到内容'
#print "downloading ", url
myPage = requests.get(url).content.decode("gbk")
myPageResults = page_info(myPage)
url_list = []
for next_url, item in myPageResults:
#print "downloading ", next_url
#new_page = requests.get(next_url).content.decode("gbk")
#newPageResults = new_page_info(new_page)
url_list.append(next_url)
#print url_list
        xuanze = random.randint(0, len(url_list) - 1)  # randint is inclusive at both ends
content = url_list[xuanze]
return content
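# A standalone sketch (an assumption, not part of the original file) of the
# WeChat signature check performed in hiWechat.get above: token, timestamp
# and nonce are sorted, joined and SHA1-hashed, and the digest is compared
# against the signature query parameter.
def check_wechat_signature(token, timestamp, nonce, signature):
    hashlist = sorted([token, timestamp, nonce])
    return hashlib.sha1(''.join(hashlist)).hexdigest() == signature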
|
xingzhe25/testLeanCloud
|
views.py
|
Python
|
mit
| 5,363
|
"""Change relationship between instances and groups from m2m to m2o
Revision ID: 535ddcd39cad
Revises: 4031597ba686
Create Date: 2012-03-10 10:09:02.058131
"""
# revision identifiers, used by Alembic.
revision = '535ddcd39cad'
down_revision = '4031597ba686'
from alembic import op
import sqlalchemy as sa
def upgrade():
# create parent_name in groups
op.add_column(
u'groups',
sa.Column(u'parent_name',
sa.Unicode(255),
sa.ForeignKey(u'groups.name',
name=u'groups_parent_name_fkey',
onupdate='cascade',
ondelete='restrict'),
nullable=True,
default=None
)
)
# create instance_groups table
op.create_table(u'instances_groups',
sa.Column(u'instance_domain',
sa.Unicode(255),
sa.ForeignKey(u'instances.domain',
onupdate="cascade",
ondelete="cascade"),
primary_key=True),
sa.Column(u'group_name',
sa.Unicode(255),
sa.ForeignKey(u'groups.name',
onupdate="cascade",
ondelete="cascade"),
primary_key=True),
mysql_engine=u'InnoDB')
# populate instances_groups
connection = op.get_bind()
connection.execute(
'INSERT INTO instances_groups (instance_domain, group_name) '
'SELECT instances.domain, groups.name '
'FROM instances, groups '
'WHERE instances.id = groups.instance_id '
)
# drop f.key for m2o
op.drop_constraint(u'groups_instance_id_fkey', u'groups')
op.drop_column(u'groups', u'instance_id')
# rename organization to company in users table
op.alter_column(u'users', u'organization',
name=u'company',
existing_type=sa.Unicode(128),
existing_server_default=None,
existing_nullable=True)
# create organization for users
op.add_column(
u'users',
sa.Column(u'organization_name',
sa.Unicode(255),
sa.ForeignKey(u'groups.name',
name='users_organization_name_fkey',
onupdate='cascade',
ondelete='restrict'
),
nullable=True,
default=None
)
)
def downgrade():
# cannot "stash" a M2M in a M2O, no way we can do that
raise NotImplementedError()
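# Typical invocation (standard Alembic CLI; assumes an alembic.ini pointing
# at the target database):
#
#   alembic upgrade 535ddcd39cad     # apply this migration
#   alembic downgrade 4031597ba686   # would raise NotImplementedError above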
|
asidev/aybu-manager
|
migrations/versions/535ddcd39cad_change_relationship_.py
|
Python
|
apache-2.0
| 2,870
|
import time
for i in range(1, 10, 2):
    time.sleep(2)        # nothing GUI about this, eh?
    print('spam' * i)    # print to standard output
|
simontakite/sysadmin
|
pythonscripts/programmingpython/Gui/Tools/spams.py
|
Python
|
gpl-2.0
| 171
|
# -*- coding: utf-8 -*-
import argparse
import json
import logging
import xml.etree.ElementTree as ET
import os
from collections import defaultdict
log = logging.getLogger("importer")
psycopg_import_error = None
try:
import psycopg2
except ImportError as exc:
psycopg2 = None
psycopg_import_error = str(exc)
def _add_to_target(target_map, object, key):
target = None
for id_key, ex_target in target_map.items():
id = object.get(id_key)
if id:
target = ex_target[id]
break
if not target:
# print("No %s target found for %r" % (key, object))
return False
target.setdefault(key, []).append(object)
return target
def _process_hearings_tree(tables, geometries):
hearings = {hearing["id"]: hearing for hearing in tables.pop("hearing")}
alternatives = {alternative["id"]: alternative for alternative in tables.pop("alternative")}
sections = {section["id"]: section for section in tables.pop("section")}
comments = {comment["id"]: comment for comment in tables.pop("comment")}
images = {image["id"]: image for image in tables.pop("image")}
log.info(
"Found %d hearings, %d alternatives, %d sections, %d comments and %d images",
len(hearings),
len(alternatives),
len(sections),
len(comments),
len(images),
)
likes = defaultdict(list)
for like in tables.pop("like"):
likes[like["comment_id"]].append(like)
tables_map = {
"hearing_id": hearings,
"alternative_id": alternatives,
"comment_id": comments,
"section_id": sections,
}
for comment in comments.values():
comment["likes"] = likes.pop(comment["id"], [])
_add_to_target(tables_map, comment, "comments")
for image in images.values():
_add_to_target(tables_map, image, "images")
for id_key, ent_map in tables_map.items():
table = id_key.replace("_id", "")
for ent in ent_map.values():
ent["main_image"] = images.get(ent.get("main_image_id"))
if table in geometries and ent["id"] in geometries[table]:
ent["_geometry"] = geometries[table][ent["id"]]
for id, hearing in hearings.items():
hearing["alternatives"] = [a for a in alternatives.values() if a["hearing_id"] == id]
hearing["sections"] = [s for s in sections.values() if s["hearing_id"] == id]
return hearings
def process_tree(xml_tree, geometries):
tables = {
table.tag: [{column.tag: column.text for column in row} for row in table.getchildren()]
for table in xml_tree.find("public").getchildren()
}
hearings = _process_hearings_tree(tables, geometries)
out = {
"hearings": hearings
}
return out
def dump_xml(conn, xml_file):
cur = conn.cursor()
with open(xml_file, "w", encoding="utf8") as outf:
cur.execute("SELECT database_to_xml(false, false, '');")
row = cur.fetchone()
outf.write(row[0])
outf.flush()
log.info("Database XML: Wrote %d bytes to %s" % (outf.tell(), outf.name))
def dump_geojson(conn, geometry_json_file):
cur = conn.cursor()
with open(geometry_json_file, "w", encoding="utf8") as outf:
cur.execute("SELECT id, ST_AsGeoJSON(_area, 15, 1) FROM hearing;")
hearing_geometries = {row[0]: json.loads(row[1] or "null") for row in cur}
geometries = {
"hearing": hearing_geometries
}
json.dump(geometries, outf, ensure_ascii=False, indent=1, sort_keys=True)
outf.flush()
log.info("Geometry JSON: Wrote %d bytes to %s" % (outf.tell(), outf.name))
def main():
log_levels = {n.lower(): l for (n, l) in logging._nameToLevel.items()}
ap = argparse.ArgumentParser()
ap.add_argument(
"-p", "--from-pgsql", dest="pgsql", action="store_true", default=False,
help="import from PostgreSQL first"
)
ap.add_argument("--dsn", default="dbname=kerrokantasi_old user=postgres")
ap.add_argument("--xml", default="kerrokantasi.xml")
ap.add_argument("--geometry-json", default="kerrokantasi.geometries.json")
ap.add_argument("--output-json", default="kerrokantasi.json")
ap.add_argument("--log-level", default="info", choices=log_levels)
args = ap.parse_args()
logging.basicConfig(level=log_levels[args.log_level])
if args.pgsql:
log.info("Creating XML and geometry files")
if not psycopg2:
raise ValueError("Psycopg2 is not available; can't import from PostgreSQL. (%s)" % psycopg_import_error)
conn = psycopg2.connect(args.dsn)
cur = conn.cursor()
cur.execute("SET CLIENT_ENCODING TO 'utf8';")
dump_xml(conn, args.xml)
dump_geojson(conn, args.geometry_json)
conn.close()
log.info("Importing data from XML and geometry files...")
tree = ET.parse(args.xml)
if os.path.isfile(args.geometry_json):
with open(args.geometry_json, "r", encoding="utf8") as inf:
geometries = json.load(inf)
else:
log.warning("Geometry file %s does not exist" % args.geometry_json)
geometries = {}
tree = process_tree(tree, geometries)
with open(args.output_json, "w", encoding="utf8") as outf:
json.dump(tree, outf, ensure_ascii=False, indent=1, sort_keys=True)
outf.flush()
log.info("Output JSON: Wrote %d bytes to %s", outf.tell(), outf.name)
if __name__ == '__main__':
main()
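
# Example invocation (flags as defined by the argparse setup in main(); the
# DSN value is illustrative):
#
#   python process_legacy_data.py --from-pgsql \
#       --dsn "dbname=kerrokantasi_old user=postgres" \
#       --xml kerrokantasi.xml --output-json kerrokantasi.json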
|
City-of-Helsinki/kerrokantasi
|
migrator/process_legacy_data.py
|
Python
|
mit
| 5,516
|
# coding: utf-8
import unittest
from tapioca_instagram import Instagram
class TestTapiocaInstagram(unittest.TestCase):
def setUp(self):
self.wrapper = Instagram()
if __name__ == '__main__':
unittest.main()
|
vintasoftware/tapioca-instagram
|
tests/test_tapioca_instagram.py
|
Python
|
mit
| 229
|
"""
Tests for courseware middleware
"""
from django.http import Http404
from django.test.client import RequestFactory
from lms.djangoapps.courseware.exceptions import Redirect
from lms.djangoapps.courseware.middleware import RedirectMiddleware
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import CourseFactory # lint-amnesty, pylint: disable=wrong-import-order
class CoursewareMiddlewareTestCase(SharedModuleStoreTestCase):
"""Tests that courseware middleware is correctly redirected"""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.course = CourseFactory.create()
@staticmethod
def get_headers(cache_response):
"""
Django 3.2 has no ._headers
See https://docs.djangoproject.com/en/3.2/releases/3.2/#requests-and-responses
"""
if hasattr(cache_response, '_headers'):
headers = cache_response._headers.copy() # pylint: disable=protected-access
else:
headers = {k.lower(): (k, v) for k, v in cache_response.items()}
return headers
def test_process_404(self):
"""A 404 should not trigger anything"""
request = RequestFactory().get("dummy_url")
response = RedirectMiddleware().process_exception(
request, Http404()
)
assert response is None
def test_redirect_exceptions(self):
"""
Unit tests for handling of Redirect exceptions.
"""
request = RequestFactory().get("dummy_url")
test_url = '/test_url'
exception = Redirect(test_url)
response = RedirectMiddleware().process_exception(
request, exception
)
assert response.status_code == 302
headers = self.get_headers(response)
target_url = headers['location'][1]
assert target_url.endswith(test_url)
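
# Illustrative only: the same header-compatibility pattern used by
# get_headers above can be applied to any Django response object, e.g.
#
#   headers = CoursewareMiddlewareTestCase.get_headers(response)
#   target_url = headers['location'][1]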
|
edx/edx-platform
|
lms/djangoapps/courseware/tests/test_middleware.py
|
Python
|
agpl-3.0
| 1,978
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Alvaro del Castillo San Felix <acs@bitergia.com>
#
import argparse
import logging
from datetime import datetime
from os import sys
from perceval.backends.bugzilla import Bugzilla
from perceval.backends.gerrit import Gerrit
from perceval.backends.github import GitHub
from grimoire_elk.elastic import ElasticConnectException
from grimoire_elk.elastic import ElasticSearch
from grimoire_elk.enriched.bugzilla import BugzillaEnrich
from grimoire_elk.enriched.gerrit import GerritEnrich
from grimoire_elk.enriched.github import GitHubEnrich
from grimoire_elk.enriched.sortinghat_gelk import SortingHat
from grimoire_elk.raw.bugzilla import BugzillaOcean
from grimoire_elk.raw.elastic import ElasticOcean
from grimoire_elk.raw.gerrit import GerritOcean
from grimoire_elk.raw.github import GitHubOcean
def get_connector_from_name(name, connectors):
found = None
for connector in connectors:
backend = connector[0]
if backend.get_name() == name:
found = connector
return found
if __name__ == '__main__':
"""Gelk: perceval2ocean and ocean2kibana"""
connectors = [[Bugzilla, BugzillaOcean, BugzillaEnrich],
[GitHub, GitHubOcean, GitHubEnrich],
[Gerrit, GerritOcean, GerritEnrich]] # Will come from Registry
parser = argparse.ArgumentParser()
ElasticOcean.add_params(parser)
subparsers = parser.add_subparsers(dest='backend',
help='perceval backend')
for connector in connectors:
name = connector[0].get_name()
subparser = subparsers.add_parser(name, help='gelk %s -h' % name)
# We need params for feed
connector[0].add_params(subparser)
args = parser.parse_args()
app_init = datetime.now()
backend_name = args.backend
if not backend_name:
parser.print_help()
sys.exit(0)
if 'debug' in args and args.debug:
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')
logging.debug("Debug mode activated")
else:
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("requests").setLevel(logging.WARNING)
connector = get_connector_from_name(backend_name, connectors)
backend = connector[0](**vars(args))
ocean_backend = connector[1](backend, **vars(args))
enrich_backend = connector[2](backend, **vars(args))
es_index = backend.get_name() + "_" + backend.get_id()
clean = args.no_incremental
if args.cache:
clean = True
try:
# Ocean
elastic_state = ElasticSearch(args.elastic_url,
es_index,
ocean_backend.get_elastic_mappings(),
clean)
# Enriched ocean
enrich_index = es_index + "_enrich"
elastic = ElasticSearch(args.elastic_url,
enrich_index,
enrich_backend.get_elastic_mappings(),
clean)
except ElasticConnectException:
logging.error("Can't connect to Elastic Search. Is it running?")
sys.exit(1)
ocean_backend.set_elastic(elastic_state)
enrich_backend.set_elastic(elastic)
try:
# First feed the item in Ocean to use it later
logging.info("Adding data to %s" % (ocean_backend.elastic.index_url))
ocean_backend.feed()
if backend_name == "github":
GitHub.users = enrich_backend.users_from_es()
logging.info("Adding enrichment data to %s" %
(enrich_backend.elastic.index_url))
items = []
new_identities = []
items_count = 0
for item in ocean_backend:
# print("%s %s" % (item['url'], item['lastUpdated_date']))
if len(items) >= elastic.max_items_bulk:
enrich_backend.enrich_items(items)
items = []
items.append(item)
# Get identities from new items to be added to SortingHat
identities = ocean_backend.get_identities(item)
if not identities:
identities = []
for identity in identities:
if identity not in new_identities:
new_identities.append(identity)
items_count += 1
enrich_backend.enrich_items(items)
logging.info("Total items enriched %i " % items_count)
logging.info("Total new identities to be checked %i" % len(new_identities))
merged_identities = SortingHat.add_identities(new_identities, backend_name)
# Redo enrich for items with new merged identities
except KeyboardInterrupt:
logging.info("\n\nReceived Ctrl-C or other break signal. Exiting.\n")
logging.debug("Recovering cache")
backend.cache.recover()
sys.exit(0)
total_time_min = (datetime.now() - app_init).total_seconds() / 60
logging.info("Finished in %.2f min" % (total_time_min))
|
grimoirelab/GrimoireELK
|
utils/gelk.py
|
Python
|
gpl-3.0
| 5,856
|
#! /usr/bin/env python
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
from astrometry.util.index import *
from astrometry.util.plotutils import *
from astrometry.libkd.spherematch import match
from astrometry.util.starutil_numpy import *
from astrometry.util.fits import *
from optparse import *
from pylab import *
from numpy import *
import os
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-p', '--prefix', dest='prefix', help='Prefix for output plot names')
parser.add_option('-r', '--range', dest='range', type='float', help='Set search radius range (in arcsec) of stars-1 plot, default 15')
parser.set_defaults(prefix='', range=15.)
opt,args = parser.parse_args()
if 'plots' in args: # DEBUG!
cat = fits_table('cat2.fits')
xyz = radectoxyz(cat.ra, cat.dec)
R = 15.
inds,dists = match(xyz, xyz, deg2rad(R/3600.))
notself = (inds[:,0] != inds[:,1])
clf()
hist(rad2deg(dists[notself]) * 3600., 200)
title('ImSim reference catalog')
xlabel('Distance between pairs of sources (arcsec)')
ylabel('Counts')
xlim(0, R)
savefig('cat-stars-1.png')
cat = fits_table('stars3.fits')
xyz = radectoxyz(cat.ra, cat.dec)
R = 15.
inds,dists = match(xyz, xyz, deg2rad(R/3600.))
notself = (inds[:,0] != inds[:,1])
clf()
hist(rad2deg(dists[notself]) * 3600., 200)
title('ImSim reference catalog -- stars only')
xlabel('Distance between pairs of sources (arcsec)')
ylabel('Counts')
xlim(0, R)
savefig('cat-stars-2.png')
I1 = inds[notself,0]
I2 = inds[notself,1]
clf()
RA = cat.ra + (cat.ra > 180)*-360
dra = RA[I1]-RA[I2]
ddec = cat.dec[I1]-cat.dec[I2]
#plot(dra, ddec, 'r.')
(H,xe,ye) = histogram2d(dra, ddec, bins=(200,200))
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
xlabel('dRA (deg)')
ylabel('dDec (deg)')
axis('equal')
savefig('cat-stars-4.png')
cat = fits_table('gals2.fits')
xyz = radectoxyz(cat.ra, cat.dec)
R = 15.
inds,dists = match(xyz, xyz, deg2rad(R/3600.))
notself = (inds[:,0] != inds[:,1])
clf()
hist(rad2deg(dists[notself]) * 3600., 200)
title('ImSim reference catalog -- galaxies only')
xlabel('Distance between pairs of sources (arcsec)')
ylabel('Counts')
xlim(0, R)
savefig('cat-stars-3.png')
sys.exit(0)
for indfn in args:
print('Reading index', indfn)
null = None
I = index_load(indfn, 0, null)
print('Loaded.')
NS = index_nstars(I)
NQ = index_nquads(I)
print('Index has', NS, 'stars and', NQ, 'quads')
DQ = index_get_quad_dim(I)
print('Index has "quads" with %i stars' % (DQ))
        DC = index_get_quad_dim(I)  # NOTE: reuses the quad dimension as the code dimension
        print('Index has %i-dimensional codes' % (DC))
iname = os.path.basename(I.indexname).replace('.fits', '')
# stars
print('Getting stars...')
stars = index_get_stars(I)
print(stars.shape)
ra,dec = xyztoradec(stars)
ra += (ra > 180)*-360
# FIXME --!
#ra *= cos(deg2rad(ra))
rng = [[-10,10],[-10,10]]
clf()
(H,xe,ye) = histogram2d(ra, dec, bins=(100,100), range=rng)
H=H.T
binarea = (xe[1]-xe[0])*(ye[1]-ye[0])
print('Bin area:', binarea, 'deg^2')
binarea *= 3600.
print(binarea, 'arcmin^2')
imshow(H/binarea, extent=(min(xe), max(xe), min(ye), max(ye)),
aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
colorbar()
axis('equal')
xlabel('RA (deg)')
ylabel('Dec (deg)')
title('Reference source density in %s' % iname)
savefig(opt.prefix + 'stars-2.png')
R = opt.range
print('Finding pairs within', R, 'arcsec')
inds,dists = match(stars, stars, deg2rad(R/3600.))
print('inds', inds.shape, 'dists', dists.shape)
notself = (inds[:,0] != inds[:,1])
clf()
hist(rad2deg(dists[notself]) * 3600., 200)
xlabel('Star pair distances (arcsec)')
ylabel('Counts')
xlim(0, R)
savefig(opt.prefix + 'stars-1.png')
# codes
print('Getting codes...')
codes = index_get_codes(I)
print('shape', codes.shape)
# code slices
cx = codes[:,0]
cy = codes[:,1]
dx = codes[:,2]
dy = codes[:,3]
clf()
(H,xe,ye) = histogram2d(cx, cy, bins=(100,100))
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
axis('equal')
xlabel('cx')
ylabel('cy')
savefig(opt.prefix + 'codes-1.png')
clf()
(H,xe,ye) = histogram2d(append(cx, dx), append(cy, dy), bins=(100,100))
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
axis('equal')
xlabel('cx, dx')
ylabel('cy, dy')
savefig(opt.prefix + 'codes-2.png')
clf()
(H,xe,ye) = histogram2d(cx, dx, bins=(100,100))
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
axis('equal')
xlabel('cx')
ylabel('dx')
savefig(opt.prefix + 'codes-3.png')
clf()
xx = append(cx, dx)
yy = append(cy, dy)
(H,xe,ye) = histogram2d(append(xx, 1.0-xx), append(yy, 1.0-yy), bins=(100,100))
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
axis('equal')
xlabel('cx, dx')
ylabel('cy, dy')
title('duplicated for A-B swap')
savefig(opt.prefix + 'codes-4.png')
for pnum,arrs in [(5, [cx,cy,dx,dy]),
(6, [append(cx, 1-cx), append(cy, 1-cy), append(dx, 1-dx), append(dy, 1-dy)])]:
clf()
anames = ['cx', 'cy', 'dx', 'dy']
sp = 0
for i,a1 in enumerate(arrs):
for j,a2 in enumerate(arrs):
sp += 1
if j > i:
continue
subplot(4,4, sp)
if i == j:
hist(a1, 100, range=[-0.3, 1.3])
else:
(H,xe,ye) = histogram2d(a2, a1, bins=(100,100), range=[[-0.3,1.3],[-0.3,1.3]])
H=H.T
imshow(H, extent=(min(xe), max(xe), min(ye), max(ye)), aspect='auto',
interpolation='nearest', origin='lower', cmap=antigray)
axis([-0.3,1.3,-0.3,1.3])
axis('scaled')
xticks([],[])
yticks([],[])
if i == 3:
xlabel(anames[j])
if j == 0:
ylabel(anames[i])
savefig(opt.prefix + 'codes-%i.png' % pnum)
index_free(I)
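
# Example invocation (the -p/--prefix and -r/--range options are defined by
# the OptionParser above; the index file name is illustrative):
#
#   python index-stats.py -p myindex- -r 20. index-4107.fits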
|
olebole/astrometry.net
|
util/index-stats.py
|
Python
|
bsd-3-clause
| 7,688
|
"""
This module contains deprecation messages and bits of code used elsewhere in the codebase
that are planned to be removed in the next pytest release.
Keeping it in a central location makes it easy to track what is deprecated and should
be removed when the time comes.
"""
from __future__ import absolute_import, division, print_function
class RemovedInPytest4Warning(DeprecationWarning):
"""warning class for features removed in pytest 4.0"""
MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \
'pass a list of arguments instead.'
YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0'
FUNCARG_PREFIX = (
'{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated '
'and scheduled to be removed in pytest 4.0. '
'Please remove the prefix and use the @pytest.fixture decorator instead.')
SETUP_CFG_PYTEST = '[pytest] section in setup.cfg files is deprecated, use [tool:pytest] instead.'
GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue"
RESULT_LOG = '--result-log is deprecated and scheduled for removal in pytest 4.0'
MARK_INFO_ATTRIBUTE = RemovedInPytest4Warning(
"MarkInfo objects are deprecated as they contain the merged marks"
)
MARK_PARAMETERSET_UNPACKING = RemovedInPytest4Warning(
"Applying marks directly to parameters is deprecated,"
" please use pytest.param(..., marks=...) instead.\n"
"For more details, see: https://docs.pytest.org/en/latest/parametrize.html"
)
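
# A minimal sketch (illustrative, not part of this module) of how these
# messages are meant to be used elsewhere in the codebase: the template is
# filled with the offending name and emitted as a warning, e.g.
#
#   import warnings
#   warnings.warn(FUNCARG_PREFIX.format(name='pytest_funcarg__db'),
#                 RemovedInPytest4Warning)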
|
MichaelAquilina/pytest
|
_pytest/deprecated.py
|
Python
|
mit
| 1,519
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# fincorrnet documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 7 23:17:52 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'fincorrnet'
copyright = '2016, Miguel Vaz'
author = 'Miguel Vaz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'fincorrnetdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'fincorrnet.tex', 'fincorrnet Documentation',
'Miguel Vaz', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'fincorrnet', 'fincorrnet Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'fincorrnet', 'fincorrnet Documentation',
author, 'fincorrnet', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
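
# Typical build invocation (standard Sphinx CLI; the output path is
# illustrative):
#
#   sphinx-build -b html . _build/html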
|
mvaz/financial-correlation-networks
|
conf.py
|
Python
|
mit
| 9,219
|
import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
PrivateLinkFactory,
)
from osf.utils import permissions
@pytest.fixture()
def admin():
return AuthUserFactory()
@pytest.fixture()
def base_url():
return '/{}nodes/'.format(API_BASE)
@pytest.fixture()
def read_contrib():
return AuthUserFactory()
@pytest.fixture()
def write_contrib():
return AuthUserFactory()
@pytest.fixture()
def valid_contributors(admin, read_contrib, write_contrib):
return [
admin._id,
read_contrib._id,
write_contrib._id,
]
@pytest.fixture()
def private_node_one(admin, read_contrib, write_contrib):
private_node_one = ProjectFactory(
is_public=False,
creator=admin,
title='Private One')
private_node_one.add_contributor(
read_contrib, permissions=[
permissions.READ], save=True)
private_node_one.add_contributor(
write_contrib,
permissions=[
permissions.READ,
permissions.WRITE],
save=True)
return private_node_one
@pytest.fixture()
def private_node_one_anonymous_link(private_node_one):
private_node_one_anonymous_link = PrivateLinkFactory(anonymous=True)
private_node_one_anonymous_link.nodes.add(private_node_one)
private_node_one_anonymous_link.save()
return private_node_one_anonymous_link
@pytest.fixture()
def private_node_one_private_link(private_node_one):
private_node_one_private_link = PrivateLinkFactory(anonymous=False)
private_node_one_private_link.nodes.add(private_node_one)
private_node_one_private_link.save()
return private_node_one_private_link
@pytest.fixture()
def private_node_one_url(private_node_one):
return '/{}nodes/{}/'.format(API_BASE, private_node_one._id)
@pytest.fixture()
def private_node_two(admin, read_contrib, write_contrib):
private_node_two = ProjectFactory(
is_public=False,
creator=admin,
title='Private Two')
private_node_two.add_contributor(
read_contrib, permissions=[permissions.READ], save=True)
private_node_two.add_contributor(
write_contrib,
permissions=[
permissions.READ,
permissions.WRITE],
save=True)
return private_node_two
@pytest.fixture()
def private_node_two_url(private_node_two):
return '/{}nodes/{}/'.format(API_BASE, private_node_two._id)
@pytest.fixture()
def public_node_one(admin, read_contrib, write_contrib):
public_node_one = ProjectFactory(
is_public=True, creator=admin, title='Public One')
public_node_one.add_contributor(
read_contrib, permissions=[permissions.READ], save=True)
public_node_one.add_contributor(
write_contrib,
permissions=[
permissions.READ,
permissions.WRITE],
save=True)
return public_node_one
@pytest.fixture()
def public_node_one_anonymous_link(public_node_one):
public_node_one_anonymous_link = PrivateLinkFactory(anonymous=True)
public_node_one_anonymous_link.nodes.add(public_node_one)
public_node_one_anonymous_link.save()
return public_node_one_anonymous_link
@pytest.fixture()
def public_node_one_private_link(public_node_one):
public_node_one_private_link = PrivateLinkFactory(anonymous=False)
public_node_one_private_link.nodes.add(public_node_one)
public_node_one_private_link.save()
return public_node_one_private_link
@pytest.fixture()
def public_node_one_url(public_node_one):
return '/{}nodes/{}/'.format(API_BASE, public_node_one._id)
@pytest.fixture()
def public_node_two(admin, read_contrib, write_contrib):
public_node_two = ProjectFactory(
is_public=True, creator=admin, title='Public Two')
public_node_two.add_contributor(
read_contrib, permissions=[permissions.READ], save=True)
public_node_two.add_contributor(
write_contrib,
permissions=[
permissions.READ,
permissions.WRITE],
save=True)
return public_node_two
@pytest.fixture()
def public_node_two_url(public_node_two):
return '/{}nodes/{}/'.format(API_BASE, public_node_two._id)
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.usefixtures(
'admin',
'read_contrib',
'write_contrib',
'valid_contributors',
'private_node_one',
'private_node_one_anonymous_link',
'private_node_one_private_link',
'private_node_one_url',
'private_node_two',
'private_node_two_url',
'public_node_one',
'public_node_one_anonymous_link',
'public_node_one_private_link',
'public_node_one_url',
'public_node_two',
'public_node_two_url')
class TestNodeDetailViewOnlyLinks:
def test_private_node(
self, app, admin, read_contrib, valid_contributors,
private_node_one, private_node_one_url,
private_node_one_private_link,
private_node_one_anonymous_link,
public_node_one_url,
public_node_one_private_link,
public_node_one_anonymous_link):
# test_private_node_with_link_works_when_using_link
res_normal = app.get(private_node_one_url, auth=read_contrib.auth)
assert res_normal.status_code == 200
res_linked = app.get(
private_node_one_url,
{'view_only': private_node_one_private_link.key})
assert res_linked.status_code == 200
assert res_linked.json['data']['attributes']['current_user_permissions'] == [
'read']
# Remove any keys that will be different for view-only responses
res_normal_json = res_normal.json
res_linked_json = res_linked.json
user_can_comment = res_normal_json['data']['attributes'].pop(
'current_user_can_comment')
view_only_can_comment = res_linked_json['data']['attributes'].pop(
'current_user_can_comment')
assert user_can_comment
assert not view_only_can_comment
# test_private_node_with_link_unauthorized_when_not_using_link
res = app.get(private_node_one_url, expect_errors=True)
assert res.status_code == 401
# test_private_node_with_link_anonymous_does_not_expose_contributor_id
res = app.get(private_node_one_url, {
'view_only': private_node_one_anonymous_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
embeds = res.json['data'].get('embeds', None)
assert embeds is None or 'contributors' not in embeds
# test_private_node_with_link_non_anonymous_does_expose_contributor_id
res = app.get(private_node_one_url, {
'view_only': private_node_one_private_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
contributors = res.json['data']['embeds']['contributors']['data']
for contributor in contributors:
assert contributor['id'].split('-')[1] in valid_contributors
# test_private_node_logged_in_with_anonymous_link_does_not_expose_contributor_id
res = app.get(private_node_one_url, {
'view_only': private_node_one_private_link.key,
'embed': 'contributors',
}, auth=admin.auth)
assert res.status_code == 200
contributors = res.json['data']['embeds']['contributors']['data']
for contributor in contributors:
assert contributor['id'].split('-')[1] in valid_contributors
# test_public_node_with_link_anonymous_does_not_expose_user_id
res = app.get(public_node_one_url, {
'view_only': public_node_one_anonymous_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
embeds = res.json['data'].get('embeds', None)
assert embeds is None or 'contributors' not in embeds
# test_public_node_with_link_non_anonymous_does_expose_contributor_id
res = app.get(public_node_one_url, {
'view_only': public_node_one_private_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
contributors = res.json['data']['embeds']['contributors']['data']
for contributor in contributors:
assert contributor['id'].split('-')[1] in valid_contributors
# test_public_node_with_link_unused_does_expose_contributor_id
res = app.get(public_node_one_url, {
'embed': 'contributors',
})
assert res.status_code == 200
contributors = res.json['data']['embeds']['contributors']['data']
for contributor in contributors:
assert contributor['id'].split('-')[1] in valid_contributors
# test_view_only_link_does_not_grant_write_permission
payload = {
'data': {
'attributes': {
'title': 'Cannot touch this'},
'id': private_node_one._id,
'type': 'nodes',
}
}
res = app.patch_json_api(private_node_one_url, payload, {
'view_only': private_node_one_private_link.key,
}, expect_errors=True)
assert res.status_code == 401
        # test_view_only_link_from_another_project_does_not_grant_view_permission
res = app.get(private_node_one_url, {
'view_only': public_node_one_private_link.key,
}, expect_errors=True)
assert res.status_code == 401
# test_private_project_logs_with_anonymous_link_does_not_expose_user_id
res = app.get(private_node_one_url + 'logs/', {
'view_only': str(private_node_one_anonymous_link.key),
})
assert res.status_code == 200
body = res.body
for id in valid_contributors:
assert id not in body
# test_private_project_with_anonymous_link_does_not_expose_registrations_or_forks
res = app.get(private_node_one_url, {
'view_only': private_node_one_anonymous_link.key,
})
assert res.status_code == 200
attributes = res.json['data']['attributes']
relationships = res.json['data']['relationships']
if 'embeds' in res.json['data']:
embeds = res.json['data']['embeds']
else:
embeds = {}
assert 'current_user_can_comment' not in attributes
assert 'citation' not in relationships
assert 'custom_citation' not in attributes
assert 'node_license' not in attributes
assert 'registrations' not in relationships
assert 'forks' not in relationships
assert 'registrations' not in embeds
assert 'forks' not in embeds
# test_deleted_anonymous_VOL_gives_401_for_unauthorized
private_node_one_anonymous_link.is_deleted = True
private_node_one_anonymous_link.save()
res = app.get(private_node_one_url, {
'view_only': private_node_one_anonymous_link.key,
}, expect_errors=True)
assert res.status_code == 401
# test_deleted_anonymous_VOL_does_not_anonymize_data_for_authorized
res = app.get(private_node_one_url, {
'view_only': private_node_one_anonymous_link.key,
}, auth=admin.auth)
assert res.status_code == 200
assert 'anonymous' not in res.json['meta']
attributes = res.json['data']['attributes']
relationships = res.json['data']['relationships']
assert 'current_user_can_comment' in attributes
assert 'citation' in relationships
assert 'custom_citation' in attributes
assert 'node_license' in attributes
assert 'forks' in relationships
# test_bad_view_only_link_does_not_modify_permissions
res = app.get(private_node_one_url + 'logs/', {
'view_only': 'thisisnotarealprivatekey',
}, expect_errors=True)
assert res.status_code == 401
res = app.get(private_node_one_url + 'logs/', {
'view_only': 'thisisnotarealprivatekey',
}, auth=admin.auth)
assert res.status_code == 200
# test_view_only_key_in_relationships_links
res = app.get(
private_node_one_url,
{'view_only': private_node_one_private_link.key})
assert res.status_code == 200
res_relationships = res.json['data']['relationships']
for key, value in res_relationships.items():
if isinstance(value, list):
for relationship in value:
links = relationship.get('links', {})
if links.get('related', False):
assert private_node_one_private_link.key in links['related']['href']
if links.get('self', False):
assert private_node_one_private_link.key in links['self']['href']
else:
links = value.get('links', {})
if links.get('related', False):
assert private_node_one_private_link.key in links['related']['href']
if links.get('self', False):
assert private_node_one_private_link.key in links['self']['href']
# test_view_only_key_in_self_and_html_links
res = app.get(
private_node_one_url,
{'view_only': private_node_one_private_link.key})
assert res.status_code == 200
links = res.json['data']['links']
assert private_node_one_private_link.key in links['self']
assert private_node_one_private_link.key in links['html']
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.usefixtures(
'admin',
'read_contrib',
'write_contrib',
'valid_contributors',
'private_node_one',
'private_node_one_anonymous_link',
'private_node_one_private_link',
'private_node_one_url',
'private_node_two',
'private_node_two_url',
'public_node_one',
'public_node_one_anonymous_link',
'public_node_one_private_link',
'public_node_one_url',
'public_node_two',
'public_node_two_url')
class TestNodeListViewOnlyLinks:
def test_node_list_view_only_links(
self, app, valid_contributors,
private_node_one,
private_node_one_private_link,
private_node_one_anonymous_link,
base_url):
# test_private_link_does_not_show_node_in_list
res = app.get(base_url, {
'view_only': private_node_one_private_link.key,
})
assert res.status_code == 200
nodes = res.json['data']
node_ids = []
for node in nodes:
node_ids.append(node['id'])
assert private_node_one._id not in node_ids
# test_anonymous_link_does_not_show_contributor_id_in_node_list
res = app.get(base_url, {
'view_only': private_node_one_anonymous_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
nodes = res.json['data']
assertions = 0
for node in nodes:
embeds = node.get('embeds', None)
assert embeds is None or 'contributors' not in embeds
assertions += 1
assert assertions != 0
# test_non_anonymous_link_does_show_contributor_id_in_node_list
res = app.get(base_url, {
'view_only': private_node_one_private_link.key,
'embed': 'contributors',
})
assert res.status_code == 200
nodes = res.json['data']
assertions = 0
for node in nodes:
contributors = node['embeds']['contributors']['data']
for contributor in contributors:
assertions += 1
assert contributor['id'].split('-')[1] in valid_contributors
assert assertions != 0
|
pattisdr/osf.io
|
api_tests/nodes/views/test_view_only_query_parameter.py
|
Python
|
apache-2.0
| 15,854
|
"""
Basic operations for FITS files.
:requires: PyFITS
:requires: NumPy
:author: Sami-Matias Niemi
:version: 0.2
"""
import pyfits as pf
import numpy as np
def getWavelengths(filename, length, ext=0):
"""
Returns ndarray of wavelengths. This information is
derived using the FITS header.
:param filename: name of the input file
:type filename: string
:param length: how many wavelength values
:type length: int
:param ext: FITS extension number
:type ext: int
:return: wavelengths
:rtype: ndarray
"""
hdr = pf.open(filename)[ext].header
crval = hdr['CRVAL1']
crpix = hdr['CRPIX1']
delta = hdr['CD1_1']
if 'LIN' in hdr['CTYPE1']:
if crpix < 0:
xps = np.arange(0, length - crpix + 1) * delta + crval
xps = xps[-crpix + 1:]
elif crpix > 0:
xps = np.arange(crpix - 1, length + crpix - 1)*delta + crval
else:
xps = np.arange(0, length) * delta + crval
else:
raise NotImplementedError('Does not support LOG spacing yet...')
return xps
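# Example usage (hypothetical file and values; assumes the header describes a
# linear wavelength solution, i.e. CTYPE1 contains 'LIN'):
# waves = getWavelengths('spectrum.fits', length=1024, ext=0)
# print waves[:5]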
|
sniemi/SamPy
|
fits/basics.py
|
Python
|
bsd-2-clause
| 1,093
|
# coding=utf8
#
# Insight Minr Active Spider Client Node
# Copyright BestMiner Inc.
#
'''
Fetch task data from the controller node
'''
import os
import json
import time
import socket
import urllib
from twisted.internet.defer import inlineCallbacks, returnValue
from observer.utils.http import request, TimedAgentPool, InfiniteLoginError
from observer.utils import wait
from observer.platform.sina.weibo.users.utils import getAgent
from observer.node.client import ClientServiceBase
from observer import log
class NodeService(ClientServiceBase):
''' '''
servicename = 'observer.sina.users.user_spider'
def __init__(self, *args, **kwargs):
''' '''
ClientServiceBase.__init__(self, *args, **kwargs)
cfg = kwargs['cfg']
self.name = socket.gethostname() + cfg.prefix # node name
self.proxy = cfg.http_proxy # not used
self.userAgent = cfg.http_agent
self.max_agent = cfg.max_agent
self.agentPool = TimedAgentPool()
self.token = self.get_token()
self.last_clear = 0
self.ready = True
def get_token(self):
        ''' Get an available token '''
url = 'http://insight.bestminr.com/get_token'
return json.loads(urllib.urlopen(url).read()).get('access_token')
def addAgent(self, seq):
        ''' Add a new agent to the agentPool '''
agent = getAgent(self.proxy, self.userAgent)
agent.remove = False
agent.seq = seq
self.agentPool.initAgent(agent)
self.searchLoop(agent)
@inlineCallbacks
def startService(self):
''' start the fetch service '''
os.environ['TZ'] = 'PRC'
time.tzset()
yield ClientServiceBase.startService(self)
self.fillAgents()
@inlineCallbacks
def fillAgents(self):
''' '''
while 1:
seq = 0
while len(self.agentPool.agents) < self.max_agent:
seq += 1
self.addAgent(seq)
yield wait(1.)
@inlineCallbacks
def searchLoop(self, agent):
''' '''
needbreak = False
while 1:
if agent.remove:
self.agentPool.removeAgent(agent)
break
reqid, uid = yield self.callController('nextRequest', 'user')
log.info('Got uid %s from server' % uid)
try:
result = yield self.search(agent, uid, self.token)
log.debug('Got data %s' % repr(result))
except InfiniteLoginError:
log.exception()
yield self.callController("fail", uid=uid)
result = None
needbreak = True
            except Exception:
log.exception()
result = None
self.callController('sendResult', reqid, uid, result)
if needbreak:
break
@inlineCallbacks
def getContent(self, agent, uid, token):
''' '''
result = []
try:
url = 'https://api.weibo.com/2/statuses/user_timeline.json?uid=%s&access_token=%s' % (uid, token)
log.debug('Getting data with url: %s' % url)
result = yield request(agent, url)
        except Exception:
            # network errors are swallowed here; the empty result is returned
            pass
returnValue(result)
@inlineCallbacks
def search(self, agent, uid, token):
''' '''
result = None
try:
data = yield self.getContent(agent, uid, token)
result = json.loads(data).get('statuses', [])
except Exception as msg:
log.debug("Got Something Wrong with uid: %s, Error: %s" % (uid, repr(msg)))
returnValue(None)
returnValue(result)
|
seraphlnWu/creditor
|
observer/platform/taobao/user_client.py
|
Python
|
apache-2.0
| 3,700
|
from pyramid.config import Configurator
import datetime
from pyramid.renderers import JSON
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
config = Configurator(settings=settings)
json_renderer = JSON()
def datetime_adapter(obj, request):
return obj.isoformat()
json_renderer.add_adapter(datetime.datetime, datetime_adapter)
config.add_renderer('json', json_renderer)
config.add_route('generate_key', '/', request_method='GET')
config.add_route('encrypt_file', '/encrypt_file', request_method='POST')
config.add_route('decrypt_file', '/decrypt_file', request_method='POST')
config.scan()
return config.make_wsgi_app()
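# With the datetime adapter registered above, views can return datetime values
# directly and they are rendered as ISO 8601 strings, e.g. (hypothetical view):
# @view_config(route_name='generate_key', renderer='json')
# def generate_key(request):
#     return {'created': datetime.datetime.utcnow()}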
|
Leits/openprocurement.api.encryprion
|
openprocurement/api/encryprion/__init__.py
|
Python
|
apache-2.0
| 726
|
#!/usr/bin/env python3
import re
from collections import namedtuple
from contextlib import contextmanager
WHITESPACE_RE = re.compile(r'[ \r\n\v\t]+')
IDENTIFIER_RE = re.compile(r'[a-zA-Z_][a-zA-Z0-9_]*')
NUMBER_RE = re.compile(r'[0-9]+|[0-9]+\.[0-9]*|[0-9]*\.[0-9]+')
STRING_RE = re.compile(r'"([^"\\]|\\.)*"|\'([^\'\\]|\\.)*\'')
ENDLINE_RE = re.compile(r'(;|\r?\n)')
OPERATOR_RE = re.compile(r'(==|<-|<=|>=|!=|n?eq|and|or|[!+\-*/<>=])')
NONE_RE = re.compile(r'\x00')
ANY_RE = re.compile(r'[^\x00]*')
COMMENT_RE = re.compile(r'#.*')
NEG_OPERATORS = ('-',)
PLUS_OPERATORS = ('+',)
NOT_OPERATORS = ('!', 'not')
UNARY_OPERATORS = NEG_OPERATORS + PLUS_OPERATORS + NOT_OPERATORS
ADD_OPERATORS = ('+',)
SUB_OPERATORS = ('-',)
MUL_OPERATORS = ('*',)
DIV_OPERATORS = ('/',)
EQ_OPERATORS = ('=', '==', 'eq', 'equals')
NEQ_OPERATORS = ('!=', 'neq')
LT_OPERATORS = ('<', 'lt')
GT_OPERATORS = ('>', 'gt')
LE_OPERATORS = ('<=', 'le')
GE_OPERATORS = ('>=', 'ge')
BINARY_AND_OPERATORS = ('&',)
BINARY_OR_OPERATORS = ('|',)
BINARY_XOR_OPERATORS = ('^',)
AND_OPERATORS = ('&&', 'and')
OR_OPERATORS = ('||', 'or')
ASSIGN_OPERATORS = (':=', '=', '<-')
KEYWORDS = "BEGIN", "END", "FOR", "TO", "WHILE", "THEN", "MODULE", "PROGRAM", "IF", "ELSE", "DO", "NEXT", "REPEAT", "OUTPUT", "INPUT", "PRINT", "BREAK", "CONTINUE", "RETURN", "RUN", "IS", "NOT", "INTEGER", "FLOAT", "REAL", "STRING", "INT", "NUMBER", "PARAM"
class ParseError(Exception):
def __init__(self, ctx, msg):
msg = ctx.get_context()[0] + msg
super().__init__(msg)
class ParseExpected(ParseError):
def __init__(self, ctx, expected, got=None):
msg = "Expected {}".format(expected)
if isinstance(got, Token):
got = "{} '{}'".format(got.type, got.value)
if got is not None:
msg += ", got {}".format(got)
super().__init__(ctx, msg)
class PseudoRuntimeError(Exception):
def __init__(self, ctx, msg):
if ctx:
msg = ctx + msg
super().__init__(msg)
class PseudoTypeError(PseudoRuntimeError):
pass
class PseudoNameError(PseudoRuntimeError):
pass
class PseudoFlowControl(Exception):
def __init__(self, ctx):
super().__init__(ctx)
self.context = ctx
class PseudoBreak(PseudoFlowControl):
pass
class PseudoContinue(PseudoFlowControl):
pass
class PseudoReturn(PseudoFlowControl):
def __init__(self, ctx, ret):
super().__init__(ctx)
self.value = ret
Token = namedtuple('Token', "type, value")
def keyword_eq(token1, token2):
if not isinstance(token1, Token) or not isinstance(token2, Token):
return False
if not (token1.type == token2.type == 'keyword'):
return token1 == token2
return token1.value.upper() == token2.value.upper()
class Tokeniser:
def __init__(self, name):
self.name = name
self.reset()
def reset(self):
self.lines = []
self.row = 1
self.col = 1
self.level = 1
self.tokeniser = iter(self)
self._peek = None
self._peek_token = None
self._peek_token_row_col = None
self._ready_ctx = []
@contextmanager
def nest(self):
self.level += 1
yield
self.level -= 1
@contextmanager
def ready_context(self, ctx=None):
if not ctx:
ctx = self.raw_context()
idx = len(self._ready_ctx)
self._ready_ctx.append(ctx)
yield ctx
while len(self._ready_ctx) > idx:
self._ready_ctx.pop()
def raw_context(self):
if self._peek_token_row_col is not None:
return self._peek_token_row_col
return self.row, self.col
def get_context(self):
ctx = ""
if len(self._ready_ctx) > 0:
row, col = self._ready_ctx.pop()
else:
row, col = self.raw_context()
if row > len(self.lines):
row = row-1
col = len(self.lines[row-1])+1
        if row < 1:
            return "File {}: \n".format(self.name), (row, col)
        elif col < 1:
            return "File {}, line {}: \n".format(self.name, row), (row, col)
ctx += "File {}, line {}, column {}: \n".format(self.name, row, col)
ctx += self.lines[row-1] + "\n"
ctx += "{}^\n".format(' ' * (col-1))
return ctx, (row, col)
def peek(self):
if self._peek is not None:
return self._peek
self._peek = self._get_char()
return self._peek
def char(self):
if self._peek is not None:
res = self._peek
self._peek = None
return res
else:
return self._get_char()
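    # consume() accumulates characters while ``while_re`` still fully matches
    # the growing string, stopping early once ``until_re`` matches; the first
    # non-matching character is pushed back for the next read.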
def consume(self, until_re=None, while_re=None):
#print("Consuming...")
res = self.char()
until_cond = lambda x: False if not until_re else until_re.match(x)
while_cond = lambda x: True if not while_re else while_re.fullmatch(x)
nchar = None
while while_cond(res):
if until_cond(res):
break
nchar = self.char()
if not nchar:
return res
res += nchar
#print("Consumption complete: '{}'".format(res))
if nchar:
self._peek = nchar
return res[:-1]
def _parse_string_escapes(self, string):
i = 0
res = ""
escapes = { 'r': '\r', 'n': '\n', "'": "'", '"': '"', '\\': '\\' }
try:
while i < len(string):
if string[i] == '\\' and (i+1) < len(string):
i += 1
c = string[i]
if c in escapes:
res += escapes[c]
elif c == 'x' and (i+2) < len(string):
res += chr(int(string[i+1 : i+3], 16))
i += 2
elif c == 'u' and (i+4) < len(string):
res += chr(int(string[i+1 : i+5], 16))
i += 4
else:
res += '\\' + c
else:
res += string[i]
i += 1
return res
        except ValueError as e:
            raise ParseError(self, 'Invalid character escape') from e
def __iter__(self):
c = self.peek()
while c:
if COMMENT_RE.match(c):
self.consume(while_re=COMMENT_RE)
elif ENDLINE_RE.match(c):
self.char()
#self.consume(while_re=ENDLINE_RE)
yield Token('eol', '')
elif WHITESPACE_RE.match(c):
self.consume(while_re=WHITESPACE_RE)
elif c in ('"', "'"):
string = self._parse_string_escapes(self.consume(until_re=STRING_RE)[1:])
self.char() # consume end of string
yield Token('string', string)
elif NUMBER_RE.match(c):
num = self.consume(while_re=NUMBER_RE)
yield Token('number', float(num))
elif OPERATOR_RE.match(c):
yield Token('operator', self.consume(while_re=OPERATOR_RE))
elif IDENTIFIER_RE.match(c):
ident = self.consume(while_re=IDENTIFIER_RE)
if ident.upper() in KEYWORDS:
yield Token('keyword', ident.upper())
else:
yield Token('identifier', ident)
else:
yield Token('symbol', self.char())
c = self.peek()
yield Token('eol', '')
def peek_token(self):
try:
if self._peek_token is not None:
return self._peek_token
self._peek_token_row_col = self.raw_context()
self._peek_token = next(self.tokeniser)
#print("new token: {}, '{}'".format(self._peek_token.type, self._peek_token.value))
return self._peek_token
except StopIteration as e:
raise EOFError from e
def token(self):
try:
if self._peek_token is not None:
token = self._peek_token
self._peek_token = self._peek_token_row_col = None
return token
else:
res = next(self.tokeniser)
#print("new token: {}, '{}'".format(res.type, res.value))
return res
except StopIteration as e:
raise EOFError from e
class FileTokeniser(Tokeniser):
def __init__(self, fp, filename='<stream>'):
super().__init__(filename)
self.fp = fp
self.lines = re.compile(r'\r?\n').split(fp.read())
def _get_char(self):
if self.row > len(self.lines):
return None
line = self.lines[self.row-1]
if self.col > len(line):
self.row += 1
self.col = 1
return '\n'
c = line[self.col-1]
self.col += 1
return c
class REPLTokeniser(Tokeniser):
def __init__(self):
super().__init__("<repl>")
def _get_char(self):
while self.row > len(self.lines):
prompt = ">>> "
if self.level >= 2:
prompt = "{}... ".format("...." * (self.level - 2))
line = input(prompt)
self.lines.append(line)
line = self.lines[self.row-1]
if self.col > len(line):
self.row += 1
self.col = 1
return '\n'
c = line[self.col-1]
self.col += 1
return c
|
bell345/pseudo-interpreter
|
pseudo/token.py
|
Python
|
mit
| 9,534
|
import threading
from sqlalchemy import Column, UnicodeText, Integer
from tg_bot.modules.sql import BASE, SESSION
class RSS(BASE):
__tablename__ = "rss_feed"
id = Column(Integer, primary_key=True)
chat_id = Column(UnicodeText, nullable=False)
feed_link = Column(UnicodeText)
old_entry_link = Column(UnicodeText)
def __init__(self, chat_id, feed_link, old_entry_link):
self.chat_id = chat_id
self.feed_link = feed_link
self.old_entry_link = old_entry_link
def __repr__(self):
return "<RSS for chatID {} at feed_link {} with old_entry_link {}>".format(self.chat_id,
self.feed_link,
self.old_entry_link)
RSS.__table__.create(checkfirst=True)
INSERTION_LOCK = threading.RLock()
def check_url_availability(tg_chat_id, tg_feed_link):
try:
return SESSION.query(RSS).filter(RSS.feed_link == tg_feed_link,
RSS.chat_id == tg_chat_id).all()
finally:
SESSION.close()
def add_url(tg_chat_id, tg_feed_link, tg_old_entry_link):
with INSERTION_LOCK:
action = RSS(tg_chat_id, tg_feed_link, tg_old_entry_link)
SESSION.add(action)
SESSION.commit()
def remove_url(tg_chat_id, tg_feed_link):
with INSERTION_LOCK:
        # loop to delete any possible duplicates for the same chat ID and feed link
for row in check_url_availability(tg_chat_id, tg_feed_link):
# add the action to the DB query
SESSION.delete(row)
SESSION.commit()
def get_urls(tg_chat_id):
try:
return SESSION.query(RSS).filter(RSS.chat_id == tg_chat_id).all()
finally:
SESSION.close()
def get_all():
try:
return SESSION.query(RSS).all()
finally:
SESSION.close()
def update_url(row_id, new_entry_links):
with INSERTION_LOCK:
row = SESSION.query(RSS).get(row_id)
# set the new old_entry_link with the latest update from the RSS Feed
row.old_entry_link = new_entry_links[0]
# commit the changes to the DB
SESSION.commit()
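# Example usage (hypothetical chat ID and feed URLs):
# add_url('-100123', 'https://example.com/feed.xml', 'https://example.com/post/1')
# for feed in get_urls('-100123'):
#     print(feed.feed_link)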
|
PaulSonOfLars/tgbot
|
tg_bot/modules/sql/rss_sql.py
|
Python
|
gpl-3.0
| 2,258
|
'''
Atomix project, oros.py, (TODO: summary)
Copyright (c) 2015 Stanford University
Released under the Apache License v2.0. See the LICENSE file for details.
Author(s): Manu Bansal
'''
from java.lang import *
from java.util import *
from com.ti.debug.engine.scripting import *
from com.ti.ccstudio.scripting.environment import *
#import argparse
import optparse
def pause(s):
    print s, 'Press any key to continue...'
raw_input()
def create_script():
# Create our scripting environment object - which is the main entry point into any script and
# the factory for creating other Scriptable Servers and Sessions
script = ScriptingEnvironment.instance()
# Create a log file in the current directory to log script execution
script.traceBegin("dss_py.xml", "DefaultStylesheet.xsl")
# Set our TimeOut
#script.setScriptTimeout(15000)
# Log everything
script.traceSetConsoleLevel(TraceLevel.ALL)
script.traceSetFileLevel(TraceLevel.ALL)
return script
#def startup(script):
# # Get the Debug Server and start a Debug Session
# debugServer = script.getServer("DebugServer.1")
# debugServer.setConfig("tisim_c64xple.ccxml");
#
# debugSession = debugServer.openSession()
#
# # Load program
# debugSession.memory.loadProgram("modem.out")
#
# # Set a breakpoint at "main"
# main = debugSession.symbol.getAddress("main")
# bp1 = debugSession.breakpoint.add(main)
#
# # Set another breakpoint
# address = debugSession.symbol.getAddress("ReadNextData")
# bp2 = debugSession.breakpoint.add(address)
#
# # Restart our Target
# debugSession.target.restart()
#
# # Run if already not automatically halted at main. Should halt at first BP
# if debugSession.expression.evaluate("PC") != main:
# debugSession.target.run()
#
# # Using an expression - get the current value of the PC
# nPC = debugSession.expression.evaluate("PC")
#
# # Verify we halted at the correct address. Use hex() to convert the
# # result to a hex string when logging messages
# if nPC == main:
# script.traceWrite("SUCCESS: Halted at correct location")
# else:
# script.traceWrite("FAIL: Expected halt at " + hex(address) + ", actually halted at " + hex(nPC))
# script.traceSetConsoleLevel(TraceLevel.INFO)
# script.traceWrite("TEST FAILED!")
# script.traceEnd()
# System.exit(1);
#
# # Run again. Should halt at our breakpoint
# debugSession.target.run()
#
# # Using an expression - get the current value of the PC
# nPC = debugSession.expression.evaluate("PC")
#
# # Verify we halted at the correct address.
# if nPC == address:
# script.traceWrite("SUCCESS: Halted at correct location")
# else:
# script.traceWrite("FAIL: Expected halt at " + hex(address) + ", actually halted at " + hex(nPC))
# script.traceSetConsoleLevel(TraceLevel.INFO)
# script.traceWrite("TEST FAILED!")
# script.traceEnd()
# System.exit(1);
#
# # All done
# debugSession.terminate()
# debugServer.stop()
def startup_debugServer(script, options):
# Start up CCS
ccsServer = None
ccsSession = None
if ((not options.cleanup) and options.gui):
#pause("Press enter to launch CCS...")
ccsServer = script.getServer("CCSServer.1")
ccsSession = ccsServer.openSession(".*")
# Start up the debugger with the C64 simulator; it will start up the session in CCS
debugServer = script.getServer("DebugServer.1")
#debugServer.setConfig("../C64/tisim_c64xple.ccxml")
#debugServer.setConfig("../targetconfs/evmbe6670_28K.ccxml")
    debugServer.setConfig(options.tcf)  # e.g. "../targetconfs/evmbe6670_28K.ccxml"
return (ccsServer, ccsSession, debugServer)
def startup_debugSessions(debugServer, SESSION_IDS):
#debugSession = debugServer.openSession(".*")
#pause("Press enter to open debug sessions...")
pause("Press enter to open debug session 1")
debugSession0 = debugServer.openSession("Blackhawk XDS560v2-USB System Trace Emulator_0/C66xx_00")
##pause("Press enter to open debug session 2")
##debugSession1 = debugServer.openSession("Blackhawk XDS560v2-USB System Trace Emulator_0/C66xx_1")
##pause("Press enter to open debug session 3")
##debugSession2 = debugServer.openSession("Blackhawk XDS560v2-USB System Trace Emulator_0/C66xx_2")
##pause("Press enter to open debug session 4")
##debugSession3 = debugServer.openSession("Blackhawk XDS560v2-USB System Trace Emulator_0/C66xx_3")
debugSessions = []
##for i, session_id in enumerate(SESSION_IDS):
## pause("Press enter to open debug session " + str(i))
## print "Opening", session_id
## debugSession = debugServer.openSession(session_id)
## debugSessions.append(debugSession)
return debugSessions
def connectAndRun():
pause("Press enter to connect to target...")
debugSession0.target.connect()
debugSession1.target.connect()
debugSession2.target.connect()
debugSession3.target.connect()
# Load a program
#var isaName = "../C64"
##script.setScriptTimeout(15000);
#debugSession.memory.loadProgram(isaName + "/modem/Debug/modem.out")
#debugSession0.memory.loadProgram("../build/app/wifirx54mc/app_wifirx54mc.out")
#debugSession1.memory.loadProgram("../build/app/wifirx54mc/app_wifirx54mc.out")
#debugSession2.memory.loadProgram("../build/app/wifirx54mc/app_wifirx54mc.out")
#debugSession3.memory.loadProgram("../build/app/wifirx54mc/app_wifirx54mc.out")
print("++++++ loading " + options.prog)
script.traceWrite("++++++ loading " + options.prog)
pause("Press enter to load program...")
debugSession0.memory.loadProgram(options.prog);
debugSession1.memory.loadProgram(options.prog);
debugSession2.memory.loadProgram(options.prog);
debugSession3.memory.loadProgram(options.prog);
pause("Press enter to run program...")
debugSession0.target.runAsynch()
debugSession1.target.runAsynch()
debugSession2.target.runAsynch()
debugSession3.target.runAsynch()
def cleanup(debugServer, ccsServer, ccsSession, script, options):
    cleanup_debug(debugServer)
    cleanup_ccs(ccsServer, ccsSession, options)
    cleanup_script(script)
def cleanup_debug(debugServer):
pause("Press enter to terminate sessions...")
# terminate the debugger
##debugSession3.terminate()
##debugSession2.terminate()
##debugSession1.terminate()
##debugSession0.terminate()
debugServer.stop()
def cleanup_ccs(ccsServer, ccsSession, options):
# Terminate CCS
if ((not options.cleanup) and options.gui):
pause("Press enter to terminate CCS...")
ccsSession.terminate()
ccsServer.stop()
def cleanup_script(script):
# stop the Logging
script.traceSetConsoleLevel(TraceLevel.INFO)
script.traceWrite("TEST SUCCEEDED!")
script.traceEnd()
def main():
# my code here
#parser = argparse.ArgumentParser(description='Process some integers.')
#parser.add_argument("-c", "--tcf",
# help="target configuration file", action="store")
#parser.add_argument("-p", "--prog",
# help="program to load", action="store")
#parser.add_argument("-g", "--gui",
# help="attach and launch CCS GUI", action="store_true")
#parser.add_argument("-C", "--cleanup",
# help="cleanup debug sessions", action="store_true")
#options = parser.parse_args()
#print options
parser = optparse.OptionParser()
parser.add_option("-c", "--tcf",
help="target configuration file", action="store")
parser.add_option("-p", "--prog",
help="program to load", action="store")
parser.add_option("-g", "--gui", default=False,
help="attach and launch CCS GUI", action="store_true")
parser.add_option("-C", "--cleanup", default=False,
help="cleanup debug sessions", action="store_true")
(options, args) = parser.parse_args()
print options
print args
SESSION_PREFIX = "Blackhawk XDS560v2-USB System Trace Emulator_0/C66xx_0"
#SESSION_PREFIX = "Blackhawk XDS560v2-USB .*"
SESSION_IDS = [SESSION_PREFIX + str(i) for i in range(4)]
print SESSION_IDS
script = create_script()
(ccsServer, ccsSession, debugServer) = startup_debugServer(script, options)
#cleanup_debug(debugServer)
#cleanup_script(script)
#return
debugSessions = startup_debugSessions(debugServer, SESSION_IDS)
cleanup_debug(debugServer)
#cleanup_ccs(ccsServer, ccsSession, options)
cleanup_script(script)
return
    if (options.cleanup):
        pass
    else:
        connectAndRun()
    cleanup(debugServer, ccsServer, ccsSession, script, options)
if __name__ == "__main__":
main()
|
manub686/atomix
|
tirunutils/oros.py
|
Python
|
apache-2.0
| 8,347
|
"""
radish
~~~~~~
The root from red to green. BDD tooling for Python.
:copyright: (c) 2019 by Timo Furrer <tuxtimo@gmail.com>
:license: MIT, see LICENSE for more details.
"""
from collections import namedtuple
from pathlib import Path
import click
import colorful as cf
import radish.loader as loader
from radish.__main__ import expand_basedirs
from radish.config import Config
from radish.logger import enable_radish_debug_mode_click_hook, logger
from radish.stepregistry import registry as step_registry
from radish.step_testing.matcher import run_matcher_tests
#: Holds a type for the coverage configuration
CoverageConfig = namedtuple(
"CoverageConfig", ["show_missing", "show_missing_templates"]
)
def expand_matcher_configs(ctx, param, matcher_configs):
"""Expand the given matcher configuration files
Expanding directories recursively for YAML files.
"""
expanded_matcher_configs = []
for matcher_config_file_location in (Path(f) for f in matcher_configs):
if matcher_config_file_location.is_dir():
expanded_matcher_configs.extend(
list(matcher_config_file_location.glob("**/*.yml"))
)
else:
expanded_matcher_configs.append(matcher_config_file_location)
return expanded_matcher_configs
@click.command()
@click.version_option()
@click.help_option("--help", "-h")
@click.option(
"--debug",
"-d",
"enable_debug_mode",
is_flag=True,
help="Enable debug mode for radish-test itself",
callback=enable_radish_debug_mode_click_hook,
)
@click.option(
"--basedir",
"-b",
"basedirs",
multiple=True,
default=("radish",),
type=str,
callback=expand_basedirs,
help=(
"Specify the location of the Step Implementations. "
"One '-b' can contain multiple locations, split by a colon (:) [UNIX] "
"or semicolon (;) [Windows]"
),
)
@click.option(
"--no-ansi",
"no_ansi",
is_flag=True,
help="Turn off all ANSI sequences (colors, line rewrites).",
)
@click.option(
"--show-missing",
"-m",
"show_missing",
is_flag=True,
help="Show all Step Implementations which are not yet covered by a test",
)
@click.option(
"--show-missing-templates",
"show_missing_templates",
is_flag=True,
help="Print templates for all missing Step Implementations. Implies --show-missing.",
)
@click.argument(
"matcher-configs",
nargs=-1,
type=click.Path(exists=True),
callback=expand_matcher_configs,
)
def cli(**kwargs):
"""radish - The root from red to green. BDD tooling for Python.
radish-test can be used to perform tests for the Steps
implemented in a radish base directory.
Use the `MATCHER_CONFIGS` to pass configuration
files containing the Step matchers.
"""
config = Config(kwargs)
    # turn off ANSI colors if requested
if config.no_ansi:
cf.disable()
logger.debug("Basedirs: %s", ", ".join(str(d) for d in config.basedirs))
logger.debug("Loading all modules from the basedirs")
loaded_modules = loader.load_modules(config.basedirs)
logger.debug(
"Loaded %d modules from the basedirs: %s",
len(loaded_modules),
", ".join(str(m) for m in loaded_modules),
)
logger.debug(
"Matcher configs: %s", ", ".join(str(m) for m in config.matcher_configs)
)
coverage_config = CoverageConfig(config.show_missing, config.show_missing_templates)
run_matcher_tests(config.matcher_configs, coverage_config, step_registry)
if __name__ == "__main__":
cli()
|
radish-bdd/radish
|
src/radish/step_testing/__main__.py
|
Python
|
mit
| 3,589
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from torch import nn
class ModuleProxyWrapper(nn.Module):
"""
Wrap a DistributedDataParallel module and forward requests for missing
attributes to the module wrapped by DDP (the twice-wrapped module).
Also forward calls to :func:`state_dict` and :func:`load_state_dict`.
Usage::
module.xyz = "hello world"
wrapped_module = DistributedDataParallel(module, **ddp_args)
wrapped_module = ModuleProxyWrapper(wrapped_module)
assert wrapped_module.xyz == "hello world"
assert wrapped_module.state_dict().keys() == module.state_dict().keys()
Args:
module (nn.Module): module to wrap
"""
def __init__(self, module: nn.Module):
super().__init__()
assert hasattr(
module, "module"
), "ModuleProxyWrapper expects input to wrap another module"
self.module = module
def __getattr__(self, name):
"""Forward missing attributes to twice-wrapped module."""
try:
# defer to nn.Module's logic
return super().__getattr__(name)
except AttributeError:
try:
# forward to the once-wrapped module
return getattr(self.module, name)
except AttributeError:
# forward to the twice-wrapped module
return getattr(self.module.module, name)
def state_dict(self, *args, **kwargs):
"""Forward to the twice-wrapped module."""
return self.module.module.state_dict(*args, **kwargs)
def load_state_dict(self, *args, **kwargs):
"""Forward to the twice-wrapped module."""
return self.module.module.load_state_dict(*args, **kwargs)
def forward(self, *args, **kwargs):
return self.module(*args, **kwargs)
|
pytorch/fairseq
|
fairseq/distributed/module_proxy_wrapper.py
|
Python
|
mit
| 1,965
|
try:
import smbus
except:
print 'Try sudo apt-get install python-smbus'
from time import sleep
# Models
MODEL_02BA = 0
MODEL_30BA = 1
# Oversampling options
OSR_256 = 0
OSR_512 = 1
OSR_1024 = 2
OSR_2048 = 3
OSR_4096 = 4
OSR_8192 = 5
# kg/m^3 convenience
DENSITY_FRESHWATER = 997
DENSITY_SALTWATER = 1029
# Conversion factors (from native unit, mbar)
UNITS_Pa = 100.0
UNITS_hPa = 1.0
UNITS_kPa = 0.1
UNITS_mbar = 1.0
UNITS_bar = 0.001
UNITS_atm = 0.000986923
UNITS_Torr = 0.750062
UNITS_psi = 0.014503773773022
# Valid units
UNITS_Centigrade = 1
UNITS_Fahrenheit = 2
UNITS_Kelvin = 3
class MS5837(object):
# Registers
_MS5837_ADDR = 0x76
_MS5837_RESET = 0x1E
_MS5837_ADC_READ = 0x00
_MS5837_PROM_READ = 0xA0
_MS5837_CONVERT_D1_256 = 0x40
_MS5837_CONVERT_D2_256 = 0x50
def __init__(self, model=MODEL_30BA, bus=1):
self._model = model
try:
self._bus = smbus.SMBus(bus)
except:
print("Bus %d is not available.") % bus
print("Available busses are listed as /dev/i2c*")
self._bus = None
self._fluidDensity = DENSITY_FRESHWATER
self._pressure = 0
self._temperature = 0
self._D1 = 0
self._D2 = 0
def init(self):
if self._bus is None:
"No bus!"
return False
self._bus.write_byte(self._MS5837_ADDR, self._MS5837_RESET)
# Wait for reset to complete
sleep(0.01)
self._C = []
# Read calibration values and CRC
for i in range(7):
c = self._bus.read_word_data(self._MS5837_ADDR, self._MS5837_PROM_READ + 2*i)
c = ((c & 0xFF) << 8) | (c >> 8) # SMBus is little-endian for word transfers, we need to swap MSB and LSB
self._C.append(c)
crc = (self._C[0] & 0xF000) >> 12
if crc != self._crc4(self._C):
print "PROM read error, CRC failed!"
return False
return True
def read(self, oversampling=OSR_8192):
if self._bus is None:
print "No bus!"
return False
if oversampling < OSR_256 or oversampling > OSR_8192:
print "Invalid oversampling option!"
return False
        # Request D1 conversion (pressure)
self._bus.write_byte(self._MS5837_ADDR, self._MS5837_CONVERT_D1_256 + 2*oversampling)
# Maximum conversion time increases linearly with oversampling
# max time (seconds) ~= 2.2e-6(x) where x = OSR = (2^8, 2^9, ..., 2^13)
# We use 2.5e-6 for some overhead
sleep(2.5e-6 * 2**(8+oversampling))
d = self._bus.read_i2c_block_data(self._MS5837_ADDR, self._MS5837_ADC_READ, 3)
self._D1 = d[0] << 16 | d[1] << 8 | d[2]
        # Request D2 conversion (temperature)
self._bus.write_byte(self._MS5837_ADDR, self._MS5837_CONVERT_D2_256 + 2*oversampling)
# As above
sleep(2.5e-6 * 2**(8+oversampling))
d = self._bus.read_i2c_block_data(self._MS5837_ADDR, self._MS5837_ADC_READ, 3)
self._D2 = d[0] << 16 | d[1] << 8 | d[2]
# Calculate compensated pressure and temperature
# using raw ADC values and internal calibration
self._calculate()
return True
    def setFluidDensity(self, density):
        self._fluidDensity = density
# Pressure in requested units
# mbar * conversion
def pressure(self, conversion=UNITS_mbar):
return self._pressure * conversion
# Temperature in requested units
# default degrees C
def temperature(self, conversion=UNITS_Centigrade):
degC = self._temperature / 100.0
        if conversion == UNITS_Fahrenheit:
            return (9.0/5.0) * degC + 32
elif conversion == UNITS_Kelvin:
            return degC + 273.15
return degC
# Depth relative to MSL pressure in given fluid density
def depth(self):
return (self.pressure(UNITS_Pa)-101300)/(self._fluidDensity*9.80665)
# Altitude relative to MSL pressure
def altitude(self):
return (1-pow((self.pressure()/1013.25),.190284))*145366.45*.3048
# Cribbed from datasheet
def _calculate(self):
OFFi = 0
SENSi = 0
Ti = 0
dT = self._D2-self._C[5]*256
if self._model == MODEL_02BA:
SENS = self._C[1]*65536+(self._C[3]*dT)/128
OFF = self._C[2]*131072+(self._C[4]*dT)/64
self._pressure = (self._D1*SENS/(2097152)-OFF)/(32768)
else:
SENS = self._C[1]*32768+(self._C[3]*dT)/256
OFF = self._C[2]*65536+(self._C[4]*dT)/128
self._pressure = (self._D1*SENS/(2097152)-OFF)/(8192)
self._temperature = 2000+dT*self._C[6]/8388608
# Second order compensation
if self._model == MODEL_02BA:
if (self._temperature/100) < 20: # Low temp
Ti = (11*dT*dT)/(34359738368)
OFFi = (31*(self._temperature-2000)*(self._temperature-2000))/8
SENSi = (63*(self._temperature-2000)*(self._temperature-2000))/32
else:
if (self._temperature/100) < 20: # Low temp
Ti = (3*dT*dT)/(8589934592)
OFFi = (3*(self._temperature-2000)*(self._temperature-2000))/2
SENSi = (5*(self._temperature-2000)*(self._temperature-2000))/8
if (self._temperature/100) < -15: # Very low temp
                OFFi = OFFi+7*(self._temperature+1500L)*(self._temperature+1500)
                SENSi = SENSi+4*(self._temperature+1500L)*(self._temperature+1500)
elif (self._temperature/100) >= 20: # High temp
Ti = 2*(dT*dT)/(137438953472)
OFFi = (1*(self._temperature-2000)*(self._temperature-2000))/16
SENSi = 0
OFF2 = OFF-OFFi
SENS2 = SENS-SENSi
if self._model == MODEL_02BA:
self._temperature = (self._temperature-Ti)
self._pressure = (((self._D1*SENS2)/2097152-OFF2)/32768)/100.0
else:
self._temperature = (self._temperature-Ti)
self._pressure = (((self._D1*SENS2)/2097152-OFF2)/8192)/10.0
# Cribbed from datasheet
def _crc4(self, n_prom):
n_rem = 0
n_prom[0] = ((n_prom[0]) & 0x0FFF)
n_prom.append(0)
for i in range(16):
if i%2 == 1:
n_rem ^= ((n_prom[i>>1]) & 0x00FF)
else:
n_rem ^= (n_prom[i>>1] >> 8)
for n_bit in range(8,0,-1):
if n_rem & 0x8000:
n_rem = (n_rem << 1) ^ 0x3000
else:
n_rem = (n_rem << 1)
n_rem = ((n_rem >> 12) & 0x000F)
self.n_prom = n_prom
self.n_rem = n_rem
return n_rem ^ 0x00
class MS5837_30BA(MS5837):
def __init__(self, bus=1):
MS5837.__init__(self, MODEL_30BA, bus)
class MS5837_02BA(MS5837):
def __init__(self, bus=1):
MS5837.__init__(self, MODEL_02BA, bus)
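# Minimal usage sketch (illustrative; assumes an MS5837-30BA wired to I2C bus 1):
# sensor = MS5837_30BA(bus=1)
# if sensor.init() and sensor.read(OSR_8192):
#     print "P: %0.1f mbar  T: %0.2f C" % (sensor.pressure(), sensor.temperature())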
|
dheideman/UCSD-RoboFishy
|
Mapper/ms5837.py
|
Python
|
mit
| 7,298
|
import pytest
VIEWPORT = {
'large': {'width': 1201, 'height': 1024}, # also nav-break-ends
'desktop': {'width': 1025, 'height': 1024},
'tablet': {'width': 851, 'height': 1024}, # also nav-block-ends
'mobile': {'width': 481, 'height': 1024},
'small': {'width': 320, 'height': 480}}
def pytest_addoption(parser):
parser.addoption(
"--maintenance-mode",
action="store_true",
help="run tests against a server in maintenance mode",
)
@pytest.fixture(scope='session')
def base_url(base_url, request):
return base_url or 'https://developer.allizom.org'
@pytest.fixture
def selenium(request, selenium):
viewport = VIEWPORT['large']
if request.keywords.get('viewport') is not None:
viewport = VIEWPORT[request.keywords.get('viewport').args[0]]
selenium.set_window_size(viewport['width'], viewport['height'])
return selenium
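# The fixture above resizes the browser window when a test carries a viewport
# marker, e.g. (hypothetical test):
# @pytest.mark.viewport('mobile')
# def test_nav_menu(selenium):
#     ...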
@pytest.fixture(scope='session')
def sensitive_url(request, base_url):
if not base_url:
return False
return not any(base_url.startswith(url)
for url in ('http://localhost',
'https://developer-local'))
|
safwanrahman/kuma
|
tests/conftest.py
|
Python
|
mpl-2.0
| 1,174
|
# -*- coding: utf-8 -*-
# Copyright 2016 Alexander Fahlke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""hellotravis-python sample program"""
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
# three numbers are the components of the version number. The fourth
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "0.1.0"
version_info = (0, 1, 0, 4)
|
alexanderfahlke/hellotravis-python
|
hellotravis-python/__init__.py
|
Python
|
apache-2.0
| 1,050
|
import errno
import glob
import platform
import re
import sys
import tempfile
import zipfile
from contextlib import contextmanager
from distutils.version import StrictVersion
import os
import requests
from xml.etree import ElementTree
IS_64_BIT = sys.maxsize > 2**32
IS_LINUX = platform.system().lower() == 'linux'
IS_WINDOWS = platform.system().lower() == 'windows'
IS_MAC = platform.system().lower() == 'darwin'
UNKNOWN_PLATFORM = not (IS_LINUX or IS_WINDOWS or IS_MAC)
REPO_DIR = os.path.join(os.path.expanduser('~'), '.rockyroad')
@contextmanager
def download_file(url):
"""
Download a remote file to a temporary location.
:param url: the file url
"""
resp = requests.get(url, stream=True)
with tempfile.NamedTemporaryFile(delete=False) as fp:
name = fp.name
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
fp.write(chunk)
    yield name
    # remove the temporary file once the caller is finished with it
    os.unlink(name)
def _mkdirp(dirpath):
try:
os.makedirs(dirpath)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(dirpath):
pass
def _get_xml_ns(uri):
m = re.match(r'\{.*?\}', uri)
return m.group(0) if m else ''
class Driver:
version = None
bit = None
repo_dir = os.path.join(os.path.expanduser('~'), '.rockyroad')
def __init__(self, version=None, bit=None):
if version:
self.version = str(version)
if not bit:
self.bit = '64' if IS_64_BIT else '32'
else:
self.bit = str(bit)
if hasattr(self, 'setup'):
self.setup()
def download(self):
"""Download the driver binary"""
raise NotImplementedError('You must implement download()')
def binary_path(self):
"""The absolute path to the driver binary"""
raise NotImplementedError('You must implement binary_path()')
def path(self):
"""
The absolute path to the driver
:return:
"""
if not os.path.exists(self.binary_path()):
self.download()
return self.binary_path()
class ChromeDriver(Driver):
versions = {}
_bin_path = None
def setup(self):
url = 'https://chromedriver.storage.googleapis.com/'
resp = requests.get(url)
tree = ElementTree.fromstring(resp.content)
ns = _get_xml_ns(tree.tag)
for elem in tree.findall('%sContents' % ns):
key = elem.find('%sKey' % ns)
            m = re.match(r'^([\d.]+?)/chromedriver_(linux|mac|win)(32|64)', key.text)
if m:
v = m.group(1) # version
p = m.group(2) # platform
b = m.group(3) # bit
if v not in self.versions:
self.versions[v] = {}
if p not in self.versions[v]:
self.versions[v][p] = {}
self.versions[v][p][b] = url + key.text
@property
def _platform(self):
if IS_WINDOWS:
return 'win'
elif IS_LINUX:
return 'linux'
elif IS_MAC:
return 'mac'
else:
raise RuntimeError('Unable to detect current platform')
def binary_path(self):
if self._bin_path:
return self._bin_path
if self.version and self.version not in self.versions:
raise RuntimeError('Chromedriver %s does not exist' % self.version)
if not self.version:
numbers = list(self.versions.keys())
numbers.sort(key=StrictVersion, reverse=True)
self.version = numbers[0]
bin_name = 'chromedriver.exe' if IS_WINDOWS else 'chromedriver'
self._bin_path = os.path.join(REPO_DIR, 'chromedriver', '%s-%s%s' %
(self.version, self._platform, self.bit,),
bin_name)
return self._bin_path
def download(self):
url = self.versions[self.version][self._platform][self.bit]
        destination_dir = os.path.dirname(self.binary_path())
with download_file(url) as name:
_mkdirp(destination_dir)
z = zipfile.ZipFile(name, 'r')
z.extractall(destination_dir)
z.close()
for filename in glob.iglob(destination_dir + '/*'):
            os.chmod(filename, 0o777)
def download_chromedriver(version=None, bit=None):
"""
Download the chromedriver binary.
If version is not set, then it will get the latest one. If the bit value is
not set then it will use the same value as the current system
"""
url = 'https://chromedriver.storage.googleapis.com/'
resp = requests.get(url)
tree = ElementTree.fromstring(resp.content)
ns = _get_xml_ns(tree.tag)
if version:
version = str(version)
if bit:
bit = str(bit)
else:
bit = '64' if IS_64_BIT else '32'
versions = {}
for elem in tree.findall('%sContents' % ns):
key = elem.find('%sKey' % ns)
        m = re.match(r'^([\d.]+?)/chromedriver_(linux|mac|win)(32|64)', key.text)
if m:
v = m.group(1) # version
p = m.group(2) # platform
b = m.group(3) # bit
if v not in versions:
versions[v] = {}
if p not in versions[v]:
versions[v][p] = {}
versions[v][p][b] = url + key.text
if version and version not in versions:
raise RuntimeError('Chromedriver %s is not a valid version' % version)
if IS_WINDOWS:
p = 'win'
elif IS_LINUX:
p = 'linux'
elif IS_MAC:
p = 'mac'
else:
raise RuntimeError('Unable to detect current platform')
    if version:
        if bit not in versions[version][p]:
            raise RuntimeError('Invalid bit value %s' % bit)
        download_url = versions[version][p][bit]
else:
# get latest version
numbers = list(versions.keys())
numbers.sort(key=StrictVersion, reverse=True)
version = numbers[0]
download_url = versions[version][p][bit]
destination_dir = os.path.join(REPO_DIR, 'chromedriver',
'%s-%s%s' % (version, p, bit,))
if os.path.isdir(destination_dir):
return destination_dir
# download an unzip to repo directory
with download_file(download_url) as name:
_mkdirp(destination_dir)
z = zipfile.ZipFile(name, 'r')
z.extractall(destination_dir)
z.close()
for filename in glob.iglob(destination_dir + '/*'):
        os.chmod(filename, 0o777)
return destination_dir
def get_binary(name, arch=None, version=None):
    """
    Get the driver binary.
    This will check the cache location to see if it has already been downloaded
    and return its path. If it is not in the cache then it will be downloaded.
    :param name: the binary name, e.g. chromedriver
    :param arch: the bit value of the binary (32 or 64)
    :param version: the driver version; the latest is used if omitted
    :return: the absolute path to the driver binary
    """
    # Minimal completion (assumption): the original body is missing, so this
    # sketch dispatches to the ChromeDriver class above, the only driver
    # implemented in this module.
    if name != 'chromedriver':
        raise ValueError('Unknown driver binary: %s' % name)
    return ChromeDriver(version=version, bit=arch).path()
kevbradwick/rockyroad
|
rockyroad/driver.py
|
Python
|
bsd-3-clause
| 7,103
|
from selenium import webdriver
import selenium.webdriver.support.ui as ui
from fixture.session import SessionHelper
from fixture.group import GroupHelper
from fixture.contact import ContactHelper
class Application:
def __init__(self, browser, base_url):
if browser == "firefox":
self.wd = webdriver.Firefox()
elif browser =="chrome":
self.wd = webdriver.Chrome()
elif browser =="ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognaized browser %s" % browser)#выброс исключения с помощью raise и перехват с помощью фреймворка
self.wd.implicitly_wait(10)
#self.wait = ui.WebDriverWait(self.wd, 10)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
        self.base_url = base_url
def is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def home_page(self):
wd = self.wd
wd.get(self.base_url)
def destroy(self):
self.wd.quit()
|
elenagradovich/python_training
|
fixture/application.py
|
Python
|
apache-2.0
| 1,175
|
"""Default value unit tests."""
# Copyright (c) 2001-2009 ElevenCraft Inc.
# See LICENSE for details.
from schevo.constant import DEFAULT, UNASSIGNED
from schevo import error
from schevo.test import CreatesSchema
class BaseDefaultValues(CreatesSchema):
body = '''
class Charlie(E.Entity):
"""Fields have default values for create transactions."""
beta = f.string(default='foo') # Non-callable default value.
    gamma = f.integer(default=lambda: 42)  # Callable default value.
_sample_unittest = [
('bar', 12), # No defaults are used.
(DEFAULT, 12), # Default is used for beta.
('bar', DEFAULT), # Default is used for gamma.
(DEFAULT, DEFAULT), # Defaults used for beta and gamma.
]
'''
def test_populate_defaults(self):
charlies = db.Charlie.find()
assert len(charlies) == 4
expected = [
('bar', 12),
('foo', 12),
('bar', 42),
('foo', 42),
]
for charlie, (beta, gamma) in zip(charlies, expected):
assert charlie.beta == beta
assert charlie.gamma == gamma
# class TestDefaultValues1(BaseDefaultValues):
# include = True
# format = 1
class TestDefaultValues2(BaseDefaultValues):
include = True
format = 2
|
Schevo/schevo
|
schevo/test/test_default_values.py
|
Python
|
mit
| 1,424
|
##
# Copyright 2014-2016 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Toolchain querying support for module naming schemes.
:author: Kenneth Hoste (Ghent University)
"""
from vsc.utils import fancylogger
from easybuild.framework.easyconfig.easyconfig import process_easyconfig, robot_find_easyconfig
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
_log = fancylogger.getLogger('module_naming_scheme.toolchain', fname=False)
_toolchain_details_cache = {}
# different types of toolchain elements
TOOLCHAIN_COMPILER = 'COMPILER'
TOOLCHAIN_MPI = 'MPI'
TOOLCHAIN_BLAS = 'BLAS'
TOOLCHAIN_LAPACK = 'LAPACK'
TOOLCHAIN_FFT = 'FFT'
def det_toolchain_element_details(tc, elem):
"""
Determine details of a particular toolchain element, for a given Toolchain instance.
"""
# check for cached version first
tc_dict = tc.as_dict()
key = (tc_dict['name'], tc_dict['version'] + tc_dict['versionsuffix'], elem)
if key in _toolchain_details_cache:
_log.debug("Obtained details for '%s' in toolchain '%s' from cache" % (elem, tc_dict))
return _toolchain_details_cache[key]
# grab version from parsed easyconfig file for toolchain
eb_file = robot_find_easyconfig(tc_dict['name'], det_full_ec_version(tc_dict))
tc_ec = process_easyconfig(eb_file, parse_only=True)
if len(tc_ec) > 1:
_log.warning("More than one toolchain specification found for %s, only retaining first" % tc_dict)
_log.debug("Full list of toolchain specifications: %s" % tc_ec)
tc_ec = tc_ec[0]['ec']
tc_deps = tc_ec['dependencies']
tc_elem_details = None
for tc_dep in tc_deps:
if tc_dep['name'] == elem:
tc_elem_details = tc_dep
_log.debug("Found details for toolchain element %s: %s" % (elem, tc_elem_details))
break
if tc_elem_details is None:
# for compiler-only toolchains, toolchain and compilers are one-and-the-same
if tc_ec['name'] == elem:
tc_elem_details = tc_ec
else:
raise EasyBuildError("No toolchain element '%s' found for toolchain %s: %s", elem, tc.as_dict(), tc_ec)
_toolchain_details_cache[key] = tc_elem_details
_log.debug("Obtained details for '%s' in toolchain '%s', added to cache" % (elem, tc_dict))
return _toolchain_details_cache[key]
def det_toolchain_compilers(ec):
"""
Determine compilers of toolchain for given EasyConfig instance.
:param ec: a parsed EasyConfig file (an AttributeError will occur if a simple dict is passed)
"""
tc_elems = ec.toolchain.definition()
if ec.toolchain.name == DUMMY_TOOLCHAIN_NAME:
# dummy toolchain has no compiler
tc_comps = None
    elif TOOLCHAIN_COMPILER not in tc_elems:
# every toolchain should have at least a compiler
raise EasyBuildError("No compiler found in toolchain %s: %s", ec.toolchain.as_dict(), tc_elems)
elif tc_elems[TOOLCHAIN_COMPILER]:
tc_comps = []
for comp_elem in tc_elems[TOOLCHAIN_COMPILER]:
tc_comps.append(det_toolchain_element_details(ec.toolchain, comp_elem))
else:
raise EasyBuildError("Empty list of compilers for %s toolchain definition: %s",
ec.toolchain.as_dict(), tc_elems)
_log.debug("Found compilers %s for toolchain %s (%s)", tc_comps, ec.toolchain.name, ec.toolchain.as_dict())
return tc_comps
def det_toolchain_mpi(ec):
"""
Determine MPI library of toolchain for given EasyConfig instance.
:param ec: a parsed EasyConfig file (an AttributeError will occur if a simple dict is passed)
"""
tc_elems = ec.toolchain.definition()
if TOOLCHAIN_MPI in tc_elems:
if not tc_elems[TOOLCHAIN_MPI]:
raise EasyBuildError("Empty list of MPI libs for %s toolchain definition: %s",
ec.toolchain.as_dict(), tc_elems)
# assumption: only one MPI toolchain element
tc_mpi = det_toolchain_element_details(ec.toolchain, tc_elems[TOOLCHAIN_MPI][0])
else:
# no MPI in this toolchain
tc_mpi = None
return tc_mpi
|
Caylo/easybuild-framework
|
easybuild/tools/module_naming_scheme/toolchain.py
|
Python
|
gpl-2.0
| 5,230
|
# $Id: uas-cancel-no-final.py 4188 2012-06-29 09:01:17Z nanang $
#
import inc_const as const
PJSUA = ["--null-audio --max-calls=1 $SIPP_URI"]
PJSUA_EXPECTS = [[0, const.STATE_EARLY, "h"]]
|
halex2005/pjproject
|
tests/pjsua/scripts-sipp/uas-cancel-no-final.py
|
Python
|
gpl-2.0
| 190
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from oslo import messaging
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.openstack.common import log as logging
from neutron.openstack.common import timeutils
LOG = logging.getLogger(__name__)
def create_consumers(endpoints, prefix, topic_details):
"""Create agent RPC consumers.
:param endpoints: The list of endpoints to process the incoming messages.
:param prefix: Common prefix for the plugin/agent message queues.
:param topic_details: A list of topics. Each topic has a name, an
operation, and an optional host param keying the
subscription to topic.host for plugin calls.
:returns: A common Connection.
"""
connection = n_rpc.create_connection(new=True)
for details in topic_details:
topic, operation, node_name = itertools.islice(
itertools.chain(details, [None]), 3)
topic_name = topics.get_topic_name(prefix, topic, operation)
connection.create_consumer(topic_name, endpoints, fanout=True)
if node_name:
node_topic_name = '%s.%s' % (topic_name, node_name)
connection.create_consumer(node_topic_name,
endpoints,
fanout=False)
connection.consume_in_threads()
return connection
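# Example (illustrative endpoints and prefix; each topic entry is either
# (topic, operation) or (topic, operation, host)):
# connection = create_consumers([AgentEndpoint()], 'q-agent-notifier',
#                               [('port', 'update'), ('tunnel', 'update', cfg.host)])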
class PluginReportStateAPI(n_rpc.RpcProxy):
BASE_RPC_API_VERSION = '1.0'
def __init__(self, topic):
super(PluginReportStateAPI, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
def report_state(self, context, agent_state, use_call=False):
msg = self.make_msg('report_state',
agent_state={'agent_state':
agent_state},
time=timeutils.strtime())
if use_call:
return self.call(context, msg)
else:
return self.cast(context, msg)
class PluginApi(n_rpc.RpcProxy):
'''Agent side of the rpc API.
API version history:
1.0 - Initial version.
1.3 - get_device_details rpc signature upgrade to obtain 'host' and
return value to include fixed_ips and device_owner for
the device port
'''
BASE_RPC_API_VERSION = '1.1'
def __init__(self, topic):
super(PluginApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
def get_device_details(self, context, device, agent_id, host=None):
return self.call(context,
self.make_msg('get_device_details', device=device,
agent_id=agent_id, host=host))
def get_devices_details_list(self, context, devices, agent_id, host=None):
res = []
try:
res = self.call(context,
self.make_msg('get_devices_details_list',
devices=devices,
agent_id=agent_id,
host=host),
version='1.3')
except messaging.UnsupportedVersion:
# If the server has not been upgraded yet, a DVR-enabled agent
# may not work correctly, however it can function in 'degraded'
# mode, in that DVR routers may not be in the system yet, and
# it might be not necessary to retrieve info about the host.
LOG.warn(_('DVR functionality requires a server upgrade.'))
res = [
self.call(context,
self.make_msg('get_device_details', device=device,
agent_id=agent_id, host=host))
for device in devices
]
return res
def get_port_detail(self, context, port_id, agent_id):
return self.call(context,
self.make_msg('get_port_detail', port_id=port_id,
agent_id=agent_id))
def get_ports(self, context, agent_id, host=None, mac_address=None, device_owner=None):
return self.call(context,
self.make_msg('get_ports', agent_id=agent_id,
host=host, mac_address=mac_address,
device_owner=device_owner))
def update_port(self, context, port_id, agent_id, port):
return self.call(context,
self.make_msg('update_port', port_id=port_id,
agent_id=agent_id, port=port))
def port_bound_to_router(self, context, port_id, agent_id, host):
return self.call(context,
self.make_msg('port_bound_to_router', port_id=port_id,
agent_id=agent_id, host=host))
def update_device_down(self, context, device, agent_id, host=None):
return self.call(context,
self.make_msg('update_device_down', device=device,
agent_id=agent_id, host=host))
def update_device_up(self, context, device, agent_id, host=None):
return self.call(context,
self.make_msg('update_device_up', device=device,
agent_id=agent_id, host=host))
def tunnel_sync(self, context, tunnel_ip, tunnel_type=None):
return self.call(context,
self.make_msg('tunnel_sync', tunnel_ip=tunnel_ip,
tunnel_type=tunnel_type))
def get_subnet_dhcp_by_network_id(self, context, network_id):
return self.call(context,
self.make_msg('get_subnet_dhcp_by_network_id', network_id=network_id))
def get_networks(self, context, agent_id, network_id):
return self.call(context,
self.make_msg('get_networks', agent_id=agent_id, network_id=network_id))
|
nash-x/hws
|
neutron/agent/rpc.py
|
Python
|
apache-2.0
| 6,712
|
"""
Flux for Home-Assistant.
The idea was taken from https://github.com/KpaBap/hue-flux/
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/switch.flux/
"""
from datetime import time
import logging
import voluptuous as vol
from homeassistant.components.light import is_on, turn_on
from homeassistant.components.switch import DOMAIN, SwitchDevice
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from homeassistant.helpers.event import track_time_change
from homeassistant.helpers.sun import get_astral_event_date
from homeassistant.util.color import (
color_temperature_to_rgb, color_RGB_to_xy,
color_temperature_kelvin_to_mired)
from homeassistant.util.dt import now as dt_now
import homeassistant.helpers.config_validation as cv
DEPENDENCIES = ['light']
_LOGGER = logging.getLogger(__name__)
CONF_LIGHTS = 'lights'
CONF_START_TIME = 'start_time'
CONF_STOP_TIME = 'stop_time'
CONF_START_CT = 'start_colortemp'
CONF_SUNSET_CT = 'sunset_colortemp'
CONF_STOP_CT = 'stop_colortemp'
CONF_BRIGHTNESS = 'brightness'
CONF_DISABLE_BRIGHTNESS_ADJUST = 'disable_brightness_adjust'
CONF_MODE = 'mode'
MODE_XY = 'xy'
MODE_MIRED = 'mired'
DEFAULT_MODE = MODE_XY
PLATFORM_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'flux',
vol.Required(CONF_LIGHTS): cv.entity_ids,
vol.Optional(CONF_NAME, default="Flux"): cv.string,
vol.Optional(CONF_START_TIME): cv.time,
vol.Optional(CONF_STOP_TIME, default=time(22, 0)): cv.time,
vol.Optional(CONF_START_CT, default=4000):
vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)),
vol.Optional(CONF_SUNSET_CT, default=3000):
vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)),
vol.Optional(CONF_STOP_CT, default=1900):
vol.All(vol.Coerce(int), vol.Range(min=1000, max=40000)),
vol.Optional(CONF_BRIGHTNESS):
vol.All(vol.Coerce(int), vol.Range(min=0, max=255)),
    vol.Optional(CONF_DISABLE_BRIGHTNESS_ADJUST): cv.boolean,
vol.Optional(CONF_MODE, default=DEFAULT_MODE):
vol.Any(MODE_XY, MODE_MIRED)
})
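# Example configuration.yaml entry (illustrative values, matching the schema
# above):
# switch:
#   - platform: flux
#     lights:
#       - light.desk
#       - light.couch
#     start_time: '7:00'
#     stop_colortemp: 1900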
def set_lights_xy(hass, lights, x_val, y_val, brightness):
"""Set color of array of lights."""
for light in lights:
if is_on(hass, light):
turn_on(hass, light,
xy_color=[x_val, y_val],
brightness=brightness,
transition=30)
def set_lights_temp(hass, lights, mired, brightness):
"""Set color of array of lights."""
for light in lights:
if is_on(hass, light):
turn_on(hass, light,
color_temp=int(mired),
brightness=brightness,
transition=30)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Flux switches."""
name = config.get(CONF_NAME)
lights = config.get(CONF_LIGHTS)
start_time = config.get(CONF_START_TIME)
stop_time = config.get(CONF_STOP_TIME)
start_colortemp = config.get(CONF_START_CT)
sunset_colortemp = config.get(CONF_SUNSET_CT)
stop_colortemp = config.get(CONF_STOP_CT)
brightness = config.get(CONF_BRIGHTNESS)
    disable_brightness_adjust = config.get(CONF_DISABLE_BRIGHTNESS_ADJUST)
mode = config.get(CONF_MODE)
flux = FluxSwitch(name, hass, False, lights, start_time, stop_time,
start_colortemp, sunset_colortemp, stop_colortemp,
brightness, disable_brightness_adjust, mode)
add_devices([flux])
def update(call=None):
"""Update lights."""
flux.flux_update()
hass.services.register(DOMAIN, name + '_update', update)
class FluxSwitch(SwitchDevice):
"""Representation of a Flux switch."""
def __init__(self, name, hass, state, lights, start_time, stop_time,
start_colortemp, sunset_colortemp, stop_colortemp,
brightness, disable_brightness_adjust, mode):
"""Initialize the Flux switch."""
self._name = name
self.hass = hass
self._lights = lights
self._start_time = start_time
self._stop_time = stop_time
self._start_colortemp = start_colortemp
self._sunset_colortemp = sunset_colortemp
self._stop_colortemp = stop_colortemp
self._brightness = brightness
self._disable_brightness_adjust = disable_brightness_adjust
self._mode = mode
self.unsub_tracker = None
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if switch is on."""
return self.unsub_tracker is not None
def turn_on(self, **kwargs):
"""Turn on flux."""
if self.is_on:
return
# Make initial update
self.flux_update()
self.unsub_tracker = track_time_change(
self.hass, self.flux_update, second=[0, 30])
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn off flux."""
if self.unsub_tracker is not None:
self.unsub_tracker()
self.unsub_tracker = None
self.schedule_update_ha_state()
def flux_update(self, now=None):
"""Update all the lights using flux."""
if now is None:
now = dt_now()
sunset = get_astral_event_date(self.hass, 'sunset', now.date())
start_time = self.find_start_time(now)
stop_time = now.replace(
hour=self._stop_time.hour, minute=self._stop_time.minute,
second=0)
if start_time < now < sunset:
# Daytime
time_state = 'day'
temp_range = abs(self._start_colortemp - self._sunset_colortemp)
day_length = int(sunset.timestamp() - start_time.timestamp())
seconds_from_start = int(now.timestamp() - start_time.timestamp())
percentage_complete = seconds_from_start / day_length
temp_offset = temp_range * percentage_complete
if self._start_colortemp > self._sunset_colortemp:
temp = self._start_colortemp - temp_offset
else:
temp = self._start_colortemp + temp_offset
else:
            # Nighttime
time_state = 'night'
if now < stop_time and now > start_time:
now_time = now
else:
now_time = stop_time
temp_range = abs(self._sunset_colortemp - self._stop_colortemp)
night_length = int(stop_time.timestamp() - sunset.timestamp())
seconds_from_sunset = int(now_time.timestamp() -
sunset.timestamp())
percentage_complete = seconds_from_sunset / night_length
temp_offset = temp_range * percentage_complete
if self._sunset_colortemp > self._stop_colortemp:
temp = self._sunset_colortemp - temp_offset
else:
temp = self._sunset_colortemp + temp_offset
x_val, y_val, b_val = color_RGB_to_xy(*color_temperature_to_rgb(temp))
brightness = self._brightness if self._brightness else b_val
if self._disable_brightness_adjust:
brightness = None
if self._mode == MODE_XY:
set_lights_xy(self.hass, self._lights, x_val,
y_val, brightness)
_LOGGER.info("Lights updated to x:%s y:%s brightness:%s, %s%% "
"of %s cycle complete at %s", x_val, y_val,
brightness, round(
percentage_complete * 100), time_state, now)
else:
            # Convert to mired (no clamping to the light's supported range is done here)
mired = color_temperature_kelvin_to_mired(temp)
set_lights_temp(self.hass, self._lights, mired, brightness)
_LOGGER.info("Lights updated to mired:%s brightness:%s, %s%% "
"of %s cycle complete at %s", mired, brightness,
round(percentage_complete * 100), time_state, now)
def find_start_time(self, now):
"""Return sunrise or start_time if given."""
if self._start_time:
sunrise = now.replace(
hour=self._start_time.hour, minute=self._start_time.minute,
second=0)
else:
sunrise = get_astral_event_date(self.hass, 'sunrise', now.date())
return sunrise
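# Illustrative sketch (added; not part of the original component): the linear
# color-temperature interpolation that flux_update performs between two
# endpoints, factored out on plain numbers for clarity.
def _example_interpolate_temp(start_temp, end_temp, fraction):
    """Return the Kelvin value after completing `fraction` of a cycle."""
    offset = abs(start_temp - end_temp) * fraction
    if start_temp > end_temp:
        return start_temp - offset
    return start_temp + offset
# e.g. halfway from 4000 K to 3000 K: _example_interpolate_temp(4000, 3000, 0.5) -> 3500.0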
|
alexmogavero/home-assistant
|
homeassistant/components/switch/flux.py
|
Python
|
apache-2.0
| 8,510
|
#!/usr/bin/env python
"""
Matplotlib provides sophisticated date plotting capabilities, standing on the
shoulders of python :mod:`datetime`, the add-on modules :mod:`pytz` and
:mod:`dateutil`. :class:`datetime` objects are converted to floating point
numbers which represent time in days since 0001-01-01 UTC, plus 1. For
example, 0001-01-01, 06:00 is 1.25, not 0.25. The helper functions
:func:`date2num`, :func:`num2date` and :func:`drange` are used to facilitate
easy conversion to and from :mod:`datetime` and numeric ranges.
.. note::
Like Python's datetime, mpl uses the Gregorian calendar for all
conversions between dates and floating point numbers. This practice
is not universal, and calendar differences can cause confusing
differences between what Python and mpl give as the number of days
since 0001-01-01 and what other software and databases yield. For
example, the US Naval Observatory uses a calendar that switches
from Julian to Gregorian in October, 1582. Hence, using their
calculator, the number of days between 0001-01-01 and 2006-04-01 is
732403, whereas using the Gregorian calendar via the datetime
module we find::
In [31]:date(2006,4,1).toordinal() - date(1,1,1).toordinal()
Out[31]:732401
A wide range of specific and general purpose date tick locators and
formatters are provided in this module. See
:mod:`matplotlib.ticker` for general information on tick locators
and formatters. These are described below.
All the matplotlib date converters, tickers and formatters are
timezone aware, and the default timezone is given by the timezone
parameter in your :file:`matplotlibrc` file. If you leave out a
:class:`tz` timezone instance, the default from your rc file will be
assumed. If you want to use a custom time zone, pass a
:class:`pytz.timezone` instance with the tz keyword argument to
:func:`num2date`, :func:`plot_date`, and any custom date tickers or
locators you create. See `pytz <http://pythonhosted.org/pytz/>`_ for
information on :mod:`pytz` and timezone handling.
The `dateutil module <https://dateutil.readthedocs.org>`_ provides
additional code to handle date ticking, making it easy to place ticks
on any kinds of dates. See examples below.
Date tickers
------------
Most of the date tickers can locate single or multiple values. For
example::
# import constants for the days of the week
from matplotlib.dates import MO, TU, WE, TH, FR, SA, SU
# tick on mondays every week
loc = WeekdayLocator(byweekday=MO, tz=tz)
# tick on mondays and saturdays
loc = WeekdayLocator(byweekday=(MO, SA))
In addition, most of the constructors take an interval argument::
# tick on mondays every second week
loc = WeekdayLocator(byweekday=MO, interval=2)
The rrule locator allows completely general date ticking::
# tick every 5th easter
rule = rrulewrapper(YEARLY, byeaster=1, interval=5)
loc = RRuleLocator(rule)
Here are all the date tickers:
* :class:`MinuteLocator`: locate minutes
* :class:`HourLocator`: locate hours
* :class:`DayLocator`: locate specified days of the month
* :class:`WeekdayLocator`: Locate days of the week, e.g., MO, TU
* :class:`MonthLocator`: locate months, e.g., 7 for july
* :class:`YearLocator`: locate years that are multiples of base
* :class:`RRuleLocator`: locate using a
:class:`matplotlib.dates.rrulewrapper`. The
:class:`rrulewrapper` is a simple wrapper around a
:class:`dateutil.rrule` (`dateutil
<https://dateutil.readthedocs.org>`_) which allow almost
arbitrary date tick specifications. See `rrule example
<../examples/pylab_examples/date_demo_rrule.html>`_.
* :class:`AutoDateLocator`: On autoscale, this class picks the best
:class:`MultipleDateLocator` to set the view limits and the tick
locations.
Date formatters
---------------
Here are all the date formatters:
* :class:`AutoDateFormatter`: attempts to figure out the best format
to use. This is most useful when used with the :class:`AutoDateLocator`.
* :class:`DateFormatter`: use :func:`strftime` format strings
* :class:`IndexDateFormatter`: date plots with implicit *x*
indexing.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
from matplotlib.externals.six.moves import xrange, zip
import re
import time
import math
import datetime
import warnings
from dateutil.rrule import (rrule, MO, TU, WE, TH, FR, SA, SU, YEARLY,
MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
SECONDLY)
from dateutil.relativedelta import relativedelta
import dateutil.parser
import numpy as np
import matplotlib
import matplotlib.units as units
import matplotlib.cbook as cbook
import matplotlib.ticker as ticker
__all__ = ('date2num', 'num2date', 'drange', 'epoch2num',
'num2epoch', 'mx2num', 'DateFormatter',
'IndexDateFormatter', 'AutoDateFormatter', 'DateLocator',
'RRuleLocator', 'AutoDateLocator', 'YearLocator',
'MonthLocator', 'WeekdayLocator',
'DayLocator', 'HourLocator', 'MinuteLocator',
'SecondLocator', 'MicrosecondLocator',
'rrule', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU',
'YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY',
'HOURLY', 'MINUTELY', 'SECONDLY', 'MICROSECONDLY', 'relativedelta',
'seconds', 'minutes', 'hours', 'weeks')
# Make a simple UTC instance so we don't always have to import
# pytz. From the python datetime library docs:
class _UTC(datetime.tzinfo):
"""UTC"""
def utcoffset(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return datetime.timedelta(0)
UTC = _UTC()
def _get_rc_timezone():
"""
    Retrieve the preferred timezone from the rcParams dictionary.
"""
s = matplotlib.rcParams['timezone']
if s == 'UTC':
return UTC
import pytz
return pytz.timezone(s)
"""
Time-related constants.
"""
EPOCH_OFFSET = float(datetime.datetime(1970, 1, 1).toordinal())
JULIAN_OFFSET = 1721424.5 # Julian date at 0001-01-01
MICROSECONDLY = SECONDLY + 1
HOURS_PER_DAY = 24.
MIN_PER_HOUR = 60.
SEC_PER_MIN = 60.
MONTHS_PER_YEAR = 12.
DAYS_PER_WEEK = 7.
DAYS_PER_MONTH = 30.
DAYS_PER_YEAR = 365.0
MINUTES_PER_DAY = MIN_PER_HOUR * HOURS_PER_DAY
SEC_PER_HOUR = SEC_PER_MIN * MIN_PER_HOUR
SEC_PER_DAY = SEC_PER_HOUR * HOURS_PER_DAY
SEC_PER_WEEK = SEC_PER_DAY * DAYS_PER_WEEK
MUSECONDS_PER_DAY = 1e6 * SEC_PER_DAY
MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY = (
MO, TU, WE, TH, FR, SA, SU)
WEEKDAYS = (MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY)
def _to_ordinalf(dt):
"""
Convert :mod:`datetime` or :mod:`date` to the Gregorian date as UTC float
days, preserving hours, minutes, seconds and microseconds. Return value
is a :func:`float`.
"""
if hasattr(dt, 'tzinfo') and dt.tzinfo is not None:
delta = dt.tzinfo.utcoffset(dt)
if delta is not None:
dt -= delta
base = float(dt.toordinal())
if isinstance(dt, datetime.datetime):
# Get a datetime object at midnight in the same time zone as dt.
cdate = dt.date()
midnight_time = datetime.time(0, 0, 0, tzinfo=dt.tzinfo)
rdt = datetime.datetime.combine(cdate, midnight_time)
td_remainder = _total_seconds(dt - rdt)
if td_remainder > 0:
base += td_remainder / SEC_PER_DAY
return base
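# Quick illustration (added sketch): midnight on 0001-01-01 maps to ordinal
# 1.0, and 06:00 the same day to 1.25, matching the module docstring.
def _example_to_ordinalf():
    assert _to_ordinalf(datetime.datetime(1, 1, 1)) == 1.0
    assert _to_ordinalf(datetime.datetime(1, 1, 1, 6)) == 1.25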
# a version of _to_ordinalf that can operate on numpy arrays
_to_ordinalf_np_vectorized = np.vectorize(_to_ordinalf)
try:
# Available as a native method in Python >= 2.7.
_total_seconds = datetime.timedelta.total_seconds
except AttributeError:
def _total_seconds(tdelta):
"""
Alias providing support for datetime.timedelta.total_seconds() function
calls even in Python < 2.7.
        The input `tdelta` is a datetime.timedelta object, and the return
        value is a float containing the total number of seconds of the
        `tdelta` duration. For large durations (> 270 years on most
        platforms), this loses microsecond accuracy.
"""
return (tdelta.microseconds +
(tdelta.seconds + tdelta.days * SEC_PER_DAY) * 1e6) * 1e-6
def _from_ordinalf(x, tz=None):
"""
Convert Gregorian float of the date, preserving hours, minutes,
seconds and microseconds. Return value is a :class:`datetime`.
The input date `x` is a float in ordinal days at UTC, and the output will
be the specified :class:`datetime` object corresponding to that time in
timezone `tz`, or if `tz` is `None`, in the timezone specified in
`rcParams['timezone']`.
"""
if tz is None:
tz = _get_rc_timezone()
ix = int(x)
dt = datetime.datetime.fromordinal(ix).replace(tzinfo=UTC)
remainder = float(x) - ix
# Round down to the nearest microsecond.
dt += datetime.timedelta(microseconds=int(remainder * MUSECONDS_PER_DAY))
# Compensate for rounding errors
if dt.microsecond < 10:
dt = dt.replace(microsecond=0)
elif dt.microsecond > 999990:
dt += datetime.timedelta(microseconds=1e6 - dt.microsecond)
return dt.astimezone(tz)
# a version of _from_ordinalf that can operate on numpy arrays
_from_ordinalf_np_vectorized = np.vectorize(_from_ordinalf)
class strpdate2num(object):
"""
Use this class to parse date strings to matplotlib datenums when
you know the date format string of the date you are parsing. See
:file:`examples/load_demo.py`.
"""
def __init__(self, fmt):
""" fmt: any valid strptime format is supported """
self.fmt = fmt
def __call__(self, s):
"""s : string to be converted
return value: a date2num float
"""
return date2num(datetime.datetime(*time.strptime(s, self.fmt)[:6]))
class bytespdate2num(strpdate2num):
"""
Use this class to parse date strings to matplotlib datenums when
you know the date format string of the date you are parsing. See
:file:`examples/load_demo.py`.
"""
def __init__(self, fmt, encoding='utf-8'):
"""
Args:
fmt: any valid strptime format is supported
encoding: encoding to use on byte input (default: 'utf-8')
"""
super(bytespdate2num, self).__init__(fmt)
self.encoding = encoding
def __call__(self, b):
"""
Args:
b: byte input to be converted
Returns:
A date2num float
"""
s = b.decode(self.encoding)
return super(bytespdate2num, self).__call__(s)
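# Usage sketch (added, not part of the original module): parse a date string
# with a known strptime format into a matplotlib datenum.
def _example_strpdate2num():
    """Return the datenum for 2006-04-01 (732402.0), illustration only."""
    return strpdate2num('%Y-%m-%d')('2006-04-01')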
# a version of dateutil.parser.parse that can operate on numpy arrays
_dateutil_parser_parse_np_vectorized = np.vectorize(dateutil.parser.parse)
def datestr2num(d, default=None):
"""
Convert a date string to a datenum using
:func:`dateutil.parser.parse`.
Parameters
----------
d : string or sequence of strings
The dates to convert.
default : datetime instance
The default date to use when fields are missing in `d`.
"""
if cbook.is_string_like(d):
dt = dateutil.parser.parse(d, default=default)
return date2num(dt)
else:
if default is not None:
d = [dateutil.parser.parse(s, default=default) for s in d]
d = np.asarray(d)
if not d.size:
return d
return date2num(_dateutil_parser_parse_np_vectorized(d))
def date2num(d):
"""
*d* is either a :class:`datetime` instance or a sequence of datetimes.
Return value is a floating point number (or sequence of floats)
which gives the number of days (fraction part represents hours,
minutes, seconds) since 0001-01-01 00:00:00 UTC, *plus* *one*.
The addition of one here is a historical artifact. Also, note
that the Gregorian calendar is assumed; this is not universal
practice. For details, see the module docstring.
"""
if not cbook.iterable(d):
return _to_ordinalf(d)
else:
d = np.asarray(d)
if not d.size:
return d
return _to_ordinalf_np_vectorized(d)
def julian2num(j):
"""
Convert a Julian date (or sequence) to a matplotlib date (or sequence).
"""
if cbook.iterable(j):
j = np.asarray(j)
return j - JULIAN_OFFSET
def num2julian(n):
"""
Convert a matplotlib date (or sequence) to a Julian date (or sequence).
"""
if cbook.iterable(n):
n = np.asarray(n)
return n + JULIAN_OFFSET
def num2date(x, tz=None):
"""
*x* is a float value which gives the number of days
(fraction part represents hours, minutes, seconds) since
0001-01-01 00:00:00 UTC *plus* *one*.
The addition of one here is a historical artifact. Also, note
that the Gregorian calendar is assumed; this is not universal
practice. For details, see the module docstring.
Return value is a :class:`datetime` instance in timezone *tz* (default to
rcparams TZ value).
If *x* is a sequence, a sequence of :class:`datetime` objects will
be returned.
"""
if tz is None:
tz = _get_rc_timezone()
if not cbook.iterable(x):
return _from_ordinalf(x, tz)
else:
x = np.asarray(x)
if not x.size:
return x
return _from_ordinalf_np_vectorized(x, tz).tolist()
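# Round-trip sketch (added): date2num and num2date invert each other; 06:00
# is used because 0.25 day is exactly representable as a float, so no
# microsecond rounding compensation is needed.
def _example_roundtrip():
    d = datetime.datetime(2006, 4, 1, 6, 0, tzinfo=UTC)
    assert num2date(date2num(d), tz=UTC) == d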
def drange(dstart, dend, delta):
"""
Return a date range as float Gregorian ordinals. *dstart* and
*dend* are :class:`datetime` instances. *delta* is a
:class:`datetime.timedelta` instance.
"""
f1 = _to_ordinalf(dstart)
f2 = _to_ordinalf(dend)
step = _total_seconds(delta) / SEC_PER_DAY
    # calculate the difference between dend and dstart in multiples of delta
num = int(np.ceil((f2 - f1) / step))
# calculate end of the interval which will be generated
dinterval_end = dstart + num * delta
    # ensure that a half-open interval [dstart, dend) is generated
    if dinterval_end >= dend:
        # if the endpoint is greater than or equal to dend, subtract one delta
        dinterval_end -= delta
        num -= 1
f2 = _to_ordinalf(dinterval_end) # new float-endpoint
return np.linspace(f1, f2, num + 1)
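# Usage sketch (added): drange generates a half-open interval [dstart, dend);
# this returns four ordinals, at 00:00, 06:00, 12:00 and 18:00.
def _example_drange():
    dstart = datetime.datetime(2006, 4, 1)
    dend = datetime.datetime(2006, 4, 2)
    return drange(dstart, dend, datetime.timedelta(hours=6))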
### date tickers and formatters ###
class DateFormatter(ticker.Formatter):
"""
Tick location is seconds since the epoch. Use a :func:`strftime`
format string.
Python only supports :mod:`datetime` :func:`strftime` formatting
    for years greater than 1900. Thanks to Andrew Dalke of Dalke
    Scientific Software, who contributed the :func:`strftime` code
    below to include dates earlier than 1900.
"""
illegal_s = re.compile(r"((^|[^%])(%%)*%s)")
def __init__(self, fmt, tz=None):
"""
*fmt* is a :func:`strftime` format string; *tz* is the
:class:`tzinfo` instance.
"""
if tz is None:
tz = _get_rc_timezone()
self.fmt = fmt
self.tz = tz
def __call__(self, x, pos=0):
if x == 0:
raise ValueError('DateFormatter found a value of x=0, which is '
'an illegal date. This usually occurs because '
'you have not informed the axis that it is '
'plotting dates, e.g., with ax.xaxis_date()')
dt = num2date(x, self.tz)
return self.strftime(dt, self.fmt)
def set_tzinfo(self, tz):
self.tz = tz
def _replace_common_substr(self, s1, s2, sub1, sub2, replacement):
"""Helper function for replacing substrings sub1 and sub2
located at the same indexes in strings s1 and s2 respectively,
with the string replacement. It is expected that sub1 and sub2
have the same length. Returns the pair s1, s2 after the
substitutions.
"""
# Find common indexes of substrings sub1 in s1 and sub2 in s2
# and make substitutions inplace. Because this is inplace,
# it is okay if len(replacement) != len(sub1), len(sub2).
i = 0
while True:
j = s1.find(sub1, i)
if j == -1:
break
i = j + 1
if s2[j:j + len(sub2)] != sub2:
continue
s1 = s1[:j] + replacement + s1[j + len(sub1):]
s2 = s2[:j] + replacement + s2[j + len(sub2):]
return s1, s2
def strftime_pre_1900(self, dt, fmt=None):
"""Call time.strftime for years before 1900 by rolling
forward a multiple of 28 years.
*fmt* is a :func:`strftime` format string.
Dalke: I hope I did this math right. Every 28 years the
calendar repeats, except through century leap years excepting
the 400 year leap years. But only if you're using the Gregorian
calendar.
"""
if fmt is None:
fmt = self.fmt
# Since python's time module's strftime implementation does not
# support %f microsecond (but the datetime module does), use a
# regular expression substitution to replace instances of %f.
# Note that this can be useful since python's floating-point
# precision representation for datetime causes precision to be
# more accurate closer to year 0 (around the year 2000, precision
# can be at 10s of microseconds).
fmt = re.sub(r'((^|[^%])(%%)*)%f',
r'\g<1>{0:06d}'.format(dt.microsecond), fmt)
year = dt.year
# For every non-leap year century, advance by
# 6 years to get into the 28-year repeat cycle
delta = 2000 - year
off = 6 * (delta // 100 + delta // 400)
year = year + off
# Move to between the years 1973 and 2000
year1 = year + ((2000 - year) // 28) * 28
year2 = year1 + 28
timetuple = dt.timetuple()
# Generate timestamp string for year and year+28
s1 = time.strftime(fmt, (year1,) + timetuple[1:])
s2 = time.strftime(fmt, (year2,) + timetuple[1:])
# Replace instances of respective years (both 2-digit and 4-digit)
# that are located at the same indexes of s1, s2 with dt's year.
# Note that C++'s strftime implementation does not use padded
# zeros or padded whitespace for %y or %Y for years before 100, but
# uses padded zeros for %x. (For example, try the runnable examples
# with .tm_year in the interval [-1900, -1800] on
# http://en.cppreference.com/w/c/chrono/strftime.) For ease of
# implementation, we always use padded zeros for %y, %Y, and %x.
s1, s2 = self._replace_common_substr(s1, s2,
"{0:04d}".format(year1),
"{0:04d}".format(year2),
"{0:04d}".format(dt.year))
s1, s2 = self._replace_common_substr(s1, s2,
"{0:02d}".format(year1 % 100),
"{0:02d}".format(year2 % 100),
"{0:02d}".format(dt.year % 100))
return cbook.unicode_safe(s1)
def strftime(self, dt, fmt=None):
"""Refer to documentation for datetime.strftime.
*fmt* is a :func:`strftime` format string.
Warning: For years before 1900, depending upon the current
locale it is possible that the year displayed with %x might
be incorrect. For years before 100, %y and %Y will yield
zero-padded strings.
"""
if fmt is None:
fmt = self.fmt
fmt = self.illegal_s.sub(r"\1", fmt)
fmt = fmt.replace("%s", "s")
if dt.year >= 1900:
# Note: in python 3.3 this is okay for years >= 1000,
            # refer to http://bugs.python.org/issue1777412
return cbook.unicode_safe(dt.strftime(fmt))
return self.strftime_pre_1900(dt, fmt)
class IndexDateFormatter(ticker.Formatter):
"""
Use with :class:`~matplotlib.ticker.IndexLocator` to cycle format
strings by index.
"""
def __init__(self, t, fmt, tz=None):
"""
*t* is a sequence of dates (floating point days). *fmt* is a
:func:`strftime` format string.
"""
if tz is None:
tz = _get_rc_timezone()
self.t = t
self.fmt = fmt
self.tz = tz
def __call__(self, x, pos=0):
'Return the label for time *x* at position *pos*'
ind = int(round(x))
if ind >= len(self.t) or ind <= 0:
return ''
dt = num2date(self.t[ind], self.tz)
return cbook.unicode_safe(dt.strftime(self.fmt))
class AutoDateFormatter(ticker.Formatter):
"""
This class attempts to figure out the best format to use. This is
most useful when used with the :class:`AutoDateLocator`.
    The AutoDateFormatter has a scale dictionary that maps the scale
    of the tick (the distance in days between one major tick and the
    next) to a format string. The default looks like this::
self.scaled = {
365.0 : '%Y',
30. : '%b %Y',
1.0 : '%b %d %Y',
1./24. : '%H:%M:%S',
1. / (24. * 60.): '%H:%M:%S.%f',
}
The algorithm picks the key in the dictionary that is >= the
current scale and uses that format string. You can customize this
dictionary by doing::
>>> formatter = AutoDateFormatter()
>>> formatter.scaled[1/(24.*60.)] = '%M:%S' # only show min and sec
A custom :class:`~matplotlib.ticker.FuncFormatter` can also be used.
The following example shows how to use a custom format function to strip
trailing zeros from decimal seconds and adds the date to the first
ticklabel::
>>> def my_format_function(x, pos=None):
... x = matplotlib.dates.num2date(x)
... if pos == 0:
... fmt = '%D %H:%M:%S.%f'
... else:
... fmt = '%H:%M:%S.%f'
... label = x.strftime(fmt)
... label = label.rstrip("0")
... label = label.rstrip(".")
... return label
>>> from matplotlib.ticker import FuncFormatter
>>> formatter.scaled[1/(24.*60.)] = FuncFormatter(my_format_function)
"""
# This can be improved by providing some user-level direction on
# how to choose the best format (precedence, etc...)
# Perhaps a 'struct' that has a field for each time-type where a
# zero would indicate "don't show" and a number would indicate
# "show" with some sort of priority. Same priorities could mean
# show all with the same priority.
# Or more simply, perhaps just a format string for each
# possibility...
def __init__(self, locator, tz=None, defaultfmt='%Y-%m-%d'):
"""
Autoformat the date labels. The default format is the one to use
if none of the values in ``self.scaled`` are greater than the unit
returned by ``locator._get_unit()``.
"""
self._locator = locator
self._tz = tz
self.defaultfmt = defaultfmt
self._formatter = DateFormatter(self.defaultfmt, tz)
self.scaled = {DAYS_PER_YEAR: '%Y',
DAYS_PER_MONTH: '%b %Y',
1.0: '%b %d %Y',
1. / HOURS_PER_DAY: '%H:%M:%S',
1. / (MINUTES_PER_DAY): '%H:%M:%S.%f'}
def __call__(self, x, pos=None):
locator_unit_scale = float(self._locator._get_unit())
fmt = self.defaultfmt
# Pick the first scale which is greater than the locator unit.
for possible_scale in sorted(self.scaled):
if possible_scale >= locator_unit_scale:
fmt = self.scaled[possible_scale]
break
if isinstance(fmt, six.string_types):
self._formatter = DateFormatter(fmt, self._tz)
result = self._formatter(x, pos)
elif six.callable(fmt):
result = fmt(x, pos)
else:
raise TypeError('Unexpected type passed to {0!r}.'.format(self))
return result
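# Usage sketch (added), mirroring the class docstring: attach the formatter
# to an AutoDateLocator (defined later in this module) and override one
# entry of the scale-to-format mapping.
def _example_autodateformatter():
    locator = AutoDateLocator()
    formatter = AutoDateFormatter(locator)
    formatter.scaled[1. / MINUTES_PER_DAY] = '%M:%S'  # only minutes and seconds
    return formatter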
class rrulewrapper(object):
def __init__(self, freq, **kwargs):
self._construct = kwargs.copy()
self._construct["freq"] = freq
self._rrule = rrule(**self._construct)
def set(self, **kwargs):
self._construct.update(kwargs)
self._rrule = rrule(**self._construct)
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return getattr(self._rrule, name)
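# Usage sketch (added), echoing the module docstring: completely general
# date ticking through a wrapped dateutil rrule, here every 5th Easter.
def _example_rrule_locator():
    rule = rrulewrapper(YEARLY, byeaster=1, interval=5)
    return RRuleLocator(rule)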
class DateLocator(ticker.Locator):
"""
Determines the tick locations when plotting dates.
"""
hms0d = {'byhour': 0, 'byminute': 0, 'bysecond': 0}
def __init__(self, tz=None):
"""
*tz* is a :class:`tzinfo` instance.
"""
if tz is None:
tz = _get_rc_timezone()
self.tz = tz
def set_tzinfo(self, tz):
"""
Set time zone info.
"""
self.tz = tz
def datalim_to_dt(self):
"""
Convert axis data interval to datetime objects.
"""
dmin, dmax = self.axis.get_data_interval()
if dmin > dmax:
dmin, dmax = dmax, dmin
return num2date(dmin, self.tz), num2date(dmax, self.tz)
def viewlim_to_dt(self):
"""
Converts the view interval to datetime objects.
"""
vmin, vmax = self.axis.get_view_interval()
if vmin > vmax:
vmin, vmax = vmax, vmin
return num2date(vmin, self.tz), num2date(vmax, self.tz)
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
return 1
def _get_interval(self):
"""
Return the number of units for each tick.
"""
return 1
def nonsingular(self, vmin, vmax):
"""
Given the proposed upper and lower extent, adjust the range
if it is too close to being singular (i.e. a range of ~0).
"""
unit = self._get_unit()
interval = self._get_interval()
if abs(vmax - vmin) < 1e-6:
vmin -= 2 * unit * interval
vmax += 2 * unit * interval
return vmin, vmax
class RRuleLocator(DateLocator):
# use the dateutil rrule instance
def __init__(self, o, tz=None):
DateLocator.__init__(self, tz)
self.rule = o
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
delta = relativedelta(vmax, vmin)
# We need to cap at the endpoints of valid datetime
try:
start = vmin - delta
except ValueError:
start = _from_ordinalf(1.0)
try:
stop = vmax + delta
except ValueError:
# The magic number!
stop = _from_ordinalf(3652059.9999999)
self.rule.set(dtstart=start, until=stop, count=self.MAXTICKS + 1)
# estimate the number of ticks very approximately so we don't
# have to do a very expensive (and potentially near infinite)
# 'between' calculation, only to find out it will fail.
nmax, nmin = date2num((vmax, vmin))
estimate = (nmax - nmin) / (self._get_unit() * self._get_interval())
# This estimate is only an estimate, so be really conservative
# about bailing...
if estimate > self.MAXTICKS * 2:
raise RuntimeError(
'RRuleLocator estimated to generate %d ticks from %s to %s: '
'exceeds Locator.MAXTICKS * 2 (%d) ' % (estimate, vmin, vmax,
self.MAXTICKS * 2))
dates = self.rule.between(vmin, vmax, True)
if len(dates) == 0:
return date2num([vmin, vmax])
return self.raise_if_exceeds(date2num(dates))
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
freq = self.rule._rrule._freq
return self.get_unit_generic(freq)
@staticmethod
def get_unit_generic(freq):
if freq == YEARLY:
return DAYS_PER_YEAR
elif freq == MONTHLY:
return DAYS_PER_MONTH
elif freq == WEEKLY:
return DAYS_PER_WEEK
elif freq == DAILY:
return 1.0
elif freq == HOURLY:
return 1.0 / HOURS_PER_DAY
elif freq == MINUTELY:
return 1.0 / MINUTES_PER_DAY
elif freq == SECONDLY:
return 1.0 / SEC_PER_DAY
else:
# error
return -1 # or should this just return '1'?
def _get_interval(self):
return self.rule._rrule._interval
def autoscale(self):
"""
Set the view limits to include the data range.
"""
dmin, dmax = self.datalim_to_dt()
delta = relativedelta(dmax, dmin)
# We need to cap at the endpoints of valid datetime
try:
start = dmin - delta
except ValueError:
start = _from_ordinalf(1.0)
try:
stop = dmax + delta
except ValueError:
# The magic number!
stop = _from_ordinalf(3652059.9999999)
self.rule.set(dtstart=start, until=stop)
dmin, dmax = self.datalim_to_dt()
vmin = self.rule.before(dmin, True)
if not vmin:
vmin = dmin
vmax = self.rule.after(dmax, True)
if not vmax:
vmax = dmax
vmin = date2num(vmin)
vmax = date2num(vmax)
return self.nonsingular(vmin, vmax)
class AutoDateLocator(DateLocator):
"""
On autoscale, this class picks the best
:class:`DateLocator` to set the view limits and the tick
locations.
"""
def __init__(self, tz=None, minticks=5, maxticks=None,
interval_multiples=False):
"""
*minticks* is the minimum number of ticks desired, which is used to
select the type of ticking (yearly, monthly, etc.).
*maxticks* is the maximum number of ticks desired, which controls
any interval between ticks (ticking every other, every 3, etc.).
For really fine-grained control, this can be a dictionary mapping
individual rrule frequency constants (YEARLY, MONTHLY, etc.)
to their own maximum number of ticks. This can be used to keep
the number of ticks appropriate to the format chosen in
:class:`AutoDateFormatter`. Any frequency not specified in this
dictionary is given a default value.
*tz* is a :class:`tzinfo` instance.
*interval_multiples* is a boolean that indicates whether ticks
should be chosen to be multiple of the interval. This will lock
ticks to 'nicer' locations. For example, this will force the
ticks to be at hours 0,6,12,18 when hourly ticking is done at
6 hour intervals.
        The AutoDateLocator has an interval dictionary that maps the
        frequency of the tick (a constant from dateutil.rrule) to the
        multiples allowed for that ticking. The default looks like this::
self.intervald = {
YEARLY : [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
1000, 2000, 4000, 5000, 10000],
MONTHLY : [1, 2, 3, 4, 6],
DAILY : [1, 2, 3, 7, 14],
HOURLY : [1, 2, 3, 4, 6, 12],
MINUTELY: [1, 5, 10, 15, 30],
SECONDLY: [1, 5, 10, 15, 30],
MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
5000, 10000, 20000, 50000, 100000, 200000, 500000,
1000000],
}
The interval is used to specify multiples that are appropriate for
the frequency of ticking. For instance, every 7 days is sensible
for daily ticks, but for minutes/seconds, 15 or 30 make sense.
You can customize this dictionary by doing::
locator = AutoDateLocator()
locator.intervald[HOURLY] = [3] # only show every 3 hours
"""
DateLocator.__init__(self, tz)
self._locator = YearLocator()
self._freq = YEARLY
self._freqs = [YEARLY, MONTHLY, DAILY, HOURLY, MINUTELY,
SECONDLY, MICROSECONDLY]
self.minticks = minticks
self.maxticks = {YEARLY: 11, MONTHLY: 12, DAILY: 11, HOURLY: 12,
MINUTELY: 11, SECONDLY: 11, MICROSECONDLY: 8}
if maxticks is not None:
try:
self.maxticks.update(maxticks)
except TypeError:
# Assume we were given an integer. Use this as the maximum
# number of ticks for every frequency and create a
# dictionary for this
self.maxticks = dict(zip(self._freqs,
[maxticks] * len(self._freqs)))
self.interval_multiples = interval_multiples
self.intervald = {
YEARLY: [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
1000, 2000, 4000, 5000, 10000],
MONTHLY: [1, 2, 3, 4, 6],
DAILY: [1, 2, 3, 7, 14, 21],
HOURLY: [1, 2, 3, 4, 6, 12],
MINUTELY: [1, 5, 10, 15, 30],
SECONDLY: [1, 5, 10, 15, 30],
MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
5000, 10000, 20000, 50000, 100000, 200000, 500000,
1000000]}
self._byranges = [None, range(1, 13), range(1, 32),
range(0, 24), range(0, 60), range(0, 60), None]
def __call__(self):
'Return the locations of the ticks'
self.refresh()
return self._locator()
def tick_values(self, vmin, vmax):
return self.get_locator(vmin, vmax).tick_values(vmin, vmax)
def nonsingular(self, vmin, vmax):
# whatever is thrown at us, we can scale the unit.
# But default nonsingular date plots at an ~4 year period.
if vmin == vmax:
vmin = vmin - DAYS_PER_YEAR * 2
vmax = vmax + DAYS_PER_YEAR * 2
return vmin, vmax
def set_axis(self, axis):
DateLocator.set_axis(self, axis)
self._locator.set_axis(axis)
def refresh(self):
'Refresh internal information based on current limits.'
dmin, dmax = self.viewlim_to_dt()
self._locator = self.get_locator(dmin, dmax)
def _get_unit(self):
if self._freq in [MICROSECONDLY]:
return 1. / MUSECONDS_PER_DAY
else:
return RRuleLocator.get_unit_generic(self._freq)
def autoscale(self):
'Try to choose the view limits intelligently.'
dmin, dmax = self.datalim_to_dt()
self._locator = self.get_locator(dmin, dmax)
return self._locator.autoscale()
def get_locator(self, dmin, dmax):
'Pick the best locator based on a distance.'
delta = relativedelta(dmax, dmin)
tdelta = dmax - dmin
# take absolute difference
if dmin > dmax:
delta = -delta
tdelta = -tdelta
# The following uses a mix of calls to relativedelta and timedelta
# methods because there is incomplete overlap in the functionality of
# these similar functions, and it's best to avoid doing our own math
# whenever possible.
numYears = float(delta.years)
numMonths = (numYears * MONTHS_PER_YEAR) + delta.months
numDays = tdelta.days # Avoids estimates of days/month, days/year
numHours = (numDays * HOURS_PER_DAY) + delta.hours
numMinutes = (numHours * MIN_PER_HOUR) + delta.minutes
numSeconds = np.floor(_total_seconds(tdelta))
numMicroseconds = np.floor(_total_seconds(tdelta) * 1e6)
nums = [numYears, numMonths, numDays, numHours, numMinutes,
numSeconds, numMicroseconds]
use_rrule_locator = [True] * 6 + [False]
# Default setting of bymonth, etc. to pass to rrule
# [unused (for year), bymonth, bymonthday, byhour, byminute,
# bysecond, unused (for microseconds)]
byranges = [None, 1, 1, 0, 0, 0, None]
        # Loop over all the frequencies and try to find one that gives at
        # least minticks tick positions. Once this is found, look for
        # an interval from a list specific to that frequency that gives no
        # more than maxticks tick positions. Also, set up some ranges
        # (bymonth, etc.) as appropriate to be passed to rrulewrapper.
for i, (freq, num) in enumerate(zip(self._freqs, nums)):
# If this particular frequency doesn't give enough ticks, continue
if num < self.minticks:
# Since we're not using this particular frequency, set
# the corresponding by_ to None so the rrule can act as
# appropriate
byranges[i] = None
continue
# Find the first available interval that doesn't give too many
# ticks
for interval in self.intervald[freq]:
if num <= interval * (self.maxticks[freq] - 1):
break
else:
# We went through the whole loop without breaking, default to
# the last interval in the list and raise a warning
warnings.warn('AutoDateLocator was unable to pick an '
'appropriate interval for this date range. '
'It may be necessary to add an interval value '
"to the AutoDateLocator's intervald dictionary."
' Defaulting to {0}.'.format(interval))
# Set some parameters as appropriate
self._freq = freq
if self._byranges[i] and self.interval_multiples:
byranges[i] = self._byranges[i][::interval]
interval = 1
else:
byranges[i] = self._byranges[i]
# We found what frequency to use
break
else:
raise ValueError('No sensible date limit could be found in the '
'AutoDateLocator.')
if use_rrule_locator[i]:
_, bymonth, bymonthday, byhour, byminute, bysecond, _ = byranges
rrule = rrulewrapper(self._freq, interval=interval,
dtstart=dmin, until=dmax,
bymonth=bymonth, bymonthday=bymonthday,
byhour=byhour, byminute=byminute,
bysecond=bysecond)
locator = RRuleLocator(rrule, self.tz)
else:
locator = MicrosecondLocator(interval, tz=self.tz)
locator.set_axis(self.axis)
locator.set_view_interval(*self.axis.get_view_interval())
locator.set_data_interval(*self.axis.get_data_interval())
return locator
class YearLocator(DateLocator):
"""
Make ticks on a given day of each year that is a multiple of base.
Examples::
# Tick every year on Jan 1st
locator = YearLocator()
# Tick every 5 years on July 4th
locator = YearLocator(5, month=7, day=4)
"""
def __init__(self, base=1, month=1, day=1, tz=None):
"""
        Mark years that are multiples of *base* on a given month and day
        (default: Jan 1).
"""
DateLocator.__init__(self, tz)
self.base = ticker.Base(base)
self.replaced = {'month': month,
'day': day,
'hour': 0,
'minute': 0,
'second': 0,
'tzinfo': tz
}
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
ymin = self.base.le(vmin.year)
ymax = self.base.ge(vmax.year)
ticks = [vmin.replace(year=ymin, **self.replaced)]
while 1:
dt = ticks[-1]
if dt.year >= ymax:
return date2num(ticks)
year = dt.year + self.base.get_base()
ticks.append(dt.replace(year=year, **self.replaced))
def autoscale(self):
"""
Set the view limits to include the data range.
"""
dmin, dmax = self.datalim_to_dt()
ymin = self.base.le(dmin.year)
ymax = self.base.ge(dmax.year)
vmin = dmin.replace(year=ymin, **self.replaced)
vmax = dmax.replace(year=ymax, **self.replaced)
vmin = date2num(vmin)
vmax = date2num(vmax)
return self.nonsingular(vmin, vmax)
class MonthLocator(RRuleLocator):
"""
    Make ticks on occurrences of each month, e.g., 1, 3, 12.
"""
def __init__(self, bymonth=None, bymonthday=1, interval=1, tz=None):
"""
Mark every month in *bymonth*; *bymonth* can be an int or
sequence. Default is ``range(1,13)``, i.e. every month.
*interval* is the interval between each iteration. For
        example, if ``interval=2``, mark every second occurrence.
"""
if bymonth is None:
bymonth = range(1, 13)
elif isinstance(bymonth, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
bymonth = [x.item() for x in bymonth.astype(int)]
rule = rrulewrapper(MONTHLY, bymonth=bymonth, bymonthday=bymonthday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class WeekdayLocator(RRuleLocator):
"""
    Make ticks on occurrences of each weekday.
"""
def __init__(self, byweekday=1, interval=1, tz=None):
"""
Mark every weekday in *byweekday*; *byweekday* can be a number or
sequence.
Elements of *byweekday* must be one of MO, TU, WE, TH, FR, SA,
SU, the constants from :mod:`dateutil.rrule`, which have been
imported into the :mod:`matplotlib.dates` namespace.
*interval* specifies the number of weeks to skip. For example,
``interval=2`` plots every second week.
"""
if isinstance(byweekday, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
            byweekday = [x.item() for x in byweekday.astype(int)]
rule = rrulewrapper(DAILY, byweekday=byweekday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class DayLocator(RRuleLocator):
"""
    Make ticks on occurrences of each day of the month. For example,
1, 15, 30.
"""
def __init__(self, bymonthday=None, interval=1, tz=None):
"""
Mark every day in *bymonthday*; *bymonthday* can be an int or
sequence.
Default is to tick every day of the month: ``bymonthday=range(1,32)``
"""
if bymonthday is None:
bymonthday = range(1, 32)
elif isinstance(bymonthday, np.ndarray):
# This fixes a bug in dateutil <= 2.3 which prevents the use of
# numpy arrays in (among other things) the bymonthday, byweekday
# and bymonth parameters.
bymonthday = [x.item() for x in bymonthday.astype(int)]
rule = rrulewrapper(DAILY, bymonthday=bymonthday,
interval=interval, **self.hms0d)
RRuleLocator.__init__(self, rule, tz)
class HourLocator(RRuleLocator):
"""
    Make ticks on occurrences of each hour.
"""
def __init__(self, byhour=None, interval=1, tz=None):
"""
Mark every hour in *byhour*; *byhour* can be an int or sequence.
Default is to tick every hour: ``byhour=range(24)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if byhour is None:
byhour = range(24)
rule = rrulewrapper(HOURLY, byhour=byhour, interval=interval,
byminute=0, bysecond=0)
RRuleLocator.__init__(self, rule, tz)
class MinuteLocator(RRuleLocator):
"""
    Make ticks on occurrences of each minute.
"""
def __init__(self, byminute=None, interval=1, tz=None):
"""
Mark every minute in *byminute*; *byminute* can be an int or
sequence. Default is to tick every minute: ``byminute=range(60)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if byminute is None:
byminute = range(60)
rule = rrulewrapper(MINUTELY, byminute=byminute, interval=interval,
bysecond=0)
RRuleLocator.__init__(self, rule, tz)
class SecondLocator(RRuleLocator):
"""
    Make ticks on occurrences of each second.
"""
def __init__(self, bysecond=None, interval=1, tz=None):
"""
Mark every second in *bysecond*; *bysecond* can be an int or
sequence. Default is to tick every second: ``bysecond = range(60)``
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second occurrence.
"""
if bysecond is None:
bysecond = range(60)
rule = rrulewrapper(SECONDLY, bysecond=bysecond, interval=interval)
RRuleLocator.__init__(self, rule, tz)
class MicrosecondLocator(DateLocator):
"""
    Make ticks on occurrences of each microsecond.
"""
def __init__(self, interval=1, tz=None):
"""
*interval* is the interval between each iteration. For
example, if ``interval=2``, mark every second microsecond.
"""
self._interval = interval
self._wrapped_locator = ticker.MultipleLocator(interval)
self.tz = tz
def set_axis(self, axis):
self._wrapped_locator.set_axis(axis)
return DateLocator.set_axis(self, axis)
def set_view_interval(self, vmin, vmax):
self._wrapped_locator.set_view_interval(vmin, vmax)
return DateLocator.set_view_interval(self, vmin, vmax)
def set_data_interval(self, vmin, vmax):
self._wrapped_locator.set_data_interval(vmin, vmax)
return DateLocator.set_data_interval(self, vmin, vmax)
def __call__(self):
# if no data have been set, this will tank with a ValueError
try:
dmin, dmax = self.viewlim_to_dt()
except ValueError:
return []
return self.tick_values(dmin, dmax)
def tick_values(self, vmin, vmax):
nmin, nmax = date2num((vmin, vmax))
nmin *= MUSECONDS_PER_DAY
nmax *= MUSECONDS_PER_DAY
ticks = self._wrapped_locator.tick_values(nmin, nmax)
ticks = [tick / MUSECONDS_PER_DAY for tick in ticks]
return ticks
def _get_unit(self):
"""
Return how many days a unit of the locator is; used for
intelligent autoscaling.
"""
return 1. / MUSECONDS_PER_DAY
def _get_interval(self):
"""
Return the number of units for each tick.
"""
return self._interval
def _close_to_dt(d1, d2, epsilon=5):
"""
Assert that datetimes *d1* and *d2* are within *epsilon* microseconds.
"""
delta = d2 - d1
mus = abs(_total_seconds(delta) * 1e6)
assert mus < epsilon
def _close_to_num(o1, o2, epsilon=5):
"""
Assert that float ordinals *o1* and *o2* are within *epsilon*
microseconds.
"""
delta = abs((o2 - o1) * MUSECONDS_PER_DAY)
assert delta < epsilon
def epoch2num(e):
"""
    Convert an epoch or sequence of epochs to the new date format,
    that is, days since 0001.
"""
return EPOCH_OFFSET + np.asarray(e) / SEC_PER_DAY
def num2epoch(d):
"""
Convert days since 0001 to epoch. *d* can be a number or sequence.
"""
return (np.asarray(d) - EPOCH_OFFSET) * SEC_PER_DAY
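# Quick illustration (added sketch): the Unix epoch 1970-01-01 lands exactly
# at EPOCH_OFFSET ordinal days, and the two conversions are inverses.
def _example_epoch_roundtrip():
    assert epoch2num(0.0) == EPOCH_OFFSET
    assert num2epoch(epoch2num(SEC_PER_DAY)) == SEC_PER_DAY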
def mx2num(mxdates):
"""
Convert mx :class:`datetime` instance (or sequence of mx
instances) to the new date format.
"""
scalar = False
if not cbook.iterable(mxdates):
scalar = True
mxdates = [mxdates]
ret = epoch2num([m.ticks() for m in mxdates])
if scalar:
return ret[0]
else:
return ret
def date_ticker_factory(span, tz=None, numticks=5):
"""
Create a date locator with *numticks* (approx) and a date formatter
for *span* in days. Return value is (locator, formatter).
"""
if span == 0:
span = 1 / HOURS_PER_DAY
mins = span * MINUTES_PER_DAY
hrs = span * HOURS_PER_DAY
days = span
wks = span / DAYS_PER_WEEK
months = span / DAYS_PER_MONTH # Approx
years = span / DAYS_PER_YEAR # Approx
if years > numticks:
locator = YearLocator(int(years / numticks), tz=tz) # define
fmt = '%Y'
elif months > numticks:
locator = MonthLocator(tz=tz)
fmt = '%b %Y'
elif wks > numticks:
locator = WeekdayLocator(tz=tz)
fmt = '%a, %b %d'
elif days > numticks:
locator = DayLocator(interval=int(math.ceil(days / numticks)), tz=tz)
fmt = '%b %d'
elif hrs > numticks:
locator = HourLocator(interval=int(math.ceil(hrs / numticks)), tz=tz)
fmt = '%H:%M\n%b %d'
elif mins > numticks:
locator = MinuteLocator(interval=int(math.ceil(mins / numticks)),
tz=tz)
fmt = '%H:%M:%S'
else:
locator = MinuteLocator(tz=tz)
fmt = '%H:%M:%S'
formatter = DateFormatter(fmt, tz=tz)
return locator, formatter
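# Usage sketch (added): for a span of ~90 days the factory picks a weekday
# locator paired with an abbreviated day/month format.
def _example_ticker_factory():
    locator, formatter = date_ticker_factory(90)
    return locator, formatter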
def seconds(s):
"""
Return seconds as days.
"""
return float(s) / SEC_PER_DAY
def minutes(m):
"""
Return minutes as days.
"""
return float(m) / MINUTES_PER_DAY
def hours(h):
"""
Return hours as days.
"""
return h / HOURS_PER_DAY
def weeks(w):
"""
Return weeks as days.
"""
return w * DAYS_PER_WEEK
class DateConverter(units.ConversionInterface):
"""
Converter for datetime.date and datetime.datetime data,
or for date/time data represented as it would be converted
by :func:`date2num`.
The 'unit' tag for such data is None or a tzinfo instance.
"""
@staticmethod
def axisinfo(unit, axis):
"""
Return the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
"""
tz = unit
majloc = AutoDateLocator(tz=tz)
majfmt = AutoDateFormatter(majloc, tz=tz)
datemin = datetime.date(2000, 1, 1)
datemax = datetime.date(2010, 1, 1)
return units.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
default_limits=(datemin, datemax))
@staticmethod
def convert(value, unit, axis):
"""
If *value* is not already a number or sequence of numbers,
convert it with :func:`date2num`.
The *unit* and *axis* arguments are not used.
"""
if units.ConversionInterface.is_numlike(value):
return value
return date2num(value)
@staticmethod
def default_units(x, axis):
"""
Return the tzinfo instance of *x* or of its first element, or None
"""
if isinstance(x, np.ndarray):
x = x.ravel()
try:
x = cbook.safe_first_element(x)
except (TypeError, StopIteration):
pass
try:
return x.tzinfo
except AttributeError:
pass
return None
units.registry[datetime.date] = DateConverter()
units.registry[datetime.datetime] = DateConverter()
|
rbalda/neural_ocr
|
env/lib/python2.7/site-packages/matplotlib/dates.py
|
Python
|
mit
| 52,905
|
##
# Copyright 2013-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for golfc compiler toolchain (includes GCC+CUDA, OpenBLAS, LAPACK, and FFTW).
:author: Kenneth Hoste (Ghent University)
:author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada)
"""
from easybuild.toolchains.gcccuda import GccCUDA
from easybuild.toolchains.golf import Golf
from easybuild.toolchains.fft.fftw import Fftw
from easybuild.toolchains.linalg.openblas import OpenBLAS
class Golfc(GccCUDA, Golf, OpenBLAS, Fftw):
"""Compiler toolchain with GCC+CUDA, OpenBLAS, and FFTW."""
NAME = 'golfc'
SUBTOOLCHAIN = [GccCUDA.NAME, Golf.NAME]
OPTIONAL = True
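# Illustrative sketch (added; not part of EasyBuild): inspecting the method
# resolution order shows how the mixin classes combine compiler+CUDA,
# BLAS/LAPACK and FFTW support into a single toolchain definition.
def _example_mro():
    """Return the class names making up Golfc's MRO (illustration only)."""
    return [cls.__name__ for cls in Golfc.__mro__]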
|
hpcugent/easybuild-framework
|
easybuild/toolchains/golfc.py
|
Python
|
gpl-2.0
| 1,661
|
#!/bin/python2
"""Main controller of LLL
"""
import sys, os
import logging
try:
from ptyview import PtyView
from PyQt4 import QtGui
from PyQt4 import QtCore, Qt
from PyQt4.QtCore import QThread, QSettings, pyqtSignal
from PyQt4.QtGui import QMessageBox, QIcon, QMenu
import util
except ImportError:
logging.fatal('Unable to import PyQt4')
sys.exit(1)
try:
import clang.cindex
except ImportError:
logging.fatal('Unable to import python bindings of clang')
sys.exit(1)
SETTINGS = QSettings('c0deforfun', 'lll')
def initialize():
""" initialize module imports"""
config_file = SETTINGS.fileName()
SETTINGS.beginGroup('common')
if not os.path.exists(config_file):
SETTINGS.setValue('clang_lib_path', 'llvm-install-prefix/lib')
SETTINGS.setValue('lldb_path', 'llvm-install-prefix/lib/python2.7/site-packages')
SETTINGS.setValue('logging_level', 'INFO')
        logging.fatal('Please configure ' + config_file)
clang_lib_path = str(SETTINGS.value('clang_lib_path', '').toString())
lldb_path = str(SETTINGS.value('lldb_path', clang_lib_path + \
'/lib/python2.7/site-packages').toString())
logging_level = str(SETTINGS.value('logging_level', 'logging.INFO').toString())
SETTINGS.endGroup()
sys.path.append(lldb_path)
clang.cindex.Config.set_library_path(clang_lib_path)
logging.basicConfig(level=logging_level)
initialize()
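# Illustrative note (added; paths below are made-up examples): with the
# default QSettings format on Linux, the generated config file is a plain
# INI file, e.g. ~/.config/c0deforfun/lll.conf:
#
#   [common]
#   clang_lib_path=/opt/llvm/lib
#   lldb_path=/opt/llvm/lib/python2.7/site-packages
#   logging_level=INFO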
try:
import lldb
from lldb import SBDebugger, SBTarget, SBProcess, SBEvent, \
SBStream, SBBreakpoint
from debugger import Debugger
except ImportError:
logging.fatal('Unable to import LLDB python modules')
sys.exit(1)
from ui.UIMain import Ui_MainWindow
from ui.codeEditor import CodeEditor
from ui.RunConfigWindow import RunConfigWindow
from ui.About import AboutDialog
class MainWindow(QtGui.QMainWindow):
""" Main window of the debugger"""
FocusLine = pyqtSignal(str, int, name='FocusLine')
def __init__(self):
QtGui.QMainWindow.__init__(self)
self.init_ui()
self.save_restore_state(False)
self.cfg_window = RunConfigWindow()
self.about_dialog = AboutDialog()
self.pty_stdout = PtyView(self.ui.commander)
stdout_path = self.pty_stdout.get_file_path()
self.debugger = Debugger(stdout_path, stdout_path, self.cfg_window.working_dir)
self.ui.commander.set_tab_comp_handler(self.debugger.complete_tab)
self.last_highlighted_editor = None
self.my_listener = MyListeningThread(self.debugger.dbg, self.FocusLine)
self.FocusLine.connect(self.do_focus_line)
self.my_listener.StateChanged.connect(self.on_state_changed)
self.my_listener.BPChanged.connect(self.on_bp_changed)
self.debugger.listener = self.my_listener
self.my_listener.start()
self.opened_files = {}
self.bp_locations = {}
args = Qt.qApp.arguments()
self.cfg_window.working_dir = os.getcwd()
if len(args) > 1:
self.do_exe_file_open(args[1])
if len(args) > 2:
self.cfg_window.arglist = [str(x) for x in args[2:]]
logging.info('Ready')
def init_ui(self):
"""initialize UI"""
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.ui.tabCodeEditor.clear()
self.ui.tabCodeEditor.setTabsClosable(True)
self.ui.tabCodeEditor.setTabShape(QtGui.QTabWidget.Triangular)
# setup frame viewer dock
action_frames = self.ui.frame_dock.toggleViewAction()
self.ui.menuView.addAction(action_frames)
action_frames.setCheckable(True)
action_frames.setIcon(QIcon(":/icons/icons/frame.png"))
self.ui.frame_viewer.set_focus_signal(self.FocusLine)
self.tabifyDockWidget(self.ui.frame_dock, self.ui.value_dock)
self.tabifyDockWidget(self.ui.value_dock, self.ui.bp_dock)
self.ui.frame_dock.raise_()
# setup source file tree dock
action_source_tree = self.ui.file_tree_dock.toggleViewAction()
self.ui.menuView.addAction(action_source_tree)
action_source_tree.setCheckable(True)
action_source_tree.setIcon(QIcon(":/icons/icons/directory.png"))
self.ui.source_tree.set_open_file_signal(self.FocusLine)
self.connect(self.ui.action_Open, QtCore.SIGNAL('triggered()'), self.do_exe_file_open)
self.connect(self.ui.action_Run, QtCore.SIGNAL('triggered()'), self.do_run)
self.connect(self.ui.action_StepOver, QtCore.SIGNAL('triggered()'), self.do_step_over)
self.connect(self.ui.action_StepInto, QtCore.SIGNAL('triggered()'), self.do_step_into)
self.connect(self.ui.action_StepOut, QtCore.SIGNAL('triggered()'), self.do_step_out)
self.connect(self.ui.btn_frame_up, QtCore.SIGNAL('clicked()'), self.ui.frame_viewer.frame_up)
self.connect(self.ui.btn_frame_down, QtCore.SIGNAL('clicked()'), self.ui.frame_viewer.frame_down)
self.ui.frame_viewer.set_show_args(self.ui.chk_show_args)
self.connect(self.ui.tabCodeEditor, QtCore.SIGNAL('tabCloseRequested(int)'),
self.close_tab)
self.connect(self.ui.tabCodeEditor, QtCore.SIGNAL('currentChanged(int)'),
self.ui.source_tree.set_file_selected)
self.connect(self.ui.tabCodeEditor, QtCore.SIGNAL('customContextMenuRequested(QPoint)'),
self.show_tab_context_menu)
self.ui.action_Exit.triggered.connect(Qt.qApp.quit)
self.ui.commander.commandEntered.connect(self.do_command)
self.connect(self.ui.action_Run_Config, QtCore.SIGNAL('triggered()'), self.do_config)
self.connect(self.ui.action_About, QtCore.SIGNAL('triggered()'), self.show_about)
self.ui.frame_viewer.frame_changed.connect(self.ui.value_viewer.show_variables)
def show_tab_context_menu(self, point):
""" show context menu for tabs"""
if point.isNull():
return
tab_bar = self.ui.tabCodeEditor.tabBar()
idx = tab_bar.tabAt(point)
if idx < 0:
return
editor = self.ui.tabCodeEditor.widget(idx)
menu = QMenu(tab_bar)
save_action = menu.addAction(QIcon(":/icons/icons/save.png"), 'Save', editor.save)
save_action.setEnabled(editor.document().isModified())
menu.exec_(tab_bar.mapToGlobal(point))
def save_restore_state(self, is_save):
""" save/restore state to/from config file"""
if is_save:
SETTINGS.setValue('geometry', self.saveGeometry())
SETTINGS.setValue('windowState', self.saveState())
else:
self.restoreState(SETTINGS.value('windowState').toByteArray())
self.restoreGeometry(SETTINGS.value('geometry').toByteArray())
def closeEvent(self, event):
"""overrided. when close event is triggered"""
self.save_restore_state(True)
if not (self.debugger.curr_process and
self.debugger.curr_process.is_alive):
event.accept()
return
reply = QtGui.QMessageBox.question(self, 'Message', 'Quit?', \
QMessageBox.Yes, QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.my_listener.quit()
event.accept()
else:
event.ignore()
def close_tab(self, idx):
""" close a tab"""
editor = self.ui.tabCodeEditor.widget(idx)
if editor.document().isModified():
reply = QtGui.QMessageBox.question(self, 'Message', \
editor.source_file + ' has been modified', \
QMessageBox.Save, QMessageBox.Discard, QMessageBox.Cancel)
if reply == QtGui.QMessageBox.Cancel:
return
if reply == QMessageBox.Save:
editor.save()
#self.opened_files.remove()
#editor.close
self.close_src_file(editor.source_file)
self.ui.tabCodeEditor.removeTab(idx)
def show_about(self):
""" show "About" window"""
self.about_dialog.show()
def do_config(self):
"""show run-config window"""
self.cfg_window.show()
def do_exe_file_open(self, exe_filename=None):
"""open executable"""
if not exe_filename:
exe_filename = QtGui.QFileDialog.getOpenFileName(self, \
self.tr('Open Executable'), \
'', self.tr('Executable Files (*)'))
if exe_filename is None:
return
main_file, line = self.debugger.open_file(exe_filename)
if main_file is not None:
self.open_src_file(main_file.fullpath, line)
self.ui.action_Run.setEnabled(True)
self.my_listener.add_target_broadcaster(self.debugger.target.GetBroadcaster())
self.ui.source_tree.set_root(main_file.GetDirectory(), False)
self.exe_filename = exe_filename
elif line == 0:
logging.info('cannot find entry function')
else:
logging.info('error opening executable: %s', exe_filename)
def do_focus_line(self, filename, line_no):
""" slot for focusing line event"""
# if line_no is 0, just focus the tab
if not filename:
editor = None
else:
filename = str(filename)
logging.debug('Focusing [%s]:%d', filename, line_no)
if filename not in self.opened_files:
self.open_src_file(filename)
editor = self.opened_files[filename]
if editor is not None:
self.ui.tabCodeEditor.setCurrentWidget(editor)
if line_no:
if self.last_highlighted_editor and self.last_highlighted_editor != editor:
# clear previous highlight
self.last_highlighted_editor.setExtraSelections([])
editor.focus_line(line_no)
self.last_highlighted_editor = editor
def on_state_changed(self, state):
"""slot for state change event"""
self.ui.statusBar.update_state(SBDebugger.StateAsCString(state))
process = self.debugger.curr_process
steppable = process is not None and process.is_alive and state == lldb.eStateStopped
if steppable:
self.ui.frame_viewer.show_frame_info(process)
else:
self.ui.frame_viewer.clear()
if process is not None:
self.ui.action_StepOver.setEnabled(steppable)
self.ui.action_StepInto.setEnabled(steppable)
self.ui.action_StepOut.setEnabled(steppable)
self.ui.action_Run.setEnabled(state != lldb.eStateRunning)
if state == lldb.eStateExited or state == lldb.eStateCrashed \
or state == lldb.eStateSuspended:
self.do_focus_line(None, -1)
if state == lldb.eStateExited:
if process is not None:
logging.info('process exited: [%d]:%s', process.GetExitStatus(),
process.GetExitDescription())
self.ui.frame_viewer.clear()
return
def on_bp_changed(self, breakpoint, bp_type):
""" handler of breakpoing changing"""
self.ui.bp_viewer.update_bp_info(self.debugger.dbg.GetSelectedTarget())
# TODO: might be multiple locations for a BP
filename, line_no = self.debugger.getBPLocationFromDesc(breakpoint)
if not filename or not line_no:
logging.warning('Cannot find location from BP')
return
if filename not in self.bp_locations:
self.bp_locations[filename] = []
if bp_type == lldb.eBreakpointEventTypeAdded:
self.bp_locations[filename].append(line_no)
elif bp_type == lldb.eBreakpointEventTypeRemoved:
if line_no in self.bp_locations[filename]:
self.bp_locations[filename].remove(line_no)
else:
                logging.warning('removing non-existent BP ' + filename + ':' + str(line_no))
# TODO: trigger a rescan of all BPs?
return
else:
logging.debug('Unhandled BP event:' + str(bp_type))
return
if filename in self.opened_files:
lna = self.opened_files[filename].line_number_area
lna.breakpoints = self.bp_locations[filename]
lna.repaint()
def close_src_file(self, name):
""" close a source file"""
editor = self.opened_files[name]
editor.setParent(None)
del self.opened_files[name]
def open_src_file(self, src_filename, line=0):
"""show the source file in editor"""
if not os.path.isfile(src_filename) or not os.access(src_filename, os.R_OK):
            logging.warning('Unable to access ' + src_filename)
return
if src_filename in self.opened_files:
return
editor = CodeEditor(self.ui.tabCodeEditor)
idx = self.ui.tabCodeEditor.addTab(editor, os.path.basename(src_filename))
self.ui.tabCodeEditor.setTabToolTip(idx, src_filename)
editor.open_source_file(src_filename)
self.opened_files[str(src_filename)] = editor
editor.line_number_area.BPToggled.connect(self.toggle_breakpoint)
if line > 0:
self.do_focus_line(src_filename, line)
self.ui.source_tree.set_root(os.path.dirname(src_filename))
#self.debugger.disassemble(src_filename)
def toggle_breakpoint(self, line_no):
""" control the bp toggling"""
sender = self.sender()
filename = sender.filename
# it should trigger BPChanged signal.
self.debugger.toggle_breakpoint(filename, line_no)
def do_command(self, cmd):
""" on command entered"""
cmd = str(cmd)
        cmds = cmd.split()
        if not cmds:
            return
        cmd0 = cmds[0].lower()
cmd1 = ''
if len(cmds) > 1:
cmd1 = cmds[1].lower()
if cmd0 == 'r' or cmd0 == 'run' or (cmd0 == 'process' and cmd1 == 'launch'):
            if cmd0 in ('r', 'run'):
args = cmds[1:]
else:
args = cmds[2:]
self.do_run(args, True)
return
msg = self.debugger.execute(cmd0, cmd)
self.ui.commander.append(msg)
def do_run(self, args=None, from_cmd=False):
""" on run command entered"""
util.save_run_config(self.exe_filename, self.cfg_window.arglist, self.cfg_window.working_dir)
process = self.debugger.run(args, from_cmd, self.cfg_window.arglist)
if process is not None:
self.my_listener.add_process_broadcaster(process.GetBroadcaster())
def do_step_over(self):
""" on step over clicked"""
self.debugger.next(True)
def do_step_into(self):
""" on step into clicked"""
self.debugger.next(False)
def do_step_out(self):
""" on step out of frame"""
self.debugger.step_out()
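# A minimal wiring sketch (illustrative only, never invoked): assuming a
# MainWindow instance exposing the slots above and a MyListeningThread
# instance (defined below), this is how the two would be connected.
def _example_wire_listener(window, listener):
    """Connect listener-thread signals to MainWindow slots (sketch)."""
    listener.StateChanged.connect(window.on_state_changed)
    listener.BPChanged.connect(window.on_bp_changed)
    listener.start()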
class MyListeningThread(QThread):
"""Listening events"""
StateChanged = pyqtSignal(int, name='StateChanged')
    BPChanged = pyqtSignal(SBBreakpoint, int, name='BPChanged')
def __init__(self, dbg, focus_signal):
QThread.__init__(self)
self.dbg = dbg
self.sb_listener = dbg.GetListener()
#self.stopped = False
self.process_broadcaster = None
self.target_broadcaster = None
self.focus_signal = focus_signal
self.exe_filename = None
    def add_target_broadcaster(self, broadcaster):
        """ add broadcaster for target events"""
        # event bits are combined with bitwise OR so that every flag is
        # part of the mask
        mask = (SBTarget.eBroadcastBitBreakpointChanged |
                SBTarget.eBroadcastBitModulesLoaded |
                SBTarget.eBroadcastBitModulesUnloaded |
                SBTarget.eBroadcastBitSymbolsLoaded |
                SBTarget.eBroadcastBitWatchpointChanged)
        self.target_broadcaster = broadcaster
        broadcaster.AddListener(self.sb_listener, mask)
        self.sb_listener.StartListeningForEvents(broadcaster, mask)
    def add_process_broadcaster(self, broadcaster):
        """ add broadcaster for process events"""
        mask = (SBProcess.eBroadcastBitStateChanged |
                SBProcess.eBroadcastBitSTDERR |
                SBProcess.eBroadcastBitSTDOUT |
                lldb.SBProcess.eBroadcastBitInterrupt)
        broadcaster.AddListener(self.sb_listener, mask)
        self.sb_listener.StartListeningForEvents(broadcaster, mask)
        self.process_broadcaster = broadcaster
def handle_stop_state(self, process):
""" handle stopped state"""
target = self.dbg.GetSelectedTarget()
for thread in process:
reason = thread.GetStopReason()
if reason == lldb.eStopReasonBreakpoint:
#assert(thread.GetStopReasonDataCount() == 2)
bp_id = thread.GetStopReasonDataAtIndex(0)
logging.debug('bp_id:%s', bp_id)
breakpoint = target.FindBreakpointByID(int(bp_id))
if breakpoint.GetNumLocations() == 1:
bp_loc = breakpoint.GetLocationAtIndex(0)
else:
bp_loc_id = thread.GetStopReasonDataAtIndex(1)
bp_loc = breakpoint.FindLocationByID(bp_loc_id)
line_entry = bp_loc.GetAddress().GetLineEntry()
file_spec = line_entry.GetFileSpec()
filename = file_spec.fullpath
line_no = line_entry.GetLine()
logging.debug('stopped for BP %d: %s:%d', bp_id, filename, line_no)
if filename is not None:
self.focus_signal.emit(filename, int(line_no))
else:
self.focus_signal.emit('', -1)
elif reason == lldb.eStopReasonWatchpoint or \
reason == lldb.eStopReasonPlanComplete:
frame = thread.GetFrameAtIndex(0)
line_entry = frame.GetLineEntry()
file_spec = line_entry.GetFileSpec()
filename = file_spec.fullpath
line_no = line_entry.GetLine()
logging.debug('stopped @ %s:%d', filename, line_no)
if filename is not None:
self.focus_signal.emit(filename, int(line_no))
break
elif reason == lldb.eStopReasonThreadExiting:
logging.debug('thread exit')
elif reason == lldb.eStopReasonSignal or \
reason == lldb.eStopReasonException:
logging.debug('signal/exception %x', thread.GetStopReasonDataAtIndex(0))
elif reason == lldb.eStopReasonExec:
logging.debug('re-run')
def run(self):
""" listerning loop"""
event = SBEvent()
stream = SBStream()
while self.sb_listener.IsValid():
self.sb_listener.WaitForEvent(1, event)
if not event.IsValid():
event.Clear()
continue
            # Check for target events
            target = self.dbg.GetSelectedTarget()
            if not target.IsValid():
                continue
# Handle BP events
if SBBreakpoint.EventIsBreakpointEvent(event):
val_type = SBBreakpoint.GetBreakpointEventTypeFromEvent(event)
breakpoint = SBBreakpoint.GetBreakpointFromEvent(event)
self.BPChanged.emit(breakpoint, val_type)
continue
state = None
# Handle Process state events
if SBProcess.EventIsProcessEvent(event):
process = SBProcess.GetProcessFromEvent(event)
state = process.GetState()
if state == lldb.eStateStopped:
self.handle_stop_state(process)
if state is not None:
self.StateChanged.emit(state)
event.GetDescription(stream)
logging.debug('Event desc: %s', stream.GetData())
stream.Clear()
event.Clear()
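# A short sketch (illustrative only, never invoked): LLDB broadcaster event
# masks are plain integers and must be composed with bitwise OR, as
# add_target_broadcaster/add_process_broadcaster do above.
def _example_process_event_mask():
    """Return a combined process-event mask (sketch)."""
    return (SBProcess.eBroadcastBitStateChanged |
            SBProcess.eBroadcastBitSTDOUT |
            SBProcess.eBroadcastBitSTDERR)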
def main():
""" entry function"""
#QtGui.QApplication.setGraphicsSystem("native")
app = QtGui.QApplication(sys.argv)
main_window = MainWindow()
main_window.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
c0deforfun/LLL
|
lll.py
|
Python
|
mit
| 20,424
|
# coding=utf-8
# Copyright (c) 2014-2018, F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import hashlib
import json
import logging as std_logging
import os
import urllib
from eventlet import greenthread
from time import strftime
from time import time
from requests import HTTPError
from oslo_config import cfg
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
from oslo_utils import importutils
from f5.bigip import ManagementRoot
from f5_openstack_agent.lbaasv2.drivers.bigip.cluster_manager import \
ClusterManager
from f5_openstack_agent.lbaasv2.drivers.bigip import constants_v2 as f5const
from f5_openstack_agent.lbaasv2.drivers.bigip.esd_filehandler import \
EsdTagProcessor
from f5_openstack_agent.lbaasv2.drivers.bigip import exceptions as f5ex
from f5_openstack_agent.lbaasv2.drivers.bigip.lbaas_builder import \
LBaaSBuilder
from f5_openstack_agent.lbaasv2.drivers.bigip.lbaas_driver import \
LBaaSBaseDriver
from f5_openstack_agent.lbaasv2.drivers.bigip import network_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.network_service import \
NetworkServiceBuilder
from f5_openstack_agent.lbaasv2.drivers.bigip import resource_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.service_adapter import \
ServiceModelAdapter
from f5_openstack_agent.lbaasv2.drivers.bigip import ssl_profile
from f5_openstack_agent.lbaasv2.drivers.bigip import stat_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.system_helper import \
SystemHelper
from f5_openstack_agent.lbaasv2.drivers.bigip.tenants import \
BigipTenantManager
from f5_openstack_agent.lbaasv2.drivers.bigip.utils import serialized
from f5_openstack_agent.lbaasv2.drivers.bigip.virtual_address import \
VirtualAddress
LOG = logging.getLogger(__name__)
NS_PREFIX = 'qlbaas-'
__VERSION__ = '0.1.1'
# configuration objects specific to iControl driver
# XXX see /etc/neutron/services/f5/f5-openstack-agent.ini
OPTS = [ # XXX maybe we should make this a dictionary
cfg.StrOpt(
'bigiq_hostname',
help='The hostname (name or IP address) to use for the BIG-IQ host'
),
cfg.StrOpt(
'bigiq_admin_username',
default='admin',
help='The admin username to use for BIG-IQ authentication',
),
cfg.StrOpt(
'bigiq_admin_password',
default='[Provide password in config file]',
secret=True,
help='The admin password to use for BIG-IQ authentication'
),
cfg.StrOpt(
'openstack_keystone_uri',
default='http://192.0.2.248:5000/',
        help='The Keystone authentication URI'
),
cfg.StrOpt(
'openstack_admin_username',
default='admin',
help='The admin username to use for authentication '
'with the Keystone service'
),
cfg.StrOpt(
'openstack_admin_password',
default='[Provide password in config file]',
secret=True,
help='The admin password to use for authentication'
' with the Keystone service'
),
cfg.StrOpt(
'bigip_management_username',
default='admin',
help='The admin username that the BIG-IQ will use to manage '
'discovered BIG-IPs'
),
cfg.StrOpt(
'bigip_management_password',
default='[Provide password in config file]',
secret=True,
help='The admin password that the BIG-IQ will use to manage '
'discovered BIG-IPs'
),
cfg.StrOpt(
'f5_device_type', default='external',
help='What type of device onboarding'
),
cfg.StrOpt(
'f5_ha_type', default='pair',
help='Are we standalone, pair(active/standby), or scalen'
),
cfg.ListOpt(
'f5_external_physical_mappings', default=['default:1.1:True'],
help='Mapping between Neutron physical_network to interfaces'
),
cfg.StrOpt(
'f5_vtep_folder', default='Common',
help='Folder for the VTEP SelfIP'
),
cfg.StrOpt(
'f5_vtep_selfip_name', default=None,
help='Name of the VTEP SelfIP'
),
cfg.ListOpt(
'advertised_tunnel_types', default=['vxlan'],
help='tunnel types which are advertised to other VTEPs'
),
cfg.BoolOpt(
'f5_populate_static_arp', default=False,
help='create static arp entries based on service entries'
),
cfg.StrOpt(
'vlan_binding_driver',
default=None,
help='driver class for binding vlans to device ports'
),
cfg.StrOpt(
'interface_port_static_mappings',
default=None,
        help='JSON encoded static mapping of '
             'devices to list of '
             'interface and port_id'
),
cfg.StrOpt(
'l3_binding_driver',
default=None,
help='driver class for binding l3 address to l2 ports'
),
cfg.StrOpt(
'l3_binding_static_mappings', default=None,
        help='JSON encoded static mapping of '
             'subnet_id to list of '
             'port_id, device_id list.'
),
cfg.BoolOpt(
'f5_route_domain_strictness', default=False,
help='Strict route domain isolation'
),
cfg.BoolOpt(
'f5_common_networks', default=False,
help='All networks defined under Common partition'
),
cfg.BoolOpt(
'f5_common_external_networks', default=True,
help='Treat external networks as common'
),
cfg.BoolOpt(
'external_gateway_mode', default=False,
help='All subnets have an external l3 route on gateway'
),
cfg.StrOpt(
'icontrol_vcmp_hostname',
help='The hostname (name or IP address) to use for vCMP Host '
'iControl access'
),
cfg.StrOpt(
'icontrol_hostname',
default="10.190.5.7",
help='The hostname (name or IP address) to use for iControl access'
),
cfg.StrOpt(
'icontrol_username', default='admin',
help='The username to use for iControl access'
),
cfg.StrOpt(
'icontrol_password', default='admin', secret=True,
help='The password to use for iControl access'
),
cfg.IntOpt(
'icontrol_connection_timeout', default=30,
help='How many seconds to timeout a connection to BIG-IP'
),
cfg.IntOpt(
'icontrol_connection_retry_interval', default=10,
help='How many seconds to wait between retry connection attempts'
),
cfg.DictOpt(
'common_network_ids', default={},
help='network uuid to existing Common networks mapping'
),
cfg.StrOpt(
'icontrol_config_mode', default='objects',
help='Whether to use iapp or objects for bigip configuration'
),
cfg.IntOpt(
'max_namespaces_per_tenant', default=1,
help='How many routing tables the BIG-IP will allocate per tenant'
' in order to accommodate overlapping IP subnets'
),
cfg.StrOpt(
'cert_manager',
default=None,
        help='Class name of the certificate manager used for retrieving '
'certificates and keys.'
),
cfg.StrOpt(
'auth_version',
default=None,
help='Keystone authentication version (v2 or v3) for Barbican client.'
),
cfg.StrOpt(
'os_project_id',
default='service',
help='OpenStack project ID.'
),
cfg.StrOpt(
'os_auth_url',
default=None,
help='OpenStack authentication URL.'
),
cfg.StrOpt(
'os_username',
default=None,
help='OpenStack user name for Keystone authentication.'
),
cfg.StrOpt(
'os_user_domain_name',
default=None,
help='OpenStack user domain name for Keystone authentication.'
),
cfg.StrOpt(
'os_project_name',
default=None,
help='OpenStack project name for Keystone authentication.'
),
cfg.StrOpt(
'os_project_domain_name',
default=None,
        help='OpenStack project domain name for Keystone authentication.'
),
cfg.StrOpt(
'os_password',
default=None,
help='OpenStack user password for Keystone authentication.'
),
cfg.StrOpt(
'f5_network_segment_physical_network', default=None,
help='Name of physical network to use for discovery of segment ID'
),
cfg.StrOpt(
'unlegacy_setting_placeholder', default=None,
help='use this setting to separate legacy with hw/etc on agent side'
),
cfg.IntOpt(
'f5_network_segment_polling_interval', default=10,
help='Seconds between periodic scans for disconnected virtual servers'
),
cfg.IntOpt(
'f5_network_segment_gross_timeout', default=300,
help='Seconds to wait for a virtual server to become connected'
),
cfg.StrOpt(
'f5_parent_ssl_profile',
default='clientssl',
help='Parent profile used when creating client SSL profiles '
'for listeners with TERMINATED_HTTPS protocols.'
),
cfg.StrOpt(
'os_tenant_name',
default=None,
help='OpenStack tenant name for Keystone authentication (v2 only).'
),
cfg.BoolOpt(
'trace_service_requests',
default=False,
help='Log service object.'
),
cfg.BoolOpt(
'report_esd_names_in_agent',
default=False,
help='whether or not to add valid esd names during report.'
)
]
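# A minimal sketch of the standard oslo.config flow (illustrative only,
# never invoked): options declared in OPTS become attributes on the config
# object once registered; the empty argument list keeps the defaults.
def _example_register_opts():
    conf = cfg.ConfigOpts()
    conf.register_opts(OPTS)
    conf([])  # parse an empty command line so defaults become readable
    return conf.icontrol_hostname, conf.icontrol_connection_timeout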
def is_operational(method):
# Decorator to check we are operational before provisioning.
def wrapper(*args, **kwargs):
instance = args[0]
if instance.operational:
try:
return method(*args, **kwargs)
except IOError as ioe:
LOG.error('IO Error detected: %s' % method.__name__)
LOG.error(str(ioe))
raise ioe
else:
LOG.error('Cannot execute %s. Not operational. Re-initializing.'
% method.__name__)
instance._init_bigips()
return wrapper
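# Usage sketch for the decorator above (the class is hypothetical and
# exists only to illustrate the contract): decorated methods run only while
# the instance reports itself operational, otherwise _init_bigips is
# invoked to re-initialize.
class _ExampleOperationalUser(object):
    def __init__(self):
        self.operational = True
    def _init_bigips(self):
        # re-initialization hook called by the decorator when not
        # operational
        self.operational = True
    @is_operational
    def do_work(self):
        return 'provisioned'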
class iControlDriver(LBaaSBaseDriver):
"""Control service deployment."""
    # pzhang(NOTE): we only sync/CRUD objects in the statuses below
positive_plugin_const_state = \
tuple([f5const.F5_PENDING_CREATE,
f5const.F5_PENDING_UPDATE])
def __init__(self, conf, registerOpts=True):
# The registerOpts parameter allows a test to
# turn off config option handling so that it can
# set the options manually instead.
super(iControlDriver, self).__init__(conf)
self.conf = conf
if registerOpts:
self.conf.register_opts(OPTS)
self.initialized = False
self.hostnames = None
self.device_type = conf.f5_device_type
self.plugin_rpc = None # overrides base, same value
self.agent_report_state = None # overrides base, same value
self.operational = False # overrides base, same value
self.driver_name = 'f5-lbaasv2-icontrol'
#
# BIG-IP containers
#
        # BIG-IPs which are currently active
self.__bigips = {}
self.__last_connect_attempt = None
# HA and traffic group validation
self.ha_validated = False
self.tg_initialized = False
# traffic groups discovered from BIG-IPs for service placement
self.__traffic_groups = []
# base configurations to report to Neutron agent state reports
self.agent_configurations = {} # overrides base, same value
self.agent_configurations['device_drivers'] = [self.driver_name]
self.agent_configurations['icontrol_endpoints'] = {}
# to store the verified esd names
self.esd_names = []
# service component managers
self.tenant_manager = None
self.cluster_manager = None
self.system_helper = None
self.lbaas_builder = None
self.service_adapter = None
self.vlan_binding = None
self.l3_binding = None
self.cert_manager = None # overrides register_OPTS
# server helpers
self.stat_helper = stat_helper.StatHelper()
self.network_helper = network_helper.NetworkHelper()
# f5-sdk helpers
self.vs_manager = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual)
self.pool_manager = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool)
try:
            # debug logging of service requests received by driver
if self.conf.trace_service_requests:
path = '/var/log/neutron/service/'
if not os.path.exists(path):
os.makedirs(path)
self.file_name = path + strftime("%H%M%S-%m%d%Y") + '.json'
with open(self.file_name, 'w') as fp:
fp.write('[{}] ')
# driver mode settings - GRM vs L2 adjacent
if self.conf.f5_global_routed_mode:
LOG.info('WARNING - f5_global_routed_mode enabled.'
' There will be no L2 or L3 orchestration'
' or tenant isolation provisioned. All vips'
' and pool members must be routable through'
' pre-provisioned SelfIPs.')
self.conf.use_namespaces = False
self.conf.f5_snat_mode = True
self.conf.f5_snat_addresses_per_subnet = 0
self.agent_configurations['tunnel_types'] = []
self.agent_configurations['bridge_mappings'] = {}
else:
self.agent_configurations['tunnel_types'] = \
self.conf.advertised_tunnel_types
for net_id in self.conf.common_network_ids:
LOG.debug('network %s will be mapped to /Common/%s'
% (net_id, self.conf.common_network_ids[net_id]))
self.agent_configurations['common_networks'] = \
self.conf.common_network_ids
LOG.debug('Setting static ARP population to %s'
% self.conf.f5_populate_static_arp)
self.agent_configurations['f5_common_external_networks'] = \
self.conf.f5_common_external_networks
f5const.FDB_POPULATE_STATIC_ARP = \
self.conf.f5_populate_static_arp
# parse the icontrol_hostname setting
self._init_bigip_hostnames()
# instantiate the managers
self._init_bigip_managers()
self.initialized = True
LOG.debug('iControlDriver loaded successfully')
except Exception as exc:
LOG.error("exception in intializing driver %s" % str(exc))
self._set_agent_status(False)
def connect(self):
        # initialize communications with BIG-IP via iControl
try:
self._init_bigips()
except Exception as exc:
LOG.error("exception in intializing communications to BIG-IPs %s"
% str(exc))
self._set_agent_status(False)
def get_valid_esd_names(self):
LOG.debug("verified esd names in get_valid_esd_names():")
LOG.debug(self.esd_names)
return self.esd_names
def _init_bigip_managers(self):
if self.conf.vlan_binding_driver:
try:
self.vlan_binding = importutils.import_object(
self.conf.vlan_binding_driver, self.conf, self)
except ImportError:
LOG.error('Failed to import VLAN binding driver: %s'
% self.conf.vlan_binding_driver)
if self.conf.l3_binding_driver:
try:
self.l3_binding = importutils.import_object(
self.conf.l3_binding_driver, self.conf, self)
except ImportError:
LOG.error('Failed to import L3 binding driver: %s'
% self.conf.l3_binding_driver)
else:
LOG.debug('No L3 binding driver configured.'
' No L3 binding will be done.')
if self.conf.cert_manager:
try:
self.cert_manager = importutils.import_object(
self.conf.cert_manager, self.conf)
except ImportError as import_err:
LOG.error('Failed to import CertManager: %s.' %
import_err.message)
raise
except Exception as err:
LOG.error('Failed to initialize CertManager. %s' % err.message)
# re-raise as ImportError to cause agent exit
raise ImportError(err.message)
self.service_adapter = ServiceModelAdapter(self.conf)
self.tenant_manager = BigipTenantManager(self.conf, self)
self.cluster_manager = ClusterManager()
self.system_helper = SystemHelper()
self.lbaas_builder = LBaaSBuilder(self.conf, self)
if self.conf.f5_global_routed_mode:
self.network_builder = None
else:
self.network_builder = NetworkServiceBuilder(
self.conf.f5_global_routed_mode,
self.conf,
self,
self.l3_binding)
def _init_bigip_hostnames(self):
# Validate and parse bigip credentials
if not self.conf.icontrol_hostname:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_hostname',
opt_value='valid hostname or IP address'
)
if not self.conf.icontrol_username:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_username',
opt_value='valid username'
)
if not self.conf.icontrol_password:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_password',
opt_value='valid password'
)
self.hostnames = self.conf.icontrol_hostname.split(',')
self.hostnames = [item.strip() for item in self.hostnames]
self.hostnames = sorted(self.hostnames)
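        # e.g. icontrol_hostname = '10.0.0.2, 10.0.0.1'
        #   -> self.hostnames == ['10.0.0.1', '10.0.0.2']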
# initialize per host agent_configurations
for hostname in self.hostnames:
self.__bigips[hostname] = bigip = type('', (), {})()
bigip.hostname = hostname
bigip.status = 'creating'
bigip.status_message = 'creating BIG-IP from iControl hostnames'
bigip.device_interfaces = dict()
self.agent_configurations[
'icontrol_endpoints'][hostname] = {}
self.agent_configurations[
'icontrol_endpoints'][hostname]['failover_state'] = \
'undiscovered'
self.agent_configurations[
'icontrol_endpoints'][hostname]['status'] = 'unknown'
self.agent_configurations[
'icontrol_endpoints'][hostname]['status_message'] = ''
def _init_bigips(self):
# Connect to all BIG-IPs
if self.operational:
LOG.debug('iControl driver reports connection is operational')
return
LOG.debug('initializing communications to BIG-IPs')
try:
# setup logging options
if not self.conf.debug:
requests_log = std_logging.getLogger(
"requests.packages.urllib3")
requests_log.setLevel(std_logging.ERROR)
requests_log.propagate = False
else:
requests_log = std_logging.getLogger(
"requests.packages.urllib3")
requests_log.setLevel(std_logging.DEBUG)
requests_log.propagate = True
self.__last_connect_attempt = datetime.datetime.now()
for hostname in self.hostnames:
                # connect to each BIG-IP and set its status
bigip = self._open_bigip(hostname)
if bigip.status == 'connected':
# set the status down until we assure initialized
bigip.status = 'initializing'
bigip.status_message = 'initializing HA viability'
LOG.debug('initializing HA viability %s' % hostname)
device_group_name = None
if not self.ha_validated:
device_group_name = self._validate_ha(bigip)
LOG.debug('HA validated from %s with DSG %s' %
(hostname, device_group_name))
self.ha_validated = True
if not self.tg_initialized:
self._init_traffic_groups(bigip)
LOG.debug('learned traffic groups from %s as %s' %
(hostname, self.__traffic_groups))
self.tg_initialized = True
LOG.debug('initializing bigip %s' % hostname)
self._init_bigip(bigip, hostname, device_group_name)
LOG.debug('initializing agent configurations %s'
% hostname)
self._init_agent_config(bigip)
# Assure basic BIG-IP HA is operational
LOG.debug('validating HA state for %s' % hostname)
bigip.status = 'validating_HA'
bigip.status_message = 'validating the current HA state'
if self._validate_ha_operational(bigip):
LOG.debug('setting status to active for %s' % hostname)
bigip.status = 'active'
bigip.status_message = 'BIG-IP ready for provisioning'
self._post_init()
else:
LOG.debug('setting status to error for %s' % hostname)
bigip.status = 'error'
bigip.status_message = 'BIG-IP is not operational'
self._set_agent_status(False)
else:
LOG.error('error opening BIG-IP %s - %s:%s'
% (hostname, bigip.status, bigip.status_message))
self._set_agent_status(False)
except Exception as exc:
LOG.error('Invalid agent configuration: %s' % exc.message)
raise
self._set_agent_status(force_resync=True)
def _init_errored_bigips(self):
try:
errored_bigips = self.get_errored_bigips_hostnames()
if errored_bigips:
LOG.debug('attempting to recover %s BIG-IPs' %
len(errored_bigips))
for hostname in errored_bigips:
# try to connect and set status
bigip = self._open_bigip(hostname)
if bigip.status == 'connected':
# set the status down until we assure initialized
bigip.status = 'initializing'
bigip.status_message = 'initializing HA viability'
LOG.debug('initializing HA viability %s' % hostname)
LOG.debug('proceeding to initialize %s' % hostname)
device_group_name = None
if not self.ha_validated:
device_group_name = self._validate_ha(bigip)
LOG.debug('HA validated from %s with DSG %s' %
(hostname, device_group_name))
self.ha_validated = True
if not self.tg_initialized:
self._init_traffic_groups(bigip)
                        LOG.debug('known traffic groups initialized'
                                  ' from %s as %s' %
                                  (hostname, self.__traffic_groups))
self.tg_initialized = True
LOG.debug('initializing bigip %s' % hostname)
self._init_bigip(bigip, hostname, device_group_name)
LOG.debug('initializing agent configurations %s'
% hostname)
self._init_agent_config(bigip)
# Assure basic BIG-IP HA is operational
LOG.debug('validating HA state for %s' % hostname)
bigip.status = 'validating_HA'
bigip.status_message = \
'validating the current HA state'
if self._validate_ha_operational(bigip):
LOG.debug('setting status to active for %s'
% hostname)
bigip.status = 'active'
bigip.status_message = \
'BIG-IP ready for provisioning'
self._post_init()
self._set_agent_status(True)
else:
LOG.debug('setting status to error for %s'
% hostname)
bigip.status = 'error'
bigip.status_message = 'BIG-IP is not operational'
self._set_agent_status(False)
else:
LOG.debug('there are no BIG-IPs with error status')
except Exception as exc:
LOG.error('Invalid agent configuration: %s' % exc.message)
raise
def _open_bigip(self, hostname):
# Open bigip connection
try:
bigip = self.__bigips[hostname]
if bigip.status not in ['creating', 'error']:
                LOG.debug('BIG-IP %s status %s is not valid for opening'
                          ' a connection' % (hostname, bigip.status))
return bigip
bigip.status = 'connecting'
bigip.status_message = 'requesting iControl endpoint'
LOG.info('opening iControl connection to %s @ %s' %
(self.conf.icontrol_username, hostname))
bigip = ManagementRoot(hostname,
self.conf.icontrol_username,
self.conf.icontrol_password,
timeout=f5const.DEVICE_CONNECTION_TIMEOUT,
debug=self.conf.debug)
bigip.status = 'connected'
bigip.status_message = 'connected to BIG-IP'
self.__bigips[hostname] = bigip
return bigip
except Exception as exc:
LOG.error('could not communicate with ' +
'iControl device: %s' % hostname)
# since no bigip object was created, create a dummy object
# so we can store the status and status_message attributes
errbigip = type('', (), {})()
errbigip.hostname = hostname
errbigip.status = 'error'
errbigip.status_message = str(exc)[:80]
self.__bigips[hostname] = errbigip
return errbigip
def _init_bigip(self, bigip, hostname, check_group_name=None):
# Prepare a bigip for usage
try:
major_version, minor_version = self._validate_bigip_version(
bigip, hostname)
device_group_name = None
extramb = self.system_helper.get_provision_extramb(bigip)
if int(extramb) < f5const.MIN_EXTRA_MB:
raise f5ex.ProvisioningExtraMBValidateFailed(
'Device %s BIG-IP not provisioned for '
'management LARGE.' % hostname)
if self.conf.f5_ha_type == 'pair' and \
self.cluster_manager.get_sync_status(bigip) == \
'Standalone':
raise f5ex.BigIPClusterInvalidHA(
'HA mode is pair and bigip %s in standalone mode'
% hostname)
if self.conf.f5_ha_type == 'scalen' and \
self.cluster_manager.get_sync_status(bigip) == \
'Standalone':
raise f5ex.BigIPClusterInvalidHA(
'HA mode is scalen and bigip %s in standalone mode'
% hostname)
if self.conf.f5_ha_type != 'standalone':
device_group_name = \
self.cluster_manager.get_device_group(bigip)
if not device_group_name:
raise f5ex.BigIPClusterInvalidHA(
'HA mode is %s and no sync failover '
'device group found for device %s.'
% (self.conf.f5_ha_type, hostname))
if check_group_name and device_group_name != check_group_name:
raise f5ex.BigIPClusterInvalidHA(
'Invalid HA. Device %s is in device group'
' %s but should be in %s.'
% (hostname, device_group_name, check_group_name))
bigip.device_group_name = device_group_name
if self.network_builder:
for network in self.conf.common_network_ids.values():
if not self.network_builder.vlan_exists(bigip,
network,
folder='Common'):
raise f5ex.MissingNetwork(
'Common network %s on %s does not exist'
% (network, bigip.hostname))
bigip.device_name = self.cluster_manager.get_device_name(bigip)
bigip.mac_addresses = self.system_helper.get_mac_addresses(bigip)
LOG.debug("Initialized BIG-IP %s with MAC addresses %s" %
(bigip.device_name, ', '.join(bigip.mac_addresses)))
bigip.device_interfaces = \
self.system_helper.get_interface_macaddresses_dict(bigip)
bigip.assured_networks = {}
bigip.assured_tenant_snat_subnets = {}
bigip.assured_gateway_subnets = []
if self.conf.f5_ha_type != 'standalone':
self.cluster_manager.disable_auto_sync(
device_group_name, bigip)
# validate VTEP SelfIPs
if not self.conf.f5_global_routed_mode:
self.network_builder.initialize_tunneling(bigip)
# Turn off tunnel syncing between BIG-IP
# as our VTEPs properly use only local SelfIPs
if self.system_helper.get_tunnel_sync(bigip) == 'enable':
self.system_helper.set_tunnel_sync(bigip, enabled=False)
LOG.debug('connected to iControl %s @ %s ver %s.%s'
% (self.conf.icontrol_username, hostname,
major_version, minor_version))
except Exception as exc:
bigip.status = 'error'
bigip.status_message = str(exc)[:80]
raise
return bigip
def _post_init(self):
# After we have a connection to the BIG-IPs, initialize vCMP
# on all connected BIG-IPs
if self.network_builder:
self.network_builder.initialize_vcmp()
self.agent_configurations['network_segment_physical_network'] = \
self.conf.f5_network_segment_physical_network
LOG.info('iControlDriver initialized to %d bigips with username:%s'
% (len(self.get_active_bigips()),
self.conf.icontrol_username))
LOG.info('iControlDriver dynamic agent configurations:%s'
% self.agent_configurations)
if self.vlan_binding:
LOG.debug(
'getting BIG-IP device interface for VLAN Binding')
self.vlan_binding.register_bigip_interfaces()
if self.l3_binding:
LOG.debug('getting BIG-IP MAC Address for L3 Binding')
self.l3_binding.register_bigip_mac_addresses()
# endpoints = self.agent_configurations['icontrol_endpoints']
# for ic_host in endpoints.keys():
for hostbigip in self.get_all_bigips():
# hostbigip = self.__bigips[ic_host]
mac_addrs = [mac_addr for interface, mac_addr in
hostbigip.device_interfaces.items()
if interface != "mgmt"]
ports = self.plugin_rpc.get_ports_for_mac_addresses(
mac_addresses=mac_addrs)
if ports:
self.agent_configurations['nova_managed'] = True
else:
self.agent_configurations['nova_managed'] = False
if self.network_builder:
self.network_builder.post_init()
# read enhanced services definitions
esd_dir = os.path.join(self.get_config_dir(), 'esd')
esd = EsdTagProcessor(esd_dir)
try:
esd.process_esd(self.get_all_bigips())
self.lbaas_builder.init_esd(esd)
self.service_adapter.init_esd(esd)
            LOG.debug('esd details after process_esd():')
            LOG.debug(esd)
            self.esd_names = esd.esd_dict.keys() or []
            LOG.debug('esd names obtained:')
            LOG.debug(self.esd_names)
except f5ex.esdJSONFileInvalidException as err:
LOG.error("unable to initialize ESD. Error: %s.", err.message)
self._set_agent_status(False)
def _validate_ha(self, bigip):
# if there was only one address supplied and
# this is not a standalone device, get the
# devices trusted by this device.
device_group_name = None
if self.conf.f5_ha_type == 'standalone':
if len(self.hostnames) != 1:
bigip.status = 'error'
bigip.status_message = \
'HA mode is standalone and %d hosts found.'\
% len(self.hostnames)
raise f5ex.BigIPClusterInvalidHA(
'HA mode is standalone and %d hosts found.'
% len(self.hostnames))
device_group_name = 'standalone'
elif self.conf.f5_ha_type == 'pair':
device_group_name = self.cluster_manager.\
get_device_group(bigip)
if len(self.hostnames) != 2:
mgmt_addrs = []
devices = self.cluster_manager.devices(bigip)
for device in devices:
mgmt_addrs.append(
self.cluster_manager.get_mgmt_addr_by_device(
bigip, device))
self.hostnames = mgmt_addrs
if len(self.hostnames) != 2:
bigip.status = 'error'
bigip.status_message = 'HA mode is pair and %d hosts found.' \
% len(self.hostnames)
raise f5ex.BigIPClusterInvalidHA(
'HA mode is pair and %d hosts found.'
% len(self.hostnames))
elif self.conf.f5_ha_type == 'scalen':
device_group_name = self.cluster_manager.\
get_device_group(bigip)
if len(self.hostnames) < 2:
mgmt_addrs = []
devices = self.cluster_manager.devices(bigip)
for device in devices:
mgmt_addrs.append(
self.cluster_manager.get_mgmt_addr_by_device(
bigip, device)
)
self.hostnames = mgmt_addrs
if len(self.hostnames) < 2:
bigip.status = 'error'
                bigip.status_message = \
                    'HA mode is scalen and fewer than 2 hosts found.'
                raise f5ex.BigIPClusterInvalidHA(
                    'HA mode is scalen and fewer than 2 hosts found.')
return device_group_name
def _validate_ha_operational(self, bigip):
if self.conf.f5_ha_type == 'standalone':
return True
else:
# how many active BIG-IPs are there?
active_bigips = self.get_active_bigips()
if active_bigips:
sync_status = self.cluster_manager.get_sync_status(bigip)
if sync_status in ['Disconnected', 'Sync Failure']:
if len(active_bigips) > 1:
# the device should not be in the disconnected state
return False
if len(active_bigips) > 1:
# it should be in the same sync-failover group
# as the rest of the active bigips
device_group_name = \
self.cluster_manager.get_device_group(bigip)
for active_bigip in active_bigips:
adgn = self.cluster_manager.get_device_group(
active_bigip)
if not adgn == device_group_name:
return False
return True
else:
return True
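    # Example of the rule above (illustrative): with f5_ha_type 'pair' and
    # two active BIG-IPs, a device whose sync status is 'Disconnected' or
    # whose device group differs from its peers' is treated as
    # non-operational.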
def _init_agent_config(self, bigip):
# Init agent config
ic_host = {}
ic_host['version'] = self.system_helper.get_version(bigip)
ic_host['device_name'] = bigip.device_name
ic_host['platform'] = self.system_helper.get_platform(bigip)
ic_host['serial_number'] = self.system_helper.get_serial_number(bigip)
ic_host['status'] = bigip.status
ic_host['status_message'] = bigip.status_message
ic_host['failover_state'] = self.get_failover_state(bigip)
if hasattr(bigip, 'local_ip') and bigip.local_ip:
ic_host['local_ip'] = bigip.local_ip
else:
ic_host['local_ip'] = 'VTEP disabled'
self.agent_configurations['tunnel_types'] = list()
self.agent_configurations['icontrol_endpoints'][bigip.hostname] = \
ic_host
if self.network_builder:
self.agent_configurations['bridge_mappings'] = \
self.network_builder.interface_mapping
def _set_agent_status(self, force_resync=False):
for hostname in self.__bigips:
bigip = self.__bigips[hostname]
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'status'] = bigip.status
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'status_message'] = bigip.status_message
if self.conf.report_esd_names_in_agent:
LOG.debug('adding names to report:')
self.agent_configurations['esd_name'] = \
self.get_valid_esd_names()
# Policy - if any BIG-IP are active we're operational
if self.get_active_bigips():
self.operational = True
else:
self.operational = False
if self.agent_report_state:
self.agent_report_state(force_resync=force_resync)
def get_failover_state(self, bigip):
try:
if hasattr(bigip, 'tm'):
fs = bigip.tm.sys.dbs.db.load(name='failover.state')
bigip.failover_state = fs.value
return bigip.failover_state
else:
return 'error'
except Exception as exc:
LOG.exception('Error getting %s failover state' % bigip.hostname)
bigip.status = 'error'
bigip.status_message = str(exc)[:80]
self._set_agent_status(False)
return 'error'
def get_agent_configurations(self):
for hostname in self.__bigips:
bigip = self.__bigips[hostname]
if bigip.status == 'active':
failover_state = self.get_failover_state(bigip)
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'failover_state'] = failover_state
else:
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'failover_state'] = 'unknown'
self.agent_configurations['icontrol_endpoints'][
bigip.hostname]['status'] = bigip.status
self.agent_configurations['icontrol_endpoints'][
bigip.hostname]['status_message'] = bigip.status_message
self.agent_configurations['operational'] = \
self.operational
LOG.debug('agent configurations are: %s' % self.agent_configurations)
return dict(self.agent_configurations)
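    # Sketch of the reported structure (values illustrative):
    # {'device_drivers': ['f5-lbaasv2-icontrol'],
    #  'icontrol_endpoints': {'192.0.2.10': {'status': 'active',
    #                                        'status_message': '...',
    #                                        'failover_state': 'active'}},
    #  'operational': True}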
def recover_errored_devices(self):
# trigger a retry on errored BIG-IPs
try:
self._init_errored_bigips()
except Exception as exc:
LOG.error('Could not recover devices: %s' % exc.message)
def backend_integrity(self):
if self.operational:
return True
return False
def generate_capacity_score(self, capacity_policy=None):
"""Generate the capacity score of connected devices."""
if capacity_policy:
highest_metric = 0.0
highest_metric_name = None
my_methods = dir(self)
bigips = self.get_all_bigips()
for metric in capacity_policy:
func_name = 'get_' + metric
if func_name in my_methods:
max_capacity = int(capacity_policy[metric])
metric_func = getattr(self, func_name)
metric_value = 0
for bigip in bigips:
if bigip.status == 'active':
global_stats = \
self.stat_helper.get_global_statistics(bigip)
value = int(
metric_func(bigip=bigip,
global_statistics=global_stats)
)
LOG.debug('calling capacity %s on %s returned: %s'
% (func_name, bigip.hostname, value))
else:
value = 0
if value > metric_value:
metric_value = value
metric_capacity = float(metric_value) / float(max_capacity)
if metric_capacity > highest_metric:
highest_metric = metric_capacity
highest_metric_name = metric
else:
LOG.warn('capacity policy has method '
'%s which is not implemented in this driver'
% metric)
LOG.debug('capacity score: %s based on %s'
% (highest_metric, highest_metric_name))
return highest_metric
return 0
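    # Worked example for generate_capacity_score (hypothetical numbers):
    # with capacity_policy = {'node_count': 100} and one active BIG-IP
    # whose get_node_count(...) returns 25, the metric capacity is
    # 25 / 100 = 0.25; the highest such ratio across all configured
    # metrics becomes the score.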
def set_context(self, context):
# Context to keep for database access
if self.network_builder:
self.network_builder.set_context(context)
def set_plugin_rpc(self, plugin_rpc):
# Provide Plugin RPC access
self.plugin_rpc = plugin_rpc
def set_tunnel_rpc(self, tunnel_rpc):
# Provide FDB Connector with ML2 RPC access
if self.network_builder:
self.network_builder.set_tunnel_rpc(tunnel_rpc)
def set_l2pop_rpc(self, l2pop_rpc):
# Provide FDB Connector with ML2 RPC access
if self.network_builder:
self.network_builder.set_l2pop_rpc(l2pop_rpc)
def set_agent_report_state(self, report_state_callback):
"""Set Agent Report State."""
self.agent_report_state = report_state_callback
def service_exists(self, service):
return self._service_exists(service)
def flush_cache(self):
# Remove cached objects so they can be created if necessary
for bigip in self.get_all_bigips():
bigip.assured_networks = {}
bigip.assured_tenant_snat_subnets = {}
bigip.assured_gateway_subnets = []
@serialized('get_all_deployed_loadbalancers')
@is_operational
def get_all_deployed_loadbalancers(self, purge_orphaned_folders=False):
LOG.debug('getting all deployed loadbalancers on BIG-IPs')
deployed_lb_dict = {}
for bigip in self.get_all_bigips():
folders = self.system_helper.get_folders(bigip)
for folder in folders:
tenant_id = folder[len(self.service_adapter.prefix):]
if str(folder).startswith(self.service_adapter.prefix):
resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual_address)
deployed_lbs = resource.get_resources(bigip, folder)
if deployed_lbs:
for lb in deployed_lbs:
lb_id = lb.name[len(self.service_adapter.prefix):]
if lb_id in deployed_lb_dict:
deployed_lb_dict[lb_id][
'hostnames'].append(bigip.hostname)
else:
deployed_lb_dict[lb_id] = {
'id': lb_id,
'tenant_id': tenant_id,
'hostnames': [bigip.hostname]
}
else:
# delay to assure we are not in the tenant creation
# process before a virtual address is created.
greenthread.sleep(10)
deployed_lbs = resource.get_resources(bigip, folder)
if deployed_lbs:
for lb in deployed_lbs:
lb_id = lb.name[
len(self.service_adapter.prefix):]
deployed_lb_dict[lb_id] = \
{'id': lb_id, 'tenant_id': tenant_id}
else:
# Orphaned folder!
if purge_orphaned_folders:
try:
self.system_helper.purge_folder_contents(
bigip, folder)
self.system_helper.purge_folder(
bigip, folder)
LOG.error('orphaned folder %s on %s' %
(folder, bigip.hostname))
except Exception as exc:
LOG.error('error purging folder %s: %s' %
(folder, str(exc)))
return deployed_lb_dict
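    # Example return value (IDs and hostnames illustrative):
    # {'<lb_id>': {'id': '<lb_id>', 'tenant_id': '<tenant_id>',
    #              'hostnames': ['bigip1.example.com']}}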
@serialized('get_all_deployed_listeners')
@is_operational
def get_all_deployed_listeners(self, expand_subcollections=False):
LOG.debug('getting all deployed listeners on BIG-IPs')
deployed_virtual_dict = {}
for bigip in self.get_all_bigips():
folders = self.system_helper.get_folders(bigip)
for folder in folders:
tenant_id = folder[len(self.service_adapter.prefix):]
if str(folder).startswith(self.service_adapter.prefix):
resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual)
deployed_listeners = resource.get_resources(
bigip, folder, expand_subcollections)
if deployed_listeners:
for virtual in deployed_listeners:
virtual_id = \
virtual.name[len(self.service_adapter.prefix):]
l7_policy = ''
if hasattr(virtual, 'policiesReference') and \
'items' in virtual.policiesReference:
l7_policy = \
virtual.policiesReference['items'][0]
l7_policy = l7_policy['fullPath']
if virtual_id in deployed_virtual_dict:
deployed_virtual_dict[virtual_id][
'hostnames'].append(bigip.hostname)
else:
deployed_virtual_dict[virtual_id] = {
'id': virtual_id,
'tenant_id': tenant_id,
'hostnames': [bigip.hostname],
'l7_policy': l7_policy
}
return deployed_virtual_dict
@serialized('purge_orphaned_nodes')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_nodes(self, tenant_members):
node_helper = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.node)
node_dict = dict()
for bigip in self.get_all_bigips():
for tenant_id, members in tenant_members.iteritems():
partition = self.service_adapter.prefix + tenant_id
nodes = node_helper.get_resources(bigip, partition=partition)
for n in nodes:
node_dict[n.name] = n
for member in members:
rd = self.network_builder.find_subnet_route_domain(
tenant_id, member.get('subnet_id', None))
node_name = "{}%{}".format(member['address'], rd)
node_dict.pop(node_name, None)
for node_name, node in node_dict.iteritems():
try:
node_helper.delete(bigip, name=urllib.quote(node_name),
partition=partition)
except HTTPError as error:
if error.response.status_code == 400:
LOG.error(error.response)
@serialized('get_all_deployed_pools')
@is_operational
def get_all_deployed_pools(self):
LOG.debug('getting all deployed pools on BIG-IPs')
deployed_pool_dict = {}
for bigip in self.get_all_bigips():
folders = self.system_helper.get_folders(bigip)
for folder in folders:
tenant_id = folder[len(self.service_adapter.prefix):]
if str(folder).startswith(self.service_adapter.prefix):
resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool)
deployed_pools = resource.get_resources(bigip, folder)
if deployed_pools:
for pool in deployed_pools:
pool_id = \
pool.name[len(self.service_adapter.prefix):]
monitor_id = ''
if hasattr(pool, 'monitor'):
monitor = pool.monitor.split('/')[2].strip()
monitor_id = \
monitor[len(self.service_adapter.prefix):]
LOG.debug(
'pool {} has monitor {}'.format(
pool.name, monitor))
else:
LOG.debug(
'pool {} has no healthmonitors'.format(
pool.name))
if pool_id in deployed_pool_dict:
deployed_pool_dict[pool_id][
'hostnames'].append(bigip.hostname)
else:
deployed_pool_dict[pool_id] = {
'id': pool_id,
'tenant_id': tenant_id,
'hostnames': [bigip.hostname],
'monitors': monitor_id
}
return deployed_pool_dict
@serialized('purge_orphaned_pool')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_pool(self, tenant_id=None, pool_id=None,
hostnames=list()):
node_helper = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.node)
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
pool_name = self.service_adapter.prefix + pool_id
partition = self.service_adapter.prefix + tenant_id
pool = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool).load(
bigip, pool_name, partition)
members = pool.members_s.get_collection()
pool.delete()
for member in members:
node_name = member.address
try:
node_helper.delete(bigip,
name=urllib.quote(node_name),
partition=partition)
except HTTPError as e:
if e.response.status_code == 404:
pass
if e.response.status_code == 400:
LOG.warn("Failed to delete node -- in use")
else:
LOG.exception("Failed to delete node")
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('pool %s not on BIG-IP %s.'
% (pool_id, bigip.hostname))
except Exception as exc:
LOG.exception('Exception purging pool %s' % str(exc))
@serialized('get_all_deployed_monitors')
@is_operational
def get_all_deployed_health_monitors(self):
"""Retrieve a list of all Health Monitors deployed"""
        LOG.debug('getting all deployed monitors on BIG-IPs')
monitor_types = ['http_monitor', 'https_monitor', 'tcp_monitor',
'ping_monitor']
deployed_monitor_dict = {}
adapter_prefix = self.service_adapter.prefix
for bigip in self.get_all_bigips():
folders = self.system_helper.get_folders(bigip)
for folder in folders:
tenant_id = folder[len(adapter_prefix):]
if str(folder).startswith(adapter_prefix):
resources = map(
lambda x: resource_helper.BigIPResourceHelper(
getattr(resource_helper.ResourceType, x)),
monitor_types)
for resource in resources:
deployed_monitors = resource.get_resources(
bigip, folder)
if deployed_monitors:
for monitor in deployed_monitors:
monitor_id = monitor.name[len(adapter_prefix):]
if monitor_id in deployed_monitor_dict:
deployed_monitor_dict[monitor_id][
'hostnames'].append(bigip.hostname)
else:
deployed_monitor_dict[monitor_id] = {
'id': monitor_id,
'tenant_id': tenant_id,
'hostnames': [bigip.hostname]
}
return deployed_monitor_dict
@serialized('purge_orphaned_health_monitor')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_health_monitor(self, tenant_id=None, monitor_id=None,
hostnames=list()):
"""Purge all monitors that exist on the BIG-IP but not in Neutron"""
resource_types = [
resource_helper.BigIPResourceHelper(x) for x in [
resource_helper.ResourceType.http_monitor,
resource_helper.ResourceType.https_monitor,
resource_helper.ResourceType.ping_monitor,
resource_helper.ResourceType.tcp_monitor]]
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
monitor_name = self.service_adapter.prefix + monitor_id
partition = self.service_adapter.prefix + tenant_id
monitor = None
for monitor_type in resource_types:
try:
monitor = monitor_type.load(bigip, monitor_name,
partition)
break
except HTTPError as err:
if err.response.status_code == 404:
continue
                    if monitor is None:
                        LOG.error("Could not find monitor {}".format(
                            monitor_name))
                        continue
                    monitor.delete()
except Exception as exc:
LOG.exception('Exception purging monitor %s' % str(exc))
@serialized('get_all_deployed_l7_policys')
@is_operational
def get_all_deployed_l7_policys(self):
"""Retrieve a dict of all l7policies deployed
The dict returned will have the following format:
{policy_bigip_id_0: {'id': policy_id_0,
'tenant_id': tenant_id,
'hostnames': [hostnames_0]}
...
}
Where hostnames is the list of BIG-IP hostnames impacted, and the
policy_id is the policy_bigip_id without 'wrapper_policy_'
"""
        LOG.debug('getting all deployed l7_policys on BIG-IPs')
deployed_l7_policys_dict = {}
for bigip in self.get_all_bigips():
folders = self.system_helper.get_folders(bigip)
for folder in folders:
tenant_id = folder[len(self.service_adapter.prefix):]
if str(folder).startswith(self.service_adapter.prefix):
resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.l7policy)
deployed_l7_policys = resource.get_resources(
bigip, folder)
if deployed_l7_policys:
for l7_policy in deployed_l7_policys:
l7_policy_id = l7_policy.name
if l7_policy_id in deployed_l7_policys_dict:
my_dict = \
deployed_l7_policys_dict[l7_policy_id]
my_dict['hostnames'].append(bigip.hostname)
else:
po_id = l7_policy_id.replace(
'wrapper_policy_', '')
deployed_l7_policys_dict[l7_policy_id] = {
'id': po_id,
'tenant_id': tenant_id,
'hostnames': [bigip.hostname]
}
return deployed_l7_policys_dict
@serialized('purge_orphaned_l7_policy')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_l7_policy(self, tenant_id=None, l7_policy_id=None,
hostnames=list(), listener_id=None):
"""Purge all l7_policys that exist on the BIG-IP but not in Neutron"""
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
error = None
try:
l7_policy_name = l7_policy_id
partition = self.service_adapter.prefix + tenant_id
if listener_id and partition:
if self.service_adapter.prefix not in listener_id:
listener_id = \
self.service_adapter.prefix + listener_id
li_resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual).load(
bigip, listener_id, partition)
li_resource.update(policies=[])
l7_policy = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.l7policy).load(
bigip, l7_policy_name, partition)
l7_policy.delete()
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('l7_policy %s not on BIG-IP %s.'
% (l7_policy_id, bigip.hostname))
else:
error = err
                except Exception as exc:
                    error = exc
                if error:
                    kwargs = dict(
                        tenant_id=tenant_id, l7_policy_id=l7_policy_id,
                        hostname=bigip.hostname, listener_id=listener_id)
                    LOG.exception('Exception: purge_orphaned_l7_policy({}) '
                                  '"{}"'.format(kwargs, error))
@serialized('purge_orphaned_loadbalancer')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_loadbalancer(self, tenant_id=None,
loadbalancer_id=None, hostnames=list()):
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
va_name = self.service_adapter.prefix + loadbalancer_id
partition = self.service_adapter.prefix + tenant_id
va = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual_address).load(
bigip, va_name, partition)
# get virtual services (listeners)
# referencing this virtual address
vses = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual).get_resources(
bigip, partition)
vs_dest_compare = '/' + partition + '/' + va.name
for vs in vses:
if str(vs.destination).startswith(vs_dest_compare):
if hasattr(vs, 'pool'):
pool = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool).load(
bigip, os.path.basename(vs.pool),
partition)
vs.delete()
pool.delete()
else:
vs.delete()
resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual_address).delete(
bigip, va_name, partition)
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('loadbalancer %s not on BIG-IP %s.'
% (loadbalancer_id, bigip.hostname))
except Exception as exc:
LOG.exception('Exception purging loadbalancer %s'
% str(exc))
@serialized('purge_orphaned_listener')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_listener(
self, tenant_id=None, listener_id=None, hostnames=[]):
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
listener_name = self.service_adapter.prefix + listener_id
partition = self.service_adapter.prefix + tenant_id
listener = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual).load(
bigip, listener_name, partition)
listener.delete()
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('listener %s not on BIG-IP %s.'
% (listener_id, bigip.hostname))
except Exception as exc:
LOG.exception('Exception purging listener %s' % str(exc))
@serialized('create_loadbalancer')
@is_operational
def create_loadbalancer(self, loadbalancer, service):
"""Create virtual server."""
self._common_service_handler(service)
return self._update_target(service)
@serialized('update_loadbalancer')
@is_operational
def update_loadbalancer(self, old_loadbalancer, loadbalancer, service):
"""Update virtual server."""
        # anti-pattern: old_loadbalancer and loadbalancer args are unused.
self._common_service_handler(service)
return self._update_target(service)
@serialized('delete_loadbalancer')
@is_operational
def delete_loadbalancer(self, loadbalancer, service):
"""Delete loadbalancer."""
LOG.debug("Deleting loadbalancer")
self._common_service_handler(
service,
delete_partition=True,
delete_event=True)
return self._update_target(service)
@serialized('create_listener')
@is_operational
@log_helpers.log_method_call
def create_listener(self, listener, service):
"""Create virtual server."""
LOG.debug("Creating listener")
self._common_service_handler(service)
return self._update_target(service,
self._update_listener_status,
service)
@serialized('update_listener')
@is_operational
def update_listener(self, old_listener, listener, service):
"""Update virtual server."""
LOG.debug("Updating listener")
self._common_service_handler(service)
return self._update_target(service,
self._update_listener_status,
service)
@serialized('delete_listener')
@is_operational
def delete_listener(self, listener, service):
"""Delete virtual server."""
LOG.debug("Deleting listener")
self._common_service_handler(service)
return self._update_target(service,
self._update_listener_status,
service)
@serialized('create_pool')
@is_operational
def create_pool(self, pool, service):
"""Create lb pool."""
LOG.debug("Creating pool")
# pzhang(NOTE): pool may not bound with a listener
if service.get("listeners"):
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_pool_status,
service["pools"])
@serialized('update_pool')
@is_operational
def update_pool(self, old_pool, pool, service):
"""Update lb pool."""
LOG.debug("Updating pool")
if service.get("listeners"):
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_pool_status,
service["pools"])
@serialized('delete_pool')
@is_operational
def delete_pool(self, pool, service):
"""Delete lb pool."""
LOG.debug("Deleting pool")
if service.get("listeners"):
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_pool_status,
service["pools"])
@serialized('create_l7policy')
@is_operational
def create_l7policy(self, l7policy, service):
"""Create lb l7policy."""
LOG.debug("Creating l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
@serialized('update_l7policy')
@is_operational
def update_l7policy(self, old_l7policy, l7policy, service):
"""Update lb l7policy."""
LOG.debug("Updating l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
@serialized('delete_l7policy')
@is_operational
def delete_l7policy(self, l7policy, service):
"""Delete lb l7policy."""
LOG.debug("Deleting l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
# TODO(pzhang): test this
@serialized('create_l7rule')
@is_operational
def create_l7rule(self, l7rule, service):
"""Create lb l7rule."""
LOG.debug("Creating l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
# TODO(pzhang): test this
@serialized('update_l7rule')
@is_operational
def update_l7rule(self, old_l7rule, l7rule, service):
"""Update lb l7rule."""
LOG.debug("Updating l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
# TODO(pzhang): test this
@serialized('delete_l7rule')
@is_operational
def delete_l7rule(self, l7rule, service):
"""Delete lb l7rule."""
LOG.debug("Deleting l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
@serialized('create_member')
@is_operational
def create_member(self, member, service):
"""Create pool member."""
LOG.debug("Creating member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('update_member')
@is_operational
def update_member(self, old_member, member, service):
"""Update pool member."""
LOG.debug("Updating member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('delete_member')
@is_operational
def delete_member(self, member, service):
"""Delete pool member."""
LOG.debug("Deleting member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('create_health_monitor')
@is_operational
def create_health_monitor(self, health_monitor, service):
"""Create pool health monitor."""
LOG.debug("Creating health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
@serialized('update_health_monitor')
@is_operational
def update_health_monitor(self, old_health_monitor,
health_monitor, service):
"""Update pool health monitor."""
LOG.debug("Updating health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
@serialized('delete_health_monitor')
@is_operational
def delete_health_monitor(self, health_monitor, service):
"""Delete pool health monitor."""
LOG.debug("Deleting health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
def _update_target(self, service,
update_method=None, target=None):
if self.do_service_update:
if target is not None and update_method is not None:
update_method(target)
self._update_loadbalancer_status(service, timed_out=False)
loadbalancer = service.get('loadbalancer', {})
lb_provisioning_status = loadbalancer.get("provisioning_status",
f5const.F5_ERROR)
lb_pending = \
(lb_provisioning_status == f5const.F5_PENDING_CREATE or
lb_provisioning_status == f5const.F5_PENDING_UPDATE)
return lb_pending
@is_operational
def get_stats(self, service):
lb_stats = {}
stats = ['clientside.bitsIn',
'clientside.bitsOut',
'clientside.curConns',
'clientside.totConns']
loadbalancer = service['loadbalancer']
try:
# sum virtual server stats for all BIG-IPs
vs_stats = self.lbaas_builder.get_listener_stats(service, stats)
# convert to bytes
lb_stats[f5const.F5_STATS_IN_BYTES] = \
vs_stats['clientside.bitsIn']/8
lb_stats[f5const.F5_STATS_OUT_BYTES] = \
vs_stats['clientside.bitsOut']/8
lb_stats[f5const.F5_STATS_ACTIVE_CONNECTIONS] = \
vs_stats['clientside.curConns']
lb_stats[f5const.F5_STATS_TOTAL_CONNECTIONS] = \
vs_stats['clientside.totConns']
# update Neutron
self.plugin_rpc.update_loadbalancer_stats(
loadbalancer['id'], lb_stats)
except Exception as e:
LOG.error("Error getting loadbalancer stats: %s", e.message)
finally:
return lb_stats
def fdb_add(self, fdb):
# Add (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.add_bigip_fdb(bigip, fdb)
def fdb_remove(self, fdb):
# Remove (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.remove_bigip_fdb(bigip, fdb)
def fdb_update(self, fdb):
# Update (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.update_bigip_fdb(bigip, fdb)
def tunnel_update(self, **kwargs):
# Tunnel Update from Neutron Core RPC
pass
def tunnel_sync(self):
# Only sync when supported tunnel types (gre/vxlan) are present
if not any(tunnel_type in ('gre', 'vxlan') for tunnel_type in
self.agent_configurations['tunnel_types']):
return False
tunnel_ips = []
for bigip in self.get_all_bigips():
if bigip.local_ip:
tunnel_ips.append(bigip.local_ip)
self.network_builder.tunnel_sync(tunnel_ips)
# Tunnel sync sent.
return False
@serialized('sync')
@is_operational
def sync(self, service):
"""Sync service defintion to device."""
# loadbalancer and plugin_rpc may not be set
lb_id = service.get('loadbalancer', dict()).get('id', '')
if hasattr(self, 'plugin_rpc') and self.plugin_rpc and lb_id:
# Get the latest service. It may have changed.
service = self.plugin_rpc.get_service_by_loadbalancer_id(lb_id)
if service.get('loadbalancer', None):
self.lbaas_builder.to_sync = True
self._common_service_handler(service)
self.lbaas_builder.to_sync = False
# pzhang(NOTE): move update neutron db out here for the lb tree
if self.do_service_update:
self.update_service_status(service)
loadbalancer = service.get('loadbalancer', {})
lb_provisioning_status = loadbalancer.get("provisioning_status",
f5const.F5_ERROR)
lb_pending = \
(lb_provisioning_status == f5const.F5_PENDING_CREATE or
lb_provisioning_status == f5const.F5_PENDING_UPDATE)
return lb_pending
else:
LOG.debug("Attempted sync of deleted pool")
@serialized('backup_configuration')
@is_operational
def backup_configuration(self):
# Save Configuration on Devices
for bigip in self.get_all_bigips():
LOG.debug('_backup_configuration: saving device %s.'
% bigip.hostname)
self.cluster_manager.save_config(bigip)
def _get_monitor_endpoint(self, bigip, service):
monitor_type = self.service_adapter.get_monitor_type(service)
if not monitor_type:
monitor_type = ""
if monitor_type == "HTTPS":
hm = bigip.tm.ltm.monitor.https_s.https
elif monitor_type == "TCP":
hm = bigip.tm.ltm.monitor.tcps.tcp
elif monitor_type == "PING":
hm = bigip.tm.ltm.monitor.gateway_icmps.gateway_icmp
else:
hm = bigip.tm.ltm.monitor.https.http
return hm
def service_rename_required(self, service):
rename_required = False
# Returns whether the service's virtual addresses need renaming
if not service['loadbalancer']:
return False
bigips = self.get_config_bigips()
loadbalancer = service['loadbalancer']
# Does the correctly named virtual address exist?
for bigip in bigips:
virtual_address = VirtualAddress(self.service_adapter,
loadbalancer)
if not virtual_address.exists(bigip):
rename_required = True
break
return rename_required
def service_object_teardown(self, service):
# Tear down the service objects (virtuals, pools, monitors) on the bigips
if not service['loadbalancer']:
return False
bigips = self.get_config_bigips()
loadbalancer = service['loadbalancer']
folder_name = self.service_adapter.get_folder_name(
loadbalancer['tenant_id']
)
# Change to bigips
for bigip in bigips:
# Delete all virtuals
v = bigip.tm.ltm.virtuals.virtual
for listener in service['listeners']:
l_name = listener.get("name", "")
if not l_name:
svc = {"loadbalancer": loadbalancer,
"listener": listener}
vip = self.service_adapter.get_virtual(svc)
l_name = vip['name']
if v.exists(name=l_name, partition=folder_name):
# Found a virtual that is named by the OS object,
# delete it.
l_obj = v.load(name=l_name, partition=folder_name)
LOG.warn("Deleting listener: /%s/%s" %
(folder_name, l_name))
l_obj.delete(name=l_name, partition=folder_name)
# Delete all pools
p = bigip.tm.ltm.pools.pool
for os_pool in service['pools']:
p_name = os_pool.get('name', "")
if not p_name:
svc = {"loadbalancer": loadbalancer,
"pool": os_pool}
pool = self.service_adapter.get_pool(svc)
p_name = pool['name']
if p.exists(name=p_name, partition=folder_name):
p_obj = p.load(name=p_name, partition=folder_name)
LOG.warn("Deleting pool: /%s/%s" % (folder_name, p_name))
p_obj.delete(name=p_name, partition=folder_name)
# Delete all healthmonitors
for healthmonitor in service['healthmonitors']:
svc = {'loadbalancer': loadbalancer,
'healthmonitor': healthmonitor}
monitor_ep = self._get_monitor_endpoint(bigip, svc)
m_name = healthmonitor.get('name', "")
if not m_name:
hm = self.service_adapter.get_healthmonitor(svc)
m_name = hm['name']
if monitor_ep.exists(name=m_name, partition=folder_name):
m_obj = monitor_ep.load(name=m_name, partition=folder_name)
LOG.warn("Deleting monitor: /%s/%s" % (
folder_name, m_name))
m_obj.delete()
def _service_exists(self, service):
# Returns whether the bigip has the service defined
if not service['loadbalancer']:
return False
loadbalancer = service['loadbalancer']
folder_name = self.service_adapter.get_folder_name(
loadbalancer['tenant_id']
)
if self.network_builder:
# append route domain to member address
self.network_builder._annotate_service_route_domains(service)
# Foreach bigip in the cluster:
for bigip in self.get_config_bigips():
# Does the tenant folder exist?
if not self.system_helper.folder_exists(bigip, folder_name):
LOG.error("Folder %s does not exists on bigip: %s" %
(folder_name, bigip.hostname))
return False
# Get the virtual address
virtual_address = VirtualAddress(self.service_adapter,
loadbalancer)
if not virtual_address.exists(bigip):
LOG.error("Virtual address %s(%s) does not "
"exists on bigip: %s" % (virtual_address.name,
virtual_address.address,
bigip.hostname))
return False
# Ensure that each virtual service exists.
for listener in service['listeners']:
svc = {"loadbalancer": loadbalancer,
"listener": listener}
virtual_server = self.service_adapter.get_virtual_name(svc)
if not self.vs_manager.exists(bigip,
name=virtual_server['name'],
partition=folder_name):
LOG.error("Virtual /%s/%s not found on bigip: %s" %
(virtual_server['name'], folder_name,
bigip.hostname))
return False
# Ensure that each pool exists.
for pool in service['pools']:
svc = {"loadbalancer": loadbalancer,
"pool": pool}
bigip_pool = self.service_adapter.get_pool(svc)
if not self.pool_manager.exists(
bigip,
name=bigip_pool['name'],
partition=folder_name):
LOG.error("Pool /%s/%s not found on bigip: %s" %
(folder_name, bigip_pool['name'],
bigip.hostname))
return False
else:
deployed_pool = self.pool_manager.load(
bigip,
name=bigip_pool['name'],
partition=folder_name)
deployed_members = \
deployed_pool.members_s.get_collection()
# First check that number of members deployed
# is equal to the number in the service.
if len(deployed_members) != len(pool['members']):
LOG.error("Pool %s members member count mismatch "
"match: deployed %d != service %d" %
(bigip_pool['name'], len(deployed_members),
len(pool['members'])))
return False
# Ensure each pool member exists
for member in service['members']:
if member['pool_id'] == pool['id']:
lb = self.lbaas_builder
pool = lb.get_pool_by_id(
service, member["pool_id"])
svc = {"loadbalancer": loadbalancer,
"member": member,
"pool": pool}
if not lb.pool_builder.member_exists(svc, bigip):
LOG.error("Pool member not found: %s" %
svc['member'])
return False
# Ensure that each health monitor exists.
for healthmonitor in service['healthmonitors']:
svc = {"loadbalancer": loadbalancer,
"healthmonitor": healthmonitor}
monitor = self.service_adapter.get_healthmonitor(svc)
monitor_ep = self._get_monitor_endpoint(bigip, svc)
if not monitor_ep.exists(name=monitor['name'],
partition=folder_name):
LOG.error("Monitor /%s/%s not found on bigip: %s" %
(monitor['name'], folder_name, bigip.hostname))
return False
return True
def get_loadbalancers_in_tenant(self, tenant_id):
loadbalancers = self.plugin_rpc.get_all_loadbalancers()
return [lb['lb_id'] for lb in loadbalancers
if lb['tenant_id'] == tenant_id]
def _common_service_handler(self, service,
delete_partition=False,
delete_event=False):
# Assure that the service is configured on bigip(s)
start_time = time()
lb_pending = True
self.do_service_update = True
# pre-initialize so the finally clause below cannot raise a NameError
# if an exception occurs before the per-bigip hints are built
all_subnet_hints = {}
if self.conf.trace_service_requests:
self.trace_service_requests(service)
loadbalancer = service.get("loadbalancer", None)
if not loadbalancer:
LOG.error("_common_service_handler: Service loadbalancer is None")
return lb_pending
lb_provisioning_status = loadbalancer.get("provisioning_status",
f5const.F5_ERROR)
try:
try:
self.tenant_manager.assure_tenant_created(service)
except Exception as e:
LOG.error("Tenant folder creation exception: %s",
e.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
raise e
LOG.debug(" _assure_tenant_created took %.5f secs" %
(time() - start_time))
traffic_group = self.service_to_traffic_group(service)
loadbalancer['traffic_group'] = traffic_group
if self.network_builder:
start_time = time()
try:
self.network_builder.prep_service_networking(
service, traffic_group)
except f5ex.NetworkNotReady as error:
LOG.debug("Network creation deferred until network "
"definition is completed: %s",
error.message)
if not delete_event:
self.do_service_update = False
raise error
except Exception as error:
LOG.error("Prep-network exception: icontrol_driver: %s",
error.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
if not delete_event:
raise error
finally:
if time() - start_time > .001:
LOG.debug(" _prep_service_networking "
"took %.5f secs" % (time() - start_time))
all_subnet_hints = {}
for bigip in self.get_config_bigips():
# check_for_delete_subnets:
# keep track of which subnets we should check to delete
# for a deleted vip or member
# do_not_delete_subnets:
# If we add an IP to a subnet we must not delete the subnet
all_subnet_hints[bigip.device_name] = \
{'check_for_delete_subnets': {},
'do_not_delete_subnets': []}
LOG.debug("XXXXXXXXX: Pre assure service")
self.lbaas_builder.assure_service(service,
traffic_group,
all_subnet_hints)
LOG.debug("XXXXXXXXX: Post assure service")
if self.network_builder:
start_time = time()
try:
self.network_builder.post_service_networking(
service, all_subnet_hints)
except Exception as error:
LOG.error("Post-network exception: icontrol_driver: %s",
error.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
raise error
if time() - start_time > .001:
LOG.debug(" _post_service_networking "
"took %.5f secs" % (time() - start_time))
except f5ex.NetworkNotReady:
pass
except Exception as err:
LOG.exception(err)
finally:
# only delete partition if loadbalancer is being deleted
if lb_provisioning_status == f5const.F5_PENDING_DELETE:
self.tenant_manager.assure_tenant_cleanup(service,
all_subnet_hints)
def update_service_status(self, service, timed_out=False):
"""Update status of objects in controller."""
LOG.debug("_update_service_status")
if not self.plugin_rpc:
LOG.error("Cannot update status in Neutron without "
"RPC handler.")
return
if 'members' in service:
# Call update_members_status
self._update_member_status(service['members'], timed_out)
if 'healthmonitors' in service:
# Call update_monitor_status
self._update_health_monitor_status(
service['healthmonitors']
)
if 'pools' in service:
# Call update_pool_status
self._update_pool_status(
service['pools']
)
if 'listeners' in service:
# Call update_listener_status
self._update_listener_status(service)
if 'l7policy_rules' in service:
self._update_l7rule_status(service['l7policy_rules'])
if 'l7policies' in service:
self._update_l7policy_status(service['l7policies'])
self._update_loadbalancer_status(service, timed_out)
def _update_member_status(self, members, timed_out=False):
"""Update member status in OpenStack."""
for member in members:
if 'provisioning_status' in member:
provisioning_status = member['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
if timed_out and \
provisioning_status != f5const.F5_ACTIVE:
member['provisioning_status'] = f5const.F5_ERROR
operating_status = f5const.F5_OFFLINE
else:
member['provisioning_status'] = f5const.F5_ACTIVE
operating_status = f5const.F5_ONLINE
self.plugin_rpc.update_member_status(
member['id'],
member['provisioning_status'],
operating_status
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
if not member.get('parent_pool_deleted', False):
self.plugin_rpc.member_destroyed(
member['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_member_status(
member['id'],
f5const.F5_ERROR,
f5const.F5_OFFLINE)
def _update_health_monitor_status(self, health_monitors):
"""Update pool monitor status in OpenStack."""
for health_monitor in health_monitors:
if 'provisioning_status' in health_monitor:
provisioning_status = health_monitor['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_health_monitor_status(
health_monitor['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
health_monitor['provisioning_status'] = \
f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.health_monitor_destroyed(
health_monitor['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_health_monitor_status(
health_monitor['id'])
@log_helpers.log_method_call
def _update_pool_status(self, pools):
"""Update pool status in OpenStack."""
for pool in pools:
if 'provisioning_status' in pool:
provisioning_status = pool['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_pool_status(
pool['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
pool['provisioning_status'] = f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.pool_destroyed(
pool['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_pool_status(pool['id'])
@log_helpers.log_method_call
def _update_listener_status(self, service):
"""Update listener status in OpenStack."""
listeners = service['listeners']
for listener in listeners:
if 'provisioning_status' in listener:
provisioning_status = listener['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_listener_status(
listener['id'],
f5const.F5_ACTIVE,
listener['operating_status']
)
listener['provisioning_status'] = \
f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.listener_destroyed(
listener['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_listener_status(
listener['id'],
provisioning_status,
f5const.F5_OFFLINE)
@log_helpers.log_method_call
def _update_l7rule_status(self, l7rules):
"""Update l7rule status in OpenStack."""
for l7rule in l7rules:
if 'provisioning_status' in l7rule:
provisioning_status = l7rule['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_l7rule_status(
l7rule['id'],
l7rule['policy_id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.l7rule_destroyed(
l7rule['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_l7rule_status(
l7rule['id'], l7rule['policy_id'])
@log_helpers.log_method_call
def _update_l7policy_status(self, l7policies):
LOG.debug("_update_l7policy_status")
"""Update l7policy status in OpenStack."""
for l7policy in l7policies:
if 'provisioning_status' in l7policy:
provisioning_status = l7policy['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_l7policy_status(
l7policy['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
LOG.debug("calling l7policy_destroyed")
self.plugin_rpc.l7policy_destroyed(
l7policy['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_l7policy_status(l7policy['id'])
@log_helpers.log_method_call
def _update_loadbalancer_status(self, service, timed_out=False):
"""Update loadbalancer status in OpenStack."""
loadbalancer = service.get('loadbalancer', {})
provisioning_status = loadbalancer.get('provisioning_status',
f5const.F5_ERROR)
if provisioning_status in self.positive_plugin_const_state:
if timed_out:
operating_status = f5const.F5_OFFLINE
if provisioning_status == f5const.F5_PENDING_CREATE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
else:
loadbalancer['provisioning_status'] = \
f5const.F5_ACTIVE
else:
operating_status = f5const.F5_ONLINE
loadbalancer['provisioning_status'] = \
f5const.F5_ACTIVE
self.plugin_rpc.update_loadbalancer_status(
loadbalancer['id'],
loadbalancer['provisioning_status'],
operating_status)
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.loadbalancer_destroyed(
loadbalancer['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_loadbalancer_status(
loadbalancer['id'],
provisioning_status,
f5const.F5_OFFLINE)
elif provisioning_status == f5const.F5_ACTIVE:
LOG.debug('Loadbalancer provisioning status is active')
else:
LOG.error('Loadbalancer provisioning status is invalid')
@is_operational
def update_operating_status(self, service):
if 'members' in service:
if self.network_builder:
# append route domain to member address
try:
self.network_builder._annotate_service_route_domains(
service)
except f5ex.InvalidNetworkType as exc:
LOG.warning(exc.msg)
return
# get current member status
self.lbaas_builder.update_operating_status(service)
# update Neutron
for member in service['members']:
if member['provisioning_status'] == f5const.F5_ACTIVE:
operating_status = member.get('operating_status', None)
self.plugin_rpc.update_member_status(
member['id'],
provisioning_status=None,
operating_status=operating_status)
def get_active_bigip(self):
bigips = self.get_all_bigips()
if len(bigips) == 1:
return bigips[0]
for bigip in bigips:
if hasattr(bigip, 'failover_state'):
if bigip.failover_state == 'active':
return bigip
# if can't determine active, default to first one
return bigips[0]
def service_to_traffic_group(self, service):
# Hash service tenant id to index of traffic group
# i.e. which entry of iControlDriver.__traffic_groups the tenant maps to
return self.tenant_to_traffic_group(
service['loadbalancer']['tenant_id'])
def tenant_to_traffic_group(self, tenant_id):
# Hash tenant id to index of traffic group
hexhash = hashlib.md5(tenant_id).hexdigest()
tg_index = int(hexhash, 16) % len(self.__traffic_groups)
return self.__traffic_groups[tg_index]
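# A minimal sketch of the mapping above, using a hypothetical group list
# (not part of the driver):
#
#   groups = ['traffic-group-1', 'traffic-group-2']
#   index = int(hashlib.md5('tenant-a').hexdigest(), 16) % len(groups)
#   # the same tenant_id always hashes to the same traffic group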
# these functions should return only active BIG-IPs,
# not errored BIG-IPs.
def get_bigip(self):
hostnames = sorted(list(self.__bigips))
for host in hostnames:
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return self.__bigips[host]
def get_bigip_hosts(self):
return_hosts = []
for host in list(self.__bigips):
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return_hosts.append(host)
return sorted(return_hosts)
def get_all_bigips(self):
return_bigips = []
for host in list(self.__bigips):
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return_bigips.append(self.__bigips[host])
return return_bigips
def get_config_bigips(self):
return self.get_all_bigips()
# these are the refactored methods
def get_active_bigips(self):
return self.get_all_bigips()
def get_errored_bigips_hostnames(self):
return_hostnames = []
for host in list(self.__bigips):
bigip = self.__bigips[host]
if hasattr(bigip, 'status') and bigip.status == 'error':
return_hostnames.append(host)
return return_hostnames
def get_inbound_throughput(self, bigip, global_statistics=None):
return self.stat_helper.get_inbound_throughput(
bigip, global_stats=global_statistics)
def get_outbound_throughput(self, bigip, global_statistics=None):
return self.stat_helper.get_outbound_throughput(
bigip, global_stats=global_statistics)
def get_throughput(self, bigip=None, global_statistics=None):
return self.stat_helper.get_throughput(
bigip, global_stats=global_statistics)
def get_active_connections(self, bigip=None, global_statistics=None):
return self.stat_helper.get_active_connection_count(
bigip, global_stats=global_statistics)
def get_ssltps(self, bigip=None, global_statistics=None):
return self.stat_helper.get_active_SSL_TPS(
bigip, global_stats=global_statistics)
def get_node_count(self, bigip=None, global_statistics=None):
return len(bigip.tm.ltm.nodes.get_collection())
def get_clientssl_profile_count(self, bigip=None, global_statistics=None):
return ssl_profile.SSLProfileHelper.get_client_ssl_profile_count(bigip)
def get_tenant_count(self, bigip=None, global_statistics=None):
return self.system_helper.get_tenant_folder_count(bigip)
def get_tunnel_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_tunnel_count(bigip)
def get_vlan_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_vlan_count(bigip)
def get_route_domain_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_route_domain_count(bigip)
def _init_traffic_groups(self, bigip):
try:
LOG.debug('retrieving traffic groups from %s' % bigip.hostname)
self.__traffic_groups = \
self.cluster_manager.get_traffic_groups(bigip)
if 'traffic-group-local-only' in self.__traffic_groups:
LOG.debug('removing reference to non-floating traffic group')
self.__traffic_groups.remove('traffic-group-local-only')
self.__traffic_groups.sort()
LOG.debug('service placement will be done on traffic group(s): %s'
% self.__traffic_groups)
except Exception:
bigip.status = 'error'
bigip.status_message = \
'could not determine traffic groups for service placement'
raise
def _validate_bigip_version(self, bigip, hostname):
# Ensure the BIG-IP has sufficient version
major_version = self.system_helper.get_major_version(bigip)
if major_version < f5const.MIN_TMOS_MAJOR_VERSION:
raise f5ex.MajorVersionValidateFailed(
'Device %s must be at least TMOS %s.%s'
% (hostname, f5const.MIN_TMOS_MAJOR_VERSION,
f5const.MIN_TMOS_MINOR_VERSION))
minor_version = self.system_helper.get_minor_version(bigip)
if minor_version < f5const.MIN_TMOS_MINOR_VERSION:
raise f5ex.MinorVersionValidateFailed(
'Device %s must be at least TMOS %s.%s'
% (hostname, f5const.MIN_TMOS_MAJOR_VERSION,
f5const.MIN_TMOS_MINOR_VERSION))
return major_version, minor_version
def trace_service_requests(self, service):
"""Dump services to a file for debugging."""
with open(self.file_name, 'r+') as fp:
fp.seek(-1, 2)
fp.write(',')
json.dump(service, fp, sort_keys=True, indent=2)
fp.write(']')
def get_config_dir(self):
"""Determine F5 agent configuration directory.
Oslo cfg has a config_dir option, but the F5 agent is not currently
started with this option. To be complete, the code checks whether
config_dir is defined and uses that value as long as it is a single
string (non-string values are ignored). If it is not defined, use the
directory of the agent INI file, which is how the F5 agent is
currently started. If neither option is available,
fall back to /etc/neutron/services/f5.
:return: str defining configuration directory.
"""
if self.conf.config_dir and isinstance(self.conf.config_dir, str):
# use config_dir parameter if defined, and is a string
return self.conf.config_dir
elif self.conf.config_file:
# multiple config files (neutron and agent) are usually defined
if isinstance(self.conf.config_file, list):
# find agent config (f5-openstack-agent.ini)
config_files = self.conf.config_file
for file_name in config_files:
if 'f5-openstack-agent.ini' in file_name:
return os.path.dirname(file_name)
elif isinstance(self.conf.config_file, str):
# not a list, just a single string
return os.path.dirname(self.conf.config_file)
# if all else fails
return '/etc/neutron/services/f5'
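# Illustrative resolution of the fallbacks above (hypothetical paths):
# with conf.config_dir unset and
# conf.config_file == ['/etc/neutron/neutron.conf',
#                      '/etc/neutron/services/f5/f5-openstack-agent.ini'],
# get_config_dir() returns '/etc/neutron/services/f5'.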
|
F5Networks/f5-openstack-agent
|
f5_openstack_agent/lbaasv2/drivers/bigip/icontrol_driver.py
|
Python
|
apache-2.0
| 112,266
|
import re
# from https://stackoverflow.com/questions/1175208/elegant-python-function-to-convert-camelcase-to-snake-case
def snake_case(string):
''' Takes a string that represents, for example, a class name and
returns the snake case version of it. It is used for model-to-key
conversion. '''
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def camelCase(string):
''' Takes a string that represents the redis key version of a model name
and returns its camel case version. It is used for key-to-model
conversion. '''
return ''.join(s[0].upper() + s[1:] for s in string.split('_'))
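# Quick sanity checks for the two helpers above (illustrative values only):
#
#   snake_case('UserModel')     -> 'user_model'
#   snake_case('HTTPResponse')  -> 'http_response'
#   camelCase('user_model')     -> 'UserModel'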
def parse_embed(embed_array):
if not embed_array:
return []
fields = {}
for item in embed_array:
pieces = item.split('.', maxsplit=1)
if pieces[0] not in fields:
fields[pieces[0]] = None
if len(pieces) == 2:
if fields[pieces[0]] is None:
fields[pieces[0]] = []
fields[pieces[0]].append(pieces[1])
return sorted(map(
lambda item: list(item),
fields.items()
), key=lambda x: x[0])
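# Example of the grouping performed above (illustrative input):
#
#   parse_embed(['user.name', 'user.email', 'posts'])
#   -> [['posts', None], ['user', ['name', 'email']]]
#
# i.e. top-level embeds with no sub-fields map to None, and dotted embeds
# are grouped under their first segment, sorted by key.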
|
getfleety/coralillo
|
coralillo/utils.py
|
Python
|
mit
| 1,180
|
# $Id: 401_fmtp_g7221_with_bitrate_32000.py 3664 2011-07-19 03:42:28Z nanang $
import inc_sip as sip
import inc_sdp as sdp
# Answer with codec G722.1 should choose the same bitrate,
# which in this test is 32000
sdp = \
"""
v=0
o=- 3428650655 3428650655 IN IP4 192.168.1.9
s=pjmedia
c=IN IP4 192.168.1.9
t=0 0
a=X-nat:0
m=audio 4000 RTP/AVP 100 101
a=rtcp:4001 IN IP4 192.168.1.9
a=rtpmap:100 G7221/16000
a=fmtp:100 bitrate=32000
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
"""
pjsua_args = "--null-audio --auto-answer 200 --add-codec G7221"
extra_headers = ""
include = ["a=rtpmap:[\d]+ G7221/16000", # response must choose G722.1
"fmtp:[\d]+ bitrate=32000" # response must choose the same bitrate
]
exclude = []
sendto_cfg = sip.SendtoCfg("Answer with G722.1 should choose bitrate 32000", pjsua_args, sdp, 200,
extra_headers=extra_headers,
resp_inc=include, resp_exc=exclude)
|
lxki/pjsip
|
tests/pjsua/scripts-sendto/401_fmtp_g7221_with_bitrate_32000.py
|
Python
|
gpl-2.0
| 922
|
from SocialNetwork_API.arango_core import ArangoCore
from SocialNetwork_API.arango_services import ArangoBaseService
from SocialNetwork_API.const import ArangoVertex, ArangoEdge
class ArangoDownloadService(ArangoBaseService):
@classmethod
def save_download(cls, data):
try:
database = ArangoCore.get_database()
collections = [ArangoEdge.USER_DOWNLOAD, ArangoVertex.DOWNLOAD, ArangoEdge.DOWNLOAD_DATA]
with database.transaction(write=collections, commit_on_error=False) as transaction:
# Add post to graph vertex
ArangoCore.add_vertex_to_collection(ArangoVertex.DOWNLOAD, data, transaction)
# Add user_download to graph edge
ArangoCore.add_user_download_to_collection(ArangoEdge.USER_DOWNLOAD, ArangoVertex.USER, data['user_id'],
ArangoVertex.DOWNLOAD, data['id'], transaction)
ArangoCore.add_edge_to_collection(ArangoEdge.DOWNLOAD_DATA, ArangoVertex.DOWNLOAD, data['id'],
ArangoVertex.DATA, data['data_id'], transaction)
transaction.commit()
return True
except Exception as exception:
raise exception
@classmethod
def delete_download_history(cls, data):
try:
database = ArangoCore.get_database()
collections = [ArangoEdge.USER_DOWNLOAD]
with database.transaction(write=collections, commit_on_error=False) as transaction:
ArangoCore.delete_edge_from_collection(ArangoEdge.USER_DOWNLOAD, data.get('_key'), transaction)
transaction.commit()
return True
except Exception as exception:
raise exception
@classmethod
def get_download_history(cls, download_id):
try:
query_string = "LET downloads =(FOR download IN sn_user_download FILTER download._key==@download_id LIMIT 1 RETURN download) RETURN downloads[0]"
parameter = {'download_id': download_id}
result = ArangoCore.execute_query(query_string, parameter)
return result[0] if len(result) > 0 else None
except Exception as exception:
raise exception
@classmethod
def get_download_history_of_user(cls, user_id):
try:
user_id = 'sn_users/'+user_id
query_string = "LET historys = (FOR v,history IN OUTBOUND @user_id sn_user_download " \
"SORT history.created_at DESC " \
"RETURN merge(history,{infor:v})) " \
"FOR history IN historys " \
"FOR data IN sn_datas " \
"FILTER data.id == history.infor.data_id " \
"RETURN merge(history,{data:data}) "
parameter = {'user_id': user_id}
return ArangoCore.execute_query(query_string, parameter)
except Exception as exception:
raise exception
|
HoangNguyenHuy/SocialNetwork
|
src/SocialNetwork_API/arango_services/download.py
|
Python
|
mit
| 3,068
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
import weakref
import json
import base64
import botocore
import botocore.auth
from botocore.compat import six, OrderedDict
from botocore.awsrequest import create_request_object, prepare_request_dict
from botocore.exceptions import UnknownSignatureVersionError
from botocore.exceptions import UnknownClientMethodError
from botocore.exceptions import UnsupportedSignatureVersionError
from botocore.utils import datetime2timestamp
# Keep these imported. There's pre-existing code that uses them.
from botocore.utils import fix_s3_host # noqa
class RequestSigner(object):
"""
An object to sign requests before they go out over the wire using
one of the authentication mechanisms defined in ``auth.py``. This
class fires two events scoped to a service and operation name:
* choose-signer: Allows overriding the auth signer name.
* before-sign: Allows mutating the request before signing.
Together these events allow for customization of the request
signing pipeline, including overrides, request path manipulation,
and disabling signing per operation.
:type service_id: botocore.model.ServiceId
:param service_id: The service id for the service, e.g. ``S3``
:type region_name: string
:param region_name: Name of the service region, e.g. ``us-east-1``
:type signing_name: string
:param signing_name: Service signing name. This is usually the
same as the service name, but can differ. E.g.
``emr`` vs. ``elasticmapreduce``.
:type signature_version: string
:param signature_version: Signature name like ``v4``.
:type credentials: :py:class:`~botocore.credentials.Credentials`
:param credentials: User credentials with which to sign requests.
:type event_emitter: :py:class:`~botocore.hooks.BaseEventHooks`
:param event_emitter: Extension mechanism to fire events.
"""
def __init__(self, service_id, region_name, signing_name,
signature_version, credentials, event_emitter):
self._region_name = region_name
self._signing_name = signing_name
self._signature_version = signature_version
self._credentials = credentials
self._service_id = service_id
# We need weakref to prevent leaking memory in Python 2.6 on Linux 2.6
self._event_emitter = weakref.proxy(event_emitter)
@property
def region_name(self):
return self._region_name
@property
def signature_version(self):
return self._signature_version
@property
def signing_name(self):
return self._signing_name
def handler(self, operation_name=None, request=None, **kwargs):
# This is typically hooked up to the "request-created" event
# from a client's event emitter. When a new request is created
# this method is invoked to sign the request.
# Don't call this method directly.
return self.sign(operation_name, request)
def sign(self, operation_name, request, region_name=None,
signing_type='standard', expires_in=None, signing_name=None):
"""Sign a request before it goes out over the wire.
:type operation_name: string
:param operation_name: The name of the current operation, e.g.
``ListBuckets``.
:type request: AWSRequest
:param request: The request object to be sent over the wire.
:type region_name: str
:param region_name: The region to sign the request for.
:type signing_type: str
:param signing_type: The type of signing to perform. This can be one of
three possible values:
* 'standard' - This should be used for most requests.
* 'presign-url' - This should be used when pre-signing a request.
* 'presign-post' - This should be used when pre-signing an S3 post.
:type expires_in: int
:param expires_in: The number of seconds the presigned url is valid
for. This parameter is only valid for signing type 'presign-url'.
:type signing_name: str
:param signing_name: The name to use for the service when signing.
"""
explicit_region_name = region_name
if region_name is None:
region_name = self._region_name
if signing_name is None:
signing_name = self._signing_name
signature_version = self._choose_signer(
operation_name, signing_type, request.context)
# Allow mutating request before signing
self._event_emitter.emit(
'before-sign.{0}.{1}'.format(
self._service_id.hyphenize(), operation_name),
request=request, signing_name=signing_name,
region_name=self._region_name,
signature_version=signature_version, request_signer=self,
operation_name=operation_name
)
if signature_version != botocore.UNSIGNED:
kwargs = {
'signing_name': signing_name,
'region_name': region_name,
'signature_version': signature_version
}
if expires_in is not None:
kwargs['expires'] = expires_in
signing_context = request.context.get('signing', {})
if not explicit_region_name and signing_context.get('region'):
kwargs['region_name'] = signing_context['region']
if signing_context.get('signing_name'):
kwargs['signing_name'] = signing_context['signing_name']
try:
auth = self.get_auth_instance(**kwargs)
except UnknownSignatureVersionError as e:
if signing_type != 'standard':
raise UnsupportedSignatureVersionError(
signature_version=signature_version)
else:
raise e
auth.add_auth(request)
def _choose_signer(self, operation_name, signing_type, context):
"""
Allow setting the signature version via the choose-signer event.
A value of `botocore.UNSIGNED` means no signing will be performed.
:param operation_name: The operation to sign.
:param signing_type: The type of signing that the signer is to be used
for.
:return: The signature version to sign with.
"""
signing_type_suffix_map = {
'presign-post': '-presign-post',
'presign-url': '-query'
}
suffix = signing_type_suffix_map.get(signing_type, '')
signature_version = self._signature_version
if signature_version is not botocore.UNSIGNED and not \
signature_version.endswith(suffix):
signature_version += suffix
handler, response = self._event_emitter.emit_until_response(
'choose-signer.{0}.{1}'.format(
self._service_id.hyphenize(), operation_name),
signing_name=self._signing_name, region_name=self._region_name,
signature_version=signature_version, context=context)
if response is not None:
signature_version = response
# The suffix needs to be checked again in case we get an improper
# signature version from choose-signer.
if signature_version is not botocore.UNSIGNED and not \
signature_version.endswith(suffix):
signature_version += suffix
return signature_version
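# For example, with signature_version 'v4' and signing_type 'presign-url',
# the version offered to (and possibly overridden by) the choose-signer
# event is 'v4-query'.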
def get_auth_instance(self, signing_name, region_name,
signature_version=None, **kwargs):
"""
Get an auth instance which can be used to sign a request
using the given signature version.
:type signing_name: string
:param signing_name: Service signing name. This is usually the
same as the service name, but can differ. E.g.
``emr`` vs. ``elasticmapreduce``.
:type region_name: string
:param region_name: Name of the service region, e.g. ``us-east-1``
:type signature_version: string
:param signature_version: Signature name like ``v4``.
:rtype: :py:class:`~botocore.auth.BaseSigner`
:return: Auth instance to sign a request.
"""
if signature_version is None:
signature_version = self._signature_version
cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version)
if cls is None:
raise UnknownSignatureVersionError(
signature_version=signature_version)
# If there's no credentials provided (i.e credentials is None),
# then we'll pass a value of "None" over to the auth classes,
# which already handle the cases where no credentials have
# been provided.
frozen_credentials = None
if self._credentials is not None:
frozen_credentials = self._credentials.get_frozen_credentials()
kwargs['credentials'] = frozen_credentials
if cls.REQUIRES_REGION:
if self._region_name is None:
raise botocore.exceptions.NoRegionError()
kwargs['region_name'] = region_name
kwargs['service_name'] = signing_name
auth = cls(**kwargs)
return auth
# Alias get_auth for backwards compatibility.
get_auth = get_auth_instance
def generate_presigned_url(self, request_dict, operation_name,
expires_in=3600, region_name=None,
signing_name=None):
"""Generates a presigned url
:type request_dict: dict
:param request_dict: The prepared request dictionary returned by
``botocore.awsrequest.prepare_request_dict()``
:type operation_name: str
:param operation_name: The operation being signed.
:type expires_in: int
:param expires_in: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type region_name: string
:param region_name: The region name to sign the presigned url.
:type signing_name: str
:param signing_name: The name to use for the service when signing.
:returns: The presigned url
"""
request = create_request_object(request_dict)
self.sign(operation_name, request, region_name,
'presign-url', expires_in, signing_name)
request.prepare()
return request.url
class CloudFrontSigner(object):
'''A signer to create a signed CloudFront URL.
First you create a cloudfront signer based on a normalized RSA signer::
import rsa
def rsa_signer(message):
private_key = open('private_key.pem', 'r').read()
return rsa.sign(
message,
rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')),
'SHA-1') # CloudFront requires SHA-1 hash
cf_signer = CloudFrontSigner(key_id, rsa_signer)
To sign with a canned policy::
signed_url = cf_signer.generate_signed_url(
url, date_less_than=datetime(2015, 12, 1))
To sign with a custom policy::
signed_url = cf_signer.generate_signed_url(url, policy=my_policy)
'''
def __init__(self, key_id, rsa_signer):
"""Create a CloudFrontSigner.
:type key_id: str
:param key_id: The CloudFront Key Pair ID
:type rsa_signer: callable
:param rsa_signer: An RSA signer.
Its only input parameter will be the message to be signed,
and its output will be the signed content as a binary string.
The hash algorithm needed by CloudFront is SHA-1.
"""
self.key_id = key_id
self.rsa_signer = rsa_signer
def generate_presigned_url(self, url, date_less_than=None, policy=None):
"""Creates a signed CloudFront URL based on given parameters.
:type url: str
:param url: The URL of the protected object
:type date_less_than: datetime
:param date_less_than: The URL will expire after that date and time
:type policy: str
:param policy: The custom policy, possibly built by self.build_policy()
:rtype: str
:return: The signed URL.
"""
both_args_supplied = date_less_than is not None and policy is not None
neither_arg_supplied = date_less_than is None and policy is None
if both_args_supplied or neither_arg_supplied:
e = 'Need to provide either date_less_than or policy, but not both'
raise ValueError(e)
if date_less_than is not None:
# We still need to build a canned policy for signing purpose
policy = self.build_policy(url, date_less_than)
if isinstance(policy, six.text_type):
policy = policy.encode('utf8')
if date_less_than is not None:
params = ['Expires=%s' % int(datetime2timestamp(date_less_than))]
else:
params = ['Policy=%s' % self._url_b64encode(policy).decode('utf8')]
signature = self.rsa_signer(policy)
params.extend([
'Signature=%s' % self._url_b64encode(signature).decode('utf8'),
'Key-Pair-Id=%s' % self.key_id,
])
return self._build_url(url, params)
def _build_url(self, base_url, extra_params):
separator = '&' if '?' in base_url else '?'
return base_url + separator + '&'.join(extra_params)
def build_policy(self, resource, date_less_than,
date_greater_than=None, ip_address=None):
"""A helper to build policy.
:type resource: str
:param resource: The URL or the stream filename of the protected object
:type date_less_than: datetime
:param date_less_than: The URL will expire after the time has passed
:type date_greater_than: datetime
:param date_greater_than: The URL will not be valid until this time
:type ip_address: str
:param ip_address: Use 'x.x.x.x' for an IP, or 'x.x.x.x/x' for a subnet
:rtype: str
:return: The policy in a compact string.
"""
# Note:
# 1. Order in canned policy is significant. Special care has been taken
# to ensure the output will match the order defined by the document.
# There is also a test case to ensure that order.
# SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-canned-policy.html#private-content-canned-policy-creating-policy-statement
# 2. Albeit the order in custom policy is not required by CloudFront,
# we still use OrderedDict internally to ensure the result is stable
# and also matches canned policy requirement.
# SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-custom-policy.html
moment = int(datetime2timestamp(date_less_than))
condition = OrderedDict({"DateLessThan": {"AWS:EpochTime": moment}})
if ip_address:
if '/' not in ip_address:
ip_address += '/32'
condition["IpAddress"] = {"AWS:SourceIp": ip_address}
if date_greater_than:
moment = int(datetime2timestamp(date_greater_than))
condition["DateGreaterThan"] = {"AWS:EpochTime": moment}
ordered_payload = [('Resource', resource), ('Condition', condition)]
custom_policy = {"Statement": [OrderedDict(ordered_payload)]}
return json.dumps(custom_policy, separators=(',', ':'))
def _url_b64encode(self, data):
# Required by CloudFront. See also:
# http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-linux-openssl.html
return base64.b64encode(
data).replace(b'+', b'-').replace(b'=', b'_').replace(b'/', b'~')
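# Illustrative transformation: an encoding of b'ab+/cd==' becomes
# b'ab-~cd__', which is safe to embed in a CloudFront query string.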
def add_generate_db_auth_token(class_attributes, **kwargs):
class_attributes['generate_db_auth_token'] = generate_db_auth_token
def generate_db_auth_token(self, DBHostname, Port, DBUsername, Region=None):
"""Generates an auth token used to connect to a db with IAM credentials.
:type DBHostname: str
:param DBHostname: The hostname of the database to connect to.
:type Port: int
:param Port: The port number the database is listening on.
:type DBUsername: str
:param DBUsername: The username to log in as.
:type Region: str
:param Region: The region the database is in. If None, the client
region will be used.
:return: A presigned url which can be used as an auth token.
"""
region = Region
if region is None:
region = self.meta.region_name
params = {
'Action': 'connect',
'DBUser': DBUsername,
}
request_dict = {
'url_path': '/',
'query_string': '',
'headers': {},
'body': params,
'method': 'GET'
}
# RDS requires that the scheme not be set when sent over. This can cause
# issues when signing because the Python url parsing libraries follow
# RFC 1808 closely, which states that a netloc must be introduced by `//`.
# Otherwise the url is presumed to be relative, and thus the whole
# netloc would be treated as a path component. To work around this we
# introduce https here and remove it once we're done processing it.
scheme = 'https://'
endpoint_url = '%s%s:%s' % (scheme, DBHostname, Port)
prepare_request_dict(request_dict, endpoint_url)
presigned_url = self._request_signer.generate_presigned_url(
operation_name='connect', request_dict=request_dict,
region_name=region, expires_in=900, signing_name='rds-db'
)
return presigned_url[len(scheme):]
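# Example usage once this method is attached to a client (hostname and
# user are placeholders; boto3 wires this hook onto RDS clients):
#
#   rds = boto3.client('rds')
#   token = rds.generate_db_auth_token(
#       DBHostname='mydb.abc123.us-east-1.rds.amazonaws.com',
#       Port=3306, DBUsername='iam_user')
#   # pass `token` as the password when opening the DB connection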
class S3PostPresigner(object):
def __init__(self, request_signer):
self._request_signer = request_signer
def generate_presigned_post(self, request_dict, fields=None,
conditions=None, expires_in=3600,
region_name=None):
"""Generates the url and the form fields used for a presigned s3 post
:type request_dict: dict
:param request_dict: The prepared request dictionary returned by
``botocore.awsrequest.prepare_request_dict()``
:type fields: dict
:param fields: A dictionary of prefilled form fields to build on top
of.
:type conditions: list
:param conditions: A list of conditions to include in the policy. Each
element can be either a list or a structure. For example:
[
{"acl": "public-read"},
{"bucket": "mybucket"},
["starts-with", "$key", "mykey"]
]
:type expires_in: int
:param expires_in: The number of seconds the presigned post is valid
for.
:type region_name: string
:param region_name: The region name to sign the presigned post to.
:rtype: dict
:returns: A dictionary with two elements: ``url`` and ``fields``.
Url is the url to post to. Fields is a dictionary filled with
the form fields and respective values to use when submitting the
post. For example:
{'url': 'https://mybucket.s3.amazonaws.com',
'fields': {'acl': 'public-read',
'key': 'mykey',
'signature': 'mysignature',
'policy': 'mybase64 encoded policy'}
}
"""
if fields is None:
fields = {}
if conditions is None:
conditions = []
# Create the policy for the post.
policy = {}
# Create an expiration date for the policy
datetime_now = datetime.datetime.utcnow()
expire_date = datetime_now + datetime.timedelta(seconds=expires_in)
policy['expiration'] = expire_date.strftime(botocore.auth.ISO8601)
# Append all of the conditions that the user supplied.
policy['conditions'] = []
for condition in conditions:
policy['conditions'].append(condition)
# Store the policy and the fields in the request for signing
request = create_request_object(request_dict)
request.context['s3-presign-post-fields'] = fields
request.context['s3-presign-post-policy'] = policy
self._request_signer.sign(
'PutObject', request, region_name, 'presign-post')
# Return the url and the fields for the form to post.
return {'url': request.url, 'fields': fields}
def add_generate_presigned_url(class_attributes, **kwargs):
class_attributes['generate_presigned_url'] = generate_presigned_url
def generate_presigned_url(self, ClientMethod, Params=None, ExpiresIn=3600,
HttpMethod=None):
"""Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
``ClientMethod``.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method's model.
:returns: The presigned url
"""
client_method = ClientMethod
params = Params
if params is None:
params = {}
expires_in = ExpiresIn
http_method = HttpMethod
context = {
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
}
request_signer = self._request_signer
serializer = self._serializer
try:
operation_name = self._PY_TO_OP_NAME[client_method]
except KeyError:
raise UnknownClientMethodError(method_name=client_method)
operation_model = self.meta.service_model.operation_model(
operation_name)
params = self._emit_api_params(params, operation_model, context)
# Create a request dict based on the params to serialize.
request_dict = serializer.serialize_to_request(
params, operation_model)
# Switch out the http method if user specified it.
if http_method is not None:
request_dict['method'] = http_method
# Prepare the request dict by including the client's endpoint url.
prepare_request_dict(
request_dict, endpoint_url=self.meta.endpoint_url, context=context)
# Generate the presigned url.
return request_signer.generate_presigned_url(
request_dict=request_dict, expires_in=expires_in,
operation_name=operation_name)
def add_generate_presigned_post(class_attributes, **kwargs):
class_attributes['generate_presigned_post'] = generate_presigned_post
def generate_presigned_post(self, Bucket, Key, Fields=None, Conditions=None,
ExpiresIn=3600):
"""Builds the url and the form fields used for a presigned s3 post
:type Bucket: string
:param Bucket: The name of the bucket to presign the post to. Note that
bucket related conditions should not be included in the
``conditions`` parameter.
:type Key: string
:param Key: Key name, optionally add ${filename} to the end to
attach the submitted filename. Note that key related conditions and
fields are filled out for you and should not be included in the
``Fields`` or ``Conditions`` parameter.
:type Fields: dict
:param Fields: A dictionary of prefilled form fields to build on top
of. Elements that may be included are acl, Cache-Control,
Content-Type, Content-Disposition, Content-Encoding, Expires,
success_action_redirect, redirect, success_action_status,
and x-amz-meta-.
Note that if a particular element is included in the fields
dictionary it will not be automatically added to the conditions
list. You must specify a condition for the element as well.
:type Conditions: list
:param Conditions: A list of conditions to include in the policy. Each
element can be either a list or a structure. For example:
[
{"acl": "public-read"},
["content-length-range", 2, 5],
["starts-with", "$success_action_redirect", ""]
]
Conditions that are included may pertain to acl,
content-length-range, Cache-Control, Content-Type,
Content-Disposition, Content-Encoding, Expires,
success_action_redirect, redirect, success_action_status,
and/or x-amz-meta-.
Note that if you include a condition, you must specify
a valid value in the fields dictionary as well. A value will
not be added automatically to the fields dictionary based on the
conditions.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned post
is valid for.
:rtype: dict
:returns: A dictionary with two elements: ``url`` and ``fields``.
Url is the url to post to. Fields is a dictionary filled with
the form fields and respective values to use when submitting the
post. For example:
{'url': 'https://mybucket.s3.amazonaws.com',
'fields': {'acl': 'public-read',
'key': 'mykey',
'signature': 'mysignature',
'policy': 'mybase64 encoded policy'}
}
"""
bucket = Bucket
key = Key
fields = Fields
conditions = Conditions
expires_in = ExpiresIn
if fields is None:
fields = {}
else:
fields = fields.copy()
if conditions is None:
conditions = []
post_presigner = S3PostPresigner(self._request_signer)
serializer = self._serializer
# We choose the CreateBucket operation model because its url gets
# serialized to what a presign post requires.
operation_model = self.meta.service_model.operation_model(
'CreateBucket')
# Create a request dict based on the params to serialize.
request_dict = serializer.serialize_to_request(
{'Bucket': bucket}, operation_model)
# Prepare the request dict by including the client's endpoint url.
prepare_request_dict(
request_dict, endpoint_url=self.meta.endpoint_url,
context={
'is_presign_request': True,
'use_global_endpoint': _should_use_global_endpoint(self),
},
)
# Append the bucket name to the list of conditions.
conditions.append({'bucket': bucket})
# If the key ends with filename, the only constraint that can be
# imposed is if it starts with the specified prefix.
if key.endswith('${filename}'):
conditions.append(["starts-with", '$key', key[:-len('${filename}')]])
else:
conditions.append({'key': key})
# Add the key to the fields.
fields['key'] = key
return post_presigner.generate_presigned_post(
request_dict=request_dict, fields=fields, conditions=conditions,
expires_in=expires_in)
def _should_use_global_endpoint(client):
if client.meta.partition != 'aws':
return False
s3_config = client.meta.config.s3
if s3_config:
if s3_config.get('use_dualstack_endpoint', False):
return False
if s3_config.get('us_east_1_regional_endpoint') == 'regional' and \
client.meta.config.region_name == 'us-east-1':
return False
return True
|
boto/botocore
|
botocore/signers.py
|
Python
|
apache-2.0
| 28,412
|
#Create mp4 movie from png files in a folder, using ffmpeg encoder.
import os, random, sys
import glob
#--------------------------USER INPUT--------------------------------
#
#The old data location
#if sys.platform == 'darwin': BASEDIR='/Users/kamen/ubuntu_share/MWA_DATA/'
#if sys.platform == 'linux2': BASEDIR='/mnt/ubuntu_share/MWA_DATA/'
#The new data location
if sys.platform == 'darwin': BASEDIR='/Volumes/MyPassport/MWA_DATA/'
if sys.platform == 'linux2': BASEDIR='/mnt/MWA_DATA/'
polarization='XX'
fps='5' #FRAMES PER SECOND
force=1 #Overwrite old files if present
vv=0 #Leave at 0 for now
maxindices=['1','2']
#
#-----------------------END USER INPUT--------------------------------
process = 'ffmpeg -y -f image2 -r '
path_call = ' -i '
ffmpeg_params1 = ' -an -pix_fmt "yuv420p" -vcodec "libx264" -level 41 -crf 18.0 -b "28311k" -r '
#OLD version
ffmpeg_params2 = ' -bufsize "28311k" -maxrate "28311k" -g "100" -coder 1 -profile main -preset faster -qdiff 4 -qcomp 0.7 -directpred 3 -flags +loop+mv4 -cmp +chroma -partitions +parti4x4+partp8x8+partb8x8 -subq 7 -me_range 16 -keyint_min 1 -sc_threshold 40 -i_qfactor 0.71 -rc_eq ''blurCplx^(1-qComp)'' -b_strategy 1 -bidir_refine 1 -refs 6 -deblockalpha 0 -deblockbeta 0 -trellis 1 -x264opts keyint=10:min-keyint=1:bframes=1 -threads 2 '
#NEW version
ffmpeg_params2 = ' -bufsize "28311k" -maxrate "28311k" -g "100" -coder 1 -profile main -preset faster -qdiff 4 -qcomp 0.7 -direct-pred 3 -flags +loop+mv4 -cmp +chroma -partitions +parti4x4+partp8x8+partb8x8 -subq 7 -me_range 16 -keyint_min 1 -sc_threshold 40 -i_qfactor 0.71 -rc_eq ''blurCplx^\(1-qComp\)'' -b_strategy 1 -bidir_refine 1 -refs 6 -deblock 0:0 -trellis 1 -x264opts keyint=10:min-keyint=1:bframes=1 -threads 2 '
random.seed()
datadir=BASEDIR+'max_spectra/'
savedir=datadir
for maxindex in maxindices:
    err=0 #Reset the error counter per movie, so one failure does not block the rest
    moviefname=savedir+'max'+maxindex+'_spectra_'+polarization+'_synchrotron.mp4'
if os.path.exists(moviefname) and force > 0:
os.system('rm -f '+moviefname)
if vv > 0: moviefname=savedir+'max'+maxindex+'_spectra_'+polarization+'_vv_synchrotron.mp4'
if os.path.isfile(moviefname):
print '#### File '+moviefname +' already exists.'
err+=1
#FIND THE PNG FILES
img_list = glob.glob(datadir+'Max'+maxindex+'_spectra*'+polarization+'_synchrotron.png')
if vv > 0: img_list = glob.glob(datadir+'Max'+maxindex+'_spectra*'+'_vv*'+polarization+'_synchrotron.png')
img_list.sort()
if len(img_list)==0:
print "#### Found NO image matching the search criterion";
err+=1
if len(img_list) < 6:
print "#### Not enough frames to make movie "+moviefname;
err+=1
if err == 0:
randint=random.randint(10000,50000)
tmpdir=savedir+'tmpdir_'+str(randint)+'/'
os.system('mkdir -m 0777 '+tmpdir)
        for ii in range(len(img_list)):
            img=img_list[ii]
            iistr='%05d' % (ii+1) #Zero-pad the frame index to five digits
            os.system('cp '+img+' '+tmpdir+'tmpim_'+iistr+'.png')
imgfnames=tmpdir+'tmpim_%05d.png'
print imgfnames
command = process + fps + path_call + imgfnames + ffmpeg_params1 + fps + ffmpeg_params2 + moviefname
print command
os.system(command)
os.system('rm -rf '+tmpdir)
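#For reference, the assembled command looks like this (a sketch; the actual
#paths depend on BASEDIR, the random tmpdir and maxindex):
# ffmpeg -y -f image2 -r 5 -i /mnt/MWA_DATA/max_spectra/tmpdir_12345/tmpim_%05d.png \
#   -an -pix_fmt "yuv420p" -vcodec "libx264" -level 41 -crf 18.0 -b "28311k" -r 5 \
#   ... /mnt/MWA_DATA/max_spectra/max1_spectra_XX_synchrotron.mp4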
|
kkozarev/mwacme
|
src/png2mp4_synchrotron.py
|
Python
|
gpl-2.0
| 3,452
|
import pipes
from fabric.api import settings, task, local, hide
from fabric.contrib.console import confirm
def is_working_tree_clean():
with settings(hide('everything'), warn_only=True):
local('git update-index -q --ignore-submodules --refresh')
unstaged = local('git diff-files --quiet --ignore-submodules --',
capture=True)
uncommitted = local('git diff-index --cached --quiet HEAD '
'--ignore-submodules --', capture=True)
return unstaged.succeeded and uncommitted.succeeded
@task
def lint():
"""
Checks the source code using flake8.
"""
local('flake8 --statistics --exit-zero --max-complexity=10 '
'--exclude=\'*/migrations/*,build,dist\' .')
@task
def authors():
"""
Updates the AUTHORS file with a list of committers from GIT.
"""
local('git shortlog -s -e -n | cut -f 2- > AUTHORS')
@task
def compass():
local('compass watch -c ppc/assets/sass/config.rb')
@task
def livereload():
local('bundle exec guard')
@task
def release():
"""
Create a new release and upload it to PyPI.
"""
if not is_working_tree_clean():
print 'Your working tree is not clean. Refusing to create a release.'
return
print 'Rebuilding the AUTHORS file to check for modifications...'
authors()
if not is_working_tree_clean():
print (
'Your working tree is not clean after the AUTHORS file was '
'rebuilt.'
)
print 'Please commit the changes before continuing.'
return
# Get version
version = 'v{}'.format(local('python setup.py --version', capture=True))
name = local('python setup.py --name', capture=True)
# Tag
tag_message = '{} release version {}.'.format(name, version)
print '----------------------'
print 'Proceeding will tag the release, push the repository upstream,'
print 'and release a new version on PyPI.'
print
print 'Version: {}'.format(version)
print 'Tag message: {}'.format(tag_message)
print
if not confirm('Continue?', default=True):
print 'Aborting.'
return
local('git tag -a {} -m {}'.format(pipes.quote(version),
pipes.quote(tag_message)))
# Push
local('git push origin master')
# Package and upload to pypi
local('python setup.py sdist upload')
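# Example invocations (a sketch; assumes Fabric 1.x, i.e. the `fab` tool, is
# installed and run from the project root):
#   $ fab lint
#   $ fab authors
#   $ fab release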
|
GaretJax/ppc
|
fabfile.py
|
Python
|
mit
| 2,432
|
from test_templatetags import *
|
acdha/django-google-analytics
|
google_analytics/tests/__init__.py
|
Python
|
mit
| 32
|
from __future__ import absolute_import
import urllib2
from django.template import Library, Node
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.conf import settings
register = Library()
SOURCES = {
'nl': 'http://www.degrotegriepmeting.nl/count/counter/?country=NL',
'be': 'http://www.degrotegriepmeting.nl/count/counter/?country=BE',
'de': 'http://www.aktivgegengrippe.de/count/counter/',
'at': 'http://www.aktivgegengrippe.at/count/counter/',
'ch': 'http://www.aktivgegengrippe.ch/count/counter/',
'se': 'https://www.influensakoll.se/count/counter/',
'uk': 'http://flusurvey.org.uk/count/counter/',
'it': 'http://www.influweb.it/count/counter/',
'pt': 'http://www.gripenet.pt/count/counter/',
'fr': 'http://www.grippenet.fr/count/counter/',
}
def do_member_count(parser, token):
contents = token.split_contents()
assert len(contents) == 2, "%r tag requires a single argument" % contents[0]
country = contents[1]
assert country[0] in ['"', "'"], "argument must be a string"
country = country[1:-1]
return MemberCountNode(country)
class MemberCountNode(Node):
def __init__(self, country):
self.country = country
@classmethod
def get(cls, country):
key = "count-counter-%s" % country
        cached = cache.get(key)
        if cached:
            return cached
        try:
            result = urllib2.urlopen(SOURCES[country], timeout=0.1 if settings.DEBUG else 2).read()
        except Exception:  # network failure or unknown country code
            result = '2300'
try:
int(result)
except ValueError:
result = '0'
        cache.set(key, result, timeout=60 * 30)  # timeout 30 minutes
return result
def render(self, context):
if self.country == 'total':
return "%s" % sum([int(self.get(country)) for country in SOURCES.keys()])
return self.get(self.country)
register.tag('member_count', do_member_count)
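# Template usage sketch (the load name follows this module's filename):
#   {% load count %}
#   {% member_count "nl" %}      {# one country from SOURCES #}
#   {% member_count "total" %}   {# sum over all SOURCES #}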
|
pkleimert/hrpt
|
apps/count/templatetags/count.py
|
Python
|
agpl-3.0
| 1,970
|
import datetime
from django.core import signing
from django.test import SimpleTestCase
from django.test.utils import freeze_time
class TestSigner(SimpleTestCase):
def test_signature(self):
"signature() method should generate a signature"
signer = signing.Signer('predictable-secret')
signer2 = signing.Signer('predictable-secret2')
for s in (
b'hello',
b'3098247:529:087:',
'\u2019'.encode(),
):
self.assertEqual(
signer.signature(s),
signing.base64_hmac(signer.salt + 'signer', s, 'predictable-secret')
)
self.assertNotEqual(signer.signature(s), signer2.signature(s))
def test_signature_with_salt(self):
"signature(value, salt=...) should work"
signer = signing.Signer('predictable-secret', salt='extra-salt')
self.assertEqual(
signer.signature('hello'),
signing.base64_hmac('extra-salt' + 'signer', 'hello', 'predictable-secret')
)
self.assertNotEqual(
signing.Signer('predictable-secret', salt='one').signature('hello'),
signing.Signer('predictable-secret', salt='two').signature('hello'))
def test_sign_unsign(self):
"sign/unsign should be reversible"
signer = signing.Signer('predictable-secret')
examples = [
'q;wjmbk;wkmb',
'3098247529087',
'3098247:529:087:',
'jkw osanteuh ,rcuh nthu aou oauh ,ud du',
'\u2019',
]
for example in examples:
signed = signer.sign(example)
self.assertIsInstance(signed, str)
self.assertNotEqual(example, signed)
self.assertEqual(example, signer.unsign(signed))
def test_unsign_detects_tampering(self):
"unsign should raise an exception if the value has been tampered with"
signer = signing.Signer('predictable-secret')
value = 'Another string'
signed_value = signer.sign(value)
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
self.assertEqual(value, signer.unsign(signed_value))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signer.unsign(transform(signed_value))
def test_dumps_loads(self):
"dumps and loads be reversible for any JSON serializable object"
objects = [
['a', 'list'],
'a string \u2019',
{'a': 'dictionary'},
]
for o in objects:
self.assertNotEqual(o, signing.dumps(o))
self.assertEqual(o, signing.loads(signing.dumps(o)))
self.assertNotEqual(o, signing.dumps(o, compress=True))
self.assertEqual(o, signing.loads(signing.dumps(o, compress=True)))
def test_decode_detects_tampering(self):
"loads should raise exception for tampered objects"
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
value = {
'foo': 'bar',
'baz': 1,
}
encoded = signing.dumps(value)
self.assertEqual(value, signing.loads(encoded))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signing.loads(transform(encoded))
def test_works_with_non_ascii_keys(self):
        binary_key = b'\xe7'  # Use a binary (non-ASCII) key
s = signing.Signer(binary_key)
self.assertEqual('foo:6NB0fssLW5RQvZ3Y-MTerq2rX7w', s.sign('foo'))
def test_valid_sep(self):
separators = ['/', '*sep*', ',']
for sep in separators:
signer = signing.Signer('predictable-secret', sep=sep)
self.assertEqual('foo%ssH9B01cZcJ9FoT_jEVkRkNULrl8' % sep, signer.sign('foo'))
def test_invalid_sep(self):
"""should warn on invalid separator"""
msg = 'Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)'
separators = ['', '-', 'abc']
for sep in separators:
with self.assertRaisesMessage(ValueError, msg % sep):
signing.Signer(sep=sep)
class TestTimestampSigner(SimpleTestCase):
def test_timestamp_signer(self):
value = 'hello'
with freeze_time(123456789):
signer = signing.TimestampSigner('predictable-key')
ts = signer.sign(value)
self.assertNotEqual(ts, signing.Signer('predictable-key').sign(value))
self.assertEqual(signer.unsign(ts), value)
with freeze_time(123456800):
self.assertEqual(signer.unsign(ts, max_age=12), value)
# max_age parameter can also accept a datetime.timedelta object
self.assertEqual(signer.unsign(ts, max_age=datetime.timedelta(seconds=11)), value)
with self.assertRaises(signing.SignatureExpired):
signer.unsign(ts, max_age=10)
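# Quick reference for the API exercised above (a sketch; key and values are
# illustrative):
#   signer = signing.Signer('secret-key')
#   signed = signer.sign('hello')     # 'hello:<base64 HMAC>'
#   signer.unsign(signed)             # 'hello', or raises BadSignature
#   token = signing.TimestampSigner('secret-key').sign('hello')
#   signing.TimestampSigner('secret-key').unsign(token, max_age=60)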
|
ifduyue/django
|
tests/signing/tests.py
|
Python
|
bsd-3-clause
| 5,150
|
#!/usr/bin/env python
"""Test case."""
import os
import subprocess
import sys
try:
import unittest2 as unittest # Python 2.6
except ImportError:
import unittest
ROOT_DIR = os.path.split(os.path.abspath(os.path.dirname(__file__)))[0]
sys.path.append(ROOT_DIR)
class Tests(unittest.TestCase):
def test_system_gradient(self):
'''Test system with a gradient image file.'''
output_file = os.path.join(ROOT_DIR, 'test', 'output.ppm')
try:
subprocess.check_call(
[os.path.join(ROOT_DIR, 'heatmap.py'),
'-p', os.path.join(ROOT_DIR, 'test', 'few-points'),
'-b', 'black',
'-r', '3',
'-W', '22',
'-P', 'equirectangular',
'-G', os.path.join(ROOT_DIR, 'test', 'gradient.png'),
'-o', output_file])
subprocess.check_call(
['perceptualdiff',
os.path.join(ROOT_DIR, 'test', 'few-points.ppm'),
output_file])
finally:
try:
os.remove(output_file)
except OSError:
pass # perhaps it was never created
if __name__ == '__main__':
unittest.main()
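# The equivalent command line exercised by test_system_gradient (a sketch;
# paths are relative to the repository root):
#   ./heatmap.py -p test/few-points -b black -r 3 -W 22 \
#       -P equirectangular -G test/gradient.png -o test/output.ppm
#   perceptualdiff test/few-points.ppm test/output.ppm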
|
amou269/heatmap
|
test/test_gradients.py
|
Python
|
agpl-3.0
| 1,245
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Availability zone helper functions."""
import collections
from oslo_config import cfg
from nova import objects
from nova.openstack.common import memorycache
# NOTE(vish): azs don't change that often, so cache them for an hour to
# avoid hitting the db multiple times on every request.
AZ_CACHE_SECONDS = 60 * 60
MC = None
availability_zone_opts = [
cfg.StrOpt('internal_service_availability_zone',
default='internal',
help='The availability_zone to show internal services under'),
cfg.StrOpt('default_availability_zone',
default='nova',
help='Default compute node availability_zone'),
]
CONF = cfg.CONF
CONF.register_opts(availability_zone_opts)
def _get_cache():
global MC
if MC is None:
MC = memorycache.get_client()
return MC
def reset_cache():
"""Reset the cache, mainly for testing purposes and update
availability_zone for host aggregate
"""
global MC
MC = None
def _make_cache_key(host):
return "azcache-%s" % host.encode('utf-8')
def _build_metadata_by_host(aggregates, hosts=None):
if hosts and not isinstance(hosts, set):
hosts = set(hosts)
metadata = collections.defaultdict(set)
for aggregate in aggregates:
for host in aggregate.hosts:
if hosts and host not in hosts:
continue
metadata[host].add(list(aggregate.metadata.values())[0])
return metadata
def set_availability_zones(context, services):
# Makes sure services isn't a sqlalchemy object
services = [dict(service) for service in services]
hosts = set([service['host'] for service in services])
aggregates = objects.AggregateList.get_by_metadata_key(context,
'availability_zone', hosts=hosts)
metadata = _build_metadata_by_host(aggregates, hosts=hosts)
# gather all of the availability zones associated with a service host
for service in services:
az = CONF.internal_service_availability_zone
if service['topic'] == "compute":
if metadata.get(service['host']):
az = u','.join(list(metadata[service['host']]))
else:
az = CONF.default_availability_zone
# update the cache
update_host_availability_zone_cache(context,
service['host'], az)
service['availability_zone'] = az
return services
def get_host_availability_zone(context, host):
aggregates = objects.AggregateList.get_by_host(context, host,
key='availability_zone')
if aggregates:
az = aggregates[0].metadata['availability_zone']
else:
az = CONF.default_availability_zone
return az
def update_host_availability_zone_cache(context, host, availability_zone=None):
if not availability_zone:
availability_zone = get_host_availability_zone(context, host)
cache = _get_cache()
cache_key = _make_cache_key(host)
cache.delete(cache_key)
cache.set(cache_key, availability_zone, AZ_CACHE_SECONDS)
def get_availability_zones(context, get_only_available=False,
with_hosts=False):
"""Return available and unavailable zones on demand.
:param get_only_available: flag to determine whether to return
available zones only, default False indicates return both
available zones and not available zones, True indicates return
available zones only
:param with_hosts: whether to return hosts part of the AZs
:type with_hosts: bool
"""
enabled_services = objects.ServiceList.get_all(context, disabled=False,
set_zones=True)
available_zones = []
for (zone, host) in [(service['availability_zone'], service['host'])
for service in enabled_services]:
if not with_hosts and zone not in available_zones:
available_zones.append(zone)
elif with_hosts:
_available_zones = dict(available_zones)
zone_hosts = _available_zones.setdefault(zone, set())
zone_hosts.add(host)
# .items() returns a view in Py3, casting it to list for Py2 compat
available_zones = list(_available_zones.items())
if not get_only_available:
disabled_services = objects.ServiceList.get_all(context, disabled=True,
set_zones=True)
not_available_zones = []
azs = available_zones if not with_hosts else dict(available_zones)
zones = [(service['availability_zone'], service['host'])
for service in disabled_services
if service['availability_zone'] not in azs]
for (zone, host) in zones:
if not with_hosts and zone not in not_available_zones:
not_available_zones.append(zone)
elif with_hosts:
_not_available_zones = dict(not_available_zones)
zone_hosts = _not_available_zones.setdefault(zone, set())
zone_hosts.add(host)
# .items() returns a view in Py3, casting it to list for Py2
# compat
not_available_zones = list(_not_available_zones.items())
return (available_zones, not_available_zones)
else:
return available_zones
def get_instance_availability_zone(context, instance):
"""Return availability zone of specified instance."""
host = str(instance.get('host'))
if not host:
return None
cache_key = _make_cache_key(host)
cache = _get_cache()
az = cache.get(cache_key)
if not az:
elevated = context.elevated()
az = get_host_availability_zone(elevated, host)
cache.set(cache_key, az, AZ_CACHE_SECONDS)
return az
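# Usage sketch (illustrative; `ctxt` is a nova RequestContext and `instance`
# is an instance object/dict with a 'host' field):
#   available, unavailable = get_availability_zones(ctxt)
#   only_up = get_availability_zones(ctxt, get_only_available=True)
#   az = get_instance_availability_zone(ctxt, instance)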
|
yosshy/nova
|
nova/availability_zones.py
|
Python
|
apache-2.0
| 6,591
|
# coding=utf-8
# Copyright The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes for ConvBERT."""
from ...utils import logging
from ..bert.tokenization_bert_fast import BertTokenizerFast
from .tokenization_convbert import ConvBertTokenizer
logger = logging.get_logger(__name__)
VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"}
PRETRAINED_VOCAB_FILES_MAP = {
"vocab_file": {
"YituTech/conv-bert-base": "https://huggingface.co/YituTech/conv-bert-base/resolve/main/vocab.txt",
"YituTech/conv-bert-medium-small": "https://huggingface.co/YituTech/conv-bert-medium-small/resolve/main/vocab.txt",
"YituTech/conv-bert-small": "https://huggingface.co/YituTech/conv-bert-small/resolve/main/vocab.txt",
}
}
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
"YituTech/conv-bert-base": 512,
"YituTech/conv-bert-medium-small": 512,
"YituTech/conv-bert-small": 512,
}
PRETRAINED_INIT_CONFIGURATION = {
"YituTech/conv-bert-base": {"do_lower_case": True},
"YituTech/conv-bert-medium-small": {"do_lower_case": True},
"YituTech/conv-bert-small": {"do_lower_case": True},
}
class ConvBertTokenizerFast(BertTokenizerFast):
r"""
Construct a "fast" ConvBERT tokenizer (backed by HuggingFace's `tokenizers` library).
:class:`~transformers.ConvBertTokenizerFast` is identical to :class:`~transformers.BertTokenizerFast` and runs
end-to-end tokenization: punctuation splitting and wordpiece.
Refer to superclass :class:`~transformers.BertTokenizerFast` for usage examples and documentation concerning
parameters.
"""
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
slow_tokenizer_class = ConvBertTokenizer
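# Usage sketch (assumes the `transformers` package and access to the
# Hugging Face hub; the checkpoint name is one of those listed above):
#   tokenizer = ConvBertTokenizerFast.from_pretrained("YituTech/conv-bert-base")
#   tokenizer("ConvBERT tokenizes like BERT.")["input_ids"]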
|
huggingface/pytorch-transformers
|
src/transformers/models/convbert/tokenization_convbert_fast.py
|
Python
|
apache-2.0
| 2,422
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test Transformer's schedule manager."""
import tensorflow as tf
from official.transformer.utils import schedule
class ScheduleBaseTester(tf.test.TestCase):
def test_mutual_exclusivity(self):
with self.assertRaises(ValueError):
schedule.Manager(
train_steps=100, steps_between_evals=100, train_epochs=2,
epochs_between_evals=1, default_train_epochs=None, batch_size=2048,
max_length=256)
def test_step_basis(self):
manager = schedule.Manager(
train_steps=1000, steps_between_evals=100, train_epochs=None,
epochs_between_evals=None, default_train_epochs=None, batch_size=2048,
max_length=256)
self.assertEqual(manager.single_iteration_train_steps, 100)
# Evaluation uses the full set
self.assertIsNone(manager.single_iteration_eval_steps)
self.assertIsNone(manager.repeat_dataset)
def test_epoch_basis(self):
manager = schedule.Manager(
train_steps=None, steps_between_evals=None, train_epochs=10,
epochs_between_evals=2, default_train_epochs=None, batch_size=2048,
max_length=256)
    # For non-TPU, the estimator relies on dataset exhaustion
self.assertIsNone(manager.single_iteration_train_steps)
self.assertIsNone(manager.single_iteration_eval_steps)
self.assertEqual(manager.repeat_dataset, 2)
def test_step_basis_tpu(self):
manager = schedule.Manager(
train_steps=1000, steps_between_evals=100, train_epochs=None,
epochs_between_evals=None, default_train_epochs=None, batch_size=2048,
max_length=256, use_tpu=True)
self.assertEqual(manager.single_iteration_train_steps, 100)
# num_eval_examples / (batch_size / max_length) == 3000 / (2048 / 256)
self.assertEqual(manager.single_iteration_eval_steps, 375)
self.assertIsNone(manager.repeat_dataset)
def test_epoch_basis_tpu(self):
manager = schedule.Manager(
train_steps=None, steps_between_evals=None, train_epochs=10,
epochs_between_evals=2, default_train_epochs=None, batch_size=2048,
max_length=256, use_tpu=True)
self.assertEqual(
manager.single_iteration_train_steps,
schedule.NUM_EXAMPLES[tf.estimator.ModeKeys.TRAIN] * 2 // (2048 / 256)
)
# num_eval_examples / (batch_size / max_length) == 3000 / (2048 / 256)
self.assertEqual(manager.single_iteration_eval_steps, 375)
self.assertEqual(manager.repeat_dataset, 2)
if __name__ == "__main__":
tf.test.main()
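# Arithmetic behind the TPU eval-step expectations above (a sketch; 3000 is
# the presumed schedule.NUM_EXAMPLES value for the EVAL mode):
#   sentences per batch = batch_size / max_length = 2048 / 256 = 8
#   single_iteration_eval_steps = 3000 / 8 = 375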
|
mlperf/training_results_v0.5
|
v0.5.0/google/cloud_v3.8/resnet-tpuv3-8/code/resnet/model/models/official/transformer/utils/schedule_test.py
|
Python
|
apache-2.0
| 3,165
|
"""Blebox air_quality tests."""
import logging
import blebox_uniapi
import pytest
from homeassistant.components.air_quality import ATTR_PM_0_1, ATTR_PM_2_5, ATTR_PM_10
from homeassistant.const import ATTR_ICON, STATE_UNKNOWN
from .conftest import async_setup_entity, mock_feature
from tests.async_mock import AsyncMock, PropertyMock
@pytest.fixture(name="airsensor")
def airsensor_fixture():
"""Return a default air quality fixture."""
feature = mock_feature(
"air_qualities",
blebox_uniapi.air_quality.AirQuality,
unique_id="BleBox-airSensor-1afe34db9437-0.air",
full_name="airSensor-0.air",
device_class=None,
pm1=None,
pm2_5=None,
pm10=None,
)
product = feature.product
type(product).name = PropertyMock(return_value="My air sensor")
type(product).model = PropertyMock(return_value="airSensor")
return (feature, "air_quality.airsensor_0_air")
async def test_init(airsensor, hass, config):
"""Test airSensor default state."""
_, entity_id = airsensor
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-airSensor-1afe34db9437-0.air"
state = hass.states.get(entity_id)
assert state.name == "airSensor-0.air"
assert ATTR_PM_0_1 not in state.attributes
assert ATTR_PM_2_5 not in state.attributes
assert ATTR_PM_10 not in state.attributes
assert state.attributes[ATTR_ICON] == "mdi:blur"
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My air sensor"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "airSensor"
assert device.sw_version == "1.23"
async def test_update(airsensor, hass, config):
"""Test air quality sensor state after update."""
feature_mock, entity_id = airsensor
def initial_update():
feature_mock.pm1 = 49
feature_mock.pm2_5 = 222
feature_mock.pm10 = 333
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.attributes[ATTR_PM_0_1] == 49
assert state.attributes[ATTR_PM_2_5] == 222
assert state.attributes[ATTR_PM_10] == 333
assert state.state == "222"
async def test_update_failure(airsensor, hass, config, caplog):
"""Test that update failures are logged."""
caplog.set_level(logging.ERROR)
feature_mock, entity_id = airsensor
feature_mock.async_update = AsyncMock(side_effect=blebox_uniapi.error.ClientError)
await async_setup_entity(hass, config, entity_id)
assert f"Updating '{feature_mock.full_name}' failed: " in caplog.text
|
nkgilley/home-assistant
|
tests/components/blebox/test_air_quality.py
|
Python
|
apache-2.0
| 2,891
|
# -*- coding: utf-8 -*-
#
# COPYRIGHT (C) 2016-2017 Michael Labouebe <gfarmerfr@free.fr>
# COPYRIGHT (C) 2016-2018 Mutnick <mutnick@techie.com>
# COPYRIGHT (C) 2008-2011 Quinox <quinox@users.sf.net>
# COPYRIGHT (C) 2006-2009 Daelstorm <daelstorm@gmail.com>
# COPYRIGHT (C) 2009 Hedonist <ak@sensi.org>
# COPYRIGHT (C) 2003-2004 Hyriand <hyriand@thegraveyard.org>
#
# GNU GENERAL PUBLIC LICENSE
# Version 3, 29 June 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import signal
import sys
import time
import urllib.error
import urllib.parse
import urllib.request
from gettext import gettext as _
import gi
# gi.require_version() must be called before anything is imported from
# gi.repository, otherwise the version request has no effect.
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from gi.repository import GObject as gobject
from gi.repository import Gtk as gtk
import _thread
import pynicotine.utils
from pynicotine import pluginsystem
from pynicotine import slskmessages
from pynicotine import slskproto
from pynicotine.gtkgui import imagedata
from pynicotine.gtkgui import nowplaying
from pynicotine.gtkgui import utils
from pynicotine.gtkgui.about import AboutDialog
from pynicotine.gtkgui.about import AboutFiltersDialog
from pynicotine.gtkgui.about import AboutPrivateDialog
from pynicotine.gtkgui.about import AboutRoomsDialog
from pynicotine.gtkgui.chatrooms import ChatRooms
from pynicotine.gtkgui.checklatest import checklatest
from pynicotine.gtkgui.dirchooser import ChooseFile
from pynicotine.gtkgui.dirchooser import SaveFile
from pynicotine.gtkgui.downloads import Downloads
from pynicotine.gtkgui.entrydialog import FindDialog
from pynicotine.gtkgui.entrydialog import FolderDownload
from pynicotine.gtkgui.entrydialog import QuitBox
from pynicotine.gtkgui.entrydialog import input_box
from pynicotine.gtkgui.fastconfigure import FastConfigureAssistant
from pynicotine.gtkgui.privatechat import PrivateChats
from pynicotine.gtkgui.search import Searches
from pynicotine.gtkgui.settingswindow import SettingsWindow
from pynicotine.gtkgui.uploads import Uploads
from pynicotine.gtkgui.userbrowse import UserBrowse
from pynicotine.gtkgui.userinfo import UserInfo
from pynicotine.gtkgui.userinfo import UserTabs
from pynicotine.gtkgui.userlist import UserList
from pynicotine.gtkgui.utils import AppendLine
from pynicotine.gtkgui.utils import Humanize
from pynicotine.gtkgui.utils import HumanSpeed
from pynicotine.gtkgui.utils import ImageLabel
from pynicotine.gtkgui.utils import OpenUri
from pynicotine.gtkgui.utils import PopupMenu
from pynicotine.gtkgui.utils import ScrollBottom
from pynicotine.logfacility import log
from pynicotine.pynicotine import NetworkEventProcessor
from pynicotine.upnp import UPnPPortMapping
from pynicotine.utils import executeCommand
from pynicotine.utils import version
# LibSexy is deprecated, we should try to find a replacement
SEXY = True
try:
import sexy
except ImportError:
SEXY = False
class roomlist:
def __init__(self, frame):
# Build the window
self.frame = frame
builder = gtk.Builder()
builder.set_translation_domain('nicotine')
builder.add_from_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), "ui", "roomlist.ui"))
self.RoomList = builder.get_object("RoomList")
for i in builder.get_objects():
try:
self.__dict__[gtk.Buildable.get_name(i)] = i
except TypeError:
pass
self.RoomList.remove(self.vbox2)
self.RoomList.destroy()
# self.RoomsList is the TreeView
builder.connect_signals(self)
self.search_iter = None
self.query = ""
self.room_model = self.RoomsList.get_model()
self.FindRoom.connect("clicked", self.OnSearchRoom)
def OnCreateRoom(self, widget):
room = widget.get_text()
if not room:
return
self.frame.np.queue.put(slskmessages.JoinRoom(room))
widget.set_text("")
def OnSearchRoom(self, widget):
if self.room_model is not self.RoomsList.get_model():
self.room_model = self.RoomsList.get_model()
self.search_iter = self.room_model.get_iter_first()
room = self.SearchRooms.get_text().lower()
if not room:
return
if self.query == room:
if self.search_iter is None:
self.search_iter = self.room_model.get_iter_first()
else:
self.search_iter = self.room_model.iter_next(self.search_iter)
else:
self.search_iter = self.room_model.get_iter_first()
self.query = room
while self.search_iter:
room_match, size = self.room_model.get(self.search_iter, 0, 1)
if self.query in room_match.lower():
path = self.room_model.get_path(self.search_iter)
self.RoomsList.set_cursor(path)
break
self.search_iter = self.room_model.iter_next(self.search_iter)
class BuddiesComboBox:
def __init__(self, frame, ComboBox):
self.frame = frame
self.items = {}
self.combobox = ComboBox
self.store = gtk.ListStore(gobject.TYPE_STRING)
self.combobox.set_model(self.store)
self.combobox.set_entry_text_column(0)
self.store.set_default_sort_func(lambda *args: -1)
self.store.set_sort_column_id(-1, gtk.SortType.ASCENDING)
self.combobox.show()
def Fill(self):
self.items.clear()
self.store.clear()
self.items[""] = self.store.append([""])
for user in self.frame.np.config.sections["server"]["userlist"]:
self.items[user[0]] = self.store.append([user[0]])
self.store.set_sort_column_id(0, gtk.SortType.ASCENDING)
def Append(self, item):
if item in self.items:
return
self.items[item] = self.combobox.get_model().append([item])
def Remove(self, item):
if item in self.items:
self.combobox.get_model().remove(self.items[item])
del self.items[item]
class NicotineFrame:
def __init__(self, data_dir, config, plugins, use_trayicon, start_hidden=False, bindip=None, port=None):
self.clip_data = ""
self.data_dir = data_dir
self.configfile = config
self.transfermsgs = {}
self.transfermsgspostedtime = 0
self.manualdisconnect = 0
self.away = 0
self.exiting = 0
self.startup = True
self.current_tab = 0
self.rescanning = 0
self.brescanning = 0
self.needrescan = False
self.autoaway = False
self.awaytimer = None
self.SEXY = SEXY
self.chatrooms = None
self.bindip = bindip
self.port = port
self.got_focus = False
try:
gi.require_version('Notify', '0.7')
from gi.repository import Notify
Notify.init("Nicotine+")
self.notify = Notify
self.notifyBox = None
from xml.dom.minidom import getDOMImplementation
self.xmldocument = getDOMImplementation().createDocument(None, None, None)
except ImportError:
self.notify = None
self.np = NetworkEventProcessor(
self,
self.callback,
self.logMessage,
self.SetStatusText,
self.bindip,
self.port,
data_dir,
config
)
config = self.np.config.sections
self.temp_modes_order = config["ui"]["modes_order"]
utils.DECIMALSEP = config["ui"]["decimalsep"]
utils.CATCH_URLS = config["urls"]["urlcatching"]
utils.HUMANIZE_URLS = config["urls"]["humanizeurls"]
utils.PROTOCOL_HANDLERS = config["urls"]["protocols"].copy()
utils.PROTOCOL_HANDLERS["slsk"] = self.OnSoulSeek
utils.USERNAMEHOTSPOTS = config["ui"]["usernamehotspots"]
utils.NICOTINE = self
pynicotine.utils.log = self.logMessage
log.addlistener(self.logCallback)
self.LoadIcons()
self.accel_group = gtk.AccelGroup()
self.roomlist = roomlist(self)
# Import GtkBuilder widgets
builder = gtk.Builder()
builder.set_translation_domain('nicotine')
builder.add_from_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), "ui", "mainwindow.ui"))
for i in builder.get_objects():
try:
self.__dict__[gtk.Buildable.get_name(i)] = i
except TypeError:
pass
self.MainWindow.set_title(_("Nicotine+") + " " + version)
self.MainWindow.set_default_icon(self.images["n"])
self.MainWindow.set_icon(self.images["n"])
# self.MainWindow.selection_add_target("PRIMARY", "STRING", 1)
hints_geometry = Gdk.Geometry()
hints_geometry.base_height = 460
hints_geometry.base_width = 500
self.MainWindow.set_geometry_hints(None, hints_geometry, Gdk.WindowHints(Gdk.WindowHints.MIN_SIZE))
self.MainWindow.connect("focus_in_event", self.OnFocusIn)
self.MainWindow.connect("focus_out_event", self.OnFocusOut)
self.MainWindow.connect("configure_event", self.OnWindowChange)
self.MainWindow.add_accel_group(self.accel_group)
builder.connect_signals(self)
width = self.np.config.sections["ui"]["width"]
height = self.np.config.sections["ui"]["height"]
self.MainWindow.resize(width, height)
xpos = self.np.config.sections["ui"]["xposition"]
ypos = self.np.config.sections["ui"]["yposition"]
        # According to the pygtk doc this will be ignored by many window managers since the move takes place before we do a show()
if min(xpos, ypos) < 0:
self.MainWindow.set_position(gtk.WindowPosition.CENTER)
else:
self.MainWindow.move(xpos, ypos)
self.MainWindow.show()
self.is_mapped = True
if start_hidden:
self.MainWindow.unmap()
self.is_mapped = False
self.minimized = False
self.HiddenTabs = {}
display = Gdk.Display.get_default() # noqa: F841
self.clip = gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
# for iterating buddy changes to the combos
self.CreateRecommendationsWidgets()
self.status_context_id = self.Statusbar.get_context_id("")
self.socket_context_id = self.SocketStatus.get_context_id("")
self.user_context_id = self.UserStatus.get_context_id("")
self.down_context_id = self.DownStatus.get_context_id("")
self.up_context_id = self.UpStatus.get_context_id("")
self.MainWindow.connect("delete-event", self.on_delete_event)
self.MainWindow.connect('window-state-event', self.window_state_event_cb)
self.MainWindow.connect("destroy", self.OnDestroy)
self.MainWindow.connect("key_press_event", self.OnKeyPress)
self.MainWindow.connect("motion-notify-event", self.OnButtonPress)
gobject.signal_new("network_event", gtk.Window, gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,))
gobject.signal_new("network_event_lo", gtk.Window, gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,))
self.MainWindow.connect("network_event", self.OnNetworkEvent)
self.MainWindow.connect("network_event_lo", self.OnNetworkEvent)
self.MainNotebook.connect("page-removed", self.OnPageRemoved)
self.MainNotebook.connect("page-reordered", self.OnPageReordered)
self.MainNotebook.connect("page-added", self.OnPageAdded)
for thing in config["interests"]["likes"]:
self.likes[thing] = self.likeslist.append([thing])
for thing in config["interests"]["dislikes"]:
self.dislikes[thing] = self.dislikeslist.append([thing])
# Initialise the Notebooks
self.ChatNotebook = ChatRooms(self)
self.PrivatechatNotebook = PrivateChats(self)
self.UserInfoNotebook = UserTabs(self, UserInfo, self.UserInfoNotebookRaw)
self.UserBrowseNotebook = UserTabs(self, UserBrowse, self.UserBrowseNotebookRaw)
self.SearchNotebook = Searches(self)
for w in self.ChatNotebook, self.PrivatechatNotebook, self.UserInfoNotebook, self.UserBrowseNotebook, self.SearchNotebook:
w.set_tab_closers(config["ui"]["tabclosers"])
w.set_reorderable(config["ui"]["tab_reorderable"])
w.show_images(config["ui"]["tab_icons"])
for tab in self.MainNotebook.get_children():
self.MainNotebook.set_tab_reorderable(tab, config["ui"]["tab_reorderable"])
# Translation for the labels of tabs
translated_tablabels = {
self.ChatTabLabel: _("Chat rooms"),
self.PrivateChatTabLabel: _("Private chat"),
self.SearchTabLabel: _("Search files"),
self.UserInfoTabLabel: _("User info"),
self.DownloadsTabLabel: _("Downloads"),
self.UploadsTabLabel: _("Uploads"),
self.UserBrowseTabLabel: _("User browse"),
self.InterestsTabLabel: _("Interests")
}
# Mapping between the pseudo tabs and their vbox/hbox
map_tablabels_to_box = {
self.ChatTabLabel: "chathbox",
self.PrivateChatTabLabel: "privatevbox",
self.SearchTabLabel: "searchvbox",
self.UserInfoTabLabel: "userinfovbox",
self.DownloadsTabLabel: "downloadsvbox",
self.UploadsTabLabel: "uploadsvbox",
self.UserBrowseTabLabel: "userbrowsevbox",
self.InterestsTabLabel: "interestsvbox"
}
# Initialize tabs labels
for label_tab in [
self.ChatTabLabel,
self.PrivateChatTabLabel,
self.SearchTabLabel,
self.UserInfoTabLabel,
self.DownloadsTabLabel,
self.UploadsTabLabel,
self.UserBrowseTabLabel,
self.InterestsTabLabel
]:
# Initialize the image label
img_label = ImageLabel(translated_tablabels[label_tab], self.images["empty"])
img_label.show()
# Add it to the eventbox
label_tab.add(img_label)
# Set tab icons, angle and text color
img_label.show_image(config["ui"]["tab_icons"])
img_label.set_angle(config["ui"]["labelmain"])
img_label.set_text_color(0)
# Set the menu to hide the tab
eventbox_name = gtk.Buildable.get_name(label_tab)
label_tab.connect('button_press_event', self.on_tab_click, eventbox_name + "Menu", map_tablabels_to_box[label_tab])
self.__dict__[eventbox_name + "Menu"] = popup = utils.PopupMenu(self)
popup.setup(
(
"#" + _("Hide %(tab)s") % {"tab": translated_tablabels[label_tab]}, self.HideTab, [label_tab, map_tablabels_to_box[label_tab]]
)
)
popup.set_user(map_tablabels_to_box[label_tab])
self.LogScrolledWindow = gtk.ScrolledWindow()
self.LogScrolledWindow.set_policy(gtk.PolicyType.AUTOMATIC, gtk.PolicyType.AUTOMATIC)
self.LogScrolledWindow.show()
self.LogWindow = gtk.TextView()
self.LogWindow.set_wrap_mode(gtk.WrapMode.WORD)
self.LogWindow.set_cursor_visible(False)
self.LogWindow.set_editable(False)
self.LogScrolledWindow.add(self.LogWindow)
self.LogWindow.connect("button-press-event", self.OnPopupLogMenu)
# Popup menu on the log windows
self.logpopupmenu = PopupMenu(self).setup(
("#" + _("Find"), self.OnFindLogWindow),
("", None),
("#" + _("Copy"), self.OnCopyLogWindow),
("#" + _("Copy All"), self.OnCopyAllLogWindow),
("", None),
("#" + _("Clear log"), self.OnClearLogWindow)
)
self.debugLogBox.pack_start(self.LogScrolledWindow, True, True, 0)
self.debugWarnings.set_active((1 in config["logging"]["debugmodes"]))
self.debugSearches.set_active((2 in config["logging"]["debugmodes"]))
self.debugConnections.set_active((3 in config["logging"]["debugmodes"]))
self.debugMessages.set_active((4 in config["logging"]["debugmodes"]))
self.debugTransfers.set_active((5 in config["logging"]["debugmodes"]))
self.debugStatistics.set_active((6 in config["logging"]["debugmodes"]))
self.debugButtonsBox.hide()
if config["logging"]["logcollapsed"]:
self.show_log_window1.set_active(False)
else:
self.show_log_window1.set_active(True)
self.LogWindow.show()
self.OnShowLog(self.show_log_window1)
self.show_tickers1.set_active(not config["ticker"]["hide"])
self.show_debug_info1.set_active(self.np.config.sections["logging"]["debug"])
self.settingswindow = SettingsWindow(self)
self.settingswindow.SettingsWindow.connect("settings-closed", self.OnSettingsClosed)
self.fastconfigure = FastConfigureAssistant(self)
self.chatrooms = self.ChatNotebook
self.chatrooms.show()
# Create Search combo ListStores
self.SearchEntryCombo_List = gtk.ListStore(gobject.TYPE_STRING)
self.SearchEntryCombo.set_model(self.SearchEntryCombo_List)
self.SearchEntryCombo.set_entry_text_column(0)
self.SearchEntry = self.SearchEntryCombo.get_child()
self.SearchEntry.connect("activate", self.OnSearch)
self.RoomSearchCombo_List = gtk.ListStore(gobject.TYPE_STRING)
self.RoomSearchCombo.set_model(self.RoomSearchCombo_List)
self.RoomSearchCombo.set_entry_text_column(0)
self.SearchMethod_List = gtk.ListStore(gobject.TYPE_STRING)
for i in [""]:
self.SearchMethod_List.append([i])
self.SearchMethod.set_model(self.SearchMethod_List)
self.SearchMethod.set_entry_text_column(0)
self.Searches = self.SearchNotebook
self.Searches.show()
self.Searches.LoadConfig()
self.downloads = Downloads(self)
self.uploads = Uploads(self)
self.userlist = UserList(self)
self.UpdateColours(1)
self.privatechats = self.PrivatechatNotebook
self.privatechats.show()
self.userinfo = self.UserInfoNotebook
self.userinfo.show()
self.userbrowse = self.UserBrowseNotebook
self.userbrowse.show()
self.userinfo.SetTabLabel(self.UserInfoTabLabel)
self.userbrowse.SetTabLabel(self.UserBrowseTabLabel)
self.sUserinfoButton.connect("clicked", self.OnGetUserInfo)
self.UserInfoCombo.get_child().connect("activate", self.OnGetUserInfo)
self.sPrivateChatButton.connect("clicked", self.OnGetPrivateChat)
self.UserPrivateCombo.get_child().connect("activate", self.OnGetPrivateChat)
self.sSharesButton.connect("clicked", self.OnGetShares)
self.UserBrowseCombo.get_child().connect("activate", self.OnGetShares)
if config["ui"]["roomlistcollapsed"]:
self.show_room_list1.set_active(False)
else:
self.vpaned3.pack2(self.roomlist.vbox2, True, True)
self.show_room_list1.set_active(True)
buddylist = config["ui"]["buddylistinchatrooms"]
if buddylist == 1:
self.buddylist_in_chatrooms1.set_active(True)
elif buddylist == 2:
self.buddylist_always_visible.set_active(True)
elif buddylist == 0:
self.buddylist_in_tab.set_active(True)
if config["columns"]["hideflags"]:
self.ShowFlags.set_active(False)
else:
self.ShowFlags.set_active(True)
self.SetUserStatus(_("Offline"))
self.Notifications = Notifications(self)
self.TrayApp = TrayApp(self)
self.UpdateBandwidth()
self.UpdateTransferButtons()
# Search Methods
self.searchroomslist = {}
self.searchmethods = {}
        # Create a list of objects of the BuddiesComboBox class.
        # This adds a few methods to add/remove entries on all comboboxes at once.
self.BuddiesComboEntries = [
BuddiesComboBox(self, self.UserSearchCombo),
BuddiesComboBox(self, self.UserPrivateCombo),
BuddiesComboBox(self, self.UserInfoCombo),
BuddiesComboBox(self, self.UserBrowseCombo)
]
# Initial filling of the buddies combobox
_thread.start_new_thread(self.BuddiesCombosFill, ("",))
self.SearchMethod_List.clear()
        # The trailing space in "Joined Rooms " matters: it keeps this
        # pseudo-entry from colliding with a real room name. If a translation
        # drops the space, nothing serious breaks.
self.searchroomslist[_("Joined Rooms ")] = self.RoomSearchCombo_List.append([_("Joined Rooms ")])
self.RoomSearchCombo.set_active_iter(self.searchroomslist[_("Joined Rooms ")])
for method in [_("Global"), _("Buddies"), _("Rooms"), _("User")]:
self.searchmethods[method] = self.SearchMethod_List.append([method])
self.SearchMethod.set_active_iter(self.searchmethods[_("Global")])
self.SearchMethod.connect("changed", self.OnSearchMethod)
self.UserSearchCombo.hide()
self.RoomSearchCombo.hide()
self.disconnect1.set_sensitive(0)
self.awayreturn1.set_sensitive(0)
self.check_privileges1.set_sensitive(0)
self.gstreamer = gstreamer()
self.pluginhandler = pluginsystem.PluginHandler(self, plugins)
self.ShowChatButtons.set_active(not config["ui"]["chat_hidebuttons"])
if config["transfers"]["rescanonstartup"]:
# Rescan public shares if needed
if not self.np.config.sections["transfers"]["friendsonly"] and self.np.config.sections["transfers"]["shared"]:
self.OnRescan()
# Rescan buddy shares if needed
if self.np.config.sections["transfers"]["enablebuddyshares"]:
self.OnBuddyRescan()
self.now = nowplaying.NowPlaying(self)
self.SetTabPositions()
ConfigUnset = self.np.config.needConfig()
if ConfigUnset:
if ConfigUnset > 1:
self.connect1.set_sensitive(False)
self.rescan_public.set_sensitive(True)
# Display FastConfigure
self.OnFastConfigure(None)
else:
# Connect anyway
self.OnFirstConnect(-1)
else:
self.OnFirstConnect(-1)
self.UpdateDownloadFilters()
# Create the trayicon if needed
if use_trayicon and config["ui"]["trayicon"]:
self.TrayApp.Create()
# Deactivate public shares related menu entries if we don't use them
if self.np.config.sections["transfers"]["friendsonly"] or not self.np.config.sections["transfers"]["shared"]:
self.rescan_public.set_sensitive(False)
self.rebuild_public.set_sensitive(False)
self.browse_public_shares.set_sensitive(False)
# Deactivate buddy shares related menu entries if we don't use them
if not self.np.config.sections["transfers"]["enablebuddyshares"]:
self.rescan_buddy.set_sensitive(False)
self.rebuild_buddy.set_sensitive(False)
self.browse_buddy_shares.set_sensitive(False)
self.SetMainTabsVisibility()
self.SetLastSessionTab()
self.startup = False
def AddDebugLevel(self, debugLevel):
if debugLevel not in self.np.config.sections["logging"]["debugmodes"]:
self.np.config.sections["logging"]["debugmodes"].append(debugLevel)
def RemoveDebugLevel(self, debugLevel):
if debugLevel in self.np.config.sections["logging"]["debugmodes"]:
self.np.config.sections["logging"]["debugmodes"].remove(debugLevel)
def OnDebugWarnings(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(1)
else:
self.RemoveDebugLevel(1)
def OnDebugSearches(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(2)
else:
self.RemoveDebugLevel(2)
def OnDebugConnections(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(3)
else:
self.RemoveDebugLevel(3)
def OnDebugMessages(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(4)
else:
self.RemoveDebugLevel(4)
def OnDebugTransfers(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(5)
else:
self.RemoveDebugLevel(5)
def OnDebugStatistics(self, widget):
if self.startup:
return
if widget.get_active():
self.AddDebugLevel(6)
else:
self.RemoveDebugLevel(6)
def on_delete_event(self, widget, event):
if not self.np.config.sections["ui"]["exitdialog"]:
return False
if self.TrayApp.HAVE_TRAYICON and self.np.config.sections["ui"]["exitdialog"] == 2:
if self.is_mapped:
self.MainWindow.unmap()
self.is_mapped = False
return True
if self.TrayApp.HAVE_TRAYICON:
option = QuitBox(
self,
title=_('Close Nicotine+?'),
message=_('Are you sure you wish to exit Nicotine+ at this time?'),
tray=True,
status="question",
third=_("Send to tray")
)
else:
option = QuitBox( # noqa: F841
self,
title=_('Close Nicotine+?'),
message=_('Are you sure you wish to exit Nicotine+ at this time?'),
tray=False,
status="question"
)
return True
    def window_state_event_cb(self, window, event):
        # changed_mask and new_window_state are bit masks, so they must be
        # tested with bitwise AND rather than the boolean `and` operator.
        if event.changed_mask & Gdk.WindowState.ICONIFIED:
            if event.new_window_state & Gdk.WindowState.ICONIFIED:
                self.minimized = 1
            else:
                self.minimized = 0
def similar_users_drag_data_get_data(self, treeview, context, selection, target_id, etime):
treeselection = treeview.get_selection()
model, iter = treeselection.get_selected()
user = model.get_value(iter, 1)
# data = (status, flag, user, speed, files, trusted, notify, privileged, lastseen, comments)
selection.set(selection.target, 8, user)
def NewNotification(self, message, title="Nicotine+"):
if self.notify is None:
return
xmlmessage = self.xmldocument.createTextNode(message).toxml()
if self.notifyBox is None:
self.notifyBox = self.notify.Notification.new(title, xmlmessage)
self.notifyBox.set_image_from_pixbuf(self.images["notify"])
else:
self.notifyBox.update(title, xmlmessage)
try:
self.notifyBox.show()
except gobject.GError as error:
self.logMessage(_("Notification Error: %s") % str(error))
def LoadIcons(self):
self.images = {}
self.icons = {}
self.flag_images = {}
self.flag_users = {}
scale = None
def loadStatic(name):
loader = GdkPixbuf.PixbufLoader()
data = getattr(imagedata, "%s" % (name,))
loader.write(data)
loader.close()
pixbuf = loader.get_pixbuf()
if scale:
w, h = pixbuf.get_width(), pixbuf.get_height()
if w == h:
                    pixbuf = pixbuf.scale_simple(scale, scale, GdkPixbuf.InterpType.BILINEAR)
return pixbuf
names = [
"empty",
"away",
"online",
"offline",
"hilite",
"hilite3",
"trayicon_away",
"trayicon_connect",
"trayicon_disconnect",
"trayicon_msg",
"n",
"notify",
"plugin"
]
if self.np.config.sections["ui"].get("icontheme"):
extensions = ["jpg", "jpeg", "bmp", "png", "svg"]
for name in names:
path = None
exts = extensions[:]
loaded = False
while not path or (exts and not loaded):
path = os.path.expanduser(os.path.join(self.np.config.sections["ui"]["icontheme"], "%s.%s" % (name, exts.pop())))
if os.path.exists(path):
                    with open(path, 'rb') as data:
                        s = data.read()
loader = GdkPixbuf.PixbufLoader()
try:
loader.write(s)
loader.close()
pixbuf = loader.get_pixbuf()
if scale:
w, h = pixbuf.get_width(), pixbuf.get_height()
if w == h:
                                pixbuf = pixbuf.scale_simple(scale, scale, GdkPixbuf.InterpType.BILINEAR)
self.images[name] = pixbuf
loaded = True
except gobject.GError:
pass
del loader
del s
if name not in self.images:
self.images[name] = loadStatic(name)
else:
for name in names:
self.images[name] = loadStatic(name)
def SaveColumns(self):
for i in [self.userbrowse, self.userlist, self.chatrooms.roomsctrl, self.downloads, self.uploads, self.Searches]:
i.saveColumns()
self.np.config.writeConfiguration()
def OnSearchMethod(self, widget):
act = False
search_mode = self.SearchMethod.get_model().get(self.SearchMethod.get_active_iter(), 0)[0]
if search_mode == _("User"):
self.UserSearchCombo.show()
act = True
else:
self.UserSearchCombo.hide()
self.UserSearchCombo.set_sensitive(act)
act = False
if search_mode == _("Rooms"):
act = True
self.RoomSearchCombo.show()
else:
self.RoomSearchCombo.hide()
self.RoomSearchCombo.set_sensitive(act)
def CreateRecommendationsWidgets(self):
self.likes = {}
self.likeslist = gtk.ListStore(gobject.TYPE_STRING)
self.likeslist.set_sort_column_id(0, gtk.SortType.ASCENDING)
cols = utils.InitialiseColumns(
self.LikesList,
[_("I like") + ":", 0, "text", self.CellDataFunc]
)
cols[0].set_sort_column_id(0)
self.LikesList.set_model(self.likeslist)
self.RecommendationsList.set_property("rules-hint", True)
self.RecommendationUsersList.set_property("rules-hint", True)
self.RecommendationUsersList.enable_model_drag_source(
Gdk.ModifierType.BUTTON1_MASK, [('text/plain', 0, 2)], Gdk.DragAction.COPY
)
self.RecommendationUsersList.connect("drag_data_get", self.similar_users_drag_data_get_data)
self.til_popup_menu = popup = utils.PopupMenu(self)
popup.setup(
("#" + _("_Remove this item"), self.OnRemoveThingILike),
("#" + _("Re_commendations for this item"), self.OnRecommendItem),
("", None),
("#" + _("_Search for this item"), self.OnRecommendSearch)
)
self.LikesList.connect("button_press_event", self.OnPopupTILMenu)
self.dislikes = {}
self.dislikeslist = gtk.ListStore(gobject.TYPE_STRING)
self.dislikeslist.set_sort_column_id(0, gtk.SortType.ASCENDING)
cols = utils.InitialiseColumns(
self.DislikesList,
[_("I dislike") + ":", 0, "text", self.CellDataFunc]
)
cols[0].set_sort_column_id(0)
self.DislikesList.set_model(self.dislikeslist)
self.tidl_popup_menu = popup = utils.PopupMenu(self)
popup.setup(
("#" + _("_Remove this item"), self.OnRemoveThingIDislike),
("", None),
("#" + _("_Search for this item"), self.OnRecommendSearch)
)
self.DislikesList.connect("button_press_event", self.OnPopupTIDLMenu)
cols = utils.InitialiseColumns(
self.RecommendationsList,
[_("Item"), 0, "text", self.CellDataFunc],
[_("Rating"), 75, "text", self.CellDataFunc]
)
cols[0].set_sort_column_id(0)
cols[1].set_sort_column_id(2)
self.recommendationslist = gtk.ListStore(
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_INT
)
self.RecommendationsList.set_model(self.recommendationslist)
self.r_popup_menu = popup = utils.PopupMenu(self)
popup.setup(
("$" + _("I _like this"), self.OnLikeRecommendation),
("$" + _("I _don't like this"), self.OnDislikeRecommendation),
("#" + _("_Recommendations for this item"), self.OnRecommendRecommendation),
("", None),
("#" + _("_Search for this item"), self.OnRecommendSearch)
)
self.RecommendationsList.connect("button_press_event", self.OnPopupRMenu)
cols = utils.InitialiseColumns(
self.UnrecommendationsList,
[_("Item"), 0, "text", self.CellDataFunc],
[_("Rating"), 75, "text", self.CellDataFunc]
)
cols[0].set_sort_column_id(0)
cols[1].set_sort_column_id(2)
self.unrecommendationslist = gtk.ListStore(
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_INT
)
self.UnrecommendationsList.set_model(self.unrecommendationslist)
self.ur_popup_menu = popup = utils.PopupMenu(self)
popup.setup(
("$" + _("I _like this"), self.OnLikeRecommendation),
("$" + _("I _don't like this"), self.OnDislikeRecommendation),
("#" + _("_Recommendations for this item"), self.OnRecommendRecommendation),
("", None),
("#" + _("_Search for this item"), self.OnRecommendSearch)
)
self.UnrecommendationsList.connect("button_press_event", self.OnPopupUnRecMenu)
statusiconwidth = self.images["offline"].get_width() + 4
cols = utils.InitialiseColumns(
self.RecommendationUsersList,
["", statusiconwidth, "pixbuf"],
[_("User"), 100, "text", self.CellDataFunc],
[_("Speed"), 0, "text", self.CellDataFunc],
[_("Files"), 0, "text", self.CellDataFunc],
)
cols[0].set_sort_column_id(4)
cols[1].set_sort_column_id(1)
cols[2].set_sort_column_id(5)
cols[3].set_sort_column_id(6)
self.recommendationusers = {}
self.recommendationuserslist = gtk.ListStore(
gobject.TYPE_OBJECT,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_INT,
gobject.TYPE_INT,
gobject.TYPE_INT
)
self.RecommendationUsersList.set_model(self.recommendationuserslist)
self.recommendationuserslist.set_sort_column_id(1, gtk.SortType.ASCENDING)
self.ru_popup_menu = popup = utils.PopupMenu(self)
popup.setup(
("#" + _("Send _message"), popup.OnSendMessage),
("", None),
("#" + _("Show IP a_ddress"), popup.OnShowIPaddress),
("#" + _("Get user i_nfo"), popup.OnGetUserInfo),
("#" + _("Brow_se files"), popup.OnBrowseUser),
("#" + _("Gi_ve privileges"), popup.OnGivePrivileges),
("", None),
("$" + _("_Add user to list"), popup.OnAddToList),
("$" + _("_Ban this user"), popup.OnBanUser),
("$" + _("_Ignore this user"), popup.OnIgnoreUser)
)
self.RecommendationUsersList.connect("button_press_event", self.OnPopupRUMenu)
def download_large_folder(self, username, folder, files, numfiles, msg):
FolderDownload(
self,
title=_('Nicotine+') + ': Download %(num)i files?' % {'num': numfiles},
message=_("Are you sure you wish to download %(num)i files from %(user)s's directory %(folder)s?") % {'num': numfiles, 'user': username, 'folder': folder},
modal=True,
data=msg,
callback=self.folder_download_response
)
def folder_download_response(self, dialog, response, data):
if response == gtk.ResponseType.CANCEL:
dialog.destroy()
return
elif response == gtk.ResponseType.OK:
dialog.destroy()
self.np.transfers.FolderContentsResponse(data)
def on_quit_response(self, dialog, response):
checkbox = dialog.checkbox.get_active()
dialog.destroy()
if response == gtk.ResponseType.OK:
if checkbox:
self.np.config.sections["ui"]["exitdialog"] = 0
if self.TrayApp.trayicon:
self.TrayApp.destroy_trayicon()
self.MainWindow.destroy()
gtk.main_quit()
elif response == gtk.ResponseType.CANCEL:
pass
elif response == gtk.ResponseType.REJECT:
if checkbox:
self.np.config.sections["ui"]["exitdialog"] = 2
if self.is_mapped:
self.MainWindow.unmap()
self.is_mapped = False
def on_clear_response(self, dialog, response, direction):
dialog.destroy()
if response == gtk.ResponseType.OK:
if direction == "down":
self.downloads.ClearTransfers(["Queued"])
elif direction == "up":
self.uploads.ClearTransfers(["Queued"])
def onOpenRoomList(self, dialog, response):
dialog.destroy()
if response == gtk.ResponseType.OK:
self.show_room_list1.set_active(True)
def OnGetUserInfo(self, widget):
text = self.UserInfoCombo.get_child().get_text()
if not text:
return
self.LocalUserInfoRequest(text)
self.UserInfoCombo.get_child().set_text("")
def OnGetShares(self, widget):
text = self.UserBrowseCombo.get_child().get_text()
if not text:
return
self.BrowseUser(text)
self.UserBrowseCombo.get_child().set_text("")
def OnLoadFromDisk(self, widget):
sharesdir = os.path.join(self.data_dir, "usershares")
try:
if not os.path.exists(sharesdir):
os.makedirs(sharesdir)
except Exception as msg:
log.addwarning(_("Can't create directory '%(folder)s', reported error: %(error)s") % {'folder': sharesdir, 'error': msg})
shares = ChooseFile(self.MainWindow.get_toplevel(), sharesdir, multiple=True)
if shares is None:
return
for share in shares:
try:
import pickle as mypickle
import bz2
sharefile = bz2.BZ2File(share)
mylist = mypickle.load(sharefile)
sharefile.close()
if not isinstance(mylist, (list, dict)):
raise TypeError("Bad data in file %(sharesdb)s" % {'sharesdb': share})
username = share.split(os.sep)[-1]
self.userbrowse.InitWindow(username, None)
if username in self.userbrowse.users:
self.userbrowse.users[username].LoadShares(mylist)
except Exception as msg:
log.addwarning(_("Loading Shares from disk failed: %(error)s") % {'error': msg})
def OnNowPlayingConfigure(self, widget):
self.now.NowPlaying.show()
self.now.NowPlaying.deiconify()
def OnGetPrivateChat(self, widget):
text = self.UserPrivateCombo.get_child().get_text()
if not text:
return
self.privatechats.SendMessage(text, None, 1)
self.UserPrivateCombo.get_child().set_text("")
def OnOpenPrivateChat(self, widget, prefix=""):
# Prompt for a user name; pre-fill the droplist with the buddy list
users = []
for entry in self.np.config.sections["server"]["userlist"]:
users.append(entry[0])
users.sort()
user = input_box(
self,
title=_('Nicotine+:') + " " + _("Start Message"),
message=_('Enter the user to whom you wish to send a private message:'),
droplist=users
)
if user is not None:
self.privatechats.SendMessage(user, None, 1)
self.ChangeMainPage(None, "chatrooms")
def OnGetAUsersInfo(self, widget, prefix=""):
# Prompt for a user name; pre-fill the droplist with the buddy list
users = []
for entry in self.np.config.sections["server"]["userlist"]:
users.append(entry[0])
users.sort()
user = input_box(
self,
title=_('Nicotine+: Get User Info'),
message=_('Enter the User whose User Info you wish to receive:'),
droplist=users
)
if user is not None:
self.LocalUserInfoRequest(user)
def OnGetAUsersIP(self, widget, prefix=""):
users = []
for entry in self.np.config.sections["server"]["userlist"]:
users.append(entry[0])
users.sort()
user = input_box(
self,
title=_("Nicotine+: Get A User's IP"),
message=_('Enter the User whose IP Address you wish to receive:'),
droplist=users
)
if user is not None:
self.np.queue.put(slskmessages.GetPeerAddress(user))
def OnGetAUsersShares(self, widget, prefix=""):
users = []
for entry in self.np.config.sections["server"]["userlist"]:
users.append(entry[0])
users.sort()
user = input_box(
self,
title=_("Nicotine+: Get A User's Shares List"),
message=_('Enter the User whose Shares List you wish to receive:'),
droplist=users
)
if user is not None:
self.BrowseUser(user)
def button_press(self, widget, event):
try:
if event.type == Gdk.EventType.BUTTON_PRESS:
widget.popup(None, None, None, None, event.button, event.time)
# Tell calling code that we have handled this event; the buck
# stops here.
return True
# Tell calling code that we have not handled this event; pass it on.
return False
except Exception as e:
log.addwarning(_("button_press error, %(error)s") % {'error': e})
def OnPageRemoved(self, MainNotebook, child, page_num):
name = self.MatchMainNotebox(child)
self.np.config.sections["ui"]["modes_visible"][name] = 0
self.OnPageReordered(MainNotebook, child, page_num)
def OnPageAdded(self, MainNotebook, child, page_num):
name = self.MatchMainNotebox(child)
self.np.config.sections["ui"]["modes_visible"][name] = 1
self.OnPageReordered(MainNotebook, child, page_num)
def OnPageReordered(self, MainNotebook, child, page_num):
if self.exiting:
return
tabs = []
for children in self.MainNotebook.get_children():
tabs.append(self.MatchMainNotebox(children))
self.np.config.sections["ui"]["modes_order"] = tabs
def SetMainTabsVisibility(self):
tabs = self.temp_modes_order
order = 0
for name in tabs:
tab = self.MatchMainNamePage(name)
self.MainNotebook.reorder_child(tab, order)
order += 1
visible = self.np.config.sections["ui"]["modes_visible"]
for name in visible:
tab = self.MatchMainNamePage(name)
if tab is None:
continue
eventbox = self.MainNotebook.get_tab_label(tab)
if not visible[name]:
if tab not in self.MainNotebook.get_children():
continue
if tab in self.HiddenTabs:
continue
self.HiddenTabs[tab] = eventbox
num = self.MainNotebook.page_num(tab)
self.MainNotebook.remove_page(num)
def SetLastSessionTab(self):
try:
if self.np.config.sections["ui"]["tab_select_previous"]:
lasttabid = int(self.np.config.sections["ui"]["last_tab_id"])
if 0 <= lasttabid <= self.MainNotebook.get_n_pages():
self.MainNotebook.set_current_page(lasttabid)
return
except Exception:
pass
self.MainNotebook.set_current_page(0)
def HideTab(self, widget, lista):
eventbox, child = lista
tab = self.__dict__[child]
if tab not in self.MainNotebook.get_children():
return
if tab in self.HiddenTabs:
return
self.HiddenTabs[tab] = eventbox
num = self.MainNotebook.page_num(tab)
self.MainNotebook.remove_page(num)
def ShowTab(self, widget, lista):
name, child = lista
if child in self.MainNotebook.get_children():
return
if child not in self.HiddenTabs:
return
eventbox = self.HiddenTabs[child]
self.MainNotebook.append_page(child, eventbox)
self.MainNotebook.set_tab_reorderable(child, self.np.config.sections["ui"]["tab_reorderable"])
del self.HiddenTabs[child]
def on_tab_click(self, widget, event, id, child):
if event.type == Gdk.EventType.BUTTON_PRESS and event.button == 3:
self.__dict__[id].popup(None, None, None, None, event.button, event.time)
def BuddiesCombosFill(self, nothing):
for widget in self.BuddiesComboEntries:
gobject.idle_add(widget.Fill)
def OnAutoAway(self):
if not self.away:
self.autoaway = True
self.OnAway(None)
return False
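# Any button press counts as user activity: leave auto-away if we were
# auto-away, and restart the inactivity timer with the configured
# timeout (given in minutes)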
def OnButtonPress(self, widget, event):
if self.autoaway:
self.OnAway(None)
self.autoaway = False
if self.awaytimer is not None:
gobject.source_remove(self.awaytimer)
autoaway = self.np.config.sections["server"]["autoaway"]
if autoaway > 0:
self.awaytimer = gobject.timeout_add(1000 * 60 * autoaway, self.OnAutoAway)
else:
self.awaytimer = None
def OnKeyPress(self, widget, event):
self.OnButtonPress(None, None)
if event.state & (Gdk.ModifierType.MOD1_MASK | Gdk.ModifierType.CONTROL_MASK) != Gdk.ModifierType.MOD1_MASK:
return False
for i in range(1, 10):
if event.keyval == Gdk.keyval_from_name(str(i)):
self.MainNotebook.set_current_page(i - 1)
widget.emit_stop_by_name("key_press_event")
return True
return False
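# Hand queued network messages over to the GTK side, split by priority:
# file search results are emitted on the separate "network_event_lo"
# signal, presumably so a flood of results cannot starve the rest of the
# UI; everything else goes out on "network_event"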
def emit_network_event(self, msgs):
lo = [msg for msg in msgs if msg.__class__ is slskmessages.FileSearchResult]
hi = [msg for msg in msgs if msg.__class__ is not slskmessages.FileSearchResult]
if hi:
self.MainWindow.emit("network_event", hi)
if lo:
self.MainWindow.emit("network_event_lo", lo)
return False
# Received a network event via emit_network_event
# with at least one, but possibly more, messages;
# call the appropriate event handler for each message
# @param self NicotineFrame (Class)
# @param widget the main window
# @param msgs a list of messages
def OnNetworkEvent(self, widget, msgs):
for i in msgs:
if i.__class__ in self.np.events:
self.np.events[i.__class__](i)
else:
self.logMessage("No handler for class %s %s" % (i.__class__, vars(i)))
def callback(self, msgs):
if len(msgs) > 0:
gobject.idle_add(self.emit_network_event, msgs[:])
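# Like callback(), but coalesces DownloadFile/UploadFile progress
# messages per connection and only posts them once a second (or when a
# connection closes), via postTransferMsgs, to limit UI churn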
def networkcallback(self, msgs):
curtime = time.time()
for i in msgs[:]:
if i.__class__ is slskmessages.DownloadFile or i.__class__ is slskmessages.UploadFile:
self.transfermsgs[i.conn] = i
msgs.remove(i)
if i.__class__ is slskmessages.ConnClose:
msgs = self.postTransferMsgs(msgs, curtime)
if curtime - self.transfermsgspostedtime > 1.0:
msgs = self.postTransferMsgs(msgs, curtime)
if len(msgs) > 0:
gobject.idle_add(self.emit_network_event, msgs[:])
def postTransferMsgs(self, msgs, curtime):
trmsgs = []
for (key, value) in self.transfermsgs.items():
trmsgs.append(value)
msgs = trmsgs + msgs
self.transfermsgs = {}
self.transfermsgspostedtime = curtime
return msgs
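# Cell data function shared by several list views: paints every cell
# with the configured search-result foreground colour (an empty string
# means the theme default)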
def CellDataFunc(self, column, cellrenderer, model, iter, dummy="dummy"):
colour = self.np.config.sections["ui"]["search"]
if colour == "":
colour = None
cellrenderer.set_property("foreground", colour)
def ChangeListFont(self, listview, font):
for c in listview.get_columns():
for r in c.get_cells():
if type(r) in (gtk.CellRendererText, gtk.CellRendererCombo):
r.set_property("font", font)
def UpdateColours(self, first=0):
if first:
self.tag_log = self.LogWindow.get_buffer().create_tag()
color = self.np.config.sections["ui"]["chatremote"]
if color == "":
color = None
self.tag_log.set_property("foreground", color)
font = self.np.config.sections["ui"]["chatfont"]
self.tag_log.set_property("font", font)
self.SetTextBG(self.LogWindow)
self.SetTextBG(self.userlist.UserList)
# self.ChangeListFont( self.UserList, self.frame.np.config.sections["ui"]["listfont"])
for listview in [
self.userlist.UserList,
self.RecommendationsList,
self.UnrecommendationsList,
self.RecommendationUsersList,
self.LikesList,
self.DislikesList,
self.roomlist.RoomsList
]:
self.ChangeListFont(listview, self.np.config.sections["ui"]["listfont"])
self.SetTextBG(self.RecommendationsList)
self.SetTextBG(self.UnrecommendationsList)
self.SetTextBG(self.RecommendationUsersList)
self.SetTextBG(self.LikesList)
self.SetTextBG(self.DislikesList)
self.SetTextBG(self.UserPrivateCombo.get_child())
self.SetTextBG(self.UserInfoCombo.get_child())
self.SetTextBG(self.UserBrowseCombo.get_child())
self.SetTextBG(self.SearchEntry)
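# Apply the configured background colour to a widget; an empty string
# means "keep the theme default". Entries and spin buttons also get the
# configured input text colour, tree views the search-result colour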
def SetTextBG(self, widget, bgcolor="", fgcolor=""):
if bgcolor == "" and self.np.config.sections["ui"]["textbg"] == "":
colour = None
else:
if bgcolor == "":
bgcolor = self.np.config.sections["ui"]["textbg"]
colour = Gdk.color_parse(bgcolor)
widget.modify_base(gtk.StateFlags.NORMAL, colour)
widget.modify_bg(gtk.StateFlags.NORMAL, colour)
widgetlist = [gtk.Entry, gtk.SpinButton]
if SEXY:
widgetlist.append(sexy.SpellEntry)
if type(widget) in widgetlist:
if fgcolor != "":
colour = Gdk.color_parse(fgcolor)
elif fgcolor == "" and self.np.config.sections["ui"]["inputcolor"] == "":
colour = None
elif fgcolor == "" and self.np.config.sections["ui"]["inputcolor"] != "":
fgcolor = self.np.config.sections["ui"]["inputcolor"]
colour = Gdk.color_parse(fgcolor)
widget.modify_text(gtk.StateFlags.NORMAL, colour)
widget.modify_fg(gtk.StateFlags.NORMAL, colour)
if type(widget) is gtk.TreeView:
colour = self.np.config.sections["ui"]["search"]
if colour == "":
colour = None
for c in widget.get_columns():
for r in c.get_cells():
if type(r) in (gtk.CellRendererText, gtk.CellRendererCombo):
r.set_property("foreground", colour)
def PopupMessage(self, popup):
dialog = gtk.MessageDialog(type=gtk.MessageType.WARNING, buttons=gtk.ButtonsType.OK, message_format=popup.title)
dialog.format_secondary_text(popup.message)
dialog.connect('response', lambda dialog, response: dialog.destroy())
dialog.show()
def logCallback(self, timestamp, level, msg):
gobject.idle_add(self.updateLog, msg, level, priority=gobject.PRIORITY_DEFAULT)
def logMessage(self, msg, debugLevel=0):
log.add(msg, debugLevel)
def updateLog(self, msg, debugLevel=None):
'''For information about debug levels see
pydoc pynicotine.logfacility.logger.add
'''
if self.np.config.sections["logging"]["debug"]:
if debugLevel in (None, 0) or debugLevel in self.np.config.sections["logging"]["debugmodes"]:
AppendLine(self.LogWindow, msg, self.tag_log, scroll=True)
if self.np.config.sections["logging"]["logcollapsed"]:
self.SetStatusText(msg)
else:
if debugLevel in (None, 0, 1):
try:
AppendLine(self.LogWindow, msg, self.tag_log, scroll=True)
if self.np.config.sections["logging"]["logcollapsed"]:
self.SetStatusText(msg)
except Exception as e:
print(e)
return False
def ScrollBottom(self, widget):
va = widget.get_vadjustment()
try:
va.set_value(va.upper - va.page_size)
except AttributeError:
pass
widget.set_vadjustment(va)
return False
def SetStatusText(self, msg):
self.Statusbar.pop(self.status_context_id)
self.Statusbar.push(self.status_context_id, str(msg))
def OnWindowChange(self, widget, blag):
(width, height) = self.MainWindow.get_size()
self.np.config.sections["ui"]["height"] = height
self.np.config.sections["ui"]["width"] = width
(xpos, ypos) = self.MainWindow.get_position()
self.np.config.sections["ui"]["xposition"] = xpos
self.np.config.sections["ui"]["yposition"] = ypos
def OnDestroy(self, widget):
self.SaveColumns()
self.np.config.sections["ui"]["last_tab_id"] = self.MainNotebook.get_current_page()
self.np.config.sections["privatechat"]["users"] = list(self.privatechats.users.keys())
self.np.protothread.abort()
self.np.StopTimers()
if not self.manualdisconnect:
self.OnDisconnect(None)
self.np.config.writeConfig()
# Cleaning up the trayicon
if self.TrayApp.trayicon:
self.TrayApp.destroy_trayicon()
# Closing up all shelves db
for db in [
"sharedfiles", "sharedfilesstreams", "wordindex",
"fileindex", "sharedmtimes",
"bsharedfiles", "bsharedfilesstreams", "bwordindex",
"bfileindex", "bsharedmtimes"
]:
self.np.config.sections["transfers"][db].close()
# Exiting GTK
gtk.main_quit()
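# First connection attempt: if UPnP is enabled, try to forward the
# listening port in a background thread before connecting; otherwise
# (or if UPnP is unavailable) connect right away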
def OnFirstConnect(self, widget):
# Test if we want to do a port mapping
if self.np.config.sections["server"]["upnp"]:
# Initialise a UPnPPortMapping object
upnp = UPnPPortMapping()
# Check if we can do a port mapping
(self.upnppossible, errors) = upnp.IsPossible()
# Test if we are able to do a port mapping
if self.upnppossible:
# Do the port mapping
_thread.start_new_thread(upnp.AddPortMapping, (self, self.np))
else:
# Display errors
if errors is not None:
for err in errors:
log.addwarning(err)
# If not we connect without changing anything
self.OnConnect(-1)
else:
# If not we connect without changing anything
self.OnConnect(-1)
def OnConnect(self, widget):
self.TrayApp.tray_status["status"] = "trayicon_connect"
self.TrayApp.SetImage()
if self.np.serverconn is not None:
return
if widget != -1:
while not self.np.queue.empty():
self.np.queue.get(0)
self.SetUserStatus("...")
server = self.np.config.sections["server"]["server"]
self.SetStatusText(_("Connecting to %(host)s:%(port)s") % {'host': server[0], 'port': server[1]})
self.np.queue.put(slskmessages.ServerConn(None, server))
if self.np.servertimer is not None:
self.np.servertimer.cancel()
self.np.servertimer = None
def OnDisconnect(self, event):
self.disconnect1.set_sensitive(0)
self.manualdisconnect = 1
self.np.queue.put(slskmessages.ConnClose(self.np.serverconn))
def FetchUserListStatus(self):
for user in self.userlist.userlist:
self.np.queue.put(slskmessages.AddUser(user[0]))
return False
def ConnClose(self, conn, addr):
if self.awaytimer is not None:
gobject.source_remove(self.awaytimer)
self.awaytimer = None
if self.autoaway:
self.autoaway = self.away = False
self.SetWidgetOnlineStatus(False)
self.SetUserStatus(_("Offline"))
self.TrayApp.tray_status["status"] = "trayicon_disconnect"
self.TrayApp.SetImage()
self.Searches.interval = 0
self.chatrooms.ConnClose()
self.privatechats.ConnClose()
self.Searches.ConnClose()
self.uploads.ConnClose()
self.downloads.ConnClose()
self.userlist.ConnClose()
self.userinfo.ConnClose()
self.userbrowse.ConnClose()
def SetWidgetOnlineStatus(self, status):
self.connect1.set_sensitive(not status)
self.disconnect1.set_sensitive(status)
self.awayreturn1.set_sensitive(status)
self.check_privileges1.set_sensitive(status)
self.roomlist.CreateRoomEntry.set_sensitive(status)
self.roomlist.RoomsList.set_sensitive(status)
self.roomlist.SearchRooms.set_sensitive(status)
self.roomlist.FindRoom.set_sensitive(status)
self.UserPrivateCombo.set_sensitive(status)
self.sPrivateChatButton.set_sensitive(status)
self.UserBrowseCombo.set_sensitive(status)
self.sSharesButton.set_sensitive(status)
self.UserInfoCombo.set_sensitive(status)
self.sUserinfoButton.set_sensitive(status)
self.UserSearchCombo.set_sensitive(status)
self.SearchEntryCombo.set_sensitive(status)
self.SearchButton.set_sensitive(status)
self.SimilarUsersButton.set_sensitive(status)
self.GlobalRecommendationsButton.set_sensitive(status)
self.RecommendationsButton.set_sensitive(status)
self.DownloadButtons.set_sensitive(status)
self.UploadButtons.set_sensitive(status)
def ConnectError(self, conn):
self.SetWidgetOnlineStatus(False)
self.SetUserStatus(_("Offline"))
self.TrayApp.tray_status["status"] = "trayicon_disconnect"
self.TrayApp.SetImage()
self.uploads.ConnClose()
self.downloads.ConnClose()
def SetUserStatus(self, status):
self.UserStatus.pop(self.user_context_id)
self.UserStatus.push(self.user_context_id, status)
def SetSocketStatus(self, status):
self.SocketStatus.pop(self.socket_context_id)
self.SocketStatus.push(self.socket_context_id, _("%(current)s/%(limit)s Connections") % {'current': status, 'limit': slskproto.MAXFILELIMIT})
def InitInterface(self, msg):
if self.away == 0:
self.SetUserStatus(_("Online"))
self.TrayApp.tray_status["status"] = "trayicon_connect"
self.TrayApp.SetImage()
autoaway = self.np.config.sections["server"]["autoaway"]
if autoaway > 0:
self.awaytimer = gobject.timeout_add(1000 * 60 * autoaway, self.OnAutoAway)
else:
self.awaytimer = None
else:
self.SetUserStatus(_("Away"))
self.TrayApp.tray_status["status"] = "trayicon_away"
self.TrayApp.SetImage()
self.SetWidgetOnlineStatus(True)
self.uploads.InitInterface(self.np.transfers.uploads)
self.downloads.InitInterface(self.np.transfers.downloads)
gobject.idle_add(self.FetchUserListStatus)
if msg.banner != "":
AppendLine(self.LogWindow, msg.banner, self.tag_log)
return self.privatechats, self.chatrooms, self.userinfo, self.userbrowse, self.Searches, self.downloads, self.uploads, self.userlist
def GetStatusImage(self, status):
if status == 1:
return self.images["away"]
elif status == 2:
return self.images["online"]
else:
return self.images["offline"]
def HasUserFlag(self, user, flag):
if flag not in self.flag_images:
self.GetFlagImage(flag)
if flag not in self.flag_images:
return
self.flag_users[user] = flag
self.chatrooms.roomsctrl.SetUserFlag(user, flag)
self.userlist.SetUserFlag(user, flag)
def GetUserFlag(self, user):
if user not in self.flag_users:
for i in self.np.config.sections["server"]["userlist"]:
if user == i[0] and i[6] is not None:
return i[6]
return None
else:
return self.flag_users[user]
def GetFlagImage(self, flag):
if flag is None:
return
if flag not in self.flag_images:
if hasattr(imagedata, flag):
img = None
try:
loader = GdkPixbuf.PixbufLoader()
data = getattr(imagedata, flag)
loader.write(data)
loader.close()
img = loader.get_pixbuf()
except Exception as e:
log.addwarning(_("Error loading image for %(flag)s: %(error)s") % {'flag': flag, 'error': e})
self.flag_images[flag] = img
return img
else:
return None
else:
return self.flag_images[flag]
def OnShowDebug(self, widget):
if not self.startup:
self.np.config.sections["logging"]["debug"] = self.show_debug_info1.get_active()
if self.show_debug_info1.get_active():
self.debugButtonsBox.show()
else:
self.debugButtonsBox.hide()
def OnAway(self, widget):
self.away = (self.away + 1) % 2
if self.away == 0:
self.SetUserStatus(_("Online"))
self.TrayApp.tray_status["status"] = "trayicon_connect"
self.TrayApp.SetImage()
else:
self.SetUserStatus(_("Away"))
self.TrayApp.tray_status["status"] = "trayicon_away"
self.TrayApp.SetImage()
self.np.queue.put(slskmessages.SetStatus(self.away and 1 or 2))
self.privatechats.UpdateColours()
def OnExit(self, widget):
self.exiting = 1
self.MainWindow.destroy()
def OnSearch(self, widget):
self.Searches.OnSearch()
def OnClearSearchHistory(self, widget):
self.Searches.OnClearSearchHistory()
def ChatRequestIcon(self, status=0, widget=None):
if status == 1 and not self.got_focus:
self.MainWindow.set_icon(self.images["hilite"])
if self.MainNotebook.get_current_page() == self.MainNotebook.page_num(self.chathbox):
return
tablabel = self.GetTabLabel(self.ChatTabLabel)
if not tablabel:
return
if status == 0:
if tablabel.get_image() == self.images["hilite"]:
return
tablabel.set_image(status == 1 and self.images["hilite"] or self.images["hilite3"])
tablabel.set_text_color(status + 1)
def GetTabLabel(self, TabLabel):
tablabel = None
if type(TabLabel) is ImageLabel:
tablabel = TabLabel
elif type(TabLabel) is gtk.EventBox:
tablabel = TabLabel.get_child()
return tablabel
def RequestIcon(self, TabLabel, widget=None):
if TabLabel == self.PrivateChatTabLabel and not self.got_focus:
self.MainWindow.set_icon(self.images["hilite"])
tablabel = self.GetTabLabel(TabLabel)
if not tablabel:
return
if self.current_tab != TabLabel:
tablabel.set_image(self.images["hilite"])
tablabel.set_text_color(2)
def OnSwitchPage(self, notebook, page, page_nr):
tabLabels = []
tabs = self.MainNotebook.get_children()
for i in tabs:
tabLabels.append(self.MainNotebook.get_tab_label(i))
l = tabLabels[page_nr] # noqa: E741
compare = {
self.ChatTabLabel: self.ChatNotebook,
self.PrivateChatTabLabel: self.PrivatechatNotebook,
self.DownloadsTabLabel: None,
self.UploadsTabLabel: None,
self.SearchTabLabel: self.SearchNotebook,
self.UserInfoTabLabel: self.UserInfoNotebook,
self.UserBrowseTabLabel: self.UserBrowseNotebook,
self.InterestsTabLabel: None
}
if "BuddiesTabLabel" in self.__dict__:
compare[self.BuddiesTabLabel] = None
n = compare[l]
self.current_tab = l
if l is not None:
if type(l) is ImageLabel:
l.set_image(self.images["empty"])
l.set_text_color(0)
elif type(l) is gtk.EventBox:
l.get_child().set_image(self.images["empty"])
l.get_child().set_text_color(0)
if n is not None and type(n.Notebook) not in [gtk.HPaned, gtk.VBox]:
n.popup_disable()
n.popup_enable()
if n.get_current_page() != -1:
n.dismiss_icon(n, None, n.get_current_page())
if page_nr == self.MainNotebook.page_num(self.chathbox):
if self.chatrooms:
p = n.get_current_page()
self.chatrooms.roomsctrl.OnSwitchPage(n.Notebook, None, p, 1)
elif page_nr == self.MainNotebook.page_num(self.privatevbox):
p = n.get_current_page()
if "privatechats" in self.__dict__:
self.privatechats.OnSwitchPage(n.Notebook, None, p, 1)
elif page_nr == self.MainNotebook.page_num(self.uploadsvbox):
self.uploads._update()
elif page_nr == self.MainNotebook.page_num(self.downloadsvbox):
self.downloads._update()
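# Recompute the transfer statistics shown in the status bar: per
# direction, the number of active connections and their summed speed,
# plus the number of distinct users and files across all transfers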
def UpdateBandwidth(self):
def _calc(line):
bandwidth = 0.0
users = 0 # noqa: F841
line = [i for i in line if i.conn is not None] # noqa: E741
for i in line:
if i.speed is not None:
bandwidth = bandwidth + i.speed
return len(line), bandwidth
def _num_users(line):
users = []
for i in line:
if i.user not in users:
users.append(i.user)
return len(users), len(line)
if self.np.transfers is not None:
usersdown, down = _calc(self.np.transfers.downloads)
usersup, up = _calc(self.np.transfers.uploads)
total_usersdown, filesdown = _num_users(self.np.transfers.downloads)
total_usersup, filesup = _num_users(self.np.transfers.uploads)
else:
down = up = 0.0
filesup = filesdown = total_usersdown = total_usersup = usersdown = usersup = 0
self.DownloadUsers.set_text(_("Users: %s") % total_usersdown)
self.UploadUsers.set_text(_("Users: %s") % total_usersup)
self.DownloadFiles.set_text(_("Files: %s") % filesdown)
self.UploadFiles.set_text(_("Files: %s") % filesup)
self.DownStatus.pop(self.down_context_id)
self.UpStatus.pop(self.up_context_id)
self.DownStatus.push(self.down_context_id, _("Down: %(num)i users, %(speed).1f KB/s") % {'num': usersdown, 'speed': down})
self.UpStatus.push(self.up_context_id, _("Up: %(num)i users, %(speed).1f KB/s") % {'num': usersup, 'speed': up})
self.TrayApp.SetToolTip(_("Nicotine+ Transfers: %(speeddown).1f KB/s Down, %(speedup).1f KB/s Up") % {'speeddown': down, 'speedup': up})
def BanUser(self, user):
if self.np.transfers is not None:
self.np.transfers.BanUser(user)
def UserIpIsBlocked(self, user):
for ip, username in list(self.np.config.sections["server"]["ipblocklist"].items()):
if user == username:
return True
return False
def BlockedUserIp(self, user):
for ip, username in list(self.np.config.sections["server"]["ipblocklist"].items()):
if user == username:
return ip
return None
def UserIpIsIgnored(self, user):
for ip, username in list(self.np.config.sections["server"]["ipignorelist"].items()):
if user == username:
return True
return False
def IgnoredUserIp(self, user):
for ip, username in list(self.np.config.sections["server"]["ipignorelist"].items()):
if user == username:
return ip
return None
def IgnoreIP(self, ip):
if ip is None or ip == "" or ip.count(".") != 3:
return
ipignorelist = self.np.config.sections["server"]["ipignorelist"]
if ip not in ipignorelist:
ipignorelist[ip] = ""
self.np.config.writeConfiguration()
self.settingswindow.pages["Ignore List"].SetSettings(self.np.config.sections)
def OnIgnoreIP(self, user):
if user not in self.np.users or type(self.np.users[user].addr) is not tuple:
if user not in self.np.ipignore_requested:
self.np.ipignore_requested[user] = 0
self.np.queue.put(slskmessages.GetPeerAddress(user))
return
ipignorelist = self.np.config.sections["server"]["ipignorelist"]
ip, port = self.np.users[user].addr
if ip not in ipignorelist or self.np.config.sections["server"]["ipignorelist"][ip] != user:
self.np.config.sections["server"]["ipignorelist"][ip] = user
self.np.config.writeConfiguration()
self.settingswindow.pages["Ignore List"].SetSettings(self.np.config.sections)
def OnUnIgnoreIP(self, user):
ipignorelist = self.np.config.sections["server"]["ipignorelist"]
if self.UserIpIsIgnored(user):
ip = self.IgnoredUserIp(user)
if ip is not None:
del ipignorelist[ip]
self.np.config.writeConfiguration()
self.settingswindow.pages["Ignore List"].SetSettings(self.np.config.sections)
return True
if user not in self.np.users:
if user not in self.np.ipignore_requested:
self.np.ipignore_requested[user] = 1
self.np.queue.put(slskmessages.GetPeerAddress(user))
return
if type(self.np.users[user].addr) is not tuple:
return
ip, port = self.np.users[user].addr
if ip in ipignorelist:
del ipignorelist[ip]
self.np.config.writeConfiguration()
self.settingswindow.pages["Ignore List"].SetSettings(self.np.config.sections)
def OnBlockUser(self, user):
if user not in self.np.users or type(self.np.users[user].addr) is not tuple:
if user not in self.np.ipblock_requested:
self.np.ipblock_requested[user] = 0
self.np.queue.put(slskmessages.GetPeerAddress(user))
return
ip, port = self.np.users[user].addr
if ip not in self.np.config.sections["server"]["ipblocklist"] or self.np.config.sections["server"]["ipblocklist"][ip] != user:
self.np.config.sections["server"]["ipblocklist"][ip] = user
self.np.config.writeConfiguration()
self.settingswindow.pages["Ban List"].SetSettings(self.np.config.sections)
def OnUnBlockUser(self, user):
if self.UserIpIsBlocked(user):
ip = self.BlockedUserIp(user)
if ip is not None:
del self.np.config.sections["server"]["ipblocklist"][ip]
self.np.config.writeConfiguration()
self.settingswindow.pages["Ban List"].SetSettings(self.np.config.sections)
return True
if user not in self.np.users:
if user not in self.np.ipblock_requested:
self.np.ipblock_requested[user] = 1
self.np.queue.put(slskmessages.GetPeerAddress(user))
return
if type(self.np.users[user].addr) is not tuple:
return
ip, port = self.np.users[user].addr
if ip in self.np.config.sections["server"]["ipblocklist"]:
del self.np.config.sections["server"]["ipblocklist"][ip]
self.np.config.writeConfiguration()
self.settingswindow.pages["Ban List"].SetSettings(self.np.config.sections)
def UnbanUser(self, user):
if user in self.np.config.sections["server"]["banlist"]:
self.np.config.sections["server"]["banlist"].remove(user)
self.np.config.writeConfiguration()
def IgnoreUser(self, user):
if user not in self.np.config.sections["server"]["ignorelist"]:
self.np.config.sections["server"]["ignorelist"].append(user)
self.np.config.writeConfiguration()
def UnignoreUser(self, user):
if user in self.np.config.sections["server"]["ignorelist"]:
self.np.config.sections["server"]["ignorelist"].remove(user)
self.np.config.writeConfiguration()
def OnRescan(self, widget=None, rebuild=False):
if self.rescanning:
return
self.rescanning = 1
self.rescan_public.set_sensitive(False)
self.rebuild_public.set_sensitive(False)
self.browse_public_shares.set_sensitive(False)
self.logMessage(_("Rescanning started"))
shared = self.np.config.sections["transfers"]["shared"][:]
if self.np.config.sections["transfers"]["sharedownloaddir"]:
shared.append((_('Downloaded'), self.np.config.sections["transfers"]["downloaddir"]))
cleanedshares = []
for combo in shared:
if combo not in cleanedshares:
cleanedshares.append(combo)
msg = slskmessages.RescanShares(cleanedshares, lambda: None)
_thread.start_new_thread(self.np.shares.RescanShares, (msg, rebuild))
def OnRebuild(self, widget=None):
self.OnRescan(widget, rebuild=True)
def OnBuddyRescan(self, widget=None, rebuild=False):
if self.brescanning:
return
self.brescanning = 1
self.rescan_buddy.set_sensitive(False)
self.rebuild_buddy.set_sensitive(False)
self.browse_buddy_shares.set_sensitive(False)
self.logMessage(_("Rescanning Buddy Shares started"))
shared = self.np.config.sections["transfers"]["buddyshared"][:] + self.np.config.sections["transfers"]["shared"][:]
if self.np.config.sections["transfers"]["sharedownloaddir"]:
shared.append((_('Downloaded'), self.np.config.sections["transfers"]["downloaddir"]))
cleanedshares = []
for i in shared:
if i not in cleanedshares:
cleanedshares.append(i)
msg = slskmessages.RescanBuddyShares(cleanedshares, lambda: None)
_thread.start_new_thread(self.np.shares.RescanBuddyShares, (msg, rebuild))
def OnBuddyRebuild(self, widget=None):
self.OnBuddyRescan(widget, rebuild=True)
def _BuddyRescanFinished(self, data):
self.np.config.setBuddyShares(*data)
self.np.config.writeShares()
if self.np.config.sections["transfers"]["enablebuddyshares"]:
self.rescan_buddy.set_sensitive(True)
self.rebuild_buddy.set_sensitive(True)
self.browse_buddy_shares.set_sensitive(True)
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
self.brescanning = 0
self.logMessage(_("Rescanning Buddy Shares finished"))
self.BuddySharesProgress.hide()
self.np.shares.CompressShares("buddy")
def _RescanFinished(self, data):
self.np.config.setShares(*data)
self.np.config.writeShares()
if self.np.config.sections["transfers"]["shared"]:
self.rescan_public.set_sensitive(True)
self.rebuild_public.set_sensitive(True)
self.browse_public_shares.set_sensitive(True)
if self.np.transfers is not None:
self.np.shares.sendNumSharedFoldersFiles()
self.rescanning = 0
self.logMessage(_("Rescanning finished"))
self.SharesProgress.hide()
self.np.shares.CompressShares("normal")
def RescanFinished(self, data, type):
if type == "buddy":
gobject.idle_add(self._BuddyRescanFinished, data)
elif type == "normal":
gobject.idle_add(self._RescanFinished, data)
def OnSettingsShares(self, widget):
self.OnSettings(widget, 'Shares')
def OnSettingsSearches(self, widget):
self.OnSettings(widget, 'Searches')
def OnSettingsDownloads(self, widget):
self.OnSettings(widget, 'Downloads')
self.settingswindow.pages["Downloads"].DownloadFilters.set_expanded(True)
def OnSettingsUploads(self, widget):
self.OnSettings(widget, 'Transfers')
self.settingswindow.pages["Transfers"].Uploads.set_expanded(True)
def OnSettingsUserinfo(self, widget):
self.OnSettings(widget, 'User info')
def OnSettingsLogging(self, widget):
self.OnSettings(widget, 'Logging')
def OnSettingsIgnore(self, widget):
self.OnSettings(widget, 'Ignore List')
def OnSettingsBanIgnore(self, widget):
self.OnSettings(widget, 'Ban List')
def OnFastConfigure(self, widget):
if not self.settingswindow.SettingsWindow.get_property("visible"):
self.fastconfigure.show()
def OnSettings(self, widget, page=None):
if not self.fastconfigure.window.get_property("visible"):
self.settingswindow.SetSettings(self.np.config.sections)
if page:
self.settingswindow.SwitchToPage(page)
self.settingswindow.SettingsWindow.show()
self.settingswindow.SettingsWindow.deiconify()
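# Apply the outcome of the settings dialog: merge the returned sections
# into the live config, push new limits and geo-blocking to the network
# thread, refresh colours, completions and tab layout, and trigger
# share rescans when required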
def OnSettingsClosed(self, widget, msg):
if msg == "cancel":
self.settingswindow.SettingsWindow.hide()
return
output = self.settingswindow.GetSettings()
if type(output) is not tuple:
return
if msg == "ok":
self.settingswindow.SettingsWindow.hide()
needrescan, needcolors, needcompletion, config = output
for (key, data) in list(config.items()):
self.np.config.sections[key].update(data)
config = self.np.config.sections
# Write utils.py options
utils.DECIMALSEP = config["ui"]["decimalsep"]
utils.CATCH_URLS = config["urls"]["urlcatching"]
utils.HUMANIZE_URLS = config["urls"]["humanizeurls"]
utils.PROTOCOL_HANDLERS = config["urls"]["protocols"].copy()
utils.PROTOCOL_HANDLERS["slsk"] = self.OnSoulSeek
utils.USERNAMEHOTSPOTS = config["ui"]["usernamehotspots"]
uselimit = config["transfers"]["uselimit"]
uploadlimit = config["transfers"]["uploadlimit"]
limitby = config["transfers"]["limitby"]
if config["transfers"]["geoblock"]:
panic = config["transfers"]["geopanic"]
cc = config["transfers"]["geoblockcc"]
self.np.queue.put(slskmessages.SetGeoBlock([panic, cc]))
else:
self.np.queue.put(slskmessages.SetGeoBlock(None))
self.np.queue.put(slskmessages.SetUploadLimit(uselimit, uploadlimit, limitby))
self.np.queue.put(slskmessages.SetDownloadLimit(config["transfers"]["downloadlimit"]))
self.np.ToggleRespondDistributed(None, settings=True)
# Modify GUI
self.UpdateDownloadFilters()
self.np.config.writeConfiguration()
if not config["ui"]["trayicon"] and self.TrayApp.HAVE_TRAYICON:
self.TrayApp.destroy_trayicon()
elif config["ui"]["trayicon"] and not self.TrayApp.HAVE_TRAYICON:
if self.TrayApp.trayicon is None and not self.TrayApp.TRAYICON_CREATED:
self.TrayApp.Load()
else:
self.TrayApp.HAVE_TRAYICON = True
self.TrayApp.Draw()
if needcompletion:
self.chatrooms.roomsctrl.UpdateCompletions()
self.privatechats.UpdateCompletions()
if needcolors:
self.chatrooms.roomsctrl.UpdateColours()
self.privatechats.UpdateColours()
self.Searches.UpdateColours()
self.downloads.UpdateColours()
self.uploads.UpdateColours()
self.userinfo.UpdateColours()
self.userbrowse.UpdateColours()
self.settingswindow.UpdateColours()
self.userlist.UpdateColours()
self.UpdateColours()
self.OnShowChatButtons()
for w in [self.ChatNotebook, self.PrivatechatNotebook, self.UserInfoNotebook, self.UserBrowseNotebook, self.SearchNotebook]:
w.set_tab_closers(config["ui"]["tabclosers"])
w.set_reorderable(config["ui"]["tab_reorderable"])
w.show_images(config["ui"]["tab_icons"])
w.set_text_colors(None)
try:
for tab in self.MainNotebook.get_children():
self.MainNotebook.set_tab_reorderable(tab, config["ui"]["tab_reorderable"])
except Exception:
# Old gtk
pass
tabLabels = [
self.ChatTabLabel,
self.PrivateChatTabLabel,
self.DownloadsTabLabel,
self.UploadsTabLabel,
self.SearchTabLabel,
self.UserInfoTabLabel,
self.UserBrowseTabLabel,
self.InterestsTabLabel
]
if "BuddiesTabLabel" in self.__dict__:
tabLabels.append(self.BuddiesTabLabel)
for label_tab in tabLabels:
if type(label_tab) is ImageLabel:
label_tab.show_image(config["ui"]["tab_icons"])
label_tab.set_text_color(None)
elif type(label_tab) is gtk.EventBox:
label_tab.get_child().show_image(config["ui"]["tab_icons"])
label_tab.get_child().set_text_color(None)
self.SetTabPositions()
if self.np.transfers is not None:
self.np.transfers.checkUploadQueue()
self.UpdateTransferButtons()
if needrescan:
self.needrescan = True
if msg == "ok" and self.needrescan:
self.needrescan = False
# Rescan public shares if needed
if not self.np.config.sections["transfers"]["friendsonly"]:
self.OnRescan()
# Rescan buddy shares if needed
if self.np.config.sections["transfers"]["enablebuddyshares"]:
self.OnBuddyRescan()
ConfigUnset = self.np.config.needConfig()
if ConfigUnset > 1:
if self.np.transfers is not None:
self.connect1.set_sensitive(0)
self.OnFastConfigure(None)
else:
if self.np.transfers is None:
self.connect1.set_sensitive(1)
self.pluginhandler.check_enabled()
def OnChangePassword(self, password):
self.np.queue.put(slskmessages.ChangePassword(password))
def OnBackupConfig(self, widget=None):
response = SaveFile(
self.MainWindow.get_toplevel(),
os.path.dirname(self.np.config.filename),
title="Pick a filename for config backup, or cancel to use a timestamp"
)
if response:
error, message = self.np.config.writeConfigBackup(response[0])
else:
error, message = self.np.config.writeConfigBackup()
if error:
self.logMessage("Error backing up config: %s" % message)
else:
self.logMessage("Config backed up to: %s" % message)
def AutoReplace(self, message):
if self.np.config.sections["words"]["replacewords"]:
autoreplaced = self.np.config.sections["words"]["autoreplaced"]
for word, replacement in list(autoreplaced.items()):
message = message.replace(word, replacement)
return message
def CensorChat(self, message):
if self.np.config.sections["words"]["censorwords"]:
filler = self.np.config.sections["words"]["censorfill"]
censored = self.np.config.sections["words"]["censored"]
for word in censored:
message = message.replace(word, filler * len(word))
return message
def getTabPosition(self, string):
if string in ("Top", "top", _("Top")):
position = gtk.PositionType.TOP
elif string in ("Bottom", "bottom", _("Bottom")):
position = gtk.PositionType.BOTTOM
elif string in ("Left", "left", _("Left")):
position = gtk.PositionType.LEFT
elif string in ("Right", "right", _("Right")):
position = gtk.PositionType.RIGHT
else:
position = gtk.PositionType.TOP
return position
def SetTabPositions(self):
ui = self.np.config.sections["ui"]
self.ChatNotebook.set_tab_pos(self.getTabPosition(ui["tabrooms"]))
self.ChatNotebook.set_tab_angle(ui["labelrooms"])
self.MainNotebook.set_tab_pos(self.getTabPosition(ui["tabmain"]))
for label_tab in [
self.ChatTabLabel,
self.PrivateChatTabLabel,
self.SearchTabLabel,
self.UserInfoTabLabel,
self.DownloadsTabLabel,
self.UploadsTabLabel,
self.UserBrowseTabLabel,
self.InterestsTabLabel
]:
label_tab.get_child().set_angle(ui["labelmain"])
if "BuddiesTabLabel" in self.__dict__:
self.BuddiesTabLabel.set_angle(ui["labelmain"])
self.PrivatechatNotebook.set_tab_pos(self.getTabPosition(ui["tabprivate"]))
self.PrivatechatNotebook.set_tab_angle(ui["labelprivate"])
self.UserInfoNotebook.set_tab_pos(self.getTabPosition(ui["tabinfo"]))
self.UserInfoNotebook.set_tab_angle(ui["labelinfo"])
self.UserBrowseNotebook.set_tab_pos(self.getTabPosition(ui["tabbrowse"]))
self.UserBrowseNotebook.set_tab_angle(ui["labelbrowse"])
self.SearchNotebook.set_tab_pos(self.getTabPosition(ui["tabsearch"]))
self.SearchNotebook.set_tab_angle(ui["labelsearch"])
def CreateIconButton(self, icon, icontype, callback, label=None):
button = gtk.Button()
button.connect_object("clicked", callback, "")
button.show()
Alignment = gtk.Alignment(xalign=0.5, yalign=0.5, xscale=0, yscale=0)
Alignment.show()
Hbox = gtk.HBox(False, 2)
Hbox.show()
Hbox.set_spacing(2)
image = gtk.Image()
image.set_padding(0, 0)
if icontype == "stock":
image.set_from_stock(icon, 4)
else:
image.set_from_pixbuf(icon)
image.show()
Hbox.pack_start(image, False, False, 0)
Alignment.add(Hbox)
if label:
Label = gtk.Label(label)
Label.set_padding(0, 0)
Label.show()
Hbox.pack_start(Label, False, False, 0)
button.add(Alignment)
return button
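# Build a single composite regular expression out of all configured
# download filters and store it in transfers->downloadregexp; the result
# has (roughly) the shape "(\\(filter1|filter2)$)", i.e. any filter must
# match at the end of the path, after a backslash. Filters that fail to
# compile are reported individually in the log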
def UpdateDownloadFilters(self):
processedfilters = []
outfilter = "(\\\\("
failed = {}
df = self.np.config.sections["transfers"]["downloadfilters"]
df.sort()
# Get Filters from config file and check their escaped status
# Test if they are valid regular expressions and save error messages
for item in df:
filter, escaped = item
if escaped:
dfilter = re.escape(filter)
dfilter = dfilter.replace("\\*", ".*")
else:
dfilter = filter
try:
re.compile("(" + dfilter + ")")
outfilter += dfilter
processedfilters.append(dfilter)
except Exception as e:
failed[dfilter] = e
processedfilters.append(dfilter)
if item is not df[-1]:
outfilter += "|"
# Crop trailing pipes
while outfilter[-1] == "|":
outfilter = outfilter[:-1]
outfilter += ")$)"
try:
re.compile(outfilter)
self.np.config.sections["transfers"]["downloadregexp"] = outfilter
# Send error messages for each failed filter to log window
if failed:
errors = ""
for filter, error in failed.items():
errors += "Filter: %s Error: %s " % (filter, error)
error = _("Error: %(num)d Download filters failed! %(error)s ") % {'num': len(failed), 'error': errors}
self.logMessage(error)
except Exception as e:
# Strange that individual filters _and_ the composite filter both fail
self.logMessage(_("Error: Download Filter failed! Verify your filters. Reason: %s" % e))
self.np.config.sections["transfers"]["downloadregexp"] = ""
def UpdateTransferButtons(self):
if self.np.config.sections["transfers"]["enabletransferbuttons"]:
self.DownloadButtons.show()
self.UploadButtons.show()
else:
self.UploadButtons.hide()
self.DownloadButtons.hide()
def OnProjectWebsite(self, widget):
url = "https://nicotine-plus.org/"
OpenUri(url, self.MainWindow)
def onProjectGithubPage(self, widget):
url = "https://github.com/Nicotine-Plus/nicotine-plus"
OpenUri(url, self.MainWindow)
def OnCheckLatest(self, widget):
checklatest(self.MainWindow)
def OnReportBug(self, widget):
url = "https://github.com/Nicotine-Plus/nicotine-plus/issues"
OpenUri(url, self.MainWindow)
def OnAbout(self, widget):
dlg = AboutDialog(self.MainWindow)
dlg.run()
dlg.destroy()
def OnAboutChatroomCommands(self, widget, parent=None):
if parent is None:
parent = self.MainWindow
dlg = AboutRoomsDialog(parent)
dlg.run()
dlg.destroy()
def OnAboutPrivateChatCommands(self, widget):
dlg = AboutPrivateDialog(self.MainWindow)
dlg.run()
dlg.destroy()
def OnAboutFilters(self, widget):
dlg = AboutFiltersDialog(self.MainWindow)
dlg.run()
dlg.destroy()
def OnShowChatButtons(self, widget=None):
if widget is not None:
show = widget.get_active()
self.np.config.sections["ui"]["chat_hidebuttons"] = (not show)
if self.chatrooms is None:
return
for room in list(self.chatrooms.roomsctrl.joinedrooms.values()):
room.OnShowChatButtons(not self.np.config.sections["ui"]["chat_hidebuttons"])
self.np.config.writeConfiguration()
def OnShowLog(self, widget):
show = widget.get_active()
self.np.config.sections["logging"]["logcollapsed"] = (not show)
if not show:
if self.debugLogBox in self.vpaned1.get_children():
self.vpaned1.remove(self.debugLogBox)
else:
if self.debugLogBox not in self.vpaned1.get_children():
self.vpaned1.pack2(self.debugLogBox, True, True)
ScrollBottom(self.LogScrolledWindow)
self.np.config.writeConfiguration()
def OnShowFlags(self, widget):
if self.chatrooms is None:
return
show = widget.get_active()
self.np.config.sections["columns"]["hideflags"] = (not show)
for room in self.chatrooms.roomsctrl.joinedrooms:
self.chatrooms.roomsctrl.joinedrooms[room].cols[1].set_visible(show)
self.np.config.sections["columns"]["chatrooms"][room][1] = int(show)
self.userlist.cols[1].set_visible(show)
self.np.config.sections["columns"]["userlist"][1] = int(show)
self.np.config.writeConfiguration()
def OnShowRoomList(self, widget):
show = widget.get_active()
self.np.config.sections["ui"]["roomlistcollapsed"] = (not show)
if not show:
if self.roomlist.vbox2 in self.vpaned3.get_children():
self.vpaned3.remove(self.roomlist.vbox2)
if self.userlist.userlistvbox not in self.vpaned3.get_children():
self.vpaned3.hide()
else:
if self.roomlist.vbox2 not in self.vpaned3.get_children():
self.vpaned3.pack2(self.roomlist.vbox2, True, True)
self.vpaned3.show()
self.np.config.writeConfiguration()
def OnToggleBuddyList(self, widget):
""" Function used to switch around the UI the BuddyList position """
tab = always = chatrooms = False
if self.buddylist_in_tab.get_active():
tab = True
if self.buddylist_always_visible.get_active():
always = True
if self.buddylist_in_chatrooms1.get_active():
chatrooms = True
if self.userlist.userlistvbox in self.MainNotebook.get_children():
if tab:
return
self.MainNotebook.remove_page(self.MainNotebook.page_num(self.userlist.userlistvbox))
if self.userlist.userlistvbox in self.vpanedm.get_children():
if always:
return
self.vpanedm.remove(self.userlist.userlistvbox)
if self.userlist.userlistvbox in self.vpaned3.get_children():
if chatrooms:
return
self.vpaned3.remove(self.userlist.userlistvbox)
if not self.show_room_list1.get_active():
if not chatrooms:
self.vpaned3.hide()
# Reinitialize the userlist to avoid an error that freezes the UI on recent GTK versions:
# Warning: invalid cast from 'GailPaned' to 'GailNotebook'
self.userlist = None
self.userlist = UserList(self)
if tab:
self.BuddiesTabLabel = ImageLabel(_("Buddy List"), self.images["empty"])
self.BuddiesTabLabel.show()
if self.userlist.userlistvbox not in self.MainNotebook.get_children():
self.MainNotebook.append_page(self.userlist.userlistvbox, self.BuddiesTabLabel)
if self.userlist.userlistvbox in self.MainNotebook.get_children():
self.MainNotebook.set_tab_reorderable(self.userlist.userlistvbox, self.np.config.sections["ui"]["tab_reorderable"])
self.userlist.BuddiesLabel.hide()
self.np.config.sections["ui"]["buddylistinchatrooms"] = 0
if chatrooms:
self.vpaned3.show()
if self.userlist.userlistvbox not in self.vpaned3.get_children():
self.vpaned3.pack1(self.userlist.userlistvbox, True, True)
self.userlist.BuddiesLabel.show()
self.np.config.sections["ui"]["buddylistinchatrooms"] = 1
if always:
self.vpanedm.show()
if self.userlist.userlistvbox not in self.vpanedm.get_children():
self.vpanedm.pack2(self.userlist.userlistvbox, True, True)
self.userlist.BuddiesLabel.show()
self.np.config.sections["ui"]["buddylistinchatrooms"] = 2
else:
self.vpanedm.hide()
self.np.config.writeConfiguration()
def OnCheckPrivileges(self, widget):
self.np.queue.put(slskmessages.CheckPrivileges())
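# Handle a "slsk://user/path" meta-url: a trailing slash requests the
# folder contents from the peer, anything else is downloaded as a
# single file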
def OnSoulSeek(self, url):
try:
user, file = urllib.request.url2pathname(url[7:]).split("/", 1)
if file[-1] == "/":
self.np.ProcessRequestToPeer(user, slskmessages.FolderContentsRequest(None, file[:-1].replace("/", "\\")))
else:
self.np.transfers.getFile(user, file.replace("/", "\\"), "")
except Exception:
self.logMessage(_("Invalid SoulSeek meta-url: %s") % url)
def SetClipboardURL(self, user, path):
self.clip.set_text("slsk://" + urllib.request.pathname2url("%s/%s" % (user, path.replace("\\", "/"))), -1)
self.clip_data = "slsk://" + urllib.request.pathname2url("%s/%s" % (user, path.replace("\\", "/")))
def OnSelectionGet(self, widget, data, info, timestamp):
data.set_text(self.clip_data, -1)
def LocalUserInfoRequest(self, user):
# Answer userinfo requests about our own login locally instead of over the network, for extra security
if user == self.np.config.sections["server"]["login"]:
try:
if self.np.config.sections["userinfo"]["pic"] != "":
if sys.platform == "win32":
userpic = "%s" % self.np.config.sections["userinfo"]["pic"]
else:
userpic = self.np.config.sections["userinfo"]["pic"]
if os.path.exists(userpic):
has_pic = True
f = open(userpic, 'rb')
pic = f.read()
f.close()
else:
has_pic = False
pic = None
else:
has_pic = False
pic = None
except Exception:
pic = None
descr = eval(self.np.config.sections["userinfo"]["descr"])  # the description appears to be stored as a Python string literal, hence the eval
if self.np.transfers is not None:
totalupl = self.np.transfers.getTotalUploadsAllowed()
queuesize = self.np.transfers.getUploadQueueSizes()[0]
slotsavail = self.np.transfers.allowNewUploads()
ua = self.np.config.sections["transfers"]["remotedownloads"]
if ua:
uploadallowed = self.np.config.sections["transfers"]["uploadallowed"]
else:
uploadallowed = ua
self.userinfo.ShowLocalInfo(user, descr, has_pic, pic, totalupl, queuesize, slotsavail, uploadallowed)
else:
self.np.ProcessRequestToPeer(user, slskmessages.UserInfoRequest(None), self.userinfo)
def BrowseUser(self, user):
""" Browse a user shares """
login = self.np.config.sections["server"]["login"]
if user is not None:
if user == login:
self.OnBrowsePublicShares(None)
else:
self.np.ProcessRequestToPeer(user, slskmessages.GetSharedFileList(None), self.userbrowse)
def OnBrowsePublicShares(self, widget):
""" Browse your own public shares """
login = self.np.config.sections["server"]["login"]
# Deactivate if we only share with buddies
if self.np.config.sections["transfers"]["friendsonly"]:
m = slskmessages.SharedFileList(None, {})
else:
m = slskmessages.SharedFileList(None, self.np.config.sections["transfers"]["sharedfilesstreams"])
m.parseNetworkMessage(m.makeNetworkMessage(nozlib=1), nozlib=1)
self.userbrowse.ShowInfo(login, m)
def OnBrowseBuddyShares(self, widget):
""" Browse your own buddy shares """
login = self.np.config.sections["server"]["login"]
# Show public shares if we don't have specific shares for buddies
if not self.np.config.sections["transfers"]["enablebuddyshares"]:
m = slskmessages.SharedFileList(None, self.np.config.sections["transfers"]["sharedfilesstreams"])
else:
m = slskmessages.SharedFileList(None, self.np.config.sections["transfers"]["bsharedfilesstreams"])
m.parseNetworkMessage(m.makeNetworkMessage(nozlib=1), nozlib=1)
self.userbrowse.ShowInfo(login, m)
def PrivateRoomRemoveUser(self, room, user):
self.np.queue.put(slskmessages.PrivateRoomRemoveUser(room, user))
def PrivateRoomAddUser(self, room, user):
self.np.queue.put(slskmessages.PrivateRoomAddUser(room, user))
def PrivateRoomAddOperator(self, room, user):
self.np.queue.put(slskmessages.PrivateRoomAddOperator(room, user))
def PrivateRoomRemoveOperator(self, room, user):
self.np.queue.put(slskmessages.PrivateRoomRemoveOperator(room, user))
def OnFocusIn(self, widget, event):
self.MainWindow.set_icon(self.images["n"])
self.got_focus = True
if self.MainWindow.get_urgency_hint():
self.MainWindow.set_urgency_hint(False)
def OnFocusOut(self, widget, event):
self.got_focus = False
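# Match function for the chat entry completion: compares the completion
# candidate against the word left of the cursor, honouring the
# configured minimum number of characters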
def EntryCompletionFindMatch(self, completion, entry_text, iter, widget):
model = completion.get_model()
item_text = model.get_value(iter, 0)
ix = widget.get_position()
config = self.np.config.sections["words"]
if entry_text is None or entry_text == "" or entry_text.isspace() or item_text is None:
return False
# Get word to the left of current position
if " " in entry_text:
split_key = entry_text[:ix].split(" ")[-1]
else:
split_key = entry_text
if split_key.isspace() or split_key == "" or len(split_key) < config["characters"]:
return False
# case-insensitive matching
if item_text.lower().startswith(split_key.lower()) and item_text.lower() != split_key.lower():
return True
return False
def EntryCompletionFoundMatch(self, completion, model, iter, widget):
current_text = widget.get_text()
ix = widget.get_position()
# If more than one word has been typed, throw away the word to
# the left of the cursor, because we want to replace it with the
# matching word
if " " in current_text:
prefix = " ".join(current_text[:ix].split(" ")[:-1])
suffix = " ".join(current_text[ix:].split(" "))
# add the matching word
new_text = "%s %s%s" % (prefix, model[iter][0], suffix)
# set back the whole text
widget.set_text(new_text)
# move the cursor at the end
widget.set_position(len(prefix) + len(model[iter][0]) + 1)
else:
new_text = "%s" % (model[iter][0])
widget.set_text(new_text)
widget.set_position(-1)
# stop the event propagation
return True
def OnPopupLogMenu(self, widget, event):
if event.button != 3:
return False
widget.emit_stop_by_name("button-press-event")
self.logpopupmenu.popup(None, None, None, None, event.button, event.time)
return True
#
# Everything related to the log window
#
def OnFindLogWindow(self, widget):
self.OnFindTextview(None, self.LogWindow)
def OnCopyLogWindow(self, widget):
bound = self.LogWindow.get_buffer().get_selection_bounds()
if bound is not None and len(bound) == 2:
start, end = bound
log = self.LogWindow.get_buffer().get_text(start, end, True)
self.clip.set_text(log, -1)
def OnCopyAllLogWindow(self, widget):
start, end = self.LogWindow.get_buffer().get_bounds()
log = self.LogWindow.get_buffer().get_text(start, end, True)
self.clip.set_text(log, -1)
def OnClearLogWindow(self, widget):
self.LogWindow.get_buffer().set_text("")
#
# Finding text in a text view
# Used in private msg, chatrooms and log window
#
def OnFindTextview(self, widget, textview, repeat=False):
if "FindDialog" not in self.__dict__:
self.FindDialog = FindDialog(
self,
_('Enter the string to search for:'),
"",
textview=textview,
modal=False
)
self.FindDialog.set_title(_('Nicotine+: Find string'))
self.FindDialog.set_icon(self.images["n"])
self.FindDialog.set_transient_for(self.MainWindow)
self.FindDialog.show()
self.FindDialog.connect("find-click", self.OnFindClicked)
return
if textview is not self.FindDialog.textview:
repeat = False
self.FindDialog.textview = textview
self.FindDialog.currentPosition = None
self.FindDialog.nextPosition = None
self.FindDialog.set_transient_for(self.MainWindow)
self.FindDialog.show()
self.FindDialog.deiconify()
if repeat:
self.OnFindClicked(widget, self.FindDialog.lastdirection)
else:
self.FindDialog.entry.set_text("")
def OnFindClicked(self, widget, direction):
if self.FindDialog.textview is None:
return
self.FindDialog.lastdirection = direction
textview = self.FindDialog.textview
buffer = textview.get_buffer()
start, end = buffer.get_bounds()
query = self.FindDialog.entry.get_text()
textview.emit("select-all", False)
if self.FindDialog.currentPosition is None:
self.FindDialog.currentPosition = buffer.create_mark(None, start, False)
self.FindDialog.nextPosition = buffer.create_mark(None, start, False)
second = 0 # noqa: F841
if direction == "next":
current = buffer.get_mark("insert")
iter = buffer.get_iter_at_mark(current)
match1 = iter.forward_search(query, gtk.TextSearchFlags.TEXT_ONLY, limit=None)
if match1 is not None and len(match1) == 2:
match_start, match_end = match1
buffer.place_cursor(match_end)
buffer.select_range(match_end, match_start)
textview.scroll_to_iter(match_start, 0, False, 0.5, 0.5)
else:
iter = start
match1 = iter.forward_search(query, gtk.TextSearchFlags.TEXT_ONLY, limit=None)
if match1 is not None and len(match1) == 2:
match_start, match_end = match1
buffer.place_cursor(match_end)
buffer.select_range(match_end, match_start)
textview.scroll_to_iter(match_start, 0, False, 0.5, 0.5)
elif direction == "previous":
current = buffer.get_mark("insert")
iter = buffer.get_iter_at_mark(current)
match1 = iter.backward_search(query, gtk.TextSearchFlags.TEXT_ONLY, limit=None)
if match1 is not None and len(match1) == 2:
match_start, match_end = match1
buffer.place_cursor(match_start)
buffer.select_range(match_start, match_end)
textview.scroll_to_iter(match_start, 0, False, 0.5, 0.5)
else:
iter = end
match1 = iter.backward_search(query, gtk.TextSearchFlags.TEXT_ONLY, limit=None)
if match1 is not None and len(match1) == 2:
match_start, match_end = match1
buffer.place_cursor(match_start)
buffer.select_range(match_start, match_end)
textview.scroll_to_iter(match_start, 0, False, 0.5, 0.5)
return
def OnAddThingILike(self, widget):
thing = utils.InputDialog(self.MainWindow, _("Add thing I like"), _("I like") + ":")
if thing and thing.lower() not in self.np.config.sections["interests"]["likes"]:
thing = thing.lower()
self.np.config.sections["interests"]["likes"].append(thing)
self.likes[thing] = self.likeslist.append([thing])
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.AddThingILike(thing))
def OnAddThingIDislike(self, widget):
thing = utils.InputDialog(self.MainWindow, _("Add thing I don't like"), _("I don't like") + ":")
if thing and thing.lower() not in self.np.config.sections["interests"]["dislikes"]:
thing = thing.lower()
self.np.config.sections["interests"]["dislikes"].append(thing)
self.dislikes[thing] = self.dislikeslist.append([thing])
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.AddThingIHate(thing))
def SetRecommendations(self, title, recom):
self.recommendationslist.clear()
for (thing, rating) in recom.items():
self.recommendationslist.append([thing, Humanize(rating), rating])
self.recommendationslist.set_sort_column_id(2, gtk.SortType.DESCENDING)
def SetUnrecommendations(self, title, recom):
self.unrecommendationslist.clear()
for (thing, rating) in recom.items():
self.unrecommendationslist.append([thing, Humanize(rating), rating])
self.unrecommendationslist.set_sort_column_id(2, gtk.SortType.ASCENDING)
def GlobalRecommendations(self, msg):
self.SetRecommendations("Global recommendations", msg.recommendations)
self.SetUnrecommendations("Unrecommendations", msg.unrecommendations)
def Recommendations(self, msg):
self.SetRecommendations("Recommendations", msg.recommendations)
self.SetUnrecommendations("Unrecommendations", msg.unrecommendations)
def ItemRecommendations(self, msg):
self.SetRecommendations(_("Recommendations for %s") % msg.thing, msg.recommendations)
self.SetUnrecommendations("Unrecommendations", msg.unrecommendations)
def OnGlobalRecommendationsClicked(self, widget):
self.np.queue.put(slskmessages.GlobalRecommendations())
def OnRecommendationsClicked(self, widget):
self.np.queue.put(slskmessages.Recommendations())
def OnSimilarUsersClicked(self, widget):
self.np.queue.put(slskmessages.SimilarUsers())
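# Fill the similar-users list with offline placeholders, then ask the
# server to watch each user so their status and stats get updated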
def SimilarUsers(self, msg):
self.recommendationuserslist.clear()
self.recommendationusers = {}
for user in list(msg.users.keys()):
iter = self.recommendationuserslist.append([self.images["offline"], user, "0", "0", 0, 0, 0])
self.recommendationusers[user] = iter
self.np.queue.put(slskmessages.AddUser(user))
def ItemSimilarUsers(self, msg):
self.recommendationuserslist.clear()
self.recommendationusers = {}
for user in msg.users:
iter = self.recommendationuserslist.append([self.images["offline"], user, "0", "0", 0, 0, 0])
self.recommendationusers[user] = iter
self.np.queue.put(slskmessages.AddUser(user))
def GetUserStatus(self, msg):
if msg.user not in self.recommendationusers:
return
img = self.GetStatusImage(msg.status)
self.recommendationuserslist.set(self.recommendationusers[msg.user], 0, img, 4, msg.status)
def GetUserStats(self, msg):
if msg.user not in self.recommendationusers:
return
self.recommendationuserslist.set(self.recommendationusers[msg.user], 2, HumanSpeed(msg.avgspeed), 3, Humanize(msg.files), 5, msg.avgspeed, 6, msg.files)
def OnPopupRUMenu(self, widget, event):
items = self.ru_popup_menu.get_children()
d = self.RecommendationUsersList.get_path_at_pos(int(event.x), int(event.y))
if not d:
return
path, column, x, y = d
user = self.recommendationuserslist.get_value(self.recommendationuserslist.get_iter(path), 1)
if event.button != 3:
if event.type == Gdk.EventType._2BUTTON_PRESS:
self.privatechats.SendMessage(user)
self.ChangeMainPage(None, "private")
return
self.ru_popup_menu.set_user(user)
items[7].set_active(user in [i[0] for i in self.np.config.sections["server"]["userlist"]])
items[8].set_active(user in self.np.config.sections["server"]["banlist"])
items[9].set_active(user in self.np.config.sections["server"]["ignorelist"])
self.ru_popup_menu.popup(None, None, None, None, event.button, event.time)
def OnRemoveThingILike(self, widget):
thing = self.til_popup_menu.get_user()
if thing not in self.np.config.sections["interests"]["likes"]:
return
self.likeslist.remove(self.likes[thing])
del self.likes[thing]
self.np.config.sections["interests"]["likes"].remove(thing)
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.RemoveThingILike(thing))
def OnRecommendItem(self, widget):
thing = self.til_popup_menu.get_user()
self.np.queue.put(slskmessages.ItemRecommendations(thing))
self.np.queue.put(slskmessages.ItemSimilarUsers(thing))
def OnPopupTILMenu(self, widget, event):
if event.button != 3:
return
d = self.LikesList.get_path_at_pos(int(event.x), int(event.y))
if not d:
return
path, column, x, y = d
iter = self.likeslist.get_iter(path)
thing = self.likeslist.get_value(iter, 0)
self.til_popup_menu.set_user(thing)
self.til_popup_menu.popup(None, None, None, None, event.button, event.time)
def OnRemoveThingIDislike(self, widget):
thing = self.tidl_popup_menu.get_user()
if thing not in self.np.config.sections["interests"]["dislikes"]:
return
self.dislikeslist.remove(self.dislikes[thing])
del self.dislikes[thing]
self.np.config.sections["interests"]["dislikes"].remove(thing)
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.RemoveThingIHate(thing))
def OnPopupTIDLMenu(self, widget, event):
if event.button != 3:
return
d = self.DislikesList.get_path_at_pos(int(event.x), int(event.y))
if not d:
return
path, column, x, y = d
iter = self.dislikeslist.get_iter(path)
thing = self.dislikeslist.get_value(iter, 0)
self.tidl_popup_menu.set_user(thing)
self.tidl_popup_menu.popup(None, None, None, None, event.button, event.time)
def OnLikeRecommendation(self, widget):
thing = widget.get_parent().get_user()
if widget.get_active() and thing not in self.np.config.sections["interests"]["likes"]:
self.np.config.sections["interests"]["likes"].append(thing)
self.likes[thing] = self.likeslist.append([thing])
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.AddThingILike(thing))
elif not widget.get_active() and thing in self.np.config.sections["interests"]["likes"]:
self.likeslist.remove(self.likes[thing])
del self.likes[thing]
self.np.config.sections["interests"]["likes"].remove(thing)
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.RemoveThingILike(thing))
def OnDislikeRecommendation(self, widget):
thing = widget.get_parent().get_user()
if widget.get_active() and thing not in self.np.config.sections["interests"]["dislikes"]:
self.np.config.sections["interests"]["dislikes"].append(thing)
self.dislikes[thing] = self.dislikeslist.append([thing])
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.AddThingIHate(thing))
elif not widget.get_active() and thing in self.np.config.sections["interests"]["dislikes"]:
self.dislikeslist.remove(self.dislikes[thing])
del self.dislikes[thing]
self.np.config.sections["interests"]["dislikes"].remove(thing)
self.np.config.writeConfiguration()
self.np.queue.put(slskmessages.RemoveThingIHate(thing))
def OnRecommendRecommendation(self, widget):
thing = self.r_popup_menu.get_user()
self.np.queue.put(slskmessages.ItemRecommendations(thing))
self.np.queue.put(slskmessages.ItemSimilarUsers(thing))
def OnRecommendSearch(self, widget):
thing = widget.get_parent().get_user()
self.SearchEntry.set_text(thing)
self.ChangeMainPage(None, "search")
def OnPopupRMenu(self, widget, event):
if event.button != 3:
return
d = self.RecommendationsList.get_path_at_pos(int(event.x), int(event.y))
if not d:
return
path, column, x, y = d
iter = self.recommendationslist.get_iter(path)
thing = self.recommendationslist.get_value(iter, 0)
items = self.r_popup_menu.get_children()
self.r_popup_menu.set_user(thing)
items[0].set_active(thing in self.np.config.sections["interests"]["likes"])
items[1].set_active(thing in self.np.config.sections["interests"]["dislikes"])
self.r_popup_menu.popup(None, None, None, None, event.button, event.time)
def OnPopupUnRecMenu(self, widget, event):
if event.button != 3:
return
d = self.UnrecommendationsList.get_path_at_pos(int(event.x), int(event.y))
if not d:
return
path, column, x, y = d
iter = self.unrecommendationslist.get_iter(path)
thing = self.unrecommendationslist.get_value(iter, 0)
items = self.ur_popup_menu.get_children()
self.ur_popup_menu.set_user(thing)
items[0].set_active(thing in self.np.config.sections["interests"]["likes"])
items[1].set_active(thing in self.np.config.sections["interests"]["dislikes"])
self.ur_popup_menu.popup(None, None, None, None, event.button, event.time)
def OnShowTickers(self, widget):
if not self.chatrooms:
return
show = widget.get_active()
self.np.config.sections["ticker"]["hide"] = (not show)
self.np.config.writeConfiguration()
for room in list(self.chatrooms.roomsctrl.joinedrooms.values()):
room.ShowTicker(show)
def RecommendationsExpanderStatus(self, widget):
if widget.get_property("expanded"):
self.RecommendationsVbox.set_child_packing(widget, False, True, 0, 0)
else:
self.RecommendationsVbox.set_child_packing(widget, True, True, 0, 0)
def GivePrivileges(self, user, days):
self.np.queue.put(slskmessages.GivePrivileges(user, days))
def MatchMainNotebox(self, tab):
if tab == self.chathbox:
name = "chatrooms" # Chatrooms
elif tab == self.privatevbox:
name = "private" # Private rooms
elif tab == self.downloadsvbox:
name = "downloads" # Downloads
elif tab == self.uploadsvbox:
name = "uploads" # Uploads
elif tab == self.searchvbox:
name = "search" # Searches
elif tab == self.userinfovbox:
name = "userinfo" # Userinfo
elif tab == self.userbrowsevbox:
name = "userbrowse" # User browse
elif tab == self.interestsvbox:
name = "interests" # Interests
elif tab == self.userlist.userlistvbox:
name = "userlist" # Buddy list
else:
# this should never happen, unless you've renamed a widget
return
return name
def MatchMainNamePage(self, tab):
if tab == "chatrooms":
child = self.chathbox # Chatrooms
elif tab == "private":
            child = self.privatevbox  # Private chats
elif tab == "downloads":
child = self.downloadsvbox # Downloads
elif tab == "uploads":
child = self.uploadsvbox # Uploads
elif tab == "search":
child = self.searchvbox # Searches
elif tab == "userinfo":
child = self.userinfovbox # Userinfo
elif tab == "userbrowse":
child = self.userbrowsevbox # User browse
elif tab == "interests":
child = self.interestsvbox # Interests
elif tab == "userlist":
child = self.userlist.userlistvbox # Buddy list
else:
# this should never happen, unless you've renamed a widget
return
return child
def ChangeMainPage(self, widget, tab):
page_num = self.MainNotebook.page_num
if tab == "chatrooms":
child = self.chathbox # Chatrooms
elif tab == "private":
child = self.privatevbox # Private rooms
elif tab == "downloads":
child = self.downloadsvbox # Downloads
elif tab == "uploads":
child = self.uploadsvbox # Uploads
elif tab == "search":
child = self.searchvbox # Searches
elif tab == "userinfo":
child = self.userinfovbox # Userinfo
elif tab == "userbrowse":
child = self.userbrowsevbox # User browse
elif tab == "interests":
child = self.interestsvbox # Interests
elif tab == "userlist":
child = self.userlist.userlistvbox # Buddy list
else:
# this should never happen, unless you've renamed a widget
return
if child in self.MainNotebook.get_children():
self.MainNotebook.set_current_page(page_num(child))
else:
self.ShowTab(widget, [tab, child])
def OnChatRooms(self, widget):
self.ChangeMainPage(widget, "chatrooms")
def OnPrivateChat(self, widget):
self.ChangeMainPage(widget, "private")
def OnDownloads(self, widget):
self.ChangeMainPage(widget, "downloads")
def OnUploads(self, widget):
self.ChangeMainPage(widget, "uploads")
def OnSearchFiles(self, widget):
self.ChangeMainPage(widget, "search")
def OnUserInfo(self, widget):
self.ChangeMainPage(widget, "userinfo")
def OnUserBrowse(self, widget):
self.ChangeMainPage(widget, "userbrowse")
def OnInterests(self, widget):
self.ChangeMainPage(widget, "interests")
def OnUserList(self, widget):
self.ChangeMainPage(widget, "userlist")
class Notifications:
def __init__(self, frame):
self.frame = frame
self.tts = []
self.tts_playing = False
self.continue_playing = False
def Add(self, location, user, room=None, tab=True):
hilites = self.frame.TrayApp.tray_status["hilites"]
if location == "rooms" and room is not None and user is not None:
if room not in hilites["rooms"]:
hilites["rooms"].append(room)
self.sound("room_nick", user, place=room)
self.frame.TrayApp.SetImage()
elif location == "private":
if user in hilites[location]:
hilites[location].remove(user)
hilites[location].append(user)
elif user not in hilites[location]:
hilites[location].append(user)
self.sound(location, user)
self.frame.TrayApp.SetImage()
if tab and self.frame.np.config.sections["ui"]["urgencyhint"] and not self.frame.got_focus:
self.frame.MainWindow.set_urgency_hint(True)
self.SetTitle(user)
def ClearPage(self, notebook, item):
(page, label, window, focused) = item
location = None
if notebook is self.frame.ChatNotebook:
location = "rooms"
self.Clear(location, room=label)
elif notebook is self.frame.PrivatechatNotebook:
location = "private"
self.Clear(location, user=label)
def Clear(self, location, user=None, room=None):
if location == "rooms" and room is not None:
if room in self.frame.TrayApp.tray_status["hilites"]["rooms"]:
self.frame.TrayApp.tray_status["hilites"]["rooms"].remove(room)
self.SetTitle(room)
elif location == "private":
if user in self.frame.TrayApp.tray_status["hilites"]["private"]:
self.frame.TrayApp.tray_status["hilites"]["private"].remove(user)
self.SetTitle(user)
self.frame.TrayApp.SetImage()
def SetTitle(self, user=None):
if self.frame.TrayApp.tray_status["hilites"]["rooms"] == [] and self.frame.TrayApp.tray_status["hilites"]["private"] == []:
# Reset Title
if self.frame.MainWindow.get_title() != _("Nicotine+") + " " + version:
self.frame.MainWindow.set_title(_("Nicotine+") + " " + version)
else:
# Private Chats have a higher priority
if len(self.frame.TrayApp.tray_status["hilites"]["private"]) > 0:
user = self.frame.TrayApp.tray_status["hilites"]["private"][-1]
self.frame.MainWindow.set_title(
_("Nicotine+") + " " + version + " :: " + _("Private Message from %(user)s") % {'user': user}
)
# Allow for the possibility the username is not available
elif len(self.frame.TrayApp.tray_status["hilites"]["rooms"]) > 0:
room = self.frame.TrayApp.tray_status["hilites"]["rooms"][-1]
if user is None:
self.frame.MainWindow.set_title(
_("Nicotine+") + " " + version + " :: " + _("You've been mentioned in the %(room)s room") % {'room': room}
)
else:
self.frame.MainWindow.set_title(
_("Nicotine+") + " " + version + " :: " + _("%(user)s mentioned you in the %(room)s room") % {'user': user, 'room': room}
)
def new_tts(self, message):
if not self.frame.np.config.sections["ui"]["speechenabled"]:
return
if message not in self.tts:
self.tts.append(message)
_thread.start_new_thread(self.play_tts, ())
def play_tts(self):
if self.tts_playing:
self.continue_playing = True
return
for message in self.tts[:]:
self.tts_player(message)
if message in self.tts:
self.tts.remove(message)
self.tts_playing = False
if self.continue_playing:
self.continue_playing = False
self.play_tts()
def tts_clean(self, message):
for i in ["_", "[", "]", "(", ")"]:
message = message.replace(i, " ")
return message
def tts_player(self, message):
self.tts_playing = True
executeCommand(self.frame.np.config.sections["ui"]["speechcommand"], message)
def sound(self, message, user, place=None):
if sys.platform == "win32":
return
if self.frame.np.config.sections["ui"]["speechenabled"]:
if message == "room_nick" and place is not None:
self.new_tts(
_("%(myusername)s, the user, %(username)s has mentioned your name in the room, %(place)s.") % {
"myusername": self.frame.np.config.sections["server"]["login"],
"username": user,
"place": place
}
)
elif message == "private":
self.new_tts(
_("%(myusername)s, you have recieved a private message from %(username)s.") % {
"myusername": self.frame.np.config.sections["server"]["login"],
"username": user
}
)
return
if "soundenabled" not in self.frame.np.config.sections["ui"] or not self.frame.np.config.sections["ui"]["soundenabled"]:
return
if "soundcommand" not in self.frame.np.config.sections["ui"]:
return
command = self.frame.np.config.sections["ui"]["soundcommand"]
        if message == "private":
            soundtitle = "private"
        elif message == "room_nick":
            soundtitle = "room_nick"
        else:
            # Unknown notification type, nothing to play
            return
        # Try the user's sound theme first, then the installed defaults, then
        # the source tree copy, and keep the first file that actually exists
        candidates = []
        if "soundtheme" in self.frame.np.config.sections["ui"]:
            candidates.append(os.path.expanduser(os.path.join(self.frame.np.config.sections["ui"]["soundtheme"], "%s.ogg" % soundtitle)))
        candidates.append("%s/share/nicotine/sounds/default/%s.ogg" % (sys.prefix, soundtitle))
        candidates.append("sounds/default/%s.ogg" % soundtitle)
        path = next((p for p in candidates if os.path.exists(p)), None)
        if path is not None:
if command == "Gstreamer (gst-python)":
if self.frame.gstreamer.player is None:
return
self.frame.gstreamer.play(path)
else:
os.system("%s %s &" % (command, path))
class TrayApp:
def __init__(self, frame):
self.frame = frame
self.trayicon = None
self.TRAYICON_CREATED = 0
self.HAVE_TRAYICON = False
self.tray_status = {
"hilites": {
"rooms": [],
"private": []
},
"status": "trayicon_disconnect",
"last": "trayicon_disconnect"
}
self.CreateMenu()
def HideUnhideWindow(self, widget):
if self.frame.is_mapped:
self.frame.MainWindow.unmap()
self.frame.is_mapped = False
else:
self.frame.MainWindow.map()
            # weird, but this allows us to easily move a minimized Nicotine+
            # from one desktop to another by clicking on the tray icon
if self.frame.minimized:
self.frame.MainWindow.present()
self.frame.MainWindow.grab_focus()
self.frame.is_mapped = True
self.frame.chatrooms.roomsctrl.ClearNotifications()
self.frame.privatechats.ClearNotifications()
def Create(self):
self.Load()
self.Draw()
def Load(self):
trayicon = gtk.StatusIcon()
self.trayicon = trayicon
self.HAVE_TRAYICON = True
def destroy_trayicon(self):
if not self.TRAYICON_CREATED:
return
self.TRAYICON_CREATED = 0
self.HAVE_TRAYICON = False
self.tray_popup_menu.destroy()
self.trayicon.set_visible(False)
self.trayicon = None
def Draw(self):
if not self.HAVE_TRAYICON or self.trayicon is None or self.TRAYICON_CREATED:
return
self.TRAYICON_CREATED = 1
self.trayicon.set_visible(True)
self.trayicon.connect("popup-menu", self.OnStatusIconPopup)
self.trayicon.connect("activate", self.OnStatusIconClicked)
self.SetImage(self.tray_status["status"])
self.SetToolTip("Nicotine+")
def SetImage(self, status=None):
# Abort if Trayicon module wasn't loaded
if not self.HAVE_TRAYICON or self.trayicon is None or not self.TRAYICON_CREATED:
return
try:
if status is not None:
self.tray_status["status"] = status
# Check for hilites, and display hilite icon if there is a room or private hilite
if self.tray_status["hilites"]["rooms"] == [] and self.tray_status["hilites"]["private"] == []:
# If there is no hilite, display the status
icon = self.tray_status["status"]
else:
icon = "trayicon_msg"
if icon != self.tray_status["last"]:
self.tray_status["last"] = icon
self.trayicon.set_from_pixbuf(self.frame.images[icon])
except Exception as e:
log.addwarning(_("ERROR: cannot set trayicon image: %(error)s") % {'error': e})
def CreateMenu(self):
try:
self.tray_popup_menu_server = popup0 = PopupMenu(self, False)
popup0.setup(
("#" + _("Connect"), self.frame.OnConnect),
("#" + _("Disconnect"), self.frame.OnDisconnect)
)
self.tray_popup_menu = popup = PopupMenu(self, False)
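            # Convention from Nicotine+'s PopupMenu helper (as used above and
            # below): a leading "#" marks a plain menu item, "$" a toggle
            # item, and a tuple beginning with 1 attaches a submenu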
popup.setup(
("#" + _("Hide / Show Nicotine+"), self.HideUnhideWindow),
(1, _("Server"), self.tray_popup_menu_server, self.OnPopupServer),
("#" + _("Settings"), self.frame.OnSettings),
("#" + _("Send Message"), self.frame.OnOpenPrivateChat),
("#" + _("Lookup a User's IP"), self.frame.OnGetAUsersIP),
("#" + _("Lookup a User's Info"), self.frame.OnGetAUsersInfo),
("#" + _("Lookup a User's Shares"), self.frame.OnGetAUsersShares),
("$" + _("Toggle Away"), self.frame.OnAway),
("#" + _("Quit"), self.frame.OnExit)
)
except Exception as e:
log.addwarning(_('ERROR: tray menu, %(error)s') % {'error': e})
def OnPopupServer(self, widget):
items = self.tray_popup_menu_server.get_children()
if self.tray_status["status"] == "trayicon_disconnect":
items[0].set_sensitive(True)
items[1].set_sensitive(False)
else:
items[0].set_sensitive(False)
items[1].set_sensitive(True)
return
def OnStatusIconClicked(self, status_icon):
self.HideUnhideWindow(None)
def OnStatusIconPopup(self, status_icon, button, activate_time):
if button == 3:
self.tray_popup_menu.popup(None, None, None, None, button, activate_time)
def SetToolTip(self, string):
if self.trayicon is not None:
self.trayicon.set_tooltip_text(string)
class gstreamer:
def __init__(self):
self.player = None
try:
gi.require_version('Gst', '1.0')
from gi.repository import Gst
Gst.init(None)
except Exception as error: # noqa: F841
return
self.gst = Gst
try:
self.player = Gst.ElementFactory.make("playbin", "player")
fakesink = Gst.ElementFactory.make('fakesink', "my-fakesink")
self.player.set_property("video-sink", fakesink)
except Exception as error:
log.addwarning(_("ERROR: Gstreamer-python could not play: %(error)s") % {'error': error})
self.gst = self.player = None
return
self.bus = self.player.get_bus()
self.bus.add_signal_watch()
self.bus.connect('message', self.on_gst_message)
def play(self, path):
self.player.set_property('uri', "file://" + path)
self.player.set_state(self.gst.State.PLAYING)
def on_gst_message(self, bus, message):
t = message.type
if t == self.gst.MessageType.EOS:
self.player.set_state(self.gst.State.NULL)
elif t == self.gst.MessageType.ERROR:
self.player.set_state(self.gst.State.NULL)
class MainApp:
def __init__(self, data_dir, config, plugins, trayicon, start_hidden, bindip, port):
self.frame = NicotineFrame(data_dir, config, plugins, trayicon, start_hidden, bindip, port)
def MainLoop(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
self.frame.MainWindow.show()
Gdk.threads_init()
Gdk.threads_enter() # Without this N+ hangs on XP (Vista and Linux don't have that problem)
gtk.main()
Gdk.threads_leave()
|
eLvErDe/nicotine-plus
|
pynicotine/gtkgui/frame.py
|
Python
|
gpl-3.0
| 137,582
|
"""
WSGI config for my_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_site.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
tridvaodin/Assignments-Valya-Maskaliova
|
project/my_site/wsgi.py
|
Python
|
gpl-2.0
| 389
|
from models import SpeciesRepresentation, Specimen, Species
from mediaman.views import MediaFileUploader, ParseError, RecordError
from haystack.query import SearchQuerySet
from bulkimport import BulkDataImportHandler, BulkImportForm
from django.shortcuts import render
import re
class ShellsImagesUploader(MediaFileUploader):
upload_types = (
('SI', 'Specimen Images', 'handle_specimen_image'),
)
def handle_specimen_image(self, formdata, uploaded_file, user):
species = self.name_to_id(uploaded_file.name, formdata['pathinfo0'])
sr = self.set_mediafile_attrs(SpeciesRepresentation(),
uploaded_file, formdata, user)
sr.image = uploaded_file
sr.species = species
sr.save()
@staticmethod
def name_to_id(filename, path=None):
        name = re.sub(r'\d*\..*$', '', filename)
results = SearchQuerySet().auto_query(name)
if len(results) < 1:
raise RecordError('Cannot find record matching "%s"' % name)
elif len(results) > 1:
raise RecordError('%s records found matching: "%s"' % (len(results), name))
else:
return results[0].object
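# Illustrative note (hypothetical filename): name_to_id strips a trailing
# image index and file extension before searching, e.g.
#
#   >>> re.sub(r'\d*\..*$', '', 'Conus_textile01.jpg')
#   'Conus_textile'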
def setup_bulk_importer():
# setup mappings
bi = BulkDataImportHandler()
bi.add_mapping(Species, {
'Class': 'class_name',
'Subclass': 'subclass',
'Order': 'order',
'Superfamily': 'superfamily',
'Family': 'family',
'Subfamily': 'subfamily',
'Genus': 'genus',
'Subgenus': 'subgenus',
'Species': 'species',
'Author Name / Year': 'authority',
'Synonyms': 'synonyms',
'Common Names': 'common_names',
'Geographic Range': 'geographic_range',
'Habitat': 'habitat',
'Shell Size': 'shell_size',
'Shell Sculpture': 'shell_sculpture',
'Shell Colour': 'shell_colour',
'References': 'references',
'NOTES': 'location_notes',
'Additional Information': 'additional_information',
})
bi.add_mapping(Specimen, {
'Specimen Collection Date': 'collection_date',
'Specimen Collection Location': 'collection_location',
'Specimen Collection Information': 'collection_information',
})
def link(first, specimen):
specimen.species = first
specimen.save()
bi.add_linking_function(link)
return bi
def upload_shells_spreadsheet(request):
if request.method == 'POST':
form = BulkImportForm(request.POST, request.FILES)
if form.is_valid():
spreadsheet = form.files['spreadsheet']
bi = setup_bulk_importer()
bi.process_spreadsheet(spreadsheet)
else:
form = BulkImportForm()
return render(request, 'spreadsheet_upload.html', {
'form': form
})
|
uq-eresearch/archaeology-reference-collections
|
apps/shells/admin_views.py
|
Python
|
bsd-3-clause
| 2,834
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61968.Common.Document import Document
class CustomerAccount(Document):
"""Assignment of a group of products and services purchased by the Customer through a CustomerAgreement, used as a mechanism for customer billing and payment. It contains common information from the various types of CustomerAgreements to create billings (invoices) for a Customer and receive payment.
"""
def __init__(self, PaymentTransactions=None, CustomerAgreements=None, *args, **kw_args):
"""Initialises a new 'CustomerAccount' instance.
@param PaymentTransactions: All payment transactions for this customer account.
@param CustomerAgreements: All agreements for this customer account.
"""
self._PaymentTransactions = []
self.PaymentTransactions = [] if PaymentTransactions is None else PaymentTransactions
self._CustomerAgreements = []
self.CustomerAgreements = [] if CustomerAgreements is None else CustomerAgreements
super(CustomerAccount, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["PaymentTransactions", "CustomerAgreements"]
_many_refs = ["PaymentTransactions", "CustomerAgreements"]
def getPaymentTransactions(self):
"""All payment transactions for this customer account.
"""
return self._PaymentTransactions
def setPaymentTransactions(self, value):
for x in self._PaymentTransactions:
x.CustomerAccount = None
for y in value:
y._CustomerAccount = self
self._PaymentTransactions = value
PaymentTransactions = property(getPaymentTransactions, setPaymentTransactions)
def addPaymentTransactions(self, *PaymentTransactions):
for obj in PaymentTransactions:
obj.CustomerAccount = self
def removePaymentTransactions(self, *PaymentTransactions):
for obj in PaymentTransactions:
obj.CustomerAccount = None
def getCustomerAgreements(self):
"""All agreements for this customer account.
"""
return self._CustomerAgreements
def setCustomerAgreements(self, value):
for x in self._CustomerAgreements:
x.CustomerAccount = None
for y in value:
y._CustomerAccount = self
self._CustomerAgreements = value
CustomerAgreements = property(getCustomerAgreements, setCustomerAgreements)
def addCustomerAgreements(self, *CustomerAgreements):
for obj in CustomerAgreements:
obj.CustomerAccount = self
def removeCustomerAgreements(self, *CustomerAgreements):
for obj in CustomerAgreements:
obj.CustomerAccount = None
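# A minimal usage sketch (hypothetical; assumes a PaymentTransaction class
# generated from the same CIM profile, whose CustomerAccount property mirrors
# the bidirectional reference maintained above):
#
#   >>> acct = CustomerAccount()
#   >>> tx = PaymentTransaction()
#   >>> acct.addPaymentTransactions(tx)
#   >>> tx.CustomerAccount is acct
#   True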
|
rwl/PyCIM
|
CIM14/IEC61968/Customers/CustomerAccount.py
|
Python
|
mit
| 3,833
|
# $Id$
#
# Copyright (C) 2001-2006 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" unit testing code for the C++ BitVects
"""
from __future__ import print_function
import unittest,os,sys
from rdkit.six.moves import cPickle
from rdkit.DataStructs import cDataStructs
klass = cDataStructs.SparseBitVect
def feq(n1,n2,tol=1e-4):
return abs(n1-n2)<=tol
def ieq(n1,n2):
return abs(n1-n2)==0
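# e.g. feq(0.33333, 1/3.) is True (difference within the 1e-4 tolerance),
# while ieq accepts only exact equality.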
class TestCase(unittest.TestCase):
def setUp(self):
print('\n%s: '%self.shortDescription(),end='')
sys.stdout.flush()
def testSparseIdx(self):
""" test indexing into SparseBitVects
"""
v = klass(10)
ok = 1
v[0] = 1
v[2] = 1
v[9] = 1
try:
v[10] = 1
except IndexError:
ok = 1
except:
assert 0, 'setting high bit should have failed with an IndexError'
else:
assert 0, 'setting high bit should have failed'
assert v[0] == 1, 'bad bit'
assert v[1] == 0, 'bad bit'
assert v[2] == 1, 'bad bit'
assert v[9] == 1, 'bad bit'
assert v[-1] == 1, 'bad bit'
assert v[-2] == 0, 'bad bit'
try:
foo = v[10]
except IndexError:
ok = 1
except:
assert 0, 'getting high bit should have failed with an IndexError'
else:
assert 0, 'getting high bit should have failed'
def testSparseBitGet(self):
""" test operations to get sparse bits
"""
v = klass(10)
v[0] = 1
v[2] = 1
v[6] = 1
assert len(v)==10,'len(SparseBitVect) failed'
assert v.GetNumOnBits()==3,'NumOnBits failed'
assert tuple(v.GetOnBits())==(0,2,6), 'GetOnBits failed'
def testSparseBitOps(self):
""" test bit operations on SparseBitVects
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
assert tuple((v1&v2).GetOnBits()) == (0,6),'binary & failed'
assert tuple((v1&v2).GetOnBits()) == (0,6),'binary & failed'
assert tuple((v1|v2).GetOnBits()) == (0,2,3,6),'binary | failed'
assert tuple((v1^v2).GetOnBits()) == (2,3),'binary ^ failed'
def testTanimotoSim(self):
""" test Tanimoto Similarity measure
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = klass(10)
v3[1] = 1
v3[4] = 1
v3[8] = 1
assert feq(cDataStructs.TanimotoSimilarity(v1,v1),1.0),'bad v1,v1 TanimotoSimilarity'
assert feq(cDataStructs.TanimotoSimilarity(v2,v2),1.0),'bad v2,v2 TanimotoSimilarity'
assert feq(cDataStructs.TanimotoSimilarity(v1,v2),0.5),'bad v1,v2 TanimotoSimilarity'
assert feq(cDataStructs.TanimotoSimilarity(v2,v1),0.5),'bad v2,v1 TanimotoSimilarity'
assert feq(cDataStructs.TanimotoSimilarity(v1,v3),0.0),'bad v1,v3 TanimotoSimilarity'
assert feq(cDataStructs.TanimotoSimilarity(v2,v3),0.0),'bad v2,v3 TanimotoSimilarity'
def testOnBitSim(self):
""" test On Bit Similarity measure
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = klass(10)
v3[1] = 1
v3[4] = 1
v3[8] = 1
assert feq(cDataStructs.OnBitSimilarity(v1,v1),1.0),'bad v1,v1 OnBitSimilarity'
assert feq(cDataStructs.OnBitSimilarity(v2,v2),1.0),'bad v2,v2 OnBitSimilarity'
assert feq(cDataStructs.OnBitSimilarity(v1,v2),0.5),'bad v1,v2 OnBitSimilarity'
assert feq(cDataStructs.OnBitSimilarity(v2,v1),0.5),'bad v2,v1 OnBitSimilarity'
assert feq(cDataStructs.OnBitSimilarity(v1,v3),0.0),'bad v1,v3 OnBitSimilarity'
assert feq(cDataStructs.OnBitSimilarity(v2,v3),0.0),'bad v2,v3 OnBitSimilarity'
def testNumBitsInCommon(self):
""" test calculation of Number of Bits in Common
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = klass(10)
v3[1] = 1
v3[4] = 1
v3[8] = 1
assert ieq(cDataStructs.NumBitsInCommon(v1,v1),10),'bad v1,v1 NumBitsInCommon'
assert ieq(cDataStructs.NumBitsInCommon(v2,v2),10),'bad v2,v2 NumBitsInCommon'
assert ieq(cDataStructs.NumBitsInCommon(v1,v2),8),'bad v1,v2 NumBitsInCommon'
assert ieq(cDataStructs.NumBitsInCommon(v2,v1),8),'bad v2,v1 NumBitsInCommon'
assert ieq(cDataStructs.NumBitsInCommon(v1,v3),4),'bad v1,v3 NumBitsInCommon'
assert ieq(cDataStructs.NumBitsInCommon(v2,v3),4),'bad v2,v3 NumBitsInCommon'
def testAllBitSim(self):
""" test All Bit Similarity measure
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = klass(10)
v3[1] = 1
v3[4] = 1
v3[8] = 1
assert feq(cDataStructs.AllBitSimilarity(v1,v1),1.0),'bad v1,v1 AllBitSimilarity'
assert feq(cDataStructs.AllBitSimilarity(v2,v2),1.0),'bad v2,v2 AllBitSimilarity'
assert feq(cDataStructs.AllBitSimilarity(v1,v2),0.8),'bad v1,v2 AllBitSimilarity'
assert feq(cDataStructs.AllBitSimilarity(v2,v1),0.8),'bad v2,v1 AllBitSimilarity'
assert feq(cDataStructs.AllBitSimilarity(v1,v3),0.4),'bad v1,v3 AllBitSimilarity'
assert feq(cDataStructs.AllBitSimilarity(v2,v3),0.4),'bad v2,v3 AllBitSimilarity'
def testStringOps(self):
""" test serialization operations
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
s = v1.ToBinary()
v2 = klass(s)
assert tuple(v2.GetOnBits())==tuple(v1.GetOnBits()),'To/From string failed'
def testOnBitsInCommon(self):
""" test OnBitsInCommon
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = cDataStructs.OnBitsInCommon(v1,v2)
assert tuple(v3)==(0,6),'bad on bits in common'
def testOffBitsInCommon(self):
""" test OffBitsInCommon
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
v3 = cDataStructs.OffBitsInCommon(v1,v2)
assert tuple(v3)==(1,4,5,7,8,9),'bad off bits in common'
def testOnBitProjSimilarity(self):
""" test OnBitProjSimilarity
"""
v1 = klass(10)
v1[1] = 1
v1[2] = 1
v1[3] = 1
v2 = klass(10)
v2[2] = 1
v2[3] = 1
res = cDataStructs.OnBitProjSimilarity(v1,v2)
assert feq(res[0],0.666667),'bad 1st OnBitsProjSimilarity'
assert feq(res[1],1.0),'bad 2nd OnBitsProjSimilarity'
res = cDataStructs.OnBitProjSimilarity(v2,v1)
assert feq(res[1],0.666667),'bad 1st OnBitsProjSimilarity'
assert feq(res[0],1.0),'bad 2nd OnBitsProjSimilarity'
def testOffBitProjSimilarity(self):
""" test OffBitProjSimilarity
"""
v1 = klass(10)
v1[1] = 1
v1[2] = 1
v1[3] = 1
v2 = klass(10)
v2[2] = 1
v2[3] = 1
res = cDataStructs.OffBitProjSimilarity(v1,v2)
assert feq(res[0],1.0),'bad 1st OffBitsProjSimilarity'
assert feq(res[1],0.875),'bad 2nd OffBitsProjSimilarity'
res = cDataStructs.OffBitProjSimilarity(v2,v1)
assert feq(res[1],1.0),'bad 1st OffBitsProjSimilarity'
assert feq(res[0],0.875),'bad 2nd OffBitsProjSimilarity'
def testPkl(self):
""" test pickling
"""
v1 = klass(10)
v1[1] = 1
v1[2] = 1
v1[3] = 1
pklName = 'foo.pkl'
outF = open(pklName,'wb+')
cPickle.dump(v1,outF)
outF.close()
inF = open(pklName,'rb')
v2 = cPickle.load(inF)
inF.close()
os.unlink(pklName)
assert tuple(v1.GetOnBits())==tuple(v2.GetOnBits()),'pkl failed'
def testFingerprints(self):
" test the parsing of daylight fingerprints "
#actual daylight output:
rawD="""
0,Cc1n[nH]c(=O)nc1N,.b+HHa.EgU6+ibEIr89.CpX0g8FZiXH+R0+Ps.mr6tg.2
1,Cc1n[nH]c(=O)[nH]c1=O,.b7HEa..ccc+gWEIr89.8lV8gOF3aXFFR.+Ps.mZ6lg.2
2,Cc1nnc(NN)nc1O,.H+nHq2EcY09y5EIr9e.8p50h0NgiWGNx4+Hm+Gbslw.2
3,Cc1nnc(N)nc1C,.1.HHa..cUI6i5E2rO8.Op10d0NoiWGVx.+Hm.Gb6lo.2
"""
dists="""0,0,1.000000
0,1,0.788991
0,2,0.677165
0,3,0.686957
1,1,1.000000
1,2,0.578125
1,3,0.591304
2,2,1.000000
2,3,0.732759
3,3,1.000000
"""
fps = []
for line in rawD.split('\n'):
if line:
sbv = klass(256)
id,smi,fp=line.split(',')
cDataStructs.InitFromDaylightString(sbv,fp)
fps.append(sbv)
ds = dists.split('\n')
whichd=0
for i in range(len(fps)):
for j in range(i,len(fps)):
idx1,idx2,tgt = ds[whichd].split(',')
whichd += 1
tgt = float(tgt)
dist = cDataStructs.TanimotoSimilarity(fps[i],fps[j])
assert feq(tgt,dist),'tanimoto between fps %d and %d failed'%(int(idx1),int(idx2))
def testFold(self):
""" test folding fingerprints
"""
v1 = klass(16)
v1[1] = 1
v1[12] = 1
v1[9] = 1
try:
v2 = cDataStructs.FoldFingerprint(v1)
except:
assert 0,'Fold with no args failed'
assert v1.GetNumBits()/2==v2.GetNumBits(),'bad num bits post folding'
try:
v2 = cDataStructs.FoldFingerprint(v1,2)
except:
assert 0,'Fold with arg failed'
assert v1.GetNumBits()/2==v2.GetNumBits(),'bad num bits post folding'
v2 = cDataStructs.FoldFingerprint(v1,4)
assert v1.GetNumBits()/4==v2.GetNumBits(),'bad num bits post folding'
def testOtherSims(self):
""" test other similarity measures
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
assert feq(cDataStructs.CosineSimilarity(v1,v2),.6667)
assert feq(cDataStructs.KulczynskiSimilarity(v1,v2),.6667)
assert feq(cDataStructs.DiceSimilarity(v1,v2),.6667)
assert feq(cDataStructs.SokalSimilarity(v1,v2),.3333)
assert feq(cDataStructs.McConnaugheySimilarity(v1,v2),.3333)
assert feq(cDataStructs.AsymmetricSimilarity(v1,v2),.6667)
assert feq(cDataStructs.BraunBlanquetSimilarity(v1,v2),.6667)
assert feq(cDataStructs.RusselSimilarity(v1,v2),.2000)
assert feq(cDataStructs.RogotGoldbergSimilarity(v1,v2),.7619)
def testQuickSims(self):
""" the asymmetric similarity stuff (bv,pkl)
"""
v1 = klass(10)
v1[0] = 1
v1[2] = 1
v1[6] = 1
v2 = klass(10)
v2[0] = 1
v2[3] = 1
v2[6] = 1
pkl = v2.ToBinary()
v2 = pkl
assert feq(cDataStructs.CosineSimilarity(v1,v2),.6667)
assert feq(cDataStructs.KulczynskiSimilarity(v1,v2),.6667)
assert feq(cDataStructs.DiceSimilarity(v1,v2),.6667)
assert feq(cDataStructs.SokalSimilarity(v1,v2),.3333)
assert feq(cDataStructs.McConnaugheySimilarity(v1,v2),.3333)
assert feq(cDataStructs.AsymmetricSimilarity(v1,v2),.6667)
assert feq(cDataStructs.BraunBlanquetSimilarity(v1,v2),.6667)
assert feq(cDataStructs.RusselSimilarity(v1,v2),.2000)
assert feq(cDataStructs.RogotGoldbergSimilarity(v1,v2),.7619)
if __name__ == '__main__':
unittest.main()
|
soerendip42/rdkit
|
rdkit/DataStructs/UnitTestcBitVect.py
|
Python
|
bsd-3-clause
| 10,981
|
from nose.tools import assert_equal
from sirl.models.base import LocalController
from sirl.models.base import MDPReward
from sirl.models.base import MDP
# from sirl.models import AlgoParams
def test_local_controller():
try:
LocalController()
assert False, 'Abstract method instantiated'
except TypeError:
pass
class ConcreteLC(LocalController):
def __init__(self, world, kind):
super(ConcreteLC, self).__init__(world, kind)
def __call__(self, state, action, duration, max_speed):
return 42
def trajectory(self, start, target, max_speed):
return None
subclass = ConcreteLC(None, kind='some-name')
assert_equal(subclass.kind, 'some-name')
assert_equal(subclass(1, 2, 3, 4), 42)
def test_mdp_reward():
try:
MDPReward()
assert False, 'Abstract method instantiated'
except TypeError:
pass
class ConcreteReward(MDPReward):
def __init__(self, world, kind):
super(ConcreteReward, self).__init__(world, kind)
def __call__(self, state_a, state_b):
return 42
@property
def dim(self):
return 21
subclass = ConcreteReward(world=None, kind='some-name')
assert_equal(subclass.kind, 'some-name')
assert_equal(subclass(1, 2), 42)
|
makokal/scalable-irl
|
sirl/tests/test_models.py
|
Python
|
bsd-3-clause
| 1,351
|
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for iRMC Power Driver
"""
import mock
from oslo_config import cfg
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules.irmc import common as irmc_common
from ironic.drivers.modules.irmc import deploy as irmc_deploy
from ironic.drivers.modules.irmc import power as irmc_power
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
INFO_DICT = db_utils.get_test_irmc_info()
CONF = cfg.CONF
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
class IRMCPowerInternalMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(IRMCPowerInternalMethodsTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake_irmc')
driver_info = INFO_DICT
self.node = db_utils.create_test_node(
driver='fake_irmc',
driver_info=driver_info,
instance_uuid='instance_uuid_123')
@mock.patch.object(irmc_power, '_attach_boot_iso_if_needed')
def test__set_power_state_power_on_ok(
self,
_attach_boot_iso_if_needed_mock,
get_irmc_client_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
_attach_boot_iso_if_needed_mock.assert_called_once_with(task)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_ON)
def test__set_power_state_power_off_ok(self,
get_irmc_client_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_OFF)
@mock.patch.object(irmc_power, '_attach_boot_iso_if_needed')
def test__set_power_state_power_reboot_ok(
self,
_attach_boot_iso_if_needed_mock,
get_irmc_client_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.REBOOT
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
_attach_boot_iso_if_needed_mock.assert_called_once_with(task)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_RESET)
def test__set_power_state_invalid_target_state(self,
get_irmc_client_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
irmc_power._set_power_state,
task,
states.ERROR)
def test__set_power_state_scci_exception(self,
get_irmc_client_mock):
irmc_client = get_irmc_client_mock.return_value
irmc_client.side_effect = Exception()
irmc_power.scci.SCCIClientError = Exception
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.IRMCOperationError,
irmc_power._set_power_state,
task,
states.POWER_ON)
@mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
autospec=True)
@mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
autospec=True)
def test__attach_boot_iso_if_needed(
self,
setup_vmedia_mock,
set_boot_device_mock,
get_irmc_client_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.provision_state = states.ACTIVE
task.node.driver_internal_info['irmc_boot_iso'] = 'boot-iso'
irmc_power._attach_boot_iso_if_needed(task)
setup_vmedia_mock.assert_called_once_with(task, 'boot-iso')
set_boot_device_mock.assert_called_once_with(
task, boot_devices.CDROM)
@mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
autospec=True)
@mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
autospec=True)
def test__attach_boot_iso_if_needed_on_rebuild(
self,
setup_vmedia_mock,
set_boot_device_mock,
get_irmc_client_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.provision_state = states.DEPLOYING
task.node.driver_internal_info['irmc_boot_iso'] = 'boot-iso'
irmc_power._attach_boot_iso_if_needed(task)
self.assertFalse(setup_vmedia_mock.called)
self.assertFalse(set_boot_device_mock.called)
class IRMCPowerTestCase(db_base.DbTestCase):
def setUp(self):
super(IRMCPowerTestCase, self).setUp()
driver_info = INFO_DICT
mgr_utils.mock_the_extension_manager(driver="fake_irmc")
self.node = obj_utils.create_test_node(self.context,
driver='fake_irmc',
driver_info=driver_info)
def test_get_properties(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
properties = task.driver.get_properties()
for prop in irmc_common.COMMON_PROPERTIES:
self.assertIn(prop, properties)
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate(self, mock_drvinfo):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.power.validate(task)
mock_drvinfo.assert_called_once_with(task.node)
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate_fail(self, mock_drvinfo):
side_effect = iter([exception.InvalidParameterValue("Invalid Input")])
mock_drvinfo.side_effect = side_effect
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.power.validate,
task)
@mock.patch('ironic.drivers.modules.irmc.power.ipmitool.IPMIPower',
spec_set=True, autospec=True)
def test_get_power_state(self, mock_IPMIPower):
ipmi_power = mock_IPMIPower.return_value
ipmi_power.get_power_state.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(states.POWER_ON,
task.driver.power.get_power_state(task))
ipmi_power.get_power_state.assert_called_once_with(task)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
def test_set_power_state(self, mock_set_power):
mock_set_power.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_ON)
mock_set_power.assert_called_once_with(task, states.POWER_ON)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_reboot(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_ON
task.driver.power.reboot(task)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.REBOOT)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_power_on(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_OFF
task.driver.power.reboot(task)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.POWER_ON)
|
redhat-openstack/ironic
|
ironic/tests/drivers/irmc/test_power.py
|
Python
|
apache-2.0
| 10,352
|
import csv
import indicoio
from indicoio.custom import Collection
# insert your API key
indicoio.config.api_key = "YOUR_API_KEY"
def clean_article(article):
return article.replace("\n", " ").decode('cp1252').encode('utf-8', 'replace')
def test_model(test_list):
cleaned_test = [clean_article(text) for row in test_list for text in row]
print "Articles cleaned and ready for analysis!"
for data in cleaned_test:
print collection.predict(data)
if __name__ == "__main__":
# Replace "YOUR_COLLECTION_NAME" with the name you gave your dataset in CrowdLabel
collection = Collection("YOUR_COLLECTION_NAME")
with open('test_articles.csv', 'rU') as f:
test_list = csv.reader(f)
test_model(test_list)
|
IndicoDataSolutions/SuperCell
|
personalized_content_filter/main.py
|
Python
|
mit
| 752
|
# coding: utf-8
# Author: Johannes Schönberger
#
# License: BSD 3 clause
import numpy as np
import warnings
from ..base import BaseEstimator, MetaEstimatorMixin, RegressorMixin, clone
from ..base import MultiOutputMixin
from ..utils import check_random_state, check_consistent_length
from ..utils.random import sample_without_replacement
from ..utils.validation import check_is_fitted, _check_sample_weight
from ._base import LinearRegression
from ..utils.validation import has_fit_parameter
from ..exceptions import ConvergenceWarning
_EPSILON = np.spacing(1)
def _dynamic_max_trials(n_inliers, n_samples, min_samples, probability):
"""Determine number trials such that at least one outlier-free subset is
sampled for the given inlier/outlier ratio.
Parameters
----------
n_inliers : int
Number of inliers in the data.
n_samples : int
Total number of samples in the data.
min_samples : int
Minimum number of samples chosen randomly from original data.
probability : float
Probability (confidence) that one outlier-free sample is generated.
Returns
-------
trials : int
Number of trials.
"""
inlier_ratio = n_inliers / float(n_samples)
nom = max(_EPSILON, 1 - probability)
denom = max(_EPSILON, 1 - inlier_ratio ** min_samples)
if nom == 1:
return 0
if denom == 1:
return float('inf')
return abs(float(np.ceil(np.log(nom) / np.log(denom))))
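# Worked example (illustrative): with 80 inliers out of 100 samples,
# min_samples=2 and probability=0.99, the inlier ratio is e = 0.8, so
# N >= log(1 - 0.99) / log(1 - 0.8**2) = log(0.01) / log(0.36) ~= 4.51,
# which np.ceil rounds up to:
#
#   >>> _dynamic_max_trials(80, 100, 2, 0.99)
#   5.0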
class RANSACRegressor(MetaEstimatorMixin, RegressorMixin,
MultiOutputMixin, BaseEstimator):
"""RANSAC (RANdom SAmple Consensus) algorithm.
RANSAC is an iterative algorithm for the robust estimation of parameters
from a subset of inliers from the complete data set.
Read more in the :ref:`User Guide <ransac_regression>`.
Parameters
----------
base_estimator : object, default=None
Base estimator object which implements the following methods:
* `fit(X, y)`: Fit model to given training data and target values.
* `score(X, y)`: Returns the mean accuracy on the given test data,
which is used for the stop criterion defined by `stop_score`.
Additionally, the score is used to decide which of two equally
large consensus sets is chosen as the better one.
* `predict(X)`: Returns predicted values using the linear model,
which is used to compute residual error using loss function.
If `base_estimator` is None, then
:class:`~sklearn.linear_model.LinearRegression` is used for
target values of dtype float.
Note that the current implementation only supports regression
estimators.
min_samples : int (>= 1) or float ([0, 1]), default=None
Minimum number of samples chosen randomly from original data. Treated
as an absolute number of samples for `min_samples >= 1`, treated as a
        relative number `ceil(min_samples * X.shape[0])` for
`min_samples < 1`. This is typically chosen as the minimal number of
samples necessary to estimate the given `base_estimator`. By default a
``sklearn.linear_model.LinearRegression()`` estimator is assumed and
`min_samples` is chosen as ``X.shape[1] + 1``.
residual_threshold : float, default=None
Maximum residual for a data sample to be classified as an inlier.
By default the threshold is chosen as the MAD (median absolute
deviation) of the target values `y`.
is_data_valid : callable, default=None
This function is called with the randomly selected data before the
model is fitted to it: `is_data_valid(X, y)`. If its return value is
False the current randomly chosen sub-sample is skipped.
is_model_valid : callable, default=None
This function is called with the estimated model and the randomly
selected data: `is_model_valid(model, X, y)`. If its return value is
False the current randomly chosen sub-sample is skipped.
Rejecting samples with this function is computationally costlier than
with `is_data_valid`. `is_model_valid` should therefore only be used if
the estimated model is needed for making the rejection decision.
max_trials : int, default=100
Maximum number of iterations for random sample selection.
max_skips : int, default=np.inf
Maximum number of iterations that can be skipped due to finding zero
inliers or invalid data defined by ``is_data_valid`` or invalid models
defined by ``is_model_valid``.
.. versionadded:: 0.19
stop_n_inliers : int, default=np.inf
Stop iteration if at least this number of inliers are found.
stop_score : float, default=np.inf
        Stop iteration if the score is greater than or equal to this threshold.
stop_probability : float in range [0, 1], default=0.99
RANSAC iteration stops if at least one outlier-free set of the training
data is sampled in RANSAC. This requires to generate at least N
samples (iterations)::
N >= log(1 - probability) / log(1 - e**m)
where the probability (confidence) is typically set to high value such
as 0.99 (the default) and e is the current fraction of inliers w.r.t.
the total number of samples.
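        For example, with e = 0.8 and m = 2 this gives
        N >= log(0.01) / log(0.36), i.e. at least 5 iterations.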
loss : string, callable, default='absolute_error'
String inputs, 'absolute_error' and 'squared_error' are supported which
find the absolute error and squared error per sample respectively.
If ``loss`` is a callable, then it should be a function that takes
two arrays as inputs, the true and predicted value and returns a 1-D
array with the i-th value of the array corresponding to the loss
on ``X[i]``.
If the loss on a sample is greater than the ``residual_threshold``,
then this sample is classified as an outlier.
.. versionadded:: 0.18
.. deprecated:: 1.0
The loss 'squared_loss' was deprecated in v1.0 and will be removed
in version 1.2. Use `loss='squared_error'` which is equivalent.
.. deprecated:: 1.0
The loss 'absolute_loss' was deprecated in v1.0 and will be removed
in version 1.2. Use `loss='absolute_error'` which is equivalent.
random_state : int, RandomState instance, default=None
The generator used to initialize the centers.
Pass an int for reproducible output across multiple function calls.
See :term:`Glossary <random_state>`.
Attributes
----------
estimator_ : object
Best fitted model (copy of the `base_estimator` object).
n_trials_ : int
Number of random selection trials until one of the stop criteria is
met. It is always ``<= max_trials``.
inlier_mask_ : bool array of shape [n_samples]
Boolean mask of inliers classified as ``True``.
n_skips_no_inliers_ : int
Number of iterations skipped due to finding zero inliers.
.. versionadded:: 0.19
n_skips_invalid_data_ : int
Number of iterations skipped due to invalid data defined by
``is_data_valid``.
.. versionadded:: 0.19
n_skips_invalid_model_ : int
Number of iterations skipped due to an invalid model defined by
``is_model_valid``.
.. versionadded:: 0.19
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> from sklearn.linear_model import RANSACRegressor
>>> from sklearn.datasets import make_regression
>>> X, y = make_regression(
... n_samples=200, n_features=2, noise=4.0, random_state=0)
>>> reg = RANSACRegressor(random_state=0).fit(X, y)
>>> reg.score(X, y)
0.9885...
>>> reg.predict(X[:1,])
array([-31.9417...])
References
----------
.. [1] https://en.wikipedia.org/wiki/RANSAC
.. [2] https://www.sri.com/sites/default/files/publications/ransac-publication.pdf
.. [3] http://www.bmva.org/bmvc/2009/Papers/Paper355/Paper355.pdf
""" # noqa: E501
def __init__(self, base_estimator=None, *, min_samples=None,
residual_threshold=None, is_data_valid=None,
is_model_valid=None, max_trials=100, max_skips=np.inf,
stop_n_inliers=np.inf, stop_score=np.inf,
stop_probability=0.99, loss='absolute_error',
random_state=None):
self.base_estimator = base_estimator
self.min_samples = min_samples
self.residual_threshold = residual_threshold
self.is_data_valid = is_data_valid
self.is_model_valid = is_model_valid
self.max_trials = max_trials
self.max_skips = max_skips
self.stop_n_inliers = stop_n_inliers
self.stop_score = stop_score
self.stop_probability = stop_probability
self.random_state = random_state
self.loss = loss
def fit(self, X, y, sample_weight=None):
"""Fit estimator using RANSAC algorithm.
Parameters
----------
X : array-like or sparse matrix, shape [n_samples, n_features]
Training data.
y : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values.
sample_weight : array-like of shape (n_samples,), default=None
            Individual weights for each sample. Raises an error if
            sample_weight is passed and the base_estimator's fit method
            does not support it.
.. versionadded:: 0.18
Raises
------
ValueError
If no valid consensus set could be found. This occurs if
`is_data_valid` and `is_model_valid` return False for all
`max_trials` randomly chosen sub-samples.
"""
# Need to validate separately here.
        # We can't pass multi_output=True because that would allow y to be csr.
check_X_params = dict(accept_sparse='csr')
check_y_params = dict(ensure_2d=False)
X, y = self._validate_data(X, y, validate_separately=(check_X_params,
check_y_params))
check_consistent_length(X, y)
if self.base_estimator is not None:
base_estimator = clone(self.base_estimator)
else:
base_estimator = LinearRegression()
if self.min_samples is None:
# assume linear model by default
min_samples = X.shape[1] + 1
elif 0 < self.min_samples < 1:
min_samples = np.ceil(self.min_samples * X.shape[0])
elif self.min_samples >= 1:
if self.min_samples % 1 != 0:
raise ValueError("Absolute number of samples must be an "
"integer value.")
min_samples = self.min_samples
else:
raise ValueError("Value for `min_samples` must be scalar and "
"positive.")
if min_samples > X.shape[0]:
raise ValueError("`min_samples` may not be larger than number "
"of samples: n_samples = %d." % (X.shape[0]))
if self.stop_probability < 0 or self.stop_probability > 1:
raise ValueError("`stop_probability` must be in range [0, 1].")
if self.residual_threshold is None:
# MAD (median absolute deviation)
residual_threshold = np.median(np.abs(y - np.median(y)))
else:
residual_threshold = self.residual_threshold
# TODO: Remove absolute_loss in v1.2.
if self.loss in ("absolute_error", "absolute_loss"):
if self.loss == "absolute_loss":
warnings.warn(
"The loss 'absolute_loss' was deprecated in v1.0 and will "
"be removed in version 1.2. Use `loss='absolute_error'` "
"which is equivalent.",
FutureWarning
)
if y.ndim == 1:
loss_function = lambda y_true, y_pred: np.abs(y_true - y_pred)
else:
loss_function = lambda \
y_true, y_pred: np.sum(np.abs(y_true - y_pred), axis=1)
# TODO: Remove squared_loss in v1.2.
elif self.loss in ("squared_error", "squared_loss"):
if self.loss == "squared_loss":
warnings.warn(
"The loss 'squared_loss' was deprecated in v1.0 and will "
"be removed in version 1.2. Use `loss='squared_error'` "
"which is equivalent.",
FutureWarning
)
if y.ndim == 1:
loss_function = lambda y_true, y_pred: (y_true - y_pred) ** 2
else:
loss_function = lambda \
y_true, y_pred: np.sum((y_true - y_pred) ** 2, axis=1)
elif callable(self.loss):
loss_function = self.loss
else:
raise ValueError(
"loss should be 'absolute_error', 'squared_error' or a "
"callable. Got %s. " % self.loss)
random_state = check_random_state(self.random_state)
        try:  # Not all estimators accept a random_state
base_estimator.set_params(random_state=random_state)
except ValueError:
pass
estimator_fit_has_sample_weight = has_fit_parameter(base_estimator,
"sample_weight")
estimator_name = type(base_estimator).__name__
if (sample_weight is not None and not
estimator_fit_has_sample_weight):
raise ValueError("%s does not support sample_weight. Samples"
" weights are only used for the calibration"
" itself." % estimator_name)
if sample_weight is not None:
sample_weight = _check_sample_weight(sample_weight, X)
n_inliers_best = 1
score_best = -np.inf
inlier_mask_best = None
X_inlier_best = None
y_inlier_best = None
inlier_best_idxs_subset = None
self.n_skips_no_inliers_ = 0
self.n_skips_invalid_data_ = 0
self.n_skips_invalid_model_ = 0
# number of data samples
n_samples = X.shape[0]
sample_idxs = np.arange(n_samples)
self.n_trials_ = 0
max_trials = self.max_trials
while self.n_trials_ < max_trials:
self.n_trials_ += 1
if (self.n_skips_no_inliers_ + self.n_skips_invalid_data_ +
self.n_skips_invalid_model_) > self.max_skips:
break
# choose random sample set
subset_idxs = sample_without_replacement(n_samples, min_samples,
random_state=random_state)
X_subset = X[subset_idxs]
y_subset = y[subset_idxs]
# check if random sample set is valid
if (self.is_data_valid is not None
and not self.is_data_valid(X_subset, y_subset)):
self.n_skips_invalid_data_ += 1
continue
# fit model for current random sample set
if sample_weight is None:
base_estimator.fit(X_subset, y_subset)
else:
base_estimator.fit(X_subset, y_subset,
sample_weight=sample_weight[subset_idxs])
# check if estimated model is valid
if (self.is_model_valid is not None and not
self.is_model_valid(base_estimator, X_subset, y_subset)):
self.n_skips_invalid_model_ += 1
continue
# residuals of all data for current random sample model
y_pred = base_estimator.predict(X)
residuals_subset = loss_function(y, y_pred)
# classify data into inliers and outliers
inlier_mask_subset = residuals_subset < residual_threshold
n_inliers_subset = np.sum(inlier_mask_subset)
            # fewer inliers -> skip current random sample
if n_inliers_subset < n_inliers_best:
self.n_skips_no_inliers_ += 1
continue
# extract inlier data set
inlier_idxs_subset = sample_idxs[inlier_mask_subset]
X_inlier_subset = X[inlier_idxs_subset]
y_inlier_subset = y[inlier_idxs_subset]
# score of inlier data set
score_subset = base_estimator.score(X_inlier_subset,
y_inlier_subset)
# same number of inliers but worse score -> skip current random
# sample
if (n_inliers_subset == n_inliers_best
and score_subset < score_best):
continue
# save current random sample as best sample
n_inliers_best = n_inliers_subset
score_best = score_subset
inlier_mask_best = inlier_mask_subset
X_inlier_best = X_inlier_subset
y_inlier_best = y_inlier_subset
inlier_best_idxs_subset = inlier_idxs_subset
max_trials = min(
max_trials,
_dynamic_max_trials(n_inliers_best, n_samples,
min_samples, self.stop_probability))
# break if sufficient number of inliers or score is reached
if n_inliers_best >= self.stop_n_inliers or \
score_best >= self.stop_score:
break
# if none of the iterations met the required criteria
if inlier_mask_best is None:
if ((self.n_skips_no_inliers_ + self.n_skips_invalid_data_ +
self.n_skips_invalid_model_) > self.max_skips):
raise ValueError(
"RANSAC skipped more iterations than `max_skips` without"
" finding a valid consensus set. Iterations were skipped"
" because each randomly chosen sub-sample failed the"
" passing criteria. See estimator attributes for"
" diagnostics (n_skips*).")
else:
raise ValueError(
"RANSAC could not find a valid consensus set. All"
" `max_trials` iterations were skipped because each"
" randomly chosen sub-sample failed the passing criteria."
" See estimator attributes for diagnostics (n_skips*).")
else:
if (self.n_skips_no_inliers_ + self.n_skips_invalid_data_ +
self.n_skips_invalid_model_) > self.max_skips:
warnings.warn("RANSAC found a valid consensus set but exited"
" early due to skipping more iterations than"
" `max_skips`. See estimator attributes for"
" diagnostics (n_skips*).",
ConvergenceWarning)
# estimate final model using all inliers
if sample_weight is None:
base_estimator.fit(X_inlier_best, y_inlier_best)
else:
base_estimator.fit(
X_inlier_best,
y_inlier_best,
sample_weight=sample_weight[inlier_best_idxs_subset])
self.estimator_ = base_estimator
self.inlier_mask_ = inlier_mask_best
return self
def predict(self, X):
"""Predict using the estimated model.
This is a wrapper for `estimator_.predict(X)`.
Parameters
----------
X : numpy array of shape [n_samples, n_features]
Returns
-------
y : array, shape = [n_samples] or [n_samples, n_targets]
Returns predicted values.
"""
check_is_fitted(self)
return self.estimator_.predict(X)
def score(self, X, y):
"""Returns the score of the prediction.
This is a wrapper for `estimator_.score(X, y)`.
Parameters
----------
X : numpy array or sparse matrix of shape [n_samples, n_features]
Training data.
y : array, shape = [n_samples] or [n_samples, n_targets]
Target values.
Returns
-------
z : float
Score of the prediction.
"""
check_is_fitted(self)
return self.estimator_.score(X, y)
def _more_tags(self):
return {
'_xfail_checks': {
'check_sample_weights_invariance':
('zero sample_weight is not equivalent to removing samples'),
}
}
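
# --- Illustrative usage sketch (editor's addition, not part of scikit-learn) ---
# A minimal demonstration of RANSACRegressor through the public API; the data
# values are made up. This module uses relative imports, so the sketch is kept
# in comments rather than as an executable __main__ block.
#
#   import numpy as np
#   from sklearn.linear_model import RANSACRegressor
#   rng = np.random.RandomState(0)
#   X = rng.uniform(-5, 5, size=(200, 1))
#   y = 2.0 * X.ravel() + 1.0 + rng.normal(scale=0.1, size=200)
#   y[::20] += 30.0                      # corrupt every 20th sample
#   ransac = RANSACRegressor(random_state=0).fit(X, y)
#   print(ransac.estimator_.coef_, int(ransac.inlier_mask_.sum()))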
| kevin-intel/scikit-learn | sklearn/linear_model/_ransac.py | Python | bsd-3-clause | 20,838 |
#
# python-lxc: Python bindings for LXC
#
# (C) Copyright Canonical Ltd. 2012
#
# Authors:
# Stéphane Graber <stgraber@ubuntu.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import _lxc
import glob
import os
import subprocess
import tempfile
import time
import warnings
warnings.warn("The python-lxc API isn't yet stable "
"and may change at any point in the future.", Warning, 2)
class ContainerNetwork():
    def __init__(self, container, index):
        self.container = container
        self.index = index
        # Use a per-instance dict: a class-level "props = {}" would be
        # shared (and mutated) across all ContainerNetwork instances.
        self.props = {}
for key in self.container.get_keys("lxc.network.%s" % self.index):
if "." in key:
self.props[key.replace(".", "_")] = key
else:
self.props[key] = key
        if not self.props:
            raise AttributeError(
                "no network configuration found at index '%s'" % self.index)
def __delattr__(self, key):
if key in ["container", "index", "props"]:
return object.__delattr__(self, key)
if key not in self.props:
raise AttributeError("'%s' network has no attribute '%s'" % (
self.__get_network_item("type"), key))
return self.__clear_network_item(self.props[key])
def __dir__(self):
return sorted(self.props.keys())
def __getattr__(self, key):
if key in ["container", "index", "props"]:
return object.__getattribute__(self, key)
if key not in self.props:
raise AttributeError("'%s' network has no attribute '%s'" % (
self.__get_network_item("type"), key))
return self.__get_network_item(self.props[key])
    # Note: __hasattr__ is not a special method Python invokes; hasattr()
    # resolves through __getattr__ above, so this is effectively unused.
    def __hasattr__(self, key):
if key in ["container", "index", "props"]:
return object.__hasattr__(self, key)
if key not in self.props:
raise AttributeError("'%s' network has no attribute '%s'" % (
self.__get_network_item("type"), key))
return True
def __repr__(self):
return "'%s' network at index '%s'" % (
self.__get_network_item("type"), self.index)
def __setattr__(self, key, value):
if key in ["container", "index", "props"]:
return object.__setattr__(self, key, value)
if key not in self.props:
raise AttributeError("'%s' network has no attribute '%s'" % (
self.__get_network_item("type"), key))
return self.__set_network_item(self.props[key], value)
def __clear_network_item(self, key):
return self.container.clear_config_item("lxc.network.%s.%s" % (
self.index, key))
def __get_network_item(self, key):
return self.container.get_config_item("lxc.network.%s.%s" % (
self.index, key))
def __set_network_item(self, key, value):
return self.container.set_config_item("lxc.network.%s.%s" % (
self.index, key), value)
class ContainerNetworkList():
def __init__(self, container):
self.container = container
def __getitem__(self, index):
count = len(self.container.get_config_item("lxc.network"))
if index >= count:
raise IndexError("list index out of range")
return ContainerNetwork(self.container, index)
def __len__(self):
return len(self.container.get_config_item("lxc.network"))
def add(self, network_type):
index = len(self.container.get_config_item("lxc.network"))
return self.container.set_config_item("lxc.network.%s.type" % index,
network_type)
def remove(self, index):
count = len(self.container.get_config_item("lxc.network"))
if index >= count:
raise IndexError("list index out of range")
return self.container.clear_config_item("lxc.network.%s" % index)
class Container(_lxc.Container):
def __init__(self, name):
"""
Creates a new Container instance.
"""
if os.geteuid() != 0:
raise Exception("Running as non-root.")
_lxc.Container.__init__(self, name)
self.network = ContainerNetworkList(self)
def append_config_item(self, key, value):
"""
Append 'value' to 'key', assuming 'key' is a list.
If 'key' isn't a list, 'value' will be set as the value of 'key'.
"""
return _lxc.Container.set_config_item(self, key, value)
def attach(self, namespace="ALL", *cmd):
"""
Attach to a running container.
"""
if not self.running:
return False
attach = ["lxc-attach", "-n", self.name]
if namespace != "ALL":
attach += ["-s", namespace]
if cmd:
attach += ["--"] + list(cmd)
if subprocess.call(
attach,
universal_newlines=True) != 0:
return False
return True
def create(self, template, args={}):
"""
Create a new rootfs for the container.
"template" must be a valid template name.
"args" (optional) is a dictionary of parameters and values to pass
to the template.
"""
template_args = []
for item in args.items():
template_args.append("--%s" % item[0])
template_args.append("%s" % item[1])
return _lxc.Container.create(self, template, tuple(template_args))
def clone(self, container):
"""
Clone an existing container into a new one.
"""
if self.defined:
return False
if isinstance(container, Container):
source = container
else:
source = Container(container)
if not source.defined:
return False
if subprocess.call(
["lxc-clone", "-o", source.name, "-n", self.name],
universal_newlines=True) != 0:
return False
self.load_config()
return True
def console(self, tty="1"):
"""
Access the console of a container.
"""
if not self.running:
return False
if subprocess.call(
["lxc-console", "-n", self.name, "-t", "%s" % tty],
universal_newlines=True) != 0:
return False
return True
def get_config_item(self, key):
"""
Returns the value for a given config key.
A list is returned when multiple values are set.
"""
value = _lxc.Container.get_config_item(self, key)
if value is False:
return False
elif value.endswith("\n"):
return value.rstrip("\n").split("\n")
else:
return value
def get_ips(self, timeout=60, interface=None, protocol=None):
"""
Returns the list of IP addresses for the container.
"""
if not self.defined or not self.running:
return False
        try:
            os.makedirs("/run/netns")
        except OSError:
            # The directory most likely already exists.
            pass
path = tempfile.mktemp(dir="/run/netns")
os.symlink("/proc/%s/ns/net" % self.init_pid, path)
ips = []
count = 0
while count < timeout:
if count != 0:
time.sleep(1)
base_cmd = ["ip", "netns", "exec", path.split("/")[-1], "ip"]
# Get IPv6
if protocol in ("ipv6", None):
ip6_cmd = base_cmd + ["-6", "addr", "show", "scope", "global"]
if interface:
ip = subprocess.Popen(ip6_cmd + ["dev", interface],
stdout=subprocess.PIPE, universal_newlines=True)
else:
ip = subprocess.Popen(ip6_cmd, stdout=subprocess.PIPE,
universal_newlines=True)
ip.wait()
for line in ip.stdout.read().split("\n"):
fields = line.split()
if len(fields) > 2 and fields[0] == "inet6":
ips.append(fields[1].split('/')[0])
# Get IPv4
if protocol in ("ipv4", None):
ip4_cmd = base_cmd + ["-4", "addr", "show", "scope", "global"]
if interface:
ip = subprocess.Popen(ip4_cmd + ["dev", interface],
stdout=subprocess.PIPE, universal_newlines=True)
else:
ip = subprocess.Popen(ip4_cmd, stdout=subprocess.PIPE,
universal_newlines=True)
ip.wait()
for line in ip.stdout.read().split("\n"):
fields = line.split()
if len(fields) > 2 and fields[0] == "inet":
ips.append(fields[1].split('/')[0])
if ips:
break
count += 1
os.remove(path)
return ips
def get_keys(self, key):
"""
Returns a list of valid sub-keys.
"""
value = _lxc.Container.get_keys(self, key)
if value is False:
return False
elif value.endswith("\n"):
return value.rstrip("\n").split("\n")
else:
return value
def set_config_item(self, key, value):
"""
Set a config key to a provided value.
The value can be a list for the keys supporting multiple values.
"""
old_value = self.get_config_item(key)
# Check if it's a list
def set_key(key, value):
self.clear_config_item(key)
if isinstance(value, list):
for entry in value:
if not _lxc.Container.set_config_item(self, key, entry):
return False
else:
_lxc.Container.set_config_item(self, key, value)
set_key(key, value)
new_value = self.get_config_item(key)
if isinstance(value, str) and isinstance(new_value, str) and \
value == new_value:
return True
elif isinstance(value, list) and isinstance(new_value, list) and \
set(value) == set(new_value):
return True
elif isinstance(value, str) and isinstance(new_value, list) and \
set([value]) == set(new_value):
return True
elif old_value:
set_key(key, old_value)
return False
else:
self.clear_config_item(key)
return False
    def wait(self, state, timeout=-1):
"""
Wait for the container to reach a given state or timeout.
"""
if isinstance(state, str):
state = state.upper()
return _lxc.Container.wait(self, state, timeout)
def list_containers(as_object=False):
"""
List the containers on the system.
"""
containers = []
for entry in glob.glob("/var/lib/lxc/*/config"):
if as_object:
containers.append(Container(entry.split("/")[-2]))
else:
containers.append(entry.split("/")[-2])
return containers
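
# --- Illustrative usage sketch (editor's addition, not part of python-lxc) ---
# A hedged example of the API above; "p1" and the "ubuntu" template are
# made-up names, start()/stop() come from the _lxc.Container base class, and
# the module itself already requires running as root.
if __name__ == "__main__":
    container = Container("p1")
    if not container.defined:
        container.create("ubuntu")
    if container.start():
        container.wait("RUNNING", timeout=30)
        print(container.get_ips(timeout=30))
        container.stop()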
| coconutpilot/lxc | src/python-lxc/lxc/__init__.py | Python | lgpl-2.1 | 11,816 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import threading
import unittest
from devil.android import device_errors
from devil.android import device_utils
_devices_lock = threading.Lock()
_devices_condition = threading.Condition(_devices_lock)
_devices = set()
def PrepareDevices(*_args):
raw_devices = device_utils.DeviceUtils.HealthyDevices()
live_devices = []
for d in raw_devices:
try:
d.WaitUntilFullyBooted(timeout=5, retries=0)
live_devices.append(str(d))
except (device_errors.CommandFailedError, device_errors.CommandTimeoutError,
device_errors.DeviceUnreachableError):
pass
with _devices_lock:
_devices.update(set(live_devices))
if not _devices:
raise Exception('No live devices attached.')
class DeviceTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(DeviceTestCase, self).__init__(*args, **kwargs)
self.serial = None
#override
def setUp(self):
super(DeviceTestCase, self).setUp()
with _devices_lock:
while not _devices:
_devices_condition.wait(5)
self.serial = _devices.pop()
#override
def tearDown(self):
super(DeviceTestCase, self).tearDown()
with _devices_lock:
_devices.add(self.serial)
_devices_condition.notify()
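
# --- Illustrative usage sketch (editor's addition, not part of devil) ---
# A hedged example of building on DeviceTestCase: setUp() above pops a healthy
# device serial and tearDown() returns it, so a subclass only reads
# self.serial. "ExampleDeviceTest" and its assertion are made up.
class ExampleDeviceTest(DeviceTestCase):
  def testDeviceAssigned(self):
    self.assertIsNotNone(self.serial)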
| endlessm/chromium-browser | third_party/catapult/devil/devil/android/device_test_case.py | Python | bsd-3-clause | 1,406 |
import logging
from scrapy.utils.reqser import request_to_dict, request_from_dict
from . import picklecompat
from .exp import EmptyQueueException
from .logger import sm_log
# EMPTY QUEUE RETURN CODE
EMPTY_QUEUE_CODE = -1
class Base(object):
"""Per-spider base queue class"""
def __init__(self, server, spider, key, serializer=None):
"""Initialize per-spider redis queue.
Parameters
----------
server : StrictRedis
remote client instance.
spider : Spider
Scrapy spider instance.
key: str
remote key where to put and get messages.
serializer : object
Serializer object with ``loads`` and ``dumps`` methods.
"""
if serializer is None:
# Backward compatibility.
# TODO: deprecate pickle.
serializer = picklecompat
if not hasattr(serializer, 'loads'):
raise TypeError("serializer does not implement 'loads' function: %r"
% serializer)
if not hasattr(serializer, 'dumps'):
raise TypeError("serializer '%s' does not implement 'dumps' function: %r"
% serializer)
self.server = server
self.spider = spider
self.key = key % {'spider': spider.name}
self.serializer = serializer
def _encode_request(self, request):
"""Encode a request object"""
obj = request_to_dict(request, self.spider)
obj['id'] = request.id
return self.serializer.dumps(obj)
def _decode_request(self, encoded_request):
"""Decode an request previously encoded"""
obj = self.serializer.loads(encoded_request)
request = request_from_dict(obj, self.spider)
setattr(request, 'id', obj['id'])
return request
def __len__(self):
"""Return the length of the queue"""
raise NotImplementedError
def push(self, request):
"""Push a request"""
raise NotImplementedError
def pop(self, timeout=0):
"""Pop a request"""
raise NotImplementedError
def clear(self):
"""Clear queue/stack"""
self.server.delete(self.key)
class RemoteQueue(Base):
"""url remote server queue for inserting and fetching url. Scrapy's Request may contains cookie, header,
refer to "scrapy/utils/reqser.py"
"""
def push(self, request):
url = request.url
id = request.id
priority = request.priority
req_data = self._encode_request(request)
source = self.spider.name
data = dict(
id=id,
url=url,
priority=priority,
source=source,
content=req_data
)
sm_log.info("enqueue: [%s]" % url)
resp = self.server.insert(data)
code = resp['code']
if code != 0:
msg = "Error. code: [%s], msg: [%s], data: \n%s"%(code, resp['msg'], resp['data'])
raise QueueException(msg)
def pop(self, queue="", by="", timeout=0):
""" get url result from remote server, TODO
:param queue: queue name, usually spider name
:param by: 'lifo', 'fifo', 'priority'
:param timeout:
:return: request content
"""
        resp = self.server.get(queue=queue, sort=by, timeout=timeout)
code = resp['code']
if code == EMPTY_QUEUE_CODE:
# queue is empty
raise EmptyQueueException("Queue is empty")
wrapped_request = resp['data']
content = wrapped_request['content']
if content:
request = self._decode_request(content)
sm_log.info("dequeue: [%s]" % request.url)
return request
def __len__(self):
pass
class QueueException(Exception):
pass
# TODO: Deprecate the use of these names.
SpiderQueue = RemoteQueue
SpiderStack = RemoteQueue
SpiderPriorityQueue = RemoteQueue
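
# --- Illustrative usage sketch (editor's addition, not part of this module) ---
# A hedged sketch of the queue protocol above. "server" stands for whatever
# client object exposes the insert()/get()/delete() calls used by RemoteQueue;
# it is not a class shipped in this package.
#
#   queue = RemoteQueue(server, spider, key='%(spider)s:requests')
#   queue.push(request)                  # serialize and insert the request
#   try:
#       request = queue.pop(timeout=5)   # raises EmptyQueueException when empty
#   except EmptyQueueException:
#       pass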
| mavarick/scrapy-mysql | scrapy_mysql/queue.py | Python | mit | 3,976 |
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
columns = get_columns(filters.item)
data = get_data(filters.item)
return columns, data
def get_data(item):
if not item:
return []
item_dicts = []
variants = None
variant_results = frappe.db.sql("""select name from `tabItem`
where variant_of = %s""", item, as_dict=1)
if not variant_results:
frappe.msgprint(_("There isn't any item variant for the selected item"))
return []
else:
variants = ", ".join([frappe.db.escape(variant['name']) for variant in variant_results])
order_count_map = get_open_sales_orders_map(variants)
stock_details_map = get_stock_details_map(variants)
buying_price_map = get_buying_price_map(variants)
selling_price_map = get_selling_price_map(variants)
attr_val_map = get_attribute_values_map(variants)
attribute_list = [d[0] for d in frappe.db.sql("""select attribute
from `tabItem Variant Attribute`
where parent in ({variants}) group by attribute""".format(variants=variants))]
# Prepare dicts
variant_dicts = [{"variant_name": d['name']} for d in variant_results]
for item_dict in variant_dicts:
name = item_dict["variant_name"]
for d in attribute_list:
attr_dict = attr_val_map[name]
if attr_dict and attr_dict.get(d):
item_dict[d] = attr_val_map[name][d]
item_dict["Open Orders"] = order_count_map.get(name) or 0
if stock_details_map.get(name):
item_dict["Inventory"] = stock_details_map.get(name)["Inventory"] or 0
item_dict["In Production"] = stock_details_map.get(name)["In Production"] or 0
item_dict["Available Selling"] = stock_details_map.get(name)["Available Selling"] or 0
else:
item_dict["Inventory"] = item_dict["In Production"] = item_dict["Available Selling"] = 0
item_dict["Avg. Buying Price List Rate"] = buying_price_map.get(name) or 0
item_dict["Avg. Selling Price List Rate"] = selling_price_map.get(name) or 0
item_dicts.append(item_dict)
return item_dicts
def get_columns(item):
columns = [{
"fieldname": "variant_name",
"label": "Variant",
"fieldtype": "Link",
"options": "Item",
"width": 200
}]
item_doc = frappe.get_doc("Item", item)
for d in item_doc.attributes:
columns.append(d.attribute + ":Data:100")
columns += [_("Avg. Buying Price List Rate") + ":Currency:110", _("Avg. Selling Price List Rate") + ":Currency:110",
_("Inventory") + ":Float:100", _("In Production") + ":Float:100",
_("Open Orders") + ":Float:100", _("Available Selling") + ":Float:100"
]
return columns
def get_open_sales_orders_map(variants):
open_sales_orders = frappe.db.sql("""
select
count(*) as count,
item_code
from
`tabSales Order Item`
where
docstatus = 1 and
qty > ifnull(delivered_qty, 0) and
item_code in ({variants})
group by
item_code
""".format(variants=variants), as_dict=1)
order_count_map = {}
for d in open_sales_orders:
order_count_map[d["item_code"]] = d["count"]
return order_count_map
def get_stock_details_map(variants):
stock_details = frappe.db.sql("""
select
sum(planned_qty) as planned_qty,
sum(actual_qty) as actual_qty,
sum(projected_qty) as projected_qty,
item_code
from
`tabBin`
where
item_code in ({variants})
group by
item_code
""".format(variants=variants), as_dict=1)
stock_details_map = {}
for d in stock_details:
name = d["item_code"]
stock_details_map[name] = {
"Inventory" :d["actual_qty"],
"In Production" :d["planned_qty"],
"Available Selling" :d["projected_qty"]
}
return stock_details_map
def get_buying_price_map(variants):
buying = frappe.db.sql("""
select
avg(price_list_rate) as avg_rate,
item_code
from
`tabItem Price`
where
item_code in ({variants}) and buying=1
group by
item_code
""".format(variants=variants), as_dict=1)
buying_price_map = {}
for d in buying:
buying_price_map[d["item_code"]] = d["avg_rate"]
return buying_price_map
def get_selling_price_map(variants):
selling = frappe.db.sql("""
select
avg(price_list_rate) as avg_rate,
item_code
from
`tabItem Price`
where
item_code in ({variants}) and selling=1
group by
item_code
""".format(variants=variants), as_dict=1)
selling_price_map = {}
for d in selling:
selling_price_map[d["item_code"]] = d["avg_rate"]
return selling_price_map
def get_attribute_values_map(variants):
list_attr = frappe.db.sql("""
select
attribute, attribute_value, parent
from
`tabItem Variant Attribute`
where
parent in ({variants})
""".format(variants=variants), as_dict=1)
attr_val_map = {}
for d in list_attr:
name = d["parent"]
if not attr_val_map.get(name):
attr_val_map[name] = {}
attr_val_map[name][d["attribute"]] = d["attribute_value"]
return attr_val_map
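
# --- Illustrative usage sketch (editor's addition, not part of this report) ---
# A hedged example of exercising the report entry point from a bench console;
# "T-SHIRT" is a made-up template item code. ``filters`` only needs attribute
# access to ``item``, which frappe._dict provides.
#
#   import frappe
#   columns, data = execute(frappe._dict(item="T-SHIRT"))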
| Zlash65/erpnext | erpnext/stock/report/item_variant_details/item_variant_details.py | Python | gpl-3.0 | 4,905 |
import random
import string
import aiohttp.web
AUTH_TOKEN_HEADER = 'X-AuthToken'
def random_string(length=6, chars=string.ascii_lowercase +
                  string.ascii_uppercase + string.digits):
    # SystemRandom draws from os.urandom, so generated auth tokens are not
    # predictable the way the default Mersenne Twister stream is.
    return ''.join(random.SystemRandom().choice(chars) for x in range(length))
def generate_api_auth_token():
return random_string(32)
def require_auth_token(f):
async def inner(resource, request, *args, **kwargs):
req_token = request.headers.get(AUTH_TOKEN_HEADER, '')
auth_token = resource.app.config.get('api.auth_token')
if not auth_token or (req_token == auth_token):
return await f(resource, request, *args, **kwargs)
else:
raise aiohttp.web.HTTPForbidden()
return inner
def require_identity(f):
'''
An API resource with this decorator requires that the identity is initialized, i.e. that the
user keys have been generated.
'''
def inner(resource, request, *args, **kwargs):
resource.app.identity.assert_initialized()
return f(resource, request, *args, **kwargs)
return inner
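
# --- Illustrative usage sketch (editor's addition, not part of syncrypt) ---
# A hedged example of protecting an aiohttp-style handler with
# require_auth_token. "StatusResource" is made up; the decorator only needs an
# object whose ``app.config`` supports the get() lookup used above.
class StatusResource:
    def __init__(self, app):
        self.app = app

    @require_auth_token
    async def get_status(self, request):
        # Reached only when X-AuthToken matches (or no token is configured).
        return aiohttp.web.json_response({'status': 'ok'})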
| syncrypt/client | syncrypt/api/auth.py | Python | gpl-3.0 | 1,101 |
import os
from coinpy.tools.ssl.ssl import ssl_RAND_add
if os.name == 'nt':
    from coinpy.tools.seeds.perfmon import get_perfmon_data
    from coinpy.tools.seeds.performance_counter import get_performance_counter

    def get_system_seeds():
        """Get a list of (random_data, entropy) tuples suitable for ssl.RAND_add."""
        perfmon = get_perfmon_data()
        perf_counter = get_performance_counter()
        return [(perfmon, len(perfmon) / 100.0),
                (perf_counter, 1)]
else:
    def get_system_seeds():
        """Get a list of (random_data, entropy) tuples suitable for ssl.RAND_add."""
        # No platform-specific entropy sources are available on this OS.
        return []
def ssl_add_system_seeds():
for data, entropy in get_system_seeds():
ssl_RAND_add(data, entropy)
| sirk390/coinpy | coinpy-lib/src/coinpy/tools/seeds/system_seeds.py | Python | lgpl-3.0 | 629 |
#!/usr/bin/env python3.6
## RPI Gpio OSC ##
## v0.05 ##
## gllmar 2016-2017 ##
# import OSC  # from pyosc (legacy Python 2 client, superseded below)
from pythonosc import osc_message_builder
from pythonosc import udp_client
import RPi.GPIO as GPIO
import time
import argparse
import errno
import datetime
import sys
__author__ = 'gllmAr'
# argument handling
parser = argparse.ArgumentParser(description='RPI Gpio interrupt OSC')
parser.add_argument('-g','--gpioBoard', help='gpio Board Mode (0=GPIO.BCM, 1=GPIO.BOARD)', default='0', type=int)
parser.add_argument('-i','--inputPin', help='input Pin', default='23', type=int)
parser.add_argument('-d','--destination',help='destination ip address', default='127.0.0.1')
parser.add_argument('-p','--outputPort', help='Output Port ', default='9999', type=int)
parser.add_argument('-o','--oscPath', help='Osc path', default='/gpioOSC')
parser.add_argument('-b','--bouncetime', help='(de)bouncetime', default='200', type=int)
parser.add_argument('-r','--resistance', help='pull_up_down resistance (0=off, 1=pullUp, 2=pullDown)', default='2', type=int)
parser.add_argument('-t','--trigger', help='trigger mode (0= READING, 1=FALLING, 2=RISING, 3=BOTH)', default='0', type=int)
parser.add_argument('-D','--Debug', help='Debug mode on ', default='1', type=int)
args = parser.parse_args()
# define the callback,
# it will be dispatched in a thread by the gpio.add_event_detect function
flag = 0  # when an interrupt is detected, add 1 and send the OSC message
def flagUp(channel):
    global flag  # make the variable global (not entirely clear why it does not propagate otherwise)
    flag += 1  # increment by 1 on each interrupt
gpioBoardMode = "0"
#PUD_OFF, PUD_UP or PUD_DOWN
if args.gpioBoard == 0:
gpioBoardMode = "GPIO.BCM"
GPIO.setmode(GPIO.BCM)
elif args.gpioBoard == 1:
gpioBoardMode = "GPIO.BOARD"
GPIO.setmode(GPIO.BOARD)
else:
print("gpioBoard number not valid(try 0/1)")
sys.exit()
resistanceType = "0"
#PUD_OFF, PUD_UP or PUD_DOWN
if args.resistance == 0:
resistanceType = "GPIO.PUD_OFF"
GPIO.setup(args.inputPin, GPIO.IN, pull_up_down=GPIO.PUD_OFF)
elif args.resistance == 1:
resistanceType = "GPIO.PUD_UP"
GPIO.setup(args.inputPin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
elif args.resistance == 2:
resistanceType = "GPIO.PUD_DOWN"
GPIO.setup(args.inputPin, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
else:
print("resistance number not valid(try 0/1/2)")
sys.exit()
triggerType = "0"
#GPIO.FALLING , GPIO.RISING, GPIO.BOTH
if args.trigger == 0:
triggerType = "GPIO.READING"
elif args.trigger == 1:
triggerType = "GPIO.FALLING"
GPIO.add_event_detect(args.inputPin, GPIO.FALLING, callback=flagUp, bouncetime=args.bouncetime)
elif args.trigger == 2:
triggerType = "GPIO.RISING"
GPIO.add_event_detect(args.inputPin, GPIO.RISING, callback=flagUp, bouncetime=args.bouncetime)
elif args.trigger == 3:
triggerType = "GPIO.BOTH"
GPIO.add_event_detect(args.inputPin, GPIO.BOTH, callback=flagUp, bouncetime=args.bouncetime)
else:
print("trigger number not valid(try 0/1/2)")
sys.exit()
## Print the arguments ##
print ("RPI Gpio interrupt OSC")
print ("destination Address: %s" % args.destination )
print ("outputPort: %s" % args.outputPort )
print ("InputPin: %s" % args.inputPin )
print ("gpioBoard: %s" % gpioBoardMode )
print ("oscPath: %s" % args.oscPath )
print ("bouncetime: %s" % args.bouncetime )
print ("resistance: %s" % resistanceType )
print ("trigger: %s" % triggerType )
print ("Debug: %s" % args.Debug )
## define the send function (Python 2.7 version, kept commented out below) ##
#c = OSC.OSCClient()
#def sendOSC(value):
# try:
# c.connect((args.destination, args.outputPort)) # connection
# oscmsg = OSC.OSCMessage()
# oscmsg.setAddress(args.oscPath)
# oscmsg.append(value)
# c.send(oscmsg)
# if args.Debug:
# now = datetime.datetime.now()
# print now.isoformat() +" "+ str(value)
# except OSC.OSCClientError:
# print "Connection Refused"
client = udp_client.SimpleUDPClient(args.destination, args.outputPort)
def sendOSC(value):
client.send_message(args.oscPath, value)
# an infinite loop on the main thread that waits
# and sends when the variable changes
try:
while 1:
if args.trigger == 0:
if (GPIO.input(args.inputPin)):
sendOSC(1)
else:
sendOSC(0)
if flag > 0:
sendOSC(1)
flag = 0
time.sleep(.1)
# exit the program with CTRL+C and clean up the GPIO
except KeyboardInterrupt:
GPIO.cleanup() # clean up GPIO on CTRL+C exit
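
# --- Illustrative invocation (editor's addition) ---
# Example command line for the script above; the pin, address and port are
# made up:
#   python3 gpioOSC.py -g 0 -i 23 -d 192.168.1.50 -p 9000 -o /button -t 3 -b 150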
| gllmAR/rpi-gpioOsc | gpioOSC.py | Python | mit | 4,647 |
# Copyright 2010 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""GDB Pretty printers and convencience functions for Go's runtime structures.
This script is loaded by GDB when it finds a .debug_gdb_scripts
section in the compiled binary. The [68]l linkers emit this with a
path to this file based on the path to the runtime package.
"""
# Known issues:
# - pretty printing only works for the 'native' strings. E.g. 'type
# foo string' will make foo a plain struct in the eyes of gdb,
# circumventing the pretty print triggering.
import sys, re
print >>sys.stderr, "Loading Go Runtime support."
# allow to manually reload while developing
goobjfile = gdb.current_objfile() or gdb.objfiles()[0]
goobjfile.pretty_printers = []
#
# Pretty Printers
#
class StringTypePrinter:
"Pretty print Go strings."
pattern = re.compile(r'^struct string$')
def __init__(self, val):
self.val = val
def display_hint(self):
return 'string'
def to_string(self):
l = int(self.val['len'])
return self.val['str'].string("utf-8", "ignore", l)
class SliceTypePrinter:
"Pretty print slices."
pattern = re.compile(r'^struct \[\]')
def __init__(self, val):
self.val = val
def display_hint(self):
return 'array'
def to_string(self):
return str(self.val.type)[6:] # skip 'struct '
def children(self):
ptr = self.val["array"]
for idx in range(self.val["len"]):
yield ('[%d]' % idx, (ptr + idx).dereference())
class MapTypePrinter:
"""Pretty print map[K]V types.
Map-typed go variables are really pointers. dereference them in gdb
to inspect their contents with this pretty printer.
"""
pattern = re.compile(r'^struct hash<.*>$')
def __init__(self, val):
self.val = val
def display_hint(self):
return 'map'
def to_string(self):
return str(self.val.type)
def children(self):
stab = self.val['st']
i = 0
for v in self.traverse_hash(stab):
yield ("[%d]" % i, v['key'])
yield ("[%d]" % (i + 1), v['val'])
i += 2
def traverse_hash(self, stab):
ptr = stab['entry'].address
end = stab['end']
while ptr < end:
v = ptr.dereference()
ptr = ptr + 1
if v['hash'] == 0: continue
if v['hash'] & 63 == 63: # subtable
for v in self.traverse_hash(v['key'].cast(self.val['st'].type)):
yield v
else:
yield v
class ChanTypePrinter:
"""Pretty print chan[T] types.
Chan-typed go variables are really pointers. dereference them in gdb
to inspect their contents with this pretty printer.
"""
pattern = re.compile(r'^struct hchan<.*>$')
def __init__(self, val):
self.val = val
def display_hint(self):
return 'array'
def to_string(self):
return str(self.val.type)
def children(self):
# see chan.c chanbuf()
et = [x.type for x in self.val['free'].type.target().fields() if x.name == 'elem'][0]
ptr = (self.val.address + 1).cast(et.pointer())
for i in range(self.val["qcount"]):
j = (self.val["recvx"] + i) % self.val["dataqsiz"]
yield ('[%d]' % i, (ptr + j).dereference())
#
# Register all the *Printer classes above.
#
def makematcher(klass):
def matcher(val):
try:
if klass.pattern.match(str(val.type)):
return klass(val)
except:
pass
return matcher
goobjfile.pretty_printers.extend([makematcher(k) for k in vars().values() if hasattr(k, 'pattern')])
#
# For reference, this is what we're trying to do:
# eface: p *(*(struct 'runtime.commonType'*)'main.e'->type_->data)->string
# iface: p *(*(struct 'runtime.commonType'*)'main.s'->tab->Type->data)->string
#
# interface types can't be recognized by their name, instead we check
# if they have the expected fields. Unfortunately the mapping of
# fields to python attributes in gdb.py isn't complete: you can't test
# for presence other than by trapping.
def is_iface(val):
try:
return str(val['tab'].type) == "struct runtime.itab *" \
and str(val['data'].type) == "void *"
except:
pass
def is_eface(val):
try:
return str(val['_type'].type) == "struct runtime._type *" \
and str(val['data'].type) == "void *"
except:
pass
def lookup_type(name):
try:
return gdb.lookup_type(name)
except:
pass
try:
return gdb.lookup_type('struct ' + name)
except:
pass
try:
return gdb.lookup_type('struct ' + name[1:]).pointer()
except:
pass
def iface_dtype(obj):
"Decode type of the data field of an eface or iface struct."
if is_iface(obj):
go_type_ptr = obj['tab']['_type']
elif is_eface(obj):
go_type_ptr = obj['_type']
else:
return
ct = gdb.lookup_type("struct runtime.commonType").pointer()
dynamic_go_type = go_type_ptr['ptr'].cast(ct).dereference()
dtype_name = dynamic_go_type['string'].dereference()['str'].string()
type_size = int(dynamic_go_type['size'])
uintptr_size = int(dynamic_go_type['size'].type.sizeof) # size is itself an uintptr
dynamic_gdb_type = lookup_type(dtype_name)
if type_size > uintptr_size:
dynamic_gdb_type = dynamic_gdb_type.pointer()
return dynamic_gdb_type
class IfacePrinter:
"""Pretty print interface values
Casts the data field to the appropriate dynamic type."""
def __init__(self, val):
self.val = val
def display_hint(self):
return 'string'
def to_string(self):
if self.val['data'] == 0:
return 0x0
try:
dtype = iface_dtype(self.val)
except:
return "<bad dynamic type>"
try:
return self.val['data'].cast(dtype).dereference()
except:
pass
return self.val['data'].cast(dtype)
def ifacematcher(val):
if is_iface(val) or is_eface(val):
return IfacePrinter(val)
goobjfile.pretty_printers.append(ifacematcher)
#
# Convenience Functions
#
class GoLenFunc(gdb.Function):
"Length of strings, slices, maps or channels"
how = ((StringTypePrinter, 'len' ),
(SliceTypePrinter, 'len'),
(MapTypePrinter, 'count'),
(ChanTypePrinter, 'qcount'))
def __init__(self):
super(GoLenFunc, self).__init__("len")
def invoke(self, obj):
typename = str(obj.type)
for klass, fld in self.how:
if klass.pattern.match(typename):
return obj[fld]
class GoCapFunc(gdb.Function):
"Capacity of slices or channels"
how = ((SliceTypePrinter, 'cap'),
(ChanTypePrinter, 'dataqsiz'))
def __init__(self):
super(GoCapFunc, self).__init__("cap")
def invoke(self, obj):
typename = str(obj.type)
for klass, fld in self.how:
if klass.pattern.match(typename):
return obj[fld]
class DTypeFunc(gdb.Function):
"""Cast Interface values to their dynamic type.
For non-interface types this behaves as the identity operation.
"""
def __init__(self):
super(DTypeFunc, self).__init__("dtype")
def invoke(self, obj):
try:
return obj['data'].cast(iface_dtype(obj))
except:
pass
return obj
#
# Commands
#
sts = ( 'idle', 'runnable', 'running', 'syscall', 'waiting', 'moribund', 'dead', 'recovery')
def linked_list(ptr, linkfield):
while ptr:
yield ptr
ptr = ptr[linkfield]
class GoroutinesCmd(gdb.Command):
"List all goroutines."
def __init__(self):
super(GoroutinesCmd, self).__init__("info goroutines", gdb.COMMAND_STACK, gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
# args = gdb.string_to_argv(arg)
vp = gdb.lookup_type('void').pointer()
for ptr in linked_list(gdb.parse_and_eval("'runtime.allg'"), 'alllink'):
if ptr['status'] == 6: # 'gdead'
continue
s = ' '
if ptr['m']:
s = '*'
pc = ptr['sched']['pc'].cast(vp)
sp = ptr['sched']['sp'].cast(vp)
blk = gdb.block_for_pc(long((pc)))
print s, ptr['goid'], "%8s" % sts[long((ptr['status']))], blk.function
def find_goroutine(goid):
vp = gdb.lookup_type('void').pointer()
for ptr in linked_list(gdb.parse_and_eval("'runtime.allg'"), 'alllink'):
if ptr['status'] == 6: # 'gdead'
continue
if ptr['goid'] == goid:
return [ptr['sched'][x].cast(vp) for x in 'pc', 'sp']
return None, None
class GoroutineCmd(gdb.Command):
"""Execute gdb command in the context of goroutine <goid>.
Switch PC and SP to the ones in the goroutine's G structure,
execute an arbitrary gdb command, and restore PC and SP.
Usage: (gdb) goroutine <goid> <gdbcmd>
Note that it is ill-defined to modify state in the context of a goroutine.
Restrict yourself to inspecting values.
"""
def __init__(self):
super(GoroutineCmd, self).__init__("goroutine", gdb.COMMAND_STACK, gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
goid, cmd = arg.split(None, 1)
pc, sp = find_goroutine(int(goid))
if not pc:
print "No such goroutine: ", goid
return
save_frame = gdb.selected_frame()
gdb.parse_and_eval('$save_pc = $pc')
gdb.parse_and_eval('$save_sp = $sp')
gdb.parse_and_eval('$pc = 0x%x' % long(pc))
gdb.parse_and_eval('$sp = 0x%x' % long(sp))
try:
gdb.execute(cmd)
finally:
gdb.parse_and_eval('$pc = $save_pc')
gdb.parse_and_eval('$sp = $save_sp')
save_frame.select()
class GoIfaceCmd(gdb.Command):
"Print Static and dynamic interface types"
def __init__(self):
super(GoIfaceCmd, self).__init__("iface", gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL)
def invoke(self, arg, from_tty):
for obj in gdb.string_to_argv(arg):
try:
#TODO fix quoting for qualified variable names
obj = gdb.parse_and_eval("%s" % obj)
except Exception, e:
print "Can't parse ", obj, ": ", e
continue
dtype = iface_dtype(obj)
if not dtype:
print "Not an interface: ", obj.type
continue
print "%s: %s" % (obj.type, dtype)
# TODO: print interface's methods and dynamic type's func pointers thereof.
#rsc: "to find the number of entries in the itab's Fn field look at itab.inter->numMethods
#i am sure i have the names wrong but look at the interface type and its method count"
# so Itype will start with a commontype which has kind = interface
#
# Register all convience functions and CLI commands
#
for k in vars().values():
if hasattr(k, 'invoke'):
k()
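
# --- Illustrative GDB session (editor's addition, not part of the original) ---
# Once GDB auto-loads this script next to a Go binary, the printers, commands
# and convenience functions above can be exercised like this (the goroutine id
# and variable names are made up):
#   (gdb) info goroutines
#   (gdb) goroutine 12 bt
#   (gdb) p $len(mySlice)
#   (gdb) p $dtype(myInterface)
#   (gdb) iface myInterface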
| SDpower/golang | src/pkg/runtime/runtime-gdb.py | Python | bsd-3-clause | 10,018 |
"""
ass.ets
Asset management for webapps.
:copyright: (c) 2012 by Herrn Kaste <herr.kaste@gmail.com>.
:license: BSD, see LICENSE for more details.
"""
__version__ = "0.1.1"
from bundles import Environment, Assets, bundle, Bundle, Manifest
from filters import FilterError
import filters as f
from options import Option, Options, dict_getter
from ass.ets.workers import worker, filter, Incompatible
| kaste/ass.ets | ass/ets/__init__.py | Python | bsd-2-clause | 422 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import math
import numpy as np
from koheron import command
class Oscillo(object):
def __init__(self, client):
self.client = client
self.wfm_size = 8192
self.sampling_rate = 125e6
self.t = np.arange(self.wfm_size)/self.sampling_rate
self.dac = np.zeros((2, self.wfm_size))
self.adc = np.zeros((2, self.wfm_size))
self.spectrum = np.zeros((2, int(self.wfm_size / 2)))
self.avg_spectrum = np.zeros((2, int(self.wfm_size / 2)))
@command()
def set_dac_periods(self, period0, period1):
''' Select the periods played on each address generator
ex: self.set_dac_periods(8192, 4096)
'''
pass
@command()
def set_num_average_min(self, num_average_min):
''' Set the minimum of averages that will be computed on the FPGA
The effective number of averages is >= num_average_min.
'''
pass
@command()
def set_average_period(self, average_period):
''' Set the period of the averaging module and reset the module.
'''
self.period = average_period
@command()
def set_average(self, is_average):
''' is_average = True enables averaging. '''
pass
@command()
def get_num_average(self, channel):
''' Get the number of averages corresponding to the last acquisition. '''
num_average = self.client.recv_uint32()
return num_average
@command()
def get_decimated_data(self, decim_factor, index_low, index_high):
decimated_data = self.client.recv_vector(dtype='float32')
return decimated_data
def get_adc(self):
self.adc = np.reshape(self.get_decimated_data(1, 0, self.wfm_size), (2, self.wfm_size))
def get_spectrum(self):
fft_adc = np.fft.fft(self.adc, axis=1)
        self.spectrum = fft_adc[:, 0:int(self.wfm_size / 2)]
def get_avg_spectrum(self, n_avg=1):
self.avg_spectrum = np.zeros((2, int(self.wfm_size / 2)))
for i in range(n_avg):
self.get_adc()
fft_adc = np.abs(np.fft.fft(self.adc, axis=1))
self.avg_spectrum += fft_adc[:, 0:int(self.wfm_size / 2)]
self.avg_spectrum /= n_avg
@command()
def reset_acquisition(self):
pass
@command(funcname='reset')
def reset_dac(self):
pass
def reset(self):
self.reset_dac()
# Modulation
def set_dac(self, channels=[0,1]):
""" Write the BRAM corresponding on the selected channels
(dac0 or dac1) with the array stored in self.dac[channel,:].
ex: self.set_dac(channel=[0])
"""
@command(classname='Modulation')
def set_dac_buffer(self, channel, arr):
pass
for channel in channels:
data = np.int16(16384 * (self.dac[channel,:]))
set_dac_buffer(self, channel, np.uint32(data[1::2] + data[::2] * 65536))
@command(classname='Modulation')
def get_modulation_status(self):
return self.client.recv_tuple('IIffffff')
@command(classname='Modulation')
def set_waveform_type(self, channel, wfm_type):
pass
@command(classname='Modulation')
def set_dac_amplitude(self, channel, amplitude_value):
pass
@command(classname='Modulation')
def set_dac_frequency(self, channel, frequency_value):
pass
@command(classname='Modulation')
    def set_dac_offset(self, channel, offset_value):
pass
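
# --- Illustrative usage sketch (editor's addition, not part of this driver) ---
# A hedged example of driving the Oscillo class above; the host address is
# made up, and ``connect`` is assumed to be the usual koheron client helper.
if __name__ == "__main__":
    from koheron import connect

    client = connect('192.168.1.100', name='oscillo')  # hypothetical host
    driver = Oscillo(client)
    driver.set_average(True)
    driver.get_avg_spectrum(n_avg=10)
    print(driver.avg_spectrum.shape)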
| Koheron/lase | examples/drivers/oscillo.py | Python | mit | 3,532 |
# -*- coding: utf-8 -*-
#
# input.py - input line for chat and debug window
#
# Copyright (C) 2011-2015 Sébastien Helleu <flashcode@flashtux.org>
#
# This file is part of QWeeChat, a Qt remote GUI for WeeChat.
#
# QWeeChat is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# QWeeChat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with QWeeChat. If not, see <http://www.gnu.org/licenses/>.
#
import qt_compat
QtCore = qt_compat.import_module('QtCore')
QtGui = qt_compat.import_module('QtGui')
class InputLineEdit(QtGui.QLineEdit):
"""Input line."""
bufferSwitchPrev = qt_compat.Signal()
bufferSwitchNext = qt_compat.Signal()
textSent = qt_compat.Signal(str)
def __init__(self, scroll_widget):
QtGui.QLineEdit.__init__(self)
self.scroll_widget = scroll_widget
self._history = []
self._history_index = -1
self.returnPressed.connect(self._input_return_pressed)
def keyPressEvent(self, event):
key = event.key()
modifiers = event.modifiers()
bar = self.scroll_widget.verticalScrollBar()
if modifiers == QtCore.Qt.ControlModifier:
if key == QtCore.Qt.Key_PageUp:
self.bufferSwitchPrev.emit()
elif key == QtCore.Qt.Key_PageDown:
self.bufferSwitchNext.emit()
else:
QtGui.QLineEdit.keyPressEvent(self, event)
elif modifiers == QtCore.Qt.AltModifier:
if key in (QtCore.Qt.Key_Left, QtCore.Qt.Key_Up):
self.bufferSwitchPrev.emit()
elif key in (QtCore.Qt.Key_Right, QtCore.Qt.Key_Down):
self.bufferSwitchNext.emit()
elif key == QtCore.Qt.Key_PageUp:
bar.setValue(bar.value() - (bar.pageStep() / 10))
elif key == QtCore.Qt.Key_PageDown:
bar.setValue(bar.value() + (bar.pageStep() / 10))
elif key == QtCore.Qt.Key_Home:
bar.setValue(bar.minimum())
elif key == QtCore.Qt.Key_End:
bar.setValue(bar.maximum())
else:
QtGui.QLineEdit.keyPressEvent(self, event)
elif key == QtCore.Qt.Key_PageUp:
bar.setValue(bar.value() - bar.pageStep())
elif key == QtCore.Qt.Key_PageDown:
bar.setValue(bar.value() + bar.pageStep())
elif key == QtCore.Qt.Key_Up:
self._history_navigate(-1)
elif key == QtCore.Qt.Key_Down:
self._history_navigate(1)
else:
QtGui.QLineEdit.keyPressEvent(self, event)
def _input_return_pressed(self):
self._history.append(self.text().encode('utf-8'))
self._history_index = len(self._history)
self.textSent.emit(self.text())
self.clear()
def _history_navigate(self, direction):
if self._history:
self._history_index += direction
if self._history_index < 0:
self._history_index = 0
return
if self._history_index > len(self._history) - 1:
self._history_index = len(self._history)
self.clear()
return
self.setText(self._history[self._history_index])
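
# --- Illustrative wiring sketch (editor's addition, not part of QWeeChat) ---
# A hedged example of how the widget above is typically wired: the input line
# scrolls a chat view and emits textSent on Enter. "chat_view" and the slot
# names are made up.
#
#   chat_view = QtGui.QTextBrowser()
#   input_line = InputLineEdit(chat_view)
#   input_line.textSent.connect(send_to_current_buffer)
#   input_line.bufferSwitchPrev.connect(show_previous_buffer)
#   input_line.bufferSwitchNext.connect(show_next_buffer)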
| GeoffMaciolek/qweechat | qweechat/input.py | Python | gpl-3.0 | 3,654 |
'''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class Ecs20140526DescribeInstanceMonitorDataRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.EndTime = None
self.InstanceId = None
self.Period = None
self.StartTime = None
def getapiname(self):
return 'ecs.aliyuncs.com.DescribeInstanceMonitorData.2014-05-26'
| francisar/rds_manager | aliyun/api/rest/Ecs20140526DescribeInstanceMonitorDataRequest.py | Python | mit | 416 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def DistributedVirtualSwitchManagerDvsProductSpec(vim, *args, **kwargs):
'''This class is used to specify ProductSpec for the DVS. The two properties are
strictly mutually exclusive. If both properties are set, then an
InvalidArgument fault would be thrown.'''
obj = vim.client.factory.create('ns0:DistributedVirtualSwitchManagerDvsProductSpec')
# do some validation checking...
    if (len(args) + len(kwargs)) < 1:
        raise IndexError('Expected at least 1 argument, got: %d' % len(args))
required = [ ]
optional = [ 'distributedVirtualSwitch', 'newSwitchProductSpec', 'dynamicProperty',
'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
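
# --- Illustrative usage sketch (editor's addition, not part of pyvisdk) ---
# A hedged example of calling the factory above with keyword arguments; "vim"
# stands for a connected service instance and the spec value is made up.
#
#   spec = DistributedVirtualSwitchManagerDvsProductSpec(
#       vim, newSwitchProductSpec=my_product_spec)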
| xuru/pyvisdk | pyvisdk/do/distributed_virtual_switch_manager_dvs_product_spec.py | Python | mit | 1,242 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'YandexMap'
db.create_table('cmsplugin_yandexmap', (
('cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
('address', self.gf('django.db.models.fields.CharField')(max_length=150)),
('zipcode', self.gf('django.db.models.fields.CharField')(max_length=30)),
('city', self.gf('django.db.models.fields.CharField')(max_length=100)),
('content', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('zoom', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('lat', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=10, decimal_places=6, blank=True)),
('lng', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=10, decimal_places=6, blank=True)),
))
db.send_create_signal('cmsplugin_yandexmap', ['YandexMap'])
def backwards(self, orm):
# Deleting model 'YandexMap'
db.delete_table('cmsplugin_yandexmap')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cmsplugin_yandexmap.yandexmap': {
'Meta': {'object_name': 'YandexMap', 'db_table': "'cmsplugin_yandexmap'", '_ormbases': ['cms.CMSPlugin']},
'address': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '6', 'blank': 'True'}),
'lng': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '6', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'zoom': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['cmsplugin_yandexmap']
| moskrc/cmsplugin_yandexmap | cmsplugin_yandexmap/migrations/0001_initial.py | Python | mit | 4,409 |
import csv
from Transaction import RawTransaction,BasicTransaction,replaceUndumpableData,UNITS, \
PRICE,AGENCY,VENDOR,PSC,DESCR,DATE,LONGDESCR,AWARDIDIDV,DATASOURCE
from Transaction import ensureZipCodeHasFiveDigits,MANUFACTURER_NAME,MANUFACTURER_PART_NUMBER,BUREAU,CONTRACT_NUMBER,TO_ZIP_CODE,FROM_ZIP_CODE,UNIT_OF_ISSUE
import datetime
import calendar
import re
import sys, traceback
import logging
import os
logger = logging.getLogger('PricesPaidTrans')
hdlr = logging.FileHandler('../logs/PricesPaidTrans.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
def tryToInferUnitsFromDescriptionOrDefaultToOne(descr):
    # Unit inference from the free-text description is not implemented yet.
    return "1"
def getDictionaryFromUSASpending(raw,datasource):
try:
# Choosing the "Charge Processing Date" as the official date"
d = datetime.datetime.strptime(raw.data[14].strip(' \t\n\r'),"%m/%d/%Y")
return { \
DATASOURCE : datasource, \
DESCR : replaceUndumpableData(raw.data[31]), \
UNITS : tryToInferUnitsFromDescriptionOrDefaultToOne(replaceUndumpableData(raw.data[32])), \
PRICE : replaceUndumpableData(raw.data[4]), \
AGENCY : replaceUndumpableData(raw.data[5]), \
VENDOR : replaceUndumpableData(raw.data[43]), \
# I know all of this data is office supplies---this may not be too accurate
# but it matches
PSC : replaceUndumpableData(raw.data[80]), \
"product_service_code" : replaceUndumpableData(raw.data[80]), \
"naics_code" : replaceUndumpableData(raw.data[109]), \
LONGDESCR : replaceUndumpableData(raw.data[31]), \
DATE : replaceUndumpableData(d.date().isoformat()), \
TO_ZIP_CODE : replaceUndumpableData(raw.data[63]), \
"street_address" : replaceUndumpableData(raw.data[52]), \
"city" : replaceUndumpableData(raw.data[55]), \
"state" : replaceUndumpableData(raw.data[56]), \
"vendor_state_code" : replaceUndumpableData(raw.data[59]), \
"congressionaldistrict" : replaceUndumpableData(raw.data[58]), \
"duns_number" : replaceUndumpableData(raw.data[64]), \
"phoneno" : replaceUndumpableData(raw.data[67]), \
"extent_competed" : replaceUndumpableData(raw.data[103]), \
"reason_not_competed" : replaceUndumpableData(raw.data[104]), \
AWARDIDIDV : replaceUndumpableData("USASpending")
}
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_traceback,
limit=2, file=sys.stderr)
logger.error("don't know what went wrong here")
return {}
def loadUSASpendingFromCSVFile(filename,pattern,adapter,LIMIT_NUM_MATCHING_TRANSACTIONS,errorfile):
try:
logger.error('USASpending reader opened:'+filename)
transactions = []
with open(filename, 'rb') as f:
basename = os.path.basename(filename)
reader = csv.reader(f)
logger.error('USASpending reader opened:'+filename)
n = len(transactions)
i = 0
for row in reader:
tr = RawTransaction("spud")
tr.data = row;
try:
bt = BasicTransaction(adapter,tr,basename)
if (pattern):
result = re.search(pattern, bt.getSearchMemento())
else:
result = True
if (result):
if (bt.isValidTransaction()):
transactions.append(bt)
i = i + 1
if (i+n) > LIMIT_NUM_MATCHING_TRANSACTIONS:
break
except:
print "Error on this row:"
print repr(row)
return transactions
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
| GSA/PricesPaidAPI | USASpendingAdapter.py | Python | unlicense | 3,973 |