| CombinedText — string column (lengths 4 to 3.42M) |
|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
__version__ = "1.2.0"
app_dir = os.path.dirname(__file__)
app_dir_components = app_dir.split(os.sep)
base_dir = os.sep.join(app_dir_components[:-1])
from adwords_reports.client import Client
from adwords_reports.report_definition import ReportDefinition
Updated version string
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os

# Package version string (bumped to 1.2.1 in this revision).
__version__ = "1.2.1"

# Directory containing this package's __init__.py.
app_dir = os.path.dirname(__file__)
# Path components of the package directory, split on the OS separator.
app_dir_components = app_dir.split(os.sep)
# Parent directory of the package (all components except the last).
base_dir = os.sep.join(app_dir_components[:-1])

# Re-export the public API at package level.
from adwords_reports.client import Client
from adwords_reports.report_definition import ReportDefinition
|
# -*- coding: utf-8 -*-
# Copyright 2015 Cyan, Inc.
# Copyright 2018 Ciena Corporation
import logging
import uuid
from mock import ANY, Mock, call, patch
import six
from twisted.internet.defer import CancelledError as tid_CancelledError
from twisted.internet.defer import Deferred, fail, succeed
from twisted.internet.task import LoopingCall
from twisted.python.failure import Failure
from twisted.test.proto_helpers import MemoryReactorClock
from twisted.trial import unittest
from .. import producer as aProducer
from ..common import (PRODUCER_ACK_NOT_REQUIRED, BrokerNotAvailableError,
CancelledError, FailedPayloadsError,
LeaderNotAvailableError, NoResponseError,
NotLeaderForPartitionError, OffsetOutOfRangeError,
ProduceRequest, ProduceResponse,
UnknownTopicOrPartitionError, UnsupportedCodecError)
from ..kafkacodec import create_message_set
from ..producer import Producer
from .testutil import make_send_requests, random_string
# Module-level logger for this test module.
log = logging.getLogger(__name__)
class ProducerSendMessagesValidationTests(unittest.SynchronousTestCase):
    """
    Validate the argument checking performed by
    `afkak.producer.Producer.send_messages()`.

    :ivar producer: `Producer` constructed with default arguments.
    """
    def setUp(self):
        mock_client = Mock(reactor=MemoryReactorClock())
        self.producer = Producer(mock_client)
        self.addCleanup(self.producer.stop)

    def test_topic_type(self):
        """
        A non-text *topic* argument produces a `TypeError`.
        """
        d = self.producer.send_messages(1234, msgs=[b''])
        self.failureResultOf(d, TypeError)

    def test_topic_bytes(self):
        """
        On Python 3, a bytestring *topic* argument produces a `TypeError`.
        """
        if not six.PY3:
            raise unittest.SkipTest('str is bytes on Python 2')
        d = self.producer.send_messages(b'topic', msgs=[b''])
        self.failureResultOf(d, TypeError)

    def test_empty_messages(self):
        """
        Omitting *msgs*, or passing an empty sequence, produces a
        `ValueError`.
        """
        self.failureResultOf(self.producer.send_messages('topic'), ValueError)
        d = self.producer.send_messages('topic', msgs=[])
        self.failureResultOf(d, ValueError)

    def test_message_type(self):
        """
        Members of *msgs* that are neither `bytes` nor ``None`` produce a
        `TypeError`.
        """
        d = self.producer.send_messages('topic', msgs=[1, 2, 3])
        self.failureResultOf(d, TypeError)
        d = self.producer.send_messages('topic', msgs=[u'asdf'])
        self.failureResultOf(d, TypeError)

    def test_none_message(self):
        """
        ``None`` is accepted as a message. That only makes sense when a key
        accompanies it.
        """
        d = self.producer.send_messages('topic', key=b'key', msgs=[None])
        # Swallow the cancellation failure triggered by producer.stop().
        d.addErrback(lambda f: None)
        self.assertNoResult(d)

    def test_key_type(self):
        """
        A unicode *key* produces a `TypeError`; keys must be bytes.
        """
        d = self.producer.send_messages('topic', key=u'key', msgs=[b'msg'])
        self.failureResultOf(d, TypeError)
class TestAfkakProducer(unittest.TestCase):
    """
    Tests of `afkak.producer.Producer` against a mocked Kafka client,
    covering batching, retries, cancellation, and error handling.
    """
    # Cache of generated messages, keyed by caller-supplied label.
    _messages = {}
    # Per-test topic name; generated lazily in setUp().
    topic = None

    def msgs(self, iterable):
        """Return a message for each element of *iterable*."""
        return [self.msg(x) for x in iterable]

    def msg(self, s):
        """
        Return a unique bytes message for label *s*, stable within a test.
        """
        if s not in self._messages:
            self._messages[s] = b'%s-%s-%s' % (
                str(s).encode('utf-8'),
                self.id().encode('ascii'),
                str(uuid.uuid4()).encode('ascii'),
            )
        return self._messages[s]

    def setUp(self):
        super(TestAfkakProducer, self).setUp()
        if not self.topic:
            # Derive a unique topic name from the test id plus random suffix.
            self.topic = "{}-{}".format(
                self.id()[self.id().rindex(".") + 1:],
                random_string(10),
            )

    def test_producer_init_simplest(self):
        """A default Producer reports itself as unbatched in repr()."""
        producer = Producer(Mock())
        self.assertEqual(
            producer.__repr__(),
            "<Producer <class 'afkak.partitioner.RoundRobinPartitioner'>:"
            "Unbatched:1:1000>")
        producer.stop()

    def test_producer_init_batch(self):
        """A batching Producer starts (and stop() halts) its LoopingCall."""
        producer = Producer(Mock(reactor=MemoryReactorClock()), batch_send=True)
        looper = producer.sendLooper
        self.assertEqual(type(looper), LoopingCall)
        self.assertTrue(looper.running)
        producer.stop()
        self.assertFalse(looper.running)
        self.assertEqual(
            producer.__repr__(),
            "<Producer <class 'afkak.partitioner.RoundRobinPartitioner'>:"
            "10cnt/32768bytes/30secs:1:1000>")

    def test_producer_bad_codec_value(self):
        """An unknown numeric codec raises UnsupportedCodecError."""
        with self.assertRaises(UnsupportedCodecError):
            Producer(Mock(), codec=99)

    def test_producer_bad_codec_type(self):
        """A non-integral codec raises TypeError."""
        with self.assertRaises(TypeError):
            Producer(Mock(), codec='bogus')

    def test_producer_send_messages(self):
        """An unbatched send produces one request and relays the response."""
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop()

    def test_producer_send_messages_keyed(self):
        """
        Test that messages sent with a key are actually sent with that key
        """
        first_part = 43
        second_part = 56
        client = Mock(reactor=MemoryReactorClock())
        ret1 = Deferred()
        client.send_produce_request.side_effect = [ret1]
        client.topic_partitions = {self.topic: [first_part, second_part, 102]}
        client.metadata_error_for_topic.return_value = False
        msgs1 = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        key1 = b'35'
        key2 = b'foo'
        ack_timeout = 5

        # Even though we're sending keyed messages, we use the default
        # round-robin partitioner, since the requests are easier to predict
        producer = Producer(client, ack_timeout=ack_timeout, batch_send=True,
                            batch_every_n=4)
        d1 = producer.send_messages(self.topic, key=key1, msgs=msgs1)
        d2 = producer.send_messages(self.topic, key=key2, msgs=msgs2)
        # Check the expected request was sent
        msgSet1 = create_message_set(
            make_send_requests(msgs1, key=key1), producer.codec)
        msgSet2 = create_message_set(
            make_send_requests(msgs2, key=key2), producer.codec)
        req1 = ProduceRequest(self.topic, first_part, msgSet1)
        req2 = ProduceRequest(self.topic, second_part, msgSet2)
        # Annoying, but order of requests is indeterminate...
        client.send_produce_request.assert_called_once_with(
            ANY, acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        self.assertEqual(sorted([req1, req2]),
                         sorted(client.send_produce_request.call_args[0][0]))
        # Check results when "response" fires
        self.assertNoResult(d1)
        self.assertNoResult(d2)
        resp = [ProduceResponse(self.topic, first_part, 0, 10),
                ProduceResponse(self.topic, second_part, 0, 23)]
        ret1.callback(resp)
        result = self.successResultOf(d1)
        self.assertEqual(result, resp[0])
        result = self.successResultOf(d2)
        self.assertEqual(result, resp[1])
        producer.stop()

    def test_producer_send_messages_keyed_same_partition(self):
        """
        Test that messages sent with a key are actually sent with that key,
        even if they go to the same topic/partition (batching preserves keys)
        """
        first_part = 43
        second_part = 55
        client = Mock(reactor=MemoryReactorClock())
        ret1 = Deferred()
        client.send_produce_request.side_effect = [ret1]
        client.topic_partitions = {self.topic: [first_part, second_part]}
        client.metadata_error_for_topic.return_value = False
        msgs1 = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("odd_man_out")]
        msgs3 = [self.msg("three"), self.msg("four")]
        key1 = b'99'
        key3 = b'foo'
        ack_timeout = 5

        # Even though we're sending keyed messages, we use the default
        # round-robin partitioner, since the requests are easier to predict
        producer = Producer(client, ack_timeout=ack_timeout, batch_send=True,
                            batch_every_n=4)
        d1 = producer.send_messages(self.topic, key=key1, msgs=msgs1)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        d3 = producer.send_messages(self.topic, key=key3, msgs=msgs3)
        # Check the expected request was sent
        msgSet1 = create_message_set(
            [make_send_requests(msgs1, key=key1)[0],
             make_send_requests(msgs3, key=key3)[0]], producer.codec)
        msgSet2 = create_message_set(make_send_requests(
            msgs2), producer.codec)
        req1 = ProduceRequest(self.topic, first_part, msgSet1)
        req2 = ProduceRequest(self.topic, second_part, msgSet2)
        # Annoying, but order of requests is indeterminate...
        client.send_produce_request.assert_called_once_with(
            ANY, acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        self.assertEqual(sorted([req1, req2]),
                         sorted(client.send_produce_request.call_args[0][0]))
        # Check results when "response" fires
        self.assertNoResult(d1)
        self.assertNoResult(d2)
        self.assertNoResult(d3)
        resp = [ProduceResponse(self.topic, first_part, 0, 10),
                ProduceResponse(self.topic, second_part, 0, 23)]
        ret1.callback(resp)
        result = self.successResultOf(d1)
        self.assertEqual(result, resp[0])
        result = self.successResultOf(d2)
        self.assertEqual(result, resp[1])
        result = self.successResultOf(d3)
        self.assertEqual(result, resp[0])
        producer.stop()

    def test_producer_send_messages_no_acks(self):
        """With acks disabled, an empty broker response yields None."""
        first_part = 19
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout,
                            req_acks=PRODUCER_ACK_NOT_REQUIRED)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        ret.callback([])
        result = self.successResultOf(d)
        self.assertEqual(result, None)
        producer.stop()

    def test_producer_send_messages_no_retry_fail(self):
        """With max_req_attempts=1, a broker failure is returned directly."""
        client = Mock(reactor=MemoryReactorClock())
        f = Failure(BrokerNotAvailableError())
        client.send_produce_request.side_effect = [fail(f)]
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client, max_req_attempts=1)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, 0, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        self.failureResultOf(d, BrokerNotAvailableError)
        producer.stop()

    def test_producer_send_messages_unexpected_err(self):
        """Unexpected (non-Kafka) failures are logged and propagated."""
        client = Mock(reactor=MemoryReactorClock())
        f = Failure(TypeError())
        client.send_produce_request.side_effect = [fail(f)]
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client)
        with patch.object(aProducer, 'log') as klog:
            d = producer.send_messages(self.topic, msgs=msgs)
            klog.error.assert_called_once_with(
                'Unexpected failure: %r in _handle_send_response', f)
        self.failureResultOf(d, TypeError)
        producer.stop()

    def test_producer_send_messages_None_for_null_msg(self):
        """A None entry in msgs is passed through as a null message."""
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), None, self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop()

    def test_producer_complete_batch_send_unexpected_error(self):
        # Purely for coverage
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        e = ValueError('test_producer_complete_batch_send_unexpected_error')
        client.send_produce_request.side_effect = e
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client)
        with patch.object(aProducer, 'log') as klog:
            producer.send_messages(self.topic, msgs=msgs)
            # The error 'e' gets wrapped in a failure with a traceback, so
            # we can't easily match the call exactly...
            klog.error.assert_called_once_with(
                'Failure detected in _complete_batch_send: %r\n%r', ANY, ANY)
        producer.stop()

    def test_producer_send_messages_batched(self):
        """A failed batch send is retried after the retry interval."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), succeed([ProduceResponse(self.topic, 0, 0, 10)])]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, ANY, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)
        self.successResultOf(d)
        producer.stop()

    def test_producer_send_messages_batched_partial_success(self):
        """
        This tests the complexity of the error handling for a single batch
        request.

        Scenario: The producer's caller sends 5 requests to two (total) topics
                  The client's metadata is such that the producer will produce
                      requests to post msgs to 5 separate topic/partition tuples
                  The batch size is reached, so the producer sends the request
                  The caller then cancels one of the requests
                  The (mock) client returns partial success in the form of a
                      FailedPayloadsError.
                  The Producer then should return the successful results and
                      retry the failed.
                  The (mock) client then "succeeds" the remaining results.
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        topic2 = u'tpsmbps_two'
        client.topic_partitions = {self.topic: [0, 1, 2, 3], topic2: [4, 5, 6]}
        client.metadata_error_for_topic.return_value = False

        init_resp = [ProduceResponse(self.topic, 0, 0, 10),
                     ProduceResponse(self.topic, 1, 6, 20),
                     ProduceResponse(topic2, 5, 0, 30),
                     ]
        next_resp = [ProduceResponse(self.topic, 2, 0, 10),
                     ProduceResponse(self.topic, 1, 0, 20),
                     ProduceResponse(topic2, 4, 0, 30),
                     ]
        failed_payloads = [(ProduceRequest(self.topic, ANY, ANY),
                            NotLeaderForPartitionError()),
                           (ProduceRequest(topic2, ANY, ANY),
                            BrokerNotAvailableError()),
                           ]

        f = Failure(FailedPayloadsError(init_resp, failed_payloads))
        ret = [fail(f), succeed(next_resp)]
        client.send_produce_request.side_effect = ret

        msgs = self.msgs(range(10))
        results = []

        producer = Producer(client, batch_send=True, batch_every_t=0)
        # Send 5 total requests: 4 here, one after we make sure we didn't
        # send early
        results.append(producer.send_messages(self.topic, msgs=msgs[0:3]))
        results.append(producer.send_messages(topic2, msgs=msgs[3:5]))
        results.append(producer.send_messages(self.topic, msgs=msgs[5:8]))
        results.append(producer.send_messages(topic2, msgs=msgs[8:9]))
        # No call yet, not enough messages
        self.assertFalse(client.send_produce_request.called)
        # Enough messages to start the request
        results.append(producer.send_messages(self.topic, msgs=msgs[9:10]))
        # Before the retry, there should be some results
        self.assertEqual(init_resp[0], self.successResultOf(results[0]))
        self.assertEqual(init_resp[2], self.successResultOf(results[3]))
        # Advance the clock
        clock.advance(producer._retry_interval)
        # Check the other results came in
        self.assertEqual(next_resp[0], self.successResultOf(results[4]))
        self.assertEqual(next_resp[1], self.successResultOf(results[2]))
        self.assertEqual(next_resp[2], self.successResultOf(results[1]))
        producer.stop()

    def test_producer_send_messages_batched_fail(self):
        """After max_req_attempts failures, the last error is returned."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        ret = [Deferred(), Deferred(), Deferred()]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_t = 5

        producer = Producer(client, batch_every_t=batch_t, batch_send=True,
                            max_req_attempts=3)
        # Advance the clock to ensure when no messages to send no error
        clock.advance(batch_t)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check no request was yet sent
        self.assertFalse(client.send_produce_request.called)
        # Advance the clock
        clock.advance(batch_t)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, 0, msgSet)
        produce_request_call = call([req], acks=producer.req_acks,
                                    timeout=producer.ack_timeout,
                                    fail_on_error=False)
        produce_request_calls = [produce_request_call]
        client.send_produce_request.assert_has_calls(produce_request_calls)
        self.assertNoResult(d)
        # Fire the failure from the first request to the client
        ret[0].errback(OffsetOutOfRangeError(
            'test_producer_send_messages_batched_fail'))
        # Still no result, producer should retry first
        self.assertNoResult(d)
        # Check retry wasn't immediate
        self.assertEqual(client.send_produce_request.call_count, 1)
        # Advance the clock by the retry delay
        clock.advance(producer._retry_interval)
        # Check 2nd send_produce_request (1st retry) was sent
        produce_request_calls.append(produce_request_call)
        client.send_produce_request.assert_has_calls(produce_request_calls)
        # Fire the failure from the 2nd request to the client
        ret[1].errback(BrokerNotAvailableError(
            'test_producer_send_messages_batched_fail_2'))
        # Still no result, producer should retry one more time
        self.assertNoResult(d)
        # Advance the clock by the retry delay
        clock.advance(producer._retry_interval * 1.1)
        # Check 3rd send_produce_request (2nd retry) was sent
        produce_request_calls.append(produce_request_call)
        client.send_produce_request.assert_has_calls(produce_request_calls)
        # Fire the failure from the 3rd request to the client
        ret[2].errback(LeaderNotAvailableError(
            'test_producer_send_messages_batched_fail_3'))
        self.failureResultOf(d, LeaderNotAvailableError)
        producer.stop()

    def test_producer_cancel_request_in_batch(self):
        # Test cancelling a request before it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 3

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        producer.stop()

    def test_producer_cancel_request_in_batch_None_for_null_msg(self):
        # Test cancelling a request before it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 3

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        producer.stop()

    def test_producer_cancel_getting_topic(self):
        # Test cancelling while waiting to retry getting metadata
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        client.topic_partitions = {}  # start with no metadata
        rets = [Deferred(), Deferred()]
        client.load_metadata_for_topics.side_effect = rets
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Fire the result of load_metadata_for_topics, but
        # metadata_error_for_topic is still True, so it'll retry after delay
        # Advance the clock, some, but not enough to retry
        rets[0].callback(None)
        # Advance to partway thru the delay
        clock.advance(producer._retry_interval / 2)

        # Cancel the request and make sure we got the CancelledError
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)

        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        rets[1].callback(None)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)

        # Advance the clock again to complete the delay
        clock.advance(producer._retry_interval)
        # Make sure the retry got reset
        self.assertEqual(producer._retry_interval,
                         producer._init_retry_interval)
        producer.stop()

    def test_producer_cancel_one_request_getting_topic(self):
        # Test cancelling a request after it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {}
        ret = Deferred()
        client.load_metadata_for_topics.return_value = ret
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 4

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # This will trigger the metadata lookup
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        ret.callback(None)
        # Expect that only the msgs2 messages were sent
        msgSet = create_message_set(
            make_send_requests(msgs2), producer.codec)
        req = ProduceRequest(self.topic, 1, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        producer.stop()

    def test_producer_stop_during_request(self):
        """
        Test stopping producer while it's waiting for reply from client
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), Deferred()]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)

        producer.stop()
        self.failureResultOf(d, tid_CancelledError)

    def test_producer_stop_waiting_to_retry(self):
        """
        Test stopping producer while it's waiting to retry a request
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f)]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        # Advance the clock, some, but not enough to retry
        clock.advance(producer._retry_interval / 2)

        # Stop the producer before the retry
        producer.stop()
        self.failureResultOf(d, tid_CancelledError)

    def test_producer_send_messages_unknown_topic(self):
        """Metadata lookup exhausts its retries for an unknown topic."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        ds = [Deferred() for _ in range(Producer.DEFAULT_REQ_ATTEMPTS)]
        client.load_metadata_for_topics.side_effect = ds
        client.metadata_error_for_topic.return_value = 3
        client.topic_partitions = {}
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # d is waiting on result from ds[0] for load_metadata_for_topics
        self.assertNoResult(d)
        # fire it with client still reporting no metadata for topic
        # The producer will retry the lookup DEFAULT_REQ_ATTEMPTS times...
        for i in range(Producer.DEFAULT_REQ_ATTEMPTS):
            ds[i].callback(None)
            # And then wait producer._retry_interval for a call back...
            clock.advance(producer._retry_interval + 0.01)
        self.failureResultOf(d, UnknownTopicOrPartitionError)
        self.assertFalse(client.send_produce_request.called)
        producer.stop()

    def test_producer_send_messages_bad_response(self):
        """An empty acked response yields NoResponseError."""
        first_part = 68
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        ret.callback([])
        self.failureResultOf(d, NoResponseError)
        producer.stop()

    def test_producer_send_timer_failed(self):
        """
        Test that the looping call is restarted when _send_batch errs

        Somewhat artificial test to confirm that when failures occur in
        _send_batch (which cause the looping call to terminate) that the
        looping call is restarted.
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        batch_t = 5

        with patch.object(aProducer, 'log') as klog:
            producer = Producer(client, batch_send=True, batch_every_t=batch_t)
            msgs = [self.msg("one"), self.msg("two")]
            d = producer.send_messages(self.topic, msgs=msgs)
            # Check no request was yet sent
            self.assertFalse(client.send_produce_request.called)
            # Patch Producer's Deferred to throw an exception
            with patch.object(aProducer, 'Deferred') as d:
                d.side_effect = ValueError(
                    "test_producer_send_timer_failed induced failure")
                # Advance the clock
                clock.advance(batch_t)
            # Check the expected message was logged by the looping call restart
            klog.warning.assert_called_once_with('_send_timer_failed:%r: %s',
                                                ANY, ANY)
        # Check that the looping call was restarted
        self.assertTrue(producer.sendLooper.running)
        producer.stop()

    def test_producer_send_timer_stopped_error(self):
        # Purely for coverage
        client = Mock(reactor=MemoryReactorClock())
        producer = Producer(client, batch_send=True)
        with patch.object(aProducer, 'log') as klog:
            producer._send_timer_stopped('Borg')
            klog.warning.assert_called_once_with(
                'commitTimerStopped with wrong timer:%s not:%s', 'Borg',
                producer.sendLooper)
        producer.stop()

    def test_producer_non_integral_batch_every_n(self):
        """A non-int batch_every_n raises TypeError at construction."""
        client = Mock(reactor=MemoryReactorClock())
        with self.assertRaises(TypeError):
            producer = Producer(client, batch_send=True, batch_every_n="10")
            producer.__repr__()  # pragma: no cover # STFU pyflakes

    def test_producer_non_integral_batch_every_b(self):
        """A non-int batch_every_b raises TypeError at construction."""
        client = Mock(reactor=MemoryReactorClock())
        with self.assertRaises(TypeError):
            producer = Producer(client, batch_send=True, batch_every_b="10")
            producer.__repr__()  # pragma: no cover # STFU pyflakes
Add missing test coverage
# -*- coding: utf-8 -*-
# Copyright 2015 Cyan, Inc.
# Copyright 2018 Ciena Corporation
import logging
import uuid
from mock import ANY, Mock, call, patch
import six
from twisted.internet.defer import CancelledError as tid_CancelledError
from twisted.internet.defer import Deferred, fail, succeed
from twisted.internet.task import LoopingCall
from twisted.python.failure import Failure
from twisted.test.proto_helpers import MemoryReactorClock
from twisted.trial import unittest
from .. import producer as aProducer
from ..common import (PRODUCER_ACK_NOT_REQUIRED, BrokerNotAvailableError,
CancelledError, FailedPayloadsError,
LeaderNotAvailableError, NoResponseError,
NotLeaderForPartitionError, OffsetOutOfRangeError,
ProduceRequest, ProduceResponse,
UnknownTopicOrPartitionError, UnsupportedCodecError)
from ..kafkacodec import create_message_set
from ..producer import Producer
from .testutil import make_send_requests, random_string
# Module-level logger for this test module.
log = logging.getLogger(__name__)
class ProducerSendMessagesValidationTests(unittest.SynchronousTestCase):
    """
    Test the validation `afkak.producer.Producer.send_messages()` applies to
    its arguments.

    :ivar producer: `Producer` with default arguments.
    """
    def setUp(self):
        # A mock client suffices: validation happens before any I/O.
        client = Mock(reactor=MemoryReactorClock())
        self.producer = Producer(client)
        self.addCleanup(self.producer.stop)

    def test_topic_type(self):
        """
        `TypeError` results when the *topic* argument is not text.
        """
        self.failureResultOf(self.producer.send_messages(1234, msgs=[b'']), TypeError)

    def test_topic_bytes(self):
        """
        `TypeError` results when the *topic* argument is a bytestring on Python 3.
        """
        if not six.PY3:
            raise unittest.SkipTest('str is bytes on Python 2')
        self.failureResultOf(self.producer.send_messages(b'topic', msgs=[b'']), TypeError)

    def test_empty_messages(self):
        """
        `ValueError` results when the *msgs* argument is not passed or is
        empty.
        """
        self.failureResultOf(self.producer.send_messages('topic'), ValueError)
        self.failureResultOf(self.producer.send_messages('topic', msgs=[]), ValueError)

    def test_message_type(self):
        """
        `TypeError` results when members of the *msgs* sequence are not
        `bytes` or ``None``.
        """
        self.failureResultOf(self.producer.send_messages('topic', msgs=[1, 2, 3]), TypeError)
        self.failureResultOf(self.producer.send_messages('topic', msgs=[u'asdf']), TypeError)

    def test_none_message(self):
        """
        A message may be ``None``. This doesn't make much sense unless there is
        also a key.
        """
        d = self.producer.send_messages('topic', key=b'key', msgs=[None])
        d.addErrback(lambda f: None)  # Handle the cancellation failure from producer.stop().
        self.assertNoResult(d)

    def test_key_type(self):
        """
        The key must not be unicode, but bytes.
        """
        self.failureResultOf(self.producer.send_messages('topic', key=u'key', msgs=[b'msg']), TypeError)
class TestAfkakProducer(unittest.TestCase):
    """
    Unit tests for `afkak.producer.Producer` driven against a mocked
    client and a `MemoryReactorClock`; no real Kafka broker is used.
    """
    # Cache of generated messages, keyed by the seed passed to msg().
    _messages = {}
    # Per-test unique topic name; populated lazily in setUp().
    topic = None
    def msgs(self, iterable):
        """Return a list with one unique test message per item."""
        return [self.msg(x) for x in iterable]
    def msg(self, s):
        """
        Return a unique message (bytes) for seed *s*, stable for the
        duration of this test run.
        """
        if s not in self._messages:
            self._messages[s] = b'%s-%s-%s' % (
                str(s).encode('utf-8'),
                self.id().encode('ascii'),
                str(uuid.uuid4()).encode('ascii'),
            )
        return self._messages[s]
    def setUp(self):
        super(TestAfkakProducer, self).setUp()
        if not self.topic:
            # Derive a unique topic from the test method name plus
            # a random suffix so tests cannot collide.
            self.topic = "{}-{}".format(
                self.id()[self.id().rindex(".") + 1:],
                random_string(10),
            )
    def test_producer_init_simplest(self):
        """A default producer reprs as unbatched."""
        producer = Producer(Mock())
        self.assertEqual(
            producer.__repr__(),
            "<Producer <class 'afkak.partitioner.RoundRobinPartitioner'>:"
            "Unbatched:1:1000>")
        producer.stop()
    def test_producer_init_batch(self):
        """Batch mode starts a LoopingCall which stop() terminates."""
        producer = Producer(Mock(reactor=MemoryReactorClock()), batch_send=True)
        looper = producer.sendLooper
        self.assertEqual(type(looper), LoopingCall)
        self.assertTrue(looper.running)
        producer.stop()
        self.assertFalse(looper.running)
        self.assertEqual(
            producer.__repr__(),
            "<Producer <class 'afkak.partitioner.RoundRobinPartitioner'>:"
            "10cnt/32768bytes/30secs:1:1000>")
    def test_producer_bad_codec_value(self):
        """An unknown numeric codec raises UnsupportedCodecError."""
        with self.assertRaises(UnsupportedCodecError):
            Producer(Mock(), codec=99)
    def test_producer_bad_codec_type(self):
        """A non-integer codec raises TypeError."""
        with self.assertRaises(TypeError):
            Producer(Mock(), codec='bogus')
    def test_producer_send_messages(self):
        """A single unbatched send produces one request and one result."""
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5
        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop()
    def test_producer_send_messages_keyed(self):
        """
        Test that messages sent with a key are actually sent with that key
        """
        first_part = 43
        second_part = 56
        client = Mock(reactor=MemoryReactorClock())
        ret1 = Deferred()
        client.send_produce_request.side_effect = [ret1]
        client.topic_partitions = {self.topic: [first_part, second_part, 102]}
        client.metadata_error_for_topic.return_value = False
        msgs1 = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        key1 = b'35'
        key2 = b'foo'
        ack_timeout = 5
        # Even though we're sending keyed messages, we use the default
        # round-robin partitioner, since the requests are easier to predict
        producer = Producer(client, ack_timeout=ack_timeout, batch_send=True,
                            batch_every_n=4)
        d1 = producer.send_messages(self.topic, key=key1, msgs=msgs1)
        d2 = producer.send_messages(self.topic, key=key2, msgs=msgs2)
        # Check the expected request was sent
        msgSet1 = create_message_set(
            make_send_requests(msgs1, key=key1), producer.codec)
        msgSet2 = create_message_set(
            make_send_requests(msgs2, key=key2), producer.codec)
        req1 = ProduceRequest(self.topic, first_part, msgSet1)
        req2 = ProduceRequest(self.topic, second_part, msgSet2)
        # Annoying, but order of requests is indeterminate...
        client.send_produce_request.assert_called_once_with(
            ANY, acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        self.assertEqual(sorted([req1, req2]),
                         sorted(client.send_produce_request.call_args[0][0]))
        # Check results when "response" fires
        self.assertNoResult(d1)
        self.assertNoResult(d2)
        resp = [ProduceResponse(self.topic, first_part, 0, 10),
                ProduceResponse(self.topic, second_part, 0, 23)]
        ret1.callback(resp)
        result = self.successResultOf(d1)
        self.assertEqual(result, resp[0])
        result = self.successResultOf(d2)
        self.assertEqual(result, resp[1])
        producer.stop()
    def test_producer_send_messages_keyed_same_partition(self):
        """
        Test that messages sent with a key are actually sent with that key,
        even if they go to the same topic/partition (batching preserves keys)
        """
        first_part = 43
        second_part = 55
        client = Mock(reactor=MemoryReactorClock())
        ret1 = Deferred()
        client.send_produce_request.side_effect = [ret1]
        client.topic_partitions = {self.topic: [first_part, second_part]}
        client.metadata_error_for_topic.return_value = False
        msgs1 = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("odd_man_out")]
        msgs3 = [self.msg("three"), self.msg("four")]
        key1 = b'99'
        key3 = b'foo'
        ack_timeout = 5
        # Even though we're sending keyed messages, we use the default
        # round-robin partitioner, since the requests are easier to predict
        producer = Producer(client, ack_timeout=ack_timeout, batch_send=True,
                            batch_every_n=4)
        d1 = producer.send_messages(self.topic, key=key1, msgs=msgs1)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        d3 = producer.send_messages(self.topic, key=key3, msgs=msgs3)
        # Check the expected request was sent
        msgSet1 = create_message_set(
            [make_send_requests(msgs1, key=key1)[0],
             make_send_requests(msgs3, key=key3)[0]], producer.codec)
        msgSet2 = create_message_set(make_send_requests(
            msgs2), producer.codec)
        req1 = ProduceRequest(self.topic, first_part, msgSet1)
        req2 = ProduceRequest(self.topic, second_part, msgSet2)
        # Annoying, but order of requests is indeterminate...
        client.send_produce_request.assert_called_once_with(
            ANY, acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        self.assertEqual(sorted([req1, req2]),
                         sorted(client.send_produce_request.call_args[0][0]))
        # Check results when "response" fires
        self.assertNoResult(d1)
        self.assertNoResult(d2)
        self.assertNoResult(d3)
        resp = [ProduceResponse(self.topic, first_part, 0, 10),
                ProduceResponse(self.topic, second_part, 0, 23)]
        ret1.callback(resp)
        result = self.successResultOf(d1)
        self.assertEqual(result, resp[0])
        result = self.successResultOf(d2)
        self.assertEqual(result, resp[1])
        result = self.successResultOf(d3)
        self.assertEqual(result, resp[0])
        producer.stop()
    def test_producer_send_messages_no_acks(self):
        """With no acks required, the send fires with a None result."""
        first_part = 19
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5
        producer = Producer(client, ack_timeout=ack_timeout,
                            req_acks=PRODUCER_ACK_NOT_REQUIRED)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        ret.callback([])
        result = self.successResultOf(d)
        self.assertEqual(result, None)
        producer.stop()
    def test_producer_send_messages_no_retry_fail(self):
        """With max_req_attempts=1 a failed request is not retried."""
        client = Mock(reactor=MemoryReactorClock())
        f = Failure(BrokerNotAvailableError())
        client.send_produce_request.side_effect = [fail(f)]
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        producer = Producer(client, max_req_attempts=1)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, 0, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        self.failureResultOf(d, BrokerNotAvailableError)
        producer.stop()
    def test_producer_send_messages_unexpected_err(self):
        """A non-Kafka failure from the client is logged and propagated."""
        client = Mock(reactor=MemoryReactorClock())
        f = Failure(TypeError())
        client.send_produce_request.side_effect = [fail(f)]
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        producer = Producer(client)
        with patch.object(aProducer, 'log') as klog:
            d = producer.send_messages(self.topic, msgs=msgs)
            klog.error.assert_called_once_with(
                'Unexpected failure: %r in _handle_send_response', f)
        self.failureResultOf(d, TypeError)
        producer.stop()
    def test_producer_send_messages_None_for_null_msg(self):
        """A None member of msgs is passed through as a null message."""
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), None, self.msg("two")]
        ack_timeout = 5
        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop()
    def test_producer_complete_batch_send_unexpected_error(self):
        # Purely for coverage
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        e = ValueError('test_producer_complete_batch_send_unexpected_error')
        client.send_produce_request.side_effect = e
        msgs = [self.msg("one"), self.msg("two")]
        producer = Producer(client)
        with patch.object(aProducer, 'log') as klog:
            producer.send_messages(self.topic, msgs=msgs)
            # The error 'e' gets wrapped in a failure with a traceback, so
            # we can't easily match the call exactly...
            klog.error.assert_called_once_with(
                'Failure detected in _complete_batch_send: %r\n%r', ANY, ANY)
        producer.stop()
    def test_producer_send_messages_batched(self):
        """A batched send retries after an initial failure and succeeds."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), succeed([ProduceResponse(self.topic, 0, 0, 10)])]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, ANY, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)
        self.successResultOf(d)
        producer.stop()
    def test_producer_send_messages_batched_partial_success(self):
        """
        This tests the complexity of the error handling for a single batch
        request.
        Scenario: The producer's caller sends 5 requests to two (total) topics
                  The client's metadata is such that the producer will produce
                    requests to post msgs to 5 separate topic/partition tuples
                  The batch size is reached, so the producer sends the request
                  The caller then cancels one of the requests
                  The (mock) client returns partial success in the form of a
                    FailedPayloadsError.
                  The Producer then should return the successful results and
                    retry the failed.
                  The (mock) client then "succeeds" the remaining results.
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        topic2 = u'tpsmbps_two'
        client.topic_partitions = {self.topic: [0, 1, 2, 3], topic2: [4, 5, 6]}
        client.metadata_error_for_topic.return_value = False
        init_resp = [ProduceResponse(self.topic, 0, 0, 10),
                     ProduceResponse(self.topic, 1, 6, 20),
                     ProduceResponse(topic2, 5, 0, 30),
                     ]
        next_resp = [ProduceResponse(self.topic, 2, 0, 10),
                     ProduceResponse(self.topic, 1, 0, 20),
                     ProduceResponse(topic2, 4, 0, 30),
                     ]
        failed_payloads = [(ProduceRequest(self.topic, ANY, ANY),
                            NotLeaderForPartitionError()),
                           (ProduceRequest(topic2, ANY, ANY),
                            BrokerNotAvailableError()),
                           ]
        f = Failure(FailedPayloadsError(init_resp, failed_payloads))
        ret = [fail(f), succeed(next_resp)]
        client.send_produce_request.side_effect = ret
        msgs = self.msgs(range(10))
        results = []
        producer = Producer(client, batch_send=True, batch_every_t=0)
        # Send 5 total requests: 4 here, one after we make sure we didn't
        # send early
        results.append(producer.send_messages(self.topic, msgs=msgs[0:3]))
        results.append(producer.send_messages(topic2, msgs=msgs[3:5]))
        results.append(producer.send_messages(self.topic, msgs=msgs[5:8]))
        results.append(producer.send_messages(topic2, msgs=msgs[8:9]))
        # No call yet, not enough messages
        self.assertFalse(client.send_produce_request.called)
        # Enough messages to start the request
        client.reset_topic_metadata.reset_mock()
        results.append(producer.send_messages(self.topic, msgs=msgs[9:10]))
        # Before the retry, there should be some results
        self.assertEqual(init_resp[0], self.successResultOf(results[0]))
        self.assertEqual(init_resp[2], self.successResultOf(results[3]))
        # And the errors should have forced a metadata reset on one of the topics.
        client.reset_topic_metadata.assert_called_with(self.topic)
        # Advance the clock to trigger retries.
        clock.advance(producer._retry_interval)
        # Check the other results came in
        self.assertEqual(next_resp[0], self.successResultOf(results[4]))
        self.assertEqual(next_resp[1], self.successResultOf(results[2]))
        self.assertEqual(next_resp[2], self.successResultOf(results[1]))
        producer.stop()
    def test_producer_send_messages_batched_fail(self):
        """A batched send fails after exhausting max_req_attempts."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        ret = [Deferred(), Deferred(), Deferred()]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_t = 5
        producer = Producer(client, batch_every_t=batch_t, batch_send=True,
                            max_req_attempts=3)
        # Advance the clock to ensure when no messages to send no error
        clock.advance(batch_t)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check no request was yet sent
        self.assertFalse(client.send_produce_request.called)
        # Advance the clock
        clock.advance(batch_t)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, 0, msgSet)
        produce_request_call = call([req], acks=producer.req_acks,
                                    timeout=producer.ack_timeout,
                                    fail_on_error=False)
        produce_request_calls = [produce_request_call]
        client.send_produce_request.assert_has_calls(produce_request_calls)
        self.assertNoResult(d)
        # Fire the failure from the first request to the client
        ret[0].errback(OffsetOutOfRangeError(
            'test_producer_send_messages_batched_fail'))
        # Still no result, producer should retry first
        self.assertNoResult(d)
        # Check retry wasn't immediate
        self.assertEqual(client.send_produce_request.call_count, 1)
        # Advance the clock by the retry delay
        clock.advance(producer._retry_interval)
        # Check 2nd send_produce_request (1st retry) was sent
        produce_request_calls.append(produce_request_call)
        client.send_produce_request.assert_has_calls(produce_request_calls)
        # Fire the failure from the 2nd request to the client
        ret[1].errback(BrokerNotAvailableError(
            'test_producer_send_messages_batched_fail_2'))
        # Still no result, producer should retry one more time
        self.assertNoResult(d)
        # Advance the clock by the retry delay
        clock.advance(producer._retry_interval * 1.1)
        # Check 3rd send_produce_request (2nd retry) was sent
        produce_request_calls.append(produce_request_call)
        client.send_produce_request.assert_has_calls(produce_request_calls)
        # Fire the failure from the 3rd request to the client
        ret[2].errback(LeaderNotAvailableError(
            'test_producer_send_messages_batched_fail_3'))
        self.failureResultOf(d, LeaderNotAvailableError)
        producer.stop()
    def test_producer_cancel_request_in_batch(self):
        # Test cancelling a request before it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 3
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        producer.stop()
    def test_producer_cancel_request_in_batch_None_for_null_msg(self):
        # Test cancelling a request before it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 3
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        producer.stop()
    def test_producer_cancel_getting_topic(self):
        # Test cancelling while waiting to retry getting metadata
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        client.topic_partitions = {}  # start with no metadata
        rets = [Deferred(), Deferred()]
        client.load_metadata_for_topics.side_effect = rets
        msgs = [self.msg("one"), self.msg("two")]
        producer = Producer(client)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Fire the result of load_metadata_for_topics, but
        # metadata_error_for_topic is still True, so it'll retry after delay
        # Advance the clock, some, but not enough to retry
        rets[0].callback(None)
        # Advance to partway thru the delay
        clock.advance(producer._retry_interval / 2)
        # Cancel the request and make sure we got the CancelledError
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        rets[1].callback(None)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Advance the clock again to complete the delay
        clock.advance(producer._retry_interval)
        # Make sure the retry got reset
        self.assertEqual(producer._retry_interval,
                         producer._init_retry_interval)
        producer.stop()
    def test_producer_cancel_one_request_getting_topic(self):
        # Test cancelling a request after it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {}
        ret = Deferred()
        client.load_metadata_for_topics.return_value = ret
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 4
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # This will trigger the metadata lookup
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        ret.callback(None)
        # Expect that only the msgs2 messages were sent
        msgSet = create_message_set(
            make_send_requests(msgs2), producer.codec)
        req = ProduceRequest(self.topic, 1, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        producer.stop()
    def test_producer_stop_during_request(self):
        """
        Test stopping producer while it's waiting for reply from client
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), Deferred()]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)
        producer.stop()
        self.failureResultOf(d, tid_CancelledError)
    def test_producer_stop_waiting_to_retry(self):
        """
        Test stopping producer while it's waiting to retry a request
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f)]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2
        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        # Advance the clock, some, but not enough to retry
        clock.advance(producer._retry_interval / 2)
        # Stop the producer before the retry
        producer.stop()
        self.failureResultOf(d, tid_CancelledError)
    def test_producer_send_messages_unknown_topic(self):
        """Metadata lookup failures eventually fail the send."""
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        ds = [Deferred() for _ in range(Producer.DEFAULT_REQ_ATTEMPTS)]
        client.load_metadata_for_topics.side_effect = ds
        client.metadata_error_for_topic.return_value = 3
        client.topic_partitions = {}
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5
        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # d is waiting on result from ds[0] for load_metadata_for_topics
        self.assertNoResult(d)
        # fire it with client still reporting no metadata for topic
        # The producer will retry the lookup DEFAULT_REQ_ATTEMPTS times...
        for i in range(Producer.DEFAULT_REQ_ATTEMPTS):
            ds[i].callback(None)
            # And then wait producer._retry_interval for a call back...
            clock.advance(producer._retry_interval + 0.01)
        self.failureResultOf(d, UnknownTopicOrPartitionError)
        self.assertFalse(client.send_produce_request.called)
        producer.stop()
    def test_producer_send_messages_bad_response(self):
        """An empty response list fails the send with NoResponseError."""
        first_part = 68
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5
        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        ret.callback([])
        self.failureResultOf(d, NoResponseError)
        producer.stop()
    def test_producer_send_timer_failed(self):
        """
        Test that the looping call is restarted when _send_batch errs
        Somewhat artificial test to confirm that when failures occur in
        _send_batch (which cause the looping call to terminate) that the
        looping call is restarted.
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        batch_t = 5
        with patch.object(aProducer, 'log') as klog:
            producer = Producer(client, batch_send=True, batch_every_t=batch_t)
            msgs = [self.msg("one"), self.msg("two")]
            d = producer.send_messages(self.topic, msgs=msgs)
            # Check no request was yet sent
            self.assertFalse(client.send_produce_request.called)
            # Patch Producer's Deferred to throw an exception
            with patch.object(aProducer, 'Deferred') as d:
                d.side_effect = ValueError(
                    "test_producer_send_timer_failed induced failure")
                # Advance the clock
                clock.advance(batch_t)
            # Check the expected message was logged by the looping call restart
            klog.warning.assert_called_once_with('_send_timer_failed:%r: %s',
                                                 ANY, ANY)
        # Check that the looping call was restarted
        self.assertTrue(producer.sendLooper.running)
        producer.stop()
    def test_producer_send_timer_stopped_error(self):
        # Purely for coverage
        client = Mock(reactor=MemoryReactorClock())
        producer = Producer(client, batch_send=True)
        with patch.object(aProducer, 'log') as klog:
            producer._send_timer_stopped('Borg')
            klog.warning.assert_called_once_with(
                'commitTimerStopped with wrong timer:%s not:%s', 'Borg',
                producer.sendLooper)
        producer.stop()
    def test_producer_non_integral_batch_every_n(self):
        """A non-integer batch_every_n raises TypeError."""
        client = Mock(reactor=MemoryReactorClock())
        with self.assertRaises(TypeError):
            producer = Producer(client, batch_send=True, batch_every_n="10")
            producer.__repr__()  # pragma: no cover # STFU pyflakes
    def test_producer_non_integral_batch_every_b(self):
        """A non-integer batch_every_b raises TypeError."""
        client = Mock(reactor=MemoryReactorClock())
        with self.assertRaises(TypeError):
            producer = Producer(client, batch_send=True, batch_every_b="10")
            producer.__repr__()  # pragma: no cover # STFU pyflakes
|
import cStringIO
from baseutils import strtoint
# Parsed ELF image state. NOTE(review): initialized to None here; the
# read_* helpers below index into it, so it is presumably replaced with
# a dict (holding 'elf_header', 'sections', 'programs', ...) elsewhere
# before they run — confirm against the caller.
elf = None
# Template listing the fields of a decoded section header entry.
section_header = {
    "index": 0,
    "name": "",
    "type": "",
    "address": 0,
    "offset": 0,
    "size": 0,
    "entsize": 0,
    "flags": "",
    "link": "",
    "info": "",
    "align": 0
}
# ELF e_machine values -> display names.
elf_arch_type = {
    0: "EM_NONE",
    3: "EM_386",
    62: "EM_X86_64"
}
# EI_DATA byte -> data encoding name.
elf_encoding = {
    0: "ELFDATANONE",
    1: "ELFDATA2LSB",
    2: "ELFDATA2MSB"
}
# EI_CLASS byte -> file class name.
elf_class_type = {
    0: "ELFCLASSNONE",
    1: "ELFCLASS32",
    2: "ELFCLASS64"
}
# e_type values -> object file type name.
elf_type = {
    0: "ET_NONE",
    1: "ET_REL",
    2: "ET_EXEC",
    3: "ET_DYN",
    4: "ET_CORE"
}
# Section header sh_type values -> display names.
# NOTE(review): the key 0x6fffffff appears twice below (SHT_HIOS and
# SHT_GNU_versym); the later entry wins, so SHT_HIOS is never returned.
sh_type = {
    0: "SHT_NULL",
    1: "SHT_PROGBITS",
    2: "SHT_SYMTAB",
    3: "SHT_STRTAB",
    4: "SHT_RELA",
    5: "SHT_HASH",
    6: "SHT_DYNAMIC",
    7: "SHT_NOTE",
    8: "SHT_NOBITS",
    9: "SHT_REL",
    10: "SHT_SHLIB",
    11: "SHT_DYNSYM",
    14: "SHT_INIT_ARRAY",
    15: "SHT_FINI_ARRAY",
    16: "SHT_PREINIT_ARRAY",
    17: "SHT_GROUP",
    18: "SHT_SYMTAB_SHNDX",
    0x60000000: "SHT_LOOS",
    0x6fffffff: "SHT_HIOS",
    0x6fff4700: "SHT_GNU_INCREMENTAL_INPUTS",
    0x6ffffff5: "SHT_GNU_ATTRIBUTES",
    0x6ffffff6: "SHT_GNU_HASH",
    0x6ffffff7: "SHT_GNU_LIBLIST",
    0x6ffffffd: "SHT_GNU_verdef",
    0x6ffffffe: "SHT_GNU_verneed",
    0x6fffffff: "SHT_GNU_versym"
}
# Section header sh_flags bits -> display names.
sh_flags = {
    0: "SHF_NONE",
    1 << 0: "SHF_WRITE",
    1 << 1: "SHF_ALLOC",
    1 << 2: "SHF_EXECINSTR",
    1 << 4: "SHF_MERGE",
    1 << 5: "SHF_STRINGS",
    1 << 6: "SHF_INFO_LINK",
    1 << 7: "SHF_LINK_ORDER",
    1 << 8: "SHF_OS_NONCONFORMING",
    1 << 9: "SHF_GROUP",
    1 << 10: "SHF_TLS",
    0x0ff00000: "SHF_MASKOS",
    0xf0000000: "SHF_MASKPROC",
    0x80000000: "SHF_EXCLUDE"
}
def decide_shflags(flag):
    """
    Render the section flag bitmask *flag* as 'NAME+NAME+...', joining
    the name of every sh_flags entry whose bits intersect *flag*.
    Returns the empty string when no bit matches (including flag == 0).
    """
    return "+".join(sh_flags[bit] for bit in sh_flags if flag & bit)
# Template listing the fields of a decoded program header entry.
program_header = {
    "type": "",
    "offset": 0,
    "virtaddr": 0,
    "physaddr": 0,
    "filesize": 0,
    "memsize": 0,
    "flags": "",
    "align": 0
}
# Program header p_type values -> display names.
ph_type = {
    0: "PT_NULL",
    1: "PT_LOAD",
    2: "PT_DYNAMIC",  # was misspelled "PT_DYNMAIC"
    3: "PT_INTERP",
    4: "PT_NOTE",
    5: "PT_SHLIB",
    6: "PT_PHDR",
    7: "PT_TLS",
    0x60000000: "PT_LOOS",
    0x6fffffff: "PT_HIOS",
    0x70000000: "PT_LOPROC",
    0x7fffffff: "PT_HIPROC",
    0x6474e550: "PT_GNU_EH_FRAME",
    0x6474e551: "PT_GNU_STACK",
    0x6474e552: "PT_GNU_RELRO"
}
# Program header p_flags values (R/W/X bit combinations) -> names.
ph_flags = {
    0: "NULL",
    1: "PF_X",
    2: "PF_W",
    3: "PF_W + PF_X",
    4: "PF_R",
    5: "PF_R + PF_X",
    6: "PF_R + PF_W",
    7: "PF_R + PF_W + PF_X"
}
# Dynamic section d_tag values -> display names.
dynamic_type = {
    0: "DT_NULL",
    1: "DT_NEEDED",
    2: "DT_PLTRELSZ",
    3: "DT_PLTGOT",
    4: "DT_HASH",
    5: "DT_STRTAB",
    6: "DT_SYMTAB",
    7: "DT_RELA",
    8: "DT_RELASZ",
    9: "DT_RELAENT",
    10: "DT_STRSZ",
    11: "DT_SYMENT",
    12: "DT_INIT",
    13: "DT_FINI",
    14: "DT_SONAME",
    15: "DT_RPATH",
    16: "DT_SYMBOLIC",
    17: "DT_REL",
    18: "DT_RELSZ",
    19: "DT_RELENT",
    20: "DT_PLTREL",
    21: "DT_DEBUG",
    22: "DT_TEXTREL",
    23: "DT_JMPREL",
    24: "DT_BIND_NOW",
    25: "DT_INIT_ARRAY",
    26: "DT_FINI_ARRAY",
    27: "DT_INIT_ARRAYSZ",
    28: "DT_FINI_ARRAYSZ",
    29: "DT_RUNPATH",
    30: "DT_FLAGS",
    31: "DT_ENCODING",
    32: "DT_PREINIT_ARRAY",
    33: "DT_PREINIT_ARRAYSZ",
    0x6000000d: "DT_LOOS",
    0x6ffff000: "DT_HIOS",
    0x70000000: "DT_LOPROC",
    0x7fffffff: "DT_HIPROC",
    0x6ffffd00: "DT_VALRNGLO",
    0x6ffffdf5: "DT_GNU_PRELINKED",
    0x6ffffdf6: "DT_GNU_CONFLICTSZ",
    0x6ffffdf7: "DT_GNU_LIBLISTSZ",
    0x6ffffdf8: "DT_CHECKSUM",
    0x6ffffdf9: "DT_PLTPADSZ",
    0x6ffffdfa: "DT_MOVEENT",
    0x6ffffdfb: "DT_MOVESZ",
    0x6ffffdfc: "DT_FEATURE",
    0x6ffffdfd: "DT_POSFLAG_1",
    0x6ffffdfe: "DT_SYMINSZ",
    0x6ffffdff: "DT_SYMINENT",
    0x6ffffe00: "DT_ADDRRNGLO",
    0x6ffffef5: "DT_GNU_HASH",
    0x6ffffef6: "DT_TLSDESC_PLT",
    0x6ffffef7: "DT_TLSDESC_GOT",
    0x6ffffef8: "DT_GNU_CONFLICT",
    0x6ffffef9: "DT_GNU_LIBLIST",
    0x6ffffefa: "DT_CONFIG",
    0x6ffffefb: "DT_DEPAUDIT",
    0x6ffffefc: "DT_AUDIT",
    0x6ffffefd: "DT_PLTPAD",
    0x6ffffefe: "DT_MOVETAB",
    0x6ffffeff: "DT_SYMINFO",
    0x6ffffff9: "DT_RELACOUNT",
    0x6ffffffa: "DT_RELCOUNT",
    # Fixed: 0x6ffffffb was a duplicate "DT_RELCOUNT"; per glibc elf.h
    # this tag is DT_FLAGS_1.
    0x6ffffffb: "DT_FLAGS_1",
    0x6ffffffc: "DT_VERDEF",
    0x6ffffffd: "DT_VERDEFNUM",
    0x6ffffffe: "DT_VERNEED",
    0x6fffffff: "DT_VERNEEDNUM",
    0x6ffffff0: "DT_VERSYM"
}
# i386 relocation r_type values -> display names.
rel_type = {
    0: "R_386_NONE",
    1: "R_386_32",
    2: "R_386_PC32",
    3: "R_386_GOT32",
    4: "R_386_PLT32",
    5: "R_386_COPY",
    6: "R_386_GLOB_DAT",
    7: "R_386_JMP_SLOT",
    8: "R_386_RELATIVE",
    9: "R_386_GOTOFF",
    10: "R_386_GOTPC"
}
# Symbol st_info type nibble -> display names.
sym_type = {
    0: "STT_NOTYPE",
    1: "STT_OBJECT",
    2: "STT_FUNC",
    3: "STT_SECTION",
    4: "STT_FILE",
    5: "STT_COMMON",
    6: "STT_TLS",
    8: "STT_RELC",
    9: "STT_SRELC",
    10: "STT_LOOS",
    12: "STT_HIOS",
    13: "STT_LOPROC",
    15: "STT_HIPROC"
}
# Symbol st_info binding nibble -> display names.
sym_bind_type = {
    0: "STB_LOCAL",
    1: "STB_GLOBAL",
    2: "STB_WEAK"
}
# Special symbol st_shndx values -> display names.
sym_spec_index = {
    0: "SHN_UNDEF",
    0xff00: "SHN_LOPROC",
    0xff1f: "SHN_HIPROC",
    0xfff1: "SHN_ABS",
    0xfff2: "SHN_COMMON",
    0xffff: "HIRESERVE"
}
# Symbol st_other visibility values -> display names.
sym_vis_type = {
    0: "STV_DEFAULT",
    1: "STV_INTERNAL",
    2: "STV_HIDDEN",
    3: "STV_PROTECTED"
}
def read_header(buffer):
    """
    Decode the 64-bit ELF file header from the start of *buffer* into
    elf['elf_header'].
    Raises AssertionError if the magic bytes are not "\\x7fELF".
    NOTE: this module targets Python 2 (cStringIO, str buffers), so the
    magic comparison is against a str literal.
    """
    buffer.seek(0)
    header = elf['elf_header']
    header["file_ident"] = buffer.read(4)
    assert header["file_ident"] == "\x7fELF"
    # Remaining e_ident bytes, one each.
    for field in ("file_class", "file_encoding", "file_version"):
        header[field] = strtoint(buffer.read(1))
    # Skip EI_PAD: 9 unused identification bytes.
    buffer.read(9)
    # Fixed-width fields of the 64-bit ELF header, in file order.
    layout = (
        ("e_type", 2),
        ("e_machine", 2),
        ("e_version", 4),
        ("e_entry", 8),
        ("e_phoff", 8),
        ("e_shoff", 8),
        ("e_flags", 4),
        ("e_ehsize", 2),
        ("e_phentsize", 2),
        ("e_phnum", 2),
        ("e_shentsize", 2),
        ("e_shnum", 2),
        ("e_shstrndx", 2),
    )
    for field, width in layout:
        header[field] = strtoint(buffer.read(width))
def read_section_header(buffer):
    """Read e_shnum section headers (64-bit layout) at e_shoff into elf['sections'].

    Each entry's "name" is stored as a string-table offset here; read_strtab()
    later rewrites it to the actual section name.
    """
    elf_header = elf["elf_header"]
    sections = elf["sections"]
    e_shoff = elf_header["e_shoff"]
    buffer.seek(e_shoff)
    e_shnum = elf_header["e_shnum"]
    # NOTE(review): e_shentsize is read but never used; entry size is
    # implied by the fixed 64-byte read pattern below.
    e_shentsize = elf_header["e_shentsize"]
    for num in range(e_shnum):
        sections.append({
            "name": strtoint(buffer.read(4)),
            "type": strtoint(buffer.read(4)),
            "flag": strtoint(buffer.read(8)),
            "addr": strtoint(buffer.read(8)),
            "offset": strtoint(buffer.read(8)),
            "size": strtoint(buffer.read(8)),
            "link": strtoint(buffer.read(4)),
            "info": strtoint(buffer.read(4)),
            "align": strtoint(buffer.read(8)),
            "entsize": strtoint(buffer.read(8))
        })
def read_program_header(buffer):
    """Read e_phnum program headers (64-bit layout) at e_phoff into elf['programs'].

    For a PT_INTERP (type 3) entry the interpreter path is also loaded
    into elf['interpreter'], restoring the stream position afterwards.
    """
    elf_header = elf["elf_header"]
    programs = elf["programs"]
    buffer.seek(elf_header["e_phoff"])
    e_phnum = elf_header["e_phnum"]
    # NOTE(review): e_phentsize is read but never used.
    e_phentsize = elf_header["e_phentsize"]
    for num in range(e_phnum):
        entry = {
            "type": strtoint(buffer.read(4)),
            "flag": strtoint(buffer.read(4)),
            "offset": strtoint(buffer.read(8)),
            "virt": strtoint(buffer.read(8)),
            "phys": strtoint(buffer.read(8)),
            "filesize": strtoint(buffer.read(8)),
            "memsize": strtoint(buffer.read(8)),
            "align": strtoint(buffer.read(8))
        }
        # PT_INTERP: the segment body is the interpreter path itself.
        if entry['type'] == 3:
            mark = buffer.tell()
            buffer.seek(entry['offset'])
            elf['interpreter'] = buffer.read(entry['filesize'])
            buffer.seek(mark)
        programs.append(entry)
def build_strtab(buffer, section):
    """Build {offset: name} from a SHT_STRTAB section.

    Names start one byte after each NUL terminator, so keys are the offset
    of the first character of each name.  When a name contains an embedded
    '.' (e.g. ".rela.text") an extra key is added for the suffix starting
    at that dot (".text"), so suffix offsets also resolve.
    """
    buffer.seek(section["offset"])
    size = section["size"]
    strtabdata = buffer.read(size)
    strtab = {}
    j = 0
    while j < size:
        if strtabdata[j] == "\x00":
            end = strtabdata.find("\x00", j+1)
            if end == -1:
                break
            name = strtabdata[j+1:end]
            # Register the suffix beginning at an embedded dot, if any.
            more = name.find(".", 1)
            if more > 0:
                strtab[j+more+1] = name[more:]
            strtab[j+1] = name
            # Jump to the terminating NUL of this name and continue from it.
            j = end
            continue
        j += 1
    return strtab
def search_sections(key, value):
    """Return every parsed section whose header field `key` equals `value`."""
    return [entry for entry in elf["sections"] if entry[key] == value]
def read_strtab(buffer):
    """Locate the section-name string table, name all sections, and load
    every other string table into elf['strtabs'].

    The name table is found heuristically: the SHT_STRTAB section whose
    raw bytes contain ".text".
    NOTE(review): elf_header["e_shstrndx"] identifies it directly and is
    left unused here — confirm the heuristic is intentional.
    """
    elf_header = elf["elf_header"]
    sections = elf["sections"]
    strtab_sections = []
    for section in sections:
        # SHT_STRTAB == 3
        if section["type"] == 3:
            strtab_sections.append(section)
    shstrtab_section = None
    for section in strtab_sections:
        buffer.seek(section["offset"])
        if ".text" in buffer.read(section["size"]):
            shstrtab_section = section
    if not shstrtab_section:
        print "error: where is .shstrtab?"
        return
    shstrtab = build_strtab(buffer, shstrtab_section)
    # Replace each section's name offset with its string (index 0 is the
    # NULL section and keeps its numeric name).
    for section in sections[1:]:
        section["name"] = shstrtab[section["name"]]
    for section in strtab_sections:
        if section["name"] == ".shstrtab":
            continue
        strtab = build_strtab(buffer, section)
        elf["strtabs"][section["name"]] = strtab
def read_symtab(buffer):
    """Parse SHT_SYMTAB (2) and SHT_DYNSYM (11) sections into elf['symtabs'],
    then resolve symbol-name offsets via .strtab / .dynstr.

    Each 24-byte Elf64_Sym is unpacked field by field; read order matters.
    """
    sections = elf["sections"]
    symtabs = elf["symtabs"]
    symtab_sections = []
    for section in sections:
        if section["type"] == 2:
            symtab_sections.append(section)
        if section["type"] == 11:
            symtab_sections.append(section)
    for section in symtab_sections:
        buffer.seek(section["offset"])
        # NOTE(review): `extra` is computed but never used — dead code?
        extra = section["align"] - (section["entsize"] / section["align"])
        total = section["size"] / section["entsize"]
        symtab = []
        for entry in range(total):
            name = strtoint(buffer.read(4))
            info = strtoint(buffer.read(1))
            # st_info packs binding (high nibble) and type (low nibble).
            _bind = info >> 4
            _type = info & 0xf
            symtab.append({
                "name": name,
                "bind": _bind,
                "type": _type,
                "vis": strtoint(buffer.read(1)),
                "index": strtoint(buffer.read(2)),
                "value": strtoint(buffer.read(8)),
                "size": strtoint(buffer.read(8))
            })
        symtabs[section["name"]] = symtab
    # Resolve .symtab names against .strtab (if a static symtab exists).
    if ".symtab" in elf["symtabs"]:
        strtab = elf["strtabs"][".strtab"]
        for symbol in elf["symtabs"][".symtab"]:
            if symbol["name"]:
                symbol["name"] = strtab[symbol["name"]]
    # Resolve .dynsym names against .dynstr; unknown offsets degrade to
    # the placeholder string instead of raising.
    dynsym = elf["strtabs"][".dynstr"]
    for symbol in elf["symtabs"][".dynsym"]:
        if symbol["name"]:
            try:
                symbol["name"] = dynsym[symbol["name"]]
            except:
                symbol["name"] = "unknown"
def read_rela(buffer):
    """Placeholder for relocation (SHT_RELA) parsing — not implemented yet."""
    sections = elf["sections"]
def read_dynamic(buffer):
    """Parse the SHT_DYNAMIC (type 6) section into elf['dynamic'] as a list
    of single-entry {d_tag: value} dicts, stopping at DT_NULL, then resolve
    string-valued tags (DT_NEEDED=1, DT_SONAME=14, DT_RPATH=15) to names.
    """
    sections = elf["sections"]
    dynamic = None
    for section in sections:
        if section["type"] == 6:
            dynamic = section
    dynamic_list = elf["dynamic"]
    # NOTE(review): raises TypeError on a static binary (dynamic is None).
    buffer.seek(dynamic["offset"])
    total = dynamic["size"] / dynamic["entsize"]
    for entry in range(total):
        d_tag = strtoint(buffer.read(8))
        value = strtoint(buffer.read(8))
        dynamic_list.append({d_tag: value})
        # DT_NULL (tag 0) terminates the table.
        if not d_tag:
            break
    in_symtab = [1, 14, 15]
    strtab = elf["strtabs"][".strtab"]
    dyntab = elf["strtabs"][".dynstr"]
    for entry in dynamic_list:
        # Python 2: dict.keys() is a list, so [0] is valid here.
        d_tag = entry.keys()[0]
        if d_tag in in_symtab:
            if not d_tag:
                continue
            if not entry[d_tag]:
                continue
            # Prefer .strtab; fall back to .dynstr for unknown offsets.
            try:
                name = strtab[entry[d_tag]]
            except:
                name = dyntab[entry[d_tag]]
            entry[d_tag] = name
def set_target(path):
    """Load the binary at `path` and run the full parse pipeline.

    Resets the module-global `elf` state, copies the whole file into an
    in-memory buffer, then parses header, section/program tables, string
    tables, symbol tables and the dynamic section.  Returns the `elf` dict.
    """
    global elf
    elf = {
        "elf_header": {},
        "sections": [],
        "programs": [],
        "interpreter": "",
        "strtabs": {},
        "symtabs": {},
        "dynamic": []
    }
    buffer = cStringIO.StringIO()
    # NOTE(review): opened in text mode "r"; binary data should use "rb"
    # (equivalent on POSIX Python 2, not portable to Windows).
    with open(path, "r") as binfile:
        buffer.write(binfile.read())
    buffer.seek(0)
    read_header(buffer)
    read_section_header(buffer)
    read_program_header(buffer)
    read_strtab(buffer)
    read_symtab(buffer)
    read_dynamic(buffer)
    return elf
elfutils.py: performance update, _read = buffer.read
import cStringIO
from baseutils import strtoint
# Parsed state for the current target; (re)initialized by set_target().
elf = None
# Bound to the target buffer's read method by set_target() — avoids the
# repeated attribute lookup in the hot parsing loops.
_read = None
# NOTE(review): template of section-header fields; appears unused by the
# visible parsing code.
section_header = {
    "index": 0,
    "name": "",
    "type": "",
    "address": 0,
    "offset": 0,
    "size": 0,
    "entsize": 0,
    "flags": "",
    "link": "",
    "info": "",
    "align": 0
}
# e_machine names.
elf_arch_type = {
    0: "EM_NONE",
    3: "EM_386",
    62: "EM_X86_64"
}
# EI_DATA (byte order) names.
elf_encoding = {
    0: "ELFDATANONE",
    1: "ELFDATA2LSB",
    2: "ELFDATA2MSB"
}
# EI_CLASS (word size) names.
elf_class_type = {
    0: "ELFCLASSNONE",
    1: "ELFCLASS32",
    2: "ELFCLASS64"
}
# e_type (object file type) names.
elf_type = {
    0: "ET_NONE",
    1: "ET_REL",
    2: "ET_EXEC",
    3: "ET_DYN",
    4: "ET_CORE"
}
# Section header sh_type names.
sh_type = {
    0: "SHT_NULL",
    1: "SHT_PROGBITS",
    2: "SHT_SYMTAB",
    3: "SHT_STRTAB",
    4: "SHT_RELA",
    5: "SHT_HASH",
    6: "SHT_DYNAMIC",
    7: "SHT_NOTE",
    8: "SHT_NOBITS",
    9: "SHT_REL",
    10: "SHT_SHLIB",
    11: "SHT_DYNSYM",
    14: "SHT_INIT_ARRAY",
    15: "SHT_FINI_ARRAY",
    16: "SHT_PREINIT_ARRAY",
    17: "SHT_GROUP",
    18: "SHT_SYMTAB_SHNDX",
    0x60000000: "SHT_LOOS",
    0x6fffffff: "SHT_HIOS",
    0x6fff4700: "SHT_GNU_INCREMENTAL_INPUTS",
    0x6ffffff5: "SHT_GNU_ATTRIBUTES",
    0x6ffffff6: "SHT_GNU_HASH",
    0x6ffffff7: "SHT_GNU_LIBLIST",
    0x6ffffffd: "SHT_GNU_verdef",
    0x6ffffffe: "SHT_GNU_verneed",
    0x6fffffff: "SHT_GNU_versym"
}
# Section header sh_flags bit names; consumed by decide_shflags().
sh_flags = {
    0: "SHF_NONE",
    1 << 0: "SHF_WRITE",
    1 << 1: "SHF_ALLOC",
    1 << 2: "SHF_EXECINSTR",
    1 << 4: "SHF_MERGE",
    1 << 5: "SHF_STRINGS",
    1 << 6: "SHF_INFO_LINK",
    1 << 7: "SHF_LINK_ORDER",
    1 << 8: "SHF_OS_NONCONFORMING",
    1 << 9: "SHF_GROUP",
    1 << 10: "SHF_TLS",
    0x0ff00000: "SHF_MASKOS",
    0xf0000000: "SHF_MASKPROC",
    0x80000000: "SHF_EXCLUDE"
}
def decide_shflags(flag):
    """Render a section-header flag bitmask as a '+'-joined list of SHF_* names."""
    matched = [name for bit, name in sh_flags.items() if flag & bit]
    return "+".join(matched)
# NOTE(review): template of program-header fields; appears unused by the
# visible parsing code.
program_header = {
    "type": "",
    "offset": 0,
    "virtaddr": 0,
    "physaddr": 0,
    "filesize": 0,
    "memsize": 0,
    "flags": "",
    "align": 0
}
# Program header p_type names.
# NOTE(review): "PT_DYNMAIC" looks like a typo for "PT_DYNAMIC".
ph_type = {
    0: "PT_NULL",
    1: "PT_LOAD",
    2: "PT_DYNMAIC",
    3: "PT_INTERP",
    4: "PT_NOTE",
    5: "PT_SHLIB",
    6: "PT_PHDR",
    7: "PT_TLS",
    0x60000000: "PT_LOOS",
    0x6fffffff: "PT_HIOS",
    0x70000000: "PT_LOPROC",
    0x7fffffff: "PT_HIPROC",
    0x6474e550: "PT_GNU_EH_FRAME",
    0x6474e551: "PT_GNU_STACK",
    0x6474e552: "PT_GNU_RELRO"
}
# Program header p_flags combinations, pre-expanded for all 3 bits.
ph_flags = {
    0: "NULL",
    1: "PF_X",
    2: "PF_W",
    3: "PF_W + PF_X",
    4: "PF_R",
    5: "PF_R + PF_X",
    6: "PF_R + PF_W",
    7: "PF_R + PF_W + PF_X"
}
# Dynamic section d_tag names.
# NOTE(review): 0x6ffffffa and 0x6ffffffb both map to "DT_RELCOUNT";
# 0x6ffffffb is conventionally DT_FLAGS_1 — confirm.
dynamic_type = {
    0: "DT_NULL",
    1: "DT_NEEDED",
    2: "DT_PLTRELSZ",
    3: "DT_PLTGOT",
    4: "DT_HASH",
    5: "DT_STRTAB",
    6: "DT_SYMTAB",
    7: "DT_RELA",
    8: "DT_RELASZ",
    9: "DT_RELAENT",
    10: "DT_STRSZ",
    11: "DT_SYMENT",
    12: "DT_INIT",
    13: "DT_FINI",
    14: "DT_SONAME",
    15: "DT_RPATH",
    16: "DT_SYMBOLIC",
    17: "DT_REL",
    18: "DT_RELSZ",
    19: "DT_RELENT",
    20: "DT_PLTREL",
    21: "DT_DEBUG",
    22: "DT_TEXTREL",
    23: "DT_JMPREL",
    24: "DT_BIND_NOW",
    25: "DT_INIT_ARRAY",
    26: "DT_FINI_ARRAY",
    27: "DT_INIT_ARRAYSZ",
    28: "DT_FINI_ARRAYSZ",
    29: "DT_RUNPATH",
    30: "DT_FLAGS",
    31: "DT_ENCODING",
    32: "DT_PREINIT_ARRAY",
    33: "DT_PREINIT_ARRAYSZ",
    0x6000000d: "DT_LOOS",
    0x6ffff000: "DT_HIOS",
    0x70000000: "DT_LOPROC",
    0x7fffffff: "DT_HIPROC",
    0x6ffffd00: "DT_VALRNGLO",
    0x6ffffdf5: "DT_GNU_PRELINKED",
    0x6ffffdf6: "DT_GNU_CONFLICTSZ",
    0x6ffffdf7: "DT_GNU_LIBLISTSZ",
    0x6ffffdf8: "DT_CHECKSUM",
    0x6ffffdf9: "DT_PLTPADSZ",
    0x6ffffdfa: "DT_MOVEENT",
    0x6ffffdfb: "DT_MOVESZ",
    0x6ffffdfc: "DT_FEATURE",
    0x6ffffdfd: "DT_POSFLAG_1",
    0x6ffffdfe: "DT_SYMINSZ",
    0x6ffffdff: "DT_SYMINENT",
    0x6ffffe00: "DT_ADDRRNGLO",
    0x6ffffef5: "DT_GNU_HASH",
    0x6ffffef6: "DT_TLSDESC_PLT",
    0x6ffffef7: "DT_TLSDESC_GOT",
    0x6ffffef8: "DT_GNU_CONFLICT",
    0x6ffffef9: "DT_GNU_LIBLIST",
    0x6ffffefa: "DT_CONFIG",
    0x6ffffefb: "DT_DEPAUDIT",
    0x6ffffefc: "DT_AUDIT",
    0x6ffffefd: "DT_PLTPAD",
    0x6ffffefe: "DT_MOVETAB",
    0x6ffffeff: "DT_SYMINFO",
    0x6ffffff9: "DT_RELACOUNT",
    0x6ffffffa: "DT_RELCOUNT",
    0x6ffffffb: "DT_RELCOUNT",
    0x6ffffffc: "DT_VERDEF",
    0x6ffffffd: "DT_VERDEFNUM",
    0x6ffffffe: "DT_VERNEED",
    0x6fffffff: "DT_VERNEEDNUM",
    0x6ffffff0: "DT_VERSYM"
}
# i386 relocation type names (ELF32 r_type values).
rel_type = {
    0: "R_386_NONE",
    1: "R_386_32",
    2: "R_386_PC32",
    3: "R_386_GOT32",
    4: "R_386_PLT32",
    5: "R_386_COPY",
    6: "R_386_GLOB_DAT",
    7: "R_386_JMP_SLOT",
    8: "R_386_RELATIVE",
    9: "R_386_GOTOFF",
    10: "R_386_GOTPC"
}
# Symbol type names (low 4 bits of st_info).
sym_type = {
    0: "STT_NOTYPE",
    1: "STT_OBJECT",
    2: "STT_FUNC",
    3: "STT_SECTION",
    4: "STT_FILE",
    5: "STT_COMMON",
    6: "STT_TLS",
    8: "STT_RELC",
    9: "STT_SRELC",
    10: "STT_LOOS",
    12: "STT_HIOS",
    13: "STT_LOPROC",
    15: "STT_HIPROC"
}
# Symbol binding names (high 4 bits of st_info).
sym_bind_type = {
    0: "STB_LOCAL",
    1: "STB_GLOBAL",
    2: "STB_WEAK"
}
# Reserved/special section indices for st_shndx.
sym_spec_index = {
    0: "SHN_UNDEF",
    0xff00: "SHN_LOPROC",
    0xff1f: "SHN_HIPROC",
    0xfff1: "SHN_ABS",
    0xfff2: "SHN_COMMON",
    0xffff: "HIRESERVE"
}
# Symbol visibility names (st_other).
sym_vis_type = {
    0: "STV_DEFAULT",
    1: "STV_INTERNAL",
    2: "STV_HIDDEN",
    3: "STV_PROTECTED"
}
def read_header(buffer):
    """Parse the ELF file header at offset 0 into elf['elf_header'].

    Uses the module-global `_read` (bound to buffer.read by set_target)
    and the 64-bit header layout.  Sequential read order is the layout,
    so these statements may not be reordered.
    """
    buffer.seek(0)
    elf_header = elf['elf_header']
    elf_header["file_ident"] = _read(4)
    # Magic-number check; byte-string comparison (this module is Python 2).
    assert elf_header["file_ident"] == "\x7fELF"
    elf_header["file_class"] = strtoint(_read(1))
    elf_header["file_encoding"] = strtoint(_read(1))
    elf_header["file_version"] = strtoint(_read(1))
    # Skip the remaining 9 e_ident bytes (OS/ABI + padding).
    _read(9)
    elf_header["e_type"] = strtoint(_read(2))
    elf_header["e_machine"] = strtoint(_read(2))
    elf_header["e_version"] = strtoint(_read(4))
    elf_header["e_entry"] = strtoint(_read(8))
    elf_header["e_phoff"] = strtoint(_read(8))
    elf_header["e_shoff"] = strtoint(_read(8))
    elf_header["e_flags"] = strtoint(_read(4))
    elf_header["e_ehsize"] = strtoint(_read(2))
    elf_header["e_phentsize"] = strtoint(_read(2))
    elf_header["e_phnum"] = strtoint(_read(2))
    elf_header["e_shentsize"] = strtoint(_read(2))
    elf_header["e_shnum"] = strtoint(_read(2))
    elf_header["e_shstrndx"] = strtoint(_read(2))
def read_section_header(buffer):
    """Read e_shnum section headers (64-bit layout) at e_shoff into elf['sections'].

    "name" is stored as a string-table offset; read_strtab() rewrites it.
    """
    elf_header = elf["elf_header"]
    sections = elf["sections"]
    e_shoff = elf_header["e_shoff"]
    buffer.seek(e_shoff)
    e_shnum = elf_header["e_shnum"]
    # NOTE(review): e_shentsize is read but never used.
    e_shentsize = elf_header["e_shentsize"]
    for num in range(e_shnum):
        sections.append({
            "name": strtoint(_read(4)),
            "type": strtoint(_read(4)),
            "flag": strtoint(_read(8)),
            "addr": strtoint(_read(8)),
            "offset": strtoint(_read(8)),
            "size": strtoint(_read(8)),
            "link": strtoint(_read(4)),
            "info": strtoint(_read(4)),
            "align": strtoint(_read(8)),
            "entsize": strtoint(_read(8))
        })
def read_program_header(buffer):
    """Read e_phnum program headers (64-bit layout) at e_phoff into elf['programs'].

    For a PT_INTERP (type 3) entry the interpreter path is also loaded
    into elf['interpreter'], restoring the stream position afterwards.
    """
    elf_header = elf["elf_header"]
    programs = elf["programs"]
    buffer.seek(elf_header["e_phoff"])
    e_phnum = elf_header["e_phnum"]
    # NOTE(review): e_phentsize is read but never used.
    e_phentsize = elf_header["e_phentsize"]
    for num in range(e_phnum):
        entry = {
            "type": strtoint(_read(4)),
            "flag": strtoint(_read(4)),
            "offset": strtoint(_read(8)),
            "virt": strtoint(_read(8)),
            "phys": strtoint(_read(8)),
            "filesize": strtoint(_read(8)),
            "memsize": strtoint(_read(8)),
            "align": strtoint(_read(8))
        }
        # PT_INTERP: the segment body is the interpreter path itself.
        if entry['type'] == 3:
            mark = buffer.tell()
            buffer.seek(entry['offset'])
            elf['interpreter'] = _read(entry['filesize'])
            buffer.seek(mark)
        programs.append(entry)
def build_strtab(buffer, section):
    """Build {offset: name} from a SHT_STRTAB section.

    Names start one byte after each NUL terminator, so keys are the offset
    of the first character of each name.  When a name contains an embedded
    '.' (e.g. ".rela.text") an extra key is added for the suffix starting
    at that dot (".text"), so suffix offsets also resolve.
    """
    buffer.seek(section["offset"])
    size = section["size"]
    strtabdata = _read(size)
    strtab = {}
    j = 0
    while j < size:
        if strtabdata[j] == "\x00":
            end = strtabdata.find("\x00", j+1)
            if end == -1:
                break
            name = strtabdata[j+1:end]
            # Register the suffix beginning at an embedded dot, if any.
            more = name.find(".", 1)
            if more > 0:
                strtab[j+more+1] = name[more:]
            strtab[j+1] = name
            # Jump to the terminating NUL of this name and continue from it.
            j = end
            continue
        j += 1
    return strtab
def search_sections(key, value):
    """Return every parsed section whose header field `key` equals `value`."""
    return [entry for entry in elf["sections"] if entry[key] == value]
def read_strtab(buffer):
    """Locate the section-name string table, name all sections, and load
    every other string table into elf['strtabs'].

    The name table is found heuristically: the SHT_STRTAB section whose
    raw bytes contain ".text".
    NOTE(review): elf_header["e_shstrndx"] identifies it directly and is
    left unused here — confirm the heuristic is intentional.
    """
    elf_header = elf["elf_header"]
    sections = elf["sections"]
    strtab_sections = []
    for section in sections:
        # SHT_STRTAB == 3
        if section["type"] == 3:
            strtab_sections.append(section)
    shstrtab_section = None
    for section in strtab_sections:
        buffer.seek(section["offset"])
        if ".text" in _read(section["size"]):
            shstrtab_section = section
    if not shstrtab_section:
        print "error: where is .shstrtab?"
        return
    shstrtab = build_strtab(buffer, shstrtab_section)
    # Replace each section's name offset with its string (index 0 is the
    # NULL section and keeps its numeric name).
    for section in sections[1:]:
        section["name"] = shstrtab[section["name"]]
    for section in strtab_sections:
        if section["name"] == ".shstrtab":
            continue
        strtab = build_strtab(buffer, section)
        elf["strtabs"][section["name"]] = strtab
def read_symtab(buffer):
    """Parse SHT_SYMTAB (2) and SHT_DYNSYM (11) sections into elf['symtabs'],
    then resolve symbol-name offsets via .strtab / .dynstr.

    Each 24-byte Elf64_Sym is unpacked field by field; read order matters.
    """
    sections = elf["sections"]
    symtabs = elf["symtabs"]
    symtab_sections = []
    for section in sections:
        if section["type"] == 2:
            symtab_sections.append(section)
        if section["type"] == 11:
            symtab_sections.append(section)
    for section in symtab_sections:
        buffer.seek(section["offset"])
        # NOTE(review): `extra` is computed but never used — dead code?
        extra = section["align"] - (section["entsize"] / section["align"])
        total = section["size"] / section["entsize"]
        symtab = []
        for entry in range(total):
            name = strtoint(_read(4))
            info = strtoint(_read(1))
            # st_info packs binding (high nibble) and type (low nibble).
            _bind = info >> 4
            _type = info & 0xf
            symtab.append({
                "name": name,
                "bind": _bind,
                "type": _type,
                "vis": strtoint(_read(1)),
                "index": strtoint(_read(2)),
                "value": strtoint(_read(8)),
                "size": strtoint(_read(8))
            })
        symtabs[section["name"]] = symtab
    # Resolve .symtab names against .strtab (if a static symtab exists).
    if ".symtab" in elf["symtabs"]:
        strtab = elf["strtabs"][".strtab"]
        for symbol in elf["symtabs"][".symtab"]:
            if symbol["name"]:
                symbol["name"] = strtab[symbol["name"]]
    # Resolve .dynsym names against .dynstr; unknown offsets degrade to
    # the placeholder string instead of raising.
    dynsym = elf["strtabs"][".dynstr"]
    for symbol in elf["symtabs"][".dynsym"]:
        if symbol["name"]:
            try:
                symbol["name"] = dynsym[symbol["name"]]
            except:
                symbol["name"] = "unknown"
def read_rela(buffer):
    """Placeholder for relocation (SHT_RELA) parsing — not implemented yet."""
    sections = elf["sections"]
def read_dynamic(buffer):
    """Parse the SHT_DYNAMIC (type 6) section into elf['dynamic'] as a list
    of single-entry {d_tag: value} dicts, stopping at DT_NULL, then resolve
    string-valued tags (DT_NEEDED=1, DT_SONAME=14, DT_RPATH=15) to names.
    """
    sections = elf["sections"]
    dynamic = None
    for section in sections:
        if section["type"] == 6:
            dynamic = section
    dynamic_list = elf["dynamic"]
    # NOTE(review): raises TypeError on a static binary (dynamic is None).
    buffer.seek(dynamic["offset"])
    total = dynamic["size"] / dynamic["entsize"]
    for entry in range(total):
        d_tag = strtoint(_read(8))
        value = strtoint(_read(8))
        dynamic_list.append({d_tag: value})
        # DT_NULL (tag 0) terminates the table.
        if not d_tag:
            break
    in_symtab = [1, 14, 15]
    strtab = elf["strtabs"][".strtab"]
    dyntab = elf["strtabs"][".dynstr"]
    for entry in dynamic_list:
        # Python 2: dict.keys() is a list, so [0] is valid here.
        d_tag = entry.keys()[0]
        if d_tag in in_symtab:
            if not d_tag:
                continue
            if not entry[d_tag]:
                continue
            # Prefer .strtab; fall back to .dynstr for unknown offsets.
            try:
                name = strtab[entry[d_tag]]
            except:
                name = dyntab[entry[d_tag]]
            entry[d_tag] = name
def set_target(path):
    """Load the binary at `path` and run the full parse pipeline.

    Resets the module-global `elf` state, copies the whole file into an
    in-memory buffer, binds the global `_read` shortcut to buffer.read
    (perf: the parsers call it in tight loops), then parses header,
    section/program tables, string/symbol tables and the dynamic section.
    Returns the `elf` dict.
    """
    global elf
    global _read
    elf = {
        "elf_header": {},
        "sections": [],
        "programs": [],
        "interpreter": "",
        "strtabs": {},
        "symtabs": {},
        "dynamic": []
    }
    buffer = cStringIO.StringIO()
    # NOTE(review): opened in text mode "r"; binary data should use "rb"
    # (equivalent on POSIX Python 2, not portable to Windows).
    with open(path, "r") as binfile:
        buffer.write(binfile.read())
    buffer.seek(0)
    _read = buffer.read
    read_header(buffer)
    read_section_header(buffer)
    read_program_header(buffer)
    read_strtab(buffer)
    read_symtab(buffer)
    read_dynamic(buffer)
    return elf
|
import os
from socrate import system
# Default settings; every key can be overridden by an environment variable
# of the same name (applied by ConfigManager.init_app).
DEFAULT_CONFIG = {
    # Specific to the admin UI
    'DOCKER_SOCKET': 'unix:///var/run/docker.sock',
    'BABEL_DEFAULT_LOCALE': 'en',
    'BABEL_DEFAULT_TIMEZONE': 'UTC',
    'BOOTSTRAP_SERVE_LOCAL': True,
    'RATELIMIT_STORAGE_URL': 'redis://redis/2',
    'QUOTA_STORAGE_URL': 'redis://redis/1',
    'DEBUG': False,
    'DOMAIN_REGISTRATION': False,
    'TEMPLATES_AUTO_RELOAD': True,
    # Database settings
    'DB_FLAVOR': None,
    'DB_USER': 'mailu',
    'DB_PW': None,
    'DB_HOST': 'database',
    'DB_NAME': 'mailu',
    'SQLITE_DATABASE_FILE':'data/main.db',
    'SQLALCHEMY_DATABASE_URI': 'sqlite:////data/main.db',
    'SQLALCHEMY_TRACK_MODIFICATIONS': False,
    # Statistics management
    'INSTANCE_ID_PATH': '/data/instance',
    'STATS_ENDPOINT': '0.{}.stats.mailu.io',
    # Common configuration variables
    'SECRET_KEY': 'changeMe',
    'DOMAIN': 'mailu.io',
    'HOSTNAMES': 'mail.mailu.io,alternative.mailu.io,yetanother.mailu.io',
    'POSTMASTER': 'postmaster',
    'TLS_FLAVOR': 'cert',
    'AUTH_RATELIMIT': '10/minute;1000/hour',
    'DISABLE_STATISTICS': False,
    # Mail settings
    'DMARC_RUA': None,
    'DMARC_RUF': None,
    'WELCOME': False,
    'WELCOME_SUBJECT': 'Dummy welcome topic',
    'WELCOME_BODY': 'Dummy welcome body',
    'DKIM_SELECTOR': 'dkim',
    'DKIM_PATH': '/dkim/{domain}.{selector}.key',
    'DEFAULT_QUOTA': 1000000000,
    # Web settings
    'SITENAME': 'Mailu',
    'WEBSITE': 'https://mailu.io',
    'WEB_ADMIN': '/admin',
    'WEB_WEBMAIL': '/webmail',
    'WEBMAIL': 'none',
    'RECAPTCHA_PUBLIC_KEY': '',
    'RECAPTCHA_PRIVATE_KEY': '',
    # Advanced settings
    'PASSWORD_SCHEME': 'BLF-CRYPT',
    # Host settings
    'HOST_IMAP': 'imap',
    'HOST_POP3': 'imap',
    'HOST_SMTP': 'smtp',
    'HOST_WEBMAIL': 'webmail',
    'HOST_FRONT': 'front',
    # NOTE: reads the environment at import time, before init_app runs.
    'HOST_AUTHSMTP': os.environ.get('HOST_SMTP', 'smtp'),
    'SUBNET': '192.168.203.0/24',
    'POD_ADDRESS_RANGE': None
}
class ConfigManager(dict):
    """Naive configuration manager that uses environment variables only.

    Acts as a drop-in replacement for Flask's ``app.config``: values come
    from ``DEFAULT_CONFIG``, overridden by same-named environment variables
    ("true"/"yes" and "false"/"no" are coerced to booleans).
    """

    # Templates used to derive SQLALCHEMY_DATABASE_URI from the DB_* settings.
    DB_TEMPLATES = {
        'sqlite': 'sqlite:////{SQLITE_DATABASE_FILE}',
        'postgresql': 'postgresql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}',
        'mysql': 'mysql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}'
    }

    # HOST_<name> entries that are always resolved to addresses.
    HOSTS = ('IMAP', 'POP3', 'AUTHSMTP', 'SMTP')
    # HOST_<name> entries resolved only when the name is present in config.
    OPTIONAL_HOSTS = ('WEBMAIL', 'ANTISPAM')

    def __init__(self):
        self.config = dict()

    def resolve_host(self):
        """Resolve every configured HOST_* entry to a concrete address in place."""
        optional = [item for item in self.OPTIONAL_HOSTS if item in self.config]
        # Fix: HOSTS is a tuple while `optional` is a list; `tuple + list`
        # raises TypeError, so normalize the tuple to a list first.
        for item in list(self.HOSTS) + optional:
            host = 'HOST_' + item
            self.config[host] = system.resolve_address(self.config[host])

    def __coerce_value(self, value):
        """Turn common boolean-ish strings into real booleans."""
        if isinstance(value, str) and value.lower() in ('true','yes'):
            return True
        elif isinstance(value, str) and value.lower() in ('false', 'no'):
            return False
        return value

    def init_app(self, app):
        """Populate from app.config + environment, then install self as app.config."""
        self.config.update(app.config)
        # get environment variables
        self.config.update({
            key: self.__coerce_value(os.environ.get(key, value))
            for key, value in DEFAULT_CONFIG.items()
        })
        self.resolve_host()
        # automatically set the sqlalchemy string
        if self.config['DB_FLAVOR']:
            template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]
            self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)
        # update the app config itself
        app.config = self

    def setdefault(self, key, value):
        if key not in self.config:
            self.config[key] = value
        return self.config[key]

    def get(self, *args):
        return self.config.get(*args)

    def keys(self):
        return self.config.keys()

    def __getitem__(self, key):
        return self.config.get(key)

    def __setitem__(self, key, value):
        self.config[key] = value

    def __contains__(self, key):
        return key in self.config
Fix hostname resolution
import os
from socrate import system
# Default settings; every key can be overridden by an environment variable
# of the same name (applied by ConfigManager.init_app).
DEFAULT_CONFIG = {
    # Specific to the admin UI
    'DOCKER_SOCKET': 'unix:///var/run/docker.sock',
    'BABEL_DEFAULT_LOCALE': 'en',
    'BABEL_DEFAULT_TIMEZONE': 'UTC',
    'BOOTSTRAP_SERVE_LOCAL': True,
    'RATELIMIT_STORAGE_URL': 'redis://redis/2',
    'QUOTA_STORAGE_URL': 'redis://redis/1',
    'DEBUG': False,
    'DOMAIN_REGISTRATION': False,
    'TEMPLATES_AUTO_RELOAD': True,
    # Database settings
    'DB_FLAVOR': None,
    'DB_USER': 'mailu',
    'DB_PW': None,
    'DB_HOST': 'database',
    'DB_NAME': 'mailu',
    'SQLITE_DATABASE_FILE':'data/main.db',
    'SQLALCHEMY_DATABASE_URI': 'sqlite:////data/main.db',
    'SQLALCHEMY_TRACK_MODIFICATIONS': False,
    # Statistics management
    'INSTANCE_ID_PATH': '/data/instance',
    'STATS_ENDPOINT': '0.{}.stats.mailu.io',
    # Common configuration variables
    'SECRET_KEY': 'changeMe',
    'DOMAIN': 'mailu.io',
    'HOSTNAMES': 'mail.mailu.io,alternative.mailu.io,yetanother.mailu.io',
    'POSTMASTER': 'postmaster',
    'TLS_FLAVOR': 'cert',
    'AUTH_RATELIMIT': '10/minute;1000/hour',
    'DISABLE_STATISTICS': False,
    # Mail settings
    'DMARC_RUA': None,
    'DMARC_RUF': None,
    'WELCOME': False,
    'WELCOME_SUBJECT': 'Dummy welcome topic',
    'WELCOME_BODY': 'Dummy welcome body',
    'DKIM_SELECTOR': 'dkim',
    'DKIM_PATH': '/dkim/{domain}.{selector}.key',
    'DEFAULT_QUOTA': 1000000000,
    # Web settings
    'SITENAME': 'Mailu',
    'WEBSITE': 'https://mailu.io',
    'WEB_ADMIN': '/admin',
    'WEB_WEBMAIL': '/webmail',
    'WEBMAIL': 'none',
    'RECAPTCHA_PUBLIC_KEY': '',
    'RECAPTCHA_PRIVATE_KEY': '',
    # Advanced settings
    'PASSWORD_SCHEME': 'BLF-CRYPT',
    # Host settings
    'HOST_IMAP': 'imap',
    'HOST_POP3': 'imap',
    'HOST_SMTP': 'smtp',
    'HOST_WEBMAIL': 'webmail',
    'HOST_FRONT': 'front',
    # NOTE: reads the environment at import time, before init_app runs.
    'HOST_AUTHSMTP': os.environ.get('HOST_SMTP', 'smtp'),
    'SUBNET': '192.168.203.0/24',
    'POD_ADDRESS_RANGE': None
}
class ConfigManager(dict):
    """Naive configuration manager backed purely by the environment.

    Stands in for Flask's ``app.config``: defaults come from
    ``DEFAULT_CONFIG`` and may be overridden by same-named environment
    variables, with "true"/"yes" and "false"/"no" coerced to booleans.
    """

    DB_TEMPLATES = {
        'sqlite': 'sqlite:////{SQLITE_DATABASE_FILE}',
        'postgresql': 'postgresql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}',
        'mysql': 'mysql://{DB_USER}:{DB_PW}@{DB_HOST}/{DB_NAME}'
    }

    HOSTS = ('IMAP', 'POP3', 'AUTHSMTP', 'SMTP')
    OPTIONAL_HOSTS = ('WEBMAIL', 'ANTISPAM')

    def __init__(self):
        self.config = {}

    def resolve_host(self):
        """Resolve every configured HOST_* entry to an address, in place."""
        present = [name for name in self.OPTIONAL_HOSTS if name in self.config]
        for name in list(self.HOSTS) + present:
            key = 'HOST_' + name
            self.config[key] = system.resolve_address(self.config[key])

    def __coerce_value(self, value):
        """Map boolean-ish strings onto real booleans; pass others through."""
        if isinstance(value, str):
            lowered = value.lower()
            if lowered in ('true','yes'):
                return True
            if lowered in ('false', 'no'):
                return False
        return value

    def init_app(self, app):
        """Fill from app.config plus the environment, then become app.config."""
        self.config.update(app.config)
        # Environment variables win over the defaults.
        overrides = {
            name: self.__coerce_value(os.environ.get(name, default))
            for name, default in DEFAULT_CONFIG.items()
        }
        self.config.update(overrides)
        self.resolve_host()
        # Derive the SQLAlchemy connection string when a flavor is chosen.
        if self.config['DB_FLAVOR']:
            template = self.DB_TEMPLATES[self.config['DB_FLAVOR']]
            self.config['SQLALCHEMY_DATABASE_URI'] = template.format(**self.config)
        # Install this manager as the application's config object.
        app.config = self

    def setdefault(self, key, value):
        if key in self.config:
            return self.config[key]
        self.config[key] = value
        return value

    def get(self, *args):
        return self.config.get(*args)

    def keys(self):
        return self.config.keys()

    def __getitem__(self, key):
        return self.config.get(key)

    def __setitem__(self, key, value):
        self.config[key] = value

    def __contains__(self, key):
        return key in self.config
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import time
import numpy as np
import torch
from torch.autograd import Variable
from core.agent_single_process import AgentSingleProcess
from utils.helpers import A3C_Experience
class A3CSingleProcess(AgentSingleProcess):
    """Base class for one A3C worker process.

    Holds the per-process model, environment and (optionally) LSTM hidden
    state.  Written against the pre-0.4 PyTorch Variable/volatile API.
    """
    # Small constant available to subclasses (presumably to avoid log(0) /
    # division by zero in loss computations — TODO confirm usage).
    epsilon = 1e-13
    def __init__(self, master, process_id=0):
        super(A3CSingleProcess, self).__init__(master, process_id)
        # lstm hidden states
        if self.master.enable_lstm:
            # Models may expose one or two stacked LSTM layers; default to 1.
            self.lstm_layer_count = self.model.lstm_layer_count if hasattr(self.model, "lstm_layer_count") else 1
            self._reset_lstm_hidden_vb_episode() # clear up hidden state
            self._reset_lstm_hidden_vb_rollout() # detach the previous variable from the computation graph
        # NOTE global variable pi
        if self.master.enable_continuous:
            self.pi_vb = Variable(torch.Tensor([math.pi]).type(self.master.dtype))
        self.master.logger.warning(
            "Registered A3C-SingleProcess-Agent #" + str(self.process_id) + " w/ Env (seed:" + str(
                self.env.seed) + ").")
    # NOTE: to be called at the beginning of each new episode, clear up the hidden state
    def _reset_lstm_hidden_vb_episode(self, training=True): # seq_len, batch_size, hidden_vb_dim
        """Zero the LSTM hidden/cell state; first dim is num_robots (or a
        hard-coded 2 in continuous mode — TODO confirm why 2)."""
        not_training = not training
        if hasattr(self.master, "num_robots"):
            r = self.master.num_robots
            if self.master.enable_continuous: # TODO: what here?
                self.lstm_hidden_vb = (
                    Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
                    Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
                if self.lstm_layer_count == 2:
                    self.lstm_hidden_vb2 = (
                        Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training),
                        Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training))
            else:
                self.lstm_hidden_vb = (
                    Variable(torch.zeros(r, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
                    Variable(torch.zeros(r, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
                if self.lstm_layer_count == 2:
                    self.lstm_hidden_vb2 = (
                        Variable(torch.zeros(r, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training),
                        Variable(torch.zeros(r, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training))
        else:
            if self.master.enable_continuous:
                self.lstm_hidden_vb = (
                    Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
                    Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
                if self.lstm_layer_count == 2:
                    self.lstm_hidden_vb2 = (
                        Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training),
                        Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training))
            else:
                self.lstm_hidden_vb = (
                    Variable(torch.zeros(1, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
                    Variable(torch.zeros(1, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
                if self.lstm_layer_count == 2:
                    self.lstm_hidden_vb2 = (
                        Variable(torch.zeros(1, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training),
                        Variable(torch.zeros(1, self.model.hidden_vb2_dim).type(self.master.dtype),
                                 volatile=not_training))
    # NOTE: to be called at the beginning of each rollout, detach the previous variable from the graph
    def _reset_lstm_hidden_vb_rollout(self):
        """Re-wrap the hidden state's raw tensors in fresh Variables so the
        new rollout does not backprop into the previous one."""
        self.lstm_hidden_vb = (Variable(self.lstm_hidden_vb[0].data),
                               Variable(self.lstm_hidden_vb[1].data))
        if self.lstm_layer_count == 2:
            self.lstm_hidden_vb2 = (Variable(self.lstm_hidden_vb2[0].data),
                                    Variable(self.lstm_hidden_vb2[1].data))
    def _preprocessState(self, state, is_valotile=False):
        """Wrap a numpy state (or list of them) as batched Variable(s).

        NOTE(review): parameter name `is_valotile` is a typo for
        "is_volatile" — kept for caller compatibility.
        """
        if isinstance(state, list):
            state_vb = []
            for i in range(len(state)):
                state_vb.append(
                    Variable(torch.from_numpy(state[i]).unsqueeze(0).type(self.master.dtype), volatile=is_valotile))
        else:
            state_vb = Variable(torch.from_numpy(state).unsqueeze(0).type(self.master.dtype), volatile=is_valotile)
        return state_vb
    def _forward(self, state_vb):
        """Run the policy/value model on `state_vb` and pick an action.

        Continuous: returns (action, mu_vb, sigma_vb, v_vb); the action is
        sampled as mu + sqrt(sigma)*eps during training, mu otherwise.
        Discrete: returns (action, p_vb, v_vb); multinomial sampling during
        training, argmax otherwise.  LSTM hidden state is updated in place.
        """
        if self.master.enable_continuous: # NOTE continuous control p_vb here is the mu_vb of continuous action dist
            if self.master.enable_lstm:
                if self.lstm_layer_count == 1:
                    p_vb, sig_vb, v_vb, self.lstm_hidden_vb = self.model(state_vb, self.lstm_hidden_vb)
                elif self.lstm_layer_count == 2:
                    p_vb, sig_vb, v_vb, self.lstm_hidden_vb, self.lstm_hidden_vb2 = self.model(state_vb,
                                                                                               self.lstm_hidden_vb,
                                                                                               self.lstm_hidden_vb2)
            else:
                p_vb, sig_vb, v_vb = self.model(state_vb)
            if self.training:
                _eps = torch.randn(p_vb.size())
                action = (p_vb + sig_vb.sqrt() * Variable(_eps)).data.numpy() # TODO:?
            else:
                action = p_vb.data.numpy()
            return action, p_vb, sig_vb, v_vb
        else:
            if self.master.enable_lstm:
                if self.lstm_layer_count == 1:
                    p_vb, v_vb, self.lstm_hidden_vb = self.model(state_vb, self.lstm_hidden_vb)
                elif self.lstm_layer_count == 2:
                    p_vb, v_vb, self.lstm_hidden_vb, self.lstm_hidden_vb2 = self.model(state_vb, self.lstm_hidden_vb,
                                                                                       self.lstm_hidden_vb2)
            else:
                p_vb, v_vb = self.model(state_vb)
            if self.training:
                action = p_vb.multinomial().data.squeeze().numpy()
            else:
                action = p_vb.max(1)[1].data.squeeze().numpy()
            return action, p_vb, v_vb
    def _normal(self, x, mu, sigma_sq):
        """Log-density of x under a diagonal Gaussian N(mu, sigma_sq)."""
        a = (-1 * (x - mu).pow(2) / (2 * sigma_sq)).exp()
        b = 1 / (2 * sigma_sq * self.pi_vb.expand_as(sigma_sq)).sqrt()
        return (a * b).log()
# noinspection PyPep8Naming
class A3CLearner(A3CSingleProcess):
def __init__(self, master, process_id=0):
master.logger.warning(
"<===================================> A3C-Learner #" + str(process_id) + " {Env & Model}")
super(A3CLearner, self).__init__(master, process_id)
self._reset_rollout()
self.training = True # choose actions by polynomial
self.model.train(self.training)
if self.master.icm:
self.icm_inv_model.train(self.training)
self.icm_fwd_model.train(self.training)
# local counters
self.frame_step = 0 # local frame step counter
self.train_step = 0 # local train step counter
# local training stats
self.p_loss_avg = 0. # global policy loss
self.v_loss_avg = 0. # global value loss
self.loss_avg = 0. # global value loss
self.loss_counter = 0 # storing this many losses
self.icm_inv_loss_avg = 0.
self.icm_fwd_loss_avg = 0.
self.icm_inv_accuracy_avg = 0.
self.grad_magnitude_avg = 0.
self.grad_magnitude_max = 0.
self._reset_training_loggings()
# copy local training stats to global every prog_freq
self.last_prog = time.time()
def _reset_training_loggings(self):
self.p_loss_avg = 0.
self.v_loss_avg = 0.
self.loss_avg = 0.
self.loss_counter = 0
self.icm_inv_loss_avg = 0.
self.icm_fwd_loss_avg = 0.
self.icm_inv_accuracy_avg = 0.
self.grad_magnitude_avg = 0.
self.grad_magnitude_max = 0.
def _reset_rollout(self): # for storing the experiences collected through one rollout
self.rollout = A3C_Experience(state0=[],
action=[],
reward=[],
state1=[],
terminal1=[],
policy_vb=[],
sigmoid_vb=[],
value0_vb=[])
def _get_valueT_vb(self):
if self.rollout.terminal1[-1]: # for terminal sT
valueT_vb = Variable(torch.zeros(self.master.num_robots, 1))
else: # for non-terminal sT
sT_vb = self._preprocessState(self.rollout.state1[-1], True) # bootstrap from last state
if self.master.enable_continuous:
if self.master.enable_lstm:
if self.lstm_layer_count == 1:
_, _, valueT_vb, _ = self.model(sT_vb, self.lstm_hidden_vb) # NOTE: only doing inference here
elif self.lstm_layer_count == 2:
_, _, valueT_vb, _, _ = self.model(sT_vb, self.lstm_hidden_vb,
self.lstm_hidden_vb2) # NOTE: only doing inference here
else:
_, _, valueT_vb = self.model(sT_vb) # NOTE: only doing inference here
else:
if self.master.enable_lstm:
if self.lstm_layer_count == 1:
_, valueT_vb, _ = self.model(sT_vb, self.lstm_hidden_vb) # NOTE: only doing inference here
elif self.lstm_layer_count == 2:
_, valueT_vb, _, _ = self.model(sT_vb, self.lstm_hidden_vb,
self.lstm_hidden_vb2) # NOTE: only doing inference here
else:
_, valueT_vb = self.model(sT_vb) # NOTE: only doing inference here
# NOTE: here valueT_vb.volatile=True since sT_vb.volatile=True
# NOTE: if we use detach() here, it would remain volatile
# NOTE: then all the follow-up computations would only give volatile loss variables
valueT_vb = Variable(valueT_vb.data)
return valueT_vb
    def _backward(self):
        """Compute losses over the collected rollout, backprop, and step the optimizers.

        Order of operations (order-sensitive — do not rearrange):
        1. If ICM is enabled, compute the inverse- and forward-model losses on the
           whole rollout and backward them (gradients accumulate into the ICM models).
        2. Build per-step log-probabilities / entropies of the taken actions.
        3. Bootstrap V(s_T) via ``_get_valueT_vb`` and walk the rollout backwards,
           accumulating the n-step value loss and a GAE-weighted policy loss.
        4. Backward the combined loss, clip gradients, copy local grads to the
           shared model (``_ensure_global_grads``), and step the shared optimizers.
        5. Optionally decay the learning rate and accumulate local logging stats.

        Side effects: increments local and shared train-step counters; mutates
        ``rollout.value0_vb`` by appending the bootstrap value.
        """
        rollout_steps = len(self.rollout.reward)
        # ICM first if enabled
        if self.master.icm:
            # if rollout_steps > 1:
            #     pass
            # TODO: also use target data in the state?
            # NOTE(review): states appear laid out as state_shape features plus 3
            # extra (target?) columns that are sliced off here — confirm upstream.
            state_start = np.array(self.rollout.state0).reshape(-1, self.master.state_shape + 3)[:,
                          :self.master.state_shape]
            state_next = np.array(self.rollout.state1).reshape(-1, self.master.state_shape + 3)[:,
                         :self.master.state_shape]
            state_start = Variable(torch.from_numpy(state_start).type(self.master.dtype))
            state_next = Variable(torch.from_numpy(state_next).type(self.master.dtype))
            actions = np.array(self.rollout.action).reshape(-1)
            actions = Variable(torch.from_numpy(actions).long(), requires_grad=False)
            features, features_next, action_logits, action_probs = \
                self.icm_inv_model.forward((state_start, state_next))
            # detach so the forward-model loss does not train the feature encoder
            features_next = features_next.detach()
            icm_inv_loss = self.icm_inv_loss_criterion(action_logits, actions)
            icm_inv_loss_mean = icm_inv_loss.mean()
            icm_inv_loss_mean.backward()
            # TODO: right to create new Variable here?
            # otherwise RuntimeError: Trying to backward through the graph a second time
            features_next_pred = self.icm_fwd_model.forward((Variable(features.data), actions))
            icm_fwd_loss = self.icm_fwd_loss_criterion(features_next_pred, features_next).mean(dim=1)
            icm_fwd_loss_mean = icm_fwd_loss.mean()
            # TODO: does this backpropagate through the inverse model too?
            icm_fwd_loss_mean.backward()
            # accumulate ICM stats for periodic flushing to the master
            self.icm_inv_loss_avg += icm_inv_loss_mean.data.numpy()
            self.icm_fwd_loss_avg += icm_fwd_loss_mean.data.numpy()
            self.icm_inv_accuracy_avg += actions.eq(action_probs.max(1)[1]).sum().data.numpy()[0] / float(
                actions.size()[0])
            # kept detached for the (currently disabled) intrinsic-reward bonus below
            icm_inv_loss_detached = Variable(icm_inv_loss.data)
            icm_fwd_loss_detached = Variable(icm_fwd_loss.data)
        # preparation
        policy_vb = self.rollout.policy_vb
        if self.master.enable_continuous:
            action_batch_vb = Variable(torch.from_numpy(np.array(self.rollout.action)))
            if self.master.use_cuda:
                action_batch_vb = action_batch_vb.cuda()
            sigma_vb = self.rollout.sigmoid_vb
        else:
            action_batch_vb = Variable(torch.from_numpy(np.array(self.rollout.action)).long())
            if self.master.use_cuda:
                action_batch_vb = action_batch_vb.cuda()
            # epsilon guards log(0) for zero-probability actions
            policy_log_vb = [torch.log(policy_vb[i] + self.epsilon) for i in range(rollout_steps)]
            entropy_vb = [- (policy_log_vb[i] * policy_vb[i]).sum(1) for i in range(rollout_steps)]
            if hasattr(self.master, "num_robots"):
                # multi-robot: gather the log-prob of each robot's chosen action
                policy_log_vb = [
                    policy_log_vb[i].gather(1, action_batch_vb[i].unsqueeze(0).view(self.master.num_robots, -1)) for i
                    in range(rollout_steps)]
            else:
                policy_log_vb = [policy_log_vb[i].gather(1, action_batch_vb[i].unsqueeze(0)) for i in
                                 range(rollout_steps)]
        valueT_vb = self._get_valueT_vb()
        self.rollout.value0_vb.append(
            Variable(valueT_vb.data)) # NOTE: only this last entry is Volatile, all others are still in the graph
        gae_ts = torch.zeros(self.master.num_robots, 1)
        # compute loss
        policy_loss_vb = 0.
        value_loss_vb = 0.
        for i in reversed(range(rollout_steps)):
            reward_vb = Variable(torch.from_numpy(self.rollout.reward[i])).float().view(-1, 1)
            # TODO: for comparison; turn back on later!
            # if self.master.icm:
            #     reward_vb += 0.20 * (icm_inv_loss_detached[i] + icm_fwd_loss_detached[i])
            # n-step discounted return, built backwards from the bootstrap value
            valueT_vb = self.master.gamma * valueT_vb + reward_vb
            advantage_vb = valueT_vb - self.rollout.value0_vb[i]
            value_loss_vb = value_loss_vb + 0.5 * advantage_vb.pow(2)
            # Generalized Advantage Estimation
            tderr_ts = reward_vb.data + self.master.gamma * self.rollout.value0_vb[i + 1].data - \
                       self.rollout.value0_vb[i].data
            gae_ts = self.master.gamma * gae_ts * self.master.tau + tderr_ts
            if self.master.enable_continuous:
                _log_prob = self._normal(action_batch_vb[i], policy_vb[i], sigma_vb[i])
                # differential entropy of a Gaussian: 0.5 * (log(2*pi*sigma^2) + 1)
                _entropy = 0.5 * ((sigma_vb[i] * 2 * self.pi_vb.expand_as(sigma_vb[i])).log() + 1)
                policy_loss_vb -= (_log_prob * Variable(gae_ts).expand_as(
                    _log_prob)).sum() + self.master.beta * _entropy.sum()
            else:
                policy_loss_vb -= policy_log_vb[i] * Variable(gae_ts) + (self.master.beta * entropy_vb[i]).view(
                    self.master.num_robots, -1)
        loss_vb = policy_loss_vb + 0.5 * value_loss_vb
        loss_vb = loss_vb.mean(dim=0)
        loss_vb.backward()
        torch.nn.utils.clip_grad_norm(self.model.parameters(), self.master.clip_grad)
        # targets random for each episode, each robot has its target # DONE
        # random map for each episode # DONE
        # update a3c code for rewards for each robot # DONE
        self._ensure_global_grads()
        self.master.optimizer.step()
        if self.master.icm:
            self.master.icm_inv_optimizer.step()
            self.master.icm_fwd_optimizer.step()
        self.train_step += 1
        self.master.train_step.value += 1
        # adjust learning rate if enabled
        if self.master.lr_decay:
            # linear decay toward (almost) zero over master.steps train steps
            self.master.lr_adjusted.value = max(
                self.master.lr * (self.master.steps - self.master.train_step.value) / self.master.steps, 1e-32)
            adjust_learning_rate(self.master.optimizer, self.master.lr_adjusted.value)
        # log training stats
        self.p_loss_avg += policy_loss_vb.data.numpy()
        self.v_loss_avg += value_loss_vb.data.numpy()
        self.loss_avg += loss_vb.data.numpy()
        self.grad_magnitude_avg += np.mean([np.abs(p.grad.data.norm()) for p in self.model.parameters()])
        self.grad_magnitude_max = np.max(
            [np.abs(p.grad.data.norm()) for p in self.model.parameters()] + [self.grad_magnitude_max]
        )
        self.loss_counter += 1
    def _rollout(self, episode_steps, episode_reward):
        """Collect up to ``rollout_steps`` transitions into ``self.rollout``.

        Starts from the current ``self.experience`` and steps the environment,
        appending each transition (state, action, reward, value, policy) to
        the rollout buffer.  Returns the updated ``(episode_steps,
        episode_reward)`` accumulators; also advances the local and shared
        frame-step counters.
        """
        # reset rollout experiences
        self._reset_rollout()
        t_start = self.frame_step
        # continue to rollout only if:
        # 1. not running out of max steps of this current rollout, and
        # 2. not terminal, and
        # 3. not exceeding max steps of this current episode
        # 4. master not exceeding max train steps
        while (self.frame_step - t_start) < self.master.rollout_steps \
                and not self.experience.terminal1 \
                and (self.master.early_stop is None or episode_steps < self.master.early_stop):
            # NOTE: here first store the last frame: experience.state1 as rollout.state0
            self.rollout.state0.append(self.experience.state1)
            # then get the action to take from rollout.state0 (experience.state1)
            if self.master.enable_continuous:
                action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1))
                self.rollout.sigmoid_vb.append(sig_vb)
            else:
                action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1))
            # then execute action in env to get a new experience.state1 -> rollout.state1
            self.experience = self.env.step(action)
            # push experience into rollout
            self.rollout.action.append(action)
            self.rollout.reward.append(self.experience.reward)
            self.rollout.state1.append(self.experience.state1)
            self.rollout.terminal1.append(self.experience.terminal1)
            self.rollout.policy_vb.append(p_vb)
            self.rollout.value0_vb.append(v_vb)
            episode_steps += 1
            episode_reward += self.experience.reward
            self.frame_step += 1
            self.master.frame_step.value += 1
            # coarse progress heartbeat every 1000 rollouts' worth of frames
            if self.master.frame_step.value % (1000 * self.master.rollout_steps) == 0:
                print("train step: {0}, frame step {1}, time: {2}".format(self.master.train_step.value,
                                                                          self.master.frame_step.value, time.time()))
            # NOTE: we put this condition in the end to make sure this current rollout won't be empty
            if self.master.train_step.value >= self.master.steps:
                break
        return episode_steps, episode_reward
    def run(self):
        """Main training loop of one A3C worker process.

        Repeatedly: sync the local model with the shared one, collect a
        rollout, backprop via ``_backward``, and periodically flush local
        logging stats into the master's shared values.  Terminates when the
        shared train-step counter reaches ``master.steps``.
        """
        # make sure processes are not completely synced by sleeping a bit
        time.sleep(int(np.random.rand() * (self.process_id + 5)))
        nepisodes = 0
        nepisodes_solved = 0
        episode_steps = None
        episode_reward = None
        should_start_new = True
        while self.master.train_step.value < self.master.steps:
            # sync in every step
            self._sync_local_with_global()
            self.model.zero_grad()
            if self.master.icm:
                self.icm_inv_model.zero_grad()
                self.icm_fwd_model.zero_grad()
            # start of a new episode
            if should_start_new:
                episode_steps = 0
                episode_reward = np.zeros(self.master.num_robots)
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode()
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                # reset flag
                should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each rollout
                self._reset_lstm_hidden_vb_rollout()
            # Run a rollout for rollout_steps or until terminal
            episode_steps, episode_reward = self._rollout(episode_steps, episode_reward)
            # episode ends on terminal or early-stop step budget
            if self.experience.terminal1 or \
                    self.master.early_stop and episode_steps >= self.master.early_stop:
                nepisodes += 1
                should_start_new = True
                if self.experience.terminal1:
                    nepisodes_solved += 1
                    self.master.terminations_count.value += 1
            # calculate loss
            self._backward()
            # copy local training stats to global at prog_freq, and clear up local stats
            if time.time() - self.last_prog >= self.master.prog_freq:
                self.master.p_loss_avg.value += self.p_loss_avg.mean()
                self.master.v_loss_avg.value += self.v_loss_avg.mean()
                self.master.loss_avg.value += self.loss_avg.mean()
                self.master.loss_counter.value += self.loss_counter
                self.master.grad_magnitude_avg.value += self.grad_magnitude_avg
                val = self.master.grad_magnitude_max.value
                self.master.grad_magnitude_max.value = max(self.grad_magnitude_max, val)
                self.master.icm_inv_loss_avg.value += self.icm_inv_loss_avg
                self.master.icm_fwd_loss_avg.value += self.icm_fwd_loss_avg
                self.master.icm_inv_accuracy_avg.value += self.icm_inv_accuracy_avg
                self._reset_training_loggings()
                self.last_prog = time.time()
class A3CEvaluator(A3CSingleProcess):
    """Evaluation process for A3C.

    Periodically (every ``master.eval_freq`` seconds) pulls the latest shared
    model, runs ``master.eval_steps`` greedy environment steps, aggregates the
    training stats that the learner processes pushed into the master's shared
    values, logs everything, optionally plots to visdom, and saves the model.
    """
    def __init__(self, master, process_id=0):
        """Build the evaluator: local model in eval mode, logging reset."""
        master.logger.warning("<===================================> A3C-Evaluator {Env & Model}")
        super(A3CEvaluator, self).__init__(master, process_id)
        self.training = False # choose actions w/ max probability
        self.model.train(self.training)
        if self.master.icm:
            self.icm_inv_model.train(self.training)
            self.icm_fwd_model.train(self.training)
        self._reset_loggings()
        self.start_time = time.time()
        self.last_eval = time.time()
    def _reset_loggings(self):
        """Initialise the history lists and (optionally) visdom window handles."""
        # training stats across all processes
        self.p_loss_avg_log = []
        self.v_loss_avg_log = []
        self.loss_avg_log = []
        self.icm_inv_loss_avg_log = []
        self.icm_fwd_loss_avg_log = []
        self.icm_inv_accuracy_avg_log = []
        self.grad_magnitude_avg_log = []
        self.grad_magnitude_max_log = []
        # evaluation stats
        self.entropy_avg_log = []
        self.v_avg_log = []
        self.steps_avg_log = []
        self.steps_std_log = []
        self.reward_avg_log = []
        self.reward_std_log = []
        self.nepisodes_log = []
        self.nepisodes_solved_log = []
        self.repisodes_solved_log = []
        self.terminals_reached_log = []
        # histogram of how often each discrete action was chosen
        self.action_counts = np.zeros(self.master.action_dim)
        # placeholders for windows for online curve plotting
        if self.master.visualize:
            # training stats across all processes
            self.win_p_loss_avg = "win_p_loss_avg"
            self.win_v_loss_avg = "win_v_loss_avg"
            self.win_loss_avg = "win_loss_avg"
            self.win_icm_inv_loss_avg = "win_icm_inv_loss_avg"
            self.win_icm_fwd_loss_avg = "win_icm_fwd_loss_avg"
            self.win_icm_inv_accuracy_avg = "win_icm_inv_accuracy_avg"
            self.win_grad_magnitude_avg = "win_grad_magnitude_avg"
            self.win_grad_magnitude_max = "win_grad_magnitude_max"
            # evaluation stats
            self.win_entropy_avg = "win_entropy_avg"
            self.win_v_avg = "win_v_avg"
            self.win_steps_avg = "win_steps_avg"
            self.win_steps_std = "win_steps_std"
            self.win_reward_avg = "win_reward_avg"
            self.win_reward_std = "win_reward_std"
            self.win_nepisodes = "win_nepisodes"
            self.win_nepisodes_solved = "win_nepisodes_solved"
            self.win_repisodes_solved = "win_repisodes_solved"
            self.win_terminals_reached = "win_terminals_reached"
            self.win_action_counts = "action_counts"
    def _eval_model(self):
        """Run one evaluation phase: roll episodes, aggregate stats, log, plot, save."""
        self.last_eval = time.time()
        eval_at_train_step = self.master.train_step.value
        eval_at_frame_step = self.master.frame_step.value
        # first grab the latest global model to do the evaluation
        self._sync_local_with_global()
        # evaluate
        eval_step = 0
        eval_entropy_log = []
        eval_v_log = []
        eval_nepisodes = 0
        eval_nepisodes_solved = 0
        eval_episode_steps = None
        eval_episode_steps_log = []
        eval_episode_reward = None
        eval_episode_reward_log = []
        eval_should_start_new = True
        while eval_step < self.master.eval_steps:
            if eval_should_start_new: # start of a new episode
                eval_episode_steps = 0
                eval_episode_reward = 0.
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode(self.training)
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                if not self.training:
                    if self.master.visualize: self.env.visual()
                    if self.master.render: self.env.render()
                # reset flag
                eval_should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each step
                # NOTE: not necessary here in evaluation but we do it anyways
                self._reset_lstm_hidden_vb_rollout()
            # Run a single step
            if self.master.enable_continuous:
                eval_action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            else:
                eval_action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            self.experience = self.env.step(eval_action)
            self.action_counts[eval_action] += 1
            if not self.training:
                if self.master.visualize: self.env.visual()
                if self.master.render: self.env.render()
            # episode boundary: terminal, early-stop budget, or end of the eval budget
            if self.experience.terminal1 or \
                    self.master.early_stop and (eval_episode_steps + 1) == self.master.early_stop or \
                    (eval_step + 1) == self.master.eval_steps:
                eval_should_start_new = True
            eval_episode_steps += 1
            eval_episode_reward += self.experience.reward
            eval_step += 1
            if eval_should_start_new:
                eval_nepisodes += 1
                if self.experience.terminal1:
                    eval_nepisodes_solved += 1
                # This episode is finished, report and reset
                # NOTE make no sense for continuous
                if self.master.enable_continuous:
                    eval_entropy_log.append(
                        [0.5 * ((sig_vb * 2 * self.pi_vb.expand_as(sig_vb)).log() + 1).data.numpy()])
                else:
                    eval_entropy_log.append(
                        [np.mean((-torch.log(p_vb.data.squeeze() + self.epsilon) * p_vb.data.squeeze()).numpy())])
                eval_v_log.append([v_vb.data.numpy()])
                eval_episode_steps_log.append([eval_episode_steps])
                eval_episode_reward_log.append([eval_episode_reward])
                self._reset_experience()
                eval_episode_steps = None
                eval_episode_reward = None
        # Logging for this evaluation phase
        # averages of the training stats the learner processes accumulated globally
        loss_counter = self.master.loss_counter.value
        p_loss_avg = self.master.p_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        v_loss_avg = self.master.v_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        loss_avg = self.master.loss_avg.value / loss_counter if loss_counter > 0 else 0.
        icm_inv_loss_avg = self.master.icm_inv_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        icm_fwd_loss_avg = self.master.icm_fwd_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        icm_inv_accuracy_avg = self.master.icm_inv_accuracy_avg.value / loss_counter if loss_counter > 0 else 0.
        grad_magnitude_avg = self.master.grad_magnitude_avg.value / loss_counter if loss_counter > 0 else 0.
        grad_magnitude_max = self.master.grad_magnitude_max.value
        self.master._reset_training_logs()
        def _log_at_step(eval_at_step):
            # append this phase's stats to the history lists and emit log lines;
            # eval_at_step is either the train step or the frame step (see below)
            self.p_loss_avg_log.append([eval_at_step, p_loss_avg])
            self.v_loss_avg_log.append([eval_at_step, v_loss_avg])
            self.loss_avg_log.append([eval_at_step, loss_avg])
            self.icm_inv_loss_avg_log.append([eval_at_step, icm_inv_loss_avg])
            self.icm_fwd_loss_avg_log.append([eval_at_step, icm_fwd_loss_avg])
            self.icm_inv_accuracy_avg_log.append([eval_at_step, icm_inv_accuracy_avg])
            self.grad_magnitude_avg_log.append([eval_at_step, grad_magnitude_avg])
            self.grad_magnitude_max_log.append([eval_at_step, grad_magnitude_max])
            self.entropy_avg_log.append([eval_at_step, np.mean(np.asarray(eval_entropy_log))])
            self.v_avg_log.append([eval_at_step, np.mean(np.asarray(eval_v_log))])
            self.steps_avg_log.append([eval_at_step, np.mean(np.asarray(eval_episode_steps_log))])
            self.steps_std_log.append([eval_at_step, np.std(np.asarray(eval_episode_steps_log))])
            self.reward_avg_log.append([eval_at_step, np.mean(np.asarray(eval_episode_reward_log))])
            self.reward_std_log.append([eval_at_step, np.std(np.asarray(eval_episode_reward_log))])
            self.nepisodes_log.append([eval_at_step, eval_nepisodes])
            self.nepisodes_solved_log.append([eval_at_step, eval_nepisodes_solved])
            self.repisodes_solved_log.append(
                [eval_at_step, (eval_nepisodes_solved / eval_nepisodes) if eval_nepisodes > 0 else 0.])
            self.terminals_reached_log.append([self.master.train_step.value, self.master.terminations_count.value])
            # logging
            self.master.logger.warning("Reporting @ Step: " + str(eval_at_step) + " | Elapsed Time: " + str(
                time.time() - self.start_time))
            self.master.logger.warning("Iteration: {}; lr: {}".format(eval_at_step, self.master.lr_adjusted.value))
            self.master.logger.warning("Iteration: {}; p_loss_avg: {}".format(eval_at_step, self.p_loss_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; v_loss_avg: {}".format(eval_at_step, self.v_loss_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; loss_avg: {}".format(eval_at_step, self.loss_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; icm_inv_loss_avg: {}".format(eval_at_step, self.icm_inv_loss_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; icm_fwd_loss_avg: {}".format(eval_at_step, self.icm_fwd_loss_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; icm_inv_accuracy_avg: {}".format(eval_at_step, self.icm_inv_accuracy_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; grad_magnitude_avg: {}".format(eval_at_step, self.grad_magnitude_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; grad_magnitude_max: {}".format(eval_at_step, self.grad_magnitude_max_log[-1][1]))
            # NOTE(review): second reset — training logs were already reset above,
            # before _log_at_step was defined; this repeat looks redundant. Confirm.
            self.master._reset_training_logs()
            self.master.logger.warning(
                "Evaluating @ Step: " + str(eval_at_train_step) + " | (" + str(eval_at_frame_step) + " frames)...")
            self.master.logger.warning("Evaluation Took: " + str(time.time() - self.last_eval))
            self.master.logger.warning(
                "Iteration: {}; entropy_avg: {}".format(eval_at_step, self.entropy_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; v_avg: {}".format(eval_at_step, self.v_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; steps_avg: {}".format(eval_at_step, self.steps_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; steps_std: {}".format(eval_at_step, self.steps_std_log[-1][1]))
            self.master.logger.warning("Iteration: {}; reward_avg: {}".format(eval_at_step, self.reward_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; reward_std: {}".format(eval_at_step, self.reward_std_log[-1][1]))
            self.master.logger.warning("Iteration: {}; nepisodes: {}".format(eval_at_step, self.nepisodes_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; nepisodes_solved: {}".format(eval_at_step, self.nepisodes_solved_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; repisodes_solved: {}".format(eval_at_step, self.repisodes_solved_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; terminals_reached: {}".format(eval_at_step, self.terminals_reached_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; action_counts: {}".format(eval_at_step, self.action_counts))
        # x-axis of the logs is either the train step or the frame step
        if self.master.enable_log_at_train_step:
            _log_at_step(eval_at_train_step)
        else:
            _log_at_step(eval_at_frame_step)
        # plotting
        if self.master.visualize:
            self.win_p_loss_avg = self.master.vis.scatter(X=np.array(self.p_loss_avg_log), env=self.master.refs,
                                                          win=self.win_p_loss_avg, opts=dict(title="p_loss_avg"))
            self.win_v_loss_avg = self.master.vis.scatter(X=np.array(self.v_loss_avg_log), env=self.master.refs,
                                                          win=self.win_v_loss_avg, opts=dict(title="v_loss_avg"))
            self.win_loss_avg = self.master.vis.scatter(X=np.array(self.loss_avg_log), env=self.master.refs,
                                                        win=self.win_loss_avg, opts=dict(title="loss_avg"))
            self.win_icm_inv_loss_avg = self.master.vis.scatter(X=np.array(self.icm_inv_loss_avg_log),
                                                                env=self.master.refs,
                                                                win=self.win_icm_inv_loss_avg,
                                                                opts=dict(title="icm_inv_loss_avg"))
            self.win_icm_fwd_loss_avg = self.master.vis.scatter(X=np.array(self.icm_fwd_loss_avg_log),
                                                                env=self.master.refs,
                                                                win=self.win_icm_fwd_loss_avg,
                                                                opts=dict(title="icm_fwd_loss_avg"))
            self.win_icm_inv_accuracy_avg = self.master.vis.scatter(X=np.array(self.icm_inv_accuracy_avg_log),
                                                                    env=self.master.refs,
                                                                    win=self.win_icm_inv_accuracy_avg,
                                                                    opts=dict(title="icm_inv_accuracy_avg"))
            self.win_grad_magnitude_avg = self.master.vis.scatter(X=np.array(self.grad_magnitude_avg_log),
                                                                  env=self.master.refs,
                                                                  win=self.win_grad_magnitude_avg,
                                                                  opts=dict(title="grad_magnitude_avg"))
            # TODO: add avg to name
            self.win_grad_magnitude_max = self.master.vis.scatter(X=np.array(self.grad_magnitude_max_log),
                                                                  env=self.master.refs,
                                                                  win=self.win_grad_magnitude_max,
                                                                  opts=dict(title="grad_magnitude_max_avg"))
            self.win_entropy_avg = self.master.vis.scatter(X=np.array(self.entropy_avg_log), env=self.master.refs,
                                                           win=self.win_entropy_avg, opts=dict(title="entropy_avg"))
            self.win_v_avg = self.master.vis.scatter(X=np.array(self.v_avg_log), env=self.master.refs,
                                                     win=self.win_v_avg, opts=dict(title="v_avg"))
            self.win_steps_avg = self.master.vis.scatter(X=np.array(self.steps_avg_log), env=self.master.refs,
                                                         win=self.win_steps_avg, opts=dict(title="steps_avg"))
            # self.win_steps_std = self.master.vis.scatter(X=np.array(self.steps_std_log), env=self.master.refs, win=self.win_steps_std, opts=dict(title="steps_std"))
            self.win_reward_avg = self.master.vis.scatter(X=np.array(self.reward_avg_log), env=self.master.refs,
                                                          win=self.win_reward_avg, opts=dict(title="reward_avg"))
            # self.win_reward_std = self.master.vis.scatter(X=np.array(self.reward_std_log), env=self.master.refs, win=self.win_reward_std, opts=dict(title="reward_std"))
            self.win_nepisodes = self.master.vis.scatter(X=np.array(self.nepisodes_log), env=self.master.refs,
                                                         win=self.win_nepisodes, opts=dict(title="nepisodes"))
            self.win_nepisodes_solved = self.master.vis.scatter(X=np.array(self.nepisodes_solved_log),
                                                                env=self.master.refs, win=self.win_nepisodes_solved,
                                                                opts=dict(title="nepisodes_solved"))
            self.win_repisodes_solved = self.master.vis.scatter(X=np.array(self.repisodes_solved_log),
                                                                env=self.master.refs, win=self.win_repisodes_solved,
                                                                opts=dict(title="repisodes_solved"))
            self.win_terminals_reached = self.master.vis.scatter(X=np.array(self.terminals_reached_log),
                                                                 env=self.master.refs, win=self.win_terminals_reached,
                                                                 opts=dict(title="terminals_reached"))
            self.win_action_counts = self.master.vis.bar(X=self.action_counts, env=self.master.refs,
                                                         win=self.win_action_counts,
                                                         opts=dict(title="action_counts"))
        self.last_eval = time.time()
        # save model
        self.master._save_model(eval_at_train_step, self.reward_avg_log[-1][1])
        if self.master.icm:
            self.master._save_icm_models(eval_at_train_step,
                                         self.icm_inv_loss_avg_log[-1][1], self.icm_fwd_loss_avg_log[-1][1])
    def run(self):
        """Poll until training ends, evaluating every ``eval_freq`` seconds."""
        while self.master.train_step.value < self.master.steps:
            if time.time() - self.last_eval > self.master.eval_freq:
                self._eval_model()
        # we also do a final evaluation after training is done
        self._eval_model()
class A3CTester(A3CSingleProcess):
    """Test process for A3C.

    Loads the model (via the base class), runs ``master.test_nepisodes``
    greedy episodes, then logs aggregate steps/reward statistics and
    optionally plots them to visdom.  Unlike the evaluator it runs once
    and does not sync with an ongoing training run.
    """
    def __init__(self, master, process_id=0):
        """Build the tester: local model in eval mode, logging reset."""
        master.logger.warning("<===================================> A3C-Tester {Env & Model}")
        super(A3CTester, self).__init__(master, process_id)
        self.training = False # choose actions w/ max probability
        # self.training = True # choose actions by polynomial (?)
        self.model.train(self.training)
        if self.master.icm:
            self.icm_inv_model.train(self.training)
            self.icm_fwd_model.train(self.training)
        self._reset_loggings()
        self.start_time = time.time()
    # TODO: add terminations count to the log here too?
    # TODO: add ICM logs here?
    def _reset_loggings(self):
        """Initialise the testing history lists and visdom window handles."""
        # testing stats
        self.steps_avg_log = []
        self.steps_std_log = []
        self.reward_avg_log = []
        self.reward_std_log = []
        self.nepisodes_log = []
        self.nepisodes_solved_log = []
        self.repisodes_solved_log = []
        # placeholders for windows for online curve plotting
        if self.master.visualize:
            # evaluation stats
            self.win_steps_avg = "win_steps_avg"
            self.win_steps_std = "win_steps_std"
            self.win_reward_avg = "win_reward_avg"
            self.win_reward_std = "win_reward_std"
            self.win_nepisodes = "win_nepisodes"
            self.win_nepisodes_solved = "win_nepisodes_solved"
            self.win_repisodes_solved = "win_repisodes_solved"
    def run(self):
        """Run ``test_nepisodes`` greedy episodes, then log and plot the results."""
        test_step = 0
        test_nepisodes = 0
        test_nepisodes_solved = 0
        test_episode_steps = None
        test_episode_steps_log = []
        test_episode_reward = None
        test_episode_reward_log = []
        test_should_start_new = True
        while test_nepisodes < self.master.test_nepisodes:
            if test_should_start_new: # start of a new episode
                test_episode_steps = 0
                test_episode_reward = 0.
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode(self.training)
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                if not self.training:
                    if self.master.visualize: self.env.visual()
                    if self.master.render: self.env.render()
                # reset flag
                test_should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each step
                # NOTE: not necessary here in testing but we do it anyways
                self._reset_lstm_hidden_vb_rollout()
            # Run a single step
            if self.master.enable_continuous:
                test_action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            else:
                test_action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            self.experience = self.env.step(test_action)
            if not self.training:
                if self.master.visualize: self.env.visual()
                if self.master.render: self.env.render()
            # episode boundary: terminal state or early-stop step budget reached
            if self.experience.terminal1 or \
                    self.master.early_stop and (test_episode_steps + 1) == self.master.early_stop:
                test_should_start_new = True
            test_episode_steps += 1
            test_episode_reward += self.experience.reward
            test_step += 1
            if test_should_start_new:
                test_nepisodes += 1
                if self.experience.terminal1:
                    test_nepisodes_solved += 1
                # This episode is finished, report and reset
                test_episode_steps_log.append([test_episode_steps])
                test_episode_reward_log.append([test_episode_reward])
                self._reset_experience()
                test_episode_steps = None
                test_episode_reward = None
        # aggregate across all test episodes
        self.steps_avg_log.append([test_nepisodes, np.mean(np.asarray(test_episode_steps_log))])
        self.steps_std_log.append([test_nepisodes, np.std(np.asarray(test_episode_steps_log))])
        del test_episode_steps_log
        self.reward_avg_log.append([test_nepisodes, np.mean(np.asarray(test_episode_reward_log))])
        self.reward_std_log.append([test_nepisodes, np.std(np.asarray(test_episode_reward_log))])
        del test_episode_reward_log
        self.nepisodes_log.append([test_nepisodes, test_nepisodes])
        self.nepisodes_solved_log.append([test_nepisodes, test_nepisodes_solved])
        self.repisodes_solved_log.append(
            [test_nepisodes, (test_nepisodes_solved / test_nepisodes) if test_nepisodes > 0 else 0.])
        # plotting
        if self.master.visualize:
            self.win_steps_avg = self.master.vis.scatter(X=np.array(self.steps_avg_log), env=self.master.refs,
                                                         win=self.win_steps_avg, opts=dict(title="steps_avg"))
            # self.win_steps_std = self.master.vis.scatter(X=np.array(self.steps_std_log), env=self.master.refs, win=self.win_steps_std, opts=dict(title="steps_std"))
            self.win_reward_avg = self.master.vis.scatter(X=np.array(self.reward_avg_log), env=self.master.refs,
                                                          win=self.win_reward_avg, opts=dict(title="reward_avg"))
            # self.win_reward_std = self.master.vis.scatter(X=np.array(self.reward_std_log), env=self.master.refs, win=self.win_reward_std, opts=dict(title="reward_std"))
            self.win_nepisodes = self.master.vis.scatter(X=np.array(self.nepisodes_log), env=self.master.refs,
                                                         win=self.win_nepisodes, opts=dict(title="nepisodes"))
            self.win_nepisodes_solved = self.master.vis.scatter(X=np.array(self.nepisodes_solved_log),
                                                                env=self.master.refs, win=self.win_nepisodes_solved,
                                                                opts=dict(title="nepisodes_solved"))
            self.win_repisodes_solved = self.master.vis.scatter(X=np.array(self.repisodes_solved_log),
                                                                env=self.master.refs, win=self.win_repisodes_solved,
                                                                opts=dict(title="repisodes_solved"))
        # logging
        self.master.logger.warning("Testing Took: " + str(time.time() - self.start_time))
        self.master.logger.warning("Testing: steps_avg: {}".format(self.steps_avg_log[-1][1]))
        self.master.logger.warning("Testing: steps_std: {}".format(self.steps_std_log[-1][1]))
        self.master.logger.warning("Testing: reward_avg: {}".format(self.reward_avg_log[-1][1]))
        self.master.logger.warning("Testing: reward_std: {}".format(self.reward_std_log[-1][1]))
        self.master.logger.warning("Testing: nepisodes: {}".format(self.nepisodes_log[-1][1]))
        self.master.logger.warning("Testing: nepisodes_solved: {}".format(self.nepisodes_solved_log[-1][1]))
        self.master.logger.warning("Testing: repisodes_solved: {}".format(self.repisodes_solved_log[-1][1]))
# icm logging fixes
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import time
import numpy as np
import torch
from torch.autograd import Variable
from core.agent_single_process import AgentSingleProcess
from utils.helpers import A3C_Experience
class A3CSingleProcess(AgentSingleProcess):
epsilon = 1e-13
def __init__(self, master, process_id=0):
super(A3CSingleProcess, self).__init__(master, process_id)
# lstm hidden states
if self.master.enable_lstm:
self.lstm_layer_count = self.model.lstm_layer_count if hasattr(self.model, "lstm_layer_count") else 1
self._reset_lstm_hidden_vb_episode() # clear up hidden state
self._reset_lstm_hidden_vb_rollout() # detach the previous variable from the computation graph
# NOTE global variable pi
if self.master.enable_continuous:
self.pi_vb = Variable(torch.Tensor([math.pi]).type(self.master.dtype))
self.master.logger.warning(
"Registered A3C-SingleProcess-Agent #" + str(self.process_id) + " w/ Env (seed:" + str(
self.env.seed) + ").")
# NOTE: to be called at the beginning of each new episode, clear up the hidden state
def _reset_lstm_hidden_vb_episode(self, training=True): # seq_len, batch_size, hidden_vb_dim
not_training = not training
if hasattr(self.master, "num_robots"):
r = self.master.num_robots
if self.master.enable_continuous: # TODO: what here?
self.lstm_hidden_vb = (
Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
if self.lstm_layer_count == 2:
self.lstm_hidden_vb2 = (
Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training),
Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training))
else:
self.lstm_hidden_vb = (
Variable(torch.zeros(r, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
Variable(torch.zeros(r, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
if self.lstm_layer_count == 2:
self.lstm_hidden_vb2 = (
Variable(torch.zeros(r, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training),
Variable(torch.zeros(r, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training))
else:
if self.master.enable_continuous:
self.lstm_hidden_vb = (
Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
Variable(torch.zeros(2, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
if self.lstm_layer_count == 2:
self.lstm_hidden_vb2 = (
Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training),
Variable(torch.zeros(2, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training))
else:
self.lstm_hidden_vb = (
Variable(torch.zeros(1, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training),
Variable(torch.zeros(1, self.model.hidden_vb_dim).type(self.master.dtype), volatile=not_training))
if self.lstm_layer_count == 2:
self.lstm_hidden_vb2 = (
Variable(torch.zeros(1, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training),
Variable(torch.zeros(1, self.model.hidden_vb2_dim).type(self.master.dtype),
volatile=not_training))
# NOTE: to be called at the beginning of each rollout, detach the previous variable from the graph
def _reset_lstm_hidden_vb_rollout(self):
self.lstm_hidden_vb = (Variable(self.lstm_hidden_vb[0].data),
Variable(self.lstm_hidden_vb[1].data))
if self.lstm_layer_count == 2:
self.lstm_hidden_vb2 = (Variable(self.lstm_hidden_vb2[0].data),
Variable(self.lstm_hidden_vb2[1].data))
def _preprocessState(self, state, is_valotile=False):
if isinstance(state, list):
state_vb = []
for i in range(len(state)):
state_vb.append(
Variable(torch.from_numpy(state[i]).unsqueeze(0).type(self.master.dtype), volatile=is_valotile))
else:
state_vb = Variable(torch.from_numpy(state).unsqueeze(0).type(self.master.dtype), volatile=is_valotile)
return state_vb
    def _forward(self, state_vb):
        """Run one forward pass of the policy/value model and select an action.

        Args:
            state_vb: preprocessed state Variable(s), as produced by
                ``_preprocessState``.

        Returns:
            Continuous control: ``(action, p_vb, sig_vb, v_vb)`` where
            ``p_vb`` is the mean (mu) and ``sig_vb`` the variance of the
            action distribution.
            Discrete control: ``(action, p_vb, v_vb)`` with ``p_vb`` the
            action probabilities.

        Side effects: when LSTMs are enabled, updates ``self.lstm_hidden_vb``
        (and ``self.lstm_hidden_vb2`` for two-layer models) in place.
        NOTE(review): if ``enable_lstm`` is True but ``lstm_layer_count`` is
        neither 1 nor 2, no branch assigns the outputs — presumably those are
        the only supported values; confirm upstream.
        """
        if self.master.enable_continuous:  # NOTE continuous control p_vb here is the mu_vb of continuous action dist
            if self.master.enable_lstm:
                if self.lstm_layer_count == 1:
                    p_vb, sig_vb, v_vb, self.lstm_hidden_vb = self.model(state_vb, self.lstm_hidden_vb)
                elif self.lstm_layer_count == 2:
                    p_vb, sig_vb, v_vb, self.lstm_hidden_vb, self.lstm_hidden_vb2 = self.model(state_vb,
                                                                                              self.lstm_hidden_vb,
                                                                                              self.lstm_hidden_vb2)
            else:
                p_vb, sig_vb, v_vb = self.model(state_vb)
            if self.training:
                # training: sample action = mu + sqrt(sigma^2) * eps, eps ~ N(0, 1)
                _eps = torch.randn(p_vb.size())
                action = (p_vb + sig_vb.sqrt() * Variable(_eps)).data.numpy()  # TODO:?
            else:
                # evaluation/testing: act deterministically with the mean
                action = p_vb.data.numpy()
            return action, p_vb, sig_vb, v_vb
        else:
            if self.master.enable_lstm:
                if self.lstm_layer_count == 1:
                    p_vb, v_vb, self.lstm_hidden_vb = self.model(state_vb, self.lstm_hidden_vb)
                elif self.lstm_layer_count == 2:
                    p_vb, v_vb, self.lstm_hidden_vb, self.lstm_hidden_vb2 = self.model(state_vb, self.lstm_hidden_vb,
                                                                                       self.lstm_hidden_vb2)
            else:
                p_vb, v_vb = self.model(state_vb)
            if self.training:
                # training: sample from the categorical distribution
                action = p_vb.multinomial().data.squeeze().numpy()
            else:
                # evaluation/testing: greedy argmax action
                action = p_vb.max(1)[1].data.squeeze().numpy()
            return action, p_vb, v_vb
def _normal(self, x, mu, sigma_sq):
a = (-1 * (x - mu).pow(2) / (2 * sigma_sq)).exp()
b = 1 / (2 * sigma_sq * self.pi_vb.expand_as(sigma_sq)).sqrt()
return (a * b).log()
# noinspection PyPep8Naming
class A3CLearner(A3CSingleProcess):
    """A3C worker process: collects rollouts with its local model copy and
    pushes gradients (and optional ICM curiosity-model gradients) to the
    shared master model."""
    def __init__(self, master, process_id=0):
        master.logger.warning(
            "<===================================> A3C-Learner #" + str(process_id) + " {Env & Model}")
        super(A3CLearner, self).__init__(master, process_id)
        self._reset_rollout()
        self.training = True  # sample actions (multinomial / Gaussian noise) instead of argmax
        self.model.train(self.training)
        if self.master.icm:
            self.icm_inv_model.train(self.training)
            self.icm_fwd_model.train(self.training)
        # local counters
        self.frame_step = 0  # local frame step counter
        self.train_step = 0  # local train step counter
        # local training stats (accumulated here, flushed to master every prog_freq)
        self.p_loss_avg = 0.  # accumulated policy loss
        self.v_loss_avg = 0.  # accumulated value loss
        self.loss_avg = 0.  # accumulated total loss
        self.loss_counter = 0  # storing this many losses
        self.icm_inv_loss_avg = 0.
        self.icm_fwd_loss_avg = 0.
        self.icm_inv_accuracy_avg = 0.
        self.grad_magnitude_avg = 0.
        self.grad_magnitude_max = 0.
        self._reset_training_loggings()
        # copy local training stats to global every prog_freq
        self.last_prog = time.time()
    def _reset_training_loggings(self):
        """Zero all locally accumulated training statistics."""
        self.p_loss_avg = 0.
        self.v_loss_avg = 0.
        self.loss_avg = 0.
        self.loss_counter = 0
        self.icm_inv_loss_avg = 0.
        self.icm_fwd_loss_avg = 0.
        self.icm_inv_accuracy_avg = 0.
        self.grad_magnitude_avg = 0.
        self.grad_magnitude_max = 0.
    def _reset_rollout(self):  # for storing the experiences collected through one rollout
        """Start a fresh, empty rollout experience buffer."""
        self.rollout = A3C_Experience(state0=[],
                                      action=[],
                                      reward=[],
                                      state1=[],
                                      terminal1=[],
                                      policy_vb=[],
                                      sigmoid_vb=[],
                                      value0_vb=[])
    def _get_valueT_vb(self):
        """Bootstrap value V(s_T) for the rollout's last state (zeros if terminal)."""
        if self.rollout.terminal1[-1]:  # for terminal sT
            valueT_vb = Variable(torch.zeros(self.master.num_robots, 1))
        else:  # for non-terminal sT
            sT_vb = self._preprocessState(self.rollout.state1[-1], True)  # bootstrap from last state
            if self.master.enable_continuous:
                if self.master.enable_lstm:
                    if self.lstm_layer_count == 1:
                        _, _, valueT_vb, _ = self.model(sT_vb, self.lstm_hidden_vb)  # NOTE: only doing inference here
                    elif self.lstm_layer_count == 2:
                        _, _, valueT_vb, _, _ = self.model(sT_vb, self.lstm_hidden_vb,
                                                           self.lstm_hidden_vb2)  # NOTE: only doing inference here
                else:
                    _, _, valueT_vb = self.model(sT_vb)  # NOTE: only doing inference here
            else:
                if self.master.enable_lstm:
                    if self.lstm_layer_count == 1:
                        _, valueT_vb, _ = self.model(sT_vb, self.lstm_hidden_vb)  # NOTE: only doing inference here
                    elif self.lstm_layer_count == 2:
                        _, valueT_vb, _, _ = self.model(sT_vb, self.lstm_hidden_vb,
                                                        self.lstm_hidden_vb2)  # NOTE: only doing inference here
                else:
                    _, valueT_vb = self.model(sT_vb)  # NOTE: only doing inference here
            # NOTE: here valueT_vb.volatile=True since sT_vb.volatile=True
            # NOTE: if we use detach() here, it would remain volatile
            # NOTE: then all the follow-up computations would only give volatile loss variables
            valueT_vb = Variable(valueT_vb.data)
        return valueT_vb
    def _backward(self):
        """Compute A3C losses (with GAE) and optional ICM losses over the
        stored rollout, backprop, clip gradients, copy them to the shared
        model and step the shared optimizer(s)."""
        rollout_steps = len(self.rollout.reward)
        # ICM first if enabled
        if self.master.icm:
            # if rollout_steps > 1:
            #     pass
            # TODO: also use target data in the state?
            # strip the last 3 (target) entries off each flattened state row
            state_start = np.array(self.rollout.state0).reshape(-1, self.master.state_shape + 3)[:,
                          :self.master.state_shape]
            state_next = np.array(self.rollout.state1).reshape(-1, self.master.state_shape + 3)[:,
                         :self.master.state_shape]
            state_start = Variable(torch.from_numpy(state_start).type(self.master.dtype))
            state_next = Variable(torch.from_numpy(state_next).type(self.master.dtype))
            actions = np.array(self.rollout.action).reshape(-1)
            actions = Variable(torch.from_numpy(actions).long(), requires_grad=False)
            features, features_next, action_logits, action_probs = \
                self.icm_inv_model.forward((state_start, state_next))
            features_next = features_next.detach()
            icm_inv_loss = self.icm_inv_loss_criterion(action_logits, actions)
            icm_inv_loss_mean = icm_inv_loss.mean()
            icm_inv_loss_mean.backward()
            # TODO: right to create new Variable here?
            # otherwise RuntimeError: Trying to backward through the graph a second time
            features_next_pred = self.icm_fwd_model.forward((Variable(features.data), actions))
            icm_fwd_loss = self.icm_fwd_loss_criterion(features_next_pred, features_next).mean(dim=1)
            icm_fwd_loss_mean = icm_fwd_loss.mean()
            # TODO: does this backpropagate through the inverse model too?
            icm_fwd_loss_mean.backward()
            self.icm_inv_loss_avg += icm_inv_loss_mean.data.numpy()
            self.icm_fwd_loss_avg += icm_fwd_loss_mean.data.numpy()
            self.icm_inv_accuracy_avg += actions.eq(action_probs.max(1)[1]).sum().data.numpy()[0] / float(
                actions.size()[0])
            icm_inv_loss_detached = Variable(icm_inv_loss.data)
            icm_fwd_loss_detached = Variable(icm_fwd_loss.data)
        # preparation
        policy_vb = self.rollout.policy_vb
        if self.master.enable_continuous:
            action_batch_vb = Variable(torch.from_numpy(np.array(self.rollout.action)))
            if self.master.use_cuda:
                action_batch_vb = action_batch_vb.cuda()
            sigma_vb = self.rollout.sigmoid_vb
        else:
            action_batch_vb = Variable(torch.from_numpy(np.array(self.rollout.action)).long())
            if self.master.use_cuda:
                action_batch_vb = action_batch_vb.cuda()
            # log pi(a|s) and entropy per rollout step; epsilon guards log(0)
            policy_log_vb = [torch.log(policy_vb[i] + self.epsilon) for i in range(rollout_steps)]
            entropy_vb = [- (policy_log_vb[i] * policy_vb[i]).sum(1) for i in range(rollout_steps)]
            if hasattr(self.master, "num_robots"):
                policy_log_vb = [
                    policy_log_vb[i].gather(1, action_batch_vb[i].unsqueeze(0).view(self.master.num_robots, -1)) for i
                    in range(rollout_steps)]
            else:
                policy_log_vb = [policy_log_vb[i].gather(1, action_batch_vb[i].unsqueeze(0)) for i in
                                 range(rollout_steps)]
        valueT_vb = self._get_valueT_vb()
        self.rollout.value0_vb.append(
            Variable(valueT_vb.data))  # NOTE: only this last entry is Volatile, all others are still in the graph
        gae_ts = torch.zeros(self.master.num_robots, 1)
        # compute loss, iterating the rollout backwards to accumulate returns
        policy_loss_vb = 0.
        value_loss_vb = 0.
        for i in reversed(range(rollout_steps)):
            reward_vb = Variable(torch.from_numpy(self.rollout.reward[i])).float().view(-1, 1)
            # TODO: for comparison; turn back on later!
            # if self.master.icm:
            #     reward_vb += 0.20 * (icm_inv_loss_detached[i] + icm_fwd_loss_detached[i])
            valueT_vb = self.master.gamma * valueT_vb + reward_vb
            advantage_vb = valueT_vb - self.rollout.value0_vb[i]
            value_loss_vb = value_loss_vb + 0.5 * advantage_vb.pow(2)
            # Generalized Advantage Estimation
            tderr_ts = reward_vb.data + self.master.gamma * self.rollout.value0_vb[i + 1].data - \
                       self.rollout.value0_vb[i].data
            gae_ts = self.master.gamma * gae_ts * self.master.tau + tderr_ts
            if self.master.enable_continuous:
                _log_prob = self._normal(action_batch_vb[i], policy_vb[i], sigma_vb[i])
                _entropy = 0.5 * ((sigma_vb[i] * 2 * self.pi_vb.expand_as(sigma_vb[i])).log() + 1)
                policy_loss_vb -= (_log_prob * Variable(gae_ts).expand_as(
                    _log_prob)).sum() + self.master.beta * _entropy.sum()
            else:
                policy_loss_vb -= policy_log_vb[i] * Variable(gae_ts) + (self.master.beta * entropy_vb[i]).view(
                    self.master.num_robots, -1)
        loss_vb = policy_loss_vb + 0.5 * value_loss_vb
        loss_vb = loss_vb.mean(dim=0)
        loss_vb.backward()
        torch.nn.utils.clip_grad_norm(self.model.parameters(), self.master.clip_grad)
        # targets random for each episode, each robot has its target # DONE
        # random map for each episode # DONE
        # update a3c code for rewards for each robot # DONE
        self._ensure_global_grads()
        self.master.optimizer.step()
        if self.master.icm:
            self.master.icm_inv_optimizer.step()
            self.master.icm_fwd_optimizer.step()
        self.train_step += 1
        self.master.train_step.value += 1
        # adjust learning rate if enabled (linear decay, floored at 1e-32)
        if self.master.lr_decay:
            self.master.lr_adjusted.value = max(
                self.master.lr * (self.master.steps - self.master.train_step.value) / self.master.steps, 1e-32)
            adjust_learning_rate(self.master.optimizer, self.master.lr_adjusted.value)
        # log training stats
        self.p_loss_avg += policy_loss_vb.data.numpy()
        self.v_loss_avg += value_loss_vb.data.numpy()
        self.loss_avg += loss_vb.data.numpy()
        self.grad_magnitude_avg += np.mean([np.abs(p.grad.data.norm()) for p in self.model.parameters()])
        self.grad_magnitude_max = np.max(
            [np.abs(p.grad.data.norm()) for p in self.model.parameters()] + [self.grad_magnitude_max]
        )
        self.loss_counter += 1
    def _rollout(self, episode_steps, episode_reward):
        """Collect up to ``rollout_steps`` transitions into ``self.rollout``.

        Returns the updated ``(episode_steps, episode_reward)`` counters.
        """
        # reset rollout experiences
        self._reset_rollout()
        t_start = self.frame_step
        # continue to rollout only if:
        # 1. not running out of max steps of this current rollout, and
        # 2. not terminal, and
        # 3. not exceeding max steps of this current episode
        # 4. master not exceeding max train steps
        while (self.frame_step - t_start) < self.master.rollout_steps \
                and not self.experience.terminal1 \
                and (self.master.early_stop is None or episode_steps < self.master.early_stop):
            # NOTE: here first store the last frame: experience.state1 as rollout.state0
            self.rollout.state0.append(self.experience.state1)
            # then get the action to take from rollout.state0 (experience.state1)
            if self.master.enable_continuous:
                action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1))
                self.rollout.sigmoid_vb.append(sig_vb)
            else:
                action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1))
            # then execute action in env to get a new experience.state1 -> rollout.state1
            self.experience = self.env.step(action)
            # push experience into rollout
            self.rollout.action.append(action)
            self.rollout.reward.append(self.experience.reward)
            self.rollout.state1.append(self.experience.state1)
            self.rollout.terminal1.append(self.experience.terminal1)
            self.rollout.policy_vb.append(p_vb)
            self.rollout.value0_vb.append(v_vb)
            episode_steps += 1
            episode_reward += self.experience.reward
            self.frame_step += 1
            self.master.frame_step.value += 1
            if self.master.frame_step.value % (1000 * self.master.rollout_steps) == 0:
                print("train step: {0}, frame step {1}, time: {2}".format(self.master.train_step.value,
                                                                          self.master.frame_step.value, time.time()))
            # NOTE: we put this condition in the end to make sure this current rollout won't be empty
            if self.master.train_step.value >= self.master.steps:
                break
        return episode_steps, episode_reward
    def run(self):
        """Main training loop: sync with the shared model, roll out, backprop,
        and periodically flush local stats to the master until the global
        train-step budget is exhausted."""
        # make sure processes are not completely synced by sleeping a bit
        time.sleep(int(np.random.rand() * (self.process_id + 5)))
        nepisodes = 0
        nepisodes_solved = 0
        episode_steps = None
        episode_reward = None
        should_start_new = True
        while self.master.train_step.value < self.master.steps:
            # sync in every step
            self._sync_local_with_global()
            self.model.zero_grad()
            if self.master.icm:
                self.icm_inv_model.zero_grad()
                self.icm_fwd_model.zero_grad()
            # start of a new episode
            if should_start_new:
                episode_steps = 0
                episode_reward = np.zeros(self.master.num_robots)
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode()
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                # reset flag
                should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each rollout
                self._reset_lstm_hidden_vb_rollout()
            # Run a rollout for rollout_steps or until terminal
            episode_steps, episode_reward = self._rollout(episode_steps, episode_reward)
            if self.experience.terminal1 or \
                    self.master.early_stop and episode_steps >= self.master.early_stop:
                nepisodes += 1
                should_start_new = True
                if self.experience.terminal1:
                    nepisodes_solved += 1
                    self.master.terminations_count.value += 1
            # calculate loss
            self._backward()
            # copy local training stats to global at prog_freq, and clear up local stats
            if time.time() - self.last_prog >= self.master.prog_freq:
                self.master.p_loss_avg.value += self.p_loss_avg.mean()
                self.master.v_loss_avg.value += self.v_loss_avg.mean()
                self.master.loss_avg.value += self.loss_avg.mean()
                self.master.loss_counter.value += self.loss_counter
                self.master.grad_magnitude_avg.value += self.grad_magnitude_avg
                val = self.master.grad_magnitude_max.value
                self.master.grad_magnitude_max.value = max(self.grad_magnitude_max, val)
                if self.master.icm:
                    self.master.icm_inv_loss_avg.value += self.icm_inv_loss_avg
                    self.master.icm_fwd_loss_avg.value += self.icm_fwd_loss_avg
                    self.master.icm_inv_accuracy_avg.value += self.icm_inv_accuracy_avg
                self._reset_training_loggings()
                self.last_prog = time.time()
class A3CEvaluator(A3CSingleProcess):
    """Evaluation process: periodically copies the shared model, runs greedy
    episodes, aggregates statistics, optionally plots them via visdom and
    saves model checkpoints."""
    def __init__(self, master, process_id=0):
        master.logger.warning("<===================================> A3C-Evaluator {Env & Model}")
        super(A3CEvaluator, self).__init__(master, process_id)
        self.training = False  # choose actions w/ max probability
        self.model.train(self.training)
        if self.master.icm:
            self.icm_inv_model.train(self.training)
            self.icm_fwd_model.train(self.training)
        self._reset_loggings()
        self.start_time = time.time()
        self.last_eval = time.time()
    def _reset_loggings(self):
        """Clear all logged statistics and (re)create visdom window handles."""
        # training stats across all processes
        self.p_loss_avg_log = []
        self.v_loss_avg_log = []
        self.loss_avg_log = []
        self.icm_inv_loss_avg_log = []
        self.icm_fwd_loss_avg_log = []
        self.icm_inv_accuracy_avg_log = []
        self.grad_magnitude_avg_log = []
        self.grad_magnitude_max_log = []
        # evaluation stats
        self.entropy_avg_log = []
        self.v_avg_log = []
        self.steps_avg_log = []
        self.steps_std_log = []
        self.reward_avg_log = []
        self.reward_std_log = []
        self.nepisodes_log = []
        self.nepisodes_solved_log = []
        self.repisodes_solved_log = []
        self.terminals_reached_log = []
        self.action_counts = np.zeros(self.master.action_dim)
        # placeholders for windows for online curve plotting
        if self.master.visualize:
            # training stats across all processes
            self.win_p_loss_avg = "win_p_loss_avg"
            self.win_v_loss_avg = "win_v_loss_avg"
            self.win_loss_avg = "win_loss_avg"
            self.win_icm_inv_loss_avg = "win_icm_inv_loss_avg"
            self.win_icm_fwd_loss_avg = "win_icm_fwd_loss_avg"
            self.win_icm_inv_accuracy_avg = "win_icm_inv_accuracy_avg"
            self.win_grad_magnitude_avg = "win_grad_magnitude_avg"
            self.win_grad_magnitude_max = "win_grad_magnitude_max"
            # evaluation stats
            self.win_entropy_avg = "win_entropy_avg"
            self.win_v_avg = "win_v_avg"
            self.win_steps_avg = "win_steps_avg"
            self.win_steps_std = "win_steps_std"
            self.win_reward_avg = "win_reward_avg"
            self.win_reward_std = "win_reward_std"
            self.win_nepisodes = "win_nepisodes"
            self.win_nepisodes_solved = "win_nepisodes_solved"
            self.win_repisodes_solved = "win_repisodes_solved"
            self.win_terminals_reached = "win_terminals_reached"
            self.win_action_counts = "action_counts"
    def _eval_model(self):
        """Run one evaluation phase against the current shared model, then
        log, plot and checkpoint the results."""
        self.last_eval = time.time()
        eval_at_train_step = self.master.train_step.value
        eval_at_frame_step = self.master.frame_step.value
        # first grab the latest global model to do the evaluation
        self._sync_local_with_global()
        # evaluate
        eval_step = 0
        eval_entropy_log = []
        eval_v_log = []
        eval_nepisodes = 0
        eval_nepisodes_solved = 0
        eval_episode_steps = None
        eval_episode_steps_log = []
        eval_episode_reward = None
        eval_episode_reward_log = []
        eval_should_start_new = True
        while eval_step < self.master.eval_steps:
            if eval_should_start_new:  # start of a new episode
                eval_episode_steps = 0
                eval_episode_reward = 0.
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode(self.training)
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                if not self.training:
                    if self.master.visualize: self.env.visual()
                    if self.master.render: self.env.render()
                # reset flag
                eval_should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each step
                # NOTE: not necessary here in evaluation but we do it anyways
                self._reset_lstm_hidden_vb_rollout()
            # Run a single step
            if self.master.enable_continuous:
                eval_action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            else:
                eval_action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            self.experience = self.env.step(eval_action)
            self.action_counts[eval_action] += 1
            if not self.training:
                if self.master.visualize: self.env.visual()
                if self.master.render: self.env.render()
            if self.experience.terminal1 or \
                    self.master.early_stop and (eval_episode_steps + 1) == self.master.early_stop or \
                    (eval_step + 1) == self.master.eval_steps:
                eval_should_start_new = True
            eval_episode_steps += 1
            eval_episode_reward += self.experience.reward
            eval_step += 1
            if eval_should_start_new:
                eval_nepisodes += 1
                if self.experience.terminal1:
                    eval_nepisodes_solved += 1
                # This episode is finished, report and reset
                # NOTE make no sense for continuous
                if self.master.enable_continuous:
                    eval_entropy_log.append(
                        [0.5 * ((sig_vb * 2 * self.pi_vb.expand_as(sig_vb)).log() + 1).data.numpy()])
                else:
                    eval_entropy_log.append(
                        [np.mean((-torch.log(p_vb.data.squeeze() + self.epsilon) * p_vb.data.squeeze()).numpy())])
                eval_v_log.append([v_vb.data.numpy()])
                eval_episode_steps_log.append([eval_episode_steps])
                eval_episode_reward_log.append([eval_episode_reward])
                self._reset_experience()
                eval_episode_steps = None
                eval_episode_reward = None
        # Logging for this evaluation phase
        loss_counter = self.master.loss_counter.value
        p_loss_avg = self.master.p_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        v_loss_avg = self.master.v_loss_avg.value / loss_counter if loss_counter > 0 else 0.
        loss_avg = self.master.loss_avg.value / loss_counter if loss_counter > 0 else 0.
        if self.master.icm:
            icm_inv_loss_avg = self.master.icm_inv_loss_avg.value / loss_counter if loss_counter > 0 else 0.
            icm_fwd_loss_avg = self.master.icm_fwd_loss_avg.value / loss_counter if loss_counter > 0 else 0.
            icm_inv_accuracy_avg = self.master.icm_inv_accuracy_avg.value / loss_counter if loss_counter > 0 else 0.
        grad_magnitude_avg = self.master.grad_magnitude_avg.value / loss_counter if loss_counter > 0 else 0.
        grad_magnitude_max = self.master.grad_magnitude_max.value
        self.master._reset_training_logs()
        def _log_at_step(eval_at_step):
            # append all stats under the chosen x-axis (train step or frame step)
            self.p_loss_avg_log.append([eval_at_step, p_loss_avg])
            self.v_loss_avg_log.append([eval_at_step, v_loss_avg])
            self.loss_avg_log.append([eval_at_step, loss_avg])
            if self.master.icm:
                self.icm_inv_loss_avg_log.append([eval_at_step, icm_inv_loss_avg])
                self.icm_fwd_loss_avg_log.append([eval_at_step, icm_fwd_loss_avg])
                self.icm_inv_accuracy_avg_log.append([eval_at_step, icm_inv_accuracy_avg])
            self.grad_magnitude_avg_log.append([eval_at_step, grad_magnitude_avg])
            self.grad_magnitude_max_log.append([eval_at_step, grad_magnitude_max])
            self.entropy_avg_log.append([eval_at_step, np.mean(np.asarray(eval_entropy_log))])
            self.v_avg_log.append([eval_at_step, np.mean(np.asarray(eval_v_log))])
            self.steps_avg_log.append([eval_at_step, np.mean(np.asarray(eval_episode_steps_log))])
            self.steps_std_log.append([eval_at_step, np.std(np.asarray(eval_episode_steps_log))])
            self.reward_avg_log.append([eval_at_step, np.mean(np.asarray(eval_episode_reward_log))])
            self.reward_std_log.append([eval_at_step, np.std(np.asarray(eval_episode_reward_log))])
            self.nepisodes_log.append([eval_at_step, eval_nepisodes])
            self.nepisodes_solved_log.append([eval_at_step, eval_nepisodes_solved])
            self.repisodes_solved_log.append(
                [eval_at_step, (eval_nepisodes_solved / eval_nepisodes) if eval_nepisodes > 0 else 0.])
            self.terminals_reached_log.append([self.master.train_step.value, self.master.terminations_count.value])
            # logging
            self.master.logger.warning("Reporting @ Step: " + str(eval_at_step) + " | Elapsed Time: " + str(
                time.time() - self.start_time))
            self.master.logger.warning("Iteration: {}; lr: {}".format(eval_at_step, self.master.lr_adjusted.value))
            self.master.logger.warning("Iteration: {}; p_loss_avg: {}".format(eval_at_step, self.p_loss_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; v_loss_avg: {}".format(eval_at_step, self.v_loss_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; loss_avg: {}".format(eval_at_step, self.loss_avg_log[-1][1]))
            if self.master.icm:
                self.master.logger.warning(
                    "Iteration: {}; icm_inv_loss_avg: {}".format(eval_at_step, self.icm_inv_loss_avg_log[-1][1]))
                self.master.logger.warning(
                    "Iteration: {}; icm_fwd_loss_avg: {}".format(eval_at_step, self.icm_fwd_loss_avg_log[-1][1]))
                self.master.logger.warning(
                    "Iteration: {}; icm_inv_accuracy_avg: {}".format(eval_at_step, self.icm_inv_accuracy_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; grad_magnitude_avg: {}".format(eval_at_step, self.grad_magnitude_avg_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; grad_magnitude_max: {}".format(eval_at_step, self.grad_magnitude_max_log[-1][1]))
            self.master._reset_training_logs()
            self.master.logger.warning(
                "Evaluating @ Step: " + str(eval_at_train_step) + " | (" + str(eval_at_frame_step) + " frames)...")
            self.master.logger.warning("Evaluation Took: " + str(time.time() - self.last_eval))
            self.master.logger.warning(
                "Iteration: {}; entropy_avg: {}".format(eval_at_step, self.entropy_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; v_avg: {}".format(eval_at_step, self.v_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; steps_avg: {}".format(eval_at_step, self.steps_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; steps_std: {}".format(eval_at_step, self.steps_std_log[-1][1]))
            self.master.logger.warning("Iteration: {}; reward_avg: {}".format(eval_at_step, self.reward_avg_log[-1][1]))
            self.master.logger.warning("Iteration: {}; reward_std: {}".format(eval_at_step, self.reward_std_log[-1][1]))
            self.master.logger.warning("Iteration: {}; nepisodes: {}".format(eval_at_step, self.nepisodes_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; nepisodes_solved: {}".format(eval_at_step, self.nepisodes_solved_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; repisodes_solved: {}".format(eval_at_step, self.repisodes_solved_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; terminals_reached: {}".format(eval_at_step, self.terminals_reached_log[-1][1]))
            self.master.logger.warning(
                "Iteration: {}; action_counts: {}".format(eval_at_step, self.action_counts))
        if self.master.enable_log_at_train_step:
            _log_at_step(eval_at_train_step)
        else:
            _log_at_step(eval_at_frame_step)
        # plotting
        if self.master.visualize:
            self.win_p_loss_avg = self.master.vis.scatter(X=np.array(self.p_loss_avg_log), env=self.master.refs,
                                                          win=self.win_p_loss_avg, opts=dict(title="p_loss_avg"))
            self.win_v_loss_avg = self.master.vis.scatter(X=np.array(self.v_loss_avg_log), env=self.master.refs,
                                                          win=self.win_v_loss_avg, opts=dict(title="v_loss_avg"))
            self.win_loss_avg = self.master.vis.scatter(X=np.array(self.loss_avg_log), env=self.master.refs,
                                                        win=self.win_loss_avg, opts=dict(title="loss_avg"))
            if self.master.icm:
                self.win_icm_inv_loss_avg = self.master.vis.scatter(X=np.array(self.icm_inv_loss_avg_log),
                                                                    env=self.master.refs,
                                                                    win=self.win_icm_inv_loss_avg,
                                                                    opts=dict(title="icm_inv_loss_avg"))
                self.win_icm_fwd_loss_avg = self.master.vis.scatter(X=np.array(self.icm_fwd_loss_avg_log),
                                                                    env=self.master.refs,
                                                                    win=self.win_icm_fwd_loss_avg,
                                                                    opts=dict(title="icm_fwd_loss_avg"))
                self.win_icm_inv_accuracy_avg = self.master.vis.scatter(X=np.array(self.icm_inv_accuracy_avg_log),
                                                                        env=self.master.refs,
                                                                        win=self.win_icm_inv_accuracy_avg,
                                                                        opts=dict(title="icm_inv_accuracy_avg"))
            self.win_grad_magnitude_avg = self.master.vis.scatter(X=np.array(self.grad_magnitude_avg_log),
                                                                  env=self.master.refs,
                                                                  win=self.win_grad_magnitude_avg,
                                                                  opts=dict(title="grad_magnitude_avg"))
            # TODO: add avg to name
            self.win_grad_magnitude_max = self.master.vis.scatter(X=np.array(self.grad_magnitude_max_log),
                                                                  env=self.master.refs,
                                                                  win=self.win_grad_magnitude_max,
                                                                  opts=dict(title="grad_magnitude_max_avg"))
            self.win_entropy_avg = self.master.vis.scatter(X=np.array(self.entropy_avg_log), env=self.master.refs,
                                                           win=self.win_entropy_avg, opts=dict(title="entropy_avg"))
            self.win_v_avg = self.master.vis.scatter(X=np.array(self.v_avg_log), env=self.master.refs,
                                                     win=self.win_v_avg, opts=dict(title="v_avg"))
            self.win_steps_avg = self.master.vis.scatter(X=np.array(self.steps_avg_log), env=self.master.refs,
                                                         win=self.win_steps_avg, opts=dict(title="steps_avg"))
            # self.win_steps_std = self.master.vis.scatter(X=np.array(self.steps_std_log), env=self.master.refs, win=self.win_steps_std, opts=dict(title="steps_std"))
            self.win_reward_avg = self.master.vis.scatter(X=np.array(self.reward_avg_log), env=self.master.refs,
                                                          win=self.win_reward_avg, opts=dict(title="reward_avg"))
            # self.win_reward_std = self.master.vis.scatter(X=np.array(self.reward_std_log), env=self.master.refs, win=self.win_reward_std, opts=dict(title="reward_std"))
            self.win_nepisodes = self.master.vis.scatter(X=np.array(self.nepisodes_log), env=self.master.refs,
                                                         win=self.win_nepisodes, opts=dict(title="nepisodes"))
            self.win_nepisodes_solved = self.master.vis.scatter(X=np.array(self.nepisodes_solved_log),
                                                                env=self.master.refs, win=self.win_nepisodes_solved,
                                                                opts=dict(title="nepisodes_solved"))
            self.win_repisodes_solved = self.master.vis.scatter(X=np.array(self.repisodes_solved_log),
                                                                env=self.master.refs, win=self.win_repisodes_solved,
                                                                opts=dict(title="repisodes_solved"))
            self.win_terminals_reached = self.master.vis.scatter(X=np.array(self.terminals_reached_log),
                                                                 env=self.master.refs, win=self.win_terminals_reached,
                                                                 opts=dict(title="terminals_reached"))
            self.win_action_counts = self.master.vis.bar(X=self.action_counts, env=self.master.refs,
                                                         win=self.win_action_counts,
                                                         opts=dict(title="action_counts"))
        self.last_eval = time.time()
        # save model
        self.master._save_model(eval_at_train_step, self.reward_avg_log[-1][1])
        if self.master.icm:
            self.master._save_icm_models(eval_at_train_step,
                                         self.icm_inv_loss_avg_log[-1][1], self.icm_fwd_loss_avg_log[-1][1])
    def run(self):
        """Evaluate every ``eval_freq`` seconds during training, plus once more
        after training finishes."""
        while self.master.train_step.value < self.master.steps:
            if time.time() - self.last_eval > self.master.eval_freq:
                self._eval_model()
        # we also do a final evaluation after training is done
        self._eval_model()
class A3CTester(A3CSingleProcess):
    """Test process: runs ``test_nepisodes`` greedy episodes with a fixed
    model and reports/plots summary statistics."""
    def __init__(self, master, process_id=0):
        master.logger.warning("<===================================> A3C-Tester {Env & Model}")
        super(A3CTester, self).__init__(master, process_id)
        self.training = False  # choose actions w/ max probability
        # self.training = True # choose actions by sampling (?)
        self.model.train(self.training)
        if self.master.icm:
            self.icm_inv_model.train(self.training)
            self.icm_fwd_model.train(self.training)
        self._reset_loggings()
        self.start_time = time.time()
        # TODO: add terminations count to the log here too?
        # TODO: add ICM logs here?
    def _reset_loggings(self):
        """Clear testing statistics and (re)create visdom window handles."""
        # testing stats
        self.steps_avg_log = []
        self.steps_std_log = []
        self.reward_avg_log = []
        self.reward_std_log = []
        self.nepisodes_log = []
        self.nepisodes_solved_log = []
        self.repisodes_solved_log = []
        # placeholders for windows for online curve plotting
        if self.master.visualize:
            # evaluation stats
            self.win_steps_avg = "win_steps_avg"
            self.win_steps_std = "win_steps_std"
            self.win_reward_avg = "win_reward_avg"
            self.win_reward_std = "win_reward_std"
            self.win_nepisodes = "win_nepisodes"
            self.win_nepisodes_solved = "win_nepisodes_solved"
            self.win_repisodes_solved = "win_repisodes_solved"
    def run(self):
        """Run ``test_nepisodes`` greedy episodes, then log/plot the results."""
        test_step = 0
        test_nepisodes = 0
        test_nepisodes_solved = 0
        test_episode_steps = None
        test_episode_steps_log = []
        test_episode_reward = None
        test_episode_reward_log = []
        test_should_start_new = True
        while test_nepisodes < self.master.test_nepisodes:
            if test_should_start_new:  # start of a new episode
                test_episode_steps = 0
                test_episode_reward = 0.
                # reset lstm_hidden_vb for new episode
                if self.master.enable_lstm:
                    # NOTE: clear hidden state at the beginning of each episode
                    self._reset_lstm_hidden_vb_episode(self.training)
                # Obtain the initial observation by resetting the environment
                self._reset_experience()
                self.experience = self.env.reset()
                assert self.experience.state1 is not None
                if not self.training:
                    if self.master.visualize: self.env.visual()
                    if self.master.render: self.env.render()
                # reset flag
                test_should_start_new = False
            if self.master.enable_lstm:
                # NOTE: detach the previous hidden variable from the graph at the beginning of each step
                # NOTE: not necessary here in testing but we do it anyways
                self._reset_lstm_hidden_vb_rollout()
            # Run a single step
            if self.master.enable_continuous:
                test_action, p_vb, sig_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            else:
                test_action, p_vb, v_vb = self._forward(self._preprocessState(self.experience.state1, True))
            self.experience = self.env.step(test_action)
            if not self.training:
                if self.master.visualize: self.env.visual()
                if self.master.render: self.env.render()
            if self.experience.terminal1 or \
                    self.master.early_stop and (test_episode_steps + 1) == self.master.early_stop:
                test_should_start_new = True
            test_episode_steps += 1
            test_episode_reward += self.experience.reward
            test_step += 1
            if test_should_start_new:
                test_nepisodes += 1
                if self.experience.terminal1:
                    test_nepisodes_solved += 1
                # This episode is finished, report and reset
                test_episode_steps_log.append([test_episode_steps])
                test_episode_reward_log.append([test_episode_reward])
                self._reset_experience()
                test_episode_steps = None
                test_episode_reward = None
        self.steps_avg_log.append([test_nepisodes, np.mean(np.asarray(test_episode_steps_log))])
        self.steps_std_log.append([test_nepisodes, np.std(np.asarray(test_episode_steps_log))])
        del test_episode_steps_log
        self.reward_avg_log.append([test_nepisodes, np.mean(np.asarray(test_episode_reward_log))])
        self.reward_std_log.append([test_nepisodes, np.std(np.asarray(test_episode_reward_log))])
        del test_episode_reward_log
        self.nepisodes_log.append([test_nepisodes, test_nepisodes])
        self.nepisodes_solved_log.append([test_nepisodes, test_nepisodes_solved])
        self.repisodes_solved_log.append(
            [test_nepisodes, (test_nepisodes_solved / test_nepisodes) if test_nepisodes > 0 else 0.])
        # plotting
        if self.master.visualize:
            self.win_steps_avg = self.master.vis.scatter(X=np.array(self.steps_avg_log), env=self.master.refs,
                                                         win=self.win_steps_avg, opts=dict(title="steps_avg"))
            # self.win_steps_std = self.master.vis.scatter(X=np.array(self.steps_std_log), env=self.master.refs, win=self.win_steps_std, opts=dict(title="steps_std"))
            self.win_reward_avg = self.master.vis.scatter(X=np.array(self.reward_avg_log), env=self.master.refs,
                                                          win=self.win_reward_avg, opts=dict(title="reward_avg"))
            # self.win_reward_std = self.master.vis.scatter(X=np.array(self.reward_std_log), env=self.master.refs, win=self.win_reward_std, opts=dict(title="reward_std"))
            self.win_nepisodes = self.master.vis.scatter(X=np.array(self.nepisodes_log), env=self.master.refs,
                                                         win=self.win_nepisodes, opts=dict(title="nepisodes"))
            self.win_nepisodes_solved = self.master.vis.scatter(X=np.array(self.nepisodes_solved_log),
                                                                env=self.master.refs, win=self.win_nepisodes_solved,
                                                                opts=dict(title="nepisodes_solved"))
            self.win_repisodes_solved = self.master.vis.scatter(X=np.array(self.repisodes_solved_log),
                                                                env=self.master.refs, win=self.win_repisodes_solved,
                                                                opts=dict(title="repisodes_solved"))
        # logging
        self.master.logger.warning("Testing Took: " + str(time.time() - self.start_time))
        self.master.logger.warning("Testing: steps_avg: {}".format(self.steps_avg_log[-1][1]))
        self.master.logger.warning("Testing: steps_std: {}".format(self.steps_std_log[-1][1]))
        self.master.logger.warning("Testing: reward_avg: {}".format(self.reward_avg_log[-1][1]))
        self.master.logger.warning("Testing: reward_std: {}".format(self.reward_std_log[-1][1]))
        self.master.logger.warning("Testing: nepisodes: {}".format(self.nepisodes_log[-1][1]))
        self.master.logger.warning("Testing: nepisodes_solved: {}".format(self.nepisodes_solved_log[-1][1]))
        self.master.logger.warning("Testing: repisodes_solved: {}".format(self.repisodes_solved_log[-1][1]))
|
#!/usr/bin/env python
import ecto
import ecto_test
def test_feedback():
    """Exercise ecto's EntangledPair feedback loop.

    The sink's value from iteration N is fed back through the source on
    iteration N+1, so the adder accumulates a running sum of the generator
    output 1, 2, 3, 4 across four single-iteration executions.
    """
    plasm = ecto.Plasm()
    g = ecto_test.Generate("Generator", step=1.0, start=1.0)
    add = ecto_test.Add()
    source, sink = ecto.EntangledPair()
    plasm.connect(source[:] >> add['left'],
                  g[:] >> add['right'],
                  add[:] >> sink[:]
                  )
    # view_plasm pops up a GUI window, which hangs automated/headless test
    # runs; keep the call available for interactive debugging only.
    # ecto.view_plasm(plasm)
    plasm.execute(niter=1)
    assert add.outputs.out == 1  # 0 + 1 = 1
    plasm.execute(niter=1)
    assert add.outputs.out == 3  # 1 + 2 = 3
    plasm.execute(niter=1)
    assert add.outputs.out == 6  # 3 + 3 = 6
    plasm.execute(niter=1)
    assert add.outputs.out == 10  # 6 + 4 = 10

if __name__ == '__main__':
    test_feedback()
Disable viewing the plasm in test.
#!/usr/bin/env python
import ecto
import ecto_test
def test_feedback():
    """Verify that an EntangledPair feeds each iteration's sum back into
    the next one, accumulating the partial sums 1, 3, 6, 10."""
    plasm = ecto.Plasm()
    generator = ecto_test.Generate("Generator", step=1.0, start=1.0)
    adder = ecto_test.Add()
    source, sink = ecto.EntangledPair()
    plasm.connect(source[:] >> adder['left'],
                  generator[:] >> adder['right'],
                  adder[:] >> sink[:])
    # ecto.view_plasm(plasm)  # GUI inspection; disabled for automated runs.
    # Partial sums of the generator sequence 1 + 2 + 3 + 4.
    for expected in (1, 3, 6, 10):
        plasm.execute(niter=1)
        assert adder.outputs.out == expected

if __name__ == '__main__':
    test_feedback()
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for tfb.MaskedAutoregressiveFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import six
import itertools
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python import math as tfp_math
from tensorflow_probability.python.bijectors import masked_autoregressive
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util
tfk = tf.keras
tfkl = tf.keras.layers
def _funnel_bijector_fn(x):
  """Builds a funnel-shaped Scale bijector from the first coordinate of x.

  The leading event dimension keeps unit scale; the remaining `ndims - 1`
  dimensions are scaled by exp(x[..., 0] / 2).
  """
  ndims = 4
  batch_shape = tf.shape(x)[:-1]
  unit_scale = tf.ones(tf.concat([batch_shape, [1]], axis=0))
  funnel_scale = tf.exp(x[..., :1] / 2) * tf.ones(
      tf.concat([batch_shape, [ndims - 1]], axis=0))
  return tfb.Scale(tf.concat([unit_scale, funnel_scale], axis=-1))
def _masked_autoregressive_2d_template(base_template, event_shape):
  """Adapts a 1-D shift-and-log-scale template to multi-dim events.

  Event dimensions are flattened before calling `base_template`; the
  resulting shift and log-scale are reshaped back to the input's shape.
  """

  def wrapper(x):
    flat_shape = tf.concat([tf.shape(x)[:-len(event_shape)], [-1]], -1)
    result = base_template(tf.reshape(x, flat_shape))
    # The template may return a stacked tensor or a (shift, log_scale) pair.
    if tf.is_tensor(result):
      shift, log_scale = tf.unstack(result, axis=-1)
    else:
      shift, log_scale = result
    original_shape = tf.shape(x)
    return (tf.reshape(shift, original_shape),
            tf.reshape(log_scale, original_shape))

  return wrapper
def _masked_autoregressive_shift_and_log_scale_fn(hidden_units,
                                                  shift_only=False,
                                                  activation="relu",
                                                  name=None,
                                                  **kwargs):
  """Returns a MADE-backed shift-and-log-scale callable.

  With `shift_only=True` the network emits one parameter per event
  dimension and the log-scale is reported as None (constant Jacobian).
  """
  num_params = 1 if shift_only else 2
  made = tfb.AutoregressiveNetwork(num_params, hidden_units=hidden_units,
                                   activation=activation, name=name, **kwargs)
  if not shift_only:
    return made
  return lambda x: (made(x)[..., 0], None)
def _masked_autoregressive_gated_bijector_fn(hidden_units,
                                             activation="relu",
                                             name=None,
                                             **kwargs):
  """Returns a bijector_fn implementing a gated affine autoregressive step.

  The MADE network emits a shift and a gate logit per dimension; the
  resulting bijector computes y = gate * x + (1 - gate) * shift.
  """
  made = tfb.AutoregressiveNetwork(
      2, hidden_units=hidden_units, activation=activation, name=name, **kwargs)

  def _bijector_fn(x):
    # The network expects a batch dimension; temporarily add one for rank-1
    # inputs and strip it from the outputs afterwards.
    if tensorshape_util.rank(x.shape) == 1:
      inputs = x[tf.newaxis, ...]
      squeeze = lambda t: t[0]
    else:
      inputs = x
      squeeze = lambda t: t
    shift, logit_gate = tf.unstack(made(inputs), axis=-1)
    gate = tf.nn.sigmoid(squeeze(logit_gate))
    return tfb.AffineScalar(shift=(1. - gate) * squeeze(shift), scale=gate)

  return _bijector_fn
@test_util.test_all_tf_execution_regimes
class GenMaskTest(test_util.TestCase):
  """Tests for `masked_autoregressive._gen_mask` with 3 blocks, 4 in, 6 out."""

  def test346Exclusive(self):
    # An "exclusive" mask lets an output row see only inputs from strictly
    # earlier blocks (its own block is excluded).
    expected_mask = np.array(
        [[0, 0, 0, 0],
         [0, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 0, 0]])
    mask = masked_autoregressive._gen_mask(
        num_blocks=3, n_in=4, n_out=6, mask_type="exclusive")
    self.assertAllEqual(expected_mask, mask)

  def test346Inclusive(self):
    # An "inclusive" mask additionally lets each output row see its own block.
    expected_mask = np.array(
        [[1, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 1, 0],
         [1, 1, 1, 0]])
    mask = masked_autoregressive._gen_mask(
        num_blocks=3, n_in=4, n_out=6, mask_type="inclusive")
    self.assertAllEqual(expected_mask, mask)
class MakeDenseAutoregressiveMasksTest(test_util.TestCase):
  """Tests for `masked_autoregressive._make_dense_autoregressive_masks`."""

  def testRandomMade(self):
    # Builds a toy MADE by multiplying random kernels with the generated
    # masks and checks the output has shape [batch, event_size, params].
    hidden_size = 8
    num_hidden = 3
    params = 2
    event_size = 4

    def random_made(x):
      masks = masked_autoregressive._make_dense_autoregressive_masks(
          params=params,
          event_size=event_size,
          hidden_units=[hidden_size] * num_hidden)
      output_sizes = [hidden_size] * num_hidden
      input_size = event_size
      # zip truncates to the hidden masks; the final (output) mask is
      # applied separately below as masks[-1].
      for (mask, output_size) in zip(masks, output_sizes):
        mask = tf.cast(mask, tf.float32)
        x = tf.matmul(
            x,
            np.random.randn(input_size, output_size).astype(np.float32) * mask)
        x = tf.nn.relu(x)
        input_size = output_size
      x = tf.matmul(
          x,
          np.random.randn(input_size, params * event_size).astype(np.float32) *
          masks[-1])
      x = tf.reshape(x, [-1, event_size, params])
      return x

    y = random_made(tf.zeros([1, event_size]))
    self.assertEqual([1, event_size, params], y.shape)

  def testLeftToRight(self):
    # Pin the exact masks for the default left-to-right ordering.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order="left-to-right",
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 0, 0],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 1, 1],
        [0, 0, 1, 1, 1, 1],
        [0, 0, 0, 0, 1, 1],
        [0, 0, 0, 0, 1, 1],
    ], masks[2])

  def testRandom(self):
    # With a fixed seed, random input order and hidden degrees must be
    # reproducible; the fixture pins the exact masks for seed=1.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order="random",
        hidden_degrees="random",
        seed=1)
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 0, 1, 1],
        [0, 0, 0, 0],
        [1, 1, 1, 1],
    ], masks[0])
    self.assertAllEqual([
        [1, 0, 1, 1],
        [1, 1, 1, 1],
        [1, 0, 1, 1],
        [1, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 0, 0],
        [1, 1, 1, 1, 0, 0],
        [0, 0, 1, 1, 0, 0],
        [0, 0, 1, 1, 0, 0],
    ], masks[2])

  def testRightToLeft(self):
    # An explicit permutation [3, 2, 1] is accepted in place of the
    # "right-to-left" string and yields mirrored masks.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order=list(reversed(range(1, 4))),
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [0, 0, 0, 0],
        [0, 0, 1, 1],
        [1, 1, 1, 1],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [1, 1, 1, 1, 0, 0],
        [1, 1, 1, 1, 0, 0],
        [1, 1, 0, 0, 0, 0],
        [1, 1, 0, 0, 0, 0],
    ], masks[2])

  def testUneven(self):
    # Hidden layer widths that do not divide evenly by the event size.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[5, 3],
        input_order="left-to-right",
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 1, 1, 1, 1],
        [0, 0, 0, 1, 1],
        [0, 0, 0, 0, 0],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1],
        [1, 1, 1],
        [1, 1, 1],
        [0, 0, 1],
        [0, 0, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 1, 1],
        [0, 0, 1, 1, 1, 1],
        [0, 0, 0, 0, 1, 1],
    ], masks[2])
@test_util.test_all_tf_execution_regimes
class _MaskedAutoregressiveFlowTest(test_util.VectorDistributionTestHelpers,
                                    test_util.TestCase):
  """Base tests for `tfb.MaskedAutoregressiveFlow`.

  Concrete subclasses override `_autoregressive_flow_kwargs` (and
  optionally `event_shape`) to exercise different network/bijector
  configurations; the leading underscore plus the module-level `del` keep
  this base class out of direct test discovery.
  """

  # Event shape used by all tests; subclasses may override.
  event_shape = [4]

  @property
  def _autoregressive_flow_kwargs(self):
    # Default configuration: shift-and-log-scale template, non-constant
    # Jacobian.
    return {
        "shift_and_log_scale_fn":
            tfb.masked_autoregressive_default_template(
                hidden_layers=[2], shift_only=False),
        "is_constant_jacobian":
            False,
    }

  def testNonBatchedBijector(self):
    # Round-trips a single (non-batched) event through forward/inverse and
    # checks the forward/inverse log-det-Jacobians are negatives.
    x_ = np.arange(np.prod(self.event_shape)).astype(
        np.float32).reshape(self.event_shape)
    ma = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    x = tf.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(tf.identity(forward_x))
    forward_inverse_y = ma.forward(inverse_y)
    fldj = ma.forward_log_det_jacobian(x, event_ndims=len(self.event_shape))
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(
        tf.identity(forward_x), event_ndims=len(self.event_shape))
    self.evaluate(tf1.global_variables_initializer())
    [
        forward_x_,
        inverse_y_,
        forward_inverse_y_,
        ildj_,
        fldj_,
    ] = self.evaluate([
        forward_x,
        inverse_y,
        forward_inverse_y,
        ildj,
        fldj,
    ])
    self.assertStartsWith(ma.name, "masked_autoregressive_flow")
    self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-6, atol=0.)
    self.assertAllClose(x_, inverse_y_, rtol=1e-5, atol=0.)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.)

  def testBatchedBijector(self):
    # Same round-trip checks, but with a batch of 4 events and looser
    # tolerances.
    x_ = np.arange(4 * np.prod(self.event_shape)).astype(
        np.float32).reshape([4] + self.event_shape) / 10.
    ma = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    x = tf.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(tf.identity(forward_x))
    forward_inverse_y = ma.forward(inverse_y)
    fldj = ma.forward_log_det_jacobian(x, event_ndims=len(self.event_shape))
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(
        tf.identity(forward_x), event_ndims=len(self.event_shape))
    self.evaluate(tf1.global_variables_initializer())
    [
        forward_x_,
        inverse_y_,
        forward_inverse_y_,
        ildj_,
        fldj_,
    ] = self.evaluate([
        forward_x,
        inverse_y,
        forward_inverse_y,
        ildj,
        fldj,
    ])
    self.assertStartsWith(ma.name, "masked_autoregressive_flow")
    self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-6, atol=1e-6)
    self.assertAllClose(x_, inverse_y_, rtol=1e-4, atol=1e-4)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=1e-6)

  @test_util.numpy_disable_gradient_test
  def testGradients(self):
    # The inverse(forward(x)) composition must be differentiable.
    maf = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)

    def _transform(x):
      y = maf.forward(x)
      return maf.inverse(tf.identity(y))

    self.evaluate(tf1.global_variables_initializer())
    _, gradient = tfp_math.value_and_gradient(_transform,
                                              tf.zeros(self.event_shape))
    self.assertIsNotNone(gradient)

  def testMutuallyConsistent(self):
    # Sampling and log_prob of the transformed distribution must agree
    # (Monte-Carlo consistency check from VectorDistributionTestHelpers).
    maf = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    base = tfd.Independent(
        tfd.Normal(loc=tf.zeros(self.event_shape), scale=1.),
        reinterpreted_batch_ndims=len(self.event_shape))
    reshape = tfb.Reshape(
        event_shape_out=[np.prod(self.event_shape)],
        event_shape_in=self.event_shape)
    bijector = tfb.Chain([reshape, maf])
    dist = tfd.TransformedDistribution(
        distribution=base, bijector=bijector, validate_args=True)
    self.run_test_sample_consistent_log_prob(
        sess_run_fn=self.evaluate,
        dist=dist,
        num_samples=int(1e6),
        radius=1.,
        center=0.,
        rtol=0.025)

  def testInvertMutuallyConsistent(self):
    # Same consistency check with the flow inverted (inverse as forward).
    maf = tfb.Invert(
        tfb.MaskedAutoregressiveFlow(
            validate_args=True, **self._autoregressive_flow_kwargs))
    base = tfd.Independent(
        tfd.Normal(loc=tf.zeros(self.event_shape), scale=1.),
        reinterpreted_batch_ndims=len(self.event_shape))
    reshape = tfb.Reshape(
        event_shape_out=[np.prod(self.event_shape)],
        event_shape_in=self.event_shape)
    bijector = tfb.Chain([reshape, maf])
    dist = tfd.TransformedDistribution(
        distribution=base, bijector=bijector, validate_args=True)
    self.run_test_sample_consistent_log_prob(
        sess_run_fn=self.evaluate,
        dist=dist,
        num_samples=int(1e6),
        radius=1.,
        center=0.,
        rtol=0.03)

  def testVectorBijectorRaises(self):
    # bijector_fn must return a scalar (event_ndims == 0) bijector.
    with self.assertRaisesRegexp(
        ValueError,
        "Bijectors with `forward_min_event_ndims` > 0 are not supported"):

      def bijector_fn(*args, **kwargs):
        del args, kwargs
        return tfb.Inline(forward_min_event_ndims=1)

      maf = tfb.MaskedAutoregressiveFlow(
          bijector_fn=bijector_fn, validate_args=True)
      maf.forward([1., 2.])

  def testRankChangingBijectorRaises(self):
    # bijector_fn must not change the event rank between forward/inverse.
    with self.assertRaisesRegexp(
        ValueError, "Bijectors which alter `event_ndims` are not supported."):

      def bijector_fn(*args, **kwargs):
        del args, kwargs
        return tfb.Inline(forward_min_event_ndims=0, inverse_min_event_ndims=1)

      maf = tfb.MaskedAutoregressiveFlow(
          bijector_fn=bijector_fn, validate_args=True)
      maf.forward([1., 2.])
@test_util.numpy_disable_test_missing_functionality("tf.make_template")
@test_util.jax_disable_test_missing_functionality("tf.make_template")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowTest(_MaskedAutoregressiveFlowTest):
  """Runs the base tests with the default template configuration."""
  pass
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowShiftOnlyTest(_MaskedAutoregressiveFlowTest):
  """Runs the base tests with a shift-only template (constant Jacobian)."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            tfb.masked_autoregressive_default_template(
                hidden_layers=[2], shift_only=True),
        "is_constant_jacobian":
            True,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowShiftOnlyLayerTest(_MaskedAutoregressiveFlowTest):
  """Shift-only configuration backed by the Keras AutoregressiveNetwork."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            _masked_autoregressive_shift_and_log_scale_fn(
                hidden_units=[2], shift_only=True),
        "is_constant_jacobian":
            True,
    }
@test_util.numpy_disable_test_missing_functionality("tf.make_template")
@test_util.jax_disable_test_missing_functionality("tf.make_template")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowUnrollLoopTest(_MaskedAutoregressiveFlowTest):
  """Default template configuration with the inverse loop unrolled."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            tfb.masked_autoregressive_default_template(
                hidden_layers=[2], shift_only=False),
        "is_constant_jacobian":
            False,
        "unroll_loop":
            True,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowUnrollLoopLayerTest(_MaskedAutoregressiveFlowTest
                                                 ):
  """Keras-layer configuration with the inverse loop unrolled."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            _masked_autoregressive_shift_and_log_scale_fn(
                hidden_units=[10, 10], activation="relu"),
        "is_constant_jacobian":
            False,
        "unroll_loop":
            True,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressive2DTest(_MaskedAutoregressiveFlowTest):
  """Base tests over a rank-2 event shape via the 2-D template adapter."""

  event_shape = [3, 2]

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            _masked_autoregressive_2d_template(
                tfb.masked_autoregressive_default_template(
                    hidden_layers=[np.prod(self.event_shape)],
                    shift_only=False), self.event_shape),
        "is_constant_jacobian":
            False,
        "event_ndims":
            2,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveGatedTest(_MaskedAutoregressiveFlowTest):
  """Base tests driven by the gated-affine `bijector_fn` variant."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "bijector_fn":
            _masked_autoregressive_gated_bijector_fn(
                hidden_units=[10, 10], activation="relu"),
        "is_constant_jacobian":
            False,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressive2DLayerTest(_MaskedAutoregressiveFlowTest):
  """Rank-2 event shape backed by the Keras AutoregressiveNetwork."""

  event_shape = [3, 2]

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "shift_and_log_scale_fn":
            _masked_autoregressive_2d_template(
                _masked_autoregressive_shift_and_log_scale_fn(
                    hidden_units=[np.prod(self.event_shape)],
                    shift_only=False), self.event_shape),
        "is_constant_jacobian":
            False,
        "event_ndims":
            2,
    }
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFunnelTest(_MaskedAutoregressiveFlowTest):
  """Base tests driven by the funnel-shaped Scale `bijector_fn`."""

  @property
  def _autoregressive_flow_kwargs(self):
    return {
        "bijector_fn":
            _funnel_bijector_fn,
        "is_constant_jacobian":
            False,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class AutoregressiveNetworkTest(test_util.TestCase):
  """Tests for `tfb.AutoregressiveNetwork` (MADE) layer construction."""

  def _count_trainable_params(self, layer):
    # Total element count over the layer's trainable weights.
    ret = 0
    for w in layer.trainable_weights:
      ret += np.prod(w.shape)
    return ret

  def assertIsAutoregressive(self, f, event_size, order):
    """Asserts that `f` respects the autoregressive property under `order`.

    `order` may be "left-to-right", "right-to-left", or a 1-based
    permutation of 1..event_size.
    """
    input_order = None
    if isinstance(order, six.string_types):
      if order == "left-to-right":
        input_order = range(event_size)
      elif order == "right-to-left":
        input_order = range(event_size - 1, -1, -1)
    elif np.all(np.sort(order) == np.arange(1, event_size + 1)):
      # Convert the 1-based permutation to 0-based degrees.
      input_order = list(np.array(order) - 1)
    if input_order is None:
      raise ValueError("Invalid input order: '{}'.".format(order))

    # Test that if we change dimension `i` of the input, then the only changed
    # dimensions `j` of the output are those with larger `input_order[j]`.
    # (We could also do this by examining gradients.)
    # NOTE(review): these two initializers are immediately overwritten inside
    # the loop; they only exist so the names are bound if event_size == 0.
    diff = []
    mask = []
    for i in range(event_size):
      x = np.random.randn(event_size)
      delta = np.zeros(event_size)
      delta[i] = np.random.randn()
      diff = self.evaluate(f(x + delta) - f(x))
      mask = [[input_order[i] >= input_order[j]] for j in range(event_size)]
      self.assertAllClose(np.zeros_like(diff), mask * diff, atol=0., rtol=1e-6)

  def test_layer_right_to_left_float64(self):
    # float64 layer with random hidden degrees; also pins the expected
    # trainable parameter count (kernels + biases per layer).
    made = tfb.AutoregressiveNetwork(
        params=3, event_shape=4, activation=None, input_order="right-to-left",
        dtype=tf.float64, hidden_degrees="random", hidden_units=[10, 7, 10])
    self.assertEqual((4, 3), made(np.zeros(4, dtype=np.float64)).shape)
    self.assertEqual("float64", made(np.zeros(4, dtype=np.float64)).dtype)
    self.assertEqual(5 * 10 + 11 * 7 + 8 * 10 + 11 * 12,
                     self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=4, order="right-to-left")

  def test_layer_callable_activation(self):
    # A callable activation, a random input order, and Keras regularizer/
    # initializer arguments must all be forwarded to the layer.
    made = tfb.AutoregressiveNetwork(
        params=2, activation=tf.math.exp, input_order="random",
        kernel_regularizer=tfk.regularizers.l2(0.1), bias_initializer="ones",
        hidden_units=[9], hidden_degrees="equal")
    self.assertEqual((3, 5, 2), made(np.zeros((3, 5))).shape)
    self.assertEqual(6 * 9 + 10 * 10, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=5, order=made._input_order)

  def test_layer_smaller_hidden_layers_than_input(self):
    # Hidden layers narrower than the event size; use_bias=False drops the
    # bias parameters from the count.
    made = tfb.AutoregressiveNetwork(
        params=1, event_shape=9, activation="relu", use_bias=False,
        bias_regularizer=tfk.regularizers.l1(0.5), bias_constraint=tf.math.abs,
        input_order="right-to-left", hidden_units=[5, 5])
    self.assertEqual((9, 1), made(np.zeros(9)).shape)
    self.assertEqual(9 * 5 + 5 * 5 + 5 * 9, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=9, order="right-to-left")

  def test_layer_no_hidden_units(self):
    # With no hidden layers the network is a single masked dense layer.
    made = tfb.AutoregressiveNetwork(
        params=4, event_shape=3, use_bias=False, hidden_degrees="random",
        kernel_constraint="unit_norm")
    self.assertEqual((2, 2, 5, 3, 4), made(np.zeros((2, 2, 5, 3))).shape)
    self.assertEqual(3 * 12, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=3, order="left-to-right")

  def test_layer_v2_kernel_initializer(self):
    # A TF2 Keras initializer instance must be accepted for the kernels.
    init = tf.keras.initializers.GlorotNormal()
    made = tfb.AutoregressiveNetwork(
        params=2, event_shape=4, activation="relu",
        hidden_units=[5, 5], kernel_initializer=init)
    self.assertEqual((4, 2), made(np.zeros(4)).shape)
    self.assertEqual(5 * 5 + 6 * 5 + 6 * 8, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=4, order="left-to-right")

  def test_doc_string(self):
    # Mirrors the density-estimation example from the class doc string.
    # Generate data.
    n = 2000
    x2 = np.random.randn(n).astype(dtype=np.float32) * 2.
    x1 = np.random.randn(n).astype(dtype=np.float32) + (x2 * x2 / 4.)
    data = np.stack([x1, x2], axis=-1)

    # Density estimation with MADE.
    made = tfb.AutoregressiveNetwork(params=2, hidden_units=[10, 10])
    distribution = tfd.TransformedDistribution(
        distribution=tfd.Sample(tfd.Normal(0., 1.), [2]),
        bijector=tfb.MaskedAutoregressiveFlow(made))

    # Construct and fit model.
    x_ = tfkl.Input(shape=(2,), dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)

    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)

    batch_size = 25
    model.fit(x=data,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual((3, 1, 2), distribution.sample((3, 1)).shape)
    self.assertAllEqual(
        (3,), distribution.log_prob(np.ones((3, 2), dtype=np.float32)).shape)

  def test_doc_string_images_case_1(self):
    # Mirrors doc-string image example 1: autoregress over all pixels of
    # flattened images.
    # Generate fake images.
    images = np.random.choice([0, 1], size=(100, 8, 8, 3))
    n, width, height, channels = images.shape

    # Reshape images to achieve desired autoregressivity.
    event_shape = [width * height * channels]
    reshaped_images = np.reshape(images, [n, width * height * channels])

    made = tfb.AutoregressiveNetwork(params=1, event_shape=event_shape,
                                     hidden_units=[20, 20], activation="relu")

    # Density estimation with MADE.
    #
    # NOTE: Parameterize an autoregressive distribution over an event_shape of
    # [width * height * channels], with univariate Bernoulli conditional
    # distributions.
    distribution = tfd.Autoregressive(
        lambda x: tfd.Independent(  # pylint: disable=g-long-lambda
            tfd.Bernoulli(logits=tf.unstack(made(x), axis=-1)[0],
                          dtype=tf.float32),
            reinterpreted_batch_ndims=1),
        sample0=tf.zeros(event_shape, dtype=tf.float32))

    # Construct and fit model.
    x_ = tfkl.Input(shape=event_shape, dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)

    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)

    batch_size = 10
    model.fit(x=reshaped_images,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual(event_shape, distribution.sample().shape)
    self.assertAllEqual((n,), distribution.log_prob(reshaped_images).shape)

  def test_doc_string_images_case_2(self):
    # Mirrors doc-string image example 2: autoregress over pixel positions
    # within each channel.
    # Generate fake images.
    images = np.random.choice([0, 1], size=(100, 8, 8, 3))
    n, width, height, channels = images.shape

    # Reshape images to achieve desired autoregressivity.
    reshaped_images = np.transpose(
        np.reshape(images, [n, width * height, channels]),
        axes=[0, 2, 1])

    made = tfb.AutoregressiveNetwork(params=1, event_shape=[width * height],
                                     hidden_units=[20, 20], activation="relu")

    # Density estimation with MADE.
    #
    # NOTE: Parameterize an autoregressive distribution over an event_shape of
    # [channels, width * height], with univariate Bernoulli conditional
    # distributions.
    distribution = tfd.Autoregressive(
        lambda x: tfd.Independent(  # pylint: disable=g-long-lambda
            tfd.Bernoulli(logits=tf.unstack(made(x), axis=-1)[0],
                          dtype=tf.float32),
            reinterpreted_batch_ndims=2),
        sample0=tf.zeros([channels, width * height], dtype=tf.float32))

    # Construct and fit model.
    x_ = tfkl.Input(shape=(channels, width * height), dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)

    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)

    batch_size = 10
    model.fit(x=reshaped_images,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual((7, channels, width * height),
                        distribution.sample(7).shape)
    self.assertAllEqual((n,), distribution.log_prob(reshaped_images).shape)
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class ConditionalTests(test_util.TestCase):
  """Argument validation and broadcasting for conditional MADE networks."""

  def test_conditional_missing_event_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`event_shape` must be provided when `conditional` is True'):

      tfb.AutoregressiveNetwork(
          params=2, conditional=True, conditional_shape=[4])

  def test_conditional_missing_conditional_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_shape` must be provided when `conditional` is True'):

      tfb.AutoregressiveNetwork(
          params=2, conditional=True, event_shape=[4])

  def test_conditional_incorrect_layers(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_input_layers` must be "first_layers" or "all_layers"'):

      tfb.AutoregressiveNetwork(
          params=2, conditional=True,
          event_shape=[4], conditional_shape=[4],
          conditional_input_layers="non-existent-option")

  def test_conditional_false_with_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_shape` passed but `conditional` is set to False.'):

      tfb.AutoregressiveNetwork(params=2, conditional_shape=[4])

  def test_conditional_wrong_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        'Parameter `conditional_shape` must describe a rank-1 shape'):

      tfb.AutoregressiveNetwork(
          params=2, conditional=True, event_shape=[4],
          conditional_shape=[10, 4])

  def test_conditional_missing_tensor(self):
    # NOTE(review): "arguement" intentionally matches the library's
    # (misspelled) error message; do not correct the spelling here or the
    # regexp will stop matching.
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional` must be passed as a named arguement'):

      made = tfb.AutoregressiveNetwork(
          params=2, event_shape=[4], conditional=True,
          conditional_shape=[6])
      made(np.random.normal(0, 1, (1, 4)))

  def test_conditional_broadcasting(self):
    # The event and conditional inputs broadcast over their batch
    # dimensions; the output keeps the event size plus a params axis.
    made = tfb.AutoregressiveNetwork(
        params=2, event_shape=[3], conditional=True,
        conditional_shape=[4])

    input_shapes = [
        [3],
        [1, 3],
        [2, 3],
        [1, 2, 3],
        [2, 1, 3],
        [2, 2, 3]]
    cond_shapes = [
        [4],
        [1, 4],
        [2, 4],
        [1, 2, 4],
        [2, 1, 4],
        [2, 2, 4]]
    for input_shape, cond_shape in itertools.product(input_shapes, cond_shapes):
      made_shape = tf.shape(made(
          tf.ones(input_shape),
          conditional=tf.ones(cond_shape)))
      broadcast_shape = tf.concat(
          [tf.broadcast_dynamic_shape(
              cond_shape[:-1],
              input_shape[:-1]),
           input_shape[-1:]],
          axis=0)
      self.assertAllEqual(
          tf.concat([broadcast_shape, [2]], axis=0),
          made_shape)
# Delete the base class so the test runner only discovers the concrete
# subclasses above (each supplies its own flow kwargs).
del _MaskedAutoregressiveFlowTest

if __name__ == "__main__":
  tf.test.main()
Address the remaining pylint suggestions.
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for tfb.MaskedAutoregressiveFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import six
import itertools
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python import math as tfp_math
from tensorflow_probability.python.bijectors import masked_autoregressive
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util
tfk = tf.keras
tfkl = tf.keras.layers
def _funnel_bijector_fn(x):
  """Builds a funnel-shaped Scale bijector from the first coordinate of x.

  The leading event dimension keeps unit scale; the remaining `ndims - 1`
  dimensions are scaled by exp(x[..., 0] / 2).
  """
  ndims = 4
  batch_shape = tf.shape(x)[:-1]
  unit_scale = tf.ones(tf.concat([batch_shape, [1]], axis=0))
  funnel_scale = tf.exp(x[..., :1] / 2) * tf.ones(
      tf.concat([batch_shape, [ndims - 1]], axis=0))
  return tfb.Scale(tf.concat([unit_scale, funnel_scale], axis=-1))
def _masked_autoregressive_2d_template(base_template, event_shape):
  """Adapts a 1-D shift-and-log-scale template to multi-dim events.

  Event dimensions are flattened before calling `base_template`; the
  resulting shift and log-scale are reshaped back to the input's shape.
  """

  def wrapper(x):
    flat_shape = tf.concat([tf.shape(x)[:-len(event_shape)], [-1]], -1)
    result = base_template(tf.reshape(x, flat_shape))
    # The template may return a stacked tensor or a (shift, log_scale) pair.
    if tf.is_tensor(result):
      shift, log_scale = tf.unstack(result, axis=-1)
    else:
      shift, log_scale = result
    original_shape = tf.shape(x)
    return (tf.reshape(shift, original_shape),
            tf.reshape(log_scale, original_shape))

  return wrapper
def _masked_autoregressive_shift_and_log_scale_fn(hidden_units,
                                                  shift_only=False,
                                                  activation="relu",
                                                  name=None,
                                                  **kwargs):
  """Returns a MADE-backed shift-and-log-scale callable.

  With `shift_only=True` the network emits one parameter per event
  dimension and the log-scale is reported as None (constant Jacobian).
  """
  num_params = 1 if shift_only else 2
  made = tfb.AutoregressiveNetwork(num_params, hidden_units=hidden_units,
                                   activation=activation, name=name, **kwargs)
  if not shift_only:
    return made
  return lambda x: (made(x)[..., 0], None)
def _masked_autoregressive_gated_bijector_fn(hidden_units,
                                             activation="relu",
                                             name=None,
                                             **kwargs):
  """Returns a bijector_fn implementing a gated affine autoregressive step.

  The MADE network emits a shift and a gate logit per dimension; the
  resulting bijector computes y = gate * x + (1 - gate) * shift.
  """
  made = tfb.AutoregressiveNetwork(
      2, hidden_units=hidden_units, activation=activation, name=name, **kwargs)

  def _bijector_fn(x):
    # The network expects a batch dimension; temporarily add one for rank-1
    # inputs and strip it from the outputs afterwards.
    if tensorshape_util.rank(x.shape) == 1:
      inputs = x[tf.newaxis, ...]
      squeeze = lambda t: t[0]
    else:
      inputs = x
      squeeze = lambda t: t
    shift, logit_gate = tf.unstack(made(inputs), axis=-1)
    gate = tf.nn.sigmoid(squeeze(logit_gate))
    return tfb.AffineScalar(shift=(1. - gate) * squeeze(shift), scale=gate)

  return _bijector_fn
@test_util.test_all_tf_execution_regimes
class GenMaskTest(test_util.TestCase):
  """Tests for `masked_autoregressive._gen_mask` with 3 blocks, 4 in, 6 out."""

  def test346Exclusive(self):
    # An "exclusive" mask lets an output row see only inputs from strictly
    # earlier blocks (its own block is excluded).
    expected_mask = np.array(
        [[0, 0, 0, 0],
         [0, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 0, 0]])
    mask = masked_autoregressive._gen_mask(
        num_blocks=3, n_in=4, n_out=6, mask_type="exclusive")
    self.assertAllEqual(expected_mask, mask)

  def test346Inclusive(self):
    # An "inclusive" mask additionally lets each output row see its own block.
    expected_mask = np.array(
        [[1, 0, 0, 0],
         [1, 0, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 0, 0],
         [1, 1, 1, 0],
         [1, 1, 1, 0]])
    mask = masked_autoregressive._gen_mask(
        num_blocks=3, n_in=4, n_out=6, mask_type="inclusive")
    self.assertAllEqual(expected_mask, mask)
class MakeDenseAutoregressiveMasksTest(test_util.TestCase):
  """Tests for `masked_autoregressive._make_dense_autoregressive_masks`."""

  def testRandomMade(self):
    # Builds a toy MADE by multiplying random kernels with the generated
    # masks and checks the output has shape [batch, event_size, params].
    hidden_size = 8
    num_hidden = 3
    params = 2
    event_size = 4

    def random_made(x):
      masks = masked_autoregressive._make_dense_autoregressive_masks(
          params=params,
          event_size=event_size,
          hidden_units=[hidden_size] * num_hidden)
      output_sizes = [hidden_size] * num_hidden
      input_size = event_size
      # zip truncates to the hidden masks; the final (output) mask is
      # applied separately below as masks[-1].
      for (mask, output_size) in zip(masks, output_sizes):
        mask = tf.cast(mask, tf.float32)
        x = tf.matmul(
            x,
            np.random.randn(input_size, output_size).astype(np.float32) * mask)
        x = tf.nn.relu(x)
        input_size = output_size
      x = tf.matmul(
          x,
          np.random.randn(input_size, params * event_size).astype(np.float32) *
          masks[-1])
      x = tf.reshape(x, [-1, event_size, params])
      return x

    y = random_made(tf.zeros([1, event_size]))
    self.assertEqual([1, event_size, params], y.shape)

  def testLeftToRight(self):
    # Pin the exact masks for the default left-to-right ordering.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order="left-to-right",
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 0, 0],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 1, 1],
        [0, 0, 1, 1, 1, 1],
        [0, 0, 0, 0, 1, 1],
        [0, 0, 0, 0, 1, 1],
    ], masks[2])

  def testRandom(self):
    # With a fixed seed, random input order and hidden degrees must be
    # reproducible; the fixture pins the exact masks for seed=1.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order="random",
        hidden_degrees="random",
        seed=1)
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 0, 1, 1],
        [0, 0, 0, 0],
        [1, 1, 1, 1],
    ], masks[0])
    self.assertAllEqual([
        [1, 0, 1, 1],
        [1, 1, 1, 1],
        [1, 0, 1, 1],
        [1, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 0, 0],
        [1, 1, 1, 1, 0, 0],
        [0, 0, 1, 1, 0, 0],
        [0, 0, 1, 1, 0, 0],
    ], masks[2])

  def testRightToLeft(self):
    # An explicit permutation [3, 2, 1] is accepted in place of the
    # "right-to-left" string and yields mirrored masks.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[4, 4],
        input_order=list(reversed(range(1, 4))),
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [0, 0, 0, 0],
        [0, 0, 1, 1],
        [1, 1, 1, 1],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 1, 1],
    ], masks[1])
    self.assertAllEqual([
        [1, 1, 1, 1, 0, 0],
        [1, 1, 1, 1, 0, 0],
        [1, 1, 0, 0, 0, 0],
        [1, 1, 0, 0, 0, 0],
    ], masks[2])

  def testUneven(self):
    # Hidden layer widths that do not divide evenly by the event size.
    masks = masked_autoregressive._make_dense_autoregressive_masks(
        params=2,
        event_size=3,
        hidden_units=[5, 3],
        input_order="left-to-right",
        hidden_degrees="equal")
    self.assertLen(masks, 3)
    self.assertAllEqual([
        [1, 1, 1, 1, 1],
        [0, 0, 0, 1, 1],
        [0, 0, 0, 0, 0],
    ], masks[0])
    self.assertAllEqual([
        [1, 1, 1],
        [1, 1, 1],
        [1, 1, 1],
        [0, 0, 1],
        [0, 0, 1],
    ], masks[1])
    self.assertAllEqual([
        [0, 0, 1, 1, 1, 1],
        [0, 0, 1, 1, 1, 1],
        [0, 0, 0, 0, 1, 1],
    ], masks[2])
@test_util.test_all_tf_execution_regimes
class _MaskedAutoregressiveFlowTest(test_util.VectorDistributionTestHelpers,
                                    test_util.TestCase):
  """Base test suite for `tfb.MaskedAutoregressiveFlow`.

  Concrete subclasses override `_autoregressive_flow_kwargs` (and optionally
  `event_shape`) to re-run the full suite against different configurations.
  The underscore prefix plus the `del` at module scope keeps this base from
  being collected as a test itself.
  """

  event_shape = [4]

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    return {
        "shift_and_log_scale_fn":
            tfb.masked_autoregressive_default_template(
                hidden_layers=[2], shift_only=False),
        "is_constant_jacobian":
            False,
    }

  def testNonBatchedBijector(self):
    x_ = np.arange(np.prod(self.event_shape)).astype(
        np.float32).reshape(self.event_shape)
    ma = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    x = tf.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(tf.identity(forward_x))
    forward_inverse_y = ma.forward(inverse_y)
    fldj = ma.forward_log_det_jacobian(x, event_ndims=len(self.event_shape))
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(
        tf.identity(forward_x), event_ndims=len(self.event_shape))
    self.evaluate(tf1.global_variables_initializer())
    [
        forward_x_,
        inverse_y_,
        forward_inverse_y_,
        ildj_,
        fldj_,
    ] = self.evaluate([
        forward_x,
        inverse_y,
        forward_inverse_y,
        ildj,
        fldj,
    ])
    self.assertStartsWith(ma.name, "masked_autoregressive_flow")
    self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-6, atol=0.)
    self.assertAllClose(x_, inverse_y_, rtol=1e-5, atol=0.)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.)

  def testBatchedBijector(self):
    # Same round-trip checks as above but with a leading batch dimension.
    x_ = np.arange(4 * np.prod(self.event_shape)).astype(
        np.float32).reshape([4] + self.event_shape) / 10.
    ma = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    x = tf.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(tf.identity(forward_x))
    forward_inverse_y = ma.forward(inverse_y)
    fldj = ma.forward_log_det_jacobian(x, event_ndims=len(self.event_shape))
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(
        tf.identity(forward_x), event_ndims=len(self.event_shape))
    self.evaluate(tf1.global_variables_initializer())
    [
        forward_x_,
        inverse_y_,
        forward_inverse_y_,
        ildj_,
        fldj_,
    ] = self.evaluate([
        forward_x,
        inverse_y,
        forward_inverse_y,
        ildj,
        fldj,
    ])
    self.assertStartsWith(ma.name, "masked_autoregressive_flow")
    self.assertAllClose(forward_x_, forward_inverse_y_, rtol=1e-6, atol=1e-6)
    self.assertAllClose(x_, inverse_y_, rtol=1e-4, atol=1e-4)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=1e-6)

  @test_util.numpy_disable_gradient_test
  def testGradients(self):
    # Gradients must flow through a forward+inverse round trip.
    maf = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)

    def _transform(x):
      y = maf.forward(x)
      return maf.inverse(tf.identity(y))

    self.evaluate(tf1.global_variables_initializer())
    _, gradient = tfp_math.value_and_gradient(_transform,
                                              tf.zeros(self.event_shape))
    self.assertIsNotNone(gradient)

  def testMutuallyConsistent(self):
    # Sample/log_prob consistency of the transformed distribution.
    maf = tfb.MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    base = tfd.Independent(
        tfd.Normal(loc=tf.zeros(self.event_shape), scale=1.),
        reinterpreted_batch_ndims=len(self.event_shape))
    reshape = tfb.Reshape(
        event_shape_out=[np.prod(self.event_shape)],
        event_shape_in=self.event_shape)
    bijector = tfb.Chain([reshape, maf])
    dist = tfd.TransformedDistribution(
        distribution=base, bijector=bijector, validate_args=True)
    self.run_test_sample_consistent_log_prob(
        sess_run_fn=self.evaluate,
        dist=dist,
        num_samples=int(1e6),
        radius=1.,
        center=0.,
        rtol=0.025)

  def testInvertMutuallyConsistent(self):
    # Same consistency check with the bijector inverted.
    maf = tfb.Invert(
        tfb.MaskedAutoregressiveFlow(
            validate_args=True, **self._autoregressive_flow_kwargs))
    base = tfd.Independent(
        tfd.Normal(loc=tf.zeros(self.event_shape), scale=1.),
        reinterpreted_batch_ndims=len(self.event_shape))
    reshape = tfb.Reshape(
        event_shape_out=[np.prod(self.event_shape)],
        event_shape_in=self.event_shape)
    bijector = tfb.Chain([reshape, maf])
    dist = tfd.TransformedDistribution(
        distribution=base, bijector=bijector, validate_args=True)
    self.run_test_sample_consistent_log_prob(
        sess_run_fn=self.evaluate,
        dist=dist,
        num_samples=int(1e6),
        radius=1.,
        center=0.,
        rtol=0.03)

  def testVectorBijectorRaises(self):
    # A bijector_fn producing a vector-event bijector must be rejected.
    with self.assertRaisesRegexp(
        ValueError,
        "Bijectors with `forward_min_event_ndims` > 0 are not supported"):

      def bijector_fn(*args, **kwargs):
        del args, kwargs
        return tfb.Inline(forward_min_event_ndims=1)

      maf = tfb.MaskedAutoregressiveFlow(
          bijector_fn=bijector_fn, validate_args=True)
      maf.forward([1., 2.])

  def testRankChangingBijectorRaises(self):
    # A bijector_fn whose bijector changes event rank must be rejected.
    with self.assertRaisesRegexp(
        ValueError, "Bijectors which alter `event_ndims` are not supported."):

      def bijector_fn(*args, **kwargs):
        del args, kwargs
        return tfb.Inline(forward_min_event_ndims=0, inverse_min_event_ndims=1)

      maf = tfb.MaskedAutoregressiveFlow(
          bijector_fn=bijector_fn, validate_args=True)
      maf.forward([1., 2.])
@test_util.numpy_disable_test_missing_functionality("tf.make_template")
@test_util.jax_disable_test_missing_functionality("tf.make_template")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowTest(_MaskedAutoregressiveFlowTest):
  """Runs the base suite with its default template-based configuration."""
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowShiftOnlyTest(_MaskedAutoregressiveFlowTest):
  """Base suite with a shift-only template (constant Jacobian)."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    template = tfb.masked_autoregressive_default_template(
        hidden_layers=[2], shift_only=True)
    return {"shift_and_log_scale_fn": template, "is_constant_jacobian": True}
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowShiftOnlyLayerTest(_MaskedAutoregressiveFlowTest):
  """Base suite with the layer-based shift-only fn (constant Jacobian)."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    layer_fn = _masked_autoregressive_shift_and_log_scale_fn(
        hidden_units=[2], shift_only=True)
    return {"shift_and_log_scale_fn": layer_fn, "is_constant_jacobian": True}
@test_util.numpy_disable_test_missing_functionality("tf.make_template")
@test_util.jax_disable_test_missing_functionality("tf.make_template")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowUnrollLoopTest(_MaskedAutoregressiveFlowTest):
  """Base suite with `unroll_loop=True` and the default template."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    template = tfb.masked_autoregressive_default_template(
        hidden_layers=[2], shift_only=False)
    return {
        "shift_and_log_scale_fn": template,
        "is_constant_jacobian": False,
        "unroll_loop": True,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFlowUnrollLoopLayerTest(_MaskedAutoregressiveFlowTest):
  """Base suite with `unroll_loop=True` and the layer-based fn."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    layer_fn = _masked_autoregressive_shift_and_log_scale_fn(
        hidden_units=[10, 10], activation="relu")
    return {
        "shift_and_log_scale_fn": layer_fn,
        "is_constant_jacobian": False,
        "unroll_loop": True,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressive2DTest(_MaskedAutoregressiveFlowTest):
  """Base suite over a rank-2 event shape via the 2-d template wrapper."""

  event_shape = [3, 2]

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    base_template = tfb.masked_autoregressive_default_template(
        hidden_layers=[np.prod(self.event_shape)], shift_only=False)
    wrapped = _masked_autoregressive_2d_template(
        base_template, self.event_shape)
    return {
        "shift_and_log_scale_fn": wrapped,
        "is_constant_jacobian": False,
        "event_ndims": 2,
    }
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveGatedTest(_MaskedAutoregressiveFlowTest):
  """Base suite driven by a gated `bijector_fn` instead of shift/log-scale."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    gated_fn = _masked_autoregressive_gated_bijector_fn(
        hidden_units=[10, 10], activation="relu")
    return {"bijector_fn": gated_fn, "is_constant_jacobian": False}
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressive2DLayerTest(_MaskedAutoregressiveFlowTest):
  """Rank-2 event shape with the layer-based fn wrapped for 2-d events."""

  event_shape = [3, 2]

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    layer_fn = _masked_autoregressive_shift_and_log_scale_fn(
        hidden_units=[np.prod(self.event_shape)], shift_only=False)
    wrapped = _masked_autoregressive_2d_template(layer_fn, self.event_shape)
    return {
        "shift_and_log_scale_fn": wrapped,
        "is_constant_jacobian": False,
        "event_ndims": 2,
    }
@test_util.test_all_tf_execution_regimes
class MaskedAutoregressiveFunnelTest(_MaskedAutoregressiveFlowTest):
  """Base suite driven by the module-level funnel `bijector_fn`."""

  @property
  def _autoregressive_flow_kwargs(self):
    """Constructor kwargs for the bijector under test."""
    return dict(bijector_fn=_funnel_bijector_fn, is_constant_jacobian=False)
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class AutoregressiveNetworkTest(test_util.TestCase):
  """Tests for `tfb.AutoregressiveNetwork` (MADE as a Keras layer)."""

  def _count_trainable_params(self, layer):
    # Sum of element counts over all trainable weights of `layer`.
    ret = 0
    for w in layer.trainable_weights:
      ret += np.prod(w.shape)
    return ret

  def assertIsAutoregressive(self, f, event_size, order):
    """Asserts that `f` respects the autoregressive property for `order`.

    `order` may be "left-to-right", "right-to-left", or a 1-based permutation
    of `range(1, event_size + 1)`.
    """
    input_order = None
    if isinstance(order, six.string_types):
      if order == "left-to-right":
        input_order = range(event_size)
      elif order == "right-to-left":
        input_order = range(event_size - 1, -1, -1)
    elif np.all(np.sort(order) == np.arange(1, event_size + 1)):
      input_order = list(np.array(order) - 1)
    if input_order is None:
      raise ValueError("Invalid input order: '{}'.".format(order))

    # Test that if we change dimension `i` of the input, then the only changed
    # dimensions `j` of the output are those with larger `input_order[j]`.
    # (We could also do this by examining gradients.)
    diff = []
    mask = []
    for i in range(event_size):
      x = np.random.randn(event_size)
      delta = np.zeros(event_size)
      delta[i] = np.random.randn()
      diff = self.evaluate(f(x + delta) - f(x))
      mask = [[input_order[i] >= input_order[j]] for j in range(event_size)]
      self.assertAllClose(np.zeros_like(diff), mask * diff, atol=0., rtol=1e-6)

  def test_layer_right_to_left_float64(self):
    made = tfb.AutoregressiveNetwork(
        params=3, event_shape=4, activation=None, input_order="right-to-left",
        dtype=tf.float64, hidden_degrees="random", hidden_units=[10, 7, 10])
    self.assertEqual((4, 3), made(np.zeros(4, dtype=np.float64)).shape)
    self.assertEqual("float64", made(np.zeros(4, dtype=np.float64)).dtype)
    # Weight count: (4+1)*10 + (10+1)*7 + (7+1)*10 + (10+1)*(4*3), i.e.
    # kernel+bias per layer with the final layer emitting params*event_size.
    self.assertEqual(5 * 10 + 11 * 7 + 8 * 10 + 11 * 12,
                     self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=4, order="right-to-left")

  def test_layer_callable_activation(self):
    made = tfb.AutoregressiveNetwork(
        params=2, activation=tf.math.exp, input_order="random",
        kernel_regularizer=tfk.regularizers.l2(0.1), bias_initializer="ones",
        hidden_units=[9], hidden_degrees="equal")
    # event_shape is inferred from the input: (3, 5) -> params axis appended.
    self.assertEqual((3, 5, 2), made(np.zeros((3, 5))).shape)
    self.assertEqual(6 * 9 + 10 * 10, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=5, order=made._input_order)

  def test_layer_smaller_hidden_layers_than_input(self):
    made = tfb.AutoregressiveNetwork(
        params=1, event_shape=9, activation="relu", use_bias=False,
        bias_regularizer=tfk.regularizers.l1(0.5), bias_constraint=tf.math.abs,
        input_order="right-to-left", hidden_units=[5, 5])
    self.assertEqual((9, 1), made(np.zeros(9)).shape)
    # use_bias=False: kernels only.
    self.assertEqual(9 * 5 + 5 * 5 + 5 * 9, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=9, order="right-to-left")

  def test_layer_no_hidden_units(self):
    # With no hidden layers, MADE reduces to a single masked output layer.
    made = tfb.AutoregressiveNetwork(
        params=4, event_shape=3, use_bias=False, hidden_degrees="random",
        kernel_constraint="unit_norm")
    self.assertEqual((2, 2, 5, 3, 4), made(np.zeros((2, 2, 5, 3))).shape)
    self.assertEqual(3 * 12, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=3, order="left-to-right")

  def test_layer_v2_kernel_initializer(self):
    init = tf.keras.initializers.GlorotNormal()
    made = tfb.AutoregressiveNetwork(
        params=2, event_shape=4, activation="relu",
        hidden_units=[5, 5], kernel_initializer=init)
    self.assertEqual((4, 2), made(np.zeros(4)).shape)
    self.assertEqual(5 * 5 + 6 * 5 + 6 * 8, self._count_trainable_params(made))
    if not tf.executing_eagerly():
      self.evaluate(
          tf1.initializers.variables(made.trainable_variables))
    self.assertIsAutoregressive(made, event_size=4, order="left-to-right")

  def test_doc_string(self):
    # Generate data.
    n = 2000
    x2 = np.random.randn(n).astype(dtype=np.float32) * 2.
    x1 = np.random.randn(n).astype(dtype=np.float32) + (x2 * x2 / 4.)
    data = np.stack([x1, x2], axis=-1)

    # Density estimation with MADE.
    made = tfb.AutoregressiveNetwork(params=2, hidden_units=[10, 10])
    distribution = tfd.TransformedDistribution(
        distribution=tfd.Sample(tfd.Normal(0., 1.), [2]),
        bijector=tfb.MaskedAutoregressiveFlow(made))

    # Construct and fit model.
    x_ = tfkl.Input(shape=(2,), dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)
    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)
    batch_size = 25
    model.fit(x=data,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual((3, 1, 2), distribution.sample((3, 1)).shape)
    self.assertAllEqual(
        (3,), distribution.log_prob(np.ones((3, 2), dtype=np.float32)).shape)

  def test_doc_string_images_case_1(self):
    # Generate fake images.
    images = np.random.choice([0, 1], size=(100, 8, 8, 3))
    n, width, height, channels = images.shape

    # Reshape images to achieve desired autoregressivity.
    event_shape = [width * height * channels]
    reshaped_images = np.reshape(images, [n, width * height * channels])

    made = tfb.AutoregressiveNetwork(params=1, event_shape=event_shape,
                                     hidden_units=[20, 20], activation="relu")

    # Density estimation with MADE.
    #
    # NOTE: Parameterize an autoregressive distribution over an event_shape of
    # [width * height * channels], with univariate Bernoulli conditional
    # distributions.
    distribution = tfd.Autoregressive(
        lambda x: tfd.Independent(  # pylint: disable=g-long-lambda
            tfd.Bernoulli(logits=tf.unstack(made(x), axis=-1)[0],
                          dtype=tf.float32),
            reinterpreted_batch_ndims=1),
        sample0=tf.zeros(event_shape, dtype=tf.float32))

    # Construct and fit model.
    x_ = tfkl.Input(shape=event_shape, dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)
    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)
    batch_size = 10
    model.fit(x=reshaped_images,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual(event_shape, distribution.sample().shape)
    self.assertAllEqual((n,), distribution.log_prob(reshaped_images).shape)

  def test_doc_string_images_case_2(self):
    # Generate fake images.
    images = np.random.choice([0, 1], size=(100, 8, 8, 3))
    n, width, height, channels = images.shape

    # Reshape images to achieve desired autoregressivity.
    reshaped_images = np.transpose(
        np.reshape(images, [n, width * height, channels]),
        axes=[0, 2, 1])

    made = tfb.AutoregressiveNetwork(params=1, event_shape=[width * height],
                                     hidden_units=[20, 20], activation="relu")

    # Density estimation with MADE.
    #
    # NOTE: Parameterize an autoregressive distribution over an event_shape of
    # [channels, width * height], with univariate Bernoulli conditional
    # distributions.
    distribution = tfd.Autoregressive(
        lambda x: tfd.Independent(  # pylint: disable=g-long-lambda
            tfd.Bernoulli(logits=tf.unstack(made(x), axis=-1)[0],
                          dtype=tf.float32),
            reinterpreted_batch_ndims=2),
        sample0=tf.zeros([channels, width * height], dtype=tf.float32))

    # Construct and fit model.
    x_ = tfkl.Input(shape=(channels, width * height), dtype=tf.float32)
    log_prob_ = distribution.log_prob(x_)
    model = tfk.Model(x_, log_prob_)
    model.compile(optimizer=tf1.train.AdamOptimizer(),
                  loss=lambda _, log_prob: -log_prob)
    batch_size = 10
    model.fit(x=reshaped_images,
              y=np.zeros((n, 0), dtype=np.float32),
              batch_size=batch_size,
              epochs=1,
              steps_per_epoch=1,  # Usually `n // batch_size`.
              shuffle=True,
              verbose=True)

    # Use the fitted distribution.
    self.assertAllEqual((7, channels, width * height),
                        distribution.sample(7).shape)
    self.assertAllEqual((n,), distribution.log_prob(reshaped_images).shape)
@test_util.numpy_disable_test_missing_functionality("Keras")
@test_util.jax_disable_test_missing_functionality("Keras")
@test_util.test_all_tf_execution_regimes
class ConditionalTests(test_util.TestCase):
  """Validation and broadcasting tests for conditional AutoregressiveNetwork."""

  def test_conditional_missing_event_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`event_shape` must be provided when `conditional` is True'):
      tfb.AutoregressiveNetwork(
          params=2, conditional=True, conditional_shape=[4])

  def test_conditional_missing_conditional_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_shape` must be provided when `conditional` is True'):
      tfb.AutoregressiveNetwork(
          params=2, conditional=True, event_shape=[4])

  def test_conditional_incorrect_layers(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_input_layers` must be "first_layers" or "all_layers"'):
      tfb.AutoregressiveNetwork(
          params=2, conditional=True,
          event_shape=[4], conditional_shape=[4],
          conditional_input_layers="non-existent-option")

  def test_conditional_false_with_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional_shape` passed but `conditional` is set to False.'):
      tfb.AutoregressiveNetwork(params=2, conditional_shape=[4])

  def test_conditional_wrong_shape(self):
    with self.assertRaisesRegexp(
        ValueError,
        'Parameter `conditional_shape` must describe a rank-1 shape'):
      tfb.AutoregressiveNetwork(
          params=2, conditional=True, event_shape=[4],
          conditional_shape=[10, 4])

  def test_conditional_missing_tensor(self):
    # NOTE(review): "arguement" intentionally matches the library's
    # (misspelled) error message; do not "fix" the regex here.
    with self.assertRaisesRegexp(
        ValueError,
        '`conditional` must be passed as a named arguement'):
      made = tfb.AutoregressiveNetwork(
          params=2, event_shape=[4], conditional=True,
          conditional_shape=[6])
      made(np.random.normal(0, 1, (1, 4)))

  def test_conditional_broadcasting(self):
    # The conditional input's batch dims must broadcast against the event
    # input's batch dims; output batch shape is the broadcasted shape.
    made = tfb.AutoregressiveNetwork(
        params=2, event_shape=[3], conditional=True,
        conditional_shape=[4])
    input_shapes = [
        [3],
        [1, 3],
        [2, 3],
        [1, 2, 3],
        [2, 1, 3],
        [2, 2, 3]]
    cond_shapes = [
        [4],
        [1, 4],
        [2, 4],
        [1, 2, 4],
        [2, 1, 4],
        [2, 2, 4]]
    for input_shape, cond_shape in itertools.product(input_shapes, cond_shapes):
      made_shape = tf.shape(made(
          tf.ones(input_shape),
          conditional=tf.ones(cond_shape)))
      broadcast_shape = tf.concat(
          [tf.broadcast_dynamic_shape(
              cond_shape[:-1],
              input_shape[:-1]),
           input_shape[-1:]],
          axis=0)
      self.assertAllEqual(
          tf.concat([broadcast_shape, [2]], axis=0),
          made_shape)
# Remove the abstract base suite so the test runner only collects the
# concrete subclasses defined above.
del _MaskedAutoregressiveFlowTest

if __name__ == "__main__":
  tf.test.main()
|
#!/usr/bin/env python
"""
Parameter Variation with Femag
"""
import os
from femagtools.multiproc import Engine
# instead you can use one of the following
#
# from femagtools.condor import Engine
# from femagtools.amazon import Engine
# from femagtools.google import Engine
#
import femagtools.grid
import logging
import numpy as np
# Parameter-variation study definition: objective values to collect from each
# femag run, and the decision-variable grid (4 beta steps x 3 current steps).
parvardef = {
    "objective_vars": [
        {"name": "dqPar.torque[-1]",
         "label": "Load Torque/Nm"},
        {"name": "torque[-1].ripple",
         "label": "Torque Ripple/Nm"},
        {"name": "machine.plfe[-1]",
         "label": "Iron Losses/W"}
    ],
    "population_size": 25,
    "decision_vars": [
        {"steps": 4, "bounds": [-50, 0],
         "name": "angl_i_up", "label": "Beta"},
        {"steps": 3, "bounds": [100, 200],
         "name": "current", "label": "Current/A"}
    ]
}
# Base simulation settings for each femag run; `angl_i_up` and `current` are
# overridden per grid point by the decision variables above.
operatingConditions = {
    "num_move_steps": 49,
    "angl_i_up": 0.0,
    "calculationMode": "pm_sym_fast",
    "wind_temp": 60.0,
    "magn_temp": 60.0,
    "current": 250.0,
    "eval_force": 0,
    "skew_angle": 0.0,
    "num_par_wdgs": 1,
    "num_skew_steps": 0,
    "calc_fe_loss": 1,
    "speed": 50.0,
    "optim_i_up": 0
}
# Permanent-magnet material catalog (referenced by name from the machine's
# magnet section).  Units follow femagtools conventions (SI: T, A/m, m).
magnetMat = [{
    "name": "M395",
    "remanenc": 1.17,
    "temcoefbr": -0.001,
    "spmaweight": 7.5,
    "magntemp": 20.0,
    "temcoefhc": -0.001,
    "hcb": 810000.4,
    "relperm": 1.05,
    "magncond": 833333,
    "magnwidth": 15.0e-3,
    "magnlength": 100.0e-3,
    "hc_min": 760000.0}
]

# Directory containing the magnetizing-curve (mcv) files.
magnetizingCurve = "./magnetcurves"
# Machine geometry and winding definition (dimensions in meters).
machine = {
    "name": "PM 270 L8",
    "desc": "PM Motor 270mm 8 poles VMAGN",
    "poles": 8,
    "outer_diam": 0.26924,
    "bore_diam": 0.16192,
    "inner_diam": 0.11064,
    "airgap": 0.00075,
    "lfe": 0.08356,

    "stator": {
        "num_slots": 48,
        "num_slots_gen": 12,
        "mcvkey_yoke": "M330-50A",
        "nodedist": 4.0,
        "statorRotor3": {
            "slot_height": 0.0335,
            "slot_h1": 0.001,
            "slot_h2": 0.0,
            "slot_width": 0.00193,
            "slot_r1": 0.0001,
            "slot_r2": 0.00282,
            "wedge_width1": 0.00295,
            "wedge_width2": 0.0,
            "middle_line": 0.0,
            "tooth_width": 0.0,
            "slot_top_sh": 0.0}
    },

    "magnet": {
        "nodedist": 1.0,
        "material": "M395",
        "mcvkey_yoke": "M330-50A",
        "magnetIronV": {
            "magn_angle": 145.0,
            "magn_height": 0.00648,
            "magn_width": 0.018,
            "condshaft_r": 0.05532,
            "magn_num": 1.0,
            "air_triangle": 1,
            "iron_hs": 0.0001,
            "gap_ma_iron": 0.0002,
            "iron_height": 0.00261,
            "magn_rem": 1.2,
            "iron_shape": 0.0802
        }
    },

    "windings": {
        "num_phases": 3,
        "num_layers": 1,
        "num_wires": 9,
        "coil_span": 6.0,
        "cufilfact": 0.4,
        "culength": 1.4,
        "slot_indul": 0.5e-3
    }
}
# Configure logging at module scope rather than inside the __main__ guard:
# multiproc.Engine spawns worker processes that re-import this module
# (always the case on Windows, which uses the "spawn" start method), and
# those workers would otherwise run with unconfigured logging — observed
# as spurious femag aborts.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(message)s')

if __name__ == '__main__':
    engine = Engine()

    # All femag input/output files go to ~/parvar.
    userdir = os.path.expanduser('~')
    workdir = os.path.join(userdir, 'parvar')
    try:
        os.makedirs(workdir)
    except OSError:
        pass  # directory already exists

    parvar = femagtools.grid.Grid(workdir,
                                  magnetizingCurves=magnetizingCurve,
                                  magnets=magnetMat)

    # Run the grid study: one femag calculation per decision-variable point.
    results = parvar(parvardef, machine, operatingConditions, engine)

    x = femagtools.grid.create_parameter_range(results['x'])
    f = np.reshape(results['f'], (np.shape(results['f'])[0], np.shape(x)[0])).T

    # print header
    print(' '.join(['{:15}'.format(s)
                    for s in [d['label']
                              for d in parvardef['decision_vars']] +
                    [o['label']
                     for o in parvardef['objective_vars']]]))
    print()
    # print values in table format
    for l in np.hstack((x, f)):
        print(' '.join(['{:15.2f}'.format(y) for y in l]))

    # create scatter plot
    import matplotlib.pyplot as pl
    import mpl_toolkits.mplot3d as mpl  # noqa: F401 -- registers '3d' projection
    fig = pl.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(x[:, 0], x[:, 1], np.array(f[:, 0]))
    ax.set_xlabel(parvardef['decision_vars'][0]['label'])
    ax.set_ylabel(parvardef['decision_vars'][1]['label'])
    ax.set_zlabel(parvardef['objective_vars'][0]['label'])
    pl.savefig('parvar.png')
Fixed spurious femag aborts on Windows (logging is now configured at module level, outside the __main__ guard, so spawned worker processes are covered).
#!/usr/bin/env python
"""
Parameter Variation with Femag
"""
import os
from femagtools.multiproc import Engine
# instead you can use one of the following
#
# from femagtools.condor import Engine
# from femagtools.amazon import Engine
# from femagtools.google import Engine
#
import femagtools.grid
import logging
import numpy as np
# Parameter-variation study definition: objective values to collect from each
# femag run, and the decision-variable grid (4 beta steps x 3 current steps).
parvardef = {
    "objective_vars": [
        {"name": "dqPar.torque[-1]",
         "label": "Load Torque/Nm"},
        {"name": "torque[-1].ripple",
         "label": "Torque Ripple/Nm"},
        {"name": "machine.plfe[-1]",
         "label": "Iron Losses/W"}
    ],
    "population_size": 25,
    "decision_vars": [
        {"steps": 4, "bounds": [-50, 0],
         "name": "angl_i_up", "label": "Beta"},
        {"steps": 3, "bounds": [100, 200],
         "name": "current", "label": "Current/A"}
    ]
}
# Base simulation settings for each femag run; `angl_i_up` and `current` are
# overridden per grid point by the decision variables above.
operatingConditions = {
    "num_move_steps": 49,
    "angl_i_up": 0.0,
    "calculationMode": "pm_sym_fast",
    "wind_temp": 60.0,
    "magn_temp": 60.0,
    "current": 250.0,
    "eval_force": 0,
    "skew_angle": 0.0,
    "num_par_wdgs": 1,
    "num_skew_steps": 0,
    "calc_fe_loss": 1,
    "speed": 50.0,
    "optim_i_up": 0
}
# Permanent-magnet material catalog (referenced by name from the machine's
# magnet section).  Units follow femagtools conventions (SI: T, A/m, m).
magnetMat = [{
    "name": "M395",
    "remanenc": 1.17,
    "temcoefbr": -0.001,
    "spmaweight": 7.5,
    "magntemp": 20.0,
    "temcoefhc": -0.001,
    "hcb": 810000.4,
    "relperm": 1.05,
    "magncond": 833333,
    "magnwidth": 15.0e-3,
    "magnlength": 100.0e-3,
    "hc_min": 760000.0}
]

# Directory containing the magnetizing-curve (mcv) files.
magnetizingCurve = "./magnetcurves"
# Machine geometry and winding definition (dimensions in meters).
machine = {
    "name": "PM 270 L8",
    "desc": "PM Motor 270mm 8 poles VMAGN",
    "poles": 8,
    "outer_diam": 0.26924,
    "bore_diam": 0.16192,
    "inner_diam": 0.11064,
    "airgap": 0.00075,
    "lfe": 0.08356,

    "stator": {
        "num_slots": 48,
        "num_slots_gen": 12,
        "mcvkey_yoke": "M330-50A",
        "nodedist": 4.0,
        "statorRotor3": {
            "slot_height": 0.0335,
            "slot_h1": 0.001,
            "slot_h2": 0.0,
            "slot_width": 0.00193,
            "slot_r1": 0.0001,
            "slot_r2": 0.00282,
            "wedge_width1": 0.00295,
            "wedge_width2": 0.0,
            "middle_line": 0.0,
            "tooth_width": 0.0,
            "slot_top_sh": 0.0}
    },

    "magnet": {
        "nodedist": 1.0,
        "material": "M395",
        "mcvkey_yoke": "M330-50A",
        "magnetIronV": {
            "magn_angle": 145.0,
            "magn_height": 0.00648,
            "magn_width": 0.018,
            "condshaft_r": 0.05532,
            "magn_num": 1.0,
            "air_triangle": 1,
            "iron_hs": 0.0001,
            "gap_ma_iron": 0.0002,
            "iron_height": 0.00261,
            "magn_rem": 1.2,
            "iron_shape": 0.0802
        }
    },

    "windings": {
        "num_phases": 3,
        "num_layers": 1,
        "num_wires": 9,
        "coil_span": 6.0,
        "cufilfact": 0.4,
        "culength": 1.4,
        "slot_indul": 0.5e-3
    }
}
# Logging is deliberately configured at module scope (not inside the
# __main__ guard) so that worker processes which re-import this module
# (e.g. Windows "spawn" start method) also get configured logging.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(message)s')

if __name__ == '__main__':
    engine = Engine()

    # All femag input/output files go to ~/parvar.
    userdir = os.path.expanduser('~')
    workdir = os.path.join(userdir, 'parvar')
    try:
        os.makedirs(workdir)
    except OSError:
        # directory already exists
        pass

    parvar = femagtools.grid.Grid(workdir,
                                  magnetizingCurves=magnetizingCurve,
                                  magnets=magnetMat)

    # Run the grid study: one femag calculation per decision-variable point.
    results = parvar(parvardef, machine, operatingConditions, engine)

    x = femagtools.grid.create_parameter_range(results['x'])
    f = np.reshape(results['f'], (np.shape(results['f'])[0], np.shape(x)[0])).T

    # print header
    print(' '.join(['{:15}'.format(s)
                    for s in [d['label']
                              for d in parvardef['decision_vars']] +
                    [o['label']
                     for o in parvardef['objective_vars']]]))
    print()
    # print values in table format
    for l in np.hstack((x, f)):
        print(' '.join(['{:15.2f}'.format(y) for y in l]))

    # create scatter plot
    #
    import matplotlib.pyplot as pl
    # importing mpl_toolkits.mplot3d registers the '3d' projection
    import mpl_toolkits.mplot3d as mpl
    fig = pl.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(x[:, 0], x[:, 1], np.array(f[:, 0]))
    ax.set_xlabel(parvardef['decision_vars'][0]['label'])
    ax.set_ylabel(parvardef['decision_vars'][1]['label'])
    ax.set_zlabel(parvardef['objective_vars'][0]['label'])
    pl.savefig('parvar.png')
|
"""
Acceptance tests for Studio's Settings Details pages
"""
from datetime import datetime, timedelta
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.config import ConfigModelFixture
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.tests.helpers import (
element_has_text,
generate_course_key,
is_option_value_selected,
select_option_by_value
)
from common.test.acceptance.tests.studio.base_studio_test import StudioCourseTest
@attr(shard=4)
class StudioSettingsDetailsTest(StudioCourseTest):
    """Base class for settings and details page tests."""

    def setUp(self, is_staff=True):
        """Install the course fixture and open its Schedule & Details page."""
        super(StudioSettingsDetailsTest, self).setUp(is_staff=is_staff)
        self.settings_detail = SettingsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )

        # Before every test, make sure to visit the page first
        self.settings_detail.visit()
@attr(shard=4)
class SettingsMilestonesTest(StudioSettingsDetailsTest):
    """
    Tests for milestones feature in Studio's settings tab
    """
    def test_page_has_prerequisite_field(self):
        """
        Test to make sure page has pre-requisite course field if milestones app is enabled.
        """
        self.assertTrue(self.settings_detail.pre_requisite_course_options)

    def test_prerequisite_course_save_successfully(self):
        """
        Scenario: Selecting course from Pre-Requisite course drop down save the selected course as pre-requisite
        course.
        Given that I am on the Schedule & Details page on studio
        When I select an item in pre-requisite course drop down and click Save Changes button
        Then My selected item should be saved as pre-requisite course
        And My selected item should be selected after refreshing the page.'
        """
        course_number = self.unique_id
        CourseFixture(
            org='test_org',
            number=course_number,
            run='test_run',
            display_name='Test Course' + course_number
        ).install()

        pre_requisite_course_key = generate_course_key(
            org='test_org',
            number=course_number,
            run='test_run'
        )
        pre_requisite_course_id = unicode(pre_requisite_course_key)

        # Refresh the page to load the new course fixture and populate the prerequisite course dropdown
        # Then select the prerequisite course and save the changes
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )

        # Refresh the page again and confirm the prerequisite course selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        self.assertTrue(is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        ))

        # Set the prerequisite course back to None and save the changes
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=''
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )

        # Refresh the page again to confirm the None selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        self.assertTrue(is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=''
        ))

        # Re-pick the prerequisite course and confirm no errors are thrown (covers a discovered bug)
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )

        # Refresh the page again to confirm the prerequisite course selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        dropdown_status = is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.assertTrue(dropdown_status)

    def test_page_has_enable_entrance_exam_field(self):
        """
        Test to make sure page has 'enable entrance exam' field.
        """
        self.assertTrue(self.settings_detail.entrance_exam_field)

    def test_enable_entrance_exam_for_course(self):
        """
        Test that entrance exam should be created after checking the 'enable entrance exam' checkbox.
        And also that the entrance exam is destroyed after deselecting the checkbox.
        """
        self.settings_detail.require_entrance_exam(required=True)
        self.settings_detail.save_changes()

        # getting the course outline page.
        course_outline_page = CourseOutlinePage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
        course_outline_page.visit()

        # title with text 'Entrance Exam' should be present on page.
        self.assertTrue(element_has_text(
            page=course_outline_page,
            css_selector='span.section-title',
            text='Entrance Exam'
        ))

        # Delete the currently created entrance exam.
        self.settings_detail.visit()
        self.settings_detail.require_entrance_exam(required=False)
        self.settings_detail.save_changes()

        course_outline_page.visit()
        self.assertFalse(element_has_text(
            page=course_outline_page,
            css_selector='span.section-title',
            text='Entrance Exam'
        ))

    def test_entrance_exam_has_unit_button(self):
        """
        Test that entrance exam should be created after checking the 'enable entrance exam' checkbox.
        And user has option to add units only instead of any Subsection.
        """
        self.settings_detail.require_entrance_exam(required=True)
        self.settings_detail.save_changes()

        # getting the course outline page.
        course_outline_page = CourseOutlinePage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
        course_outline_page.visit()
        course_outline_page.wait_for_ajax()

        # button with text 'New Unit' should be present.
        self.assertTrue(element_has_text(
            page=course_outline_page,
            css_selector='.add-item a.button-new',
            text='New Unit'
        ))

        # button with text 'New Subsection' should not be present.
        self.assertFalse(element_has_text(
            page=course_outline_page,
            css_selector='.add-item a.button-new',
            text='New Subsection'
        ))
@attr(shard=4)
class CoursePacingTest(StudioSettingsDetailsTest):
    """Tests for setting a course to self-paced."""

    def populate_course_fixture(self, __):
        # Self-paced courses are gated behind a config model; enable it first.
        ConfigModelFixture('/config/self_paced', {'enabled': True}).install()
        # Set the course start date to tomorrow in order to allow setting pacing
        self.course_fixture.add_course_details({'start_date': datetime.now() + timedelta(days=1)})

    def test_default_instructor_paced(self):
        """
        Test that the 'instructor paced' button is checked by default.
        """
        self.assertEqual(self.settings_detail.course_pacing, 'Instructor-Paced')

    def test_self_paced(self):
        """
        Test that the 'self-paced' button is checked for a self-paced
        course.
        """
        self.course_fixture.add_course_details({
            'self_paced': True
        })
        self.course_fixture.configure_course()
        # Reload so the settings page reflects the updated fixture.
        self.settings_detail.refresh_page()
        self.assertEqual(self.settings_detail.course_pacing, 'Self-Paced')

    def test_set_self_paced(self):
        """
        Test that the self-paced option is persisted correctly.
        """
        self.settings_detail.course_pacing = 'Self-Paced'
        self.settings_detail.save_changes()
        self.settings_detail.refresh_page()
        self.assertEqual(self.settings_detail.course_pacing, 'Self-Paced')

    def test_toggle_pacing_after_course_start(self):
        """
        Test that course authors cannot toggle the pacing of their course
        while the course is running.
        """
        # Move the start date to "now" so the course counts as running.
        self.course_fixture.add_course_details({'start_date': datetime.now()})
        self.course_fixture.configure_course()
        self.settings_detail.refresh_page()
        self.assertTrue(self.settings_detail.course_pacing_disabled())
        self.assertIn('Course pacing cannot be changed', self.settings_detail.course_pacing_disabled_text)
Removed flaky test
"""
Acceptance tests for Studio's Settings Details pages
"""
from datetime import datetime, timedelta
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.config import ConfigModelFixture
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.tests.helpers import (
element_has_text,
generate_course_key,
is_option_value_selected,
select_option_by_value
)
from common.test.acceptance.tests.studio.base_studio_test import StudioCourseTest
@attr(shard=4)
class StudioSettingsDetailsTest(StudioCourseTest):
    """Base class for settings and details page tests."""

    def setUp(self, is_staff=True):
        # Log in as staff by default so the settings page is editable.
        super(StudioSettingsDetailsTest, self).setUp(is_staff=is_staff)
        # Page object for the Schedule & Details settings page of the fixture course.
        self.settings_detail = SettingsPage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        # Before every test, make sure to visit the page first
        self.settings_detail.visit()
@attr(shard=4)
class SettingsMilestonesTest(StudioSettingsDetailsTest):
    """
    Tests for milestones feature in Studio's settings tab
    """

    def test_page_has_prerequisite_field(self):
        """
        Test to make sure page has pre-requisite course field if milestones app is enabled.
        """
        self.assertTrue(self.settings_detail.pre_requisite_course_options)

    def test_prerequisite_course_save_successfully(self):
        """
        Scenario: Selecting course from Pre-Requisite course drop down save the selected course as pre-requisite
        course.
        Given that I am on the Schedule & Details page on studio
        When I select an item in pre-requisite course drop down and click Save Changes button
        Then My selected item should be saved as pre-requisite course
        And My selected item should be selected after refreshing the page.'
        """
        course_number = self.unique_id
        # Create a second course that can act as the prerequisite.
        CourseFixture(
            org='test_org',
            number=course_number,
            run='test_run',
            display_name='Test Course' + course_number
        ).install()
        pre_requisite_course_key = generate_course_key(
            org='test_org',
            number=course_number,
            run='test_run'
        )
        # NOTE(review): ``unicode`` implies this suite still targets Python 2 — confirm.
        pre_requisite_course_id = unicode(pre_requisite_course_key)
        # Refresh the page to load the new course fixture and populate the prerequisite course dropdown
        # Then select the prerequisite course and save the changes
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )
        # Refresh the page again and confirm the prerequisite course selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        self.assertTrue(is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        ))
        # Set the prerequisite course back to None and save the changes
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=''
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )
        # Refresh the page again to confirm the None selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        self.assertTrue(is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=''
        ))
        # Re-pick the prerequisite course and confirm no errors are thrown (covers a discovered bug)
        select_option_by_value(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.settings_detail.save_changes()
        self.assertEqual(
            'Your changes have been saved.',
            self.settings_detail.alert_confirmation_title.text
        )
        # Refresh the page again to confirm the prerequisite course selection is properly reflected
        self.settings_detail.refresh_page()
        self.settings_detail.wait_for_prerequisite_course_options()
        dropdown_status = is_option_value_selected(
            browser_query=self.settings_detail.pre_requisite_course_options,
            value=pre_requisite_course_id
        )
        self.assertTrue(dropdown_status)

    def test_page_has_enable_entrance_exam_field(self):
        """
        Test to make sure page has 'enable entrance exam' field.
        """
        self.assertTrue(self.settings_detail.entrance_exam_field)

    def test_entrance_exam_has_unit_button(self):
        """
        Test that entrance exam should be created after checking the 'enable entrance exam' checkbox.
        And user has option to add units only instead of any Subsection.
        """
        self.settings_detail.require_entrance_exam(required=True)
        self.settings_detail.save_changes()
        # getting the course outline page.
        course_outline_page = CourseOutlinePage(
            self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
        )
        course_outline_page.visit()
        course_outline_page.wait_for_ajax()
        # button with text 'New Unit' should be present.
        self.assertTrue(element_has_text(
            page=course_outline_page,
            css_selector='.add-item a.button-new',
            text='New Unit'
        ))
        # button with text 'New Subsection' should not be present.
        self.assertFalse(element_has_text(
            page=course_outline_page,
            css_selector='.add-item a.button-new',
            text='New Subsection'
        ))
@attr(shard=4)
class CoursePacingTest(StudioSettingsDetailsTest):
    """Tests for setting a course to self-paced."""

    def populate_course_fixture(self, __):
        # Self-paced courses are gated behind a config model; enable it first.
        ConfigModelFixture('/config/self_paced', {'enabled': True}).install()
        # Set the course start date to tomorrow in order to allow setting pacing
        self.course_fixture.add_course_details({'start_date': datetime.now() + timedelta(days=1)})

    def test_default_instructor_paced(self):
        """
        Test that the 'instructor paced' button is checked by default.
        """
        self.assertEqual(self.settings_detail.course_pacing, 'Instructor-Paced')

    def test_self_paced(self):
        """
        Test that the 'self-paced' button is checked for a self-paced
        course.
        """
        self.course_fixture.add_course_details({
            'self_paced': True
        })
        self.course_fixture.configure_course()
        # Reload so the settings page reflects the updated fixture.
        self.settings_detail.refresh_page()
        self.assertEqual(self.settings_detail.course_pacing, 'Self-Paced')

    def test_set_self_paced(self):
        """
        Test that the self-paced option is persisted correctly.
        """
        self.settings_detail.course_pacing = 'Self-Paced'
        self.settings_detail.save_changes()
        self.settings_detail.refresh_page()
        self.assertEqual(self.settings_detail.course_pacing, 'Self-Paced')

    def test_toggle_pacing_after_course_start(self):
        """
        Test that course authors cannot toggle the pacing of their course
        while the course is running.
        """
        # Move the start date to "now" so the course counts as running.
        self.course_fixture.add_course_details({'start_date': datetime.now()})
        self.course_fixture.configure_course()
        self.settings_detail.refresh_page()
        self.assertTrue(self.settings_detail.course_pacing_disabled())
        self.assertIn('Course pacing cannot be changed', self.settings_detail.course_pacing_disabled_text)
|
import datetime
import functools
import hashlib
import inspect
import sys
import time
import uuid
import calendar
import unittest
import platform
import warnings
import types
import numbers
import inspect
from dateutil import parser
from dateutil.tz import tzlocal
try:
from maya import MayaDT
except ImportError:
MayaDT = None
# Keep references to the genuine time/date callables and classes so they can be
# restored after a freeze and consulted while the modules are patched.
real_time = time.time
real_localtime = time.localtime
real_gmtime = time.gmtime
real_strftime = time.strftime
# NOTE(review): ``time.clock`` was removed in Python 3.8 — this module appears
# to assume an older interpreter; confirm the supported Python range.
real_clock = time.clock
real_date = datetime.date
real_datetime = datetime.datetime
real_date_objects = [real_time, real_localtime, real_gmtime, real_strftime, real_date, real_datetime]
# Identity set used to recognise aliases of the real objects in other modules.
_real_time_object_ids = set(id(obj) for obj in real_date_objects)
# Locate uuid's internal time-based generator so v1 UUIDs can be neutralised
# while time is frozen; the attribute name differs between Python versions.
try:
    real_uuid_generate_time = uuid._uuid_generate_time
    uuid_generate_time_attr = '_uuid_generate_time'
except AttributeError:
    # Presumably newer CPython moved the helper behind _load_system_functions().
    uuid._load_system_functions()
    real_uuid_generate_time = uuid._generate_time_safe
    uuid_generate_time_attr = '_generate_time_safe'
except ImportError:
    real_uuid_generate_time = None
    uuid_generate_time_attr = None
try:
    real_uuid_create = uuid._UuidCreate
except (AttributeError, ImportError):
    real_uuid_create = None
# copy_reg (Python 2) vs copyreg (Python 3).
try:
    import copy_reg as copyreg
except ImportError:
    import copyreg
# Coroutine support is only wired up where inspect.iscoroutinefunction exists.
try:
    iscoroutinefunction = inspect.iscoroutinefunction
    from freezegun._async import wrap_coroutine
except AttributeError:
    iscoroutinefunction = lambda x: False

    def wrap_coroutine(*args):
        raise NotImplementedError()
# keep a cache of module attributes otherwise freezegun will need to analyze too many modules all the time
_GLOBAL_MODULES_CACHE = {}
def _get_module_attributes(module):
result = []
try:
module_attributes = dir(module)
except TypeError:
return result
for attribute_name in module_attributes:
try:
attribute_value = getattr(module, attribute_name)
except (ImportError, AttributeError, TypeError):
# For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
continue
else:
result.append((attribute_name, attribute_value))
return result
def _setup_module_cache(module):
    """Record which attributes of *module* alias a real date/time object, keyed by module name."""
    date_attrs = [
        (name, value)
        for name, value in _get_module_attributes(module)
        if id(value) in _real_time_object_ids
    ]
    _GLOBAL_MODULES_CACHE[module.__name__] = (_get_module_attributes_hash(module), date_attrs)
def _get_module_attributes_hash(module):
try:
module_dir = dir(module)
except TypeError:
module_dir = []
return '{0}-{1}'.format(id(module), hash(frozenset(module_dir)))
def _get_cached_module_attributes(module):
    """Return the cached date/time attributes of *module*, refreshing the cache on a miss."""
    cached_hash, cached_attrs = _GLOBAL_MODULES_CACHE.get(module.__name__, ('0', []))
    if cached_hash == _get_module_attributes_hash(module):
        # Cache hit: the module's attribute list has not changed.
        return cached_attrs
    # Cache miss: recompute, store, and return the fresh entry.
    _setup_module_cache(module)
    return _GLOBAL_MODULES_CACHE[module.__name__][1]
# Stolen from six
def with_metaclass(meta, *bases):
    """Create a dummy base class named ``NewBase`` whose metaclass is *meta*."""
    namespace = {}
    return meta("NewBase", bases, namespace)
# freeze_time(tick=True) relies on CPython-specific behaviour, so detect the
# implementation up front.
_is_cpython = (
    hasattr(platform, 'python_implementation') and
    platform.python_implementation().lower() == "cpython"
)
class BaseFakeTime(object):
    """Shared stack-inspection logic for the fake ``time`` callables.

    Subclasses replace functions in the ``time`` module; ``_should_use_real_time``
    lets calls that originate from an ignored module fall through to the real
    implementation.
    """

    # How many call-stack frames to examine when checking the ignore list.
    call_stack_inspection_limit = 5

    def _should_use_real_time(self, call_stack, modules_to_ignore):
        """Return True when a caller within the inspection limit belongs to one
        of *modules_to_ignore* (a tuple of module-name prefixes)."""
        # BUG FIX: this previously read ``self.stack_inspection_limit`` — a
        # leftover from renaming the attribute to ``call_stack_inspection_limit``
        # — which raised AttributeError on every call.
        if not self.call_stack_inspection_limit:
            return False
        if not modules_to_ignore:
            return False
        stack_limit = min(len(call_stack), self.call_stack_inspection_limit)
        # Start at 1 to ignore the current frame (index 0)
        for i in range(1, stack_limit):
            mod = inspect.getmodule(call_stack[i][0])
            # getmodule() can return None (e.g. frames from exec'd code);
            # treat those as not-ignored instead of crashing.
            if mod is not None and mod.__name__.startswith(modules_to_ignore):
                return True
        return False
class FakeTime(BaseFakeTime):
    """Callable replacement for ``time.time`` that reports the frozen time."""

    def __init__(self, time_to_freeze, previous_time_function, ignore=None):
        self.time_to_freeze = time_to_freeze
        self.previous_time_function = previous_time_function
        self.ignore = ignore

    def __call__(self):
        # Callers from ignored modules keep seeing the real clock.
        if self._should_use_real_time(inspect.stack(), self.ignore):
            return real_time()
        frozen = self.time_to_freeze()
        # Seconds since the epoch (UTC) for the frozen datetime.
        return calendar.timegm(frozen.timetuple()) + frozen.microsecond / 1000000.0
class FakeLocalTime(BaseFakeTime):
    """Callable replacement for ``time.localtime`` honouring the frozen time."""

    def __init__(self, time_to_freeze, previous_localtime_function=None, ignore=None):
        self.time_to_freeze = time_to_freeze
        self.previous_localtime_function = previous_localtime_function
        self.ignore = ignore

    def __call__(self, t=None):
        # An explicit timestamp is always converted with the real implementation.
        if t is not None:
            return real_localtime(t)
        if self._should_use_real_time(inspect.stack(), self.ignore):
            return real_localtime()
        # Shift the frozen time into local time using the real UTC offset.
        local = self.time_to_freeze() - datetime.timedelta(seconds=time.timezone)
        return local.timetuple()
class FakeGMTTime(BaseFakeTime):
    """Callable replacement for ``time.gmtime`` honouring the frozen time."""

    def __init__(self, time_to_freeze, previous_gmtime_function, ignore=None):
        self.time_to_freeze = time_to_freeze
        self.previous_gmtime_function = previous_gmtime_function
        self.ignore = ignore

    def __call__(self, t=None):
        # An explicit timestamp is always converted with the real implementation.
        if t is not None:
            return real_gmtime(t)
        if self._should_use_real_time(inspect.stack(), self.ignore):
            return real_gmtime()
        return self.time_to_freeze().timetuple()
class FakeStrfTime(BaseFakeTime):
    """Callable replacement for ``time.strftime`` that defaults to the frozen time."""

    def __init__(self, time_to_freeze, previous_strftime_function, ignore=None):
        self.time_to_freeze = time_to_freeze
        self.previous_strftime_function = previous_strftime_function
        self.ignore = ignore

    def __call__(self, format, time_to_format=None):
        # With no explicit time, format the frozen local time instead — unless
        # the caller is in the ignore list.
        if time_to_format is None and not self._should_use_real_time(inspect.stack(), self.ignore):
            time_to_format = FakeLocalTime(self.time_to_freeze)()
        return real_strftime(format, time_to_format)
class FakeClock(BaseFakeTime):
    """Callable replacement for ``time.clock`` based on the stack of frozen times."""

    # Shared stack of frozen-time factories, one entry per active freeze.
    times_to_freeze = []

    def __init__(self, previous_clock_function, tick=False, ignore=None):
        self.previous_clock_function = previous_clock_function
        self.tick = tick
        self.ignore = ignore

    def __call__(self, *args, **kwargs):
        if self._should_use_real_time(inspect.stack(), self.ignore):
            return self.previous_clock_function()
        if len(self.times_to_freeze) == 1:
            # A single freeze: the clock has not advanced (unless ticking).
            return self.previous_clock_function() if self.tick else 0.0
        # Nested freezes: report the span between the outermost and innermost times.
        elapsed = self.times_to_freeze[-1]() - self.times_to_freeze[0]()
        total_seconds = elapsed.total_seconds()
        if self.tick:
            total_seconds += self.previous_clock_function()
        return total_seconds
class FakeDateMeta(type):
    # Metaclass hook so isinstance(<real datetime.date instance>, FakeDate)
    # stays True while the datetime module is patched.
    @classmethod
    def __instancecheck__(self, obj):
        return isinstance(obj, real_date)
def datetime_to_fakedatetime(datetime):
    """Copy a real ``datetime.datetime`` into an equivalent ``FakeDatetime``."""
    return FakeDatetime(
        datetime.year, datetime.month, datetime.day,
        datetime.hour, datetime.minute, datetime.second,
        datetime.microsecond, datetime.tzinfo,
    )
def date_to_fakedate(date):
    """Copy a real ``datetime.date`` into an equivalent ``FakeDate``."""
    return FakeDate(date.year, date.month, date.day)
class FakeDate(with_metaclass(FakeDateMeta, real_date)):
    # Stacks shared by all instances; the innermost active freeze is at index -1.
    dates_to_freeze = []
    tz_offsets = []

    def __new__(cls, *args, **kwargs):
        return real_date.__new__(cls, *args, **kwargs)

    def __add__(self, other):
        # Delegate to the real implementation, then re-wrap the result.
        result = real_date.__add__(self, other)
        if result is NotImplemented:
            return result
        return date_to_fakedate(result)

    def __sub__(self, other):
        result = real_date.__sub__(self, other)
        if result is NotImplemented:
            return result
        if isinstance(result, real_date):
            return date_to_fakedate(result)
        else:
            # date - date yields a timedelta; pass it through untouched.
            return result

    @classmethod
    def today(cls):
        result = cls._date_to_freeze() + cls._tz_offset()
        return date_to_fakedate(result)

    @classmethod
    def _date_to_freeze(cls):
        # Innermost frozen-time factory decides what "today" is.
        return cls.dates_to_freeze[-1]()

    @classmethod
    def _tz_offset(cls):
        return cls.tz_offsets[-1]
# Mirror the real date's bounds on the fake class.
FakeDate.min = date_to_fakedate(real_date.min)
FakeDate.max = date_to_fakedate(real_date.max)
class FakeDatetimeMeta(FakeDateMeta):
    # Same trick as FakeDateMeta, for datetime instances.
    @classmethod
    def __instancecheck__(self, obj):
        return isinstance(obj, real_datetime)
class FakeDatetime(with_metaclass(FakeDatetimeMeta, real_datetime, FakeDate)):
    # Stacks shared by all instances; the innermost active freeze is at index -1.
    times_to_freeze = []
    tz_offsets = []

    def __new__(cls, *args, **kwargs):
        return real_datetime.__new__(cls, *args, **kwargs)

    def __add__(self, other):
        # Delegate to the real implementation, then re-wrap the result.
        result = real_datetime.__add__(self, other)
        if result is NotImplemented:
            return result
        return datetime_to_fakedatetime(result)

    def __sub__(self, other):
        result = real_datetime.__sub__(self, other)
        if result is NotImplemented:
            return result
        if isinstance(result, real_datetime):
            return datetime_to_fakedatetime(result)
        else:
            # datetime - datetime yields a timedelta; pass it through untouched.
            return result

    def astimezone(self, tz=None):
        if tz is None:
            tz = tzlocal()
        return datetime_to_fakedatetime(real_datetime.astimezone(self, tz))

    @classmethod
    def now(cls, tz=None):
        # Falls back to the real clock when no freeze is active.
        now = cls._time_to_freeze() or real_datetime.now()
        if tz:
            result = tz.fromutc(now.replace(tzinfo=tz)) + cls._tz_offset()
        else:
            result = now + cls._tz_offset()
        return datetime_to_fakedatetime(result)

    def date(self):
        return date_to_fakedate(self)

    @classmethod
    def today(cls):
        return cls.now(tz=None)

    @classmethod
    def utcnow(cls):
        result = cls._time_to_freeze() or real_datetime.utcnow()
        return datetime_to_fakedatetime(result)

    @classmethod
    def _time_to_freeze(cls):
        # Returns None when no freeze is active (implicit fall-through above).
        if cls.times_to_freeze:
            return cls.times_to_freeze[-1]()

    @classmethod
    def _tz_offset(cls):
        return cls.tz_offsets[-1]
# Mirror the real datetime's bounds on the fake class.
FakeDatetime.min = datetime_to_fakedatetime(real_datetime.min)
FakeDatetime.max = datetime_to_fakedatetime(real_datetime.max)
def convert_to_timezone_naive(time_to_freeze):
    """Convert a possibly timezone-aware datetime to the equivalent naive UTC datetime."""
    if not time_to_freeze.tzinfo:
        # Already naive: nothing to do.
        return time_to_freeze
    # Shift to UTC, then drop the tzinfo to make the result naive.
    return (time_to_freeze - time_to_freeze.utcoffset()).replace(tzinfo=None)
def pickle_fake_date(datetime_):
    """Pickle support: rebuild a FakeDate from its year/month/day."""
    args = (datetime_.year, datetime_.month, datetime_.day)
    return FakeDate, args
def pickle_fake_datetime(datetime_):
    """Pickle support: rebuild a FakeDatetime from all of its components."""
    args = (
        datetime_.year, datetime_.month, datetime_.day,
        datetime_.hour, datetime_.minute, datetime_.second,
        datetime_.microsecond, datetime_.tzinfo,
    )
    return FakeDatetime, args
def _parse_time_to_freeze(time_to_freeze_str):
    """Parses all the possible inputs for freeze_time
    :returns: a naive ``datetime.datetime`` object
    """
    value = time_to_freeze_str
    if value is None:
        value = datetime.datetime.utcnow()
    # NB: the datetime check must precede the date check (datetime is a date subclass).
    if isinstance(value, datetime.datetime):
        parsed = value
    elif isinstance(value, datetime.date):
        parsed = datetime.datetime.combine(value, datetime.time())
    elif isinstance(value, datetime.timedelta):
        parsed = datetime.datetime.utcnow() + value
    else:
        # Fall back to dateutil for strings and string-like inputs.
        parsed = parser.parse(value)
    return convert_to_timezone_naive(parsed)
def _parse_tz_offset(tz_offset):
if isinstance(tz_offset, datetime.timedelta):
return tz_offset
else:
return datetime.timedelta(hours=tz_offset)
class TickingDateTimeFactory(object):
    """Yields the frozen time plus however much real time has elapsed since *start*."""

    def __init__(self, time_to_freeze, start):
        self.time_to_freeze = time_to_freeze
        self.start = start

    def __call__(self):
        elapsed = real_datetime.now() - self.start
        return self.time_to_freeze + elapsed
class FrozenDateTimeFactory(object):
    """Always yields the same frozen time until ticked or moved."""

    def __init__(self, time_to_freeze):
        self.time_to_freeze = time_to_freeze

    def __call__(self):
        return self.time_to_freeze

    def tick(self, delta=datetime.timedelta(seconds=1)):
        # A bare number means seconds; anything else must be timedelta-like.
        if isinstance(delta, numbers.Real):
            delta = datetime.timedelta(seconds=delta)
        self.time_to_freeze += delta

    def move_to(self, target_datetime):
        """Moves frozen date to the given ``target_datetime``"""
        target_datetime = _parse_time_to_freeze(target_datetime)
        self.tick(delta=target_datetime - self.time_to_freeze)
class _freeze_time(object):
    """Workhorse behind ``freeze_time``: usable as a decorator (for functions,
    coroutines and classes) or as a context manager.  ``start()`` monkey-patches
    the ``datetime``/``time``/``uuid`` modules and every already-imported module
    that aliases them; ``stop()`` undoes all of it.  Freezes nest via the
    per-class stacks on the Fake* classes.
    """

    def __init__(self, time_to_freeze_str, tz_offset, ignore, tick, as_arg):
        self.time_to_freeze = _parse_time_to_freeze(time_to_freeze_str)
        self.tz_offset = _parse_tz_offset(tz_offset)
        self.ignore = tuple(ignore)
        self.tick = tick
        # (module, attribute, original_value) triples recorded by start().
        self.undo_changes = []
        self.modules_at_start = set()
        self.as_arg = as_arg

    def __call__(self, func):
        # Dispatch on what is being decorated.
        if inspect.isclass(func):
            return self.decorate_class(func)
        elif iscoroutinefunction(func):
            return self.decorate_coroutine(func)
        return self.decorate_callable(func)

    def decorate_class(self, klass):
        if issubclass(klass, unittest.TestCase):
            # If it's a TestCase, we assume you want to freeze the time for the
            # tests, from setUpClass to tearDownClass
            # Use getattr as in Python 2.6 they are optional
            orig_setUpClass = getattr(klass, 'setUpClass', None)
            orig_tearDownClass = getattr(klass, 'tearDownClass', None)

            @classmethod
            def setUpClass(cls):
                self.start()
                if orig_setUpClass is not None:
                    orig_setUpClass()

            @classmethod
            def tearDownClass(cls):
                if orig_tearDownClass is not None:
                    orig_tearDownClass()
                self.stop()

            klass.setUpClass = setUpClass
            klass.tearDownClass = tearDownClass
            return klass
        else:
            # Plain class: wrap every public callable attribute across the MRO.
            seen = set()
            klasses = klass.mro() if hasattr(klass, 'mro') else [klass] + list(klass.__bases__)
            for base_klass in klasses:
                for (attr, attr_value) in base_klass.__dict__.items():
                    if attr.startswith('_') or attr in seen:
                        continue
                    seen.add(attr)
                    if not callable(attr_value) or inspect.isclass(attr_value):
                        continue
                    try:
                        setattr(klass, attr, self(attr_value))
                    except (AttributeError, TypeError):
                        # Sometimes we can't set this for built-in types and custom callables
                        continue
            return klass

    def __enter__(self):
        return self.start()

    def __exit__(self, *args):
        self.stop()

    def start(self):
        # Choose the factory that decides what "now" is during the freeze.
        if self.tick:
            time_to_freeze = TickingDateTimeFactory(self.time_to_freeze, real_datetime.now())
        else:
            time_to_freeze = FrozenDateTimeFactory(self.time_to_freeze)
        # Change the modules
        datetime.datetime = FakeDatetime
        datetime.datetime.times_to_freeze.append(time_to_freeze)
        datetime.datetime.tz_offsets.append(self.tz_offset)
        datetime.date = FakeDate
        datetime.date.dates_to_freeze.append(time_to_freeze)
        datetime.date.tz_offsets.append(self.tz_offset)
        fake_time = FakeTime(time_to_freeze, time.time, ignore=self.ignore)
        fake_localtime = FakeLocalTime(time_to_freeze, time.localtime, ignore=self.ignore)
        fake_gmtime = FakeGMTTime(time_to_freeze, time.gmtime, ignore=self.ignore)
        fake_strftime = FakeStrfTime(time_to_freeze, time.strftime, ignore=self.ignore)
        fake_clock = FakeClock(time.clock, tick=self.tick, ignore=self.ignore)
        fake_clock.times_to_freeze.append(time_to_freeze)
        time.time = fake_time
        time.localtime = fake_localtime
        time.gmtime = fake_gmtime
        time.strftime = fake_strftime
        time.clock = fake_clock
        # Disable time-based (v1) UUID generation while frozen.
        if uuid_generate_time_attr:
            setattr(uuid, uuid_generate_time_attr, None)
        uuid._UuidCreate = None
        uuid._last_timestamp = None
        # Make pickling of fake instances round-trip through the fake classes.
        copyreg.dispatch_table[real_datetime] = pickle_fake_datetime
        copyreg.dispatch_table[real_date] = pickle_fake_date
        # Change any place where the module had already been imported
        to_patch = [
            ('real_date', real_date, 'FakeDate', FakeDate),
            ('real_datetime', real_datetime, 'FakeDatetime', FakeDatetime),
            ('real_gmtime', real_gmtime, 'FakeGMTTime', fake_gmtime),
            ('real_localtime', real_localtime, 'FakeLocalTime', fake_localtime),
            ('real_strftime', real_strftime, 'FakeStrfTime', fake_strftime),
            ('real_time', real_time, 'FakeTime', fake_time),
            ('real_clock', real_clock, 'FakeClock', fake_clock),
        ]
        self.fake_names = tuple(fake_name for real_name, real, fake_name, fake in to_patch)
        self.reals = dict((id(fake), real) for real_name, real, fake_name, fake in to_patch)
        fakes = dict((id(real), fake) for real_name, real, fake_name, fake in to_patch)
        add_change = self.undo_changes.append
        # Save the current loaded modules
        self.modules_at_start = set(sys.modules.keys())
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')
            for mod_name, module in list(sys.modules.items()):
                if mod_name is None or module is None or mod_name == __name__:
                    continue
                elif mod_name.startswith(self.ignore) or mod_name.endswith('.six.moves'):
                    continue
                elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                    continue
                # Swap every attribute that aliases a real date/time object,
                # remembering the original for stop().
                module_attrs = _get_cached_module_attributes(module)
                for attribute_name, attribute_value in module_attrs:
                    fake = fakes.get(id(attribute_value))
                    if fake:
                        setattr(module, attribute_name, fake)
                        add_change((module, attribute_name, attribute_value))
        return time_to_freeze

    def stop(self):
        # Unwind this freeze's entries from the shared stacks.
        datetime.datetime.times_to_freeze.pop()
        datetime.datetime.tz_offsets.pop()
        datetime.date.dates_to_freeze.pop()
        datetime.date.tz_offsets.pop()
        time.clock.times_to_freeze.pop()
        # Only the outermost freeze restores the patched modules.
        if not datetime.datetime.times_to_freeze:
            datetime.datetime = real_datetime
            datetime.date = real_date
            copyreg.dispatch_table.pop(real_datetime)
            copyreg.dispatch_table.pop(real_date)
            for module, module_attribute, original_value in self.undo_changes:
                setattr(module, module_attribute, original_value)
            self.undo_changes = []
            # Restore modules loaded after start()
            modules_to_restore = set(sys.modules.keys()) - self.modules_at_start
            self.modules_at_start = set()
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                for mod_name in modules_to_restore:
                    module = sys.modules.get(mod_name, None)
                    if mod_name is None or module is None:
                        continue
                    elif mod_name.startswith(self.ignore) or mod_name.endswith('.six.moves'):
                        continue
                    elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                        continue
                    for module_attribute in dir(module):
                        if module_attribute in self.fake_names:
                            continue
                        try:
                            attribute_value = getattr(module, module_attribute)
                        except (ImportError, AttributeError, TypeError):
                            # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                            continue
                        real = self.reals.get(id(attribute_value))
                        if real:
                            setattr(module, module_attribute, real)
            time.time = time.time.previous_time_function
            time.gmtime = time.gmtime.previous_gmtime_function
            time.localtime = time.localtime.previous_localtime_function
            time.strftime = time.strftime.previous_strftime_function
            time.clock = time.clock.previous_clock_function
            # Re-enable time-based UUID generation.
            if uuid_generate_time_attr:
                setattr(uuid, uuid_generate_time_attr, real_uuid_generate_time)
            uuid._UuidCreate = real_uuid_create
            uuid._last_timestamp = None

    def decorate_coroutine(self, coroutine):
        return wrap_coroutine(self, coroutine)

    def decorate_callable(self, func):
        def wrapper(*args, **kwargs):
            with self as time_factory:
                if self.as_arg:
                    # Pass the frozen-time factory as the first argument.
                    result = func(time_factory, *args, **kwargs)
                else:
                    result = func(*args, **kwargs)
            return result
        functools.update_wrapper(wrapper, func)
        # update_wrapper already sets __wrapped__ in Python 3.2+, this is only
        # needed for Python 2.x support
        wrapper.__wrapped__ = func
        return wrapper
def freeze_time(time_to_freeze=None, tz_offset=0, ignore=None, tick=False, as_arg=False):
    """Freeze time at *time_to_freeze*, returning a ``_freeze_time`` object
    usable as a decorator or context manager.

    Accepts None (meaning "now"), a string, a date/datetime, a timedelta,
    a MayaDT, a function, or a generator; raises TypeError for anything else.
    ``tick=True`` requires CPython.
    """
    # Python3 doesn't have basestring, but it does have str.
    try:
        string_type = basestring
    except NameError:
        string_type = str
    acceptable_times = (type(None), string_type, datetime.date, datetime.timedelta,
                        types.FunctionType, types.GeneratorType)
    if MayaDT is not None:
        acceptable_times += MayaDT,
    if not isinstance(time_to_freeze, acceptable_times):
        raise TypeError(('freeze_time() expected None, a string, date instance, datetime '
                         'instance, MayaDT, timedelta instance, function or a generator, but got '
                         'type {0}.').format(type(time_to_freeze)))
    if tick and not _is_cpython:
        raise SystemError('Calling freeze_time with tick=True is only compatible with CPython')
    # BUG FIX: the recursive calls below previously omitted ``as_arg``, so
    # freeze_time(callable_or_generator, ..., as_arg=True) silently lost the flag.
    if isinstance(time_to_freeze, types.FunctionType):
        return freeze_time(time_to_freeze(), tz_offset, ignore, tick, as_arg)
    if isinstance(time_to_freeze, types.GeneratorType):
        return freeze_time(next(time_to_freeze), tz_offset, ignore, tick, as_arg)
    if MayaDT is not None and isinstance(time_to_freeze, MayaDT):
        return freeze_time(time_to_freeze.datetime(), tz_offset, ignore,
                           tick, as_arg)
    if ignore is None:
        ignore = []
    # BUG FIX: copy before appending so a caller-supplied list is never
    # mutated in place (and does not keep growing across calls).
    ignore = list(ignore)
    ignore.append('nose.plugins')
    ignore.append('six.moves')
    ignore.append('django.utils.six.moves')
    ignore.append('google.gax')
    ignore.append('threading')
    ignore.append('Queue')
    return _freeze_time(time_to_freeze, tz_offset, ignore, tick, as_arg)
# Setup adapters for sqlite
try:
    import sqlite3
except ImportError:
    # Some systems have trouble with this
    pass
else:
    # These are copied from Python sqlite3.dbapi2
    def adapt_date(val):
        return val.isoformat()

    def adapt_datetime(val):
        return val.isoformat(" ")

    # Register so the fake classes serialise the same way the real ones do.
    sqlite3.register_adapter(FakeDate, adapt_date)
    sqlite3.register_adapter(FakeDatetime, adapt_datetime)
# Setup converters for pymysql
try:
    import pymysql.converters
except ImportError:
    pass
else:
    # Reuse pymysql's real date/datetime encoders for the fake classes so
    # frozen values are escaped identically on the way into MySQL.
    pymysql.converters.encoders[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.conversions[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.encoders[FakeDatetime] = pymysql.converters.encoders[real_datetime]
    pymysql.converters.conversions[FakeDatetime] = pymysql.converters.encoders[real_datetime]
Fixed a missed variable rename.
import datetime
import functools
import hashlib
import inspect
import sys
import time
import uuid
import calendar
import unittest
import platform
import warnings
import types
import numbers
import inspect
from dateutil import parser
from dateutil.tz import tzlocal
try:
from maya import MayaDT
except ImportError:
MayaDT = None
# Keep references to the genuine time/date callables and classes so they can be
# restored after a freeze and consulted while the modules are patched.
real_time = time.time
real_localtime = time.localtime
real_gmtime = time.gmtime
real_strftime = time.strftime
# NOTE(review): ``time.clock`` was removed in Python 3.8 — this module appears
# to assume an older interpreter; confirm the supported Python range.
real_clock = time.clock
real_date = datetime.date
real_datetime = datetime.datetime
real_date_objects = [real_time, real_localtime, real_gmtime, real_strftime, real_date, real_datetime]
# Identity set used to recognise aliases of the real objects in other modules.
_real_time_object_ids = set(id(obj) for obj in real_date_objects)
# Locate uuid's internal time-based generator so v1 UUIDs can be neutralised
# while time is frozen; the attribute name differs between Python versions.
try:
    real_uuid_generate_time = uuid._uuid_generate_time
    uuid_generate_time_attr = '_uuid_generate_time'
except AttributeError:
    # Presumably newer CPython moved the helper behind _load_system_functions().
    uuid._load_system_functions()
    real_uuid_generate_time = uuid._generate_time_safe
    uuid_generate_time_attr = '_generate_time_safe'
except ImportError:
    real_uuid_generate_time = None
    uuid_generate_time_attr = None
try:
    real_uuid_create = uuid._UuidCreate
except (AttributeError, ImportError):
    real_uuid_create = None
# copy_reg (Python 2) vs copyreg (Python 3).
try:
    import copy_reg as copyreg
except ImportError:
    import copyreg
# Coroutine support is only wired up where inspect.iscoroutinefunction exists.
try:
    iscoroutinefunction = inspect.iscoroutinefunction
    from freezegun._async import wrap_coroutine
except AttributeError:
    iscoroutinefunction = lambda x: False

    def wrap_coroutine(*args):
        raise NotImplementedError()
# keep a cache of module attributes otherwise freezegun will need to analyze too many modules all the time
_GLOBAL_MODULES_CACHE = {}
def _get_module_attributes(module):
result = []
try:
module_attributes = dir(module)
except TypeError:
return result
for attribute_name in module_attributes:
try:
attribute_value = getattr(module, attribute_name)
except (ImportError, AttributeError, TypeError):
# For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
continue
else:
result.append((attribute_name, attribute_value))
return result
def _setup_module_cache(module):
    """Scan *module* and cache which of its attributes alias the real
    time/date objects, keyed by module name alongside a staleness hash."""
    date_attrs = [
        (name, value)
        for name, value in _get_module_attributes(module)
        if id(value) in _real_time_object_ids
    ]
    _GLOBAL_MODULES_CACHE[module.__name__] = (
        _get_module_attributes_hash(module), date_attrs)
def _get_module_attributes_hash(module):
try:
module_dir = dir(module)
except TypeError:
module_dir = []
return '{0}-{1}'.format(id(module), hash(frozenset(module_dir)))
def _get_cached_module_attributes(module):
    """Return *module*'s real-time-aliasing attributes, refreshing the
    global cache when the module's attribute fingerprint has changed."""
    cached_hash, cached_attrs = _GLOBAL_MODULES_CACHE.get(
        module.__name__, ('0', []))
    if cached_hash == _get_module_attributes_hash(module):
        return cached_attrs
    # Cache miss or stale entry: rebuild, then return the fresh value.
    _setup_module_cache(module)
    return _GLOBAL_MODULES_CACHE[module.__name__][1]
# Stolen from six
def with_metaclass(meta, *bases):
    """Create a base class named ``NewBase`` with metaclass *meta* and the
    given *bases* (Python 2/3 compatible helper, borrowed from six)."""
    namespace = {}
    return meta("NewBase", bases, namespace)
# tick=True relies on CPython-specific behaviour, so detect the interpreter.
_is_cpython = (
    hasattr(platform, 'python_implementation') and
    platform.python_implementation().lower() == "cpython"
)
class BaseFakeTime(object):
    """Shared stack-inspection logic for the fake time callables.

    Subclasses call ``_should_use_real_time`` to decide whether the
    caller belongs to an ignored module and should therefore receive the
    real (unfrozen) time.
    """

    # How many call-stack frames to examine; a falsy value disables the
    # inspection entirely (always use the frozen time).
    call_stack_inspection_limit = 5

    def _should_use_real_time(self, call_stack, modules_to_ignore):
        """Return True if a caller within the inspection limit comes from a
        module whose name starts with one of *modules_to_ignore* (a tuple
        of prefixes); such callers get the real time."""
        if not self.call_stack_inspection_limit:
            return False
        if not modules_to_ignore:
            return False
        stack_limit = min(len(call_stack), self.call_stack_inspection_limit)
        # Start at 1 to ignore the current frame (index 0)
        for i in range(1, stack_limit):
            module = inspect.getmodule(call_stack[i][0])
            # inspect.getmodule() returns None for frames whose module
            # cannot be determined (exec'd or interactive code); treat
            # those as "not ignored" instead of crashing on None.__name__.
            if module is not None and module.__name__.startswith(modules_to_ignore):
                return True
        return False
class FakeTime(BaseFakeTime):
    """Replacement for time.time() returning the frozen time as an epoch."""
    def __init__(self, time_to_freeze, previous_time_function, ignore=None):
        self.time_to_freeze = time_to_freeze  # factory returning a datetime
        self.previous_time_function = previous_time_function  # saved real time.time
        self.ignore = ignore  # module-name prefixes exempt from freezing
    def __call__(self):
        call_stack = inspect.stack()
        if self._should_use_real_time(call_stack, self.ignore):
            return real_time()
        current_time = self.time_to_freeze()
        # timegm() treats the naive frozen datetime as UTC; add back the
        # sub-second component that timetuple() drops.
        return calendar.timegm(current_time.timetuple()) + current_time.microsecond / 1000000.0
class FakeLocalTime(BaseFakeTime):
    """Replacement for time.localtime() based on the frozen time."""
    def __init__(self, time_to_freeze, previous_localtime_function=None, ignore=None):
        self.time_to_freeze = time_to_freeze  # factory returning a datetime
        self.previous_localtime_function = previous_localtime_function
        self.ignore = ignore
    def __call__(self, t=None):
        # An explicit timestamp is always converted with the real localtime().
        if t is not None:
            return real_localtime(t)
        call_stack = inspect.stack()
        if self._should_use_real_time(call_stack, self.ignore):
            return real_localtime()
        # Shift the frozen (UTC) time into local time via time.timezone.
        shifted_time = self.time_to_freeze() - datetime.timedelta(seconds=time.timezone)
        return shifted_time.timetuple()
class FakeGMTTime(BaseFakeTime):
    """Replacement for time.gmtime() based on the frozen time."""
    def __init__(self, time_to_freeze, previous_gmtime_function, ignore=None):
        self.time_to_freeze = time_to_freeze  # factory returning a datetime
        self.previous_gmtime_function = previous_gmtime_function
        self.ignore = ignore
    def __call__(self, t=None):
        # An explicit timestamp is always converted with the real gmtime().
        if t is not None:
            return real_gmtime(t)
        call_stack = inspect.stack()
        if self._should_use_real_time(call_stack, self.ignore):
            return real_gmtime()
        return self.time_to_freeze().timetuple()
class FakeStrfTime(BaseFakeTime):
    """Replacement for time.strftime() defaulting to the frozen local time."""
    def __init__(self, time_to_freeze, previous_strftime_function, ignore=None):
        self.time_to_freeze = time_to_freeze  # factory returning a datetime
        self.previous_strftime_function = previous_strftime_function
        self.ignore = ignore
    def __call__(self, format, time_to_format=None):
        if time_to_format is None:
            call_stack = inspect.stack()
            if not self._should_use_real_time(call_stack, self.ignore):
                # No explicit time given: format the frozen local time.
                time_to_format = FakeLocalTime(self.time_to_freeze)()
        # NOTE(review): for ignored callers time_to_format stays None here,
        # which real_strftime may reject — confirm intended behaviour.
        return real_strftime(format, time_to_format)
class FakeClock(BaseFakeTime):
    """Replacement for time.clock() measured along the frozen timeline.

    ``times_to_freeze`` is a class-level stack shared by every instance;
    _freeze_time.start() appends the active factory and stop() pops it.
    """
    times_to_freeze = []
    def __init__(self, previous_clock_function, tick=False, ignore=None):
        self.previous_clock_function = previous_clock_function
        self.tick = tick
        self.ignore = ignore
    def __call__(self, *args, **kwargs):
        call_stack = inspect.stack()
        if self._should_use_real_time(call_stack, self.ignore):
            return self.previous_clock_function()
        if len(self.times_to_freeze) == 1:
            # Single (non-nested) freeze: clock is pinned to zero, unless
            # ticking, in which case the real clock keeps advancing.
            return 0.0 if not self.tick else self.previous_clock_function()
        # Nested freezes: report the span between the outermost and the
        # innermost frozen times (plus real elapsed time when ticking).
        first_frozen_time = self.times_to_freeze[0]()
        last_frozen_time = self.times_to_freeze[-1]()
        timedelta = (last_frozen_time - first_frozen_time)
        total_seconds = timedelta.total_seconds()
        if self.tick:
            total_seconds += self.previous_clock_function()
        return total_seconds
class FakeDateMeta(type):
    """Metaclass making isinstance(x, FakeDate) accept any real date too."""
    @classmethod
    def __instancecheck__(self, obj):
        return isinstance(obj, real_date)
def datetime_to_fakedatetime(datetime):
    """Copy a real datetime into a FakeDatetime (tzinfo preserved).

    NOTE: the parameter name shadows the ``datetime`` module inside this
    function; harmless here because only the argument is accessed.
    """
    return FakeDatetime(datetime.year,
                        datetime.month,
                        datetime.day,
                        datetime.hour,
                        datetime.minute,
                        datetime.second,
                        datetime.microsecond,
                        datetime.tzinfo)
def date_to_fakedate(date):
    """Copy a real date into a FakeDate."""
    return FakeDate(date.year,
                    date.month,
                    date.day)
class FakeDate(with_metaclass(FakeDateMeta, real_date)):
    """datetime.date replacement whose today() returns the frozen date.

    ``dates_to_freeze``/``tz_offsets`` are class-level stacks shared by all
    freezes; _freeze_time.start()/stop() push and pop them so nested
    freezes behave correctly (innermost wins).
    """
    dates_to_freeze = []  # stack of factories producing the frozen datetime
    tz_offsets = []  # stack of matching timezone offsets (timedeltas)
    def __new__(cls, *args, **kwargs):
        return real_date.__new__(cls, *args, **kwargs)
    def __add__(self, other):
        # Keep arithmetic results inside the fake type.
        result = real_date.__add__(self, other)
        if result is NotImplemented:
            return result
        return date_to_fakedate(result)
    def __sub__(self, other):
        result = real_date.__sub__(self, other)
        if result is NotImplemented:
            return result
        if isinstance(result, real_date):
            return date_to_fakedate(result)
        else:
            # date - date yields a timedelta; pass it through unchanged.
            return result
    @classmethod
    def today(cls):
        """Return the frozen date adjusted by the configured tz offset."""
        result = cls._date_to_freeze() + cls._tz_offset()
        return date_to_fakedate(result)
    @classmethod
    def _date_to_freeze(cls):
        # Innermost (most recently started) freeze wins.
        return cls.dates_to_freeze[-1]()
    @classmethod
    def _tz_offset(cls):
        return cls.tz_offsets[-1]
FakeDate.min = date_to_fakedate(real_date.min)
FakeDate.max = date_to_fakedate(real_date.max)
class FakeDatetimeMeta(FakeDateMeta):
    """Metaclass making isinstance(x, FakeDatetime) accept real datetimes."""
    @classmethod
    def __instancecheck__(self, obj):
        return isinstance(obj, real_datetime)
class FakeDatetime(with_metaclass(FakeDatetimeMeta, real_datetime, FakeDate)):
    """datetime.datetime replacement whose now()/utcnow() are frozen.

    ``times_to_freeze``/``tz_offsets`` are class-level stacks shared by
    all freezes; start()/stop() push and pop them.
    """
    times_to_freeze = []  # stack of factories producing the frozen datetime
    tz_offsets = []  # stack of matching timezone offsets (timedeltas)
    def __new__(cls, *args, **kwargs):
        return real_datetime.__new__(cls, *args, **kwargs)
    def __add__(self, other):
        # Keep arithmetic results inside the fake type.
        result = real_datetime.__add__(self, other)
        if result is NotImplemented:
            return result
        return datetime_to_fakedatetime(result)
    def __sub__(self, other):
        result = real_datetime.__sub__(self, other)
        if result is NotImplemented:
            return result
        if isinstance(result, real_datetime):
            return datetime_to_fakedatetime(result)
        else:
            # datetime - datetime yields a timedelta; pass through unchanged.
            return result
    def astimezone(self, tz=None):
        if tz is None:
            # Default to the local timezone, mirroring the real method.
            tz = tzlocal()
        return datetime_to_fakedatetime(real_datetime.astimezone(self, tz))
    @classmethod
    def now(cls, tz=None):
        """Return the frozen time (falls back to the real now() when no
        freeze is active), adjusted by tz and the configured offset."""
        now = cls._time_to_freeze() or real_datetime.now()
        if tz:
            result = tz.fromutc(now.replace(tzinfo=tz)) + cls._tz_offset()
        else:
            result = now + cls._tz_offset()
        return datetime_to_fakedatetime(result)
    def date(self):
        return date_to_fakedate(self)
    @classmethod
    def today(cls):
        return cls.now(tz=None)
    @classmethod
    def utcnow(cls):
        result = cls._time_to_freeze() or real_datetime.utcnow()
        return datetime_to_fakedatetime(result)
    @classmethod
    def _time_to_freeze(cls):
        # Innermost freeze wins; returns None when no freeze is active.
        if cls.times_to_freeze:
            return cls.times_to_freeze[-1]()
    @classmethod
    def _tz_offset(cls):
        return cls.tz_offsets[-1]
FakeDatetime.min = datetime_to_fakedatetime(real_datetime.min)
FakeDatetime.max = datetime_to_fakedatetime(real_datetime.max)
def convert_to_timezone_naive(time_to_freeze):
    """Return *time_to_freeze* as a naive datetime expressed in UTC.

    Aware datetimes are shifted back by their UTC offset and stripped of
    tzinfo; naive datetimes are returned unchanged.
    """
    if not time_to_freeze.tzinfo:
        return time_to_freeze
    utc_equivalent = time_to_freeze - time_to_freeze.utcoffset()
    return utc_equivalent.replace(tzinfo=None)
def pickle_fake_date(datetime_):
    """Pickle support: reduce a FakeDate to (class, (year, month, day))."""
    args = (datetime_.year, datetime_.month, datetime_.day)
    return FakeDate, args
def pickle_fake_datetime(datetime_):
    """Pickle support: reduce a FakeDatetime to (class, field tuple)."""
    args = (
        datetime_.year,
        datetime_.month,
        datetime_.day,
        datetime_.hour,
        datetime_.minute,
        datetime_.second,
        datetime_.microsecond,
        datetime_.tzinfo,
    )
    return FakeDatetime, args
def _parse_time_to_freeze(time_to_freeze_str):
    """Parses all the possible inputs for freeze_time.

    Accepts None (meaning "now"), a datetime, a date, a timedelta, or a
    string parseable by dateutil.

    :returns: a naive ``datetime.datetime`` object in UTC
    """
    value = time_to_freeze_str
    if value is None:
        value = datetime.datetime.utcnow()
    # Order matters: datetime is a subclass of date, so check it first.
    if isinstance(value, datetime.datetime):
        result = value
    elif isinstance(value, datetime.date):
        result = datetime.datetime.combine(value, datetime.time())
    elif isinstance(value, datetime.timedelta):
        result = datetime.datetime.utcnow() + value
    else:
        result = parser.parse(value)
    return convert_to_timezone_naive(result)
def _parse_tz_offset(tz_offset):
if isinstance(tz_offset, datetime.timedelta):
return tz_offset
else:
return datetime.timedelta(hours=tz_offset)
class TickingDateTimeFactory(object):
    """Factory returning the frozen time plus the real time elapsed since
    the freeze started (used for freeze_time(tick=True))."""

    def __init__(self, time_to_freeze, start):
        self.time_to_freeze = time_to_freeze
        self.start = start

    def __call__(self):
        elapsed = real_datetime.now() - self.start
        return self.time_to_freeze + elapsed
class FrozenDateTimeFactory(object):
    """Factory that always returns the same frozen datetime until moved."""

    def __init__(self, time_to_freeze):
        self.time_to_freeze = time_to_freeze

    def __call__(self):
        return self.time_to_freeze

    def tick(self, delta=datetime.timedelta(seconds=1)):
        """Advance the frozen time by *delta* (a timedelta or seconds)."""
        if isinstance(delta, numbers.Real):
            delta = datetime.timedelta(seconds=delta)
        self.time_to_freeze += delta

    def move_to(self, target_datetime):
        """Moves frozen date to the given ``target_datetime``"""
        target_datetime = _parse_time_to_freeze(target_datetime)
        self.tick(delta=target_datetime - self.time_to_freeze)
class _freeze_time(object):
    """The object returned by freeze_time(): usable as a decorator (for
    functions, coroutines, TestCases and plain classes) or as a context
    manager.  start() installs the fakes; stop() undoes them, honouring
    nesting via the class-level stacks on the fake types."""
    def __init__(self, time_to_freeze_str, tz_offset, ignore, tick, as_arg):
        self.time_to_freeze = _parse_time_to_freeze(time_to_freeze_str)
        self.tz_offset = _parse_tz_offset(tz_offset)
        self.ignore = tuple(ignore)  # module-name prefixes exempt from freezing
        self.tick = tick
        self.undo_changes = []  # (module, attr, original_value) triples
        self.modules_at_start = set()  # modules loaded before start()
        self.as_arg = as_arg  # pass the time factory to decorated callables
    def __call__(self, func):
        """Dispatch decoration based on what is being decorated."""
        if inspect.isclass(func):
            return self.decorate_class(func)
        elif iscoroutinefunction(func):
            return self.decorate_coroutine(func)
        return self.decorate_callable(func)
    def decorate_class(self, klass):
        """Freeze time for a whole class: via setUpClass/tearDownClass for
        TestCase subclasses, otherwise by wrapping every public callable
        found anywhere in the MRO."""
        if issubclass(klass, unittest.TestCase):
            # If it's a TestCase, we assume you want to freeze the time for the
            # tests, from setUpClass to tearDownClass
            # Use getattr as in Python 2.6 they are optional
            orig_setUpClass = getattr(klass, 'setUpClass', None)
            orig_tearDownClass = getattr(klass, 'tearDownClass', None)
            @classmethod
            def setUpClass(cls):
                self.start()
                if orig_setUpClass is not None:
                    orig_setUpClass()
            @classmethod
            def tearDownClass(cls):
                if orig_tearDownClass is not None:
                    orig_tearDownClass()
                self.stop()
            klass.setUpClass = setUpClass
            klass.tearDownClass = tearDownClass
            return klass
        else:
            seen = set()
            klasses = klass.mro() if hasattr(klass, 'mro') else [klass] + list(klass.__bases__)
            for base_klass in klasses:
                for (attr, attr_value) in base_klass.__dict__.items():
                    # Skip private names and anything already wrapped via
                    # an earlier (more derived) class in the MRO.
                    if attr.startswith('_') or attr in seen:
                        continue
                    seen.add(attr)
                    if not callable(attr_value) or inspect.isclass(attr_value):
                        continue
                    try:
                        setattr(klass, attr, self(attr_value))
                    except (AttributeError, TypeError):
                        # Sometimes we can't set this for built-in types and custom callables
                        continue
            return klass
    def __enter__(self):
        return self.start()
    def __exit__(self, *args):
        self.stop()
    def start(self):
        """Install the fake date/time classes and functions everywhere,
        returning the factory that produces the frozen time."""
        if self.tick:
            time_to_freeze = TickingDateTimeFactory(self.time_to_freeze, real_datetime.now())
        else:
            time_to_freeze = FrozenDateTimeFactory(self.time_to_freeze)
        # Change the modules
        datetime.datetime = FakeDatetime
        datetime.datetime.times_to_freeze.append(time_to_freeze)
        datetime.datetime.tz_offsets.append(self.tz_offset)
        datetime.date = FakeDate
        datetime.date.dates_to_freeze.append(time_to_freeze)
        datetime.date.tz_offsets.append(self.tz_offset)
        fake_time = FakeTime(time_to_freeze, time.time, ignore=self.ignore)
        fake_localtime = FakeLocalTime(time_to_freeze, time.localtime, ignore=self.ignore)
        fake_gmtime = FakeGMTTime(time_to_freeze, time.gmtime, ignore=self.ignore)
        fake_strftime = FakeStrfTime(time_to_freeze, time.strftime, ignore=self.ignore)
        fake_clock = FakeClock(time.clock, tick=self.tick, ignore=self.ignore)
        fake_clock.times_to_freeze.append(time_to_freeze)
        time.time = fake_time
        time.localtime = fake_localtime
        time.gmtime = fake_gmtime
        time.strftime = fake_strftime
        time.clock = fake_clock
        if uuid_generate_time_attr:
            # Disable the C-level time-based UUID helper so uuid1() falls
            # back to the (patched) pure-Python time source.
            setattr(uuid, uuid_generate_time_attr, None)
        uuid._UuidCreate = None
        uuid._last_timestamp = None
        # Make pickling of fake instances produce fake classes on load.
        copyreg.dispatch_table[real_datetime] = pickle_fake_datetime
        copyreg.dispatch_table[real_date] = pickle_fake_date
        # Change any place where the module had already been imported
        to_patch = [
            ('real_date', real_date, 'FakeDate', FakeDate),
            ('real_datetime', real_datetime, 'FakeDatetime', FakeDatetime),
            ('real_gmtime', real_gmtime, 'FakeGMTTime', fake_gmtime),
            ('real_localtime', real_localtime, 'FakeLocalTime', fake_localtime),
            ('real_strftime', real_strftime, 'FakeStrfTime', fake_strftime),
            ('real_time', real_time, 'FakeTime', fake_time),
            ('real_clock', real_clock, 'FakeClock', fake_clock),
        ]
        self.fake_names = tuple(fake_name for real_name, real, fake_name, fake in to_patch)
        self.reals = dict((id(fake), real) for real_name, real, fake_name, fake in to_patch)
        fakes = dict((id(real), fake) for real_name, real, fake_name, fake in to_patch)
        add_change = self.undo_changes.append
        # Save the current loaded modules
        self.modules_at_start = set(sys.modules.keys())
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')
            for mod_name, module in list(sys.modules.items()):
                if mod_name is None or module is None or mod_name == __name__:
                    continue
                elif mod_name.startswith(self.ignore) or mod_name.endswith('.six.moves'):
                    continue
                elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                    continue
                # Replace each alias of a real object with its fake,
                # remembering the original so stop() can restore it.
                module_attrs = _get_cached_module_attributes(module)
                for attribute_name, attribute_value in module_attrs:
                    fake = fakes.get(id(attribute_value))
                    if fake:
                        setattr(module, attribute_name, fake)
                        add_change((module, attribute_name, attribute_value))
        return time_to_freeze
    def stop(self):
        """Undo start(); fully restores globals only once the outermost
        freeze is popped (the stacks become empty)."""
        datetime.datetime.times_to_freeze.pop()
        datetime.datetime.tz_offsets.pop()
        datetime.date.dates_to_freeze.pop()
        datetime.date.tz_offsets.pop()
        time.clock.times_to_freeze.pop()
        if not datetime.datetime.times_to_freeze:
            datetime.datetime = real_datetime
            datetime.date = real_date
            copyreg.dispatch_table.pop(real_datetime)
            copyreg.dispatch_table.pop(real_date)
            for module, module_attribute, original_value in self.undo_changes:
                setattr(module, module_attribute, original_value)
            self.undo_changes = []
            # Restore modules loaded after start()
            modules_to_restore = set(sys.modules.keys()) - self.modules_at_start
            self.modules_at_start = set()
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                for mod_name in modules_to_restore:
                    module = sys.modules.get(mod_name, None)
                    if mod_name is None or module is None:
                        continue
                    elif mod_name.startswith(self.ignore) or mod_name.endswith('.six.moves'):
                        continue
                    elif (not hasattr(module, "__name__") or module.__name__ in ('datetime', 'time')):
                        continue
                    for module_attribute in dir(module):
                        if module_attribute in self.fake_names:
                            continue
                        try:
                            attribute_value = getattr(module, module_attribute)
                        except (ImportError, AttributeError, TypeError):
                            # For certain libraries, this can result in ImportError(_winreg) or AttributeError (celery)
                            continue
                        real = self.reals.get(id(attribute_value))
                        if real:
                            setattr(module, module_attribute, real)
        # Unwind one layer of fake time functions (they chain via the
        # previous_* attributes saved at start()).
        time.time = time.time.previous_time_function
        time.gmtime = time.gmtime.previous_gmtime_function
        time.localtime = time.localtime.previous_localtime_function
        time.strftime = time.strftime.previous_strftime_function
        time.clock = time.clock.previous_clock_function
        if uuid_generate_time_attr:
            setattr(uuid, uuid_generate_time_attr, real_uuid_generate_time)
        uuid._UuidCreate = real_uuid_create
        uuid._last_timestamp = None
    def decorate_coroutine(self, coroutine):
        return wrap_coroutine(self, coroutine)
    def decorate_callable(self, func):
        """Wrap *func* so time is frozen for the duration of each call."""
        def wrapper(*args, **kwargs):
            with self as time_factory:
                if self.as_arg:
                    result = func(time_factory, *args, **kwargs)
                else:
                    result = func(*args, **kwargs)
            return result
        functools.update_wrapper(wrapper, func)
        # update_wrapper already sets __wrapped__ in Python 3.2+, this is only
        # needed for Python 2.x support
        wrapper.__wrapped__ = func
        return wrapper
def freeze_time(time_to_freeze=None, tz_offset=0, ignore=None, tick=False, as_arg=False):
    """Freeze time at *time_to_freeze* (None, str, date, datetime,
    timedelta, MayaDT, function or generator).

    Returns a ``_freeze_time`` object usable as a decorator or context
    manager.  *ignore* lists module-name prefixes that keep the real time;
    *tick* lets the clock advance from the frozen point; *as_arg* passes
    the time factory as the first argument to decorated callables.
    """
    # Python3 doesn't have basestring, but it does have str.
    try:
        string_type = basestring
    except NameError:
        string_type = str
    acceptable_times = (type(None), string_type, datetime.date, datetime.timedelta,
                        types.FunctionType, types.GeneratorType)
    if MayaDT is not None:
        acceptable_times += MayaDT,
    if not isinstance(time_to_freeze, acceptable_times):
        raise TypeError(('freeze_time() expected None, a string, date instance, datetime '
                         'instance, MayaDT, timedelta instance, function or a generator, but got '
                         'type {0}.').format(type(time_to_freeze)))
    if tick and not _is_cpython:
        raise SystemError('Calling freeze_time with tick=True is only compatible with CPython')
    # For callables/generators/MayaDT, resolve to a concrete value and
    # recurse; as_arg must be forwarded on every path (it was previously
    # dropped for the function and generator cases).
    if isinstance(time_to_freeze, types.FunctionType):
        return freeze_time(time_to_freeze(), tz_offset, ignore, tick, as_arg)
    if isinstance(time_to_freeze, types.GeneratorType):
        return freeze_time(next(time_to_freeze), tz_offset, ignore, tick, as_arg)
    if MayaDT is not None and isinstance(time_to_freeze, MayaDT):
        return freeze_time(time_to_freeze.datetime(), tz_offset, ignore,
                           tick, as_arg)
    if ignore is None:
        ignore = []
    # Work on a copy so the caller's list is not mutated by the appends below.
    ignore = list(ignore)
    ignore.append('nose.plugins')
    ignore.append('six.moves')
    ignore.append('django.utils.six.moves')
    ignore.append('google.gax')
    ignore.append('threading')
    ignore.append('Queue')
    return _freeze_time(time_to_freeze, tz_offset, ignore, tick, as_arg)
# Setup adapters for sqlite
try:
    import sqlite3
except ImportError:
    # Some systems have trouble with this
    pass
else:
    # These are copied from Python sqlite3.dbapi2
    # Register the fake types so sqlite3 serializes them exactly like the
    # real date/datetime (ISO-8601 text).
    def adapt_date(val):
        return val.isoformat()
    def adapt_datetime(val):
        return val.isoformat(" ")
    sqlite3.register_adapter(FakeDate, adapt_date)
    sqlite3.register_adapter(FakeDatetime, adapt_datetime)
# Setup converters for pymysql
try:
    import pymysql.converters
except ImportError:
    pass
else:
    # Reuse pymysql's real date/datetime encoders for the fake types; the
    # real encoders operate on any date/datetime instance.
    pymysql.converters.encoders[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.conversions[FakeDate] = pymysql.converters.encoders[real_date]
    pymysql.converters.encoders[FakeDatetime] = pymysql.converters.encoders[real_datetime]
    pymysql.converters.conversions[FakeDatetime] = pymysql.converters.encoders[real_datetime]
|
"""Remove unused tables
Revision ID: bc497a3d230
Revises: 9440ed72930
Create Date: 2017-05-10 18:27:13.954893
"""
# revision identifiers, used by Alembic.
revision = 'bc497a3d230'
down_revision = '9440ed72930'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Drop the unused Specs/Bundles/AppVars/Messages/AppVersions tables
    and the Apps columns/foreign key that referenced them."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(u'Apps_ibfk_2', 'Apps', type_='foreignkey')
    op.drop_column('Apps', u'spec_id')
    op.drop_column('Apps', u'description')
    # Drop dependents before the tables they reference.
    op.drop_table(u'AppVersions')
    op.drop_table(u'Messages')
    op.drop_table(u'AppVars')
    op.drop_table(u'Bundles')
    op.drop_table(u'Specs')
    ### end Alembic commands ###
def downgrade():
    """Recreate the dropped tables and the Apps columns/foreign key
    (MySQL-specific DDL, as autogenerated)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Apps', sa.Column(u'description', mysql.VARCHAR(length=1000), nullable=True))
    op.add_column('Apps', sa.Column(u'spec_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.create_foreign_key(u'Apps_ibfk_2', 'Apps', u'Specs', [u'spec_id'], [u'id'])
    op.create_table(u'Specs',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'url', mysql.VARCHAR(length=500), nullable=False),
    sa.Column(u'pid', mysql.VARCHAR(length=60), nullable=False),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'Bundles',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'lang', mysql.VARCHAR(length=15), nullable=True),
    sa.Column(u'target', mysql.VARCHAR(length=30), nullable=True),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'Bundles_ibfk_1'),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'AppVars',
    sa.Column(u'var_id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'name', mysql.VARCHAR(length=50), nullable=True),
    sa.Column(u'value', mysql.VARCHAR(length=500), nullable=True),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'AppVars_ibfk_1'),
    sa.PrimaryKeyConstraint(u'var_id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'Messages',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'key', mysql.VARCHAR(length=250), nullable=True),
    sa.Column(u'value', mysql.TEXT(), nullable=True),
    sa.Column(u'bundle_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint([u'bundle_id'], [u'Bundles.id'], name=u'Messages_ibfk_1'),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'AppVersions',
    sa.Column(u'version_id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.Column(u'creation_date', sa.DATETIME(), nullable=False),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'AppVersions_ibfk_1'),
    sa.PrimaryKeyConstraint(u'version_id', u'app_id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    ### end Alembic commands ###
Fix the build: make the migration tolerate SQLite's lack of ALTER TABLE support by wrapping the drop operations in try/except.
"""Remove unused tables
Revision ID: bc497a3d230
Revises: 9440ed72930
Create Date: 2017-05-10 18:27:13.954893
"""
# revision identifiers, used by Alembic.
revision = 'bc497a3d230'
down_revision = '9440ed72930'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
    """Drop the unused tables and the Apps columns/foreign key.

    SQLite cannot ALTER TABLE to drop constraints or columns, so those
    operations are attempted best-effort and skipped on that backend.
    """
    ### commands auto generated by Alembic - please adjust! ###
    try:
        op.drop_constraint(u'Apps_ibfk_2', 'Apps', type_='foreignkey')
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
    # not swallowed while still tolerating SQLite's missing ALTER support.
    except Exception:
        print("No alter in SQLite")
    try:
        op.drop_column('Apps', u'spec_id')
        op.drop_column('Apps', u'description')
    except Exception:
        print("No alter in SQLite")
    op.drop_table(u'AppVersions')
    op.drop_table(u'Messages')
    op.drop_table(u'AppVars')
    op.drop_table(u'Bundles')
    try:
        op.drop_table(u'Specs')
    except Exception:
        print("No alter in SQLite")
    ### end Alembic commands ###
def downgrade():
    """Recreate the dropped tables and the Apps columns/foreign key
    (MySQL-specific DDL, as autogenerated)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Apps', sa.Column(u'description', mysql.VARCHAR(length=1000), nullable=True))
    op.add_column('Apps', sa.Column(u'spec_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.create_foreign_key(u'Apps_ibfk_2', 'Apps', u'Specs', [u'spec_id'], [u'id'])
    op.create_table(u'Specs',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'url', mysql.VARCHAR(length=500), nullable=False),
    sa.Column(u'pid', mysql.VARCHAR(length=60), nullable=False),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'Bundles',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'lang', mysql.VARCHAR(length=15), nullable=True),
    sa.Column(u'target', mysql.VARCHAR(length=30), nullable=True),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'Bundles_ibfk_1'),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'AppVars',
    sa.Column(u'var_id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'name', mysql.VARCHAR(length=50), nullable=True),
    sa.Column(u'value', mysql.VARCHAR(length=500), nullable=True),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'AppVars_ibfk_1'),
    sa.PrimaryKeyConstraint(u'var_id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'Messages',
    sa.Column(u'id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'key', mysql.VARCHAR(length=250), nullable=True),
    sa.Column(u'value', mysql.TEXT(), nullable=True),
    sa.Column(u'bundle_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint([u'bundle_id'], [u'Bundles.id'], name=u'Messages_ibfk_1'),
    sa.PrimaryKeyConstraint(u'id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    op.create_table(u'AppVersions',
    sa.Column(u'version_id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column(u'app_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.Column(u'creation_date', sa.DATETIME(), nullable=False),
    sa.ForeignKeyConstraint([u'app_id'], [u'Apps.id'], name=u'AppVersions_ibfk_1'),
    sa.PrimaryKeyConstraint(u'version_id', u'app_id'),
    mysql_default_charset=u'utf8',
    mysql_engine=u'InnoDB'
    )
    ### end Alembic commands ###
|
f90 tool now uses common implementation for add_to_env.
|
Add a function to retrieve which perflib implements which interface.
|
Cleanup
Former-commit-id: d7ccdf72506cea5eb6adb70e3755707b1d2fbdc1 [formerly d7ccdf72506cea5eb6adb70e3755707b1d2fbdc1 [formerly 06ae4a649e270515307a5b9595a60728be38e083]]
Former-commit-id: 3b7565204bee2019183cb89c03cb3957f49b039a
Former-commit-id: bf59227f904494232e56b57cf2ac857731b6b0c9
|
Optimized build_maps
Former-commit-id: 1f4ac43df6b212fb41852facf0f62355ca1e0f81
|
First draft of glexec probe cleanup tests
git-svn-id: c36fc656d29f06fca4500ebb6a8d6f432478a0b6@16859 4e558342-562e-0410-864c-e07659590f8c
|
#!/usr/bin/env python
#
# @file develop.py
# @authors Bryan O'Sullivan, Mark Palange, Aaron Brashears
# @brief Fire and forget script to appropriately configure cmake for SL.
#
# $LicenseInfo:firstyear=2007&license=viewergpl$
#
# Copyright (c) 2007, 2008 Linden Research, Inc.
#
# Second Life Viewer Source Code
# The source code in this file ("Source Code") is provided by Linden Lab
# to you under the terms of the GNU General Public License, version 2.0
# ("GPL"), unless you have obtained a separate licensing agreement
# ("Other License"), formally executed by you and Linden Lab. Terms of
# the GPL can be found in doc/GPL-license.txt in this distribution, or
# online at http://secondlife.com/developers/opensource/gplv2
#
# There are special exceptions to the terms and conditions of the GPL as
# it is applied to this Source Code. View the full text of the exception
# in the file doc/FLOSS-exception.txt in this software distribution, or
# online at http://secondlife.com/developers/opensource/flossexception
#
# By copying, modifying or distributing this software, you acknowledge
# that you have read and understood your obligations described above,
# and agree to abide by those obligations.
#
# ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
# WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
# COMPLETENESS OR PERFORMANCE.
# $/LicenseInfo$
import errno
import getopt
import os
import random
import re
import shutil
import socket
import sys
import commands
class CommandError(Exception):
    """Raised when an external command run by this script fails."""
    pass
def mkdir(path):
    """Create *path* and return it.

    Returns None (implicitly) when the directory already exists; callers
    use the truthiness of the result to know whether it was created.
    Any other OSError is re-raised.  (Python 2 except syntax.)
    """
    try:
        os.mkdir(path)
        return path
    except OSError, err:
        # EEXIST on an existing directory is fine; anything else is fatal.
        if err.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def quote(opts):
    """Return *opts* joined into one string of double-quoted words.

    Embedded double quotes are stripped from each option (no escaping).
    """
    cleaned = [opt.replace('"', '') for opt in opts]
    return '"%s"' % '" "'.join(cleaned)
class PlatformSetup(object):
    """Base class describing how to configure and run cmake on a platform.

    Subclasses set ``generator`` and override os()/arch()/run_build().
    (Python 2 code: print statements, string exceptions.)
    """
    generator = None  # cmake generator name; supplied by subclasses
    build_types = {}
    # Map lowercase aliases to cmake's canonical build-type names.
    # NOTE: class-body loop; 't' is left behind as a class attribute.
    for t in ('Debug', 'Release', 'RelWithDebInfo'):
        build_types[t.lower()] = t
    build_type = build_types['relwithdebinfo']
    standalone = 'FALSE'
    unattended = 'FALSE'
    build_server = 'TRUE'
    package = 'FALSE'
    distcc = True
    cmake_opts = []
    def __init__(self):
        # Directory containing this script, resolved through the module.
        self.script_dir = os.path.realpath(
            os.path.dirname(__import__(__name__).__file__))
    def os(self):
        '''Return the name of the OS.'''
        # NOTE(review): NotImplemented is not callable; raising here would
        # actually produce a TypeError — presumably NotImplementedError
        # was intended.  Confirm before relying on the exception type.
        raise NotImplemented('os')
    def arch(self):
        '''Return the CPU architecture.'''
        return None
    def platform(self):
        '''Return a stringified two-tuple of the OS name and CPU
        architecture.'''
        ret = self.os()
        if self.arch():
            ret += '-' + self.arch()
        return ret
    def build_dirs(self):
        '''Return the top-level directories in which builds occur.
        This can return more than one directory, e.g. if doing a
        32-bit viewer and server build on Linux.'''
        return ['build-' + self.platform()]
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the command line to run cmake with.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            build_server=self.build_server,
            type=self.build_type.upper(),
            )
        #if simple:
        #    return 'cmake %(opts)s %(dir)r' % args
        return ('cmake -DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '-DSERVER:BOOL=%(build_server)s '
                '-G %(generator)r %(opts)s %(dir)r' % args)
    def run(self, command, name=None):
        '''Run a program.  If the program fails, raise an exception.'''
        ret = os.system(command)
        if ret:
            if name is None:
                name = command.split(None, 1)[0]
            # Decode the 16-bit os.system() status into a readable message.
            if os.WIFEXITED(ret):
                event = 'exited'
                status = 'status %d' % os.WEXITSTATUS(ret)
            elif os.WIFSIGNALED(ret):
                event = 'was killed'
                status = 'signal %d' % os.WTERMSIG(ret)
            else:
                event = 'died unexpectedly (!?)'
                status = '16-bit status %d' % ret
            raise CommandError('the command %r %s with %s' %
                               (name, event, status))
    def run_cmake(self, args=[]):
        '''Run cmake.'''
        # do a sanity check to make sure we have a generator
        if not hasattr(self, 'generator'):
            raise "No generator available for '%s'" % (self.__name__,)
        cwd = os.getcwd()
        created = []
        try:
            for d in self.build_dirs():
                # mkdir() returns the path only when it created the
                # directory; 'simple' marks pre-existing build dirs.
                simple = True
                if mkdir(d):
                    created.append(d)
                    simple = False
                try:
                    os.chdir(d)
                    cmd = self.cmake_commandline(cwd, d, args, simple)
                    print 'Running %r in %r' % (cmd, d)
                    self.run(cmd, 'cmake')
                finally:
                    os.chdir(cwd)
        except:
            # If we created a directory in which to run cmake and
            # something went wrong, the directory probably just
            # contains garbage, so delete it.
            os.chdir(cwd)
            for d in created:
                print 'Cleaning %r' % d
                shutil.rmtree(d)
            raise
    def parse_build_opts(self, arguments):
        # Split '-o/--option' values out from the remaining (target) args.
        opts, targets = getopt.getopt(arguments, 'o:', ['option='])
        build_opts = []
        for o, a in opts:
            if o in ('-o', '--option'):
                build_opts.append(a)
        return build_opts, targets
    def run_build(self, opts, targets):
        '''Build the default targets for this platform.'''
        # NOTE(review): same NotImplemented-vs-NotImplementedError issue
        # as in os() above.
        raise NotImplemented('run_build')
    def cleanup(self):
        '''Delete all build directories.'''
        cleaned = 0
        for d in self.build_dirs():
            if os.path.isdir(d):
                print 'Cleaning %r' % d
                shutil.rmtree(d)
                cleaned += 1
        if not cleaned:
            print 'Nothing to clean up!'
    def is_internal_tree(self):
        '''Indicate whether we are building in an internal source tree.'''
        return os.path.isdir(os.path.join(self.script_dir, 'newsim'))
class UnixSetup(PlatformSetup):
    '''Generic Unixy build instructions.'''
    def __init__(self):
        super(UnixSetup, self).__init__()
        self.generator = 'Unix Makefiles'
    def os(self):
        return 'unix'
    def arch(self):
        # Normalise uname's machine field to a small set of CPU names.
        cpu = os.uname()[-1]
        if cpu.endswith('386'):
            cpu = 'i386'
        elif cpu.endswith('86'):
            cpu = 'i686'
        elif cpu in ('athlon',):
            cpu = 'i686'
        elif cpu == 'Power Macintosh':
            cpu = 'ppc'
        return cpu
class LinuxSetup(UnixSetup):
    # Linux build: cmake with Unix Makefiles, optionally distributed
    # compilation via distcc, viewer plus (internally) server trees.
    def __init__(self):
        super(LinuxSetup, self).__init__()
    def os(self):
        return 'linux'
    def build_dirs(self):
        '''Return viewer (and possibly server) build directories.'''
        # Only build the server code if (a) we have it and (b) we're
        # on 32-bit x86.
        if self.arch() == 'i686' and self.is_internal_tree():
            return ['viewer-' + self.platform(), 'server-' + self.platform()]
        else:
            return ['viewer-' + self.platform()]
    def find_in_path(self, name, defval=None, basename=False):
        '''Search PATH for an executable; return it as a one-element list
        (empty list if absent) so callers can concatenate results.
        If basename is true, only the file name is returned.'''
        for p in os.getenv('PATH', '/usr/bin').split(':'):
            path = os.path.join(p, name)
            if os.access(path, os.X_OK):
                return [basename and os.path.basename(path) or path]
        if defval:
            return [defval]
        return []
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Build the cmake invocation for a viewer or server tree,
        selecting the compiler (and distcc prefix) per tree.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            type=self.build_type.upper()
            )
        if not self.is_internal_tree():
            args.update({'cxx':'g++', 'server':'FALSE', 'viewer':'TRUE'})
        else:
            if self.distcc:
                distcc = self.find_in_path('distcc')
                baseonly = True
            else:
                distcc = []
                baseonly = False
            # Server trees historically required g++ 3.3; viewer uses 4.1.
            if 'server' in build_dir:
                gcc33 = distcc + self.find_in_path('g++-3.3', 'g++', baseonly)
                args.update({'cxx':' '.join(gcc33), 'server':'TRUE',
                             'viewer':'FALSE'})
            else:
                gcc41 = distcc + self.find_in_path('g++-4.1', 'g++', baseonly)
                args.update({'cxx': ' '.join(gcc41), 'server':'FALSE',
                             'viewer':'TRUE'})
        #if simple:
        #    return (('cmake %(opts)s '
        #             '-DSERVER:BOOL=%(server)s '
        #             '-DVIEWER:BOOL=%(viewer)s '
        #             '%(dir)r') % args)
        cmd = (('cmake -DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-G %(generator)r -DSERVER:BOOL=%(server)s '
                '-DVIEWER:BOOL=%(viewer)s -DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '%(opts)s %(dir)r')
               % args)
        # Respect an externally forced compiler; otherwise prefix CXX=...
        if 'CXX' not in os.environ:
            args.update({'cmd':cmd})
            cmd = ('CXX=%(cxx)r %(cmd)s' % args)
        return cmd
    def run_build(self, opts, targets):
        '''Run make in every build directory with a -j derived from any
        explicit -j option, /proc/cpuinfo and the distcc host list.'''
        # Extract an explicit -jN (or '-j N'); job_count stays None when
        # absent so we auto-detect below.
        job_count = None
        for i in range(len(opts)):
            if opts[i].startswith('-j'):
                try:
                    job_count = int(opts[i][2:])
                except ValueError:
                    try:
                        job_count = int(opts[i+1])
                    except ValueError:
                        job_count = True
        def get_cpu_count():
            # Count processor entries in /proc/cpuinfo.
            count = 0
            for line in open('/proc/cpuinfo'):
                if re.match(r'processor\s*:', line):
                    count += 1
            return count
        def localhost():
            # distcc-style 'localhost/N' entry plus the CPU count.
            count = get_cpu_count()
            return 'localhost/' + str(count), count
        def get_distcc_hosts():
            '''Read the distcc hosts file, falling back to DISTCC_HOSTS
            or the local machine.'''
            try:
                hosts = []
                name = os.getenv('DISTCC_DIR', '/etc/distcc') + '/hosts'
                for l in open(name):
                    # NOTE(review): this keeps the text *after* '#' rather
                    # than stripping trailing comments -- confirm intent.
                    l = l[l.find('#')+1:].strip()
                    if l: hosts.append(l)
                return hosts
            except IOError:
                return (os.getenv('DISTCC_HOSTS', '').split() or
                        [localhost()[0]])
        def count_distcc_hosts():
            # Total hosts and CPU slots ('host/N' counts as N, else 1).
            cpus = 0
            hosts = 0
            for host in get_distcc_hosts():
                m = re.match(r'.*/(\d+)', host)
                hosts += 1
                cpus += m and int(m.group(1)) or 1
            return hosts, cpus
        def mk_distcc_hosts():
            '''Generate a list of LL-internal machines to build on.'''
            loc_entry, cpus = localhost()
            hosts = [loc_entry]
            dead = []
            stations = [s for s in xrange(36) if s not in dead]
            random.shuffle(stations)
            hosts += ['station%d.lindenlab.com/2,lzo' % s for s in stations]
            cpus += 2 * len(stations)
            return ' '.join(hosts), cpus
        if job_count is None:
            hosts, job_count = count_distcc_hosts()
            # On internal 'station' machines with no hosts file, build a
            # farm list on the fly and export it for distcc.
            if hosts == 1 and socket.gethostname().startswith('station'):
                hosts, job_count = mk_distcc_hosts()
                os.putenv('DISTCC_HOSTS', hosts)
        opts.extend(['-j', str(job_count)])
        if targets:
            targets = ' '.join(targets)
        else:
            targets = 'all'
        for d in self.build_dirs():
            cmd = 'make -C %r %s %s' % (d, ' '.join(opts), targets)
            print 'Running %r' % cmd
            self.run(cmd)
class DarwinSetup(UnixSetup):
    # Mac OS X build via the Xcode cmake generator.
    def __init__(self):
        super(DarwinSetup, self).__init__()
        self.generator = 'Xcode'
    def os(self):
        return 'darwin'
    def arch(self):
        # Unattended (build-farm) runs produce universal binaries.
        if self.unattended == 'TRUE':
            return 'universal'
        else:
            return UnixSetup.arch(self)
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the cmake command line for an Xcode build.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            # assumes build_server is set on the setup object (option
            # handling / base-class default) -- TODO confirm.
            build_server=self.build_server,
            universal='',
            type=self.build_type.upper()
            )
        if self.unattended == 'TRUE':
            args['universal'] = '-DCMAKE_OSX_ARCHITECTURES:STRING=\'i386;ppc\''
        #if simple:
        #    return 'cmake %(opts)s %(dir)r' % args
        return ('cmake -G %(generator)r '
                '-DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '-DSERVER:BOOL=%(build_server)s '
                '%(universal)s '
                '%(opts)s %(dir)r' % args)
    def run_build(self, opts, targets):
        '''Run xcodebuild in every build directory.'''
        cwd = os.getcwd()
        if targets:
            targets = ' '.join(['-target ' + repr(t) for t in targets])
        else:
            targets = ''
        cmd = ('xcodebuild -parallelizeTargets '
               '-configuration %s %s %s' %
               (self.build_type, ' '.join(opts), targets))
        for d in self.build_dirs():
            try:
                os.chdir(d)
                print 'Running %r in %r' % (cmd, d)
                self.run(cmd)
            finally:
                os.chdir(cwd)
class WindowsSetup(PlatformSetup):
gens = {
'vc71' : {
'gen' : r'Visual Studio 7 .NET 2003',
'ver' : r'7.1'
},
'vc80' : {
'gen' : r'Visual Studio 8 2005',
'ver' : r'8.0'
},
'vc90' : {
'gen' : r'Visual Studio 9 2008',
'ver' : r'9.0'
}
}
gens['vs2003'] = gens['vc71']
gens['vs2005'] = gens['vc80']
gens['vs2008'] = gens['vc90']
def __init__(self):
super(WindowsSetup, self).__init__()
self._generator = None
self.incredibuild = False
def _get_generator(self):
if self._generator is None:
for version in 'vc71 vc80 vc90'.split():
if self.find_visual_studio(version):
self._generator = version
print 'Building with ', self.gens[version]['gen']
break
else:
print >> sys.stderr, 'Cannot find a Visual Studio installation!'
eys.exit(1)
return self._generator
def _set_generator(self, gen):
self._generator = gen
generator = property(_get_generator, _set_generator)
def os(self):
return 'win32'
def build_dirs(self):
return ['build-' + self.generator]
def cmake_commandline(self, src_dir, build_dir, opts, simple):
args = dict(
dir=src_dir,
generator=self.gens[self.generator.lower()]['gen'],
opts=quote(opts),
standalone=self.standalone,
unattended=self.unattended,
build_server=self.build_server,
package=self.package,
)
#if simple:
# return 'cmake %(opts)s "%(dir)s"' % args
return ('cmake -G "%(generator)s" '
'-DSTANDALONE:BOOL=%(standalone)s '
'-DUNATTENDED:BOOL=%(unattended)s '
'-DSERVER:BOOL=%(build_server)s '
'-DPACKAGE:BOOL=%(package)s '
'%(opts)s "%(dir)s"' % args)
def find_visual_studio(self, gen=None):
if gen is None:
gen = self._generator
gen = gen.lower()
try:
import _winreg
key_str = (r'SOFTWARE\Microsoft\VisualStudio\%s\Setup\VS' %
self.gens[gen]['ver'])
value_str = (r'EnvironmentDirectory')
print ('Reading VS environment from HKEY_LOCAL_MACHINE\%s\%s' %
(key_str, value_str))
print key_str
reg = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
key = _winreg.OpenKey(reg, key_str)
value = _winreg.QueryValueEx(key, value_str)[0]
print 'Found: %s' % value
return value
except WindowsError, err:
print >> sys.stderr, "Didn't find ", self.gens[gen]['gen']
return ''
def get_build_cmd(self):
if self.incredibuild:
config = self.build_type
if self.gens[self.generator]['ver'] in [ r'8.0', r'9.0' ]:
config = '\"%s|Win32\"' % config
return "buildconsole Secondlife.sln /build %s" % config
# devenv.com is CLI friendly, devenv.exe... not so much.
return ('"%sdevenv.com" Secondlife.sln /build %s' %
(self.find_visual_studio(), self.build_type))
# this override of run exists because the PlatformSetup version
# uses Unix/Mac only calls. Freakin' os module!
def run(self, command, name=None):
'''Run a program. If the program fails, raise an exception.'''
ret = os.system(command)
if ret:
if name is None:
name = command.split(None, 1)[0]
raise CommandError('the command %r exited with %s' %
(name, ret))
def run_cmake(self, args=[]):
'''Override to add the vstool.exe call after running cmake.'''
PlatformSetup.run_cmake(self, args)
if self.unattended == 'FALSE':
for build_dir in self.build_dirs():
vstool_cmd = os.path.join('tools','vstool','VSTool.exe') \
+ ' --solution ' \
+ os.path.join(build_dir,'SecondLife.sln') \
+ ' --config RelWithDebInfo' \
+ ' --startup secondlife-bin'
print 'Running %r in %r' % (vstool_cmd, os.getcwd())
self.run(vstool_cmd)
def run_build(self, opts, targets):
cwd = os.getcwd()
build_cmd = self.get_build_cmd()
for d in self.build_dirs():
try:
os.chdir(d)
if targets:
for t in targets:
cmd = '%s /project %s %s' % (build_cmd, t, ' '.join(opts))
print 'Running %r in %r' % (cmd, d)
self.run(cmd)
else:
cmd = '%s %s' % (build_cmd, ' '.join(opts))
print 'Running %r in %r' % (cmd, d)
self.run(cmd)
finally:
os.chdir(cwd)
class CygwinSetup(WindowsSetup):
    '''Windows build driven from a cygwin shell.'''
    def __init__(self):
        super(CygwinSetup, self).__init__()
        self.generator = 'vc71'
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the cmake command line, translating the cygwin source
        path to a DOS path for the native cmake.'''
        dos_dir = commands.getoutput("cygpath -w %s" % src_dir)
        args = dict(
            dir=dos_dir,
            generator=self.gens[self.generator.lower()]['gen'],
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            )
        # Fixed typo: the UNATTENDED cache-entry type previously read
        # 'BOOl'; use the canonical 'BOOL' like every other -D flag.
        return ('cmake -G "%(generator)s" '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '%(opts)s "%(dir)s"' % args)
# Dispatch table: sys.platform value -> PlatformSetup subclass to use.
setup_platform = {
    'darwin': DarwinSetup,
    'linux2': LinuxSetup,
    'win32' : WindowsSetup,
    'cygwin' : CygwinSetup
    }
# Help text for -h/--help; printed with .strip() in main().
usage_msg = '''
Usage: develop.py [options] command [command-options]
Options:
-h | --help print this help message
--standalone build standalone, without Linden prebuild libraries
--unattended build unattended, do not invoke any tools requiring
a human response
-t | --type=NAME build type ("Debug", "Release", or "RelWithDebInfo")
-V | --viewer-only build/configure the viewer, skipping the server, for
Windows/Mac only (Linux has auto-detect)
-P | --package config 'package' target in Windows solution that will
build an installer
-N | --no-distcc disable use of distcc
-G | --generator=NAME generator name
Windows: VC71 or VS2003 (default), VC80 (VS2005) or VC90 (VS2008)
Mac OS X: Xcode (default), Unix Makefiles
Linux: Unix Makefiles (default), KDevelop3
Commands:
build configure and build default target
clean delete all build directories (does not affect sources)
configure configure project by running cmake
If you do not specify a command, the default is "configure".
'''
def main(arguments):
setup = setup_platform[sys.platform]()
try:
opts, args = getopt.getopt(
arguments,
'?hNVPt:G:',
['help', 'standalone', 'no-distcc', 'viewer-only', 'package', 'unattended', 'type=', 'incredibuild', 'generator='])
except getopt.GetoptError, err:
print >> sys.stderr, 'Error:', err
sys.exit(1)
for o, a in opts:
if o in ('-?', '-h', '--help'):
print usage_msg.strip()
sys.exit(0)
elif o in ('--standalone',):
setup.standalone = 'TRUE'
elif o in ('--unattended',):
setup.unattended = 'TRUE'
elif o in ('-t', '--type'):
try:
setup.build_type = setup.build_types[a.lower()]
except KeyError:
print >> sys.stderr, 'Error: unknown build type', repr(a)
print >> sys.stderr, 'Supported build types:'
types = setup.build_types.values()
types.sort()
for t in types:
print ' ', t
sys.exit(1)
elif o in ('-G', '--generator'):
setup.generator = a
elif o in ('-N', '--no-distcc'):
setup.distcc = False
elif o in ('--incredibuild'):
setup.incredibuild = True
elif o in ('-V', '--viewer-only'):
setup.build_server = 'FALSE'
elif o in ('-P', '--package'):
setup.package = 'TRUE'
else:
print >> sys.stderr, 'INTERNAL ERROR: unhandled option', repr(o)
sys.exit(1)
if not args:
setup.run_cmake()
return
try:
cmd = args.pop(0)
if cmd in ('cmake', 'configure'):
setup.run_cmake(args)
elif cmd == 'build':
for d in setup.build_dirs():
if not os.path.exists(d):
raise CommandError('run "develop.py cmake" first')
setup.run_cmake()
opts, targets = setup.parse_build_opts(args)
setup.run_build(opts, targets)
elif cmd == 'clean':
if args:
raise CommandError('clean takes no arguments')
setup.cleanup()
else:
print >> sys.stderr, 'Error: unknown subcommand', repr(cmd)
print >> sys.stderr, "(run 'develop.py --help' for help)"
sys.exit(1)
except CommandError, err:
print >> sys.stderr, 'Error:', err
sys.exit(1)
except getopt.GetoptError, err:
print >> sys.stderr, 'Error with %r subcommand: %s' % (cmd, err)
sys.exit(1)
# Script entry point: pass everything after the program name to main().
if __name__ == '__main__':
    main(sys.argv[1:])
Revert last change to develop.py because it breaks manually passing -DPACKAGE:BOOL=ON on the command line.
Merging revisions 95284-95283 of svn+ssh://svn.lindenlab.com/svn/linden/release into E:\release, respecting ancestry
#!/usr/bin/env python
#
# @file develop.py
# @authors Bryan O'Sullivan, Mark Palange, Aaron Brashears
# @brief Fire and forget script to appropriately configure cmake for SL.
#
# $LicenseInfo:firstyear=2007&license=viewergpl$
#
# Copyright (c) 2007, 2008 Linden Research, Inc.
#
# Second Life Viewer Source Code
# The source code in this file ("Source Code") is provided by Linden Lab
# to you under the terms of the GNU General Public License, version 2.0
# ("GPL"), unless you have obtained a separate licensing agreement
# ("Other License"), formally executed by you and Linden Lab. Terms of
# the GPL can be found in doc/GPL-license.txt in this distribution, or
# online at http://secondlife.com/developers/opensource/gplv2
#
# There are special exceptions to the terms and conditions of the GPL as
# it is applied to this Source Code. View the full text of the exception
# in the file doc/FLOSS-exception.txt in this software distribution, or
# online at http://secondlife.com/developers/opensource/flossexception
#
# By copying, modifying or distributing this software, you acknowledge
# that you have read and understood your obligations described above,
# and agree to abide by those obligations.
#
# ALL LINDEN LAB SOURCE CODE IS PROVIDED "AS IS." LINDEN LAB MAKES NO
# WARRANTIES, EXPRESS, IMPLIED OR OTHERWISE, REGARDING ITS ACCURACY,
# COMPLETENESS OR PERFORMANCE.
# $/LicenseInfo$
import errno
import getopt
import os
import random
import re
import shutil
import socket
import sys
import commands
class CommandError(Exception):
    '''Raised when an external command or a subcommand fails; main()
    catches this and reports the message on stderr.'''
    pass
def mkdir(path):
    '''Create path and return it if newly created, else return None.
    An already-existing directory is not an error (returns None so
    run_cmake knows it did not create it); any other OSError, or a
    non-directory occupying the name, propagates.'''
    try:
        os.mkdir(path)
        return path
    except OSError, err:
        if err.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def quote(opts):
    '''Wrap each option in double quotes (stripping any embedded double
    quotes) and join them into one command-line fragment.'''
    sanitized = [opt.replace('"', '') for opt in opts]
    return '"%s"' % '" "'.join(sanitized)
class PlatformSetup(object):
    '''Platform-independent scaffolding for configuring and building.

    Subclasses supply the cmake generator plus OS/arch naming and may
    override the cmake command line and the build/run steps.'''
    generator = None        # cmake generator name; set by subclasses
    build_types = {}        # lower-cased name -> canonical build type
    for t in ('Debug', 'Release', 'RelWithDebInfo'):
        build_types[t.lower()] = t
    build_type = build_types['relwithdebinfo']
    standalone = 'FALSE'    # cmake-style boolean strings, passed via -D
    unattended = 'FALSE'
    distcc = True
    cmake_opts = []
    def __init__(self):
        # Directory containing this script; used to detect internal trees.
        self.script_dir = os.path.realpath(
            os.path.dirname(__import__(__name__).__file__))
    def os(self):
        '''Return the name of the OS (abstract).'''
        # NotImplementedError replaces the original
        # 'raise NotImplemented(...)': NotImplemented is a constant, not
        # an exception type, so raising it was a TypeError.
        raise NotImplementedError('os')
    def arch(self):
        '''Return the CPU architecture (None when it does not matter).'''
        return None
    def platform(self):
        '''Return a stringified two-tuple of the OS name and CPU
        architecture.'''
        ret = self.os()
        if self.arch():
            ret += '-' + self.arch()
        return ret
    def build_dirs(self):
        '''Return the top-level directories in which builds occur.
        This can return more than one directory, e.g. if doing a
        32-bit viewer and server build on Linux.'''
        return ['build-' + self.platform()]
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the command line to run cmake with.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            type=self.build_type.upper(),
            )
        return ('cmake -DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '-G %(generator)r %(opts)s %(dir)r' % args)
    def run(self, command, name=None):
        '''Run a program; on failure raise CommandError describing how it
        terminated (exit status or signal).'''
        ret = os.system(command)
        if ret:
            if name is None:
                name = command.split(None, 1)[0]
            if os.WIFEXITED(ret):
                event = 'exited'
                status = 'status %d' % os.WEXITSTATUS(ret)
            elif os.WIFSIGNALED(ret):
                event = 'was killed'
                status = 'signal %d' % os.WTERMSIG(ret)
            else:
                event = 'died unexpectedly (!?)'
                status = '16-bit status %d' % ret
            raise CommandError('the command %r %s with %s' %
                               (name, event, status))
    def run_cmake(self, args=None):
        '''Run cmake in every build directory, creating them as needed.

        args: extra cmake arguments.  Default is None instead of a
        mutable [] so the list is never shared between calls.'''
        if args is None:
            args = []
        # Sanity check to make sure we have a generator; raising a real
        # exception replaces the original illegal string raise, and
        # self.__class__.__name__ replaces self.__name__, which plain
        # instances do not have.
        if not hasattr(self, 'generator'):
            raise CommandError("No generator available for '%s'" %
                               (self.__class__.__name__,))
        cwd = os.getcwd()
        created = []
        try:
            for d in self.build_dirs():
                simple = True
                if mkdir(d):
                    created.append(d)
                    simple = False
                try:
                    os.chdir(d)
                    cmd = self.cmake_commandline(cwd, d, args, simple)
                    print('Running %r in %r' % (cmd, d))
                    self.run(cmd, 'cmake')
                finally:
                    os.chdir(cwd)
        except:
            # If we created a directory in which to run cmake and
            # something went wrong, the directory probably just
            # contains garbage, so delete it.
            os.chdir(cwd)
            for d in created:
                print('Cleaning %r' % d)
                shutil.rmtree(d)
            raise
    def parse_build_opts(self, arguments):
        '''Split arguments into -o/--option values and build targets.'''
        opts, targets = getopt.getopt(arguments, 'o:', ['option='])
        build_opts = []
        for o, a in opts:
            if o in ('-o', '--option'):
                build_opts.append(a)
        return build_opts, targets
    def run_build(self, opts, targets):
        '''Build the default targets for this platform (abstract).'''
        raise NotImplementedError('run_build')
    def cleanup(self):
        '''Delete all build directories.'''
        cleaned = 0
        for d in self.build_dirs():
            if os.path.isdir(d):
                print('Cleaning %r' % d)
                shutil.rmtree(d)
                cleaned += 1
        if not cleaned:
            print('Nothing to clean up!')
    def is_internal_tree(self):
        '''Indicate whether we are building in an internal source tree.'''
        return os.path.isdir(os.path.join(self.script_dir, 'newsim'))
class UnixSetup(PlatformSetup):
    '''Generic Unixy build instructions.'''
    def __init__(self):
        super(UnixSetup, self).__init__()
        self.generator = 'Unix Makefiles'
    def os(self):
        '''Generic OS family name used in build directory names.'''
        return 'unix'
    def arch(self):
        '''Normalise the machine name reported by uname.'''
        machine = os.uname()[-1]
        # Order matters: 'i386' also ends with '86'.
        if machine.endswith('386'):
            return 'i386'
        if machine.endswith('86') or machine == 'athlon':
            return 'i686'
        if machine == 'Power Macintosh':
            return 'ppc'
        return machine
class LinuxSetup(UnixSetup):
    # Linux build: cmake with Unix Makefiles, optionally distributed
    # compilation via distcc, viewer plus (internally) server trees.
    def __init__(self):
        super(LinuxSetup, self).__init__()
    def os(self):
        return 'linux'
    def build_dirs(self):
        '''Return viewer (and possibly server) build directories.'''
        # Only build the server code if (a) we have it and (b) we're
        # on 32-bit x86.
        if self.arch() == 'i686' and self.is_internal_tree():
            return ['viewer-' + self.platform(), 'server-' + self.platform()]
        else:
            return ['viewer-' + self.platform()]
    def find_in_path(self, name, defval=None, basename=False):
        '''Search PATH for an executable; return it as a one-element list
        (empty list if absent) so callers can concatenate results.
        If basename is true, only the file name is returned.'''
        for p in os.getenv('PATH', '/usr/bin').split(':'):
            path = os.path.join(p, name)
            if os.access(path, os.X_OK):
                return [basename and os.path.basename(path) or path]
        if defval:
            return [defval]
        return []
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Build the cmake invocation for a viewer or server tree,
        selecting the compiler (and distcc prefix) per tree.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            type=self.build_type.upper()
            )
        if not self.is_internal_tree():
            args.update({'cxx':'g++', 'server':'FALSE', 'viewer':'TRUE'})
        else:
            if self.distcc:
                distcc = self.find_in_path('distcc')
                baseonly = True
            else:
                distcc = []
                baseonly = False
            # Server trees historically required g++ 3.3; viewer uses 4.1.
            if 'server' in build_dir:
                gcc33 = distcc + self.find_in_path('g++-3.3', 'g++', baseonly)
                args.update({'cxx':' '.join(gcc33), 'server':'TRUE',
                             'viewer':'FALSE'})
            else:
                gcc41 = distcc + self.find_in_path('g++-4.1', 'g++', baseonly)
                args.update({'cxx': ' '.join(gcc41), 'server':'FALSE',
                             'viewer':'TRUE'})
        #if simple:
        #    return (('cmake %(opts)s '
        #             '-DSERVER:BOOL=%(server)s '
        #             '-DVIEWER:BOOL=%(viewer)s '
        #             '%(dir)r') % args)
        cmd = (('cmake -DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-G %(generator)r -DSERVER:BOOL=%(server)s '
                '-DVIEWER:BOOL=%(viewer)s -DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '%(opts)s %(dir)r')
               % args)
        # Respect an externally forced compiler; otherwise prefix CXX=...
        if 'CXX' not in os.environ:
            args.update({'cmd':cmd})
            cmd = ('CXX=%(cxx)r %(cmd)s' % args)
        return cmd
    def run_build(self, opts, targets):
        '''Run make in every build directory with a -j derived from any
        explicit -j option, /proc/cpuinfo and the distcc host list.'''
        # Extract an explicit -jN (or '-j N'); job_count stays None when
        # absent so we auto-detect below.
        job_count = None
        for i in range(len(opts)):
            if opts[i].startswith('-j'):
                try:
                    job_count = int(opts[i][2:])
                except ValueError:
                    try:
                        job_count = int(opts[i+1])
                    except ValueError:
                        job_count = True
        def get_cpu_count():
            # Count processor entries in /proc/cpuinfo.
            count = 0
            for line in open('/proc/cpuinfo'):
                if re.match(r'processor\s*:', line):
                    count += 1
            return count
        def localhost():
            # distcc-style 'localhost/N' entry plus the CPU count.
            count = get_cpu_count()
            return 'localhost/' + str(count), count
        def get_distcc_hosts():
            '''Read the distcc hosts file, falling back to DISTCC_HOSTS
            or the local machine.'''
            try:
                hosts = []
                name = os.getenv('DISTCC_DIR', '/etc/distcc') + '/hosts'
                for l in open(name):
                    # NOTE(review): this keeps the text *after* '#' rather
                    # than stripping trailing comments -- confirm intent.
                    l = l[l.find('#')+1:].strip()
                    if l: hosts.append(l)
                return hosts
            except IOError:
                return (os.getenv('DISTCC_HOSTS', '').split() or
                        [localhost()[0]])
        def count_distcc_hosts():
            # Total hosts and CPU slots ('host/N' counts as N, else 1).
            cpus = 0
            hosts = 0
            for host in get_distcc_hosts():
                m = re.match(r'.*/(\d+)', host)
                hosts += 1
                cpus += m and int(m.group(1)) or 1
            return hosts, cpus
        def mk_distcc_hosts():
            '''Generate a list of LL-internal machines to build on.'''
            loc_entry, cpus = localhost()
            hosts = [loc_entry]
            dead = []
            stations = [s for s in xrange(36) if s not in dead]
            random.shuffle(stations)
            hosts += ['station%d.lindenlab.com/2,lzo' % s for s in stations]
            cpus += 2 * len(stations)
            return ' '.join(hosts), cpus
        if job_count is None:
            hosts, job_count = count_distcc_hosts()
            # On internal 'station' machines with no hosts file, build a
            # farm list on the fly and export it for distcc.
            if hosts == 1 and socket.gethostname().startswith('station'):
                hosts, job_count = mk_distcc_hosts()
                os.putenv('DISTCC_HOSTS', hosts)
        opts.extend(['-j', str(job_count)])
        if targets:
            targets = ' '.join(targets)
        else:
            targets = 'all'
        for d in self.build_dirs():
            cmd = 'make -C %r %s %s' % (d, ' '.join(opts), targets)
            print 'Running %r' % cmd
            self.run(cmd)
class DarwinSetup(UnixSetup):
    # Mac OS X build via the Xcode cmake generator.
    def __init__(self):
        super(DarwinSetup, self).__init__()
        self.generator = 'Xcode'
    def os(self):
        return 'darwin'
    def arch(self):
        # Unattended (build-farm) runs produce universal binaries.
        if self.unattended == 'TRUE':
            return 'universal'
        else:
            return UnixSetup.arch(self)
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the cmake command line for an Xcode build.'''
        args = dict(
            dir=src_dir,
            generator=self.generator,
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            universal='',
            type=self.build_type.upper()
            )
        if self.unattended == 'TRUE':
            args['universal'] = '-DCMAKE_OSX_ARCHITECTURES:STRING=\'i386;ppc\''
        #if simple:
        #    return 'cmake %(opts)s %(dir)r' % args
        return ('cmake -G %(generator)r '
                '-DCMAKE_BUILD_TYPE:STRING=%(type)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '%(universal)s '
                '%(opts)s %(dir)r' % args)
    def run_build(self, opts, targets):
        '''Run xcodebuild in every build directory.'''
        cwd = os.getcwd()
        if targets:
            targets = ' '.join(['-target ' + repr(t) for t in targets])
        else:
            targets = ''
        cmd = ('xcodebuild -parallelizeTargets '
               '-configuration %s %s %s' %
               (self.build_type, ' '.join(opts), targets))
        for d in self.build_dirs():
            try:
                os.chdir(d)
                print 'Running %r in %r' % (cmd, d)
                self.run(cmd)
            finally:
                os.chdir(cwd)
class WindowsSetup(PlatformSetup):
gens = {
'vc71' : {
'gen' : r'Visual Studio 7 .NET 2003',
'ver' : r'7.1'
},
'vc80' : {
'gen' : r'Visual Studio 8 2005',
'ver' : r'8.0'
},
'vc90' : {
'gen' : r'Visual Studio 9 2008',
'ver' : r'9.0'
}
}
gens['vs2003'] = gens['vc71']
gens['vs2005'] = gens['vc80']
gens['vs2008'] = gens['vc90']
def __init__(self):
super(WindowsSetup, self).__init__()
self._generator = None
self.incredibuild = False
def _get_generator(self):
if self._generator is None:
for version in 'vc71 vc80 vc90'.split():
if self.find_visual_studio(version):
self._generator = version
print 'Building with ', self.gens[version]['gen']
break
else:
print >> sys.stderr, 'Cannot find a Visual Studio installation!'
eys.exit(1)
return self._generator
def _set_generator(self, gen):
self._generator = gen
generator = property(_get_generator, _set_generator)
def os(self):
return 'win32'
def build_dirs(self):
return ['build-' + self.generator]
def cmake_commandline(self, src_dir, build_dir, opts, simple):
args = dict(
dir=src_dir,
generator=self.gens[self.generator.lower()]['gen'],
opts=quote(opts),
standalone=self.standalone,
unattended=self.unattended,
)
#if simple:
# return 'cmake %(opts)s "%(dir)s"' % args
return ('cmake -G "%(generator)s" '
'-DSTANDALONE:BOOL=%(standalone)s '
'-DUNATTENDED:BOOL=%(unattended)s '
'%(opts)s "%(dir)s"' % args)
def find_visual_studio(self, gen=None):
if gen is None:
gen = self._generator
gen = gen.lower()
try:
import _winreg
key_str = (r'SOFTWARE\Microsoft\VisualStudio\%s\Setup\VS' %
self.gens[gen]['ver'])
value_str = (r'EnvironmentDirectory')
print ('Reading VS environment from HKEY_LOCAL_MACHINE\%s\%s' %
(key_str, value_str))
print key_str
reg = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
key = _winreg.OpenKey(reg, key_str)
value = _winreg.QueryValueEx(key, value_str)[0]
print 'Found: %s' % value
return value
except WindowsError, err:
print >> sys.stderr, "Didn't find ", self.gens[gen]['gen']
return ''
def get_build_cmd(self):
if self.incredibuild:
config = self.build_type
if self.gens[self.generator]['ver'] in [ r'8.0', r'9.0' ]:
config = '\"%s|Win32\"' % config
return "buildconsole Secondlife.sln /build %s" % config
# devenv.com is CLI friendly, devenv.exe... not so much.
return ('"%sdevenv.com" Secondlife.sln /build %s' %
(self.find_visual_studio(), self.build_type))
# this override of run exists because the PlatformSetup version
# uses Unix/Mac only calls. Freakin' os module!
def run(self, command, name=None):
'''Run a program. If the program fails, raise an exception.'''
ret = os.system(command)
if ret:
if name is None:
name = command.split(None, 1)[0]
raise CommandError('the command %r exited with %s' %
(name, ret))
def run_cmake(self, args=[]):
'''Override to add the vstool.exe call after running cmake.'''
PlatformSetup.run_cmake(self, args)
if self.unattended == 'FALSE':
for build_dir in self.build_dirs():
vstool_cmd = os.path.join('tools','vstool','VSTool.exe') \
+ ' --solution ' \
+ os.path.join(build_dir,'SecondLife.sln') \
+ ' --config RelWithDebInfo' \
+ ' --startup secondlife-bin'
print 'Running %r in %r' % (vstool_cmd, os.getcwd())
self.run(vstool_cmd)
def run_build(self, opts, targets):
cwd = os.getcwd()
build_cmd = self.get_build_cmd()
for d in self.build_dirs():
try:
os.chdir(d)
if targets:
for t in targets:
cmd = '%s /project %s %s' % (build_cmd, t, ' '.join(opts))
print 'Running %r in %r' % (cmd, d)
self.run(cmd)
else:
cmd = '%s %s' % (build_cmd, ' '.join(opts))
print 'Running %r in %r' % (cmd, d)
self.run(cmd)
finally:
os.chdir(cwd)
class CygwinSetup(WindowsSetup):
    '''Windows build driven from a cygwin shell.'''
    def __init__(self):
        super(CygwinSetup, self).__init__()
        self.generator = 'vc71'
    def cmake_commandline(self, src_dir, build_dir, opts, simple):
        '''Return the cmake command line, translating the cygwin source
        path to a DOS path for the native cmake.'''
        dos_dir = commands.getoutput("cygpath -w %s" % src_dir)
        args = dict(
            dir=dos_dir,
            generator=self.gens[self.generator.lower()]['gen'],
            opts=quote(opts),
            standalone=self.standalone,
            unattended=self.unattended,
            )
        # Fixed typo: the UNATTENDED cache-entry type previously read
        # 'BOOl'; use the canonical 'BOOL' like every other -D flag.
        return ('cmake -G "%(generator)s" '
                '-DUNATTENDED:BOOL=%(unattended)s '
                '-DSTANDALONE:BOOL=%(standalone)s '
                '%(opts)s "%(dir)s"' % args)
# Dispatch table: sys.platform value -> PlatformSetup subclass to use.
setup_platform = {
    'darwin': DarwinSetup,
    'linux2': LinuxSetup,
    'win32' : WindowsSetup,
    'cygwin' : CygwinSetup
    }
# Help text for -h/--help; printed with .strip() in main().
usage_msg = '''
Usage: develop.py [options] command [command-options]
Options:
-h | --help print this help message
--standalone build standalone, without Linden prebuild libraries
--unattended build unattended, do not invoke any tools requiring
a human response
-t | --type=NAME build type ("Debug", "Release", or "RelWithDebInfo")
-N | --no-distcc disable use of distcc
-G | --generator=NAME generator name
Windows: VC71 or VS2003 (default), VC80 (VS2005) or VC90 (VS2008)
Mac OS X: Xcode (default), Unix Makefiles
Linux: Unix Makefiles (default), KDevelop3
Commands:
build configure and build default target
clean delete all build directories (does not affect sources)
configure configure project by running cmake
If you do not specify a command, the default is "configure".
'''
def main(arguments):
setup = setup_platform[sys.platform]()
try:
opts, args = getopt.getopt(
arguments,
'?hNt:G:',
['help', 'standalone', 'no-distcc', 'unattended', 'type=', 'incredibuild', 'generator='])
except getopt.GetoptError, err:
print >> sys.stderr, 'Error:', err
sys.exit(1)
for o, a in opts:
if o in ('-?', '-h', '--help'):
print usage_msg.strip()
sys.exit(0)
elif o in ('--standalone',):
setup.standalone = 'TRUE'
elif o in ('--unattended',):
setup.unattended = 'TRUE'
elif o in ('-t', '--type'):
try:
setup.build_type = setup.build_types[a.lower()]
except KeyError:
print >> sys.stderr, 'Error: unknown build type', repr(a)
print >> sys.stderr, 'Supported build types:'
types = setup.build_types.values()
types.sort()
for t in types:
print ' ', t
sys.exit(1)
elif o in ('-G', '--generator'):
setup.generator = a
elif o in ('-N', '--no-distcc'):
setup.distcc = False
elif o in ('--incredibuild'):
setup.incredibuild = True
else:
print >> sys.stderr, 'INTERNAL ERROR: unhandled option', repr(o)
sys.exit(1)
if not args:
setup.run_cmake()
return
try:
cmd = args.pop(0)
if cmd in ('cmake', 'configure'):
setup.run_cmake(args)
elif cmd == 'build':
for d in setup.build_dirs():
if not os.path.exists(d):
raise CommandError('run "develop.py cmake" first')
setup.run_cmake()
opts, targets = setup.parse_build_opts(args)
setup.run_build(opts, targets)
elif cmd == 'clean':
if args:
raise CommandError('clean takes no arguments')
setup.cleanup()
else:
print >> sys.stderr, 'Error: unknown subcommand', repr(cmd)
print >> sys.stderr, "(run 'develop.py --help' for help)"
sys.exit(1)
except CommandError, err:
print >> sys.stderr, 'Error:', err
sys.exit(1)
except getopt.GetoptError, err:
print >> sys.stderr, 'Error with %r subcommand: %s' % (cmd, err)
sys.exit(1)
# Script entry point: pass everything after the program name to main().
if __name__ == '__main__':
    main(sys.argv[1:])
|
import unittest
import numpy
import six
import chainer
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check
def accuracy(x, t, ignore_label):
    # Reference implementation: move the class axis to the end, take the
    # per-example argmax as the prediction and compare with the labels.
    scores = numpy.rollaxis(x, 1, x.ndim).reshape(t.size, -1)
    labels = t.ravel()
    predictions = scores.argmax(axis=1)
    correct = predictions == labels
    if ignore_label is not None:
        considered = labels != ignore_label
        count = int(numpy.count_nonzero(correct & considered))
        total = int(considered.sum())
    else:
        count = int(numpy.count_nonzero(correct))
        total = labels.size
    # Convention: an empty denominator yields accuracy 0.0, not an error.
    if total == 0:
        return 0.0
    return float(count) / total
@testing.parameterize(
    *testing.product_dict(
        [{'x_shape': (10, 3), 't_shape': (10,)},
         {'x_shape': (10, 3, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 1, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 5), 't_shape': (10, 5)},
         {'x_shape': (10, 3, 5, 4), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1, 1), 't_shape': (10, 5, 4)}],
        [{'ignore_label': None, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'zero'}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}],
        [{'label_dtype': numpy.int8},
         {'label_dtype': numpy.int16},
         {'label_dtype': numpy.int32},
         {'label_dtype': numpy.int64}]
    )
)
class TestAccuracy(unittest.TestCase):
    # x_shape, t_shape, ignore_label, t_data, dtype and label_dtype are
    # injected per parameter set by testing.parameterize above.
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        if self.t_data == 'randint':
            self.t = numpy.random.randint(
                3, size=self.t_shape).astype(self.label_dtype)
        elif self.t_data == 'zero':
            # All labels equal ignore_label: exercises the total==0 path.
            self.t = numpy.zeros(self.t_shape).astype(self.label_dtype)
        self.check_forward_options = {}
        if self.dtype == numpy.float16:
            # float16 accumulates more rounding error; loosen tolerances.
            self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}
    def check_forward(self, x_data, t_data):
        # Compare F.accuracy against the python reference defined above.
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t, self.ignore_label)
        self.assertEqual(y.data.dtype, self.dtype)
        self.assertEqual((), y.data.shape)
        expected = accuracy(self.x, self.t, self.ignore_label)
        testing.assert_allclose(
            expected, cuda.to_cpu(y.data), **self.check_forward_options)
    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)
    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
@testing.parameterize(
    {'x_shape': (10, 3), 't_shape': (4,)},
    {'x_shape': (10, 3, 2), 't_shape': (10,)},
    {'x_shape': (10, 3, 1, 2), 't_shape': (10,)},
    {'x_shape': (10, 3, 4), 't_shape': (10, 5)},
    {'x_shape': (10, 3, 5, 2), 't_shape': (10, 5)},
    {'x_shape': (10, 3, 5, 1, 2), 't_shape': (10, 5)},
)
class TestInvalidShape(unittest.TestCase):
    # Every parameter set pairs an x whose batch/extra dimensions do not
    # match t, so F.accuracy's type checking must reject the inputs.
    def setUp(self):
        self.x = numpy.random.uniform(-1, 1,
                                      self.x_shape).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=self.t_shape).astype(numpy.int32)
    def check_invalid_shape(self, xp):
        # accuracy must raise InvalidType for mismatched x/t shapes.
        x = chainer.Variable(xp.asarray(self.x))
        t = chainer.Variable(xp.asarray(self.t))
        with self.assertRaises(type_check.InvalidType):
            chainer.functions.accuracy(x, t)
    def test_invalid_shape_cpu(self):
        self.check_invalid_shape(numpy)
    @attr.gpu
    def test_invalid_shape_gpu(self):
        self.check_invalid_shape(cuda.cupy)
# Register this module's tests with Chainer's test runner.
testing.run_module(__name__, __file__)
Simplify F.accuracy test
import unittest
import numpy
import six
import chainer
from chainer.backends import cuda
from chainer import testing
from chainer import functions
from chainer.testing import attr
from chainer.utils import force_array
from chainer.utils import type_check
def accuracy(x, t, ignore_label):
x_ = numpy.rollaxis(x, 1, x.ndim).reshape(t.size, -1)
t_ = t.ravel()
if ignore_label is not None:
count = 0
for i in six.moves.range(t_.size):
pred = x_[i].argmax()
if t_[i] != ignore_label and pred == t_[i]:
count += 1
total = (t_ != ignore_label).sum()
else:
count = 0
for i in six.moves.range(t_.size):
pred = x_[i].argmax()
if pred == t_[i]:
count += 1
total = t_.size
if total == 0:
return 0.0
else:
return float(count) / total
@testing.parameterize(
    *testing.product_dict(
        [{'x_shape': (10, 3), 't_shape': (10,)},
         {'x_shape': (10, 3, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 1, 1), 't_shape': (10,)},
         {'x_shape': (10, 3, 5), 't_shape': (10, 5)},
         {'x_shape': (10, 3, 5, 4), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1), 't_shape': (10, 5, 4)},
         {'x_shape': (10, 3, 5, 4, 1, 1), 't_shape': (10, 5, 4)}],
        [{'ignore_label': None, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'randint'},
         {'ignore_label': 0, 't_data': 'zero'}],
        [{'dtype': numpy.float16},
         {'dtype': numpy.float32},
         {'dtype': numpy.float64}],
        [{'label_dtype': numpy.int8},
         {'label_dtype': numpy.int16},
         {'label_dtype': numpy.int32},
         {'label_dtype': numpy.int64}]
    )
)
@testing.fix_random()
@testing.inject_backend_tests(
    None,
    # CPU tests
    [
        {},
    ]
    # GPU tests
    + testing.product({
        'use_cuda': [True],
        'cuda_device': [0, 1],
    })
    # ChainerX tests
    + testing.product({
        'use_chainerx': [True],
        'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'],
    })
)
class TestAccuracy(testing.FunctionTestCase):

    """FunctionTestCase for F.accuracy, checked against the reference
    implementation above."""

    def setUp(self):
        # accuracy is not differentiable, so only the forward pass is run.
        self.skip_backward_test = True
        self.skip_double_backward_test = True
        if self.dtype == numpy.float16:
            # float16 needs looser comparison tolerances.
            self.check_forward_options.update({'atol': 1e-4, 'rtol': 1e-3})

    def generate_inputs(self):
        x = numpy.random.uniform(-1, 1, self.x_shape).astype(self.dtype)
        if self.t_data == 'zero':
            t = numpy.zeros(self.t_shape).astype(self.label_dtype)
        elif self.t_data == 'randint':
            t = numpy.random.randint(
                3, size=self.t_shape).astype(self.label_dtype)
        return x, t

    def forward(self, inputs, device):
        x, t = inputs
        out = functions.accuracy(x, t, self.ignore_label)
        return out,

    def forward_expected(self, inputs):
        x, t = inputs
        reference = accuracy(x, t, self.ignore_label)
        return force_array(reference, self.dtype),
@testing.parameterize(
    {'x_shape': (10, 3), 't_shape': (4,)},
    {'x_shape': (10, 3, 2), 't_shape': (10,)},
    {'x_shape': (10, 3, 1, 2), 't_shape': (10,)},
    {'x_shape': (10, 3, 4), 't_shape': (10, 5)},
    {'x_shape': (10, 3, 5, 2), 't_shape': (10, 5)},
    {'x_shape': (10, 3, 5, 1, 2), 't_shape': (10, 5)},
)
class TestInvalidShape(unittest.TestCase):

    """accuracy must reject score/label pairs with incompatible shapes."""

    def setUp(self):
        self.x = numpy.random.uniform(
            -1, 1, self.x_shape).astype(numpy.float32)
        self.t = numpy.random.randint(
            3, size=self.t_shape).astype(numpy.int32)

    def check_invalid_shape(self, xp):
        scores = chainer.Variable(xp.asarray(self.x))
        labels = chainer.Variable(xp.asarray(self.t))
        with self.assertRaises(type_check.InvalidType):
            functions.accuracy(scores, labels)

    def test_invalid_shape_cpu(self):
        self.check_invalid_shape(numpy)

    @attr.gpu
    def test_invalid_shape_gpu(self):
        self.check_invalid_shape(cuda.cupy)
# Register this module's tests with Chainer's test runner.
testing.run_module(__name__, __file__)
|
#
# Copyright 2013 eNovance <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests alarm operation."""
import datetime
import json as jsonlib
import os
import mock
from oslo_utils import uuidutils
import six
from six import moves
import webtest
from aodh.api import app
from aodh import messaging
from aodh.storage import models
from aodh.tests import constants
from aodh.tests.functional.api import v2
def default_alarms(auth_headers):
    """Build the three threshold-alarm fixtures used by these tests.

    All alarms are owned by the user/project from *auth_headers* and share
    the same basic threshold rule scaffolding; they differ in name, id,
    severity, repeat_actions, time constraints, meter and threshold.
    """
    project_id = auth_headers['X-Project-Id']

    def _rule(meter_name, threshold):
        # Threshold rule scoped to the caller's project.
        return dict(comparison_operator='gt',
                    threshold=threshold,
                    statistic='avg',
                    evaluation_periods=60,
                    period=1,
                    meter_name=meter_name,
                    query=[{'field': 'project_id',
                            'op': 'eq', 'value': project_id}])

    def _alarm(alarm_id, name, severity, repeat, time_constraints, rule):
        # Fresh action lists per alarm so the fixtures never share state.
        return models.Alarm(name=name,
                            type='threshold',
                            enabled=True,
                            alarm_id=alarm_id,
                            description=alarm_id,
                            state='insufficient data',
                            state_reason='Not evaluated',
                            severity=severity,
                            state_timestamp=constants.MIN_DATETIME,
                            timestamp=constants.MIN_DATETIME,
                            ok_actions=[],
                            insufficient_data_actions=[],
                            alarm_actions=[],
                            repeat_actions=repeat,
                            user_id=auth_headers['X-User-Id'],
                            project_id=project_id,
                            time_constraints=time_constraints,
                            rule=rule)

    return [
        _alarm('a', 'name1', 'critical', True,
               [dict(name='testcons', start='0 11 * * *', duration=300)],
               _rule('meter.test', 2.0)),
        _alarm('b', 'name2', 'critical', False, [],
               _rule('meter.test', 4.0)),
        _alarm('c', 'name3', 'moderate', False, [],
               _rule('meter.mine', 3.0)),
    ]
class TestAlarmsBase(v2.FunctionalTest):

    """Shared plumbing for the alarm API tests: fresh auth headers per
    test plus helpers to fetch, verify, update and delete alarms."""

    def setUp(self):
        super(TestAlarmsBase, self).setUp()
        self.auth_headers = {'X-User-Id': uuidutils.generate_uuid(),
                             'X-Project-Id': uuidutils.generate_uuid()}

    @staticmethod
    def _add_default_threshold_rule(alarm):
        # The API defaults exclude_outliers to False on creation, so an
        # expected representation must carry it too before comparison.
        if alarm['type'] == 'threshold':
            alarm['threshold_rule'].setdefault('exclude_outliers', False)

    def _verify_alarm(self, json, alarm, expected_name=None):
        if expected_name and alarm.name != expected_name:
            self.fail("Alarm not found")
        self._add_default_threshold_rule(json)
        for key in json:
            # All *_rule representations are stored under the single
            # 'rule' attribute.
            storage_key = 'rule' if key.endswith('_rule') else key
            self.assertEqual(json[key], getattr(alarm, storage_key))

    def _get_alarm(self, id, auth_headers=None):
        listed = self.get_json('/alarms',
                               headers=auth_headers or self.auth_headers)
        matching = [a for a in listed if a['alarm_id'] == id]
        self.assertEqual(1, len(matching), 'alarm %s not found' % id)
        return matching[0]

    def _update_alarm(self, id, updated_data, auth_headers=None):
        body = self._get_alarm(id, auth_headers)
        body.update(updated_data)
        self.put_json('/alarms/%s' % id,
                      params=body,
                      headers=auth_headers or self.auth_headers)

    def _delete_alarm(self, id, auth_headers=None):
        self.delete('/alarms/%s' % id,
                    headers=auth_headers or self.auth_headers,
                    status=204)
class TestListEmptyAlarms(TestAlarmsBase):

    """Listing alarms with none stored returns an empty list."""

    def test_empty(self):
        listed = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual([], listed)
class TestAlarms(TestAlarmsBase):
def setUp(self):
super(TestAlarms, self).setUp()
for alarm in default_alarms(self.auth_headers):
self.alarm_conn.create_alarm(alarm)
def test_list_alarms(self):
data = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(3, len(data))
self.assertEqual(set(['name1', 'name2', 'name3']),
set(r['name'] for r in data))
self.assertEqual(set(['meter.test', 'meter.mine']),
set(r['threshold_rule']['meter_name']
for r in data if 'threshold_rule' in r))
def test_alarms_query_with_timestamp(self):
date_time = datetime.datetime(2012, 7, 2, 10, 41)
isotime = date_time.isoformat()
resp = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'timestamp',
'op': 'gt',
'value': isotime}],
expect_errors=True)
self.assertEqual(resp.status_code, 400)
self.assertEqual(resp.json['error_message']['faultstring'],
'Unknown argument: "timestamp": '
'not valid for this resource')
def test_alarms_query_with_meter(self):
resp = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'meter',
'op': 'eq',
'value': 'meter.mine'}],
)
self.assertEqual(1, len(resp))
self.assertEqual('c',
resp[0]['alarm_id'])
self.assertEqual('meter.mine',
resp[0]
['threshold_rule']
['meter_name'])
    def test_alarms_query_with_state(self):
        """Filtering on 'state' returns only alarms in that state."""
        # Overwrite fixture alarm 'c' with an 'ok'-state variant so exactly
        # one stored alarm matches the query below.
        alarm = models.Alarm(name='disabled',
                             type='threshold',
                             enabled=False,
                             alarm_id='c',
                             description='c',
                             state='ok',
                             state_reason='Not evaluated',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(comparison_operator='gt',
                                       threshold=3.0,
                                       statistic='avg',
                                       evaluation_periods=60,
                                       period=1,
                                       meter_name='meter.mine',
                                       query=[
                                           {'field': 'project_id',
                                            'op': 'eq', 'value':
                                            self.auth_headers['X-Project-Id']}
                                       ]),
                             severity='critical')
        self.alarm_conn.update_alarm(alarm)
        resp = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'state',
                                 'op': 'eq',
                                 'value': 'ok'}],
                             )
        self.assertEqual(1, len(resp))
        self.assertEqual('ok', resp[0]['state'])
def test_list_alarms_by_type(self):
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'type',
'op': 'eq',
'value': 'threshold'}])
self.assertEqual(3, len(alarms))
self.assertEqual(set(['threshold']),
set(alarm['type'] for alarm in alarms))
def test_get_not_existing_alarm(self):
resp = self.get_json('/alarms/alarm-id-3',
headers=self.auth_headers,
expect_errors=True)
self.assertEqual(404, resp.status_code)
self.assertEqual('Alarm alarm-id-3 not found in project %s' %
self.auth_headers["X-Project-Id"],
resp.json['error_message']['faultstring'])
    def test_get_alarm(self):
        """A single alarm fetched by id matches its listing entry."""
        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'name',
                                   'value': 'name1',
                                   }])
        self.assertEqual('name1', alarms[0]['name'])
        self.assertEqual('meter.test',
                         alarms[0]['threshold_rule']['meter_name'])
        one = self.get_json('/alarms/%s' % alarms[0]['alarm_id'],
                            headers=self.auth_headers)
        self.assertEqual('name1', one['name'])
        self.assertEqual('meter.test', one['threshold_rule']['meter_name'])
        self.assertEqual(alarms[0]['alarm_id'], one['alarm_id'])
        self.assertEqual(alarms[0]['repeat_actions'], one['repeat_actions'])
        self.assertEqual(alarms[0]['time_constraints'],
                         one['time_constraints'])
    def test_get_alarm_disabled(self):
        """Disabled alarms are retrievable via the enabled=False filter."""
        # Overwrite fixture alarm 'c' with a disabled variant.
        alarm = models.Alarm(name='disabled',
                             type='threshold',
                             enabled=False,
                             alarm_id='c',
                             description='c',
                             state='insufficient data',
                             state_reason='Not evaluated',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(comparison_operator='gt',
                                       threshold=3.0,
                                       statistic='avg',
                                       evaluation_periods=60,
                                       period=1,
                                       meter_name='meter.mine',
                                       query=[
                                           {'field': 'project_id',
                                            'op': 'eq', 'value':
                                            self.auth_headers['X-Project-Id']}
                                       ]),
                             severity='critical')
        self.alarm_conn.update_alarm(alarm)
        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'enabled',
                                   'value': 'False'}])
        self.assertEqual(1, len(alarms))
        self.assertEqual('disabled', alarms[0]['name'])
        one = self.get_json('/alarms/%s' % alarms[0]['alarm_id'],
                            headers=self.auth_headers)
        self.assertEqual('disabled', one['name'])
    def test_get_alarm_project_filter_wrong_op_normal_user(self):
        """Only 'eq' is implemented for project filters; 'ne' is a 400."""
        project = self.auth_headers['X-Project-Id']
        def _test(field, op):
            response = self.get_json('/alarms',
                                     q=[{'field': field,
                                         'op': op,
                                         'value': project}],
                                     expect_errors=True,
                                     status=400,
                                     headers=self.auth_headers)
            faultstring = ('Invalid input for field/attribute op. '
                           'Value: \'%(op)s\'. unimplemented operator '
                           'for %(field)s' % {'field': field, 'op': op})
            self.assertEqual(faultstring,
                             response.json['error_message']['faultstring'])
        _test('project', 'ne')
        _test('project_id', 'ne')
def test_get_alarm_project_filter_normal_user(self):
project = self.auth_headers['X-Project-Id']
def _test(field):
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': field,
'op': 'eq',
'value': project}])
self.assertEqual(3, len(alarms))
_test('project')
_test('project_id')
    def test_get_alarm_other_project_normal_user(self):
        """A non-admin querying another project's alarms gets a 401."""
        def _test(field):
            response = self.get_json('/alarms',
                                     q=[{'field': field,
                                         'op': 'eq',
                                         'value': 'other-project'}],
                                     expect_errors=True,
                                     status=401,
                                     headers=self.auth_headers)
            faultstring = 'Not Authorized to access project other-project'
            self.assertEqual(faultstring,
                             response.json['error_message']['faultstring'])
        _test('project')
        _test('project_id')
    def test_get_alarm_forbiden(self):
        """A deny-all policy file makes listing alarms fail with 403."""
        # NOTE(review): "forbiden" is a typo for "forbidden"; the name is
        # kept because tests are discovered and referenced by name.
        pf = os.path.abspath('aodh/tests/functional/api/v2/policy.json-test')
        self.CONF.set_override('policy_file', pf, group='oslo_policy')
        self.CONF.set_override('auth_mode', None, group='api')
        # Rebuild the app so it picks up the overridden policy file.
        self.app = webtest.TestApp(app.load_app(self.CONF))
        response = self.get_json('/alarms',
                                 expect_errors=True,
                                 status=403,
                                 headers=self.auth_headers)
        faultstring = 'RBAC Authorization Failed'
        self.assertEqual(403, response.status_code)
        self.assertEqual(faultstring,
                         response.json['error_message']['faultstring'])
    def test_post_alarm_wsme_workaround(self):
        """Each missing mandatory field yields a 400 naming that field."""
        # Map of missing-field path -> alarm body lacking that field.
        jsons = {
            'type': {
                'name': 'missing type',
                'threshold_rule': {
                    'meter_name': 'ameter',
                    'threshold': 2.0,
                }
            },
            'name': {
                'type': 'threshold',
                'threshold_rule': {
                    'meter_name': 'ameter',
                    'threshold': 2.0,
                }
            },
            'threshold_rule/meter_name': {
                'name': 'missing meter_name',
                'type': 'threshold',
                'threshold_rule': {
                    'threshold': 2.0,
                }
            },
            'threshold_rule/threshold': {
                'name': 'missing threshold',
                'type': 'threshold',
                'threshold_rule': {
                    'meter_name': 'ameter',
                }
            },
        }
        for field, json in six.iteritems(jsons):
            resp = self.post_json('/alarms', params=json, expect_errors=True,
                                  status=400, headers=self.auth_headers)
            # The error names only the leaf attribute, not the full path.
            self.assertEqual("Invalid input for field/attribute %s."
                             " Value: \'None\'. Mandatory field missing."
                             % field.split('/', 1)[-1],
                             resp.json['error_message']['faultstring'])
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_time_constraint_start(self):
        """A non-cron 'start' in a time constraint is rejected (400)."""
        json = {
            'name': 'added_alarm_invalid_constraint_duration',
            'type': 'threshold',
            'time_constraints': [
                {
                    'name': 'testcons',
                    'start': '11:00am',
                    'duration': 10
                }
            ],
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, expect_errors=True, status=400,
                       headers=self.auth_headers)
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_duplicate_time_constraint_name(self):
        """Two time constraints with the same name are rejected (400)."""
        json = {
            'name': 'added_alarm_duplicate_constraint_name',
            'type': 'threshold',
            'time_constraints': [
                {
                    'name': 'testcons',
                    'start': '* 11 * * *',
                    'duration': 10
                },
                {
                    'name': 'testcons',
                    'start': '* * * * *',
                    'duration': 20
                }
            ],
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        self.assertEqual(
            "Time constraint names must be unique for a given alarm.",
            resp.json['error_message']['faultstring'])
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_alarm_null_time_constraint(self):
        """A null time_constraints field is accepted on creation (201)."""
        json = {
            'name': 'added_alarm_invalid_constraint_duration',
            'type': 'threshold',
            'time_constraints': None,
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
    def test_post_invalid_alarm_time_constraint_duration(self):
        """A negative time-constraint duration is rejected (400)."""
        json = {
            'name': 'added_alarm_invalid_constraint_duration',
            'type': 'threshold',
            'time_constraints': [
                {
                    'name': 'testcons',
                    'start': '* 11 * * *',
                    'duration': -1,
                }
            ],
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, expect_errors=True, status=400,
                       headers=self.auth_headers)
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_time_constraint_timezone(self):
        """An unknown timezone in a time constraint is rejected (400)."""
        json = {
            'name': 'added_alarm_invalid_constraint_timezone',
            'type': 'threshold',
            'time_constraints': [
                {
                    'name': 'testcons',
                    'start': '* 11 * * *',
                    'duration': 10,
                    'timezone': 'aaaa'
                }
            ],
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, expect_errors=True, status=400,
                       headers=self.auth_headers)
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_period(self):
        """A negative threshold-rule period is rejected (400)."""
        json = {
            'name': 'added_alarm_invalid_period',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'statistic': 'avg',
                'period': -1,
            }
        }
        self.post_json('/alarms', params=json, expect_errors=True, status=400,
                       headers=self.auth_headers)
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
def test_post_null_rule(self):
json = {
'name': 'added_alarm_invalid_threshold_rule',
'type': 'threshold',
'threshold_rule': None,
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
self.assertEqual(
"threshold_rule must be set for threshold type alarm",
resp.json['error_message']['faultstring'])
    def test_post_invalid_alarm_input_state(self):
        """An unknown alarm state is rejected (400)."""
        json = {
            'name': 'alarm1',
            'state': 'bad_state',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute state."
                            " Value: 'bad_state'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_input_severity(self):
        """An unknown severity value is rejected (400)."""
        json = {
            'name': 'alarm1',
            'state': 'ok',
            'severity': 'bad_value',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute severity."
                            " Value: 'bad_value'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_input_type(self):
        """An unknown alarm type is rejected (400)."""
        json = {
            'name': 'alarm3',
            'state': 'ok',
            'type': 'bad_type',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute"
                            " type."
                            " Value: 'bad_type'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_input_enabled_str(self):
        """A non-boolean string for 'enabled' is rejected (400)."""
        json = {
            'name': 'alarm5',
            'enabled': 'bad_enabled',
            'state': 'ok',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = "Value not an unambiguous boolean: bad_enabled"
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
    def test_post_invalid_alarm_input_enabled_int(self):
        """Integer 0 for 'enabled' is coerced to False and accepted."""
        json = {
            'name': 'alarm6',
            'enabled': 0,
            'state': 'ok',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json,
                              headers=self.auth_headers)
        self.assertFalse(resp.json['enabled'])
        # The alarm was created on top of the three fixtures.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(4, len(alarms))
    def _do_post_alarm_invalid_action(self, ok_actions=None,
                                      alarm_actions=None,
                                      insufficient_data_actions=None,
                                      error_message=None):
        """Post an otherwise-valid alarm with the given action lists and
        assert a 400 whose faultstring equals *error_message*.

        Each action parameter defaults to an empty list when omitted.
        """
        ok_actions = ok_actions or []
        alarm_actions = alarm_actions or []
        insufficient_data_actions = insufficient_data_actions or []
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'threshold',
            'ok_actions': ok_actions,
            'alarm_actions': alarm_actions,
            'insufficient_data_actions': insufficient_data_actions,
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            }
        }
        resp = self.post_json('/alarms', params=json, status=400,
                              headers=self.auth_headers)
        # Nothing was created; only the three fixtures remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
        self.assertEqual(error_message,
                         resp.json['error_message']['faultstring'])
    def test_post_invalid_alarm_ok_actions(self):
        """An unsupported URL scheme in ok_actions is rejected."""
        self._do_post_alarm_invalid_action(
            ok_actions=['spam://something/ok'],
            error_message='Unsupported action spam://something/ok')
    def test_post_invalid_alarm_alarm_actions(self):
        """An unsupported URL scheme in alarm_actions is rejected."""
        self._do_post_alarm_invalid_action(
            alarm_actions=['spam://something/alarm'],
            error_message='Unsupported action spam://something/alarm')
    def test_post_invalid_alarm_insufficient_data_actions(self):
        """An unsupported URL scheme in insufficient_data_actions is rejected."""
        self._do_post_alarm_invalid_action(
            insufficient_data_actions=['spam://something/insufficient'],
            error_message='Unsupported action spam://something/insufficient')
    @staticmethod
    def _fake_urlsplit(*args, **kwargs):
        """Stand-in for netutils.urlsplit that always raises."""
        raise Exception("Evil urlsplit!")
    def test_post_invalid_alarm_actions_format(self):
        """An unparseable action URL is rejected with a parse error."""
        with mock.patch('oslo_utils.netutils.urlsplit',
                        self._fake_urlsplit):
            self._do_post_alarm_invalid_action(
                alarm_actions=['http://[::1'],
                error_message='Unable to parse action http://[::1')
    def test_post_alarm_defaults(self):
        """Omitted optional fields take documented defaults on creation."""
        to_check = {
            'enabled': True,
            'name': 'added_alarm_defaults',
            'ok_actions': [],
            'alarm_actions': [],
            'insufficient_data_actions': [],
            'repeat_actions': False,
        }
        json = {
            'name': 'added_alarm_defaults',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(4, len(alarms))
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                for key in to_check:
                    self.assertEqual(to_check[key],
                                     getattr(alarm, key))
                break
        else:
            self.fail("Alarm not found")
    def test_post_alarm_with_same_name(self):
        """Duplicate alarm names are allowed; each gets a distinct id."""
        json = {
            'enabled': False,
            'name': 'dup_alarm_name',
            'state': 'ok',
            'type': 'threshold',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            }
        }
        resp1 = self.post_json('/alarms', params=json, status=201,
                               headers=self.auth_headers)
        resp2 = self.post_json('/alarms', params=json, status=201,
                               headers=self.auth_headers)
        self.assertEqual(resp1.json['name'], resp2.json['name'])
        self.assertNotEqual(resp1.json['alarm_id'], resp2.json['alarm_id'])
        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'name',
                                   'value': 'dup_alarm_name'}])
        self.assertEqual(2, len(alarms))
    def _do_test_post_alarm(self, exclude_outliers=None):
        """Create an alarm and verify it round-trips through storage.

        *exclude_outliers*, when not None, is set on the threshold rule to
        exercise the True/False/defaulted cases.
        """
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'state_reason': 'ignored',
            'type': 'threshold',
            'severity': 'low',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            }
        }
        if exclude_outliers is not None:
            json['threshold_rule']['exclude_outliers'] = exclude_outliers
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        # The API implicitly scopes the rule query to the caller's project.
        json['threshold_rule']['query'].append({
            'field': 'project_id', 'op': 'eq',
            'value': self.auth_headers['X-Project-Id']})
        # to check to IntegerType type conversion
        json['threshold_rule']['evaluation_periods'] = 3
        json['threshold_rule']['period'] = 180
        # to check it's read only
        json['state_reason'] = "Not evaluated yet"
        self._verify_alarm(json, alarms[0], 'added_alarm')
    def test_post_alarm_outlier_exclusion_set(self):
        """exclude_outliers=True round-trips through creation."""
        self._do_test_post_alarm(True)
    def test_post_alarm_outlier_exclusion_clear(self):
        """exclude_outliers=False round-trips through creation."""
        self._do_test_post_alarm(False)
    def test_post_alarm_outlier_exclusion_defaulted(self):
        """An omitted exclude_outliers defaults to False on creation."""
        self._do_test_post_alarm()
def test_post_alarm_noauth(self):
json = {
'enabled': False,
'name': 'added_alarm',
'state': 'ok',
'type': 'threshold',
'severity': 'low',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
'threshold_rule': {
'meter_name': 'ameter',
'query': [{'field': 'metadata.field',
'op': 'eq',
'value': '5',
'type': 'string'}],
'comparison_operator': 'le',
'statistic': 'count',
'threshold': 50,
'evaluation_periods': '3',
'exclude_outliers': False,
'period': '180',
}
}
self.post_json('/alarms', params=json, status=201)
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
# to check to BoundedInt type conversion
json['threshold_rule']['evaluation_periods'] = 3
json['threshold_rule']['period'] = 180
if alarms[0].name == 'added_alarm':
for key in json:
if key.endswith('_rule'):
storage_key = 'rule'
else:
storage_key = key
self.assertEqual(getattr(alarms[0], storage_key),
json[key])
else:
self.fail("Alarm not found")
    def _do_test_post_alarm_as_admin(self, explicit_project_constraint):
        """Test the creation of an alarm as admin for another project.

        When *explicit_project_constraint* is True the rule query carries
        the target project filter explicitly; otherwise the API is
        expected to add it implicitly.
        """
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'threshold',
            'user_id': 'auseridthatisnotmine',
            'project_id': 'aprojectidthatisnotmine',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        if explicit_project_constraint:
            project_constraint = {'field': 'project_id', 'op': 'eq',
                                  'value': 'aprojectidthatisnotmine'}
            json['threshold_rule']['query'].append(project_constraint)
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'admin'
        self.post_json('/alarms', params=json, status=201,
                       headers=headers)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self.assertEqual('auseridthatisnotmine', alarms[0].user_id)
        self.assertEqual('aprojectidthatisnotmine', alarms[0].project_id)
        self._add_default_threshold_rule(json)
        if alarms[0].name == 'added_alarm':
            for key in json:
                if key.endswith('_rule'):
                    storage_key = 'rule'
                    if explicit_project_constraint:
                        self.assertEqual(json[key],
                                         getattr(alarms[0], storage_key))
                    else:
                        # Without the explicit constraint the API must have
                        # appended the target-project filter itself.
                        query = getattr(alarms[0], storage_key).get('query')
                        self.assertEqual(2, len(query))
                        implicit_constraint = {
                            u'field': u'project_id',
                            u'value': u'aprojectidthatisnotmine',
                            u'op': u'eq'
                        }
                        self.assertEqual(implicit_constraint, query[1])
                else:
                    self.assertEqual(json[key], getattr(alarms[0], key))
        else:
            self.fail("Alarm not found")
    def test_post_alarm_as_admin_explicit_project_constraint(self):
        """Test the creation of an alarm as admin for another project.

        With an explicit query constraint on the owner's project ID.
        """
        self._do_test_post_alarm_as_admin(True)
    def test_post_alarm_as_admin_implicit_project_constraint(self):
        """Test the creation of an alarm as admin for another project.

        Without an explicit query constraint on the owner's project ID.
        """
        self._do_test_post_alarm_as_admin(False)
    def test_post_alarm_as_admin_no_user(self):
        """Test the creation of an alarm.

        Admin creates an alarm for another project but omits the user ID;
        it defaults to the requestor's user.
        """
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'threshold',
            'project_id': 'aprojectidthatisnotmine',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'},
                          {'field': 'project_id', 'op': 'eq',
                           'value': 'aprojectidthatisnotmine'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'admin'
        self.post_json('/alarms', params=json, status=201,
                       headers=headers)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self.assertEqual(self.auth_headers['X-User-Id'], alarms[0].user_id)
        self.assertEqual('aprojectidthatisnotmine', alarms[0].project_id)
        self._verify_alarm(json, alarms[0], 'added_alarm')
    def test_post_alarm_as_admin_no_project(self):
        """Test the creation of an alarm.

        Admin creates an alarm for another user but omits the project ID;
        it defaults to the requestor's project.
        """
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'threshold',
            'user_id': 'auseridthatisnotmine',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'},
                          {'field': 'project_id', 'op': 'eq',
                           'value': 'aprojectidthatisnotmine'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'admin'
        self.post_json('/alarms', params=json, status=201,
                       headers=headers)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self.assertEqual('auseridthatisnotmine', alarms[0].user_id)
        self.assertEqual(self.auth_headers['X-Project-Id'],
                         alarms[0].project_id)
        self._verify_alarm(json, alarms[0], 'added_alarm')
@staticmethod
def _alarm_representation_owned_by(identifiers):
json = {
'name': 'added_alarm',
'enabled': False,
'type': 'threshold',
'ok_actions': ['http://something/ok'],
'threshold_rule': {
'meter_name': 'ameter',
'query': [{'field': 'metadata.field',
'op': 'eq',
'value': '5',
'type': 'string'}],
'comparison_operator': 'le',
'statistic': 'count',
'threshold': 50,
'evaluation_periods': 3,
'period': 180,
}
}
for aspect, id in six.iteritems(identifiers):
json['%s_id' % aspect] = id
return json
    def _do_test_post_alarm_as_nonadmin_on_behalf_of_another(self,
                                                             identifiers):
        """Test posting an alarm.

        Test that posting an alarm as non-admin on behalf of another
        user/project fails with an explicit 401 instead of reverting
        to the requestor's identity.
        """
        json = self._alarm_representation_owned_by(identifiers)
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'demo'
        resp = self.post_json('/alarms', params=json, status=401,
                              headers=headers)
        # The 401 must name whichever aspect (user or project) was foreign.
        aspect = 'user' if 'user' in identifiers else 'project'
        params = dict(aspect=aspect, id=identifiers[aspect])
        self.assertEqual("Not Authorized to access %(aspect)s %(id)s" % params,
                         resp.json['error_message']['faultstring'])
def test_post_alarm_as_nonadmin_on_behalf_of_another_user(self):
identifiers = dict(user='auseridthatisnotmine')
self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
def test_post_alarm_as_nonadmin_on_behalf_of_another_project(self):
identifiers = dict(project='aprojectidthatisnotmine')
self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
def test_post_alarm_as_nonadmin_on_behalf_of_another_creds(self):
identifiers = dict(user='auseridthatisnotmine',
project='aprojectidthatisnotmine')
self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
    def _do_test_post_alarm_as_nonadmin_on_behalf_of_self(self, identifiers):
        """Test posting an alarm.

        Test posting an alarm as non-admin on behalf of own user/project
        creates alarm associated with the requestor's identity.
        """
        json = self._alarm_representation_owned_by(identifiers)
        # Copy the shared auth headers so the fixture is not mutated.
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'demo'
        self.post_json('/alarms', params=json, status=201, headers=headers)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        # Ownership must match the requestor's own credentials.
        self.assertEqual(alarms[0].user_id,
                         self.auth_headers['X-User-Id'])
        self.assertEqual(alarms[0].project_id,
                         self.auth_headers['X-Project-Id'])
    def test_post_alarm_as_nonadmin_on_behalf_of_own_user(self):
        """Posting with one's own user_id as non-admin succeeds."""
        identifiers = dict(user=self.auth_headers['X-User-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_own_project(self):
        """Posting with one's own project_id as non-admin succeeds."""
        identifiers = dict(project=self.auth_headers['X-Project-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_own_creds(self):
        """Posting with one's own user and project ids succeeds."""
        identifiers = dict(user=self.auth_headers['X-User-Id'],
                           project=self.auth_headers['X-Project-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
    def test_post_alarm_with_mismatch_between_type_and_rule(self):
        """Posting a gnocchi_resources_threshold alarm with a threshold
        rule is rejected with a 400 naming the expected rule field.
        """
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'gnocchi_resources_threshold',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            }
        }
        resp = self.post_json('/alarms', params=json,
                              expect_errors=True, status=400,
                              headers=self.auth_headers)
        self.assertEqual(
            "gnocchi_resources_threshold_rule must "
            "be set for gnocchi_resources_threshold type alarm",
            resp.json['error_message']['faultstring'])
    def test_post_alarm_with_duplicate_actions(self):
        """Duplicate alarm_actions entries are de-duplicated on create."""
        body = {
            'name': 'dup-alarm-actions',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            },
            'alarm_actions': ['http://no.where', 'http://no.where']
        }
        resp = self.post_json('/alarms', params=body,
                              headers=self.auth_headers)
        self.assertEqual(201, resp.status_code)
        alarms = list(self.alarm_conn.get_alarms(name='dup-alarm-actions'))
        self.assertEqual(1, len(alarms))
        # Only a single copy of the duplicated action is stored.
        self.assertEqual(['http://no.where'], alarms[0].alarm_actions)
    def test_post_alarm_with_too_many_actions(self):
        """Exceeding the configured alarm_max_actions limit yields 400."""
        self.CONF.set_override('alarm_max_actions', 1, group='api')
        body = {
            'name': 'alarm-with-many-actions',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            },
            'alarm_actions': ['http://no.where', 'http://no.where2']
        }
        resp = self.post_json('/alarms', params=body, expect_errors=True,
                              headers=self.auth_headers)
        self.assertEqual(400, resp.status_code)
        self.assertEqual("alarm_actions count exceeds maximum value 1",
                         resp.json['error_message']['faultstring'])
    def test_post_alarm_normal_user_set_log_actions(self):
        """A non-admin user may not create alarms with log:// actions."""
        body = {
            'name': 'log_alarm_actions',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            },
            'alarm_actions': ['log://']
        }
        resp = self.post_json('/alarms', params=body, expect_errors=True,
                              headers=self.auth_headers)
        self.assertEqual(401, resp.status_code)
        expected_msg = ("You are not authorized to create action: log://")
        self.assertEqual(expected_msg,
                         resp.json['error_message']['faultstring'])
    def test_post_alarm_normal_user_set_test_actions(self):
        """A non-admin user may not create alarms with test:// actions."""
        body = {
            'name': 'test_alarm_actions',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'period': '180',
            },
            'alarm_actions': ['test://']
        }
        resp = self.post_json('/alarms', params=body, expect_errors=True,
                              headers=self.auth_headers)
        self.assertEqual(401, resp.status_code)
        expected_msg = ("You are not authorized to create action: test://")
        self.assertEqual(expected_msg,
                         resp.json['error_message']['faultstring'])
def test_post_alarm_admin_user_set_log_test_actions(self):
body = {
'name': 'admin_alarm_actions',
'type': 'threshold',
'threshold_rule': {
'meter_name': 'ameter',
'query': [{'field': 'metadata.field',
'op': 'eq',
'value': '5',
'type': 'string'}],
'comparison_operator': 'le',
'statistic': 'count',
'threshold': 50,
'evaluation_periods': '3',
'period': '180',
},
'alarm_actions': ['test://', 'log://']
}
headers = self.auth_headers
headers['X-Roles'] = 'admin'
self.post_json('/alarms', params=body, status=201,
headers=headers)
alarms = list(self.alarm_conn.get_alarms(name='admin_alarm_actions'))
self.assertEqual(1, len(alarms))
self.assertEqual(['test://', 'log://'],
alarms[0].alarm_actions)
def test_exercise_state_reason(self):
body = {
'name': 'nostate',
'type': 'threshold',
'threshold_rule': {
'meter_name': 'ameter',
'query': [{'field': 'metadata.field',
'op': 'eq',
'value': '5',
'type': 'string'}],
'comparison_operator': 'le',
'statistic': 'count',
'threshold': 50,
'evaluation_periods': '3',
'period': '180',
},
}
headers = self.auth_headers
headers['X-Roles'] = 'admin'
self.post_json('/alarms', params=body, status=201,
headers=headers)
alarms = list(self.alarm_conn.get_alarms(name='nostate'))
self.assertEqual(1, len(alarms))
alarm_id = alarms[0].alarm_id
alarm = self._get_alarm(alarm_id)
self.assertEqual("insufficient data", alarm['state'])
self.assertEqual("Not evaluated yet", alarm['state_reason'])
# Ensure state reason is updated
alarm = self._get_alarm('a')
alarm['state'] = 'ok'
self.put_json('/alarms/%s' % alarm_id,
params=alarm,
headers=self.auth_headers)
alarm = self._get_alarm(alarm_id)
self.assertEqual("ok", alarm['state'])
self.assertEqual("Manually set via API", alarm['state_reason'])
# Ensure state reason read only
alarm = self._get_alarm('a')
alarm['state'] = 'alarm'
alarm['state_reason'] = 'oh no!'
self.put_json('/alarms/%s' % alarm_id,
params=alarm,
headers=self.auth_headers)
alarm = self._get_alarm(alarm_id)
self.assertEqual("alarm", alarm['state'])
self.assertEqual("Manually set via API", alarm['state_reason'])
def test_post_alarm_without_actions(self):
body = {
'name': 'alarm_actions_none',
'type': 'threshold',
'threshold_rule': {
'meter_name': 'ameter',
'query': [{'field': 'metadata.field',
'op': 'eq',
'value': '5',
'type': 'string'}],
'comparison_operator': 'le',
'statistic': 'count',
'threshold': 50,
'evaluation_periods': '3',
'period': '180',
},
'alarm_actions': None
}
headers = self.auth_headers
headers['X-Roles'] = 'admin'
self.post_json('/alarms', params=body, status=201,
headers=headers)
alarms = list(self.alarm_conn.get_alarms(name='alarm_actions_none'))
self.assertEqual(1, len(alarms))
# FIXME(sileht): This should really returns [] not None
# but SQL just stores the json dict as is...
# migration script for sql will be a mess because we have
# to parse all JSON :(
# I guess we assume that wsme convert the None input to []
# because of the array type, but it won't...
self.assertIsNone(alarms[0].alarm_actions)
    def test_post_alarm_trust(self):
        """Creating an alarm with a trust+http action creates a keystone
        trust and embeds its id in the stored action URL; deleting the
        alarm deletes that trust.
        """
        json = {
            'name': 'added_alarm_defaults',
            'type': 'threshold',
            'ok_actions': ['trust+http://my.server:1234/foo'],
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        auth = mock.Mock()
        trust_client = mock.Mock()
        # Patch the keystone client so no real trust is created.
        with mock.patch('aodh.keystone_client.get_client') as client:
            mock_session = mock.Mock()
            mock_session.get_user_id.return_value = 'my_user'
            client.return_value = mock.Mock(session=mock_session)
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                trust_client.trusts.create.return_value = mock.Mock(id='5678')
                self.post_json('/alarms', params=json, status=201,
                               headers=self.auth_headers,
                               extra_environ={'keystone.token_auth': auth})
                trust_client.trusts.create.assert_called_once_with(
                    trustor_user=self.auth_headers['X-User-Id'],
                    trustee_user='my_user',
                    project=self.auth_headers['X-Project-Id'],
                    impersonation=True,
                    role_names=[])
        alarms = list(self.alarm_conn.get_alarms())
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                # The trust id '5678' is embedded in the stored action.
                self.assertEqual(
                    ['trust+http://5678:delete@my.server:1234/foo'],
                    alarm.ok_actions)
                break
        else:
            self.fail("Alarm not found")
        # Deleting the alarm must delete the associated trust.
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                self.delete('/alarms/%s' % alarm.alarm_id,
                            headers=self.auth_headers,
                            status=204,
                            extra_environ={'keystone.token_auth': auth})
                trust_client.trusts.delete.assert_called_once_with('5678')
    def test_put_alarm(self):
        """Updating an existing alarm via PUT stores the new content."""
        json = {
            'enabled': False,
            'name': 'name_put',
            'state': 'ok',
            'type': 'threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        data = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'name',
                                 'value': 'name1',
                                 }])
        self.assertEqual(1, len(data))
        alarm_id = data[0]['alarm_id']
        self.put_json('/alarms/%s' % alarm_id,
                      params=json,
                      headers=self.auth_headers)
        alarm = list(self.alarm_conn.get_alarms(alarm_id=alarm_id,
                                                enabled=False))[0]
        # The API appends a project_id constraint to the stored query.
        json['threshold_rule']['query'].append({
            'field': 'project_id', 'op': 'eq',
            'value': self.auth_headers['X-Project-Id']})
        self._verify_alarm(json, alarm)
    def test_put_alarm_as_admin(self):
        """An admin PUT may reassign the alarm's user_id/project_id."""
        json = {
            'user_id': 'myuserid',
            'project_id': 'myprojectid',
            'enabled': False,
            'name': 'name_put',
            'state': 'ok',
            'type': 'threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'},
                          {'field': 'project_id', 'op': 'eq',
                           'value': 'myprojectid'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        # Copy the shared auth headers before adding the admin role.
        headers = {}
        headers.update(self.auth_headers)
        headers['X-Roles'] = 'admin'
        data = self.get_json('/alarms',
                             headers=headers,
                             q=[{'field': 'name',
                                 'value': 'name1',
                                 }])
        self.assertEqual(1, len(data))
        alarm_id = data[0]['alarm_id']
        self.put_json('/alarms/%s' % alarm_id,
                      params=json,
                      headers=headers)
        alarm = list(self.alarm_conn.get_alarms(alarm_id=alarm_id,
                                                enabled=False))[0]
        self.assertEqual('myuserid', alarm.user_id)
        self.assertEqual('myprojectid', alarm.project_id)
        self._verify_alarm(json, alarm)
    def test_put_alarm_wrong_field(self):
        """A PUT body with an unrecognized field is rejected with 400."""
        json = {
            'this_can_not_be_correct': 'ha',
            'enabled': False,
            'name': 'name1',
            'state': 'ok',
            'type': 'threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        data = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'name',
                                 'value': 'name1',
                                 }])
        self.assertEqual(1, len(data))
        alarm_id = data[0]['alarm_id']
        resp = self.put_json('/alarms/%s' % alarm_id,
                             expect_errors=True,
                             params=json,
                             headers=self.auth_headers)
        self.assertEqual(400, resp.status_code)
    def test_put_alarm_with_existing_name(self):
        """Test that update a threshold alarm with an existing name."""
        json = {
            'enabled': False,
            'name': 'name1',
            'state': 'ok',
            'type': 'threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        # Rename the alarm currently called 'name2' to 'name1', which
        # already belongs to a different alarm; the PUT still succeeds.
        data = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'name',
                                 'value': 'name2',
                                 }])
        self.assertEqual(1, len(data))
        alarm_id = data[0]['alarm_id']
        resp = self.put_json('/alarms/%s' % alarm_id,
                             params=json,
                             headers=self.auth_headers)
        self.assertEqual(200, resp.status_code)
    def test_put_invalid_alarm_actions(self):
        """A PUT with an unsupported action scheme is rejected with 400."""
        json = {
            'enabled': False,
            'name': 'name1',
            'state': 'ok',
            'type': 'threshold',
            'severity': 'critical',
            'ok_actions': ['spam://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.field',
                           'op': 'eq',
                           'value': '5',
                           'type': 'string'}],
                'comparison_operator': 'le',
                'statistic': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'period': 180,
            }
        }
        data = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'name',
                                 'value': 'name2',
                                 }])
        self.assertEqual(1, len(data))
        alarm_id = data[0]['alarm_id']
        resp = self.put_json('/alarms/%s' % alarm_id,
                             expect_errors=True, status=400,
                             params=json,
                             headers=self.auth_headers)
        self.assertEqual(
            'Unsupported action spam://something/ok',
            resp.json['error_message']['faultstring'])
    def test_put_alarm_trust(self):
        """Updating an alarm's actions creates a trust for a new
        trust+http action and deletes it when the action is replaced.
        """
        data = self._get_alarm('a')
        data.update({'ok_actions': ['trust+http://something/ok']})
        trust_client = mock.Mock()
        # Patch the keystone client so no real trust is created.
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                trust_client.trusts.create.return_value = mock.Mock(id='5678')
                self.put_json('/alarms/%s' % data['alarm_id'],
                              params=data,
                              headers=self.auth_headers)
        data = self._get_alarm('a')
        self.assertEqual(
            ['trust+http://5678:delete@something/ok'], data['ok_actions'])
        # Replacing the trust action must delete the trust.
        data.update({'ok_actions': ['http://no-trust-something/ok']})
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                self.put_json('/alarms/%s' % data['alarm_id'],
                              params=data,
                              headers=self.auth_headers)
                trust_client.trusts.delete.assert_called_once_with('5678')
        data = self._get_alarm('a')
        self.assertEqual(
            ['http://no-trust-something/ok'], data['ok_actions'])
    def test_delete_alarm(self):
        """DELETE removes the alarm and returns 204 with an empty body."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        resp = self.delete('/alarms/%s' % data[0]['alarm_id'],
                           headers=self.auth_headers,
                           status=204)
        self.assertEqual(b'', resp.body)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(2, len(alarms))
    def test_get_state_alarm(self):
        """GET /alarms/<id>/state returns the alarm's current state."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        resp = self.get_json('/alarms/%s/state' % data[0]['alarm_id'],
                             headers=self.auth_headers)
        self.assertEqual(resp, data[0]['state'])
    def test_set_state_alarm(self):
        """PUT /alarms/<id>/state updates state and records the reason."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        resp = self.put_json('/alarms/%s/state' % data[0]['alarm_id'],
                             headers=self.auth_headers,
                             params='alarm')
        alarms = list(self.alarm_conn.get_alarms(alarm_id=data[0]['alarm_id']))
        self.assertEqual(1, len(alarms))
        self.assertEqual('alarm', alarms[0].state)
        self.assertEqual('Manually set via API',
                         alarms[0].state_reason)
        self.assertEqual('alarm', resp.json)
    def test_set_invalid_state_alarm(self):
        """PUT of a value outside the state enum is rejected with 400."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        self.put_json('/alarms/%s/state' % data[0]['alarm_id'],
                      headers=self.auth_headers,
                      params='not valid',
                      status=400)
    def test_alarms_sends_notification(self):
        """Creating an alarm emits an 'alarm.creation' notification."""
        # Hit the AlarmsController ...
        json = {
            'name': 'sent_notification',
            'type': 'threshold',
            'severity': 'low',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'statistic': 'avg',
            }
        }
        with mock.patch.object(messaging, 'get_notifier') as get_notifier:
            notifier = get_notifier.return_value
            self.post_json('/alarms', params=json, headers=self.auth_headers)
        get_notifier.assert_called_once_with(mock.ANY,
                                             publisher_id='aodh.api')
        calls = notifier.info.call_args_list
        self.assertEqual(1, len(calls))
        args, _ = calls[0]
        context, event_type, payload = args
        self.assertEqual('alarm.creation', event_type)
        self.assertEqual('sent_notification', payload['detail']['name'])
        self.assertEqual('ameter', payload['detail']['rule']['meter_name'])
        self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
                             'project_id', 'timestamp', 'type',
                             'user_id']).issubset(payload.keys()))
    def test_alarm_sends_notification(self):
        """Updating an alarm emits an 'alarm.rule_change' notification."""
        with mock.patch.object(messaging, 'get_notifier') as get_notifier:
            notifier = get_notifier.return_value
            self._update_alarm('a', dict(name='new_name'))
        get_notifier.assert_called_once_with(mock.ANY,
                                             publisher_id='aodh.api')
        calls = notifier.info.call_args_list
        self.assertEqual(1, len(calls))
        args, _ = calls[0]
        context, event_type, payload = args
        self.assertEqual('alarm.rule_change', event_type)
        self.assertEqual('new_name', payload['detail']['name'])
        self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
                             'project_id', 'timestamp', 'type',
                             'user_id']).issubset(payload.keys()))
    def test_delete_alarm_sends_notification(self):
        """Deleting an alarm emits an 'alarm.deletion' notification."""
        with mock.patch.object(messaging, 'get_notifier') as get_notifier:
            notifier = get_notifier.return_value
            self._delete_alarm(default_alarms(self.auth_headers)[1].alarm_id)
        get_notifier.assert_called_once_with(mock.ANY,
                                             publisher_id='aodh.api')
        calls = notifier.info.call_args_list
        self.assertEqual(1, len(calls))
        args, _ = calls[0]
        context, event_type, payload = args
        self.assertEqual('alarm.deletion', event_type)
        self.assertEqual('insufficient data', payload['detail']['state'])
        self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
                             'project_id', 'timestamp', 'type', 'severity',
                             'user_id']).issubset(payload.keys()))
class TestAlarmsHistory(TestAlarmsBase):
    """Tests for the /alarms/<id>/history API endpoint."""
    def setUp(self):
        # Create one fixture alarm with alarm_id 'a' owned by the
        # authenticated user/project.
        super(TestAlarmsHistory, self).setUp()
        alarm = models.Alarm(
            name='name1',
            type='threshold',
            enabled=True,
            alarm_id='a',
            description='a',
            state='insufficient data',
            state_reason='insufficient data',
            severity='critical',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            ok_actions=[],
            insufficient_data_actions=[],
            alarm_actions=[],
            repeat_actions=True,
            user_id=self.auth_headers['X-User-Id'],
            project_id=self.auth_headers['X-Project-Id'],
            time_constraints=[dict(name='testcons',
                                   start='0 11 * * *',
                                   duration=300)],
            rule=dict(comparison_operator='gt',
                      threshold=2.0,
                      statistic='avg',
                      evaluation_periods=60,
                      period=1,
                      meter_name='meter.test',
                      query=[dict(field='project_id',
                                  op='eq',
                                  value=self.auth_headers['X-Project-Id'])
                             ]))
        self.alarm_conn.create_alarm(alarm)
    def _get_alarm_history(self, alarm_id, auth_headers=None, query=None,
                           expect_errors=False, status=200):
        """Fetch an alarm's history, optionally filtered by a single
        q.field/q.op/q.value query; assert *status* on expected errors.
        """
        url = '/alarms/%s/history' % alarm_id
        if query:
            url += '?q.op=%(op)s&q.value=%(value)s&q.field=%(field)s' % query
        resp = self.get_json(url,
                             headers=auth_headers or self.auth_headers,
                             expect_errors=expect_errors)
        if expect_errors:
            self.assertEqual(status, resp.status_code)
        return resp
    def _assert_is_subset(self, expected, actual):
        """Assert each expected field matches *actual*; a dict-valued
        'detail' is compared against the JSON-decoded stored detail.
        """
        for k, v in six.iteritems(expected):
            current = actual.get(k)
            if k == 'detail' and isinstance(v, dict):
                current = jsonlib.loads(current)
            self.assertEqual(v, current, 'mismatched field: %s' % k)
        self.assertIsNotNone(actual['event_id'])
    def _assert_in_json(self, expected, actual):
        """Assert each expected key/value appears as a JSON fragment
        inside the (re-serialized, key-sorted) *actual* JSON string.
        """
        actual = jsonlib.dumps(jsonlib.loads(actual), sort_keys=True)
        for k, v in six.iteritems(expected):
            fragment = jsonlib.dumps({k: v}, sort_keys=True)[1:-1]
            self.assertIn(fragment, actual,
                          '%s not in %s' % (fragment, actual))
    def test_record_alarm_history_config(self):
        """History is recorded only while record_history is enabled."""
        self.CONF.set_override('record_history', False)
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self._update_alarm('a', dict(name='renamed'))
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.CONF.set_override('record_history', True)
        self._update_alarm('a', dict(name='foobar'))
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
    def test_record_alarm_history_severity(self):
        """A severity change is recorded with just that field as detail."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('critical', alarm['severity'])
        self._update_alarm('a', dict(severity='low'))
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])
        self.assertEqual('low', new_alarm['severity'])
    def test_record_alarm_history_statistic(self):
        """A rule statistic change is recorded in the history detail."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('avg', alarm['threshold_rule']['statistic'])
        rule = alarm['threshold_rule'].copy()
        rule['statistic'] = 'min'
        data = dict(threshold_rule=rule)
        self._update_alarm('a', data)
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual("min", jsonlib.loads(history[0]['detail'])
                                ['rule']["statistic"])
        self.assertEqual('min', new_alarm['threshold_rule']['statistic'])
    def test_redundant_update_alarm_property_no_history_change(self):
        """Re-submitting an unchanged value adds no new history entry."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('critical', alarm['severity'])
        self._update_alarm('a', dict(severity='low'))
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])
        self.assertEqual('low', new_alarm['severity'])
        self._update_alarm('a', dict(severity='low'))
        updated_history = self._get_alarm_history('a')
        self.assertEqual(1, len(updated_history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         updated_history[0]['detail'])
        self.assertEqual(history, updated_history)
    def test_get_recorded_alarm_history_on_create(self):
        """Creation produces a single 'creation' history entry whose
        detail embeds the full alarm representation.
        """
        new_alarm = {
            'name': 'new_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [],
                'comparison_operator': 'le',
                'statistic': 'max',
                'threshold': 42.0,
                'period': 60,
                'evaluation_periods': 1,
            }
        }
        self.post_json('/alarms', params=new_alarm, status=201,
                       headers=self.auth_headers)
        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'name',
                                   'value': 'new_alarm',
                                   }])
        self.assertEqual(1, len(alarms))
        alarm = alarms[0]
        history = self._get_alarm_history(alarm['alarm_id'])
        self.assertEqual(1, len(history))
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='creation',
                                    user_id=alarm['user_id']),
                               history[0])
        self._add_default_threshold_rule(new_alarm)
        new_alarm['rule'] = new_alarm['threshold_rule']
        del new_alarm['threshold_rule']
        # The API appends a project_id constraint to the stored query.
        new_alarm['rule']['query'].append({
            'field': 'project_id', 'op': 'eq',
            'value': self.auth_headers['X-Project-Id']})
        self._assert_in_json(new_alarm, history[0]['detail'])
    def _do_test_get_recorded_alarm_history_on_update(self,
                                                      data,
                                                      type,
                                                      detail,
                                                      auth=None):
        """Update alarm 'a' with *data* (optionally as *auth*) and assert
        one history entry of *type* with the given *detail*.
        """
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self._update_alarm('a', data, auth)
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        project_id = auth['X-Project-Id'] if auth else alarm['project_id']
        user_id = auth['X-User-Id'] if auth else alarm['user_id']
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=project_id,
                                    type=type,
                                    user_id=user_id),
                               history[0])
    def test_get_recorded_alarm_history_rule_change(self):
        """A rename is recorded as a 'rule change' entry."""
        data = dict(name='renamed')
        detail = '{"name": "renamed"}'
        self._do_test_get_recorded_alarm_history_on_update(data,
                                                           'rule change',
                                                           detail)
    def test_get_recorded_alarm_history_state_transition_on_behalf_of(self):
        """History of an admin-driven state change on a member's alarm is
        visible to both parties, and on_behalf_of is not queryable.
        """
        # credentials for new non-admin user, on who's behalf the alarm
        # is created
        member_user = uuidutils.generate_uuid()
        member_project = uuidutils.generate_uuid()
        member_auth = {'X-Roles': 'member',
                       'X-User-Id': member_user,
                       'X-Project-Id': member_project}
        new_alarm = {
            'name': 'new_alarm',
            'type': 'threshold',
            'state': 'ok',
            'threshold_rule': {
                'meter_name': 'other_meter',
                'query': [{'field': 'project_id',
                           'op': 'eq',
                           'value': member_project}],
                'comparison_operator': 'le',
                'statistic': 'max',
                'threshold': 42.0,
                'evaluation_periods': 1,
                'period': 60
            }
        }
        self.post_json('/alarms', params=new_alarm, status=201,
                       headers=member_auth)
        alarm = self.get_json('/alarms', headers=member_auth)[0]
        # effect a state transition as a new administrative user
        admin_user = uuidutils.generate_uuid()
        admin_project = uuidutils.generate_uuid()
        admin_auth = {'X-Roles': 'admin',
                      'X-User-Id': admin_user,
                      'X-Project-Id': admin_project}
        data = dict(state='alarm')
        self._update_alarm(alarm['alarm_id'], data, auth_headers=admin_auth)
        self._add_default_threshold_rule(new_alarm)
        new_alarm['rule'] = new_alarm['threshold_rule']
        del new_alarm['threshold_rule']
        # ensure that both the creation event and state transition
        # are visible to the non-admin alarm owner and admin user alike
        for auth in [member_auth, admin_auth]:
            history = self._get_alarm_history(alarm['alarm_id'],
                                              auth_headers=auth)
            self.assertEqual(2, len(history), 'hist: %s' % history)
            self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                        detail={"state": "alarm",
                                                "state_reason":
                                                    "Manually set via API"},
                                        on_behalf_of=alarm['project_id'],
                                        project_id=admin_project,
                                        type='rule change',
                                        user_id=admin_user),
                                   history[0])
            self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                        on_behalf_of=alarm['project_id'],
                                        project_id=member_project,
                                        type='creation',
                                        user_id=member_user),
                                   history[1])
            self._assert_in_json(new_alarm, history[1]['detail'])
            # ensure on_behalf_of cannot be constrained in an API call
            query = dict(field='on_behalf_of',
                         op='eq',
                         value=alarm['project_id'])
            self._get_alarm_history(alarm['alarm_id'], auth_headers=auth,
                                    query=query, expect_errors=True,
                                    status=400)
    def test_get_recorded_alarm_history_segregation(self):
        """Another tenant cannot see the alarm's history (404)."""
        data = dict(name='renamed')
        detail = '{"name": "renamed"}'
        self._do_test_get_recorded_alarm_history_on_update(data,
                                                           'rule change',
                                                           detail)
        auth = {'X-Roles': 'member',
                'X-User-Id': uuidutils.generate_uuid(),
                'X-Project-Id': uuidutils.generate_uuid()}
        self._get_alarm_history('a', auth_headers=auth,
                                expect_errors=True, status=404)
    def test_delete_alarm_history_after_deletion(self):
        """History is no longer accessible once the alarm is deleted."""
        self._update_alarm('a', dict(name='renamed'))
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.delete('/alarms/%s' % 'a',
                    headers=self.auth_headers,
                    status=204)
        self._get_alarm_history('a', expect_errors=True, status=404)
    def test_get_alarm_history_ordered_by_recentness(self):
        """History entries are returned newest first."""
        for i in moves.xrange(10):
            self._update_alarm('a', dict(name='%s' % i))
        history = self._get_alarm_history('a')
        self.assertEqual(10, len(history), 'hist: %s' % history)
        self._assert_is_subset(dict(alarm_id='a',
                                    type='rule change'),
                               history[0])
        for i in moves.xrange(1, 11):
            detail = '{"name": "%s"}' % (10 - i)
            self._assert_is_subset(dict(alarm_id='a',
                                        detail=detail,
                                        type='rule change'),
                                   history[i - 1])
    def test_get_alarm_history_constrained_by_timestamp(self):
        """History can be filtered with a timestamp query."""
        alarm = self._get_alarm('a')
        self._update_alarm('a', dict(name='renamed'))
        after = datetime.datetime.utcnow().isoformat()
        query = dict(field='timestamp', op='gt', value=after)
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(0, len(history))
        query['op'] = 'le'
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        detail = '{"name": "renamed"}'
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='rule change',
                                    user_id=alarm['user_id']),
                               history[0])
    def test_get_alarm_history_constrained_by_type(self):
        """History can be filtered by entry type."""
        alarm = self._get_alarm('a')
        self._update_alarm('a', dict(name='renamed2'))
        query = dict(field='type', op='eq', value='rule change')
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        detail = '{"name": "renamed2"}'
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='rule change',
                                    user_id=alarm['user_id']),
                               history[0])
    def test_get_alarm_history_constrained_by_alarm_id_failed(self):
        """Filtering by alarm_id is not a valid history query."""
        query = dict(field='alarm_id', op='eq', value='a')
        resp = self._get_alarm_history('a', query=query,
                                       expect_errors=True, status=400)
        msg = ('Unknown argument: "alarm_id": unrecognized'
               " field in query: [<Query {key!r} eq"
               " {value!r} Unset>], valid keys: ['project', "
               "'search_offset', 'severity', 'timestamp',"
               " 'type', 'user']")
        msg = msg.format(key=u'alarm_id', value=u'a')
        self.assertEqual(msg,
                         resp.json['error_message']['faultstring'])
    def test_get_alarm_history_constrained_by_not_supported_rule(self):
        """An unknown query field is rejected with a 400 listing the
        valid keys.
        """
        query = dict(field='abcd', op='eq', value='abcd')
        resp = self._get_alarm_history('a', query=query,
                                       expect_errors=True, status=400)
        msg = ('Unknown argument: "abcd": unrecognized'
               " field in query: [<Query {key!r} eq"
               " {value!r} Unset>], valid keys: ['project', "
               "'search_offset', 'severity', 'timestamp',"
               " 'type', 'user']")
        msg = msg.format(key=u'abcd', value=u'abcd')
        self.assertEqual(msg,
                         resp.json['error_message']['faultstring'])
    def test_get_alarm_history_constrained_by_severity(self):
        """History can be filtered by severity."""
        self._update_alarm('a', dict(severity='low'))
        query = dict(field='severity', op='eq', value='low')
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])
    def test_get_nonexistent_alarm_history(self):
        """History of an unknown alarm id returns 404."""
        self._get_alarm_history('foobar', expect_errors=True, status=404)
class TestAlarmsQuotas(TestAlarmsBase):
    def _test_alarm_quota(self):
        """Create one alarm successfully, then assert that a second
        create is rejected with 403 once the quota (set to 1 by the
        calling test) is reached.
        """
        alarm = {
            'name': 'alarm',
            'type': 'threshold',
            'user_id': self.auth_headers['X-User-Id'],
            'project_id': self.auth_headers['X-Project-Id'],
            'threshold_rule': {
                'meter_name': 'testmeter',
                'query': [],
                'comparison_operator': 'le',
                'statistic': 'max',
                'threshold': 42.0,
                'period': 60,
                'evaluation_periods': 1,
            }
        }
        resp = self.post_json('/alarms', params=alarm,
                              headers=self.auth_headers)
        self.assertEqual(201, resp.status_code)
        alarms = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(alarms))
        # The second alarm must be refused by the quota check.
        alarm['name'] = 'another_user_alarm'
        resp = self.post_json('/alarms', params=alarm,
                              expect_errors=True,
                              headers=self.auth_headers)
        self.assertEqual(403, resp.status_code)
        faultstring = 'Alarm quota exceeded for user'
        self.assertIn(faultstring,
                      resp.json['error_message']['faultstring'])
        alarms = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(alarms))
def test_alarms_quotas(self):
self.CONF.set_override('user_alarm_quota', 1, 'api')
self.CONF.set_override('project_alarm_quota', 1, 'api')
self._test_alarm_quota()
def test_project_alarms_quotas(self):
self.CONF.set_override('project_alarm_quota', 1, 'api')
self._test_alarm_quota()
def test_user_alarms_quotas(self):
self.CONF.set_override('user_alarm_quota', 1, 'api')
self._test_alarm_quota()
def test_larger_limit_project_alarms_quotas(self):
self.CONF.set_override('user_alarm_quota', 1, 'api')
self.CONF.set_override('project_alarm_quota', 2, 'api')
self._test_alarm_quota()
def test_larger_limit_user_alarms_quotas(self):
self.CONF.set_override('user_alarm_quota', 2, 'api')
self.CONF.set_override('project_alarm_quota', 1, 'api')
self._test_alarm_quota()
def test_larger_limit_user_alarm_quotas_multitenant_user(self):
self.CONF.set_override('user_alarm_quota', 2, 'api')
self.CONF.set_override('project_alarm_quota', 1, 'api')
def _test(field, value):
query = [{
'field': field,
'op': 'eq',
'value': value
}]
alarms = self.get_json('/alarms', q=query,
headers=self.auth_headers)
self.assertEqual(1, len(alarms))
alarm = {
'name': 'alarm',
'type': 'threshold',
'user_id': self.auth_headers['X-User-Id'],
'project_id': self.auth_headers['X-Project-Id'],
'threshold_rule': {
'meter_name': 'testmeter',
'query': [],
'comparison_operator': 'le',
'statistic': 'max',
'threshold': 42.0,
'period': 60,
'evaluation_periods': 1,
}
}
resp = self.post_json('/alarms', params=alarm,
headers=self.auth_headers)
self.assertEqual(201, resp.status_code)
_test('project_id', self.auth_headers['X-Project-Id'])
self.auth_headers['X-Project-Id'] = uuidutils.generate_uuid()
alarm['name'] = 'another_user_alarm'
alarm['project_id'] = self.auth_headers['X-Project-Id']
resp = self.post_json('/alarms', params=alarm,
headers=self.auth_headers)
self.assertEqual(201, resp.status_code)
_test('project_id', self.auth_headers['X-Project-Id'])
self.auth_headers["X-roles"] = "admin"
alarms = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(2, len(alarms))
class TestAlarmsRuleThreshold(TestAlarmsBase):
    """Input validation for the legacy ``threshold`` alarm rule type."""

    def test_post_invalid_alarm_statistic(self):
        """An unsupported statistic value is rejected with HTTP 400."""
        json = {
            'name': 'added_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'statistic': 'magic',
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute"
                            " statistic. Value: 'magic'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        # Nothing must have been stored.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_input_comparison_operator(self):
        """An unknown comparison operator is rejected with HTTP 400."""
        json = {
            'name': 'alarm2',
            'state': 'ok',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'comparison_operator': 'bad_co',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute"
                            " comparison_operator."
                            " Value: 'bad_co'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_query(self):
        """A malformed rule query is rejected and nothing is stored."""
        json = {
            'name': 'added_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                # NOTE(review): 'field' appears twice, so this literal
                # collapses to {'field': 'gt', 'value': 'value'}; the
                # second key was presumably meant to be 'op'. The query
                # is invalid either way so the 400 still happens, but
                # this should be confirmed and cleaned up.
                'query': [{'field': 'metadata.invalid',
                           'field': 'gt',
                           'value': 'value'}],
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'statistic': 'avg',
            }
        }
        self.post_json('/alarms', params=json, expect_errors=True, status=400,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_query_field_type(self):
        """An unsupported query value type ('blob') is rejected."""
        json = {
            'name': 'added_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.valid',
                           'op': 'eq',
                           'value': 'value',
                           'type': 'blob'}],
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'statistic': 'avg',
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_error_message = 'The data type blob is not supported.'
        fault_string = resp.json['error_message']['faultstring']
        self.assertTrue(fault_string.startswith(expected_error_message))
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_query_non_field(self):
        """A query entry with an unknown attribute name is rejected."""
        json = {
            'name': 'added_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'q.field': 'metadata.valid',
                           'value': 'value'}],
                'threshold': 2.0,
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_error_message = ("Unknown attribute for argument "
                                  "data.threshold_rule.query: q.field")
        fault_string = resp.json['error_message']['faultstring']
        self.assertEqual(expected_error_message, fault_string)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_query_non_value(self):
        """A query entry with an unknown value attribute is rejected."""
        json = {
            'name': 'added_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'metadata.valid',
                           'q.value': 'value'}],
                'threshold': 2.0,
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_error_message = ("Unknown attribute for argument "
                                  "data.threshold_rule.query: q.value")
        fault_string = resp.json['error_message']['faultstring']
        self.assertEqual(expected_error_message, fault_string)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_timestamp_in_threshold_rule(self):
        """'timestamp' is not a valid field inside a threshold rule query."""
        date_time = datetime.datetime(2012, 7, 2, 10, 41)
        isotime = date_time.isoformat()
        json = {
            'name': 'invalid_alarm',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'timestamp',
                           'op': 'gt',
                           'value': isotime}],
                'comparison_operator': 'gt',
                'threshold': 2.0,
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))
        self.assertEqual(
            'Unknown argument: "timestamp": '
            'not valid for this resource',
            resp.json['error_message']['faultstring'])

    def test_post_threshold_rule_defaults(self):
        """Omitted rule attributes are filled in with documented defaults.

        Only meter_name and threshold are posted; state, description,
        query, operator, statistic, evaluation_periods and period are
        expected to take default values.
        """
        to_check = {
            'name': 'added_alarm_defaults',
            'state': 'insufficient data',
            'description': ('Alarm when ameter is eq a avg of '
                            '300.0 over 60 seconds'),
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'query': [{'field': 'project_id',
                           'op': 'eq',
                           'value': self.auth_headers['X-Project-Id']}],
                'threshold': 300.0,
                'comparison_operator': 'eq',
                'statistic': 'avg',
                'evaluation_periods': 1,
                'period': 60,
            }
        }
        self._add_default_threshold_rule(to_check)

        json = {
            'name': 'added_alarm_defaults',
            'type': 'threshold',
            'threshold_rule': {
                'meter_name': 'ameter',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                for key in to_check:
                    if key.endswith('_rule'):
                        # All *_rule attributes are stored under 'rule'.
                        storage_key = 'rule'
                    else:
                        storage_key = key
                    self.assertEqual(to_check[key],
                                     getattr(alarm, storage_key))
                break
        else:
            self.fail("Alarm not found")
class TestAlarmsRuleGnocchi(TestAlarmsBase):
    """Listing and creation of the three gnocchi-based alarm rule types."""

    def setUp(self):
        """Pre-create one alarm of each gnocchi threshold rule type."""
        super(TestAlarmsRuleGnocchi, self).setUp()
        for alarm in [
            models.Alarm(name='name1',
                         type='gnocchi_resources_threshold',
                         enabled=True,
                         alarm_id='e',
                         description='e',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   granularity=60,
                                   evaluation_periods=1,
                                   metric='meter.test',
                                   resource_type='instance',
                                   resource_id=(
                                       '6841c175-d7c4-4bc2-bc7a-1c7832271b8f'),
                                   )
                         ),
            models.Alarm(name='name2',
                         type='gnocchi_aggregation_by_metrics_threshold',
                         enabled=True,
                         alarm_id='f',
                         description='f',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   evaluation_periods=1,
                                   granularity=60,
                                   metrics=[
                                       '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                       'a1fb80f4-c242-4f57-87c6-68f47521059e']
                                   ),
                         ),
            models.Alarm(name='name3',
                         type='gnocchi_aggregation_by_resources_threshold',
                         enabled=True,
                         alarm_id='g',
                         description='f',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   granularity=60,
                                   evaluation_periods=1,
                                   metric='meter.test',
                                   resource_type='instance',
                                   query='{"=": {"server_group": '
                                   '"my_autoscaling_group"}}')
                         ),
                ]:
            self.alarm_conn.create_alarm(alarm)

    def test_list_alarms(self):
        """All three pre-created gnocchi alarms are returned."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        self.assertEqual(set(['name1', 'name2', 'name3']),
                         set(r['name'] for r in data))
        # Only the resources-threshold alarm carries this rule attribute.
        self.assertEqual(set(['meter.test']),
                         set(r['gnocchi_resources_threshold_rule']['metric']
                             for r in data
                             if 'gnocchi_resources_threshold_rule' in r))

    def test_post_gnocchi_metrics_alarm_cached(self):
        # NOTE(gordc): cache is a decorator and therefore, gets mocked across
        # entire scenario. ideally we should test both scenario but tough.
        # assume cache will return aggregation_method == ['count'] always.
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_aggregation_by_metrics_threshold_rule': {
                'metrics': ['b3d9d8ab-05e8-439f-89ad-5e978dd2a5eb',
                            '009d4faf-c275-46f0-8f2d-670b15bac2b0'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
            }
        }
        # First POST populates the capabilities cache via the client.
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        # Second POST must be served from the cache: no client call.
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            self.post_json('/alarms', params=json, headers=self.auth_headers)
            self.assertFalse(clientlib.called)

    def test_post_gnocchi_resources_alarm(self):
        """A valid gnocchi_resources_threshold alarm is created and stored."""
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_resources_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_resources_threshold_rule': {
                'metric': 'ameter',
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
                'resource_type': 'instance',
                'resource_id': '209ef69c-c10c-4efb-90ff-46f4b2d90d2e',
            }
        }
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self._verify_alarm(json, alarms[0])

    def test_post_gnocchi_metrics_alarm(self):
        """A valid aggregation-by-metrics alarm is created and stored."""
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_aggregation_by_metrics_threshold_rule': {
                'metrics': ['b3d9d8ab-05e8-439f-89ad-5e978dd2a5eb',
                            '009d4faf-c275-46f0-8f2d-670b15bac2b0'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
            }
        }
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self._verify_alarm(json, alarms[0])

    @mock.patch('aodh.keystone_client.get_client')
    def test_post_gnocchi_aggregation_alarm_project_constraint(self,
                                                               get_client):
        """The user's project is injected into the resources-rule query.

        The stored rule query must be wrapped so it only matches
        resources created by (or belonging to) the caller's project.
        """
        json = {
            'enabled': False,
            'name': 'project_constraint',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_resources_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_aggregation_by_resources_threshold_rule': {
                'metric': 'ameter',
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
                'resource_type': 'instance',
                'query': '{"=": {"server_group": "my_autoscaling_group"}}',
            }
        }

        expected_query = {"and": [
            {"or": [
                {"=": {"created_by_project_id":
                       self.auth_headers['X-Project-Id']}},
                {"and": [
                    {"=": {"created_by_project_id": "<my-uuid>"}},
                    {"=": {"project_id": self.auth_headers['X-Project-Id']}}
                ]},
            ]},
            {"=": {"server_group": "my_autoscaling_group"}},
        ]}

        ks_client = mock.Mock()
        ks_client.projects.find.return_value = mock.Mock(id='<my-uuid>')
        get_client.return_value = ks_client

        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)
            # NOTE(review): the trailing comma below turns this statement
            # into a 1-tuple expression; harmless, but probably a typo.
            self.assertEqual([mock.call(
                aggregation='count',
                metrics='ameter',
                needed_overlap=0,
                start="-1 day",
                stop="now",
                query=expected_query,
                resource_type="instance")],
                c.metric.aggregation.mock_calls),

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))

        json['gnocchi_aggregation_by_resources_threshold_rule']['query'] = (
            jsonlib.dumps(expected_query))
        self._verify_alarm(json, alarms[0])
class TestAlarmsEvent(TestAlarmsBase):
    """Listing and creation of event-type alarms."""

    def test_list_alarms(self):
        """A stored event alarm is returned with its event_rule intact."""
        alarm = models.Alarm(name='event.alarm.1',
                             type='event',
                             enabled=True,
                             alarm_id='h',
                             description='h',
                             state='insufficient data',
                             state_reason='insufficient data',
                             severity='moderate',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(event_type='event.test',
                                       query=[]),
                             )
        self.alarm_conn.create_alarm(alarm)

        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(data))
        self.assertEqual(set(['event.alarm.1']),
                         set(r['name'] for r in data))
        self.assertEqual(set(['event.test']),
                         set(r['event_rule']['event_type']
                             for r in data if 'event_rule' in r))

    def test_post_event_alarm_defaults(self):
        """An event alarm posted with minimal input gets default values."""
        to_check = {
            'enabled': True,
            'name': 'added_alarm_defaults',
            'state': 'insufficient data',
            'description': 'Alarm when * event occurred.',
            'type': 'event',
            'ok_actions': [],
            'alarm_actions': [],
            'insufficient_data_actions': [],
            'repeat_actions': False,
            'rule': {
                'event_type': '*',
                'query': [],
            }
        }

        json = {
            'name': 'added_alarm_defaults',
            'type': 'event',
            'event_rule': {
                'event_type': '*',
                'query': []
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)

        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                for key in to_check:
                    self.assertEqual(to_check[key], getattr(alarm, key))
                break
        else:
            self.fail("Alarm not found")
class TestAlarmsCompositeRule(TestAlarmsBase):
    """Listing, creation and validation of composite-rule alarms."""

    def setUp(self):
        """Build three threshold sub-rules and combine them.

        The composite rule is: sub_rule1 OR (sub_rule2 AND sub_rule3).
        """
        super(TestAlarmsCompositeRule, self).setUp()
        self.sub_rule1 = {
            "type": "threshold",
            "meter_name": "cpu_util",
            "evaluation_periods": 5,
            "threshold": 0.8,
            "query": [{
                "field": "metadata.metering.stack_id",
                "value": "36b20eb3-d749-4964-a7d2-a71147cd8147",
                "op": "eq"
            }],
            "statistic": "avg",
            "period": 60,
            "exclude_outliers": False,
            "comparison_operator": "gt"
        }
        self.sub_rule2 = {
            "type": "threshold",
            "meter_name": "disk.iops",
            "evaluation_periods": 4,
            "threshold": 200,
            "query": [{
                "field": "metadata.metering.stack_id",
                "value": "36b20eb3-d749-4964-a7d2-a71147cd8147",
                "op": "eq"
            }],
            "statistic": "max",
            "period": 60,
            "exclude_outliers": False,
            "comparison_operator": "gt"
        }
        self.sub_rule3 = {
            "type": "threshold",
            "meter_name": "network.incoming.packets.rate",
            "evaluation_periods": 3,
            "threshold": 1000,
            "query": [{
                "field": "metadata.metering.stack_id",
                "value":
                    "36b20eb3-d749-4964-a7d2-a71147cd8147",
                "op": "eq"
            }],
            "statistic": "avg",
            "period": 60,
            "exclude_outliers": False,
            "comparison_operator": "gt"
        }

        self.rule = {
            "or": [self.sub_rule1,
                   {
                       "and": [self.sub_rule2, self.sub_rule3]
                   }]}

    def test_list_alarms(self):
        """A stored composite alarm exposes its rule as composite_rule."""
        alarm = models.Alarm(name='composite_alarm',
                             type='composite',
                             enabled=True,
                             alarm_id='composite',
                             description='composite',
                             state='insufficient data',
                             state_reason='insufficient data',
                             severity='moderate',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=self.rule,
                             )
        self.alarm_conn.create_alarm(alarm)

        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(data))
        self.assertEqual(set(['composite_alarm']),
                         set(r['name'] for r in data))
        self.assertEqual(self.rule, data[0]['composite_rule'])

    def test_post_with_composite_rule(self):
        """Posting a composite alarm stores the full nested rule."""
        json = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))
        self.assertEqual(self.rule, alarms[0].rule)

    def test_post_with_sub_rule_with_wrong_type(self):
        """An unknown sub-rule type yields 400 with a descriptive fault."""
        self.sub_rule1['type'] = 'non-type'
        json = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        response = self.post_json('/alarms', params=json, status=400,
                                  expect_errors=True,
                                  headers=self.auth_headers)

        err = ("Unsupported sub-rule type :non-type in composite "
               "rule, should be one of: "
               "['gnocchi_aggregation_by_metrics_threshold', "
               "'gnocchi_aggregation_by_resources_threshold', "
               "'gnocchi_resources_threshold', 'threshold']")
        faultstring = response.json['error_message']['faultstring']
        self.assertEqual(err, faultstring)

    def test_post_with_sub_rule_with_only_required_params(self):
        """Sub-rules posted with only required attributes are accepted."""
        sub_rulea = {
            "meter_name": "cpu_util",
            "threshold": 0.8,
            "type": "threshold"}
        sub_ruleb = {
            "meter_name": "disk.iops",
            "threshold": 200,
            "type": "threshold"}
        json = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": {"and": [sub_rulea, sub_ruleb]},
            "repeat_actions": False
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))

    def test_post_with_sub_rule_with_invalid_params(self):
        """A wrongly-typed sub-rule attribute (bool threshold) yields 400."""
        self.sub_rule1['threshold'] = False
        json = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        response = self.post_json('/alarms', params=json, status=400,
                                  expect_errors=True,
                                  headers=self.auth_headers)
        faultstring = ("Invalid input for field/attribute threshold. "
                       "Value: 'False'. Wrong type. Expected '%s', got '%s'"
                       % (type(1.0), type(True)))
        self.assertEqual(faultstring,
                         response.json['error_message']['faultstring'])
class TestPaginationQuery(TestAlarmsBase):
    """Sorting, limiting and marker-based pagination of alarm listings."""

    def setUp(self):
        """Populate storage with the three default alarms (ids a, b, c)."""
        super(TestPaginationQuery, self).setUp()
        for alarm in default_alarms(self.auth_headers):
            self.alarm_conn.create_alarm(alarm)

    def test_pagination_query_single_sort(self):
        """A single sort key is honored in both directions."""
        data = self.get_json('/alarms?sort=name:desc',
                             headers=self.auth_headers)
        names = [a['name'] for a in data]
        self.assertEqual(['name3', 'name2', 'name1'], names)
        data = self.get_json('/alarms?sort=name:asc',
                             headers=self.auth_headers)
        names = [a['name'] for a in data]
        self.assertEqual(['name1', 'name2', 'name3'], names)

    def test_sort_by_severity_with_its_value(self):
        # Severity sorts by importance, not alphabetically; the backend
        # support for this is MySQL-specific.
        if self.engine != "mysql":
            self.skipTest("This is only implemented for MySQL")
        data = self.get_json('/alarms?sort=severity:asc',
                             headers=self.auth_headers)
        severities = [a['severity'] for a in data]
        self.assertEqual(['moderate', 'critical', 'critical'],
                         severities)
        data = self.get_json('/alarms?sort=severity:desc',
                             headers=self.auth_headers)
        severities = [a['severity'] for a in data]
        self.assertEqual(['critical', 'critical', 'moderate'],
                         severities)

    def test_pagination_query_limit(self):
        """limit caps the number of returned alarms."""
        data = self.get_json('/alarms?limit=2', headers=self.auth_headers)
        self.assertEqual(2, len(data))

    def test_pagination_query_limit_sort(self):
        """limit and sort can be combined."""
        data = self.get_json('/alarms?sort=name:asc&limit=2',
                             headers=self.auth_headers)
        self.assertEqual(2, len(data))

    def test_pagination_query_marker(self):
        """marker resumes the listing after the given alarm id."""
        data = self.get_json('/alarms?sort=name:desc',
                             headers=self.auth_headers)
        self.assertEqual(3, len(data))
        alarm_ids = [a['alarm_id'] for a in data]
        names = [a['name'] for a in data]
        self.assertEqual(['name3', 'name2', 'name1'], names)
        # Resume after the second alarm: only the third remains.
        marker_url = ('/alarms?sort=name:desc&marker=%s' % alarm_ids[1])
        data = self.get_json(marker_url, headers=self.auth_headers)
        self.assertEqual(1, len(data))
        new_alarm_ids = [a['alarm_id'] for a in data]
        self.assertEqual(alarm_ids[2:], new_alarm_ids)
        new_names = [a['name'] for a in data]
        self.assertEqual(['name1'], new_names)

    def test_pagination_query_multiple_sorts(self):
        """Secondary sort keys break ties left by the primary key."""
        # Duplicate the default alarms under new ids so names collide.
        new_alarms = default_alarms(self.auth_headers)
        for a_id in zip(new_alarms, ['e', 'f', 'g', 'h']):
            a_id[0].alarm_id = a_id[1]
            self.alarm_conn.create_alarm(a_id[0])
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(6, len(data))
        sort_url = '/alarms?sort=name:desc&sort=alarm_id:asc'
        data = self.get_json(sort_url, headers=self.auth_headers)
        name_ids = [(a['name'], a['alarm_id']) for a in data]
        expected = [('name3', 'c'),
                    ('name3', 'g'), ('name2', 'b'), ('name2', 'f'),
                    ('name1', 'a'), ('name1', 'e')]
        self.assertEqual(expected, name_ids)

    def test_pagination_query_invalid_sort_key(self):
        """An unsupported sort key is rejected with HTTP 400."""
        resp = self.get_json('/alarms?sort=invalid_key:desc',
                             headers=self.auth_headers,
                             expect_errors=True)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual("Invalid input for field/attribute sort. Value: "
                         "'invalid_key:desc'. the sort parameter should be"
                         " a pair of sort key and sort dir combined with "
                         "':', or only sort key specified and sort dir will "
                         "be default 'asc', the supported sort keys are: "
                         "('alarm_id', 'enabled', 'name', 'type', 'severity',"
                         " 'timestamp', 'user_id', 'project_id', 'state', "
                         "'repeat_actions', 'state_timestamp')",
                         resp.json['error_message']['faultstring'])

    def test_pagination_query_only_sort_key_specified(self):
        """A bare sort key defaults to ascending order."""
        data = self.get_json('/alarms?sort=name',
                             headers=self.auth_headers)
        names = [a['name'] for a in data]
        self.assertEqual(['name1', 'name2', 'name3'], names)

    def test_pagination_query_history_data(self):
        """Alarm history supports multi-key sorting too."""
        for i in moves.xrange(10):
            self._update_alarm('a', dict(name='%s' % i))
        url = '/alarms/a/history?sort=event_id:desc&sort=timestamp:desc'
        data = self.get_json(url, headers=self.auth_headers)
        sorted_data = sorted(data,
                             key=lambda d: (d['event_id'], d['timestamp']),
                             reverse=True)
        self.assertEqual(sorted_data, data)
# Remove ceilometer-api from test_alarm_scenarios
# Change-Id: I77bfc58e2bdfa7e50350bd47938c44a901cd505a
#
# Copyright 2013 eNovance <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests alarm operation."""
import datetime
import json as jsonlib
import operator
import os
import fixtures
import mock
from oslo_utils import uuidutils
import six
from six import moves
import webtest
from aodh.api import app
from aodh import messaging
from aodh.storage import models
from aodh.tests import constants
from aodh.tests.functional.api import v2
# Shorthand for the verbose aggregation-by-metrics rule attribute name
# used throughout the tests below.
RULE_KEY = 'gnocchi_aggregation_by_metrics_threshold_rule'
def default_alarms(auth_headers):
    """Return three canned gnocchi aggregation-by-metrics alarms.

    The alarms have ids 'a', 'b', 'c' and names 'name1'..'name3'; only
    'a' carries a time constraint and repeat_actions=True, and 'name3'
    has severity 'moderate' while the others are 'critical'. Ownership
    is taken from the given auth_headers.
    """
    return [models.Alarm(name='name1',
                         type='gnocchi_aggregation_by_metrics_threshold',
                         enabled=True,
                         alarm_id='a',
                         description='a',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=auth_headers['X-User-Id'],
                         project_id=auth_headers['X-Project-Id'],
                         time_constraints=[dict(name='testcons',
                                                start='0 11 * * *',
                                                duration=300)],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   evaluation_periods=60,
                                   granularity=1,
                                   metrics=[
                                       '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                       'a1fb80f4-c242-4f57-87c6-68f47521059e'
                                   ])
                         ),
            models.Alarm(name='name2',
                         type='gnocchi_aggregation_by_metrics_threshold',
                         enabled=True,
                         alarm_id='b',
                         description='b',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=False,
                         user_id=auth_headers['X-User-Id'],
                         project_id=auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=4.0,
                                   aggregation_method='mean',
                                   evaluation_periods=60,
                                   granularity=1,
                                   metrics=[
                                       '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                       'a1fb80f4-c242-4f57-87c6-68f47521059e'
                                   ])
                         ),
            models.Alarm(name='name3',
                         type='gnocchi_aggregation_by_metrics_threshold',
                         enabled=True,
                         alarm_id='c',
                         description='c',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='moderate',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=False,
                         user_id=auth_headers['X-User-Id'],
                         project_id=auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=3.0,
                                   aggregation_method='mean',
                                   evaluation_periods=60,
                                   granularity=1,
                                   metrics=[
                                       '95f3c171-5605-4021-87ed-eede77101268',
                                       'bf588a78-56c7-4ba4-be46-d71e5002e030',
                                   ])
                         )]
class TestAlarmsBase(v2.FunctionalTest):
    """Shared fixture and helpers for the alarm API scenario tests."""

    def setUp(self):
        """Create fresh auth headers and stub out the gnocchi client."""
        super(TestAlarmsBase, self).setUp()
        self.auth_headers = {'X-User-Id': uuidutils.generate_uuid(),
                             'X-Project-Id': uuidutils.generate_uuid()}
        # All gnocchi capability lookups return this fixed method list.
        c = mock.Mock()
        c.capabilities.list.return_value = {'aggregation_methods': [
            'count', 'mean', 'max', 'min', 'first', 'last', 'std']}
        self.useFixture(fixtures.MockPatch(
            'aodh.api.controllers.v2.alarm_rules.gnocchi.client.Client',
            return_value=c
        ))

    def _verify_alarm(self, json, alarm, expected_name=None):
        """Assert that the stored alarm matches the posted JSON.

        Keys ending in '_rule' are compared against the storage model's
        single 'rule' attribute.
        """
        if expected_name and alarm.name != expected_name:
            self.fail("Alarm not found")
        for key in json:
            if key.endswith('_rule'):
                storage_key = 'rule'
            else:
                storage_key = key
            self.assertEqual(json[key], getattr(alarm, storage_key))

    def _get_alarm(self, id, auth_headers=None):
        """Fetch a single alarm by id via the list endpoint."""
        data = self.get_json('/alarms',
                             headers=auth_headers or self.auth_headers)
        match = [a for a in data if a['alarm_id'] == id]
        self.assertEqual(1, len(match), 'alarm %s not found' % id)
        return match[0]

    def _update_alarm(self, id, updated_data, auth_headers=None):
        """PUT the alarm identified by id with the given field updates."""
        data = self._get_alarm(id, auth_headers)
        data.update(updated_data)
        self.put_json('/alarms/%s' % id,
                      params=data,
                      headers=auth_headers or self.auth_headers)

    def _delete_alarm(self, id, auth_headers=None):
        """DELETE the alarm identified by id, expecting HTTP 204."""
        self.delete('/alarms/%s' % id,
                    headers=auth_headers or self.auth_headers,
                    status=204)
class TestListEmptyAlarms(TestAlarmsBase):
    """Listing behavior when no alarms exist."""

    def test_empty(self):
        """GET /alarms with an empty store returns an empty list."""
        listing = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual([], listing)
class TestAlarms(TestAlarmsBase):
    def setUp(self):
        """Populate storage with the three default alarms."""
        super(TestAlarms, self).setUp()
        for alarm in default_alarms(self.auth_headers):
            self.alarm_conn.create_alarm(alarm)
    def test_list_alarms(self):
        """All three default alarms are listed with their metric ids."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        self.assertEqual(set(['name1', 'name2', 'name3']),
                         set(r['name'] for r in data))
        # Metric id lists, ordered by alarm name for a stable comparison.
        self.assertEqual([['41869681-5776-46d6-91ed-cccc43b6e4e3',
                           'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                          ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                           'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                          ['95f3c171-5605-4021-87ed-eede77101268',
                           'bf588a78-56c7-4ba4-be46-d71e5002e030']],
                         [r[RULE_KEY]['metrics']
                          for r in sorted(data,
                                          key=operator.itemgetter('name'))])
def test_alarms_query_with_timestamp(self):
date_time = datetime.datetime(2012, 7, 2, 10, 41)
isotime = date_time.isoformat()
resp = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'timestamp',
'op': 'gt',
'value': isotime}],
expect_errors=True)
self.assertEqual(resp.status_code, 400)
self.assertEqual(resp.json['error_message']['faultstring'],
'Unknown argument: "timestamp": '
'not valid for this resource')
    def test_alarms_query_with_state(self):
        """The list endpoint can filter alarms on their state."""
        # Overwrite alarm 'c' so exactly one alarm is in state 'ok'.
        alarm = models.Alarm(name='disabled',
                             type='gnocchi_aggregation_by_metrics_threshold',
                             enabled=False,
                             alarm_id='c',
                             description='c',
                             state='ok',
                             state_reason='Not evaluated',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(
                                 comparison_operator='gt',
                                 threshold=3.0,
                                 aggregation_method='mean',
                                 evaluation_periods=60,
                                 granularity=1,
                                 metrics=[
                                     '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                     'a1fb80f4-c242-4f57-87c6-68f47521059e',
                                 ]),
                             severity='critical')
        self.alarm_conn.update_alarm(alarm)
        resp = self.get_json('/alarms',
                             headers=self.auth_headers,
                             q=[{'field': 'state',
                                 'op': 'eq',
                                 'value': 'ok'}],
                             )
        self.assertEqual(1, len(resp))
        self.assertEqual('ok', resp[0]['state'])
def test_list_alarms_by_type(self):
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'type',
'op': 'eq',
'value':
'gnocchi_aggregation_by_metrics_threshold'
}])
self.assertEqual(3, len(alarms))
self.assertEqual(set(['gnocchi_aggregation_by_metrics_threshold']),
set(alarm['type'] for alarm in alarms))
def test_get_not_existing_alarm(self):
resp = self.get_json('/alarms/alarm-id-3',
headers=self.auth_headers,
expect_errors=True)
self.assertEqual(404, resp.status_code)
self.assertEqual('Alarm alarm-id-3 not found in project %s' %
self.auth_headers["X-Project-Id"],
resp.json['error_message']['faultstring'])
def test_get_alarm(self):
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'name1',
}])
self.assertEqual('name1', alarms[0]['name'])
self.assertEqual(['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
alarms[0][RULE_KEY]['metrics'])
one = self.get_json('/alarms/%s' % alarms[0]['alarm_id'],
headers=self.auth_headers)
self.assertEqual('name1', one['name'])
self.assertEqual(['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
one[RULE_KEY]['metrics'])
self.assertEqual(alarms[0]['alarm_id'], one['alarm_id'])
self.assertEqual(alarms[0]['repeat_actions'], one['repeat_actions'])
self.assertEqual(alarms[0]['time_constraints'],
one['time_constraints'])
    def test_get_alarm_disabled(self):
        """Alarms can be filtered on the boolean 'enabled' field."""
        # Store one disabled alarm so the enabled=False query has a match.
        alarm = models.Alarm(name='disabled',
                             type='gnocchi_aggregation_by_metrics_threshold',
                             enabled=False,
                             alarm_id='c',
                             description='c',
                             state='insufficient data',
                             state_reason='Not evaluated',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(
                                 comparison_operator='gt',
                                 threshold=3.0,
                                 aggregation_method='mean',
                                 evaluation_periods=60,
                                 granularity=1,
                                 metrics=[
                                     '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                     'a1fb80f4-c242-4f57-87c6-68f47521059e',
                                 ]
                             ),
                             severity='critical')
        self.alarm_conn.update_alarm(alarm)
        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'enabled',
                                   'value': 'False'}])
        self.assertEqual(1, len(alarms))
        self.assertEqual('disabled', alarms[0]['name'])
        # Fetching the same alarm by id also works while it is disabled.
        one = self.get_json('/alarms/%s' % alarms[0]['alarm_id'],
                            headers=self.auth_headers)
        self.assertEqual('disabled', one['name'])
def test_get_alarm_project_filter_wrong_op_normal_user(self):
project = self.auth_headers['X-Project-Id']
def _test(field, op):
response = self.get_json('/alarms',
q=[{'field': field,
'op': op,
'value': project}],
expect_errors=True,
status=400,
headers=self.auth_headers)
faultstring = ('Invalid input for field/attribute op. '
'Value: \'%(op)s\'. unimplemented operator '
'for %(field)s' % {'field': field, 'op': op})
self.assertEqual(faultstring,
response.json['error_message']['faultstring'])
_test('project', 'ne')
_test('project_id', 'ne')
def test_get_alarm_project_filter_normal_user(self):
project = self.auth_headers['X-Project-Id']
def _test(field):
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': field,
'op': 'eq',
'value': project}])
self.assertEqual(3, len(alarms))
_test('project')
_test('project_id')
def test_get_alarm_other_project_normal_user(self):
def _test(field):
response = self.get_json('/alarms',
q=[{'field': field,
'op': 'eq',
'value': 'other-project'}],
expect_errors=True,
status=401,
headers=self.auth_headers)
faultstring = 'Not Authorized to access project other-project'
self.assertEqual(faultstring,
response.json['error_message']['faultstring'])
_test('project')
_test('project_id')
def test_get_alarm_forbiden(self):
pf = os.path.abspath('aodh/tests/functional/api/v2/policy.json-test')
self.CONF.set_override('policy_file', pf, group='oslo_policy')
self.CONF.set_override('auth_mode', None, group='api')
self.app = webtest.TestApp(app.load_app(self.CONF))
response = self.get_json('/alarms',
expect_errors=True,
status=403,
headers=self.auth_headers)
faultstring = 'RBAC Authorization Failed'
self.assertEqual(403, response.status_code)
self.assertEqual(faultstring,
response.json['error_message']['faultstring'])
    def test_post_alarm_wsme_workaround(self):
        """Missing mandatory fields produce per-field validation errors.

        Each key of ``jsons`` names the field omitted from the posted
        body; the expected faultstring is derived from the last path
        component of that key.
        """
        jsons = {
            'type': {
                'name': 'missing type',
                RULE_KEY: {
                    'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                                'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                    'aggregation_method': 'mean',
                    'threshold': 2.0,
                }
            },
            'name': {
                'type': 'gnocchi_aggregation_by_metrics_threshold',
                RULE_KEY: {
                    'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                                'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                    'aggregation_method': 'mean',
                    'threshold': 2.0,
                }
            },
            'threshold_rule/metrics': {
                'name': 'missing metrics',
                'type': 'gnocchi_aggregation_by_metrics_threshold',
                RULE_KEY: {
                    'aggregation_method': 'mean',
                    'threshold': 2.0,
                }
            },
            'threshold_rule/threshold': {
                'name': 'missing threshold',
                'type': 'gnocchi_aggregation_by_metrics_threshold',
                RULE_KEY: {
                    'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                                'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                    'aggregation_method': 'mean',
                }
            },
        }
        # Every incomplete body must fail with 400 and name the field.
        for field, json in six.iteritems(jsons):
            resp = self.post_json('/alarms', params=json, expect_errors=True,
                                  status=400, headers=self.auth_headers)
            self.assertEqual("Invalid input for field/attribute %s."
                             " Value: \'None\'. Mandatory field missing."
                             % field.split('/', 1)[-1],
                             resp.json['error_message']['faultstring'])
        # No alarm was created: only the three fixture alarms remain.
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_time_constraint_start(self):
json = {
'name': 'added_alarm_invalid_constraint_duration',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'time_constraints': [
{
'name': 'testcons',
'start': '11:00am',
'duration': 10
}
],
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
"aggregation_method": "mean",
'threshold': 300.0
}
}
self.post_json('/alarms', params=json, expect_errors=True, status=400,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_duplicate_time_constraint_name(self):
json = {
'name': 'added_alarm_duplicate_constraint_name',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'time_constraints': [
{
'name': 'testcons',
'start': '* 11 * * *',
'duration': 10
},
{
'name': 'testcons',
'start': '* * * * *',
'duration': 20
}
],
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
"aggregation_method": "mean",
'threshold': 300.0
}
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
self.assertEqual(
"Time constraint names must be unique for a given alarm.",
resp.json['error_message']['faultstring'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_alarm_null_time_constraint(self):
json = {
'name': 'added_alarm_invalid_constraint_duration',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'time_constraints': None,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'aggregation_method': 'mean',
'threshold': 300.0
}
}
self.post_json('/alarms', params=json, status=201,
headers=self.auth_headers)
def test_post_invalid_alarm_time_constraint_duration(self):
json = {
'name': 'added_alarm_invalid_constraint_duration',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'time_constraints': [
{
'name': 'testcons',
'start': '* 11 * * *',
'duration': -1,
}
],
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'threshold': 300.0
}
}
self.post_json('/alarms', params=json, expect_errors=True, status=400,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_time_constraint_timezone(self):
json = {
'name': 'added_alarm_invalid_constraint_timezone',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'time_constraints': [
{
'name': 'testcons',
'start': '* 11 * * *',
'duration': 10,
'timezone': 'aaaa'
}
],
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'threshold': 300.0
}
}
self.post_json('/alarms', params=json, expect_errors=True, status=400,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_granularity(self):
json = {
'name': 'added_alarm_invalid_granularity',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'threshold': 2.0,
'aggregation_method': 'mean',
'granularity': -1,
}
}
self.post_json('/alarms', params=json, expect_errors=True, status=400,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_null_rule(self):
json = {
'name': 'added_alarm_invalid_threshold_rule',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: None,
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
self.assertEqual(
"gnocchi_aggregation_by_metrics_threshold_rule "
"must be set for gnocchi_aggregation_by_metrics_threshold "
"type alarm",
resp.json['error_message']['faultstring'])
def test_post_invalid_alarm_input_state(self):
json = {
'name': 'alarm1',
'state': 'bad_state',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'threshold': 50.0
}
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
expected_err_msg = ("Invalid input for field/attribute state."
" Value: 'bad_state'.")
self.assertIn(expected_err_msg,
resp.json['error_message']['faultstring'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_input_severity(self):
json = {
'name': 'alarm1',
'state': 'ok',
'severity': 'bad_value',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'threshold': 50.0
}
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
expected_err_msg = ("Invalid input for field/attribute severity."
" Value: 'bad_value'.")
self.assertIn(expected_err_msg,
resp.json['error_message']['faultstring'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_input_type(self):
json = {
'name': 'alarm3',
'state': 'ok',
'type': 'bad_type',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'threshold': 50.0
}
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
expected_err_msg = ("Invalid input for field/attribute"
" type."
" Value: 'bad_type'.")
self.assertIn(expected_err_msg,
resp.json['error_message']['faultstring'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_input_enabled_str(self):
json = {
'name': 'alarm5',
'enabled': 'bad_enabled',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'threshold': 50.0
}
}
resp = self.post_json('/alarms', params=json, expect_errors=True,
status=400, headers=self.auth_headers)
expected_err_msg = "Value not an unambiguous boolean: bad_enabled"
self.assertIn(expected_err_msg,
resp.json['error_message']['faultstring'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
def test_post_invalid_alarm_input_enabled_int(self):
json = {
'name': 'alarm6',
'enabled': 0,
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'gt',
'aggregation_method': 'mean',
'threshold': 50.0
}
}
resp = self.post_json('/alarms', params=json,
headers=self.auth_headers)
self.assertFalse(resp.json['enabled'])
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(4, len(alarms))
def _do_post_alarm_invalid_action(self, ok_actions=None,
alarm_actions=None,
insufficient_data_actions=None,
error_message=None):
ok_actions = ok_actions or []
alarm_actions = alarm_actions or []
insufficient_data_actions = insufficient_data_actions or []
json = {
'enabled': False,
'name': 'added_alarm',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'ok_actions': ok_actions,
'alarm_actions': alarm_actions,
'insufficient_data_actions': insufficient_data_actions,
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
}
}
resp = self.post_json('/alarms', params=json, status=400,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(3, len(alarms))
self.assertEqual(error_message,
resp.json['error_message']['faultstring'])
    def test_post_invalid_alarm_ok_actions(self):
        """An unsupported scheme in ok_actions is rejected."""
        self._do_post_alarm_invalid_action(
            ok_actions=['spam://something/ok'],
            error_message='Unsupported action spam://something/ok')
    def test_post_invalid_alarm_alarm_actions(self):
        """An unsupported scheme in alarm_actions is rejected."""
        self._do_post_alarm_invalid_action(
            alarm_actions=['spam://something/alarm'],
            error_message='Unsupported action spam://something/alarm')
    def test_post_invalid_alarm_insufficient_data_actions(self):
        """An unsupported scheme in insufficient_data_actions is rejected."""
        self._do_post_alarm_invalid_action(
            insufficient_data_actions=['spam://something/insufficient'],
            error_message='Unsupported action spam://something/insufficient')
    @staticmethod
    def _fake_urlsplit(*args, **kwargs):
        """Stand-in for netutils.urlsplit that always raises."""
        raise Exception("Evil urlsplit!")
def test_post_invalid_alarm_actions_format(self):
with mock.patch('oslo_utils.netutils.urlsplit',
self._fake_urlsplit):
self._do_post_alarm_invalid_action(
alarm_actions=['http://[::1'],
error_message='Unable to parse action http://[::1')
def test_post_alarm_defaults(self):
to_check = {
'enabled': True,
'name': 'added_alarm_defaults',
'ok_actions': [],
'alarm_actions': [],
'insufficient_data_actions': [],
'repeat_actions': False,
}
json = {
'name': 'added_alarm_defaults',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'aggregation_method': 'mean',
'threshold': 300.0
}
}
self.post_json('/alarms', params=json, status=201,
headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(4, len(alarms))
for alarm in alarms:
if alarm.name == 'added_alarm_defaults':
for key in to_check:
self.assertEqual(to_check[key],
getattr(alarm, key))
break
else:
self.fail("Alarm not found")
def test_post_alarm_with_same_name(self):
json = {
'enabled': False,
'name': 'dup_alarm_name',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
}
}
resp1 = self.post_json('/alarms', params=json, status=201,
headers=self.auth_headers)
resp2 = self.post_json('/alarms', params=json, status=201,
headers=self.auth_headers)
self.assertEqual(resp1.json['name'], resp2.json['name'])
self.assertNotEqual(resp1.json['alarm_id'], resp2.json['alarm_id'])
alarms = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'dup_alarm_name'}])
self.assertEqual(2, len(alarms))
    def test_post_alarm_noauth(self):
        """An alarm can be posted without auth headers and is stored intact."""
        json = {
            'enabled': False,
            'name': 'added_alarm',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'low',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'granularity': '180',
            }
        }
        self.post_json('/alarms', params=json, status=201)
        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        # to check to BoundedInt type conversion
        json[RULE_KEY]['evaluation_periods'] = 3
        json[RULE_KEY]['granularity'] = 180
        if alarms[0].name == 'added_alarm':
            for key in json:
                # The '*_rule' request key is stored under the generic
                # 'rule' attribute on the storage model.
                if key.endswith('_rule'):
                    storage_key = 'rule'
                else:
                    storage_key = key
                self.assertEqual(getattr(alarms[0], storage_key),
                                 json[key])
        else:
            self.fail("Alarm not found")
@staticmethod
def _alarm_representation_owned_by(identifiers):
json = {
'name': 'added_alarm',
'enabled': False,
'type': 'gnocchi_aggregation_by_metrics_threshold',
'ok_actions': ['http://something/ok'],
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
for aspect, id in six.iteritems(identifiers):
json['%s_id' % aspect] = id
return json
def _do_test_post_alarm_as_nonadmin_on_behalf_of_another(self,
identifiers):
"""Test posting an alarm.
Test that posting an alarm as non-admin on behalf of another
user/project fails with an explicit 401 instead of reverting
to the requestor's identity.
"""
json = self._alarm_representation_owned_by(identifiers)
headers = {}
headers.update(self.auth_headers)
headers['X-Roles'] = 'demo'
resp = self.post_json('/alarms', params=json, status=401,
headers=headers)
aspect = 'user' if 'user' in identifiers else 'project'
params = dict(aspect=aspect, id=identifiers[aspect])
self.assertEqual("Not Authorized to access %(aspect)s %(id)s" % params,
resp.json['error_message']['faultstring'])
    def test_post_alarm_as_nonadmin_on_behalf_of_another_user(self):
        """Posting with a foreign user_id is refused (401)."""
        identifiers = dict(user='auseridthatisnotmine')
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_another_project(self):
        """Posting with a foreign project_id is refused (401)."""
        identifiers = dict(project='aprojectidthatisnotmine')
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_another_creds(self):
        """Posting with both foreign user and project ids is refused."""
        identifiers = dict(user='auseridthatisnotmine',
                           project='aprojectidthatisnotmine')
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
def _do_test_post_alarm_as_nonadmin_on_behalf_of_self(self, identifiers):
"""Test posting an alarm.
Test posting an alarm as non-admin on behalf of own user/project
creates alarm associated with the requestor's identity.
"""
json = self._alarm_representation_owned_by(identifiers)
headers = {}
headers.update(self.auth_headers)
headers['X-Roles'] = 'demo'
self.post_json('/alarms', params=json, status=201, headers=headers)
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
self.assertEqual(alarms[0].user_id,
self.auth_headers['X-User-Id'])
self.assertEqual(alarms[0].project_id,
self.auth_headers['X-Project-Id'])
    def test_post_alarm_as_nonadmin_on_behalf_of_own_user(self):
        """Posting with one's own user_id succeeds."""
        identifiers = dict(user=self.auth_headers['X-User-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_own_project(self):
        """Posting with one's own project_id succeeds."""
        identifiers = dict(project=self.auth_headers['X-Project-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
    def test_post_alarm_as_nonadmin_on_behalf_of_own_creds(self):
        """Posting with one's own user and project ids succeeds."""
        identifiers = dict(user=self.auth_headers['X-User-Id'],
                           project=self.auth_headers['X-Project-Id'])
        self._do_test_post_alarm_as_nonadmin_on_behalf_of_self(identifiers)
def test_post_alarm_with_mismatch_between_type_and_rule(self):
"""Test the creation of an combination alarm with threshold rule."""
json = {
'enabled': False,
'name': 'added_alarm',
'state': 'ok',
'type': 'gnocchi_resources_threshold',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
}
}
resp = self.post_json('/alarms', params=json,
expect_errors=True, status=400,
headers=self.auth_headers)
self.assertEqual(
"gnocchi_resources_threshold_rule must "
"be set for gnocchi_resources_threshold type alarm",
resp.json['error_message']['faultstring'])
def test_post_alarm_with_duplicate_actions(self):
body = {
'name': 'dup-alarm-actions',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': ['http://no.where', 'http://no.where']
}
resp = self.post_json('/alarms', params=body,
headers=self.auth_headers)
self.assertEqual(201, resp.status_code)
alarms = list(self.alarm_conn.get_alarms(name='dup-alarm-actions'))
self.assertEqual(1, len(alarms))
self.assertEqual(['http://no.where'], alarms[0].alarm_actions)
def test_post_alarm_with_too_many_actions(self):
self.CONF.set_override('alarm_max_actions', 1, group='api')
body = {
'name': 'alarm-with-many-actions',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': ['http://no.where', 'http://no.where2']
}
resp = self.post_json('/alarms', params=body, expect_errors=True,
headers=self.auth_headers)
self.assertEqual(400, resp.status_code)
self.assertEqual("alarm_actions count exceeds maximum value 1",
resp.json['error_message']['faultstring'])
def test_post_alarm_normal_user_set_log_actions(self):
body = {
'name': 'log_alarm_actions',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': ['log://']
}
resp = self.post_json('/alarms', params=body, expect_errors=True,
headers=self.auth_headers)
self.assertEqual(401, resp.status_code)
expected_msg = ("You are not authorized to create action: log://")
self.assertEqual(expected_msg,
resp.json['error_message']['faultstring'])
def test_post_alarm_normal_user_set_test_actions(self):
body = {
'name': 'test_alarm_actions',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': ['test://']
}
resp = self.post_json('/alarms', params=body, expect_errors=True,
headers=self.auth_headers)
self.assertEqual(401, resp.status_code)
expected_msg = ("You are not authorized to create action: test://")
self.assertEqual(expected_msg,
resp.json['error_message']['faultstring'])
def test_post_alarm_admin_user_set_log_test_actions(self):
body = {
'name': 'admin_alarm_actions',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': ['test://', 'log://']
}
headers = self.auth_headers
headers['X-Roles'] = 'admin'
self.post_json('/alarms', params=body, status=201,
headers=headers)
alarms = list(self.alarm_conn.get_alarms(name='admin_alarm_actions'))
self.assertEqual(1, len(alarms))
self.assertEqual(['test://', 'log://'],
alarms[0].alarm_actions)
    def test_exercise_state_reason(self):
        """state_reason is set by the API and is read-only for clients."""
        body = {
            'name': 'nostate',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': '3',
                'granularity': '180',
            },
        }
        # NOTE(review): 'headers' aliases self.auth_headers, so setting the
        # admin role here also affects the put_json calls below that pass
        # self.auth_headers — confirm whether they rely on the admin role
        # before changing this to a copy.
        headers = self.auth_headers
        headers['X-Roles'] = 'admin'
        self.post_json('/alarms', params=body, status=201,
                       headers=headers)
        alarms = list(self.alarm_conn.get_alarms(name='nostate'))
        self.assertEqual(1, len(alarms))
        alarm_id = alarms[0].alarm_id
        # Freshly created alarms start unevaluated.
        alarm = self._get_alarm(alarm_id)
        self.assertEqual("insufficient data", alarm['state'])
        self.assertEqual("Not evaluated yet", alarm['state_reason'])
        # Ensure state reason is updated
        alarm = self._get_alarm('a')
        alarm['state'] = 'ok'
        self.put_json('/alarms/%s' % alarm_id,
                      params=alarm,
                      headers=self.auth_headers)
        alarm = self._get_alarm(alarm_id)
        self.assertEqual("ok", alarm['state'])
        self.assertEqual("Manually set via API", alarm['state_reason'])
        # Ensure state reason read only
        alarm = self._get_alarm('a')
        alarm['state'] = 'alarm'
        alarm['state_reason'] = 'oh no!'
        self.put_json('/alarms/%s' % alarm_id,
                      params=alarm,
                      headers=self.auth_headers)
        alarm = self._get_alarm(alarm_id)
        self.assertEqual("alarm", alarm['state'])
        self.assertEqual("Manually set via API", alarm['state_reason'])
def test_post_alarm_without_actions(self):
body = {
'name': 'alarm_actions_none',
'type': 'gnocchi_aggregation_by_metrics_threshold',
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': '3',
'granularity': '180',
},
'alarm_actions': None
}
headers = self.auth_headers
headers['X-Roles'] = 'admin'
self.post_json('/alarms', params=body, status=201,
headers=headers)
alarms = list(self.alarm_conn.get_alarms(name='alarm_actions_none'))
self.assertEqual(1, len(alarms))
# FIXME(sileht): This should really returns [] not None
# but SQL just stores the json dict as is...
# migration script for sql will be a mess because we have
# to parse all JSON :(
# I guess we assume that wsme convert the None input to []
# because of the array type, but it won't...
self.assertIsNone(alarms[0].alarm_actions)
    def test_post_alarm_trust(self):
        """trust+http actions create a Keystone trust, deletion removes it."""
        json = {
            'name': 'added_alarm_defaults',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'ok_actions': ['trust+http://my.server:1234/foo'],
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'aggregation_method': 'mean',
                'threshold': 300.0
            }
        }
        auth = mock.Mock()
        trust_client = mock.Mock()
        # Mock both the session-bearing client and the v3 trusts API so
        # no real Keystone call is made.
        with mock.patch('aodh.keystone_client.get_client') as client:
            mock_session = mock.Mock()
            mock_session.get_user_id.return_value = 'my_user'
            client.return_value = mock.Mock(session=mock_session)
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                trust_client.trusts.create.return_value = mock.Mock(id='5678')
                self.post_json('/alarms', params=json, status=201,
                               headers=self.auth_headers,
                               extra_environ={'keystone.token_auth': auth})
                trust_client.trusts.create.assert_called_once_with(
                    trustor_user=self.auth_headers['X-User-Id'],
                    trustee_user='my_user',
                    project=self.auth_headers['X-Project-Id'],
                    impersonation=True,
                    role_names=[])
        # The stored action URL must embed the trust id and credentials.
        alarms = list(self.alarm_conn.get_alarms())
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                self.assertEqual(
                    ['trust+http://5678:delete@my.server:1234/foo'],
                    alarm.ok_actions)
                break
        else:
            self.fail("Alarm not found")
        # Deleting the alarm must also delete the trust it created.
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                self.delete('/alarms/%s' % alarm.alarm_id,
                            headers=self.auth_headers,
                            status=204,
                            extra_environ={'keystone.token_auth': auth})
                trust_client.trusts.delete.assert_called_once_with('5678')
def test_put_alarm(self):
json = {
'enabled': False,
'name': 'name_put',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'severity': 'critical',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
data = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'name1',
}])
self.assertEqual(1, len(data))
alarm_id = data[0]['alarm_id']
self.put_json('/alarms/%s' % alarm_id,
params=json,
headers=self.auth_headers)
alarm = list(self.alarm_conn.get_alarms(alarm_id=alarm_id,
enabled=False))[0]
self._verify_alarm(json, alarm)
def test_put_alarm_as_admin(self):
json = {
'user_id': 'myuserid',
'project_id': 'myprojectid',
'enabled': False,
'name': 'name_put',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'severity': 'critical',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
headers = {}
headers.update(self.auth_headers)
headers['X-Roles'] = 'admin'
data = self.get_json('/alarms',
headers=headers,
q=[{'field': 'name',
'value': 'name1',
}])
self.assertEqual(1, len(data))
alarm_id = data[0]['alarm_id']
self.put_json('/alarms/%s' % alarm_id,
params=json,
headers=headers)
alarm = list(self.alarm_conn.get_alarms(alarm_id=alarm_id,
enabled=False))[0]
self.assertEqual('myuserid', alarm.user_id)
self.assertEqual('myprojectid', alarm.project_id)
self._verify_alarm(json, alarm)
def test_put_alarm_wrong_field(self):
json = {
'this_can_not_be_correct': 'ha',
'enabled': False,
'name': 'name1',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'severity': 'critical',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
data = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'name1',
}])
self.assertEqual(1, len(data))
alarm_id = data[0]['alarm_id']
resp = self.put_json('/alarms/%s' % alarm_id,
expect_errors=True,
params=json,
headers=self.auth_headers)
self.assertEqual(400, resp.status_code)
def test_put_alarm_with_existing_name(self):
"""Test that update a threshold alarm with an existing name."""
json = {
'enabled': False,
'name': 'name1',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'severity': 'critical',
'ok_actions': ['http://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
data = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'name2',
}])
self.assertEqual(1, len(data))
alarm_id = data[0]['alarm_id']
resp = self.put_json('/alarms/%s' % alarm_id,
params=json,
headers=self.auth_headers)
self.assertEqual(200, resp.status_code)
def test_put_invalid_alarm_actions(self):
json = {
'enabled': False,
'name': 'name1',
'state': 'ok',
'type': 'gnocchi_aggregation_by_metrics_threshold',
'severity': 'critical',
'ok_actions': ['spam://something/ok'],
'alarm_actions': ['http://something/alarm'],
'insufficient_data_actions': ['http://something/no'],
'repeat_actions': True,
RULE_KEY: {
'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
'a1fb80f4-c242-4f57-87c6-68f47521059e'],
'comparison_operator': 'le',
'aggregation_method': 'count',
'threshold': 50,
'evaluation_periods': 3,
'granularity': 180,
}
}
data = self.get_json('/alarms',
headers=self.auth_headers,
q=[{'field': 'name',
'value': 'name2',
}])
self.assertEqual(1, len(data))
alarm_id = data[0]['alarm_id']
resp = self.put_json('/alarms/%s' % alarm_id,
expect_errors=True, status=400,
params=json,
headers=self.auth_headers)
self.assertEqual(
'Unsupported action spam://something/ok',
resp.json['error_message']['faultstring'])
    def test_put_alarm_trust(self):
        """Round-trip a trust+http ok_action through alarm update.

        Setting a ``trust+http://`` action should create a keystone trust
        and embed its id in the stored URL; replacing it with a plain URL
        should delete that trust again.
        """
        data = self._get_alarm('a')
        data.update({'ok_actions': ['trust+http://something/ok']})
        trust_client = mock.Mock()
        # Patch both the keystone session client and the v3 Client so the
        # trust id is fully controlled by the test.
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                trust_client.trusts.create.return_value = mock.Mock(id='5678')
                self.put_json('/alarms/%s' % data['alarm_id'],
                              params=data,
                              headers=self.auth_headers)
        data = self._get_alarm('a')
        # The created trust id must be embedded into the stored action URL.
        self.assertEqual(
            ['trust+http://5678:delete@something/ok'], data['ok_actions'])

        # Replacing the trust action with a plain URL deletes the trust.
        data.update({'ok_actions': ['http://no-trust-something/ok']})
        with mock.patch('aodh.keystone_client.get_client') as client:
            client.return_value = mock.Mock(
                auth_ref=mock.Mock(user_id='my_user'))
            with mock.patch('keystoneclient.v3.client.Client') as sub_client:
                sub_client.return_value = trust_client
                self.put_json('/alarms/%s' % data['alarm_id'],
                              params=data,
                              headers=self.auth_headers)
        trust_client.trusts.delete.assert_called_once_with('5678')
        data = self._get_alarm('a')
        self.assertEqual(
            ['http://no-trust-something/ok'], data['ok_actions'])
def test_delete_alarm(self):
data = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(3, len(data))
resp = self.delete('/alarms/%s' % data[0]['alarm_id'],
headers=self.auth_headers,
status=204)
self.assertEqual(b'', resp.body)
alarms = list(self.alarm_conn.get_alarms())
self.assertEqual(2, len(alarms))
def test_get_state_alarm(self):
data = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(3, len(data))
resp = self.get_json('/alarms/%s/state' % data[0]['alarm_id'],
headers=self.auth_headers)
self.assertEqual(resp, data[0]['state'])
def test_set_state_alarm(self):
data = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(3, len(data))
resp = self.put_json('/alarms/%s/state' % data[0]['alarm_id'],
headers=self.auth_headers,
params='alarm')
alarms = list(self.alarm_conn.get_alarms(alarm_id=data[0]['alarm_id']))
self.assertEqual(1, len(alarms))
self.assertEqual('alarm', alarms[0].state)
self.assertEqual('Manually set via API',
alarms[0].state_reason)
self.assertEqual('alarm', resp.json)
def test_set_invalid_state_alarm(self):
data = self.get_json('/alarms', headers=self.auth_headers)
self.assertEqual(3, len(data))
self.put_json('/alarms/%s/state' % data[0]['alarm_id'],
headers=self.auth_headers,
params='not valid',
status=400)
    def test_alarms_sends_notification(self):
        """Creating an alarm emits exactly one 'alarm.creation' notification
        with the alarm detail and the standard envelope keys in the payload.
        """
        # Hit the AlarmsController ...
        json = {
            'name': 'sent_notification',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'low',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'aggregation_method': 'mean',
            }
        }
        with mock.patch.object(messaging, 'get_notifier') as get_notifier:
            notifier = get_notifier.return_value
            self.post_json('/alarms', params=json, headers=self.auth_headers)
            get_notifier.assert_called_once_with(mock.ANY,
                                                 publisher_id='aodh.api')
            # Exactly one info-level notification for the creation.
            calls = notifier.info.call_args_list
            self.assertEqual(1, len(calls))
            args, _ = calls[0]
            context, event_type, payload = args
            self.assertEqual('alarm.creation', event_type)
            self.assertEqual('sent_notification', payload['detail']['name'])
            self.assertEqual(['41869681-5776-46d6-91ed-cccc43b6e4e3',
                              'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                             payload['detail']['rule']['metrics'])
            # The envelope must carry at least these bookkeeping keys.
            self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
                                 'project_id', 'timestamp', 'type',
                                 'user_id']).issubset(payload.keys()))
def test_alarm_sends_notification(self):
with mock.patch.object(messaging, 'get_notifier') as get_notifier:
notifier = get_notifier.return_value
self._update_alarm('a', dict(name='new_name'))
get_notifier.assert_called_once_with(mock.ANY,
publisher_id='aodh.api')
calls = notifier.info.call_args_list
self.assertEqual(1, len(calls))
args, _ = calls[0]
context, event_type, payload = args
self.assertEqual('alarm.rule_change', event_type)
self.assertEqual('new_name', payload['detail']['name'])
self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
'project_id', 'timestamp', 'type',
'user_id']).issubset(payload.keys()))
    def test_delete_alarm_sends_notification(self):
        """Deleting an alarm emits one 'alarm.deletion' notification."""
        with mock.patch.object(messaging, 'get_notifier') as get_notifier:
            notifier = get_notifier.return_value
            # default_alarms() is defined elsewhere in this module;
            # presumably it mirrors the fixtures created in setUp — delete
            # the second one. TODO(review): confirm against its definition.
            self._delete_alarm(default_alarms(self.auth_headers)[1].alarm_id)
            get_notifier.assert_called_once_with(mock.ANY,
                                                 publisher_id='aodh.api')
            calls = notifier.info.call_args_list
            self.assertEqual(1, len(calls))
            args, _ = calls[0]
            context, event_type, payload = args
            self.assertEqual('alarm.deletion', event_type)
            self.assertEqual('insufficient data', payload['detail']['state'])
            # Deletion payloads additionally include 'severity'.
            self.assertTrue(set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
                                 'project_id', 'timestamp', 'type', 'severity',
                                 'user_id']).issubset(payload.keys()))
class TestAlarmsHistory(TestAlarmsBase):
    """Tests for the alarm change-history endpoint (/alarms/<id>/history).

    Each test starts from a single seeded threshold alarm (alarm_id 'a');
    history entries are produced as a side effect of updates, so the exact
    sequence of mutations in each test determines the expected history.
    """

    def setUp(self):
        # Seed storage with one gnocchi threshold alarm owned by the
        # authenticated user/project.
        super(TestAlarmsHistory, self).setUp()
        alarm = models.Alarm(
            name='name1',
            type='gnocchi_aggregation_by_metrics_threshold',
            enabled=True,
            alarm_id='a',
            description='a',
            state='insufficient data',
            state_reason='insufficient data',
            severity='critical',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            ok_actions=[],
            insufficient_data_actions=[],
            alarm_actions=[],
            repeat_actions=True,
            user_id=self.auth_headers['X-User-Id'],
            project_id=self.auth_headers['X-Project-Id'],
            time_constraints=[dict(name='testcons',
                                   start='0 11 * * *',
                                   duration=300)],
            rule=dict(comparison_operator='gt',
                      threshold=2.0,
                      aggregation_method='mean',
                      evaluation_periods=60,
                      granularity=1,
                      metrics=['41869681-5776-46d6-91ed-cccc43b6e4e3',
                               'a1fb80f4-c242-4f57-87c6-68f47521059e']))
        self.alarm_conn.create_alarm(alarm)

    def _get_alarm_history(self, alarm_id, auth_headers=None, query=None,
                           expect_errors=False, status=200):
        """Fetch an alarm's history, optionally with a single q filter.

        :param query: dict with 'field', 'op', 'value' keys encoded into
                      the URL query string.
        :param status: asserted only when expect_errors is True.
        :returns: the decoded response (history list, or error response).
        """
        url = '/alarms/%s/history' % alarm_id
        if query:
            url += '?q.op=%(op)s&q.value=%(value)s&q.field=%(field)s' % query
        resp = self.get_json(url,
                             headers=auth_headers or self.auth_headers,
                             expect_errors=expect_errors)
        if expect_errors:
            self.assertEqual(status, resp.status_code)
        return resp

    def _assert_is_subset(self, expected, actual):
        # Check every expected key/value appears in the actual history
        # entry; 'detail' is stored as a JSON string and decoded first
        # when the expectation is a dict.
        for k, v in six.iteritems(expected):
            current = actual.get(k)
            if k == 'detail' and isinstance(v, dict):
                current = jsonlib.loads(current)
            self.assertEqual(v, current, 'mismatched field: %s' % k)
        self.assertIsNotNone(actual['event_id'])

    def _assert_in_json(self, expected, actual):
        # Verify each expected key/value pair occurs as a fragment of the
        # (canonically re-serialized) JSON detail string.
        actual = jsonlib.dumps(jsonlib.loads(actual), sort_keys=True)
        for k, v in six.iteritems(expected):
            fragment = jsonlib.dumps({k: v}, sort_keys=True)[1:-1]
            self.assertIn(fragment, actual,
                          '%s not in %s' % (fragment, actual))

    def test_record_alarm_history_config(self):
        """History is recorded only while the record_history option is on."""
        self.CONF.set_override('record_history', False)
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self._update_alarm('a', dict(name='renamed'))
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.CONF.set_override('record_history', True)
        self._update_alarm('a', dict(name='foobar'))
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))

    def test_record_alarm_history_severity(self):
        """A severity change produces one history entry with that detail."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('critical', alarm['severity'])

        self._update_alarm('a', dict(severity='low'))
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])
        self.assertEqual('low', new_alarm['severity'])

    def test_record_alarm_history_statistic(self):
        """Changing the rule's aggregation_method is recorded in history."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('mean', alarm[RULE_KEY]['aggregation_method'])

        rule = alarm[RULE_KEY].copy()
        rule['aggregation_method'] = 'min'
        data = dict(gnocchi_aggregation_by_metrics_threshold_rule=rule)
        self._update_alarm('a', data)
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual("min", jsonlib.loads(history[0]['detail'])
                                ['rule']["aggregation_method"])
        self.assertEqual('min', new_alarm[RULE_KEY]['aggregation_method'])

    def test_redundant_update_alarm_property_no_history_change(self):
        """Re-submitting an identical update must not append to history."""
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self.assertEqual('critical', alarm['severity'])

        self._update_alarm('a', dict(severity='low'))
        new_alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])
        self.assertEqual('low', new_alarm['severity'])

        # Same severity again: history must be unchanged.
        self._update_alarm('a', dict(severity='low'))
        updated_history = self._get_alarm_history('a')
        self.assertEqual(1, len(updated_history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         updated_history[0]['detail'])
        self.assertEqual(history, updated_history)

    def test_get_recorded_alarm_history_on_create(self):
        """Alarm creation writes a 'creation' history entry with the rule."""
        new_alarm = {
            'name': 'new_alarm',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'max',
                'threshold': 42.0,
                'granularity': 60,
                'evaluation_periods': 1,
            }
        }
        self.post_json('/alarms', params=new_alarm, status=201,
                       headers=self.auth_headers)

        alarms = self.get_json('/alarms',
                               headers=self.auth_headers,
                               q=[{'field': 'name',
                                   'value': 'new_alarm',
                                   }])
        self.assertEqual(1, len(alarms))
        alarm = alarms[0]
        history = self._get_alarm_history(alarm['alarm_id'])
        self.assertEqual(1, len(history))
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='creation',
                                    user_id=alarm['user_id']),
                               history[0])
        # The history detail stores the rule under the generic 'rule' key.
        new_alarm['rule'] = new_alarm[RULE_KEY]
        del new_alarm[RULE_KEY]
        self._assert_in_json(new_alarm, history[0]['detail'])

    def _do_test_get_recorded_alarm_history_on_update(self,
                                                      data,
                                                      type,
                                                      detail,
                                                      auth=None):
        # Apply one update (optionally as another identity) and verify the
        # single resulting history entry.
        alarm = self._get_alarm('a')
        history = self._get_alarm_history('a')
        self.assertEqual([], history)
        self._update_alarm('a', data, auth)
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        project_id = auth['X-Project-Id'] if auth else alarm['project_id']
        user_id = auth['X-User-Id'] if auth else alarm['user_id']
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=project_id,
                                    type=type,
                                    user_id=user_id),
                               history[0])

    def test_get_recorded_alarm_history_rule_change(self):
        """A rename is recorded as a 'rule change' entry."""
        data = dict(name='renamed')
        detail = '{"name": "renamed"}'
        self._do_test_get_recorded_alarm_history_on_update(data,
                                                           'rule change',
                                                           detail)

    def test_get_recorded_alarm_history_state_transition_on_behalf_of(self):
        """History of admin-driven state changes is visible to the owner.

        on_behalf_of always records the alarm owner's project, regardless
        of who performed the change.
        """
        # credentials for new non-admin user, on who's behalf the alarm
        # is created
        member_user = uuidutils.generate_uuid()
        member_project = uuidutils.generate_uuid()
        member_auth = {'X-Roles': 'member',
                       'X-User-Id': member_user,
                       'X-Project-Id': member_project}
        new_alarm = {
            'name': 'new_alarm',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'state': 'ok',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'max',
                'threshold': 42.0,
                'evaluation_periods': 1,
                'granularity': 60
            }
        }
        self.post_json('/alarms', params=new_alarm, status=201,
                       headers=member_auth)
        alarm = self.get_json('/alarms', headers=member_auth)[0]

        # effect a state transition as a new administrative user
        admin_user = uuidutils.generate_uuid()
        admin_project = uuidutils.generate_uuid()
        admin_auth = {'X-Roles': 'admin',
                      'X-User-Id': admin_user,
                      'X-Project-Id': admin_project}
        data = dict(state='alarm')
        self._update_alarm(alarm['alarm_id'], data, auth_headers=admin_auth)

        new_alarm['rule'] = new_alarm[RULE_KEY]
        del new_alarm[RULE_KEY]

        # ensure that both the creation event and state transition
        # are visible to the non-admin alarm owner and admin user alike
        for auth in [member_auth, admin_auth]:
            history = self._get_alarm_history(alarm['alarm_id'],
                                              auth_headers=auth)
            self.assertEqual(2, len(history), 'hist: %s' % history)
            # Most recent first: the admin's state change ...
            self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                        detail={"state": "alarm",
                                                "state_reason":
                                                "Manually set via API"},
                                        on_behalf_of=alarm['project_id'],
                                        project_id=admin_project,
                                        type='rule change',
                                        user_id=admin_user),
                                   history[0])
            # ... then the member's creation event.
            self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                        on_behalf_of=alarm['project_id'],
                                        project_id=member_project,
                                        type='creation',
                                        user_id=member_user),
                                   history[1])
            self._assert_in_json(new_alarm, history[1]['detail'])

        # ensure on_behalf_of cannot be constrained in an API call
        # (note: reuses the last `auth` bound by the loop above)
        query = dict(field='on_behalf_of',
                     op='eq',
                     value=alarm['project_id'])
        self._get_alarm_history(alarm['alarm_id'], auth_headers=auth,
                                query=query, expect_errors=True,
                                status=400)

    def test_get_recorded_alarm_history_segregation(self):
        """A foreign project cannot read another project's alarm history."""
        data = dict(name='renamed')
        detail = '{"name": "renamed"}'
        self._do_test_get_recorded_alarm_history_on_update(data,
                                                           'rule change',
                                                           detail)
        auth = {'X-Roles': 'member',
                'X-User-Id': uuidutils.generate_uuid(),
                'X-Project-Id': uuidutils.generate_uuid()}
        self._get_alarm_history('a', auth_headers=auth,
                                expect_errors=True, status=404)

    def test_delete_alarm_history_after_deletion(self):
        """Deleting the alarm makes its history endpoint return 404."""
        self._update_alarm('a', dict(name='renamed'))
        history = self._get_alarm_history('a')
        self.assertEqual(1, len(history))
        self.delete('/alarms/%s' % 'a',
                    headers=self.auth_headers,
                    status=204)
        self._get_alarm_history('a', expect_errors=True, status=404)

    def test_get_alarm_history_ordered_by_recentness(self):
        """History entries are returned most-recent-first."""
        for i in moves.xrange(10):
            self._update_alarm('a', dict(name='%s' % i))
        history = self._get_alarm_history('a')
        self.assertEqual(10, len(history), 'hist: %s' % history)
        self._assert_is_subset(dict(alarm_id='a',
                                    type='rule change'),
                               history[0])
        for i in moves.xrange(1, 11):
            detail = '{"name": "%s"}' % (10 - i)
            self._assert_is_subset(dict(alarm_id='a',
                                        detail=detail,
                                        type='rule change'),
                                   history[i - 1])

    def test_get_alarm_history_constrained_by_timestamp(self):
        """History can be filtered on its timestamp field."""
        alarm = self._get_alarm('a')
        self._update_alarm('a', dict(name='renamed'))
        after = datetime.datetime.utcnow().isoformat()
        query = dict(field='timestamp', op='gt', value=after)
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(0, len(history))
        query['op'] = 'le'
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        detail = '{"name": "renamed"}'
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='rule change',
                                    user_id=alarm['user_id']),
                               history[0])

    def test_get_alarm_history_constrained_by_type(self):
        """History can be filtered on the change type."""
        alarm = self._get_alarm('a')
        self._update_alarm('a', dict(name='renamed2'))
        query = dict(field='type', op='eq', value='rule change')
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        detail = '{"name": "renamed2"}'
        self._assert_is_subset(dict(alarm_id=alarm['alarm_id'],
                                    detail=detail,
                                    on_behalf_of=alarm['project_id'],
                                    project_id=alarm['project_id'],
                                    type='rule change',
                                    user_id=alarm['user_id']),
                               history[0])

    def test_get_alarm_history_constrained_by_alarm_id_failed(self):
        """Filtering on alarm_id (already in the URL) is rejected with 400."""
        query = dict(field='alarm_id', op='eq', value='a')
        resp = self._get_alarm_history('a', query=query,
                                       expect_errors=True, status=400)
        msg = ('Unknown argument: "alarm_id": unrecognized'
               " field in query: [<Query {key!r} eq"
               " {value!r} Unset>], valid keys: ['project', "
               "'search_offset', 'severity', 'timestamp',"
               " 'type', 'user']")
        msg = msg.format(key=u'alarm_id', value=u'a')
        self.assertEqual(msg,
                         resp.json['error_message']['faultstring'])

    def test_get_alarm_history_constrained_by_not_supported_rule(self):
        """Filtering on an unknown field is rejected with 400."""
        query = dict(field='abcd', op='eq', value='abcd')
        resp = self._get_alarm_history('a', query=query,
                                       expect_errors=True, status=400)
        msg = ('Unknown argument: "abcd": unrecognized'
               " field in query: [<Query {key!r} eq"
               " {value!r} Unset>], valid keys: ['project', "
               "'search_offset', 'severity', 'timestamp',"
               " 'type', 'user']")
        msg = msg.format(key=u'abcd', value=u'abcd')
        self.assertEqual(msg,
                         resp.json['error_message']['faultstring'])

    def test_get_alarm_history_constrained_by_severity(self):
        """History can be filtered on severity."""
        self._update_alarm('a', dict(severity='low'))
        query = dict(field='severity', op='eq', value='low')
        history = self._get_alarm_history('a', query=query)
        self.assertEqual(1, len(history))
        self.assertEqual(jsonlib.dumps({'severity': 'low'}),
                         history[0]['detail'])

    def test_get_nonexistent_alarm_history(self):
        """History of an unknown alarm id yields 404."""
        self._get_alarm_history('foobar', expect_errors=True, status=404)
class TestAlarmsQuotas(TestAlarmsBase):
    """Exercise per-user and per-project alarm quota enforcement."""

    def _test_alarm_quota(self):
        """Create one alarm, then verify a second one is refused (403)."""
        definition = {
            'name': 'alarm',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'user_id': self.auth_headers['X-User-Id'],
            'project_id': self.auth_headers['X-Project-Id'],
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'max',
                'threshold': 42.0,
                'granularity': 60,
                'evaluation_periods': 1,
            }
        }
        # First creation fits within the quota.
        response = self.post_json('/alarms', params=definition,
                                  headers=self.auth_headers)
        self.assertEqual(201, response.status_code)
        self.assertEqual(1, len(self.get_json('/alarms',
                                              headers=self.auth_headers)))

        # Second creation must be refused and leave storage untouched.
        definition['name'] = 'another_user_alarm'
        response = self.post_json('/alarms', params=definition,
                                  expect_errors=True,
                                  headers=self.auth_headers)
        self.assertEqual(403, response.status_code)
        self.assertIn('Alarm quota exceeded for user',
                      response.json['error_message']['faultstring'])
        self.assertEqual(1, len(self.get_json('/alarms',
                                              headers=self.auth_headers)))

    def test_alarms_quotas(self):
        """Both quotas at one: the second alarm is refused."""
        self.CONF.set_override('user_alarm_quota', 1, 'api')
        self.CONF.set_override('project_alarm_quota', 1, 'api')
        self._test_alarm_quota()

    def test_project_alarms_quotas(self):
        """Only the project quota set: still limits creation."""
        self.CONF.set_override('project_alarm_quota', 1, 'api')
        self._test_alarm_quota()

    def test_user_alarms_quotas(self):
        """Only the user quota set: still limits creation."""
        self.CONF.set_override('user_alarm_quota', 1, 'api')
        self._test_alarm_quota()

    def test_larger_limit_project_alarms_quotas(self):
        """The stricter user quota wins over a larger project quota."""
        self.CONF.set_override('user_alarm_quota', 1, 'api')
        self.CONF.set_override('project_alarm_quota', 2, 'api')
        self._test_alarm_quota()

    def test_larger_limit_user_alarms_quotas(self):
        """The stricter project quota wins over a larger user quota."""
        self.CONF.set_override('user_alarm_quota', 2, 'api')
        self.CONF.set_override('project_alarm_quota', 1, 'api')
        self._test_alarm_quota()

    def test_larger_limit_user_alarm_quotas_multitenant_user(self):
        """A user can spread alarms across projects up to the user quota."""
        self.CONF.set_override('user_alarm_quota', 2, 'api')
        self.CONF.set_override('project_alarm_quota', 1, 'api')

        def _assert_one_alarm(field, value):
            # Exactly one alarm must match the given equality filter.
            filters = [{'field': field, 'op': 'eq', 'value': value}]
            found = self.get_json('/alarms', q=filters,
                                  headers=self.auth_headers)
            self.assertEqual(1, len(found))

        definition = {
            'name': 'alarm',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'user_id': self.auth_headers['X-User-Id'],
            'project_id': self.auth_headers['X-Project-Id'],
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'le',
                'aggregation_method': 'max',
                'threshold': 42.0,
                'granularity': 60,
                'evaluation_periods': 1,
            }
        }
        response = self.post_json('/alarms', params=definition,
                                  headers=self.auth_headers)
        self.assertEqual(201, response.status_code)
        _assert_one_alarm('project_id', self.auth_headers['X-Project-Id'])

        # Switch to a second project; the user quota (2) allows one more.
        self.auth_headers['X-Project-Id'] = uuidutils.generate_uuid()
        definition['name'] = 'another_user_alarm'
        definition['project_id'] = self.auth_headers['X-Project-Id']
        response = self.post_json('/alarms', params=definition,
                                  headers=self.auth_headers)
        self.assertEqual(201, response.status_code)
        _assert_one_alarm('project_id', self.auth_headers['X-Project-Id'])

        # As admin, both alarms (one per project) are visible.
        self.auth_headers["X-roles"] = "admin"
        self.assertEqual(2, len(self.get_json('/alarms',
                                              headers=self.auth_headers)))
class TestAlarmsRuleThreshold(TestAlarmsBase):
    """Validation tests for gnocchi_aggregation_by_metrics_threshold rules."""

    def test_post_invalid_alarm_statistic(self):
        """An unknown aggregation_method is rejected; nothing is stored."""
        json = {
            'name': 'added_alarm',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'gt',
                'threshold': 2.0,
                'aggregation_method': 'magic',
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("aggregation_method should be in ['count', "
                            "'mean', 'max', 'min', 'first', 'last', 'std'] "
                            "not magic")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_invalid_alarm_input_comparison_operator(self):
        """An invalid comparison_operator is rejected; nothing is stored."""
        json = {
            'name': 'alarm2',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'comparison_operator': 'bad_co',
                'threshold': 50.0
            }
        }
        resp = self.post_json('/alarms', params=json, expect_errors=True,
                              status=400, headers=self.auth_headers)
        expected_err_msg = ("Invalid input for field/attribute"
                            " comparison_operator."
                            " Value: 'bad_co'.")
        self.assertIn(expected_err_msg,
                      resp.json['error_message']['faultstring'])
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(0, len(alarms))

    def test_post_threshold_rule_defaults(self):
        """Omitted rule fields are filled with documented defaults."""
        # The full alarm as it should look after defaulting.
        to_check = {
            'name': 'added_alarm_defaults',
            'state': 'insufficient data',
            'description': ('gnocchi_aggregation_by_metrics_threshold '
                            'alarm rule'),
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'threshold': 300.0,
                'comparison_operator': 'eq',
                'aggregation_method': 'mean',
                'evaluation_periods': 1,
                'granularity': 60,
            }
        }
        # Minimal POST body: only the required fields.
        json = {
            'name': 'added_alarm_defaults',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            RULE_KEY: {
                'metrics': ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                            'a1fb80f4-c242-4f57-87c6-68f47521059e'],
                'aggregation_method': 'mean',
                'threshold': 300.0
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                for key in to_check:
                    # The typed rule key is stored under the generic
                    # 'rule' attribute on the storage model.
                    if key.endswith('_rule'):
                        storage_key = 'rule'
                    else:
                        storage_key = key
                    self.assertEqual(to_check[key],
                                     getattr(alarm, storage_key))
                break
        else:
            self.fail("Alarm not found")
class TestAlarmsRuleGnocchi(TestAlarmsBase):
    """Tests for the three gnocchi alarm rule types.

    setUp seeds one alarm of each type: resources threshold ('e'),
    aggregation-by-metrics threshold ('f') and aggregation-by-resources
    threshold ('g').
    """

    def setUp(self):
        super(TestAlarmsRuleGnocchi, self).setUp()
        for alarm in [
            models.Alarm(name='name1',
                         type='gnocchi_resources_threshold',
                         enabled=True,
                         alarm_id='e',
                         description='e',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   granularity=60,
                                   evaluation_periods=1,
                                   metric='meter.test',
                                   resource_type='instance',
                                   resource_id=(
                                       '6841c175-d7c4-4bc2-bc7a-1c7832271b8f'),
                                   )
                         ),
            models.Alarm(name='name2',
                         type='gnocchi_aggregation_by_metrics_threshold',
                         enabled=True,
                         alarm_id='f',
                         description='f',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   evaluation_periods=1,
                                   granularity=60,
                                   metrics=[
                                       '41869681-5776-46d6-91ed-cccc43b6e4e3',
                                       'a1fb80f4-c242-4f57-87c6-68f47521059e']
                                   ),
                         ),
            models.Alarm(name='name3',
                         type='gnocchi_aggregation_by_resources_threshold',
                         enabled=True,
                         alarm_id='g',
                         description='f',
                         state='insufficient data',
                         state_reason='Not evaluated',
                         severity='critical',
                         state_timestamp=constants.MIN_DATETIME,
                         timestamp=constants.MIN_DATETIME,
                         ok_actions=[],
                         insufficient_data_actions=[],
                         alarm_actions=[],
                         repeat_actions=True,
                         user_id=self.auth_headers['X-User-Id'],
                         project_id=self.auth_headers['X-Project-Id'],
                         time_constraints=[],
                         rule=dict(comparison_operator='gt',
                                   threshold=2.0,
                                   aggregation_method='mean',
                                   granularity=60,
                                   evaluation_periods=1,
                                   metric='meter.test',
                                   resource_type='instance',
                                   query='{"=": {"server_group": '
                                         '"my_autoscaling_group"}}')
                         ),
        ]:
            self.alarm_conn.create_alarm(alarm)

    def test_list_alarms(self):
        """All three seeded alarms are listed with their typed rules."""
        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(3, len(data))
        self.assertEqual(set(['name1', 'name2', 'name3']),
                         set(r['name'] for r in data))
        self.assertEqual(set(['meter.test']),
                         set(r['gnocchi_resources_threshold_rule']['metric']
                             for r in data
                             if 'gnocchi_resources_threshold_rule' in r))

    def test_post_gnocchi_metrics_alarm_cached(self):
        # NOTE(gordc): cache is a decorator and therefore, gets mocked across
        # entire scenario. ideally we should test both scenario but tough.
        # assume cache will return aggregation_method == ['count'] always.
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            RULE_KEY: {
                'metrics': ['b3d9d8ab-05e8-439f-89ad-5e978dd2a5eb',
                            '009d4faf-c275-46f0-8f2d-670b15bac2b0'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
            }
        }
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            self.post_json('/alarms', params=json, headers=self.auth_headers)
            # NOTE(review): this checks the *module-level* mock was never
            # called; the client is instantiated via clientlib.Client, so
            # clientlib.Client.called would be the stricter assertion —
            # confirm the intent before tightening.
            self.assertFalse(clientlib.called)

    def test_post_gnocchi_resources_alarm(self):
        """POST a resources-threshold alarm; it must land in storage."""
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_resources_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_resources_threshold_rule': {
                'metric': 'ameter',
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
                'resource_type': 'instance',
                'resource_id': '209ef69c-c10c-4efb-90ff-46f4b2d90d2e',
            }
        }
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self._verify_alarm(json, alarms[0])

    def test_post_gnocchi_metrics_alarm(self):
        """POST an aggregation-by-metrics alarm; it must land in storage."""
        json = {
            'enabled': False,
            'name': 'name_post',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_metrics_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            RULE_KEY: {
                'metrics': ['b3d9d8ab-05e8-439f-89ad-5e978dd2a5eb',
                            '009d4faf-c275-46f0-8f2d-670b15bac2b0'],
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
            }
        }
        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))
        self._verify_alarm(json, alarms[0])

    @mock.patch('aodh.keystone_client.get_client')
    def test_post_gnocchi_aggregation_alarm_project_constraint(self,
                                                               get_client):
        """The user-supplied query is AND-ed with a project constraint."""
        json = {
            'enabled': False,
            'name': 'project_constraint',
            'state': 'ok',
            'type': 'gnocchi_aggregation_by_resources_threshold',
            'severity': 'critical',
            'ok_actions': ['http://something/ok'],
            'alarm_actions': ['http://something/alarm'],
            'insufficient_data_actions': ['http://something/no'],
            'repeat_actions': True,
            'gnocchi_aggregation_by_resources_threshold_rule': {
                'metric': 'ameter',
                'comparison_operator': 'le',
                'aggregation_method': 'count',
                'threshold': 50,
                'evaluation_periods': 3,
                'granularity': 180,
                'resource_type': 'instance',
                'query': '{"=": {"server_group": "my_autoscaling_group"}}',
            }
        }

        # The stored query must scope resources to the caller's project
        # (directly or via the gnocchi service user) before the original
        # server_group condition.
        expected_query = {"and": [
            {"or": [
                {"=": {"created_by_project_id":
                       self.auth_headers['X-Project-Id']}},
                {"and": [
                    {"=": {"created_by_project_id": "<my-uuid>"}},
                    {"=": {"project_id": self.auth_headers['X-Project-Id']}}
                ]},
            ]},
            {"=": {"server_group": "my_autoscaling_group"}},
        ]}

        ks_client = mock.Mock()
        ks_client.projects.find.return_value = mock.Mock(id='<my-uuid>')
        get_client.return_value = ks_client

        with mock.patch('aodh.api.controllers.v2.alarm_rules.'
                        'gnocchi.client') as clientlib:
            c = clientlib.Client.return_value
            c.capabilities.list.return_value = {
                'aggregation_methods': ['count']}
            self.post_json('/alarms', params=json, headers=self.auth_headers)
            # BUG FIX: a stray trailing comma previously turned this
            # assertion statement into a discarded one-element tuple.
            self.assertEqual([mock.call(
                aggregation='count',
                metrics='ameter',
                needed_overlap=0,
                start="-1 day",
                stop="now",
                query=expected_query,
                resource_type="instance")],
                c.metric.aggregation.mock_calls)

        alarms = list(self.alarm_conn.get_alarms(enabled=False))
        self.assertEqual(1, len(alarms))

        json['gnocchi_aggregation_by_resources_threshold_rule']['query'] = (
            jsonlib.dumps(expected_query))
        self._verify_alarm(json, alarms[0])
class TestAlarmsEvent(TestAlarmsBase):
    """Tests for event-type alarms."""

    def test_list_alarms(self):
        """A stored event alarm is listed with its event_rule intact."""
        alarm = models.Alarm(name='event.alarm.1',
                             type='event',
                             enabled=True,
                             alarm_id='h',
                             description='h',
                             state='insufficient data',
                             state_reason='insufficient data',
                             severity='moderate',
                             state_timestamp=constants.MIN_DATETIME,
                             timestamp=constants.MIN_DATETIME,
                             ok_actions=[],
                             insufficient_data_actions=[],
                             alarm_actions=[],
                             repeat_actions=False,
                             user_id=self.auth_headers['X-User-Id'],
                             project_id=self.auth_headers['X-Project-Id'],
                             time_constraints=[],
                             rule=dict(event_type='event.test',
                                       query=[]),
                             )
        self.alarm_conn.create_alarm(alarm)

        data = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(data))
        self.assertEqual(set(['event.alarm.1']),
                         set(r['name'] for r in data))
        self.assertEqual(set(['event.test']),
                         set(r['event_rule']['event_type']
                             for r in data if 'event_rule' in r))

    def test_post_event_alarm_defaults(self):
        """POST with minimal fields fills the event-alarm defaults."""
        # Expected stored attribute values after defaulting.
        to_check = {
            'enabled': True,
            'name': 'added_alarm_defaults',
            'state': 'insufficient data',
            'description': 'Alarm when * event occurred.',
            'type': 'event',
            'ok_actions': [],
            'alarm_actions': [],
            'insufficient_data_actions': [],
            'repeat_actions': False,
            'rule': {
                'event_type': '*',
                'query': [],
            }
        }
        json = {
            'name': 'added_alarm_defaults',
            'type': 'event',
            'event_rule': {
                'event_type': '*',
                'query': []
            }
        }
        self.post_json('/alarms', params=json, status=201,
                       headers=self.auth_headers)
        alarms = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(alarms))
        for alarm in alarms:
            if alarm.name == 'added_alarm_defaults':
                for key in to_check:
                    self.assertEqual(to_check[key], getattr(alarm, key))
                break
        else:
            self.fail("Alarm not found")
class TestAlarmsCompositeRule(TestAlarmsBase):
    """API tests for composite (boolean combination) alarm rules."""

    @staticmethod
    def _metric_threshold_rule(evaluation_periods, threshold,
                               aggregation_method):
        # Every sub-rule targets the same two metrics; only the window,
        # threshold and aggregation differ between them.
        return {
            "type": "gnocchi_aggregation_by_metrics_threshold",
            "metrics": ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                        'a1fb80f4-c242-4f57-87c6-68f47521059e'],
            "evaluation_periods": evaluation_periods,
            "threshold": threshold,
            "aggregation_method": aggregation_method,
            "granularity": 60,
            "comparison_operator": "gt"
        }

    def setUp(self):
        super(TestAlarmsCompositeRule, self).setUp()
        self.sub_rule1 = self._metric_threshold_rule(5, 0.8, "mean")
        self.sub_rule2 = self._metric_threshold_rule(4, 200, "max")
        self.sub_rule3 = self._metric_threshold_rule(3, 1000, "mean")
        # rule1 OR (rule2 AND rule3); tests mutate the shared sub-rule
        # dicts, which intentionally mutates self.rule too.
        self.rule = {
            "or": [self.sub_rule1,
                   {
                       "and": [self.sub_rule2, self.sub_rule3]
                   }]}

    def test_list_alarms(self):
        attrs = dict(name='composite_alarm',
                     type='composite',
                     enabled=True,
                     alarm_id='composite',
                     description='composite',
                     state='insufficient data',
                     state_reason='insufficient data',
                     severity='moderate',
                     state_timestamp=constants.MIN_DATETIME,
                     timestamp=constants.MIN_DATETIME,
                     ok_actions=[],
                     insufficient_data_actions=[],
                     alarm_actions=[],
                     repeat_actions=False,
                     user_id=self.auth_headers['X-User-Id'],
                     project_id=self.auth_headers['X-Project-Id'],
                     time_constraints=[],
                     rule=self.rule)
        self.alarm_conn.create_alarm(models.Alarm(**attrs))
        listed = self.get_json('/alarms', headers=self.auth_headers)
        self.assertEqual(1, len(listed))
        self.assertEqual({'composite_alarm'},
                         {item['name'] for item in listed})
        self.assertEqual(self.rule, listed[0]['composite_rule'])

    def test_post_with_composite_rule(self):
        body = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        self.post_json('/alarms', params=body, status=201,
                       headers=self.auth_headers)
        stored = list(self.alarm_conn.get_alarms())
        self.assertEqual(1, len(stored))
        self.assertEqual(self.rule, stored[0].rule)

    def test_post_with_sub_rule_with_wrong_type(self):
        # An unknown sub-rule type must be rejected with a 400 and an
        # explicit list of the supported rule types.
        self.sub_rule1['type'] = 'non-type'
        body = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        response = self.post_json('/alarms', params=body, status=400,
                                  expect_errors=True,
                                  headers=self.auth_headers)
        expected_err = ("Unsupported sub-rule type :non-type in composite "
                        "rule, should be one of: "
                        "['gnocchi_aggregation_by_metrics_threshold', "
                        "'gnocchi_aggregation_by_resources_threshold', "
                        "'gnocchi_resources_threshold', 'threshold']")
        self.assertEqual(expected_err,
                         response.json['error_message']['faultstring'])

    def test_post_with_sub_rule_with_only_required_params(self):
        # Sub-rules carrying only the mandatory fields are accepted.
        base = {
            "metrics": ['41869681-5776-46d6-91ed-cccc43b6e4e3',
                        'a1fb80f4-c242-4f57-87c6-68f47521059e'],
            "aggregation_method": "mean",
            "type": "gnocchi_aggregation_by_metrics_threshold"}
        first = dict(base, threshold=0.8)
        second = dict(base, threshold=200)
        body = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": {"and": [first, second]},
            "repeat_actions": False
        }
        self.post_json('/alarms', params=body, status=201,
                       headers=self.auth_headers)
        self.assertEqual(1, len(list(self.alarm_conn.get_alarms())))

    def test_post_with_sub_rule_with_invalid_params(self):
        # A boolean where a float threshold is expected must fail
        # validation with a type-mismatch fault string.
        self.sub_rule1['threshold'] = False
        body = {
            "type": "composite",
            "name": "composite_alarm",
            "composite_rule": self.rule,
            "repeat_actions": False
        }
        response = self.post_json('/alarms', params=body, status=400,
                                  expect_errors=True,
                                  headers=self.auth_headers)
        expected = ("Invalid input for field/attribute threshold. "
                    "Value: 'False'. Wrong type. Expected '%s', got '%s'"
                    % (type(1.0), type(True)))
        self.assertEqual(expected,
                         response.json['error_message']['faultstring'])
class TestPaginationQuery(TestAlarmsBase):
    """Tests for the sort/limit/marker pagination parameters on /alarms."""

    def setUp(self):
        super(TestPaginationQuery, self).setUp()
        for alarm in default_alarms(self.auth_headers):
            self.alarm_conn.create_alarm(alarm)

    def _names(self, url):
        # Fetch the alarm list at *url* and return just the names, in order.
        return [item['name']
                for item in self.get_json(url, headers=self.auth_headers)]

    def test_pagination_query_single_sort(self):
        self.assertEqual(['name3', 'name2', 'name1'],
                         self._names('/alarms?sort=name:desc'))
        self.assertEqual(['name1', 'name2', 'name3'],
                         self._names('/alarms?sort=name:asc'))

    def test_sort_by_severity_with_its_value(self):
        if self.engine != "mysql":
            self.skipTest("This is only implemented for MySQL")
        ascending = self.get_json('/alarms?sort=severity:asc',
                                  headers=self.auth_headers)
        self.assertEqual(['moderate', 'critical', 'critical'],
                         [item['severity'] for item in ascending])
        descending = self.get_json('/alarms?sort=severity:desc',
                                   headers=self.auth_headers)
        self.assertEqual(['critical', 'critical', 'moderate'],
                         [item['severity'] for item in descending])

    def test_pagination_query_limit(self):
        listed = self.get_json('/alarms?limit=2', headers=self.auth_headers)
        self.assertEqual(2, len(listed))

    def test_pagination_query_limit_sort(self):
        listed = self.get_json('/alarms?sort=name:asc&limit=2',
                               headers=self.auth_headers)
        self.assertEqual(2, len(listed))

    def test_pagination_query_marker(self):
        first_page = self.get_json('/alarms?sort=name:desc',
                                   headers=self.auth_headers)
        self.assertEqual(3, len(first_page))
        ids = [item['alarm_id'] for item in first_page]
        self.assertEqual(['name3', 'name2', 'name1'],
                         [item['name'] for item in first_page])
        # Resume listing just after the second alarm via the marker.
        second_page = self.get_json(
            '/alarms?sort=name:desc&marker=%s' % ids[1],
            headers=self.auth_headers)
        self.assertEqual(1, len(second_page))
        self.assertEqual(ids[2:],
                         [item['alarm_id'] for item in second_page])
        self.assertEqual(['name1'],
                         [item['name'] for item in second_page])

    def test_pagination_query_multiple_sorts(self):
        # Duplicate the default alarms under new ids so name collisions
        # force the secondary sort key to kick in.
        for alarm, new_id in zip(default_alarms(self.auth_headers),
                                 ['e', 'f', 'g', 'h']):
            alarm.alarm_id = new_id
            self.alarm_conn.create_alarm(alarm)
        self.assertEqual(6, len(self.get_json('/alarms',
                                              headers=self.auth_headers)))
        listed = self.get_json('/alarms?sort=name:desc&sort=alarm_id:asc',
                               headers=self.auth_headers)
        self.assertEqual([('name3', 'c'),
                          ('name3', 'g'), ('name2', 'b'), ('name2', 'f'),
                          ('name1', 'a'), ('name1', 'e')],
                         [(item['name'], item['alarm_id'])
                          for item in listed])

    def test_pagination_query_invalid_sort_key(self):
        resp = self.get_json('/alarms?sort=invalid_key:desc',
                             headers=self.auth_headers,
                             expect_errors=True)
        self.assertEqual(resp.status_code, 400)
        self.assertEqual("Invalid input for field/attribute sort. Value: "
                         "'invalid_key:desc'. the sort parameter should be"
                         " a pair of sort key and sort dir combined with "
                         "':', or only sort key specified and sort dir will "
                         "be default 'asc', the supported sort keys are: "
                         "('alarm_id', 'enabled', 'name', 'type', 'severity',"
                         " 'timestamp', 'user_id', 'project_id', 'state', "
                         "'repeat_actions', 'state_timestamp')",
                         resp.json['error_message']['faultstring'])

    def test_pagination_query_only_sort_key_specified(self):
        # Omitting the direction defaults to ascending.
        self.assertEqual(['name1', 'name2', 'name3'],
                         self._names('/alarms?sort=name'))

    def test_pagination_query_history_data(self):
        for i in moves.xrange(10):
            self._update_alarm('a', dict(name='%s' % i))
        history = self.get_json(
            '/alarms/a/history?sort=event_id:desc&sort=timestamp:desc',
            headers=self.auth_headers)
        # The service must return records already sorted as requested.
        self.assertEqual(
            sorted(history,
                   key=lambda item: (item['event_id'], item['timestamp']),
                   reverse=True),
            history)
|
import changes
import logging
import flask
import os
import os.path
import warnings
from celery.signals import task_postrun
from datetime import timedelta
from flask import request, session
from flask.ext.sqlalchemy import SQLAlchemy
from flask_debugtoolbar import DebugToolbarExtension
from flask_mail import Mail
from kombu import Queue
from raven.contrib.flask import Sentry
from urlparse import urlparse
from werkzeug.contrib.fixers import ProxyFix
from changes.constants import PROJECT_ROOT
from changes.api.controller import APIController
from changes.ext.celery import Celery
from changes.ext.pubsub import PubSub
from changes.ext.redis import Redis
# because foo.in_([]) ever executing is a bad idea
from sqlalchemy.exc import SAWarning
warnings.simplefilter('error', SAWarning)
class ChangesDebugToolbarExtension(DebugToolbarExtension):
    """Debug toolbar that can be force-enabled per request.

    Passing a ``__trace__`` query argument forces the toolbar on for that
    request and replaces the response body with the rendered toolbar HTML.
    """

    def _show_toolbar(self):
        # Force the toolbar whenever '__trace__' is present; otherwise
        # defer to the stock flask-debugtoolbar enable/disable logic.
        if '__trace__' in request.args:
            return True
        return super(ChangesDebugToolbarExtension, self)._show_toolbar()

    def process_response(self, response):
        real_request = request._get_current_object()
        # When tracing was requested, run each panel's response hook and
        # then swap the body for the rendered toolbar HTML (there is no
        # status-code check here; any traced response is rewritten if its
        # body is a plain sequence).
        if '__trace__' in real_request.args:
            for panel in self.debug_toolbars[real_request].panels:
                panel.process_response(real_request, response)
            if response.is_sequence:
                toolbar_html = self.debug_toolbars[real_request].render_toolbar()
                response.headers['content-type'] = 'text/html'
                response.response = [toolbar_html]
                response.content_length = len(toolbar_html)
        return response
# Process-wide extension singletons; each is bound to the actual Flask app
# in create_app() via its init_app() hook.
db = SQLAlchemy(session_options={})
api = APIController(prefix='/api/0')
mail = Mail()
pubsub = PubSub()
queue = Celery()
redis = Redis()
sentry = Sentry(logging=True, level=logging.ERROR)
def create_app(_read_config=True, gevent=False, **config):
    """Create and fully configure the Changes Flask application.

    Configuration is layered: the hard-coded defaults below, then the
    ``config`` keyword overrides, then (when ``_read_config`` is true) the
    file named by the ``CHANGES_CONF`` environment variable or
    ``~/.changes/changes.conf.py``.

    :param _read_config: when True, read an external config file on top of
        the defaults and ``config`` overrides.
    :param gevent: when True, prefix the Sentry DSN with ``gevent+`` so
        raven picks a gevent-compatible transport.
    :param config: explicit config overrides applied before any file is read.
    :raises ValueError: if ``BASE_URI`` is still unset after all layers.
    :returns: the configured :class:`flask.Flask` application.
    """
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))
    # Honor X-Forwarded-* headers set by the fronting proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)
    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'
    # Database / cache defaults.
    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    app.config['API_TRACEBACKS'] = True
    # Celery wiring: custom 'changes_json' serializer, results discarded.
    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000
    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', routing_key='repo.sync'),
    )
    # Route the heavyweight job/repo tasks onto their dedicated queues.
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
    }
    # (callable import path, signal name) pairs consumed by
    # configure_event_listeners().
    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.job_finished_handler', 'job.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.hipchat.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
    )
    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-builds': {
            'task': 'cleanup_builds',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=5),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'
    # Integration settings; all default to "disabled" until configured.
    app.config['SENTRY_DSN'] = None
    app.config['JENKINS_AUTH'] = None
    app.config['JENKINS_URL'] = None
    app.config['JENKINS_TOKEN'] = None
    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None
    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None
    app.config['REPO_ROOT'] = None
    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = None
    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)
    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')
    # Derive server name / URL scheme from BASE_URI unless explicitly set.
    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30
    if gevent and app.config['SENTRY_DSN']:
        app.config['SENTRY_DSN'] = 'gevent+{0}'.format(app.config['SENTRY_DSN'])
    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (if any) to Sentry error reports.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })
    # Bind the module-level extension singletons to this app instance.
    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    pubsub.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    configure_debug_toolbar(app)
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client)
    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)
    configure_templates(app)
    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)
    configure_event_listeners(app)
    configure_jobs(app)
    return app
def configure_debug_toolbar(app):
    """Attach the Changes debug toolbar extension to *app*."""
    return ChangesDebugToolbarExtension(app)
def configure_templates(app):
    """Register custom Jinja filters on *app*'s template environment."""
    from changes.utils.times import duration

    app.jinja_env.filters.update({'duration': duration})
def configure_api_routes(app):
    """Mount every REST endpoint on the module-level ``api`` controller.

    Views are imported lazily here to avoid circular imports at module
    load time. All paths are relative to the controller's '/api/0' prefix.
    """
    from changes.api.auth_index import AuthIndexAPIView
    from changes.api.author_build_index import AuthorBuildIndexAPIView
    from changes.api.build_comment_index import BuildCommentIndexAPIView
    from changes.api.build_details import BuildDetailsAPIView
    from changes.api.build_index import BuildIndexAPIView
    from changes.api.build_mark_seen import BuildMarkSeenAPIView
    from changes.api.build_cancel import BuildCancelAPIView
    from changes.api.build_restart import BuildRestartAPIView
    from changes.api.build_retry import BuildRetryAPIView
    from changes.api.build_test_index import BuildTestIndexAPIView
    from changes.api.change_details import ChangeDetailsAPIView
    from changes.api.change_index import ChangeIndexAPIView
    from changes.api.job_details import JobDetailsAPIView
    from changes.api.job_log_details import JobLogDetailsAPIView
    from changes.api.jobphase_index import JobPhaseIndexAPIView
    from changes.api.node_details import NodeDetailsAPIView
    from changes.api.node_index import NodeIndexAPIView
    from changes.api.node_job_index import NodeJobIndexAPIView
    from changes.api.patch_details import PatchDetailsAPIView
    from changes.api.plan_details import PlanDetailsAPIView
    from changes.api.plan_index import PlanIndexAPIView
    from changes.api.project_build_index import ProjectBuildIndexAPIView
    from changes.api.project_build_search import ProjectBuildSearchAPIView
    from changes.api.project_commit_details import ProjectCommitDetailsAPIView
    from changes.api.project_commit_index import ProjectCommitIndexAPIView
    from changes.api.project_index import ProjectIndexAPIView
    from changes.api.project_options_index import ProjectOptionsIndexAPIView
    from changes.api.project_test_details import ProjectTestDetailsAPIView
    from changes.api.project_test_group_index import ProjectTestGroupIndexAPIView
    from changes.api.project_test_index import ProjectTestIndexAPIView
    from changes.api.project_details import ProjectDetailsAPIView
    from changes.api.project_source_details import ProjectSourceDetailsAPIView
    from changes.api.project_source_build_index import ProjectSourceBuildIndexAPIView
    from changes.api.stream_index import StreamIndexAPIView
    from changes.api.task_details import TaskDetailsAPIView
    from changes.api.testcase_details import TestCaseDetailsAPIView
    # Auth / builds.
    api.add_resource(AuthIndexAPIView, '/auth/')
    api.add_resource(BuildIndexAPIView, '/builds/')
    api.add_resource(AuthorBuildIndexAPIView, '/authors/<author_id>/builds/')
    api.add_resource(BuildCommentIndexAPIView, '/builds/<build_id>/comments/')
    api.add_resource(BuildDetailsAPIView, '/builds/<build_id>/')
    api.add_resource(BuildMarkSeenAPIView, '/builds/<build_id>/mark_seen/')
    api.add_resource(BuildCancelAPIView, '/builds/<build_id>/cancel/')
    api.add_resource(BuildRestartAPIView, '/builds/<build_id>/restart/')
    api.add_resource(BuildRetryAPIView, '/builds/<build_id>/retry/')
    api.add_resource(BuildTestIndexAPIView, '/builds/<build_id>/tests/')
    # Jobs / changes / nodes / patches / plans.
    api.add_resource(JobDetailsAPIView, '/jobs/<job_id>/')
    api.add_resource(JobLogDetailsAPIView, '/jobs/<job_id>/logs/<source_id>/')
    api.add_resource(JobPhaseIndexAPIView, '/jobs/<job_id>/phases/')
    api.add_resource(ChangeIndexAPIView, '/changes/')
    api.add_resource(ChangeDetailsAPIView, '/changes/<change_id>/')
    api.add_resource(NodeDetailsAPIView, '/nodes/<node_id>/')
    api.add_resource(NodeIndexAPIView, '/nodes/')
    api.add_resource(NodeJobIndexAPIView, '/nodes/<node_id>/jobs/')
    api.add_resource(PatchDetailsAPIView, '/patches/<patch_id>/')
    api.add_resource(PlanIndexAPIView, '/plans/')
    api.add_resource(PlanDetailsAPIView, '/plans/<plan_id>/')
    # Projects and their nested collections.
    api.add_resource(ProjectIndexAPIView, '/projects/')
    api.add_resource(ProjectDetailsAPIView, '/projects/<project_id>/')
    api.add_resource(ProjectBuildIndexAPIView, '/projects/<project_id>/builds/')
    api.add_resource(ProjectBuildSearchAPIView, '/projects/<project_id>/builds/search/')
    api.add_resource(ProjectCommitIndexAPIView, '/projects/<project_id>/commits/')
    api.add_resource(ProjectCommitDetailsAPIView, '/projects/<project_id>/commits/<commit_id>/')
    api.add_resource(ProjectOptionsIndexAPIView, '/projects/<project_id>/options/')
    api.add_resource(ProjectTestIndexAPIView, '/projects/<project_id>/tests/')
    api.add_resource(ProjectTestGroupIndexAPIView, '/projects/<project_id>/testgroups/')
    api.add_resource(ProjectTestDetailsAPIView, '/projects/<project_id>/tests/<test_hash>/')
    api.add_resource(ProjectSourceDetailsAPIView, '/projects/<project_id>/sources/<source_id>/')
    api.add_resource(ProjectSourceBuildIndexAPIView, '/projects/<project_id>/sources/<source_id>/builds/')
    # Misc.
    api.add_resource(StreamIndexAPIView, '/stream/')
    api.add_resource(TestCaseDetailsAPIView, '/tests/<test_id>/')
    api.add_resource(TaskDetailsAPIView, '/tasks/<task_id>/')
def configure_web_routes(app):
    """Register static-asset, auth and catch-all index routes on *app*."""
    from changes.web.auth import AuthorizedView, LoginView, LogoutView
    from changes.web.index import IndexView
    from changes.web.static import StaticView

    # In debug mode serve raw assets; in production serve the built
    # bundle under a revision-stamped path for cache busting.
    if app.debug:
        asset_root = os.path.join(PROJECT_ROOT, 'static')
        asset_revision = '0'
    else:
        asset_root = os.path.join(PROJECT_ROOT, 'static-built')
        asset_revision = changes.get_revision() or '0'

    routes = (
        ('/static/' + asset_revision + '/<path:filename>',
         StaticView.as_view('static', root=asset_root)),
        ('/partials/<path:filename>',
         StaticView.as_view('partials',
                            root=os.path.join(PROJECT_ROOT, 'partials'))),
        ('/auth/login/',
         LoginView.as_view('login', authorized_url='authorized')),
        ('/auth/logout/',
         LogoutView.as_view('logout', complete_url='index')),
        ('/auth/complete/',
         AuthorizedView.as_view('authorized', authorized_url='authorized',
                                complete_url='index')),
        # The single-page app handles all remaining paths client-side.
        ('/<path:path>', IndexView.as_view('index-path')),
        ('/', IndexView.as_view('index')),
    )
    for rule, view in routes:
        app.add_url_rule(rule, view_func=view)
def configure_debug_routes(app):
    """Expose debug-only endpoints for previewing notification mails."""
    from changes.debug.reports.build import BuildReportMailView
    from changes.debug.mail.job_result import JobResultMailView

    debug_views = (
        ('/debug/mail/report/build/',
         BuildReportMailView.as_view('debug-build-report')),
        ('/debug/mail/result/job/<job_id>/',
         JobResultMailView.as_view('debug-build-result')),
    )
    for rule, view in debug_views:
        app.add_url_rule(rule, view_func=view)
def configure_jobs(app):
    """Register every background task with the celery queue and hook
    SQLAlchemy session cleanup to run after each task."""
    from changes.jobs.check_repos import check_repos
    from changes.jobs.cleanup_builds import cleanup_builds
    from changes.jobs.create_job import create_job
    from changes.jobs.notify_listeners import (
        notify_build_finished, notify_job_finished, notify_revision_created
    )
    from changes.jobs.sync_artifact import sync_artifact
    from changes.jobs.sync_build import sync_build
    from changes.jobs.sync_job import sync_job
    from changes.jobs.sync_job_step import sync_job_step
    from changes.jobs.sync_repo import sync_repo
    from changes.jobs.update_project_stats import (
        update_project_stats, update_project_plan_stats)

    # Task names must match the routes/schedules declared in create_app().
    registrations = (
        ('check_repos', check_repos),
        ('cleanup_builds', cleanup_builds),
        ('create_job', create_job),
        ('notify_build_finished', notify_build_finished),
        ('notify_job_finished', notify_job_finished),
        ('notify_revision_created', notify_revision_created),
        ('sync_artifact', sync_artifact),
        ('sync_build', sync_build),
        ('sync_job', sync_job),
        ('sync_job_step', sync_job_step),
        ('sync_repo', sync_repo),
        ('update_project_stats', update_project_stats),
        ('update_project_plan_stats', update_project_plan_stats),
    )
    for task_name, task_func in registrations:
        queue.register(task_name, task_func)

    @task_postrun.connect
    def cleanup_session(*args, **kwargs):
        """
        Emulate a request cycle for each task to ensure the session objects
        get cleaned up as expected.
        """
        db.session.commit()
        db.session.remove()
def register_changes_json():
    """Register the ``changes_json`` kombu serializer used by celery.

    Serialization is plain ``json.dumps``; deserialization coerces the
    payload to text first (UUID -> hex string, bytes -> decoded str,
    ``buffer`` -> decoded str) before parsing. NOTE(review): ``buffer`` is
    the Python 2 builtin — this branch would NameError on Python 3.
    """
    from kombu.serialization import register
    from kombu.utils.encoding import bytes_t
    from json import dumps, loads
    from uuid import UUID

    def _loads(obj):
        # Normalize the incoming payload to a text string before parsing.
        if isinstance(obj, UUID):
            obj = obj.hex
        elif isinstance(obj, bytes_t):
            obj = obj.decode()
        elif isinstance(obj, buffer):
            obj = bytes(obj).decode()
        return loads(obj)
    register('changes_json', dumps, _loads,
             content_type='application/json',
             content_encoding='utf-8')
# Register at import time so celery workers can decode payloads
# before any app is created.
register_changes_json()
def configure_event_listeners(app):
    """Wire each configured listener callable up to its named signal."""
    from changes.signals import register_listener
    from changes.utils.imports import import_string

    for listener_path, signal_name in app.config['EVENT_LISTENERS']:
        register_listener(import_string(listener_path), signal_name)
Enable debug toolbar 100% of the time
import changes
import logging
import flask
import os
import os.path
import warnings
from celery.signals import task_postrun
from datetime import timedelta
from flask import request, session
from flask.ext.sqlalchemy import SQLAlchemy
from flask_debugtoolbar import DebugToolbarExtension
from flask_mail import Mail
from kombu import Queue
from raven.contrib.flask import Sentry
from urlparse import urlparse
from werkzeug.contrib.fixers import ProxyFix
from changes.constants import PROJECT_ROOT
from changes.api.controller import APIController
from changes.ext.celery import Celery
from changes.ext.pubsub import PubSub
from changes.ext.redis import Redis
# because foo.in_([]) ever executing is a bad idea
from sqlalchemy.exc import SAWarning
warnings.simplefilter('error', SAWarning)
class ChangesDebugToolbarExtension(DebugToolbarExtension):
    """Debug toolbar that can be force-enabled per request.

    Passing a ``__trace__`` query argument forces the toolbar on for that
    request and replaces the response body with the rendered toolbar HTML.
    """

    def _show_toolbar(self):
        # Force the toolbar whenever '__trace__' is present; otherwise
        # defer to the stock flask-debugtoolbar enable/disable logic.
        if '__trace__' in request.args:
            return True
        return super(ChangesDebugToolbarExtension, self)._show_toolbar()

    def process_response(self, response):
        real_request = request._get_current_object()
        # When tracing was requested, run each panel's response hook and
        # then swap the body for the rendered toolbar HTML (there is no
        # status-code check here; any traced response is rewritten if its
        # body is a plain sequence).
        if '__trace__' in real_request.args:
            for panel in self.debug_toolbars[real_request].panels:
                panel.process_response(real_request, response)
            if response.is_sequence:
                toolbar_html = self.debug_toolbars[real_request].render_toolbar()
                response.headers['content-type'] = 'text/html'
                response.response = [toolbar_html]
                response.content_length = len(toolbar_html)
        return response
# Process-wide extension singletons; each is bound to the actual Flask app
# in create_app() via its init_app() hook.
db = SQLAlchemy(session_options={})
api = APIController(prefix='/api/0')
mail = Mail()
pubsub = PubSub()
queue = Celery()
redis = Redis()
sentry = Sentry(logging=True, level=logging.ERROR)
def create_app(_read_config=True, gevent=False, **config):
    """Create and fully configure the Changes Flask application.

    Configuration is layered: the hard-coded defaults below, then the
    ``config`` keyword overrides, then (when ``_read_config`` is true) the
    file named by the ``CHANGES_CONF`` environment variable or
    ``~/.changes/changes.conf.py``.

    :param _read_config: when True, read an external config file on top of
        the defaults and ``config`` overrides.
    :param gevent: when True, prefix the Sentry DSN with ``gevent+`` so
        raven picks a gevent-compatible transport.
    :param config: explicit config overrides applied before any file is read.
    :raises ValueError: if ``BASE_URI`` is still unset after all layers.
    :returns: the configured :class:`flask.Flask` application.
    """
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))
    # Honor X-Forwarded-* headers set by the fronting proxy.
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)
    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'
    # Database / cache defaults.
    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    app.config['API_TRACEBACKS'] = True
    # Celery wiring: custom 'changes_json' serializer, results discarded.
    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000
    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', routing_key='repo.sync'),
    )
    # Route the heavyweight job/repo tasks onto their dedicated queues.
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
    }
    # (callable import path, signal name) pairs consumed by
    # configure_event_listeners().
    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.job_finished_handler', 'job.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.hipchat.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
    )
    # Keep the debug toolbar available regardless of DEBUG mode; it only
    # renders when a request opts in (see ChangesDebugToolbarExtension).
    app.config['DEBUG_TB_ENABLED'] = True
    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-builds': {
            'task': 'cleanup_builds',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=5),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'
    # Integration settings; all default to "disabled" until configured.
    app.config['SENTRY_DSN'] = None
    app.config['JENKINS_AUTH'] = None
    app.config['JENKINS_URL'] = None
    app.config['JENKINS_TOKEN'] = None
    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None
    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None
    app.config['REPO_ROOT'] = None
    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = None
    app.config.update(config)
    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)
    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')
    # Derive server name / URL scheme from BASE_URI unless explicitly set.
    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)
    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30
    if gevent and app.config['SENTRY_DSN']:
        app.config['SENTRY_DSN'] = 'gevent+{0}'.format(app.config['SENTRY_DSN'])
    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (if any) to Sentry error reports.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })
    # Bind the module-level extension singletons to this app instance.
    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    pubsub.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    configure_debug_toolbar(app)
    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client)
    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)
    configure_templates(app)
    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)
    configure_event_listeners(app)
    configure_jobs(app)
    return app
def configure_debug_toolbar(app):
    """Attach the Changes debug toolbar extension to *app*."""
    return ChangesDebugToolbarExtension(app)
def configure_templates(app):
    """Register custom Jinja filters on *app*'s template environment."""
    from changes.utils.times import duration

    app.jinja_env.filters.update({'duration': duration})
def configure_api_routes(app):
    """Register every REST endpoint on the API.

    NOTE(review): ``api`` is not a parameter here — it is presumably a
    module-level Flask-RESTful ``Api`` instance bound to ``app``
    elsewhere; confirm before moving this function.
    """
    from changes.api.auth_index import AuthIndexAPIView
    from changes.api.author_build_index import AuthorBuildIndexAPIView
    from changes.api.build_comment_index import BuildCommentIndexAPIView
    from changes.api.build_details import BuildDetailsAPIView
    from changes.api.build_index import BuildIndexAPIView
    from changes.api.build_mark_seen import BuildMarkSeenAPIView
    from changes.api.build_cancel import BuildCancelAPIView
    from changes.api.build_restart import BuildRestartAPIView
    from changes.api.build_retry import BuildRetryAPIView
    from changes.api.build_test_index import BuildTestIndexAPIView
    from changes.api.change_details import ChangeDetailsAPIView
    from changes.api.change_index import ChangeIndexAPIView
    from changes.api.job_details import JobDetailsAPIView
    from changes.api.job_log_details import JobLogDetailsAPIView
    from changes.api.jobphase_index import JobPhaseIndexAPIView
    from changes.api.node_details import NodeDetailsAPIView
    from changes.api.node_index import NodeIndexAPIView
    from changes.api.node_job_index import NodeJobIndexAPIView
    from changes.api.patch_details import PatchDetailsAPIView
    from changes.api.plan_details import PlanDetailsAPIView
    from changes.api.plan_index import PlanIndexAPIView
    from changes.api.project_build_index import ProjectBuildIndexAPIView
    from changes.api.project_build_search import ProjectBuildSearchAPIView
    from changes.api.project_commit_details import ProjectCommitDetailsAPIView
    from changes.api.project_commit_index import ProjectCommitIndexAPIView
    from changes.api.project_index import ProjectIndexAPIView
    from changes.api.project_options_index import ProjectOptionsIndexAPIView
    from changes.api.project_test_details import ProjectTestDetailsAPIView
    from changes.api.project_test_group_index import ProjectTestGroupIndexAPIView
    from changes.api.project_test_index import ProjectTestIndexAPIView
    from changes.api.project_details import ProjectDetailsAPIView
    from changes.api.project_source_details import ProjectSourceDetailsAPIView
    from changes.api.project_source_build_index import ProjectSourceBuildIndexAPIView
    from changes.api.stream_index import StreamIndexAPIView
    from changes.api.task_details import TaskDetailsAPIView
    from changes.api.testcase_details import TestCaseDetailsAPIView

    # Auth / builds / jobs / changes.
    api.add_resource(AuthIndexAPIView, '/auth/')
    api.add_resource(BuildIndexAPIView, '/builds/')
    api.add_resource(AuthorBuildIndexAPIView, '/authors/<author_id>/builds/')
    api.add_resource(BuildCommentIndexAPIView, '/builds/<build_id>/comments/')
    api.add_resource(BuildDetailsAPIView, '/builds/<build_id>/')
    api.add_resource(BuildMarkSeenAPIView, '/builds/<build_id>/mark_seen/')
    api.add_resource(BuildCancelAPIView, '/builds/<build_id>/cancel/')
    api.add_resource(BuildRestartAPIView, '/builds/<build_id>/restart/')
    api.add_resource(BuildRetryAPIView, '/builds/<build_id>/retry/')
    api.add_resource(BuildTestIndexAPIView, '/builds/<build_id>/tests/')
    api.add_resource(JobDetailsAPIView, '/jobs/<job_id>/')
    api.add_resource(JobLogDetailsAPIView, '/jobs/<job_id>/logs/<source_id>/')
    api.add_resource(JobPhaseIndexAPIView, '/jobs/<job_id>/phases/')
    api.add_resource(ChangeIndexAPIView, '/changes/')
    api.add_resource(ChangeDetailsAPIView, '/changes/<change_id>/')
    # Nodes / patches / plans.
    api.add_resource(NodeDetailsAPIView, '/nodes/<node_id>/')
    api.add_resource(NodeIndexAPIView, '/nodes/')
    api.add_resource(NodeJobIndexAPIView, '/nodes/<node_id>/jobs/')
    api.add_resource(PatchDetailsAPIView, '/patches/<patch_id>/')
    api.add_resource(PlanIndexAPIView, '/plans/')
    api.add_resource(PlanDetailsAPIView, '/plans/<plan_id>/')
    # Projects and their nested resources.
    api.add_resource(ProjectIndexAPIView, '/projects/')
    api.add_resource(ProjectDetailsAPIView, '/projects/<project_id>/')
    api.add_resource(ProjectBuildIndexAPIView, '/projects/<project_id>/builds/')
    api.add_resource(ProjectBuildSearchAPIView, '/projects/<project_id>/builds/search/')
    api.add_resource(ProjectCommitIndexAPIView, '/projects/<project_id>/commits/')
    api.add_resource(ProjectCommitDetailsAPIView, '/projects/<project_id>/commits/<commit_id>/')
    api.add_resource(ProjectOptionsIndexAPIView, '/projects/<project_id>/options/')
    api.add_resource(ProjectTestIndexAPIView, '/projects/<project_id>/tests/')
    api.add_resource(ProjectTestGroupIndexAPIView, '/projects/<project_id>/testgroups/')
    api.add_resource(ProjectTestDetailsAPIView, '/projects/<project_id>/tests/<test_hash>/')
    api.add_resource(ProjectSourceDetailsAPIView, '/projects/<project_id>/sources/<source_id>/')
    api.add_resource(ProjectSourceBuildIndexAPIView, '/projects/<project_id>/sources/<source_id>/builds/')
    # Misc.
    api.add_resource(StreamIndexAPIView, '/stream/')
    api.add_resource(TestCaseDetailsAPIView, '/tests/<test_id>/')
    api.add_resource(TaskDetailsAPIView, '/tasks/<task_id>/')
def configure_web_routes(app):
    """Register browser-facing routes: static assets, auth flow, and the
    catch-all index view."""
    from changes.web.auth import AuthorizedView, LoginView, LogoutView
    from changes.web.index import IndexView
    from changes.web.static import StaticView

    # In debug mode serve raw sources with a constant revision; in
    # production serve built assets under a revision-stamped URL so each
    # deploy busts browser caches.
    if app.debug:
        static_root = os.path.join(PROJECT_ROOT, 'static')
        revision = '0'
    else:
        static_root = os.path.join(PROJECT_ROOT, 'static-built')
        revision = changes.get_revision() or '0'
    app.add_url_rule(
        '/static/' + revision + '/<path:filename>',
        view_func=StaticView.as_view('static', root=static_root))
    app.add_url_rule(
        '/partials/<path:filename>',
        view_func=StaticView.as_view('partials', root=os.path.join(PROJECT_ROOT, 'partials')))
    app.add_url_rule(
        '/auth/login/', view_func=LoginView.as_view('login', authorized_url='authorized'))
    app.add_url_rule(
        '/auth/logout/', view_func=LogoutView.as_view('logout', complete_url='index'))
    app.add_url_rule(
        '/auth/complete/', view_func=AuthorizedView.as_view('authorized', authorized_url='authorized', complete_url='index'))
    # Catch-all: any other path renders the single-page-app shell.
    app.add_url_rule(
        '/<path:path>', view_func=IndexView.as_view('index-path'))
    app.add_url_rule(
        '/', view_func=IndexView.as_view('index'))
def configure_debug_routes(app):
    """Register routes that render outgoing mail in the browser, for
    debugging email templates."""
    from changes.debug.reports.build import BuildReportMailView
    from changes.debug.mail.job_result import JobResultMailView

    app.add_url_rule(
        '/debug/mail/report/build/', view_func=BuildReportMailView.as_view('debug-build-report'))
    app.add_url_rule(
        '/debug/mail/result/job/<job_id>/', view_func=JobResultMailView.as_view('debug-build-result'))
def configure_jobs(app):
    """Register every background job function under its queue name.

    NOTE(review): ``queue`` is not defined in this function — it is
    presumably a module-level queue instance created elsewhere in this
    file; confirm before moving this code.
    """
    from changes.jobs.check_repos import check_repos
    from changes.jobs.cleanup_builds import cleanup_builds
    from changes.jobs.create_job import create_job
    from changes.jobs.notify_listeners import (
        notify_build_finished, notify_job_finished, notify_revision_created
    )
    from changes.jobs.sync_artifact import sync_artifact
    from changes.jobs.sync_build import sync_build
    from changes.jobs.sync_job import sync_job
    from changes.jobs.sync_job_step import sync_job_step
    from changes.jobs.sync_repo import sync_repo
    from changes.jobs.update_project_stats import (
        update_project_stats, update_project_plan_stats)

    # Queue names mirror the function names one-to-one.
    queue.register('check_repos', check_repos)
    queue.register('cleanup_builds', cleanup_builds)
    queue.register('create_job', create_job)
    queue.register('notify_build_finished', notify_build_finished)
    queue.register('notify_job_finished', notify_job_finished)
    queue.register('notify_revision_created', notify_revision_created)
    queue.register('sync_artifact', sync_artifact)
    queue.register('sync_build', sync_build)
    queue.register('sync_job', sync_job)
    queue.register('sync_job_step', sync_job_step)
    queue.register('sync_repo', sync_repo)
    queue.register('update_project_stats', update_project_stats)
    queue.register('update_project_plan_stats', update_project_plan_stats)
# NOTE(review): task_postrun is presumably Celery's post-task signal,
# imported at module level (not visible in this chunk) — confirm.
@task_postrun.connect
def cleanup_session(*args, **kwargs):
    """
    Emulate a request cycle for each task to ensure the session objects
    get cleaned up as expected.
    """
    db.session.commit()
    db.session.remove()
def register_changes_json():
    """Register a 'changes_json' serializer with kombu.

    Encoding uses plain ``json.dumps``; decoding first normalises the
    payload (UUID, bytes or buffer) to a text string before parsing.
    """
    from kombu.serialization import register
    from kombu.utils.encoding import bytes_t
    from json import dumps, loads
    from uuid import UUID

    def _loads(obj):
        # Coerce the incoming payload to a str that json can parse.
        if isinstance(obj, UUID):
            obj = obj.hex
        elif isinstance(obj, bytes_t):
            obj = obj.decode()
        elif isinstance(obj, buffer):
            # NOTE(review): ``buffer`` is a Python 2 builtin; this branch
            # would raise NameError on Python 3 — confirm target runtime.
            obj = bytes(obj).decode()
        return loads(obj)
    register('changes_json', dumps, _loads,
             content_type='application/json',
             content_encoding='utf-8')
# Register at import time so the serializer is available to workers.
register_changes_json()
def configure_event_listeners(app):
    """Wire up the configured event listeners.

    Each ``EVENT_LISTENERS`` entry is a (dotted function path, signal
    name) pair; the function is imported and registered on that signal.
    """
    from changes.signals import register_listener
    from changes.utils.imports import import_string

    for listener_path, signal in app.config['EVENT_LISTENERS']:
        register_listener(import_string(listener_path), signal)
|
"""Administrative commands."""
from code import InteractiveConsole
from contextlib import redirect_stdout, redirect_stderr
from inspect import getdoc, isclass, _empty
from sqlalchemy import Boolean, inspect
from .commands import command, LocationTypes
from ..db import (
Player, BuildingType, UnitType, AttackType, FeatureType, Base,
BuildingRecruit
)
from ..menus import Menu, YesNoMenu
from ..options import options
from ..util import english_list
consoles = {}
class Console(InteractiveConsole):
    """A console with updated push and write methods."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Expose every declarative model class to code run in the
        # console, plus the global options object.
        for name, cls in Base._decl_class_registry.items():
            if isclass(cls):
                self.locals[name] = cls
        self.locals['options'] = options

    def write(self, string):
        """Send the provided string to self.player.message."""
        self.player.message(string)

    def push(self, con, player, location, entry_point, code):
        """Update self.locals, then run the code."""
        # Bind the current player so write() routes output to them.
        self.player = player
        kwargs = con.get_default_kwargs(player, location, entry_point)
        self.locals.update(**kwargs, console=self)
        res = super().push(code)
        # Strip the per-call names again; the console is reused across
        # invocations for the same player.
        for name in kwargs:
            del self.locals[name]
        self.player = None
        return res
@command(admin=True)
def disconnect(con, command_name, player, args, id, response=None):
    """Disconnect another player.

    First invocation (response is None) sends a yes / no confirmation
    menu which re-invokes this command with ``response`` set.
    """
    p = Player.get(id)
    if p is None:
        con.message('Invalid ID.')
    elif response is None:
        m = YesNoMenu(
            f'Are you sure you want to disconnect {p}?', command_name,
            args=args
        )
        m.send(con)
    elif response:
        if not p.connected:
            con.message('They are already disconnected.')
        else:
            p.message(f'You have been booted off the server by {player}.')
            p.disconnect()
    else:
        con.message('Cancelled.')
@command(admin=True)
def delete_player(con, command_name, player, args, id, response=None):
    """Delete another player.

    Asks for confirmation first; on yes, the target is notified,
    disconnected and removed from the database.
    """
    p = Player.get(id)
    if p is None:
        con.message('Invalid ID.')
    elif response is None:
        m = YesNoMenu(
            f'Are you sure you want to delete {p}?', command_name, args=args
        )
        m.send(con)
    elif response:
        p.message(f'You have been deleted by {player}.')
        p.disconnect()
        p.delete()
        player.message('Done.')
    else:
        player.message('Cancelled.')
@command(admin=True)
def make_admin(player, id):
    """Grant administrator privileges to the player with the given id."""
    target = Player.get(id)
    if target is None:
        player.message('Invalid ID.')
        return
    target.admin = True
    player.message(f'{target} is now an admin.')
@command(admin=True)
def revoke_admin(player, id):
    """Strip administrator privileges from the player with the given id."""
    target = Player.get(id)
    if target is None:
        player.message('Invalid ID.')
        return
    target.admin = False
    player.message(f'{target} is no longer an admin.')
@command(location_type=LocationTypes.any, admin=True, hotkey='backspace')
def python(command_name, con, player, location, entry_point, text=None):
    """Run some code.

    With no text, prompt with the player's last-entered code; with text,
    execute it in a per-player interactive console.
    """
    if text is None:
        con.text('Code', command_name, value=player.code)
    else:
        player.code = text
        if player.id not in consoles:
            consoles[player.id] = Console()
        c = consoles[player.id]
        # Console.write delivers to the player, so stdout / stderr
        # produced by the code is routed back to them.
        with redirect_stdout(c), redirect_stderr(c):
            res = c.push(con, player, location, entry_point, text)
        if res:
            # push() returned True: more input is expected.
            # NOTE(review): the console is already stored above, so this
            # re-assignment looks redundant — confirm intent.
            consoles[player.id] = c
@command(location_type=LocationTypes.any, hotkey='m', admin=True)
def make_menu(con):
    """Add / remove types."""
    m = Menu('Add / Remove Types')
    # One labelled section per editable type, each with add / edit /
    # remove entries that dispatch to the matching *_type command.
    for cls in (UnitType, BuildingType, AttackType, FeatureType):
        m.add_label(cls.__tablename__.replace('_', ' ').title())
        for action in ('add', 'edit', 'remove'):
            m.add_item(
                action.title(), f'{action}_type',
                args=dict(class_name=cls.__name__)
            )
    m.send(con)
@command(admin=True)
def add_type(con, class_name):
    """Add a new type."""
    # Only classes in the declarative registry can be created this way.
    cls = Base._decl_class_registry[class_name]
    obj = cls(name='Untitled')
    obj.save()
    # Drop straight into the editor for the new object.
    con.call_command('edit_type', class_name=class_name, id=obj.id)
@command(admin=True)
def remove_type(con, command_name, class_name, id=None, response=None):
    """Remove a type.

    Runs in up to three steps: pick an object, confirm, then delete.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        # Step 1: list candidates; each entry re-invokes with id set.
        m = Menu('Select Object')
        for obj in cls.all():
            m.add_item(
                obj.get_name(), command_name,
                args=dict(class_name=class_name, id=obj.id)
            )
        m.send(con)
    else:
        kwargs = dict(class_name=class_name, id=id)
        if response is None:
            # Step 2: confirm before deleting.
            m = YesNoMenu('Are you sure?', command_name, args=kwargs)
            m.send(con)
        elif response:
            obj = cls.get(id)
            if obj is None:
                con.message('Invalid type.')
            else:
                obj.delete()
                con.message('Done.')
        else:
            con.message('Cancelled.')
@command(admin=True)
def edit_type(con, command_name, class_name, id=None, column=None, text=None):
    """Edit a type.

    Menu-driven editor for any mapped class:

    * no ``id``: list objects to pick from.
    * ``id`` only: show a menu of the object's columns and relations.
    * ``id`` + ``column``: prompt for (or, when ``text`` is given,
      apply) a new value for that column.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        m = Menu('Objects')
        for obj in cls.all():
            m.add_item(
                obj.get_name(), command_name,
                args=dict(class_name=class_name, id=obj.id)
            )
        m.send(con)
    else:
        i = inspect(cls)
        obj = cls.get(id)
        if column is not None:
            kwargs = dict(class_name=class_name, id=id, column=column)
            c = i.c[column]
            if text is None:
                keys = list(c.foreign_keys)
                if len(keys) == 1:
                    # Foreign-key column: offer the remote objects (and
                    # NULL when allowed) instead of free-text entry.
                    key = keys[0]
                    remote_class = Base.get_class_from_table(
                        key.column.table
                    )
                    m = Menu('Select Object')
                    if c.nullable:
                        null_kwargs = kwargs.copy()
                        null_kwargs['text'] = ''
                        m.add_item('NULL', command_name, args=null_kwargs)
                    for thing in remote_class.all():
                        remote_kwargs = kwargs.copy()
                        remote_kwargs['text'] = str(thing.id)
                        m.add_item(
                            thing.get_name(), command_name, args=remote_kwargs
                        )
                    return m.send(con)
                value = getattr(obj, column)
                if value is None:
                    value = ''
                else:
                    value = str(value)
                return con.text(
                    'Enter value', command_name, value=value, args=kwargs
                )
            else:
                if text == '':  # Same as None.
                    if c.nullable:
                        value = None
                    else:
                        # Fix: user-facing message previously read
                        # "not nullble".
                        con.message('That column is not nullable.')
                        value = _empty
                else:
                    try:
                        value = c.type.python_type(text)
                    except ValueError:
                        con.message('Invalid value.')
                        value = _empty
                # _empty is a sentinel meaning "don't update".
                if value is not _empty:
                    setattr(obj, column, value)
                    obj.save()
        # Build the object's column menu.
        m = Menu(obj.get_name())
        m.add_label(getdoc(cls))
        kwargs = dict(class_name=class_name, id=obj.id)
        for c in sorted(i.c, key=lambda thing: thing.name):
            if c.primary_key:
                continue
            name = c.name
            column_kwargs = kwargs.copy()
            column_kwargs['column'] = name
            title = name.replace('_', ' ').title()
            value = getattr(obj, name)
            keys = list(c.foreign_keys)
            new_value = None
            if value is not None:
                if isinstance(c.type, Boolean):
                    # Booleans toggle straight from the menu entry.
                    new_value = not value
                if keys:
                    key = keys[0]
                    remote_class = Base.get_class_from_table(
                        key.column.table
                    )
                    value = remote_class.get(value).get_name()
                else:
                    value = repr(value)
            column_kwargs['text'] = new_value
            m.add_item(
                f'{title}: {value} [{c.type}]', command_name,
                args=column_kwargs
            )
        if cls is BuildingType:
            kwargs = dict(building_type_id=obj.id)
            el = english_list(obj.builders, empty='None')
            m.add_item(
                f'Unit types that can build this building: {el}',
                'edit_builders', args=kwargs
            )
            el = english_list(obj.recruits, empty='None')
            m.add_item(
                f'Unit types this building can recruit: {el}',
                'edit_recruits', args=kwargs
            )
        elif cls is UnitType:
            m.add_label('Buildings which can be built by units of this type')
            for bt in obj.can_build:
                m.add_item(
                    bt.get_name(), 'edit_type', args=dict(
                        class_name='BuildingType', id=bt.id
                    )
                )
            m.add_label('Buildings which can recruit units of this type')
            for bm in BuildingRecruit.all(unit_type_id=obj.id):
                bt = BuildingType.get(bm.building_type_id)
                m.add_item(
                    bt.get_name(), 'edit_recruits', args=dict(
                        building_type_id=bt.id, building_unit_id=bm.id
                    )
                )
        m.add_item('Done', command_name, args=dict(class_name=class_name))
        m.send(con)
@command(admin=True)
def edit_builders(con, command_name, building_type_id, unit_type_id=None):
    """Add and remove unit types that can build buildings."""
    bt = BuildingType.get(building_type_id)
    if unit_type_id is None:
        # Checklist menu: (*) marks unit types that can already build bt.
        m = Menu('Unit Types')
        for mt in UnitType.all():
            if bt in mt.can_build:
                checked = '*'
            else:
                checked = ' '
            m.add_item(
                f'{mt.get_name()} ({checked})', command_name, args=dict(
                    building_type_id=bt.id, unit_type_id=mt.id
                )
            )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
    else:
        # Toggle membership, then redisplay the menu.
        mt = UnitType.get(unit_type_id)
        if mt in bt.builders:
            bt.builders.remove(mt)
            action = 'no longer'
        else:
            bt.builders.append(mt)
            action = 'now'
        con.message(f'{mt.get_name()} can {action} build {bt.get_name()}.')
        con.call_command(command_name, building_type_id=bt.id)
@command(admin=True)
def delete_object(con, command_name, class_name, id=None, response=None):
    """Delete the given object.

    Without an id, presents a pick list; then asks for confirmation
    before deleting.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        m = Menu('Objects')
        for obj in cls.all():
            m.add_item(
                str(obj), command_name, args=dict(
                    class_name=class_name, id=obj.id
                )
            )
        m.send(con)
    elif response is None:
        m = YesNoMenu(
            'Are you sure?', command_name, args=dict(
                class_name=class_name, id=id
            )
        )
        m.send(con)
    elif response:
        obj = cls.get(id)
        # NOTE(review): unlike remove_type there is no None check here;
        # a stale id would make obj.delete() raise — confirm intended.
        obj.delete()
        con.message('Done.')
    else:
        con.message('Cancelled.')
@command(admin=True)
def add_recruit(con, command_name, building_type_id, unit_type_id=None):
    """Add a recruit to the given building type."""
    bt = BuildingType.get(building_type_id)
    if unit_type_id is None:
        # Offer every unit type; selecting one re-invokes with it set.
        m = Menu('Unit Types')
        for mt in UnitType.all():
            m.add_item(
                mt.get_name(), command_name, args=dict(
                    building_type_id=bt.id, unit_type_id=mt.id
                )
            )
        m.send(con)
    else:
        mt = UnitType.get(unit_type_id)
        bt.add_recruit(mt).save()
        # Jump straight back into the recruit editor.
        con.call_command('edit_recruits', building_type_id=bt.id)
@command(admin=True)
def edit_recruits(
    con, command_name, building_type_id, building_unit_id=None,
    resource_name=None, text=None
):
    """Edit recruits for the given building type.

    Drill-down editor: list recruit entries, then an entry's resource
    fields, then prompt for / apply a new value for one field.
    """
    columns = inspect(BuildingRecruit).c
    resource_names = BuildingRecruit.resource_names()
    # pop_time is editable alongside the resource cost columns.
    resource_names.append('pop_time')
    bt = BuildingType.get(building_type_id)
    if building_unit_id is None:
        m = Menu('Recruits')
        m.add_item(
            'Add Recruit', 'add_recruit', args=dict(building_type_id=bt.id)
        )
        for bm in BuildingRecruit.all(building_type_id=bt.id):
            mt = UnitType.get(bm.unit_type_id)
            m.add_item(
                f'{mt.get_name()}: {bm.resources_string()}', command_name,
                args=dict(building_type_id=bt.id, building_unit_id=bm.id)
            )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
    else:
        bm = BuildingRecruit.get(building_unit_id)
        kwargs = dict(building_type_id=bt.id, building_unit_id=bm.id)
        if resource_name is not None:
            if text is None:
                kwargs['resource_name'] = resource_name
                # NOTE(review): resource_name is only validated in the
                # apply branch below; an unknown name here would raise
                # AttributeError instead of a friendly message.
                value = getattr(bm, resource_name)
                if value is None:
                    value = ''
                # NOTE(review): value may still be an int here, whereas
                # edit_type str()s first — confirm con.text accepts both.
                return con.text(
                    'Enter value', command_name, value=value, args=kwargs
                )
            else:
                if not text:
                    if columns[resource_name].nullable:
                        value = None
                    else:
                        con.message('Value cannot be null.')
                        value = _empty
                else:
                    try:
                        value = int(text)
                    except ValueError:
                        con.message('Invalid value.')
                        value = _empty
                # _empty acts as a "don't update" sentinel.
                if value is not _empty:
                    if resource_name in resource_names:
                        setattr(bm, resource_name, value)
                        bm.save()
                    else:
                        con.message('Invalid resource name.')
        kwargs = dict(building_type_id=bt.id, building_unit_id=bm.id)
        m = Menu('Recruit Options')
        for name in resource_names:
            resource_kwargs = kwargs.copy()
            resource_kwargs['resource_name'] = name
            value = getattr(bm, name)
            m.add_item(
                f'{name.title()}: {value}', command_name, args=resource_kwargs
            )
        m.add_item(
            'Delete', 'delete_object', args=dict(
                class_name='BuildingRecruit', id=bm.id
            )
        )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
Allow many admin commands to run from any location, without requiring a map.
"""Administrative commands."""
from code import InteractiveConsole
from contextlib import redirect_stdout, redirect_stderr
from inspect import getdoc, isclass, _empty
from sqlalchemy import Boolean, inspect
from .commands import command, LocationTypes
from ..db import (
Player, BuildingType, UnitType, AttackType, FeatureType, Base,
BuildingRecruit
)
from ..menus import Menu, YesNoMenu
from ..options import options
from ..util import english_list
consoles = {}
class Console(InteractiveConsole):
    """A console with updated push and write methods."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Expose every declarative model class to code run in the
        # console, plus the global options object.
        for name, cls in Base._decl_class_registry.items():
            if isclass(cls):
                self.locals[name] = cls
        self.locals['options'] = options

    def write(self, string):
        """Send the provided string to self.player.message."""
        self.player.message(string)

    def push(self, con, player, location, entry_point, code):
        """Update self.locals, then run the code."""
        # Bind the current player so write() routes output to them.
        self.player = player
        kwargs = con.get_default_kwargs(player, location, entry_point)
        self.locals.update(**kwargs, console=self)
        res = super().push(code)
        # Strip the per-call names again; the console is reused across
        # invocations for the same player.
        for name in kwargs:
            del self.locals[name]
        self.player = None
        return res
@command(location_type=LocationTypes.any, admin=True)
def disconnect(con, command_name, player, args, id, response=None):
    """Disconnect another player.

    First invocation (response is None) sends a yes / no confirmation
    menu which re-invokes this command with ``response`` set.
    """
    p = Player.get(id)
    if p is None:
        con.message('Invalid ID.')
    elif response is None:
        m = YesNoMenu(
            f'Are you sure you want to disconnect {p}?', command_name,
            args=args
        )
        m.send(con)
    elif response:
        if not p.connected:
            con.message('They are already disconnected.')
        else:
            p.message(f'You have been booted off the server by {player}.')
            p.disconnect()
    else:
        con.message('Cancelled.')
@command(location_type=LocationTypes.any, admin=True)
def delete_player(con, command_name, player, args, id, response=None):
    """Delete another player.

    Asks for confirmation first; on yes, the target is notified,
    disconnected and removed from the database.
    """
    p = Player.get(id)
    if p is None:
        con.message('Invalid ID.')
    elif response is None:
        m = YesNoMenu(
            f'Are you sure you want to delete {p}?', command_name, args=args
        )
        m.send(con)
    elif response:
        p.message(f'You have been deleted by {player}.')
        p.disconnect()
        p.delete()
        player.message('Done.')
    else:
        player.message('Cancelled.')
@command(location_type=LocationTypes.any, admin=True)
def make_admin(player, id):
    """Grant administrator privileges to the player with the given id."""
    target = Player.get(id)
    if target is None:
        player.message('Invalid ID.')
        return
    target.admin = True
    player.message(f'{target} is now an admin.')
@command(location_type=LocationTypes.any, admin=True)
def revoke_admin(player, id):
    """Strip administrator privileges from the player with the given id."""
    target = Player.get(id)
    if target is None:
        player.message('Invalid ID.')
        return
    target.admin = False
    player.message(f'{target} is no longer an admin.')
@command(location_type=LocationTypes.any, admin=True, hotkey='backspace')
def python(command_name, con, player, location, entry_point, text=None):
    """Run some code.

    With no text, prompt with the player's last-entered code; with text,
    execute it in a per-player interactive console.
    """
    if text is None:
        con.text('Code', command_name, value=player.code)
    else:
        player.code = text
        if player.id not in consoles:
            consoles[player.id] = Console()
        c = consoles[player.id]
        # Console.write delivers to the player, so stdout / stderr
        # produced by the code is routed back to them.
        with redirect_stdout(c), redirect_stderr(c):
            res = c.push(con, player, location, entry_point, text)
        if res:
            # push() returned True: more input is expected.
            # NOTE(review): the console is already stored above, so this
            # re-assignment looks redundant — confirm intent.
            consoles[player.id] = c
@command(location_type=LocationTypes.any, hotkey='m', admin=True)
def make_menu(con):
    """Add / remove types."""
    m = Menu('Add / Remove Types')
    # One labelled section per editable type, each with add / edit /
    # remove entries that dispatch to the matching *_type command.
    for cls in (UnitType, BuildingType, AttackType, FeatureType):
        m.add_label(cls.__tablename__.replace('_', ' ').title())
        for action in ('add', 'edit', 'remove'):
            m.add_item(
                action.title(), f'{action}_type',
                args=dict(class_name=cls.__name__)
            )
    m.send(con)
@command(location_type=LocationTypes.any, admin=True)
def add_type(con, class_name):
    """Add a new type."""
    # Only classes in the declarative registry can be created this way.
    cls = Base._decl_class_registry[class_name]
    obj = cls(name='Untitled')
    obj.save()
    # Drop straight into the editor for the new object.
    con.call_command('edit_type', class_name=class_name, id=obj.id)
@command(location_type=LocationTypes.any, admin=True)
def remove_type(con, command_name, class_name, id=None, response=None):
    """Remove a type.

    Runs in up to three steps: pick an object, confirm, then delete.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        # Step 1: list candidates; each entry re-invokes with id set.
        m = Menu('Select Object')
        for obj in cls.all():
            m.add_item(
                obj.get_name(), command_name,
                args=dict(class_name=class_name, id=obj.id)
            )
        m.send(con)
    else:
        kwargs = dict(class_name=class_name, id=id)
        if response is None:
            # Step 2: confirm before deleting.
            m = YesNoMenu('Are you sure?', command_name, args=kwargs)
            m.send(con)
        elif response:
            obj = cls.get(id)
            if obj is None:
                con.message('Invalid type.')
            else:
                obj.delete()
                con.message('Done.')
        else:
            con.message('Cancelled.')
@command(location_type=LocationTypes.any, admin=True)
def edit_type(con, command_name, class_name, id=None, column=None, text=None):
    """Edit a type.

    Menu-driven editor for any mapped class:

    * no ``id``: list objects to pick from.
    * ``id`` only: show a menu of the object's columns and relations.
    * ``id`` + ``column``: prompt for (or, when ``text`` is given,
      apply) a new value for that column.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        m = Menu('Objects')
        for obj in cls.all():
            m.add_item(
                obj.get_name(), command_name,
                args=dict(class_name=class_name, id=obj.id)
            )
        m.send(con)
    else:
        i = inspect(cls)
        obj = cls.get(id)
        if column is not None:
            kwargs = dict(class_name=class_name, id=id, column=column)
            c = i.c[column]
            if text is None:
                keys = list(c.foreign_keys)
                if len(keys) == 1:
                    # Foreign-key column: offer the remote objects (and
                    # NULL when allowed) instead of free-text entry.
                    key = keys[0]
                    remote_class = Base.get_class_from_table(
                        key.column.table
                    )
                    m = Menu('Select Object')
                    if c.nullable:
                        null_kwargs = kwargs.copy()
                        null_kwargs['text'] = ''
                        m.add_item('NULL', command_name, args=null_kwargs)
                    for thing in remote_class.all():
                        remote_kwargs = kwargs.copy()
                        remote_kwargs['text'] = str(thing.id)
                        m.add_item(
                            thing.get_name(), command_name, args=remote_kwargs
                        )
                    return m.send(con)
                value = getattr(obj, column)
                if value is None:
                    value = ''
                else:
                    value = str(value)
                return con.text(
                    'Enter value', command_name, value=value, args=kwargs
                )
            else:
                if text == '':  # Same as None.
                    if c.nullable:
                        value = None
                    else:
                        # Fix: user-facing message previously read
                        # "not nullble".
                        con.message('That column is not nullable.')
                        value = _empty
                else:
                    try:
                        value = c.type.python_type(text)
                    except ValueError:
                        con.message('Invalid value.')
                        value = _empty
                # _empty is a sentinel meaning "don't update".
                if value is not _empty:
                    setattr(obj, column, value)
                    obj.save()
        # Build the object's column menu.
        m = Menu(obj.get_name())
        m.add_label(getdoc(cls))
        kwargs = dict(class_name=class_name, id=obj.id)
        for c in sorted(i.c, key=lambda thing: thing.name):
            if c.primary_key:
                continue
            name = c.name
            column_kwargs = kwargs.copy()
            column_kwargs['column'] = name
            title = name.replace('_', ' ').title()
            value = getattr(obj, name)
            keys = list(c.foreign_keys)
            new_value = None
            if value is not None:
                if isinstance(c.type, Boolean):
                    # Booleans toggle straight from the menu entry.
                    new_value = not value
                if keys:
                    key = keys[0]
                    remote_class = Base.get_class_from_table(
                        key.column.table
                    )
                    value = remote_class.get(value).get_name()
                else:
                    value = repr(value)
            column_kwargs['text'] = new_value
            m.add_item(
                f'{title}: {value} [{c.type}]', command_name,
                args=column_kwargs
            )
        if cls is BuildingType:
            kwargs = dict(building_type_id=obj.id)
            el = english_list(obj.builders, empty='None')
            m.add_item(
                f'Unit types that can build this building: {el}',
                'edit_builders', args=kwargs
            )
            el = english_list(obj.recruits, empty='None')
            m.add_item(
                f'Unit types this building can recruit: {el}',
                'edit_recruits', args=kwargs
            )
        elif cls is UnitType:
            m.add_label('Buildings which can be built by units of this type')
            for bt in obj.can_build:
                m.add_item(
                    bt.get_name(), 'edit_type', args=dict(
                        class_name='BuildingType', id=bt.id
                    )
                )
            m.add_label('Buildings which can recruit units of this type')
            for bm in BuildingRecruit.all(unit_type_id=obj.id):
                bt = BuildingType.get(bm.building_type_id)
                m.add_item(
                    bt.get_name(), 'edit_recruits', args=dict(
                        building_type_id=bt.id, building_unit_id=bm.id
                    )
                )
        m.add_item('Done', command_name, args=dict(class_name=class_name))
        m.send(con)
@command(location_type=LocationTypes.any, admin=True)
def edit_builders(con, command_name, building_type_id, unit_type_id=None):
    """Add and remove unit types that can build buildings."""
    bt = BuildingType.get(building_type_id)
    if unit_type_id is None:
        # Checklist menu: (*) marks unit types that can already build bt.
        m = Menu('Unit Types')
        for mt in UnitType.all():
            if bt in mt.can_build:
                checked = '*'
            else:
                checked = ' '
            m.add_item(
                f'{mt.get_name()} ({checked})', command_name, args=dict(
                    building_type_id=bt.id, unit_type_id=mt.id
                )
            )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
    else:
        # Toggle membership, then redisplay the menu.
        mt = UnitType.get(unit_type_id)
        if mt in bt.builders:
            bt.builders.remove(mt)
            action = 'no longer'
        else:
            bt.builders.append(mt)
            action = 'now'
        con.message(f'{mt.get_name()} can {action} build {bt.get_name()}.')
        con.call_command(command_name, building_type_id=bt.id)
@command(location_type=LocationTypes.any, admin=True)
def delete_object(con, command_name, class_name, id=None, response=None):
    """Delete the given object.

    Without an id, presents a pick list; then asks for confirmation
    before deleting.
    """
    cls = Base._decl_class_registry[class_name]
    if id is None:
        m = Menu('Objects')
        for obj in cls.all():
            m.add_item(
                str(obj), command_name, args=dict(
                    class_name=class_name, id=obj.id
                )
            )
        m.send(con)
    elif response is None:
        m = YesNoMenu(
            'Are you sure?', command_name, args=dict(
                class_name=class_name, id=id
            )
        )
        m.send(con)
    elif response:
        obj = cls.get(id)
        # NOTE(review): unlike remove_type there is no None check here;
        # a stale id would make obj.delete() raise — confirm intended.
        obj.delete()
        con.message('Done.')
    else:
        con.message('Cancelled.')
@command(location_type=LocationTypes.any, admin=True)
def add_recruit(con, command_name, building_type_id, unit_type_id=None):
    """Add a recruit to the given building type."""
    bt = BuildingType.get(building_type_id)
    if unit_type_id is None:
        # Offer every unit type; selecting one re-invokes with it set.
        m = Menu('Unit Types')
        for mt in UnitType.all():
            m.add_item(
                mt.get_name(), command_name, args=dict(
                    building_type_id=bt.id, unit_type_id=mt.id
                )
            )
        m.send(con)
    else:
        mt = UnitType.get(unit_type_id)
        bt.add_recruit(mt).save()
        # Jump straight back into the recruit editor.
        con.call_command('edit_recruits', building_type_id=bt.id)
@command(location_type=LocationTypes.any, admin=True)
def edit_recruits(
    con, command_name, building_type_id, building_unit_id=None,
    resource_name=None, text=None
):
    """Edit recruits for the given building type.

    Drill-down editor: list recruit entries, then an entry's resource
    fields, then prompt for / apply a new value for one field.
    """
    columns = inspect(BuildingRecruit).c
    resource_names = BuildingRecruit.resource_names()
    # pop_time is editable alongside the resource cost columns.
    resource_names.append('pop_time')
    bt = BuildingType.get(building_type_id)
    if building_unit_id is None:
        m = Menu('Recruits')
        m.add_item(
            'Add Recruit', 'add_recruit', args=dict(building_type_id=bt.id)
        )
        for bm in BuildingRecruit.all(building_type_id=bt.id):
            mt = UnitType.get(bm.unit_type_id)
            m.add_item(
                f'{mt.get_name()}: {bm.resources_string()}', command_name,
                args=dict(building_type_id=bt.id, building_unit_id=bm.id)
            )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
    else:
        bm = BuildingRecruit.get(building_unit_id)
        kwargs = dict(building_type_id=bt.id, building_unit_id=bm.id)
        if resource_name is not None:
            if text is None:
                kwargs['resource_name'] = resource_name
                # NOTE(review): resource_name is only validated in the
                # apply branch below; an unknown name here would raise
                # AttributeError instead of a friendly message.
                value = getattr(bm, resource_name)
                if value is None:
                    value = ''
                # NOTE(review): value may still be an int here, whereas
                # edit_type str()s first — confirm con.text accepts both.
                return con.text(
                    'Enter value', command_name, value=value, args=kwargs
                )
            else:
                if not text:
                    if columns[resource_name].nullable:
                        value = None
                    else:
                        con.message('Value cannot be null.')
                        value = _empty
                else:
                    try:
                        value = int(text)
                    except ValueError:
                        con.message('Invalid value.')
                        value = _empty
                # _empty acts as a "don't update" sentinel.
                if value is not _empty:
                    if resource_name in resource_names:
                        setattr(bm, resource_name, value)
                        bm.save()
                    else:
                        con.message('Invalid resource name.')
        kwargs = dict(building_type_id=bt.id, building_unit_id=bm.id)
        m = Menu('Recruit Options')
        for name in resource_names:
            resource_kwargs = kwargs.copy()
            resource_kwargs['resource_name'] = name
            value = getattr(bm, name)
            m.add_item(
                f'{name.title()}: {value}', command_name, args=resource_kwargs
            )
        m.add_item(
            'Delete', 'delete_object', args=dict(
                class_name='BuildingRecruit', id=bm.id
            )
        )
        m.add_item(
            'Done', 'edit_type', args=dict(class_name='BuildingType', id=bt.id)
        )
        m.send(con)
|
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.tasks.v2 CloudTasks API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.tasks_v2.gapic import cloud_tasks_client_config
from google.cloud.tasks_v2.gapic import enums
from google.cloud.tasks_v2.gapic.transports import cloud_tasks_grpc_transport
from google.cloud.tasks_v2.proto import cloudtasks_pb2
from google.cloud.tasks_v2.proto import cloudtasks_pb2_grpc
from google.cloud.tasks_v2.proto import queue_pb2
from google.cloud.tasks_v2.proto import task_pb2
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import options_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-tasks").version
class CloudTasksClient(object):
"""
Cloud Tasks allows developers to manage the execution of background
work in their applications.
"""
SERVICE_ADDRESS = "cloudtasks.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.cloud.tasks.v2.CloudTasks"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CloudTasksClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def location_path(cls, project, location):
"""Return a fully-qualified location string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}",
project=project,
location=location,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"projects/{project}", project=project
)
@classmethod
def queue_path(cls, project, location, queue):
"""Return a fully-qualified queue string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}",
project=project,
location=location,
queue=queue,
)
@classmethod
def task_path(cls, project, location, queue, task):
"""Return a fully-qualified task string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
project=project,
location=location,
queue=queue,
task=task,
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.CloudTasksGrpcTransport,
Callable[[~.Credentials, type], ~.CloudTasksGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):
Client options used to set user options on the client. API Endpoint
should be set through client_options.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = cloud_tasks_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=cloud_tasks_grpc_transport.CloudTasksGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = cloud_tasks_grpc_transport.CloudTasksGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def list_queues(
self,
parent,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists queues.
Queues are returned in lexicographical order.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # Iterate over all results
>>> for element in client.list_queues(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_queues(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The location name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``
filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue``
field can be used as a filter and several operators as supported. For
example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
described in `Stackdriver's Advanced Logs
Filters <https://cloud.google.com/logging/docs/view/advanced_filters>`__.
Sample filter "state: PAUSED".
Note that using filters might cause fewer queues than the requested
page\_size to be returned.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.tasks_v2.types.Queue` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_queues" not in self._inner_api_calls:
self._inner_api_calls[
"list_queues"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_queues,
default_retry=self._method_configs["ListQueues"].retry,
default_timeout=self._method_configs["ListQueues"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.ListQueuesRequest(
parent=parent, filter=filter_, page_size=page_size
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_queues"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="queues",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a queue.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.get_queue(name)
Args:
name (str): Required. The resource name of the queue. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_queue" not in self._inner_api_calls:
self._inner_api_calls[
"get_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_queue,
default_retry=self._method_configs["GetQueue"].retry,
default_timeout=self._method_configs["GetQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.GetQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_queue(
self,
parent,
queue,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a queue.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.create_queue(parent, queue)
Args:
parent (str): Required. The location name in which the queue will be created. For
example: ``projects/PROJECT_ID/locations/LOCATION_ID``
The list of allowed locations can be obtained by calling Cloud Tasks'
implementation of ``ListLocations``.
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create.
``Queue's name`` cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_queue" not in self._inner_api_calls:
self._inner_api_calls[
"create_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_queue,
default_retry=self._method_configs["CreateQueue"].retry,
default_timeout=self._method_configs["CreateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def update_queue(
self,
queue,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates a queue.
This method creates the queue if it does not exist and updates the queue
if it does exist.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.update_queue(queue)
Args:
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create or update.
The queue's ``name`` must be specified.
Output only fields cannot be modified using UpdateQueue. Any value
specified for an output only field will be ignored. The queue's ``name``
cannot be changed.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
update_mask (Union[dict, ~google.cloud.tasks_v2.types.FieldMask]): A mask used to specify which fields of the queue are being updated.
If empty, then all fields will be updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_queue" not in self._inner_api_calls:
self._inner_api_calls[
"update_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_queue,
default_retry=self._method_configs["UpdateQueue"].retry,
default_timeout=self._method_configs["UpdateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.UpdateQueueRequest(
queue=queue, update_mask=update_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("queue.name", queue.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["update_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a queue.
This command will delete the queue even if it has tasks in it.
Note: If you delete a queue, a queue with the same name can't be created
for 7 days.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> client.delete_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_queue" not in self._inner_api_calls:
self._inner_api_calls[
"delete_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_queue,
default_retry=self._method_configs["DeleteQueue"].retry,
default_timeout=self._method_configs["DeleteQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.DeleteQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["delete_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def purge_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Purges a queue by deleting all of its tasks.
All tasks created before this method is called are permanently deleted.
Purge operations can take up to one minute to take effect. Tasks
might be dispatched before the purge takes effect. A purge is irreversible.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.purge_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "purge_queue" not in self._inner_api_calls:
self._inner_api_calls[
"purge_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.purge_queue,
default_retry=self._method_configs["PurgeQueue"].retry,
default_timeout=self._method_configs["PurgeQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.PurgeQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["purge_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def pause_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Pauses the queue.
If a queue is paused then the system will stop dispatching tasks until
the queue is resumed via ``ResumeQueue``. Tasks can still be added when
the queue is paused. A queue is paused if its ``state`` is ``PAUSED``.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.pause_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "pause_queue" not in self._inner_api_calls:
self._inner_api_calls[
"pause_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.pause_queue,
default_retry=self._method_configs["PauseQueue"].retry,
default_timeout=self._method_configs["PauseQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.PauseQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["pause_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def resume_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Resume a queue.
This method resumes a queue after it has been ``PAUSED`` or
``DISABLED``. The state of a queue is stored in the queue's ``state``;
after calling this method it will be set to ``RUNNING``.
WARNING: Resuming many high-QPS queues at the same time can lead to
target overloading. If you are resuming high-QPS queues, follow the
500/50/5 pattern described in `Managing Cloud Tasks Scaling
Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.resume_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "resume_queue" not in self._inner_api_calls:
self._inner_api_calls[
"resume_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.resume_queue,
default_retry=self._method_configs["ResumeQueue"].retry,
default_timeout=self._method_configs["ResumeQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.ResumeQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["resume_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_iam_policy(
self,
resource,
options_=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the access control policy for a ``Queue``. Returns an empty policy
if the resource exists and does not have a policy set.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the specified
resource parent:
- ``cloudtasks.queues.getIamPolicy``
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
options_ (Union[dict, ~google.cloud.tasks_v2.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to
``GetIamPolicy``. This field is only used by Cloud IAM.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.GetPolicyOptions`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"get_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs["GetIamPolicy"].retry,
default_timeout=self._method_configs["GetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(
resource=resource, options=options_
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy for a ``Queue``. Replaces any existing
policy.
Note: The Cloud Console does not check queue-level IAM permissions yet.
Project-level permissions are required to use the Cloud Console.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the specified
resource parent:
- ``cloudtasks.queues.setIamPolicy``
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
policy (Union[dict, ~google.cloud.tasks_v2.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
    def test_iam_permissions(
        self,
        resource,
        permissions,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns permissions that a caller has on a ``Queue``. If the resource
        does not exist, this will return an empty set of permissions, not a
        ``NOT_FOUND`` error.
        Note: This operation is designed to be used for building
        permission-aware UIs and command-line tools, not for authorization
        checking. This operation may "fail open" without warning.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
            >>>
            >>> # TODO: Initialize `permissions`:
            >>> permissions = []
            >>>
            >>> response = client.test_iam_permissions(resource, permissions)
        Args:
            resource (str): REQUIRED: The resource for which the policy detail is being requested.
                See the operation documentation for the appropriate value for this field.
            permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
                wildcards (such as '*' or 'storage.*') are not allowed. For more
                information see `IAM
                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.tasks_v2.types.TestIamPermissionsResponse` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # The wrapped callable is cached in ``self._inner_api_calls`` so the
        # wrapping cost is paid only on the first invocation.
        if "test_iam_permissions" not in self._inner_api_calls:
            self._inner_api_calls[
                "test_iam_permissions"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.test_iam_permissions,
                default_retry=self._method_configs["TestIamPermissions"].retry,
                default_timeout=self._method_configs["TestIamPermissions"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource, permissions=permissions
        )
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("resource", resource)]
        except AttributeError:
            pass
        else:
            # Convert the routing header into gRPC metadata so the backend
            # can route the call based on the resource name.
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        return self._inner_api_calls["test_iam_permissions"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
    def list_tasks(
        self,
        parent,
        response_view=None,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists the tasks in a queue.
        By default, only the ``BASIC`` view is retrieved due to performance
        considerations; ``response_view`` controls the subset of information
        which is returned.
        The tasks may be returned in any order. The ordering may change at any
        time.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_tasks(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_tasks(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass
        Args:
            parent (str): Required. The queue name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
            response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
                returned.
                By default response\_view is ``BASIC``; not all information is retrieved
                by default because some data, such as payloads, might be desirable to
                return only when needed because of its large size or because of the
                sensitivity of data that it contains.
                Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
                `Google IAM <https://cloud.google.com/iam/>`___ permission on the
                ``Task`` resource.
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.tasks_v2.types.Task` instances.
            You can also iterate over the pages of the response
            using its `pages` property.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "list_tasks" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_tasks"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_tasks,
                default_retry=self._method_configs["ListTasks"].retry,
                default_timeout=self._method_configs["ListTasks"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.ListTasksRequest(
            parent=parent, response_view=response_view, page_size=page_size
        )
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        # Wrap the paged RPC in an iterator that transparently requests the
        # next page (via page_token/next_page_token) as it is consumed.
        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_tasks"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="tasks",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator
    def get_task(
        self,
        name,
        response_view=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Gets a task.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
            >>>
            >>> response = client.get_task(name)
        Args:
            name (str): Required. The task name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
            response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
                returned.
                By default response\_view is ``BASIC``; not all information is retrieved
                by default because some data, such as payloads, might be desirable to
                return only when needed because of its large size or because of the
                sensitivity of data that it contains.
                Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
                `Google IAM <https://cloud.google.com/iam/>`___ permission on the
                ``Task`` resource.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.tasks_v2.types.Task` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "get_task" not in self._inner_api_calls:
            self._inner_api_calls[
                "get_task"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_task,
                default_retry=self._method_configs["GetTask"].retry,
                default_timeout=self._method_configs["GetTask"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.GetTaskRequest(name=name, response_view=response_view)
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        return self._inner_api_calls["get_task"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
    def create_task(
        self,
        parent,
        task,
        response_view=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates a task and adds it to a queue.
        Tasks cannot be updated after creation; there is no UpdateTask command.
        -  The maximum task size is 100KB.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
            >>>
            >>> # TODO: Initialize `task`:
            >>> task = {}
            >>>
            >>> response = client.create_task(parent, task)
        Args:
            parent (str): Required. The queue name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
                The queue must already exist.
            task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required. The task to add.
                Task names have the following format:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
                The user can optionally specify a task ``name``. If a name is not
                specified then the system will generate a random unique task id, which
                will be set in the task returned in the ``response``.
                If ``schedule_time`` is not set or is in the past then Cloud Tasks will
                set it to the current time.
                Task De-duplication:
                Explicitly specifying a task ID enables task de-duplication. If a task's
                ID is identical to that of an existing task or a task that was deleted
                or executed recently then the call will fail with ``ALREADY_EXISTS``. If
                the task's queue was created using Cloud Tasks, then another task with
                the same name can't be created for ~1hour after the original task was
                deleted or executed. If the task's queue was created using queue.yaml or
                queue.xml, then another task with the same name can't be created for
                ~9days after the original task was deleted or executed.
                Because there is an extra lookup cost to identify duplicate task names,
                these ``CreateTask`` calls have significantly increased latency. Using
                hashed strings for the task id or for the prefix of the task id is
                recommended. Choosing task ids that are sequential or have sequential
                prefixes, for example using a timestamp, causes an increase in latency
                and error rates in all task commands. The infrastructure relies on an
                approximately uniform distribution of task ids to store and serve tasks
                efficiently.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.tasks_v2.types.Task`
            response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
                returned.
                By default response\_view is ``BASIC``; not all information is retrieved
                by default because some data, such as payloads, might be desirable to
                return only when needed because of its large size or because of the
                sensitivity of data that it contains.
                Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
                `Google IAM <https://cloud.google.com/iam/>`___ permission on the
                ``Task`` resource.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.tasks_v2.types.Task` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "create_task" not in self._inner_api_calls:
            self._inner_api_calls[
                "create_task"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_task,
                default_retry=self._method_configs["CreateTask"].retry,
                default_timeout=self._method_configs["CreateTask"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.CreateTaskRequest(
            parent=parent, task=task, response_view=response_view
        )
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        return self._inner_api_calls["create_task"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
    def delete_task(
        self,
        name,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Deletes a task.
        A task can be deleted if it is scheduled or dispatched. A task
        cannot be deleted if it has executed successfully or permanently
        failed.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
            >>>
            >>> client.delete_task(name)
        Args:
            name (str): Required. The task name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "delete_task" not in self._inner_api_calls:
            self._inner_api_calls[
                "delete_task"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.delete_task,
                default_retry=self._method_configs["DeleteTask"].retry,
                default_timeout=self._method_configs["DeleteTask"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.DeleteTaskRequest(name=name)
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        # DeleteTask returns google.protobuf.Empty; the call is made purely
        # for its side effect, so nothing is returned to the caller.
        self._inner_api_calls["delete_task"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
    def run_task(
        self,
        name,
        response_view=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Forces a task to run now.
        When this method is called, Cloud Tasks will dispatch the task, even if
        the task is already running, the queue has reached its ``RateLimits`` or
        is ``PAUSED``.
        This command is meant to be used for manual debugging. For example,
        ``RunTask`` can be used to retry a failed task after a fix has been made
        or to manually force a task to be dispatched now.
        The dispatched task is returned. That is, the task that is returned
        contains the ``status`` after the task is dispatched but before the task
        is received by its target.
        If Cloud Tasks receives a successful response from the task's target,
        then the task will be deleted; otherwise the task's ``schedule_time``
        will be reset to the time that ``RunTask`` was called plus the retry
        delay specified in the queue's ``RetryConfig``.
        ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has
        already succeeded or permanently failed.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
            >>>
            >>> response = client.run_task(name)
        Args:
            name (str): Required. The task name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
            response_view (~google.cloud.tasks_v2.types.View): The response\_view specifies which subset of the ``Task`` will be
                returned.
                By default response\_view is ``BASIC``; not all information is retrieved
                by default because some data, such as payloads, might be desirable to
                return only when needed because of its large size or because of the
                sensitivity of data that it contains.
                Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
                `Google IAM <https://cloud.google.com/iam/>`___ permission on the
                ``Task`` resource.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.tasks_v2.types.Task` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "run_task" not in self._inner_api_calls:
            self._inner_api_calls[
                "run_task"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.run_task,
                default_retry=self._method_configs["RunTask"].retry,
                default_timeout=self._method_configs["RunTask"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.RunTaskRequest(name=name, response_view=response_view)
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        return self._inner_api_calls["run_task"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
Fix docstring references to 'Task.View' enum. (#9165)
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.tasks.v2 CloudTasks API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.tasks_v2.gapic import cloud_tasks_client_config
from google.cloud.tasks_v2.gapic import enums
from google.cloud.tasks_v2.gapic.transports import cloud_tasks_grpc_transport
from google.cloud.tasks_v2.proto import cloudtasks_pb2
from google.cloud.tasks_v2.proto import cloudtasks_pb2_grpc
from google.cloud.tasks_v2.proto import queue_pb2
from google.cloud.tasks_v2.proto import task_pb2
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import options_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-tasks").version
class CloudTasksClient(object):
    """
    Cloud Tasks allows developers to manage the execution of background
    work in their applications.
    """
    # Default gRPC target (host:port) used unless an endpoint override is
    # supplied via ``client_options`` in the constructor.
    SERVICE_ADDRESS = "cloudtasks.googleapis.com:443"
    """The default address of the service."""
    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = "google.cloud.tasks.v2.CloudTasks"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CloudTasksClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def location_path(cls, project, location):
"""Return a fully-qualified location string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}",
project=project,
location=location,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"projects/{project}", project=project
)
@classmethod
def queue_path(cls, project, location, queue):
"""Return a fully-qualified queue string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}",
project=project,
location=location,
queue=queue,
)
@classmethod
def task_path(cls, project, location, queue, task):
"""Return a fully-qualified task string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
project=project,
location=location,
queue=queue,
task=task,
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.CloudTasksGrpcTransport,
Callable[[~.Credentials, type], ~.CloudTasksGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):
Client options used to set user options on the client. API Endpoint
should be set through client_options.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = cloud_tasks_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=cloud_tasks_grpc_transport.CloudTasksGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = cloud_tasks_grpc_transport.CloudTasksGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
    def list_queues(
        self,
        parent,
        filter_=None,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists queues.
        Queues are returned in lexicographical order.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_queues(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_queues(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass
        Args:
            parent (str): Required. The location name. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID``
            filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue``
                field can be used as a filter and several operators as supported. For
                example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
                described in `Stackdriver's Advanced Logs
                Filters <https://cloud.google.com/logging/docs/view/advanced_filters>`__.
                Sample filter "state: PAUSED".
                Note that using filters might cause fewer queues than the requested
                page\_size to be returned.
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.tasks_v2.types.Queue` instances.
            You can also iterate over the pages of the response
            using its `pages` property.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "list_queues" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_queues"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_queues,
                default_retry=self._method_configs["ListQueues"].retry,
                default_timeout=self._method_configs["ListQueues"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf; ``filter_`` maps onto the proto field
        # ``filter`` (trailing underscore avoids shadowing the builtin).
        request = cloudtasks_pb2.ListQueuesRequest(
            parent=parent, filter=filter_, page_size=page_size
        )
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        # Wrap the paged RPC in an iterator that transparently requests the
        # next page (via page_token/next_page_token) as it is consumed.
        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_queues"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="queues",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator
    def get_queue(
        self,
        name,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Gets a queue.
        Example:
            >>> from google.cloud import tasks_v2
            >>>
            >>> client = tasks_v2.CloudTasksClient()
            >>>
            >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
            >>>
            >>> response = client.get_queue(name)
        Args:
            name (str): Required. The resource name of the queue. For example:
                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.tasks_v2.types.Queue` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # Cached in ``self._inner_api_calls`` so wrapping happens only once.
        if "get_queue" not in self._inner_api_calls:
            self._inner_api_calls[
                "get_queue"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_queue,
                default_retry=self._method_configs["GetQueue"].retry,
                default_timeout=self._method_configs["GetQueue"].timeout,
                client_info=self._client_info,
            )
        # Build the request protobuf from the caller-supplied arguments.
        request = cloudtasks_pb2.GetQueueRequest(name=name)
        if metadata is None:
            metadata = []
        # Copy so the routing-header append below never mutates the caller's
        # metadata sequence.
        metadata = list(metadata)
        # NOTE(review): a list literal cannot raise AttributeError; this
        # try/except is generated boilerplate kept for template symmetry.
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)
        return self._inner_api_calls["get_queue"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
def create_queue(
self,
parent,
queue,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a queue.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.create_queue(parent, queue)
Args:
parent (str): Required. The location name in which the queue will be created. For
example: ``projects/PROJECT_ID/locations/LOCATION_ID``
The list of allowed locations can be obtained by calling Cloud Tasks'
implementation of ``ListLocations``.
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create.
``Queue's name`` cannot be the same as an existing queue.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_queue" not in self._inner_api_calls:
self._inner_api_calls[
"create_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_queue,
default_retry=self._method_configs["CreateQueue"].retry,
default_timeout=self._method_configs["CreateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.CreateQueueRequest(parent=parent, queue=queue)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def update_queue(
self,
queue,
update_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates a queue.
This method creates the queue if it does not exist and updates the queue
if it does exist.
Queues created with this method allow tasks to live for a maximum of 31
days. After a task is 31 days old, the task will be deleted regardless
of whether it was dispatched or not.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> # TODO: Initialize `queue`:
>>> queue = {}
>>>
>>> response = client.update_queue(queue)
Args:
queue (Union[dict, ~google.cloud.tasks_v2.types.Queue]): Required. The queue to create or update.
The queue's ``name`` must be specified.
Output only fields cannot be modified using UpdateQueue. Any value
specified for an output only field will be ignored. The queue's ``name``
cannot be changed.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Queue`
update_mask (Union[dict, ~google.cloud.tasks_v2.types.FieldMask]): A mask used to specify which fields of the queue are being updated.
If empty, then all fields will be updated.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_queue" not in self._inner_api_calls:
self._inner_api_calls[
"update_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_queue,
default_retry=self._method_configs["UpdateQueue"].retry,
default_timeout=self._method_configs["UpdateQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.UpdateQueueRequest(
queue=queue, update_mask=update_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("queue.name", queue.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["update_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a queue.
This command will delete the queue even if it has tasks in it.
Note: If you delete a queue, a queue with the same name can't be created
for 7 days.
WARNING: Using this method may have unintended side effects if you are
using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your
queues. Read `Overview of Queue Management and
queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ before
using this method.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> client.delete_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_queue" not in self._inner_api_calls:
self._inner_api_calls[
"delete_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_queue,
default_retry=self._method_configs["DeleteQueue"].retry,
default_timeout=self._method_configs["DeleteQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.DeleteQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["delete_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def purge_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Purges a queue by deleting all of its tasks.
All tasks created before this method is called are permanently deleted.
Purge operations can take up to one minute to take effect. Tasks
might be dispatched before the purge takes effect. A purge is irreversible.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.purge_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "purge_queue" not in self._inner_api_calls:
self._inner_api_calls[
"purge_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.purge_queue,
default_retry=self._method_configs["PurgeQueue"].retry,
default_timeout=self._method_configs["PurgeQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.PurgeQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["purge_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def pause_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Pauses the queue.
If a queue is paused then the system will stop dispatching tasks until
the queue is resumed via ``ResumeQueue``. Tasks can still be added when
the queue is paused. A queue is paused if its ``state`` is ``PAUSED``.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.pause_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "pause_queue" not in self._inner_api_calls:
self._inner_api_calls[
"pause_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.pause_queue,
default_retry=self._method_configs["PauseQueue"].retry,
default_timeout=self._method_configs["PauseQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.PauseQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["pause_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def resume_queue(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Resume a queue.
This method resumes a queue after it has been ``PAUSED`` or
``DISABLED``. The state of a queue is stored in the queue's ``state``;
after calling this method it will be set to ``RUNNING``.
WARNING: Resuming many high-QPS queues at the same time can lead to
target overloading. If you are resuming high-QPS queues, follow the
500/50/5 pattern described in `Managing Cloud Tasks Scaling
Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.resume_queue(name)
Args:
name (str): Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Queue` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "resume_queue" not in self._inner_api_calls:
self._inner_api_calls[
"resume_queue"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.resume_queue,
default_retry=self._method_configs["ResumeQueue"].retry,
default_timeout=self._method_configs["ResumeQueue"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.ResumeQueueRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["resume_queue"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_iam_policy(
self,
resource,
options_=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the access control policy for a ``Queue``. Returns an empty policy
if the resource exists and does not have a policy set.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the specified
resource parent:
- ``cloudtasks.queues.getIamPolicy``
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
options_ (Union[dict, ~google.cloud.tasks_v2.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to
``GetIamPolicy``. This field is only used by Cloud IAM.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.GetPolicyOptions`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"get_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs["GetIamPolicy"].retry,
default_timeout=self._method_configs["GetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(
resource=resource, options=options_
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy for a ``Queue``. Replaces any existing
policy.
Note: The Cloud Console does not check queue-level IAM permissions yet.
Project-level permissions are required to use the Cloud Console.
Authorization requires the following `Google
IAM <https://cloud.google.com/iam>`__ permission on the specified
resource parent:
- ``cloudtasks.queues.setIamPolicy``
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
policy (Union[dict, ~google.cloud.tasks_v2.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.tasks_v2.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def test_iam_permissions(
self,
resource,
permissions,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Returns permissions that a caller has on a ``Queue``. If the resource
does not exist, this will return an empty set of permissions, not a
``NOT_FOUND`` error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for authorization
checking. This operation may "fail open" without warning.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # TODO: Initialize `permissions`:
>>> permissions = []
>>>
>>> response = client.test_iam_permissions(resource, permissions)
Args:
resource (str): REQUIRED: The resource for which the policy detail is being requested.
See the operation documentation for the appropriate value for this field.
permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.TestIamPermissionsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "test_iam_permissions" not in self._inner_api_calls:
self._inner_api_calls[
"test_iam_permissions"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.test_iam_permissions,
default_retry=self._method_configs["TestIamPermissions"].retry,
default_timeout=self._method_configs["TestIamPermissions"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["test_iam_permissions"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_tasks(
self,
parent,
response_view=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists the tasks in a queue.
By default, only the ``BASIC`` view is retrieved due to performance
considerations; ``response_view`` controls the subset of information
which is returned.
The tasks may be returned in any order. The ordering may change at any
time.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
>>>
>>> # Iterate over all results
>>> for element in client.list_tasks(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_tasks(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
response_view (~google.cloud.tasks_v2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.tasks_v2.types.Task` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_tasks" not in self._inner_api_calls:
self._inner_api_calls[
"list_tasks"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_tasks,
default_retry=self._method_configs["ListTasks"].retry,
default_timeout=self._method_configs["ListTasks"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.ListTasksRequest(
parent=parent, response_view=response_view, page_size=page_size
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_tasks"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="tasks",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_task(
self,
name,
response_view=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a task.
Example:
>>> from google.cloud import tasks_v2
>>>
>>> client = tasks_v2.CloudTasksClient()
>>>
>>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
>>>
>>> response = client.get_task(name)
Args:
name (str): Required. The task name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
response_view (~google.cloud.tasks_v2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be
returned.
By default response\_view is ``BASIC``; not all information is retrieved
by default because some data, such as payloads, might be desirable to
return only when needed because of its large size or because of the
sensitivity of data that it contains.
Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
`Google IAM <https://cloud.google.com/iam/>`___ permission on the
``Task`` resource.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.tasks_v2.types.Task` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_task" not in self._inner_api_calls:
self._inner_api_calls[
"get_task"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_task,
default_retry=self._method_configs["GetTask"].retry,
default_timeout=self._method_configs["GetTask"].timeout,
client_info=self._client_info,
)
request = cloudtasks_pb2.GetTaskRequest(name=name, response_view=response_view)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_task"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_task(
    self,
    parent,
    task,
    response_view=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Creates a task and adds it to a queue.

    Tasks cannot be updated after creation; there is no UpdateTask command.

    - The maximum task size is 100KB.

    Example:
        >>> from google.cloud import tasks_v2
        >>>
        >>> client = tasks_v2.CloudTasksClient()
        >>>
        >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
        >>>
        >>> # TODO: Initialize `task`:
        >>> task = {}
        >>>
        >>> response = client.create_task(parent, task)

    Args:
        parent (str): Required. The queue name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
            The queue must already exist.
        task (Union[dict, ~google.cloud.tasks_v2.types.Task]): Required. The task to add.
            Task names have the following format:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
            The user can optionally specify a task ``name``. If a name is not
            specified then the system will generate a random unique task id, which
            will be set in the task returned in the ``response``.
            If ``schedule_time`` is not set or is in the past then Cloud Tasks will
            set it to the current time.
            Task De-duplication:
            Explicitly specifying a task ID enables task de-duplication. If a task's
            ID is identical to that of an existing task or a task that was deleted
            or executed recently then the call will fail with ``ALREADY_EXISTS``. If
            the task's queue was created using Cloud Tasks, then another task with
            the same name can't be created for ~1hour after the original task was
            deleted or executed. If the task's queue was created using queue.yaml or
            queue.xml, then another task with the same name can't be created for
            ~9days after the original task was deleted or executed.
            Because there is an extra lookup cost to identify duplicate task names,
            these ``CreateTask`` calls have significantly increased latency. Using
            hashed strings for the task id or for the prefix of the task id is
            recommended. Choosing task ids that are sequential or have sequential
            prefixes, for example using a timestamp, causes an increase in latency
            and error rates in all task commands. The infrastructure relies on an
            approximately uniform distribution of task ids to store and serve tasks
            efficiently.
            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.tasks_v2.types.Task`
        response_view (~google.cloud.tasks_v2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be
            returned.
            By default response\_view is ``BASIC``; not all information is retrieved
            by default because some data, such as payloads, might be desirable to
            return only when needed because of its large size or because of the
            sensitivity of data that it contains.
            Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
            `Google IAM <https://cloud.google.com/iam/>`__ permission on the
            ``Task`` resource.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            be retried using a default configuration.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.tasks_v2.types.Task` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    # The wrapped callable is cached in ``_inner_api_calls`` so the
    # method-config defaults are resolved only once per client instance.
    if "create_task" not in self._inner_api_calls:
        self._inner_api_calls[
            "create_task"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.create_task,
            default_retry=self._method_configs["CreateTask"].retry,
            default_timeout=self._method_configs["CreateTask"].timeout,
            client_info=self._client_info,
        )
    request = cloudtasks_pb2.CreateTaskRequest(
        parent=parent, task=task, response_view=response_view
    )
    # Normalize metadata to a fresh mutable list so appending the routing
    # header below never mutates the caller's sequence.
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    # Attach the resource name as a gRPC routing header.
    # NOTE(review): constructing this list cannot raise AttributeError;
    # the try/except is a guard emitted by the GAPIC code generator.
    try:
        routing_header = [("parent", parent)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)
    return self._inner_api_calls["create_task"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def delete_task(
    self,
    name,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Deletes a task.

    A task can be deleted while it is scheduled or dispatched; a task that
    has executed successfully or permanently failed cannot be deleted.

    Example:
        >>> from google.cloud import tasks_v2
        >>>
        >>> client = tasks_v2.CloudTasksClient()
        >>>
        >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
        >>>
        >>> client.delete_task(name)

    Args:
        name (str): Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. ``None`` means requests are retried with a
            default configuration.
        timeout (Optional[float]): Time, in seconds, to wait for the
            request to complete. When ``retry`` is specified the timeout
            applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Build (once) and memoize the transport call wrapped with the retry
    # and timeout defaults from the "DeleteTask" method config.
    if "delete_task" not in self._inner_api_calls:
        self._inner_api_calls[
            "delete_task"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.delete_task,
            default_retry=self._method_configs["DeleteTask"].retry,
            default_timeout=self._method_configs["DeleteTask"].timeout,
            client_info=self._client_info,
        )
    request = cloudtasks_pb2.DeleteTaskRequest(name=name)
    # Copy caller metadata into a fresh list before appending to it.
    metadata = [] if metadata is None else list(metadata)
    # Route the request by resource name via a gRPC routing header; the
    # AttributeError guard mirrors the generated-code pattern.
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        metadata.append(
            google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
        )
    self._inner_api_calls["delete_task"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
def run_task(
    self,
    name,
    response_view=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Forces a task to run now.

    When this method is called, Cloud Tasks will dispatch the task, even if
    the task is already running, the queue has reached its ``RateLimits`` or
    is ``PAUSED``.

    This command is meant to be used for manual debugging. For example,
    ``RunTask`` can be used to retry a failed task after a fix has been made
    or to manually force a task to be dispatched now.

    The dispatched task is returned. That is, the task that is returned
    contains the ``status`` after the task is dispatched but before the task
    is received by its target.

    If Cloud Tasks receives a successful response from the task's target,
    then the task will be deleted; otherwise the task's ``schedule_time``
    will be reset to the time that ``RunTask`` was called plus the retry
    delay specified in the queue's ``RetryConfig``.

    ``RunTask`` returns ``NOT_FOUND`` when it is called on a task that has
    already succeeded or permanently failed.

    Example:
        >>> from google.cloud import tasks_v2
        >>>
        >>> client = tasks_v2.CloudTasksClient()
        >>>
        >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]')
        >>>
        >>> response = client.run_task(name)

    Args:
        name (str): Required. The task name. For example:
            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
        response_view (~google.cloud.tasks_v2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be
            returned.
            By default response\_view is ``BASIC``; not all information is retrieved
            by default because some data, such as payloads, might be desirable to
            return only when needed because of its large size or because of the
            sensitivity of data that it contains.
            Authorization for ``FULL`` requires ``cloudtasks.tasks.fullView``
            `Google IAM <https://cloud.google.com/iam/>`__ permission on the
            ``Task`` resource.
        retry (Optional[google.api_core.retry.Retry]): A retry object used
            to retry requests. If ``None`` is specified, requests will
            be retried using a default configuration.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.tasks_v2.types.Task` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
            failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
            to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    # Cached in ``_inner_api_calls`` so the wrapping happens only once.
    if "run_task" not in self._inner_api_calls:
        self._inner_api_calls[
            "run_task"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.run_task,
            default_retry=self._method_configs["RunTask"].retry,
            default_timeout=self._method_configs["RunTask"].timeout,
            client_info=self._client_info,
        )
    request = cloudtasks_pb2.RunTaskRequest(name=name, response_view=response_view)
    # Copy caller metadata into a fresh mutable list before appending.
    if metadata is None:
        metadata = []
    metadata = list(metadata)
    # Attach the resource name as a gRPC routing header.
    # NOTE(review): the list literal cannot raise AttributeError; this
    # try/except is a guard emitted by the GAPIC code generator.
    try:
        routing_header = [("name", name)]
    except AttributeError:
        pass
    else:
        routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
            routing_header
        )
        metadata.append(routing_metadata)
    return self._inner_api_calls["run_task"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.actions import page_action
from telemetry.internal.actions import scroll
from telemetry.internal.actions import utils
from telemetry.testing import tab_test_case
class ScrollActionTest(tab_test_case.TabTestCase):
  """Integration tests for the ScrollAction synthetic scroll gesture.

  Fixes over the previous revision: the deprecated ``assertEquals`` alias
  (removed in Python 3.12) is replaced with ``assertEqual`` everywhere.
  """

  def _MakePageVerticallyScrollable(self):
    # Make page taller than window so it's scrollable vertically.
    self._tab.ExecuteJavaScript(
        'document.body.style.height ='
        '(3 * __GestureCommon_GetWindowHeight() + 1) + "px";')

  def _MakePageHorizontallyScrollable(self):
    # Make page wider than window so it's scrollable horizontally.
    self._tab.ExecuteJavaScript(
        'document.body.style.width ='
        '(3 * __GestureCommon_GetWindowWidth() + 1) + "px";')

  def setUp(self):
    tab_test_case.TabTestCase.setUp(self)
    self.Navigate('blank.html')
    utils.InjectJavaScript(self._tab, 'gesture_common.js')

  def _RunScrollDistanceTest(self, distance, speed, source, maxError):
    """Scrolls down by `distance` px at `speed` px/s from `source` and
    asserts the measured distance is within `maxError` plus URL-bar slack."""
    # TODO(bokan): Distance tests will fail on versions of Chrome that haven't
    # been fixed. The fixes landed at the same time as the
    # setBrowserControlsShown method was added so only run the test if that's
    # available. Once that rolls into ref builds we can remove this check.
    distanceFixedInChrome = self._tab.EvaluateJavaScript(
        "'setBrowserControlsShown' in chrome.gpuBenchmarking")
    if not distanceFixedInChrome:
      return

    # Hide the URL bar so we can measure scrolled distance without worrying
    # about the URL bar consuming delta.
    self._tab.ExecuteJavaScript(
        'chrome.gpuBenchmarking.setBrowserControlsShown(false);')

    # Make the document tall enough to accommodate the requested distance but
    # also leave enough space so we can tell if the scroll overshoots the
    # target.
    screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')
    documentHeight = (screenHeight + distance) * 2
    self._tab.ExecuteJavaScript(
        'document.body.style.height = "' + str(documentHeight) + 'px";')
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)

    # Allow for some visual viewport offset. For example, if the test doesn't
    # want any visual viewport offset due to animation handoff error between
    # the two viewports.
    start_offset = self._tab.EvaluateJavaScript('window.visualViewport.pageTop')

    i = scroll.ScrollAction(
        distance=distance,
        direction="down",
        speed_in_pixels_per_second=speed,
        synthetic_gesture_source=source)
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    actual = self._tab.EvaluateJavaScript(
        'window.visualViewport.pageTop') - start_offset

    # TODO(bokan): setBrowserControlsShown isn't quite enough. Chrome will hide
    # the browser controls but then they animate in after a timeout. We'll need
    # to add a way to lock them to hidden. Until then, just increase the
    # allowed error.
    urlBarError = 150
    self.assertAlmostEqual(distance, actual, delta=maxError + urlBarError)

  def testScrollDistanceFastTouch(self):
    """Checks scrolled-distance accuracy of a fast touch scroll."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # Scrolling distance for touch will have some error from the excess delta
    # of the event that crosses the slop threshold but isn't applied.
    self._RunScrollDistanceTest(
        500000, 200000, page_action.GESTURE_SOURCE_TOUCH, 50)

  def testScrollDistanceFastWheel(self):
    """Checks scrolled-distance accuracy of a fast wheel scroll."""
    # Wheel scrolling will have a much greater error than touch. There's 2
    # reasons: 1) synthetic wheel gestures accumulate the sent deltas and use
    # that to determine how much delta to send at each event dispatch time.
    # This assumes that the entire sent delta is applied which is wrong due to
    # physical pixel snapping which accumulates over the gesture.
    # 2) We can only send delta as ticks of the wheel. If the total delta is
    # not a multiple of the tick size, we'll "lose" the remainder.
    self._RunScrollDistanceTest(
        500000, 200000, page_action.GESTURE_SOURCE_MOUSE, 15000)

  def testScrollDistanceSlowTouch(self):
    """Checks scrolled-distance accuracy of a slow touch scroll."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # Scrolling slowly produces larger error since each event will have a
    # smaller delta. Thus error from snapping in each event will be a larger
    # share of the total delta.
    self._RunScrollDistanceTest(
        1000, 300, page_action.GESTURE_SOURCE_TOUCH, 10)

  def testScrollDistanceSlowWheel(self):
    """Checks scrolled-distance accuracy of a slow wheel scroll."""
    self._RunScrollDistanceTest(
        1000, 300, page_action.GESTURE_SOURCE_MOUSE, 200)

  @decorators.Disabled('chromeos', 'linux')  # crbug.com/805523
  @decorators.Disabled('win-reference')  # crbug.com/805523
  def testWheelScrollDistanceWhileZoomed(self):
    """Checks wheel scroll distance while the page is pinch-zoomed 2x."""
    # TODO(bokan): This API was added recently so only run the test once it's
    # available. Remove this check once it rolls into stable builds.
    chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
        "'setPageScaleFactor' in chrome.gpuBenchmarking")
    if not chromeSupportsSetPageScaleFactor:
      return

    self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')

    # Wheel scrolling can cause animated scrolls. This is a problem here since
    # Chrome currently doesn't hand off the animation between the visual and
    # layout viewports. To account for this, scroll the visual viewport to its
    # maximum extent so that the entire scroll goes to the layout viewport.
    screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')

    i = scroll.ScrollAction(
        distance=screenHeight*2,
        direction="down",
        speed_in_pixels_per_second=5000,
        synthetic_gesture_source=page_action.GESTURE_SOURCE_MOUSE)
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    # Ensure the layout viewport isn't scrolled but the visual is.
    self.assertGreater(
        self._tab.EvaluateJavaScript('window.visualViewport.offsetTop'),
        screenHeight / 2 - 1)
    self.assertEqual(self._tab.EvaluateJavaScript('window.scrollY'), 0)

    self._RunScrollDistanceTest(
        2000, 2000, page_action.GESTURE_SOURCE_MOUSE, 60)

  def testTouchScrollDistanceWhileZoomed(self):
    """Checks touch scroll distance while the page is pinch-zoomed 2x."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # TODO(bokan): This API was added recently so only run the test once it's
    # available. Remove this check once it rolls into stable builds.
    chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
        "'setPageScaleFactor' in chrome.gpuBenchmarking")
    if not chromeSupportsSetPageScaleFactor:
      return

    self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')
    self._RunScrollDistanceTest(
        2000, 2000, page_action.GESTURE_SOURCE_TOUCH, 20)

  def testScrollAction(self):
    """Checks a default ScrollAction scrolls the page and fires its hooks."""
    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)

    i = scroll.ScrollAction()
    i.WillRunAction(self._tab)

    self._tab.ExecuteJavaScript("""
        window.__scrollAction.beginMeasuringHook = function() {
          window.__didBeginMeasuring = true;
        };
        window.__scrollAction.endMeasuringHook = function() {
          window.__didEndMeasuring = true;
        };""")
    i.RunAction(self._tab)

    self.assertTrue(self._tab.EvaluateJavaScript('window.__didBeginMeasuring'))
    self.assertTrue(self._tab.EvaluateJavaScript('window.__didEndMeasuring'))

    scroll_position = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollTop')
    self.assertTrue(
        scroll_position != 0, msg='scroll_position=%d;' % (scroll_position))

  # https://github.com/catapult-project/catapult/issues/3099
  @decorators.Disabled('android')
  def testDiagonalScrollAction(self):
    """Checks a 'downright' scroll moves both scroll axes."""
    # Diagonal scrolling was not supported in the ScrollAction until Chrome
    # branch number 2332
    branch_num = self._tab.browser._browser_backend.devtools_client \
        .GetChromeBranchNumber()
    if branch_num < 2332:
      return

    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
    self._MakePageHorizontallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)

    i = scroll.ScrollAction(direction='downright')
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    viewport_top = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollTop')
    self.assertTrue(viewport_top != 0, msg='viewport_top=%d;' % viewport_top)
    viewport_left = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollLeft')
    self.assertTrue(viewport_left != 0, msg='viewport_left=%d;' % viewport_left)

  def testBoundingClientRect(self):
    """Checks the gesture target rect is clipped to the visible viewport."""
    # Verify that the rect returned by getBoundingVisibleRect() in scroll.js is
    # completely contained within the viewport. Scroll events dispatched by the
    # scrolling API use the center of this rect as their location, and this
    # location needs to be within the viewport bounds to correctly decide
    # between main-thread and impl-thread scroll. If the scrollable area were
    # not clipped to the viewport bounds, then the instance used here (the
    # scrollable area being more than twice as tall as the viewport) would
    # result in a scroll location outside of the viewport bounds.
    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
    self._MakePageHorizontallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)

    self._tab.ExecuteJavaScript("""
        window.scrollTo(__GestureCommon_GetWindowWidth(),
                        __GestureCommon_GetWindowHeight());""")

    rect_top = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).top'))
    rect_height = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).height'))
    rect_bottom = rect_top + rect_height

    rect_left = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).left'))
    rect_width = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).width'))
    rect_right = rect_left + rect_width

    viewport_height = int(
        self._tab.EvaluateJavaScript('__GestureCommon_GetWindowHeight()'))
    viewport_width = int(
        self._tab.EvaluateJavaScript('__GestureCommon_GetWindowWidth()'))

    self.assertTrue(rect_top >= 0, msg='%s >= %s' % (rect_top, 0))
    self.assertTrue(rect_left >= 0, msg='%s >= %s' % (rect_left, 0))
    self.assertTrue(
        rect_bottom <= viewport_height,
        msg='%s + %s <= %s' % (rect_top, rect_height, viewport_height))
    self.assertTrue(
        rect_right <= viewport_width,
        msg='%s + %s <= %s' % (rect_left, rect_width, viewport_width))
Disable failing scroll tests on Android
These tests started failing with the reference build update
from 68.0.3440.85 to 72.0.3626.105.
Bug: chromium:934649
Change-Id: I2c500de7fc832677701a55190262ad503e995716
TBR=sadrul@chromium.org
Reviewed-on: https://chromium-review.googlesource.com/c/1481949
Reviewed-by: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
Commit-Queue: Caleb Rouleau <58bdb6276c01e3ed53e660bf706aa902204feb0d@chromium.org>
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import decorators
from telemetry.internal.actions import page_action
from telemetry.internal.actions import scroll
from telemetry.internal.actions import utils
from telemetry.testing import tab_test_case
class ScrollActionTest(tab_test_case.TabTestCase):
  """Integration tests for the ScrollAction synthetic scroll gesture.

  Wheel-based distance tests are disabled on Android (crbug.com/934649).
  Fixes over the previous revision: the deprecated ``assertEquals`` alias
  (removed in Python 3.12) is replaced with ``assertEqual`` everywhere.
  """

  def _MakePageVerticallyScrollable(self):
    # Make page taller than window so it's scrollable vertically.
    self._tab.ExecuteJavaScript(
        'document.body.style.height ='
        '(3 * __GestureCommon_GetWindowHeight() + 1) + "px";')

  def _MakePageHorizontallyScrollable(self):
    # Make page wider than window so it's scrollable horizontally.
    self._tab.ExecuteJavaScript(
        'document.body.style.width ='
        '(3 * __GestureCommon_GetWindowWidth() + 1) + "px";')

  def setUp(self):
    tab_test_case.TabTestCase.setUp(self)
    self.Navigate('blank.html')
    utils.InjectJavaScript(self._tab, 'gesture_common.js')

  def _RunScrollDistanceTest(self, distance, speed, source, maxError):
    """Scrolls down by `distance` px at `speed` px/s from `source` and
    asserts the measured distance is within `maxError` plus URL-bar slack."""
    # TODO(bokan): Distance tests will fail on versions of Chrome that haven't
    # been fixed. The fixes landed at the same time as the
    # setBrowserControlsShown method was added so only run the test if that's
    # available. Once that rolls into ref builds we can remove this check.
    distanceFixedInChrome = self._tab.EvaluateJavaScript(
        "'setBrowserControlsShown' in chrome.gpuBenchmarking")
    if not distanceFixedInChrome:
      return

    # Hide the URL bar so we can measure scrolled distance without worrying
    # about the URL bar consuming delta.
    self._tab.ExecuteJavaScript(
        'chrome.gpuBenchmarking.setBrowserControlsShown(false);')

    # Make the document tall enough to accommodate the requested distance but
    # also leave enough space so we can tell if the scroll overshoots the
    # target.
    screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')
    documentHeight = (screenHeight + distance) * 2
    self._tab.ExecuteJavaScript(
        'document.body.style.height = "' + str(documentHeight) + 'px";')
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)

    # Allow for some visual viewport offset. For example, if the test doesn't
    # want any visual viewport offset due to animation handoff error between
    # the two viewports.
    start_offset = self._tab.EvaluateJavaScript('window.visualViewport.pageTop')

    i = scroll.ScrollAction(
        distance=distance,
        direction="down",
        speed_in_pixels_per_second=speed,
        synthetic_gesture_source=source)
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    actual = self._tab.EvaluateJavaScript(
        'window.visualViewport.pageTop') - start_offset

    # TODO(bokan): setBrowserControlsShown isn't quite enough. Chrome will hide
    # the browser controls but then they animate in after a timeout. We'll need
    # to add a way to lock them to hidden. Until then, just increase the
    # allowed error.
    urlBarError = 150
    self.assertAlmostEqual(distance, actual, delta=maxError + urlBarError)

  def testScrollDistanceFastTouch(self):
    """Checks scrolled-distance accuracy of a fast touch scroll."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # Scrolling distance for touch will have some error from the excess delta
    # of the event that crosses the slop threshold but isn't applied.
    self._RunScrollDistanceTest(
        500000, 200000, page_action.GESTURE_SOURCE_TOUCH, 50)

  @decorators.Disabled('android')  # crbug.com/934649
  def testScrollDistanceFastWheel(self):
    """Checks scrolled-distance accuracy of a fast wheel scroll."""
    # Wheel scrolling will have a much greater error than touch. There's 2
    # reasons: 1) synthetic wheel gestures accumulate the sent deltas and use
    # that to determine how much delta to send at each event dispatch time.
    # This assumes that the entire sent delta is applied which is wrong due to
    # physical pixel snapping which accumulates over the gesture.
    # 2) We can only send delta as ticks of the wheel. If the total delta is
    # not a multiple of the tick size, we'll "lose" the remainder.
    self._RunScrollDistanceTest(
        500000, 200000, page_action.GESTURE_SOURCE_MOUSE, 15000)

  def testScrollDistanceSlowTouch(self):
    """Checks scrolled-distance accuracy of a slow touch scroll."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # Scrolling slowly produces larger error since each event will have a
    # smaller delta. Thus error from snapping in each event will be a larger
    # share of the total delta.
    self._RunScrollDistanceTest(
        1000, 300, page_action.GESTURE_SOURCE_TOUCH, 10)

  @decorators.Disabled('android')  # crbug.com/934649
  def testScrollDistanceSlowWheel(self):
    """Checks scrolled-distance accuracy of a slow wheel scroll."""
    self._RunScrollDistanceTest(
        1000, 300, page_action.GESTURE_SOURCE_MOUSE, 200)

  @decorators.Disabled('android')  # crbug.com/934649
  @decorators.Disabled('chromeos', 'linux')  # crbug.com/805523
  @decorators.Disabled('win-reference')  # crbug.com/805523
  def testWheelScrollDistanceWhileZoomed(self):
    """Checks wheel scroll distance while the page is pinch-zoomed 2x."""
    # TODO(bokan): This API was added recently so only run the test once it's
    # available. Remove this check once it rolls into stable builds.
    chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
        "'setPageScaleFactor' in chrome.gpuBenchmarking")
    if not chromeSupportsSetPageScaleFactor:
      return

    self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')

    # Wheel scrolling can cause animated scrolls. This is a problem here since
    # Chrome currently doesn't hand off the animation between the visual and
    # layout viewports. To account for this, scroll the visual viewport to its
    # maximum extent so that the entire scroll goes to the layout viewport.
    screenHeight = self._tab.EvaluateJavaScript('window.visualViewport.height')

    i = scroll.ScrollAction(
        distance=screenHeight*2,
        direction="down",
        speed_in_pixels_per_second=5000,
        synthetic_gesture_source=page_action.GESTURE_SOURCE_MOUSE)
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    # Ensure the layout viewport isn't scrolled but the visual is.
    self.assertGreater(
        self._tab.EvaluateJavaScript('window.visualViewport.offsetTop'),
        screenHeight / 2 - 1)
    self.assertEqual(self._tab.EvaluateJavaScript('window.scrollY'), 0)

    self._RunScrollDistanceTest(
        2000, 2000, page_action.GESTURE_SOURCE_MOUSE, 60)

  def testTouchScrollDistanceWhileZoomed(self):
    """Checks touch scroll distance while the page is pinch-zoomed 2x."""
    # Just pass the test on platforms that don't support touch (i.e. Mac)
    if not page_action.IsGestureSourceTypeSupported(self._tab, 'touch'):
      return
    # TODO(bokan): This API was added recently so only run the test once it's
    # available. Remove this check once it rolls into stable builds.
    chromeSupportsSetPageScaleFactor = self._tab.EvaluateJavaScript(
        "'setPageScaleFactor' in chrome.gpuBenchmarking")
    if not chromeSupportsSetPageScaleFactor:
      return

    self._tab.EvaluateJavaScript('chrome.gpuBenchmarking.setPageScaleFactor(2)')
    self._RunScrollDistanceTest(
        2000, 2000, page_action.GESTURE_SOURCE_TOUCH, 20)

  def testScrollAction(self):
    """Checks a default ScrollAction scrolls the page and fires its hooks."""
    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)

    i = scroll.ScrollAction()
    i.WillRunAction(self._tab)

    self._tab.ExecuteJavaScript("""
        window.__scrollAction.beginMeasuringHook = function() {
          window.__didBeginMeasuring = true;
        };
        window.__scrollAction.endMeasuringHook = function() {
          window.__didEndMeasuring = true;
        };""")
    i.RunAction(self._tab)

    self.assertTrue(self._tab.EvaluateJavaScript('window.__didBeginMeasuring'))
    self.assertTrue(self._tab.EvaluateJavaScript('window.__didEndMeasuring'))

    scroll_position = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollTop')
    self.assertTrue(
        scroll_position != 0, msg='scroll_position=%d;' % (scroll_position))

  # https://github.com/catapult-project/catapult/issues/3099
  @decorators.Disabled('android')
  def testDiagonalScrollAction(self):
    """Checks a 'downright' scroll moves both scroll axes."""
    # Diagonal scrolling was not supported in the ScrollAction until Chrome
    # branch number 2332
    branch_num = self._tab.browser._browser_backend.devtools_client \
        .GetChromeBranchNumber()
    if branch_num < 2332:
      return

    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
    self._MakePageHorizontallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)

    i = scroll.ScrollAction(direction='downright')
    i.WillRunAction(self._tab)
    i.RunAction(self._tab)

    viewport_top = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollTop')
    self.assertTrue(viewport_top != 0, msg='viewport_top=%d;' % viewport_top)
    viewport_left = self._tab.EvaluateJavaScript(
        'document.scrollingElement.scrollLeft')
    self.assertTrue(viewport_left != 0, msg='viewport_left=%d;' % viewport_left)

  def testBoundingClientRect(self):
    """Checks the gesture target rect is clipped to the visible viewport."""
    # Verify that the rect returned by getBoundingVisibleRect() in scroll.js is
    # completely contained within the viewport. Scroll events dispatched by the
    # scrolling API use the center of this rect as their location, and this
    # location needs to be within the viewport bounds to correctly decide
    # between main-thread and impl-thread scroll. If the scrollable area were
    # not clipped to the viewport bounds, then the instance used here (the
    # scrollable area being more than twice as tall as the viewport) would
    # result in a scroll location outside of the viewport bounds.
    self._MakePageVerticallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollTop'), 0)
    self._MakePageHorizontallyScrollable()
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.scrollingElement.scrollLeft'), 0)

    self._tab.ExecuteJavaScript("""
        window.scrollTo(__GestureCommon_GetWindowWidth(),
                        __GestureCommon_GetWindowHeight());""")

    rect_top = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).top'))
    rect_height = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).height'))
    rect_bottom = rect_top + rect_height

    rect_left = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).left'))
    rect_width = int(
        self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).width'))
    rect_right = rect_left + rect_width

    viewport_height = int(
        self._tab.EvaluateJavaScript('__GestureCommon_GetWindowHeight()'))
    viewport_width = int(
        self._tab.EvaluateJavaScript('__GestureCommon_GetWindowWidth()'))

    self.assertTrue(rect_top >= 0, msg='%s >= %s' % (rect_top, 0))
    self.assertTrue(rect_left >= 0, msg='%s >= %s' % (rect_left, 0))
    self.assertTrue(
        rect_bottom <= viewport_height,
        msg='%s + %s <= %s' % (rect_top, rect_height, viewport_height))
    self.assertTrue(
        rect_right <= viewport_width,
        msg='%s + %s <= %s' % (rect_left, rect_width, viewport_width))
|
from app.modules.shogi import Koma

# All board emoji are custom Slack emoji named ":slackshogisss_<piece>:".
emoji_prefix = "slackshogisss_"
# NOTE: historical misspelling of "separator"; kept for backward compatibility.
emoji_separetor = ":"


def _emoji(name):
    """Return the Slack emoji token for *name*, e.g. _emoji("fu") -> ":slackshogisss_fu:"."""
    return emoji_separetor + emoji_prefix + name + emoji_separetor


# Map each Koma (piece) value to its Slack emoji token.  Opponent pieces use
# the "_enemy" suffixed emoji; note gyoku renders as "gyoku" for the first
# player but "ou" for the opponent (distinct custom emoji by convention).
koma2emoji = {
    Koma.empty: _emoji("mu"),
    Koma.fu: _emoji("fu"),
    Koma.kyosha: _emoji("kyou"),
    Koma.keima: _emoji("kei"),
    Koma.gin: _emoji("gin"),
    Koma.kin: _emoji("kin"),
    Koma.kaku: _emoji("kaku"),
    Koma.hisha: _emoji("hi"),
    Koma.gyoku: _emoji("gyoku"),
    Koma.promoted_fu: _emoji("tokin"),
    Koma.promoted_kyosha: _emoji("narikyou"),
    Koma.promoted_keima: _emoji("narikei"),
    Koma.promoted_gin: _emoji("narigin"),
    Koma.promoted_kaku: _emoji("uma"),
    Koma.promoted_hisha: _emoji("ryu"),
    Koma.opponent_fu: _emoji("fu_enemy"),
    Koma.opponent_kyosha: _emoji("kyou_enemy"),
    Koma.opponent_keima: _emoji("kei_enemy"),
    Koma.opponent_gin: _emoji("gin_enemy"),
    Koma.opponent_kin: _emoji("kin_enemy"),
    Koma.opponent_kaku: _emoji("kaku_enemy"),
    Koma.opponent_hisha: _emoji("hi_enemy"),
    Koma.opponent_gyoku: _emoji("ou_enemy"),
    Koma.opponent_promoted_fu: _emoji("tokin_enemy"),
    Koma.opponent_promoted_kyosha: _emoji("narikyou_enemy"),
    Koma.opponent_promoted_keima: _emoji("narikei_enemy"),
    Koma.opponent_promoted_gin: _emoji("narigin_enemy"),
    Koma.opponent_promoted_kaku: _emoji("uma_enemy"),
    Koma.opponent_promoted_hisha: _emoji("ryu_enemy"),
}
class ShogiOutput:
    """Render a shogi game state ("board_info" dict) as Slack emoji text."""

    @staticmethod
    def _format_hand(hand):
        """Render a captured-piece ("motigoma") list as emoji.

        Returns "持ち駒なし" for an empty hand; otherwise wraps to a new
        line once seven pieces would appear on one line.
        """
        if not hand:
            return "持ち駒なし"
        text = ""
        count = 0
        for koma in hand:
            count += 1
            # if a number of motigoma is more than 7, go to next line
            if count == 7:
                text += "\n "
                count = 1
            text += koma2emoji[koma] + " "
        return text

    @staticmethod
    def make_board_emoji(board_info):
        """Build the whole message: second player's hand, the 9x9 board
        (highlighting the destination square of the last move), then the
        first player's hand.  "[手番]" marks whose turn it is.
        """
        output_text = ""
        if not board_info["turn"]:
            output_text = "[手番]"
        output_text += "後手 {} : ".format(board_info["info"]["second"]["name"])
        output_text += ShogiOutput._format_hand(board_info["second"])
        output_text += "\n\n"
        # board; hoist the last-move lookups out of the 9x9 loop and use
        # "is not None" instead of the "not ... == None" anti-idiom
        last_x = board_info["_shogi"].shogi.last_move_x
        last_y = board_info["_shogi"].shogi.last_move_y
        for y in range(9):
            for x in range(9):
                emoji = koma2emoji[board_info["board"][y][x]]
                if last_x is not None and x == last_x and y == last_y:
                    emoji = emoji.replace(emoji_prefix, emoji_prefix + "last_")
                output_text += emoji
            output_text += "\n"
        output_text += "\n"
        # first player's hand
        if board_info["turn"]:
            output_text += "[手番]"
        output_text += "先手 {} : ".format(board_info["info"]["first"]["name"])
        output_text += ShogiOutput._format_hand(board_info["first"])
        output_text += "\n"
        return output_text
fix yossy review comment
from app.modules.shogi import Koma

# All board emoji are custom Slack emoji named ":slackshogisss_<piece>:".
emoji_prefix = "slackshogisss_"
# NOTE: historical misspelling of "separator"; kept for backward compatibility.
emoji_separetor = ":"


def _emoji(name):
    """Return the Slack emoji token for *name*, e.g. _emoji("fu") -> ":slackshogisss_fu:"."""
    return emoji_separetor + emoji_prefix + name + emoji_separetor


# Map each Koma (piece) value to its Slack emoji token.  Opponent pieces use
# the "_enemy" suffixed emoji; note gyoku renders as "gyoku" for the first
# player but "ou" for the opponent (distinct custom emoji by convention).
koma2emoji = {
    Koma.empty: _emoji("mu"),
    Koma.fu: _emoji("fu"),
    Koma.kyosha: _emoji("kyou"),
    Koma.keima: _emoji("kei"),
    Koma.gin: _emoji("gin"),
    Koma.kin: _emoji("kin"),
    Koma.kaku: _emoji("kaku"),
    Koma.hisha: _emoji("hi"),
    Koma.gyoku: _emoji("gyoku"),
    Koma.promoted_fu: _emoji("tokin"),
    Koma.promoted_kyosha: _emoji("narikyou"),
    Koma.promoted_keima: _emoji("narikei"),
    Koma.promoted_gin: _emoji("narigin"),
    Koma.promoted_kaku: _emoji("uma"),
    Koma.promoted_hisha: _emoji("ryu"),
    Koma.opponent_fu: _emoji("fu_enemy"),
    Koma.opponent_kyosha: _emoji("kyou_enemy"),
    Koma.opponent_keima: _emoji("kei_enemy"),
    Koma.opponent_gin: _emoji("gin_enemy"),
    Koma.opponent_kin: _emoji("kin_enemy"),
    Koma.opponent_kaku: _emoji("kaku_enemy"),
    Koma.opponent_hisha: _emoji("hi_enemy"),
    Koma.opponent_gyoku: _emoji("ou_enemy"),
    Koma.opponent_promoted_fu: _emoji("tokin_enemy"),
    Koma.opponent_promoted_kyosha: _emoji("narikyou_enemy"),
    Koma.opponent_promoted_keima: _emoji("narikei_enemy"),
    Koma.opponent_promoted_gin: _emoji("narigin_enemy"),
    Koma.opponent_promoted_kaku: _emoji("uma_enemy"),
    Koma.opponent_promoted_hisha: _emoji("ryu_enemy"),
}
class ShogiOutput:
    """Render a shogi game state ("board_info" dict) as Slack emoji text."""

    @staticmethod
    def _format_hand(hand):
        """Render a captured-piece ("motigoma") list; "持ち駒なし" if empty."""
        if not hand:
            return "持ち駒なし"
        text = ""
        count = 0
        for koma in hand:
            count += 1
            # if a number of motigoma is more than 7, go to next line
            if count == 7:
                text += "\n "
                count = 1
            text += koma2emoji[koma] + " "
        return text

    @staticmethod
    def make_board_emoji(board_info):
        """Build the message: second player's hand, 9x9 board, first player's hand.

        "[手番]" marks whose turn it is; the destination square of the last
        move is rendered with the "last_" variant of its emoji.
        """
        last_x = board_info["_shogi"].shogi.last_move_x
        last_y = board_info["_shogi"].shogi.last_move_y
        parts = []
        if not board_info["turn"]:
            parts.append("[手番]")
        parts.append("後手 {} : ".format(board_info["info"]["second"]["name"]))
        parts.append(ShogiOutput._format_hand(board_info["second"]))
        parts.append("\n\n")
        # board
        for y in range(9):
            for x in range(9):
                emoji = koma2emoji[board_info["board"][y][x]]
                if x == last_x and y == last_y:
                    emoji = emoji.replace(emoji_prefix, emoji_prefix + "last_")
                parts.append(emoji)
            parts.append("\n")
        parts.append("\n")
        # first player's hand
        if board_info["turn"]:
            parts.append("[手番]")
        parts.append("先手 {} : ".format(board_info["info"]["first"]["name"]))
        parts.append(ShogiOutput._format_hand(board_info["first"]))
        parts.append("\n")
        return "".join(parts)
|
import os
import threading
import Queue
import traceback
import atexit
import weakref
import __future__
# note that the whole code of this module (as well as some
# other modules) execute not only on the local side but
# also on any gateway's remote side. On such remote sides
# we cannot assume the py library to be there and
# InstallableGateway._remote_bootstrap_gateway() (located
# in register.py) will take care to send source fragments
# to the other side. Yes, it is fragile but we have a
# few tests that try to catch when we mess up.
# XXX the following lines should not be here
# On a remote side this module is executed from shipped source fragments and
# these names are pre-seeded into globals(); only import the real py lib
# when running locally.
if 'ThreadOut' not in globals():
    import py
    from py.code import Source
    from py.__.execnet.channel import ChannelFactory, Channel
    from py.__.execnet.message import Message
    ThreadOut = py._thread.ThreadOut
    import os
# Set to an open file object to enable protocol tracing (see Gateway._trace).
debug = 0 # open('/tmp/execnet-debug-%d' % os.getpid() , 'wa')
# Exceptions that the receive/trace loops must never swallow.
sysex = (KeyboardInterrupt, SystemExit)
# ----------------------------------------------------------
# cleanup machinery (for exiting processes)
# ----------------------------------------------------------
class GatewayCleanup:
    """Track live gateways (weakly) and exit() them at interpreter shutdown."""
    def __init__(self):
        # WeakKeyDictionary so a gateway that is garbage-collected simply
        # disappears from the registry without explicit unregister().
        self._activegateways = weakref.WeakKeyDictionary()
        atexit.register(self.cleanup_atexit)
    def register(self, gateway):
        """Register *gateway* for cleanup at exit; must not already be registered."""
        assert gateway not in self._activegateways
        self._activegateways[gateway] = True
    def unregister(self, gateway):
        """Remove *gateway* from the cleanup registry."""
        del self._activegateways[gateway]
    def cleanup_atexit(self):
        """atexit hook: exit() every gateway still registered."""
        if debug:
            print >>debug, "="*20 + "cleaning up" + "=" * 20
            debug.flush()
        for gw in self._activegateways.keys():
            gw.exit()
            #gw.join() # should work as well
# ----------------------------------------------------------
# Base Gateway (used for both remote and local side)
# ----------------------------------------------------------
class Gateway(object):
    """Core execnet gateway.

    Ties an inputoutput object to a channel factory plus a receiver thread,
    and offers remote_exec() to run source code on the other side.  The same
    class serves both the local and the remote end of a connection (see the
    module comment about remote bootstrapping).
    """
    # raised internally to leave the serve/exec loop
    class _StopExecLoop(Exception): pass
    _ThreadOut = ThreadOut
    remoteaddress = ""
    # created lazily by _initreceive(requestqueue=True); None means this
    # side will not execute tasks (see _local_schedulexec)
    _requestqueue = None
    # one process-wide cleanup registry shared by all gateway instances
    _cleanup = GatewayCleanup()
    def __init__(self, io, _startcount=2):
        """ initialize core gateway, using the given
            inputoutput object.
        """
        self._io = io
        self._channelfactory = ChannelFactory(self, _startcount)
        self._cleanup.register(self)
    def _initreceive(self, requestqueue=False):
        # Start the daemon thread that reads incoming Messages; optionally
        # also create the queue used to schedule remote_exec tasks here.
        if requestqueue:
            self._requestqueue = Queue.Queue()
        self._receiverthread = threading.Thread(name="receiver",
                            target=self._thread_receiver)
        self._receiverthread.setDaemon(1)
        self._receiverthread.start()
    def __repr__(self):
        """ return string representing gateway type and status. """
        addr = self.remoteaddress
        if addr:
            addr = '[%s]' % (addr,)
        else:
            addr = ''
        try:
            r = (self._receiverthread.isAlive() and "receiving" or
                 "not receiving")
            s = "sending" # XXX
            i = len(self._channelfactory.channels())
        except AttributeError:
            # _initreceive() has not been called yet
            r = s = "uninitialized"
            i = "no"
        return "<%s%s %s/%s (%s active channels)>" %(
            self.__class__.__name__, addr, r, s, i)
    def _trace(self, *args):
        # Best-effort debug logging; a no-op unless the module-level
        # 'debug' is set to an open file object.
        if debug:
            try:
                l = "\n".join(args).split(os.linesep)
                # NOTE(review): 'id' is computed but never used in the output
                id = getid(self)
                for x in l:
                    print >>debug, x
                debug.flush()
            except sysex:
                raise
            except:
                traceback.print_exc()
    def _traceex(self, excinfo):
        # Trace a formatted exception; falls back to a terse form if
        # traceback formatting itself fails.
        try:
            l = traceback.format_exception(*excinfo)
            errortext = "".join(l)
        except:
            errortext = '%s: %s' % (excinfo[0].__name__,
                                    excinfo[1])
        self._trace(errortext)
    def _thread_receiver(self):
        """ thread to read and handle Messages half-sync-half-async. """
        try:
            from sys import exc_info
            while 1:
                try:
                    msg = Message.readfrom(self._io)
                    self._trace("received <- %r" % msg)
                    msg.received(self)
                except sysex:
                    break
                except EOFError:
                    break
                except:
                    self._traceex(exc_info())
                    break
        finally:
            # receiver is gone: stop executing and sending, then let the
            # channel factory notify all channels/callbacks
            self._stopexec()
            self._stopsend()
            self._channelfactory._finished_receiving()
            self._trace('leaving %r' % threading.currentThread())
    # class-level import: makes sys.exc_info available as an attribute,
    # which _send() below calls as self.exc_info()
    from sys import exc_info
    def _send(self, msg):
        # msg=None means "shut down the write side" (see _stopsend)
        if msg is None:
            self._io.close_write()
        else:
            try:
                msg.writeto(self._io)
            except:
                excinfo = self.exc_info()
                self._traceex(excinfo)
                msg.post_sent(self, excinfo)
            else:
                msg.post_sent(self)
        self._trace('sent -> %r' % msg)
    def _local_redirect_thread_output(self, outid, errid):
        # Hook the executing thread's stdout/stderr up to channels so the
        # other side receives the output; returns an undo closure.
        l = []
        for name, id in ('stdout', outid), ('stderr', errid):
            if id:
                # NOTE(review): 'outid' is used for both entries although the
                # loop variable is 'id' -- looks like it should be
                # self._channelfactory.new(id); confirm
                channel = self._channelfactory.new(outid)
                # NOTE(review): 'sys' is not imported at module level in this
                # file -- presumably present in the execution namespace; confirm
                out = self._ThreadOut(sys, name)
                out.setwritefunc(channel.send)
                l.append((out, channel))
        def close():
            for out, channel in l:
                out.delwritefunc()
                channel.close()
        return close
    def _local_schedulexec(self, channel, sourcetask):
        # Queue a task for _servemain, or refuse if this side does not execute.
        if self._requestqueue is not None:
            self._requestqueue.put((channel, sourcetask))
        else:
            # we will not execute, let's send back an error
            # to inform the other side
            channel.close("execution disallowed")
    def _servemain(self, joining=True):
        # Main serve loop (remote side): pull (channel, sourcetask) items off
        # the request queue and execute them; None is the shutdown sentinel.
        from sys import exc_info
        self._initreceive(requestqueue=True)
        try:
            while 1:
                item = self._requestqueue.get()
                if item is None:
                    self._stopsend()
                    break
                try:
                    self._executetask(item)
                except self._StopExecLoop:
                    break
        finally:
            self._trace("_servemain finished")
        if joining:
            self.join()
    def remote_init_threads(self, num=None):
        """ start up to 'num' threads for subsequent
            remote_exec() invocations to allow concurrent
            execution.
        """
        if hasattr(self, '_remotechannelthread'):
            raise IOError("remote threads already running")
        from py.__.thread import pool
        source = py.code.Source(pool, """
            execpool = WorkerPool(maxthreads=%r)
            gw = channel.gateway
            while 1:
                task = gw._requestqueue.get()
                if task is None:
                    gw._stopsend()
                    execpool.shutdown()
                    execpool.join()
                    raise gw._StopExecLoop
                execpool.dispatch(gw._executetask, task)
        """ % num)
        self._remotechannelthread = self.remote_exec(source)
    def _executetask(self, item):
        """ execute channel/source items. """
        from sys import exc_info
        channel, (source, outid, errid) = item
        try:
            loc = { 'channel' : channel }
            self._trace("execution starts:", repr(source)[:50])
            close = self._local_redirect_thread_output(outid, errid)
            try:
                # compile with the generator future flag so 'yield' is allowed
                co = compile(source+'\n', '', 'exec',
                             __future__.CO_GENERATOR_ALLOWED)
                exec co in loc
            finally:
                close()
            self._trace("execution finished:", repr(source)[:50])
        except (KeyboardInterrupt, SystemExit):
            pass
        except self._StopExecLoop:
            channel.close()
            raise
        except:
            # report the formatted traceback to the other side via close()
            excinfo = exc_info()
            l = traceback.format_exception(*excinfo)
            errortext = "".join(l)
            channel.close(errortext)
            self._trace(errortext)
        else:
            channel.close()
    def _newredirectchannelid(self, callback):
        # Wrap a writable object or callable in a fresh channel and return
        # its id (or None when no redirection was requested).
        if callback is None:
            return
        if hasattr(callback, 'write'):
            callback = callback.write
        assert callable(callback)
        chan = self.newchannel()
        chan.setcallback(callback)
        return chan.id
    # _____________________________________________________________________
    #
    # High Level Interface
    # _____________________________________________________________________
    #
    def newchannel(self):
        """ return new channel object. """
        return self._channelfactory.new()
    def remote_exec(self, source, stdout=None, stderr=None):
        """ return channel object and connect it to a remote
            execution thread where the given 'source' executes
            and has the sister 'channel' object in its global
            namespace. The callback functions 'stdout' and
            'stderr' get called on receival of remote
            stdout/stderr output strings.
        """
        # Source may be absent on a bootstrapped remote side; fall back to
        # importing py lazily, or sending the source string as-is.
        try:
            source = str(Source(source))
        except NameError:
            try:
                import py
                source = str(py.code.Source(source))
            except ImportError:
                pass
        channel = self.newchannel()
        outid = self._newredirectchannelid(stdout)
        errid = self._newredirectchannelid(stderr)
        self._send(Message.CHANNEL_OPEN(
            channel.id, (source, outid, errid)))
        return channel
    def _remote_redirect(self, stdout=None, stderr=None):
        """ return a handle representing a redirection of a remote
            end's stdout to a local file object. with handle.close()
            the redirection will be reverted.
        """
        clist = []
        for name, out in ('stdout', stdout), ('stderr', stderr):
            if out:
                outchannel = self.newchannel()
                outchannel.setcallback(getattr(out, 'write', out))
                channel = self.remote_exec("""
                    import sys
                    outchannel = channel.receive()
                    outchannel.gateway._ThreadOut(sys, %r).setdefaultwriter(outchannel.send)
                """ % name)
                channel.send(outchannel)
                clist.append(channel)
        for c in clist:
            c.waitclose()
        class Handle:
            def close(_):
                # revert the redirection on the remote side
                for name, out in ('stdout', stdout), ('stderr', stderr):
                    if out:
                        c = self.remote_exec("""
                            import sys
                            channel.gateway._ThreadOut(sys, %r).resetdefault()
                        """ % name)
                        c.waitclose()
        return Handle()
    def exit(self):
        """ Try to stop all exec and IO activity. """
        self._cleanup.unregister(self)
        self._stopexec()
        self._stopsend()
    def _stopsend(self):
        # close the write side by sending the None sentinel
        self._send(None)
    def _stopexec(self):
        # unblock _servemain with the None sentinel, if it is running
        if self._requestqueue is not None:
            self._requestqueue.put(None)
    def join(self, joinexec=True):
        """ Wait for all IO (and by default all execution activity)
            to stop. the joinexec parameter is obsolete.
        """
        # NOTE(review): 'current' is unused
        current = threading.currentThread()
        if self._receiverthread.isAlive():
            self._trace("joining receiver thread")
            self._receiverthread.join()
def getid(gw, cache={}):
    """Return a stable per-process debug id like "<pid>:<ClassName>.<n>".

    Ids are memoized by object identity in the (intentionally mutable)
    default *cache*, grouped per class name.
    """
    name = gw.__class__.__name__
    per_class = cache.setdefault(name, {})
    key = id(gw)
    if key not in per_class:
        per_class[key] = "%s:%s.%d" % (os.getpid(), name, len(per_class))
    return per_class[key]
[svn r57544] still notify callbacks if we can't send to the other side anymore.
import os
import threading
import Queue
import traceback
import atexit
import weakref
import __future__
# note that the whole code of this module (as well as some
# other modules) execute not only on the local side but
# also on any gateway's remote side. On such remote sides
# we cannot assume the py library to be there and
# InstallableGateway._remote_bootstrap_gateway() (located
# in register.py) will take care to send source fragments
# to the other side. Yes, it is fragile but we have a
# few tests that try to catch when we mess up.
# XXX the following lines should not be here
# On a remote side this module is executed from shipped source fragments and
# these names are pre-seeded into globals(); only import the real py lib
# when running locally.
if 'ThreadOut' not in globals():
    import py
    from py.code import Source
    from py.__.execnet.channel import ChannelFactory, Channel
    from py.__.execnet.message import Message
    ThreadOut = py._thread.ThreadOut
    import os
# Set to an open file object to enable protocol tracing (see Gateway._trace).
debug = 0 # open('/tmp/execnet-debug-%d' % os.getpid() , 'wa')
# Exceptions that the receive/trace loops must never swallow.
sysex = (KeyboardInterrupt, SystemExit)
# ----------------------------------------------------------
# cleanup machinery (for exiting processes)
# ----------------------------------------------------------
class GatewayCleanup:
    """Track live gateways (weakly) and exit() them at interpreter shutdown."""
    def __init__(self):
        # WeakKeyDictionary so a gateway that is garbage-collected simply
        # disappears from the registry without explicit unregister().
        self._activegateways = weakref.WeakKeyDictionary()
        atexit.register(self.cleanup_atexit)
    def register(self, gateway):
        """Register *gateway* for cleanup at exit; must not already be registered."""
        assert gateway not in self._activegateways
        self._activegateways[gateway] = True
    def unregister(self, gateway):
        """Remove *gateway* from the cleanup registry."""
        del self._activegateways[gateway]
    def cleanup_atexit(self):
        """atexit hook: exit() every gateway still registered."""
        if debug:
            print >>debug, "="*20 + "cleaning up" + "=" * 20
            debug.flush()
        for gw in self._activegateways.keys():
            gw.exit()
            #gw.join() # should work as well
# ----------------------------------------------------------
# Base Gateway (used for both remote and local side)
# ----------------------------------------------------------
class Gateway(object):
    """Core execnet gateway.

    Ties an inputoutput object to a channel factory plus a receiver thread,
    and offers remote_exec() to run source code on the other side.  The same
    class serves both the local and the remote end of a connection (see the
    module comment about remote bootstrapping).
    """
    # raised internally to leave the serve/exec loop
    class _StopExecLoop(Exception): pass
    _ThreadOut = ThreadOut
    remoteaddress = ""
    # created lazily by _initreceive(requestqueue=True); None means this
    # side will not execute tasks (see _local_schedulexec)
    _requestqueue = None
    # one process-wide cleanup registry shared by all gateway instances
    _cleanup = GatewayCleanup()
    def __init__(self, io, _startcount=2):
        """ initialize core gateway, using the given
            inputoutput object.
        """
        self._io = io
        self._channelfactory = ChannelFactory(self, _startcount)
        self._cleanup.register(self)
    def _initreceive(self, requestqueue=False):
        # Start the daemon thread that reads incoming Messages; optionally
        # also create the queue used to schedule remote_exec tasks here.
        if requestqueue:
            self._requestqueue = Queue.Queue()
        self._receiverthread = threading.Thread(name="receiver",
                            target=self._thread_receiver)
        self._receiverthread.setDaemon(1)
        self._receiverthread.start()
    def __repr__(self):
        """ return string representing gateway type and status. """
        addr = self.remoteaddress
        if addr:
            addr = '[%s]' % (addr,)
        else:
            addr = ''
        try:
            r = (self._receiverthread.isAlive() and "receiving" or
                 "not receiving")
            s = "sending" # XXX
            i = len(self._channelfactory.channels())
        except AttributeError:
            # _initreceive() has not been called yet
            r = s = "uninitialized"
            i = "no"
        return "<%s%s %s/%s (%s active channels)>" %(
            self.__class__.__name__, addr, r, s, i)
    def _trace(self, *args):
        # Best-effort debug logging; a no-op unless the module-level
        # 'debug' is set to an open file object.
        if debug:
            try:
                l = "\n".join(args).split(os.linesep)
                # NOTE(review): 'id' is computed but never used in the output
                id = getid(self)
                for x in l:
                    print >>debug, x
                debug.flush()
            except sysex:
                raise
            except:
                traceback.print_exc()
    def _traceex(self, excinfo):
        # Trace a formatted exception; falls back to a terse form if
        # traceback formatting itself fails.
        try:
            l = traceback.format_exception(*excinfo)
            errortext = "".join(l)
        except:
            errortext = '%s: %s' % (excinfo[0].__name__,
                                    excinfo[1])
        self._trace(errortext)
    def _thread_receiver(self):
        """ thread to read and handle Messages half-sync-half-async. """
        try:
            from sys import exc_info
            while 1:
                try:
                    msg = Message.readfrom(self._io)
                    self._trace("received <- %r" % msg)
                    msg.received(self)
                except sysex:
                    break
                except EOFError:
                    break
                except:
                    self._traceex(exc_info())
                    break
        finally:
            # XXX we need to signal fatal error states to
            # channels/callbacks, particularly ones
            # where the other side just died.
            self._stopexec()
            # _stopsend() can fail with IOError when the other side is
            # already gone; swallow it so the channel factory below still
            # gets to notify all channels/callbacks.
            try:
                self._stopsend()
            except IOError:
                self._trace('IOError on _stopsend()')
            self._channelfactory._finished_receiving()
            self._trace('leaving %r' % threading.currentThread())
    # class-level import: makes sys.exc_info available as an attribute,
    # which _send() below calls as self.exc_info()
    from sys import exc_info
    def _send(self, msg):
        # msg=None means "shut down the write side" (see _stopsend)
        if msg is None:
            self._io.close_write()
        else:
            try:
                msg.writeto(self._io)
            except:
                excinfo = self.exc_info()
                self._traceex(excinfo)
                msg.post_sent(self, excinfo)
            else:
                msg.post_sent(self)
        self._trace('sent -> %r' % msg)
    def _local_redirect_thread_output(self, outid, errid):
        # Hook the executing thread's stdout/stderr up to channels so the
        # other side receives the output; returns an undo closure.
        l = []
        for name, id in ('stdout', outid), ('stderr', errid):
            if id:
                # NOTE(review): 'outid' is used for both entries although the
                # loop variable is 'id' -- looks like it should be
                # self._channelfactory.new(id); confirm
                channel = self._channelfactory.new(outid)
                # NOTE(review): 'sys' is not imported at module level in this
                # file -- presumably present in the execution namespace; confirm
                out = self._ThreadOut(sys, name)
                out.setwritefunc(channel.send)
                l.append((out, channel))
        def close():
            for out, channel in l:
                out.delwritefunc()
                channel.close()
        return close
    def _local_schedulexec(self, channel, sourcetask):
        # Queue a task for _servemain, or refuse if this side does not execute.
        if self._requestqueue is not None:
            self._requestqueue.put((channel, sourcetask))
        else:
            # we will not execute, let's send back an error
            # to inform the other side
            channel.close("execution disallowed")
    def _servemain(self, joining=True):
        # Main serve loop (remote side): pull (channel, sourcetask) items off
        # the request queue and execute them; None is the shutdown sentinel.
        from sys import exc_info
        self._initreceive(requestqueue=True)
        try:
            while 1:
                item = self._requestqueue.get()
                if item is None:
                    self._stopsend()
                    break
                try:
                    self._executetask(item)
                except self._StopExecLoop:
                    break
        finally:
            self._trace("_servemain finished")
        if joining:
            self.join()
    def remote_init_threads(self, num=None):
        """ start up to 'num' threads for subsequent
            remote_exec() invocations to allow concurrent
            execution.
        """
        if hasattr(self, '_remotechannelthread'):
            raise IOError("remote threads already running")
        from py.__.thread import pool
        source = py.code.Source(pool, """
            execpool = WorkerPool(maxthreads=%r)
            gw = channel.gateway
            while 1:
                task = gw._requestqueue.get()
                if task is None:
                    gw._stopsend()
                    execpool.shutdown()
                    execpool.join()
                    raise gw._StopExecLoop
                execpool.dispatch(gw._executetask, task)
        """ % num)
        self._remotechannelthread = self.remote_exec(source)
    def _executetask(self, item):
        """ execute channel/source items. """
        from sys import exc_info
        channel, (source, outid, errid) = item
        try:
            loc = { 'channel' : channel }
            self._trace("execution starts:", repr(source)[:50])
            close = self._local_redirect_thread_output(outid, errid)
            try:
                # compile with the generator future flag so 'yield' is allowed
                co = compile(source+'\n', '', 'exec',
                             __future__.CO_GENERATOR_ALLOWED)
                exec co in loc
            finally:
                close()
            self._trace("execution finished:", repr(source)[:50])
        except (KeyboardInterrupt, SystemExit):
            pass
        except self._StopExecLoop:
            channel.close()
            raise
        except:
            # report the formatted traceback to the other side via close()
            excinfo = exc_info()
            l = traceback.format_exception(*excinfo)
            errortext = "".join(l)
            channel.close(errortext)
            self._trace(errortext)
        else:
            channel.close()
    def _newredirectchannelid(self, callback):
        # Wrap a writable object or callable in a fresh channel and return
        # its id (or None when no redirection was requested).
        if callback is None:
            return
        if hasattr(callback, 'write'):
            callback = callback.write
        assert callable(callback)
        chan = self.newchannel()
        chan.setcallback(callback)
        return chan.id
    # _____________________________________________________________________
    #
    # High Level Interface
    # _____________________________________________________________________
    #
    def newchannel(self):
        """ return new channel object. """
        return self._channelfactory.new()
    def remote_exec(self, source, stdout=None, stderr=None):
        """ return channel object and connect it to a remote
            execution thread where the given 'source' executes
            and has the sister 'channel' object in its global
            namespace. The callback functions 'stdout' and
            'stderr' get called on receival of remote
            stdout/stderr output strings.
        """
        # Source may be absent on a bootstrapped remote side; fall back to
        # importing py lazily, or sending the source string as-is.
        try:
            source = str(Source(source))
        except NameError:
            try:
                import py
                source = str(py.code.Source(source))
            except ImportError:
                pass
        channel = self.newchannel()
        outid = self._newredirectchannelid(stdout)
        errid = self._newredirectchannelid(stderr)
        self._send(Message.CHANNEL_OPEN(
            channel.id, (source, outid, errid)))
        return channel
    def _remote_redirect(self, stdout=None, stderr=None):
        """ return a handle representing a redirection of a remote
            end's stdout to a local file object. with handle.close()
            the redirection will be reverted.
        """
        clist = []
        for name, out in ('stdout', stdout), ('stderr', stderr):
            if out:
                outchannel = self.newchannel()
                outchannel.setcallback(getattr(out, 'write', out))
                channel = self.remote_exec("""
                    import sys
                    outchannel = channel.receive()
                    outchannel.gateway._ThreadOut(sys, %r).setdefaultwriter(outchannel.send)
                """ % name)
                channel.send(outchannel)
                clist.append(channel)
        for c in clist:
            c.waitclose()
        class Handle:
            def close(_):
                # revert the redirection on the remote side
                for name, out in ('stdout', stdout), ('stderr', stderr):
                    if out:
                        c = self.remote_exec("""
                            import sys
                            channel.gateway._ThreadOut(sys, %r).resetdefault()
                        """ % name)
                        c.waitclose()
        return Handle()
    def exit(self):
        """ Try to stop all exec and IO activity. """
        self._cleanup.unregister(self)
        self._stopexec()
        self._stopsend()
    def _stopsend(self):
        # close the write side by sending the None sentinel
        self._send(None)
    def _stopexec(self):
        # unblock _servemain with the None sentinel, if it is running
        if self._requestqueue is not None:
            self._requestqueue.put(None)
    def join(self, joinexec=True):
        """ Wait for all IO (and by default all execution activity)
            to stop. the joinexec parameter is obsolete.
        """
        # NOTE(review): 'current' is unused
        current = threading.currentThread()
        if self._receiverthread.isAlive():
            self._trace("joining receiver thread")
            self._receiverthread.join()
def getid(gw, cache={}):
    """Return a stable per-process debug id like "<pid>:<ClassName>.<n>".

    Ids are memoized by object identity in the (intentionally mutable)
    default *cache*, grouped per class name.
    """
    name = gw.__class__.__name__
    per_class = cache.setdefault(name, {})
    key = id(gw)
    if key not in per_class:
        per_class[key] = "%s:%s.%d" % (os.getpid(), name, len(per_class))
    return per_class[key]
|
# (c) 2014, James Tanner <tanner.jc@gmail.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a tool to encrypt, decrypt, edit, and rekey sensitive
# data files (e.g. variable files) so that they can be kept in source
# control. This module provides the VaultLib/VaultEditor primitives and
# the cipher implementations used by the ansible-vault command line tool.
import os
import shutil
import tempfile
from io import BytesIO
from subprocess import call
from ansible import errors
from hashlib import sha256
from hashlib import md5
from binascii import hexlify
from binascii import unhexlify
from ansible import constants as C
# Feature-detect the pycrypto pieces vault needs; every crypto entry point
# checks these HAS_* flags and raises CRYPTO_UPGRADE when any is missing.
try:
    from Crypto.Hash import SHA256, HMAC
    HAS_HASH = True
except ImportError:
    HAS_HASH = False
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
    from Crypto.Util import Counter
    HAS_COUNTER = True
except ImportError:
    HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
    from Crypto.Protocol.KDF import PBKDF2
    HAS_PBKDF2 = True
except ImportError:
    HAS_PBKDF2 = False
# AES IMPORTS
try:
    from Crypto.Cipher import AES as AES
    HAS_AES = True
except ImportError:
    HAS_AES = False
# Error text shown when any of the above imports failed.
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
# Marker written as the first line of every encrypted vault payload.
HEADER='$ANSIBLE_VAULT'
# Cipher names encrypt()/decrypt() will accept.
CIPHER_WHITELIST=['AES', 'AES256']
class VaultLib(object):
    """Encrypt and decrypt vault payloads.

    Wraps a named cipher implementation (the ``Vault<NAME>`` classes in this
    module) and adds/strips the ``$ANSIBLE_VAULT`` header line that records
    the vault format version and cipher name.
    """

    def __init__(self, password):
        self.password = password   # vault password used by all operations
        self.cipher_name = None    # set before encrypt(); defaults to AES256
        self.version = '1.1'       # vault format version written to the header

    def is_encrypted(self, data):
        """Return True if *data* starts with the vault header marker."""
        return data.startswith(HEADER)

    def encrypt(self, data):
        """Encrypt *data* and return it with the vault header prepended.

        Raises AnsibleError if *data* is already vault-encrypted or the
        configured cipher is unknown / not whitelisted.
        """
        if self.is_encrypted(data):
            raise errors.AnsibleError("data is already encrypted")
        if not self.cipher_name:
            # default to the current recommended cipher
            self.cipher_name = "AES256"
        this_cipher = self._cipher()
        enc_data = this_cipher.encrypt(data, self.password)
        return self._add_header(enc_data)

    def decrypt(self, data):
        """Strip the vault header from *data* and return the plaintext.

        Raises AnsibleError if no password is set, *data* is not
        vault-encrypted, or the header names an unknown cipher.
        """
        if self.password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt data")
        if not self.is_encrypted(data):
            raise errors.AnsibleError("data is not encrypted")
        # _split_header also records self.version / self.cipher_name
        data = self._split_header(data)
        this_cipher = self._cipher()
        return this_cipher.decrypt(data, self.password)

    def _cipher(self):
        """Instantiate the Vault<cipher_name> class; shared by encrypt/decrypt."""
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            return cipher()
        raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)

    def _add_header(self, data):
        """Prepend the header line and wrap *data* into 80-char lines."""
        if not self.cipher_name:
            raise errors.AnsibleError("the cipher must be set before adding a header")
        tmpdata = [data[i:i + 80] for i in range(0, len(data), 80)]
        dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"
        for l in tmpdata:
            dirty_data += l + '\n'
        return dirty_data

    def _split_header(self, data):
        """Parse and strip the header line, recording version and cipher name."""
        tmpdata = data.split('\n')
        tmpheader = tmpdata[0].strip().split(';')
        self.version = str(tmpheader[1].strip())
        self.cipher_name = str(tmpheader[2].strip())
        return '\n'.join(tmpdata[1:])

    def __enter__(self):
        return self

    def __exit__(self, *err):
        pass
class VaultEditor(object):
# uses helper methods for write_file(self, filename, data)
# to write a file so that code isn't duplicated for simple
# file I/O, ditto read_file(self, filename) and launch_editor(self, filename)
# ... "Don't Repeat Yourself", etc.
def __init__(self, cipher_name, password, filename):
# instantiates a member variable for VaultLib
self.cipher_name = cipher_name
self.password = password
self.filename = filename
def create_file(self):
""" create a new encrypted file """
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
if os.path.isfile(self.filename):
raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)
# drop the user into vim on file
EDITOR = os.environ.get('EDITOR','vim')
call([EDITOR, self.filename])
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
this_vault.cipher_name = self.cipher_name
enc_data = this_vault.encrypt(tmpdata)
self.write_data(enc_data, self.filename)
def decrypt_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
if not os.path.isfile(self.filename):
raise errors.AnsibleError("%s does not exist" % self.filename)
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
if this_vault.is_encrypted(tmpdata):
dec_data = this_vault.decrypt(tmpdata)
self.write_data(dec_data, self.filename)
else:
raise errors.AnsibleError("%s is not encrypted" % self.filename)
def edit_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
# decrypt to tmpfile
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
_, tmp_path = tempfile.mkstemp()
self.write_data(dec_data, tmp_path)
# drop the user into vim on the tmp file
EDITOR = os.environ.get('EDITOR','vim')
call([EDITOR, tmp_path])
new_data = self.read_data(tmp_path)
# create new vault
new_vault = VaultLib(self.password)
# we want the cipher to default to AES256
#new_vault.cipher_name = this_vault.cipher_name
# encrypt new data a write out to tmp
enc_data = new_vault.encrypt(new_data)
self.write_data(enc_data, tmp_path)
# shuffle tmp file into place
self.shuffle_files(tmp_path, self.filename)
def encrypt_file(self):
    """Encrypt the plaintext file at ``self.filename`` in place."""
    if not (HAS_AES and HAS_COUNTER and HAS_PBKDF2 and HAS_HASH):
        raise errors.AnsibleError(CRYPTO_UPGRADE)
    if not os.path.isfile(self.filename):
        raise errors.AnsibleError("%s does not exist" % self.filename)
    plaintext = self.read_data(self.filename)
    vault = VaultLib(self.password)
    vault.cipher_name = self.cipher_name
    # guard clause: never double-encrypt an existing vault file
    if vault.is_encrypted(plaintext):
        raise errors.AnsibleError("%s is already encrypted" % self.filename)
    self.write_data(vault.encrypt(plaintext), self.filename)
def rekey_file(self, new_password):
    """Re-encrypt ``self.filename`` under ``new_password``.

    The new vault's cipher_name is left unset, which forces the
    default cipher when encrypting.
    """
    if not (HAS_AES and HAS_COUNTER and HAS_PBKDF2 and HAS_HASH):
        raise errors.AnsibleError(CRYPTO_UPGRADE)
    # decrypt with the current password ...
    old_vault = VaultLib(self.password)
    plaintext = old_vault.decrypt(self.read_data(self.filename))
    # ... then re-encrypt with the new one and rewrite the file
    new_vault = VaultLib(new_password)
    self.write_data(new_vault.encrypt(plaintext), self.filename)
def read_data(self, filename):
    """Return the raw contents of ``filename``.

    Uses a context manager so the handle is closed even if read()
    raises (the original leaked the descriptor on error).
    """
    with open(filename, "rb") as f:
        return f.read()
def write_data(self, data, filename):
    """Replace ``filename`` with ``data``.

    Any existing file is removed first; the context manager guarantees
    the handle is flushed and closed even if write() raises (the
    original leaked the descriptor on error).
    """
    if os.path.isfile(filename):
        os.remove(filename)
    with open(filename, "wb") as f:
        f.write(data)
def shuffle_files(self, src, dest):
    """Move ``src`` over ``dest``, overwriting any existing file.

    Used to swap the freshly-encrypted tmp file into the place of the
    original vault file.
    """
    # remove the target first so shutil.move always lands exactly at dest
    if os.path.isfile(dest):
        os.remove(dest)
    shutil.move(src, dest)
########################################
# CIPHERS #
########################################
class VaultAES(object):
    """Legacy AES-CBC vault cipher.

    Obsoleted by the VaultAES256 class, which uses encrypt-then-MAC
    (fixing the order) and a better KDF; this code remains only so
    existing vault files can still be read/upgraded.
    Based on http://stackoverflow.com/a/16761459
    """

    def __init__(self):
        # refuse to construct at all if pycrypto lacks AES support
        if not HAS_AES:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
        """ Create a key and an initialization vector """
        # OpenSSL EVP_BytesToKey-style derivation: repeatedly hash
        # md5(previous_digest + password + salt) until enough material
        # for key + IV has accumulated.  md5 is weak; kept only for
        # compatibility with the legacy on-disk format.
        d = d_i = ''
        while len(d) < key_length + iv_length:
            d_i = md5(d_i + password + salt).digest()
            d += d_i
        key = d[:key_length]
        iv = d[key_length:key_length+iv_length]
        return key, iv

    def encrypt(self, data, password, key_length=32):
        """ Read plaintext data from in_file and write encrypted to out_file """
        # combine sha + data: prepend a SHA-256 of the plaintext so
        # decrypt() can detect a wrong password / corrupted payload
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data
        in_file = BytesIO(tmp_data)
        in_file.seek(0)
        out_file = BytesIO()
        bs = AES.block_size
        # Get a block of random data. EL does not have Crypto.Random.new()
        # so os.urandom is used for cross platform purposes
        salt = os.urandom(bs - len('Salted__'))
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # OpenSSL-compatible header: literal 'Salted__' then the salt
        out_file.write('Salted__' + salt)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                # pad the final chunk with N bytes of value N; a full
                # extra block is added when the data is block-aligned
                padding_length = (bs - len(chunk) % bs) or bs
                chunk += padding_length * chr(padding_length)
                finished = True
            out_file.write(cipher.encrypt(chunk))
        out_file.seek(0)
        enc_data = out_file.read()
        # hex-encode the whole blob for the text-based vault format
        tmp_data = hexlify(enc_data)
        return tmp_data

    def decrypt(self, data, password, key_length=32):
        """ Read encrypted data from in_file and write decrypted to out_file """
        # http://stackoverflow.com/a/14989032
        # drop the 80-column line wrapping added by VaultLib, then unhex
        data = ''.join(data.split('\n'))
        data = unhexlify(data)
        in_file = BytesIO(data)
        in_file.seek(0)
        out_file = BytesIO()
        bs = AES.block_size
        # skip the 'Salted__' marker and recover the salt
        salt = in_file.read(bs)[len('Salted__'):]
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = ''
        finished = False
        while not finished:
            # decrypt one chunk ahead: the final chunk is the one whose
            # successor is empty, and only it carries padding to strip
            chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                padding_length = ord(chunk[-1])
                chunk = chunk[:-padding_length]
                finished = True
            out_file.write(chunk)
        # reset the stream pointer to the beginning
        out_file.seek(0)
        new_data = out_file.read()
        # split out sha and verify decryption
        split_data = new_data.split("\n")
        this_sha = split_data[0]
        this_data = '\n'.join(split_data[1:])
        test_sha = sha256(this_data).hexdigest()
        if this_sha != test_sha:
            raise errors.AnsibleError("Decryption failed")
        return this_data
class VaultAES256(object):
    """
    Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
    Keys are derived using PBKDF2
    """
    # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html

    def __init__(self):
        # Counter and PBKDF2 need pycrypto >= 2.6.1; fail fast otherwise
        if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def gen_key_initctr(self, password, salt):
        """Derive (cipher_key, hmac_key, hex_iv) from password and salt."""
        # 16 for AES 128, 32 for AES256
        keylength = 32
        # match the size used for counter.new to avoid extra work
        ivlength = 16
        hash_function = SHA256
        # make two keys and one iv
        pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
        derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
                            count=10000, prf=pbkdf2_prf)
        # slice the derived material: cipher key, then HMAC key, then IV
        key1 = derivedkey[:keylength]
        key2 = derivedkey[keylength:(keylength * 2)]
        iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
        return key1, key2, hexlify(iv)

    def encrypt(self, data, password):
        """Encrypt-then-MAC; returns hex(salt\\nhmac\\nhex(ciphertext))."""
        salt = os.urandom(32)
        key1, key2, iv = self.gen_key_initctr(password, salt)
        # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
        bs = AES.block_size
        padding_length = (bs - len(data) % bs) or bs
        data += padding_length * chr(padding_length)
        # COUNTER.new PARAMETERS
        # 1) nbits (integer) - Length of the counter, in bits.
        # 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
        ctr = Counter.new(128, initial_value=long(iv, 16))
        # AES.new PARAMETERS
        # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
        # 2) MODE_CTR, is the recommended mode
        # 3) counter=<CounterObject>
        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
        # ENCRYPT PADDED DATA
        cryptedData = cipher.encrypt(data)
        # COMBINE SALT, DIGEST AND DATA (MAC is over the ciphertext:
        # encrypt-then-MAC, so decrypt() can verify before decrypting)
        hmac = HMAC.new(key2, cryptedData, SHA256)
        message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
        message = hexlify(message)
        return message

    def decrypt(self, data, password):
        """Verify the HMAC, then decrypt; returns None on MAC mismatch."""
        # SPLIT SALT, DIGEST, AND DATA
        data = ''.join(data.split("\n"))
        data = unhexlify(data)
        salt, cryptedHmac, cryptedData = data.split("\n", 2)
        salt = unhexlify(salt)
        cryptedData = unhexlify(cryptedData)
        key1, key2, iv = self.gen_key_initctr(password, salt)
        # EXIT EARLY IF DIGEST DOESN'T MATCH
        hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
        if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
            return None
        # SET THE COUNTER AND THE CIPHER
        ctr = Counter.new(128, initial_value=long(iv, 16))
        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
        # DECRYPT PADDED DATA
        decryptedData = cipher.decrypt(cryptedData)
        # UNPAD DATA: last byte encodes the pad length (PKCS#7)
        padding_length = ord(decryptedData[-1])
        decryptedData = decryptedData[:-padding_length]
        return decryptedData

    def is_equal(self, a, b):
        """Constant-time comparison of two equal-length strings.

        http://codahale.com/a-lesson-in-timing-attacks/
        """
        if len(a) != len(b):
            return False
        # accumulate the XOR of every byte pair so runtime does not
        # depend on the position of the first difference
        result = 0
        for x, y in zip(a, b):
            result |= ord(x) ^ ord(y)
        return result == 0
Changed call to EDITOR to allow for parameters
The EDITOR environment variable is used to create and edit files in the vault.
But if the EDITOR variable contains parameters, subprocess.call() breaks.
This change fixes the problem by safely splitting the EDITOR environment variable into a list.
It adds a dependency on shlex.
# (c) 2014, James Tanner <tanner.jc@gmail.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-pull is a script that runs ansible in local mode
# after checking out a playbooks directory from source repo. There is an
# example playbook to bootstrap this script in the examples/ dir which
# installs ansible and sets it up to run on cron.
import os
import shlex
import shutil
import tempfile
from io import BytesIO
from subprocess import call
from ansible import errors
from hashlib import sha256
from hashlib import md5
from binascii import hexlify
from binascii import unhexlify
from ansible import constants as C
try:
from Crypto.Hash import SHA256, HMAC
HAS_HASH = True
except ImportError:
HAS_HASH = False
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
# Message shown whenever the installed pycrypto is too old for vault use.
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
# Marker placed at the start of the first line of every vault file.
HEADER='$ANSIBLE_VAULT'
# Cipher names that VaultLib may instantiate by globals() lookup.
CIPHER_WHITELIST=['AES', 'AES256']
class VaultLib(object):
    """Format-level vault driver.

    Wraps a cipher class (VaultAES / VaultAES256, looked up by name in
    this module's globals) with the $ANSIBLE_VAULT header handling:
    ``$ANSIBLE_VAULT;<version>;<cipher_name>`` on the first line,
    followed by the cipher's hex output wrapped at 80 columns.
    """

    def __init__(self, password):
        self.password = password
        # cipher_name is set by encrypt() (defaulting to AES256) or by
        # _split_header() when decrypting an existing payload
        self.cipher_name = None
        self.version = '1.1'

    def is_encrypted(self, data):
        # a vault payload always begins with the $ANSIBLE_VAULT marker
        if data.startswith(HEADER):
            return True
        else:
            return False

    def encrypt(self, data):
        """Encrypt ``data`` with the configured cipher and add a header."""
        if self.is_encrypted(data):
            raise errors.AnsibleError("data is already encrypted")
        if not self.cipher_name:
            self.cipher_name = "AES256"
            #raise errors.AnsibleError("the cipher must be set before encrypting data")
        # resolve the cipher class by name, restricted to the whitelist
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
        """
        # combine sha + data
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data
        """
        # encrypt sha + data
        enc_data = this_cipher.encrypt(data, self.password)
        # add header
        tmp_data = self._add_header(enc_data)
        return tmp_data

    def decrypt(self, data):
        """Strip the vault header from ``data`` and decrypt the payload."""
        if self.password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt data")
        if not self.is_encrypted(data):
            raise errors.AnsibleError("data is not encrypted")
        # clean out header; this also sets self.version / self.cipher_name
        data = self._split_header(data)
        # create the cipher object (same whitelist lookup as encrypt())
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
        # try to unencrypt data
        data = this_cipher.decrypt(data, self.password)
        return data

    def _add_header(self, data):
        # combine header and encrypted data in 80 char columns
        #tmpdata = hexlify(data)
        tmpdata = [data[i:i+80] for i in range(0, len(data), 80)]
        if not self.cipher_name:
            raise errors.AnsibleError("the cipher must be set before adding a header")
        # header line: $ANSIBLE_VAULT;<version>;<cipher_name>
        dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"
        for l in tmpdata:
            dirty_data += l + '\n'
        return dirty_data

    def _split_header(self, data):
        # used by decrypt: parse the header line, remember its fields,
        # and return the remaining (wrapped) ciphertext
        tmpdata = data.split('\n')
        tmpheader = tmpdata[0].strip().split(';')
        self.version = str(tmpheader[1].strip())
        self.cipher_name = str(tmpheader[2].strip())
        clean_data = '\n'.join(tmpdata[1:])
        """
        # strip out newline, join, unhex
        clean_data = [ x.strip() for x in clean_data ]
        clean_data = unhexlify(''.join(clean_data))
        """
        return clean_data

    def __enter__(self):
        # context-manager support; nothing to acquire
        return self

    def __exit__(self, *err):
        # nothing to release
        pass
class VaultEditor(object):
    """Create, edit, encrypt, decrypt and rekey vault files on disk.

    Thin orchestration layer over VaultLib: the read/write helpers keep
    file I/O in one place, and _editor_shell_command() builds the
    $EDITOR invocation so editors with arguments (e.g. "emacs -nw")
    work with subprocess.call().
    """

    def __init__(self, cipher_name, password, filename):
        # instantiates a member variable for VaultLib
        self.cipher_name = cipher_name
        self.password = password
        self.filename = filename

    def create_file(self):
        """ create a new encrypted file """
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)
        if os.path.isfile(self.filename):
            raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)
        # drop the user into their editor on the new file
        call(self._editor_shell_command(self.filename))
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        this_vault.cipher_name = self.cipher_name
        enc_data = this_vault.encrypt(tmpdata)
        self.write_data(enc_data, self.filename)

    def decrypt_file(self):
        """Replace the encrypted file with its decrypted plaintext."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)
        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        if this_vault.is_encrypted(tmpdata):
            dec_data = this_vault.decrypt(tmpdata)
            self.write_data(dec_data, self.filename)
        else:
            raise errors.AnsibleError("%s is not encrypted" % self.filename)

    def edit_file(self):
        """Decrypt to a tmp file, run the editor on it, re-encrypt in place."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)
        # decrypt to tmpfile
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)
        _, tmp_path = tempfile.mkstemp()
        self.write_data(dec_data, tmp_path)
        # drop the user into their editor on the tmp file
        call(self._editor_shell_command(tmp_path))
        new_data = self.read_data(tmp_path)
        # create new vault; leaving cipher_name unset makes encrypt()
        # default to AES256 instead of preserving the old cipher
        new_vault = VaultLib(self.password)
        # encrypt new data and write out to tmp
        enc_data = new_vault.encrypt(new_data)
        self.write_data(enc_data, tmp_path)
        # shuffle tmp file into place
        self.shuffle_files(tmp_path, self.filename)

    def encrypt_file(self):
        """Encrypt the plaintext file in place."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)
        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        this_vault.cipher_name = self.cipher_name
        if not this_vault.is_encrypted(tmpdata):
            enc_data = this_vault.encrypt(tmpdata)
            self.write_data(enc_data, self.filename)
        else:
            raise errors.AnsibleError("%s is already encrypted" % self.filename)

    def rekey_file(self, new_password):
        """Decrypt with the current password, re-encrypt with new_password."""
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)
        # decrypt
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)
        # create new vault; the cipher is forced back to the default
        new_vault = VaultLib(new_password)
        # re-encrypt data and re-write file
        enc_data = new_vault.encrypt(dec_data)
        self.write_data(enc_data, self.filename)

    def read_data(self, filename):
        """Return the raw contents of ``filename``."""
        # context manager: handle is closed even if read() raises
        with open(filename, "rb") as f:
            return f.read()

    def write_data(self, data, filename):
        """Replace ``filename`` with ``data``, removing any existing file first."""
        if os.path.isfile(filename):
            os.remove(filename)
        # context manager: flush/close is guaranteed even if write() raises
        with open(filename, "wb") as f:
            f.write(data)

    def shuffle_files(self, src, dest):
        """Move ``src`` over ``dest``, overwriting any existing file."""
        if os.path.isfile(dest):
            os.remove(dest)
        shutil.move(src, dest)

    def _editor_shell_command(self, filename):
        """Build the argv list for $EDITOR, splitting off its arguments."""
        EDITOR = os.environ.get('EDITOR', 'vim')
        editor = shlex.split(EDITOR)
        editor.append(filename)
        return editor
########################################
# CIPHERS #
########################################
class VaultAES(object):
    """Legacy AES-CBC vault cipher.

    Obsoleted by the VaultAES256 class, which uses encrypt-then-MAC
    (fixing the order) and a better KDF; this code remains only so
    existing vault files can still be read/upgraded.
    Based on http://stackoverflow.com/a/16761459
    """

    def __init__(self):
        # refuse to construct at all if pycrypto lacks AES support
        if not HAS_AES:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
        """ Create a key and an initialization vector """
        # OpenSSL EVP_BytesToKey-style derivation: repeatedly hash
        # md5(previous_digest + password + salt) until enough material
        # for key + IV has accumulated.  md5 is weak; kept only for
        # compatibility with the legacy on-disk format.
        d = d_i = ''
        while len(d) < key_length + iv_length:
            d_i = md5(d_i + password + salt).digest()
            d += d_i
        key = d[:key_length]
        iv = d[key_length:key_length+iv_length]
        return key, iv

    def encrypt(self, data, password, key_length=32):
        """ Read plaintext data from in_file and write encrypted to out_file """
        # combine sha + data: prepend a SHA-256 of the plaintext so
        # decrypt() can detect a wrong password / corrupted payload
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data
        in_file = BytesIO(tmp_data)
        in_file.seek(0)
        out_file = BytesIO()
        bs = AES.block_size
        # Get a block of random data. EL does not have Crypto.Random.new()
        # so os.urandom is used for cross platform purposes
        salt = os.urandom(bs - len('Salted__'))
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # OpenSSL-compatible header: literal 'Salted__' then the salt
        out_file.write('Salted__' + salt)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                # pad the final chunk with N bytes of value N; a full
                # extra block is added when the data is block-aligned
                padding_length = (bs - len(chunk) % bs) or bs
                chunk += padding_length * chr(padding_length)
                finished = True
            out_file.write(cipher.encrypt(chunk))
        out_file.seek(0)
        enc_data = out_file.read()
        # hex-encode the whole blob for the text-based vault format
        tmp_data = hexlify(enc_data)
        return tmp_data

    def decrypt(self, data, password, key_length=32):
        """ Read encrypted data from in_file and write decrypted to out_file """
        # http://stackoverflow.com/a/14989032
        # drop the 80-column line wrapping added by VaultLib, then unhex
        data = ''.join(data.split('\n'))
        data = unhexlify(data)
        in_file = BytesIO(data)
        in_file.seek(0)
        out_file = BytesIO()
        bs = AES.block_size
        # skip the 'Salted__' marker and recover the salt
        salt = in_file.read(bs)[len('Salted__'):]
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = ''
        finished = False
        while not finished:
            # decrypt one chunk ahead: the final chunk is the one whose
            # successor is empty, and only it carries padding to strip
            chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                padding_length = ord(chunk[-1])
                chunk = chunk[:-padding_length]
                finished = True
            out_file.write(chunk)
        # reset the stream pointer to the beginning
        out_file.seek(0)
        new_data = out_file.read()
        # split out sha and verify decryption
        split_data = new_data.split("\n")
        this_sha = split_data[0]
        this_data = '\n'.join(split_data[1:])
        test_sha = sha256(this_data).hexdigest()
        if this_sha != test_sha:
            raise errors.AnsibleError("Decryption failed")
        return this_data
class VaultAES256(object):
    """
    Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
    Keys are derived using PBKDF2
    """
    # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html

    def __init__(self):
        # Counter and PBKDF2 need pycrypto >= 2.6.1; fail fast otherwise
        if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def gen_key_initctr(self, password, salt):
        """Derive (cipher_key, hmac_key, hex_iv) from password and salt."""
        # 16 for AES 128, 32 for AES256
        keylength = 32
        # match the size used for counter.new to avoid extra work
        ivlength = 16
        hash_function = SHA256
        # make two keys and one iv
        pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
        derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
                            count=10000, prf=pbkdf2_prf)
        # slice the derived material: cipher key, then HMAC key, then IV
        key1 = derivedkey[:keylength]
        key2 = derivedkey[keylength:(keylength * 2)]
        iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
        return key1, key2, hexlify(iv)

    def encrypt(self, data, password):
        """Encrypt-then-MAC; returns hex(salt\\nhmac\\nhex(ciphertext))."""
        salt = os.urandom(32)
        key1, key2, iv = self.gen_key_initctr(password, salt)
        # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
        bs = AES.block_size
        padding_length = (bs - len(data) % bs) or bs
        data += padding_length * chr(padding_length)
        # COUNTER.new PARAMETERS
        # 1) nbits (integer) - Length of the counter, in bits.
        # 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
        ctr = Counter.new(128, initial_value=long(iv, 16))
        # AES.new PARAMETERS
        # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
        # 2) MODE_CTR, is the recommended mode
        # 3) counter=<CounterObject>
        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
        # ENCRYPT PADDED DATA
        cryptedData = cipher.encrypt(data)
        # COMBINE SALT, DIGEST AND DATA (MAC is over the ciphertext:
        # encrypt-then-MAC, so decrypt() can verify before decrypting)
        hmac = HMAC.new(key2, cryptedData, SHA256)
        message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
        message = hexlify(message)
        return message

    def decrypt(self, data, password):
        """Verify the HMAC, then decrypt; returns None on MAC mismatch."""
        # SPLIT SALT, DIGEST, AND DATA
        data = ''.join(data.split("\n"))
        data = unhexlify(data)
        salt, cryptedHmac, cryptedData = data.split("\n", 2)
        salt = unhexlify(salt)
        cryptedData = unhexlify(cryptedData)
        key1, key2, iv = self.gen_key_initctr(password, salt)
        # EXIT EARLY IF DIGEST DOESN'T MATCH
        hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
        if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
            return None
        # SET THE COUNTER AND THE CIPHER
        ctr = Counter.new(128, initial_value=long(iv, 16))
        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
        # DECRYPT PADDED DATA
        decryptedData = cipher.decrypt(cryptedData)
        # UNPAD DATA: last byte encodes the pad length (PKCS#7)
        padding_length = ord(decryptedData[-1])
        decryptedData = decryptedData[:-padding_length]
        return decryptedData

    def is_equal(self, a, b):
        """Constant-time comparison of two equal-length strings.

        http://codahale.com/a-lesson-in-timing-attacks/
        """
        if len(a) != len(b):
            return False
        # accumulate the XOR of every byte pair so runtime does not
        # depend on the position of the first difference
        result = 0
        for x, y in zip(a, b):
            result |= ord(x) ^ ord(y)
        return result == 0
|
# -*- coding: utf-8 -*-
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import logging
import json
import uuid
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.forms import ModelForm
from django.forms import ModelChoiceField
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.decorators import permission_required
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views.generic import DeleteView
from django.views.generic import CreateView
from django.views.generic import UpdateView
from manager.utils import load_language
from manager.models import WorkoutLog
from exercises.models import Exercise
from exercises.models import ExerciseComment
from exercises.models import ExerciseCategory
from exercises.models import Muscle
from workout_manager.generic_views import YamlFormMixin
from workout_manager.generic_views import YamlDeleteMixin
logger = logging.getLogger('workout_manager.custom')
# ************************
# Exercise comments
# ************************
class ExerciseCommentForm(ModelForm):
    """ModelForm for ExerciseComment.

    The 'exercise' FK is excluded because the views set it themselves
    from the URL (see ExerciseCommentAddView.form_valid).
    """
    class Meta:
        model = ExerciseComment
        exclude = ('exercise',)
class ExerciseCommentEditView(YamlFormMixin, UpdateView):
    """
    Generic view to update an existing exercise comment
    """
    model = ExerciseComment
    form_class = ExerciseCommentForm
    title = ugettext_lazy('Edit exercise comment')

    def get_success_url(self):
        # back to the detail page of the comment's exercise
        exercise_id = self.object.exercise.id
        return reverse('exercises.views.exercise_view', kwargs={'id': exercise_id})

    def get_context_data(self, **kwargs):
        """Send the form's submit URL to the template."""
        context = super(ExerciseCommentEditView, self).get_context_data(**kwargs)
        edit_url = reverse('exercisecomment-edit', kwargs={'pk': self.object.id})
        context['form_action'] = edit_url
        return context
class ExerciseCommentAddView(YamlFormMixin, CreateView):
    """
    Generic view to add a new exercise comment
    """
    model = ExerciseComment
    form_class = ExerciseCommentForm
    title = ugettext_lazy('Add exercise comment')

    def form_valid(self, form):
        # attach the new comment to the exercise given in the URL
        form.instance.exercise = Exercise.objects.get(pk=self.kwargs['exercise_pk'])
        return super(ExerciseCommentAddView, self).form_valid(form)

    def get_success_url(self):
        exercise_id = self.object.exercise.id
        return reverse('exercises.views.exercise_view', kwargs={'id': exercise_id})

    def get_context_data(self, **kwargs):
        """Send the form's submit URL to the template."""
        context = super(ExerciseCommentAddView, self).get_context_data(**kwargs)
        add_url = reverse('exercisecomment-add',
                          kwargs={'exercise_pk': self.kwargs['exercise_pk']})
        context['form_action'] = add_url
        return context
@permission_required('manager.add_exercisecomment')
def exercisecomment_delete(request, id):
    """Delete the comment and redirect to its exercise's detail page."""
    comment = get_object_or_404(ExerciseComment, pk=id)
    # remember the parent exercise before the comment disappears
    exercise_id = comment.exercise.id
    comment.delete()
    detail_url = reverse('exercises.views.exercise_view', kwargs={'id': exercise_id})
    return HttpResponseRedirect(detail_url)
# ************************
# Exercises
# ************************
def exercise_overview(request):
    """Render the overview page listing all exercise categories."""
    context = {}
    context.update(csrf(request))
    # only show categories in the visitor's current language
    current_language = load_language()
    context['categories'] = ExerciseCategory.objects.filter(language=current_language.id)
    return render_to_response('overview.html',
                              context,
                              context_instance=RequestContext(request))
from django.views.generic import ListView
# NOTE(review): this import belongs in the import block at the top of the file.


class MuscleListView(ListView):
    '''
    Overview of all muscles and their exercises
    '''
    model = Muscle
    # BUG FIX: the original line ended with a trailing comma, which made
    # this attribute a 1-tuple containing the queryset rather than the
    # queryset itself, breaking ListView's object-list handling.
    queryset = Muscle.objects.all().order_by('-is_front', 'name')
    context_object_name = 'muscle_list'
    template_name = 'muscle_overview.html'

    def get_context_data(self, **kwargs):
        '''
        Set the language and navigation tab
        '''
        context = super(MuscleListView, self).get_context_data(**kwargs)
        context['language'] = load_language()
        return context
def exercise_view(request, id, slug=None):
    '''
    Detail view for an exercise.

    ``slug`` is accepted but unused here — presumably it only makes the
    URL human-readable; the lookup uses the numeric id (TODO confirm
    against the URL conf).
    '''
    template_data = {}
    template_data['comment_edit'] = False
    # Load the exercise itself
    exercise = get_object_or_404(Exercise, pk=id)
    template_data['exercise'] = exercise
    # Create the backgrounds that show what muscles the exercise works on.
    # Each worked muscle contributes one SVG overlay, split by body side.
    backgrounds_back = []
    backgrounds_front = []
    for muscle in exercise.muscles.all():
        if muscle.is_front:
            backgrounds_front.append('images/muscles/main/muscle-%s.svg' % muscle.id)
        else:
            backgrounds_back.append('images/muscles/main/muscle-%s.svg' % muscle.id)
    # secondary muscles use a different set of overlays ('secondary/')
    for muscle in exercise.muscles_secondary.all():
        if muscle.is_front:
            backgrounds_front.append('images/muscles/secondary/muscle-%s.svg' % muscle.id)
        else:
            backgrounds_back.append('images/muscles/secondary/muscle-%s.svg' % muscle.id)
    # Append the "main" background, with the silhouette of the human body
    # This has to happen as the last step, so it is rendered behind the muscles.
    if backgrounds_front:
        backgrounds_front.append('images/muscles/muscular_system_front.svg')
    if backgrounds_back:
        backgrounds_back.append('images/muscles/muscular_system_back.svg')
    template_data['muscle_backgrounds_front'] = backgrounds_front
    template_data['muscle_backgrounds_back'] = backgrounds_back
    # If the user is logged in, load the log and prepare the entries for
    # rendering in the D3 chart
    entry = WorkoutLog()
    entry_log = []
    chart_data = []
    if request.user.is_authenticated():
        logs = WorkoutLog.objects.filter(user=request.user,
                                         exercise=exercise)
        entry_log, chart_data = entry.process_log_entries(logs)
    template_data['logs'] = entry_log
    template_data['json'] = chart_data
    # unique id so several SVG charts can coexist on one page
    template_data['svg_uuid'] = str(uuid.uuid4())
    template_data['reps'] = _("Reps")
    # Render
    return render_to_response('view.html',
                              template_data,
                              context_instance=RequestContext(request))
class ExercisesEditAddView(YamlFormMixin):
    """
    Generic view to subclass from for exercise adding and editing, since they
    share all this settings
    """
    model = Exercise
    form_fields = ['name',
                   'category',
                   'muscles',
                   'muscles_secondary',
                   'description']
    # fields rendered as <select> widgets by the mixin
    select_lists = ['category']
    title = ugettext_lazy('Add exercise')
    # JS hook run by the template to enable the rich-text editor
    custom_js = 'init_tinymce();'

    def get_form_class(self):
        # Define the exercise form here because only at this point during the request
        # have we access to the currently used language. In other places Django defaults
        # to 'en-us'.
        class ExerciseForm(ModelForm):
            # restrict category choices to the request's language
            language = load_language()
            category = ModelChoiceField(queryset=ExerciseCategory.objects.filter(language=language.id))

            class Meta:
                model = Exercise

            class Media:
                # TinyMCE is needed for the description field's editor
                js = ('js/tinymce/tiny_mce.js',)

        return ExerciseForm
class ExerciseUpdateView(ExercisesEditAddView, UpdateView):
    """
    Generic view to update an existing exercise
    """

    def get_context_data(self, **kwargs):
        """Send the edit URL and a localised title to the template."""
        context = super(ExerciseUpdateView, self).get_context_data(**kwargs)
        edit_url = reverse('exercise-edit', kwargs={'pk': self.object.id})
        context['form_action'] = edit_url
        context['title'] = _('Edit %s') % self.object.name
        return context
class ExerciseAddView(ExercisesEditAddView, CreateView):
    """
    Generic view to add a new exercise
    """
    # static URL, so reverse_lazy at class-definition time is fine
    form_action = reverse_lazy('exercise-add')
class ExerciseDeleteView(YamlDeleteMixin, DeleteView):
    """
    Generic view to delete an existing exercise
    """
    model = Exercise
    success_url = reverse_lazy('exercises.views.exercise_overview')
    delete_message = ugettext_lazy('This will delete the exercise from all workouts.')

    def get_context_data(self, **kwargs):
        """Send a localised title and the delete URL to the template."""
        context = super(ExerciseDeleteView, self).get_context_data(**kwargs)
        delete_url = reverse('exercise-delete', kwargs={'pk': self.kwargs['pk']})
        context['title'] = _('Delete exercise %s?') % self.object.name
        context['form_action'] = delete_url
        return context
def exercise_search(request):
    """Search an exercise, return the result as a JSON list"""
    # Perform the search
    term = request.GET.get('term', '')
    user_language = load_language()
    exercises = (Exercise.objects
                 .filter(name__icontains=term, category__language_id=user_language)
                 .order_by('category__name', 'name'))
    if request.is_ajax():
        # AJAX-request, this comes from the autocompleter: build a list
        # of dicts and send it back as JSON
        results = [{'id': exercise.id,
                    'name': exercise.name,
                    'value': exercise.name,
                    'category': exercise.category.name}
                   for exercise in exercises]
        data = json.dumps(results)
        # Return the results to the server
        mimetype = 'application/json'
        return HttpResponse(data, mimetype)
    # Usual search (perhaps JS disabled), present the results as normal HTML page
    else:
        template_data = {}
        template_data.update(csrf(request))
        template_data['exercises'] = exercises
        template_data['search_term'] = term
        return render_to_response('exercise_search.html',
                                  template_data,
                                  context_instance=RequestContext(request))
# ************************
# Exercise categories
# ************************
class ExerciseCategoryForm(ModelForm):
    """ModelForm for ExerciseCategory.

    The 'language' field is excluded because the views always set it to
    the request's current language in form_valid().
    """
    class Meta:
        model = ExerciseCategory
        exclude = ('language',)
class ExerciseCategoryAddView(YamlFormMixin, CreateView):
    """
    Generic view to add a new exercise category
    """
    model = ExerciseCategory
    form_class = ExerciseCategoryForm
    success_url = reverse_lazy('exercises.views.exercise_overview')
    title = ugettext_lazy('Add category')
    form_action = reverse_lazy('exercisecategory-add')

    def form_valid(self, form):
        # categories are always created in the visitor's current language
        form.instance.language = load_language()
        return super(ExerciseCategoryAddView, self).form_valid(form)
class ExerciseCategoryUpdateView(YamlFormMixin, UpdateView):
    """
    Generic view to update an existing exercise category
    """
    model = ExerciseCategory
    form_class = ExerciseCategoryForm
    success_url = reverse_lazy('exercises.views.exercise_overview')

    def get_context_data(self, **kwargs):
        """Send the edit URL and a localised title to the template."""
        context = super(ExerciseCategoryUpdateView, self).get_context_data(**kwargs)
        edit_url = reverse('exercisecategory-edit', kwargs={'pk': self.object.id})
        context['form_action'] = edit_url
        context['title'] = _('Edit %s') % self.object.name
        return context

    def form_valid(self, form):
        # keep the category pinned to the visitor's current language
        form.instance.language = load_language()
        return super(ExerciseCategoryUpdateView, self).form_valid(form)
class ExerciseCategoryDeleteView(YamlDeleteMixin, DeleteView):
    """
    Generic view to delete an existing exercise category
    """
    model = ExerciseCategory
    success_url = reverse_lazy('exercises.views.exercise_overview')
    delete_message = ugettext_lazy('This will also delete all exercises in this category.')
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseCategoryDeleteView, self).get_context_data(**kwargs)
        context['title'] = _('Delete category %s?') % self.object.name
        # BUG FIX: this used to reverse 'exercise-delete', posting the
        # confirmation form to the *exercise* delete URL with a category pk.
        # Use the category URL name, consistent with 'exercisecategory-add'
        # and 'exercisecategory-edit' in the sibling views.
        # TODO(review): confirm 'exercisecategory-delete' exists in urls.py.
        context['form_action'] = reverse('exercisecategory-delete',
                                         kwargs={'pk': self.kwargs['pk']})
        return context
Remove unnecessary code
--HG--
branch : 1.1-dev
# -*- coding: utf-8 -*-
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import logging
import json
import uuid
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.forms import ModelForm
from django.forms import ModelChoiceField
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.decorators import permission_required
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from django.views.generic import DeleteView
from django.views.generic import CreateView
from django.views.generic import ListView
from django.views.generic import UpdateView
from manager.utils import load_language
from manager.models import WorkoutLog
from exercises.models import Exercise
from exercises.models import ExerciseComment
from exercises.models import ExerciseCategory
from exercises.models import Muscle
from workout_manager.generic_views import YamlFormMixin
from workout_manager.generic_views import YamlDeleteMixin
logger = logging.getLogger('workout_manager.custom')
# ************************
# Exercise comments
# ************************
class ExerciseCommentForm(ModelForm):
    """
    Form for an exercise comment; the exercise itself is excluded because
    the views attach it from the URL.
    """
    class Meta:
        model = ExerciseComment
        exclude = ('exercise',)
class ExerciseCommentEditView(YamlFormMixin, UpdateView):
    """
    Generic view to update an existing exercise comment
    """
    model = ExerciseComment
    form_class = ExerciseCommentForm
    title = ugettext_lazy('Edit exercise comment')
    def get_success_url(self):
        # Redirect back to the detail page of the commented exercise
        return reverse('exercises.views.exercise_view', kwargs={'id': self.object.exercise.id})
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseCommentEditView, self).get_context_data(**kwargs)
        context['form_action'] = reverse('exercisecomment-edit',
                                         kwargs={'pk': self.object.id})
        return context
class ExerciseCommentAddView(YamlFormMixin, CreateView):
    """
    Generic view to add a new exercise comment
    """
    model = ExerciseComment
    form_class = ExerciseCommentForm
    title = ugettext_lazy('Add exercise comment')
    def form_valid(self, form):
        # Attach the comment to the exercise given in the URL
        form.instance.exercise = Exercise.objects.get(pk=self.kwargs['exercise_pk'])
        return super(ExerciseCommentAddView, self).form_valid(form)
    def get_success_url(self):
        # Redirect back to the detail page of the commented exercise
        return reverse('exercises.views.exercise_view', kwargs={'id': self.object.exercise.id})
    def get_context_data(self, **kwargs):
        '''
        Send some additional data to the template
        '''
        context = super(ExerciseCommentAddView, self).get_context_data(**kwargs)
        context['form_action'] = reverse('exercisecomment-add',
                                         kwargs={'exercise_pk': self.kwargs['exercise_pk']})
        return context
@permission_required('manager.add_exercisecomment')
def exercisecomment_delete(request, id):
    """
    Delete the exercise comment with the given pk and redirect back to the
    detail page of the exercise it belonged to.
    """
    # NOTE(review): guarded by the *add* permission rather than a delete
    # permission -- presumably intentional, but worth confirming.
    # Load the comment
    comment = get_object_or_404(ExerciseComment, pk=id)
    exercise_id = comment.exercise.id
    comment.delete()
    return HttpResponseRedirect(reverse('exercises.views.exercise_view', kwargs={'id': exercise_id}))
# ************************
# Exercises
# ************************
def exercise_overview(request):
    """
    Overview page: all exercise categories for the current language.
    """
    language = load_language()
    template_data = dict(csrf(request),
                         categories=ExerciseCategory.objects.filter(language=language.id))
    return render_to_response('overview.html',
                              template_data,
                              context_instance=RequestContext(request))
class MuscleListView(ListView):
    '''
    Overview of all muscles and their exercises
    '''
    model = Muscle
    # BUG FIX: the original line ended with a stray comma, which made
    # ``queryset`` a 1-tuple instead of a QuerySet and broke the generic
    # ListView.  (The ``from django.views.generic import ListView`` line
    # that sat here has been moved to the imports at the top of the file.)
    queryset = Muscle.objects.all().order_by('-is_front', 'name')
    context_object_name = 'muscle_list'
    template_name = 'muscle_overview.html'
def exercise_view(request, id, slug=None):
    '''
    Detail view for an exercise
    '''
    exercise = get_object_or_404(Exercise, pk=id)
    template_data = {'comment_edit': False,
                     'exercise': exercise}

    # Collect the muscle overlay images: primary muscles from ``muscles``,
    # secondary ones from ``muscles_secondary``.
    backgrounds_front = []
    backgrounds_back = []
    for muscle in exercise.muscles.all():
        side = backgrounds_front if muscle.is_front else backgrounds_back
        side.append('images/muscles/main/muscle-%s.svg' % muscle.id)
    for muscle in exercise.muscles_secondary.all():
        side = backgrounds_front if muscle.is_front else backgrounds_back
        side.append('images/muscles/secondary/muscle-%s.svg' % muscle.id)

    # The silhouette of the human body is appended last so that it is
    # rendered behind the muscle overlays.
    if backgrounds_front:
        backgrounds_front.append('images/muscles/muscular_system_front.svg')
    if backgrounds_back:
        backgrounds_back.append('images/muscles/muscular_system_back.svg')
    template_data['muscle_backgrounds_front'] = backgrounds_front
    template_data['muscle_backgrounds_back'] = backgrounds_back

    # For authenticated users load the workout log for this exercise and
    # prepare the entries for rendering in the D3 chart.
    entry = WorkoutLog()
    entry_log = []
    chart_data = []
    if request.user.is_authenticated():
        logs = WorkoutLog.objects.filter(user=request.user,
                                         exercise=exercise)
        entry_log, chart_data = entry.process_log_entries(logs)
    template_data['logs'] = entry_log
    template_data['json'] = chart_data
    template_data['svg_uuid'] = str(uuid.uuid4())
    template_data['reps'] = _("Reps")

    return render_to_response('view.html',
                              template_data,
                              context_instance=RequestContext(request))
class ExercisesEditAddView(YamlFormMixin):
    """
    Generic view to subclass from for exercise adding and editing, since they
    share all this settings
    """
    model = Exercise
    form_fields = ['name',
                   'category',
                   'muscles',
                   'muscles_secondary',
                   'description']
    select_lists = ['category']
    title = ugettext_lazy('Add exercise')
    custom_js = 'init_tinymce();'
    def get_form_class(self):
        # Define the exercise form here because only at this point during the request
        # have we access to the currently used language. In other places Django defaults
        # to 'en-us'.
        class ExerciseForm(ModelForm):
            # Restrict the selectable categories to the current language
            language = load_language()
            category = ModelChoiceField(queryset=ExerciseCategory.objects.filter(language=language.id))
            class Meta:
                model = Exercise
            class Media:
                js = ('js/tinymce/tiny_mce.js',)
        return ExerciseForm
class ExerciseUpdateView(ExercisesEditAddView, UpdateView):
    """
    Generic view to update an existing exercise
    """
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseUpdateView, self).get_context_data(**kwargs)
        context['form_action'] = reverse('exercise-edit', kwargs={'pk': self.object.id})
        context['title'] = _('Edit %s') % self.object.name
        return context
class ExerciseAddView(ExercisesEditAddView, CreateView):
    """
    Generic view to add a new exercise
    """
    form_action = reverse_lazy('exercise-add')
class ExerciseDeleteView(YamlDeleteMixin, DeleteView):
    """
    Generic view to delete an existing exercise
    """
    model = Exercise
    success_url = reverse_lazy('exercises.views.exercise_overview')
    delete_message = ugettext_lazy('This will delete the exercise from all workouts.')
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseDeleteView, self).get_context_data(**kwargs)
        # Confirmation page: title and the URL the delete form posts to
        context['title'] = _('Delete exercise %s?') % self.object.name
        context['form_action'] = reverse('exercise-delete', kwargs={'pk': self.kwargs['pk']})
        return context
def exercise_search(request):
    """Search an exercise, return the result as a JSON list
    """
    # Perform the search, limited to the current language
    q = request.GET.get('term', '')
    user_language = load_language()
    exercises = (Exercise.objects
                 .filter(name__icontains=q, category__language_id=user_language)
                 .order_by('category__name', 'name'))

    if request.is_ajax():
        # AJAX-request, this comes from the autocompleter. Create a list and send
        # it back as JSON
        results = [{'id': exercise.id,
                    'name': exercise.name,
                    'value': exercise.name,
                    'category': exercise.category.name}
                   for exercise in exercises]
        return HttpResponse(json.dumps(results), 'application/json')
    # Usual search (perhaps JS disabled), present the results as normal HTML page
    else:
        template_data = {}
        template_data.update(csrf(request))
        template_data['exercises'] = exercises
        template_data['search_term'] = q
        return render_to_response('exercise_search.html',
                                  template_data,
                                  context_instance=RequestContext(request))
# ************************
# Exercise categories
# ************************
class ExerciseCategoryForm(ModelForm):
    """
    Form for an exercise category; the language is excluded because the
    views set it programmatically from the current request.
    """
    class Meta:
        model = ExerciseCategory
        exclude = ('language',)
class ExerciseCategoryAddView(YamlFormMixin, CreateView):
    """
    Generic view to add a new exercise category
    """
    model = ExerciseCategory
    form_class = ExerciseCategoryForm
    success_url = reverse_lazy('exercises.views.exercise_overview')
    title = ugettext_lazy('Add category')
    form_action = reverse_lazy('exercisecategory-add')
    def form_valid(self, form):
        # The language is not part of the form; set it from the request
        form.instance.language = load_language()
        return super(ExerciseCategoryAddView, self).form_valid(form)
class ExerciseCategoryUpdateView(YamlFormMixin, UpdateView):
    """
    Generic view to update an existing exercise category
    """
    model = ExerciseCategory
    form_class = ExerciseCategoryForm
    success_url = reverse_lazy('exercises.views.exercise_overview')
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseCategoryUpdateView, self).get_context_data(**kwargs)
        context['form_action'] = reverse('exercisecategory-edit', kwargs={'pk': self.object.id})
        context['title'] = _('Edit %s') % self.object.name
        return context
    def form_valid(self, form):
        # The language is not part of the form; set it from the request
        form.instance.language = load_language()
        return super(ExerciseCategoryUpdateView, self).form_valid(form)
class ExerciseCategoryDeleteView(YamlDeleteMixin, DeleteView):
    """
    Generic view to delete an existing exercise category
    """
    model = ExerciseCategory
    success_url = reverse_lazy('exercises.views.exercise_overview')
    delete_message = ugettext_lazy('This will also delete all exercises in this category.')
    # Send some additional data to the template
    def get_context_data(self, **kwargs):
        context = super(ExerciseCategoryDeleteView, self).get_context_data(**kwargs)
        context['title'] = _('Delete category %s?') % self.object.name
        # BUG FIX: this used to reverse 'exercise-delete', posting the
        # confirmation form to the *exercise* delete URL with a category pk.
        # Use the category URL name, consistent with 'exercisecategory-add'
        # and 'exercisecategory-edit' in the sibling views.
        # TODO(review): confirm 'exercisecategory-delete' exists in urls.py.
        context['form_action'] = reverse('exercisecategory-delete',
                                         kwargs={'pk': self.kwargs['pk']})
        return context
|
# -*- coding: cp1252 -*-
from __future__ import division
from fractions import Fraction
import os
import exifread
import sys
import math
import glob
# iso = tags['EXIF ISOSpeedRatings']
# length35mm = float(tags['EXIF FocalLengthIn35mmFilm'].printable)
# To be done:
# def calculateEffectiveFocalLength(focal_length):
def getExposureRatio(exposure):
    """Map an exposure time in seconds to a blur penalty in [0.0, 1.0].

    Exposures of 1/1000 s or faster count as fully sharp (0.0), exposures
    slower than 1/4 s as fully blurry (1.0).  In between, the result is
    the linear position of log2(exposure) on the [-10, -2] scale.
    """
    if exposure < 1 / 1000:  # faster than 1/1000 s counts as sharp
        return 0.0
    if exposure > 1 / 4:     # slower than 1/4 s counts as blurry
        return 1.0
    log_exposure = math.log(exposure, 2)
    # Avoid shadowing the builtins min/max (the original did)
    lo = -10
    hi = -2
    return (log_exposure - lo) / (hi - lo)
def parseExif(pathToImage):
    """Read and return the EXIF tag dict of the image at *pathToImage*."""
    # Open image file for reading (binary mode); the context manager
    # guarantees the handle is closed (the original leaked it, and also
    # left a stray trailing semicolon).
    with open(pathToImage, 'rb') as image:
        return exifread.process_file(image)
if __name__ == "__main__":
    # Score every *.jpg in the current directory by exposure time and
    # print the average blur ratio.  (Python 2 script: print statements.)
    # tiedosto = sys.argv[1]
    # folder = os.getcwd()
    points = 0
    num_files = 0
    for tiedosto in glob.glob('*.jpg'):
        tags = parseExif(tiedosto)
        if "EXIF ExposureTime" in tags:
            exposure = tags["EXIF ExposureTime"]
            # NOTE(review): eval() of the EXIF 'printable' string (e.g. a
            # fraction like 1/250) executes arbitrary text from the image
            # file; fractions.Fraction would be a safe replacement.
            exposure = eval(exposure.printable)
            # print tiedosto + ": "
            ratio = getExposureRatio(exposure)
            print ratio
            points += ratio
            num_files += 1
    # "Keskiarvo" is Finnish for "average".
    # NOTE(review): raises ZeroDivisionError when no image had an
    # ExposureTime tag.
    print "Keskiarvo: "
    print points / num_files
nopeampi kuvienkäsittely
# -*- coding: cp1252 -*-
from __future__ import division
from fractions import Fraction
import os
import exifread
import sys
import math
import glob
# iso = tags['EXIF ISOSpeedRatings']
# length35mm = float(tags['EXIF FocalLengthIn35mmFilm'].printable)
# To be done:
# def calculateEffectiveFocalLength(focal_length):
def getExposureRatio(exposure):
    """Map an exposure time in seconds to a blur penalty in [0.0, 1.0].

    Exposures of 1/1000 s or faster count as fully sharp (0.0), exposures
    slower than 1/4 s as fully blurry (1.0).  In between, the result is
    the linear position of log2(exposure) on the [-10, -2] scale.
    """
    if exposure < 1 / 1000:  # faster than 1/1000 s counts as sharp
        return 0.0
    if exposure > 1 / 4:     # slower than 1/4 s counts as blurry
        return 1.0
    log_exposure = math.log(exposure, 2)
    # Avoid shadowing the builtins min/max (the original did)
    lo = -10
    hi = -2
    return (log_exposure - lo) / (hi - lo)
def parseExif(pathToImage):
    """Read and return the EXIF tag dict of the image at *pathToImage*.

    ``details=False`` skips thumbnail/makernote processing for speed.
    """
    # Open image file for reading (binary mode); the context manager
    # guarantees the handle is closed (the original leaked it).
    with open(pathToImage, 'rb') as image:
        return exifread.process_file(image, details=False)
if __name__ == "__main__":
    # Score every *.jpg in the current directory by exposure time and
    # print the average blur ratio.  (Python 2 script: print statements.)
    # tiedosto = sys.argv[1]
    # folder = os.getcwd()
    points = 0
    num_files = 0
    for tiedosto in glob.glob('*.jpg'):
        tags = parseExif(tiedosto)
        if "EXIF ExposureTime" in tags:
            exposure = tags["EXIF ExposureTime"]
            # NOTE(review): eval() of the EXIF 'printable' string (e.g. a
            # fraction like 1/250) executes arbitrary text from the image
            # file; fractions.Fraction would be a safe replacement.
            exposure = eval(exposure.printable)
            # print tiedosto + ": "
            ratio = getExposureRatio(exposure)
            print tiedosto + ": " + str(ratio)
            points += ratio
            num_files += 1
    # "Keskiarvo" is Finnish for "average".
    # NOTE(review): raises ZeroDivisionError when no image had an
    # ExposureTime tag.
    print "Keskiarvo: "
    print points / num_files
|
from eppy.xmldict import XmlDictObject, _BASE_NSMAP, dict2xml, ElementTree
import copy
from . import childorder
from .utils import gen_trid
# Namespace map shared by all EPP documents: the base XML namespaces plus
# the standard EPP object and extension namespaces registered below.
EPP_NSMAP = dict(_BASE_NSMAP)
# Standard EPP object namespaces (domain / host / contact mappings)
EPP_STD_OBJECTS_MAP = {
    'domain': 'urn:ietf:params:xml:ns:domain-1.0',
    'host': 'urn:ietf:params:xml:ns:host-1.0',
    'contact': 'urn:ietf:params:xml:ns:contact-1.0',
}
# Standard EPP extension namespaces
EPP_STD_EXT_MAP = {
    'rgp': 'urn:ietf:params:xml:ns:rgp-1.0',
}
EPP_NSMAP.update(EPP_STD_OBJECTS_MAP)
EPP_NSMAP.update(EPP_STD_EXT_MAP)
# Further well-known namespaces; the empty prefix maps to the EPP default
EPP_NSMAP.update({
    '': 'urn:ietf:params:xml:ns:epp-1.0',
    'epp': 'urn:ietf:params:xml:ns:epp-1.0',
    'secDNS10': 'urn:ietf:params:xml:ns:secDNS-1.0',
    'secDNS': 'urn:ietf:params:xml:ns:secDNS-1.1',
    'namestoreExt': 'http://www.verisign-grs.com/epp/namestoreExt-1.1',
    'launch': 'urn:ietf:params:xml:ns:launch-1.0',
    'smd': 'urn:ietf:params:xml:ns:signedMark-1.0',
    'mark': 'urn:ietf:params:xml:ns:mark-1.0',
})
class EppDoc(XmlDictObject):
    """
    Base class for EPP XML documents.

    Subclasses declare ``_path`` (the tuple of element names from the root
    down to the node the command lives at) and optionally ``_childorder``
    (element serialization order) and ``_nsmap`` (namespace map); this
    class wires those into the dict representation that ``XmlDictObject``
    serializes to and from XML.
    """
    def __init__(self, dct=None, nsmap=None, extra_nsmap=None):
        # NOTE: setting attributes in __init__ will require special handling, see XmlDictObject
        if not nsmap:
            # Fall back to the class-level namespace map, or the module default
            nsmap = getattr(self.__class__, '_nsmap', EPP_NSMAP).copy()
        if not dct:
            # No payload supplied: start from the skeleton for this command
            dct = self.cmddef()
        super(EppDoc, self).__init__(dct, nsmap=nsmap, extra_nsmap=extra_nsmap)
    def to_xml(self, force_prefix):
        # build a dictionary containing the definition of the order that child elements should be serialized
        # NOTE: this does not contain the root element
        # ``self._childorder`` is defined relative to self._path, so we do some tree grafting here
        qualified_childorder = dpath_make(self._path[1:])
        if self._path[1:]:
            dpath_get(qualified_childorder, self._path[1:-1])[self._path[-1]] = self._childorder
        else:
            qualified_childorder = self._childorder
        return super(EppDoc, self).to_xml(qualified_childorder, force_prefix=force_prefix)
    def __unicode__(self):
        # Serialized XML without forced namespace prefixes (Python 2 API)
        return self.to_xml(force_prefix=False)
    def __str__(self):
        # Python 2 byte-string representation: UTF-8 encoded XML
        return unicode(self).encode('utf-8')
    @classmethod
    def cmddef(cls):
        """
        Create an `XmlDictObject` based on the `_path` defined, and goes through each super class to wire up
        the _childorder
        """
        dct = dpath_make(cls._path)
        # we need to search mro because if we just did `cls._childorder` it could come from any superclass,
        # which may not correspond to the same level where `cls._path` is defined.
        # Also, we want to be able to have each level define its own childorder.
        for aclass in cls.__mro__:
            if aclass == EppDoc:
                # done searching
                break
            if '_childorder' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_order'] = aclass._childorder.get('__order', tuple())
            if '_nsmap' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_nsmap'] = aclass._nsmap
        return dct
    @classmethod
    def annotate(cls, dct=None):
        """
        annotate the given `dct` (or create an empty one) by wiring up the _childorder and _nsmap fields
        """
        dct = dct or dpath_make(cls._path)
        # we need to search mro because if we just did `cls._childorder` it could come from any superclass,
        # which may not correspond to the same level where `cls._path` is defined.
        # Also, we want to be able to have each level define its own childorder.
        for aclass in cls.__mro__:
            if aclass == EppDoc:
                # done searching
                break
            if '_childorder' in aclass.__dict__:
                # recursively annotate the dict items
                cls._annotate_order_recurse(dpath_get(dct, aclass._path), aclass._childorder)
                # dpath_get(dct, aclass._path)['_order'] = aclass._childorder['__order']
            if '_nsmap' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_nsmap'] = aclass._nsmap
        return dct
    def freeze(self):
        # Annotate this instance in place (ordering + nsmap) and return it
        return self.__class__.annotate(self)
    @classmethod
    def _annotate_order_recurse(cls, dct, childorder):
        # Copy the '__order' entries of ``childorder`` into ``dct`` as
        # '_order' keys, descending into nested dicts and list/tuple items.
        if childorder.get('__order'):
            dct['_order'] = childorder['__order']
        for k in (k for k in childorder.keys() if k != '__order'):
            child = dct.get(k)
            if isinstance(child, dict):
                cls._annotate_order_recurse(child, childorder[k])
            if isinstance(child, (list, tuple)):
                # if there are multiple elements, we need to put the `_order` key in each element
                for c in child:
                    if isinstance(c, dict):
                        cls._annotate_order_recurse(c, childorder[k])
    @classmethod
    def from_xml(cls, buf, default_prefix='epp', extra_nsmap=None):
        # Parse the XML in ``buf`` into an instance of this document class
        return super(EppDoc, cls).from_xml(buf, default_prefix=default_prefix, extra_nsmap=extra_nsmap)
    def normalize_response(self, respdoc):
        """
        perform any cleanup of a response document resulting from this command
        """
        pass
class EppHello(EppDoc):
    """
    EPP <hello> document.
    """
    _path = ('epp', 'hello')
class EppCommand(EppDoc):
    """
    Base class for EPP <command> documents.
    """
    _path = ('epp', 'command')
    _childorder = {'__order': childorder.CMD_BASE}
    def to_xml(self, force_prefix):
        # If a namestore subproduct was set on this command, emit it as a
        # namestoreExt extension before serializing, then drop the attribute.
        if hasattr(self, 'namestore_product') and self.namestore_product:
            self['epp']['command'].setdefault(
                'extension', {})['namestoreExt:namestoreExt'] = {'namestoreExt:subProduct': self.namestore_product}
            del self.namestore_product
        # Fold any 'phases' data (presumably launch-phase extension data --
        # TODO confirm) into <extension> the same way.
        if hasattr(self, 'phases') and self.phases:
            self.add_command_extension(self.phases)
            del self.phases
        return super(EppCommand, self).to_xml(force_prefix)
    def add_command_extension(self, ext_dict):
        # Merge ``ext_dict`` (an EppDoc, frozen first, or a plain dict)
        # into this command's <extension> element.
        self['epp']['command'].setdefault('extension', {}).update(ext_dict.freeze() if isinstance(ext_dict, EppDoc) else ext_dict)
    def add_clTRID(self, clTRID=None):
        # Set the client transaction id, generating one when not supplied
        self['epp']['command']['clTRID'] = clTRID or gen_trid()
class EppLoginCommand(EppCommand):
    """
    EPP <login> command.

    ``obj_uris``/``extra_obj_uris`` populate <svcs><objURI>;
    ``extra_ext_uris`` populate <svcExtension><extURI>.  Entries without a
    colon are treated as well-known namespace prefixes and looked up in
    EPP_NSMAP.
    """
    _path = ('epp', 'command', 'login')
    _childorder = {'__order': childorder.CMD_LOGIN,
                   'svcs': {'__order': ['objURI', 'svcExtension']}}
    def __init__(self, dct=None, nsmap=None, extra_nsmap=None, obj_uris=None, extra_obj_uris=None, extra_ext_uris=None, **kwargs):
        # NOTE(review): ``dct`` is ignored here (the super call passes
        # dct=None, as the original did) -- presumably intentional; confirm.
        super(EppLoginCommand, self).__init__(dct=None, nsmap=nsmap, extra_nsmap=extra_nsmap)
        if not hasattr(self, 'options'):
            self.options = {'version': '1.0', 'lang': 'en'}
            self.options._order = ['version', 'lang']
        if not hasattr(self, 'svcs'):
            extra_obj_uris = extra_obj_uris or []
            obj_uris = copy.copy(obj_uris or EPP_STD_OBJECTS_MAP.values())
            for uri in extra_obj_uris:
                if ':' not in uri:
                    # if no colon, treat it as a well-known namespace prefix
                    uri = EPP_NSMAP[uri]
                if uri not in obj_uris:
                    obj_uris.append(uri)
            self.svcs = dict(objURI=obj_uris)
            ext_uris = []
            extra_ext_uris = extra_ext_uris or []
            for uri in extra_ext_uris:
                if ':' not in uri:
                    # if no colon, treat it as a well-known namespace prefix
                    uri = EPP_NSMAP[uri]
                # BUG FIX: this used to test ``uri not in obj_uris`` (a
                # copy/paste from the object-URI loop above), so duplicate
                # extension URIs were not filtered and any extension URI
                # that also appeared in obj_uris was silently dropped.
                if uri not in ext_uris:
                    ext_uris.append(uri)
            if ext_uris:
                self.svcs.svcExtension = dict(extURI=ext_uris)
class EppLogoutCommand(EppCommand):
    """
    EPP <logout> command.
    """
    _path = ('epp', 'command', 'logout')
class EppCheckCommand(EppCommand):
    """
    Base class for EPP <check> commands.
    """
    _path = ('epp', 'command', 'check')
class EppCheckDomainCommand(EppCheckCommand):
    """
    EPP domain:check command.
    """
    _path = ('epp', 'command', 'check', 'domain:check')
class EppCheckHostCommand(EppCheckCommand):
    """
    EPP host:check command.
    """
    # CONSISTENCY FIX: previously derived from EppCommand directly, unlike
    # the domain/contact check commands.  EppCheckCommand only defines
    # its _path, which this class overrides, so behaviour is unchanged.
    _path = ('epp', 'command', 'check', 'host:check')
class EppCheckContactCommand(EppCheckCommand):
    """
    EPP contact:check command.
    """
    _path = EppCheckCommand._path + ('contact:check',)
class EppInfoCommand(EppCommand):
    """
    Base class for EPP <info> commands.
    """
    _path = ('epp', 'command', 'info')
class EppInfoDomainCommand(EppInfoCommand):
    """
    EPP domain:info command.
    """
    _path = EppInfoCommand._path + ('domain:info',)
    _childorder = {'__order': childorder.CMD_INFO_DOMAIN}
class EppInfoContactCommand(EppInfoCommand):
    """
    EPP contact:info command.
    """
    _path = EppInfoCommand._path + ('contact:info',)
    _childorder = {'__order': childorder.CMD_INFO_CONTACT}
    def normalize_response(self, respdoc):
        """
        Normalize the response: wrap bare ``voice`` and ``fax`` values in
        ``{'_text': ...}`` dicts so callers see a uniform shape.
        """
        super(EppInfoContactCommand, self).normalize_response(respdoc)
        for field in ('voice', 'fax'):
            try:
                value = respdoc.resData['contact:infData'][field]
            except (AttributeError, KeyError):
                continue
            if not isinstance(value, dict):
                respdoc.resData['contact:infData'][field] = {'_text': value}
class EppInfoHostCommand(EppInfoCommand):
    """
    EPP host:info command.
    """
    _path = EppInfoCommand._path + ('host:info',)
    def normalize_response(self, respdoc):
        """
        Normalize the response: wrap every bare ``addr`` entry in a
        ``{'_text': ...}`` dict.
        """
        super(EppInfoHostCommand, self).normalize_response(respdoc)
        try:
            addrs = respdoc.resData['host:infData']['addr']
        except (AttributeError, KeyError):
            return
        for idx, value in enumerate(addrs or []):
            if not isinstance(value, dict):
                # it should be a text value; give it the uniform dict shape
                addrs[idx] = dict(_text=value)
class EppCreateCommand(EppCommand):
    """
    Base class for EPP <create> commands.
    """
    _path = ('epp', 'command', 'create')
class EppCreateDomainCommand(EppCreateCommand):
    """
    EPP domain:create command.
    """
    _path = EppCreateCommand._path + ('domain:create',)
    _childorder = {'__order': childorder.CMD_CREATE_DOMAIN}
class EppCreateContactCommand(EppCreateCommand):
    """
    EPP contact:create command.
    """
    _path = EppCreateCommand._path + ('contact:create',)
    # Nested element ordering for postalInfo/addr and disclose sub-trees
    _childorder = {
        '__order': childorder.CMD_CREATE_CONTACT,
        'postalInfo': {
            '__order': childorder.POSTAL_INFO,
            'addr': {
                '__order': childorder.ADDR
            },
        },
        'disclose': {
            '__order': childorder.DISCLOSE
        }
    }
class EppCreateHostCommand(EppCreateCommand):
    """
    EPP host:create command.
    """
    _path = EppCreateCommand._path + ('host:create',)
    _childorder = {'__order': childorder.CMD_CREATE_HOST}
class EppRenewCommand(EppCommand):
    """
    Base class for EPP <renew> commands.
    """
    _path = ('epp', 'command', 'renew')
class EppRenewDomainCommand(EppRenewCommand):
    """
    EPP domain:renew command.
    """
    _path = EppRenewCommand._path + ('domain:renew',)
    _childorder = {'__order': childorder.CMD_RENEW_DOMAIN}
class EppUpdateCommand(EppCommand):
    """
    Base class for EPP <update> commands.
    """
    _path = ('epp', 'command', 'update')
class EppUpdateDomainCommand(EppUpdateCommand):
    """
    EPP domain:update command.
    """
    _path = EppUpdateCommand._path + ('domain:update',)
    _childorder = {'__order': childorder.CMD_UPDATE_DOMAIN}
    def add_secdns_data(self, data):
        # Attach a secDNS:update extension built from ``data``, a mapping
        # of action name to a list of records.  Each record carries a
        # 'type' ('ds', 'maxSigLife', or -- presumably -- 'key'; confirm)
        # and, for non-maxSigLife records, a 'data' dict of fields.
        # (Python 2 only: relies on dict.iteritems().)
        secdns_data = dict()
        for action, value in data.iteritems():
            update_data_key = 'secDNS:%s' % action
            update_data = list()
            tmp_dict = dict()
            for item in value:
                record_type = item['type']
                record_key = 'secDNS:%sData' % record_type
                if record_type == 'maxSigLife':
                    update_data.append({record_key: [item['value'], ]})
                    continue
                # Field serialization order differs between DS and key data
                if record_type == 'ds':
                    order = ['keyTag', 'alg', 'digestType', 'digest']
                else:
                    order = ['flags', 'protocol', 'alg', 'pubKey']
                record_data = dict(('secDNS:%s' % k, v) for k, v in item['data'].iteritems())
                record_data['_order'] = order
                update_data.append({record_key: record_data})
            # Merge repeated record keys into lists so multiple records of
            # the same kind serialize as sibling elements.
            for item in update_data:
                for key, val in item.iteritems():
                    if key in tmp_dict:
                        tmp_dict[key].append(val)
                    else:
                        tmp_dict[key] = [val, ]
            update_data = [{k: v[0] if len(v) == 1 else v} for k, v in tmp_dict.iteritems()]
            secdns_data[update_data_key] = update_data
        self['epp']['command'].setdefault('extension', {})['secDNS:update'] = secdns_data
class EppUpdateContactCommand(EppUpdateCommand):
    """
    EPP contact:update command.
    """
    _path = EppUpdateCommand._path + ('contact:update',)
    # Nested element ordering for the chg/postalInfo/addr sub-trees
    _childorder = {
        '__order': childorder.CMD_UPDATE_CONTACT,
        'chg': {
            '__order': childorder.CMD_UPDATE_CONTACT_CHG,
            'postalInfo': {
                '__order': childorder.POSTAL_INFO,
                'addr': {
                    '__order': childorder.ADDR
                },
            },
        },
    }
class EppUpdateHostCommand(EppUpdateCommand):
    """
    EPP host:update command.
    """
    _path = EppUpdateCommand._path + ('host:update',)
    # NOTE(review): reuses the *domain* update child order; confirm whether
    # a dedicated CMD_UPDATE_HOST exists/is needed in childorder.py.
    _childorder = {'__order': childorder.CMD_UPDATE_DOMAIN}
class EppDeleteCommand(EppCommand):
    """
    Base class for EPP <delete> commands.
    """
    _path = ('epp', 'command', 'delete')
class EppDeleteContactCommand(EppDeleteCommand):
    """
    EPP contact:delete command.
    """
    _path = EppDeleteCommand._path + ('contact:delete',)
class EppDeleteDomainCommand(EppDeleteCommand):
    """
    EPP domain:delete command.
    """
    _path = EppDeleteCommand._path + ('domain:delete',)
class EppDeleteHostCommand(EppDeleteCommand):
    """
    EPP host:delete command.
    """
    _path = EppDeleteCommand._path + ('host:delete',)
class EppPollCommand(EppCommand):
    """
    EPP <poll> command: ``op`` becomes the @op attribute and the optional
    ``msgID`` the @msgID attribute of the <poll> element.
    """
    _path = ('epp', 'command', 'poll')
    def __init__(self, op, msgID=None):
        attrs = {'@op': op}
        if msgID is not None:
            attrs['@msgID'] = str(msgID)
        super(EppPollCommand, self).__init__(
            {'epp': {'command': {'poll': attrs}}})
class EppTransferCommand(EppCommand):
    """
    Base class for EPP <transfer> commands; the operation is stored as the
    @op attribute of the <transfer> element.
    """
    _path = EppCommand._path + ('transfer',)
    def __init__(self, op):
        dct = self.cmddef()
        dct['epp']['command']['transfer']['@op'] = op
        super(EppTransferCommand, self).__init__(dct)
class EppTransferDomainCommand(EppTransferCommand):
    """
    EPP domain:transfer command.
    """
    _path = EppTransferCommand._path + ('domain:transfer',)
    # NOTE(review): CMD_TRANSFER_DOMAON looks like a typo of
    # CMD_TRANSFER_DOMAIN, but the constant is defined in childorder.py;
    # it must be renamed there and here together.
    _childorder = {'__order': childorder.CMD_TRANSFER_DOMAON}
    @classmethod
    def cmddef(cls):
        # Graft the domain:transfer node (and its element order) onto the
        # generic transfer skeleton.
        dct = EppTransferCommand.cmddef()
        dpath = dpath_get(dct, EppTransferCommand._path)
        dpath['domain:transfer'] = {}
        dpath = dpath_get(dct, cls._path)
        dpath['_order'] = ['name', 'period', 'authInfo']
        return dct
class EppTransferContactCommand(EppTransferCommand):
    """
    EPP contact:transfer command.
    """
    _path = EppTransferCommand._path + ('contact:transfer',)
    _childorder = {'__order': childorder.CMD_TRANSFER_CONTACT}
    @classmethod
    def cmddef(cls):
        # Same grafting as the domain variant, for contact:transfer.
        dct = EppTransferCommand.cmddef()
        dpath = dpath_get(dct, EppTransferCommand._path)
        dpath['contact:transfer'] = {}
        dpath = dpath_get(dct, cls._path)
        dpath['_order'] = ['id', 'period', 'authInfo']
        return dct
class EppResponse(EppDoc):
    """
    An EPP <response> document as returned by a server, with convenience
    accessors for the result code and message.
    """
    _path = ('epp', 'response')
    _childorder = {'__order': ('result', 'msgQ', 'resData', 'extension', 'trID')}
    # Element paths that may occur more than once and therefore must be
    # parsed into lists rather than single values.
    _multi_nodes = set([
        # If the command was processed successfully, only one <result>
        # element MUST be returned. If the command was not processed
        # successfully, multiple <result> elements MAY be returned to
        # document failure conditions.
        ('epp', 'response', 'result'),
        ('epp', 'response', 'resData', 'domain:infData', 'status'),
        ('epp', 'response', 'resData', 'domain:infData', 'ns', 'hostObj'),
        ('epp', 'response', 'resData', 'domain:infData', 'host'),
        ('epp', 'response', 'resData', 'domain:chkData', 'cd'),
        ('epp', 'response', 'resData', 'host:infData', 'status'),
        ('epp', 'response', 'resData', 'host:infData', 'addr'),
        ('epp', 'response', 'resData', 'host:chkData', 'cd'),
        ('epp', 'response', 'resData', 'contact:infData', 'status'),
        ('epp', 'response', 'resData', 'contact:infData', 'postalInfo'),
        ('epp', 'response', 'resData', 'contact:infData', 'postalInfo', 'addr', 'street'),
        ('epp', 'response', 'resData', 'contact:chkData', 'cd'),
        ('epp', 'response', 'extension', 'launch:chkData', 'cd'),
        ('epp', 'response', 'extension', 'rgp:infData', 'rgpStatus'),
    ])
    def __init__(self, dct=None, extra_nsmap=None):
        if dct is None:
            dct = {'epp': {'response': {}}}
        super(EppResponse, self).__init__(dct, extra_nsmap=extra_nsmap)
    @property
    def code(self):
        # EPP result code of the first <result>, or '0000' when absent
        res = self.first_result
        if res:
            return res['@code']
        else:
            return '0000'
    @property
    def ok(self):
        # True when the command completed successfully (code 1000)
        return self.code == '1000'
    @property
    def pending(self):
        # True when the command was accepted but is pending (code 1001)
        return self.code == '1001'
    @property
    def success(self):
        # True for both immediate and pending success
        return self.code in ('1000', '1001')
    @property
    def msg(self):
        # Human-readable message of the first result; when the result has
        # <value> elements, the first one's values are appended after ';'.
        res = self.first_result
        if res:
            m = res['msg']
            if isinstance(m, dict):
                m = m.get('_text', u'')
            value = res.get('value', [{}])
            if isinstance(value, dict):
                value = [value]
            # take the first
            valuemsg = u', '.join(value[0].values())
            if valuemsg:
                m = u'{}; {}'.format(m, valuemsg)
            return m
        else:
            return ''
    @property
    def first_result(self):
        # First <result> element, or None when the response has none
        if hasattr(self, 'result') and len(self.result):
            return self.result[0]
        else:
            return None
    @property
    def response_extension(self):
        # Raw <extension> element of the response (KeyError if missing)
        return self['epp']['response']['extension']
    def get_response_extension(self, key):
        # Look up ``key`` inside the response <extension>
        return self.response_extension[key]
def dpath_get(dct, path, default=None):
    """
    Walk ``dct`` along the keys in ``path`` and return the value found.

    At every step a missing key yields ``default`` (an empty dict when
    ``default`` is None) and the walk continues from there.
    """
    fallback = {} if default is None else default
    node = dct
    for key in path:
        node = node.get(key, fallback)
    return node
def dpath_make(path):
    """
    Build a chain of nested empty dicts, one level per element of ``path``:
    dpath_make(('a', 'b')) -> {'a': {'b': {}}}.
    """
    result = {}
    for key in reversed(path):
        result = {key: result}
    return result
if __name__ == '__main__':
    # Ad-hoc smoke test (Python 2 only): build a domain:create command,
    # serialize it to XML, parse it back, and serialize again.
    import sys
    from eppy.xmldict import xml2dict
    from StringIO import StringIO
    try:
        from simplejson import dumps as json_encode
    except ImportError:
        from json import dumps as json_encode
    cmd = EppCreateDomainCommand()
    cmd.name = 'hello.me'
    cmd.ns = dict(hostObj=['ns1.acme.com', 'ns2.acme.com'])
    cmd.contact = [{'@type': 'admin', '_text': 'wil001a'}]
    cmd.authInfo=dict(pw='fooBAR')
    #print "handcrafted = ", json_encode(cmd)
    # NOTE(review): to_xml() is called without the required force_prefix
    # argument (EppDoc.to_xml takes it positionally) -- this smoke test
    # looks stale; confirm before relying on it.
    xml = cmd.to_xml()
    print xml
    root = ElementTree.parse(StringIO(xml)).getroot()
    cmd2 = xml2dict(root, outerclass=EppCreateDomainCommand, default_prefix="epp")
    print repr(cmd2)
    print json_encode(cmd2)
    print "domain = ", cmd2.name
    print "again back to XML="
    print cmd2.to_xml()
    sys.exit(0)
    # NOTE(review): everything below is unreachable (sys.exit above);
    # kept as a reference example of the raw dict layout.
    cmd = {
        'epp:create': {
            #'{urn:ietf:params:xml:ns:domain-1.0}create': {
            'domain:create': {
                '_order': ['name', 'period', 'ns', 'registrant', 'contact', 'authInfo'],
                #'@xmlns:domain': 'urn:ietf:params:xml:ns:domain-1.0',
                'name': 'hello.com',
                'domain:registrant': 'wil001',
                'contact': [
                    {'@type': 'admin', '_text': 'wil001a'},
                    {'@type': 'billing', '_text': 'wil001b'},
                ],
                'ns': {
                    'hostObj': [
                        'ns1.example.com',
                        'ns2.example.com',
                    ]
                },
                'authInfo': {
                    'pw': 'fooBar'
                }
            }
        }
    }
    eppdoc = EppCommand(cmd)
    from xml.etree import ElementTree
    from eppy.xmldict import dict2xml
    print ElementTree.tostring(dict2xml(eppdoc))
dsData and keyData are multi nodes
from eppy.xmldict import XmlDictObject, _BASE_NSMAP, dict2xml, ElementTree
import copy
from . import childorder
from .utils import gen_trid
# Namespace map shared by all EPP documents: the base XML namespaces plus
# the standard EPP object and extension namespaces registered below.
EPP_NSMAP = dict(_BASE_NSMAP)
# Standard EPP object namespaces (domain / host / contact mappings)
EPP_STD_OBJECTS_MAP = {
    'domain': 'urn:ietf:params:xml:ns:domain-1.0',
    'host': 'urn:ietf:params:xml:ns:host-1.0',
    'contact': 'urn:ietf:params:xml:ns:contact-1.0',
}
# Standard EPP extension namespaces
EPP_STD_EXT_MAP = {
    'rgp': 'urn:ietf:params:xml:ns:rgp-1.0',
}
EPP_NSMAP.update(EPP_STD_OBJECTS_MAP)
EPP_NSMAP.update(EPP_STD_EXT_MAP)
# Further well-known namespaces; the empty prefix maps to the EPP default
EPP_NSMAP.update({
    '': 'urn:ietf:params:xml:ns:epp-1.0',
    'epp': 'urn:ietf:params:xml:ns:epp-1.0',
    'secDNS10': 'urn:ietf:params:xml:ns:secDNS-1.0',
    'secDNS': 'urn:ietf:params:xml:ns:secDNS-1.1',
    'namestoreExt': 'http://www.verisign-grs.com/epp/namestoreExt-1.1',
    'launch': 'urn:ietf:params:xml:ns:launch-1.0',
    'smd': 'urn:ietf:params:xml:ns:signedMark-1.0',
    'mark': 'urn:ietf:params:xml:ns:mark-1.0',
})
class EppDoc(XmlDictObject):
    """Base class for EPP XML documents, backed by a nested dict.

    Subclasses define ``_path`` (the tuple of element names from the root
    down to their payload element) and optionally ``_childorder`` /
    ``_nsmap`` to control serialization.
    """

    def __init__(self, dct=None, nsmap=None, extra_nsmap=None):
        # NOTE: setting attributes in __init__ will require special handling, see XmlDictObject
        if not nsmap:
            nsmap = getattr(self.__class__, '_nsmap', EPP_NSMAP).copy()
        if not dct:
            dct = self.cmddef()
        super(EppDoc, self).__init__(dct, nsmap=nsmap, extra_nsmap=extra_nsmap)

    def to_xml(self, force_prefix):
        """Serialize to XML text, enforcing this class's child ordering."""
        # build a dictionary containing the definition of the order that child elements should be serialized
        # NOTE: this does not contain the root element
        # ``self._childorder`` is defined relative to self._path, so we do some tree grafting here
        qualified_childorder = dpath_make(self._path[1:])
        if self._path[1:]:
            dpath_get(qualified_childorder, self._path[1:-1])[self._path[-1]] = self._childorder
        else:
            qualified_childorder = self._childorder
        return super(EppDoc, self).to_xml(qualified_childorder, force_prefix=force_prefix)

    def __unicode__(self):
        # Python 2 text protocol: render without forcing namespace prefixes.
        return self.to_xml(force_prefix=False)

    def __str__(self):
        # Python 2 only: ``unicode`` does not exist on Python 3.
        return unicode(self).encode('utf-8')

    @classmethod
    def cmddef(cls):
        """
        Create an `XmlDictObject` based on the `_path` defined, and goes through each super class to wire up
        the _childorder
        """
        dct = dpath_make(cls._path)
        # we need to search mro because if we just did `cls._childorder` it could come from any superclass,
        # which may not correspond to the same level where `cls._path` is defined.
        # Also, we want to be able to have each level define its own childorder.
        for aclass in cls.__mro__:
            if aclass == EppDoc:
                # done searching
                break
            if '_childorder' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_order'] = aclass._childorder.get('__order', tuple())
            if '_nsmap' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_nsmap'] = aclass._nsmap
        return dct

    @classmethod
    def annotate(cls, dct=None):
        """
        annotate the given `dct` (or create an empty one) by wiring up the _childorder and _nsmap fields
        """
        dct = dct or dpath_make(cls._path)
        # we need to search mro because if we just did `cls._childorder` it could come from any superclass,
        # which may not correspond to the same level where `cls._path` is defined.
        # Also, we want to be able to have each level define its own childorder.
        for aclass in cls.__mro__:
            if aclass == EppDoc:
                # done searching
                break
            if '_childorder' in aclass.__dict__:
                # recursively annotate the dict items
                cls._annotate_order_recurse(dpath_get(dct, aclass._path), aclass._childorder)
                # dpath_get(dct, aclass._path)['_order'] = aclass._childorder['__order']
            if '_nsmap' in aclass.__dict__:
                dpath_get(dct, aclass._path)['_nsmap'] = aclass._nsmap
        return dct

    def freeze(self):
        # Annotate this document in place with ordering/nsmap metadata and
        # return it (annotate() mutates the dict it is given).
        return self.__class__.annotate(self)

    @classmethod
    def _annotate_order_recurse(cls, dct, childorder):
        # Copy the '__order' spec into '_order' at this level, then recurse
        # into any child dicts (or into each dict element of list/tuple
        # children).
        if childorder.get('__order'):
            dct['_order'] = childorder['__order']
        for k in (k for k in childorder.keys() if k != '__order'):
            child = dct.get(k)
            if isinstance(child, dict):
                cls._annotate_order_recurse(child, childorder[k])
            if isinstance(child, (list, tuple)):
                # if there are multiple elements, we need to put the `_order` key in each element
                for c in child:
                    if isinstance(c, dict):
                        cls._annotate_order_recurse(c, childorder[k])

    @classmethod
    def from_xml(cls, buf, default_prefix='epp', extra_nsmap=None):
        """Parse *buf* (XML text) into an instance of *cls*."""
        return super(EppDoc, cls).from_xml(buf, default_prefix=default_prefix, extra_nsmap=extra_nsmap)

    def normalize_response(self, respdoc):
        """
        perform any cleanup of a response document resulting from this command
        """
        pass
class EppHello(EppDoc):
    """An <epp><hello/> document."""
    _path = ('epp', 'hello')
class EppCommand(EppDoc):
    """Base class for <epp><command> documents."""
    _path = ('epp', 'command')
    _childorder = {'__order': childorder.CMD_BASE}

    def to_xml(self, force_prefix):
        # Fold any pending namestore product / launch-phase data into the
        # command's <extension> element before serializing.
        if hasattr(self, 'namestore_product') and self.namestore_product:
            self['epp']['command'].setdefault(
                'extension', {})['namestoreExt:namestoreExt'] = {'namestoreExt:subProduct': self.namestore_product}
            del self.namestore_product
        if hasattr(self, 'phases') and self.phases:
            self.add_command_extension(self.phases)
            del self.phases
        return super(EppCommand, self).to_xml(force_prefix)

    def add_command_extension(self, ext_dict):
        # Merge *ext_dict* (an EppDoc, frozen first, or a plain dict) into
        # the command's <extension> element.
        self['epp']['command'].setdefault('extension', {}).update(ext_dict.freeze() if isinstance(ext_dict, EppDoc) else ext_dict)

    def add_clTRID(self, clTRID=None):
        # Client transaction id; generated via gen_trid() when not supplied.
        self['epp']['command']['clTRID'] = clTRID or gen_trid()
class EppLoginCommand(EppCommand):
    """<login> command; populates <svcs> with object and extension URIs.

    ``extra_obj_uris`` / ``extra_ext_uris`` entries may be full URIs or
    well-known namespace prefixes (looked up in EPP_NSMAP).
    """
    _path = ('epp', 'command', 'login')
    _childorder = {'__order': childorder.CMD_LOGIN,
                   'svcs': {'__order': ['objURI', 'svcExtension']}}

    def __init__(self, dct=None, nsmap=None, extra_nsmap=None, obj_uris=None, extra_obj_uris=None, extra_ext_uris=None, **kwargs):
        # NOTE(review): a caller-supplied ``dct`` has always been ignored
        # (super() is called with dct=None); preserved for compatibility.
        super(EppLoginCommand, self).__init__(dct=None, nsmap=nsmap, extra_nsmap=extra_nsmap)
        if not hasattr(self, 'options'):
            self.options = {'version': '1.0', 'lang': 'en'}
            self.options._order = ['version', 'lang']
        if not hasattr(self, 'svcs'):
            extra_obj_uris = extra_obj_uris or []
            obj_uris = copy.copy(obj_uris or EPP_STD_OBJECTS_MAP.values())
            for uri in extra_obj_uris:
                if ':' not in uri:
                    # if no colon, treat it as a well-known namespace prefix
                    uri = EPP_NSMAP[uri]
                if uri not in obj_uris:
                    obj_uris.append(uri)
            self.svcs = dict(objURI=obj_uris)
            ext_uris = []
            extra_ext_uris = extra_ext_uris or []
            for uri in extra_ext_uris:
                if ':' not in uri:
                    # if no colon, treat it as a well-known namespace prefix
                    uri = EPP_NSMAP[uri]
                # BUG FIX: this previously checked ``uri not in obj_uris``
                # (copy-paste from the object-URI loop above), which let
                # duplicate extension URIs through; deduplicate against the
                # list actually being built.
                if uri not in ext_uris:
                    ext_uris.append(uri)
            if ext_uris:
                self.svcs.svcExtension = dict(extURI=ext_uris)
class EppLogoutCommand(EppCommand):
    """<logout> command."""
    _path = ('epp', 'command', 'logout')


class EppCheckCommand(EppCommand):
    """Base for <check> commands; subclasses append the object element."""
    _path = ('epp', 'command', 'check')


class EppCheckDomainCommand(EppCheckCommand):
    """<check> for domain names."""
    _path = ('epp', 'command', 'check', 'domain:check')
class EppCheckHostCommand(EppCheckCommand):
    """<check> for host objects.

    Consistency fix: derive from EppCheckCommand like the domain and
    contact check commands (previously derived directly from EppCommand).
    EppCheckCommand adds nothing beyond its ``_path``, which is overridden
    here, so behavior is unchanged.
    """
    _path = ('epp', 'command', 'check', 'host:check')
class EppCheckContactCommand(EppCheckCommand):
    """<check> for contact objects."""
    _path = EppCheckCommand._path + ('contact:check',)


class EppInfoCommand(EppCommand):
    """Base for <info> commands."""
    _path = ('epp', 'command', 'info')


class EppInfoDomainCommand(EppInfoCommand):
    """<info> for a domain."""
    _path = EppInfoCommand._path + ('domain:info',)
    _childorder = {'__order': childorder.CMD_INFO_DOMAIN}
class EppInfoContactCommand(EppInfoCommand):
    """<info> for a contact."""
    _path = EppInfoCommand._path + ('contact:info',)
    _childorder = {'__order': childorder.CMD_INFO_CONTACT}

    def normalize_response(self, respdoc):
        """Normalize voice/fax in contact:infData so each is a dict with '_text'."""
        super(EppInfoContactCommand, self).normalize_response(respdoc)
        for field in ('voice', 'fax'):
            try:
                value = respdoc.resData['contact:infData'][field]
            except (AttributeError, KeyError):
                continue
            if not isinstance(value, dict):
                respdoc.resData['contact:infData'][field] = {'_text': value}
class EppInfoHostCommand(EppInfoCommand):
    """<info> for a host."""
    _path = EppInfoCommand._path + ('host:info',)

    def normalize_response(self, respdoc):
        """Wrap any bare-text host addr entries as {'_text': ...} dicts."""
        super(EppInfoHostCommand, self).normalize_response(respdoc)
        try:
            addr_list = respdoc.resData['host:infData']['addr']
        except (AttributeError, KeyError):
            return
        if not addr_list:
            return
        for position, entry in enumerate(addr_list):
            if not isinstance(entry, dict):
                # bare text -> normalized dict form
                addr_list[position] = {'_text': entry}
class EppCreateCommand(EppCommand):
    """Base for <create> commands."""
    _path = ('epp', 'command', 'create')


class EppCreateDomainCommand(EppCreateCommand):
    """<create> for a domain."""
    _path = EppCreateCommand._path + ('domain:create',)
    _childorder = {'__order': childorder.CMD_CREATE_DOMAIN}


class EppCreateContactCommand(EppCreateCommand):
    """<create> for a contact, with nested ordering for postalInfo/disclose."""
    _path = EppCreateCommand._path + ('contact:create',)
    _childorder = {
        '__order': childorder.CMD_CREATE_CONTACT,
        'postalInfo': {
            '__order': childorder.POSTAL_INFO,
            'addr': {
                '__order': childorder.ADDR
            },
        },
        'disclose': {
            '__order': childorder.DISCLOSE
        }
    }


class EppCreateHostCommand(EppCreateCommand):
    """<create> for a host."""
    _path = EppCreateCommand._path + ('host:create',)
    _childorder = {'__order': childorder.CMD_CREATE_HOST}


class EppRenewCommand(EppCommand):
    """Base for <renew> commands."""
    _path = ('epp', 'command', 'renew')


class EppRenewDomainCommand(EppRenewCommand):
    """<renew> for a domain."""
    _path = EppRenewCommand._path + ('domain:renew',)
    _childorder = {'__order': childorder.CMD_RENEW_DOMAIN}
class EppUpdateCommand(EppCommand):
    """Base for <update> commands."""
    _path = ('epp', 'command', 'update')


class EppUpdateDomainCommand(EppUpdateCommand):
    """<update> for a domain, with secDNS extension support."""
    _path = EppUpdateCommand._path + ('domain:update',)
    _childorder = {'__order': childorder.CMD_UPDATE_DOMAIN}

    def add_secdns_data(self, data):
        """Attach a secDNS:update extension built from *data*.

        *data* maps an update action name to a list of items of the form
        ``{'type': 'ds'|'key'|'maxSigLife', ...}``; 'maxSigLife' items carry
        a ``'value'`` key while ds/key items carry a ``'data'`` dict of
        field values (shape inferred from this method -- confirm with
        callers).
        """
        secdns_data = dict()
        for action, value in data.iteritems():  # Python 2 dict iteration
            update_data_key = 'secDNS:%s' % action
            update_data = list()
            tmp_dict = dict()
            for item in value:
                record_type = item['type']
                record_key = 'secDNS:%sData' % record_type
                if record_type == 'maxSigLife':
                    update_data.append({record_key: [item['value'], ]})
                    continue
                # Explicit field order for ds vs key records.
                if record_type == 'ds':
                    order = ['keyTag', 'alg', 'digestType', 'digest']
                else:
                    order = ['flags', 'protocol', 'alg', 'pubKey']
                record_data = dict(('secDNS:%s' % k, v) for k, v in item['data'].iteritems())
                record_data['_order'] = order
                update_data.append({record_key: record_data})
            # Group repeated record types into lists under a single key;
            # a single occurrence stays a bare dict.
            for item in update_data:
                for key, val in item.iteritems():
                    if key in tmp_dict:
                        tmp_dict[key].append(val)
                    else:
                        tmp_dict[key] = [val, ]
            update_data = [{k: v[0] if len(v) == 1 else v} for k, v in tmp_dict.iteritems()]
            secdns_data[update_data_key] = update_data
        self['epp']['command'].setdefault('extension', {})['secDNS:update'] = secdns_data
class EppUpdateContactCommand(EppUpdateCommand):
    """<update> for a contact, with nested ordering for the <chg> block."""
    _path = EppUpdateCommand._path + ('contact:update',)
    _childorder = {
        '__order': childorder.CMD_UPDATE_CONTACT,
        'chg': {
            '__order': childorder.CMD_UPDATE_CONTACT_CHG,
            'postalInfo': {
                '__order': childorder.POSTAL_INFO,
                'addr': {
                    '__order': childorder.ADDR
                },
            },
        },
    }


class EppUpdateHostCommand(EppUpdateCommand):
    """<update> for a host."""
    _path = EppUpdateCommand._path + ('host:update',)
    # NOTE(review): reuses CMD_UPDATE_DOMAIN for the child order; confirm
    # whether the childorder module defines a host-specific constant that
    # should be used here instead.
    _childorder = {'__order': childorder.CMD_UPDATE_DOMAIN}


class EppDeleteCommand(EppCommand):
    """Base for <delete> commands."""
    _path = ('epp', 'command', 'delete')


class EppDeleteContactCommand(EppDeleteCommand):
    """<delete> for a contact."""
    _path = EppDeleteCommand._path + ('contact:delete',)


class EppDeleteDomainCommand(EppDeleteCommand):
    """<delete> for a domain."""
    _path = EppDeleteCommand._path + ('domain:delete',)


class EppDeleteHostCommand(EppDeleteCommand):
    """<delete> for a host."""
    _path = EppDeleteCommand._path + ('host:delete',)
class EppPollCommand(EppCommand):
    """<poll> command for the server message queue."""
    _path = ('epp', 'command', 'poll')

    def __init__(self, op, msgID=None):
        """Build a poll command with operation *op* and optional message id."""
        poll_attrs = {"@op": op}
        if msgID is not None:
            poll_attrs["@msgID"] = str(msgID)
        super(EppPollCommand, self).__init__(
            {'epp': {'command': {'poll': poll_attrs}}})
class EppTransferCommand(EppCommand):
    """Base for <transfer> commands; *op* sets the @op attribute."""
    _path = EppCommand._path + ('transfer',)

    def __init__(self, op):
        dct = self.cmddef()
        dct['epp']['command']['transfer']['@op'] = op
        super(EppTransferCommand, self).__init__(dct)


class EppTransferDomainCommand(EppTransferCommand):
    """<transfer> for a domain."""
    _path = EppTransferCommand._path + ('domain:transfer',)
    # NOTE(review): 'CMD_TRANSFER_DOMAON' (sic) is the constant's actual
    # name in the childorder module; it must be renamed there before it can
    # be fixed here.
    _childorder = {'__order': childorder.CMD_TRANSFER_DOMAON}

    @classmethod
    def cmddef(cls):
        # Extend the base transfer skeleton with the domain payload and its
        # explicit child order.
        dct = EppTransferCommand.cmddef()
        dpath = dpath_get(dct, EppTransferCommand._path)
        dpath['domain:transfer'] = {}
        dpath = dpath_get(dct, cls._path)
        dpath['_order'] = ['name', 'period', 'authInfo']
        return dct


class EppTransferContactCommand(EppTransferCommand):
    """<transfer> for a contact."""
    _path = EppTransferCommand._path + ('contact:transfer',)
    _childorder = {'__order': childorder.CMD_TRANSFER_CONTACT}

    @classmethod
    def cmddef(cls):
        # Extend the base transfer skeleton with the contact payload and its
        # explicit child order.
        dct = EppTransferCommand.cmddef()
        dpath = dpath_get(dct, EppTransferCommand._path)
        dpath['contact:transfer'] = {}
        dpath = dpath_get(dct, cls._path)
        dpath['_order'] = ['id', 'period', 'authInfo']
        return dct
class EppResponse(EppDoc):
    """An <epp><response> document with convenience accessors."""
    _path = ('epp', 'response')
    _childorder = {'__order': ('result', 'msgQ', 'resData', 'extension', 'trID')}
    # Element paths that may repeat and must always parse as lists.
    _multi_nodes = set([
        # If the command was processed successfully, only one <result>
        # element MUST be returned. If the command was not processed
        # successfully, multiple <result> elements MAY be returned to
        # document failure conditions.
        ('epp', 'response', 'result'),
        ('epp', 'response', 'resData', 'domain:infData', 'status'),
        ('epp', 'response', 'resData', 'domain:infData', 'ns', 'hostObj'),
        ('epp', 'response', 'resData', 'domain:infData', 'host'),
        ('epp', 'response', 'resData', 'domain:chkData', 'cd'),
        ('epp', 'response', 'resData', 'host:infData', 'status'),
        ('epp', 'response', 'resData', 'host:infData', 'addr'),
        ('epp', 'response', 'resData', 'host:chkData', 'cd'),
        ('epp', 'response', 'resData', 'contact:infData', 'status'),
        ('epp', 'response', 'resData', 'contact:infData', 'postalInfo'),
        ('epp', 'response', 'resData', 'contact:infData', 'postalInfo', 'addr', 'street'),
        ('epp', 'response', 'resData', 'contact:chkData', 'cd'),
        ('epp', 'response', 'extension', 'launch:chkData', 'cd'),
        ('epp', 'response', 'extension', 'rgp:infData', 'rgpStatus'),
        ('epp', 'response', 'extension', 'secDNS:infData', 'dsData'),
        ('epp', 'response', 'extension', 'secDNS:infData', 'keyData'),
    ])

    def __init__(self, dct=None, extra_nsmap=None):
        if dct is None:
            dct = {'epp': {'response': {}}}
        super(EppResponse, self).__init__(dct, extra_nsmap=extra_nsmap)

    @property
    def code(self):
        # Result code string of the first <result>, or '0000' when absent.
        res = self.first_result
        if res:
            return res['@code']
        else:
            return '0000'

    @property
    def ok(self):
        # EPP 1000: command completed successfully.
        return self.code == '1000'

    @property
    def pending(self):
        # EPP 1001: command completed; action pending.
        return self.code == '1001'

    @property
    def success(self):
        # Either immediate success or pending success.
        return self.code in ('1000', '1001')

    @property
    def msg(self):
        """Message text of the first result, with any <value> text appended."""
        res = self.first_result
        if res:
            m = res['msg']
            if isinstance(m, dict):
                m = m.get('_text', u'')
            value = res.get('value', [{}])
            if isinstance(value, dict):
                value = [value]
            # take the first
            # NOTE(review): assumes the first <value>'s entries are text --
            # non-string values would break the join; confirm with servers.
            valuemsg = u', '.join(value[0].values())
            if valuemsg:
                m = u'{}; {}'.format(m, valuemsg)
            return m
        else:
            return ''

    @property
    def first_result(self):
        # First <result> element, or None when the response has none.
        if hasattr(self, 'result') and len(self.result):
            return self.result[0]
        else:
            return None

    @property
    def response_extension(self):
        # Raises KeyError when the response has no <extension>.
        return self['epp']['response']['extension']

    def get_response_extension(self, key):
        # Lookup of a single extension element; raises KeyError if absent.
        return self.response_extension[key]
def dpath_get(dct, path, default=None):
    """Follow *path* (a sequence of keys) into the nested dict *dct*.

    Every missing key resolves to *default* (a shared empty dict when not
    given), so a broken path yields *default* instead of raising KeyError.
    """
    if default is None:
        default = {}
    node = dct
    for step in path:
        node = node.get(step, default)
    return node
def dpath_make(path):
    """Return a nested dict with a single branch following *path*.

    e.g. ('a', 'b') -> {'a': {'b': {}}}; an empty path gives {}.
    """
    root = {}
    cursor = root
    for key in path:
        cursor = cursor.setdefault(key, {})
    return root
if __name__ == '__main__':
    # Ad-hoc demo (Python 2 only -- uses print statements): build a
    # domain:create command, serialize it, parse it back, re-serialize.
    import sys
    from eppy.xmldict import xml2dict
    from StringIO import StringIO
    try:
        from simplejson import dumps as json_encode
    except ImportError:
        from json import dumps as json_encode
    cmd = EppCreateDomainCommand()
    cmd.name = 'hello.me'
    cmd.ns = dict(hostObj=['ns1.acme.com', 'ns2.acme.com'])
    cmd.contact = [{'@type': 'admin', '_text': 'wil001a'}]
    cmd.authInfo=dict(pw='fooBAR')
    #print "handcrafted = ", json_encode(cmd)
    xml = cmd.to_xml()
    print xml
    root = ElementTree.parse(StringIO(xml)).getroot()
    cmd2 = xml2dict(root, outerclass=EppCreateDomainCommand, default_prefix="epp")
    print repr(cmd2)
    print json_encode(cmd2)
    print "domain = ", cmd2.name
    print "again back to XML="
    print cmd2.to_xml()
    sys.exit(0)
    # NOTE: everything below is unreachable (sys.exit above) -- kept only
    # as a reference for the raw dict shape of a hand-built command.
    cmd = {
        'epp:create': {
            #'{urn:ietf:params:xml:ns:domain-1.0}create': {
            'domain:create': {
                '_order': ['name', 'period', 'ns', 'registrant', 'contact', 'authInfo'],
                #'@xmlns:domain': 'urn:ietf:params:xml:ns:domain-1.0',
                'name': 'hello.com',
                'domain:registrant': 'wil001',
                'contact': [
                    {'@type': 'admin', '_text': 'wil001a'},
                    {'@type': 'billing', '_text': 'wil001b'},
                ],
                'ns': {
                    'hostObj': [
                        'ns1.example.com',
                        'ns2.example.com',
                    ]
                },
                'authInfo': {
                    'pw': 'fooBar'
                }
            }
        }
    }
    eppdoc = EppCommand(cmd)
    from xml.etree import ElementTree
    from eppy.xmldict import dict2xml
    print ElementTree.tostring(dict2xml(eppdoc))
|
import random
import time
import threading
from . import socket
class Timeout(Exception):
    """Raised by Channel.request when no response arrives within 10 seconds."""
    pass


class Error(Exception):
    """Raised by Channel.request when the response carries an 'error' field."""
    pass


class Channel(object):
    """A named pub/sub + request/response channel over the shared socket.

    All messages are plain dicts; the 'type' field routes them to
    broadcast, request or response handling.
    """
    # Exposed on the class so callers can catch via Channel.Timeout/Error.
    Timeout = Timeout
    Error = Error

    def __init__(self, name):
        self._name = name
        self._listeners = {}   # broadcast name -> list of callbacks
        self._responses = {}   # message id -> response message
        self._responders = {}  # request name -> handler callback
        self._lock = threading.Lock()
        socket.on('message', self._on_message)

    def _clean_responses(self):
        """Drop responses that have sat unclaimed for more than 10 seconds."""
        with self._lock:
            now = time.time()
            # BUG FIX: popping while iterating the live dict raises
            # RuntimeError ("dictionary changed size during iteration");
            # iterate over a snapshot of the items instead.
            for msg_id, response in list(self._responses.items()):
                if now - response["time"] > 10:
                    self._responses.pop(msg_id, None)

    def _on_response(self, message):
        # Stamp arrival time so _clean_responses can expire stale entries.
        with self._lock:
            message["time"] = time.time()
            self._responses[message["id"]] = message

    def _on_broadcast(self, message):
        with self._lock:
            callbacks = self._listeners.get(message["name"], None)
            if not callbacks:
                return
            for callback in callbacks:
                try:
                    # BUG FIX: messages are dicts, so the old attribute
                    # access (message.payload) raised AttributeError --
                    # silently swallowed below, so listeners never fired.
                    callback(message["payload"])
                except Exception:
                    # Best-effort delivery: one bad listener must not
                    # break the others.
                    pass

    def _on_request(self, message):
        with self._lock:
            callback = self._responders.get(message["name"])
            if not callback:
                return
            try:
                # BUG FIX: dict access instead of attribute access (as above).
                payload = callback(message["payload"])
            except Exception:
                socket.send({
                    "type": "response",
                    "id": message["id"],
                    # typo fix: "occured" -> "occurred"
                    "error": "An error occurred"
                })
            else:
                socket.send({
                    "type": "response",
                    "id": message["id"],
                    "payload": payload
                })

    def _on_message(self, message):
        """Dispatch an incoming socket message by type and channel."""
        if message["type"] == "response":
            self._clean_responses()
            return self._on_response(message)
        if message["channel"] != self._name:
            return
        if message["type"] == "broadcast":
            self._on_broadcast(message)
        elif message["type"] == "request":
            self._on_request(message)

    def request(self, name=None, target=None, payload=None):
        """Send a request and poll up to 10 seconds for its response.

        Returns the response payload; raises Error when the response
        carries an 'error' field, Timeout when no response arrives in time.
        """
        message = {
            "type": "request",
            "id": str(random.random()),
            "channel": self._name,
            "name": name,
            "payload": payload,
            "device": {"target": target},  # This will be deprecated
        }
        socket.send(message)
        start = time.time()
        while time.time() - start < 10:
            with self._lock:
                response = self._responses.pop(message["id"], None)
            if response:
                if response.get('error', None):
                    raise Error(response.get('error'))
                return response.get('payload', None)
            time.sleep(.1)
        with self._lock:
            self._responses.pop(message["id"], None)
        raise self.Timeout()

    def broadcast(self, name=None, payload=None):
        """Fire-and-forget broadcast of *payload* on this channel."""
        socket.send({
            'type': 'broadcast',
            'channel': self._name,
            'name': name,
            'payload': payload
        })

    def listen(self, name=None, callback=None):
        """Register *callback* for broadcasts named *name*."""
        if not self._listeners.get(name):
            self._listeners[name] = []
        self._listeners[name].append(callback)

    def fling(self, uuid=None):
        """Broadcast a 'fling' event carrying *uuid*."""
        return self.broadcast(name='fling', payload={'uuid': uuid})

    def respond(self, name=None, callback=None):
        """Register the handler invoked for requests named *name*."""
        self._responders[name] = callback
Use dictionaries in listener callbacks.
import random
import time
import threading
from . import socket
class Timeout(Exception):
    """Raised by Channel.request when no response arrives within 10 seconds."""
    pass


class Error(Exception):
    """Raised by Channel.request when the response carries an 'error' field."""
    pass


class Channel(object):
    """A named pub/sub + request/response channel over the shared socket.

    All messages are plain dicts; the 'type' field routes them to
    broadcast, request or response handling.
    """
    # Exposed on the class so callers can catch via Channel.Timeout/Error.
    Timeout = Timeout
    Error = Error

    def __init__(self, name):
        self._name = name
        self._listeners = {}   # broadcast name -> list of callbacks
        self._responses = {}   # message id -> response message
        self._responders = {}  # request name -> handler callback
        self._lock = threading.Lock()
        socket.on('message', self._on_message)

    def _clean_responses(self):
        """Drop responses that have sat unclaimed for more than 10 seconds."""
        with self._lock:
            now = time.time()
            # BUG FIX: popping while iterating the live dict raises
            # RuntimeError ("dictionary changed size during iteration");
            # iterate over a snapshot of the items instead.
            for msg_id, response in list(self._responses.items()):
                if now - response["time"] > 10:
                    self._responses.pop(msg_id, None)

    def _on_response(self, message):
        # Stamp arrival time so _clean_responses can expire stale entries.
        with self._lock:
            message["time"] = time.time()
            self._responses[message["id"]] = message

    def _on_broadcast(self, message):
        with self._lock:
            callbacks = self._listeners.get(message["name"], None)
            if not callbacks:
                return
            for callback in callbacks:
                try:
                    callback(message['payload'])
                except Exception:
                    # Best-effort delivery: one bad listener must not
                    # break the others.
                    pass

    def _on_request(self, message):
        with self._lock:
            callback = self._responders.get(message["name"])
            if not callback:
                return
            try:
                payload = callback(message['payload'])
            except Exception:
                socket.send({
                    "type": "response",
                    "id": message["id"],
                    # typo fix: "occured" -> "occurred"
                    "error": "An error occurred"
                })
            else:
                socket.send({
                    "type": "response",
                    "id": message["id"],
                    "payload": payload
                })

    def _on_message(self, message):
        """Dispatch an incoming socket message by type and channel."""
        if message["type"] == "response":
            self._clean_responses()
            return self._on_response(message)
        if message["channel"] != self._name:
            return
        if message["type"] == "broadcast":
            self._on_broadcast(message)
        elif message["type"] == "request":
            self._on_request(message)

    def request(self, name=None, target=None, payload=None):
        """Send a request and poll up to 10 seconds for its response.

        Returns the response payload; raises Error when the response
        carries an 'error' field, Timeout when no response arrives in time.
        """
        message = {
            "type": "request",
            "id": str(random.random()),
            "channel": self._name,
            "name": name,
            "payload": payload,
            "device": {"target": target},  # This will be deprecated
        }
        socket.send(message)
        start = time.time()
        while time.time() - start < 10:
            with self._lock:
                response = self._responses.pop(message["id"], None)
            if response:
                if response.get('error', None):
                    raise Error(response.get('error'))
                return response.get('payload', None)
            time.sleep(.1)
        with self._lock:
            self._responses.pop(message["id"], None)
        raise self.Timeout()

    def broadcast(self, name=None, payload=None):
        """Fire-and-forget broadcast of *payload* on this channel."""
        socket.send({
            'type': 'broadcast',
            'channel': self._name,
            'name': name,
            'payload': payload
        })

    def listen(self, name=None, callback=None):
        """Register *callback* for broadcasts named *name*."""
        if not self._listeners.get(name):
            self._listeners[name] = []
        self._listeners[name].append(callback)

    def fling(self, uuid=None):
        """Broadcast a 'fling' event carrying *uuid*."""
        return self.broadcast(name='fling', payload={'uuid': uuid})

    def respond(self, name=None, callback=None):
        """Register the handler invoked for requests named *name*."""
        self._responders[name] = callback
|
from epys.utils import is_elapsed_time, parse_time, getMonth
import pandas as pd
from datetime import datetime
import os
class EVF:
    """Parser/writer for EVF event files.

    Header metadata is kept verbatim for round-tripping and the events are
    loaded into a pandas DataFrame (``self.events``).
    """

    def __init__(self, fname):
        """Initialize empty containers and load *fname*."""
        self.WTF = list()                # comment lines found after the metadata block
        self.meta = dict()               # '# key: value' header metadata
        self.header = list()             # raw header comment lines (kept verbatim)
        self.ref_date = None             # reference date from the header, as datetime
        self.init_values = list()        # raw Init_value header entries
        self.include_files = list()      # raw Include_file header entries
        self.propagation_delay = None    # raw Propagation_delay header tokens
        # Loading the given file
        self.load(fname)

    def load(self, fname):
        """Parse *fname*: header comments, header fields and the event list."""
        # Storing the name of the file for editing purposes
        self.fname = fname
        # Auxiliary dictionary to speed up the data conversion into pandas
        aux_dict = dict(raw_time=[], time=[], event=[], experiment=[], item=[],
                        count=[], comment=[])
        # Importing the file
        out_ouf_metadata = False
        with open(fname) as f:
            for line in f:
                if '\n' in line[0]:
                    pass
                # Filtering lines with comments
                elif '#' in line[0]:
                    if not out_ouf_metadata:
                        self.header.append(line)
                        self._read_metada(line)
                    else:
                        self.WTF.append(line)
                # Storing events
                elif is_elapsed_time(line.split()[0]):
                    aux_dict = self._read_events(line, aux_dict)
                # Useful data from the header
                else:
                    # We can say we are out of the metadata here because
                    # start_time and end_time are mandatory in the files
                    out_ouf_metadata = True
                    self._read_header_line(line.split())
        # Creating the pandas dataframe
        self.events = pd.DataFrame(aux_dict)
        # Sorting by the time (DataFrame.sort is the legacy pandas API this
        # project targets; sort_values on modern pandas)
        self.events = self.events.sort(['time'])
        # Sorting the columns in the dataframe
        cols = ['raw_time', 'time', 'event', 'experiment', 'item', 'count',
                'comment']
        self.events = self.events[cols]

    def _read_metada(self, line):
        # Header lines of the form '# key: value' populate self.meta.
        if ': ' in line:
            self.meta[line[1:line.index(': ')].strip()] = \
                line[line.index(': ') + 1:-1].strip()

    def _read_events(self, line, aux_dict):
        """Parse one event line into *aux_dict*'s column lists."""
        # Storing comments
        if '#' in line:
            index = line.index('#')
            aux_dict['comment'].append(line[index:-1])
        else:
            aux_dict['comment'].append(None)
        # Consecutive whitespace are regarded as a single separator
        l = line.split()
        aux_dict['raw_time'].append(l[0])
        aux_dict['time'].append(self._to_datetime(l[0]))
        aux_dict['event'].append(l[1])
        l = [e.upper() for e in line.split()]
        if 'ITEM' in l:
            # In the file it should be: EXP = <experiment> ITEM = <item>
            aux_dict['experiment'].append(l[l.index('ITEM') - 1])
            # In the file it should be: ITEM = <item>
            aux_dict['item'].append(l[l.index('ITEM') + 2])
            # Removing last parenthesis if exist
            if aux_dict['item'][-1][-1] == ')':
                aux_dict['item'][-1] = aux_dict['item'][-1][:-1]
            if '#' in aux_dict['item'][-1]:
                aux_dict['item'][-1] = \
                    aux_dict['item'][-1][:aux_dict['item'][-1].index('#') - 1]
        else:
            # Storing empty values
            aux_dict['experiment'].append(None)
            aux_dict['item'].append(None)
        if 'COUNT' in l or '(COUNT' in l:
            if 'COUNT' in l:
                # In the file it should be: COUNT = <count>
                aux_dict['count'].append(l[l.index('COUNT') + 2])
            else:
                # In the file it should be: (COUNT = <count>)
                aux_dict['count'].append(l[l.index('(COUNT') + 2])
            # Removing useless characters at the end
            if aux_dict['count'][-1][-1] == ')':
                aux_dict['count'][-1] = aux_dict['count'][-1][:-1]
            if '#' in aux_dict['count'][-1]:
                aux_dict['count'][-1] = \
                    aux_dict[
                        'count'][-1][:aux_dict['count'][-1].index('#') - 1]
        else:
            aux_dict['count'].append(None)
        return aux_dict

    def _read_header_line(self, line):
        """Store a non-comment header field (already split into tokens)."""
        if 'Ref_date:' in line:
            # Storing them in "raw" format
            self.raw_ref_time = line[1]
            # Getting the reference date from the header and transforming it
            # into datetime format
            self.ref_date = self._ref_date_to_datetime(line[1])
        elif 'Start_time:' in line:
            # Storing them in "raw" format
            self.raw_start_time = line[1]
            # Storing them in datetime format
            self.start_time = self._to_datetime(line[1])
        elif 'End_time:' in line:
            # Storing them in "raw" format
            self.raw_end_time = line[1]
            # Storing them in datetime format
            self.end_time = self._to_datetime(line[1])
        elif 'Propagation_delay:' in line:
            self.propagation_delay = line[1:]
        elif 'Init_value:' in line:
            # Storing them in "raw" format
            self.init_values.append(line[1:])
        # Sometimes it appears as Include instead of Include_file
        elif 'Include_file:' in line or 'Include:' in line:
            self.include_files.append(line[1:])

    def _ref_date_to_datetime(self, ref_date):
        """Convert a 'dd-<month name>-yyyy' header date into a datetime."""
        ref_date = ref_date.split('-')[0] + "-" +\
            str(getMonth(ref_date.split('-')[1])) + "-" + \
            ref_date.split('-')[2]
        return datetime.strptime(ref_date, "%d-%m-%Y")

    def _to_datetime(self, element):
        """Convert an (elapsed or dated) time token into a datetime."""
        if self.ref_date is None and '-' not in element:
            return parse_time(element)
        else:
            if '-' in element:
                date = self._ref_date_to_datetime(element.split('_')[0])
                return parse_time("000_" + element.split('_')[1], date)
            return parse_time(element, self.ref_date)

    def to_file(self, fname):
        """Write the file back out: header, header fields, then the events."""
        # Creating file if the file doesn't exist and truncating it if exists
        with open(fname, 'w') as f:
            # Copying the header verbatim
            for line in self.header:
                f.write(line)
            # Copying the useful data in the header
            # Ref_date
            if self.ref_date is not None:
                f.write("Ref_date: " + self.raw_ref_time + "\n#\n")
            # Start and End time
            f.write("Start_time: " + self.raw_start_time + "\n")
            f.write("End_time: " + self.raw_end_time + "\n#\n")
            # Propagation delay
            if self.propagation_delay is not None:
                output = ""
                for element in self.propagation_delay:
                    output += " " + element
                f.write("Propagation_delay: " + output + "\n#\n")
            # Init values
            if len(self.init_values) > 0:
                for value in self.init_values:
                    output = ""
                    for element in value:
                        output += " " + element
                    f.write("Init_value: " + output + "\n")
                f.write("#\n")
            # Include files
            if len(self.include_files) > 0:
                for include in self.include_files:
                    output = ""
                    for element in include:
                        output += " " + element
                    f.write("Include_file: " + output + "\n")
                f.write("#\n")
            # Copying events
            # BUG FIX: len() is an int; concatenating it to a str raised
            # TypeError -- convert with str() first.
            f.write("# Events_in_list: " + str(len(self.events.index)) + "\n#\n")
            f.write("#\n# Time Event\n#\n")
            for index, row in self.events.iterrows():
                output = row['raw_time'] + " " + row['event']
                if row['experiment'] is not None:
                    output += " (EXP = " + row['experiment'] + " "
                    output += "ITEM = " + row['item'] + ")"
                if row['count'] is not None:
                    output += " (COUNT = " + row['count'] + ")"
                if row['comment'] is not None:
                    output += " #" + row['comment']
                output += "\n"
                f.write(output)
            f.write("#\n")
            # (removed a redundant f.close(); 'with' already closes the file)

    def check_consistency(self):
        """Verify events fall within [start_time, end_time]; raise NameError otherwise."""
        if self.events['time'].min() < self.start_time:
            print ("There is an time event before the official start_time")
            # BUG FIX: datetime + str raised TypeError; render via str().
            print (str(self.events['time'].min()) + " is before than "
                   + str(self.start_time))
            raise NameError('Events before start_time')
        elif self.events['time'].max() > self.end_time:
            print ("There is an time event after the official end_time")
            # BUG FIX: as above, convert datetimes before concatenating.
            print (str(self.events['time'].max()) + " is after than "
                   + str(self.end_time))
            raise NameError('Events after end_time')
        elif self.check_if_included_files_exist_in_directory():
            print ("Everything seems to be ok, congratulations! :)")

    def check_if_included_files_exist_in_directory(self):
        """Return True when every Include_file is next to this file on disk."""
        files_exist = True
        # Getting the path of the directory where we are working
        path = os.path.dirname(os.path.abspath(self.fname))
        for fname in self.include_files:
            # Removing possible problematic characters
            fname = fname[0].strip('"')
            if not os.path.isfile(os.path.join(path, fname)):
                files_exist = False
                # BUG FIX: missing space before "is" in the message.
                output = "It seems as if " + fname + " is not in the same "
                output += "directory as " + os.path.basename(self.fname)
                print (output)
        # Perhaps raising an exception here in the future...
        return files_exist
Small changes to the output file format.
from epys.utils import is_elapsed_time, parse_time, getMonth
import pandas as pd
from datetime import datetime
import os
class EVF:
def __init__(self, fname):
# Variable initialization
self.WTF = list()
self.meta = dict()
self.header = list()
self.ref_date = None
self.init_values = list()
self.include_files = list()
self.propagation_delay = None
# Loading the given file
self.load(fname)
def load(self, fname):
# Storing the name of the file for editting purposes
self.fname = fname
# Auxiliary dictionary to speed up the data convertion into pandas
aux_dict = dict(raw_time=[], time=[], event=[], experiment=[], item=[],
count=[], comment=[])
# Importing the file
out_ouf_metadata = False
with open(fname) as f:
for line in f:
if '\n' in line[0]:
pass
# Filtering lines with comments
elif '#' in line[0]:
if not out_ouf_metadata:
self.header.append(line)
self._read_metada(line)
else:
self.WTF.append(line)
# Storing events
elif is_elapsed_time(line.split()[0]):
aux_dict = self._read_events(line, aux_dict)
# Useful data from the header
else:
# We can say we are out of the metadate here because
# start_time and end_time are mandatory in the files
out_ouf_metadata = True
self._read_header_line(line.split())
# Creating the pandas dataframe
self.events = pd.DataFrame(aux_dict)
# Sorting by the time
self.events = self.events.sort(['time'])
# Sorting the columns in the dataframe
cols = ['raw_time', 'time', 'event', 'experiment', 'item', 'count',
'comment']
self.events = self.events[cols]
def _read_metada(self, line):
if ': ' in line:
self.meta[line[1:line.index(': ')].strip()] = \
line[line.index(': ') + 1:-1].strip()
def _read_events(self, line, aux_dict):
# Storing comments
if '#' in line:
index = line.index('#')
aux_dict['comment'].append(line[index:-1])
else:
aux_dict['comment'].append(None)
# Consecutive whitespace are regarded as a single separator
l = line.split()
aux_dict['raw_time'].append(l[0])
aux_dict['time'].append(self._to_datetime(l[0]))
aux_dict['event'].append(l[1])
l = [e.upper() for e in line.split()]
if 'ITEM' in l:
# In the file it should be: EXP = <experiment> ITEM = <item>
aux_dict['experiment'].append(l[l.index('ITEM') - 1])
# In the file it should be: ITEM = <item>
aux_dict['item'].append(l[l.index('ITEM') + 2])
# Removing last parenthesis if exist
if aux_dict['item'][-1][-1] == ')':
aux_dict['item'][-1] = aux_dict['item'][-1][:-1]
if '#' in aux_dict['item'][-1]:
aux_dict['item'][-1] = \
aux_dict['item'][-1][:aux_dict['item'][-1].index('#') - 1]
else:
# Storing empty values
aux_dict['experiment'].append(None)
aux_dict['item'].append(None)
if 'COUNT' in l or '(COUNT' in l:
if 'COUNT' in l:
# In the file it should be: COUNT = <count>
aux_dict['count'].append(l[l.index('COUNT') + 2])
else:
# In the file it should be: (COUNT = <count>)
aux_dict['count'].append(l[l.index('(COUNT') + 2])
# Removing useless characters at the end
if aux_dict['count'][-1][-1] == ')':
aux_dict['count'][-1] = aux_dict['count'][-1][:-1]
if '#' in aux_dict['count'][-1]:
aux_dict['count'][-1] = \
aux_dict[
'count'][-1][:aux_dict['count'][-1].index('#') - 1]
else:
aux_dict['count'].append(None)
return aux_dict
def _read_header_line(self, line):
if 'Ref_date:' in line:
# Storing them in "raw" format
self.raw_ref_time = line[1]
# Getting the reference date from the header and transforming it
# into datetime format
self.ref_date = self._ref_date_to_datetime(line[1])
elif 'Start_time:' in line:
# Storing them in "raw" format
self.raw_start_time = line[1]
# Storing them in datetime format
self.start_time = self._to_datetime(line[1])
elif 'End_time:' in line:
# Storing them in "raw" format
self.raw_end_time = line[1]
# Storing them in datetime format
self.end_time = self._to_datetime(line[1])
elif 'Propagation_delay:' in line:
self.propagation_delay = line[1:]
elif 'Init_value:' in line:
# Storing them in "raw" format
self.init_values.append(line[1:])
# Sometimes it appears as Include instead of Include_file
elif 'Include_file:' in line or 'Include:' in line:
self.include_files.append(line[1:])
def _ref_date_to_datetime(self, ref_date):
ref_date = ref_date.split('-')[0] + "-" +\
str(getMonth(ref_date.split('-')[1])) + "-" + \
ref_date.split('-')[2]
return datetime.strptime(ref_date, "%d-%m-%Y")
def _to_datetime(self, element):
if self.ref_date is None and '-' not in element:
return parse_time(element)
else:
if '-' in element:
date = self._ref_date_to_datetime(element.split('_')[0])
return parse_time("000_" + element.split('_')[1], date)
return parse_time(element, self.ref_date)
def to_file(self, fname):
    """Write the stored header, metadata and event table to *fname*.

    The file is created if missing and truncated otherwise.  The output
    mirrors the format the reader parses: verbatim header lines, then
    Ref_date/Start_time/End_time, the optional propagation delay, init
    values and include files, and finally the event table.
    """
    # The context manager closes the handle on exit; the old explicit
    # f.close() inside the `with` block was redundant and has been removed.
    with open(fname, 'w') as f:
        # Copy the verbatim header lines (side-effect list comprehension
        # replaced with a plain loop).
        for line in self.header:
            f.write(line)
        # Reference date (optional).
        if self.ref_date is not None:
            f.write("Ref_date: " + self.raw_ref_time + "\n#\n")
        # Start and end time.
        f.write("Start_time: " + self.raw_start_time + "\n")
        f.write("End_time: " + self.raw_end_time + "\n#\n")
        # Propagation delay: each token prefixed by a single space.
        if self.propagation_delay is not None:
            output = "".join(" " + element for element in self.propagation_delay)
            f.write("Propagation_delay: " + output + "\n#\n")
        # Init values, one line per stored token list.
        if len(self.init_values) > 0:
            for value in self.init_values:
                output = "".join(" " + element for element in value)
                f.write("Init_value: " + output + "\n")
            f.write("#\n")
        # Include files, one line per stored token list.
        if len(self.include_files) > 0:
            for include in self.include_files:
                output = "".join(" " + element for element in include)
                f.write("Include_file: " + output + "\n")
            f.write("#\n")
        # Event table.  NOTE(review): self.events looks like a pandas
        # DataFrame (it exposes .index and .iterrows()) -- confirm.
        f.write("# Events_in_list: " + str(len(self.events.index))
                + "\n#\n")
        f.write("# Time Event\n#\n")
        for index, row in self.events.iterrows():
            output = row['raw_time'] + " " + row['event']
            if row['experiment'] is not None:
                output += " (EXP = " + row['experiment'] + " "
                output += "ITEM = " + row['item'] + ")"
            if row['count'] is not None:
                output += " (COUNT = " + row['count'] + ")"
            if row['comment'] is not None:
                output += " # " + row['comment']
            output += "\n"
            f.write(output)
        f.write("#\n")
def check_consistency(self):
    """Validate that all event timestamps fall inside [start_time, end_time].

    Raises:
        NameError: if any event lies before start_time or after end_time.
    """
    if self.events['time'].min() < self.start_time:
        print("There is an time event before the official start_time")
        # %-format instead of '+' concatenation: the timestamps are
        # datetime-like, so the original `datetime + str` raised TypeError.
        print("%s is before than %s"
              % (self.events['time'].min(), self.start_time))
        raise NameError('Events before start_time')
    elif self.events['time'].max() > self.end_time:
        print("There is an time event after the official end_time")
        print("%s is after than %s"
              % (self.events['time'].max(), self.end_time))
        raise NameError('Events after end_time')
    elif self.check_if_included_files_exist_in_directory():
        print("Everything seems to be ok, congratulations! :)")
def check_if_included_files_exist_in_directory(self):
    """Return True when every referenced include file sits next to self.fname.

    Prints a warning for each missing file instead of raising.
    """
    files_exist = True
    # Directory that holds the file we are working on.
    path = os.path.dirname(os.path.abspath(self.fname))
    for fname in self.include_files:
        # Each entry is a token list; the first token is the (possibly
        # quoted) file name.
        fname = fname[0].strip('"')
        if not os.path.isfile(os.path.join(path, fname)):
            files_exist = False
            # Space after the file name added (it was missing, producing
            # e.g. 'foo.incis not in the same directory...').
            output = "It seems as if " + fname + " is not in the same "
            output += "directory as " + os.path.basename(self.fname)
            print(output)
            # Perhaps raising an exception here in the future...
    return files_exist
|
from setuptools import setup
from os import path


def _read_long_description():
    """Return the contents of README.md located next to this setup script.

    A context manager (rather than a bare open().read()) guarantees the
    file handle is closed promptly on every Python implementation.
    """
    here = path.abspath(path.dirname(__file__))
    with open(path.join(here, 'README.md')) as readme:
        return readme.read()


setup(
    name='autocrop',
    version='0.1.0',
    description='Image auto-cropper',
    long_description=_read_long_description(),
    url='https://github.com/tinkerX3/autocrop',
    author='Tomi Pozderec',
    author_email='tomi.pozderec@gmail.com',
    license='MIT',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 4 - Beta',
        'Topic :: Scientific/Engineering :: Image Recognition',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
    ],
    keywords='content-aware image cropping',
    install_requires=[
        'Pillow',
        'numpy',
        'scikit-image'
    ],
    entry_points={
        'console_scripts': [
            'autocrop=autocrop:main',
        ],
    },
)
Deleted setup.py xD
|
import json
import os
import pytest
import numpy as np
from laserchicken import compute_neighbors
from laserchicken import keys
from laserchicken import read_las
from laserchicken.feature_extractor.pulse_penetration_feature_extractor import GROUND_TAGS
from laserchicken.keys import point
from laserchicken.utils import copy_point_cloud
from laserchicken.volume_specification import InfiniteCylinder
from . import _feature_map
from . import compute_features
# Fix the RNG so the random point sub-samples built below are reproducible.
np.random.seed(1234)
# Test fixture file names / location.
_TEST_FILE_NAME = 'AHN3.las'
_TEST_NEIGHBORHOODS_FILE_NAME = 'AHN3_1000_random_neighbors.json'
_TEST_DATA_SOURCE = 'testdata'
# Neighborhood search volume: an infinite vertical cylinder of radius 4.
_CYLINDER = InfiniteCylinder(4)
# Full test point cloud plus random 1000- and 10-point subsets of it.
_PC_260807 = read_las.read(os.path.join(_TEST_DATA_SOURCE, _TEST_FILE_NAME))
_PC_1000 = copy_point_cloud(_PC_260807, array_mask=(
    np.random.choice(range(len(_PC_260807[keys.point]['x']['data'])), size=1000, replace=False)))
_PC_10 = copy_point_cloud(_PC_260807, array_mask=(
    np.random.choice(range(len(_PC_260807[keys.point]['x']['data'])), size=10, replace=False)))
# Precomputed neighborhoods shared by the tests below.
# compute_neighborhoods returns a generator; next() takes the first batch.
_1000_NEIGHBORHOODS_IN_260807 = next(compute_neighbors.compute_neighborhoods(_PC_260807, _PC_1000, _CYLINDER))
_10_NEIGHBORHOODS_IN_260807 = next(compute_neighbors.compute_neighborhoods(_PC_260807, _PC_10, _CYLINDER))
_260807_NEIGHBORHOODS_IN_10 = next(compute_neighbors.compute_neighborhoods(_PC_10, _PC_260807, _CYLINDER))
# All registered feature names; every test below is parametrized over them.
feature_names = [name for name in _feature_map()]
@pytest.mark.parametrize("feature", feature_names)
def test_completeTile_consistentOutput(feature):
    """Feature over 1000 targets within the full cloud keeps attribute lengths consistent."""
    target_point_cloud = copy_point_cloud(_PC_1000)
    compute_features(copy_point_cloud(_PC_260807), _1000_NEIGHBORHOODS_IN_260807, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_manyTargets_consistentOutput(feature):
    """Full cloud as targets with a tiny 10-point environment stays consistent."""
    target_point_cloud = copy_point_cloud(_PC_260807)
    compute_features(copy_point_cloud(_PC_10), _260807_NEIGHBORHOODS_IN_10, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_manyTargetsBigEnvironment_consistentOutput(feature):
    """Same scenario with a 1000-point environment.

    NOTE(review): the neighborhoods were precomputed against _PC_10, not
    _PC_1000 -- presumably intentional for this scenario, but confirm.
    """
    target_point_cloud = copy_point_cloud(_PC_260807)
    compute_features(copy_point_cloud(_PC_1000), _260807_NEIGHBORHOODS_IN_10, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_xAllZeros_consistentOutput(feature):
    """Degenerate cloud with x == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(x=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_yAllZeros_consistentOutput(feature):
    """Degenerate cloud with y == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(y=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zAllZeros_consistentOutput(feature):
    """Degenerate cloud with z == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(z=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zeroPoints_consistentOutput(feature):
    """An empty point cloud is handled without error."""
    n = 0
    pc = _create_point_cloud(n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zeroNeighbors_consistentOutput(feature):
    """Empty neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 0)


@pytest.mark.parametrize("feature", feature_names)
def test_oneNeighbor_consistentOutput(feature):
    """Single-point neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 1)


@pytest.mark.parametrize("feature", feature_names)
def test_twoNeighbors_consistentOutput(feature):
    """Two-point neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 2)


def _test_consistent_output_with_n_neighbors(feature, n_neighbors):
    """Shared driver: 10 targets, each assigned the first n_neighbors indices."""
    n_points = 10
    pc = _create_point_cloud(n=n_points)
    compute_features(pc, [range(n_neighbors) for _ in range(n_points)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_inputNotChanged(feature):
    """compute_features must not mutate its environment, targets or neighborhoods."""
    original_environment = _PC_260807
    environment = copy_point_cloud(original_environment)
    original_targets = _PC_10
    targets = copy_point_cloud(original_targets)
    original_neighborhoods = _10_NEIGHBORHOODS_IN_260807
    # Copy each neighborhood list so in-place mutation would be caught below.
    neighborhoods = [[e for e in l] for l in original_neighborhoods]
    compute_features(environment, neighborhoods, 0, targets, [feature],
                     volume=_CYLINDER)
    _assert_attributes_not_changed(original_environment, environment)
    _assert_attributes_not_changed(original_targets, targets)
    assert json.dumps(original_neighborhoods) == json.dumps(neighborhoods)
def _create_point_cloud(x=None, y=None, z=None, n=10):
    """Build a minimal n-point cloud; any of x/y/z may be pinned to a constant.

    Unpinned coordinates take the values 0..n-1.  Every odd-indexed point is
    classified with the first ground tag so ground-based features have data.
    """
    ground = GROUND_TAGS[0]

    def _coordinate(value):
        # Constant column when a value is given, otherwise 0..n-1.
        return np.array([value if value is not None else i for i in range(n)])

    classification = np.array([i if i % 2 == 0 else ground for i in range(n)])
    return {point: {'x': {'data': _coordinate(x), 'type': 'float'},
                    'y': {'data': _coordinate(y), 'type': 'float'},
                    'z': {'data': _coordinate(z), 'type': 'float'},
                    'raw_classification': {'data': classification,
                                           'type': 'float'}}}
def _assert_attributes_not_changed(original_point_cloud, new_point_cloud):
    """Fail when any per-point attribute of *new_point_cloud* deviates from the original."""
    for name, original_attribute in original_point_cloud[keys.point].items():
        np.testing.assert_array_almost_equal(
            new_point_cloud[keys.point][name]['data'],
            original_attribute['data'])
def _assert_consistent_attribute_length(target_point_cloud):
    """Assert every per-point attribute holds as many values as 'x'.

    The previous version compared ``len()`` of the attribute *dicts*
    (always 2: 'data' and 'type'), so the assertion could never fail;
    compare the lengths of the underlying 'data' arrays instead.
    """
    n_elements = len(target_point_cloud[keys.point]['x']['data'])
    for key in target_point_cloud[keys.point]:
        assert n_elements == len(target_point_cloud[keys.point][key]['data'])
add max sample size to neighbor computation in feature tests
Mostly as an example, since the sample size is too large to make a significant difference given the already small volume that is used.
import json
import os
import pytest
import numpy as np
from laserchicken import compute_neighbors
from laserchicken import keys
from laserchicken import read_las
from laserchicken.feature_extractor.pulse_penetration_feature_extractor import GROUND_TAGS
from laserchicken.keys import point
from laserchicken.utils import copy_point_cloud
from laserchicken.volume_specification import InfiniteCylinder
from . import _feature_map
from . import compute_features
# Fix the RNG so the random point sub-samples built below are reproducible.
np.random.seed(1234)
# Test fixture file names / location.
_TEST_FILE_NAME = 'AHN3.las'
_TEST_NEIGHBORHOODS_FILE_NAME = 'AHN3_1000_random_neighbors.json'
_TEST_DATA_SOURCE = 'testdata'
# Neighborhood search volume: an infinite vertical cylinder of radius 4.
_CYLINDER = InfiniteCylinder(4)
# Full test point cloud plus random 1000- and 10-point subsets of it.
_PC_260807 = read_las.read(os.path.join(_TEST_DATA_SOURCE, _TEST_FILE_NAME))
_PC_1000 = copy_point_cloud(_PC_260807, array_mask=(
    np.random.choice(range(len(_PC_260807[keys.point]['x']['data'])), size=1000, replace=False)))
_PC_10 = copy_point_cloud(_PC_260807, array_mask=(
    np.random.choice(range(len(_PC_260807[keys.point]['x']['data'])), size=10, replace=False)))
# Precomputed neighborhoods shared by the tests below; sample_size caps each
# neighborhood at 500 points (mostly exemplary given the small volume).
# compute_neighborhoods returns a generator; next() takes the first batch.
_1000_NEIGHBORHOODS_IN_260807 = next(
    compute_neighbors.compute_neighborhoods(_PC_260807, _PC_1000, _CYLINDER, sample_size=500))
_10_NEIGHBORHOODS_IN_260807 = next(
    compute_neighbors.compute_neighborhoods(_PC_260807, _PC_10, _CYLINDER, sample_size=500))
_260807_NEIGHBORHOODS_IN_10 = next(
    compute_neighbors.compute_neighborhoods(_PC_10, _PC_260807, _CYLINDER, sample_size=500))
# All registered feature names; every test below is parametrized over them.
feature_names = [name for name in _feature_map()]
@pytest.mark.parametrize("feature", feature_names)
def test_completeTile_consistentOutput(feature):
    """Feature over 1000 targets within the full cloud keeps attribute lengths consistent."""
    target_point_cloud = copy_point_cloud(_PC_1000)
    compute_features(copy_point_cloud(_PC_260807), _1000_NEIGHBORHOODS_IN_260807, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_manyTargets_consistentOutput(feature):
    """Full cloud as targets with a tiny 10-point environment stays consistent."""
    target_point_cloud = copy_point_cloud(_PC_260807)
    compute_features(copy_point_cloud(_PC_10), _260807_NEIGHBORHOODS_IN_10, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_manyTargetsBigEnvironment_consistentOutput(feature):
    """Same scenario with a 1000-point environment.

    NOTE(review): the neighborhoods were precomputed against _PC_10, not
    _PC_1000 -- presumably intentional for this scenario, but confirm.
    """
    target_point_cloud = copy_point_cloud(_PC_260807)
    compute_features(copy_point_cloud(_PC_1000), _260807_NEIGHBORHOODS_IN_10, 0, target_point_cloud,
                     [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(target_point_cloud)


@pytest.mark.parametrize("feature", feature_names)
def test_xAllZeros_consistentOutput(feature):
    """Degenerate cloud with x == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(x=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_yAllZeros_consistentOutput(feature):
    """Degenerate cloud with y == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(y=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zAllZeros_consistentOutput(feature):
    """Degenerate cloud with z == 0 everywhere must not break any feature."""
    n = 10
    pc = _create_point_cloud(z=0, n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zeroPoints_consistentOutput(feature):
    """An empty point cloud is handled without error."""
    n = 0
    pc = _create_point_cloud(n=n)
    compute_features(pc, [[] for _ in range(n)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_zeroNeighbors_consistentOutput(feature):
    """Empty neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 0)


@pytest.mark.parametrize("feature", feature_names)
def test_oneNeighbor_consistentOutput(feature):
    """Single-point neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 1)


@pytest.mark.parametrize("feature", feature_names)
def test_twoNeighbors_consistentOutput(feature):
    """Two-point neighborhoods are handled without error."""
    _test_consistent_output_with_n_neighbors(feature, 2)


def _test_consistent_output_with_n_neighbors(feature, n_neighbors):
    """Shared driver: 10 targets, each assigned the first n_neighbors indices."""
    n_points = 10
    pc = _create_point_cloud(n=n_points)
    compute_features(pc, [range(n_neighbors) for _ in range(n_points)], 0, pc, [feature], volume=_CYLINDER)
    _assert_consistent_attribute_length(pc)


@pytest.mark.parametrize("feature", feature_names)
def test_inputNotChanged(feature):
    """compute_features must not mutate its environment, targets or neighborhoods."""
    original_environment = _PC_260807
    environment = copy_point_cloud(original_environment)
    original_targets = _PC_10
    targets = copy_point_cloud(original_targets)
    original_neighborhoods = _10_NEIGHBORHOODS_IN_260807
    # Copy each neighborhood list so in-place mutation would be caught below.
    neighborhoods = [[e for e in l] for l in original_neighborhoods]
    compute_features(environment, neighborhoods, 0, targets, [feature],
                     volume=_CYLINDER)
    _assert_attributes_not_changed(original_environment, environment)
    _assert_attributes_not_changed(original_targets, targets)
    assert json.dumps(original_neighborhoods) == json.dumps(neighborhoods)
def _create_point_cloud(x=None, y=None, z=None, n=10):
    """Build a minimal n-point cloud; any of x/y/z may be pinned to a constant.

    Unpinned coordinates take the values 0..n-1.  Every odd-indexed point is
    classified with the first ground tag so ground-based features have data.
    """
    ground = GROUND_TAGS[0]

    def _coordinate(value):
        # Constant column when a value is given, otherwise 0..n-1.
        return np.array([value if value is not None else i for i in range(n)])

    classification = np.array([i if i % 2 == 0 else ground for i in range(n)])
    return {point: {'x': {'data': _coordinate(x), 'type': 'float'},
                    'y': {'data': _coordinate(y), 'type': 'float'},
                    'z': {'data': _coordinate(z), 'type': 'float'},
                    'raw_classification': {'data': classification,
                                           'type': 'float'}}}
def _assert_attributes_not_changed(original_point_cloud, new_point_cloud):
    """Fail when any per-point attribute of *new_point_cloud* deviates from the original."""
    for name, original_attribute in original_point_cloud[keys.point].items():
        np.testing.assert_array_almost_equal(
            new_point_cloud[keys.point][name]['data'],
            original_attribute['data'])
def _assert_consistent_attribute_length(target_point_cloud):
    """Assert every per-point attribute holds as many values as 'x'.

    The previous version compared ``len()`` of the attribute *dicts*
    (always 2: 'data' and 'type'), so the assertion could never fail;
    compare the lengths of the underlying 'data' arrays instead.
    """
    n_elements = len(target_point_cloud[keys.point]['x']['data'])
    for key in target_point_cloud[keys.point]:
        assert n_elements == len(target_point_cloud[keys.point][key]['data'])
|
Added 10s delay after each bulk upload
|
"""Leetcode 987. Vertical Order Traversal of a Binary Tree
Medium
URL: https://leetcode.com/problems/vertical-order-traversal-of-a-binary-tree/
Given a binary tree, return the vertical order traversal of its nodes values.
For each node at position (X, Y), its left and right children respectively will be
at positions (X-1, Y-1) and (X+1, Y-1).
Running a vertical line from X = -infinity to X = +infinity, whenever the vertical
line touches some nodes, we report the values of the nodes in order from top to
bottom (decreasing Y coordinates).
If two nodes have the same position, then the value of the node that is reported
first is the value that is smaller.
Return an list of non-empty reports in order of X coordinate.
Every report will have a list of values of nodes.
Example 1:
3
/ \
9 20
/ \
15 7
Input: [3,9,20,null,null,15,7]
Output: [[9],[3,15],[20],[7]]
Explanation:
Without loss of generality, we can assume the root node is at position (0, 0):
Then, the node with value 9 occurs at position (-1, -1);
The nodes with values 3 and 15 occur at positions (0, 0) and (0, -2);
The node with value 20 occurs at position (1, -1);
The node with value 7 occurs at position (2, -2).
Example 2:
1
/ \
2 3
/ \ / \
4 5 6 7
Input: [1,2,3,4,5,6,7]
Output: [[4],[2],[1,5,6],[3],[7]]
Explanation:
The node with value 5 and the node with value 6 have the same position according to the given scheme.
However, in the report "[1,5,6]", the node value of 5 comes first since 5 is smaller than 6.
Note:
- The tree will have between 1 and 1000 nodes.
- Each node's value will be between 0 and 1000.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """A binary-tree node: a value plus left/right child links (None = absent)."""

    def __init__(self, val):
        self.val = val
        self.left, self.right = None, None
class SolutionOrderValsDictSortedLevelOrderValsDict(object):
    def verticalTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[List[int]]

        BFS level by level; within each level, the values that share a
        vertical column are sorted before being appended to that column's
        result list, then columns are emitted in left-to-right order.

        Time complexity: O(n+n*logn), where n is number of nodes.
        Space complexity: O(n).
        """
        from collections import defaultdict
        from collections import deque

        # column -> accumulated values across all levels.
        columns = defaultdict(list)
        frontier = deque([(root, 0)])
        while frontier:
            # column -> values seen on the current level only.
            level = defaultdict(list)
            for _ in range(len(frontier)):
                node, col = frontier.pop()
                level[col].append(node.val)
                if node.left:
                    frontier.appendleft((node.left, col - 1))
                if node.right:
                    frontier.appendleft((node.right, col + 1))
            # Ties on the same (column, level) are resolved by value order.
            for col in level:
                columns[col] += sorted(level[col])
        return [columns[col] for col in sorted(columns)]
def main():
    """Run verticalTraversal on the three sample trees and print the results.

    print() is used as a function so the script runs on both Python 2 and
    Python 3 (the previous `print expr` statements were Python-2 only and
    are a SyntaxError under Python 3).
    """
    # Input: [3,9,20,null,null,15,7]
    # Output: [[9],[3,15],[20],[7]]
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))

    # Input: [1,2,3,4,5,6,7]
    # Output: [[4],[2],[1,5,6],[3],[7]]
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    root.left.left = TreeNode(4)
    root.left.right = TreeNode(5)
    root.right.left = TreeNode(6)
    root.right.right = TreeNode(7)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))

    # Input: [0,2,1,3,null,null,null,4,5,null,7,6,null,10,8,11,9]
    # Output: [[4,10,11],[3,6,7],[2,5,8,9],[0],[1]]
    root = TreeNode(0)
    root.left = TreeNode(2)
    root.right = TreeNode(1)
    root.left.left = TreeNode(3)
    root.left.right = None
    root.right.left = None
    root.right.right = None
    root.left.left.left = TreeNode(4)
    root.left.left.right = TreeNode(5)
    root.left.left.left.right = TreeNode(7)
    root.left.left.right.left = TreeNode(6)
    root.left.left.left.right.left = TreeNode(10)
    root.left.left.left.right.right = TreeNode(8)
    root.left.left.right.left.left = TreeNode(11)
    root.left.left.right.left.right = TreeNode(9)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))


if __name__ == '__main__':
    main()
Revise to level_vorder_vals_d & vorder
"""Leetcode 987. Vertical Order Traversal of a Binary Tree
Medium
URL: https://leetcode.com/problems/vertical-order-traversal-of-a-binary-tree/
Given a binary tree, return the vertical order traversal of its nodes values.
For each node at position (X, Y), its left and right children respectively will be
at positions (X-1, Y-1) and (X+1, Y-1).
Running a vertical line from X = -infinity to X = +infinity, whenever the vertical
line touches some nodes, we report the values of the nodes in order from top to
bottom (decreasing Y coordinates).
If two nodes have the same position, then the value of the node that is reported
first is the value that is smaller.
Return an list of non-empty reports in order of X coordinate.
Every report will have a list of values of nodes.
Example 1:
3
/ \
9 20
/ \
15 7
Input: [3,9,20,null,null,15,7]
Output: [[9],[3,15],[20],[7]]
Explanation:
Without loss of generality, we can assume the root node is at position (0, 0):
Then, the node with value 9 occurs at position (-1, -1);
The nodes with values 3 and 15 occur at positions (0, 0) and (0, -2);
The node with value 20 occurs at position (1, -1);
The node with value 7 occurs at position (2, -2).
Example 2:
1
/ \
2 3
/ \ / \
4 5 6 7
Input: [1,2,3,4,5,6,7]
Output: [[4],[2],[1,5,6],[3],[7]]
Explanation:
The node with value 5 and the node with value 6 have the same position according to the given scheme.
However, in the report "[1,5,6]", the node value of 5 comes first since 5 is smaller than 6.
Note:
- The tree will have between 1 and 1000 nodes.
- Each node's value will be between 0 and 1000.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """Binary-tree node holding a value; children default to None."""

    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None
class SolutionOrderValsDictSortedLevelOrderValsDict(object):
    def verticalTraversal(self, root):
        """
        :type root: TreeNode
        :rtype: List[List[int]]

        Breadth-first traversal; the values collected per vertical column
        on each level are sorted before being merged into the global
        per-column result, which is finally emitted left to right.

        Time complexity: O(n+n*logn), where n is number of nodes.
        Space complexity: O(n).
        """
        from collections import defaultdict
        from collections import deque

        merged = defaultdict(list)
        pending = deque([(root, 0)])
        while pending:
            per_level = defaultdict(list)
            level_size = len(pending)
            while level_size:
                level_size -= 1
                node, column = pending.pop()
                per_level[column].append(node.val)
                if node.left is not None:
                    pending.appendleft((node.left, column - 1))
                if node.right is not None:
                    pending.appendleft((node.right, column + 1))
            # Same (column, level) ties resolve by ascending value.
            for column, values in per_level.items():
                merged[column].extend(sorted(values))
        return [values for _, values in sorted(merged.items())]
def main():
    """Run verticalTraversal on the three sample trees and print the results.

    print() is used as a function so the script runs on both Python 2 and
    Python 3 (the previous `print expr` statements were Python-2 only and
    are a SyntaxError under Python 3).
    """
    # Input: [3,9,20,null,null,15,7]
    # Output: [[9],[3,15],[20],[7]]
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))

    # Input: [1,2,3,4,5,6,7]
    # Output: [[4],[2],[1,5,6],[3],[7]]
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    root.left.left = TreeNode(4)
    root.left.right = TreeNode(5)
    root.right.left = TreeNode(6)
    root.right.right = TreeNode(7)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))

    # Input: [0,2,1,3,null,null,null,4,5,null,7,6,null,10,8,11,9]
    # Output: [[4,10,11],[3,6,7],[2,5,8,9],[0],[1]]
    root = TreeNode(0)
    root.left = TreeNode(2)
    root.right = TreeNode(1)
    root.left.left = TreeNode(3)
    root.left.right = None
    root.right.left = None
    root.right.right = None
    root.left.left.left = TreeNode(4)
    root.left.left.right = TreeNode(5)
    root.left.left.left.right = TreeNode(7)
    root.left.left.right.left = TreeNode(6)
    root.left.left.left.right.left = TreeNode(10)
    root.left.left.left.right.right = TreeNode(8)
    root.left.left.right.left.left = TreeNode(11)
    root.left.left.right.left.right = TreeNode(9)
    print(SolutionOrderValsDictSortedLevelOrderValsDict().verticalTraversal(root))


if __name__ == '__main__':
    main()
|
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
    """Apply the str.format pattern *fmt* to *value*.

    Dicts are expanded as keyword arguments so named placeholders work;
    any other value is passed positionally.
    """
    return fmt.format(**value) if isinstance(value, dict) else fmt.format(value)
TemplateTag<stringformat> will return an error string instead of raising an exception when an error occurs
from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
    """Apply the str.format pattern *fmt* to *value*.

    Dicts are expanded as keyword arguments; other values are passed
    positionally.  Formatting failures degrade to an error string so a
    template never crashes on a bad pattern.
    """
    try:
        if isinstance(value, dict):
            return fmt.format(**value)
        return fmt.format(value)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; only genuine formatting errors are swallowed.
        return 'Value[%s]::%s cannot format by pattern: %s'%(value, type(value).__name__, fmt)
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module implementing RNN Cells."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import math
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops.math_ops import sigmoid
from tensorflow.python.ops.math_ops import tanh
from tensorflow.python.ops.rnn_cell_impl import _RNNCell as RNNCell
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
# Variable names used for the cells' linear-layer parameters; presumably
# created by the `_linear` helper (defined elsewhere in this file) -- confirm.
_BIAS_VARIABLE_NAME = "biases"
_WEIGHTS_VARIABLE_NAME = "weights"
@contextlib.contextmanager
def _checked_scope(cell, scope, reuse=None, **kwargs):
  """Yield a variable scope for `cell`, guarding against accidental sharing.

  Opens `scope` (forwarding `reuse` and `kwargs` to `vs.variable_scope`)
  and enforces two invariants:
  * a cell used before (it has a `_scope` attribute) must be re-entered
    under the very same scope it first used;
  * a cell used for the first time must not silently adopt weights that
    already exist in the scope, unless `reuse=True` was passed explicitly.

  Raises:
    ValueError: if either invariant is violated.
  """
  if reuse is not None:
    kwargs["reuse"] = reuse
  with vs.variable_scope(scope, **kwargs) as checking_scope:
    scope_name = checking_scope.name
    if hasattr(cell, "_scope"):
      # Cell was used before: the scope must match its first use exactly.
      cell_scope = cell._scope  # pylint: disable=protected-access
      if cell_scope.name != checking_scope.name:
        raise ValueError(
            "Attempt to reuse RNNCell %s with a different variable scope than "
            "its first use.  First use of cell was with scope '%s', this "
            "attempt is with scope '%s'.  Please create a new instance of the "
            "cell if you would like it to use a different set of weights.  "
            "If before you were using: MultiRNNCell([%s(...)] * num_layers), "
            "change to: MultiRNNCell([%s(...) for _ in range(num_layers)]).  "
            "If before you were using the same cell instance as both the "
            "forward and reverse cell of a bidirectional RNN, simply create "
            "two instances (one for forward, one for reverse).  "
            "In May 2017, we will start transitioning this cell's behavior "
            "to use existing stored weights, if any, when it is called "
            "with scope=None (which can lead to silent model degradation, so "
            "this error will remain until then.)"
            % (cell, cell_scope.name, scope_name, type(cell).__name__,
               type(cell).__name__))
    else:
      # First use: probe the scope for pre-existing weights so we do not
      # silently pick up another cell's parameters.
      weights_found = False
      try:
        with vs.variable_scope(checking_scope, reuse=True):
          vs.get_variable(_WEIGHTS_VARIABLE_NAME)
        weights_found = True
      except ValueError:
        # get_variable raises ValueError when the variable does not exist,
        # which is the expected (clean-scope) case.
        pass
      if weights_found and reuse is None:
        raise ValueError(
            "Attempt to have a second RNNCell use the weights of a variable "
            "scope that already has weights: '%s'; and the cell was not "
            "constructed as %s(..., reuse=True).  "
            "To share the weights of an RNNCell, simply "
            "reuse it in your second calculation, or create a new one with "
            "the argument reuse=True." % (scope_name, type(cell).__name__))

    # Everything is OK.  Update the cell's scope and yield it.
    cell._scope = checking_scope  # pylint: disable=protected-access
    yield checking_scope
class BasicRNNCell(RNNCell):
  """The most basic RNN cell: a single linear layer plus a nonlinearity."""

  def __init__(self, num_units, input_size=None, activation=tanh, reuse=None):
    # `input_size` is accepted only for backwards compatibility; unused.
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units        # width of output and state
    self._activation = activation      # nonlinearity, default tanh
    self._reuse = reuse                # variable-scope reuse flag

  @property
  def state_size(self):
    return self._num_units

  @property
  def output_size(self):
    return self._num_units

  def __call__(self, inputs, state, scope=None):
    """Most basic RNN: output = new_state = act(W * input + U * state + B)."""
    scope_name = scope or "basic_rnn_cell"
    with _checked_scope(self, scope_name, reuse=self._reuse):
      pre_activation = _linear([inputs, state], self._num_units, True)
      output = self._activation(pre_activation)
      return output, output
class GRUCell(RNNCell):
  """Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078)."""

  def __init__(self, num_units, input_size=None, activation=tanh, reuse=None):
    # `input_size` is accepted only for backwards compatibility; unused.
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units      # width of output and state
    self._activation = activation    # candidate nonlinearity, default tanh
    self._reuse = reuse              # variable-scope reuse flag

  @property
  def state_size(self):
    # GRU state is a single tensor of `num_units` units.
    return self._num_units

  @property
  def output_size(self):
    return self._num_units

  def __call__(self, inputs, state, scope=None):
    """Gated recurrent unit (GRU) with nunits cells."""
    with _checked_scope(self, scope or "gru_cell", reuse=self._reuse):
      with vs.variable_scope("gates"):  # Reset gate and update gate.
        # We start with bias of 1.0 to not reset and not update.
        r, u = array_ops.split(
            value=_linear(
                [inputs, state], 2 * self._num_units, True, 1.0),
            num_or_size_splits=2,
            axis=1)
        r, u = sigmoid(r), sigmoid(u)
      with vs.variable_scope("candidate"):
        # Candidate activation uses the *reset-gated* previous state.
        c = self._activation(_linear([inputs, r * state],
                                     self._num_units, True))
      # Convex combination of previous state and candidate.
      new_h = u * state + (1 - u) * c
    return new_h, new_h
_LSTMStateTuple = collections.namedtuple("LSTMStateTuple", ("c", "h"))


class LSTMStateTuple(_LSTMStateTuple):
  """Tuple used by LSTM Cells for `state_size`, `zero_state`, and output state.

  Stores two elements: `(c, h)`, in that order.

  Only used when `state_is_tuple=True`.
  """
  __slots__ = ()

  @property
  def dtype(self):
    """dtype shared by both state tensors; raises TypeError on a mismatch."""
    c_dtype = self.c.dtype
    h_dtype = self.h.dtype
    if c_dtype != h_dtype:
      raise TypeError("Inconsistent internal state: %s vs %s" %
                      (str(c_dtype), str(h_dtype)))
    return c_dtype
class BasicLSTMCell(RNNCell):
  """Basic LSTM recurrent network cell.

  The implementation is based on: http://arxiv.org/abs/1409.2329.

  We add forget_bias (default: 1) to the biases of the forget gate in order to
  reduce the scale of forgetting in the beginning of the training.

  It does not allow cell clipping, a projection layer, and does not
  use peep-hole connections: it is the basic baseline.

  For advanced models, please use the full LSTMCell that follows.
  """

  def __init__(self, num_units, forget_bias=1.0, input_size=None,
               state_is_tuple=True, activation=tanh, reuse=None):
    """Initialize the basic LSTM cell.

    Args:
      num_units: int, The number of units in the LSTM cell.
      forget_bias: float, The bias added to forget gates (see above).
      input_size: Deprecated and unused.
      state_is_tuple: If True, accepted and returned states are 2-tuples of
        the `c_state` and `m_state`.  If False, they are concatenated
        along the column axis.  The latter behavior will soon be deprecated.
      activation: Activation function of the inner states.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope.  If not `True`, and the existing scope already has
        the given variables, an error is raised.
    """
    if not state_is_tuple:
      logging.warn("%s: Using a concatenated state is slower and will soon be "
                   "deprecated.  Use state_is_tuple=True.", self)
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._forget_bias = forget_bias
    self._state_is_tuple = state_is_tuple
    self._activation = activation
    self._reuse = reuse

  @property
  def state_size(self):
    # Tuple of (c, h) sizes when state_is_tuple, else one concatenated tensor.
    return (LSTMStateTuple(self._num_units, self._num_units)
            if self._state_is_tuple else 2 * self._num_units)

  @property
  def output_size(self):
    return self._num_units

  def __call__(self, inputs, state, scope=None):
    """Long short-term memory cell (LSTM)."""
    with _checked_scope(self, scope or "basic_lstm_cell", reuse=self._reuse):
      # Parameters of gates are concatenated into one multiply for efficiency.
      if self._state_is_tuple:
        c, h = state
      else:
        # Concatenated state: first half is c, second half is h.
        c, h = array_ops.split(value=state, num_or_size_splits=2, axis=1)
      concat = _linear([inputs, h], 4 * self._num_units, True)

      # i = input_gate, j = new_input, f = forget_gate, o = output_gate
      i, j, f, o = array_ops.split(value=concat, num_or_size_splits=4, axis=1)

      # Standard LSTM update; forget_bias shifts f before the sigmoid.
      new_c = (c * sigmoid(f + self._forget_bias) + sigmoid(i) *
               self._activation(j))
      new_h = self._activation(new_c) * sigmoid(o)

      if self._state_is_tuple:
        new_state = LSTMStateTuple(new_c, new_h)
      else:
        new_state = array_ops.concat([new_c, new_h], 1)
      return new_h, new_state
class LSTMCell(RNNCell):
  """Long short-term memory unit (LSTM) recurrent network cell.
  The default non-peephole implementation is based on:
  http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf
  S. Hochreiter and J. Schmidhuber.
  "Long Short-Term Memory". Neural Computation, 9(8):1735-1780, 1997.
  The peephole implementation is based on:
  https://research.google.com/pubs/archive/43905.pdf
  Hasim Sak, Andrew Senior, and Francoise Beaufays.
  "Long short-term memory recurrent neural network architectures for
  large scale acoustic modeling." INTERSPEECH, 2014.
  The class uses optional peep-hole connections, optional cell clipping, and
  an optional projection layer.
  """
  def __init__(self, num_units, input_size=None,
               use_peepholes=False, cell_clip=None,
               initializer=None, num_proj=None, proj_clip=None,
               num_unit_shards=None, num_proj_shards=None,
               forget_bias=1.0, state_is_tuple=True,
               activation=tanh, reuse=None):
    """Initialize the parameters for an LSTM cell.
    Args:
      num_units: int, The number of units in the LSTM cell
      input_size: Deprecated and unused.
      use_peepholes: bool, set True to enable diagonal/peephole connections.
      cell_clip: (optional) A float value, if provided the cell state is clipped
        by this value prior to the cell output activation.
      initializer: (optional) The initializer to use for the weight and
        projection matrices.
      num_proj: (optional) int, The output dimensionality for the projection
        matrices. If None, no projection is performed.
      proj_clip: (optional) A float value. If `num_proj > 0` and `proj_clip` is
        provided, then the projected values are clipped elementwise to within
        `[-proj_clip, proj_clip]`.
      num_unit_shards: Deprecated, will be removed by Jan. 2017.
        Use a variable_scope partitioner instead.
      num_proj_shards: Deprecated, will be removed by Jan. 2017.
        Use a variable_scope partitioner instead.
      forget_bias: Biases of the forget gate are initialized by default to 1
        in order to reduce the scale of forgetting at the beginning of
        the training.
      state_is_tuple: If True, accepted and returned states are 2-tuples of
        the `c_state` and `m_state`. If False, they are concatenated
        along the column axis. This latter behavior will soon be deprecated.
      activation: Activation function of the inner states.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
    """
    if not state_is_tuple:
      logging.warn("%s: Using a concatenated state is slower and will soon be "
                   "deprecated. Use state_is_tuple=True.", self)
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    if num_unit_shards is not None or num_proj_shards is not None:
      logging.warn(
          "%s: The num_unit_shards and proj_unit_shards parameters are "
          "deprecated and will be removed in Jan 2017. "
          "Use a variable scope with a partitioner instead.", self)
    self._num_units = num_units
    self._use_peepholes = use_peepholes
    self._cell_clip = cell_clip
    self._initializer = initializer
    self._num_proj = num_proj
    self._proj_clip = proj_clip
    self._num_unit_shards = num_unit_shards
    self._num_proj_shards = num_proj_shards
    self._forget_bias = forget_bias
    self._state_is_tuple = state_is_tuple
    self._activation = activation
    self._reuse = reuse
    # With a projection layer, h (the "m" state) has width num_proj while c
    # keeps width num_units; without one, both have width num_units.
    if num_proj:
      self._state_size = (
          LSTMStateTuple(num_units, num_proj)
          if state_is_tuple else num_units + num_proj)
      self._output_size = num_proj
    else:
      self._state_size = (
          LSTMStateTuple(num_units, num_units)
          if state_is_tuple else 2 * num_units)
      self._output_size = num_units
  @property
  def state_size(self):
    return self._state_size
  @property
  def output_size(self):
    return self._output_size
  def __call__(self, inputs, state, scope=None):
    """Run one step of LSTM.
    Args:
      inputs: input Tensor, 2D, batch x num_units.
      state: if `state_is_tuple` is False, this must be a state Tensor,
        `2-D, batch x state_size`. If `state_is_tuple` is True, this must be a
        tuple of state Tensors, both `2-D`, with column sizes `c_state` and
        `m_state`.
      scope: VariableScope for the created subgraph; defaults to "lstm_cell".
    Returns:
      A tuple containing:
      - A `2-D, [batch x output_dim]`, Tensor representing the output of the
        LSTM after reading `inputs` when previous state was `state`.
        Here output_dim is:
           num_proj if num_proj was set,
           num_units otherwise.
      - Tensor(s) representing the new state of LSTM after reading `inputs` when
        the previous state was `state`. Same type and shape(s) as `state`.
    Raises:
      ValueError: If input size cannot be inferred from inputs via
        static shape inference.
    """
    num_proj = self._num_units if self._num_proj is None else self._num_proj
    if self._state_is_tuple:
      (c_prev, m_prev) = state
    else:
      # Concatenated layout: [c (num_units columns) | m (num_proj columns)].
      c_prev = array_ops.slice(state, [0, 0], [-1, self._num_units])
      m_prev = array_ops.slice(state, [0, self._num_units], [-1, num_proj])
    dtype = inputs.dtype
    input_size = inputs.get_shape().with_rank(2)[1]
    if input_size.value is None:
      raise ValueError("Could not infer input size from inputs.get_shape()[-1]")
    with _checked_scope(self, scope or "lstm_cell",
                        initializer=self._initializer,
                        reuse=self._reuse) as unit_scope:
      # Deprecated sharding: emulate via a fixed-size partitioner.
      if self._num_unit_shards is not None:
        unit_scope.set_partitioner(
            partitioned_variables.fixed_size_partitioner(
                self._num_unit_shards))
      # i = input_gate, j = new_input, f = forget_gate, o = output_gate
      lstm_matrix = _linear([inputs, m_prev], 4 * self._num_units, bias=True)
      i, j, f, o = array_ops.split(
          value=lstm_matrix, num_or_size_splits=4, axis=1)
      # Diagonal connections
      if self._use_peepholes:
        with vs.variable_scope(unit_scope) as projection_scope:
          # Peephole weights are per-unit vectors and are never sharded.
          if self._num_unit_shards is not None:
            projection_scope.set_partitioner(None)
          w_f_diag = vs.get_variable(
              "w_f_diag", shape=[self._num_units], dtype=dtype)
          w_i_diag = vs.get_variable(
              "w_i_diag", shape=[self._num_units], dtype=dtype)
          w_o_diag = vs.get_variable(
              "w_o_diag", shape=[self._num_units], dtype=dtype)
      if self._use_peepholes:
        c = (sigmoid(f + self._forget_bias + w_f_diag * c_prev) * c_prev +
             sigmoid(i + w_i_diag * c_prev) * self._activation(j))
      else:
        c = (sigmoid(f + self._forget_bias) * c_prev + sigmoid(i) *
             self._activation(j))
      # Optional clipping of the cell state before the output activation.
      if self._cell_clip is not None:
        # pylint: disable=invalid-unary-operand-type
        c = clip_ops.clip_by_value(c, -self._cell_clip, self._cell_clip)
        # pylint: enable=invalid-unary-operand-type
      if self._use_peepholes:
        m = sigmoid(o + w_o_diag * c) * self._activation(c)
      else:
        m = sigmoid(o) * self._activation(c)
      if self._num_proj is not None:
        with vs.variable_scope("projection") as proj_scope:
          if self._num_proj_shards is not None:
            proj_scope.set_partitioner(
                partitioned_variables.fixed_size_partitioner(
                    self._num_proj_shards))
          m = _linear(m, self._num_proj, bias=False)
          if self._proj_clip is not None:
            # pylint: disable=invalid-unary-operand-type
            m = clip_ops.clip_by_value(m, -self._proj_clip, self._proj_clip)
            # pylint: enable=invalid-unary-operand-type
    new_state = (LSTMStateTuple(c, m) if self._state_is_tuple else
                 array_ops.concat([c, m], 1))
    return m, new_state
class OutputProjectionWrapper(RNNCell):
  """Wraps an RNNCell and applies a learned linear projection to its output.
  Note that it is frequently more efficient to skip this wrapper: concatenate
  the cell outputs over all time steps, project that whole batch at once, and
  then split the result (or feed it directly into a softmax).
  """
  def __init__(self, cell, output_size, reuse=None):
    """Create a cell whose output is linearly projected to `output_size`.
    Args:
      cell: an RNNCell, a projection to output_size is added to it.
      output_size: integer, the size of the output after projection.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already
        has the given variables, an error is raised.
    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if output_size is not positive.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    if output_size < 1:
      raise ValueError("Parameter output_size must be > 0: %d." % output_size)
    self._cell = cell
    self._output_size = output_size
    self._reuse = reuse
  @property
  def state_size(self):
    # State handling is delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._output_size
  def __call__(self, inputs, state, scope=None):
    """Run the wrapped cell, then project its output to `output_size`."""
    cell_output, next_state = self._cell(inputs, state)
    # Default scope: "OutputProjectionWrapper"
    with _checked_scope(self, scope or "output_projection_wrapper",
                        reuse=self._reuse):
      projection = _linear(cell_output, self._output_size, True)
    return projection, next_state
class InputProjectionWrapper(RNNCell):
  """Wraps an RNNCell and linearly projects its inputs before each step.
  Note that it is frequently more efficient to skip this wrapper: concatenate
  your inputs over all time steps, project that whole batch at once, and then
  split the result.
  """
  def __init__(self, cell, num_proj, input_size=None):
    """Create a cell whose inputs are first projected to `num_proj` units.
    Args:
      cell: an RNNCell, a projection of inputs is added before it.
      num_proj: Python integer. The dimension to project to.
      input_size: Deprecated and unused.
    Raises:
      TypeError: if cell is not an RNNCell.
    """
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    self._cell = cell
    self._num_proj = num_proj
  @property
  def state_size(self):
    # State handling is delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._cell.output_size
  def __call__(self, inputs, state, scope=None):
    """Project the inputs, then run the wrapped cell on the projection."""
    # Default scope: "InputProjectionWrapper"
    with vs.variable_scope(scope or "input_projection_wrapper"):
      projected_inputs = _linear(inputs, self._num_proj, True)
    return self._cell(projected_inputs, state)
class DropoutWrapper(RNNCell):
  """Operator adding dropout to inputs and outputs of the given cell."""
  def __init__(self, cell, input_keep_prob=1.0, output_keep_prob=1.0,
               seed=None):
    """Create a cell with added input and/or output dropout.
    Dropout is never used on the state.
    Args:
      cell: an RNNCell, a projection to output_size is added to it.
      input_keep_prob: unit Tensor or float between 0 and 1, input keep
        probability; if it is float and 1, no input dropout will be added.
      output_keep_prob: unit Tensor or float between 0 and 1, output keep
        probability; if it is float and 1, no output dropout will be added.
      seed: (optional) integer, the randomness seed.
    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if keep_prob is not between 0 and 1.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not a RNNCell.")
    # Use %g (not %d) so the offending float is reported exactly: with %d a
    # bad value such as 0.5 or 1.5 was truncated to "0" / "1", making the
    # error message misleading.
    if (isinstance(input_keep_prob, float) and
        not (input_keep_prob >= 0.0 and input_keep_prob <= 1.0)):
      raise ValueError("Parameter input_keep_prob must be between 0 and 1: %g"
                       % input_keep_prob)
    if (isinstance(output_keep_prob, float) and
        not (output_keep_prob >= 0.0 and output_keep_prob <= 1.0)):
      raise ValueError("Parameter output_keep_prob must be between 0 and 1: %g"
                       % output_keep_prob)
    self._cell = cell
    self._input_keep_prob = input_keep_prob
    self._output_keep_prob = output_keep_prob
    self._seed = seed
  @property
  def state_size(self):
    # State handling is delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._cell.output_size
  def __call__(self, inputs, state, scope=None):
    """Run the cell with the declared dropouts."""
    # Dropout is skipped only when the keep prob is a Python float equal to
    # 1.0; a Tensor keep prob always goes through nn_ops.dropout.
    if (not isinstance(self._input_keep_prob, float) or
        self._input_keep_prob < 1):
      inputs = nn_ops.dropout(inputs, self._input_keep_prob, seed=self._seed)
    output, new_state = self._cell(inputs, state, scope)
    if (not isinstance(self._output_keep_prob, float) or
        self._output_keep_prob < 1):
      output = nn_ops.dropout(output, self._output_keep_prob, seed=self._seed)
    return output, new_state
class ResidualWrapper(RNNCell):
  """RNNCell wrapper that adds the cell's inputs to its outputs.
  This implements a residual (skip) connection around the wrapped cell.
  """
  def __init__(self, cell):
    """Constructs a `ResidualWrapper` for `cell`.
    Args:
      cell: An instance of `RNNCell`.
    """
    self._cell = cell
  @property
  def state_size(self):
    # State handling is delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._cell.output_size
  def __call__(self, inputs, state, scope=None):
    """Run the cell, then return `inputs + outputs` element-wise.
    Args:
      inputs: cell inputs.
      state: cell state.
      scope: optional cell scope.
    Returns:
      Tuple of cell outputs and new state.
    Raises:
      TypeError: If cell inputs and outputs have different structure (type).
      ValueError: If cell inputs and outputs have different structure (value).
    """
    cell_outputs, next_state = self._cell(inputs, state, scope=scope)
    nest.assert_same_structure(inputs, cell_outputs)
    # Each output tensor must be shape-compatible with its matching input,
    # otherwise the element-wise addition below is ill-defined.
    def _check_compatible(inp, out):
      inp.get_shape().assert_is_compatible_with(out.get_shape())
    nest.map_structure(_check_compatible, inputs, cell_outputs)
    def _residual_add(inp, out):
      return inp + out
    residual_outputs = nest.map_structure(_residual_add, inputs, cell_outputs)
    return (residual_outputs, next_state)
class DeviceWrapper(RNNCell):
  """Pins every invocation of the wrapped RNNCell to a fixed device."""
  def __init__(self, cell, device):
    """Construct a `DeviceWrapper` for `cell` with device `device`.
    Ensures the wrapped `cell` is called with `tf.device(device)`.
    Args:
      cell: An instance of `RNNCell`.
      device: A device string or function, for passing to `tf.device`.
    """
    self._cell = cell
    self._device = device
  @property
  def state_size(self):
    # Delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    # Delegated unchanged to the wrapped cell.
    return self._cell.output_size
  def __call__(self, inputs, state, scope=None):
    """Run the wrapped cell inside a `tf.device(...)` context."""
    with ops.device(self._device):
      return self._cell(inputs, state, scope=scope)
class EmbeddingWrapper(RNNCell):
  """Operator adding input embedding to the given cell.
  Note: in many cases it may be more efficient to not use this wrapper,
  but instead concatenate the whole sequence of your inputs in time,
  do the embedding on this batch-concatenated sequence, then split it and
  feed into your RNN.
  """
  def __init__(self, cell, embedding_classes, embedding_size, initializer=None,
               reuse=None):
    """Create a cell with an added input embedding.
    Args:
      cell: an RNNCell, an embedding will be put before its inputs.
      embedding_classes: integer, how many symbols will be embedded.
      embedding_size: integer, the size of the vectors we embed into.
      initializer: an initializer to use when creating the embedding;
        if None, the initializer from variable scope or a default one is used.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if embedding_classes is not positive.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    if embedding_classes <= 0 or embedding_size <= 0:
      raise ValueError("Both embedding_classes and embedding_size must be > 0: "
                       "%d, %d." % (embedding_classes, embedding_size))
    self._cell = cell
    self._embedding_classes = embedding_classes
    self._embedding_size = embedding_size
    self._initializer = initializer
    self._reuse = reuse
  @property
  def state_size(self):
    # State handling is delegated unchanged to the wrapped cell.
    return self._cell.state_size
  @property
  def output_size(self):
    return self._cell.output_size
  def __call__(self, inputs, state, scope=None):
    """Run the cell on embedded inputs."""
    with _checked_scope(self, scope or "embedding_wrapper", reuse=self._reuse):
      # The embedding variable and lookup are pinned to the CPU.
      with ops.device("/cpu:0"):
        # Initializer priority: explicit argument, then the enclosing variable
        # scope's initializer, then a uniform default with variance 1.
        if self._initializer:
          initializer = self._initializer
        elif vs.get_variable_scope().initializer:
          initializer = vs.get_variable_scope().initializer
        else:
          # Default initializer for embeddings should have variance=1.
          sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
          initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
        # Exact-type check: a plain tuple state takes its dtype from the first
        # element; anything else (a Tensor, or an LSTMStateTuple which defines
        # its own .dtype property) is asked for .dtype directly.
        if type(state) is tuple:
          data_type = state[0].dtype
        else:
          data_type = state.dtype
        embedding = vs.get_variable(
            "embedding", [self._embedding_classes, self._embedding_size],
            initializer=initializer,
            dtype=data_type)
        embedded = embedding_ops.embedding_lookup(
            embedding, array_ops.reshape(inputs, [-1]))
    return self._cell(embedded, state)
class MultiRNNCell(RNNCell):
  """RNN cell composed sequentially of multiple simple cells."""
  def __init__(self, cells, state_is_tuple=True):
    """Create a RNN cell composed sequentially of a number of RNNCells.
    Args:
      cells: list of RNNCells that will be composed in this order.
      state_is_tuple: If True, accepted and returned states are n-tuples, where
        `n = len(cells)`. If False, the states are all
        concatenated along the column axis. This latter behavior will soon be
        deprecated.
    Raises:
      ValueError: if cells is empty (not allowed), or at least one of the cells
        returns a state tuple but the flag `state_is_tuple` is `False`.
    """
    if not cells:
      raise ValueError("Must specify at least one cell for MultiRNNCell.")
    if not nest.is_sequence(cells):
      raise TypeError(
          "cells must be a list or tuple, but saw: %s." % cells)
    self._cells = cells
    self._state_is_tuple = state_is_tuple
    if not state_is_tuple:
      # A concatenated state cannot represent nested (tuple) per-cell states.
      if any(nest.is_sequence(c.state_size) for c in self._cells):
        raise ValueError("Some cells return tuples of states, but the flag "
                         "state_is_tuple is not set. State sizes are: %s"
                         % str([c.state_size for c in self._cells]))
  @property
  def state_size(self):
    # One entry per layer when tuple states are used; otherwise the total
    # width of the concatenated state.
    if self._state_is_tuple:
      return tuple(cell.state_size for cell in self._cells)
    else:
      return sum([cell.state_size for cell in self._cells])
  @property
  def output_size(self):
    # The stack emits the top (last) layer's output.
    return self._cells[-1].output_size
  def __call__(self, inputs, state, scope=None):
    """Run this multi-layer cell on inputs, starting from state."""
    with vs.variable_scope(scope or "multi_rnn_cell"):
      cur_state_pos = 0  # Column offset into a concatenated state.
      cur_inp = inputs
      new_states = []
      for i, cell in enumerate(self._cells):
        # Each layer gets its own sub-scope so variables do not collide.
        with vs.variable_scope("cell_%d" % i):
          if self._state_is_tuple:
            if not nest.is_sequence(state):
              raise ValueError(
                  "Expected state to be a tuple of length %d, but received: %s"
                  % (len(self.state_size), state))
            cur_state = state[i]
          else:
            # Slice this layer's columns out of the concatenated state.
            cur_state = array_ops.slice(
                state, [0, cur_state_pos], [-1, cell.state_size])
            cur_state_pos += cell.state_size
          # Each layer's output feeds the next layer's input.
          cur_inp, new_state = cell(cur_inp, cur_state)
          new_states.append(new_state)
    new_states = (tuple(new_states) if self._state_is_tuple else
                  array_ops.concat(new_states, 1))
    return cur_inp, new_states
class _SlimRNNCell(RNNCell):
  """A simple wrapper for slim.rnn_cells."""
  def __init__(self, cell_fn):
    """Create a SlimRNNCell from a cell_fn.
    Args:
      cell_fn: a function which takes (inputs, state, scope) and produces the
        outputs and the new_state. Additionally when called with inputs=None and
        state=None it should return (initial_outputs, initial_state).
    Raises:
      TypeError: if cell_fn is not callable
      ValueError: if cell_fn cannot produce a valid initial state.
    """
    if not callable(cell_fn):
      raise TypeError("cell_fn %s needs to be callable", cell_fn)
    self._cell_fn = cell_fn
    # NOTE(review): `cell_fn.func` assumes cell_fn is a functools.partial-like
    # object wrapping the real cell function -- confirm against callers, as a
    # plain function has no `.func` attribute.
    self._cell_name = cell_fn.func.__name__
    # Probe the cell once with (None, None) to discover the static widths of
    # its output and state tensors.
    init_output, init_state = self._cell_fn(None, None)
    output_shape = init_output.get_shape()
    state_shape = init_state.get_shape()
    self._output_size = output_shape.with_rank(2)[1].value
    self._state_size = state_shape.with_rank(2)[1].value
    if self._output_size is None:
      raise ValueError("Initial output created by %s has invalid shape %s" %
                       (self._cell_name, output_shape))
    if self._state_size is None:
      raise ValueError("Initial state created by %s has invalid shape %s" %
                       (self._cell_name, state_shape))
  @property
  def state_size(self):
    return self._state_size
  @property
  def output_size(self):
    return self._output_size
  def __call__(self, inputs, state, scope=None):
    # The wrapped function's name serves as the default variable scope.
    scope = scope or self._cell_name
    output, state = self._cell_fn(inputs, state, scope=scope)
    return output, state
def _linear(args, output_size, bias, bias_start=0.0):
  """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
  Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
    output_size: int, second dimension of W[i].
    bias: boolean, whether to add a bias term or not.
    bias_start: starting value to initialize the bias; 0 by default.
  Returns:
    A 2D Tensor with shape [batch x output_size] equal to
    sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
  Raises:
    ValueError: if some of the arguments has unspecified or wrong shape.
  """
  if args is None or (nest.is_sequence(args) and not args):
    raise ValueError("`args` must be specified")
  if not nest.is_sequence(args):
    args = [args]
  # Calculate the total size of arguments on dimension 1.
  total_arg_size = 0
  shapes = [a.get_shape() for a in args]
  for shape in shapes:
    if shape.ndims != 2:
      raise ValueError("linear is expecting 2D arguments: %s" % shapes)
    if shape[1].value is None:
      raise ValueError("linear expects shape[1] to be provided for shape %s, "
                       "but saw %s" % (shape, shape[1]))
    else:
      total_arg_size += shape[1].value
  # All variables are created with the dtype of the first argument.
  dtype = [a.dtype for a in args][0]
  # Now the computation.
  scope = vs.get_variable_scope()
  with vs.variable_scope(scope) as outer_scope:
    # One weight matrix multiplies the column-concatenation of all args:
    # mathematically identical to summing per-argument matmuls, but a single
    # op.
    weights = vs.get_variable(
        _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)
    if len(args) == 1:
      res = math_ops.matmul(args[0], weights)
    else:
      res = math_ops.matmul(array_ops.concat(args, 1), weights)
    if not bias:
      return res
    with vs.variable_scope(outer_scope) as inner_scope:
      # Biases are never sharded, even when the outer scope has a partitioner.
      inner_scope.set_partitioner(None)
      biases = vs.get_variable(
          _BIAS_VARIABLE_NAME, [output_size],
          dtype=dtype,
          initializer=init_ops.constant_initializer(bias_start, dtype=dtype))
    return nn_ops.bias_add(res, biases)
Remove sigmoid by calling sigmoid before split (#7749)
Remove one sigmoid call by applying sigmoid before the split.
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module implementing RNN Cells."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import math
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops.math_ops import sigmoid
from tensorflow.python.ops.math_ops import tanh
from tensorflow.python.ops.rnn_cell_impl import _RNNCell as RNNCell
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
_BIAS_VARIABLE_NAME = "biases"
_WEIGHTS_VARIABLE_NAME = "weights"
@contextlib.contextmanager
def _checked_scope(cell, scope, reuse=None, **kwargs):
  """Opens a variable scope for `cell`, guarding against accidental sharing.
  Raises ValueError if the cell was previously used under a different scope,
  or if a fresh cell would silently pick up weights that some other cell
  already created in this scope.
  """
  if reuse is not None:
    kwargs["reuse"] = reuse
  with vs.variable_scope(scope, **kwargs) as checking_scope:
    scope_name = checking_scope.name
    if hasattr(cell, "_scope"):
      # The cell has been called before: it must be reused under the exact
      # same scope it was first used in.
      cell_scope = cell._scope  # pylint: disable=protected-access
      if cell_scope.name != checking_scope.name:
        raise ValueError(
            "Attempt to reuse RNNCell %s with a different variable scope than "
            "its first use. First use of cell was with scope '%s', this "
            "attempt is with scope '%s'. Please create a new instance of the "
            "cell if you would like it to use a different set of weights. "
            "If before you were using: MultiRNNCell([%s(...)] * num_layers), "
            "change to: MultiRNNCell([%s(...) for _ in range(num_layers)]). "
            "If before you were using the same cell instance as both the "
            "forward and reverse cell of a bidirectional RNN, simply create "
            "two instances (one for forward, one for reverse). "
            "In May 2017, we will start transitioning this cell's behavior "
            "to use existing stored weights, if any, when it is called "
            "with scope=None (which can lead to silent model degradation, so "
            "this error will remain until then.)"
            % (cell, cell_scope.name, scope_name, type(cell).__name__,
               type(cell).__name__))
    else:
      # First use of this cell: probe whether the scope already contains
      # weights created by someone else (a second cell must opt in with
      # reuse=True to share them).
      weights_found = False
      try:
        with vs.variable_scope(checking_scope, reuse=True):
          vs.get_variable(_WEIGHTS_VARIABLE_NAME)
        weights_found = True
      except ValueError:
        pass
      if weights_found and reuse is None:
        raise ValueError(
            "Attempt to have a second RNNCell use the weights of a variable "
            "scope that already has weights: '%s'; and the cell was not "
            "constructed as %s(..., reuse=True). "
            "To share the weights of an RNNCell, simply "
            "reuse it in your second calculation, or create a new one with "
            "the argument reuse=True." % (scope_name, type(cell).__name__))
    # Everything is OK. Update the cell's scope and yield it.
    cell._scope = checking_scope  # pylint: disable=protected-access
    yield checking_scope
class BasicRNNCell(RNNCell):
  """The most basic RNN cell: a single fully-connected layer with an
  activation, whose output doubles as its state.
  """
  def __init__(self, num_units, input_size=None, activation=tanh, reuse=None):
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._activation = activation
    self._reuse = reuse
  @property
  def state_size(self):
    # State and output are the same tensor for this cell.
    return self._num_units
  @property
  def output_size(self):
    return self._num_units
  def __call__(self, inputs, state, scope=None):
    """Most basic RNN: output = new_state = act(W * input + U * state + B)."""
    with _checked_scope(self, scope or "basic_rnn_cell", reuse=self._reuse):
      new_h = self._activation(
          _linear([inputs, state], self._num_units, True))
    return new_h, new_h
class GRUCell(RNNCell):
  """Gated Recurrent Unit cell (cf. http://arxiv.org/abs/1406.1078)."""
  def __init__(self, num_units, input_size=None, activation=tanh, reuse=None):
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._activation = activation
    self._reuse = reuse
  @property
  def state_size(self):
    # State and output share the same width for a GRU.
    return self._num_units
  @property
  def output_size(self):
    return self._num_units
  def __call__(self, inputs, state, scope=None):
    """Gated recurrent unit (GRU) with nunits cells."""
    with _checked_scope(self, scope or "gru_cell", reuse=self._reuse):
      with vs.variable_scope("gates"):  # Reset gate and update gate.
        # We start with bias of 1.0 to not reset and not update.
        # sigmoid is applied once to the whole 2*num_units matrix before the
        # split, saving one op versus per-gate sigmoids.
        value = sigmoid(_linear(
          [inputs, state], 2 * self._num_units, True, 1.0))
        r, u = array_ops.split(
            value=value,
            num_or_size_splits=2,
            axis=1)
      with vs.variable_scope("candidate"):
        # Candidate activation uses the reset-gated state.
        c = self._activation(_linear([inputs, r * state],
                                     self._num_units, True))
      # Update gate interpolates between the old state and the candidate.
      new_h = u * state + (1 - u) * c
    return new_h, new_h
_LSTMStateTuple = collections.namedtuple("LSTMStateTuple", ("c", "h"))
class LSTMStateTuple(_LSTMStateTuple):
  """Named 2-tuple `(c, h)` holding an LSTM cell state and hidden state.
  Used by LSTM cells for `state_size`, `zero_state`, and the output state
  when `state_is_tuple=True`. Stores two elements: `(c, h)`, in that order.
  """
  __slots__ = ()
  @property
  def dtype(self):
    """Common dtype of both state tensors; raises if they disagree."""
    if not self.c.dtype == self.h.dtype:
      raise TypeError("Inconsistent internal state: %s vs %s" %
                      (str(self.c.dtype), str(self.h.dtype)))
    return self.c.dtype
class BasicLSTMCell(RNNCell):
  """Basic LSTM recurrent network cell.
  The implementation is based on: http://arxiv.org/abs/1409.2329.
  We add forget_bias (default: 1) to the biases of the forget gate in order to
  reduce the scale of forgetting in the beginning of the training.
  It does not allow cell clipping, a projection layer, and does not
  use peep-hole connections: it is the basic baseline.
  For advanced models, please use the full LSTMCell that follows.
  """
  def __init__(self, num_units, forget_bias=1.0, input_size=None,
               state_is_tuple=True, activation=tanh, reuse=None):
    """Initialize the basic LSTM cell.
    Args:
      num_units: int, The number of units in the LSTM cell.
      forget_bias: float, The bias added to forget gates (see above).
      input_size: Deprecated and unused.
      state_is_tuple: If True, accepted and returned states are 2-tuples of
        the `c_state` and `m_state`. If False, they are concatenated
        along the column axis. The latter behavior will soon be deprecated.
      activation: Activation function of the inner states.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
    """
    if not state_is_tuple:
      logging.warn("%s: Using a concatenated state is slower and will soon be "
                   "deprecated. Use state_is_tuple=True.", self)
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    self._num_units = num_units
    self._forget_bias = forget_bias
    self._state_is_tuple = state_is_tuple
    self._activation = activation
    self._reuse = reuse
  @property
  def state_size(self):
    # A (c, h) LSTMStateTuple when state_is_tuple, otherwise a single
    # concatenated state 2 * num_units wide.
    return (LSTMStateTuple(self._num_units, self._num_units)
            if self._state_is_tuple else 2 * self._num_units)
  @property
  def output_size(self):
    return self._num_units
  def __call__(self, inputs, state, scope=None):
    """Long short-term memory cell (LSTM).
    Returns `(new_h, new_state)`, where `new_state` mirrors the layout of
    `state` (LSTMStateTuple or concatenated, per `state_is_tuple`).
    """
    with _checked_scope(self, scope or "basic_lstm_cell", reuse=self._reuse):
      # Parameters of gates are concatenated into one multiply for efficiency.
      if self._state_is_tuple:
        c, h = state
      else:
        # Concatenated layout: first num_units columns are c, the rest are h.
        c, h = array_ops.split(value=state, num_or_size_splits=2, axis=1)
      concat = _linear([inputs, h], 4 * self._num_units, True)
      # i = input_gate, j = new_input, f = forget_gate, o = output_gate
      i, j, f, o = array_ops.split(value=concat, num_or_size_splits=4, axis=1)
      # forget_bias is added inside the sigmoid so training starts out
      # remembering (see class docstring).
      new_c = (c * sigmoid(f + self._forget_bias) + sigmoid(i) *
               self._activation(j))
      new_h = self._activation(new_c) * sigmoid(o)
      if self._state_is_tuple:
        new_state = LSTMStateTuple(new_c, new_h)
      else:
        new_state = array_ops.concat([new_c, new_h], 1)
      return new_h, new_state
class LSTMCell(RNNCell):
  """Long short-term memory unit (LSTM) recurrent network cell.

  The default non-peephole implementation is based on:
    http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf
  S. Hochreiter and J. Schmidhuber.
  "Long Short-Term Memory". Neural Computation, 9(8):1735-1780, 1997.

  The peephole implementation is based on:
    https://research.google.com/pubs/archive/43905.pdf
  Hasim Sak, Andrew Senior, and Francoise Beaufays.
  "Long short-term memory recurrent neural network architectures for
  large scale acoustic modeling." INTERSPEECH, 2014.

  The class uses optional peep-hole connections, optional cell clipping, and
  an optional projection layer.
  """

  def __init__(self, num_units, input_size=None,
               use_peepholes=False, cell_clip=None,
               initializer=None, num_proj=None, proj_clip=None,
               num_unit_shards=None, num_proj_shards=None,
               forget_bias=1.0, state_is_tuple=True,
               activation=tanh, reuse=None):
    """Initialize the parameters for an LSTM cell.

    Args:
      num_units: int, The number of units in the LSTM cell
      input_size: Deprecated and unused.
      use_peepholes: bool, set True to enable diagonal/peephole connections.
      cell_clip: (optional) A float value, if provided the cell state is clipped
        by this value prior to the cell output activation.
      initializer: (optional) The initializer to use for the weight and
        projection matrices.
      num_proj: (optional) int, The output dimensionality for the projection
        matrices.  If None, no projection is performed.
      proj_clip: (optional) A float value.  If `num_proj > 0` and `proj_clip` is
        provided, then the projected values are clipped elementwise to within
        `[-proj_clip, proj_clip]`.
      num_unit_shards: Deprecated, will be removed by Jan. 2017.
        Use a variable_scope partitioner instead.
      num_proj_shards: Deprecated, will be removed by Jan. 2017.
        Use a variable_scope partitioner instead.
      forget_bias: Biases of the forget gate are initialized by default to 1
        in order to reduce the scale of forgetting at the beginning of
        the training.
      state_is_tuple: If True, accepted and returned states are 2-tuples of
        the `c_state` and `m_state`.  If False, they are concatenated
        along the column axis.  This latter behavior will soon be deprecated.
      activation: Activation function of the inner states.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope.  If not `True`, and the existing scope already
        has the given variables, an error is raised.
    """
    if not state_is_tuple:
      logging.warn("%s: Using a concatenated state is slower and will soon be "
                   "deprecated. Use state_is_tuple=True.", self)
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    if num_unit_shards is not None or num_proj_shards is not None:
      logging.warn(
          "%s: The num_unit_shards and proj_unit_shards parameters are "
          "deprecated and will be removed in Jan 2017. "
          "Use a variable scope with a partitioner instead.", self)

    self._num_units = num_units
    self._use_peepholes = use_peepholes
    self._cell_clip = cell_clip
    self._initializer = initializer
    self._num_proj = num_proj
    self._proj_clip = proj_clip
    self._num_unit_shards = num_unit_shards
    self._num_proj_shards = num_proj_shards
    self._forget_bias = forget_bias
    self._state_is_tuple = state_is_tuple
    self._activation = activation
    self._reuse = reuse

    # With a projection layer the recurrent state m has width num_proj,
    # while the internal cell state c keeps width num_units.
    if num_proj:
      self._state_size = (
          LSTMStateTuple(num_units, num_proj)
          if state_is_tuple else num_units + num_proj)
      self._output_size = num_proj
    else:
      self._state_size = (
          LSTMStateTuple(num_units, num_units)
          if state_is_tuple else 2 * num_units)
      self._output_size = num_units

  @property
  def state_size(self):
    # Precomputed in __init__ (depends on whether a projection is used).
    return self._state_size

  @property
  def output_size(self):
    # num_proj if a projection layer is configured, else num_units.
    return self._output_size

  def __call__(self, inputs, state, scope=None):
    """Run one step of LSTM.

    Args:
      inputs: input Tensor, 2D, batch x num_units.
      state: if `state_is_tuple` is False, this must be a state Tensor,
        `2-D, batch x state_size`.  If `state_is_tuple` is True, this must be a
        tuple of state Tensors, both `2-D`, with column sizes `c_state` and
        `m_state`.
      scope: VariableScope for the created subgraph; defaults to "lstm_cell".

    Returns:
      A tuple containing:

      - A `2-D, [batch x output_dim]`, Tensor representing the output of the
        LSTM after reading `inputs` when previous state was `state`.
        Here output_dim is:
           num_proj if num_proj was set,
           num_units otherwise.
      - Tensor(s) representing the new state of LSTM after reading `inputs` when
        the previous state was `state`.  Same type and shape(s) as `state`.

    Raises:
      ValueError: If input size cannot be inferred from inputs via
        static shape inference.
    """
    num_proj = self._num_units if self._num_proj is None else self._num_proj

    if self._state_is_tuple:
      (c_prev, m_prev) = state
    else:
      # Concatenated layout: first num_units columns are c, the rest are m.
      c_prev = array_ops.slice(state, [0, 0], [-1, self._num_units])
      m_prev = array_ops.slice(state, [0, self._num_units], [-1, num_proj])

    dtype = inputs.dtype
    input_size = inputs.get_shape().with_rank(2)[1]
    if input_size.value is None:
      raise ValueError("Could not infer input size from inputs.get_shape()[-1]")
    with _checked_scope(self, scope or "lstm_cell",
                        initializer=self._initializer,
                        reuse=self._reuse) as unit_scope:
      if self._num_unit_shards is not None:
        # Deprecated sharding path: partition the gate weights.
        unit_scope.set_partitioner(
            partitioned_variables.fixed_size_partitioner(
                self._num_unit_shards))
      # i = input_gate, j = new_input, f = forget_gate, o = output_gate
      lstm_matrix = _linear([inputs, m_prev], 4 * self._num_units, bias=True)
      i, j, f, o = array_ops.split(
          value=lstm_matrix, num_or_size_splits=4, axis=1)
      # Diagonal connections
      if self._use_peepholes:
        # Peephole weights must not be partitioned, hence the nested scope
        # with the partitioner cleared.
        with vs.variable_scope(unit_scope) as projection_scope:
          if self._num_unit_shards is not None:
            projection_scope.set_partitioner(None)
          w_f_diag = vs.get_variable(
              "w_f_diag", shape=[self._num_units], dtype=dtype)
          w_i_diag = vs.get_variable(
              "w_i_diag", shape=[self._num_units], dtype=dtype)
          w_o_diag = vs.get_variable(
              "w_o_diag", shape=[self._num_units], dtype=dtype)

      if self._use_peepholes:
        c = (sigmoid(f + self._forget_bias + w_f_diag * c_prev) * c_prev +
             sigmoid(i + w_i_diag * c_prev) * self._activation(j))
      else:
        c = (sigmoid(f + self._forget_bias) * c_prev + sigmoid(i) *
             self._activation(j))

      if self._cell_clip is not None:
        # pylint: disable=invalid-unary-operand-type
        c = clip_ops.clip_by_value(c, -self._cell_clip, self._cell_clip)
        # pylint: enable=invalid-unary-operand-type

      if self._use_peepholes:
        m = sigmoid(o + w_o_diag * c) * self._activation(c)
      else:
        m = sigmoid(o) * self._activation(c)

      if self._num_proj is not None:
        with vs.variable_scope("projection") as proj_scope:
          if self._num_proj_shards is not None:
            proj_scope.set_partitioner(
                partitioned_variables.fixed_size_partitioner(
                    self._num_proj_shards))
          m = _linear(m, self._num_proj, bias=False)

        if self._proj_clip is not None:
          # pylint: disable=invalid-unary-operand-type
          m = clip_ops.clip_by_value(m, -self._proj_clip, self._proj_clip)
          # pylint: enable=invalid-unary-operand-type

    new_state = (LSTMStateTuple(c, m) if self._state_is_tuple else
                 array_ops.concat([c, m], 1))
    return m, new_state
class OutputProjectionWrapper(RNNCell):
  """Wraps an RNNCell and applies a linear projection to its output.

  Note: it is often more efficient to skip this wrapper and instead
  concatenate the whole output sequence in time, project that single
  batch-concatenated tensor, and then split it (or feed it straight into
  a softmax).
  """

  def __init__(self, cell, output_size, reuse=None):
    """Create a cell with output projection.

    Args:
      cell: an RNNCell, a projection to output_size is added to it.
      output_size: integer, the size of the output after projection.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope.  If not `True`, and the existing scope already
        has the given variables, an error is raised.

    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if output_size is not positive.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    if output_size < 1:
      raise ValueError("Parameter output_size must be > 0: %d." % output_size)
    self._cell = cell
    self._output_size = output_size
    self._reuse = reuse

  @property
  def state_size(self):
    # State is managed entirely by the wrapped cell.
    return self._cell.state_size

  @property
  def output_size(self):
    return self._output_size

  def __call__(self, inputs, state, scope=None):
    """Run the cell and output projection on inputs, starting from state."""
    cell_output, cell_state = self._cell(inputs, state)
    # Default scope: "OutputProjectionWrapper"
    scope_name = scope or "output_projection_wrapper"
    with _checked_scope(self, scope_name, reuse=self._reuse):
      projected_output = _linear(cell_output, self._output_size, True)
    return projected_output, cell_state
class InputProjectionWrapper(RNNCell):
  """Wraps an RNNCell and applies a linear projection to its inputs.

  Note: it is often more efficient to skip this wrapper and instead
  concatenate the whole input sequence in time, project that single
  batch-concatenated tensor, and then split it.
  """

  def __init__(self, cell, num_proj, input_size=None):
    """Create a cell with input projection.

    Args:
      cell: an RNNCell, a projection of inputs is added before it.
      num_proj: Python integer.  The dimension to project to.
      input_size: Deprecated and unused.

    Raises:
      TypeError: if cell is not an RNNCell.
    """
    if input_size is not None:
      logging.warn("%s: The input_size parameter is deprecated.", self)
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    self._cell = cell
    self._num_proj = num_proj

  @property
  def state_size(self):
    # State is managed entirely by the wrapped cell.
    return self._cell.state_size

  @property
  def output_size(self):
    return self._cell.output_size

  def __call__(self, inputs, state, scope=None):
    """Run the input projection and then the cell."""
    # Default scope: "InputProjectionWrapper"
    scope_name = scope or "input_projection_wrapper"
    with vs.variable_scope(scope_name):
      projected_inputs = _linear(inputs, self._num_proj, True)
    return self._cell(projected_inputs, state)
class DropoutWrapper(RNNCell):
  """Operator adding dropout to inputs and outputs of the given cell."""

  def __init__(self, cell, input_keep_prob=1.0, output_keep_prob=1.0,
               seed=None):
    """Create a cell with added input and/or output dropout.

    Dropout is never used on the state.

    Args:
      cell: an RNNCell, a projection to output_size is added to it.
      input_keep_prob: unit Tensor or float between 0 and 1, input keep
        probability; if it is float and 1, no input dropout will be added.
      output_keep_prob: unit Tensor or float between 0 and 1, output keep
        probability; if it is float and 1, no output dropout will be added.
      seed: (optional) integer, the randomness seed.

    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if keep_prob is not between 0 and 1.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not a RNNCell.")
    # BUG FIX: the messages used %d, which truncates a float keep-prob
    # (e.g. 1.5 was reported as "1"); %g reports the actual value.
    if (isinstance(input_keep_prob, float) and
        not (input_keep_prob >= 0.0 and input_keep_prob <= 1.0)):
      raise ValueError("Parameter input_keep_prob must be between 0 and 1: %g"
                       % input_keep_prob)
    if (isinstance(output_keep_prob, float) and
        not (output_keep_prob >= 0.0 and output_keep_prob <= 1.0)):
      raise ValueError("Parameter output_keep_prob must be between 0 and 1: %g"
                       % output_keep_prob)
    self._cell = cell
    self._input_keep_prob = input_keep_prob
    self._output_keep_prob = output_keep_prob
    self._seed = seed

  @property
  def state_size(self):
    return self._cell.state_size

  @property
  def output_size(self):
    return self._cell.output_size

  def __call__(self, inputs, state, scope=None):
    """Run the cell with the declared dropouts."""
    # A keep probability given as a Tensor is always applied; a Python float
    # of exactly 1.0 means "no dropout" and the op is skipped entirely.
    if (not isinstance(self._input_keep_prob, float) or
        self._input_keep_prob < 1):
      inputs = nn_ops.dropout(inputs, self._input_keep_prob, seed=self._seed)
    output, new_state = self._cell(inputs, state, scope)
    if (not isinstance(self._output_keep_prob, float) or
        self._output_keep_prob < 1):
      output = nn_ops.dropout(output, self._output_keep_prob, seed=self._seed)
    return output, new_state
class ResidualWrapper(RNNCell):
  """RNNCell wrapper that ensures cell inputs are added to the outputs."""

  def __init__(self, cell):
    """Constructs a `ResidualWrapper` for `cell`.

    Args:
      cell: An instance of `RNNCell`.
    """
    self._cell = cell

  @property
  def state_size(self):
    return self._cell.state_size

  @property
  def output_size(self):
    return self._cell.output_size

  def __call__(self, inputs, state, scope=None):
    """Run the cell and add its inputs to its outputs.

    Args:
      inputs: cell inputs.
      state: cell state.
      scope: optional cell scope.

    Returns:
      Tuple of cell outputs and new state.

    Raises:
      TypeError: If cell inputs and outputs have different structure (type).
      ValueError: If cell inputs and outputs have different structure (value).
    """
    outputs, new_state = self._cell(inputs, state, scope=scope)
    # The residual sum is only defined when inputs and outputs line up
    # structurally and shape-wise; verify both before adding.
    nest.assert_same_structure(inputs, outputs)

    def _assert_compatible(inp, out):
      inp.get_shape().assert_is_compatible_with(out.get_shape())

    nest.map_structure(_assert_compatible, inputs, outputs)
    summed = nest.map_structure(lambda inp, out: inp + out, inputs, outputs)
    return (summed, new_state)
class DeviceWrapper(RNNCell):
  """Operator that ensures an RNNCell runs on a particular device."""

  def __init__(self, cell, device):
    """Construct a `DeviceWrapper` for `cell` with device `device`.

    Ensures the wrapped `cell` is called with `tf.device(device)`.

    Args:
      cell: An instance of `RNNCell`.
      device: A device string or function, for passing to `tf.device`.
    """
    self._cell = cell
    self._device = device

  @property
  def state_size(self):
    return self._cell.state_size

  @property
  def output_size(self):
    return self._cell.output_size

  def __call__(self, inputs, state, scope=None):
    """Run the cell on specified device."""
    # Pin every op the wrapped cell creates to the configured device.
    with ops.device(self._device):
      result = self._cell(inputs, state, scope=scope)
    return result
class EmbeddingWrapper(RNNCell):
  """Operator adding input embedding to the given cell.

  Note: in many cases it may be more efficient to not use this wrapper,
  but instead concatenate the whole sequence of your inputs in time,
  do the embedding on this batch-concatenated sequence, then split it and
  feed into your RNN.
  """

  def __init__(self, cell, embedding_classes, embedding_size, initializer=None,
               reuse=None):
    """Create a cell with an added input embedding.

    Args:
      cell: an RNNCell, an embedding will be put before its inputs.
      embedding_classes: integer, how many symbols will be embedded.
      embedding_size: integer, the size of the vectors we embed into.
      initializer: an initializer to use when creating the embedding;
        if None, the initializer from variable scope or a default one is used.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope.  If not `True`, and the existing scope already
        has the given variables, an error is raised.

    Raises:
      TypeError: if cell is not an RNNCell.
      ValueError: if embedding_classes is not positive.
    """
    if not isinstance(cell, RNNCell):
      raise TypeError("The parameter cell is not RNNCell.")
    if embedding_classes <= 0 or embedding_size <= 0:
      raise ValueError("Both embedding_classes and embedding_size must be > 0: "
                       "%d, %d." % (embedding_classes, embedding_size))
    self._cell = cell
    self._embedding_classes = embedding_classes
    self._embedding_size = embedding_size
    self._initializer = initializer
    self._reuse = reuse

  @property
  def state_size(self):
    return self._cell.state_size

  @property
  def output_size(self):
    return self._cell.output_size

  def __call__(self, inputs, state, scope=None):
    """Run the cell on embedded inputs."""
    with _checked_scope(self, scope or "embedding_wrapper", reuse=self._reuse):
      # The embedding table is looked up on the CPU regardless of where the
      # wrapped cell itself runs.
      with ops.device("/cpu:0"):
        # Initializer precedence: explicit arg > enclosing scope's > default.
        if self._initializer:
          initializer = self._initializer
        elif vs.get_variable_scope().initializer:
          initializer = vs.get_variable_scope().initializer
        else:
          # Default initializer for embeddings should have variance=1.
          sqrt3 = math.sqrt(3)  # Uniform(-sqrt(3), sqrt(3)) has variance=1.
          initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)

        # NOTE(review): exact-type check, so a tuple *subclass* state (e.g. an
        # LSTMStateTuple) takes the else-branch and relies on its own `.dtype`
        # attribute -- confirm this is intentional before changing.
        if type(state) is tuple:
          data_type = state[0].dtype
        else:
          data_type = state.dtype

        embedding = vs.get_variable(
            "embedding", [self._embedding_classes, self._embedding_size],
            initializer=initializer,
            dtype=data_type)
        embedded = embedding_ops.embedding_lookup(
            embedding, array_ops.reshape(inputs, [-1]))
    return self._cell(embedded, state)
class MultiRNNCell(RNNCell):
  """RNN cell composed sequentially of multiple simple cells."""

  def __init__(self, cells, state_is_tuple=True):
    """Create a RNN cell composed sequentially of a number of RNNCells.

    Args:
      cells: list of RNNCells that will be composed in this order.
      state_is_tuple: If True, accepted and returned states are n-tuples, where
        `n = len(cells)`.  If False, the states are all
        concatenated along the column axis.  This latter behavior will soon be
        deprecated.

    Raises:
      ValueError: if cells is empty (not allowed), or at least one of the cells
        returns a state tuple but the flag `state_is_tuple` is `False`.
    """
    if not cells:
      raise ValueError("Must specify at least one cell for MultiRNNCell.")
    if not nest.is_sequence(cells):
      raise TypeError(
          "cells must be a list or tuple, but saw: %s." % cells)
    self._cells = cells
    self._state_is_tuple = state_is_tuple
    if not state_is_tuple:
      # Concatenated-state mode cannot represent cells with nested
      # (tuple) states, so reject them up front.
      if any(nest.is_sequence(c.state_size) for c in self._cells):
        raise ValueError("Some cells return tuples of states, but the flag "
                         "state_is_tuple is not set. State sizes are: %s"
                         % str([c.state_size for c in self._cells]))

  @property
  def state_size(self):
    # Tuple mode: one entry per layer; flat mode: total concatenated width.
    if self._state_is_tuple:
      return tuple(cell.state_size for cell in self._cells)
    else:
      return sum([cell.state_size for cell in self._cells])

  @property
  def output_size(self):
    # Output of the stack is the output of the last layer.
    return self._cells[-1].output_size

  def __call__(self, inputs, state, scope=None):
    """Run this multi-layer cell on inputs, starting from state."""
    with vs.variable_scope(scope or "multi_rnn_cell"):
      # Each layer consumes the previous layer's output as its input; flat
      # states are carved out of `state` column-by-column via cur_state_pos.
      cur_state_pos = 0
      cur_inp = inputs
      new_states = []
      for i, cell in enumerate(self._cells):
        with vs.variable_scope("cell_%d" % i):
          if self._state_is_tuple:
            if not nest.is_sequence(state):
              raise ValueError(
                  "Expected state to be a tuple of length %d, but received: %s"
                  % (len(self.state_size), state))
            cur_state = state[i]
          else:
            cur_state = array_ops.slice(
                state, [0, cur_state_pos], [-1, cell.state_size])
            cur_state_pos += cell.state_size
          cur_inp, new_state = cell(cur_inp, cur_state)
          new_states.append(new_state)
    new_states = (tuple(new_states) if self._state_is_tuple else
                  array_ops.concat(new_states, 1))
    return cur_inp, new_states
class _SlimRNNCell(RNNCell):
  """A simple wrapper for slim.rnn_cells."""

  def __init__(self, cell_fn):
    """Create a SlimRNNCell from a cell_fn.

    Args:
      cell_fn: a function which takes (inputs, state, scope) and produces the
        outputs and the new_state. Additionally when called with inputs=None
        and state=None it should return (initial_outputs, initial_state).

    Raises:
      TypeError: if cell_fn is not callable
      ValueError: if cell_fn cannot produce a valid initial state.
    """
    if not callable(cell_fn):
      # BUG FIX: TypeError does no %-interpolation of its arguments, so the
      # message must be formatted eagerly.
      raise TypeError("cell_fn %s needs to be callable" % (cell_fn,))
    self._cell_fn = cell_fn
    # NOTE(review): assumes cell_fn is a functools.partial-like object
    # exposing `.func` -- a plain function would need `__name__`; confirm.
    self._cell_name = cell_fn.func.__name__
    # Probe the cell once with (None, None) to discover its static
    # output/state widths from the returned shapes.
    init_output, init_state = self._cell_fn(None, None)
    output_shape = init_output.get_shape()
    state_shape = init_state.get_shape()
    self._output_size = output_shape.with_rank(2)[1].value
    self._state_size = state_shape.with_rank(2)[1].value
    if self._output_size is None:
      raise ValueError("Initial output created by %s has invalid shape %s" %
                       (self._cell_name, output_shape))
    if self._state_size is None:
      raise ValueError("Initial state created by %s has invalid shape %s" %
                       (self._cell_name, state_shape))

  @property
  def state_size(self):
    return self._state_size

  @property
  def output_size(self):
    return self._output_size

  def __call__(self, inputs, state, scope=None):
    scope = scope or self._cell_name
    output, state = self._cell_fn(inputs, state, scope=scope)
    return output, state
def _linear(args, output_size, bias, bias_start=0.0):
  """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.

  Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
    output_size: int, second dimension of W[i].
    bias: boolean, whether to add a bias term or not.
    bias_start: starting value to initialize the bias; 0 by default.

  Returns:
    A 2D Tensor with shape [batch x output_size] equal to
    sum_i(args[i] * W[i]), where W[i]s are newly created matrices.

  Raises:
    ValueError: if some of the arguments has unspecified or wrong shape.
  """
  if args is None or (nest.is_sequence(args) and not args):
    raise ValueError("`args` must be specified")
  if not nest.is_sequence(args):
    # Accept a single tensor by normalizing it to a one-element list.
    args = [args]

  # Calculate the total size of arguments on dimension 1.
  total_arg_size = 0
  shapes = [a.get_shape() for a in args]
  for shape in shapes:
    if shape.ndims != 2:
      raise ValueError("linear is expecting 2D arguments: %s" % shapes)
    if shape[1].value is None:
      raise ValueError("linear expects shape[1] to be provided for shape %s, "
                       "but saw %s" % (shape, shape[1]))
    else:
      total_arg_size += shape[1].value

  # All args are assumed to share a dtype; the first one is authoritative.
  dtype = [a.dtype for a in args][0]

  # Now the computation.
  scope = vs.get_variable_scope()
  with vs.variable_scope(scope) as outer_scope:
    weights = vs.get_variable(
        _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)
    # A single arg multiplies directly; multiple args are concatenated along
    # axis 1 first, which is equivalent to summing the per-arg products.
    if len(args) == 1:
      res = math_ops.matmul(args[0], weights)
    else:
      res = math_ops.matmul(array_ops.concat(args, 1), weights)
    if not bias:
      return res
    # The bias must not be partitioned even if the outer scope's variables
    # are, hence the nested scope with the partitioner cleared.
    with vs.variable_scope(outer_scope) as inner_scope:
      inner_scope.set_partitioner(None)
      biases = vs.get_variable(
          _BIAS_VARIABLE_NAME, [output_size],
          dtype=dtype,
          initializer=init_ops.constant_initializer(bias_start, dtype=dtype))
    return nn_ops.bias_add(res, biases)
|
from __future__ import unicode_literals
import logging
from time import time
import six
from django.db import models, DatabaseError
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from django.conf import settings
from explorer import app_settings
from explorer.utils import (
passes_blacklist,
swap_params,
extract_params,
shared_dict_update,
get_s3_bucket,
get_params_for_url,
get_valid_connection
)
MSG_FAILED_BLACKLIST = "Query failed the SQL blacklist: %s"
logger = logging.getLogger(__name__)
@six.python_2_unicode_compatible
class Query(models.Model):
    """A saved SQL query, optionally parameterized, run against a named DB connection."""
    title = models.CharField(max_length=255)
    sql = models.TextField()
    description = models.TextField(null=True, blank=True)
    created_by_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    last_run_date = models.DateTimeField(auto_now=True)
    snapshot = models.BooleanField(default=False, help_text="Include in snapshot task (if enabled)")
    connection = models.CharField(blank=True, null=True, max_length=128,
                                  help_text="Name of DB connection (as specified in settings) to use for this query. Will use EXPLORER_DEFAULT_CONNECTION if left blank")

    def __init__(self, *args, **kwargs):
        # 'params' is not a model field: pop it (read + remove in one step,
        # instead of the previous get-then-pop double lookup) before Django's
        # Model.__init__ sees the kwargs.
        self.params = kwargs.pop('params', None)
        super(Query, self).__init__(*args, **kwargs)

    class Meta:
        ordering = ['title']
        verbose_name_plural = 'Queries'

    def __str__(self):
        return six.text_type(self.title)

    def get_run_count(self):
        """Number of logged executions of this query."""
        return self.querylog_set.count()

    def avg_duration(self):
        """Average logged duration in milliseconds, or None if never run."""
        return self.querylog_set.aggregate(models.Avg('duration'))['duration__avg']

    def passes_blacklist(self):
        """Check the fully-rendered SQL against the configured blacklist."""
        return passes_blacklist(self.final_sql())

    def final_sql(self):
        """The SQL with parameter placeholders swapped for current values."""
        return swap_params(self.sql, self.available_params())

    def execute_query_only(self):
        """Run the query and return the raw QueryResult (no post-processing)."""
        return QueryResult(self.final_sql(), get_valid_connection(self.connection))

    def execute_with_logging(self, executing_user):
        """Run the query, recording who ran it and how long it took."""
        ql = self.log(executing_user)
        ret = self.execute()
        ql.duration = ret.duration
        ql.save()
        return ret, ql

    def execute(self):
        """Run the query and post-process the result (summaries, transforms)."""
        ret = self.execute_query_only()
        ret.process()
        return ret

    def available_params(self):
        """
        Merge parameter values into a dictionary of available parameters

        :param param_values: A dictionary of Query param values.
        :return: A merged dictionary of parameter names and values. Values of non-existent parameters are removed.
        """
        p = extract_params(self.sql)
        if self.params:
            shared_dict_update(p, self.params)
        return p

    def get_absolute_url(self):
        return reverse("query_detail", kwargs={'query_id': self.id})

    @property
    def params_for_url(self):
        return get_params_for_url(self)

    def log(self, user=None):
        """Create and save a QueryLog row for this run; returns the new row."""
        if user:
            # In Django<1.10, is_anonymous was a method.
            try:
                is_anonymous = user.is_anonymous()
            except TypeError:
                is_anonymous = user.is_anonymous
            if is_anonymous:
                user = None
        ql = QueryLog(sql=self.final_sql(), query_id=self.id, run_by_user=user, connection=self.connection)
        ql.save()
        return ql

    @property
    def shared(self):
        """True if any per-user query view includes this query's id."""
        return self.id in set(sum(app_settings.EXPLORER_GET_USER_QUERY_VIEWS().values(), []))

    @property
    def snapshots(self):
        """S3 snapshots for this query, oldest first.

        Note: implicitly returns None when ENABLE_TASKS is falsy.
        """
        if app_settings.ENABLE_TASKS:
            b = get_s3_bucket()
            keys = b.list(prefix='query-%s/snap-' % self.id)
            keys_s = sorted(keys, key=lambda k: k.last_modified)
            return [SnapShot(k.generate_url(expires_in=0, query_auth=False),
                             k.last_modified) for k in keys_s]
class SnapShot(object):
    """Value object pairing a snapshot's download URL with its timestamp."""

    def __init__(self, url, last_modified):
        self.url = url
        self.last_modified = last_modified
class QueryLog(models.Model):
    """Immutable record of one query execution."""
    # Copy of the SQL that was run (the Query itself may be edited later).
    sql = models.TextField(null=True, blank=True)
    query = models.ForeignKey(Query, null=True, blank=True, on_delete=models.SET_NULL)
    run_by_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
    run_at = models.DateTimeField(auto_now_add=True)
    duration = models.FloatField(blank=True, null=True)  # milliseconds

    # Name of the DB connection the query ran against.
    connection = models.CharField(blank=True, null=True, max_length=128)

    @property
    def is_playground(self):
        # Ad-hoc ("playground") runs are logged without a saved Query.
        return self.query_id is None

    class Meta:
        ordering = ['-run_at']
class QueryResult(object):
    """Executes a SQL statement on construction and holds the resulting rows,
    headers, and timing information."""

    def __init__(self, sql, connection):
        self.sql = sql
        self.connection = connection
        cursor, duration = self.execute_query()
        self._description = cursor.description or []
        self._data = [list(r) for r in cursor.fetchall()]
        self.duration = duration  # milliseconds
        cursor.close()
        self._headers = self._get_headers()
        self._summary = {}

    @property
    def data(self):
        return self._data or []

    @property
    def headers(self):
        return self._headers or []

    @property
    def header_strings(self):
        return [str(h) for h in self.headers]

    def _get_headers(self):
        """ColumnHeaders built from the cursor description; '--' if empty."""
        if not self._description:
            return [ColumnHeader('--')]
        return [ColumnHeader(d[0]) for d in self._description]

    def _get_numerics(self):
        """Indexes of numeric columns: from driver type metadata when the
        backend exposes NUMBER types, otherwise sniffed from the first row."""
        if hasattr(self.connection.Database, "NUMBER"):
            return [ix for ix, c in enumerate(self._description)
                    if hasattr(c, 'type_code') and c.type_code in self.connection.Database.NUMBER.values]
        elif self.data:
            d = self.data[0]
            return [ix for ix, _ in enumerate(self._description)
                    if not isinstance(d[ix], six.string_types) and six.text_type(d[ix]).isnumeric()]
        return []

    def _get_transforms(self):
        """(column index, format template) pairs for configured transforms."""
        transforms = dict(app_settings.EXPLORER_TRANSFORMS)
        # Membership test directly on the dict (no .keys() indirection).
        return [(ix, transforms[str(h)])
                for ix, h in enumerate(self.headers) if str(h) in transforms]

    def column(self, ix):
        """All values of column ix, in row order."""
        return [r[ix] for r in self.data]

    def process(self):
        """Compute column summaries and apply row transforms, with timing."""
        start_time = time()
        self.process_columns()
        self.process_rows()
        logger.info("Explorer Query Processing took %sms." % ((time() - start_time) * 1000))

    def process_columns(self):
        for ix in self._get_numerics():
            self.headers[ix].add_summary(self.column(ix))

    def process_rows(self):
        transforms = self._get_transforms()
        if transforms:
            for r in self.data:
                for ix, t in transforms:
                    r[ix] = t.format(str(r[ix]))

    def execute_query(self):
        """Run self.sql; return (open cursor, duration in ms).

        The caller owns the returned cursor and must close it.  On error the
        cursor is closed here before re-raising.
        """
        cursor = self.connection.cursor()
        start_time = time()
        try:
            cursor.execute(self.sql)
        except DatabaseError:
            cursor.close()
            # Bare raise preserves the original traceback (including on
            # Python 2, where `raise e` would discard it).
            raise
        return cursor, ((time() - start_time) * 1000)
@six.python_2_unicode_compatible
class ColumnHeader(object):
    """A result-set column: its whitespace-stripped title plus an optional
    numeric summary."""

    def __init__(self, title):
        self.title = title.strip()
        self.summary = None

    def add_summary(self, column):
        """Attach summary statistics computed from the column's values."""
        self.summary = ColumnSummary(self, column)

    def __str__(self):
        return self.title
@six.python_2_unicode_compatible
class ColumnStat(object):
    """One named statistic (e.g. Sum, Avg) computed over a column's values."""

    def __init__(self, label, statfn, precision=2, handles_null=False):
        self.label = label
        self.statfn = statfn
        self.precision = precision
        self.handles_null = handles_null

    def __call__(self, coldata):
        # An empty column yields 0 instead of invoking statfn on no data.
        if coldata:
            self.value = round(float(self.statfn(coldata)), self.precision)
        else:
            self.value = 0

    def __str__(self):
        return self.label
@six.python_2_unicode_compatible
class ColumnSummary(object):
    """Computes a fixed battery of statistics over one column of data."""

    def __init__(self, header, col):
        self._header = header
        self._stats = [
            ColumnStat("Sum", sum),
            ColumnStat("Avg", lambda x: float(sum(x)) / float(len(x))),
            ColumnStat("Min", min),
            ColumnStat("Max", max),
            ColumnStat("NUL", lambda x: int(sum(map(lambda y: 1 if y is None else 0, x))), 0, True)
        ]
        # Null-intolerant stats see NULLs coerced to zero.
        without_nulls = [0 if x is None else x for x in col]
        for stat in self._stats:
            data = col if stat.handles_null else without_nulls
            stat(data)

    @property
    def stats(self):
        """Mapping of statistic label to its computed value."""
        return {c.label: c.value for c in self._stats}

    def __str__(self):
        return str(self._header)
Run queries as atomic requests

Without this change, a query error (e.g. a typo in a table name) causes a 500-level error whenever the default database connection has ATOMIC_REQUESTS set to True. Once the failed query poisons the request's transaction, no subsequent database query in that request can succeed — including the queries needed to render the error page.
from __future__ import unicode_literals
import logging
from time import time
import six
from django.db import models, DatabaseError, transaction
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from django.conf import settings
from explorer import app_settings
from explorer.utils import (
passes_blacklist,
swap_params,
extract_params,
shared_dict_update,
get_s3_bucket,
get_params_for_url,
get_valid_connection
)
MSG_FAILED_BLACKLIST = "Query failed the SQL blacklist: %s"
logger = logging.getLogger(__name__)
@six.python_2_unicode_compatible
class Query(models.Model):
    """A saved SQL query, optionally parameterized, run against a named DB connection."""
    title = models.CharField(max_length=255)
    sql = models.TextField()
    description = models.TextField(null=True, blank=True)
    created_by_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    last_run_date = models.DateTimeField(auto_now=True)
    snapshot = models.BooleanField(default=False, help_text="Include in snapshot task (if enabled)")
    connection = models.CharField(blank=True, null=True, max_length=128,
                                  help_text="Name of DB connection (as specified in settings) to use for this query. Will use EXPLORER_DEFAULT_CONNECTION if left blank")

    def __init__(self, *args, **kwargs):
        # 'params' is not a model field: pop it (read + remove in one step,
        # instead of the previous get-then-pop double lookup) before Django's
        # Model.__init__ sees the kwargs.
        self.params = kwargs.pop('params', None)
        super(Query, self).__init__(*args, **kwargs)

    class Meta:
        ordering = ['title']
        verbose_name_plural = 'Queries'

    def __str__(self):
        return six.text_type(self.title)

    def get_run_count(self):
        """Number of logged executions of this query."""
        return self.querylog_set.count()

    def avg_duration(self):
        """Average logged duration in milliseconds, or None if never run."""
        return self.querylog_set.aggregate(models.Avg('duration'))['duration__avg']

    def passes_blacklist(self):
        """Check the fully-rendered SQL against the configured blacklist."""
        return passes_blacklist(self.final_sql())

    def final_sql(self):
        """The SQL with parameter placeholders swapped for current values."""
        return swap_params(self.sql, self.available_params())

    def execute_query_only(self):
        """Run the query and return the raw QueryResult (no post-processing)."""
        return QueryResult(self.final_sql(), get_valid_connection(self.connection))

    def execute_with_logging(self, executing_user):
        """Run the query, recording who ran it and how long it took."""
        ql = self.log(executing_user)
        ret = self.execute()
        ql.duration = ret.duration
        ql.save()
        return ret, ql

    def execute(self):
        """Run the query and post-process the result (summaries, transforms)."""
        ret = self.execute_query_only()
        ret.process()
        return ret

    def available_params(self):
        """
        Merge parameter values into a dictionary of available parameters

        :param param_values: A dictionary of Query param values.
        :return: A merged dictionary of parameter names and values. Values of non-existent parameters are removed.
        """
        p = extract_params(self.sql)
        if self.params:
            shared_dict_update(p, self.params)
        return p

    def get_absolute_url(self):
        return reverse("query_detail", kwargs={'query_id': self.id})

    @property
    def params_for_url(self):
        return get_params_for_url(self)

    def log(self, user=None):
        """Create and save a QueryLog row for this run; returns the new row."""
        if user:
            # In Django<1.10, is_anonymous was a method.
            try:
                is_anonymous = user.is_anonymous()
            except TypeError:
                is_anonymous = user.is_anonymous
            if is_anonymous:
                user = None
        ql = QueryLog(sql=self.final_sql(), query_id=self.id, run_by_user=user, connection=self.connection)
        ql.save()
        return ql

    @property
    def shared(self):
        """True if any per-user query view includes this query's id."""
        return self.id in set(sum(app_settings.EXPLORER_GET_USER_QUERY_VIEWS().values(), []))

    @property
    def snapshots(self):
        """S3 snapshots for this query, oldest first.

        Note: implicitly returns None when ENABLE_TASKS is falsy.
        """
        if app_settings.ENABLE_TASKS:
            b = get_s3_bucket()
            keys = b.list(prefix='query-%s/snap-' % self.id)
            keys_s = sorted(keys, key=lambda k: k.last_modified)
            return [SnapShot(k.generate_url(expires_in=0, query_auth=False),
                             k.last_modified) for k in keys_s]
class SnapShot(object):
    """Value object pairing a snapshot's download URL with its timestamp."""

    def __init__(self, url, last_modified):
        self.url = url
        self.last_modified = last_modified
class QueryLog(models.Model):
    """Immutable record of one query execution."""
    # Copy of the SQL that was run (the Query itself may be edited later).
    sql = models.TextField(null=True, blank=True)
    query = models.ForeignKey(Query, null=True, blank=True, on_delete=models.SET_NULL)
    run_by_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)
    run_at = models.DateTimeField(auto_now_add=True)
    duration = models.FloatField(blank=True, null=True)  # milliseconds

    # Name of the DB connection the query ran against.
    connection = models.CharField(blank=True, null=True, max_length=128)

    @property
    def is_playground(self):
        # Ad-hoc ("playground") runs are logged without a saved Query.
        return self.query_id is None

    class Meta:
        ordering = ['-run_at']
class QueryResult(object):
    """Executes a SQL statement and holds its rows, headers and summaries."""

    def __init__(self, sql, connection):
        self.sql = sql
        self.connection = connection
        # Execute immediately; rows are materialized so the cursor can close.
        cursor, duration = self.execute_query()
        self._description = cursor.description or []
        self._data = [list(r) for r in cursor.fetchall()]
        self.duration = duration  # milliseconds
        cursor.close()
        self._headers = self._get_headers()
        self._summary = {}

    @property
    def data(self):
        """Result rows as lists (never None)."""
        return self._data or []

    @property
    def headers(self):
        """ColumnHeader objects for the result columns (never None)."""
        return self._headers or []

    @property
    def header_strings(self):
        return [str(h) for h in self.headers]

    def _get_headers(self):
        # A '--' placeholder header is used when the cursor had no description.
        return [ColumnHeader(d[0]) for d in self._description] if self._description else [ColumnHeader('--')]

    def _get_numerics(self):
        """Indexes of numeric columns, via driver type codes or value sniffing."""
        if hasattr(self.connection.Database, "NUMBER"):
            return [ix for ix, c in enumerate(self._description) if hasattr(c, 'type_code') and c.type_code in self.connection.Database.NUMBER.values]
        elif self.data:
            # Fallback: sniff the first row's values for numeric-looking cells.
            d = self.data[0]
            return [ix for ix, _ in enumerate(self._description) if not isinstance(d[ix], six.string_types) and six.text_type(d[ix]).isnumeric()]
        return []

    def _get_transforms(self):
        """(column index, template) pairs for headers with configured transforms."""
        transforms = dict(app_settings.EXPLORER_TRANSFORMS)
        return [(ix, transforms[str(h)]) for ix, h in enumerate(self.headers) if str(h) in transforms.keys()]

    def column(self, ix):
        """All values of column *ix*."""
        return [r[ix] for r in self.data]

    def process(self):
        """Apply column summaries and row transforms, logging the time spent."""
        start_time = time()
        self.process_columns()
        self.process_rows()
        logger.info("Explorer Query Processing took %sms." % ((time() - start_time) * 1000))

    def process_columns(self):
        # Attach summary statistics to each numeric column's header.
        for ix in self._get_numerics():
            self.headers[ix].add_summary(self.column(ix))

    def process_rows(self):
        # Rewrite cell values in place using the configured templates.
        transforms = self._get_transforms()
        if transforms:
            for r in self.data:
                for ix, t in transforms:
                    r[ix] = t.format(str(r[ix]))

    def execute_query(self):
        """Run self.sql inside a transaction; return (cursor, duration_ms).

        The cursor is closed (and the exception re-raised) on DatabaseError;
        on success the OPEN cursor is returned for the caller to consume.
        """
        cursor = self.connection.cursor()
        start_time = time()
        try:
            with transaction.atomic(self.connection.alias):
                cursor.execute(self.sql)
        except DatabaseError as e:
            cursor.close()
            raise e
        return cursor, ((time() - start_time) * 1000)
@six.python_2_unicode_compatible
class ColumnHeader(object):
    """Header of one result column; may carry a numeric summary."""

    def __init__(self, title):
        self.title = title.strip()
        self.summary = None  # populated by add_summary() for numeric columns

    def add_summary(self, column):
        """Compute and attach a ColumnSummary over *column*'s values."""
        self.summary = ColumnSummary(self, column)

    def __str__(self):
        return self.title
@six.python_2_unicode_compatible
class ColumnStat(object):
    """A single named statistic (e.g. Sum, Avg) computed over a column."""

    def __init__(self, label, statfn, precision=2, handles_null=False):
        self.label = label
        self.statfn = statfn
        self.precision = precision
        self.handles_null = handles_null

    def __call__(self, coldata):
        # An empty column yields 0 without invoking the stat function.
        if not coldata:
            self.value = 0
        else:
            self.value = round(float(self.statfn(coldata)), self.precision)

    def __str__(self):
        return self.label
@six.python_2_unicode_compatible
class ColumnSummary(object):
    """Collection of summary statistics for one numeric column."""

    def __init__(self, header, col):
        self._header = header
        self._stats = [
            ColumnStat("Sum", sum),
            ColumnStat("Avg", lambda x: float(sum(x)) / float(len(x))),
            ColumnStat("Min", min),
            ColumnStat("Max", max),
            # NUL counts None entries, so it must see the raw column (handles_null=True).
            ColumnStat("NUL", lambda x: int(sum(map(lambda y: 1 if y is None else 0, x))), 0, True)
        ]
        # All other stats operate on a copy with Nones coerced to 0.
        without_nulls = list(map(lambda x: 0 if x is None else x, col))
        for stat in self._stats:
            stat(col) if stat.handles_null else stat(without_nulls)

    @property
    def stats(self):
        """Mapping of stat label -> computed value."""
        return {c.label: c.value for c in self._stats}

    def __str__(self):
        return str(self._header)
|
import traceback
from sqlalchemy import Column, Boolean, Integer, String, ForeignKey, create_engine, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from datetime import datetime
# SQLAlchemy declarative base bound to a local SQLite database file.
Base = declarative_base()
engine = create_engine('sqlite:///banpool.db')
Base.metadata.bind = engine
# Module-wide working session obtained from a thread-local (scoped) registry.
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
session = Session()
class BanPoolManager:
    """CRUD helper for banpools, banned users and per-server ban exceptions.

    Most methods return a ``(message, success)`` tuple so callers (e.g.
    Discord chat commands) can relay the message directly to the user.
    """

    def add_user_to_banpool(self, banpool_name, user_id):
        """
        Add a User ID to the banpool
        :param banpool_name: name of an existing banpool
        :param user_id: Discord User ID
        :return: (message, success) tuple
        """
        try:
            # one_or_none() instead of count()/one(): a missing pool now takes
            # the intended "does not exist" branch instead of raising.
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return "This banpool does not exist.", False
            # Determine if the user has already been added to the banpool.
            user_query = session.query(DiscordUser).filter(
                DiscordUser.banpool_id == banpool.id,
                DiscordUser.user_id == user_id)
            if user_query.count() == 0:
                new_discord_user = DiscordUser(user_id=user_id, ban_date=datetime.now(),
                                               banpool_id=banpool.id)
                session.add(new_discord_user)
                session.commit()
                return "User has been added to the banpool.", True
            return "This user is already a part of this banpool.", False
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape.
            print(traceback.format_exc())
            return "An error has occurred", False

    def add_userlist_to_banpool(self, banpool_name, user_id_list):
        """
        Add a list of User IDs separated by comma to the banpool
        :param banpool_name:
        :param user_id_list: comma-separated string of user IDs
        :return: (message, success) tuple
        """
        # Process the list and turn it into a python list.
        try:
            user_list = user_id_list.split(',')
        except Exception:
            print(traceback.format_exc())
            return "Your userlist wasn't properly formatted. Separate each ID with a comma.", False
        try:
            for user in user_list:
                self.add_user_to_banpool(banpool_name, user)
            return "Users have been processed. Non-duplicates have been added to the ban list.", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def add_user_to_exceptions(self, user_id, server_id):
        """
        Add a User ID+Server ID to the ban exceptions list
        :param user_id: Discord User ID
        :param server_id: Discord Server ID
        :return: (message, success) tuple
        """
        try:
            user_exception_query = session.query(BanExceptions).filter(
                BanExceptions.user_id == user_id,
                BanExceptions.server_id == server_id)
            # Only add when no exception exists yet for this user+server pair.
            if user_exception_query.count() == 0:
                new_exception = BanExceptions(user_id=user_id, server_id=server_id,
                                              exception_date=datetime.now())
                session.add(new_exception)
                session.commit()
                return "The user has been added to exceptions for this server", True
            return "This user already has an exception for this server.", False
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def banpool_list(self):
        """Return all BanPool rows as a list, or None on error."""
        try:
            return list(session.query(BanPool))
        except Exception:
            print(traceback.format_exc())
            return None

    def banpool_user_list(self, banpool_name):
        """
        Return a list of User IDs in a banpool
        :param banpool_name:
        :return: list of DiscordUser rows, or None when the pool is missing / on error
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return None
            return list(banpool.banned_users)
        except Exception:
            print(traceback.format_exc())
            return None

    def build_db(self):
        """Create any tables that do not yet exist."""
        Base.metadata.create_all(engine)

    def create_banpool(self, banpool_name, banpool_description):
        """
        Creates a banpool with banpool_name
        :param banpool_name:
        :param banpool_description:
        :return: (message, success) tuple
        """
        try:
            query = session.query(BanPool).filter(BanPool.pool_name == banpool_name)
            if query.count() == 0:
                # BanPool name wasn't found so create it.
                new_banpool = BanPool(pool_name=banpool_name, pool_description=banpool_description)
                session.add(new_banpool)
                session.commit()
                return "The banpool has been created.", True
            return "This banpool name already exists", False
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def exception_list(self):
        """Return all BanExceptions rows as a list, or None on error."""
        try:
            return list(session.query(BanExceptions))
        except Exception:
            print(traceback.format_exc())
            return None  # previously an implicit None; made explicit

    def is_user_in_banpool(self, banpool_name, user_id):
        """
        Checks if the User ID is in the banpool
        :param banpool_name:
        :param user_id:
        :return: bool (False on error or unknown pool)
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return False
            user_query = session.query(DiscordUser).filter(
                DiscordUser.user_id == user_id,
                DiscordUser.banpool_id == banpool.id)
            return user_query.count() > 0
        except Exception:
            print(traceback.format_exc())
            return False

    def is_user_banned(self, user_id):
        """
        Checks if the user is in any banpool
        :param user_id:
        :return: (pool name, True) or (message, False)
        """
        try:
            # first() instead of one(): a user may appear in several pools,
            # where one() would raise MultipleResultsFound.
            user = session.query(DiscordUser).filter(DiscordUser.user_id == user_id).first()
            if user is None:
                return "User is not in any banpool.", False
            banpool = session.query(BanPool).filter(BanPool.id == user.banpool_id).one()
            return banpool.pool_name, True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def is_user_in_exceptions(self, user_id, server_id):
        """
        Checks if a User ID is in the exception list for server_id
        :param user_id:
        :param server_id:
        :return: bool (False on error)
        """
        try:
            query = session.query(BanExceptions).filter(
                BanExceptions.server_id == server_id,
                BanExceptions.user_id == user_id)
            return query.count() > 0
        except Exception:
            print(traceback.format_exc())
            return False

    def remove_user_from_banpool(self, banpool_name, user_id):
        """
        Removes a User ID from the banpool
        :param banpool_name:
        :param user_id:
        :return: (message, success) tuple
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return "This banpool does not exist.", False
            user = session.query(DiscordUser).filter(
                DiscordUser.banpool_id == banpool.id,
                DiscordUser.user_id == user_id).one_or_none()
            if user is None:
                return "User not found in banpool.", False
            session.delete(user)
            session.commit()
            return "User has been removed from the banpool.", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def remove_user_from_exceptions(self, user_id, server_id):
        """
        Removes a User ID and Server ID combination from exceptions
        :param user_id:
        :param server_id:
        :return: (message, success) tuple
        """
        try:
            user = session.query(BanExceptions).filter(
                BanExceptions.user_id == user_id,
                BanExceptions.server_id == server_id).one_or_none()
            if user is None:
                return "User wasn't found in exception list", False
            session.delete(user)
            session.commit()
            return "User has been removed from exception list", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False
class BanPool(Base):
    """A named pool that Discord users can be banned into."""
    __tablename__ = 'banpool'

    id = Column(Integer, primary_key=True)
    pool_name = Column(String)
    pool_description = Column(String)
    # One-to-many: DiscordUser rows banned under this pool.
    banned_users = relationship('DiscordUser')

    def __repr__(self):
        # Fixed: the format string previously lacked the closing ")".
        return '<BanPool(id={}, pool_name={}, pool_description={})>'.format(
            self.id, self.pool_name, self.pool_description
        )
class BanExceptions(Base):
    """Per-server exemption: this user is not banned on this server."""
    __tablename__ = 'banexceptions'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    server_id = Column(Integer)
    exception_date = Column(DateTime)

    def __repr__(self):
        # Fixed: the format string previously lacked the closing ")".
        return '<BanExceptions(id={}, user_id={}, server_id={}, exception_date={})>'.format(
            self.id, self.user_id, self.server_id, self.exception_date
        )
class DiscordUser(Base):
    """A banned Discord user, linked to the banpool it belongs to."""
    __tablename__ = 'discordusers'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    ban_date = Column(DateTime)
    banpool_id = Column(Integer, ForeignKey('banpool.id'))

    def __repr__(self):
        # Fixed: the format string previously lacked the closing ")".
        return '<DiscordUser(id={}, user_id={}, ban_date={}, banpool_id={})>'.format(
            self.id, self.user_id, self.ban_date, self.banpool_id
        )
Starting banpool reason work
import traceback
from sqlalchemy import Column, Boolean, Integer, String, ForeignKey, create_engine, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from datetime import datetime
# SQLAlchemy declarative base bound to a local SQLite database file.
Base = declarative_base()
engine = create_engine('sqlite:///banpool.db')
Base.metadata.bind = engine
# Module-wide working session obtained from a thread-local (scoped) registry.
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
session = Session()
# TODO:
#   Add reason to add_user_to_banpool
#   Add reason to add_userlist_to_banpool
#   Add reason to discord chat commands
#   Test
class BanPoolManager:
    """CRUD helper for banpools, banned users and per-server ban exceptions.

    Most methods return a ``(message, success)`` tuple so callers (e.g.
    Discord chat commands) can relay the message directly to the user.
    """

    def add_user_to_banpool(self, banpool_name, user_id, reason=None):
        """
        Add a User ID to the banpool
        :param banpool_name: name of an existing banpool
        :param user_id: Discord User ID
        :param reason: optional text explaining why the user was banned
        :return: (message, success) tuple
        """
        try:
            # one_or_none() instead of count()/one(): a missing pool now takes
            # the intended "does not exist" branch instead of raising.
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return "This banpool does not exist.", False
            # Determine if the user has already been added to the banpool.
            user_query = session.query(DiscordUser).filter(
                DiscordUser.banpool_id == banpool.id,
                DiscordUser.user_id == user_id)
            if user_query.count() == 0:
                # Fixed: `reason` was accepted but never persisted.
                new_discord_user = DiscordUser(user_id=user_id, ban_date=datetime.now(),
                                               banpool_id=banpool.id, reason=reason)
                session.add(new_discord_user)
                session.commit()
                return "User has been added to the banpool.", True
            return "This user is already a part of this banpool.", False
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape.
            print(traceback.format_exc())
            return "An error has occurred", False

    def add_userlist_to_banpool(self, banpool_name, user_id_list, reason=None):
        """
        Add a list of User IDs separated by comma to the banpool
        :param banpool_name:
        :param user_id_list: comma-separated string of user IDs
        :param reason: optional text applied to every user in the list
        :return: (message, success) tuple
        """
        # Process the list and turn it into a python list.
        try:
            user_list = user_id_list.split(',')
        except Exception:
            print(traceback.format_exc())
            return "Your userlist wasn't properly formatted. Separate each ID with a comma.", False
        try:
            for user in user_list:
                # Fixed: the reason is now forwarded (this call previously
                # omitted the new argument and raised TypeError).
                self.add_user_to_banpool(banpool_name, user, reason)
            return "Users have been processed. Non-duplicates have been added to the ban list.", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def add_user_to_exceptions(self, user_id, server_id):
        """
        Add a User ID+Server ID to the ban exceptions list
        :param user_id: Discord User ID
        :param server_id: Discord Server ID
        :return: (message, success) tuple
        """
        try:
            user_exception_query = session.query(BanExceptions).filter(
                BanExceptions.user_id == user_id,
                BanExceptions.server_id == server_id)
            # Only add when no exception exists yet for this user+server pair.
            if user_exception_query.count() == 0:
                new_exception = BanExceptions(user_id=user_id, server_id=server_id,
                                              exception_date=datetime.now())
                session.add(new_exception)
                session.commit()
                return "The user has been added to exceptions for this server", True
            return "This user already has an exception for this server.", False
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def banpool_list(self):
        """Return all BanPool rows as a list, or None on error."""
        try:
            return list(session.query(BanPool))
        except Exception:
            print(traceback.format_exc())
            return None

    def banpool_user_list(self, banpool_name):
        """
        Return a list of User IDs in a banpool
        :param banpool_name:
        :return: list of DiscordUser rows, or None when the pool is missing / on error
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return None
            return list(banpool.banned_users)
        except Exception:
            print(traceback.format_exc())
            return None

    def build_db(self):
        """Create any tables that do not yet exist."""
        Base.metadata.create_all(engine)

    def create_banpool(self, banpool_name, banpool_description):
        """
        Creates a banpool with banpool_name
        :param banpool_name:
        :param banpool_description:
        :return: (message, success) tuple
        """
        try:
            query = session.query(BanPool).filter(BanPool.pool_name == banpool_name)
            if query.count() == 0:
                # BanPool name wasn't found so create it.
                new_banpool = BanPool(pool_name=banpool_name, pool_description=banpool_description)
                session.add(new_banpool)
                session.commit()
                return "The banpool has been created.", True
            return "This banpool name already exists", False
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def exception_list(self):
        """Return all BanExceptions rows as a list, or None on error."""
        try:
            return list(session.query(BanExceptions))
        except Exception:
            print(traceback.format_exc())
            return None  # previously an implicit None; made explicit

    def is_user_in_banpool(self, banpool_name, user_id):
        """
        Checks if the User ID is in the banpool
        :param banpool_name:
        :param user_id:
        :return: bool (False on error or unknown pool)
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return False
            user_query = session.query(DiscordUser).filter(
                DiscordUser.user_id == user_id,
                DiscordUser.banpool_id == banpool.id)
            return user_query.count() > 0
        except Exception:
            print(traceback.format_exc())
            return False

    def is_user_banned(self, user_id):
        """
        Checks if the user is in any banpool
        :param user_id:
        :return: (pool name, True) or (message, False)
        """
        try:
            # first() instead of one(): a user may appear in several pools,
            # where one() would raise MultipleResultsFound.
            user = session.query(DiscordUser).filter(DiscordUser.user_id == user_id).first()
            if user is None:
                return "User is not in any banpool.", False
            banpool = session.query(BanPool).filter(BanPool.id == user.banpool_id).one()
            return banpool.pool_name, True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def is_user_in_exceptions(self, user_id, server_id):
        """
        Checks if a User ID is in the exception list for server_id
        :param user_id:
        :param server_id:
        :return: bool (False on error)
        """
        try:
            query = session.query(BanExceptions).filter(
                BanExceptions.server_id == server_id,
                BanExceptions.user_id == user_id)
            return query.count() > 0
        except Exception:
            print(traceback.format_exc())
            return False

    def remove_user_from_banpool(self, banpool_name, user_id):
        """
        Removes a User ID from the banpool
        :param banpool_name:
        :param user_id:
        :return: (message, success) tuple
        """
        try:
            banpool = session.query(BanPool).filter(BanPool.pool_name == banpool_name).one_or_none()
            if banpool is None:
                return "This banpool does not exist.", False
            user = session.query(DiscordUser).filter(
                DiscordUser.banpool_id == banpool.id,
                DiscordUser.user_id == user_id).one_or_none()
            if user is None:
                return "User not found in banpool.", False
            session.delete(user)
            session.commit()
            return "User has been removed from the banpool.", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False

    def remove_user_from_exceptions(self, user_id, server_id):
        """
        Removes a User ID and Server ID combination from exceptions
        :param user_id:
        :param server_id:
        :return: (message, success) tuple
        """
        try:
            user = session.query(BanExceptions).filter(
                BanExceptions.user_id == user_id,
                BanExceptions.server_id == server_id).one_or_none()
            if user is None:
                return "User wasn't found in exception list", False
            session.delete(user)
            session.commit()
            return "User has been removed from exception list", True
        except Exception:
            print(traceback.format_exc())
            return "An error has occurred.", False
class BanPool(Base):
    """A named pool that Discord users can be banned into."""
    __tablename__ = 'banpool'

    id = Column(Integer, primary_key=True)
    pool_name = Column(String)
    pool_description = Column(String)
    # One-to-many: DiscordUser rows banned under this pool.
    banned_users = relationship('DiscordUser')

    def __repr__(self):
        # Fixed: the format string previously lacked the closing ")".
        return '<BanPool(id={}, pool_name={}, pool_description={})>'.format(
            self.id, self.pool_name, self.pool_description
        )
class BanExceptions(Base):
    """Per-server exemption: this user is not banned on this server."""
    __tablename__ = 'banexceptions'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    server_id = Column(Integer)
    exception_date = Column(DateTime)

    def __repr__(self):
        # Fixed: the format string previously lacked the closing ")".
        return '<BanExceptions(id={}, user_id={}, server_id={}, exception_date={})>'.format(
            self.id, self.user_id, self.server_id, self.exception_date
        )
class DiscordUser(Base):
    """A banned Discord user, linked to its banpool, with an optional reason."""
    __tablename__ = 'discordusers'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer)
    ban_date = Column(DateTime)
    banpool_id = Column(Integer, ForeignKey('banpool.id'))
    reason = Column(String)

    def __repr__(self):
        # Fixed: reason was outside the "...)>" terminator in the old string.
        return '<DiscordUser(id={}, user_id={}, ban_date={}, banpool_id={}, reason={})>'.format(
            self.id, self.user_id, self.ban_date, self.banpool_id, self.reason
        )
|
# This Python file uses the following encoding: utf-8
import yarn
# Field names for each charity-stream record below (zipped into dicts).
stream_fields = [
    'active',
    'team_name',
    'streamer_list',
    'prompt_tick',  # Minutes
    'donation_currency',
    'donation_url',
    'bot_name'
]
# TODO: import these from a txt or csv file rather than hard coding them alongside other values
# TODO: write a test to check that these actually exist
active_charity_streams = [
    # TEAM WOTSIT
    dict(
        zip(
            stream_fields,
            [
                False,
                'wotsit',
                [
                    'kateclick',
                    'natswright',
                    'symyoulater',
                    'pleijpje'
                ],
                5,
                '£',
                'https://gameblast16.everydayhero.com/uk/team-wotsit',
                'default'
            ]
        )
    ),
    # TEAM SAGARACE
    dict(
        zip(
            stream_fields,
            [
                True,
                'sagArace',
                [
                    'bubblemapgaminglive',
                    'misfits_enterprises'
                ],
                5,
                '£',
                'https://gameblast16.everydayhero.com/uk/SagARace',
                'purrbot9000'
            ]
        )
    ),
    # TEAM TIIQ
    dict(
        zip(
            stream_fields,
            [
                False,
                'tiiq',
                [
                    'djtruthsayer',
                    'cmdrhughmann'
                ],
                10,
                '£',
                'https://gameblast16.everydayhero.com/uk/tiiq',
                'tiiqhuntergames'
            ]
        )
    )
]
def get_donation_amount(url=None, verbose=False):
    """Scrape an everydayhero donation page and report fundraising progress.

    :param url: donation page URL; ``None`` returns an error string
    :param verbose: print progress/diagnostic messages
    :return: ``[raised_amount, goal_amount, percentage_raised]`` on success,
        otherwise an ``'ERROR: ...'`` string
    """
    if url is None:
        if verbose:
            print('[-] No URL given, returning error')
        return 'ERROR: NO URL GIVEN'
    if verbose:
        print('[+] Attempting to scrape the charity URL')
    try:
        soup = yarn.soup_page(url=url)
    except Exception as e:
        if verbose:
            print('[-] Unable to soup the charity URL: {}'.format(e))
        return 'ERROR: COULD NOT SCRAPE DONATION AMOUNT'
    # Site-specific scraping. Guarded so that a layout change returns the
    # function's error-string convention instead of raising AttributeError.
    try:
        container_div = soup.find('div', {'id': 'bar--donation__main'})
        sub_container = container_div.find_all('div', {'class': 'donation-bar__detail'})
        raised_amount = sub_container[2].strong.text.strip('£').replace(',', '')
        goal_amount = sub_container[3].strong.text.strip('£').replace(',', '')
        percentage_raised = round((float(raised_amount) / float(goal_amount)) * 100, 2)
    except (AttributeError, IndexError, ValueError, ZeroDivisionError) as e:
        if verbose:
            print('[-] Unexpected page layout: {}'.format(e))
        return 'ERROR: COULD NOT SCRAPE DONATION AMOUNT'
    if verbose:
        print('[+] Current amount: {}'.format(raised_amount))
        print('[+] Current goal: {}'.format(goal_amount))
        print('[+] Percentage raised: {}%'.format(percentage_raised))
    return [raised_amount, goal_amount, percentage_raised]
# Manual smoke test: scrape every configured stream's donation page.
if __name__ == '__main__':
    print('[!] Test running the get donation amount method')
    for charity_stream in active_charity_streams:
        print('[!] Team {} have raised: {}'.format(
            charity_stream['team_name'],
            get_donation_amount(url=charity_stream['donation_url'], verbose=True)
        ))
Removed unneeded comment
# This Python file uses the following encoding: utf-8
import yarn
# Field names for each charity-stream record below (zipped into dicts).
stream_fields = [
    'active',
    'team_name',
    'streamer_list',
    'prompt_tick',  # Minutes
    'donation_currency',
    'donation_url',
    'bot_name'
]
# TODO: import these from a txt or csv file rather than hard coding them alongside other values
active_charity_streams = [
    # TEAM WOTSIT
    dict(
        zip(
            stream_fields,
            [
                False,
                'wotsit',
                [
                    'kateclick',
                    'natswright',
                    'symyoulater',
                    'pleijpje'
                ],
                5,
                '£',
                'https://gameblast16.everydayhero.com/uk/team-wotsit',
                'default'
            ]
        )
    ),
    # TEAM SAGARACE
    dict(
        zip(
            stream_fields,
            [
                True,
                'sagArace',
                [
                    'bubblemapgaminglive',
                    'misfits_enterprises'
                ],
                5,
                '£',
                'https://gameblast16.everydayhero.com/uk/SagARace',
                'purrbot9000'
            ]
        )
    ),
    # TEAM TIIQ
    dict(
        zip(
            stream_fields,
            [
                False,
                'tiiq',
                [
                    'djtruthsayer',
                    'cmdrhughmann'
                ],
                10,
                '£',
                'https://gameblast16.everydayhero.com/uk/tiiq',
                'tiiqhuntergames'
            ]
        )
    )
]
def get_donation_amount(url=None, verbose=False):
    """Scrape an everydayhero donation page and report fundraising progress.

    :param url: donation page URL; ``None`` returns an error string
    :param verbose: print progress/diagnostic messages
    :return: ``[raised_amount, goal_amount, percentage_raised]`` on success,
        otherwise an ``'ERROR: ...'`` string
    """
    if url is None:
        if verbose:
            print('[-] No URL given, returning error')
        return 'ERROR: NO URL GIVEN'
    if verbose:
        print('[+] Attempting to scrape the charity URL')
    try:
        soup = yarn.soup_page(url=url)
    except Exception as e:
        if verbose:
            print('[-] Unable to soup the charity URL: {}'.format(e))
        return 'ERROR: COULD NOT SCRAPE DONATION AMOUNT'
    # Site-specific scraping. Guarded so that a layout change returns the
    # function's error-string convention instead of raising AttributeError.
    try:
        container_div = soup.find('div', {'id': 'bar--donation__main'})
        sub_container = container_div.find_all('div', {'class': 'donation-bar__detail'})
        raised_amount = sub_container[2].strong.text.strip('£').replace(',', '')
        goal_amount = sub_container[3].strong.text.strip('£').replace(',', '')
        percentage_raised = round((float(raised_amount) / float(goal_amount)) * 100, 2)
    except (AttributeError, IndexError, ValueError, ZeroDivisionError) as e:
        if verbose:
            print('[-] Unexpected page layout: {}'.format(e))
        return 'ERROR: COULD NOT SCRAPE DONATION AMOUNT'
    if verbose:
        print('[+] Current amount: {}'.format(raised_amount))
        print('[+] Current goal: {}'.format(goal_amount))
        print('[+] Percentage raised: {}%'.format(percentage_raised))
    return [raised_amount, goal_amount, percentage_raised]
# Manual smoke test: scrape every configured stream's donation page.
if __name__ == '__main__':
    print('[!] Test running the get donation amount method')
    for charity_stream in active_charity_streams:
        print('[!] Team {} have raised: {}'.format(
            charity_stream['team_name'],
            get_donation_amount(url=charity_stream['donation_url'], verbose=True)
        ))
|
# -*- encoding: utf-8 -*-
import json
import pytest
from django.conf import settings
from django.core.urlresolvers import reverse
from selenium.webdriver.support.select import Select
from splinter.exceptions import ElementDoesNotExist
from multiseek.logic import DATE, AUTOCOMPLETE, RANGE, STRING, VALUE_LIST, get_registry
class SplinterLoginMixin:
    """Mixin providing Django-admin login for splinter-driven browser tests."""

    def login(self, username="admin", password="password"):
        """Log in via the admin login form, then return to the original URL."""
        url = self.browser.url
        self.browser.visit(self.live_server + reverse("admin:login"))
        self.browser.fill('username', username)
        self.browser.fill('password', password)
        self.browser.find_by_css("input[type=submit]").click()
        self.browser.visit(url)
class MultiseekWebPage(SplinterLoginMixin):
    """Helper functions, that take care of the multiseek form web page
    """

    def __init__(self, registry, browser, live_server):
        # Splinter browser, multiseek field registry and live-server base URL.
        self.browser = browser
        self.registry = registry
        self.live_server = live_server

    def get_frame(self, id):
        """Return the multiseek "frame" with the given ID as a dict of widgets."""
        frame = self.browser.find_by_id(id)
        ret = dict()
        ret['frame'] = frame[0]
        fieldset = frame.find_by_tag('fieldset')
        ret['add_field'] = fieldset.find_by_id("add_field")[0]
        ret['add_frame'] = fieldset.find_by_id("add_frame")[0]
        ret['fields'] = fieldset.find_by_id("field-list")[0]
        return ret

    def extract_field_data(self, element):
        """Return a dict describing a single search-field row of the form.

        A "field" row consists of consecutive selects: the searched field,
        the operation, the searched value, the joining operation and the
        close button.

        Helper keys: 'type' is the textual type matching the definitions in
        bpp.multiseek.logic.fields.keys(); 'value' may differ per field type
        (e.g. for multiseek.logic.RANGE it is a list with both bounds).
        """
        ret = {}
        for elem in ['type', 'op', 'prev-op', 'close-button']:
            try:
                e = element.find_by_id(elem)[0]
            # Fixed: the py2-only "except E, x" syntax is a SyntaxError on py3.
            except ElementDoesNotExist as x:
                # prev-op may be None
                if elem != 'prev-op':
                    raise x
                e = None
            ret[elem] = e
        selected = ret['type'].value
        ret['selected'] = selected
        inner_type = self.registry.field_by_name.get(selected).type
        ret['inner_type'] = inner_type
        if inner_type in [STRING, VALUE_LIST]:
            ret['value_widget'] = element.find_by_id("value")
        elif inner_type == RANGE:
            ret['value_widget'] = [
                element.find_by_id("value_min"),
                element.find_by_id("value_max")]
        elif inner_type == DATE:
            ret['value_widget'] = [
                element.find_by_id("value"),
                element.find_by_id("value_max")]
        elif inner_type == AUTOCOMPLETE:
            ret['value_widget'] = element.find_by_id("value")
        else:
            raise NotImplementedError(inner_type)
        code = '$("#%s").multiseekField("getValue")' % element['id']
        ret['value'] = self.browser.evaluate_script(code)
        # These widget types serialize their value as JSON.
        if ret['inner_type'] in (DATE, AUTOCOMPLETE, RANGE):
            if ret['value']:
                ret['value'] = json.loads(ret['value'])
        return ret

    def get_field(self, id):
        """Locate a single field row by ID and return its extracted data."""
        field = self.browser.find_by_id(id)
        if len(field) != 1:
            raise Exception("field not found")
        return self.extract_field_data(field[0])

    def serialize(self):
        """Return the form's serialize() value, i.e. a JSON list."""
        return self.browser.evaluate_script("$('#frame-0').multiseekFrame('serialize')")

    def get_field_value(self, field):
        """Return the current value of the field with the given ID."""
        return self.browser.evaluate_script('$("#%s").multiseekField("getValue")' % field)

    def add_frame(self, frame="frame-0", prev_op=None):
        """Add a sub-frame to *frame*, optionally joined with *prev_op*."""
        # Fixed: was self.execute_script, which this class does not define;
        # add_field below shows the intended self.browser.execute_script.
        if not prev_op:
            return self.browser.execute_script(
                """$("#%s").multiseekFrame('addFrame');""" % frame)
        return self.browser.execute_script("""
            $("#%s").multiseekFrame('addFrame', '%s');
        """ % (frame, prev_op))

    def add_field(self, frame, label, op, value):
        """Add a field row to *frame* with the given label, operation and value."""
        code = """
        $("#%(frame)s").multiseekFrame("addField", "%(label)s", "%(op)s", %(value)s);
        """ % dict(frame=frame,
                   label=unicode(label),  # TODO(review): py2-only builtin; use str() when porting to py3
                   op=unicode(op),
                   value=json.dumps(value))
        self.browser.execute_script(code)

    def load_form_by_name(self, name):
        """Select a saved form by its visible name and reload the page."""
        self.browser.reload()
        select = self.browser.find_by_id("formsSelector")
        for elem in select.find_by_tag('option'):
            if elem.text == name:
                elem.click()
                break
        self.accept_alert()
        self.browser.reload()

    def reset_form(self):
        """Click the form's reset button."""
        self.browser.find_by_id("resetFormButton").click()

    def click_save_button(self):
        """Trigger the save button (splinter types ENTER as a newline)."""
        button = self.browser.find_by_id("saveFormButton").first
        button.type("\n")  # Keys.ENTER

    def save_form_as(self, name):
        """Save the current form under *name* via the prompt dialog."""
        self.click_save_button()
        with self.browser.get_alert() as alert:
            alert.fill_with(name)
            alert.accept()

    def count_elements_in_form_selector(self, name):
        """Count options in the saved-forms selector whose text equals *name*."""
        select = self.browser.find_by_id("formsSelector")
        assert select.visible
        passed = 0
        for option in select.find_by_tag("option"):
            if option.text == name:
                passed += 1
        return passed

    def accept_alert(self):
        """Accept (OK) the currently open browser alert."""
        with self.browser.get_alert() as alert:
            alert.accept()

    def dismiss_alert(self):
        """Dismiss (Cancel) the currently open browser alert."""
        with self.browser.get_alert() as alert:
            alert.dismiss()
@pytest.fixture
def multiseek_page(browser, live_server):
    """Open the multiseek index page wrapped in a MultiseekWebPage helper."""
    browser.visit(live_server + reverse('multiseek:index'))
    registry = get_registry(settings.MULTISEEK_REGISTRY)
    return MultiseekWebPage(browser=browser, registry=registry, live_server=live_server)
@pytest.fixture
def multiseek_admin_page(multiseek_page, admin_user):
    """multiseek_page variant logged in as the Django admin user."""
    multiseek_page.login("admin", "password")
    return multiseek_page
@pytest.fixture(scope='session')
def splinter_firefox_profile_preferences():
    """Firefox preferences that keep all startup pages blank during tests."""
    return {
        "browser.startup.homepage": "about:blank",
        "startup.homepage_welcome_url": "about:blank",
        "startup.homepage_welcome_url.additional": "about:blank"
    }
Proper exception handling
# -*- encoding: utf-8 -*-
import json
import pytest
from django.conf import settings
from django.core.urlresolvers import reverse
from selenium.webdriver.support.select import Select
from splinter.exceptions import ElementDoesNotExist
from multiseek.logic import DATE, AUTOCOMPLETE, RANGE, STRING, VALUE_LIST, get_registry
class SplinterLoginMixin:
    """Mixin providing Django-admin login for splinter-driven browser tests."""

    def login(self, username="admin", password="password"):
        """Log in via the admin login form, then return to the original URL."""
        url = self.browser.url
        self.browser.visit(self.live_server + reverse("admin:login"))
        self.browser.fill('username', username)
        self.browser.fill('password', password)
        self.browser.find_by_css("input[type=submit]").click()
        self.browser.visit(url)
class MultiseekWebPage(SplinterLoginMixin):
"""Helper functions, that take care of the multiseek form web page
"""
def __init__(self, registry, browser, live_server):
    # Splinter browser, multiseek field registry and live-server base URL.
    self.browser = browser
    self.registry = registry
    self.live_server = live_server
def get_frame(self, id):
    """Return the multiseek "frame" with the given ID as a dict of widgets."""
    frame = self.browser.find_by_id(id)
    ret = dict()
    ret['frame'] = frame[0]
    fieldset = frame.find_by_tag('fieldset')
    ret['add_field'] = fieldset.find_by_id("add_field")[0]
    ret['add_frame'] = fieldset.find_by_id("add_frame")[0]
    ret['fields'] = fieldset.find_by_id("field-list")[0]
    return ret
def extract_field_data(self, element):
"""Ta funkcja zwraca słownik z wartościami dla danego pola w
formularzu. Pole - czyli wiersz z kolejnymi selectami:
pole przeszukiwane, operacja, wartość wyszukiwana,
następna operacja, przycisk zamknięcia
Z pomocniczych wartości, zwracanych w słowniku mamy 'type' czyli
tekstowy typ, odpowiadający definicjom w bpp.multiseek.logic.fields.keys()
Zwracana wartość słownika 'value' może być różna dla różnych typów
pól (np dla multiseek.logic.RANGE jest to lista z wartościami z obu pól)
"""
ret = {}
for elem in ['type', 'op', 'prev-op', 'close-button']:
try:
e = element.find_by_id(elem)[0]
except ElementDoesNotExist as x:
# prev-op may be None
if elem != 'prev-op':
raise x
e = None
ret[elem] = e
selected = ret['type'].value
ret['selected'] = selected
inner_type = self.registry.field_by_name.get(selected).type
ret['inner_type'] = inner_type
if inner_type in [STRING, VALUE_LIST]:
ret['value_widget'] = element.find_by_id("value")
elif inner_type == RANGE:
ret['value_widget'] = [
element.find_by_id("value_min"),
element.find_by_id("value_max")]
elif inner_type == DATE:
ret['value_widget'] = [
element.find_by_id("value"),
element.find_by_id("value_max")]
elif inner_type == AUTOCOMPLETE:
ret['value_widget'] = element.find_by_id("value")
else:
raise NotImplementedError(inner_type)
code = '$("#%s").multiseekField("getValue")' % element['id']
ret['value'] = self.browser.evaluate_script(code)
if ret['inner_type'] in (DATE, AUTOCOMPLETE, RANGE):
if ret['value']:
ret['value'] = json.loads(ret['value'])
return ret
def get_field(self, id):
field = self.browser.find_by_id(id)
if len(field) != 1:
raise Exception("field not found")
return self.extract_field_data(field[0])
def serialize(self):
"""Zwraca wartość funkcji serialize() dla formularza, w postaci
listy -- czyli obiekt JSON"""
return self.browser.evaluate_script("$('#frame-0').multiseekFrame('serialize')")
def get_field_value(self, field):
return self.browser.evaluate_script('$("#%s").multiseekField("getValue")' % field)
def add_frame(self, frame="frame-0", prev_op=None):
if not prev_op:
return self.execute_script(
"""$("#%s").multiseekFrame('addFrame');""" % frame)
return self.execute_script("""
$("#%s").multiseekFrame('addFrame', '%s');
""" % (frame, prev_op))
def add_field(self, frame, label, op, value):
code = """
$("#%(frame)s").multiseekFrame("addField", "%(label)s", "%(op)s", %(value)s);
""" % dict(frame=frame,
label=unicode(label),
op=unicode(op),
value=json.dumps(value))
self.browser.execute_script(code)
def load_form_by_name(self, name):
self.browser.reload()
select = self.browser.find_by_id("formsSelector")
for elem in select.find_by_tag('option'):
if elem.text == name:
elem.click()
break
self.accept_alert()
self.browser.reload()
def reset_form(self):
self.browser.find_by_id("resetFormButton").click()
def click_save_button(self):
button = self.browser.find_by_id("saveFormButton").first
button.type("\n") # Keys.ENTER)
def save_form_as(self, name):
self.click_save_button()
with self.browser.get_alert() as alert:
alert.fill_with(name)
alert.accept()
def count_elements_in_form_selector(self, name):
select = self.browser.find_by_id("formsSelector")
assert select.visible == True
passed = 0
for option in select.find_by_tag("option"):
if option.text == name:
passed += 1
return passed
def accept_alert(self):
with self.browser.get_alert() as alert:
alert.accept()
def dismiss_alert(self):
with self.browser.get_alert() as alert:
alert.dismiss()
@pytest.fixture
def multiseek_page(browser, live_server):
    # Open the multiseek index page and wrap the browser in the page-object
    # helper so tests can drive the search form.
    browser.visit(live_server + reverse('multiseek:index'))
    registry = get_registry(settings.MULTISEEK_REGISTRY)
    return MultiseekWebPage(browser=browser, registry=registry, live_server=live_server)
@pytest.fixture
def multiseek_admin_page(multiseek_page, admin_user):
    # Same page object, but authenticated as the Django admin user first.
    # NOTE(review): assumes the `admin_user` fixture creates user "admin"
    # with password "password" — confirm against the pytest-django defaults.
    multiseek_page.login("admin", "password")
    return multiseek_page
@pytest.fixture(scope='session')
def splinter_firefox_profile_preferences():
    # Keep Firefox from opening welcome/homepage tabs during the test session.
    return {
        "browser.startup.homepage": "about:blank",
        "startup.homepage_welcome_url": "about:blank",
        "startup.homepage_welcome_url.additional": "about:blank"
    }
|
# encoding=utf-8
from kombu import Exchange, Connection, Producer
import logging
class RabbitMqHandler(object):
    """Publish messages to a RabbitMQ exchange via kombu with retry support."""

    def __init__(self, connection, exchange_name, type='direct', durable=True):
        """Create the connection, producer and exchange.

        :param connection: kombu connection URL / settings.
        :param exchange_name: name of the exchange to publish to.
        :param type: exchange type (default 'direct').
        :param durable: whether the exchange survives broker restarts.
        :raises Exception: re-raises any setup failure after logging it.
        """
        self._logger = logging.getLogger(__name__)
        try:
            self._connection = Connection(connection)
            self._producer = Producer(self._connection)
            self._task_exchange = Exchange(name=exchange_name, type=type, durable=durable)
        except Exception:
            # BUG FIX: the original referenced an undefined name `auth` and
            # chained `.exception()` on the None return of `.info()`, which
            # itself raised inside the handler.  Log the traceback properly.
            self._logger.exception('Unable to activate the producer')
            raise

    def errback(self, exc, interval):
        # Retry callback for Connection.ensure.
        # BUG FIX: original lacked `self`, so calling the bound method with
        # (exc, interval) raised TypeError instead of logging the retry.
        self._logger.info('Error: %r', exc, exc_info=1)
        self._logger.info('Retry in %s seconds.', interval)

    def publish(self, payload, routing_key=None, serializer=None):
        """Publish `payload`, retrying up to 3 times on connection errors."""
        publish = self._connection.ensure(self._producer, self._producer.publish,
                                          errback=self.errback, max_retries=3)
        publish(payload,
                serializer=serializer,
                exchange=self._task_exchange,
                declare=[self._task_exchange],
                routing_key=routing_key)
        # BUG FIX: the original released the connection after every publish,
        # which disconnects the channel and breaks subsequent publishes
        # ("ChannelError: channel disconnected").  The handler owns the
        # connection for its lifetime, so do not release it here.
Fix error message 'ChannelError: channel disconnected'
# encoding=utf-8
from kombu import Exchange, Connection, Producer
import logging
class RabbitMqHandler(object):
    """Publish messages to a RabbitMQ exchange via kombu with retry support."""

    def __init__(self, connection, exchange_name, type='direct', durable=True):
        """Create the connection, producer and exchange.

        :param connection: kombu connection URL / settings.
        :param exchange_name: name of the exchange to publish to.
        :param type: exchange type (default 'direct').
        :param durable: whether the exchange survives broker restarts.
        :raises Exception: re-raises any setup failure after logging it.
        """
        self._logger = logging.getLogger(__name__)
        try:
            self._connection = Connection(connection)
            self._producer = Producer(self._connection)
            self._task_exchange = Exchange(name=exchange_name, type=type, durable=durable)
        except Exception:
            # BUG FIX: the original referenced an undefined name `auth` and
            # chained `.exception()` on the None return of `.info()`, which
            # itself raised inside the handler.  Log the traceback properly.
            self._logger.exception('Unable to activate the producer')
            raise

    def errback(self, exc, interval):
        # Retry callback for Connection.ensure.
        # BUG FIX: original lacked `self`, so calling the bound method with
        # (exc, interval) raised TypeError instead of logging the retry.
        self._logger.info('Error: %r', exc, exc_info=1)
        self._logger.info('Retry in %s seconds.', interval)

    def publish(self, payload, routing_key=None, serializer=None):
        """Publish `payload`, retrying up to 3 times on connection errors."""
        publish = self._connection.ensure(self._producer, self._producer.publish,
                                          errback=self.errback, max_retries=3)
        publish(payload,
                serializer=serializer,
                exchange=self._task_exchange,
                declare=[self._task_exchange],
                routing_key=routing_key)
|
# encoding: utf-8
class Froshki(object):
    """
    Base class for Froshki objects.

    Subclasses declare `Attribute` instances as class attributes; on the
    first instantiation each one is wrapped in an `AttributeDescriptor`
    and the attribute names are cached in `_registered_attrs`.

    NOTE(review): registration keys on the class's own __dict__, so a
    subclass of a Froshki subclass re-registers only its *own* Attributes
    and shadows the parent's `_registered_attrs` tuple — confirm whether
    inherited attributes are intended to be supported.
    """

    default_values = {}

    def __new__(klass, *args, **kwargs):
        # Lazily register declared Attributes the first time this exact
        # class is instantiated.
        attr_names = []
        class_dict = klass.__dict__
        if '_registered_attrs' not in class_dict:
            for name in class_dict:
                obj = class_dict[name]
                if isinstance(obj, Attribute):
                    attr_names.append(name)
                    attr_descriptor = AttributeDescriptor(
                        name, obj,
                    )
                    setattr(klass, name, attr_descriptor)
            setattr(klass, '_registered_attrs', tuple(attr_names))
        instance = object.__new__(klass)
        return instance

    def __init__(self, source=None, **init_attrs):
        self._data = {}
        # Attribute values' overwrites are ordered by ascending
        # assignment-style explicitness: defaults < source < keywords.
        self._attrs_default()
        if source is not None:
            self._attrs_from_source(source)
        self._overwrite_kw_attrs(init_attrs)

    def _attrs_default(self):
        # Seed every declared default value.
        self._update_attrs(self.__class__.default_values)

    def _attrs_from_source(self, attr_source):
        # Bulk-load values from a mapping (e.g. parsed form data).
        self._update_attrs(attr_source)

    def _overwrite_kw_attrs(self, init_attrs):
        # Keyword arguments win over both defaults and source.
        self._update_attrs(init_attrs)

    def _update_attrs(self, attr_source):
        """Validate each name against the registered set and store it.

        :raises TypeError: if a name was never declared as an Attribute.
        """
        # Hoisted out of the loop for efficiency; also fixes the
        # "attirbute" typo in the error message.
        registered_attrs = self._registered_attrs
        for name in attr_source:
            if name not in registered_attrs:
                raise TypeError(
                    "'{klass}' has no attribute {attr}".format(
                        klass=self.__class__.__name__,
                        attr=name,
                    )
                )
            else:
                self._set_attr_data(name, attr_source[name])

    def _set_attr_data(self, name, input_value):
        # Class-level getattr goes through the descriptor with instance=None,
        # which yields the underlying Attribute object.
        attr_obj = getattr(self.__class__, name)
        input_value = attr_obj.transform(input_value)
        self._data[name] = input_value
class Attribute(object):
    """Base class for attributes declared on Froshki objects."""

    @classmethod
    def transform(klass, input_value):
        """Convert a raw input value before it is stored in Froshki._data.

        The default is the identity transform; subclasses override this
        for custom coercion/validation.
        """
        return input_value
class AttributeDescriptor(object):
    """
    Data descriptor mediating attribute access on Froshki objects.

    Reads return the stored value from the instance's `_data` dict (or the
    raw attribute object on class-level access); writes run the attribute's
    `transform` before storing.
    """

    def __init__(self, attr_name, attr_obj):
        self._attr_name = attr_name
        self._attr = attr_obj

    def __get__(self, instance, klass):
        # BUG FIX: the descriptor protocol passes instance=None on class
        # access, so the check must be `is None`, not truthiness — a falsy
        # instance must still be treated as instance access.
        if instance is None:
            return self._attr
        return instance._data.get(self._attr_name, None)

    def __set__(self, instance, value):
        # `instance is None` never happens via normal attribute assignment;
        # the fallback is preserved from the original for compatibility.
        if instance is None:
            return object.__set__(self, instance, value)
        self._set_data(instance, value)

    def _set_data(self, froshki, input_value):
        # Run the attribute's transform hook, then store on the instance.
        attr_obj = self._attr
        input_value = attr_obj.transform(input_value)
        froshki._data[self._attr_name] = input_value
Refactor attribute accessing.
* For efficiency reasons.
# encoding: utf-8
class Froshki(object):
    """
    Base class for Froshki objects.

    Subclasses declare `Attribute` instances as class attributes; on the
    first instantiation each one is wrapped in an `AttributeDescriptor`
    and the attribute names are cached in `_registered_attrs`.

    NOTE(review): registration keys on the class's own __dict__, so a
    subclass of a Froshki subclass re-registers only its *own* Attributes
    and shadows the parent's `_registered_attrs` tuple — confirm whether
    inherited attributes are intended to be supported.
    """

    default_values = {}

    def __new__(klass, *args, **kwargs):
        # Lazily register declared Attributes the first time this exact
        # class is instantiated.
        attr_names = []
        class_dict = klass.__dict__
        if '_registered_attrs' not in class_dict:
            for name in class_dict:
                obj = class_dict[name]
                if isinstance(obj, Attribute):
                    attr_names.append(name)
                    attr_descriptor = AttributeDescriptor(
                        name, obj,
                    )
                    setattr(klass, name, attr_descriptor)
            setattr(klass, '_registered_attrs', tuple(attr_names))
        instance = object.__new__(klass)
        return instance

    def __init__(self, source=None, **init_attrs):
        self._data = {}
        # Attribute values' overwrites are ordered by ascending
        # assignment-style explicitness: defaults < source < keywords.
        self._attrs_default()
        if source is not None:
            self._attrs_from_source(source)
        self._overwrite_kw_attrs(init_attrs)

    def _attrs_default(self):
        # Seed every declared default value.
        self._update_attrs(self.__class__.default_values)

    def _attrs_from_source(self, attr_source):
        # Bulk-load values from a mapping (e.g. parsed form data).
        self._update_attrs(attr_source)

    def _overwrite_kw_attrs(self, init_attrs):
        # Keyword arguments win over both defaults and source.
        self._update_attrs(init_attrs)

    def _update_attrs(self, attr_source):
        """Validate each name against the registered set and store it.

        :raises TypeError: if a name was never declared as an Attribute.
        """
        registered_attrs = self._registered_attrs
        for name in attr_source:
            if name not in registered_attrs:
                # FIX: corrected "attirbute" typo in the error message.
                raise TypeError(
                    "'{klass}' has no attribute {attr}".format(
                        klass=self.__class__.__name__,
                        attr=name,
                    )
                )
            else:
                self._set_attr_data(name, attr_source[name])

    def _set_attr_data(self, name, input_value):
        # Class-level getattr goes through the descriptor with instance=None,
        # which yields the underlying Attribute object.
        attr_obj = getattr(self.__class__, name)
        input_value = attr_obj.transform(input_value)
        self._data[name] = input_value
class Attribute(object):
    """Base class for attributes declared on Froshki objects."""

    @classmethod
    def transform(klass, input_value):
        """Convert a raw input value before it is stored in Froshki._data.

        The default is the identity transform; subclasses override this
        for custom coercion/validation.
        """
        return input_value
class AttributeDescriptor(object):
    """
    Data descriptor mediating attribute access on Froshki objects.

    Reads return the stored value from the instance's `_data` dict (or the
    raw attribute object on class-level access); writes run the attribute's
    `transform` before storing.
    """

    def __init__(self, attr_name, attr_obj):
        self._attr_name = attr_name
        self._attr = attr_obj

    def __get__(self, instance, klass):
        # BUG FIX: the descriptor protocol passes instance=None on class
        # access, so the check must be `is None`, not truthiness — a falsy
        # instance must still be treated as instance access.
        if instance is None:
            return self._attr
        return instance._data.get(self._attr_name, None)

    def __set__(self, instance, value):
        # `instance is None` never happens via normal attribute assignment;
        # the fallback is preserved from the original for compatibility.
        if instance is None:
            return object.__set__(self, instance, value)
        self._set_data(instance, value)

    def _set_data(self, froshki, input_value):
        # Run the attribute's transform hook, then store on the instance.
        attr_obj = self._attr
        input_value = attr_obj.transform(input_value)
        froshki._data[self._attr_name] = input_value
|
import smtpd
import asyncore
import os
import email
def parsemail(mail, savename):
    """Flatten a parsed email (Python 2): save attachments next to `savename`
    and write the concatenated text bodies to `savename`.

    :param mail: an email.Message (possibly multipart).
    :param savename: path for the text body; attachments are saved as
        '<savename>.atach.<n>.<name>'.
    :returns: the concatenated text body that was written.
    """
    prefix = savename
    mails = []
    names = []

    def parsesingle(mail):
        # Recurse into multipart containers.
        if mail.is_multipart():
            for m in mail.get_payload():
                parsesingle(m)
            return
        name = mail.get_param("name")
        if name:
            # attachment: decode a possibly RFC2047-encoded filename
            name = email.Header.decode_header(email.Header.Header(name))[0][0]
        charset = mail.get_content_charset()
        contenttype = mail.get_content_type()
        data = mail.get_payload(decode=True)
        if charset and contenttype and contenttype.upper().startswith('TEXT'):
            # Re-encode text parts to utf-8, dropping undecodable bytes.
            data = unicode(data, str(charset), "ignore").encode('utf8', 'replace')
        if name:
            # save attachment
            names.append(name)
            attindex = len(names)
            try:
                # BUG FIX: the original referenced an undefined `fname` here,
                # so the bare except always fired and every attachment was
                # saved without its real name/extension.
                f = open(u'%s.atach.%d.%s' % (prefix, attindex, name), 'wb')
            except:
                # fall back to an extensionless path if the decoded name
                # cannot be used in a filename
                f = open('%s.atach.%d' % (prefix, attindex), 'wb')
            f.write(data)
            f.close()
        else:
            mails.append(data)

    parsesingle(mail)
    mailtext = '\r\n\r\n'.join(mails)
    with open(savename, 'wb') as f:
        f.write(mailtext)
    return mailtext
class MainSMTPServer(smtpd.SMTPServer):
    # Minimal debugging SMTP server (Python 2): dumps every received message
    # into ./inbox and echoes a summary to stdout.
    __version__ = 'TEST EMAIL SERVER'

    def process_message(self, peer, mailfrom, rcpttos, data):
        # Called by smtpd for each accepted message; `data` is the raw
        # RFC822 message text.
        import time
        d = os.path.join(os.getcwd(), 'inbox')
        try:
            os.makedirs(d)
        except:
            # directory probably already exists
            pass
        ts = time.strftime('%Y%m%d%H%M%S')
        mail = email.message_from_string(data)
        mailtext = parsemail(mail, os.path.join(d, '%s.txt' % ts))
        # Also keep one raw copy per recipient.
        for t in rcpttos:
            fn = os.path.join(d, '%s-%s' % (ts, t))
            print fn
            with open(fn, 'wb') as f:
                f.write(data)
        kf = '%-15s'
        print time.strftime('%Y-%m-%d %H:%M:%S')
        print kf % 'Client', ':', '%s:%s' % peer
        print kf % 'Mail From', ':', mailfrom
        print kf % 'Mail To', ':', rcpttos
        # NOTE(review): 'Lenth' typo in the output label below.
        print kf % 'Mail Lenth', ':', len(data)
        print mailtext
        return
if __name__ == "__main__":
    # Listen on all interfaces, port 25 (binding usually requires root).
    addr = ('0.0.0.0', 25)
    smtp_server = MainSMTPServer(addr, None)
    print 'mail server @ %s:%s' % addr
    try:
        asyncore.loop()
    except KeyboardInterrupt:
        smtp_server.close()
Fix attachment name bug
import smtpd
import asyncore
import os
import email
def parsemail(mail, savename):
    # Flatten a parsed email (Python 2): save attachments next to `savename`
    # as '<savename>.atach.<n>.<name>' and write the concatenated text
    # bodies to `savename`; returns the concatenated text.
    prefix = savename
    mails = []
    names = []

    def parsesingle(mail):
        # Recurse into multipart containers.
        if mail.is_multipart():
            for m in mail.get_payload():
                parsesingle(m)
            return
        name = mail.get_param("name")
        if name:
            # attachment: decode a possibly RFC2047-encoded filename
            name = email.Header.decode_header(email.Header.Header(name))[0][0]
        charset = mail.get_content_charset()
        contenttype = mail.get_content_type()
        data = mail.get_payload(decode=True)
        if charset and contenttype and contenttype.upper().startswith('TEXT'):
            # Re-encode text parts to utf-8, dropping undecodable bytes.
            data = unicode(data, str(charset), "ignore").encode('utf8', 'replace')
        if name:
            # save attachment
            names.append(name)
            attindex = len(names)
            try:
                f = open(u'%s.atach.%d.%s' % (prefix, attindex, name), 'wb')
            except:
                # fall back to an extensionless path if the decoded name
                # cannot be used in a filename
                f = open('%s.atach.%d' % (prefix, attindex), 'wb')
            f.write(data)
            f.close()
        else:
            mails.append(data)

    parsesingle(mail)
    mailtext = '\r\n\r\n'.join(mails)
    with open(savename, 'wb') as f:
        f.write(mailtext)
    return mailtext
class MainSMTPServer(smtpd.SMTPServer):
    # Minimal debugging SMTP server (Python 2): dumps every received message
    # into ./inbox and echoes a summary to stdout.
    __version__ = 'TEST EMAIL SERVER'

    def process_message(self, peer, mailfrom, rcpttos, data):
        # Called by smtpd for each accepted message; `data` is the raw
        # RFC822 message text.
        import time
        d = os.path.join(os.getcwd(), 'inbox')
        try:
            os.makedirs(d)
        except:
            # directory probably already exists
            pass
        ts = time.strftime('%Y%m%d%H%M%S')
        mail = email.message_from_string(data)
        mailtext = parsemail(mail, os.path.join(d, '%s.txt' % ts))
        # Also keep one raw copy per recipient.
        for t in rcpttos:
            fn = os.path.join(d, '%s-%s' % (ts, t))
            print fn
            with open(fn, 'wb') as f:
                f.write(data)
        kf = '%-15s'
        print time.strftime('%Y-%m-%d %H:%M:%S')
        print kf % 'Client', ':', '%s:%s' % peer
        print kf % 'Mail From', ':', mailfrom
        print kf % 'Mail To', ':', rcpttos
        # NOTE(review): 'Lenth' typo in the output label below.
        print kf % 'Mail Lenth', ':', len(data)
        print mailtext
        return
if __name__ == "__main__":
    # Listen on all interfaces, port 25 (binding usually requires root).
    addr = ('0.0.0.0', 25)
    smtp_server = MainSMTPServer(addr, None)
    print 'mail server @ %s:%s' % addr
    try:
        asyncore.loop()
    except KeyboardInterrupt:
        smtp_server.close()
import sys
# Python 2 hack: force utf-8 as the interpreter's default string encoding.
# reload(sys) restores the removed setdefaultencoding() but also resets
# sys.stdout, so the original stdout is saved and restored around it.
stdout = sys.stdout
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = stdout
# import all extractors
from spacy_extractors import age_extractor as spacy_age_extractor
from spacy_extractors import social_media_extractor as spacy_social_media_extractor
from spacy_extractors import date_extractor as spacy_date_extractor
from spacy_extractors import address_extractor as spacy_address_extractor
from spacy_extractors import customized_extractor as custom_spacy_extractor
from data_extractors import landmark_extraction
from data_extractors import dictionary_extractor
from data_extractors import regex_extractor
from data_extractors import height_extractor
from data_extractors import weight_extractor
from data_extractors import address_extractor
from data_extractors import age_extractor
from data_extractors import table_extractor
from data_extractors import url_country_extractor
from data_extractors import geonames_extractor
from data_extractors.digPhoneExtractor import phone_extractor
from data_extractors.digEmailExtractor import email_extractor
from data_extractors.digPriceExtractor import price_extractor
from data_extractors.digReviewIDExtractor import review_id_extractor
from data_extractors import date_parser
from classifiers import country_classifier
from structured_extractors import ReadabilityExtractor, TokenizerExtractor, FaithfulTokenizerExtractor
import json
import gzip
import re
import spacy
import codecs
from jsonpath_rw import parse
import time
import collections
import numbers
from tldextract import tldextract
import pickle
import copy
from collections import OrderedDict
import sys
_KNOWLEDGE_GRAPH = "knowledge_graph"
_EXTRACTION_POLICY = 'extraction_policy'
_KEEP_EXISTING = 'keep_existing'
_REPLACE = 'replace'
_ERROR_HANDLING = 'error_handling'
_IGNORE_EXTRACTION = 'ignore_extraction'
_IGNORE_DOCUMENT = 'ignore_document'
_RAISE_ERROR = 'raise_error'
_CITY_NAME = 'city_name'
_STATE = 'state'
_COUNTRY = 'country'
_CONTENT_EXTRACTION = 'content_extraction'
_SPACY_EXTRACTION = 'spacy_extraction'
_RAW_CONTENT = 'raw_content'
_INPUT_PATH = 'input_path'
_READABILITY = 'readability'
_LANDMARK = 'landmark'
_TITLE = 'title'
_DESCRIPTION = "description"
_STRICT = 'strict'
_FIELD_NAME = 'field_name'
_CONTENT_STRICT = 'content_strict'
_CONTENT_RELAXED = 'content_relaxed'
_YES = 'yes'
_NO = 'no'
_RECALL_PRIORITY = 'recall_priority'
_INFERLINK_EXTRACTIONS = 'inferlink_extractions'
_LANDMARK_THRESHOLD = 'landmark_threshold'
_LANDMARK_RULES = 'landmark_rules'
_URL = 'url'
_AGE = 'age'
_POSTING_DATE = 'posting_date'
_SOCIAL_MEDIA = 'social_media'
_ADDRESS = 'address'
_RESOURCES = 'resources'
_SPACY_FIELD_RULES = "spacy_field_rules"
_DATA_EXTRACTION = 'data_extraction'
_FIELDS = 'fields'
_EXTRACTORS = 'extractors'
_TOKENS = 'tokens'
_TOKENS_ORIGINAL_CASE = "tokens_original_case"
_SIMPLE_TOKENS = 'simple_tokens'
_SIMPLE_TOKENS_ORIGINAL_CASE = 'simple_tokens_original_case'
_TEXT = 'text'
_DICTIONARY = 'dictionary'
_PICKLES = 'pickle'
_NGRAMS = 'ngrams'
_JOINER = 'joiner'
_PRE_FILTER = 'pre_filter'
_POST_FILTER = 'post_filter'
_PRE_PROCESS = "pre_process"
_TABLE = "table"
_STOP_WORDS = "stop_words"
_GEONAMES = "geonames"
_STATE_TO_COUNTRY = "state_to_country"
_STATE_TO_CODES_LOWER = "state_to_codes_lower"
_POPULATED_PLACES = "populated_places"
_POPULATED_CITIES = "populated_cities"
_EXTRACT_USING_DICTIONARY = "extract_using_dictionary"
_EXTRACT_USING_REGEX = "extract_using_regex"
_EXTRACT_FROM_LANDMARK = "extract_from_landmark"
_EXTRACT_PHONE = "extract_phone"
_EXTRACT_EMAIL = "extract_email"
_EXTRACT_PRICE = "extract_price"
_EXTRACT_HEIGHT = "extract_height"
_EXTRACT_WEIGHT = "extract_weight"
_EXTRACT_ADDRESS = "extract_address"
_EXTRACT_AGE = "extract_age"
_CONFIG = "config"
_DICTIONARIES = "dictionaries"
_INFERLINK = "inferlink"
_HTML = "html"
_SEGMENT_TITLE = "title"
_SEGMENT_INFERLINK_DESC = "inferlink_description"
_SEGMENT_OTHER = "other_segment"
_METHOD_INFERLINK = "inferlink"
_SOURCE_TYPE = "source_type"
_OBFUSCATION = "obfuscation"
_INCLUDE_CONTEXT = "include_context"
_KG_ENHANCEMENT = "kg_enhancement"
_DOCUMENT_ID = "document_id"
_TLD = 'tld'
_FEATURE_COMPUTATION = "feature_computation"
class Core(object):
def __init__(self, extraction_config=None, debug=False, load_spacy=False):
    # Extraction pipeline driver.
    # :param extraction_config: dict that drives process() (phases, fields,
    #     extractors).
    # :param debug: when True, timing information is printed.
    # :param load_spacy: eagerly load the spacy pipeline via prep_spacy();
    #     otherwise self.nlp stays None until needed.
    self.extraction_config = extraction_config
    self.debug = debug
    self.html_title_regex = r'<title>(.*?)</title>'
    # lazily-populated resource caches
    self.tries = dict()
    self.pickles = dict()
    self.jobjs = dict()
    self.global_extraction_policy = None
    self.global_error_handling = None
    # to make sure we do not parse json_paths more times than needed,
    # we define the following 2 properties
    self.content_extraction_path = None
    self.data_extraction_path = dict()
    if load_spacy:
        self.prep_spacy()
    else:
        self.nlp = None
    self.country_code_dict = None
    self.matchers = dict()
    self.geonames_dict = None
    self.state_to_country_dict = None
    self.state_to_codes_lower_dict = None
    self.populated_cities = None
""" Define all API methods """
def process(self, doc, create_knowledge_graph=False):
    """Run the configured extraction pipeline over a single document.

    Phases (all driven by self.extraction_config):
      1. content extraction  (readability / title / landmark / table)
      2. data extraction     (per-field extractors over tokenized segments)
      3. knowledge-graph enhancement (optional)
      4. feature computation (optional)

    Returns the annotated document, or None if any exception occurred
    (the error is printed, not re-raised).
    """
    try:
        if self.extraction_config:
            doc_id = None
            if _DOCUMENT_ID in self.extraction_config:
                doc_id_field = self.extraction_config[_DOCUMENT_ID]
                if doc_id_field in doc:
                    doc_id = doc[doc_id_field]
                    doc[_DOCUMENT_ID] = doc_id
                else:
                    raise KeyError('{} not found in the input document'.format(doc_id_field))
            if _EXTRACTION_POLICY in self.extraction_config:
                self.global_extraction_policy = self.extraction_config[_EXTRACTION_POLICY]
            if _ERROR_HANDLING in self.extraction_config:
                self.global_error_handling = self.extraction_config[_ERROR_HANDLING]

            """Handle content extraction first aka Phase 1"""
            if _CONTENT_EXTRACTION in self.extraction_config:
                if _CONTENT_EXTRACTION not in doc:
                    doc[_CONTENT_EXTRACTION] = dict()
                ce_config = self.extraction_config[_CONTENT_EXTRACTION]
                html_path = ce_config[_INPUT_PATH] if _INPUT_PATH in ce_config else None
                if not html_path:
                    raise KeyError('{} not found in extraction_config'.format(_INPUT_PATH))
                # parse the jsonpath only once per Core instance
                if not self.content_extraction_path:
                    start_time = time.time()
                    self.content_extraction_path = parse(html_path)
                    time_taken = time.time() - start_time
                    if self.debug:
                        print 'time taken to process parse %s' % time_taken
                start_time = time.time()
                matches = self.content_extraction_path.find(doc)
                time_taken = time.time() - start_time
                if self.debug:
                    print 'time taken to process matches %s' % time_taken
                extractors = ce_config[_EXTRACTORS]
                for index in range(len(matches)):
                    for extractor in extractors.keys():
                        if extractor == _READABILITY:
                            re_extractors = extractors[extractor]
                            if isinstance(re_extractors, dict):
                                re_extractors = [re_extractors]
                            for re_extractor in re_extractors:
                                doc[_CONTENT_EXTRACTION] = self.run_readability(doc[_CONTENT_EXTRACTION],
                                                                                matches[index].value, re_extractor)
                        elif extractor == _TITLE:
                            doc[_CONTENT_EXTRACTION] = self.run_title(doc[_CONTENT_EXTRACTION], matches[index].value,
                                                                      extractors[extractor])
                        elif extractor == _LANDMARK:
                            doc[_CONTENT_EXTRACTION] = self.run_landmark(doc[_CONTENT_EXTRACTION], matches[index].value,
                                                                         extractors[extractor], doc[_URL])
                        elif extractor == _TABLE:
                            doc[_CONTENT_EXTRACTION] = self.run_table_extractor(doc[_CONTENT_EXTRACTION],
                                                                                matches[index].value,
                                                                                extractors[extractor])
                # Add the url as segment as well
                if _URL in doc and doc[_URL] and doc[_URL].strip() != '':
                    doc[_CONTENT_EXTRACTION][_URL] = dict()
                    doc[_CONTENT_EXTRACTION][_URL][_TEXT] = doc[_URL]
                    doc[_TLD] = self.extract_tld(doc[_URL])

            """Phase 2: The Data Extraction"""
            if _DATA_EXTRACTION in self.extraction_config:
                de_configs = self.extraction_config[_DATA_EXTRACTION]
                if isinstance(de_configs, dict):
                    de_configs = [de_configs]
                for i in range(len(de_configs)):
                    de_config = de_configs[i]
                    input_paths = de_config[_INPUT_PATH] if _INPUT_PATH in de_config else None
                    if not input_paths:
                        raise KeyError('{} not found for data extraction in extraction_config'.format(_INPUT_PATH))
                    if not isinstance(input_paths, list):
                        input_paths = [input_paths]
                    for input_path in input_paths:
                        if _FIELDS in de_config:
                            # cache parsed jsonpaths per input_path
                            if input_path not in self.data_extraction_path:
                                self.data_extraction_path[input_path] = parse(input_path)
                            matches = self.data_extraction_path[input_path].find(doc)
                            for match in matches:
                                # First rule of DATA Extraction club: Get tokens
                                # Get the crf tokens
                                if _TEXT in match.value:
                                    if _TOKENS_ORIGINAL_CASE not in match.value:
                                        match.value[_TOKENS_ORIGINAL_CASE] = self.extract_crftokens(match.value[_TEXT],
                                                                                                    lowercase=False)
                                    if _TOKENS not in match.value:
                                        match.value[_TOKENS] = self.crftokens_to_lower(
                                            match.value[_TOKENS_ORIGINAL_CASE])
                                    if _SIMPLE_TOKENS not in match.value:
                                        match.value[_SIMPLE_TOKENS] = self.extract_tokens_from_crf(match.value[_TOKENS])
                                    if _SIMPLE_TOKENS_ORIGINAL_CASE not in match.value:
                                        match.value[_SIMPLE_TOKENS_ORIGINAL_CASE] = self.extract_tokens_from_crf(
                                            match.value[_TOKENS_ORIGINAL_CASE])
                                # if _TOKENS not in match.value:
                                #     match.value[_TOKENS] = self.extract_crftokens(match.value[_TEXT])
                                # if _SIMPLE_TOKENS not in match.value:
                                #     match.value[_SIMPLE_TOKENS] = self.extract_tokens_from_crf(match.value[_TOKENS])
                                fields = de_config[_FIELDS]
                                for field in fields.keys():
                                    if field != '*':
                                        """
                                        Special case for inferlink extractions:
                                        For eg, We do not want to extract name from inferlink_posting-date #DUH
                                        """
                                        run_extractor = True
                                        full_path = str(match.full_path)
                                        segment = self.determine_segment(full_path)
                                        if _INFERLINK in full_path:
                                            if field not in full_path:
                                                run_extractor = False
                                            if _DESCRIPTION in full_path or _TITLE in full_path:
                                                run_extractor = True
                                        if run_extractor:
                                            if _EXTRACTORS in fields[field]:
                                                extractors = fields[field][_EXTRACTORS]
                                                for extractor in extractors.keys():
                                                    try:
                                                        foo = getattr(self, extractor)
                                                    except:
                                                        foo = None
                                                    if foo:
                                                        # score is 1.0 because every method thinks it is the best
                                                        score = 1.0
                                                        method = extractor
                                                        if _CONFIG not in extractors[extractor]:
                                                            extractors[extractor][_CONFIG] = dict()
                                                        extractors[extractor][_CONFIG][_FIELD_NAME] = field
                                                        ep = self.determine_extraction_policy(extractors[extractor])
                                                        if extractor == _EXTRACT_FROM_LANDMARK:
                                                            # landmark extractors consume the whole doc
                                                            if _INFERLINK_EXTRACTIONS in full_path and field in full_path:
                                                                method = _METHOD_INFERLINK
                                                            if self.check_if_run_extraction(match.value, field,
                                                                                            extractor,
                                                                                            ep):
                                                                results = foo(doc, extractors[extractor][_CONFIG])
                                                                if results:
                                                                    self.add_data_extraction_results(match.value,
                                                                                                     field,
                                                                                                     extractor,
                                                                                                     self.add_origin_info(
                                                                                                         results,
                                                                                                         method,
                                                                                                         segment,
                                                                                                         score,
                                                                                                         doc_id))
                                                                    if create_knowledge_graph:
                                                                        self.create_knowledge_graph(doc, field,
                                                                                                    results)
                                                        else:
                                                            if self.check_if_run_extraction(match.value, field,
                                                                                            extractor,
                                                                                            ep):
                                                                results = foo(match.value,
                                                                              extractors[extractor][_CONFIG])
                                                                if results:
                                                                    self.add_data_extraction_results(match.value, field,
                                                                                                     extractor,
                                                                                                     self.add_origin_info(
                                                                                                         results,
                                                                                                         method,
                                                                                                         segment,
                                                                                                         score, doc_id))
                                                                    if create_knowledge_graph:
                                                                        self.create_knowledge_graph(doc, field, results)
                                    else:  # extract whatever you can!
                                        # NOTE(review): full_path/segment are only bound in the
                                        # non-'*' branch above; a '*' field relies on a previous
                                        # loop iteration having set them — confirm this is intended.
                                        if _EXTRACTORS in fields[field]:
                                            extractors = fields[field][_EXTRACTORS]
                                            for extractor in extractors.keys():
                                                try:
                                                    foo = getattr(self, extractor)
                                                except Exception as e:
                                                    foo = None
                                                if foo:
                                                    # score is 1.0 because every method thinks it is the best
                                                    score = 1.0
                                                    method = extractor
                                                    if _CONFIG not in extractors[extractor]:
                                                        extractors[extractor][_CONFIG] = dict()
                                                    ep = self.determine_extraction_policy(extractors[extractor])
                                                    if extractor == _EXTRACT_FROM_LANDMARK:
                                                        if _INFERLINK_EXTRACTIONS in full_path and field in full_path:
                                                            method = _METHOD_INFERLINK
                                                        if self.check_if_run_extraction(match.value, field,
                                                                                        extractor,
                                                                                        ep):
                                                            results = foo(doc, extractors[extractor][_CONFIG])
                                                            if results:
                                                                self.add_data_extraction_results(match.value, field,
                                                                                                 extractor,
                                                                                                 self.add_origin_info(
                                                                                                     results,
                                                                                                     method,
                                                                                                     segment,
                                                                                                     score, doc_id))
                                                                if create_knowledge_graph:
                                                                    self.create_knowledge_graph(doc, field, results)
                                                    else:
                                                        # '*' extractors return a dict of field -> results
                                                        results = foo(match.value,
                                                                      extractors[extractor][_CONFIG])
                                                        if results:
                                                            for f, res in results.items():
                                                                # extractors[extractor][_CONFIG][_FIELD_NAME] = f
                                                                self.add_data_extraction_results(match.value, f,
                                                                                                 extractor,
                                                                                                 self.add_origin_info(
                                                                                                     res,
                                                                                                     method,
                                                                                                     segment,
                                                                                                     score, doc_id))
                                                                if create_knowledge_graph:
                                                                    self.create_knowledge_graph(doc, f, res)
                                                else:
                                                    print('method {} not found!'.format(extractor))

            """Optional Phase 3: Knowledge Graph Enhancement"""
            if _KG_ENHANCEMENT in self.extraction_config:
                kg_configs = self.extraction_config[_KG_ENHANCEMENT]
                if isinstance(kg_configs, dict):
                    kg_configs = [kg_configs]
                for i in range(len(kg_configs)):
                    kg_config = kg_configs[i]
                    input_paths = kg_config[_INPUT_PATH] if _INPUT_PATH in kg_config else None
                    if not input_paths:
                        raise KeyError(
                            '{} not found for knowledge graph enhancement in extraction_config'.format(_INPUT_PATH))
                    if not isinstance(input_paths, list):
                        input_paths = [input_paths]
                    for input_path in input_paths:
                        if _FIELDS in kg_config:
                            if input_path not in self.data_extraction_path:
                                self.data_extraction_path[input_path] = parse(input_path)
                            matches = self.data_extraction_path[input_path].find(doc)
                            for match in matches:
                                fields = kg_config[_FIELDS]
                                try:
                                    # fields run in ascending 'priority' order
                                    sorted_fields = self.sort_dictionary_by_fields(fields)
                                except:
                                    raise ValueError('Please ensure there is a priority added to every field in '
                                                     'knowledge_graph enhancement and the priority is an int')
                                for i in range(0, len(sorted_fields)):
                                    field = sorted_fields[i][0]
                                    if _EXTRACTORS in fields[field]:
                                        extractors = fields[field][_EXTRACTORS]
                                        for extractor in extractors.keys():
                                            try:
                                                foo = getattr(self, extractor)
                                            except:
                                                foo = None
                                            if foo:
                                                if _CONFIG not in extractors[extractor]:
                                                    extractors[extractor][_CONFIG] = dict()
                                                extractors[extractor][_CONFIG][_FIELD_NAME] = field
                                                results = foo(match.value, extractors[extractor][_CONFIG])
                                                if results:
                                                    # doc[_KNOWLEDGE_GRA][field] = results
                                                    self.create_knowledge_graph(doc, field, results)

            """Optional Phase 4: feature computation"""
            if _FEATURE_COMPUTATION in self.extraction_config:
                kg_configs = self.extraction_config[_FEATURE_COMPUTATION]
                if isinstance(kg_configs, dict):
                    kg_configs = [kg_configs]
                for i in range(len(kg_configs)):
                    kg_config = kg_configs[i]
                    input_paths = kg_config[_INPUT_PATH] if _INPUT_PATH in kg_config else None
                    if not input_paths:
                        raise KeyError(
                            '{} not found for feature computation in extraction_config'.format(_INPUT_PATH))
                    if not isinstance(input_paths, list):
                        input_paths = [input_paths]
                    for input_path in input_paths:
                        if _FIELDS in kg_config:
                            if input_path not in self.data_extraction_path:
                                self.data_extraction_path[input_path] = parse(input_path)
                            matches = self.data_extraction_path[input_path].find(doc)
                            for match in matches:
                                fields = kg_config[_FIELDS]
                                for field in fields.keys():
                                    if _EXTRACTORS in fields[field]:
                                        extractors = fields[field][_EXTRACTORS]
                                        for extractor in extractors.keys():
                                            try:
                                                foo = getattr(self, extractor)
                                            except:
                                                foo = None
                                            if foo:
                                                if _CONFIG not in extractors[extractor]:
                                                    extractors[extractor][_CONFIG] = dict()
                                                extractors[extractor][_CONFIG][_FIELD_NAME] = field
                                                results = foo(match.value, extractors[extractor][_CONFIG])
                                                if results:
                                                    # doc[_KNOWLEDGE_GRAPH][field] = results
                                                    self.create_knowledge_graph(doc, field, results)

            if _KNOWLEDGE_GRAPH in doc and doc[_KNOWLEDGE_GRAPH]:
                doc[_KNOWLEDGE_GRAPH] = self.reformat_knowledge_graph(doc[_KNOWLEDGE_GRAPH])

            """ Add title and description as fields in the knowledge graph as well"""
            doc = Core.rearrange_description(doc)
            doc = Core.rearrange_title(doc)
    except Exception as e:
        # Best-effort pipeline: swallow everything, report, and return None.
        print e
        print 'Failed doc:', doc['doc_id']
        return None
    return doc
@staticmethod
def rearrange_description(doc):
    # Promote the best available description into the knowledge graph:
    # prefer the inferlink extraction, fall back to strict content text.
    method = 'rearrange_description'
    description = None
    segment = ''
    if _CONTENT_EXTRACTION in doc:
        ce = doc[_CONTENT_EXTRACTION]
        if _INFERLINK_EXTRACTIONS in ce:
            if _DESCRIPTION in ce[_INFERLINK_EXTRACTIONS]:
                description = ce[_INFERLINK_EXTRACTIONS][_DESCRIPTION][_TEXT]
                segment = _INFERLINK
        if not description or description.strip() == '':
            if _CONTENT_STRICT in ce:
                description = ce[_CONTENT_STRICT][_TEXT]
                segment = _CONTENT_STRICT
        if description and description != '':
            if _KNOWLEDGE_GRAPH not in doc:
                doc[_KNOWLEDGE_GRAPH] = dict()
            # replaces any existing description list
            doc[_KNOWLEDGE_GRAPH][_DESCRIPTION] = list()
            o = dict()
            o['value'] = description
            o['key'] = 'description'
            o['confidence'] = 1
            o['provenance'] = [Core.custom_provenance_object(method, segment, doc[_DOCUMENT_ID])]
            doc[_KNOWLEDGE_GRAPH][_DESCRIPTION].append(o)
    return doc
@staticmethod
def sort_dictionary_by_fields(dictionary):
    # Sort a {field: config} mapping by each config's 'priority' value and
    # return the (field, config) pairs in that order.  Raises if any config
    # lacks 'priority' (callers translate that into a ValueError).
    # NOTE: iteritems() is Python 2 only.
    sorted_d = OrderedDict(sorted(dictionary.iteritems(), key=lambda x: x[1]['priority']))
    return sorted_d.items()
@staticmethod
def custom_provenance_object(method, segment, document_id):
    # Build the provenance dict attached to synthesized KG entries
    # (see rearrange_description / rearrange_title).
    prov = dict()
    prov['method'] = method
    prov['source'] = dict()
    prov['source']['segment'] = segment
    prov['source'][_DOCUMENT_ID] = document_id
    return prov
@staticmethod
def rearrange_title(doc):
    # Promote the best available title into the knowledge graph:
    # prefer the inferlink extraction, fall back to the HTML <title> text.
    method = 'rearrange_title'
    title = None
    segment = ''
    if _CONTENT_EXTRACTION in doc:
        ce = doc[_CONTENT_EXTRACTION]
        if _INFERLINK_EXTRACTIONS in ce:
            if _TITLE in ce[_INFERLINK_EXTRACTIONS]:
                title = ce[_INFERLINK_EXTRACTIONS][_TITLE][_TEXT]
                segment = _INFERLINK
        if not title or title.strip() == '':
            if _TITLE in ce:
                title = ce[_TITLE][_TEXT]
                segment = _HTML
        if title and title != '':
            if _KNOWLEDGE_GRAPH not in doc:
                doc[_KNOWLEDGE_GRAPH] = dict()
            # replaces any existing title list
            doc[_KNOWLEDGE_GRAPH][_TITLE] = list()
            o = dict()
            o['value'] = title
            o['key'] = 'title'
            o['confidence'] = 1
            o['provenance'] = [Core.custom_provenance_object(method, segment, doc[_DOCUMENT_ID])]
            doc[_KNOWLEDGE_GRAPH][_TITLE].append(o)
    return doc
@staticmethod
def extract_tld(url):
    """Return the registered domain plus public suffix for `url`
    (e.g. 'example.co.uk').

    The original called tldextract.extract(url) twice, parsing the URL
    twice for no benefit; hoist to a single call.
    """
    ext = tldextract.extract(url)
    return ext.domain + '.' + ext.suffix
@staticmethod
def create_knowledge_graph(doc, field_name, extractions):
    # Merge a list of extraction dicts into doc['knowledge_graph'][field_name],
    # grouped under a normalized string key derived from each extraction's
    # value (plus any metadata qualifiers), or its explicit 'key'.
    if _KNOWLEDGE_GRAPH not in doc:
        doc[_KNOWLEDGE_GRAPH] = dict()
    if field_name not in doc[_KNOWLEDGE_GRAPH]:
        doc[_KNOWLEDGE_GRAPH][field_name] = dict()
    for extraction in extractions:
        key = extraction['value']
        # NOTE: basestring is Python 2 only.
        if (isinstance(key, basestring) or isinstance(key, numbers.Number)) and field_name != _POSTING_DATE:
            # try except block because unicode characters will not be lowered
            try:
                key = str(key).strip().lower()
            except:
                pass
        if 'metadata' in extraction:
            # qualify the key with sorted metadata so distinct qualified
            # values group separately
            sorted_metadata = Core.sort_dict(extraction['metadata'])
            for k, v in sorted_metadata.iteritems():
                if isinstance(v, numbers.Number):
                    v = str(v)
                # if v:
                #     v = v.encode('utf-8')
                if v and v.strip() != '':
                    # key += '-' + str(k) + ':' + str(v)
                    key = '{}-{}:{}'.format(key, k, v)
        if 'key' in extraction:
            # explicit key wins over the derived one
            key = extraction['key']
        # TODO FIX THIS HACK
        # NOTE(review): 32766 is presumably the Lucene/ES max term length;
        # oversize keys are truncated to 500 chars — confirm.
        if len(key) > 32766:
            key = key[0:500]
        if key not in doc[_KNOWLEDGE_GRAPH][field_name]:
            doc[_KNOWLEDGE_GRAPH][field_name][key] = list()
        doc[_KNOWLEDGE_GRAPH][field_name][key].append(extraction)
    return doc
@staticmethod
def reformat_knowledge_graph(knowledge_graph):
new_kg = dict()
for semantic_type in knowledge_graph.keys():
new_kg[semantic_type] = list()
values = knowledge_graph[semantic_type]
for key in values.keys():
o = dict()
o['key'] = key
new_provenances, metadata, value = Core.rearrange_provenance(values[key])
o['provenance'] = new_provenances
if metadata:
o['qualifiers'] = metadata
o['value'] = value
# default confidence value, to be updated by later analysis
o['confidence'] = 1
new_kg[semantic_type].append(o)
return new_kg
@staticmethod
def rearrange_provenance(old_provenances):
new_provenances = list()
metadata = None
value = None
for prov in old_provenances:
new_prov = dict()
method = None
confidence = None
if 'origin' in prov:
origin = prov['origin']
if 'obfuscation' in prov:
origin['extraction_metadata'] = dict()
origin['extraction_metadata']['obfuscation'] = prov['obfuscation']
method = origin['method']
confidence = origin['score']
origin.pop('score', None)
origin.pop('method', None)
new_prov['source'] = origin
if 'context' in prov:
new_prov['source']['context'] = prov['context']
if 'metadata' in prov and not metadata:
metadata = prov['metadata']
if method:
new_prov["method"] = method
if not value:
value = prov['value']
new_prov['extracted_value'] = value
if confidence:
new_prov['confidence'] = dict()
new_prov['confidence']['extraction'] = confidence
new_provenances.append(new_prov)
return new_provenances, metadata, value
@staticmethod
def add_data_extraction_results(d, field_name, method_name, results):
    """Store *results* under d[data_extraction][field_name][method_name],
    appending when results already exist. Mutates and returns *d*."""
    field_map = d.setdefault(_DATA_EXTRACTION, dict())
    method_map = field_map.setdefault(field_name, dict()).setdefault(method_name, dict())
    if isinstance(results, dict):
        results = [results]
    if 'results' in method_map:
        method_map['results'].extend(results)
    else:
        method_map['results'] = results
    return d
@staticmethod
def check_if_run_extraction(d, field_name, method_name, extraction_policy):
    """Decide whether an extractor should run: always when there are no prior
    results for (field, method), otherwise only when the policy is replace."""
    previous = d.get(_DATA_EXTRACTION, {}).get(field_name, {}).get(method_name, {})
    if 'results' not in previous:
        return True
    return extraction_policy == _REPLACE
@staticmethod
def determine_segment(json_path):
    """Map a JSON path from an extraction back to the coarse document
    segment it came from (inferlink description, html, content, ...)."""
    if _SEGMENT_INFERLINK_DESC in json_path:
        return _SEGMENT_INFERLINK_DESC
    # any other inferlink path counts as plain html
    if _INFERLINK in json_path:
        return _HTML
    if _CONTENT_STRICT in json_path:
        return _CONTENT_STRICT
    if _CONTENT_RELAXED in json_path:
        return _CONTENT_RELAXED
    if _TITLE in json_path:
        return _TITLE
    if _URL in json_path:
        return _URL
    return _SEGMENT_OTHER
@staticmethod
def add_origin_info(results, method, segment, score, doc_id=None):
if results:
for result in results:
o = dict()
o['segment'] = segment
o['method'] = method
o['score'] = score
if doc_id:
o[_DOCUMENT_ID] = doc_id
result['origin'] = o
return results
def run_landmark(self, content_extraction, html, landmark_config, url):
    """Run inferlink/landmark rule extraction over *html* and store the
    results in *content_extraction* under the configured field name.

    Skips work when results already exist unless the extraction policy is
    'replace'. Raises ValueError for an out-of-range landmark threshold.
    """
    field_name = landmark_config[_FIELD_NAME] if _FIELD_NAME in landmark_config else _INFERLINK_EXTRACTIONS
    ep = self.determine_extraction_policy(landmark_config)
    extraction_rules = self.consolidate_landmark_rules()
    if _LANDMARK_THRESHOLD in landmark_config:
        pct = landmark_config[_LANDMARK_THRESHOLD]
        if not 0.0 <= pct <= 1.0:
            raise ValueError('landmark threshold should be a float between {} and {}'.format(0.0, 1.0))
    else:
        # default landmark confidence threshold
        pct = 0.5
    if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
        start_time = time.time()
        ifl_extractions = Core.extract_landmark(html, url, extraction_rules, pct)
        if isinstance(ifl_extractions, list):
            # we have a rogue post type page, put it in its place
            content_extraction[field_name] = dict()
            content_extraction[field_name]['inferlink_posts'] = ifl_extractions
        else:
            time_taken = time.time() - start_time
            if self.debug:
                print 'time taken to process landmark %s' % time_taken
            if ifl_extractions and len(ifl_extractions.keys()) > 0:
                # wrap each extracted value as {'text': value}
                content_extraction[field_name] = dict()
                for key in ifl_extractions:
                    o = dict()
                    o[key] = dict()
                    o[key]['text'] = ifl_extractions[key]
                    content_extraction[field_name].update(o)
    return content_extraction
def consolidate_landmark_rules(self):
    """Merge every configured landmark rules file into one rules dict.

    Raises KeyError when the extraction config lacks resources.landmark.
    """
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _LANDMARK not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _LANDMARK))
    rules = dict()
    for rules_file in resources[_LANDMARK]:
        rules.update(Core.load_json_file(rules_file))
    return rules
def get_dict_file_name_from_config(self, dict_name):
    """Return the configured file path for dictionary *dict_name*.

    Raises KeyError naming the missing config level.
    """
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _DICTIONARIES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _DICTIONARIES))
    if dict_name not in resources[_DICTIONARIES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _DICTIONARIES, dict_name))
    return resources[_DICTIONARIES][dict_name]
def get_pickle_file_name_from_config(self, pickle_name):
    """Return the configured file path for pickle *pickle_name*.

    Raises KeyError naming the missing config level.
    """
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _PICKLES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _PICKLES))
    if pickle_name not in resources[_PICKLES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _PICKLES, pickle_name))
    return resources[_PICKLES][pickle_name]
def get_spacy_field_rules_from_config(self, field_name):
    """Return the configured spaCy rules file for *field_name*.

    Raises KeyError naming the missing config level.
    """
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _SPACY_FIELD_RULES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _SPACY_FIELD_RULES))
    if field_name not in resources[_SPACY_FIELD_RULES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _SPACY_FIELD_RULES,
                                                                      field_name))
    return resources[_SPACY_FIELD_RULES][field_name]
def run_title(self, content_extraction, html, title_config):
field_name = title_config[_FIELD_NAME] if _FIELD_NAME in title_config else _TITLE
ep = self.determine_extraction_policy(title_config)
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
start_time = time.time()
extracted_title = self.extract_title(html)
if extracted_title:
content_extraction[field_name] = extracted_title
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process title %s' % time_taken
return content_extraction
def run_table_extractor(self, content_extraction, html, table_config):
field_name = table_config[_FIELD_NAME] if _FIELD_NAME in table_config else _TABLE
ep = self.determine_extraction_policy(table_config)
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
start_time = time.time()
content_extraction[field_name] = self.extract_table(html)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process table %s' % time_taken
return content_extraction
def run_readability(self, content_extraction, html, re_extractor):
recall_priority = False
field_name = None
if _STRICT in re_extractor:
recall_priority = False if re_extractor[_STRICT] == _YES else True
field_name = _CONTENT_RELAXED if recall_priority else _CONTENT_STRICT
options = {_RECALL_PRIORITY: recall_priority}
if _FIELD_NAME in re_extractor:
field_name = re_extractor[_FIELD_NAME]
ep = self.determine_extraction_policy(re_extractor)
start_time = time.time()
readability_text = self.extract_readability(html, options)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process readability %s' % time_taken
if readability_text:
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
content_extraction[field_name] = readability_text
return content_extraction
def determine_extraction_policy(self, config):
    """Resolve the effective extraction policy for *config*.

    Falls back to the instance-wide global policy, defaulting to replace.
    Raises ValueError for an unrecognized policy value.
    """
    if not config:
        return _REPLACE
    policy = _REPLACE
    if _EXTRACTION_POLICY in config:
        policy = config[_EXTRACTION_POLICY]
    elif self.global_extraction_policy:
        policy = self.global_extraction_policy
    if policy and policy not in (_KEEP_EXISTING, _REPLACE):
        raise ValueError('extraction_policy can either be {} or {}'.format(_KEEP_EXISTING, _REPLACE))
    return policy
@staticmethod
def _relevant_text_from_context(text_or_tokens, results, field_name):
    """Attach a human-readable context window to each extraction result.

    For string input the window is 10 characters either side of the match;
    for token-list input, 5 tokens either side. The matched span is wrapped
    in an <etk 'attribute' = '...'> tag inside context['text']. Mutates
    each result's 'context' in place and returns *results* (listified).
    """
    if results:
        tokens_len = len(text_or_tokens)
        if not isinstance(results, list):
            results = [results]
        for result in results:
            if 'context' in result:
                start = int(result['context']['start'])
                end = int(result['context']['end'])
                if isinstance(text_or_tokens, basestring):
                    # character-based window, clamped to the text bounds
                    if start - 10 < 0:
                        new_start = 0
                    else:
                        new_start = start - 10
                    if end + 10 > tokens_len:
                        new_end = tokens_len
                    else:
                        new_end = end + 10
                    relevant_text = '<etk \'attribute\' = \'{}\'>{}</etk>'.format(field_name,
                                                                                 text_or_tokens[start:end].encode(
                                                                                     'utf-8'))
                    result['context']['text'] = '{} {} {}'.format(text_or_tokens[new_start:start].encode('utf-8'),
                                                                  relevant_text,
                                                                  text_or_tokens[end:new_end].encode('utf-8'))
                    result['context']['input'] = _TEXT
                else:
                    # token-based window, clamped to the token list bounds
                    if start - 5 < 0:
                        new_start = 0
                    else:
                        new_start = start - 5
                    if end + 5 > tokens_len:
                        new_end = tokens_len
                    else:
                        new_end = end + 5
                    relevant_text = '<etk \'attribute\' = \'{}\'>{}</etk>'.format(field_name,
                                                                                 ' '.join(text_or_tokens[
                                                                                     start:end]).encode(
                                                                                     'utf-8'))
                    result['context']['text'] = '{} {} {} '.format(
                        ' '.join(text_or_tokens[new_start:start]).encode('utf-8'),
                        relevant_text,
                        ' '.join(text_or_tokens[end:new_end]).encode('utf-8'))
                    result['context']['tokens_left'] = text_or_tokens[new_start:start]
                    result['context']['tokens_right'] = text_or_tokens[end:new_end]
                    result['context']['input'] = _TOKENS
    return results
@staticmethod
def sort_dict(dictionary):
return collections.OrderedDict(sorted(dictionary.items()))
@staticmethod
def load_json_file(file_name):
json_x = json.load(codecs.open(file_name, 'r'))
return json_x
def load_json(self, json_name):
    """Load a JSON resource configured under pickles, memoized in self.jobjs."""
    if json_name not in self.jobjs:
        resource_path = self.get_pickle_file_name_from_config(json_name)
        self.jobjs[json_name] = self.load_json_file(resource_path)
    return self.jobjs[json_name]
def load_trie(self, file_name):
    """Load a word list (gzipped or plain JSON) and build a lowercase trie.

    NOTE(review): the second positional argument to json.load is the
    Python 2 'encoding' parameter; this code is Python 2 only.
    """
    try:
        # first assume the file is gzip-compressed JSON
        values = json.load(gzip.open(file_name), 'utf-8')
    except:
        values = None
    if not values:
        # fall back to reading plain (uncompressed) JSON
        values = json.load(codecs.open(file_name), 'utf-8')
    trie = dictionary_extractor.populate_trie(map(lambda x: x.lower(), values))
    return trie
def load_dictionary(self, field_name, dict_name):
    """Ensure the trie for *field_name* is cached, building it once from the
    file configured for *dict_name*."""
    if field_name in self.tries:
        return
    dict_path = self.get_dict_file_name_from_config(dict_name)
    self.tries[field_name] = self.load_trie(dict_path)
def load_pickle_file(self, pickle_path):
return pickle.load(open(pickle_path, 'rb'))
def load_pickle(self, pickle_name):
    """Load a configured pickle resource, memoized in self.pickles."""
    if pickle_name not in self.pickles:
        resource_path = self.get_pickle_file_name_from_config(pickle_name)
        self.pickles[pickle_name] = self.load_pickle_file(resource_path)
    return self.pickles[pickle_name]
def classify_table(self, d, config):
    """Public entry point for table classification; delegates to classify_table_."""
    return self.classify_table_(d, config)
def classify_table_(self, d, config):
    """Predict a semantic label for table document *d*.

    config supplies the pickled classifier ('model') and the semantic type
    definitions ('sem_types'). Returns a one-element result list carrying
    the predicted label, all label scores, the cleaned table array, and a
    context record keyed on the table's fingerprint.
    """
    model = config['model']
    sem_types = config['sem_types']
    cl_model = self.load_pickle(model)
    sem_types = self.load_json(sem_types)
    tc = table_extractor.TableClassification(sem_types, cl_model)
    l = tc.predict_label(d)
    tarr = table_extractor.Toolkit.create_table_array(d)
    # clean_cells mutates tarr in place
    table_extractor.Toolkit.clean_cells(tarr)
    res = dict()
    # index 2 is presumably the winning label in predict_label's output --
    # TODO confirm against table_extractor
    res['value'] = l[2]
    res['all_labels'] = l
    res['context'] = dict(start=0, end=0, input=d['fingerprint'], text=str(tarr))
    res['tarr'] = tarr
    return [res]
def table_data_extractor(self, d, config):
    """Public entry point for table data extraction; delegates to table_data_extractor_."""
    return self.table_data_extractor_(d, config)
def table_data_extractor_(self, d, config):
    """Extract data from table document *d* with either a rule-based (JSON)
    or learned (pickled) model, per config['method']."""
    sem_types = self.load_json(config['sem_types'])
    method = config['method']
    if method == 'rule_based':
        model = self.load_json(config['model'])
    else:
        model = self.load_pickle(config['model'])
    extractor = table_extractor.InformationExtraction(sem_types, method, model)
    return extractor.extract(d)
def extract_using_dictionary(self, d, config):
    """Run trie/dictionary extraction over the document's simple tokens.

    BUG FIX: the post filter was previously read from the pre_filter config
    key (copy-paste error), so a configured post_filter was never applied;
    it is now read from the post_filter key.
    Raises KeyError when no dictionary is configured; returns None for an
    empty token list.
    """
    field_name = config[_FIELD_NAME]
    # this method is self aware that it needs tokens as input
    tokens = d[_SIMPLE_TOKENS]
    if not tokens:
        return None
    if _DICTIONARY not in config:
        raise KeyError('No dictionary specified for {}'.format(field_name))
    self.load_dictionary(field_name, config[_DICTIONARY])
    pre_process = None
    if _PRE_PROCESS in config and len(config[_PRE_PROCESS]) > 0:
        pre_process = self.string_to_lambda(config[_PRE_PROCESS][0])
    if not pre_process:
        pre_process = lambda x: x
    pre_filter = None
    if _PRE_FILTER in config and len(config[_PRE_FILTER]) > 0:
        pre_filter = self.string_to_lambda(config[_PRE_FILTER][0])
    if not pre_filter:
        pre_filter = lambda x: x
    post_filter = None
    # read from _POST_FILTER (was _PRE_FILTER)
    if _POST_FILTER in config and len(config[_POST_FILTER]) > 0:
        post_filter = self.string_to_lambda(config[_POST_FILTER][0])
    if not post_filter:
        post_filter = lambda x: isinstance(x, basestring)
    ngrams = int(config[_NGRAMS]) if _NGRAMS in config else 1
    joiner = config[_JOINER] if _JOINER in config else ' '
    results = self._extract_using_dictionary(tokens, pre_process, self.tries[field_name],
                                             pre_filter, post_filter, ngrams, joiner)
    return self._relevant_text_from_context(d[_SIMPLE_TOKENS], results, field_name)
@staticmethod
def _extract_using_dictionary(tokens, pre_process, trie, pre_filter, post_filter, ngrams, joiner):
    """Call the dictionary extractor; normalize empty results to None."""
    result = dictionary_extractor.extract_using_dictionary(
        tokens, pre_process=pre_process, trie=trie, pre_filter=pre_filter,
        post_filter=post_filter, ngrams=ngrams, joiner=joiner)
    if result and len(result) > 0:
        return result
    return None
def extract_website_domain(self, d, config):
    """Extract the registered domain (TLD) from the document text, which is
    assumed to hold a URL."""
    text = d[_TEXT]
    results = {"value": self.extract_tld(text)}
    return self._relevant_text_from_context(d[_TEXT], results, config[_FIELD_NAME])
def extract_using_regex(self, d, config):
    """Run a configured regular expression over the document text.

    config keys: 'regex' (required), 'regex_options' (list of re flag names
    such as 'IGNORECASE'), optional 'include_context' ('false' disables) and
    pre_filter. SECURITY FIX: flag names are resolved with getattr(re, name)
    instead of eval(), so config strings can no longer execute code.
    """
    text = d[_TEXT]
    include_context = True
    if "include_context" in config and config['include_context'].lower() == 'false':
        include_context = False
    if "regex" not in config:
        raise KeyError('No regular expression found in {}'.format(json.dumps(config)))
    regex = config["regex"]
    flags = 0
    if "regex_options" in config:
        regex_options = config['regex_options']
        if not isinstance(regex_options, list):
            raise ValueError("regular expression options should be a list in {}".format(json.dumps(config)))
        for regex_option in regex_options:
            # look up the flag constant (e.g. re.IGNORECASE) by name
            flags = flags | getattr(re, regex_option)
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    result = self._extract_using_regex(text, regex, include_context, flags)
    # TODO ADD code to handle post_filters
    return self._relevant_text_from_context(d[_TEXT], result, config[_FIELD_NAME])
@staticmethod
def _extract_using_regex(text, regex, include_context, flags):
    """Run the regex extractor; empty results become None.

    NOTE(review): the broad except prints and swallows every error,
    including bad patterns/flags -- failures are silent except for stdout.
    """
    try:
        result = regex_extractor.extract(text, regex, include_context, flags)
        return result if result and len(result) > 0 else None
    except Exception as e:
        print e
        return None
def extract_using_custom_spacy(self, d, config, field_rules=None):
    """Apply user-defined spaCy token rules to the original-case tokens.

    Rules are loaded from the configured rules file unless passed in.
    """
    if not field_rules:
        rules_path = self.get_spacy_field_rules_from_config(config[_SPACY_FIELD_RULES])
        field_rules = self.load_json_file(rules_path)
    if not self.nlp:
        self.prep_spacy()
    tokens = d[_SIMPLE_TOKENS_ORIGINAL_CASE]
    nlp_doc = self.nlp(tokens)
    extracted = custom_spacy_extractor.extract(field_rules, nlp_doc, self.nlp)
    return self._relevant_text_from_context(tokens, extracted, config[_FIELD_NAME])
def extract_using_spacy(self, d, config):
    """Dispatch to the spaCy matcher-based extractor for the configured field.

    Supported fields: age, posting_date (with optional post filters),
    social_media, address. Returns context-annotated results, or None for
    any other field name.
    """
    field_name = config[_FIELD_NAME]
    if not self.nlp:
        self.prep_spacy()
    nlp_doc = self.nlp(d[_SIMPLE_TOKENS])
    # make sure the matcher for this field is built and cached
    self.load_matchers(field_name)
    results = None
    if field_name == _AGE:
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS],
                                                   spacy_age_extractor.extract(nlp_doc, self.matchers[_AGE]), _AGE)
    elif field_name == _POSTING_DATE:
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS],
                                                   spacy_date_extractor.extract(nlp_doc,
                                                                                self.matchers[_POSTING_DATE]),
                                                   _POSTING_DATE)
        # posting dates additionally support configurable post filters
        if _POST_FILTER in config:
            post_filters = config[_POST_FILTER]
            results = self.run_post_filters_results(results, post_filters)
    elif field_name == _SOCIAL_MEDIA:
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS],
                                                   spacy_social_media_extractor.extract(nlp_doc,
                                                                                        self.matchers[
                                                                                            _SOCIAL_MEDIA]),
                                                   _SOCIAL_MEDIA)
    elif field_name == _ADDRESS:
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS],
                                                   spacy_address_extractor.extract(nlp_doc,
                                                                                   self.matchers[_ADDRESS]),
                                                   _ADDRESS)
    return results
def extract_from_landmark(self, doc, config):
    """Pull field values out of previously stored inferlink (landmark) extractions.

    When config lists explicit fields, those are used; otherwise any
    inferlink field whose name contains the configured field name is taken.
    Returns a list of results or None. The original duplicated the per-field
    filtering logic in both branches; it now lives in a single helper.
    """
    field_name = config[_FIELD_NAME]
    if _CONTENT_EXTRACTION not in doc:
        return None
    if _INFERLINK_EXTRACTIONS not in doc[_CONTENT_EXTRACTION]:
        return None
    inferlink_extraction = doc[_CONTENT_EXTRACTION][_INFERLINK_EXTRACTIONS]
    fields = config[_FIELDS] if _FIELDS in config else None
    pre_filters = config[_PRE_FILTER] if _PRE_FILTER in config else None
    post_filters = config[_POST_FILTER] if _POST_FILTER in config else None
    if fields:
        candidates = [f for f in fields if f in inferlink_extraction]
    else:
        # The logic below: if the inferlink rules do not have semantic
        # information in the field names returned, too bad
        candidates = [f for f in inferlink_extraction.keys() if field_name in f]
    results = list()
    for field in candidates:
        result = self._extract_landmark_field(inferlink_extraction[field],
                                              pre_filters, post_filters, field_name)
        if result:
            results.extend(result)
    return results if len(results) > 0 else None

def _extract_landmark_field(self, d, pre_filters, post_filters, field_name):
    """Apply pre/post filters to one inferlink field value and normalize it
    to a result list (or None)."""
    if pre_filters:
        # Assumption all pre_filters are lambdas
        d[_TEXT] = self.run_user_filters(d, pre_filters, field_name)
    if post_filters:
        post_result = self.run_user_filters(d, post_filters, field_name)
        if post_result:
            return self.handle_text_or_results(post_result)
        return None
    return self.handle_text_or_results(d[_TEXT])
def extract_phone(self, d, config):
    """Extract phone numbers (including obfuscated forms) from the simple tokens."""
    tokens = d[_SIMPLE_TOKENS]
    # source type as in text vs url
    source_type = config[_SOURCE_TYPE] if _SOURCE_TYPE in config else 'text'
    include_context = True
    output_format = _OBFUSCATION
    extracted = self._extract_phone(tokens, source_type, include_context, output_format)
    return self._relevant_text_from_context(d[_SIMPLE_TOKENS], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_phone(tokens, source_type, include_context, output_format):
    """Delegate to the phone extractor; falsy results become None."""
    extracted = phone_extractor.extract(tokens, source_type, include_context, output_format)
    return extracted if extracted else None
def extract_email(self, d, config):
    """Extract email addresses from the document text, with optional
    pre-filtering and configurable context inclusion."""
    text = d[_TEXT]
    include_context = True
    if _INCLUDE_CONTEXT in config:
        include_context = config[_INCLUDE_CONTEXT].upper() == 'TRUE'
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_email(text, include_context)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_email(text, include_context):
    """Extract emails from *text* using regular expressions.

    :param text: the input text to scan.
    :param include_context: when True, include the context matched by the
        regular expressions.
    :return: an object with the extracted email and/or context.
    """
    return email_extractor.extract(text, include_context)
def extract_price(self, d, config):
    """Extract price mentions from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_price(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_price(text):
    """Delegate price extraction to the price extractor module."""
    return price_extractor.extract(text)
def extract_height(self, d, config):
    """Extract height mentions from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_height(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_height(text):
    """Delegate height extraction to the height extractor module."""
    return height_extractor.extract(text)
def extract_weight(self, d, config):
    """Extract weight mentions from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_weight(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_weight(text):
    """Delegate weight extraction to the weight extractor module."""
    return weight_extractor.extract(text)
def extract_address(self, d, config):
    """Extract postal addresses from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_address(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_address(text):
    """Delegate address extraction to the address extractor module."""
    return address_extractor.extract(text)
def extract_age(self, d, config):
    """Extract age mentions from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_age(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_age(text):
    """Delegate age extraction to the age extractor module."""
    return age_extractor.extract(text)
def extract_review_id(self, d, config):
    """Extract review identifiers from the document text."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_review_id(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_review_id(text):
    """Delegate review-id extraction to the review id extractor module."""
    return review_id_extractor.extract(text)
@staticmethod
def handle_text_or_results(x):
    """Normalize *x* into a list of result objects.

    A string becomes [{'value': x}], a dict is wrapped in a list, a list is
    returned as-is, anything else yields None.
    """
    if isinstance(x, basestring):
        return [{'value': x}]
    if isinstance(x, dict):
        return [x]
    if isinstance(x, list):
        return x
    return None
def run_user_filters(self, d, filters, field_name):
    """Apply user-configured filters to document *d*.

    Each filter string is first tried as the name of a method on this class
    (called with the doc and a field-name config); when that lookup/call
    fails or yields a falsy result, the string is treated as a lambda-style
    expression and evaluated against the document text. Returns the last
    filter's output (None when everything failed).
    """
    result = None
    if not isinstance(filters, list):
        filters = [filters]
    try:
        for text_filter in filters:
            try:
                f = getattr(self, text_filter)
                if f:
                    result = f(d, {_FIELD_NAME: field_name})
            except Exception as e:
                # not a method on this class -- fall back to the lambda path
                result = None
            if not result:
                result = Core.string_to_lambda(text_filter)(d[_TEXT])
    except Exception as e:
        print 'Error {} in {}'.format(e, 'run_user_filters')
    return result
def run_post_filters_results(self, results, post_filters):
if results:
if not isinstance(results, list):
results = [results]
if not isinstance(post_filters, list):
post_filters = [post_filters]
out_results = list()
for post_filter in post_filters:
try:
f = getattr(self, post_filter)
except Exception as e:
raise 'Exception: {}, no function {} defined in core.py'.format(e, post_filter)
for result in results:
val = f(result['value'])
if val:
result['value'] = val
out_results.append(result)
return out_results if len(out_results) > 0 else None
@staticmethod
def string_to_lambda(s):
    """Wrap config string *s* (an expression over 'x') as a one-argument callable.

    SECURITY NOTE(review): eval() executes arbitrary code from the
    extraction config; only use with trusted configs.
    NOTE(review): the except clause is effectively dead -- building the
    lambda never evaluates *s*; errors in *s* surface later, when the
    returned lambda is actually called.
    """
    try:
        return lambda x: eval(s)
    except:
        print 'Error while converting {} to lambda'.format(s)
        return None
@staticmethod
def extract_readability(document, options=None):
    """Run the readability content extractor over *document*."""
    extractor = ReadabilityExtractor()
    return extractor.extract(document, options)
def extract_title(self, html_content, options=None):
if html_content:
matches = re.search(self.html_title_regex, html_content, re.IGNORECASE | re.S)
title = None
if matches:
title = matches.group(1)
title = title.replace('\r', '')
title = title.replace('\n', '')
title = title.replace('\t', '')
if not title:
title = ''
return {'text': title}
return None
@staticmethod
def extract_crftokens(text, options=None, lowercase=True):
    """Tokenize *text* into structured CRF tokens (linebreaks recognized)."""
    tokenizer = TokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    return tokenizer.extract(text, lowercase)
@staticmethod
def crftokens_to_lower(crf_tokens):
lower_crf = copy.deepcopy(crf_tokens)
for tk in lower_crf:
tk['value'] = tk['value'].lower()
return lower_crf
@staticmethod
def extract_tokens_from_crf(crf_tokens):
return [tk['value'] for tk in crf_tokens]
@staticmethod
def extract_tokens_faithful(text, options=None):
    """Tokenize *text* faithfully (structure and linebreaks preserved)."""
    tokenizer = FaithfulTokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    return tokenizer.extract(text)
@staticmethod
def extract_tokens_from_faithful(faithful_tokens):
return [tk['value'] for tk in faithful_tokens]
@staticmethod
def filter_tokens(original_tokens, config):
    """Filter faithful tokens by category.

    *config* lists the token types to remove, out of:
    [alphabet, digit, emoji, punctuation, html, html_entity, break].
    Returns a Tokens object carrying tokens and reverse_map attributes;
    its get_original_index() maps back into the faithful token list.
    """
    tokenizer = FaithfulTokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    tokenizer.faithful_tokens = original_tokens
    return tokenizer.filter_tokens(config)
def extract_table(self, html_doc):
    """Extract html tables from *html_doc* via the table extractor module."""
    return table_extractor.extract(html_doc)
# def extract_stock_tickers(self, doc):
# return extract_stock_tickers(doc)
# def extract_spacy(self, doc):
# return spacy_extractor.spacy_extract(doc)
@staticmethod
def extract_landmark(html, url, extraction_rules, threshold=0.5):
    """Run landmark rule extraction on *html* for *url* at *threshold*."""
    return landmark_extraction.extract(html, url, extraction_rules, threshold)
def prep_spacy(self):
    """Load the spaCy English model and install a pass-through tokenizer.

    Documents are pre-tokenized elsewhere, so the replacement tokenizer
    simply wraps the provided token list; the original tokenizer is kept
    on self.old_tokenizer for the wrapper to delegate to.
    """
    self.nlp = spacy.load('en')
    self.old_tokenizer = self.nlp.tokenizer
    self.nlp.tokenizer = lambda tokens: self.old_tokenizer.tokens_from_list(tokens)
def load_matchers(self, field_name=None):
    """Lazily build and cache the spaCy matcher for *field_name*; no-op
    when field_name is falsy or already cached."""
    if not field_name:
        return
    if field_name == _AGE and _AGE not in self.matchers:
        self.matchers[_AGE] = spacy_age_extractor.load_age_matcher(self.nlp)
    if field_name == _POSTING_DATE and _POSTING_DATE not in self.matchers:
        self.matchers[_POSTING_DATE] = spacy_date_extractor.load_date_matcher(self.nlp)
    if field_name == _SOCIAL_MEDIA and _SOCIAL_MEDIA not in self.matchers:
        self.matchers[_SOCIAL_MEDIA] = spacy_social_media_extractor.load_social_media_matcher(self.nlp)
    if field_name == _ADDRESS and _ADDRESS not in self.matchers:
        self.matchers[_ADDRESS] = spacy_address_extractor.load_address_matcher(self.nlp)
@staticmethod
def create_list_data_extraction(data_extraction, field_name, method=_EXTRACT_USING_DICTIONARY):
    """List the values extracted for *field_name* by *method*; empty list
    when nothing was extracted."""
    if not data_extraction or field_name not in data_extraction:
        return list()
    extractions = data_extraction[field_name]
    if method not in extractions:
        return list()
    return Core.get_value_list_from_results(extractions[method]['results'])
@staticmethod
def get_value_list_from_results(results):
out = list()
if results:
for result in results:
out.append(result['value'])
return out
def extract_country_url(self, d, config):
    """Extract a country from URL tokens using the country-code dictionary.

    BUG FIX: a missing dictionary previously executed `raise '<string>'`,
    which in Python 2 raises an unhelpful TypeError; it now raises a
    descriptive ValueError.
    """
    if not self.country_code_dict:
        try:
            self.country_code_dict = self.load_json_file(self.get_dict_file_name_from_config('country_code'))
        except:
            raise ValueError('{} dictionary missing from resources'.format('country_code'))
    tokens_url = d[_SIMPLE_TOKENS]
    extracted = url_country_extractor.extract(tokens_url, self.country_code_dict)
    return self._relevant_text_from_context(tokens_url, extracted, config[_FIELD_NAME])
def geonames_lookup(self, d, config):
    """Look up populated places for the KG's extracted city names via the
    geonames dictionary; None when no city names were extracted.

    BUG FIX: a missing dictionary previously executed `raise '<string>'`,
    which in Python 2 raises an unhelpful TypeError; it now raises a
    descriptive ValueError.
    """
    # kept for parity with the original: validates config has a field name
    field_name = config[_FIELD_NAME]
    if not self.geonames_dict:
        try:
            self.geonames_dict = self.load_json_file(self.get_dict_file_name_from_config(_GEONAMES))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_GEONAMES))
    if _CITY_NAME not in d[_KNOWLEDGE_GRAPH]:
        return None
    cities = d[_KNOWLEDGE_GRAPH][_CITY_NAME].keys()
    return geonames_extractor.get_populated_places(cities, self.geonames_dict)
@staticmethod
def parse_date(d, config=None):
    """Parse *d* (a date string, or a doc dict carrying text) to ISO format.

    BUG FIX: the default argument was a mutable dict (config={}); it is now
    None. *config* is unused but kept for interface compatibility.
    Returns None when parsing fails.
    """
    if isinstance(d, basestring):
        return Core.spacy_parse_date(d)
    try:
        return date_parser.convert_to_iso_format(date_parser.parse_date(d[_TEXT]))
    except:
        return None
@staticmethod
def spacy_parse_date(str_date):
    """Convert *str_date* to ISO format; None when parsing fails."""
    try:
        parsed = date_parser.parse_date(str_date)
        return date_parser.convert_to_iso_format(parsed)
    except:
        return None
@staticmethod
def filter_age(d, config):
    """Accept the document text as an age only when it parses to an integer
    in [18, 65]; otherwise return None."""
    text = d[_TEXT]
    try:
        cleaned = text.replace('\n', '').replace('\t', '')
        age = int(cleaned)
    except:
        return None
    return age if 18 <= age <= 65 else None
def country_from_states(self, d, config):
    """Infer countries from the KG's extracted states via the state-to-country
    dictionary; None when no states were extracted.

    BUG FIX: a missing dictionary previously executed `raise '<string>'`,
    which in Python 2 raises an unhelpful TypeError; it now raises a
    descriptive ValueError.
    """
    if not self.state_to_country_dict:
        try:
            self.state_to_country_dict = self.load_json_file(self.get_dict_file_name_from_config(_STATE_TO_COUNTRY))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_STATE_TO_COUNTRY))
    if _STATE not in d[_KNOWLEDGE_GRAPH]:
        return None
    states = d[_KNOWLEDGE_GRAPH][_STATE].keys()
    return geonames_extractor.get_country_from_states(states, self.state_to_country_dict)
def country_feature(self, d, config):
    """Compute country-classifier features from the knowledge graph."""
    return country_classifier.calc_country_feature(d[_KNOWLEDGE_GRAPH], self.state_to_country_dict)
def create_city_state_country_triple(self, d, config):
    """Score populated-place candidates by how often the city co-occurs with
    its state / state code / country in the extracted text, then pick the
    best candidate(s).

    Candidates come from knowledge_graph['populated_places'].  For each one,
    six co-occurrence counters are computed from the contexts of the matching
    city/state/country/state-code extractions; the first counter (in priority
    order) that is non-zero decides the candidate's bucket and confidence
    suffix ("-1.0", "-0.8" or "-0.1").  Returns the winning bucket (all
    entries for high-priority buckets, the most-populous single entry
    otherwise), or None on any error.

    Raises:
        ValueError: when a required dictionary resource cannot be loaded.
    """
    if not self.state_to_codes_lower_dict:
        try:
            self.state_to_codes_lower_dict = self.load_json_file(self.get_dict_file_name_from_config(_STATE_TO_CODES_LOWER))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_STATE_TO_CODES_LOWER))
    if not self.populated_cities:
        try:
            self.populated_cities = self.load_json_file(self.get_dict_file_name_from_config(_POPULATED_CITIES))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_POPULATED_CITIES))
    try:
        # priority order: "together" (adjacent in text) beats "separate"
        priori_lst = ['city_state_together_count', 'city_state_code_together_count',
                      'city_country_together_count', 'city_state_separate_count',
                      'city_country_separate_count', 'city_state_code_separate_count']
        # one bucket per priority level, plus one overflow bucket
        results = [[] for i in range(len(priori_lst)+1)]
        knowledge_graph = d[_KNOWLEDGE_GRAPH]
        if "populated_places" in knowledge_graph:
            pop_places = knowledge_graph["populated_places"]
            for place in pop_places:
                city_state_together_count = 0
                city_state_separate_count = 0
                city_state_code_together_count = 0
                city_state_code_separate_count = 0
                city_country_together_count = 0
                city_country_separate_count = 0
                city = pop_places[place][0]["value"]
                state = pop_places[place][0]["metadata"]["state"]
                state = "" if not state else state
                country = pop_places[place][0]["metadata"]["country"]
                country = "" if not country else country
                if state in self.state_to_codes_lower_dict:
                    state_code = self.state_to_codes_lower_dict[state]
                else:
                    state_code = None
                # collect (segment, start, end) context triples for each entity
                cities = []
                if "city_name" in knowledge_graph:
                    if city in knowledge_graph["city_name"]:
                        city_lst = knowledge_graph["city_name"][city]
                        for each_city in city_lst:
                            if "context" in each_city:
                                cities.append((each_city["origin"]["segment"], each_city["context"]["start"], each_city["context"]["end"]))
                states = []
                # state matching is only meaningful for US candidates
                if country == "united states":
                    if "state" in knowledge_graph:
                        if state in knowledge_graph["state"]:
                            state_lst = knowledge_graph["state"][state]
                            for each_state in state_lst:
                                if "context" in each_state:
                                    states.append((each_state["origin"]["segment"], each_state["context"]["start"], each_state["context"]["end"]))
                countries = []
                if "country" in knowledge_graph:
                    if country in knowledge_graph["country"]:
                        country_lst = knowledge_graph["country"][country]
                        for each_country in country_lst:
                            if "context" in each_country:
                                countries.append((each_country["origin"]["segment"], each_country["context"]["start"], each_country["context"]["end"]))
                state_codes = []
                if country == "united states":
                    if state_code:
                        if "states_usa_codes" in knowledge_graph:
                            if state_code in knowledge_graph["states_usa_codes"]:
                                state_code_lst = knowledge_graph["states_usa_codes"][state_code]
                                for each_state_code in state_code_lst:
                                    if "context" in each_state_code:
                                        state_codes.append((each_state_code["origin"]["segment"], each_state_code["context"]["start"], each_state_code["context"]["end"]))
                if cities:
                    for a_city in cities:
                        # "together" = same segment, within a few tokens of each other
                        for a_state in states:
                            if a_city[0] == a_state[0] and a_city[1] != a_state[1] and (abs(a_city[2] - a_state[1])<3 or abs(a_city[1] - a_state[2])<3):
                                city_state_together_count += 1
                            else:
                                city_state_separate_count += 1
                        for a_state_code in state_codes:
                            if a_city[0] == a_state_code[0] and a_city[1] != a_state_code[1] and a_state_code[1] - a_city[2]<3 and a_state_code[1] - a_city[2]>0:
                                city_state_code_together_count += 1
                            else:
                                city_state_code_separate_count += 1
                        for a_country in countries:
                            if a_city[0] == a_country[0] and a_city[1] != a_country[1] and (abs(a_city[2] - a_country[1])<5 or abs(a_city[1] - a_country[2])<3):
                                city_country_together_count += 1
                            else:
                                city_country_separate_count += 1
                result = copy.deepcopy(pop_places[place][0])
                result['metadata']['city_state_together_count'] = city_state_together_count
                result['metadata']['city_state_separate_count'] = city_state_separate_count
                result['metadata']['city_state_code_together_count'] = city_state_code_together_count
                result['metadata']['city_state_code_separate_count'] = city_state_code_separate_count
                result['metadata']['city_country_together_count'] = city_country_together_count
                result['metadata']['city_country_separate_count'] = city_country_separate_count
                # first non-zero counter (in priority order) buckets the candidate
                for priori_idx, counter in enumerate(priori_lst):
                    if country == "united states":
                        result_value = city + ',' + state
                    else:
                        result_value = city + ',' + country
                    result['key'] = city+':'+state+':'+country+':'+str(result['metadata']['longitude'])+':'+str(result['metadata']['latitude'])
                    if result['metadata'][counter] > 0:
                        if priori_idx < 3:
                            result['value'] = result_value + "-1.0"
                        elif priori_idx < 5:
                            result['value'] = result_value + "-0.8"
                        else:
                            result['value'] = result_value + "-0.1"
                        results[priori_idx].append(result)
                        break
                    else:
                        # no counter fired at all: known populated city goes
                        # into the overflow bucket with low confidence
                        if priori_idx == 5 and city in self.populated_cities:
                            result['value'] = result_value + "-0.1"
                            results[priori_idx+1].append(result)
        return_result = None
        for priori in range(len(priori_lst)+1):
            if results[priori]:
                if priori < 3:
                    # high-priority buckets: return every candidate
                    return_result = results[priori]
                    break
                else:
                    # lower-priority buckets: keep only the most populous
                    high_pop = 0
                    high_idx = 0
                    for idx, a_result in enumerate(results[priori]):
                        if a_result['metadata']['population'] >= high_pop:
                            high_pop = a_result['metadata']['population']
                            high_idx = idx
                    return_result = [results[priori][high_idx]]
                    break
        return return_result
    except Exception as e:
        print e
        return None
Records the processing URL and document ID.
import sys
# Python 2 hack: reload(sys) restores the setdefaultencoding() attribute that
# site.py deletes at startup, so the process-wide default encoding can be
# forced to UTF-8.  sys.stdout is saved and restored around the reload
# because reload(sys) resets the stream objects.
stdout = sys.stdout
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = stdout
# import all extractors
from spacy_extractors import age_extractor as spacy_age_extractor
from spacy_extractors import social_media_extractor as spacy_social_media_extractor
from spacy_extractors import date_extractor as spacy_date_extractor
from spacy_extractors import address_extractor as spacy_address_extractor
from spacy_extractors import customized_extractor as custom_spacy_extractor
from data_extractors import landmark_extraction
from data_extractors import dictionary_extractor
from data_extractors import regex_extractor
from data_extractors import height_extractor
from data_extractors import weight_extractor
from data_extractors import address_extractor
from data_extractors import age_extractor
from data_extractors import table_extractor
from data_extractors import url_country_extractor
from data_extractors import geonames_extractor
from data_extractors.digPhoneExtractor import phone_extractor
from data_extractors.digEmailExtractor import email_extractor
from data_extractors.digPriceExtractor import price_extractor
from data_extractors.digReviewIDExtractor import review_id_extractor
from data_extractors import date_parser
from classifiers import country_classifier
from structured_extractors import ReadabilityExtractor, TokenizerExtractor, FaithfulTokenizerExtractor
import json
import gzip
import re
import spacy
import codecs
from jsonpath_rw import parse
import time
import collections
import numbers
from tldextract import tldextract
import pickle
import copy
from collections import OrderedDict
import sys
# ---- Document structure / policy keys --------------------------------------
_KNOWLEDGE_GRAPH = "knowledge_graph"
_EXTRACTION_POLICY = 'extraction_policy'
_KEEP_EXISTING = 'keep_existing'
_REPLACE = 'replace'
_ERROR_HANDLING = 'error_handling'
_IGNORE_EXTRACTION = 'ignore_extraction'
_IGNORE_DOCUMENT = 'ignore_document'
_RAISE_ERROR = 'raise_error'
# ---- Geographic field names ------------------------------------------------
_CITY_NAME = 'city_name'
_STATE = 'state'
_COUNTRY = 'country'
# ---- Content-extraction (Phase 1) config keys ------------------------------
_CONTENT_EXTRACTION = 'content_extraction'
_SPACY_EXTRACTION = 'spacy_extraction'
_RAW_CONTENT = 'raw_content'
_INPUT_PATH = 'input_path'
_READABILITY = 'readability'
_LANDMARK = 'landmark'
_TITLE = 'title'
_DESCRIPTION = "description"
_STRICT = 'strict'
_FIELD_NAME = 'field_name'
_CONTENT_STRICT = 'content_strict'
_CONTENT_RELAXED = 'content_relaxed'
_YES = 'yes'
_NO = 'no'
_RECALL_PRIORITY = 'recall_priority'
_INFERLINK_EXTRACTIONS = 'inferlink_extractions'
_LANDMARK_THRESHOLD = 'landmark_threshold'
_LANDMARK_RULES = 'landmark_rules'
_URL = 'url'
# ---- Common field names ----------------------------------------------------
_AGE = 'age'
_POSTING_DATE = 'posting_date'
_SOCIAL_MEDIA = 'social_media'
_ADDRESS = 'address'
# ---- Data-extraction (Phase 2) config keys ---------------------------------
_RESOURCES = 'resources'
_SPACY_FIELD_RULES = "spacy_field_rules"
_DATA_EXTRACTION = 'data_extraction'
_FIELDS = 'fields'
_EXTRACTORS = 'extractors'
_TOKENS = 'tokens'
_TOKENS_ORIGINAL_CASE = "tokens_original_case"
_SIMPLE_TOKENS = 'simple_tokens'
_SIMPLE_TOKENS_ORIGINAL_CASE = 'simple_tokens_original_case'
_TEXT = 'text'
_DICTIONARY = 'dictionary'
_PICKLES = 'pickle'
_NGRAMS = 'ngrams'
_JOINER = 'joiner'
_PRE_FILTER = 'pre_filter'
_POST_FILTER = 'post_filter'
_PRE_PROCESS = "pre_process"
_TABLE = "table"
_STOP_WORDS = "stop_words"
# ---- Dictionary resource names ---------------------------------------------
_GEONAMES = "geonames"
_STATE_TO_COUNTRY = "state_to_country"
_STATE_TO_CODES_LOWER = "state_to_codes_lower"
_POPULATED_PLACES = "populated_places"
_POPULATED_CITIES = "populated_cities"
# ---- Extractor method names (resolved via getattr on Core) -----------------
_EXTRACT_USING_DICTIONARY = "extract_using_dictionary"
_EXTRACT_USING_REGEX = "extract_using_regex"
_EXTRACT_FROM_LANDMARK = "extract_from_landmark"
_EXTRACT_PHONE = "extract_phone"
_EXTRACT_EMAIL = "extract_email"
_EXTRACT_PRICE = "extract_price"
_EXTRACT_HEIGHT = "extract_height"
_EXTRACT_WEIGHT = "extract_weight"
_EXTRACT_ADDRESS = "extract_address"
_EXTRACT_AGE = "extract_age"
_CONFIG = "config"
_DICTIONARIES = "dictionaries"
# ---- Provenance / segment labels -------------------------------------------
_INFERLINK = "inferlink"
_HTML = "html"
_SEGMENT_TITLE = "title"
_SEGMENT_INFERLINK_DESC = "inferlink_description"
_SEGMENT_OTHER = "other_segment"
_METHOD_INFERLINK = "inferlink"
_SOURCE_TYPE = "source_type"
_OBFUSCATION = "obfuscation"
_INCLUDE_CONTEXT = "include_context"
# ---- Later pipeline phases -------------------------------------------------
_KG_ENHANCEMENT = "kg_enhancement"
_DOCUMENT_ID = "document_id"
_TLD = 'tld'
_FEATURE_COMPUTATION = "feature_computation"
class Core(object):
    """Extraction-toolkit core.

    Orchestrates content extraction (readability / title / landmark / table),
    per-field data extraction, knowledge-graph construction and enhancement
    over input documents, driven by a JSON extraction config.
    """

    def __init__(self, extraction_config=None, debug=False, load_spacy=False):
        """
        Args:
            extraction_config: dict driving the whole pipeline (see process()).
            debug: when True, timing information is printed per stage.
            load_spacy: when True, the spaCy pipeline is loaded eagerly via
                prep_spacy(); otherwise self.nlp stays None.
        """
        self.extraction_config = extraction_config
        self.debug = debug
        self.html_title_regex = r'<title>(.*?)</title>'
        self.tries = dict()        # caches for dictionary tries
        self.pickles = dict()      # caches for pickled resources
        self.jobjs = dict()
        self.global_extraction_policy = None
        self.global_error_handling = None
        # to make sure we do not parse json_paths more times than needed, we define the following 2 properties
        self.content_extraction_path = None
        self.data_extraction_path = dict()
        if load_spacy:
            self.prep_spacy()
        else:
            self.nlp = None
        # lazily-loaded dictionary resources (filled on first use)
        self.country_code_dict = None
        self.matchers = dict()
        self.geonames_dict = None
        self.state_to_country_dict = None
        self.state_to_codes_lower_dict = None
        self.populated_cities = None
""" Define all API methods """
def process(self, doc, create_knowledge_graph=False):
try:
print 'Now Processing url: {}, doc_id: {}'.format(doc['url'], doc['doc_id'])
if self.extraction_config:
doc_id = None
if _DOCUMENT_ID in self.extraction_config:
doc_id_field = self.extraction_config[_DOCUMENT_ID]
if doc_id_field in doc:
doc_id = doc[doc_id_field]
doc[_DOCUMENT_ID] = doc_id
else:
raise KeyError('{} not found in the input document'.format(doc_id_field))
if _EXTRACTION_POLICY in self.extraction_config:
self.global_extraction_policy = self.extraction_config[_EXTRACTION_POLICY]
if _ERROR_HANDLING in self.extraction_config:
self.global_error_handling = self.extraction_config[_ERROR_HANDLING]
"""Handle content extraction first aka Phase 1"""
if _CONTENT_EXTRACTION in self.extraction_config:
if _CONTENT_EXTRACTION not in doc:
doc[_CONTENT_EXTRACTION] = dict()
ce_config = self.extraction_config[_CONTENT_EXTRACTION]
html_path = ce_config[_INPUT_PATH] if _INPUT_PATH in ce_config else None
if not html_path:
raise KeyError('{} not found in extraction_config'.format(_INPUT_PATH))
if not self.content_extraction_path:
start_time = time.time()
self.content_extraction_path = parse(html_path)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process parse %s' % time_taken
start_time = time.time()
matches = self.content_extraction_path.find(doc)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process matches %s' % time_taken
extractors = ce_config[_EXTRACTORS]
for index in range(len(matches)):
for extractor in extractors.keys():
if extractor == _READABILITY:
re_extractors = extractors[extractor]
if isinstance(re_extractors, dict):
re_extractors = [re_extractors]
for re_extractor in re_extractors:
doc[_CONTENT_EXTRACTION] = self.run_readability(doc[_CONTENT_EXTRACTION],
matches[index].value, re_extractor)
elif extractor == _TITLE:
doc[_CONTENT_EXTRACTION] = self.run_title(doc[_CONTENT_EXTRACTION], matches[index].value,
extractors[extractor])
elif extractor == _LANDMARK:
doc[_CONTENT_EXTRACTION] = self.run_landmark(doc[_CONTENT_EXTRACTION], matches[index].value,
extractors[extractor], doc[_URL])
elif extractor == _TABLE:
doc[_CONTENT_EXTRACTION] = self.run_table_extractor(doc[_CONTENT_EXTRACTION],
matches[index].value,
extractors[extractor])
# Add the url as segment as well
if _URL in doc and doc[_URL] and doc[_URL].strip() != '':
doc[_CONTENT_EXTRACTION][_URL] = dict()
doc[_CONTENT_EXTRACTION][_URL][_TEXT] = doc[_URL]
doc[_TLD] = self.extract_tld(doc[_URL])
"""Phase 2: The Data Extraction"""
if _DATA_EXTRACTION in self.extraction_config:
de_configs = self.extraction_config[_DATA_EXTRACTION]
if isinstance(de_configs, dict):
de_configs = [de_configs]
for i in range(len(de_configs)):
de_config = de_configs[i]
input_paths = de_config[_INPUT_PATH] if _INPUT_PATH in de_config else None
if not input_paths:
raise KeyError('{} not found for data extraction in extraction_config'.format(_INPUT_PATH))
if not isinstance(input_paths, list):
input_paths = [input_paths]
for input_path in input_paths:
if _FIELDS in de_config:
if input_path not in self.data_extraction_path:
self.data_extraction_path[input_path] = parse(input_path)
matches = self.data_extraction_path[input_path].find(doc)
for match in matches:
# First rule of DATA Extraction club: Get tokens
# Get the crf tokens
if _TEXT in match.value:
if _TOKENS_ORIGINAL_CASE not in match.value:
match.value[_TOKENS_ORIGINAL_CASE] = self.extract_crftokens(match.value[_TEXT],
lowercase=False)
if _TOKENS not in match.value:
match.value[_TOKENS] = self.crftokens_to_lower(
match.value[_TOKENS_ORIGINAL_CASE])
if _SIMPLE_TOKENS not in match.value:
match.value[_SIMPLE_TOKENS] = self.extract_tokens_from_crf(match.value[_TOKENS])
if _SIMPLE_TOKENS_ORIGINAL_CASE not in match.value:
match.value[_SIMPLE_TOKENS_ORIGINAL_CASE] = self.extract_tokens_from_crf(
match.value[_TOKENS_ORIGINAL_CASE])
# if _TOKENS not in match.value:
# match.value[_TOKENS] = self.extract_crftokens(match.value[_TEXT])
# if _SIMPLE_TOKENS not in match.value:
# match.value[_SIMPLE_TOKENS] = self.extract_tokens_from_crf(match.value[_TOKENS])
fields = de_config[_FIELDS]
for field in fields.keys():
if field != '*':
"""
Special case for inferlink extractions:
For eg, We do not want to extract name from inferlink_posting-date #DUH
"""
run_extractor = True
full_path = str(match.full_path)
segment = self.determine_segment(full_path)
if _INFERLINK in full_path:
if field not in full_path:
run_extractor = False
if _DESCRIPTION in full_path or _TITLE in full_path:
run_extractor = True
if run_extractor:
if _EXTRACTORS in fields[field]:
extractors = fields[field][_EXTRACTORS]
for extractor in extractors.keys():
try:
foo = getattr(self, extractor)
except:
foo = None
if foo:
# score is 1.0 because every method thinks it is the best
score = 1.0
method = extractor
if _CONFIG not in extractors[extractor]:
extractors[extractor][_CONFIG] = dict()
extractors[extractor][_CONFIG][_FIELD_NAME] = field
ep = self.determine_extraction_policy(extractors[extractor])
if extractor == _EXTRACT_FROM_LANDMARK:
if _INFERLINK_EXTRACTIONS in full_path and field in full_path:
method = _METHOD_INFERLINK
if self.check_if_run_extraction(match.value, field,
extractor,
ep):
results = foo(doc, extractors[extractor][_CONFIG])
if results:
self.add_data_extraction_results(match.value,
field,
extractor,
self.add_origin_info(
results,
method,
segment,
score,
doc_id))
if create_knowledge_graph:
self.create_knowledge_graph(doc, field,
results)
else:
if self.check_if_run_extraction(match.value, field,
extractor,
ep):
results = foo(match.value,
extractors[extractor][_CONFIG])
if results:
self.add_data_extraction_results(match.value, field,
extractor,
self.add_origin_info(
results,
method,
segment,
score, doc_id))
if create_knowledge_graph:
self.create_knowledge_graph(doc, field, results)
else: # extract whatever you can!
if _EXTRACTORS in fields[field]:
extractors = fields[field][_EXTRACTORS]
for extractor in extractors.keys():
try:
foo = getattr(self, extractor)
except Exception as e:
foo = None
if foo:
# score is 1.0 because every method thinks it is the best
score = 1.0
method = extractor
if _CONFIG not in extractors[extractor]:
extractors[extractor][_CONFIG] = dict()
ep = self.determine_extraction_policy(extractors[extractor])
if extractor == _EXTRACT_FROM_LANDMARK:
if _INFERLINK_EXTRACTIONS in full_path and field in full_path:
method = _METHOD_INFERLINK
if self.check_if_run_extraction(match.value, field,
extractor,
ep):
results = foo(doc, extractors[extractor][_CONFIG])
if results:
self.add_data_extraction_results(match.value, field,
extractor,
self.add_origin_info(
results,
method,
segment,
score, doc_id))
if create_knowledge_graph:
self.create_knowledge_graph(doc, field, results)
else:
results = foo(match.value,
extractors[extractor][_CONFIG])
if results:
for f, res in results.items():
# extractors[extractor][_CONFIG][_FIELD_NAME] = f
self.add_data_extraction_results(match.value, f,
extractor,
self.add_origin_info(
res,
method,
segment,
score, doc_id))
if create_knowledge_graph:
self.create_knowledge_graph(doc, f, res)
else:
print('method {} not found!'.format(extractor))
"""Optional Phase 3: Knowledge Graph Enhancement"""
if _KG_ENHANCEMENT in self.extraction_config:
kg_configs = self.extraction_config[_KG_ENHANCEMENT]
if isinstance(kg_configs, dict):
kg_configs = [kg_configs]
for i in range(len(kg_configs)):
kg_config = kg_configs[i]
input_paths = kg_config[_INPUT_PATH] if _INPUT_PATH in kg_config else None
if not input_paths:
raise KeyError(
'{} not found for knowledge graph enhancement in extraction_config'.format(_INPUT_PATH))
if not isinstance(input_paths, list):
input_paths = [input_paths]
for input_path in input_paths:
if _FIELDS in kg_config:
if input_path not in self.data_extraction_path:
self.data_extraction_path[input_path] = parse(input_path)
matches = self.data_extraction_path[input_path].find(doc)
for match in matches:
fields = kg_config[_FIELDS]
try:
sorted_fields = self.sort_dictionary_by_fields(fields)
except:
raise ValueError('Please ensure there is a priority added to every field in '
'knowledge_graph enhancement and the priority is an int')
for i in range(0, len(sorted_fields)):
field = sorted_fields[i][0]
if _EXTRACTORS in fields[field]:
extractors = fields[field][_EXTRACTORS]
for extractor in extractors.keys():
try:
foo = getattr(self, extractor)
except:
foo = None
if foo:
if _CONFIG not in extractors[extractor]:
extractors[extractor][_CONFIG] = dict()
extractors[extractor][_CONFIG][_FIELD_NAME] = field
results = foo(match.value, extractors[extractor][_CONFIG])
if results:
# doc[_KNOWLEDGE_GRA][field] = results
self.create_knowledge_graph(doc, field, results)
"""Optional Phase 4: feature computation"""
if _FEATURE_COMPUTATION in self.extraction_config:
kg_configs = self.extraction_config[_FEATURE_COMPUTATION]
if isinstance(kg_configs, dict):
kg_configs = [kg_configs]
for i in range(len(kg_configs)):
kg_config = kg_configs[i]
input_paths = kg_config[_INPUT_PATH] if _INPUT_PATH in kg_config else None
if not input_paths:
raise KeyError(
'{} not found for feature computation in extraction_config'.format(_INPUT_PATH))
if not isinstance(input_paths, list):
input_paths = [input_paths]
for input_path in input_paths:
if _FIELDS in kg_config:
if input_path not in self.data_extraction_path:
self.data_extraction_path[input_path] = parse(input_path)
matches = self.data_extraction_path[input_path].find(doc)
for match in matches:
fields = kg_config[_FIELDS]
for field in fields.keys():
if _EXTRACTORS in fields[field]:
extractors = fields[field][_EXTRACTORS]
for extractor in extractors.keys():
try:
foo = getattr(self, extractor)
except:
foo = None
if foo:
if _CONFIG not in extractors[extractor]:
extractors[extractor][_CONFIG] = dict()
extractors[extractor][_CONFIG][_FIELD_NAME] = field
results = foo(match.value, extractors[extractor][_CONFIG])
if results:
# doc[_KNOWLEDGE_GRAPH][field] = results
self.create_knowledge_graph(doc, field, results)
if _KNOWLEDGE_GRAPH in doc and doc[_KNOWLEDGE_GRAPH]:
doc[_KNOWLEDGE_GRAPH] = self.reformat_knowledge_graph(doc[_KNOWLEDGE_GRAPH])
""" Add title and description as fields in the knowledge graph as well"""
doc = Core.rearrange_description(doc)
doc = Core.rearrange_title(doc)
except Exception as e:
print e
print 'Failed doc:', doc['doc_id']
return None
print 'DONE url: {}, doc_id: {}'.format(doc['url'], doc['doc_id'])
return doc
@staticmethod
def rearrange_description(doc):
    """Promote the best available description into the knowledge graph.

    Prefers the inferlink-extracted description, falling back to the strict
    readability content; overwrites any existing KG 'description' entry.
    """
    method = 'rearrange_description'
    if _CONTENT_EXTRACTION not in doc:
        return doc
    ce = doc[_CONTENT_EXTRACTION]
    description = None
    segment = ''
    if _INFERLINK_EXTRACTIONS in ce and _DESCRIPTION in ce[_INFERLINK_EXTRACTIONS]:
        description = ce[_INFERLINK_EXTRACTIONS][_DESCRIPTION][_TEXT]
        segment = _INFERLINK
    if (not description or description.strip() == '') and _CONTENT_STRICT in ce:
        description = ce[_CONTENT_STRICT][_TEXT]
        segment = _CONTENT_STRICT
    if description and description != '':
        entry = {
            'value': description,
            'key': 'description',
            'confidence': 1,
            'provenance': [Core.custom_provenance_object(method, segment, doc[_DOCUMENT_ID])],
        }
        if _KNOWLEDGE_GRAPH not in doc:
            doc[_KNOWLEDGE_GRAPH] = dict()
        doc[_KNOWLEDGE_GRAPH][_DESCRIPTION] = [entry]
    return doc
@staticmethod
def sort_dictionary_by_fields(dictionary):
    """Return (field, config) pairs ordered by each field's 'priority' value.

    Raises KeyError/TypeError when a field config lacks a sortable priority.
    """
    # sorted(...) already yields the ordered pair list; the original's
    # OrderedDict round-trip added nothing.
    return sorted(dictionary.iteritems(), key=lambda item: item[1]['priority'])
@staticmethod
def custom_provenance_object(method, segment, document_id):
    """Build the provenance record attached to manually promoted KG fields
    (title / description)."""
    source = {'segment': segment, _DOCUMENT_ID: document_id}
    return {'method': method, 'source': source}
@staticmethod
def rearrange_title(doc):
    """Promote the best available page title into the knowledge graph.

    Prefers the inferlink-extracted title, falling back to the html <title>;
    overwrites any existing KG 'title' entry.
    """
    method = 'rearrange_title'
    if _CONTENT_EXTRACTION not in doc:
        return doc
    ce = doc[_CONTENT_EXTRACTION]
    title = None
    segment = ''
    if _INFERLINK_EXTRACTIONS in ce and _TITLE in ce[_INFERLINK_EXTRACTIONS]:
        title = ce[_INFERLINK_EXTRACTIONS][_TITLE][_TEXT]
        segment = _INFERLINK
    if (not title or title.strip() == '') and _TITLE in ce:
        title = ce[_TITLE][_TEXT]
        segment = _HTML
    if title and title != '':
        entry = {
            'value': title,
            'key': 'title',
            'confidence': 1,
            'provenance': [Core.custom_provenance_object(method, segment, doc[_DOCUMENT_ID])],
        }
        if _KNOWLEDGE_GRAPH not in doc:
            doc[_KNOWLEDGE_GRAPH] = dict()
        doc[_KNOWLEDGE_GRAPH][_TITLE] = [entry]
    return doc
@staticmethod
def extract_tld(url):
    """Return the registered domain plus suffix for *url*, e.g. 'example.com'.

    PERF FIX: the original called tldextract.extract(url) twice; the result
    is computed once and reused.
    """
    ext = tldextract.extract(url)
    return ext.domain + '.' + ext.suffix
@staticmethod
def create_knowledge_graph(doc, field_name, extractions):
    """Fold *extractions* into doc[_KNOWLEDGE_GRAPH][field_name], grouped by
    a derived key.

    The key starts from the extraction's value (lower-cased for strings and
    numbers, except for posting_date), is extended with each non-empty
    metadata pair in sorted order ("key-k:v"), and is overridden entirely by
    an explicit 'key' on the extraction.  Extractions sharing a key are
    appended to the same list.  Returns the mutated doc.
    """
    if _KNOWLEDGE_GRAPH not in doc:
        doc[_KNOWLEDGE_GRAPH] = dict()
    if field_name not in doc[_KNOWLEDGE_GRAPH]:
        doc[_KNOWLEDGE_GRAPH][field_name] = dict()
    for extraction in extractions:
        key = extraction['value']
        if (isinstance(key, basestring) or isinstance(key, numbers.Number)) and field_name != _POSTING_DATE:
            # try except block because unicode characters will not be lowered
            try:
                key = str(key).strip().lower()
            except:
                pass
        if 'metadata' in extraction:
            # deterministic key: metadata pairs appended in sorted order
            sorted_metadata = Core.sort_dict(extraction['metadata'])
            for k, v in sorted_metadata.iteritems():
                if isinstance(v, numbers.Number):
                    v = str(v)
                # if v:
                #     v = v.encode('utf-8')
                if v and v.strip() != '':
                    # key += '-' + str(k) + ':' + str(v)
                    key = '{}-{}:{}'.format(key, k, v)
        if 'key' in extraction:
            key = extraction['key']
        # TODO FIX THIS HACK
        # 32766 is the Lucene/Elasticsearch max term length -- presumably
        # truncating to 500 keeps keys indexable; verify against the sink
        if len(key) > 32766:
            key = key[0:500]
        if key not in doc[_KNOWLEDGE_GRAPH][field_name]:
            doc[_KNOWLEDGE_GRAPH][field_name][key] = list()
        doc[_KNOWLEDGE_GRAPH][field_name][key].append(extraction)
    return doc
@staticmethod
def reformat_knowledge_graph(knowledge_graph):
    """Convert the nested {field: {key: [extractions]}} knowledge graph into
    the final {field: [entry]} shape, where each entry carries key, value,
    provenance, optional qualifiers, and a default confidence of 1."""
    new_kg = dict()
    for semantic_type in knowledge_graph.keys():
        entries = list()
        values = knowledge_graph[semantic_type]
        for key in values.keys():
            provenances, qualifiers, value = Core.rearrange_provenance(values[key])
            entry = dict()
            entry['key'] = key
            entry['provenance'] = provenances
            if qualifiers:
                entry['qualifiers'] = qualifiers
            entry['value'] = value
            # default confidence value, to be updated by later analysis
            entry['confidence'] = 1
            entries.append(entry)
        new_kg[semantic_type] = entries
    return new_kg
@staticmethod
def rearrange_provenance(old_provenances):
    """Rewrite a list of raw extraction records into provenance records.

    For each record: the 'origin' dict becomes the provenance 'source'
    (with 'method' and 'score' popped out and re-homed), any 'obfuscation'
    is tucked under source['extraction_metadata'], and the context is
    attached to the source.  The first non-empty 'metadata' and the first
    truthy 'value' across the records are captured once and returned.

    Returns:
        (new_provenances, metadata, value) — note every provenance's
        'extracted_value' is set to the first truthy value seen, not
        necessarily its own record's value.
    """
    new_provenances = list()
    metadata = None
    value = None
    for prov in old_provenances:
        new_prov = dict()
        method = None
        confidence = None
        if 'origin' in prov:
            origin = prov['origin']
            if 'obfuscation' in prov:
                origin['extraction_metadata'] = dict()
                origin['extraction_metadata']['obfuscation'] = prov['obfuscation']
            method = origin['method']
            confidence = origin['score']
            # method/score move to the top level of the provenance record;
            # note this mutates the shared origin dict in place
            origin.pop('score', None)
            origin.pop('method', None)
            new_prov['source'] = origin
            if 'context' in prov:
                new_prov['source']['context'] = prov['context']
        if 'metadata' in prov and not metadata:
            metadata = prov['metadata']
        if method:
            new_prov["method"] = method
        if not value:
            value = prov['value']
        new_prov['extracted_value'] = value
        if confidence:
            new_prov['confidence'] = dict()
            new_prov['confidence']['extraction'] = confidence
        new_provenances.append(new_prov)
    return new_provenances, metadata, value
@staticmethod
def add_data_extraction_results(d, field_name, method_name, results):
    """Record extraction *results* under
    d[_DATA_EXTRACTION][field_name][method_name]['results'], creating the
    nesting as needed and extending any existing list.  Returns d."""
    if isinstance(results, dict):
        results = [results]
    method_slot = d.setdefault(_DATA_EXTRACTION, dict()) \
                   .setdefault(field_name, dict()) \
                   .setdefault(method_name, dict())
    if 'results' in method_slot:
        method_slot['results'].extend(results)
    else:
        method_slot['results'] = results
    return d
@staticmethod
def check_if_run_extraction(d, field_name, method_name, extraction_policy):
    """Decide whether an extractor should run: True when no prior results
    exist for (field, method), or when the policy is 'replace'."""
    has_results = (_DATA_EXTRACTION in d
                   and field_name in d[_DATA_EXTRACTION]
                   and method_name in d[_DATA_EXTRACTION][field_name]
                   and 'results' in d[_DATA_EXTRACTION][field_name][method_name])
    if not has_results:
        return True
    return extraction_policy == _REPLACE
@staticmethod
def determine_segment(json_path):
    """Map a jsonpath string to the segment label recorded in provenance.

    Checks run in priority order; the first substring match wins.
    """
    if _SEGMENT_INFERLINK_DESC in json_path:
        return _SEGMENT_INFERLINK_DESC
    # any other inferlink path counts as generic html
    if _INFERLINK in json_path:
        return _HTML
    if _CONTENT_STRICT in json_path:
        return _CONTENT_STRICT
    if _CONTENT_RELAXED in json_path:
        return _CONTENT_RELAXED
    if _TITLE in json_path:
        return _TITLE
    if _URL in json_path:
        return _URL
    return _SEGMENT_OTHER
@staticmethod
def add_origin_info(results, method, segment, score, doc_id=None):
    """Attach an 'origin' provenance dict (segment, method, score and,
    when given, document id) to every result.  Returns the same list."""
    if not results:
        return results
    for entry in results:
        origin = {'segment': segment, 'method': method, 'score': score}
        if doc_id:
            origin[_DOCUMENT_ID] = doc_id
        entry['origin'] = origin
    return results
def run_landmark(self, content_extraction, html, landmark_config, url):
    """Run inferlink/landmark rule extraction over *html* and store the
    results in *content_extraction* under the configured field.

    A list result indicates a multi-post page and is stored under
    'inferlink_posts'; a dict result is flattened to {key: {'text': value}}
    segments.  Honors the extraction policy (skip when results exist and
    policy is keep_existing).  Returns the mutated content_extraction.

    Raises:
        ValueError: when _LANDMARK_THRESHOLD is outside [0.0, 1.0].
    """
    field_name = landmark_config[_FIELD_NAME] if _FIELD_NAME in landmark_config else _INFERLINK_EXTRACTIONS
    ep = self.determine_extraction_policy(landmark_config)
    extraction_rules = self.consolidate_landmark_rules()
    if _LANDMARK_THRESHOLD in landmark_config:
        pct = landmark_config[_LANDMARK_THRESHOLD]
        if not 0.0 <= pct <= 1.0:
            raise ValueError('landmark threshold should be a float between {} and {}'.format(0.0, 1.0))
    else:
        pct = 0.5
    if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
        start_time = time.time()
        ifl_extractions = Core.extract_landmark(html, url, extraction_rules, pct)
        if isinstance(ifl_extractions, list):
            # we have a rogue post type page, put it in its place
            content_extraction[field_name] = dict()
            content_extraction[field_name]['inferlink_posts'] = ifl_extractions
        else:
            time_taken = time.time() - start_time
            if self.debug:
                print 'time taken to process landmark %s' % time_taken
            if ifl_extractions and len(ifl_extractions.keys()) > 0:
                content_extraction[field_name] = dict()
                for key in ifl_extractions:
                    o = dict()
                    o[key] = dict()
                    o[key]['text'] = ifl_extractions[key]
                    content_extraction[field_name].update(o)
    return content_extraction
def consolidate_landmark_rules(self):
    """Merge every landmark rules file listed in the extraction config into
    a single rules dict (later files win on key collisions).

    Raises:
        KeyError: when the config lacks resources or resources.landmark.
    """
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _LANDMARK not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _LANDMARK))
    rules = dict()
    for landmark_rules_file in resources[_LANDMARK]:
        rules.update(Core.load_json_file(landmark_rules_file))
    return rules
def get_dict_file_name_from_config(self, dict_name):
    """Resolve the file path of the named dictionary resource from the
    extraction config, raising KeyError (with the exact missing path) when
    any level is absent."""
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _DICTIONARIES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _DICTIONARIES))
    if dict_name not in resources[_DICTIONARIES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _DICTIONARIES, dict_name))
    return resources[_DICTIONARIES][dict_name]
def get_pickle_file_name_from_config(self, pickle_name):
    """Resolve the file path of the named pickle resource from the extraction
    config, raising KeyError (with the exact missing path) when any level
    is absent."""
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _PICKLES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _PICKLES))
    if pickle_name not in resources[_PICKLES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _PICKLES, pickle_name))
    return resources[_PICKLES][pickle_name]
def get_spacy_field_rules_from_config(self, field_name):
    """Resolve the spaCy rules resource for *field_name* from the extraction
    config, raising KeyError (with the exact missing path) when any level
    is absent."""
    if _RESOURCES not in self.extraction_config:
        raise KeyError('{} not found in provided extraction config'.format(_RESOURCES))
    resources = self.extraction_config[_RESOURCES]
    if _SPACY_FIELD_RULES not in resources:
        raise KeyError('{}.{} not found in provided extraction config'.format(_RESOURCES, _SPACY_FIELD_RULES))
    if field_name not in resources[_SPACY_FIELD_RULES]:
        raise KeyError(
            '{}.{}.{} not found in provided extraction config'.format(_RESOURCES, _SPACY_FIELD_RULES,
                                                                      field_name))
    return resources[_SPACY_FIELD_RULES][field_name]
def run_title(self, content_extraction, html, title_config):
field_name = title_config[_FIELD_NAME] if _FIELD_NAME in title_config else _TITLE
ep = self.determine_extraction_policy(title_config)
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
start_time = time.time()
extracted_title = self.extract_title(html)
if extracted_title:
content_extraction[field_name] = extracted_title
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process title %s' % time_taken
return content_extraction
def run_table_extractor(self, content_extraction, html, table_config):
field_name = table_config[_FIELD_NAME] if _FIELD_NAME in table_config else _TABLE
ep = self.determine_extraction_policy(table_config)
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
start_time = time.time()
content_extraction[field_name] = self.extract_table(html)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process table %s' % time_taken
return content_extraction
def run_readability(self, content_extraction, html, re_extractor):
recall_priority = False
field_name = None
if _STRICT in re_extractor:
recall_priority = False if re_extractor[_STRICT] == _YES else True
field_name = _CONTENT_RELAXED if recall_priority else _CONTENT_STRICT
options = {_RECALL_PRIORITY: recall_priority}
if _FIELD_NAME in re_extractor:
field_name = re_extractor[_FIELD_NAME]
ep = self.determine_extraction_policy(re_extractor)
start_time = time.time()
readability_text = self.extract_readability(html, options)
time_taken = time.time() - start_time
if self.debug:
print 'time taken to process readability %s' % time_taken
if readability_text:
if field_name not in content_extraction or (field_name in content_extraction and ep == _REPLACE):
content_extraction[field_name] = readability_text
return content_extraction
def determine_extraction_policy(self, config):
    """Resolve the extraction policy for a config block.

    A per-extractor policy wins over the instance-wide default; absent
    both, _REPLACE is used. Raises ValueError for unrecognized values.
    """
    policy = _REPLACE
    if not config:
        return policy
    if _EXTRACTION_POLICY in config:
        policy = config[_EXTRACTION_POLICY]
    elif self.global_extraction_policy:
        policy = self.global_extraction_policy
    if policy and policy not in (_KEEP_EXISTING, _REPLACE):
        raise ValueError('extraction_policy can either be {} or {}'.format(_KEEP_EXISTING, _REPLACE))
    return policy
@staticmethod
def _relevant_text_from_context(text_or_tokens, results, field_name):
    """Attach a human-readable context window to each extraction result.

    For string input a +/-10 character window is used; for token-list
    input a +/-5 token window. The matched span is wrapped in an
    <etk 'attribute'=...> marker and context['input'] records whether
    the offsets refer to raw text (_TEXT) or tokens (_TOKENS).
    Results are mutated in place and returned.
    """
    if results:
        tokens_len = len(text_or_tokens)
        if not isinstance(results, list):
            results = [results]
        for result in results:
            if 'context' in result:
                start = int(result['context']['start'])
                end = int(result['context']['end'])
                if isinstance(text_or_tokens, basestring):
                    # character offsets: clamp a 10-char pad to the text bounds
                    if start - 10 < 0:
                        new_start = 0
                    else:
                        new_start = start - 10
                    if end + 10 > tokens_len:
                        new_end = tokens_len
                    else:
                        new_end = end + 10
                    relevant_text = '<etk \'attribute\' = \'{}\'>{}</etk>'.format(field_name,
                                                                                 text_or_tokens[start:end].encode(
                                                                                     'utf-8'))
                    result['context']['text'] = '{} {} {}'.format(text_or_tokens[new_start:start].encode('utf-8'),
                                                                  relevant_text,
                                                                  text_or_tokens[end:new_end].encode('utf-8'))
                    result['context']['input'] = _TEXT
                else:
                    # token offsets: clamp a 5-token pad to the list bounds
                    if start - 5 < 0:
                        new_start = 0
                    else:
                        new_start = start - 5
                    if end + 5 > tokens_len:
                        new_end = tokens_len
                    else:
                        new_end = end + 5
                    relevant_text = '<etk \'attribute\' = \'{}\'>{}</etk>'.format(field_name,
                                                                                 ' '.join(text_or_tokens[
                                                                                     start:end]).encode(
                                                                                     'utf-8'))
                    result['context']['text'] = '{} {} {} '.format(
                        ' '.join(text_or_tokens[new_start:start]).encode('utf-8'),
                        relevant_text,
                        ' '.join(text_or_tokens[end:new_end]).encode('utf-8'))
                    result['context']['tokens_left'] = text_or_tokens[new_start:start]
                    result['context']['tokens_right'] = text_or_tokens[end:new_end]
                    result['context']['input'] = _TOKENS
    return results
@staticmethod
def sort_dict(dictionary):
    """Return an OrderedDict of *dictionary*'s items sorted by key."""
    ordered_items = sorted(dictionary.items(), key=lambda kv: kv[0])
    return collections.OrderedDict(ordered_items)
@staticmethod
def load_json_file(file_name):
    """Load and return the JSON document stored at *file_name*.

    The file handle is now closed deterministically; the original left
    the codecs stream open (resource leak).
    """
    with codecs.open(file_name, 'r') as json_file:
        return json.load(json_file)
def load_json(self, json_name):
    """Return the parsed JSON resource *json_name*, memoized in self.jobjs."""
    if json_name not in self.jobjs:
        resource_path = self.get_pickle_file_name_from_config(json_name)
        self.jobjs[json_name] = self.load_json_file(resource_path)
    return self.jobjs[json_name]
def load_trie(self, file_name):
    """Build a lowercase-token trie from the JSON list stored at *file_name*.

    The file may be gzip-compressed; plain JSON is used as a fallback.
    Fixes in this revision: file handles are closed deterministically and
    the bare ``except:`` is narrowed to the errors a bad/non-gzip file
    actually raises.
    """
    values = None
    try:
        # 'utf-8' is the Python 2 json.load ``encoding`` positional argument.
        with gzip.open(file_name) as gz_file:
            values = json.load(gz_file, 'utf-8')
    except (IOError, OSError, ValueError):
        # not gzipped (or not valid JSON inside) -- fall back to plain JSON
        values = None
    if not values:
        with codecs.open(file_name) as json_file:
            values = json.load(json_file, 'utf-8')
    return dictionary_extractor.populate_trie(map(lambda x: x.lower(), values))
def load_dictionary(self, field_name, dict_name):
    """Ensure the trie for *field_name* is cached in self.tries."""
    if field_name not in self.tries:
        dict_path = self.get_dict_file_name_from_config(dict_name)
        self.tries[field_name] = self.load_trie(dict_path)
def load_pickle_file(self, pickle_path):
    """Unpickle and return the object stored at *pickle_path*.

    The handle is now closed deterministically (the original leaked it).
    NOTE: pickle is only safe on trusted resource files.
    """
    with open(pickle_path, 'rb') as pickle_file:
        return pickle.load(pickle_file)
def load_pickle(self, pickle_name):
    """Return the unpickled resource *pickle_name*, memoized in self.pickles."""
    if pickle_name not in self.pickles:
        resource_path = self.get_pickle_file_name_from_config(pickle_name)
        self.pickles[pickle_name] = self.load_pickle_file(resource_path)
    return self.pickles[pickle_name]
def classify_table(self, d, config):
    """Public wrapper: classify table *d* via classify_table_."""
    return self.classify_table_(d, config)
def classify_table_(self, d, config):
    """Predict a semantic label for table *d* with the configured model.

    Returns a one-element list holding the best label, all labels, a
    cleaned table array, and a context record keyed on the fingerprint.
    """
    cl_model = self.load_pickle(config['model'])
    sem_types = self.load_json(config['sem_types'])
    classifier = table_extractor.TableClassification(sem_types, cl_model)
    labels = classifier.predict_label(d)
    tarr = table_extractor.Toolkit.create_table_array(d)
    table_extractor.Toolkit.clean_cells(tarr)
    prediction = {
        'value': labels[2],
        'all_labels': labels,
        'context': dict(start=0, end=0, input=d['fingerprint'], text=str(tarr)),
        'tarr': tarr,
    }
    return [prediction]
def table_data_extractor(self, d, config):
    """Public wrapper: extract table data from *d* via table_data_extractor_."""
    return self.table_data_extractor_(d, config)
def table_data_extractor_(self, d, config):
    """Extract structured data from table *d*.

    The model resource is a JSON rule file for the 'rule_based' method
    and a pickled learned model otherwise.
    """
    sem_types = self.load_json(config['sem_types'])
    method = config['method']
    if method == 'rule_based':
        model = self.load_json(config['model'])
    else:
        model = self.load_pickle(config['model'])
    extractor = table_extractor.InformationExtraction(sem_types, method, model)
    return extractor.extract(d)
def extract_using_dictionary(self, d, config):
    """Extract values for config[_FIELD_NAME] by trie lookup over tokens.

    Optional _PRE_PROCESS / _PRE_FILTER / _POST_FILTER lambda strings
    from the config are applied around the dictionary extractor, and
    results are wrapped with surrounding-token context.

    Raises:
        KeyError: when the config names no dictionary.
    """
    field_name = config[_FIELD_NAME]
    # this method is self aware that it needs tokens as input
    tokens = d[_SIMPLE_TOKENS]
    if not tokens:
        return None
    if _DICTIONARY not in config:
        raise KeyError('No dictionary specified for {}'.format(field_name))
    self.load_dictionary(field_name, config[_DICTIONARY])
    pre_process = None
    if _PRE_PROCESS in config and len(config[_PRE_PROCESS]) > 0:
        pre_process = self.string_to_lambda(config[_PRE_PROCESS][0])
    if not pre_process:
        pre_process = lambda x: x
    pre_filter = None
    if _PRE_FILTER in config and len(config[_PRE_FILTER]) > 0:
        pre_filter = self.string_to_lambda(config[_PRE_FILTER][0])
    if not pre_filter:
        pre_filter = lambda x: x
    post_filter = None
    # BUG FIX: this block previously read _PRE_FILTER again (copy-paste),
    # so a configured post filter was silently ignored.
    if _POST_FILTER in config and len(config[_POST_FILTER]) > 0:
        post_filter = self.string_to_lambda(config[_POST_FILTER][0])
    if not post_filter:
        post_filter = lambda x: isinstance(x, basestring)
    ngrams = int(config[_NGRAMS]) if _NGRAMS in config else 1
    joiner = config[_JOINER] if _JOINER in config else ' '
    results = self._extract_using_dictionary(tokens, pre_process,
                                             self.tries[field_name],
                                             pre_filter, post_filter,
                                             ngrams, joiner)
    return self._relevant_text_from_context(d[_SIMPLE_TOKENS], results, field_name)
@staticmethod
def _extract_using_dictionary(tokens, pre_process, trie, pre_filter, post_filter, ngrams, joiner):
    """Run the dictionary extractor; return its matches, or None if empty."""
    matches = dictionary_extractor.extract_using_dictionary(
        tokens, pre_process=pre_process, trie=trie, pre_filter=pre_filter,
        post_filter=post_filter, ngrams=ngrams, joiner=joiner)
    return matches or None
def extract_website_domain(self, d, config):
    """Extract the top-level domain of d[_TEXT] as a single-value result."""
    text = d[_TEXT]
    tld_result = {"value": self.extract_tld(text)}
    return self._relevant_text_from_context(text, tld_result, config[_FIELD_NAME])
def extract_using_regex(self, d, config):
    """Extract matches of config['regex'] from d[_TEXT].

    Supports 'include_context' ('false' disables context), a list of
    're' flag names in 'regex_options', and optional _PRE_FILTER lambdas.

    Raises:
        KeyError: when no 'regex' is configured.
        ValueError: when 'regex_options' is not a list.
    """
    # this method is self aware that it needs the text, so look for text in the input d
    text = d[_TEXT]
    include_context = True
    if "include_context" in config and config['include_context'].lower() == 'false':
        include_context = False
    if "regex" not in config:
        raise KeyError('No regular expression found in {}'.format(json.dumps(config)))
    regex = config["regex"]
    flags = 0
    if "regex_options" in config:
        regex_options = config['regex_options']
        if not isinstance(regex_options, list):
            raise ValueError("regular expression options should be a list in {}".format(json.dumps(config)))
        for regex_option in regex_options:
            # SECURITY FIX: look the flag up with getattr instead of
            # eval()-ing a config-supplied string.
            flags = flags | getattr(re, regex_option)
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    result = self._extract_using_regex(text, regex, include_context, flags)
    # TODO ADD code to handle post_filters
    return self._relevant_text_from_context(d[_TEXT], result, config[_FIELD_NAME])
@staticmethod
def _extract_using_regex(text, regex, include_context, flags):
    """Call the regex extractor; log and swallow any extractor error."""
    try:
        matches = regex_extractor.extract(text, regex, include_context, flags)
    except Exception as e:
        print(e)
        return None
    return matches or None
def extract_using_custom_spacy(self, d, config, field_rules=None):
    """Run the rule-driven spaCy extractor over the original-case tokens.

    Rules are loaded from the configured _SPACY_FIELD_RULES resource
    unless supplied directly via *field_rules*.
    """
    if not field_rules:
        rules_path = self.get_spacy_field_rules_from_config(config[_SPACY_FIELD_RULES])
        field_rules = self.load_json_file(rules_path)
    if not self.nlp:
        self.prep_spacy()
    # call the custom spacy extractor
    tokens = d[_SIMPLE_TOKENS_ORIGINAL_CASE]
    nlp_doc = self.nlp(tokens)
    extracted = custom_spacy_extractor.extract(field_rules, nlp_doc, self.nlp)
    return self._relevant_text_from_context(tokens, extracted, config[_FIELD_NAME])
def extract_using_spacy(self, d, config):
    """Dispatch to the spaCy matcher for the configured field.

    Supported fields: age, posting date (with optional post filters),
    social media, address. Returns None for any other field.
    """
    field_name = config[_FIELD_NAME]
    if not self.nlp:
        self.prep_spacy()
    nlp_doc = self.nlp(d[_SIMPLE_TOKENS])
    self.load_matchers(field_name)
    results = None
    if field_name == _AGE:
        extracted = spacy_age_extractor.extract(nlp_doc, self.matchers[_AGE])
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS], extracted, _AGE)
    elif field_name == _POSTING_DATE:
        extracted = spacy_date_extractor.extract(nlp_doc, self.matchers[_POSTING_DATE])
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS], extracted, _POSTING_DATE)
        if _POST_FILTER in config:
            results = self.run_post_filters_results(results, config[_POST_FILTER])
    elif field_name == _SOCIAL_MEDIA:
        extracted = spacy_social_media_extractor.extract(nlp_doc, self.matchers[_SOCIAL_MEDIA])
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS], extracted, _SOCIAL_MEDIA)
    elif field_name == _ADDRESS:
        extracted = spacy_address_extractor.extract(nlp_doc, self.matchers[_ADDRESS])
        results = self._relevant_text_from_context(d[_SIMPLE_TOKENS], extracted, _ADDRESS)
    return results
def extract_from_landmark(self, doc, config):
    """Collect landmark (inferlink) extraction results for a field.

    When the config lists explicit 'fields' those are used; otherwise
    every inferlink field whose name contains the target field name is
    used. Pre/post filter lambdas from the config are applied per field.
    Returns a list of result dicts, or None when nothing was extracted.

    The two previously-duplicated per-field bodies are factored into
    _landmark_field_result.
    """
    field_name = config[_FIELD_NAME]
    if _CONTENT_EXTRACTION not in doc:
        return None
    if _INFERLINK_EXTRACTIONS not in doc[_CONTENT_EXTRACTION]:
        return None
    inferlink_extraction = doc[_CONTENT_EXTRACTION][_INFERLINK_EXTRACTIONS]
    fields = config.get(_FIELDS)
    pre_filters = config.get(_PRE_FILTER)
    post_filters = config.get(_POST_FILTER)
    if fields:
        candidates = [f for f in fields if f in inferlink_extraction]
    else:
        # if the inferlink rules do not have semantic information in the
        # field names returned, too bad
        candidates = [f for f in inferlink_extraction.keys() if field_name in f]
    results = list()
    for field in candidates:
        result = self._landmark_field_result(inferlink_extraction[field],
                                             field_name, pre_filters, post_filters)
        if result:
            results.extend(result)
    return results if len(results) > 0 else None

def _landmark_field_result(self, d, field_name, pre_filters, post_filters):
    """Apply pre/post filters to one inferlink field dict; normalize output."""
    if pre_filters:
        # Assumption all pre_filters are lambdas
        d[_TEXT] = self.run_user_filters(d, pre_filters, field_name)
    if post_filters:
        post_result = self.run_user_filters(d, post_filters, field_name)
        if post_result:
            return self.handle_text_or_results(post_result)
        # a configured post filter that yields nothing drops the field
        return None
    return self.handle_text_or_results(d[_TEXT])
def extract_phone(self, d, config):
    """Extract phone numbers from the token stream.

    source type as in text vs url.
    """
    tokens = d[_SIMPLE_TOKENS]
    source_type = config.get(_SOURCE_TYPE, 'text')
    extracted = self._extract_phone(tokens, source_type, True, _OBFUSCATION)
    return self._relevant_text_from_context(tokens, extracted, config[_FIELD_NAME])
@staticmethod
def _extract_phone(tokens, source_type, include_context, output_format):
    """Delegate to the phone extractor; empty output becomes None."""
    extracted = phone_extractor.extract(tokens, source_type, include_context, output_format)
    return extracted if extracted else None
def extract_email(self, d, config):
    """Extract e-mail addresses from d[_TEXT], honoring _INCLUDE_CONTEXT."""
    text = d[_TEXT]
    include_context = True
    if _INCLUDE_CONTEXT in config:
        include_context = config[_INCLUDE_CONTEXT].upper() == 'TRUE'
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_email(text, include_context)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_email(text, include_context):
    """Regex-based e-mail extraction.

    :param text: the input text to scan.
    :param include_context: when True, regex match context is included.
    :return: extractor output with the e-mails and/or context.
    """
    return email_extractor.extract(text, include_context)
def extract_price(self, d, config):
    """Extract price mentions from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_price(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_price(text):
    """Delegate price extraction to the price_extractor module."""
    return price_extractor.extract(text)
def extract_height(self, d, config):
    """Extract height mentions from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_height(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_height(text):
    """Delegate height extraction to the height_extractor module."""
    return height_extractor.extract(text)
def extract_weight(self, d, config):
    """Extract weight mentions from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_weight(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_weight(text):
    """Delegate weight extraction to the weight_extractor module."""
    return weight_extractor.extract(text)
def extract_address(self, d, config):
    """Extract addresses from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_address(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_address(text):
    """Delegate address extraction to the address_extractor module."""
    return address_extractor.extract(text)
def extract_age(self, d, config):
    """Extract age mentions from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_age(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_age(text):
    """Delegate age extraction to the age_extractor module."""
    return age_extractor.extract(text)
def extract_review_id(self, d, config):
    """Extract review identifiers from d[_TEXT] (pre-filters applied if any)."""
    text = d[_TEXT]
    if _PRE_FILTER in config:
        text = self.run_user_filters(d, config[_PRE_FILTER], config[_FIELD_NAME])
    extracted = self._extract_review_id(text)
    return self._relevant_text_from_context(d[_TEXT], extracted, config[_FIELD_NAME])
@staticmethod
def _extract_review_id(text):
    """Delegate review-id extraction to the review_id_extractor module."""
    return review_id_extractor.extract(text)
@staticmethod
def handle_text_or_results(x):
    """Normalize a string / dict / list result into a list of result dicts.

    Strings become [{'value': x}], dicts are wrapped in a list, lists
    pass through, anything else yields None.
    """
    if isinstance(x, basestring):
        return [{'value': x}]
    if isinstance(x, dict):
        return [x]
    if isinstance(x, list):
        return x
    return None
def run_user_filters(self, d, filters, field_name):
    """Apply user-configured filters to document *d*.

    Each entry in *filters* is tried first as the name of a method on
    this object; when no such method exists or it returns a falsy
    result, the entry is compiled as a lambda expression and applied to
    d[_TEXT]. Only the result of the last filter is returned.

    NOTE(review): filter strings are eval'd via string_to_lambda — only
    trusted configuration should reach this method.
    """
    result = None
    if not isinstance(filters, list):
        filters = [filters]
    try:
        for text_filter in filters:
            try:
                f = getattr(self, text_filter)
                if f:
                    result = f(d, {_FIELD_NAME: field_name})
            except Exception as e:
                # best effort: fall through to the lambda path below
                result = None
            if not result:
                result = Core.string_to_lambda(text_filter)(d[_TEXT])
    except Exception as e:
        print 'Error {} in {}'.format(e, 'run_user_filters')
    return result
def run_post_filters_results(self, results, post_filters):
    """Apply named post-filter methods to each result's 'value'.

    Results whose filtered value is falsy are dropped. Returns the
    surviving results, or None when nothing survives / input is empty.

    Raises:
        ValueError: if a named post filter is not defined on this class.
        (The original ``raise '<string>'`` was itself a TypeError at
        runtime — string exceptions are not allowed.)
    """
    if results:
        if not isinstance(results, list):
            results = [results]
        if not isinstance(post_filters, list):
            post_filters = [post_filters]
        out_results = list()
        for post_filter in post_filters:
            try:
                f = getattr(self, post_filter)
            except Exception as e:
                raise ValueError('Exception: {}, no function {} defined in core.py'.format(e, post_filter))
            for result in results:
                val = f(result['value'])
                if val:
                    result['value'] = val
                    out_results.append(result)
        return out_results if len(out_results) > 0 else None
@staticmethod
def string_to_lambda(s):
    """Compile expression string *s* into a one-argument lambda.

    Returns None when *s* is not a valid expression. The original
    try/except could never fire — wrapping eval() in a lambda raises
    only when the lambda is later *called* — so invalid strings leaked
    through. Pre-compiling restores the intended None-on-bad-input
    contract while keeping eval-time name resolution (the compiled code
    still sees ``x`` in the lambda's locals).

    SECURITY: *s* is eval'd; only trusted configuration may reach here.
    """
    try:
        code = compile(s, '<string_to_lambda>', 'eval')
    except (SyntaxError, TypeError, ValueError):
        print('Error while converting {} to lambda'.format(s))
        return None
    return lambda x: eval(code)
@staticmethod
def extract_readability(document, options=None):
    """Run the readability extractor over *document* with *options*."""
    return ReadabilityExtractor().extract(document, options)
def extract_title(self, html_content, options=None):
    """Pull the <title> text out of raw HTML via self.html_title_regex.

    Returns {'text': title} where title may be '' when no match is
    found, or None for empty input.
    """
    if not html_content:
        return None
    match = re.search(self.html_title_regex, html_content, re.IGNORECASE | re.S)
    title = ''
    if match:
        raw = match.group(1)
        for control_char in ('\r', '\n', '\t'):
            raw = raw.replace(control_char, '')
        title = raw
    return {'text': title}
@staticmethod
def extract_crftokens(text, options=None, lowercase=True):
    """Tokenize *text* with the structured (CRF) tokenizer."""
    tokenizer = TokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    return tokenizer.extract(text, lowercase)
@staticmethod
def crftokens_to_lower(crf_tokens):
    """Return a deep copy of *crf_tokens* with every 'value' lowercased."""
    lowered = copy.deepcopy(crf_tokens)
    for token in lowered:
        token['value'] = token['value'].lower()
    return lowered
@staticmethod
def extract_tokens_from_crf(crf_tokens):
    """Project the 'value' field out of each CRF token dict."""
    return list(map(lambda token: token['value'], crf_tokens))
@staticmethod
def extract_tokens_faithful(text, options=None):
    """Tokenize *text* with the faithful (character-preserving) tokenizer."""
    tokenizer = FaithfulTokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    return tokenizer.extract(text)
@staticmethod
def extract_tokens_from_faithful(faithful_tokens):
    """Project the 'value' field out of each faithful token dict."""
    return list(map(lambda token: token['value'], faithful_tokens))
@staticmethod
def filter_tokens(original_tokens, config):
    """Filter *original_tokens* by the token types listed in *config*.

    config contains a list of types of tokens to be removed:
    [alphabet, digit, emoji, punctuation, html, html_entity, break].
    Returns a Tokens object (tokens, reverse_map attributes plus a
    get_original_index() method mapping back into the faithful tokens).
    """
    tokenizer = FaithfulTokenizerExtractor(recognize_linebreaks=True, create_structured_tokens=True)
    tokenizer.faithful_tokens = original_tokens
    return tokenizer.filter_tokens(config)
def extract_table(self, html_doc):
    """Run the table extractor over a raw HTML document."""
    return table_extractor.extract(html_doc)
@staticmethod
def extract_landmark(html, url, extraction_rules, threshold=0.5):
    """Apply landmark extraction rules to *html* retrieved from *url*."""
    return landmark_extraction.extract(html, url, extraction_rules, threshold)
def prep_spacy(self):
    """Load the English spaCy pipeline and swap in a pass-through tokenizer.

    The replacement tokenizer treats input as an already-tokenized list,
    so extractors can feed token lists straight to self.nlp. The
    original tokenizer is kept on self.old_tokenizer for the closure.
    """
    self.nlp = spacy.load('en')
    self.old_tokenizer = self.nlp.tokenizer
    self.nlp.tokenizer = lambda tokens: self.old_tokenizer.tokens_from_list(tokens)
def load_matchers(self, field_name=None):
    """Lazily build and cache the spaCy matcher for *field_name*.

    Unknown or falsy field names are a no-op.
    """
    if not field_name:
        return
    loaders = {
        _AGE: spacy_age_extractor.load_age_matcher,
        _POSTING_DATE: spacy_date_extractor.load_date_matcher,
        _SOCIAL_MEDIA: spacy_social_media_extractor.load_social_media_matcher,
        _ADDRESS: spacy_address_extractor.load_address_matcher,
    }
    if field_name in loaders and field_name not in self.matchers:
        self.matchers[field_name] = loaders[field_name](self.nlp)
@staticmethod
def create_list_data_extraction(data_extraction, field_name, method=_EXTRACT_USING_DICTIONARY):
    """Collect the value list that *method* produced for *field_name*."""
    if data_extraction and field_name in data_extraction:
        extractions = data_extraction[field_name]
        if method in extractions:
            return Core.get_value_list_from_results(extractions[method]['results'])
    return list()
@staticmethod
def get_value_list_from_results(results):
    """Project 'value' from each result dict; empty list for falsy input."""
    if not results:
        return list()
    return [result['value'] for result in results]
def extract_country_url(self, d, config):
    """Infer a country from URL tokens using the country_code dictionary.

    Raises:
        ValueError: if the country_code resource cannot be loaded.
        (The original ``raise '<string>'`` was itself a TypeError at
        runtime — string exceptions are not allowed.)
    """
    if not self.country_code_dict:
        try:
            self.country_code_dict = self.load_json_file(self.get_dict_file_name_from_config('country_code'))
        except Exception:
            raise ValueError('{} dictionary missing from resources'.format('country_code'))
    tokens_url = d[_SIMPLE_TOKENS]
    return self._relevant_text_from_context(tokens_url,
                                            url_country_extractor.extract(tokens_url, self.country_code_dict),
                                            config[_FIELD_NAME])
def geonames_lookup(self, d, config):
    """Look up populated places for the cities on the knowledge graph.

    Returns None when the document has no city_name facet.

    Raises:
        ValueError: if the geonames resource cannot be loaded.
        (The original ``raise '<string>'`` was itself a TypeError at
        runtime — string exceptions are not allowed.)
    """
    if not self.geonames_dict:
        try:
            self.geonames_dict = self.load_json_file(self.get_dict_file_name_from_config(_GEONAMES))
        except Exception:
            raise ValueError('{} dictionary missing from resources'.format(_GEONAMES))
    if _CITY_NAME not in d[_KNOWLEDGE_GRAPH]:
        return None
    cities = d[_KNOWLEDGE_GRAPH][_CITY_NAME].keys()
    return geonames_extractor.get_populated_places(cities, self.geonames_dict)
@staticmethod
def parse_date(d, config=None):
    """Parse a date from a raw string or from a doc dict's _TEXT field.

    *config* is accepted for extractor-interface compatibility and is
    currently unused; the original declared it with a mutable ``{}``
    default, which is a shared-state hazard. Returns an ISO-format
    string or None when parsing fails.
    """
    if isinstance(d, basestring):
        return Core.spacy_parse_date(d)
    try:
        return date_parser.convert_to_iso_format(date_parser.parse_date(d[_TEXT]))
    except Exception:
        return None
@staticmethod
def spacy_parse_date(str_date):
    """Convert *str_date* to ISO format; None when it cannot be parsed.

    The bare ``except:`` was narrowed so SystemExit/KeyboardInterrupt
    are no longer swallowed.
    """
    try:
        return date_parser.convert_to_iso_format(date_parser.parse_date(str_date))
    except Exception:
        return None
@staticmethod
def filter_age(d, config):
    """Return d[_TEXT] as an int when it is a plausible age (18..65).

    Embedded newlines/tabs are stripped before conversion; non-numeric
    or out-of-range text yields None. The bare ``except:`` was narrowed
    to the conversion errors that can actually occur here.
    """
    text = d[_TEXT]
    try:
        num = int(text.replace('\n', '').replace('\t', ''))
    except (ValueError, TypeError, AttributeError):
        return None
    return num if 18 <= num <= 65 else None
def country_from_states(self, d, config):
    """Map the states on the knowledge graph to their countries.

    Returns None when the document has no state facet.

    Raises:
        ValueError: if the state-to-country resource cannot be loaded.
        (The original ``raise '<string>'`` was itself a TypeError at
        runtime — string exceptions are not allowed.)
    """
    if not self.state_to_country_dict:
        try:
            self.state_to_country_dict = self.load_json_file(self.get_dict_file_name_from_config(_STATE_TO_COUNTRY))
        except Exception:
            raise ValueError('{} dictionary missing from resources'.format(_STATE_TO_COUNTRY))
    if _STATE not in d[_KNOWLEDGE_GRAPH]:
        return None
    states = d[_KNOWLEDGE_GRAPH][_STATE].keys()
    return geonames_extractor.get_country_from_states(states, self.state_to_country_dict)
def country_feature(self, d, config):
    """Compute the country-classifier feature for the knowledge graph."""
    return country_classifier.calc_country_feature(d[_KNOWLEDGE_GRAPH], self.state_to_country_dict)
def create_city_state_country_triple(self, d, config):
    """Resolve the most plausible (city, state, country) triple.

    Each candidate populated place is scored by counting how often its
    city co-occurs adjacently (same segment, within a few tokens) with
    its state / state code / country in the knowledge-graph extractions.
    Candidates are bucketed by a fixed priority order ("together"
    evidence beats "separate" evidence); within low-priority buckets the
    most populous place wins. Returns a one-element list holding the
    winner (value encodes "city,region-confidence"), or None on failure
    or when no evidence exists.

    Raises:
        ValueError: when a required resource dictionary is missing.
    """
    if not self.state_to_codes_lower_dict:
        try:
            self.state_to_codes_lower_dict = self.load_json_file(self.get_dict_file_name_from_config(_STATE_TO_CODES_LOWER))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_STATE_TO_CODES_LOWER))
    if not self.populated_cities:
        try:
            self.populated_cities = self.load_json_file(self.get_dict_file_name_from_config(_POPULATED_CITIES))
        except Exception as e:
            raise ValueError('{} dictionary missing from resources'.format(_POPULATED_CITIES))
    try:
        # evidence counters in decreasing order of priority
        priori_lst = ['city_state_together_count', 'city_state_code_together_count',
                      'city_country_together_count', 'city_state_separate_count',
                      'city_country_separate_count', 'city_state_code_separate_count']
        results = [[] for i in range(len(priori_lst)+1)]
        knowledge_graph = d[_KNOWLEDGE_GRAPH]
        if "populated_places" in knowledge_graph:
            pop_places = knowledge_graph["populated_places"]
            for place in pop_places:
                city_state_together_count = 0
                city_state_separate_count = 0
                city_state_code_together_count = 0
                city_state_code_separate_count = 0
                city_country_together_count = 0
                city_country_separate_count = 0
                city = pop_places[place][0]["value"]
                state = pop_places[place][0]["metadata"]["state"]
                state = "" if not state else state
                country = pop_places[place][0]["metadata"]["country"]
                country = "" if not country else country
                if state in self.state_to_codes_lower_dict:
                    state_code = self.state_to_codes_lower_dict[state]
                else:
                    state_code = None
                # collect (segment, start, end) spans for each facet
                cities = []
                if "city_name" in knowledge_graph:
                    if city in knowledge_graph["city_name"]:
                        city_lst = knowledge_graph["city_name"][city]
                        for each_city in city_lst:
                            if "context" in each_city:
                                cities.append((each_city["origin"]["segment"],
                                               each_city["context"]["start"], each_city["context"]["end"]))
                states = []
                if country == "united states":
                    if "state" in knowledge_graph:
                        if state in knowledge_graph["state"]:
                            state_lst = knowledge_graph["state"][state]
                            for each_state in state_lst:
                                if "context" in each_state:
                                    states.append((each_state["origin"]["segment"],
                                                   each_state["context"]["start"], each_state["context"]["end"]))
                countries = []
                if "country" in knowledge_graph:
                    if country in knowledge_graph["country"]:
                        country_lst = knowledge_graph["country"][country]
                        for each_country in country_lst:
                            if "context" in each_country:
                                countries.append((each_country["origin"]["segment"],
                                                  each_country["context"]["start"], each_country["context"]["end"]))
                state_codes = []
                if country == "united states":
                    if state_code:
                        if "states_usa_codes" in knowledge_graph:
                            if state_code in knowledge_graph["states_usa_codes"]:
                                state_code_lst = knowledge_graph["states_usa_codes"][state_code]
                                for each_state_code in state_code_lst:
                                    if "context" in each_state_code:
                                        state_codes.append((each_state_code["origin"]["segment"],
                                                            each_state_code["context"]["start"], each_state_code["context"]["end"]))
                if cities:
                    # adjacency within the same segment counts as "together"
                    # evidence; the same facet elsewhere counts as "separate"
                    for a_city in cities:
                        for a_state in states:
                            if a_city[0] == a_state[0] and a_city[1] != a_state[1] and (abs(a_city[2] - a_state[1])<3 or abs(a_city[1] - a_state[2])<3):
                                city_state_together_count += 1
                            else:
                                city_state_separate_count += 1
                        for a_state_code in state_codes:
                            if a_city[0] == a_state_code[0] and a_city[1] != a_state_code[1] and a_state_code[1] - a_city[2]<3 and a_state_code[1] - a_city[2]>0:
                                city_state_code_together_count += 1
                            else:
                                city_state_code_separate_count += 1
                        for a_country in countries:
                            if a_city[0] == a_country[0] and a_city[1] != a_country[1] and (abs(a_city[2] - a_country[1])<5 or abs(a_city[1] - a_country[2])<3):
                                city_country_together_count += 1
                            else:
                                city_country_separate_count += 1
                result = copy.deepcopy(pop_places[place][0])
                result['metadata']['city_state_together_count'] = city_state_together_count
                result['metadata']['city_state_separate_count'] = city_state_separate_count
                result['metadata']['city_state_code_together_count'] = city_state_code_together_count
                result['metadata']['city_state_code_separate_count'] = city_state_code_separate_count
                result['metadata']['city_country_together_count'] = city_country_together_count
                result['metadata']['city_country_separate_count'] = city_country_separate_count
                # bucket the place into the highest-priority band with evidence
                for priori_idx, counter in enumerate(priori_lst):
                    if country == "united states":
                        result_value = city + ',' + state
                    else:
                        result_value = city + ',' + country
                    result['key'] = city+':'+state+':'+country+':'+str(result['metadata']['longitude'])+':'+str(result['metadata']['latitude'])
                    if result['metadata'][counter] > 0:
                        if priori_idx < 3:
                            result['value'] = result_value + "-1.0"
                        elif priori_idx < 5:
                            result['value'] = result_value + "-0.8"
                        else:
                            result['value'] = result_value + "-0.1"
                        results[priori_idx].append(result)
                        break
                    else:
                        # no evidence at all: known populous cities get a
                        # last-resort low-confidence bucket
                        if priori_idx == 5 and city in self.populated_cities:
                            result['value'] = result_value + "-0.1"
                            results[priori_idx+1].append(result)
        return_result = None
        for priori in range(len(priori_lst)+1):
            if results[priori]:
                if priori < 3:
                    return_result = results[priori]
                    break
                else:
                    # lower-confidence bands: keep only the most populous place
                    high_pop = 0
                    high_idx = 0
                    for idx, a_result in enumerate(results[priori]):
                        if a_result['metadata']['population'] >= high_pop:
                            high_pop = a_result['metadata']['population']
                            high_idx = idx
                    return_result = [results[priori][high_idx]]
                    break
        return return_result
    except Exception as e:
        print e
        return None
|
# Copyright 2012, Contrail Systems, Inc.
#
"""
.. attention:: Fix the license string
"""
import requests
import re
import uuid
import json
import time
import socket
import netaddr
from netaddr import IPNetwork, IPSet, IPAddress
import gevent
import bottle
from neutron.common import constants
from neutron.common import exceptions
from neutron.api.v2 import attributes as attr
from cfgm_common import exceptions as vnc_exc
from vnc_api.vnc_api import *
# Default headers for raw requests to the VNC API server.
_DEFAULT_HEADERS = {
    'Content-type': 'application/json; charset="UTF-8"', }
# TODO find if there is a common definition
# CRUD operation codes used when dispatching resource requests.
CREATE = 1
READ = 2
UPDATE = 3
DELETE = 4
# Map neutron protocol names to their IANA protocol numbers.
IP_PROTOCOL_MAP = {constants.PROTO_NAME_TCP: constants.PROTO_NUM_TCP,
                   constants.PROTO_NAME_UDP: constants.PROTO_NUM_UDP,
                   constants.PROTO_NAME_ICMP: constants.PROTO_NUM_ICMP,
                   constants.PROTO_NAME_ICMP_V6: constants.PROTO_NUM_ICMP_V6}
# SNAT defines
SNAT_SERVICE_TEMPLATE_FQ_NAME = ['default-domain', 'netns-snat-template']
# Security group Exceptions
class SecurityGroupInvalidPortRange(exceptions.InvalidInput):
    """port_range_min must not exceed port_range_max for TCP/UDP rules."""
    message = _("For TCP/UDP protocols, port_range_min must be "
                "<= port_range_max")
class SecurityGroupInvalidPortValue(exceptions.InvalidInput):
    """A rule's port value is out of range or malformed."""
    message = _("Invalid value for port %(port)s")
class SecurityGroupInvalidIcmpValue(exceptions.InvalidInput):
    """An ICMP type/code value is outside 0..255."""
    message = _("Invalid value for ICMP %(field)s (%(attr)s) "
                "%(value)s. It must be 0 to 255.")
class SecurityGroupMissingIcmpType(exceptions.InvalidInput):
    """An ICMP code was given without the corresponding ICMP type."""
    message = _("ICMP code (port-range-max) %(value)s is provided"
                " but ICMP type (port-range-min) is missing.")
class SecurityGroupInUse(exceptions.InUse):
    """The security group cannot be deleted while still referenced."""
    message = _("Security Group %(id)s in use.")
class SecurityGroupCannotRemoveDefault(exceptions.InUse):
    """The tenant's default security group may not be deleted."""
    message = _("Removing default security group not allowed.")
class SecurityGroupCannotUpdateDefault(exceptions.InUse):
    """The tenant's default security group may not be updated."""
    message = _("Updating default security group not allowed.")
class SecurityGroupDefaultAlreadyExists(exceptions.InUse):
    """A second default security group may not be created."""
    message = _("Default security group already exists.")
class SecurityGroupRuleInvalidProtocol(exceptions.InvalidInput):
    """The rule names a protocol outside the supported set."""
    message = _("Security group rule protocol %(protocol)s not supported. "
                "Only protocol values %(values)s and their integer "
                "representation (0 to 255) are supported.")
class SecurityGroupRulesNotSingleTenant(exceptions.InvalidInput):
    """Bulk rule creation must target a single tenant."""
    message = _("Multiple tenant_ids in bulk security group rule create"
                " not allowed")
class SecurityGroupRemoteGroupAndRemoteIpPrefix(exceptions.InvalidInput):
    """remote_ip_prefix and remote_group_id are mutually exclusive."""
    message = _("Only remote_ip_prefix or remote_group_id may "
                "be provided.")
class SecurityGroupProtocolRequiredWithPorts(exceptions.InvalidInput):
    """A port range requires the protocol to be specified too."""
    message = _("Must also specify protocol if port range is given.")
class SecurityGroupNotSingleGroupRules(exceptions.InvalidInput):
    """Rule updates must all belong to one security profile."""
    message = _("Only allowed to update rules for "
                "one security profile at a time")
class SecurityGroupNotFound(exceptions.NotFound):
    """No security group exists with the given id."""
    message = _("Security group %(id)s does not exist")
class SecurityGroupRuleNotFound(exceptions.NotFound):
    """No security group rule exists with the given id."""
    message = _("Security group rule %(id)s does not exist")
class DuplicateSecurityGroupRuleInPost(exceptions.InUse):
    """The same rule appears twice in one POST request."""
    message = _("Duplicate Security Group Rule in POST.")
class SecurityGroupRuleExists(exceptions.InUse):
    """An identical rule already exists in the group."""
    message = _("Security group rule already exists. Group id is %(id)s.")
class SecurityGroupRuleParameterConflict(exceptions.InvalidInput):
    """The rule's ethertype conflicts with its CIDR's address family."""
    message = _("Conflicting value ethertype %(ethertype)s for CIDR %(cidr)s")
# L3 Exceptions
class RouterNotFound(exceptions.NotFound):
    """No router exists with the given id."""
    message = _("Router %(router_id)s could not be found")
class RouterInUse(exceptions.InUse):
    """The router cannot be deleted while it still has ports."""
    message = _("Router %(router_id)s still has ports")
class RouterInterfaceNotFound(exceptions.NotFound):
    """The router has no interface with the given port id."""
    message = _("Router %(router_id)s does not have "
                "an interface with id %(port_id)s")
class RouterInterfaceNotFoundForSubnet(exceptions.NotFound):
    """The router has no interface on the given subnet."""
    message = _("Router %(router_id)s has no interface "
                "on subnet %(subnet_id)s")
class RouterInterfaceInUseByFloatingIP(exceptions.InUse):
    """The interface is still required by one or more floating IPs."""
    message = _("Router interface for subnet %(subnet_id)s on router "
                "%(router_id)s cannot be deleted, as it is required "
                "by one or more floating IPs.")
class FloatingIPNotFound(exceptions.NotFound):
    """No floating IP exists with the given id."""
    message = _("Floating IP %(floatingip_id)s could not be found")
class ExternalGatewayForFloatingIPNotFound(exceptions.NotFound):
    """The external network is not routable from the port's subnet."""
    message = _("External network %(external_network_id)s is not reachable "
                "from subnet %(subnet_id)s. Therefore, cannot associate "
                "Port %(port_id)s with a Floating IP.")
class FloatingIPPortAlreadyAssociated(exceptions.InUse):
    """The fixed IP already has a floating IP on the external network."""
    message = _("Cannot associate floating IP %(floating_ip_address)s "
                "(%(fip_id)s) with port %(port_id)s "
                "using fixed IP %(fixed_ip)s, as that fixed IP already "
                "has a floating IP on external network %(net_id)s.")
class L3PortInUse(exceptions.InUse):
    """Ports owned by L3 devices may not be deleted via the port API."""
    message = _("Port %(port_id)s has owner %(device_owner)s and therefore"
                " cannot be deleted directly via the port API.")
class RouterExternalGatewayInUseByFloatingIp(exceptions.InUse):
    """The router's external gateway is still required by floating IPs."""
    message = _("Gateway cannot be updated for router %(router_id)s, since a "
                "gateway to external network %(net_id)s is required by one or "
                "more floating IPs.")
# Allowed Address Pair
class AddressPairMatchesPortFixedIPAndMac(exceptions.InvalidInput):
    """An allowed-address-pair duplicates the port's own IP/MAC."""
    message = _("Port's Fixed IP and Mac Address match an address pair entry.")
class DBInterface(object):
"""
An instance of this class forwards requests to vnc cfg api (web)server
"""
Q_URL_PREFIX = '/extensions/ct'
def __init__(self, admin_name, admin_password, admin_tenant_name,
             api_srvr_ip, api_srvr_port, user_info=None,
             contrail_extensions_enabled=True,
             list_optimization_enabled=False):
    """Connect to the VNC API server (retrying until it is reachable)
    and migrate legacy subnet key/value entries from the old
    'net_fq_name_str pfx/len' format to the 'net_id pfx/len' format.
    """
    self._api_srvr_ip = api_srvr_ip
    self._api_srvr_port = api_srvr_port
    # per-resource caches of neutron-side (q_*) and vnc-side objects
    self._db_cache = {}
    self._db_cache['q_networks'] = {}
    self._db_cache['q_subnets'] = {}
    self._db_cache['q_subnet_maps'] = {}
    self._db_cache['q_policies'] = {}
    self._db_cache['q_ipams'] = {}
    self._db_cache['q_routers'] = {}
    self._db_cache['q_floatingips'] = {}
    self._db_cache['q_ports'] = {}
    self._db_cache['q_fixed_ip_to_subnet'] = {}
    # obj-uuid to tenant-uuid mapping
    self._db_cache['q_obj_to_tenant'] = {}
    self._db_cache['q_tenant_to_def_sg'] = {}
    # port count per tenant-id
    self._db_cache['q_tenant_port_count'] = {}
    self._db_cache['vnc_networks'] = {}
    self._db_cache['vnc_ports'] = {}
    self._db_cache['vnc_projects'] = {}
    self._db_cache['vnc_instance_ips'] = {}
    self._db_cache['vnc_routers'] = {}
    self._contrail_extensions_enabled = contrail_extensions_enabled
    self._list_optimization_enabled = list_optimization_enabled
    # Retry till a api-server is up
    connected = False
    while not connected:
        try:
            # TODO remove hardcode
            self._vnc_lib = VncApi(admin_name, admin_password,
                                   admin_tenant_name, api_srvr_ip,
                                   api_srvr_port, '/', user_info=user_info)
            connected = True
        except requests.exceptions.RequestException as e:
            gevent.sleep(3)
    # TODO remove this backward compat code eventually
    # changes 'net_fq_name_str pfx/len' key to 'net_id pfx/len' key
    subnet_map = self._vnc_lib.kv_retrieve(key=None)
    for kv_dict in subnet_map:
        key = kv_dict['key']
        if len(key.split()) == 1:
            subnet_id = key
            # uuid key, fixup value portion to 'net_id pfx/len' format
            # if not already so
            if len(kv_dict['value'].split(':')) == 1:
                # new format already, skip
                continue
            net_fq_name = kv_dict['value'].split()[0].split(':')
            try:
                net_obj = self._virtual_network_read(fq_name=net_fq_name)
            except NoIdError:
                # network is gone -- drop the stale mapping
                self._vnc_lib.kv_delete(subnet_id)
                continue
            new_subnet_key = '%s %s' % (net_obj.uuid,
                                        kv_dict['value'].split()[1])
            self._vnc_lib.kv_store(subnet_id, new_subnet_key)
        else:  # subnet key
            if len(key.split()[0].split(':')) == 1:
                # new format already, skip
                continue
            # delete old key, convert to new key format and save
            old_subnet_key = key
            self._vnc_lib.kv_delete(old_subnet_key)
            subnet_id = kv_dict['value']
            net_fq_name = key.split()[0].split(':')
            try:
                net_obj = self._virtual_network_read(fq_name=net_fq_name)
            except NoIdError:
                continue
            new_subnet_key = '%s %s' % (net_obj.uuid, key.split()[1])
            self._vnc_lib.kv_store(new_subnet_key, subnet_id)
# end __init__
# Helper routines
def _request_api_server(self, url, method, data=None, headers=None):
if method == 'GET':
return requests.get(url)
if method == 'POST':
return requests.post(url, data=data, headers=headers)
if method == 'DELETE':
return requests.delete(url)
#end _request_api_server
def _relay_request(self, request):
"""
Send received request to api server
"""
# chop neutron parts of url and add api server address
url_path = re.sub(self.Q_URL_PREFIX, '', request.environ['PATH_INFO'])
url = "http://%s:%s%s" % (self._api_srvr_ip, self._api_srvr_port,
url_path)
return self._request_api_server(
url, request.environ['REQUEST_METHOD'],
request.body, {'Content-type': request.environ['CONTENT_TYPE']})
#end _relay_request
def _validate_project_ids(self, context, project_ids):
if context and not context['is_admin']:
return [context['tenant']]
return_project_ids = []
for project_id in project_ids:
try:
return_project_ids.append(str(uuid.UUID(project_id)))
except ValueError:
continue
return return_project_ids
    def _obj_to_dict(self, obj):
        # Serialize a vnc object to a plain dict via the client library.
        return self._vnc_lib.obj_to_dict(obj)
    #end _obj_to_dict
def _get_plugin_property(self, property_in):
fq_name=['default-global-system-config'];
gsc_obj = self._vnc_lib.global_system_config_read(fq_name);
plugin_settings = gsc_obj.plugin_tuning.plugin_property
for each_setting in plugin_settings:
if each_setting.property == property_in:
return each_setting.value
return None
#end _get_plugin_property
def _ensure_instance_exists(self, instance_id):
instance_name = instance_id
instance_obj = VirtualMachine(instance_name)
try:
id = self._vnc_lib.obj_to_id(instance_obj)
instance_obj = self._vnc_lib.virtual_machine_read(id=id)
except NoIdError: # instance doesn't exist, create it
# check if instance_id is a uuid value or not
try:
uuid.UUID(instance_id)
instance_obj.uuid = instance_id
except ValueError:
# if instance_id is not a valid uuid, let
# virtual_machine_create generate uuid for the vm
pass
self._vnc_lib.virtual_machine_create(instance_obj)
return instance_obj
#end _ensure_instance_exists
def _ensure_default_security_group_exists(self, proj_id):
# check in cache
sg_uuid = self._db_cache_read('q_tenant_to_def_sg', proj_id)
if sg_uuid:
return
# check in api server
proj_obj = self._vnc_lib.project_read(id=proj_id)
sg_groups = proj_obj.get_security_groups()
for sg_group in sg_groups or []:
if sg_group['to'][-1] == 'default':
self._db_cache_write('q_tenant_to_def_sg',
proj_id, sg_group['uuid'])
return
# does not exist hence create and add cache
sg_uuid = str(uuid.uuid4())
self._db_cache_write('q_tenant_to_def_sg', proj_id, sg_uuid)
sg_obj = SecurityGroup(name='default', parent_obj=proj_obj)
sg_obj.uuid = sg_uuid
self._vnc_lib.security_group_create(sg_obj)
#allow all egress traffic
def_rule = {}
def_rule['port_range_min'] = 0
def_rule['port_range_max'] = 65535
def_rule['direction'] = 'egress'
def_rule['remote_ip_prefix'] = '0.0.0.0/0'
def_rule['remote_group_id'] = None
def_rule['protocol'] = 'any'
rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
self._security_group_rule_create(sg_obj.uuid, rule)
#allow ingress traffic from within default security group
def_rule = {}
def_rule['port_range_min'] = 0
def_rule['port_range_max'] = 65535
def_rule['direction'] = 'ingress'
def_rule['remote_ip_prefix'] = '0.0.0.0/0'
def_rule['remote_group_id'] = None
def_rule['protocol'] = 'any'
rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
self._security_group_rule_create(sg_obj.uuid, rule)
#end _ensure_default_security_group_exists
def _db_cache_read(self, table, key):
try:
return self._db_cache[table][key]
except KeyError:
return None
#end _db_cache_read
    def _db_cache_write(self, table, key, val):
        # Insert/overwrite key -> val in the named cache table; the
        # table itself is assumed to already exist in self._db_cache.
        self._db_cache[table][key] = val
    #end _db_cache_write
def _db_cache_delete(self, table, key):
try:
del self._db_cache[table][key]
except Exception:
pass
#end _db_cache_delete
    def _db_cache_flush(self, table):
        # Drop all entries in the named cache table by rebinding it to a
        # fresh dict.
        self._db_cache[table] = {}
    #end _db_cache_flush
    def _get_obj_tenant_id(self, q_type, obj_uuid):
        """Return the tenant id (un-dashed project uuid) owning the
        object of type q_type ('port' or 'network'); None otherwise.

        Results are memoized in the 'q_obj_to_tenant' cache table.
        """
        # Get the mapping from cache, else seed cache and return
        try:
            return self._db_cache['q_obj_to_tenant'][obj_uuid]
        except KeyError:
            # Seed the cache and return
            if q_type == 'port':
                port_obj = self._virtual_machine_interface_read(obj_uuid)
                if port_obj.parent_type != "project":
                    # port not directly under a project: derive the
                    # tenant from the port's network instead
                    net_id = port_obj.get_virtual_network_refs()[0]['uuid']
                    # recurse up type-hierarchy
                    tenant_id = self._get_obj_tenant_id('network', net_id)
                else:
                    tenant_id = port_obj.parent_uuid.replace('-', '')
                self._set_obj_tenant_id(obj_uuid, tenant_id)
                return tenant_id
            if q_type == 'network':
                net_obj = self._virtual_network_read(net_id=obj_uuid)
                tenant_id = net_obj.parent_uuid.replace('-', '')
                self._set_obj_tenant_id(obj_uuid, tenant_id)
                return tenant_id
            return None
    #end _get_obj_tenant_id
    def _set_obj_tenant_id(self, obj_uuid, tenant_uuid):
        # Memoize the object-uuid -> tenant-uuid mapping.
        self._db_cache['q_obj_to_tenant'][obj_uuid] = tenant_uuid
    #end _set_obj_tenant_id
def _del_obj_tenant_id(self, obj_uuid):
try:
del self._db_cache['q_obj_to_tenant'][obj_uuid]
except Exception:
pass
#end _del_obj_tenant_id
    def _project_read(self, proj_id=None, fq_name=None):
        """Read a Project by uuid or fq-name and refresh the cache.

        The cache *lookup* is deliberately disabled (the unconditional
        ``raise KeyError``); the cache is still written on every read.
        """
        if proj_id:
            try:
                # disable cache for now as fip pool might be put without
                # neutron knowing it
                raise KeyError
                #return self._db_cache['vnc_projects'][proj_id]
            except KeyError:
                proj_obj = self._vnc_lib.project_read(id=proj_id)
                fq_name_str = json.dumps(proj_obj.get_fq_name())
                # cache under both uuid and fq-name-json keys
                self._db_cache['vnc_projects'][proj_id] = proj_obj
                self._db_cache['vnc_projects'][fq_name_str] = proj_obj
                return proj_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # disable cache for now as fip pool might be put without
                # neutron knowing it
                raise KeyError
                #return self._db_cache['vnc_projects'][fq_name_str]
            except KeyError:
                proj_obj = self._vnc_lib.project_read(fq_name=fq_name)
                self._db_cache['vnc_projects'][fq_name_str] = proj_obj
                self._db_cache['vnc_projects'][proj_obj.uuid] = proj_obj
                return proj_obj
    #end _project_read
def _get_tenant_id_for_create(self, context, resource):
if context['is_admin'] and 'tenant_id' in resource:
tenant_id = resource['tenant_id']
elif ('tenant_id' in resource and
resource['tenant_id'] != context['tenant_id']):
reason = _('Cannot create resource for another tenant')
self._raise_contrail_exception(400, exceptions.AdminRequired(reason=reason))
else:
tenant_id = context['tenant_id']
return tenant_id
    def _raise_contrail_exception(self, code, exc):
        # Abort the current bottle request with the given HTTP status,
        # serializing the neutron exception's text as a JSON body.
        exc_info = {'message': str(exc)}
        bottle.abort(code, json.dumps(exc_info))
    def _security_group_rule_create(self, sg_id, sg_rule):
        """Append sg_rule to security group sg_id and persist the change.

        Aborts with HTTP 404 when the group does not exist.
        """
        try:
            sg_vnc = self._vnc_lib.security_group_read(id=sg_id)
        except NoIdError:
            self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))
        rules = sg_vnc.get_security_group_entries()
        if rules is None:
            # first rule on this group
            rules = PolicyEntriesType([sg_rule])
        else:
            rules.add_policy_rule(sg_rule)
        sg_vnc.set_security_group_entries(rules)
        self._vnc_lib.security_group_update(sg_vnc)
        return
    #end _security_group_rule_create
def _security_group_rule_find(self, sgr_id):
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_sgs = self._security_group_list_project(proj_id)
for sg_obj in project_sgs:
sgr_entries = sg_obj.get_security_group_entries()
if sgr_entries == None:
continue
for sg_rule in sgr_entries.get_policy_rule():
if sg_rule.get_rule_uuid() == sgr_id:
return sg_obj, sg_rule
return None, None
#end _security_group_rule_find
    def _security_group_rule_delete(self, sg_obj, sg_rule):
        """Remove sg_rule from sg_obj's rule entries and persist.

        The rule is assumed to be present in the entries (list
        ``.remove`` raises ValueError otherwise).
        """
        rules = sg_obj.get_security_group_entries()
        rules.get_policy_rule().remove(sg_rule)
        sg_obj.set_security_group_entries(rules)
        self._vnc_lib.security_group_update(sg_obj)
        return
    #end _security_group_rule_delete
    def _security_group_delete(self, sg_id):
        # Delete the security group with the given uuid on the api-server.
        self._vnc_lib.security_group_delete(id=sg_id)
    #end _security_group_delete
    def _svc_instance_create(self, si_obj):
        """Create a service instance and bind it to the default-domain
        'nat-template' service template; returns the new uuid."""
        si_uuid = self._vnc_lib.service_instance_create(si_obj)
        st_fq_name = ['default-domain', 'nat-template']
        st_obj = self._vnc_lib.service_template_read(fq_name=st_fq_name)
        si_obj.set_service_template(st_obj)
        self._vnc_lib.service_instance_update(si_obj)
        return si_uuid
    #end _svc_instance_create
    def _svc_instance_delete(self, si_id):
        # Delete the service instance with the given uuid.
        self._vnc_lib.service_instance_delete(id=si_id)
    #end _svc_instance_delete
    def _route_table_create(self, rt_obj):
        # Create the route table on the api-server; returns the new uuid.
        rt_uuid = self._vnc_lib.route_table_create(rt_obj)
        return rt_uuid
    #end _route_table_create
    def _route_table_delete(self, rt_id):
        # Delete the route table with the given uuid.
        self._vnc_lib.route_table_delete(id=rt_id)
    #end _route_table_delete
    def _resource_create(self, resource_type, obj):
        """Create a vnc resource of the given type name.

        On a fq-name collision (RefsExistError) the object is renamed
        with a fresh uuid suffix and created once more; permission
        errors abort the request with HTTP 400.
        """
        try:
            obj_uuid = getattr(self._vnc_lib, resource_type + '_create')(obj)
        except RefsExistError:
            # name collision: make the name unique and retry once
            obj.uuid = str(uuid.uuid4())
            obj.name += '-' + obj.uuid
            obj.fq_name[-1] += '-' + obj.uuid
            obj_uuid = getattr(self._vnc_lib, resource_type + '_create')(obj)
        except PermissionDenied as e:
            exc_info = {'type': 'BadRequest', 'message': str(e)}
            bottle.abort(400, json.dumps(exc_info))
        return obj_uuid
    #end _resource_create
    def _virtual_network_read(self, net_id=None, fq_name=None, fields=None):
        """Read a virtual network by uuid or fq-name.

        The cache lookup is disabled (the unconditional ``raise
        KeyError``); every read refreshes the cache under both the uuid
        and the fq-name-json key.
        """
        if net_id:
            try:
                # return self._db_cache['vnc_networks'][net_id]
                raise KeyError
            except KeyError:
                net_obj = self._vnc_lib.virtual_network_read(id=net_id,
                                                             fields=fields)
                fq_name_str = json.dumps(net_obj.get_fq_name())
                self._db_cache['vnc_networks'][net_id] = net_obj
                self._db_cache['vnc_networks'][fq_name_str] = net_obj
                return net_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_networks'][fq_name_str]
                raise KeyError
            except KeyError:
                net_obj = self._vnc_lib.virtual_network_read(fq_name=fq_name,
                                                             fields=fields)
                self._db_cache['vnc_networks'][fq_name_str] = net_obj
                self._db_cache['vnc_networks'][net_obj.uuid] = net_obj
                return net_obj
    #end _virtual_network_read
    def _virtual_network_update(self, net_obj):
        """Persist changes to a virtual network and refresh the cache.

        Permission errors abort with HTTP 400; reference-conflict
        errors surface as a neutron BadRequest.
        """
        try:
            self._vnc_lib.virtual_network_update(net_obj)
        except PermissionDenied as e:
            exc_info = {'type': 'BadRequest', 'message': str(e)}
            bottle.abort(400, json.dumps(exc_info))
        except RefsExistError as e:
            self._raise_contrail_exception(400, exceptions.BadRequest(
                resource='network', msg=str(e)))
        # read back to get subnet gw allocated by api-server
        fq_name_str = json.dumps(net_obj.get_fq_name())
        self._db_cache['vnc_networks'][net_obj.uuid] = net_obj
        self._db_cache['vnc_networks'][fq_name_str] = net_obj
    #end _virtual_network_update
    def _virtual_network_delete(self, net_id):
        """Delete a virtual network (and, when cached, its floating-ip
        pools first) and evict it from the cache.

        A RefsExistError is reported to the caller as NetworkInUse.
        """
        fq_name_str = None
        try:
            net_obj = self._db_cache['vnc_networks'][net_id]
            fq_name_str = json.dumps(net_obj.get_fq_name())
        except KeyError:
            # not cached; fip pools (if any) cannot be pre-deleted below
            net_obj = None
        try:
            if net_obj and net_obj.get_floating_ip_pools():
                fip_pools = net_obj.get_floating_ip_pools()
                for fip_pool in fip_pools:
                    self._floating_ip_pool_delete(fip_pool_id=fip_pool['uuid'])
            self._vnc_lib.virtual_network_delete(id=net_id)
        except RefsExistError:
            self._raise_contrail_exception(404, exceptions.NetworkInUse(net_id=net_id))
        # evict both cache keys; ignore misses
        try:
            del self._db_cache['vnc_networks'][net_id]
            if fq_name_str:
                del self._db_cache['vnc_networks'][fq_name_str]
        except KeyError:
            pass
    #end _virtual_network_delete
    def _virtual_network_list(self, parent_id=None, obj_uuids=None,
                              fields=None, detail=False, count=False):
        # Thin pass-through to the vnc client's network list call.
        return self._vnc_lib.virtual_networks_list(
            parent_id=parent_id,
            obj_uuids=obj_uuids,
            fields=fields,
            detail=detail,
            count=count)
    #end _virtual_network_list
    def _virtual_machine_interface_read(self, port_id=None, fq_name=None,
                                        fields=None):
        """Read a VMI (port) by uuid or fq-name, always requesting the
        back-reference fields neutron conversion needs.

        The cache lookup is disabled (unconditional ``raise KeyError``,
        mirroring the other disabled caches in this class); every read
        refreshes the cache under both keys.
        """
        back_ref_fields = ['logical_router_back_refs', 'instance_ip_back_refs', 'floating_ip_back_refs']
        if fields:
            n_extra_fields = list(set(fields + back_ref_fields))
        else:
            n_extra_fields = back_ref_fields
        if port_id:
            try:
                # return self._db_cache['vnc_ports'][port_id]
                raise KeyError
            except KeyError:
                port_obj = self._vnc_lib.virtual_machine_interface_read(
                    id=port_id, fields=n_extra_fields)
                fq_name_str = json.dumps(port_obj.get_fq_name())
                self._db_cache['vnc_ports'][port_id] = port_obj
                self._db_cache['vnc_ports'][fq_name_str] = port_obj
                return port_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_ports'][fq_name_str]
                raise KeyError
            except KeyError:
                port_obj = self._vnc_lib.virtual_machine_interface_read(
                    fq_name=fq_name, fields=n_extra_fields)
                self._db_cache['vnc_ports'][fq_name_str] = port_obj
                self._db_cache['vnc_ports'][port_obj.uuid] = port_obj
                return port_obj
    #end _virtual_machine_interface_read
    def _virtual_machine_interface_update(self, port_obj):
        # Persist changes to a VMI (port) and refresh both cache keys.
        self._vnc_lib.virtual_machine_interface_update(port_obj)
        fq_name_str = json.dumps(port_obj.get_fq_name())
        self._db_cache['vnc_ports'][port_obj.uuid] = port_obj
        self._db_cache['vnc_ports'][fq_name_str] = port_obj
    #end _virtual_machine_interface_update
    def _virtual_machine_interface_delete(self, port_id):
        """Delete a VMI (port) on the api-server and evict it from the
        cache (both the uuid key and, when cached, the fq-name key)."""
        fq_name_str = None
        try:
            port_obj = self._db_cache['vnc_ports'][port_id]
            fq_name_str = json.dumps(port_obj.get_fq_name())
        except KeyError:
            port_obj = None
        self._vnc_lib.virtual_machine_interface_delete(id=port_id)
        try:
            del self._db_cache['vnc_ports'][port_id]
            if fq_name_str:
                del self._db_cache['vnc_ports'][fq_name_str]
        except KeyError:
            pass
    #end _virtual_machine_interface_delete
def _virtual_machine_interface_list(self, parent_id=None, back_ref_id=None,
obj_uuids=None, fields=None):
back_ref_fields = ['logical_router_back_refs', 'instance_ip_back_refs', 'floating_ip_back_refs']
if fields:
n_extra_fields = list(set(fields + back_ref_fields))
else:
n_extra_fields = back_ref_fields
vmi_objs = self._vnc_lib.virtual_machine_interfaces_list(
parent_id=parent_id,
back_ref_id=back_ref_id,
obj_uuids=obj_uuids,
detail=True,
fields=n_extra_fields)
return vmi_objs
#end _virtual_machine_interface_list
    def _instance_ip_create(self, iip_obj):
        # Create the instance-ip on the api-server; returns the new uuid.
        iip_uuid = self._vnc_lib.instance_ip_create(iip_obj)
        return iip_uuid
    #end _instance_ip_create
    def _instance_ip_read(self, instance_ip_id=None, fq_name=None):
        """Read an instance-ip by uuid or fq-name.

        The cache lookup is disabled (unconditional ``raise KeyError``);
        every read refreshes the cache under both keys.
        """
        if instance_ip_id:
            try:
                # return self._db_cache['vnc_instance_ips'][instance_ip_id]
                raise KeyError
            except KeyError:
                iip_obj = self._vnc_lib.instance_ip_read(id=instance_ip_id)
                fq_name_str = json.dumps(iip_obj.get_fq_name())
                self._db_cache['vnc_instance_ips'][instance_ip_id] = iip_obj
                self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
                return iip_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_instance_ips'][fq_name_str]
                raise KeyError
            except KeyError:
                iip_obj = self._vnc_lib.instance_ip_read(fq_name=fq_name)
                self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
                self._db_cache['vnc_instance_ips'][iip_obj.uuid] = iip_obj
                return iip_obj
    #end _instance_ip_read
    def _instance_ip_update(self, iip_obj):
        # Persist changes to an instance-ip and refresh both cache keys.
        self._vnc_lib.instance_ip_update(iip_obj)
        fq_name_str = json.dumps(iip_obj.get_fq_name())
        self._db_cache['vnc_instance_ips'][iip_obj.uuid] = iip_obj
        self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
    #end _instance_ip_update
    def _instance_ip_delete(self, instance_ip_id):
        """Delete an instance-ip and evict it from the cache (uuid key
        and, when cached, the fq-name key)."""
        fq_name_str = None
        try:
            iip_obj = self._db_cache['vnc_instance_ips'][instance_ip_id]
            fq_name_str = json.dumps(iip_obj.get_fq_name())
        except KeyError:
            iip_obj = None
        self._vnc_lib.instance_ip_delete(id=instance_ip_id)
        try:
            del self._db_cache['vnc_instance_ips'][instance_ip_id]
            if fq_name_str:
                del self._db_cache['vnc_instance_ips'][fq_name_str]
        except KeyError:
            pass
    #end _instance_ip_delete
    def _instance_ip_list(self, back_ref_id=None, obj_uuids=None, fields=None):
        # Thin pass-through to the vnc client's instance-ip list call.
        iip_objs = self._vnc_lib.instance_ips_list(detail=True,
                                                   back_ref_id=back_ref_id,
                                                   obj_uuids=obj_uuids,
                                                   fields=fields)
        return iip_objs
    #end _instance_ip_list
    def _floating_ip_pool_create(self, fip_pool_obj):
        # Create the floating-ip pool; returns the new uuid.
        fip_pool_uuid = self._vnc_lib.floating_ip_pool_create(fip_pool_obj)
        return fip_pool_uuid
    # end _floating_ip_pool_create
def _floating_ip_pool_delete(self, fip_pool_id):
fip_pool_uuid = self._vnc_lib.floating_ip_pool_delete(id=fip_pool_id)
# end _floating_ip_pool_delete
# find projects on a given domain
    def _project_list_domain(self, domain_id):
        """List projects under the default domain.

        domain_id is currently ignored -- only 'default-domain' is used.
        """
        # TODO till domain concept is not present in keystone
        fq_name = ['default-domain']
        resp_dict = self._vnc_lib.projects_list(parent_fq_name=fq_name)
        return resp_dict['projects']
    #end _project_list_domain
# find network ids on a given project
def _network_list_project(self, project_id, count=False):
if project_id:
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
else:
project_uuid = None
if count:
ret_val = self._virtual_network_list(parent_id=project_uuid,
count=True)
else:
ret_val = self._virtual_network_list(parent_id=project_uuid,
detail=True)
return ret_val
#end _network_list_project
# find router ids on a given project
def _router_list_project(self, project_id=None):
if project_id:
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
return []
else:
project_uuid = None
resp_dict = self._vnc_lib.logical_routers_list(parent_id=project_uuid)
return resp_dict['logical-routers']
#end _router_list_project
def _ipam_list_project(self, project_id):
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
resp_dict = self._vnc_lib.network_ipams_list(parent_id=project_uuid)
return resp_dict['network-ipams']
#end _ipam_list_project
def _security_group_list_project(self, project_id):
if project_id:
try:
project_uuid = str(uuid.UUID(project_id))
# Trigger a project read to ensure project sync
project_obj = self._project_read(proj_id=project_uuid)
except Exception:
print "Error in converting uuid %s" % (project_id)
else:
project_uuid = None
sg_objs = self._vnc_lib.security_groups_list(parent_id=project_uuid,
detail=True)
return sg_objs
#end _security_group_list_project
def _security_group_entries_list_sg(self, sg_id):
try:
sg_uuid = str(uuid.UUID(sg_id))
except Exception:
print "Error in converting SG uuid %s" % (sg_id)
resp_dict = self._vnc_lib.security_groups_list(obj_uuids=[sg_uuid])
return resp_dict['security-groups']
#end _security_group_entries_list_sg
def _route_table_list_project(self, project_id):
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
resp_dict = self._vnc_lib.route_tables_list(parent_id=project_uuid)
return resp_dict['route-tables']
#end _route_table_list_project
def _svc_instance_list_project(self, project_id):
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
resp_dict = self._vnc_lib.service_instances_list(parent_id=project_id)
return resp_dict['service-instances']
#end _svc_instance_list_project
def _policy_list_project(self, project_id):
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
resp_dict = self._vnc_lib.network_policys_list(parent_id=project_uuid)
return resp_dict['network-policys']
#end _policy_list_project
    def _logical_router_read(self, rtr_id=None, fq_name=None):
        """Read a logical router by uuid or fq-name.

        The cache lookup is disabled (unconditional ``raise KeyError``);
        every read refreshes the cache under both keys.
        """
        if rtr_id:
            try:
                # return self._db_cache['vnc_routers'][rtr_id]
                raise KeyError
            except KeyError:
                rtr_obj = self._vnc_lib.logical_router_read(id=rtr_id)
                fq_name_str = json.dumps(rtr_obj.get_fq_name())
                self._db_cache['vnc_routers'][rtr_id] = rtr_obj
                self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
                return rtr_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_routers'][fq_name_str]
                raise KeyError
            except KeyError:
                rtr_obj = self._vnc_lib.logical_router_read(fq_name=fq_name)
                self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
                self._db_cache['vnc_routers'][rtr_obj.uuid] = rtr_obj
                return rtr_obj
    #end _logical_router_read
    def _logical_router_update(self, rtr_obj):
        # Persist changes to a logical router and refresh both cache keys.
        self._vnc_lib.logical_router_update(rtr_obj)
        fq_name_str = json.dumps(rtr_obj.get_fq_name())
        self._db_cache['vnc_routers'][rtr_obj.uuid] = rtr_obj
        self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
    #end _logical_router_update
    def _logical_router_delete(self, rtr_id):
        """Delete a logical router and evict it from the cache.

        A RefsExistError from the api-server is reported as HTTP 409
        RouterInUse.
        """
        fq_name_str = None
        try:
            rtr_obj = self._db_cache['vnc_routers'][rtr_id]
            fq_name_str = json.dumps(rtr_obj.get_fq_name())
        except KeyError:
            pass
        try:
            self._vnc_lib.logical_router_delete(id=rtr_id)
        except RefsExistError:
            self._raise_contrail_exception(409, RouterInUse(router_id=rtr_id))
        # evict both cache keys; ignore misses
        try:
            del self._db_cache['vnc_routers'][rtr_id]
            if fq_name_str:
                del self._db_cache['vnc_routers'][fq_name_str]
        except KeyError:
            pass
    #end _logical_router_delete
    def _floatingip_list(self, back_ref_id=None):
        # List floating-ip objects in detail, optionally filtered by
        # back-reference id(s).
        return self._vnc_lib.floating_ips_list(back_ref_id=back_ref_id,
                                               detail=True)
    #end _floatingip_list
# find floating ip pools a project has access to
    def _fip_pool_refs_project(self, project_id):
        # Return the project's floating-ip-pool references (may be None).
        project_obj = self._project_read(proj_id=project_id)
        return project_obj.get_floating_ip_pool_refs()
    #end _fip_pool_refs_project
def _network_list_shared_and_ext(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if net.get_router_external() and net.get_is_shared():
ret_list.append(net)
return ret_list
# end _network_list_router_external
def _network_list_router_external(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if not net.get_router_external():
continue
ret_list.append(net)
return ret_list
# end _network_list_router_external
def _network_list_shared(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if not net.get_is_shared():
continue
ret_list.append(net)
return ret_list
# end _network_list_shared
# find networks of floating ip pools project has access to
    def _fip_pool_ref_networks(self, project_id):
        """Return network objects whose floating-ip pools the project can
        draw from: all shared networks plus the networks backing the
        project's explicit fip-pool references."""
        ret_net_objs = self._network_list_shared()
        proj_fip_pool_refs = self._fip_pool_refs_project(project_id)
        if not proj_fip_pool_refs:
            return ret_net_objs
        for fip_pool_ref in proj_fip_pool_refs:
            fip_uuid = fip_pool_ref['uuid']
            fip_pool_obj = self._vnc_lib.floating_ip_pool_read(id=fip_uuid)
            # the pool's parent is the network that owns it
            net_uuid = fip_pool_obj.parent_uuid
            net_obj = self._virtual_network_read(net_id=net_uuid)
            ret_net_objs.append(net_obj)
        return ret_net_objs
    #end _fip_pool_ref_networks
# find floating ip pools defined by network
    def _fip_pool_list_network(self, net_id):
        # List the floating-ip pools parented by the given network.
        resp_dict = self._vnc_lib.floating_ip_pools_list(parent_id=net_id)
        return resp_dict['floating-ip-pools']
    #end _fip_pool_list_network
def _port_list(self, net_objs, port_objs, iip_objs):
ret_q_ports = []
memo_req = {'networks': {},
'subnets': {},
'instance-ips': {}}
for net_obj in net_objs:
# dictionary of iip_uuid to iip_obj
memo_req['networks'][net_obj.uuid] = net_obj
subnets_info = self._virtual_network_to_subnets(net_obj)
memo_req['subnets'][net_obj.uuid] = subnets_info
for iip_obj in iip_objs:
# dictionary of iip_uuid to iip_obj
memo_req['instance-ips'][iip_obj.uuid] = iip_obj
for port_obj in port_objs:
port_info = self._port_vnc_to_neutron(port_obj, memo_req)
ret_q_ports.append(port_info)
return ret_q_ports
#end _port_list
    def _port_list_network(self, network_ids, count=False):
        """Return neutron port dicts for ports on the given networks.

        NOTE(review): the count parameter is accepted but never used --
        the full list is returned regardless; confirm against callers.
        """
        ret_list = []
        net_objs = self._virtual_network_list(obj_uuids=network_ids,
                           fields=['virtual_machine_interface_back_refs'],
                           detail=True)
        if not net_objs:
            return ret_list
        net_ids = [net_obj.uuid for net_obj in net_objs]
        port_objs = self._virtual_machine_interface_list(back_ref_id=net_ids)
        iip_objs = self._instance_ip_list(back_ref_id=net_ids)
        return self._port_list(net_objs, port_objs, iip_objs)
    #end _port_list_network
# find port ids on a given project
    def _port_list_project(self, project_id, count=False):
        """List (or, with count=True, count) the neutron ports of a
        project.

        With list optimization enabled, VMIs are listed directly by
        parent project; otherwise ports are gathered via the project's
        networks and their VMI back-references.
        """
        if self._list_optimization_enabled:
            port_objs = self._virtual_machine_interface_list(parent_id=project_id,
                                     fields=['instance_ip_back_refs'])
            if count:
                return len(port_objs)
            # NOTE(review): lists all instance-ips, not only this
            # project's -- confirm this is intentional for the memo
            iip_objs = self._instance_ip_list()
            return self._port_list([], port_objs, iip_objs)
        else:
            if count:
                ret_val = 0
            else:
                ret_val = []
            net_objs = self._virtual_network_list(project_id,
                               fields=['virtual_machine_interface_back_refs'],
                               detail=True)
            if not net_objs:
                return ret_val
            if count:
                # count via back-references without fetching the ports
                for net_obj in net_objs:
                    port_back_refs = (
                        net_obj.get_virtual_machine_interface_back_refs() or [])
                    ret_val = ret_val + len(port_back_refs)
                return ret_val
            net_ids = [net_obj.uuid for net_obj in net_objs]
            port_objs = self._virtual_machine_interface_list(back_ref_id=net_ids)
            iip_objs = self._instance_ip_list(back_ref_id=net_ids)
            return self._port_list(net_objs, port_objs, iip_objs)
    #end _port_list_project
# Returns True if
# * no filter is specified
# OR
# * search-param is not present in filters
# OR
# * 1. search-param is present in filters AND
# 2. resource matches param-list AND
# 3. shared parameter in filters is False
def _filters_is_present(self, filters, key_name, match_value):
if filters:
if key_name in filters:
try:
if key_name == 'tenant_id':
filter_value = [str(uuid.UUID(t_id)) \
for t_id in filters[key_name]]
else:
filter_value = filters[key_name]
idx = filter_value.index(match_value)
except ValueError: # not in requested list
return False
return True
#end _filters_is_present
    def _network_read(self, net_uuid):
        # Convenience wrapper around _virtual_network_read by uuid.
        net_obj = self._virtual_network_read(net_id=net_uuid)
        return net_obj
    #end _network_read
    def _subnet_vnc_create_mapping(self, subnet_id, subnet_key):
        # Store the bidirectional subnet id <-> key mapping in both the
        # api-server kv store and the local cache.
        self._vnc_lib.kv_store(subnet_id, subnet_key)
        self._vnc_lib.kv_store(subnet_key, subnet_id)
        self._db_cache['q_subnet_maps'][subnet_id] = subnet_key
        self._db_cache['q_subnet_maps'][subnet_key] = subnet_id
    #end _subnet_vnc_create_mapping
    def _subnet_vnc_read_mapping(self, id=None, key=None):
        """Resolve a subnet id->key or key->id mapping via the kv store,
        caching the result.

        An unknown id aborts with HTTP 404; an unknown key lets the
        NoIdError propagate (relied on by the read-or-create wrapper).
        """
        if id:
            try:
                subnet_key = self._vnc_lib.kv_retrieve(id)
                self._db_cache['q_subnet_maps'][id] = subnet_key
                return subnet_key
            except NoIdError:
                self._raise_contrail_exception(404, exceptions.SubnetNotFound(subnet_id=id))
        if key:
            subnet_id = self._vnc_lib.kv_retrieve(key)
            self._db_cache['q_subnet_maps'][key] = subnet_id
            return subnet_id
    #end _subnet_vnc_read_mapping
    def _subnet_vnc_read_or_create_mapping(self, id=None, key=None):
        """Resolve a subnet mapping, lazily creating a neutron subnet id
        for subnets created outside of neutron."""
        if id:
            return self._subnet_vnc_read_mapping(id=id)
        # if subnet was created outside of neutron handle it and create
        # neutron representation now (lazily)
        try:
            return self._subnet_vnc_read_mapping(key=key)
        except NoIdError:
            subnet_id = str(uuid.uuid4())
            self._subnet_vnc_create_mapping(subnet_id, key)
            return self._subnet_vnc_read_mapping(key=key)
    #end _subnet_vnc_read_or_create_mapping
def _subnet_vnc_delete_mapping(self, subnet_id, subnet_key):
self._vnc_lib.kv_delete(subnet_id)
self._vnc_lib.kv_delete(subnet_key)
try:
del self._db_cache['q_subnet_maps'][subnet_id]
del self._db_cache['q_subnet_maps'][subnet_key]
except KeyError:
pass
#end _subnet_vnc_delete_mapping
def _subnet_vnc_get_key(self, subnet_vnc, net_id):
pfx = subnet_vnc.subnet.get_ip_prefix()
pfx_len = subnet_vnc.subnet.get_ip_prefix_len()
network = IPNetwork('%s/%s' % (pfx, pfx_len))
return '%s %s/%s' % (net_id, str(network.ip), pfx_len)
#end _subnet_vnc_get_key
    def _subnet_read(self, net_uuid, subnet_key):
        """Return the vnc subnet on network net_uuid whose kv-store key
        matches subnet_key, or None when the network or subnet is not
        found."""
        try:
            net_obj = self._virtual_network_read(net_id=net_uuid)
        except NoIdError:
            return None
        ipam_refs = net_obj.get_network_ipam_refs()
        if not ipam_refs:
            return None
        # TODO scope for optimization
        for ipam_ref in ipam_refs:
            subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
            for subnet_vnc in subnet_vncs:
                if self._subnet_vnc_get_key(subnet_vnc,
                                            net_uuid) == subnet_key:
                    return subnet_vnc
        return None
    #end _subnet_read
    def _ip_address_to_subnet_id(self, ip_addr, net_obj):
        """Return the neutron subnet id whose CIDR on net_obj contains
        ip_addr, or None when no subnet matches."""
        # find subnet-id for ip-addr, called when instance-ip created
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnet_vncs:
                    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                                      subnet_vnc.subnet.get_ip_prefix_len())
                    if IPAddress(ip_addr) in IPSet([cidr]):
                        subnet_key = self._subnet_vnc_get_key(subnet_vnc,
                                                              net_obj.uuid)
                        # lazily create the neutron id if the subnet was
                        # created outside of neutron
                        subnet_id = self._subnet_vnc_read_or_create_mapping(
                            key=subnet_key)
                        return subnet_id
        return None
    #end _ip_address_to_subnet_id
# Returns a list of dicts of subnet-id:cidr for a VN
    def _virtual_network_to_subnets(self, net_obj):
        """Return a list of {'id': ..., 'cidr': ...} dicts, one per
        subnet configured on the given network."""
        ret_subnets = []
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnet_vncs:
                    subnet_key = self._subnet_vnc_get_key(subnet_vnc,
                                                          net_obj.uuid)
                    # lazily create the neutron id if needed
                    subnet_id = self._subnet_vnc_read_or_create_mapping(
                        key=subnet_key)
                    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                                      subnet_vnc.subnet.get_ip_prefix_len())
                    ret_subnets.append({'id': subnet_id, 'cidr': cidr})
        return ret_subnets
    # end _virtual_network_to_subnets
# Conversion routines between VNC and Quantum objects
    def _svc_instance_neutron_to_vnc(self, si_q, oper):
        """Build a vnc ServiceInstance from a neutron service-instance
        dict.

        Only CREATE is handled; any other oper implicitly returns None.
        """
        if oper == CREATE:
            project_id = str(uuid.UUID(si_q['tenant_id']))
            project_obj = self._project_read(proj_id=project_id)
            net_id = si_q['external_net']
            ext_vn = self._vnc_lib.virtual_network_read(id=net_id)
            scale_out = ServiceScaleOutType(max_instances=1, auto_scale=False)
            # right network is the external net; left is left empty and
            # auto_policy is enabled
            si_prop = ServiceInstanceType(
                auto_policy=True,
                left_virtual_network="",
                right_virtual_network=ext_vn.get_fq_name_str(),
                scale_out=scale_out)
            si_prop.set_scale_out(scale_out)
            si_vnc = ServiceInstance(name=si_q['name'],
                                     parent_obj=project_obj,
                                     service_instance_properties=si_prop)
            return si_vnc
    #end _svc_instance_neutron_to_vnc
    def _svc_instance_vnc_to_neutron(self, si_obj):
        """Convert a vnc ServiceInstance to a neutron-style dict."""
        si_q_dict = self._obj_to_dict(si_obj)
        # replace field names
        si_q_dict['id'] = si_obj.uuid
        si_q_dict['tenant_id'] = si_obj.parent_uuid.replace('-', '')
        si_q_dict['name'] = si_obj.name
        si_props = si_obj.get_service_instance_properties()
        if si_props:
            # resolve the right (external) network to '<uuid> <name>'
            vn_fq_name = si_props.get_right_virtual_network()
            vn_obj = self._vnc_lib.virtual_network_read(fq_name_str=vn_fq_name)
            si_q_dict['external_net'] = str(vn_obj.uuid) + ' ' + vn_obj.name
            si_q_dict['internal_net'] = ''
        return si_q_dict
    #end _svc_instance_vnc_to_neutron
def _route_table_neutron_to_vnc(self, rt_q, oper):
    """Convert a neutron route-table dict into a vnc RouteTable.

    CREATE builds a new table under the request's project; any other
    oper reads the existing table by id. In both cases, any route whose
    next_hop is a virtual-machine uuid is rewritten to the fq-name of
    the service instance backing that VM.
    """
    def resolve_next_hops(routes):
        # Best effort: translate a VM-uuid next_hop into the fq-name of
        # the VM's service instance; on any lookup failure the route is
        # left untouched (preserves the original silent behavior).
        for route in routes['route']:
            try:
                vm_obj = self._vnc_lib.virtual_machine_read(
                    id=route['next_hop'])
                si_list = vm_obj.get_service_instance_refs()
                if si_list:
                    fq_name = si_list[0]['to']
                    si_obj = self._vnc_lib.service_instance_read(
                        fq_name=fq_name)
                    route['next_hop'] = si_obj.get_fq_name_str()
            except Exception:
                # lookups are advisory only; keep a bad next_hop as-is
                pass

    if oper == CREATE:
        project_id = str(uuid.UUID(rt_q['tenant_id']))
        project_obj = self._project_read(proj_id=project_id)
        rt_vnc = RouteTable(name=rt_q['name'],
                            parent_obj=project_obj)
        if not rt_q['routes']:
            return rt_vnc
        resolve_next_hops(rt_q['routes'])
        rt_vnc.set_routes(RouteTableType.factory(**rt_q['routes']))
    else:
        rt_vnc = self._vnc_lib.route_table_read(id=rt_q['id'])
        # Guard against a missing/empty 'routes' payload on update;
        # the original iterated it unconditionally and would raise.
        if rt_q.get('routes'):
            resolve_next_hops(rt_q['routes'])
            rt_vnc.set_routes(RouteTableType.factory(**rt_q['routes']))
    return rt_vnc
#end _route_table_neutron_to_vnc
def _route_table_vnc_to_neutron(self, rt_obj):
    """Convert a vnc RouteTable object into a neutron route-table dict."""
    rt_q_dict = self._obj_to_dict(rt_obj)
    # overwrite generated fields with the neutron-style identifiers
    rt_q_dict['id'] = rt_obj.uuid
    rt_q_dict['tenant_id'] = rt_obj.parent_uuid.replace('-', '')
    rt_q_dict['name'] = rt_obj.name
    rt_q_dict['fq_name'] = rt_obj.fq_name
    # ensure the 'routes' key is present (None when the table is empty)
    rt_q_dict['routes'] = rt_q_dict.pop('routes', None)
    routes = rt_q_dict['routes']
    if routes:
        # expose next_hop_type as the next_hop value where it is set
        for route in routes['route']:
            if route['next_hop_type']:
                route['next_hop'] = route['next_hop_type']
    return rt_q_dict
#end _route_table_vnc_to_neutron
def _security_group_vnc_to_neutron(self, sg_obj):
    """Convert a vnc SecurityGroup into a neutron security-group dict."""
    # display_name is empty for groups created directly via vnc_api;
    # fall back to the last fq_name component in that case
    name = sg_obj.display_name or sg_obj.get_fq_name()[-1]
    sg_q_dict = {
        'id': sg_obj.uuid,
        'tenant_id': sg_obj.parent_uuid.replace('-', ''),
        'name': name,
        'description': sg_obj.get_id_perms().get_description(),
        'security_group_rules': [],
    }
    rule_list = self.security_group_rules_read(sg_obj.uuid, sg_obj)
    if rule_list:
        sg_q_dict['security_group_rules'] = list(rule_list)
    # contrail-specific fields only when extensions are turned on
    if self._contrail_extensions_enabled:
        sg_q_dict['contrail:fq_name'] = sg_obj.get_fq_name()
    return sg_q_dict
#end _security_group_vnc_to_neutron
def _security_group_neutron_to_vnc(self, sg_q, oper):
    """Convert a neutron security-group dict into a vnc SecurityGroup.

    CREATE builds a new group under the request's project, polling for
    the project since keystone->vnc sync may lag; any other oper reads
    the existing group and applies name/description updates.
    """
    if oper == CREATE:
        project_id = str(uuid.UUID(sg_q['tenant_id']))

        def project_read(proj_id):
            # project may not have synced into the vnc api-server yet
            try:
                return self._project_read(proj_id=proj_id)
            except NoIdError:
                return None

        # Poll up to 10 times for the project; unlike the original, do
        # not sleep one extra time after the final failed attempt.
        # NOTE(review): if the project never shows up, project_obj stays
        # None and SecurityGroup gets a None parent, same as before --
        # confirm whether this should raise instead.
        project_obj = None
        for attempt in range(10):
            project_obj = project_read(project_id)
            if project_obj:
                break
            if attempt < 9:
                gevent.sleep(2)
        id_perms = IdPermsType(enable=True,
                               description=sg_q.get('description'))
        sg_vnc = SecurityGroup(name=sg_q['name'],
                               parent_obj=project_obj,
                               id_perms=id_perms)
    else:
        sg_vnc = self._vnc_lib.security_group_read(id=sg_q['id'])
        if 'name' in sg_q and sg_q['name']:
            sg_vnc.display_name = sg_q['name']
        if 'description' in sg_q:
            id_perms = sg_vnc.get_id_perms()
            id_perms.set_description(sg_q['description'])
            sg_vnc.set_id_perms(id_perms)
    return sg_vnc
#end _security_group_neutron_to_vnc
def _security_group_rule_vnc_to_neutron(self, sg_id, sg_rule, sg_obj=None):
    """Convert a vnc PolicyRuleType into a neutron security-group-rule dict.

    Direction is inferred from which endpoint carries the 'local'
    marker; the opposite endpoint supplies either the remote CIDR or
    the remote security group. Returns an empty dict when sg_id is
    None; raises (via _raise_contrail_exception) 404 when the group or
    a representable rule cannot be found.
    """
    sgr_q_dict = {}
    if sg_id is None:  # was `== None`; identity test is the correct idiom
        return sgr_q_dict
    if not sg_obj:
        try:
            sg_obj = self._vnc_lib.security_group_read(id=sg_id)
        except NoIdError:
            self._raise_contrail_exception(
                404, SecurityGroupNotFound(id=sg_id))
    remote_cidr = None
    remote_sg_uuid = None
    saddr = sg_rule.get_src_addresses()[0]
    daddr = sg_rule.get_dst_addresses()[0]
    # 'local' on the source side means traffic leaving the group
    if saddr.get_security_group() == 'local':
        direction = 'egress'
        addr = daddr
    elif daddr.get_security_group() == 'local':
        direction = 'ingress'
        addr = saddr
    else:
        # neither endpoint is local: not a neutron-representable rule
        self._raise_contrail_exception(
            404, SecurityGroupRuleNotFound(id=sg_rule.get_rule_uuid()))
    # hoisted the repeated get_subnet()/get_security_group() calls
    subnet = addr.get_subnet()
    remote_sg = addr.get_security_group()
    if subnet:
        remote_cidr = '%s/%s' % (subnet.get_ip_prefix(),
                                 subnet.get_ip_prefix_len())
    elif remote_sg and remote_sg not in ('any', 'local'):
        try:
            if remote_sg != ':'.join(sg_obj.get_fq_name()):
                remote_sg_obj = self._vnc_lib.security_group_read(
                    fq_name_str=remote_sg)
            else:
                # rule refers back to its own group
                remote_sg_obj = sg_obj
            remote_sg_uuid = remote_sg_obj.uuid
        except NoIdError:
            # referenced group vanished; leave remote_group_id unset
            pass
    sgr_q_dict['id'] = sg_rule.get_rule_uuid()
    sgr_q_dict['tenant_id'] = sg_obj.parent_uuid.replace('-', '')
    sgr_q_dict['security_group_id'] = sg_obj.uuid
    sgr_q_dict['ethertype'] = 'IPv4'
    sgr_q_dict['direction'] = direction
    sgr_q_dict['protocol'] = sg_rule.get_protocol()
    sgr_q_dict['port_range_min'] = sg_rule.get_dst_ports()[0].\
        get_start_port()
    sgr_q_dict['port_range_max'] = sg_rule.get_dst_ports()[0].\
        get_end_port()
    sgr_q_dict['remote_ip_prefix'] = remote_cidr
    sgr_q_dict['remote_group_id'] = remote_sg_uuid
    return sgr_q_dict
#end _security_group_rule_vnc_to_neutron
def _security_group_rule_neutron_to_vnc(self, sgr_q, oper):
    """Convert a neutron security-group-rule dict to a vnc PolicyRuleType.

    Only CREATE is handled; any other oper falls through and returns
    None (unchanged from the original behavior).
    """
    if oper == CREATE:
        # neutron sends None/0 for unbounded port ranges
        port_min = sgr_q['port_range_min'] or 0
        port_max = sgr_q['port_range_max'] or 65535
        # remote endpoint precedence: explicit CIDR > remote group > 'any'
        endpt = [AddressType(security_group='any')]
        if sgr_q['remote_ip_prefix']:
            cidr = sgr_q['remote_ip_prefix'].split('/')
            endpt = [AddressType(subnet=SubnetType(cidr[0], int(cidr[1])))]
        elif sgr_q['remote_group_id']:
            sg_obj = self._vnc_lib.security_group_read(
                id=sgr_q['remote_group_id'])
            endpt = [AddressType(security_group=sg_obj.get_fq_name_str())]
        # vnc rules are always written in the '>' direction; ingress vs
        # egress is expressed by which side carries the 'local' marker.
        # (The original assigned dir='>' identically in both branches
        # and shadowed the builtin `dir`.)
        local_ep = [AddressType(security_group='local')]
        if sgr_q['direction'] == 'ingress':
            local, remote = endpt, local_ep
        else:
            local, remote = local_ep, endpt
        if not sgr_q['protocol']:
            sgr_q['protocol'] = 'any'
        sgr_uuid = str(uuid.uuid4())
        rule = PolicyRuleType(rule_uuid=sgr_uuid, direction='>',
                              protocol=sgr_q['protocol'],
                              src_addresses=local,
                              src_ports=[PortType(0, 65535)],
                              dst_addresses=remote,
                              dst_ports=[PortType(port_min, port_max)])
        return rule
#end _security_group_rule_neutron_to_vnc
def _network_neutron_to_vnc(self, network_q, oper):
    """Convert a neutron network dict into a vnc VirtualNetwork.

    CREATE builds a new network under the request's project (polling
    for the project while keystone->vnc sync catches up); other opers
    read the existing network and, for UPDATE, apply shared/external
    changes. Display name, admin state, attached policies and the vpc
    route table are applied for every oper.
    """
    net_name = network_q.get('name', None)
    try:
        external_attr = network_q['router:external']
    except KeyError:
        external_attr = attr.ATTR_NOT_SPECIFIED
    if oper == CREATE:
        project_id = str(uuid.UUID(network_q['tenant_id']))

        def project_read(proj_id):
            # project may not have synced into the vnc api-server yet
            try:
                return self._project_read(proj_id=proj_id)
            except NoIdError:
                return None

        # poll for the project; skip the pointless sleep after the
        # last attempt that the original performed
        project_obj = None
        for attempt in range(10):
            project_obj = project_read(project_id)
            if project_obj:
                break
            if attempt < 9:
                gevent.sleep(2)
        id_perms = IdPermsType(enable=True)
        net_obj = VirtualNetwork(net_name, project_obj, id_perms=id_perms)
        # ATTR_NOT_SPECIFIED is a sentinel, not a boolean
        if external_attr == attr.ATTR_NOT_SPECIFIED:
            net_obj.router_external = False
        else:
            net_obj.router_external = external_attr
        net_obj.is_shared = network_q.get('shared', False)
    else:  # READ/UPDATE/DELETE
        net_obj = self._virtual_network_read(net_id=network_q['id'])
        if oper == UPDATE:
            if 'shared' in network_q:
                net_obj.is_shared = network_q['shared']
            if external_attr is not attr.ATTR_NOT_SPECIFIED:
                net_obj.router_external = external_attr
    if 'name' in network_q and network_q['name']:
        net_obj.display_name = network_q['name']
    id_perms = net_obj.get_id_perms()
    if 'admin_state_up' in network_q:
        id_perms.enable = network_q['admin_state_up']
        net_obj.set_id_perms(id_perms)
    if 'contrail:policys' in network_q:
        policy_fq_names = network_q['contrail:policys']
        # reset and re-attach in the order given; the sequence major
        # number encodes the position
        net_obj.set_network_policy_list([], [])
        for seq, p_fq_name in enumerate(policy_fq_names):
            domain_name, project_name, policy_name = p_fq_name
            domain_obj = Domain(domain_name)
            project_obj = Project(project_name, domain_obj)
            policy_obj = NetworkPolicy(policy_name, project_obj)
            net_obj.add_network_policy(
                policy_obj,
                VirtualNetworkPolicyType(sequence=SequenceType(seq, 0)))
    if 'vpc:route_table' in network_q:
        rt_fq_name = network_q['vpc:route_table']
        if rt_fq_name:
            try:
                rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
                net_obj.set_route_table(rt_obj)
            except NoIdError:
                # TODO add route table specific exception
                self._raise_contrail_exception(
                    404, exceptions.NetworkNotFound(net_id=net_obj.uuid))
    return net_obj
#end _network_neutron_to_vnc
def _network_vnc_to_neutron(self, net_obj, net_repr='SHOW'):
    """Convert a vnc VirtualNetwork into a neutron network dict.

    net_repr is 'SHOW' or 'LIST'; both only add the
    contrail:instance_count extension field. Contrail-specific fields
    (fq_name, policys, route table, subnet ipam) are merged in only
    when extensions are enabled.
    """
    net_q_dict = {}
    extra_dict = {}
    id_perms = net_obj.get_id_perms()
    # (dropped an unused local that read id_perms.permissions)
    net_q_dict['id'] = net_obj.uuid
    # nets created directly via vnc_api carry no display_name
    net_q_dict['name'] = (net_obj.display_name or
                          net_obj.get_fq_name()[-1])
    extra_dict['contrail:fq_name'] = net_obj.get_fq_name()
    net_q_dict['tenant_id'] = net_obj.parent_uuid.replace('-', '')
    net_q_dict['admin_state_up'] = id_perms.enable
    net_q_dict['shared'] = bool(net_obj.is_shared)
    net_q_dict['status'] = (constants.NET_STATUS_ACTIVE if id_perms.enable
                            else constants.NET_STATUS_DOWN)
    net_q_dict['router:external'] = bool(net_obj.router_external)
    if net_repr == 'SHOW' or net_repr == 'LIST':
        extra_dict['contrail:instance_count'] = 0
    net_policy_refs = net_obj.get_network_policy_refs()
    if net_policy_refs:
        # report attached policies ordered by their sequence number
        sorted_refs = sorted(
            net_policy_refs,
            key=lambda t: (t['attr'].sequence.major,
                           t['attr'].sequence.minor))
        extra_dict['contrail:policys'] = \
            [np_ref['to'] for np_ref in sorted_refs]
    rt_refs = net_obj.get_route_table_refs()
    if rt_refs:
        extra_dict['vpc:route_table'] = \
            [rt_ref['to'] for rt_ref in rt_refs]
    ipam_refs = net_obj.get_network_ipam_refs()
    net_q_dict['subnets'] = []
    if ipam_refs:
        extra_dict['contrail:subnet_ipam'] = []
        for ipam_ref in ipam_refs:
            subnets = ipam_ref['attr'].get_ipam_subnets()
            for subnet in subnets:
                sn_dict = self._subnet_vnc_to_neutron(subnet, net_obj,
                                                      ipam_ref['to'])
                net_q_dict['subnets'].append(sn_dict['id'])
                extra_dict['contrail:subnet_ipam'].append(
                    {'subnet_cidr': sn_dict['cidr'],
                     'ipam_fq_name': ipam_ref['to']})
    if self._contrail_extensions_enabled:
        net_q_dict.update(extra_dict)
    return net_q_dict
#end _network_vnc_to_neutron
def _subnet_neutron_to_vnc(self, subnet_q):
    """Build a vnc IpamSubnetType from a neutron subnet dict.

    Only IPv4 is supported; IPv6 requests are rejected with a 400.
    The gateway defaults to the first usable address of the CIDR when
    the caller does not supply one.
    """
    cidr = IPNetwork(subnet_q['cidr'])
    pfx = str(cidr.network)
    pfx_len = int(cidr.prefixlen)
    if cidr.version != 4:
        exc_info = {'type': 'BadRequest',
                    'message': "Bad subnet request: IPv6 is not supported"}
        bottle.abort(400, json.dumps(exc_info))
    # gateway: caller-supplied, else first address after the network id
    default_gw = subnet_q.get('gateway_ip', str(IPAddress(cidr.first + 1)))
    # allocation pools: None lets the address manager assign them
    alloc_pools = subnet_q.get('allocation_pools')
    dhcp_option_list = None
    dns_servers = subnet_q.get('dns_nameservers')
    if dns_servers:
        # DHCP option 6 carries the domain name servers
        dhcp_options = [DhcpOptionType(dhcp_option_name='6',
                                       dhcp_option_value=srv)
                        for srv in dns_servers]
        dhcp_option_list = DhcpOptionsListType(dhcp_options)
    host_route_list = None
    host_routes_q = subnet_q.get('host_routes')
    if host_routes_q:
        routes = [RouteType(prefix=hr['destination'],
                            next_hop=hr['nexthop'])
                  for hr in host_routes_q]
        host_route_list = RouteTableType(routes)
    dhcp_config = subnet_q.get('enable_dhcp')
    sn_name = subnet_q.get('name')
    return IpamSubnetType(subnet=SubnetType(pfx, pfx_len),
                          default_gateway=default_gw,
                          enable_dhcp=dhcp_config,
                          dns_nameservers=None,
                          allocation_pools=alloc_pools,
                          addr_from_start=True,
                          dhcp_option_list=dhcp_option_list,
                          host_routes=host_route_list,
                          subnet_name=sn_name)
#end _subnet_neutron_to_vnc
def _subnet_vnc_to_neutron(self, subnet_vnc, net_obj, ipam_fq_name):
    """Convert a vnc IpamSubnetType into a neutron subnet dict.

    When the subnet carries no explicit allocation pools, a single
    implicit pool covering the usable range of the CIDR is reported
    (starting after the gateway when it sits at the first address).
    """
    sn_q_dict = {}
    sn_name = subnet_vnc.get_subnet_name()
    sn_q_dict['name'] = sn_name if sn_name is not None else ''
    sn_q_dict['tenant_id'] = net_obj.parent_uuid.replace('-', '')
    sn_q_dict['network_id'] = net_obj.uuid
    sn_q_dict['ip_version'] = 4  # TODO ipv6?
    sn_q_dict['ipv6_ra_mode'] = None
    sn_q_dict['ipv6_address_mode'] = None
    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                      subnet_vnc.subnet.get_ip_prefix_len())
    sn_q_dict['cidr'] = cidr
    # the neutron subnet id comes from a persistent key->uuid mapping
    subnet_key = self._subnet_vnc_get_key(subnet_vnc, net_obj.uuid)
    sn_id = self._subnet_vnc_read_or_create_mapping(key=subnet_key)
    sn_q_dict['id'] = sn_id
    sn_q_dict['gateway_ip'] = subnet_vnc.default_gateway
    # guard against a None pool list -- the original iterated it
    # unconditionally and would raise a TypeError
    alloc_obj_list = subnet_vnc.get_allocation_pools()
    allocation_pools = []
    for alloc_obj in alloc_obj_list or []:
        allocation_pools.append({'first_ip': alloc_obj.get_start(),
                                 'last_ip': alloc_obj.get_end()})
    if not allocation_pools:  # simplified redundant `is None or not` test
        # synthesize the implicit pool: start one address later when the
        # gateway occupies the first usable address of the CIDR
        if (int(IPNetwork(sn_q_dict['gateway_ip']).network) ==
                int(IPNetwork(cidr).network + 1)):
            first_ip = str(IPNetwork(cidr).network + 2)
        else:
            first_ip = str(IPNetwork(cidr).network + 1)
        last_ip = str(IPNetwork(cidr).broadcast - 1)
        allocation_pools.append({'first_ip': first_ip, 'last_ip': last_ip})
    sn_q_dict['allocation_pools'] = allocation_pools
    sn_q_dict['enable_dhcp'] = subnet_vnc.get_enable_dhcp()
    # DHCP option 6 entries are the subnet's DNS nameservers
    nameserver_dict_list = list()
    dhcp_option_list = subnet_vnc.get_dhcp_option_list()
    if dhcp_option_list:
        for dhcp_option in dhcp_option_list.dhcp_option:
            if dhcp_option.get_dhcp_option_name() == '6':
                nameserver_dict_list.append(
                    {'address': dhcp_option.get_dhcp_option_value(),
                     'subnet_id': sn_id})
    sn_q_dict['dns_nameservers'] = nameserver_dict_list
    host_route_dict_list = list()
    host_routes = subnet_vnc.get_host_routes()
    if host_routes:
        for host_route in host_routes.route:
            host_route_dict_list.append(
                {'destination': host_route.get_prefix(),
                 'nexthop': host_route.get_next_hop(),
                 'subnet_id': sn_id})
    sn_q_dict['routes'] = host_route_dict_list
    sn_q_dict['shared'] = bool(net_obj.is_shared)
    return sn_q_dict
#end _subnet_vnc_to_neutron
def _ipam_neutron_to_vnc(self, ipam_q, oper):
    """Convert a neutron ipam dict into a vnc NetworkIpam.

    CREATE builds a new ipam under the request's project; other opers
    read the existing object. A non-empty 'mgmt' payload replaces the
    ipam's management settings wholesale.
    """
    ipam_name = ipam_q.get('name', None)
    if oper == CREATE:
        project_id = str(uuid.UUID(ipam_q['tenant_id']))
        project_obj = self._project_read(proj_id=project_id)
        ipam_obj = NetworkIpam(ipam_name, project_obj)
    else:  # READ/UPDATE/DELETE
        ipam_obj = self._vnc_lib.network_ipam_read(id=ipam_q['id'])
    # Tolerate requests without a 'mgmt' key (the original raised
    # KeyError). Also removed an unused DhcpOptionsListType local and
    # dead commented-out option-building code.
    if ipam_q.get('mgmt'):
        ipam_obj.set_network_ipam_mgmt(IpamType.factory(**ipam_q['mgmt']))
    return ipam_obj
#end _ipam_neutron_to_vnc
def _ipam_vnc_to_neutron(self, ipam_obj):
    """Convert a vnc NetworkIpam into a neutron ipam dict."""
    ipam_q_dict = self._obj_to_dict(ipam_obj)
    # rename vnc fields to their neutron equivalents
    ipam_q_dict['id'] = ipam_q_dict.pop('uuid')
    ipam_q_dict['name'] = ipam_obj.name
    ipam_q_dict['tenant_id'] = ipam_obj.parent_uuid.replace('-', '')
    ipam_q_dict['mgmt'] = ipam_q_dict.pop('network_ipam_mgmt', None)
    # expose referring networks as a plain list of fq-names
    net_back_refs = ipam_q_dict.pop('virtual_network_back_refs', None)
    if net_back_refs:
        ipam_q_dict['nets_using'] = [ref['to'] for ref in net_back_refs]
    return ipam_q_dict
#end _ipam_vnc_to_neutron
def _policy_neutron_to_vnc(self, policy_q, oper):
    """Convert a neutron network-policy dict into a vnc NetworkPolicy."""
    policy_name = policy_q.get('name', None)
    if oper == CREATE:
        proj_uuid = str(uuid.UUID(policy_q['tenant_id']))
        parent_project = self._project_read(proj_id=proj_uuid)
        policy_obj = NetworkPolicy(policy_name, parent_project)
    else:  # READ/UPDATE/DELETE
        policy_obj = self._vnc_lib.network_policy_read(id=policy_q['id'])
    # (re)apply the rule entries supplied with the request
    policy_obj.set_network_policy_entries(
        PolicyEntriesType.factory(**policy_q['entries']))
    return policy_obj
#end _policy_neutron_to_vnc
def _policy_vnc_to_neutron(self, policy_obj):
    """Convert a vnc NetworkPolicy into a neutron network-policy dict."""
    policy_q_dict = self._obj_to_dict(policy_obj)
    # rename vnc fields to their neutron equivalents
    policy_q_dict['id'] = policy_q_dict.pop('uuid')
    policy_q_dict['name'] = policy_obj.name
    policy_q_dict['tenant_id'] = policy_obj.parent_uuid.replace('-', '')
    policy_q_dict['entries'] = policy_q_dict.pop('network_policy_entries',
                                                 None)
    # expose referring networks as a plain list of fq-names
    net_back_refs = policy_obj.get_virtual_network_back_refs()
    if net_back_refs:
        policy_q_dict['nets_using'] = [ref['to'] for ref in net_back_refs]
    return policy_q_dict
#end _policy_vnc_to_neutron
def _router_neutron_to_vnc(self, router_q, oper):
    """Convert a neutron router dict into a vnc LogicalRouter.

    CREATE builds a new router under the request's project (polling
    for the project while keystone->vnc sync catches up); other opers
    read the existing router and apply admin-state updates. The display
    name is refreshed for every oper.
    """
    rtr_name = router_q.get('name', None)
    if oper == CREATE:
        project_id = str(uuid.UUID(router_q['tenant_id']))

        def project_read(proj_id):
            # project may not have synced into the vnc api-server yet
            try:
                return self._project_read(proj_id=proj_id)
            except NoIdError:
                return None

        # poll for the project; unlike the original, don't sleep one
        # extra time after the final failed attempt
        project_obj = None
        for attempt in range(10):
            project_obj = project_read(project_id)
            if project_obj:
                break
            if attempt < 9:
                gevent.sleep(2)
        id_perms = IdPermsType(enable=True)
        rtr_obj = LogicalRouter(rtr_name, project_obj, id_perms=id_perms)
    else:  # READ/UPDATE/DELETE
        rtr_obj = self._logical_router_read(rtr_id=router_q['id'])
        id_perms = rtr_obj.get_id_perms()
        if 'admin_state_up' in router_q:
            id_perms.enable = router_q['admin_state_up']
            rtr_obj.set_id_perms(id_perms)
    if 'name' in router_q and router_q['name']:
        rtr_obj.display_name = router_q['name']
    return rtr_obj
#end _router_neutron_to_vnc
def _router_vnc_to_neutron(self, rtr_obj, rtr_repr='SHOW'):
    """Convert a vnc LogicalRouter into a neutron router dict."""
    # routers created directly via vnc_api carry no display_name
    name = rtr_obj.display_name or rtr_obj.get_fq_name()[-1]
    rtr_q_dict = {
        'id': rtr_obj.uuid,
        'name': name,
        'tenant_id': rtr_obj.parent_uuid.replace('-', ''),
        'admin_state_up': rtr_obj.get_id_perms().enable,
        'shared': False,
        'status': constants.NET_STATUS_ACTIVE,
        'gw_port_id': None,
        'external_gateway_info': None,
    }
    # the external gateway is the first referenced virtual network
    vn_refs = rtr_obj.get_virtual_network_refs()
    if vn_refs:
        rtr_q_dict['external_gateway_info'] = {'network_id':
                                               vn_refs[0]['uuid']}
    if self._contrail_extensions_enabled:
        rtr_q_dict['contrail:fq_name'] = rtr_obj.get_fq_name()
    return rtr_q_dict
#end _router_vnc_to_neutron
def _floatingip_neutron_to_vnc(self, fip_q, oper):
    """Convert a neutron floatingip dict into a vnc FloatingIp.

    CREATE allocates from the first floating-ip pool of the requested
    network (404 when the network has none). Port association /
    disassociation and the fixed ip are applied for every oper.
    """
    if oper == CREATE:
        # TODO for now create from default pool, later
        # use first available pool on net
        net_id = fip_q['floating_network_id']
        try:
            fq_name = self._fip_pool_list_network(net_id)[0]['fq_name']
        except IndexError:
            # IndexError happens when the network is private and so
            # carries no floating-ip pool.
            # (fixed: the original passed net_id as a second Exception
            # argument instead of interpolating it into the message)
            self._raise_contrail_exception(404, Exception(
                "Network %s doesn't provide a floatingip pool" % net_id))
        fip_pool_obj = self._vnc_lib.floating_ip_pool_read(fq_name=fq_name)
        fip_name = str(uuid.uuid4())
        fip_obj = FloatingIp(fip_name, fip_pool_obj)
        fip_obj.uuid = fip_name
        proj_id = str(uuid.UUID(fip_q['tenant_id']))
        proj_obj = self._project_read(proj_id=proj_id)
        fip_obj.set_project(proj_obj)
    else:  # READ/UPDATE/DELETE
        fip_obj = self._vnc_lib.floating_ip_read(id=fip_q['id'])
    if fip_q.get('port_id'):
        port_obj = self._virtual_machine_interface_read(
            port_id=fip_q['port_id'])
        fip_obj.set_virtual_machine_interface(port_obj)
    else:
        # empty list == disassociate any port
        fip_obj.set_virtual_machine_interface_list([])
    if fip_q.get('fixed_ip_address'):
        fip_obj.set_floating_ip_fixed_ip_address(fip_q['fixed_ip_address'])
    else:
        # fixed_ip_address not specified, pick from port_obj in create,
        # reset in case of disassociate
        port_refs = fip_obj.get_virtual_machine_interface_refs()
        if not port_refs:
            fip_obj.set_floating_ip_fixed_ip_address(None)
        else:
            port_obj = self._virtual_machine_interface_read(
                port_id=port_refs[0]['uuid'],
                fields=['instance_ip_back_refs'])
            iip_refs = port_obj.get_instance_ip_back_refs()
            if iip_refs:
                iip_obj = self._instance_ip_read(
                    instance_ip_id=iip_refs[0]['uuid'])
                fip_obj.set_floating_ip_fixed_ip_address(
                    iip_obj.get_instance_ip_address())
    return fip_obj
#end _floatingip_neutron_to_vnc
def _floatingip_vnc_to_neutron(self, fip_obj):
    """Convert a vnc FloatingIp into a neutron floatingip dict.

    router_id is found by scanning the ports of the associated port's
    network for the first one with a logical-router back-ref.
    """
    floating_net_id = self._vnc_lib.fq_name_to_id(
        'virtual-network', fip_obj.get_fq_name()[:-2])
    tenant_id = fip_obj.get_project_refs()[0]['uuid'].replace('-', '')
    port_id = None
    router_id = None
    # (dropped an unused `fixed_ip` local from the original)
    port_refs = fip_obj.get_virtual_machine_interface_refs()
    if port_refs:
        port_id = port_refs[0]['uuid']
        port_obj = self._virtual_machine_interface_read(
            port_id=port_id, fields=['instance_ip_back_refs'])
        # find router_id from the port's network: read its ports one at
        # a time and stop at the first router back-ref (the original
        # eagerly read every port before scanning, and raised when the
        # back-ref list was None)
        internal_net_obj = self._virtual_network_read(
            net_id=port_obj.get_virtual_network_refs()[0]['uuid'])
        net_port_refs = \
            internal_net_obj.get_virtual_machine_interface_back_refs()
        for net_port_ref in net_port_refs or []:
            net_port_obj = self._virtual_machine_interface_read(
                port_id=net_port_ref['uuid'])
            routers = net_port_obj.get_logical_router_back_refs()
            if routers:
                router_id = routers[0]['uuid']
                break
    fip_q_dict = {
        'id': fip_obj.uuid,
        'tenant_id': tenant_id,
        'floating_ip_address': fip_obj.get_floating_ip_address(),
        'floating_network_id': floating_net_id,
        'router_id': router_id,
        'port_id': port_id,
        'fixed_ip_address': fip_obj.get_floating_ip_fixed_ip_address(),
        'status': constants.PORT_STATUS_ACTIVE,
    }
    return fip_q_dict
#end _floatingip_vnc_to_neutron
def _port_neutron_to_vnc(self, port_q, net_obj, oper):
    """Convert a neutron port dict into a vnc VirtualMachineInterface.

    For CREATE a new interface is built under the request's project and
    wired to net_obj; otherwise the existing interface is read by id.
    Device, security-group, dhcp-option, allowed-address-pair and
    fixed-ip attributes are then applied for every oper.
    """
    if oper == CREATE:
        project_id = str(uuid.UUID(port_q['tenant_id']))
        proj_obj = self._project_read(proj_id=project_id)
        id_perms = IdPermsType(enable=True)
        port_uuid = str(uuid.uuid4())
        # fall back to the uuid as the name when none was supplied
        if port_q.get('name'):
            port_name = port_q['name']
        else:
            port_name = port_uuid
        port_obj = VirtualMachineInterface(port_name, proj_obj,
                                           id_perms=id_perms)
        port_obj.uuid = port_uuid
        port_obj.set_virtual_network(net_obj)
        if ('mac_address' in port_q and port_q['mac_address']):
            mac_addrs_obj = MacAddressesType()
            mac_addrs_obj.set_mac_address([port_q['mac_address']])
            port_obj.set_virtual_machine_interface_mac_addresses(mac_addrs_obj)
        port_obj.set_security_group_list([])
        # NOTE(review): `__class__ is object` looks like a check for the
        # ATTR_NOT_SPECIFIED sentinel (a bare object()) -- confirm;
        # absent/sentinel security groups get the project's "default"
        if ('security_groups' not in port_q or
                port_q['security_groups'].__class__ is object):
            sg_obj = SecurityGroup("default", proj_obj)
            port_obj.add_security_group(sg_obj)
    else:  # READ/UPDATE/DELETE
        port_obj = self._virtual_machine_interface_read(port_id=port_q['id'])
    if 'name' in port_q and port_q['name']:
        port_obj.display_name = port_q['name']
    # attach the owning instance unless this port is a router interface
    if port_q.get('device_owner') != constants.DEVICE_OWNER_ROUTER_INTF:
        instance_name = port_q.get('device_id')
        if instance_name:
            try:
                instance_obj = self._ensure_instance_exists(instance_name)
                port_obj.set_virtual_machine(instance_obj)
            except RefsExistError as e:
                exc_info = {'type': 'BadRequest', 'message': str(e)}
                bottle.abort(400, json.dumps(exc_info))
    if 'device_owner' in port_q:
        port_obj.set_virtual_machine_interface_device_owner(port_q.get('device_owner'))
    if 'security_groups' in port_q:
        # replace the whole list with what the request specifies
        port_obj.set_security_group_list([])
        for sg_id in port_q.get('security_groups') or []:
            # TODO optimize to not read sg (only uuid/fqn needed)
            sg_obj = self._vnc_lib.security_group_read(id=sg_id)
            port_obj.add_security_group(sg_obj)
    id_perms = port_obj.get_id_perms()
    if 'admin_state_up' in port_q:
        id_perms.enable = port_q['admin_state_up']
        port_obj.set_id_perms(id_perms)
    if ('extra_dhcp_opts' in port_q):
        dhcp_options = []
        if port_q['extra_dhcp_opts']:
            for option_pair in port_q['extra_dhcp_opts']:
                option = \
                    DhcpOptionType(dhcp_option_name=option_pair['opt_name'],
                                   dhcp_option_value=option_pair['opt_value'])
                dhcp_options.append(option)
        if dhcp_options:
            olist = DhcpOptionsListType(dhcp_options)
            port_obj.set_virtual_machine_interface_dhcp_option_list(olist)
        else:
            # key present but empty -> clear any existing options
            port_obj.set_virtual_machine_interface_dhcp_option_list(None)
    if ('allowed_address_pairs' in port_q):
        aap_array = []
        if port_q['allowed_address_pairs']:
            for address_pair in port_q['allowed_address_pairs']:
                mac_refs = \
                    port_obj.get_virtual_machine_interface_mac_addresses()
                mode = u'active-standby';
                # default the pair's mac to the port's own mac
                if 'mac_address' not in address_pair:
                    if mac_refs:
                        address_pair['mac_address'] = mac_refs.mac_address[0]
                # a bare address is treated as a /32 host prefix
                cidr = address_pair['ip_address'].split('/')
                if len(cidr) == 1:
                    subnet = SubnetType(cidr[0], 32);
                elif len(cidr) == 2:
                    subnet = SubnetType(cidr[0], int(cidr[1]));
                else:
                    self._raise_contrail_exception(
                        400,
                        exceptions.BadRequest(resource='port',
                                              msg='Invalid address pair argument'))
                # reject a pair duplicating the port's own fixed ip + mac
                ip_back_refs = port_obj.get_instance_ip_back_refs()
                if ip_back_refs:
                    for ip_back_ref in ip_back_refs:
                        iip_uuid = ip_back_ref['uuid']
                        try:
                            ip_obj = self._instance_ip_read(
                                instance_ip_id=ip_back_ref['uuid'])
                        except NoIdError:
                            # stale back-ref; ignore it
                            continue
                        ip_addr = ip_obj.get_instance_ip_address()
                        if ((ip_addr == address_pair['ip_address']) and
                                (mac_refs.mac_address[0] == address_pair['mac_address'])):
                            self._raise_contrail_exception(
                                400, AddressPairMatchesPortFixedIPAndMac())
                aap = AllowedAddressPair(subnet,
                                         address_pair['mac_address'], mode)
                aap_array.append(aap)
        if aap_array:
            aaps = AllowedAddressPairs()
            aaps.set_allowed_address_pair(aap_array)
            port_obj.set_virtual_machine_interface_allowed_address_pairs(aaps)
        else:
            # key present but empty -> clear any existing pairs
            port_obj.set_virtual_machine_interface_allowed_address_pairs(None)
    if 'fixed_ips' in port_q:
        net_id = (port_q.get('network_id') or
                  port_obj.get_virtual_network_refs()[0]['uuid'])
        for fixed_ip in port_q.get('fixed_ips', []):
            if 'ip_address' in fixed_ip:
                ip_addr = fixed_ip['ip_address']
                # a requested address must not already be in use
                if self._ip_addr_in_net_id(ip_addr, net_id):
                    self._raise_contrail_exception(
                        409, exceptions.IpAddressInUse(net_id=net_id,
                                                       ip_address=ip_addr))
    return port_obj
#end _port_neutron_to_vnc
def _port_vnc_to_neutron(self, port_obj, port_req_memo=None):
    """Convert a vnc VirtualMachineInterface into a neutron port dict.

    port_req_memo caches network/subnet (and possibly instance-ip)
    reads so bulk port listings do not re-read the same objects.
    """
    port_q_dict = {}
    extra_dict = {}
    extra_dict['contrail:fq_name'] = port_obj.get_fq_name()
    if not port_obj.display_name:
        # for ports created directly via vnc_api
        port_q_dict['name'] = port_obj.get_fq_name()[-1]
    else:
        port_q_dict['name'] = port_obj.display_name
    port_q_dict['id'] = port_obj.uuid
    net_refs = port_obj.get_virtual_network_refs()
    if net_refs:
        net_id = net_refs[0]['uuid']
    else:
        # TODO hack to force network_id on default port
        # as neutron needs it
        net_id = self._vnc_lib.obj_to_id(VirtualNetwork())
    if port_req_memo is None:
        # create a memo only for this port's conversion in this method
        port_req_memo = {}
    if 'networks' not in port_req_memo:
        port_req_memo['networks'] = {}
    if 'subnets' not in port_req_memo:
        port_req_memo['subnets'] = {}
    try:
        net_obj = port_req_memo['networks'][net_id]
    except KeyError:
        # cache miss: read and remember the network and its subnets
        net_obj = self._virtual_network_read(net_id=net_id)
        port_req_memo['networks'][net_id] = net_obj
        subnets_info = self._virtual_network_to_subnets(net_obj)
        port_req_memo['subnets'][net_id] = subnets_info
    # tenant comes from the port's parent project, or the network's
    # project when the port is not project-parented
    if port_obj.parent_type != "project":
        proj_id = net_obj.parent_uuid.replace('-', '')
    else:
        proj_id = port_obj.parent_uuid.replace('-', '')
    self._set_obj_tenant_id(port_obj.uuid, proj_id)
    port_q_dict['tenant_id'] = proj_id
    port_q_dict['network_id'] = net_id
    # TODO RHS below may need fixing
    port_q_dict['mac_address'] = ''
    mac_refs = port_obj.get_virtual_machine_interface_mac_addresses()
    if mac_refs:
        port_q_dict['mac_address'] = mac_refs.mac_address[0]
    dhcp_options_list = \
        port_obj.get_virtual_machine_interface_dhcp_option_list()
    if dhcp_options_list and dhcp_options_list.dhcp_option:
        dhcp_options = []
        for dhcp_option in dhcp_options_list.dhcp_option:
            pair = {"opt_value": dhcp_option.dhcp_option_value,
                    "opt_name": dhcp_option.dhcp_option_name}
            dhcp_options.append(pair)
        port_q_dict['extra_dhcp_opts'] = dhcp_options
    allowed_address_pairs = \
        port_obj.get_virtual_machine_interface_allowed_address_pairs()
    if allowed_address_pairs and allowed_address_pairs.allowed_address_pair:
        address_pairs = []
        for aap in allowed_address_pairs.allowed_address_pair:
            pair = {"ip_address": '%s/%s' % (aap.ip.get_ip_prefix(),
                                             aap.ip.get_ip_prefix_len()),
                    "mac_address": aap.mac}
            address_pairs.append(pair)
        port_q_dict['allowed_address_pairs'] = address_pairs
    port_q_dict['fixed_ips'] = []
    ip_back_refs = getattr(port_obj, 'instance_ip_back_refs', None)
    if ip_back_refs:
        for ip_back_ref in ip_back_refs:
            iip_uuid = ip_back_ref['uuid']
            # fetch it from request context cache/memo if there.
            # NOTE(review): nothing in this method populates
            # port_req_memo['instance-ips']; a caller presumably
            # pre-seeds it -- confirm
            try:
                ip_obj = port_req_memo['instance-ips'][iip_uuid]
            except KeyError:
                try:
                    ip_obj = self._instance_ip_read(
                        instance_ip_id=ip_back_ref['uuid'])
                except NoIdError:
                    # stale back-ref; skip this address
                    continue
            ip_addr = ip_obj.get_instance_ip_address()
            ip_q_dict = {}
            ip_q_dict['port_id'] = port_obj.uuid
            ip_q_dict['ip_address'] = ip_addr
            ip_q_dict['subnet_id'] = self._ip_address_to_subnet_id(ip_addr,
                                                                   net_obj)
            ip_q_dict['net_id'] = net_id
            port_q_dict['fixed_ips'].append(ip_q_dict)
    port_q_dict['security_groups'] = []
    sg_refs = port_obj.get_security_group_refs()
    for sg_ref in sg_refs or []:
        port_q_dict['security_groups'].append(sg_ref['uuid'])
    port_q_dict['admin_state_up'] = port_obj.get_id_perms().enable
    # port can be router interface or vm interface
    # for perf read logical_router_back_ref only when we have to
    port_parent_name = port_obj.parent_name
    router_refs = getattr(port_obj, 'logical_router_back_refs', None)
    if router_refs is not None:
        port_q_dict['device_id'] = router_refs[0]['uuid']
    elif port_obj.parent_type == 'virtual-machine':
        port_q_dict['device_id'] = port_obj.parent_name
    elif port_obj.get_virtual_machine_refs() is not None:
        port_q_dict['device_id'] = \
            port_obj.get_virtual_machine_refs()[0]['to'][-1]
    else:
        port_q_dict['device_id'] = ''
    port_q_dict['device_owner'] = \
        port_obj.get_virtual_machine_interface_device_owner();
    # a port with a device is considered ACTIVE
    if port_q_dict['device_id']:
        port_q_dict['status'] = constants.PORT_STATUS_ACTIVE
    else:
        port_q_dict['status'] = constants.PORT_STATUS_DOWN
    if self._contrail_extensions_enabled:
        port_q_dict.update(extra_dict)
    return port_q_dict
#end _port_vnc_to_neutron
# public methods
# network api handlers
def network_create(self, network_q):
    """Create a virtual network from a neutron dict and return its
    neutron representation (also stored in the local cache)."""
    net_obj = self._network_neutron_to_vnc(network_q, CREATE)
    try:
        net_uuid = self._resource_create('virtual_network', net_obj)
    except RefsExistError:
        self._raise_contrail_exception(
            400, exceptions.BadRequest(resource='network',
                                       msg='Network Already exists'))
    # external networks implicitly get a floating-ip pool
    if net_obj.router_external:
        self._floating_ip_pool_create(
            FloatingIpPool('floating-ip-pool', net_obj))
    ret_network_q = self._network_vnc_to_neutron(net_obj, net_repr='SHOW')
    self._db_cache['q_networks'][net_uuid] = ret_network_q
    return ret_network_q
#end network_create
def network_read(self, net_uuid, fields=None):
    """Return the neutron dict for one network; 404 if it is unknown.

    `fields` is accepted for API compatibility but is not used here.
    """
    try:
        net_obj = self._network_read(net_uuid)
    except NoIdError:
        self._raise_contrail_exception(
            404, exceptions.NetworkNotFound(net_id=net_uuid))
    return self._network_vnc_to_neutron(net_obj, net_repr='SHOW')
#end network_read
def network_update(self, net_id, network_q):
    """Update a virtual network from a neutron network dict.

    Handles the router:external transition: turning it on creates the
    implicit floating-ip pool; turning it off deletes existing pools
    (409 if any floating ip is still allocated from one).
    """
    net_obj = self._virtual_network_read(net_id=net_id)
    router_external = net_obj.get_router_external()
    network_q['id'] = net_id
    net_obj = self._network_neutron_to_vnc(network_q, UPDATE)
    if net_obj.router_external and not router_external:
        # became external: create the implicit pool
        # (dropped an unused get_floating_ip_pools() read here)
        fip_pool_obj = FloatingIpPool('floating-ip-pool', net_obj)
        self._floating_ip_pool_create(fip_pool_obj)
    if router_external and not net_obj.router_external:
        # no longer external: remove its pools
        fip_pools = net_obj.get_floating_ip_pools()
        if fip_pools:
            for fip_pool in fip_pools:
                # narrowed the try body: only the delete can raise
                pool_id = fip_pool['uuid']
                try:
                    self._floating_ip_pool_delete(fip_pool_id=pool_id)
                except RefsExistError:
                    # a floating ip is still allocated from this pool
                    self._raise_contrail_exception(
                        409, exceptions.NetworkInUse(net_id=net_id))
    self._virtual_network_update(net_obj)
    ret_network_q = self._network_vnc_to_neutron(net_obj, net_repr='SHOW')
    self._db_cache['q_networks'][net_id] = ret_network_q
    return ret_network_q
#end network_update
def network_delete(self, net_id):
    """Delete a virtual network and evict it from the local cache.

    The initial read preserves the original behavior of surfacing a
    lookup error before attempting the delete; its result was never
    used, so the unused binding was dropped.
    """
    self._virtual_network_read(net_id=net_id)
    self._virtual_network_delete(net_id=net_id)
    try:
        del self._db_cache['q_networks'][net_id]
    except KeyError:
        # never cached; nothing to evict
        pass
#end network_delete
# TODO request based on filter contents
    def network_list(self, context=None, filters=None):
        """List networks visible under *context* that match *filters*.

        Two phases: "collect" gathers candidate vnc network objects
        (scoped to the tenant plus shared/external networks for
        non-admins), then "prune" drops candidates failing the remaining
        filters. Some filter combinations populate ret_dict directly and
        skip pruning for those entries.

        :param context: request context dict ('tenant', 'is_admin') or None.
        :param filters: neutron filter dict (id, name, shared,
            router:external, tenant_id, contrail:fq_name) or None.
        :returns: list of neutron network dicts (LIST representation).
        """
        ret_dict = {}
        def _collect_without_prune(net_ids):
            # Read the given ids straight into ret_dict; unknown ids are
            # silently skipped.
            for net_id in net_ids:
                try:
                    net_obj = self._network_read(net_id)
                    net_info = self._network_vnc_to_neutron(net_obj,
                                                            net_repr='LIST')
                    ret_dict[net_id] = net_info
                except NoIdError:
                    pass
        #end _collect_without_prune
        # collect phase
        all_net_objs = []  # all n/ws in all projects
        if context and not context['is_admin']:
            # non-admin: restrict to own project + shared/external nets
            if filters and 'id' in filters:
                _collect_without_prune(filters['id'])
            elif filters and 'name' in filters:
                net_objs = self._network_list_project(context['tenant'])
                all_net_objs.extend(net_objs)
                all_net_objs.extend(self._network_list_shared())
                all_net_objs.extend(self._network_list_router_external())
            elif (filters and 'shared' in filters and filters['shared'][0] and
                  'router:external' not in filters):
                all_net_objs.extend(self._network_list_shared())
            elif (filters and 'router:external' in filters and
                  'shared' not in filters):
                all_net_objs.extend(self._network_list_router_external())
            elif (filters and 'router:external' in filters and
                  'shared' in filters):
                all_net_objs.extend(self._network_list_shared_and_ext())
            else:
                project_uuid = str(uuid.UUID(context['tenant']))
                if not filters:
                    all_net_objs.extend(self._network_list_router_external())
                    all_net_objs.extend(self._network_list_shared())
                all_net_objs.extend(self._network_list_project(project_uuid))
        # admin role from here on
        elif filters and 'tenant_id' in filters:
            # project-id is present
            if 'id' in filters:
                # required networks are also specified,
                # just read and populate ret_dict
                # prune is skipped because all_net_objs is empty
                _collect_without_prune(filters['id'])
            else:
                # read all networks in project, and prune below
                proj_ids = self._validate_project_ids(context, filters['tenant_id'])
                for p_id in proj_ids:
                    all_net_objs.extend(self._network_list_project(p_id))
                if 'router:external' in filters:
                    all_net_objs.extend(self._network_list_router_external())
        elif filters and 'id' in filters:
            # required networks are specified, just read and populate ret_dict
            # prune is skipped because all_net_objs is empty
            _collect_without_prune(filters['id'])
        elif filters and 'name' in filters:
            # name filter alone: scan all projects, prune by name below
            net_objs = self._network_list_project(None)
            all_net_objs.extend(net_objs)
        elif filters and 'shared' in filters:
            if filters['shared'][0] == True:
                nets = self._network_list_shared()
                for net in nets:
                    net_info = self._network_vnc_to_neutron(net,
                                                           net_repr='LIST')
                    ret_dict[net.uuid] = net_info
        elif filters and 'router:external' in filters:
            nets = self._network_list_router_external()
            if filters['router:external'][0] == True:
                for net in nets:
                    net_info = self._network_vnc_to_neutron(net, net_repr='LIST')
                    ret_dict[net.uuid] = net_info
        else:
            # read all networks in all projects
            all_net_objs.extend(self._virtual_network_list(detail=True))
        # prune phase
        for net_obj in all_net_objs:
            if net_obj.uuid in ret_dict:
                # already collected via a direct-read path above
                continue
            net_fq_name = unicode(net_obj.get_fq_name())
            if not self._filters_is_present(filters, 'contrail:fq_name',
                                            net_fq_name):
                continue
            if not self._filters_is_present(filters, 'name',
                                            net_obj.get_display_name()):
                continue
            # normalize tri-state is_shared (None means "not set")
            if net_obj.is_shared == None:
                is_shared = False
            else:
                is_shared = net_obj.is_shared
            if not self._filters_is_present(filters, 'shared',
                                            is_shared):
                continue
            try:
                net_info = self._network_vnc_to_neutron(net_obj,
                                                        net_repr='LIST')
            except NoIdError:
                # network vanished between collect and convert
                continue
            ret_dict[net_obj.uuid] = net_info
        ret_list = []
        for net in ret_dict.values():
            ret_list.append(net)
        return ret_list
    #end network_list
def network_count(self, filters=None):
nets_info = self.network_list(filters=filters)
return len(nets_info)
#end network_count
# subnet api handlers
    def subnet_create(self, subnet_q):
        """Create a subnet on the network named by subnet_q['network_id'].

        The subnet is stored as an entry in the network's ref to a
        NetworkIpam: either the ipam named by 'contrail:ipam_fq_name',
        the project's 'default-network-ipam', or the global default.

        :param subnet_q: neutron subnet dict.
        :returns: neutron view of the created subnet.
        :raises: 400 BadRequest via bottle.abort on duplicate subnet key.
        """
        net_id = subnet_q['network_id']
        net_obj = self._virtual_network_read(net_id=net_id)
        ipam_fq_name = subnet_q.get('contrail:ipam_fq_name')
        if ipam_fq_name:
            # explicit ipam requested by fq-name triple
            domain_name, project_name, ipam_name = ipam_fq_name
            domain_obj = Domain(domain_name)
            project_obj = Project(project_name, domain_obj)
            netipam_obj = NetworkIpam(ipam_name, project_obj)
        else: # link with project's default ipam or global default ipam
            try:
                ipam_fq_name = net_obj.get_fq_name()[:-1]
                ipam_fq_name.append('default-network-ipam')
                netipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
            except NoIdError:
                # fall back to the global default ipam
                netipam_obj = NetworkIpam()
            ipam_fq_name = netipam_obj.get_fq_name()
        subnet_vnc = self._subnet_neutron_to_vnc(subnet_q)
        subnet_key = self._subnet_vnc_get_key(subnet_vnc, net_id)
        # Locate list of subnets to which this subnet has to be appended
        net_ipam_ref = None
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                if ipam_ref['to'] == ipam_fq_name:
                    net_ipam_ref = ipam_ref
                    break
        if not net_ipam_ref:
            # First link from net to this ipam
            vnsn_data = VnSubnetsType([subnet_vnc])
            net_obj.add_network_ipam(netipam_obj, vnsn_data)
        else: # virtual-network already linked to this ipam
            for subnet in net_ipam_ref['attr'].get_ipam_subnets():
                if subnet_key == self._subnet_vnc_get_key(subnet, net_id):
                    existing_sn_id = self._subnet_vnc_read_mapping(key=subnet_key)
                    # duplicate !!
                    data = {'subnet_cidr': subnet_q['cidr'],
                            'sub_id': existing_sn_id}
                    msg = (_("Cidr %(subnet_cidr)s "
                             "overlaps with another subnet"
                             "of subnet %(sub_id)s") % data)
                    exc_info = {'type': 'BadRequest',
                                'message': msg}
                    bottle.abort(400, json.dumps(exc_info))
                    # NOTE(review): unreachable after abort — presumably a
                    # leftover return-existing-subnet path; confirm intent.
                    subnet_info = self._subnet_vnc_to_neutron(subnet,
                                                              net_obj,
                                                              ipam_fq_name)
                    return subnet_info
            vnsn_data = net_ipam_ref['attr']
            vnsn_data.ipam_subnets.append(subnet_vnc)
            # TODO: Add 'ref_update' API that will set this field
            net_obj._pending_field_updates.add('network_ipam_refs')
        self._virtual_network_update(net_obj)
        # allocate an id to the subnet and store mapping with
        # api-server
        subnet_id = str(uuid.uuid4())
        self._subnet_vnc_create_mapping(subnet_id, subnet_key)
        # Read in subnet from server to get updated values for gw etc.
        subnet_vnc = self._subnet_read(net_obj.uuid, subnet_key)
        subnet_info = self._subnet_vnc_to_neutron(subnet_vnc, net_obj,
                                                  ipam_fq_name)
        #self._db_cache['q_subnets'][subnet_id] = subnet_info
        return subnet_info
    #end subnet_create
    def subnet_read(self, subnet_id):
        """Read one subnet by neutron id.

        The id->key mapping gives "<net-id> <prefix>"; the subnet itself
        is found by scanning the network's ipam refs for a matching key.

        :param subnet_id: neutron subnet UUID.
        :returns: neutron subnet dict, or {} when the key no longer
            matches any subnet on the network.
        :raises: 404 SubnetNotFound when the owning network is gone.
        """
        subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
        net_id = subnet_key.split()[0]
        try:
            net_obj = self._network_read(net_id)
        except NoIdError:
            self._raise_contrail_exception(404, exceptions.SubnetNotFound(subnet_id=subnet_id))
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnet_vncs:
                    if self._subnet_vnc_get_key(subnet_vnc, net_id) == \
                        subnet_key:
                        ret_subnet_q = self._subnet_vnc_to_neutron(
                            subnet_vnc, net_obj, ipam_ref['to'])
                        # cache the fresh representation before returning
                        self._db_cache['q_subnets'][subnet_id] = ret_subnet_q
                        return ret_subnet_q
        return {}
    #end subnet_read
    def subnet_update(self, subnet_id, subnet_q):
        """Update mutable fields of a subnet (name, dhcp, dns, routes).

        gateway_ip and allocation_pools updates are rejected with 400.

        :param subnet_id: neutron subnet UUID.
        :param subnet_q: neutron subnet dict with the changed fields.
        :returns: updated neutron subnet dict, or {} when the subnet key
            no longer matches anything on the network.
        """
        if 'gateway_ip' in subnet_q:
            if subnet_q['gateway_ip'] != None:
                exc_info = {'type': 'BadRequest',
                            'message': "update of gateway is not supported"}
                bottle.abort(400, json.dumps(exc_info))
        if 'allocation_pools' in subnet_q:
            if subnet_q['allocation_pools'] != None:
                exc_info = {'type': 'BadRequest',
                            'message': "update of allocation_pools is not allowed"}
                bottle.abort(400, json.dumps(exc_info))
        subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
        net_id = subnet_key.split()[0]
        net_obj = self._network_read(net_id)
        ipam_refs = net_obj.get_network_ipam_refs()
        subnet_found = False
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnets = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnets:
                    if self._subnet_vnc_get_key(subnet_vnc,
                                                net_id) == subnet_key:
                        subnet_found = True
                        break
                if subnet_found:
                    if 'name' in subnet_q:
                        if subnet_q['name'] != None:
                            subnet_vnc.set_subnet_name(subnet_q['name'])
                    # NOTE(review): dead branch — gateway_ip updates are
                    # rejected with 400 at the top of this method.
                    if 'gateway_ip' in subnet_q:
                        if subnet_q['gateway_ip'] != None:
                            subnet_vnc.set_default_gateway(subnet_q['gateway_ip'])
                    if 'enable_dhcp' in subnet_q:
                        if subnet_q['enable_dhcp'] != None:
                            subnet_vnc.set_enable_dhcp(subnet_q['enable_dhcp'])
                    if 'dns_nameservers' in subnet_q:
                        if subnet_q['dns_nameservers'] != None:
                            # DHCP option 6 carries DNS server addresses
                            dhcp_options=[]
                            for dns_server in subnet_q['dns_nameservers']:
                                dhcp_options.append(DhcpOptionType(dhcp_option_name='6',
                                                                   dhcp_option_value=dns_server))
                            if dhcp_options:
                                subnet_vnc.set_dhcp_option_list(DhcpOptionsListType(dhcp_options))
                            else:
                                subnet_vnc.set_dhcp_option_list(None)
                    if 'host_routes' in subnet_q:
                        if subnet_q['host_routes'] != None:
                            host_routes=[]
                            for host_route in subnet_q['host_routes']:
                                host_routes.append(RouteType(prefix=host_route['destination'],
                                                             next_hop=host_route['nexthop']))
                            if host_routes:
                                subnet_vnc.set_host_routes(RouteTableType(host_routes))
                            else:
                                subnet_vnc.set_host_routes(None)
                    # push the modified ipam ref back to the api-server
                    net_obj._pending_field_updates.add('network_ipam_refs')
                    self._virtual_network_update(net_obj)
                    ret_subnet_q = self._subnet_vnc_to_neutron(
                        subnet_vnc, net_obj, ipam_ref['to'])
                    self._db_cache['q_subnets'][subnet_id] = ret_subnet_q
                    return ret_subnet_q
        return {}
    # end subnet_update
    def subnet_delete(self, subnet_id):
        """Delete one subnet by removing its entry from the owning
        network's ipam ref, then drop the id<->key mapping and cache.

        :param subnet_id: neutron subnet UUID.
        :raises: 409 SubnetInUse when references still exist.
        """
        subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
        net_id = subnet_key.split()[0]
        net_obj = self._network_read(net_id)
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                orig_subnets = ipam_ref['attr'].get_ipam_subnets()
                # keep every subnet whose key differs from the target
                new_subnets = [subnet_vnc for subnet_vnc in orig_subnets
                               if self._subnet_vnc_get_key(subnet_vnc,
                                   net_id) != subnet_key]
                if len(orig_subnets) != len(new_subnets):
                    # matched subnet to be deleted
                    ipam_ref['attr'].set_ipam_subnets(new_subnets)
                    net_obj._pending_field_updates.add('network_ipam_refs')
                    try:
                        self._virtual_network_update(net_obj)
                    except RefsExistError:
                        self._raise_contrail_exception(409, exceptions.SubnetInUse(subnet_id=subnet_id))
                    self._subnet_vnc_delete_mapping(subnet_id, subnet_key)
                    try:
                        del self._db_cache['q_subnets'][subnet_id]
                    except KeyError:
                        # never cached; nothing to evict
                        pass
                    return
    #end subnet_delete
    def subnets_list(self, context, filters=None):
        """List subnets visible under *context* matching *filters*.

        Subnets are enumerated by walking the ipam refs of candidate
        networks (by explicit id filter, or project + shared networks).

        :param context: request context dict ('tenant', 'is_admin').
        :param filters: neutron filter dict (id, tenant_id, network_id,
            name, shared) or None.
        :returns: list of neutron subnet dicts.
        """
        ret_subnets = []
        all_net_objs = []
        if filters and 'id' in filters:
            # required subnets are specified,
            # just read in corresponding net_ids
            net_ids = []
            for subnet_id in filters['id']:
                subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
                net_id = subnet_key.split()[0]
                net_ids.append(net_id)
            all_net_objs.extend(self._virtual_network_list(obj_uuids=net_ids,
                                                           detail=True))
        else:
            # non-admins only see their own project's networks (+ shared)
            if not context['is_admin']:
                proj_id = context['tenant']
            else:
                proj_id = None
            net_objs = self._network_list_project(proj_id)
            all_net_objs.extend(net_objs)
            net_objs = self._network_list_shared()
            all_net_objs.extend(net_objs)
        ret_dict = {}
        for net_obj in all_net_objs:
            if net_obj.uuid in ret_dict:
                # same network collected twice (project + shared lists)
                continue
            ret_dict[net_obj.uuid] = 1
            ipam_refs = net_obj.get_network_ipam_refs()
            if ipam_refs:
                for ipam_ref in ipam_refs:
                    subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                    for subnet_vnc in subnet_vncs:
                        sn_info = self._subnet_vnc_to_neutron(subnet_vnc,
                                                              net_obj,
                                                              ipam_ref['to'])
                        sn_id = sn_info['id']
                        sn_proj_id = sn_info['tenant_id']
                        sn_net_id = sn_info['network_id']
                        sn_name = sn_info['name']
                        if (filters and 'shared' in filters and
                            filters['shared'][0] == True):
                            # shared=True: only subnets of shared networks
                            if not net_obj.is_shared:
                                continue
                        elif filters:
                            if not self._filters_is_present(filters, 'id',
                                                            sn_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'tenant_id',
                                                            sn_proj_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'network_id',
                                                            sn_net_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'name',
                                                            sn_name):
                                continue
                        ret_subnets.append(sn_info)
        return ret_subnets
    #end subnets_list
def subnets_count(self, context, filters=None):
subnets_info = self.subnets_list(context, filters)
return len(subnets_info)
#end subnets_count
# ipam api handlers
def ipam_create(self, ipam_q):
# TODO remove below once api-server can read and create projects
# from keystone on startup
#self._ensure_project_exists(ipam_q['tenant_id'])
ipam_obj = self._ipam_neutron_to_vnc(ipam_q, CREATE)
ipam_uuid = self._vnc_lib.network_ipam_create(ipam_obj)
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_create
def ipam_read(self, ipam_id):
try:
ipam_obj = self._vnc_lib.network_ipam_read(id=ipam_id)
except NoIdError:
# TODO add ipam specific exception
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=ipam_id))
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_read
def ipam_update(self, ipam_id, ipam_q):
ipam_q['id'] = ipam_id
ipam_obj = self._ipam_neutron_to_vnc(ipam_q, UPDATE)
self._vnc_lib.network_ipam_update(ipam_obj)
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_update
def ipam_delete(self, ipam_id):
self._vnc_lib.network_ipam_delete(id=ipam_id)
#end ipam_delete
# TODO request based on filter contents
    def ipam_list(self, context=None, filters=None):
        """List IPAMs across the requested projects (or all projects).

        :param context: request context, used to validate tenant_id filters.
        :param filters: neutron filter dict (tenant_id, id) or None.
        :returns: list of neutron ipam dicts.
        """
        ret_list = []
        # collect phase
        all_ipams = []  # all ipams in all projects
        if filters and 'tenant_id' in filters:
            project_ids = self._validate_project_ids(context,
                                                     filters['tenant_id'])
            for p_id in project_ids:
                project_ipams = self._ipam_list_project(p_id)
                all_ipams.append(project_ipams)
        else:  # no filters
            dom_projects = self._project_list_domain(None)
            for project in dom_projects:
                proj_id = project['uuid']
                project_ipams = self._ipam_list_project(proj_id)
                all_ipams.append(project_ipams)
        # prune phase
        for project_ipams in all_ipams:
            for proj_ipam in project_ipams:
                # TODO implement same for name specified in filter
                proj_ipam_id = proj_ipam['uuid']
                if not self._filters_is_present(filters, 'id', proj_ipam_id):
                    continue
                ipam_info = self.ipam_read(proj_ipam['uuid'])
                ret_list.append(ipam_info)
        return ret_list
    #end ipam_list
def ipam_count(self, filters=None):
ipam_info = self.ipam_list(filters=filters)
return len(ipam_info)
#end ipam_count
# policy api handlers
def policy_create(self, policy_q):
# TODO remove below once api-server can read and create projects
# from keystone on startup
#self._ensure_project_exists(policy_q['tenant_id'])
policy_obj = self._policy_neutron_to_vnc(policy_q, CREATE)
policy_uuid = self._vnc_lib.network_policy_create(policy_obj)
return self._policy_vnc_to_neutron(policy_obj)
#end policy_create
    def policy_read(self, policy_id):
        """Read one network policy by id.

        :raises policy.PolicyNotFound: when the id is unknown.
            NOTE(review): sibling *_read methods go through
            _raise_contrail_exception — confirm this direct raise is
            intentional.
        """
        try:
            policy_obj = self._vnc_lib.network_policy_read(id=policy_id)
        except NoIdError:
            raise policy.PolicyNotFound(id=policy_id)
        return self._policy_vnc_to_neutron(policy_obj)
    #end policy_read
def policy_update(self, policy_id, policy):
policy_q = policy
policy_q['id'] = policy_id
policy_obj = self._policy_neutron_to_vnc(policy_q, UPDATE)
self._vnc_lib.network_policy_update(policy_obj)
return self._policy_vnc_to_neutron(policy_obj)
#end policy_update
def policy_delete(self, policy_id):
self._vnc_lib.network_policy_delete(id=policy_id)
#end policy_delete
# TODO request based on filter contents
    def policy_list(self, context=None, filters=None):
        """List network policies across requested (or all) projects.

        :param context: request context, used to validate tenant_id filters.
        :param filters: neutron filter dict (tenant_id, id) or None.
        :returns: list of neutron policy dicts.
        """
        ret_list = []
        # collect phase
        all_policys = []  # all policys in all projects
        if filters and 'tenant_id' in filters:
            project_ids = self._validate_project_ids(context,
                                                     filters['tenant_id'])
            for p_id in project_ids:
                project_policys = self._policy_list_project(p_id)
                all_policys.append(project_policys)
        else:  # no filters
            dom_projects = self._project_list_domain(None)
            for project in dom_projects:
                proj_id = project['uuid']
                project_policys = self._policy_list_project(proj_id)
                all_policys.append(project_policys)
        # prune phase
        for project_policys in all_policys:
            for proj_policy in project_policys:
                # TODO implement same for name specified in filter
                proj_policy_id = proj_policy['uuid']
                if not self._filters_is_present(filters, 'id', proj_policy_id):
                    continue
                policy_info = self.policy_read(proj_policy['uuid'])
                ret_list.append(policy_info)
        return ret_list
    #end policy_list
def policy_count(self, filters=None):
policy_info = self.policy_list(filters=filters)
return len(policy_info)
#end policy_count
def _router_add_gateway(self, router_q, rtr_obj):
ext_gateway = router_q.get('external_gateway_info', None)
old_ext_gateway = rtr_obj.get_virtual_network_refs()
if ext_gateway or old_ext_gateway:
network_id = ext_gateway.get('network_id', None)
if network_id:
if old_ext_gateway and network_id == old_ext_gateway[0]['uuid']:
return
try:
net_obj = self._virtual_network_read(net_id=network_id)
if not net_obj.get_router_external():
exc_info = {'type': 'BadRequest',
'message': "Network %s is not a valid external network" % network_id}
bottle.abort(400, json.dumps(exc_info))
except NoIdError:
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=network_id))
self._router_set_external_gateway(rtr_obj, net_obj)
else:
self._router_clear_external_gateway(rtr_obj)
    def _router_set_external_gateway(self, router_obj, ext_net_obj):
        """Set *ext_net_obj* as the router's external gateway.

        Creates (or updates) the netns SNAT service instance and a route
        table with a 0.0.0.0/0 route through it, attaches that route
        table to every private network connected to the router, and
        finally links the router to the external network.

        :param router_obj: LogicalRouter vnc object.
        :param ext_net_obj: external VirtualNetwork vnc object.
        :raises: 400 BadRequest when the SNAT service template is missing.
        """
        project_obj = self._project_read(proj_id=router_obj.parent_uuid)
        # Get netns SNAT service template
        try:
            st_obj = self._vnc_lib.service_template_read(
                fq_name=SNAT_SERVICE_TEMPLATE_FQ_NAME)
        except NoIdError:
            msg = _("Unable to set or clear the default gateway")
            exc_info = {'type': 'BadRequest', 'message': msg}
            bottle.abort(400, json.dumps(exc_info))
        # Get the service instance if it exists
        si_name = 'si_' + router_obj.uuid
        si_fq_name = project_obj.get_fq_name() + [si_name]
        try:
            si_obj = self._vnc_lib.service_instance_read(fq_name=si_fq_name)
            si_uuid = si_obj.uuid
        except NoIdError:
            si_obj = None
        # Get route table for default route it it exists
        rt_name = 'rt_' + router_obj.uuid
        rt_fq_name = project_obj.get_fq_name() + [rt_name]
        try:
            rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
            rt_uuid = rt_obj.uuid
        except NoIdError:
            rt_obj = None
        # Set the service instance
        si_created = False
        if not si_obj:
            si_obj = ServiceInstance(si_name, parent_obj=project_obj)
            si_created = True
        #TODO(ethuleau): For the fail-over SNAT set scale out to 2
        si_prop_obj = ServiceInstanceType(
            right_virtual_network=ext_net_obj.get_fq_name_str(),
            scale_out=ServiceScaleOutType(max_instances=1,
                                          auto_scale=True),
            auto_policy=True)
        si_obj.set_service_instance_properties(si_prop_obj)
        si_obj.set_service_template(st_obj)
        if si_created:
            si_uuid = self._vnc_lib.service_instance_create(si_obj)
        else:
            self._vnc_lib.service_instance_update(si_obj)
        # Set the route table
        route_obj = RouteType(prefix="0.0.0.0/0",
                              next_hop=si_obj.get_fq_name_str())
        rt_created = False
        if not rt_obj:
            rt_obj = RouteTable(name=rt_name, parent_obj=project_obj)
            rt_created = True
        rt_obj.set_routes(RouteTableType.factory([route_obj]))
        if rt_created:
            rt_uuid = self._vnc_lib.route_table_create(rt_obj)
        else:
            self._vnc_lib.route_table_update(rt_obj)
        # Associate route table to all private networks connected onto
        # that router
        for intf in router_obj.get_virtual_machine_interface_refs() or []:
            port_id = intf['uuid']
            net_id = self.port_read(port_id)['network_id']
            try:
                net_obj = self._vnc_lib.virtual_network_read(id=net_id)
            except NoIdError:
                self._raise_contrail_exception(
                    404, exceptions.NetworkNotFound(net_id=net_id))
            net_obj.set_route_table(rt_obj)
            self._vnc_lib.virtual_network_update(net_obj)
        # Add logical gateway virtual network
        router_obj.set_virtual_network(ext_net_obj)
        self._vnc_lib.logical_router_update(router_obj)
    def _router_clear_external_gateway(self, router_obj):
        """Remove the router's external gateway, if any.

        Detaches and deletes the SNAT route table, deletes the SNAT
        service instance, and clears the router's link to the external
        network. Safe to call when no gateway is set (lookups that fail
        with NoIdError are treated as "nothing to clean up").

        :param router_obj: LogicalRouter vnc object.
        """
        project_obj = self._project_read(proj_id=router_obj.parent_uuid)
        # Get the service instance if it exists
        si_name = 'si_' + router_obj.uuid
        si_fq_name = project_obj.get_fq_name() + [si_name]
        try:
            si_obj = self._vnc_lib.service_instance_read(fq_name=si_fq_name)
            si_uuid = si_obj.uuid
        except NoIdError:
            si_obj = None
        # Get route table for default route it it exists
        rt_name = 'rt_' + router_obj.uuid
        rt_fq_name = project_obj.get_fq_name() + [rt_name]
        try:
            rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
            rt_uuid = rt_obj.uuid
        except NoIdError:
            rt_obj = None
        # Delete route table
        if rt_obj:
            # Disassociate route table to all private networks connected
            # onto that router
            for net_ref in rt_obj.get_virtual_network_back_refs() or []:
                try:
                    net_obj = self._vnc_lib.virtual_network_read(
                        id=net_ref['uuid'])
                except NoIdError:
                    # network already gone; skip
                    continue
                net_obj.del_route_table(rt_obj)
                self._vnc_lib.virtual_network_update(net_obj)
            self._vnc_lib.route_table_delete(id=rt_obj.uuid)
        # Delete service instance
        if si_obj:
            self._vnc_lib.service_instance_delete(id=si_uuid)
        # Clear logical gateway virtual network
        router_obj.set_virtual_network_list([])
        self._vnc_lib.logical_router_update(router_obj)
def _set_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
rt_uuid = rt_obj.uuid
except NoIdError:
# No route table set with that router ID, the gateway is not set
return
try:
net_obj = self._vnc_lib.virtual_network_read(id=network_id)
except NoIdError:
raise exceptions.NetworkNotFound(net_id=ext_net_id)
net_obj.set_route_table(rt_obj)
self._vnc_lib.virtual_network_update(net_obj)
def _clear_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
rt_uuid = rt_obj.uuid
except NoIdError:
# No route table set with that router ID, the gateway is not set
return
try:
net_obj = self._vnc_lib.virtual_network_read(id=network_id)
except NoIdError:
raise exceptions.NetworkNotFound(net_id=ext_net_id)
net_obj.del_route_table(rt_obj)
self._vnc_lib.virtual_network_update(net_obj)
# router api handlers
def router_create(self, router_q):
#self._ensure_project_exists(router_q['tenant_id'])
rtr_obj = self._router_neutron_to_vnc(router_q, CREATE)
rtr_uuid = self._resource_create('logical_router', rtr_obj)
self._router_add_gateway(router_q, rtr_obj)
ret_router_q = self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
self._db_cache['q_routers'][rtr_uuid] = ret_router_q
return ret_router_q
#end router_create
def router_read(self, rtr_uuid, fields=None):
# see if we can return fast...
if fields and (len(fields) == 1) and fields[0] == 'tenant_id':
tenant_id = self._get_obj_tenant_id('router', rtr_uuid)
return {'id': rtr_uuid, 'tenant_id': tenant_id}
try:
rtr_obj = self._logical_router_read(rtr_uuid)
except NoIdError:
self._raise_contrail_exception(404, RouterNotFound(router_id=rtr_uuid))
return self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
#end router_read
def router_update(self, rtr_id, router_q):
router_q['id'] = rtr_id
rtr_obj = self._router_neutron_to_vnc(router_q, UPDATE)
self._logical_router_update(rtr_obj)
self._router_add_gateway(router_q, rtr_obj)
ret_router_q = self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
self._db_cache['q_routers'][rtr_id] = ret_router_q
return ret_router_q
#end router_update
    def router_delete(self, rtr_id):
        """Delete logical router *rtr_id*.

        :raises: 409 RouterInUse when interfaces are still attached,
            404 RouterNotFound when the router does not exist.
        """
        try:
            rtr_obj = self._logical_router_read(rtr_id)
            # refuse to delete while ports are still attached
            if rtr_obj.get_virtual_machine_interface_refs():
                self._raise_contrail_exception(409, RouterInUse(router_id=rtr_id))
        except NoIdError:
            self._raise_contrail_exception(404, RouterNotFound(router_id=rtr_id))
        # tear down SNAT artifacts before removing the router itself
        self._router_clear_external_gateway(rtr_obj)
        self._logical_router_delete(rtr_id=rtr_id)
        try:
            del self._db_cache['q_routers'][rtr_id]
        except KeyError:
            # never cached; nothing to evict
            pass
    #end router_delete
# TODO request based on filter contents
    def router_list(self, context=None, filters=None):
        """List logical routers matching *filters*.

        Follows the same collect/prune pattern as network_list: id
        filters are read directly into the result; tenant_id filters
        scope the collect phase; remaining filters prune.

        :param context: request context, used to validate tenant_id filters.
        :param filters: neutron filter dict (shared, tenant_id, id,
            router:external, name, contrail:fq_name) or None.
        :returns: list of neutron router dicts (LIST representation).
        """
        ret_list = []
        if filters and 'shared' in filters:
            if filters['shared'][0] == True:
                # no support for shared routers
                return ret_list
        # collect phase
        all_rtrs = []  # all n/ws in all projects
        if filters and 'tenant_id' in filters:
            # project-id is present
            if 'id' in filters:
                # required routers are also specified,
                # just read and populate ret_list
                # prune is skipped because all_rtrs is empty
                for rtr_id in filters['id']:
                    try:
                        rtr_obj = self._logical_router_read(rtr_id)
                        rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                               rtr_repr='LIST')
                        ret_list.append(rtr_info)
                    except NoIdError:
                        pass
            else:
                # read all routers in project, and prune below
                project_ids = self._validate_project_ids(context,
                                                         filters['tenant_id'])
                for p_id in project_ids:
                    if 'router:external' in filters:
                        all_rtrs.append(self._fip_pool_ref_routers(p_id))
                    else:
                        project_rtrs = self._router_list_project(p_id)
                        all_rtrs.append(project_rtrs)
        elif filters and 'id' in filters:
            # required routers are specified, just read and populate ret_list
            # prune is skipped because all_rtrs is empty
            for rtr_id in filters['id']:
                try:
                    rtr_obj = self._logical_router_read(rtr_id)
                    rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                           rtr_repr='LIST')
                    ret_list.append(rtr_info)
                except NoIdError:
                    pass
        else:
            # read all routers in all projects
            project_rtrs = self._router_list_project()
            all_rtrs.append(project_rtrs)
        # prune phase
        for project_rtrs in all_rtrs:
            for proj_rtr in project_rtrs:
                proj_rtr_id = proj_rtr['uuid']
                if not self._filters_is_present(filters, 'id', proj_rtr_id):
                    continue
                proj_rtr_fq_name = unicode(proj_rtr['fq_name'])
                if not self._filters_is_present(filters, 'contrail:fq_name',
                                                proj_rtr_fq_name):
                    continue
                try:
                    rtr_obj = self._logical_router_read(proj_rtr['uuid'])
                    rtr_name = rtr_obj.get_display_name()
                    if not self._filters_is_present(filters, 'name', rtr_name):
                        continue
                    rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                           rtr_repr='LIST')
                except NoIdError:
                    # router vanished between collect and read
                    continue
                ret_list.append(rtr_info)
        return ret_list
    #end router_list
def router_count(self, filters=None):
rtrs_info = self.router_list(filters=filters)
return len(rtrs_info)
#end router_count
    def _check_for_dup_router_subnet(self, router_id,
                                     network_id, subnet_id, subnet_cidr):
        """Reject attaching a subnet the router already serves or one
        whose CIDR overlaps an attached subnet.

        :param router_id: router being attached to.
        :param network_id: network of the candidate subnet (unused here).
        :param subnet_id: candidate subnet id.
        :param subnet_cidr: candidate subnet CIDR string.
        :raises: 400 BadRequest on duplicate subnet or overlapping CIDR;
            silently returns if the router's ports cannot be read.
        """
        try:
            rports = self.port_list(filters={'device_id': [router_id]})
            # It's possible these ports are on the same network, but
            # different subnets.
            new_ipnet = netaddr.IPNetwork(subnet_cidr)
            for p in rports:
                for ip in p['fixed_ips']:
                    if ip['subnet_id'] == subnet_id:
                        msg = (_("Router %s already has a port "
                                 "on subnet %s") % (router_id, subnet_id))
                        self._raise_contrail_exception(400,
                            exceptions.BadRequest(resource='router', msg=msg))
                    sub_id = ip['subnet_id']
                    subnet = self.subnet_read(sub_id)
                    cidr = subnet['cidr']
                    ipnet = netaddr.IPNetwork(cidr)
                    # check overlap in both directions (either net may
                    # contain the other)
                    match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
                    match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
                    if match1 or match2:
                        data = {'subnet_cidr': subnet_cidr,
                                'subnet_id': subnet_id,
                                'cidr': cidr,
                                'sub_id': sub_id}
                        msg = (_("Cidr %(subnet_cidr)s of subnet "
                                 "%(subnet_id)s overlaps with cidr %(cidr)s "
                                 "of subnet %(sub_id)s") % data)
                        exc_info = {'type': 'BadRequest',
                                    'message': msg}
                        bottle.abort(400, json.dumps(exc_info))
        except NoIdError:
            # a referenced subnet/port disappeared; treat as no duplicate
            pass
    def add_router_interface(self, context, router_id, port_id=None, subnet_id=None):
        """Attach an interface (existing port or new gateway port on a
        subnet) to a logical router.

        Exactly one of *port_id* / *subnet_id* must be given. With
        *subnet_id*, a router port is created at the subnet's gateway ip.

        :param context: request context (used for port creation).
        :param router_id: UUID of the router.
        :param port_id: existing port to attach, or None.
        :param subnet_id: subnet to create an interface port on, or None.
        :returns: dict with id, tenant_id, port_id and subnet_id.
        :raises: 409 PortInUse, 400 BadRequest on invalid combinations.
        """
        router_obj = self._logical_router_read(router_id)
        if port_id:
            port = self.port_read(port_id)
            # a port already owned by a router cannot be attached again
            if (port['device_owner'] == constants.DEVICE_OWNER_ROUTER_INTF and
                    port['device_id']):
                self._raise_contrail_exception(409, exceptions.PortInUse(net_id=port['network_id'],
                                                                         port_id=port['id'],
                                                                         device_id=port['device_id']))
            fixed_ips = [ip for ip in port['fixed_ips']]
            if len(fixed_ips) != 1:
                msg = _('Router port must have exactly one fixed IP')
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
            subnet_id = fixed_ips[0]['subnet_id']
            subnet = self.subnet_read(subnet_id)
            self._check_for_dup_router_subnet(router_id,
                                              port['network_id'],
                                              subnet['id'],
                                              subnet['cidr'])
        elif subnet_id:
            subnet = self.subnet_read(subnet_id)
            if not subnet['gateway_ip']:
                msg = _('Subnet for router interface must have a gateway IP')
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
            self._check_for_dup_router_subnet(router_id,
                                              subnet['network_id'],
                                              subnet_id,
                                              subnet['cidr'])
            # create the router interface port at the subnet gateway ip
            fixed_ip = {'ip_address': subnet['gateway_ip'],
                        'subnet_id': subnet['id']}
            port = self.port_create(context, {'tenant_id': subnet['tenant_id'],
                                              'network_id': subnet['network_id'],
                                              'fixed_ips': [fixed_ip],
                                              'admin_state_up': True,
                                              'device_id': router_id,
                                              'device_owner': constants.DEVICE_OWNER_ROUTER_INTF,
                                              'name': ''})
            port_id = port['id']
        else:
            msg = _('Either port or subnet must be specified')
            exc_info = {'type': 'BadRequest', 'message': msg}
            bottle.abort(400, json.dumps(exc_info))
        # route the private network through SNAT if a gateway is set
        self._set_snat_routing_table(router_obj, subnet['network_id'])
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=port_id)
        router_obj.add_virtual_machine_interface(vmi_obj)
        self._logical_router_update(router_obj)
        info = {'id': router_id,
                'tenant_id': subnet['tenant_id'],
                'port_id': port_id,
                'subnet_id': subnet_id}
        return info
    # end add_router_interface
    def remove_router_interface(self, router_id, port_id=None, subnet_id=None):
        """Detach an interface from a logical router by port or subnet.

        :param router_id: UUID of the router.
        :param port_id: port to detach, or None.
        :param subnet_id: subnet whose interface port to detach, or None.
        :returns: dict with id, tenant_id, port_id and subnet_id.
        :raises: 404 RouterInterfaceNotFound, 409 SubnetMismatchForPort,
            400 BadRequest when the subnet is not on this router.
        """
        router_obj = self._logical_router_read(router_id)
        subnet = None
        if port_id:
            port_db = self.port_read(port_id)
            # the port must actually be this router's interface
            if (port_db['device_owner'] != constants.DEVICE_OWNER_ROUTER_INTF
                    or port_db['device_id'] != router_id):
                self._raise_contrail_exception(404, RouterInterfaceNotFound(router_id=router_id,
                                                                            port_id=port_id))
            port_subnet_id = port_db['fixed_ips'][0]['subnet_id']
            if subnet_id and (port_subnet_id != subnet_id):
                self._raise_contrail_exception(409, exceptions.SubnetMismatchForPort(port_id=port_id,
                                                                                     subnet_id=subnet_id))
            subnet_id = port_subnet_id
            subnet = self.subnet_read(subnet_id)
            network_id = subnet['network_id']
        elif subnet_id:
            subnet = self.subnet_read(subnet_id)
            network_id = subnet['network_id']
            # find the router port serving this subnet (for/else: abort
            # when none of the attached interfaces matches)
            for intf in router_obj.get_virtual_machine_interface_refs() or []:
                port_id = intf['uuid']
                port_db = self.port_read(port_id)
                if subnet_id == port_db['fixed_ips'][0]['subnet_id']:
                    break
            else:
                msg = _('Subnet %s not connected to router %s') % (subnet_id,
                                                                   router_id)
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
        # detach the SNAT route table from the now-disconnected network
        self._clear_snat_routing_table(router_obj, subnet['network_id'])
        port_obj = self._virtual_machine_interface_read(port_id)
        router_obj.del_virtual_machine_interface(port_obj)
        self._vnc_lib.logical_router_update(router_obj)
        self.port_delete(port_id)
        info = {'id': router_id,
                'tenant_id': subnet['tenant_id'],
                'port_id': port_id,
                'subnet_id': subnet_id}
        return info
    # end remove_router_interface
# floatingip api handlers
def floatingip_create(self, fip_q):
try:
fip_obj = self._floatingip_neutron_to_vnc(fip_q, CREATE)
except Exception, e:
#logging.exception(e)
msg = _('Internal error when trying to create floating ip. '
'Please be sure the network %s is an external '
'network.') % (fip_q['floating_network_id'])
exc_info = {'type': 'BadRequest', 'message': msg}
bottle.abort(400, json.dumps(exc_info))
try:
fip_uuid = self._vnc_lib.floating_ip_create(fip_obj)
except Exception as e:
self._raise_contrail_exception(409,
exceptions.IpAddressGenerationFailure(net_id=fip_q['floating_network_id']))
fip_obj = self._vnc_lib.floating_ip_read(id=fip_uuid)
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_create
def floatingip_read(self, fip_uuid):
try:
fip_obj = self._vnc_lib.floating_ip_read(id=fip_uuid)
except NoIdError:
self._raise_contrail_exception(404, FloatingIPNotFound(floatingip_id=fip_uuid))
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_read
def floatingip_update(self, fip_id, fip_q):
fip_q['id'] = fip_id
fip_obj = self._floatingip_neutron_to_vnc(fip_q, UPDATE)
self._vnc_lib.floating_ip_update(fip_obj)
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_update
def floatingip_delete(self, fip_id):
self._vnc_lib.floating_ip_delete(id=fip_id)
#end floatingip_delete
def floatingip_list(self, context, filters=None):
# Read in floating ips with either
# - port(s) as anchor
# - project(s) as anchor
# - none as anchor (floating-ip collection)
ret_list = []
proj_ids = None
port_ids = None
if filters:
if 'tenant_id' in filters:
proj_ids = self._validate_project_ids(context,
filters['tenant_id'])
elif 'port_id' in filters:
port_ids = filters['port_id']
else: # no filters
if not context['is_admin']:
proj_ids = [str(uuid.UUID(context['tenant']))]
if port_ids:
fip_objs = self._floatingip_list(back_ref_id=port_ids)
elif proj_ids:
fip_objs = self._floatingip_list(back_ref_id=proj_ids)
else:
fip_objs = self._floatingip_list()
for fip_obj in fip_objs:
if 'floating_ip_address' in filters:
if (fip_obj.get_floating_ip_address() not in
filters['floating_ip_address']):
continue
ret_list.append(self._floatingip_vnc_to_neutron(fip_obj))
return ret_list
#end floatingip_list
def floatingip_count(self, context, filters=None):
floatingip_info = self.floatingip_list(context, filters)
return len(floatingip_info)
#end floatingip_count
def _ip_addr_in_net_id(self, ip_addr, net_id):
"""Checks if ip address is present in net-id."""
net_ip_list = [ipobj.get_instance_ip_address() for ipobj in
self._instance_ip_list(back_ref_id=[net_id])]
return ip_addr in net_ip_list
def _create_instance_ip(self, net_obj, port_obj, ip_addr=None):
ip_name = str(uuid.uuid4())
ip_obj = InstanceIp(name=ip_name)
ip_obj.uuid = ip_name
ip_obj.set_virtual_machine_interface(port_obj)
ip_obj.set_virtual_network(net_obj)
if ip_addr:
ip_obj.set_instance_ip_address(ip_addr)
ip_id = self._instance_ip_create(ip_obj)
return ip_id
# end _create_instance_ip
def _port_create_instance_ip(self, net_obj, port_obj, port_q):
created_iip_ids = []
fixed_ips = port_q.get('fixed_ips')
if fixed_ips is None:
return
for fixed_ip in fixed_ips:
try:
ip_addr = fixed_ip.get('ip_address')
subnet_id = fixed_ip.get('subnet_id')
if not ip_addr and 'subnet_id' in fixed_ip:
subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
ip_addr = self._vnc_lib.virtual_network_ip_alloc(net_obj,
subnet=subnet_key.split()[1])[0]
ip_id = self._create_instance_ip(net_obj, port_obj, ip_addr)
created_iip_ids.append(ip_id)
except Exception as e:
# Resources are not available
for iip_id in created_iip_ids:
self._instance_ip_delete(instance_ip_id=iip_id)
self._raise_contrail_exception(409,
exceptions.IpAddressGenerationFailure(net_id=net_obj.uuid))
for iip in getattr(port_obj, 'instance_ip_back_refs', []):
if iip['uuid'] not in created_iip_ids:
iip_obj = self._instance_ip_delete(instance_ip_id=iip['uuid'])
# end _port_create_instance_ip
# port api handlers
def port_create(self, context, port_q):
net_id = port_q['network_id']
net_obj = self._network_read(net_id)
tenant_id = self._get_tenant_id_for_create(context, port_q);
proj_id = str(uuid.UUID(tenant_id))
# initialize port object
port_obj = self._port_neutron_to_vnc(port_q, net_obj, CREATE)
# create the object
port_id = self._resource_create('virtual_machine_interface', port_obj)
if 'fixed_ips' in port_q:
self._port_create_instance_ip(net_obj, port_obj, port_q)
elif net_obj.get_network_ipam_refs():
self._port_create_instance_ip(net_obj, port_obj,
{'fixed_ips':[{'ip_address': None}]})
# TODO below reads back default parent name, fix it
port_obj = self._virtual_machine_interface_read(port_id=port_id)
ret_port_q = self._port_vnc_to_neutron(port_obj)
self._set_obj_tenant_id(port_id, proj_id)
# update cache on successful creation
tenant_id = proj_id.replace('-', '')
if tenant_id not in self._db_cache['q_tenant_port_count']:
ncurports = self.port_count({'tenant_id': tenant_id})
else:
ncurports = self._db_cache['q_tenant_port_count'][tenant_id]
self._db_cache['q_tenant_port_count'][tenant_id] = ncurports + 1
return ret_port_q
#end port_create
# TODO add obj param and let caller use below only as a converter
def port_read(self, port_id):
try:
port_obj = self._virtual_machine_interface_read(port_id=port_id)
except NoIdError:
self._raise_contrail_exception(404, exceptions.PortNotFound(port_id=port_id))
ret_port_q = self._port_vnc_to_neutron(port_obj)
self._db_cache['q_ports'][port_id] = ret_port_q
return ret_port_q
#end port_read
def port_update(self, port_id, port_q):
# if ip address passed then use it
req_ip_addrs = []
req_ip_subnets = []
port_q['id'] = port_id
port_obj = self._port_neutron_to_vnc(port_q, None, UPDATE)
net_id = port_obj.get_virtual_network_refs()[0]['uuid']
net_obj = self._network_read(net_id)
self._virtual_machine_interface_update(port_obj)
self._port_create_instance_ip(net_obj, port_obj, port_q)
ret_port_q = self._port_vnc_to_neutron(port_obj)
port_obj = self._virtual_machine_interface_read(port_id=port_id)
self._db_cache['q_ports'][port_id] = ret_port_q
return ret_port_q
#end port_update
def port_delete(self, port_id):
port_obj = self._port_neutron_to_vnc({'id': port_id}, None, DELETE)
if port_obj.parent_type == 'virtual-machine':
instance_id = port_obj.parent_uuid
else:
vm_refs = port_obj.get_virtual_machine_refs()
if vm_refs:
instance_id = vm_refs[0]['uuid']
else:
instance_id = None
if port_obj.get_logical_router_back_refs():
self._raise_contrail_exception(409, L3PortInUse(port_id=port_id,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF))
if port_obj.get_logical_router_back_refs():
self._raise_contrail_exception(409, L3PortInUse(port_id=port_id,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF))
# release instance IP address
iip_back_refs = getattr(port_obj, 'instance_ip_back_refs', None)
if iip_back_refs:
for iip_back_ref in iip_back_refs:
# if name contains IP address then this is shared ip
iip_obj = self._vnc_lib.instance_ip_read(
id=iip_back_ref['uuid'])
# in case of shared ip only delete the link to the VMI
if len(iip_obj.name.split(' ')) > 1:
iip_obj.del_virtual_machine_interface(port_obj)
self._instance_ip_update(iip_obj)
else:
self._instance_ip_delete(
instance_ip_id=iip_back_ref['uuid'])
# disassociate any floating IP used by instance
fip_back_refs = getattr(port_obj, 'floating_ip_back_refs', None)
if fip_back_refs:
for fip_back_ref in fip_back_refs:
self.floatingip_update(fip_back_ref['uuid'], {'port_id': None})
tenant_id = self._get_obj_tenant_id('port', port_id)
self._virtual_machine_interface_delete(port_id=port_id)
# delete instance if this was the last port
try:
if instance_id:
self._vnc_lib.virtual_machine_delete(id=instance_id)
except RefsExistError:
pass
try:
del self._db_cache['q_ports'][port_id]
except KeyError:
pass
# update cache on successful deletion
try:
self._db_cache['q_tenant_port_count'][tenant_id] -= 1
except KeyError:
pass
self._del_obj_tenant_id(port_id)
#end port_delete
def port_list(self, context=None, filters=None):
project_obj = None
ret_q_ports = []
all_project_ids = []
# TODO used to find dhcp server field. support later...
if (filters.get('device_owner') == 'network:dhcp' or
'network:dhcp' in filters.get('device_owner', [])):
return ret_q_ports
if not 'device_id' in filters:
# Listing from back references
if not filters:
# TODO once vmi is linked to project in schema, use project_id
# to limit scope of list
if not context['is_admin']:
project_id = str(uuid.UUID(context['tenant']))
else:
project_id = None
# read all VMI and IIP in detail one-shot
if self._list_optimization_enabled:
all_port_gevent = gevent.spawn(self._virtual_machine_interface_list,
parent_id=project_id)
else:
all_port_gevent = gevent.spawn(self._virtual_machine_interface_list)
port_iip_gevent = gevent.spawn(self._instance_ip_list)
port_net_gevent = gevent.spawn(self._virtual_network_list,
parent_id=project_id,
detail=True)
gevent.joinall([all_port_gevent, port_iip_gevent, port_net_gevent])
all_port_objs = all_port_gevent.value
port_iip_objs = port_iip_gevent.value
port_net_objs = port_net_gevent.value
ret_q_ports = self._port_list(port_net_objs, all_port_objs,
port_iip_objs)
elif 'tenant_id' in filters:
all_project_ids = self._validate_project_ids(context,
filters['tenant_id'])
elif 'name' in filters:
all_project_ids = [str(uuid.UUID(context['tenant']))]
elif 'id' in filters:
# TODO optimize
for port_id in filters['id']:
try:
port_info = self.port_read(port_id)
except NoIdError:
continue
ret_q_ports.append(port_info)
for proj_id in all_project_ids:
ret_q_ports = self._port_list_project(proj_id)
if 'network_id' in filters:
ret_q_ports = self._port_list_network(filters['network_id'])
# prune phase
ret_list = []
for port_obj in ret_q_ports:
if not self._filters_is_present(filters, 'name',
port_obj['name']):
continue
ret_list.append(port_obj)
return ret_list
# Listing from parent to children
device_ids = filters['device_id']
for dev_id in device_ids:
try:
# TODO optimize
port_objs = self._virtual_machine_interface_list(
parent_id=dev_id,
back_ref_id=dev_id)
if not port_objs:
raise NoIdError(None)
for port_obj in port_objs:
port_info = self._port_vnc_to_neutron(port_obj)
ret_q_ports.append(port_info)
except NoIdError:
try:
router_obj = self._logical_router_read(rtr_id=dev_id)
intfs = router_obj.get_virtual_machine_interface_refs()
for intf in (intfs or []):
try:
port_info = self.port_read(intf['uuid'])
except NoIdError:
continue
ret_q_ports.append(port_info)
except NoIdError:
continue
return ret_q_ports
#end port_list
def port_count(self, filters=None):
if (filters.get('device_owner') == 'network:dhcp' or
'network:dhcp' in filters.get('device_owner', [])):
return 0
if 'tenant_id' in filters:
if isinstance(filters['tenant_id'], list):
project_id = str(uuid.UUID(filters['tenant_id'][0]))
else:
project_id = str(uuid.UUID(filters['tenant_id']))
try:
nports = self._db_cache['q_tenant_port_count'][project_id]
if nports < 0:
# TBD Hack. fix in case of multiple q servers after 1.03
nports = 0
del self._db_cache['q_tenant_port_count'][project_id]
return nports
except KeyError:
# do it the hard way but remember for next time
nports = len(self._port_list_project(project_id))
self._db_cache['q_tenant_port_count'][project_id] = nports
else:
# across all projects - TODO very expensive,
# get only a count from api-server!
nports = len(self.port_list(filters=filters))
return nports
#end port_count
# security group api handlers
def security_group_create(self, sg_q):
sg_obj = self._security_group_neutron_to_vnc(sg_q, CREATE)
sg_uuid = self._resource_create('security_group', sg_obj)
#allow all egress traffic
def_rule = {}
def_rule['port_range_min'] = 0
def_rule['port_range_max'] = 65535
def_rule['direction'] = 'egress'
def_rule['remote_ip_prefix'] = '0.0.0.0/0'
def_rule['remote_group_id'] = None
def_rule['protocol'] = 'any'
rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
self._security_group_rule_create(sg_uuid, rule)
ret_sg_q = self._security_group_vnc_to_neutron(sg_obj)
return ret_sg_q
#end security_group_create
def security_group_update(self, sg_id, sg_q):
sg_q['id'] = sg_id
sg_obj = self._security_group_neutron_to_vnc(sg_q, UPDATE)
self._vnc_lib.security_group_update(sg_obj)
ret_sg_q = self._security_group_vnc_to_neutron(sg_obj)
return ret_sg_q
#end security_group_update
def security_group_read(self, sg_id):
try:
sg_obj = self._vnc_lib.security_group_read(id=sg_id)
except NoIdError:
self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))
return self._security_group_vnc_to_neutron(sg_obj)
#end security_group_read
def security_group_delete(self, sg_id):
try:
sg_obj = self._vnc_lib.security_group_read(id=sg_id)
if sg_obj.name == 'default':
self._raise_contrail_exception(409, SecurityGroupCannotRemoveDefault())
except NoIdError:
return
try:
self._security_group_delete(sg_id)
except RefsExistError:
self._raise_contrail_exception(409, SecurityGroupInUse(id=sg_id))
self._db_cache_flush('q_tenant_to_def_sg')
#end security_group_delete
    def security_group_list(self, context, filters=None):
        """Return neutron dicts for the security groups visible to the
        caller: the caller's own project for non-admins, the projects in
        filters['tenant_id'] (or all projects) for admins. Results are
        pruned by any 'id'/'name' filters."""
        ret_list = []
        # collect phase
        all_sgs = []  # all sgs in all projects
        if context and not context['is_admin']:
            # Poll up to 10 times, 3s apart, while the project's group
            # list is empty -- presumably waiting for the default group
            # of a freshly created tenant to appear (TODO confirm).
            for i in range(10):
                project_sgs = self._security_group_list_project(str(uuid.UUID(context['tenant'])))
                if project_sgs:
                    break
                gevent.sleep(3)
            all_sgs.append(project_sgs)
        else: # admin context
            if filters and 'tenant_id' in filters:
                project_ids = self._validate_project_ids(context,
                                                         filters['tenant_id'])
                for p_id in project_ids:
                    project_sgs = self._security_group_list_project(p_id)
                    all_sgs.append(project_sgs)
            else: # no filters
                all_sgs.append(self._security_group_list_project(None))
        # prune phase
        for project_sgs in all_sgs:
            for sg_obj in project_sgs:
                if not self._filters_is_present(filters, 'id', sg_obj.uuid):
                    continue
                if not self._filters_is_present(filters, 'name',
                                                sg_obj.get_display_name() or sg_obj.name):
                    continue
                sg_info = self._security_group_vnc_to_neutron(sg_obj)
                ret_list.append(sg_info)
        return ret_list
    #end security_group_list
def _get_ip_proto_number(self, protocol):
if protocol is None:
return
return IP_PROTOCOL_MAP.get(protocol, protocol)
    def _validate_port_range(self, rule):
        """Check that port_range is valid.

        Aborts with 400 when a range is given without a protocol, when a
        TCP/UDP range is inverted or open-ended at the bottom, or when
        ICMP type/code (carried in the min/max fields) are out of range.
        """
        # no range supplied at all: nothing to validate
        if (rule['port_range_min'] is None and
            rule['port_range_max'] is None):
            return
        if not rule['protocol']:
            self._raise_contrail_exception(400, SecurityGroupProtocolRequiredWithPorts())
        ip_proto = self._get_ip_proto_number(rule['protocol'])
        if ip_proto in [constants.PROTO_NUM_TCP, constants.PROTO_NUM_UDP]:
            # min must be present and must not exceed max
            if (rule['port_range_min'] is not None and
                rule['port_range_min'] <= rule['port_range_max']):
                pass
            else:
                self._raise_contrail_exception(400, SecurityGroupInvalidPortRange())
        elif ip_proto == constants.PROTO_NUM_ICMP:
            # for ICMP, min/max carry type/code rather than ports
            # NOTE(review): `rule[attr] > 255` with a None value relies on
            # Python 2 mixed-type ordering (None sorts low) -- revisit
            # before any Python 3 port.
            for attr, field in [('port_range_min', 'type'),
                                ('port_range_max', 'code')]:
                if rule[attr] > 255:
                    self._raise_contrail_exception(400, SecurityGroupInvalidIcmpValue(field=field, attr=attr, value=rule[attr]))
            if (rule['port_range_min'] is None and
                rule['port_range_max']):
                self._raise_contrail_exception(400, SecurityGroupMissingIcmpType(value=rule['port_range_max']))
def security_group_rule_create(self, sgr_q):
self._validate_port_range(sgr_q)
sg_id = sgr_q['security_group_id']
sg_rule = self._security_group_rule_neutron_to_vnc(sgr_q, CREATE)
self._security_group_rule_create(sg_id, sg_rule)
ret_sg_rule_q = self._security_group_rule_vnc_to_neutron(sg_id,
sg_rule)
return ret_sg_rule_q
#end security_group_rule_create
def security_group_rule_read(self, sgr_id):
sg_obj, sg_rule = self._security_group_rule_find(sgr_id)
if sg_obj and sg_rule:
return self._security_group_rule_vnc_to_neutron(sg_obj.uuid,
sg_rule, sg_obj)
self._raise_contrail_exception(404, SecurityGroupRuleNotFound(id=sgr_id))
#end security_group_rule_read
def security_group_rule_delete(self, sgr_id):
sg_obj, sg_rule = self._security_group_rule_find(sgr_id)
if sg_obj and sg_rule:
return self._security_group_rule_delete(sg_obj, sg_rule)
self._raise_contrail_exception(404, SecurityGroupRuleNotFound(id=sgr_id))
#end security_group_rule_delete
    def security_group_rules_read(self, sg_id, sg_obj=None):
        """Return neutron dicts for every rule of security group sg_id.

        sg_obj, when supplied, skips the api-server re-read. Returns
        None (not []) when the group has no rule entries -- callers test
        the result for truthiness. Aborts with 404 on unknown sg_id."""
        try:
            if not sg_obj:
                sg_obj = self._vnc_lib.security_group_read(id=sg_id)
            sgr_entries = sg_obj.get_security_group_entries()
            sg_rules = []
            if sgr_entries == None:
                return
            for sg_rule in sgr_entries.get_policy_rule():
                sg_info = self._security_group_rule_vnc_to_neutron(sg_obj.uuid,
                                                                   sg_rule,
                                                                   sg_obj)
                sg_rules.append(sg_info)
        except NoIdError:
            self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))
        return sg_rules
    #end security_group_rules_read
def security_group_rule_list(self, context=None, filters=None):
ret_list = []
# collect phase
all_sgs = []
if filters and 'tenant_id' in filters:
project_ids = self._validate_project_ids(context,
filters['tenant_id'])
for p_id in project_ids:
project_sgs = self._security_group_list_project(p_id)
all_sgs.append(project_sgs)
else: # no filters
all_sgs.append(self._security_group_list_project(None))
# prune phase
for project_sgs in all_sgs:
for sg_obj in project_sgs:
# TODO implement same for name specified in filter
if not self._filters_is_present(filters, 'id', sg_obj.uuid):
continue
sgr_info = self.security_group_rules_read(sg_obj.uuid, sg_obj)
if sgr_info:
ret_list.extend(sgr_info)
return ret_list
#end security_group_rule_list
#route table api handlers
def route_table_create(self, rt_q):
rt_obj = self._route_table_neutron_to_vnc(rt_q, CREATE)
rt_uuid = self._route_table_create(rt_obj)
ret_rt_q = self._route_table_vnc_to_neutron(rt_obj)
return ret_rt_q
#end security_group_create
def route_table_read(self, rt_id):
try:
rt_obj = self._vnc_lib.route_table_read(id=rt_id)
except NoIdError:
# TODO add route table specific exception
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=rt_id))
return self._route_table_vnc_to_neutron(rt_obj)
#end route_table_read
    def route_table_update(self, rt_id, rt_q):
        """Apply the neutron update dict rt_q to route table rt_id and
        return the updated neutron view."""
        rt_q['id'] = rt_id
        rt_obj = self._route_table_neutron_to_vnc(rt_q, UPDATE)
        self._vnc_lib.route_table_update(rt_obj)
        return self._route_table_vnc_to_neutron(rt_obj)
    #end route_table_update
    def route_table_delete(self, rt_id):
        """Delete route table rt_id."""
        self._route_table_delete(rt_id)
    #end route_table_delete
def route_table_list(self, context, filters=None):
ret_list = []
# collect phase
all_rts = [] # all rts in all projects
if filters and 'tenant_id' in filters:
project_ids = self._validate_project_ids(context,
filters['tenant_id'])
for p_id in project_ids:
project_rts = self._route_table_list_project(p_id)
all_rts.append(project_rts)
elif filters and 'name' in filters:
p_id = str(uuid.UUID(context['tenant']))
project_rts = self._route_table_list_project(p_id)
all_rts.append(project_rts)
else: # no filters
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_rts = self._route_table_list_project(proj_id)
all_rts.append(project_rts)
# prune phase
for project_rts in all_rts:
for proj_rt in project_rts:
# TODO implement same for name specified in filter
proj_rt_id = proj_rt['uuid']
if not self._filters_is_present(filters, 'id', proj_rt_id):
continue
rt_info = self.route_table_read(proj_rt_id)
if not self._filters_is_present(filters, 'name',
rt_info['name']):
continue
ret_list.append(rt_info)
return ret_list
#end route_table_list
#service instance api handlers
def svc_instance_create(self, si_q):
si_obj = self._svc_instance_neutron_to_vnc(si_q, CREATE)
si_uuid = self._svc_instance_create(si_obj)
ret_si_q = self._svc_instance_vnc_to_neutron(si_obj)
return ret_si_q
#end svc_instance_create
def svc_instance_read(self, si_id):
try:
si_obj = self._vnc_lib.service_instance_read(id=si_id)
except NoIdError:
# TODO add svc instance specific exception
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=si_id))
return self._svc_instance_vnc_to_neutron(si_obj)
#end svc_instance_read
    def svc_instance_delete(self, si_id):
        """Delete service instance si_id."""
        self._svc_instance_delete(si_id)
    #end svc_instance_delete
def svc_instance_list(self, context, filters=None):
ret_list = []
# collect phase
all_sis = [] # all sis in all projects
if filters and 'tenant_id' in filters:
project_ids = self._validate_project_ids(context,
filters['tenant_id'])
for p_id in project_ids:
project_sis = self._svc_instance_list_project(p_id)
all_sis.append(project_sis)
elif filters and 'name' in filters:
p_id = str(uuid.UUID(context['tenant']))
project_sis = self._svc_instance_list_project(p_id)
all_sis.append(project_sis)
else: # no filters
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_sis = self._svc_instance_list_project(proj_id)
all_sis.append(project_sis)
# prune phase
for project_sis in all_sis:
for proj_si in project_sis:
# TODO implement same for name specified in filter
proj_si_id = proj_si['uuid']
if not self._filters_is_present(filters, 'id', proj_si_id):
continue
si_info = self.svc_instance_read(proj_si_id)
if not self._filters_is_present(filters, 'name',
si_info['name']):
continue
ret_list.append(si_info)
return ret_list
#end svc_instance_list
#end class DBInterface
Remove unnecessary fields from response
Change-Id: I6b0b9cb4324d7c231fdc733e9272fd1b960a183c
Closes-Bug: 1361520
# Copyright 2012, Contrail Systems, Inc.
#
"""
.. attention:: Fix the license string
"""
import requests
import re
import uuid
import json
import time
import socket
import netaddr
from netaddr import IPNetwork, IPSet, IPAddress
import gevent
import bottle
from neutron.common import constants
from neutron.common import exceptions
from neutron.api.v2 import attributes as attr
from cfgm_common import exceptions as vnc_exc
from vnc_api.vnc_api import *
# default headers for requests relayed to the api server
_DEFAULT_HEADERS = {
    'Content-type': 'application/json; charset="UTF-8"', }
# TODO find if there is a common definition
# CRUD operation codes handed to the *_neutron_to_vnc converters
CREATE = 1
READ = 2
UPDATE = 3
DELETE = 4
# protocol name -> IP protocol number, for security group rules
IP_PROTOCOL_MAP = {constants.PROTO_NAME_TCP: constants.PROTO_NUM_TCP,
                   constants.PROTO_NAME_UDP: constants.PROTO_NUM_UDP,
                   constants.PROTO_NAME_ICMP: constants.PROTO_NUM_ICMP,
                   constants.PROTO_NAME_ICMP_V6: constants.PROTO_NUM_ICMP_V6}
# SNAT defines
SNAT_SERVICE_TEMPLATE_FQ_NAME = ['default-domain', 'netns-snat-template']
# Security group Exceptions
class SecurityGroupInvalidPortRange(exceptions.InvalidInput):
    """400: TCP/UDP rule with port_range_min above port_range_max."""
    message = _("For TCP/UDP protocols, port_range_min must be "
                "<= port_range_max")
class SecurityGroupInvalidPortValue(exceptions.InvalidInput):
    """400: a rule port value is out of range."""
    message = _("Invalid value for port %(port)s")
class SecurityGroupInvalidIcmpValue(exceptions.InvalidInput):
    """400: ICMP type/code outside 0-255."""
    message = _("Invalid value for ICMP %(field)s (%(attr)s) "
                "%(value)s. It must be 0 to 255.")
class SecurityGroupMissingIcmpType(exceptions.InvalidInput):
    """400: ICMP code given without the corresponding ICMP type."""
    message = _("ICMP code (port-range-max) %(value)s is provided"
                " but ICMP type (port-range-min) is missing.")
class SecurityGroupInUse(exceptions.InUse):
    """409: the security group is still referenced and cannot be deleted."""
    message = _("Security Group %(id)s in use.")
class SecurityGroupCannotRemoveDefault(exceptions.InUse):
    """409: the project 'default' security group may not be deleted."""
    message = _("Removing default security group not allowed.")
class SecurityGroupCannotUpdateDefault(exceptions.InUse):
    """409: the project 'default' security group may not be updated."""
    message = _("Updating default security group not allowed.")
class SecurityGroupDefaultAlreadyExists(exceptions.InUse):
    """409: the project already has a 'default' security group."""
    message = _("Default security group already exists.")
class SecurityGroupRuleInvalidProtocol(exceptions.InvalidInput):
    """400: unsupported protocol in a security group rule."""
    message = _("Security group rule protocol %(protocol)s not supported. "
                "Only protocol values %(values)s and their integer "
                "representation (0 to 255) are supported.")
class SecurityGroupRulesNotSingleTenant(exceptions.InvalidInput):
    """400: bulk rule create spanning more than one tenant."""
    message = _("Multiple tenant_ids in bulk security group rule create"
                " not allowed")
class SecurityGroupRemoteGroupAndRemoteIpPrefix(exceptions.InvalidInput):
    """400: remote_ip_prefix and remote_group_id are mutually exclusive."""
    message = _("Only remote_ip_prefix or remote_group_id may "
                "be provided.")
class SecurityGroupProtocolRequiredWithPorts(exceptions.InvalidInput):
    """400: a port range was given without a protocol."""
    message = _("Must also specify protocol if port range is given.")
class SecurityGroupNotSingleGroupRules(exceptions.InvalidInput):
    """400: one update may only touch rules of a single group."""
    message = _("Only allowed to update rules for "
                "one security profile at a time")
class SecurityGroupNotFound(exceptions.NotFound):
    """404: no security group with the given id."""
    message = _("Security group %(id)s does not exist")
class SecurityGroupRuleNotFound(exceptions.NotFound):
    """404: no security group rule with the given id."""
    message = _("Security group rule %(id)s does not exist")
class DuplicateSecurityGroupRuleInPost(exceptions.InUse):
    """409: the same rule appears twice in one request."""
    message = _("Duplicate Security Group Rule in POST.")
class SecurityGroupRuleExists(exceptions.InUse):
    """409: an identical rule already exists in the group."""
    message = _("Security group rule already exists. Group id is %(id)s.")
class SecurityGroupRuleParameterConflict(exceptions.InvalidInput):
    """400: the rule's ethertype conflicts with its CIDR family."""
    message = _("Conflicting value ethertype %(ethertype)s for CIDR %(cidr)s")
# L3 Exceptions
class RouterNotFound(exceptions.NotFound):
    """404: no logical router with the given id."""
    message = _("Router %(router_id)s could not be found")
class RouterInUse(exceptions.InUse):
    """409: the router still has attached ports."""
    message = _("Router %(router_id)s still has ports")
class RouterInterfaceNotFound(exceptions.NotFound):
    """404: the router has no interface with the given port id."""
    message = _("Router %(router_id)s does not have "
                "an interface with id %(port_id)s")
class RouterInterfaceNotFoundForSubnet(exceptions.NotFound):
    """404: the router has no interface on the given subnet."""
    message = _("Router %(router_id)s has no interface "
                "on subnet %(subnet_id)s")
class RouterInterfaceInUseByFloatingIP(exceptions.InUse):
    """409: the router interface is required by floating IPs."""
    message = _("Router interface for subnet %(subnet_id)s on router "
                "%(router_id)s cannot be deleted, as it is required "
                "by one or more floating IPs.")
class FloatingIPNotFound(exceptions.NotFound):
    """404: no floating IP with the given id."""
    message = _("Floating IP %(floatingip_id)s could not be found")
class ExternalGatewayForFloatingIPNotFound(exceptions.NotFound):
    """404: the external network is unreachable from the port's subnet."""
    message = _("External network %(external_network_id)s is not reachable "
                "from subnet %(subnet_id)s. Therefore, cannot associate "
                "Port %(port_id)s with a Floating IP.")
class FloatingIPPortAlreadyAssociated(exceptions.InUse):
    """409: the fixed IP already carries a floating IP on that network."""
    message = _("Cannot associate floating IP %(floating_ip_address)s "
                "(%(fip_id)s) with port %(port_id)s "
                "using fixed IP %(fixed_ip)s, as that fixed IP already "
                "has a floating IP on external network %(net_id)s.")
class L3PortInUse(exceptions.InUse):
    """409: L3-owned ports cannot be deleted through the port API."""
    message = _("Port %(port_id)s has owner %(device_owner)s and therefore"
                " cannot be deleted directly via the port API.")
class RouterExternalGatewayInUseByFloatingIp(exceptions.InUse):
    """409: the router gateway is required by floating IPs."""
    message = _("Gateway cannot be updated for router %(router_id)s, since a "
                "gateway to external network %(net_id)s is required by one or "
                "more floating IPs.")
# Allowed Address Pair
class AddressPairMatchesPortFixedIPAndMac(exceptions.InvalidInput):
    """400: an allowed-address-pair duplicates the port's own IP/MAC."""
    message = _("Port's Fixed IP and Mac Address match an address pair entry.")
class DBInterface(object):
"""
An instance of this class forwards requests to vnc cfg api (web)server
"""
Q_URL_PREFIX = '/extensions/ct'
    def __init__(self, admin_name, admin_password, admin_tenant_name,
                 api_srvr_ip, api_srvr_port, user_info=None,
                 contrail_extensions_enabled=True,
                 list_optimization_enabled=False):
        """Connect to the VNC api server (retrying until it answers),
        initialize the in-memory caches and migrate old-format subnet
        key/value store entries.

        :param admin_name: api-server user name
        :param admin_password: api-server password
        :param admin_tenant_name: api-server tenant
        :param api_srvr_ip: api-server address
        :param api_srvr_port: api-server port
        :param user_info: optional per-request user context for VncApi
        :param contrail_extensions_enabled: expose contrail extensions
        :param list_optimization_enabled: scope list queries by project
        """
        self._api_srvr_ip = api_srvr_ip
        self._api_srvr_port = api_srvr_port
        # per-table in-memory caches; 'q_*' tables hold neutron views,
        # 'vnc_*' tables hold raw vnc objects
        self._db_cache = {}
        self._db_cache['q_networks'] = {}
        self._db_cache['q_subnets'] = {}
        self._db_cache['q_subnet_maps'] = {}
        self._db_cache['q_policies'] = {}
        self._db_cache['q_ipams'] = {}
        self._db_cache['q_routers'] = {}
        self._db_cache['q_floatingips'] = {}
        self._db_cache['q_ports'] = {}
        self._db_cache['q_fixed_ip_to_subnet'] = {}
        #obj-uuid to tenant-uuid mapping
        self._db_cache['q_obj_to_tenant'] = {}
        self._db_cache['q_tenant_to_def_sg'] = {}
        #port count per tenant-id
        self._db_cache['q_tenant_port_count'] = {}
        self._db_cache['vnc_networks'] = {}
        self._db_cache['vnc_ports'] = {}
        self._db_cache['vnc_projects'] = {}
        self._db_cache['vnc_instance_ips'] = {}
        self._db_cache['vnc_routers'] = {}
        self._contrail_extensions_enabled = contrail_extensions_enabled
        self._list_optimization_enabled = list_optimization_enabled
        # Retry till a api-server is up
        connected = False
        while not connected:
            try:
                # TODO remove hardcode
                self._vnc_lib = VncApi(admin_name, admin_password,
                                       admin_tenant_name, api_srvr_ip,
                                       api_srvr_port, '/', user_info=user_info)
                connected = True
            except requests.exceptions.RequestException as e:
                gevent.sleep(3)
        # TODO remove this backward compat code eventually
        # changes 'net_fq_name_str pfx/len' key to 'net_id pfx/len' key
        subnet_map = self._vnc_lib.kv_retrieve(key=None)
        for kv_dict in subnet_map:
            key = kv_dict['key']
            if len(key.split()) == 1:
                subnet_id = key
                # uuid key, fixup value portion to 'net_id pfx/len' format
                # if not already so
                if len(kv_dict['value'].split(':')) == 1:
                    # new format already, skip
                    continue
                net_fq_name = kv_dict['value'].split()[0].split(':')
                try:
                    net_obj = self._virtual_network_read(fq_name=net_fq_name)
                except NoIdError:
                    # network is gone: discard the stale mapping
                    self._vnc_lib.kv_delete(subnet_id)
                    continue
                new_subnet_key = '%s %s' % (net_obj.uuid,
                                            kv_dict['value'].split()[1])
                self._vnc_lib.kv_store(subnet_id, new_subnet_key)
            else: # subnet key
                if len(key.split()[0].split(':')) == 1:
                    # new format already, skip
                    continue
                # delete old key, convert to new key format and save
                old_subnet_key = key
                self._vnc_lib.kv_delete(old_subnet_key)
                subnet_id = kv_dict['value']
                net_fq_name = key.split()[0].split(':')
                try:
                    net_obj = self._virtual_network_read(fq_name=net_fq_name)
                except NoIdError:
                    continue
                new_subnet_key = '%s %s' % (net_obj.uuid, key.split()[1])
                self._vnc_lib.kv_store(new_subnet_key, subnet_id)
    #end __init__
# Helper routines
def _request_api_server(self, url, method, data=None, headers=None):
if method == 'GET':
return requests.get(url)
if method == 'POST':
return requests.post(url, data=data, headers=headers)
if method == 'DELETE':
return requests.delete(url)
#end _request_api_server
def _relay_request(self, request):
"""
Send received request to api server
"""
# chop neutron parts of url and add api server address
url_path = re.sub(self.Q_URL_PREFIX, '', request.environ['PATH_INFO'])
url = "http://%s:%s%s" % (self._api_srvr_ip, self._api_srvr_port,
url_path)
return self._request_api_server(
url, request.environ['REQUEST_METHOD'],
request.body, {'Content-type': request.environ['CONTENT_TYPE']})
#end _relay_request
def _validate_project_ids(self, context, project_ids):
if context and not context['is_admin']:
return [context['tenant']]
return_project_ids = []
for project_id in project_ids:
try:
return_project_ids.append(str(uuid.UUID(project_id)))
except ValueError:
continue
return return_project_ids
    def _obj_to_dict(self, obj):
        """Serialize a vnc object into a plain dict via the client lib."""
        return self._vnc_lib.obj_to_dict(obj)
    #end _obj_to_dict
def _get_plugin_property(self, property_in):
fq_name=['default-global-system-config'];
gsc_obj = self._vnc_lib.global_system_config_read(fq_name);
plugin_settings = gsc_obj.plugin_tuning.plugin_property
for each_setting in plugin_settings:
if each_setting.property == property_in:
return each_setting.value
return None
#end _get_plugin_property
def _ensure_instance_exists(self, instance_id):
instance_name = instance_id
instance_obj = VirtualMachine(instance_name)
try:
id = self._vnc_lib.obj_to_id(instance_obj)
instance_obj = self._vnc_lib.virtual_machine_read(id=id)
except NoIdError: # instance doesn't exist, create it
# check if instance_id is a uuid value or not
try:
uuid.UUID(instance_id)
instance_obj.uuid = instance_id
except ValueError:
# if instance_id is not a valid uuid, let
# virtual_machine_create generate uuid for the vm
pass
self._vnc_lib.virtual_machine_create(instance_obj)
return instance_obj
#end _ensure_instance_exists
def _ensure_default_security_group_exists(self, proj_id):
# check in cache
sg_uuid = self._db_cache_read('q_tenant_to_def_sg', proj_id)
if sg_uuid:
return
# check in api server
proj_obj = self._vnc_lib.project_read(id=proj_id)
sg_groups = proj_obj.get_security_groups()
for sg_group in sg_groups or []:
if sg_group['to'][-1] == 'default':
self._db_cache_write('q_tenant_to_def_sg',
proj_id, sg_group['uuid'])
return
# does not exist hence create and add cache
sg_uuid = str(uuid.uuid4())
self._db_cache_write('q_tenant_to_def_sg', proj_id, sg_uuid)
sg_obj = SecurityGroup(name='default', parent_obj=proj_obj)
sg_obj.uuid = sg_uuid
self._vnc_lib.security_group_create(sg_obj)
#allow all egress traffic
def_rule = {}
def_rule['port_range_min'] = 0
def_rule['port_range_max'] = 65535
def_rule['direction'] = 'egress'
def_rule['remote_ip_prefix'] = '0.0.0.0/0'
def_rule['remote_group_id'] = None
def_rule['protocol'] = 'any'
rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
self._security_group_rule_create(sg_obj.uuid, rule)
#allow ingress traffic from within default security group
def_rule = {}
def_rule['port_range_min'] = 0
def_rule['port_range_max'] = 65535
def_rule['direction'] = 'ingress'
def_rule['remote_ip_prefix'] = '0.0.0.0/0'
def_rule['remote_group_id'] = None
def_rule['protocol'] = 'any'
rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
self._security_group_rule_create(sg_obj.uuid, rule)
#end _ensure_default_security_group_exists
def _db_cache_read(self, table, key):
try:
return self._db_cache[table][key]
except KeyError:
return None
#end _db_cache_read
    def _db_cache_write(self, table, key, val):
        """Store val under key in the named cache table."""
        self._db_cache[table][key] = val
    #end _db_cache_write
def _db_cache_delete(self, table, key):
try:
del self._db_cache[table][key]
except Exception:
pass
#end _db_cache_delete
    def _db_cache_flush(self, table):
        """Drop every entry of the named cache table (rebinds a fresh
        dict rather than mutating the old one)."""
        self._db_cache[table] = {}
    #end _db_cache_delete
    def _get_obj_tenant_id(self, q_type, obj_uuid):
        """Return the (dash-less) tenant uuid owning object obj_uuid of
        q_type ('port' or 'network'), seeding the q_obj_to_tenant cache
        on a miss. Unknown q_type values yield None."""
        # Get the mapping from cache, else seed cache and return
        try:
            return self._db_cache['q_obj_to_tenant'][obj_uuid]
        except KeyError:
            # Seed the cache and return
            if q_type == 'port':
                port_obj = self._virtual_machine_interface_read(obj_uuid)
                if port_obj.parent_type != "project":
                    # VM-owned port: attribute it to its network's tenant
                    net_id = port_obj.get_virtual_network_refs()[0]['uuid']
                    # recurse up type-hierarchy
                    tenant_id = self._get_obj_tenant_id('network', net_id)
                else:
                    tenant_id = port_obj.parent_uuid.replace('-', '')
                self._set_obj_tenant_id(obj_uuid, tenant_id)
                return tenant_id
            if q_type == 'network':
                net_obj = self._virtual_network_read(net_id=obj_uuid)
                tenant_id = net_obj.parent_uuid.replace('-', '')
                self._set_obj_tenant_id(obj_uuid, tenant_id)
                return tenant_id
            return None
    #end _get_obj_tenant_id
    def _set_obj_tenant_id(self, obj_uuid, tenant_uuid):
        """Cache the obj-uuid -> tenant-uuid mapping."""
        self._db_cache['q_obj_to_tenant'][obj_uuid] = tenant_uuid
    #end _set_obj_tenant_id
def _del_obj_tenant_id(self, obj_uuid):
try:
del self._db_cache['q_obj_to_tenant'][obj_uuid]
except Exception:
pass
#end _del_obj_tenant_id
    def _project_read(self, proj_id=None, fq_name=None):
        """Read a project by uuid or by fq_name, refreshing the
        vnc_projects cache under both keys.

        The `raise KeyError` lines deliberately disable cache HITS (the
        cache is still written) because fip pools can change behind
        neutron's back; do not remove them without re-enabling a proper
        invalidation scheme."""
        if proj_id:
            try:
                # disable cache for now as fip pool might be put without
                # neutron knowing it
                raise KeyError
                #return self._db_cache['vnc_projects'][proj_id]
            except KeyError:
                proj_obj = self._vnc_lib.project_read(id=proj_id)
                fq_name_str = json.dumps(proj_obj.get_fq_name())
                self._db_cache['vnc_projects'][proj_id] = proj_obj
                self._db_cache['vnc_projects'][fq_name_str] = proj_obj
                return proj_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # disable cache for now as fip pool might be put without
                # neutron knowing it
                raise KeyError
                #return self._db_cache['vnc_projects'][fq_name_str]
            except KeyError:
                proj_obj = self._vnc_lib.project_read(fq_name=fq_name)
                self._db_cache['vnc_projects'][fq_name_str] = proj_obj
                self._db_cache['vnc_projects'][proj_obj.uuid] = proj_obj
                return proj_obj
    #end _project_read
    def _get_tenant_id_for_create(self, context, resource):
        """Resolve the tenant id to use when creating a resource.

        Admins may create on behalf of any tenant; for non-admins a
        tenant_id differing from the request context's is rejected with
        HTTP 400 (AdminRequired).  Otherwise the context's tenant wins.
        """
        if context['is_admin'] and 'tenant_id' in resource:
            tenant_id = resource['tenant_id']
        elif ('tenant_id' in resource and
              resource['tenant_id'] != context['tenant_id']):
            reason = _('Cannot create resource for another tenant')
            self._raise_contrail_exception(400, exceptions.AdminRequired(reason=reason))
        else:
            tenant_id = context['tenant_id']
        return tenant_id
    def _raise_contrail_exception(self, code, exc):
        """Abort the current bottle request with HTTP `code` and a JSON
        body carrying the exception's message.  bottle.abort raises, so
        this never returns."""
        exc_info = {'message': str(exc)}
        bottle.abort(code, json.dumps(exc_info))
    def _security_group_rule_create(self, sg_id, sg_rule):
        """Append sg_rule to security group sg_id and persist it.

        Aborts with HTTP 404 (SecurityGroupNotFound) when the group id
        is unknown to the API server.
        """
        try:
            sg_vnc = self._vnc_lib.security_group_read(id=sg_id)
        except NoIdError:
            self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))
        rules = sg_vnc.get_security_group_entries()
        if rules is None:
            # first rule on this group: start a fresh entries list
            rules = PolicyEntriesType([sg_rule])
        else:
            rules.add_policy_rule(sg_rule)
        sg_vnc.set_security_group_entries(rules)
        self._vnc_lib.security_group_update(sg_vnc)
        return
    #end _security_group_rule_create
def _security_group_rule_find(self, sgr_id):
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_sgs = self._security_group_list_project(proj_id)
for sg_obj in project_sgs:
sgr_entries = sg_obj.get_security_group_entries()
if sgr_entries == None:
continue
for sg_rule in sgr_entries.get_policy_rule():
if sg_rule.get_rule_uuid() == sgr_id:
return sg_obj, sg_rule
return None, None
#end _security_group_rule_find
    def _security_group_rule_delete(self, sg_obj, sg_rule):
        """Remove sg_rule from sg_obj's entries and persist the change.

        Assumes sg_rule is currently one of sg_obj's policy rules
        (list.remove raises ValueError otherwise).
        """
        rules = sg_obj.get_security_group_entries()
        rules.get_policy_rule().remove(sg_rule)
        sg_obj.set_security_group_entries(rules)
        self._vnc_lib.security_group_update(sg_obj)
        return
    #end _security_group_rule_delete
    def _security_group_delete(self, sg_id):
        """Delete the security group with uuid sg_id from the API server."""
        self._vnc_lib.security_group_delete(id=sg_id)
    #end _security_group_delete
    def _svc_instance_create(self, si_obj):
        """Create a service instance, bind it to the default-domain
        nat-template, and return the new instance's uuid.

        NOTE(review): the template ref is attached by a second update
        call, so a failure between create and update leaves an SI with
        no template bound.
        """
        si_uuid = self._vnc_lib.service_instance_create(si_obj)
        st_fq_name = ['default-domain', 'nat-template']
        st_obj = self._vnc_lib.service_template_read(fq_name=st_fq_name)
        si_obj.set_service_template(st_obj)
        self._vnc_lib.service_instance_update(si_obj)
        return si_uuid
    #end _svc_instance_create
    def _svc_instance_delete(self, si_id):
        """Delete the service instance with uuid si_id from the API server."""
        self._vnc_lib.service_instance_delete(id=si_id)
    #end _svc_instance_delete
def _route_table_create(self, rt_obj):
rt_uuid = self._vnc_lib.route_table_create(rt_obj)
return rt_uuid
#end _route_table_create
def _route_table_delete(self, rt_id):
self._vnc_lib.route_table_delete(id=rt_id)
#end _route_table_delete
    def _resource_create(self, resource_type, obj):
        """Create obj via the dynamically-named vnc_lib
        '<resource_type>_create' API and return the new uuid.

        On a name collision (RefsExistError) the object is renamed by
        appending a fresh uuid to name/fq_name and the create retried
        once.  PermissionDenied maps to an HTTP 400 abort.
        """
        try:
            obj_uuid = getattr(self._vnc_lib, resource_type + '_create')(obj)
        except RefsExistError:
            obj.uuid = str(uuid.uuid4())
            obj.name += '-' + obj.uuid
            obj.fq_name[-1] += '-' + obj.uuid
            obj_uuid = getattr(self._vnc_lib, resource_type + '_create')(obj)
        except PermissionDenied as e:
            exc_info = {'type': 'BadRequest', 'message': str(e)}
            bottle.abort(400, json.dumps(exc_info))
        return obj_uuid
    #end _resource_create
    def _virtual_network_read(self, net_id=None, fq_name=None, fields=None):
        """Read a virtual network by uuid or fq_name.

        The read-through cache is disabled (bare 'raise KeyError') but
        every successful read still re-seeds 'vnc_networks' under both
        uuid and json-encoded fq_name keys.  Returns None when neither
        net_id nor fq_name is given.
        """
        if net_id:
            try:
                # return self._db_cache['vnc_networks'][net_id]
                raise KeyError
            except KeyError:
                net_obj = self._vnc_lib.virtual_network_read(id=net_id,
                                                             fields=fields)
                fq_name_str = json.dumps(net_obj.get_fq_name())
                self._db_cache['vnc_networks'][net_id] = net_obj
                self._db_cache['vnc_networks'][fq_name_str] = net_obj
                return net_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_networks'][fq_name_str]
                raise KeyError
            except KeyError:
                net_obj = self._vnc_lib.virtual_network_read(fq_name=fq_name,
                                                             fields=fields)
                self._db_cache['vnc_networks'][fq_name_str] = net_obj
                self._db_cache['vnc_networks'][net_obj.uuid] = net_obj
                return net_obj
    #end _virtual_network_read
    def _virtual_network_update(self, net_obj):
        """Persist net_obj and refresh its cache entries.

        PermissionDenied maps to an HTTP 400 abort; RefsExistError maps
        to HTTP 400 BadRequest on the 'network' resource.
        """
        try:
            self._vnc_lib.virtual_network_update(net_obj)
        except PermissionDenied as e:
            exc_info = {'type': 'BadRequest', 'message': str(e)}
            bottle.abort(400, json.dumps(exc_info))
        except RefsExistError as e:
            self._raise_contrail_exception(400, exceptions.BadRequest(
                resource='network', msg=str(e)))
        # read back to get subnet gw allocated by api-server
        fq_name_str = json.dumps(net_obj.get_fq_name())
        self._db_cache['vnc_networks'][net_obj.uuid] = net_obj
        self._db_cache['vnc_networks'][fq_name_str] = net_obj
    #end _virtual_network_update
    def _virtual_network_delete(self, net_id):
        """Delete a virtual network and, when the cached object shows
        floating-ip pools, delete those pools first.

        RefsExistError maps to HTTP 404 NetworkInUse.  Both cache keys
        (uuid and fq_name) are evicted afterwards, best-effort.
        """
        fq_name_str = None
        try:
            net_obj = self._db_cache['vnc_networks'][net_id]
            fq_name_str = json.dumps(net_obj.get_fq_name())
        except KeyError:
            # not cached: pools (if any) are not cleaned up here
            net_obj = None
        try:
            if net_obj and net_obj.get_floating_ip_pools():
                fip_pools = net_obj.get_floating_ip_pools()
                for fip_pool in fip_pools:
                    self._floating_ip_pool_delete(fip_pool_id=fip_pool['uuid'])
            self._vnc_lib.virtual_network_delete(id=net_id)
        except RefsExistError:
            self._raise_contrail_exception(404, exceptions.NetworkInUse(net_id=net_id))
        try:
            del self._db_cache['vnc_networks'][net_id]
            if fq_name_str:
                del self._db_cache['vnc_networks'][fq_name_str]
        except KeyError:
            pass
    #end _virtual_network_delete
    def _virtual_network_list(self, parent_id=None, obj_uuids=None,
                              fields=None, detail=False, count=False):
        """Thin pass-through to vnc_lib.virtual_networks_list."""
        return self._vnc_lib.virtual_networks_list(
                                              parent_id=parent_id,
                                              obj_uuids=obj_uuids,
                                              fields=fields,
                                              detail=detail,
                                              count=count)
    #end _virtual_network_list
    def _virtual_machine_interface_read(self, port_id=None, fq_name=None,
                                        fields=None):
        """Read a VMI (port) by uuid or fq_name.

        Always requests the router/instance-ip/floating-ip back-ref
        fields in addition to any caller-supplied fields.  The
        read-through cache is disabled (bare 'raise KeyError') but
        reads still re-seed 'vnc_ports' under uuid and fq_name keys.
        Returns None when neither port_id nor fq_name is given.
        """
        back_ref_fields = ['logical_router_back_refs', 'instance_ip_back_refs', 'floating_ip_back_refs']
        if fields:
            n_extra_fields = list(set(fields + back_ref_fields))
        else:
            n_extra_fields = back_ref_fields
        if port_id:
            try:
                # return self._db_cache['vnc_ports'][port_id]
                raise KeyError
            except KeyError:
                port_obj = self._vnc_lib.virtual_machine_interface_read(
                    id=port_id, fields=n_extra_fields)
                fq_name_str = json.dumps(port_obj.get_fq_name())
                self._db_cache['vnc_ports'][port_id] = port_obj
                self._db_cache['vnc_ports'][fq_name_str] = port_obj
                return port_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_ports'][fq_name_str]
                raise KeyError
            except KeyError:
                port_obj = self._vnc_lib.virtual_machine_interface_read(
                    fq_name=fq_name, fields=n_extra_fields)
                self._db_cache['vnc_ports'][fq_name_str] = port_obj
                self._db_cache['vnc_ports'][port_obj.uuid] = port_obj
                return port_obj
    #end _virtual_machine_interface_read
    def _virtual_machine_interface_update(self, port_obj):
        """Persist port_obj and refresh its 'vnc_ports' cache entries."""
        self._vnc_lib.virtual_machine_interface_update(port_obj)
        fq_name_str = json.dumps(port_obj.get_fq_name())
        self._db_cache['vnc_ports'][port_obj.uuid] = port_obj
        self._db_cache['vnc_ports'][fq_name_str] = port_obj
    #end _virtual_machine_interface_update
    def _virtual_machine_interface_delete(self, port_id):
        """Delete a VMI (port) and evict its cache entries, best-effort."""
        fq_name_str = None
        try:
            port_obj = self._db_cache['vnc_ports'][port_id]
            fq_name_str = json.dumps(port_obj.get_fq_name())
        except KeyError:
            port_obj = None
        self._vnc_lib.virtual_machine_interface_delete(id=port_id)
        try:
            del self._db_cache['vnc_ports'][port_id]
            if fq_name_str:
                del self._db_cache['vnc_ports'][fq_name_str]
        except KeyError:
            pass
    #end _virtual_machine_interface_delete
    def _virtual_machine_interface_list(self, parent_id=None, back_ref_id=None,
                                        obj_uuids=None, fields=None):
        """List VMIs (detail objects), always requesting router/
        instance-ip/floating-ip back-ref fields on top of any
        caller-supplied fields."""
        back_ref_fields = ['logical_router_back_refs', 'instance_ip_back_refs', 'floating_ip_back_refs']
        if fields:
            n_extra_fields = list(set(fields + back_ref_fields))
        else:
            n_extra_fields = back_ref_fields
        vmi_objs = self._vnc_lib.virtual_machine_interfaces_list(
                                                     parent_id=parent_id,
                                                     back_ref_id=back_ref_id,
                                                     obj_uuids=obj_uuids,
                                                     detail=True,
                                                     fields=n_extra_fields)
        return vmi_objs
    #end _virtual_machine_interface_list
def _instance_ip_create(self, iip_obj):
iip_uuid = self._vnc_lib.instance_ip_create(iip_obj)
return iip_uuid
#end _instance_ip_create
    def _instance_ip_read(self, instance_ip_id=None, fq_name=None):
        """Read an instance-ip by uuid or fq_name.

        Cache disabled (bare 'raise KeyError'); reads still re-seed
        'vnc_instance_ips' under both keys.  Returns None when neither
        argument is supplied.
        """
        if instance_ip_id:
            try:
                # return self._db_cache['vnc_instance_ips'][instance_ip_id]
                raise KeyError
            except KeyError:
                iip_obj = self._vnc_lib.instance_ip_read(id=instance_ip_id)
                fq_name_str = json.dumps(iip_obj.get_fq_name())
                self._db_cache['vnc_instance_ips'][instance_ip_id] = iip_obj
                self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
                return iip_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_instance_ips'][fq_name_str]
                raise KeyError
            except KeyError:
                iip_obj = self._vnc_lib.instance_ip_read(fq_name=fq_name)
                self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
                self._db_cache['vnc_instance_ips'][iip_obj.uuid] = iip_obj
                return iip_obj
    #end _instance_ip_read
    def _instance_ip_update(self, iip_obj):
        """Persist iip_obj and refresh its 'vnc_instance_ips' cache entries."""
        self._vnc_lib.instance_ip_update(iip_obj)
        fq_name_str = json.dumps(iip_obj.get_fq_name())
        self._db_cache['vnc_instance_ips'][iip_obj.uuid] = iip_obj
        self._db_cache['vnc_instance_ips'][fq_name_str] = iip_obj
    #end _instance_ip_update
    def _instance_ip_delete(self, instance_ip_id):
        """Delete an instance-ip and evict its cache entries, best-effort."""
        fq_name_str = None
        try:
            iip_obj = self._db_cache['vnc_instance_ips'][instance_ip_id]
            fq_name_str = json.dumps(iip_obj.get_fq_name())
        except KeyError:
            iip_obj = None
        self._vnc_lib.instance_ip_delete(id=instance_ip_id)
        try:
            del self._db_cache['vnc_instance_ips'][instance_ip_id]
            if fq_name_str:
                del self._db_cache['vnc_instance_ips'][fq_name_str]
        except KeyError:
            pass
    #end _instance_ip_delete
    def _instance_ip_list(self, back_ref_id=None, obj_uuids=None, fields=None):
        """Return detailed instance-ip objects, optionally filtered."""
        iip_objs = self._vnc_lib.instance_ips_list(detail=True,
                                                   back_ref_id=back_ref_id,
                                                   obj_uuids=obj_uuids,
                                                   fields=fields)
        return iip_objs
    #end _instance_ip_list
def _floating_ip_pool_create(self, fip_pool_obj):
fip_pool_uuid = self._vnc_lib.floating_ip_pool_create(fip_pool_obj)
return fip_pool_uuid
# end _floating_ip_pool_create
def _floating_ip_pool_delete(self, fip_pool_id):
fip_pool_uuid = self._vnc_lib.floating_ip_pool_delete(id=fip_pool_id)
# end _floating_ip_pool_delete
    # find projects on a given domain
    def _project_list_domain(self, domain_id):
        """List projects; domain_id is ignored and 'default-domain' is
        always used (see TODO below)."""
        # TODO till domain concept is not present in keystone
        fq_name = ['default-domain']
        resp_dict = self._vnc_lib.projects_list(parent_fq_name=fq_name)
        return resp_dict['projects']
    #end _project_list_domain
    # find network ids on a given project
    def _network_list_project(self, project_id, count=False):
        """List (detail) or count virtual networks under a project.

        project_id None lists across all projects.
        NOTE(review): when uuid conversion fails the error is only
        printed and project_uuid stays unbound, causing a NameError
        below -- confirm whether an early return was intended.
        """
        if project_id:
            try:
                project_uuid = str(uuid.UUID(project_id))
            except Exception:
                print "Error in converting uuid %s" % (project_id)
        else:
            project_uuid = None
        if count:
            ret_val = self._virtual_network_list(parent_id=project_uuid,
                                                 count=True)
        else:
            ret_val = self._virtual_network_list(parent_id=project_uuid,
                                                 detail=True)
        return ret_val
    #end _network_list_project
    # find router ids on a given project
    def _router_list_project(self, project_id=None):
        """List logical routers under a project (all projects when
        project_id is None); [] on a malformed project uuid."""
        if project_id:
            try:
                project_uuid = str(uuid.UUID(project_id))
            except Exception:
                print "Error in converting uuid %s" % (project_id)
                return []
        else:
            project_uuid = None
        resp_dict = self._vnc_lib.logical_routers_list(parent_id=project_uuid)
        return resp_dict['logical-routers']
    #end _router_list_project
    def _ipam_list_project(self, project_id):
        """List network IPAMs under the given project.

        NOTE(review): a malformed project_id only prints an error,
        leaving project_uuid unbound -> NameError on the next line;
        compare _router_list_project which returns [] instead.
        """
        try:
            project_uuid = str(uuid.UUID(project_id))
        except Exception:
            print "Error in converting uuid %s" % (project_id)
        resp_dict = self._vnc_lib.network_ipams_list(parent_id=project_uuid)
        return resp_dict['network-ipams']
    #end _ipam_list_project
    def _security_group_list_project(self, project_id):
        """List detailed security groups under a project (all projects
        when project_id is None).

        NOTE(review): on bad uuid only an error is printed and
        project_uuid stays unbound -> NameError below; confirm intent.
        """
        if project_id:
            try:
                project_uuid = str(uuid.UUID(project_id))
                # Trigger a project read to ensure project sync
                project_obj = self._project_read(proj_id=project_uuid)
            except Exception:
                print "Error in converting uuid %s" % (project_id)
        else:
            project_uuid = None
        sg_objs = self._vnc_lib.security_groups_list(parent_id=project_uuid,
                                                     detail=True)
        return sg_objs
    #end _security_group_list_project
    def _security_group_entries_list_sg(self, sg_id):
        """List the security group with uuid sg_id (summary form).

        NOTE(review): a malformed sg_id only prints an error, leaving
        sg_uuid unbound -> NameError on the next line.
        """
        try:
            sg_uuid = str(uuid.UUID(sg_id))
        except Exception:
            print "Error in converting SG uuid %s" % (sg_id)
        resp_dict = self._vnc_lib.security_groups_list(obj_uuids=[sg_uuid])
        return resp_dict['security-groups']
    #end _security_group_entries_list_sg
    def _route_table_list_project(self, project_id):
        """List route tables under the given project.

        NOTE(review): a malformed project_id only prints an error,
        leaving project_uuid unbound -> NameError on the next line.
        """
        try:
            project_uuid = str(uuid.UUID(project_id))
        except Exception:
            print "Error in converting uuid %s" % (project_id)
        resp_dict = self._vnc_lib.route_tables_list(parent_id=project_uuid)
        return resp_dict['route-tables']
    #end _route_table_list_project
def _svc_instance_list_project(self, project_id):
try:
project_uuid = str(uuid.UUID(project_id))
except Exception:
print "Error in converting uuid %s" % (project_id)
resp_dict = self._vnc_lib.service_instances_list(parent_id=project_id)
return resp_dict['service-instances']
#end _svc_instance_list_project
    def _policy_list_project(self, project_id):
        """List network policies under the given project.

        NOTE(review): a malformed project_id only prints an error,
        leaving project_uuid unbound -> NameError on the next line.
        """
        try:
            project_uuid = str(uuid.UUID(project_id))
        except Exception:
            print "Error in converting uuid %s" % (project_id)
        resp_dict = self._vnc_lib.network_policys_list(parent_id=project_uuid)
        return resp_dict['network-policys']
    #end _policy_list_project
    def _logical_router_read(self, rtr_id=None, fq_name=None):
        """Read a logical router by uuid or fq_name.

        Cache disabled (bare 'raise KeyError'); reads still re-seed
        'vnc_routers' under both keys.  Returns None when neither
        argument is supplied.
        """
        if rtr_id:
            try:
                # return self._db_cache['vnc_routers'][rtr_id]
                raise KeyError
            except KeyError:
                rtr_obj = self._vnc_lib.logical_router_read(id=rtr_id)
                fq_name_str = json.dumps(rtr_obj.get_fq_name())
                self._db_cache['vnc_routers'][rtr_id] = rtr_obj
                self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
                return rtr_obj
        if fq_name:
            fq_name_str = json.dumps(fq_name)
            try:
                # return self._db_cache['vnc_routers'][fq_name_str]
                raise KeyError
            except KeyError:
                rtr_obj = self._vnc_lib.logical_router_read(fq_name=fq_name)
                self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
                self._db_cache['vnc_routers'][rtr_obj.uuid] = rtr_obj
                return rtr_obj
    #end _logical_router_read
    def _logical_router_update(self, rtr_obj):
        """Persist rtr_obj and refresh its 'vnc_routers' cache entries."""
        self._vnc_lib.logical_router_update(rtr_obj)
        fq_name_str = json.dumps(rtr_obj.get_fq_name())
        self._db_cache['vnc_routers'][rtr_obj.uuid] = rtr_obj
        self._db_cache['vnc_routers'][fq_name_str] = rtr_obj
    #end _logical_router_update
    def _logical_router_delete(self, rtr_id):
        """Delete a logical router and evict its cache entries.

        RefsExistError maps to HTTP 409 RouterInUse.
        """
        fq_name_str = None
        try:
            rtr_obj = self._db_cache['vnc_routers'][rtr_id]
            fq_name_str = json.dumps(rtr_obj.get_fq_name())
        except KeyError:
            pass
        try:
            self._vnc_lib.logical_router_delete(id=rtr_id)
        except RefsExistError:
            self._raise_contrail_exception(409, RouterInUse(router_id=rtr_id))
        try:
            del self._db_cache['vnc_routers'][rtr_id]
            if fq_name_str:
                del self._db_cache['vnc_routers'][fq_name_str]
        except KeyError:
            pass
    #end _logical_router_delete
def _floatingip_list(self, back_ref_id=None):
return self._vnc_lib.floating_ips_list(back_ref_id=back_ref_id,
detail=True)
#end _floatingip_list
    # find floating ip pools a project has access to
    def _fip_pool_refs_project(self, project_id):
        """Return the project's floating-ip-pool refs (may be None)."""
        project_obj = self._project_read(proj_id=project_id)
        return project_obj.get_floating_ip_pool_refs()
    #end _fip_pool_refs_project
def _network_list_shared_and_ext(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if net.get_router_external() and net.get_is_shared():
ret_list.append(net)
return ret_list
# end _network_list_router_external
def _network_list_router_external(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if not net.get_router_external():
continue
ret_list.append(net)
return ret_list
# end _network_list_router_external
def _network_list_shared(self):
ret_list = []
nets = self._network_list_project(project_id=None)
for net in nets:
if not net.get_is_shared():
continue
ret_list.append(net)
return ret_list
# end _network_list_shared
    # find networks of floating ip pools project has access to
    def _fip_pool_ref_networks(self, project_id):
        """Return shared networks plus the networks backing every
        floating-ip pool the project references."""
        ret_net_objs = self._network_list_shared()
        proj_fip_pool_refs = self._fip_pool_refs_project(project_id)
        if not proj_fip_pool_refs:
            return ret_net_objs
        for fip_pool_ref in proj_fip_pool_refs:
            fip_uuid = fip_pool_ref['uuid']
            fip_pool_obj = self._vnc_lib.floating_ip_pool_read(id=fip_uuid)
            # the pool's parent is the network that owns it
            net_uuid = fip_pool_obj.parent_uuid
            net_obj = self._virtual_network_read(net_id=net_uuid)
            ret_net_objs.append(net_obj)
        return ret_net_objs
    #end _fip_pool_ref_networks
# find floating ip pools defined by network
def _fip_pool_list_network(self, net_id):
resp_dict = self._vnc_lib.floating_ip_pools_list(parent_id=net_id)
return resp_dict['floating-ip-pools']
#end _fip_pool_list_network
    def _port_list(self, net_objs, port_objs, iip_objs):
        """Convert VNC port objects to neutron port dicts.

        Builds a memo of networks, their subnet summaries, and
        instance-ips keyed by uuid so _port_vnc_to_neutron avoids
        per-port API reads.
        """
        ret_q_ports = []
        memo_req = {'networks': {},
                    'subnets': {},
                    'instance-ips': {}}
        for net_obj in net_objs:
            # dictionary of net_uuid to net_obj / subnet summaries
            memo_req['networks'][net_obj.uuid] = net_obj
            subnets_info = self._virtual_network_to_subnets(net_obj)
            memo_req['subnets'][net_obj.uuid] = subnets_info
        for iip_obj in iip_objs:
            # dictionary of iip_uuid to iip_obj
            memo_req['instance-ips'][iip_obj.uuid] = iip_obj
        for port_obj in port_objs:
            port_info = self._port_vnc_to_neutron(port_obj, memo_req)
            ret_q_ports.append(port_info)
        return ret_q_ports
    #end _port_list
    def _port_list_network(self, network_ids, count=False):
        """Return neutron port dicts for ports on the given networks.

        NOTE(review): the count parameter is accepted but never used
        here -- confirm whether callers rely on it.
        """
        ret_list = []
        net_objs = self._virtual_network_list(obj_uuids=network_ids,
                         fields=['virtual_machine_interface_back_refs'],
                         detail=True)
        if not net_objs:
            return ret_list
        net_ids = [net_obj.uuid for net_obj in net_objs]
        port_objs = self._virtual_machine_interface_list(back_ref_id=net_ids)
        iip_objs = self._instance_ip_list(back_ref_id=net_ids)
        return self._port_list(net_objs, port_objs, iip_objs)
    #end _port_list_network
    # find port ids on a given project
    def _port_list_project(self, project_id, count=False):
        """List (or count) ports belonging to a project.

        With list-optimization enabled, ports are listed directly by
        parent project; otherwise they are gathered via the project's
        networks' back-refs.  Note the two paths count differently:
        the optimized path counts VMI objects, the fallback counts
        network back-refs.
        """
        if self._list_optimization_enabled:
            port_objs = self._virtual_machine_interface_list(parent_id=project_id,
                                       fields=['instance_ip_back_refs'])
            if count:
                return len(port_objs)
            # all instance-ips are fetched since ports were not scoped
            # by network here
            iip_objs = self._instance_ip_list()
            return self._port_list([], port_objs, iip_objs)
        else:
            if count:
                ret_val = 0
            else:
                ret_val = []
            net_objs = self._virtual_network_list(project_id,
                              fields=['virtual_machine_interface_back_refs'],
                              detail=True)
            if not net_objs:
                return ret_val
            if count:
                for net_obj in net_objs:
                    port_back_refs = (
                        net_obj.get_virtual_machine_interface_back_refs() or [])
                    ret_val = ret_val + len(port_back_refs)
                return ret_val
            net_ids = [net_obj.uuid for net_obj in net_objs]
            port_objs = self._virtual_machine_interface_list(back_ref_id=net_ids)
            iip_objs = self._instance_ip_list(back_ref_id=net_ids)
            return self._port_list(net_objs, port_objs, iip_objs)
    #end _port_list_project
# Returns True if
# * no filter is specified
# OR
# * search-param is not present in filters
# OR
# * 1. search-param is present in filters AND
# 2. resource matches param-list AND
# 3. shared parameter in filters is False
def _filters_is_present(self, filters, key_name, match_value):
if filters:
if key_name in filters:
try:
if key_name == 'tenant_id':
filter_value = [str(uuid.UUID(t_id)) \
for t_id in filters[key_name]]
else:
filter_value = filters[key_name]
idx = filter_value.index(match_value)
except ValueError: # not in requested list
return False
return True
#end _filters_is_present
def _network_read(self, net_uuid):
net_obj = self._virtual_network_read(net_id=net_uuid)
return net_obj
#end _network_read
    def _subnet_vnc_create_mapping(self, subnet_id, subnet_key):
        """Persist the bidirectional subnet id <-> key mapping in the
        API server's KV store and mirror it into the local cache."""
        self._vnc_lib.kv_store(subnet_id, subnet_key)
        self._vnc_lib.kv_store(subnet_key, subnet_id)
        self._db_cache['q_subnet_maps'][subnet_id] = subnet_key
        self._db_cache['q_subnet_maps'][subnet_key] = subnet_id
    #end _subnet_vnc_create_mapping
    def _subnet_vnc_read_mapping(self, id=None, key=None):
        """Resolve a subnet id to its key, or a key to its id, from the
        KV store (caching the answer).

        An unknown id aborts with HTTP 404 SubnetNotFound; an unknown
        key lets NoIdError propagate to the caller (see
        _subnet_vnc_read_or_create_mapping).  Returns None when neither
        argument is given.
        """
        if id:
            try:
                subnet_key = self._vnc_lib.kv_retrieve(id)
                self._db_cache['q_subnet_maps'][id] = subnet_key
                return subnet_key
            except NoIdError:
                self._raise_contrail_exception(404, exceptions.SubnetNotFound(subnet_id=id))
        if key:
            subnet_id = self._vnc_lib.kv_retrieve(key)
            self._db_cache['q_subnet_maps'][key] = subnet_id
            return subnet_id
    #end _subnet_vnc_read_mapping
    def _subnet_vnc_read_or_create_mapping(self, id=None, key=None):
        """Like _subnet_vnc_read_mapping, but lazily mints a neutron
        subnet id for a key that has no mapping yet (subnet created
        outside of neutron)."""
        if id:
            return self._subnet_vnc_read_mapping(id=id)
        # if subnet was created outside of neutron handle it and create
        # neutron representation now (lazily)
        try:
            return self._subnet_vnc_read_mapping(key=key)
        except NoIdError:
            subnet_id = str(uuid.uuid4())
            self._subnet_vnc_create_mapping(subnet_id, key)
            return self._subnet_vnc_read_mapping(key=key)
    #end _subnet_vnc_read_or_create_mapping
    def _subnet_vnc_delete_mapping(self, subnet_id, subnet_key):
        """Remove both directions of the subnet id <-> key mapping from
        the KV store and the local cache (cache eviction best-effort)."""
        self._vnc_lib.kv_delete(subnet_id)
        self._vnc_lib.kv_delete(subnet_key)
        try:
            del self._db_cache['q_subnet_maps'][subnet_id]
            del self._db_cache['q_subnet_maps'][subnet_key]
        except KeyError:
            pass
    #end _subnet_vnc_delete_mapping
    def _subnet_vnc_get_key(self, subnet_vnc, net_id):
        """Build the canonical '<net_id> <network-addr>/<prefix-len>' KV
        key for a VNC subnet (IPNetwork normalizes the prefix)."""
        pfx = subnet_vnc.subnet.get_ip_prefix()
        pfx_len = subnet_vnc.subnet.get_ip_prefix_len()
        network = IPNetwork('%s/%s' % (pfx, pfx_len))
        return '%s %s/%s' % (net_id, str(network.ip), pfx_len)
    #end _subnet_vnc_get_key
    def _subnet_read(self, net_uuid, subnet_key):
        """Return the VNC subnet on network net_uuid whose canonical key
        matches subnet_key, or None if the network/subnet is missing."""
        try:
            net_obj = self._virtual_network_read(net_id=net_uuid)
        except NoIdError:
            return None
        ipam_refs = net_obj.get_network_ipam_refs()
        if not ipam_refs:
            return None
        # TODO scope for optimization
        for ipam_ref in ipam_refs:
            subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
            for subnet_vnc in subnet_vncs:
                if self._subnet_vnc_get_key(subnet_vnc,
                                            net_uuid) == subnet_key:
                    return subnet_vnc
        return None
    #end _subnet_read
    def _ip_address_to_subnet_id(self, ip_addr, net_obj):
        """Return the neutron subnet id whose CIDR contains ip_addr on
        net_obj, minting the mapping lazily; None if no subnet matches."""
        # find subnet-id for ip-addr, called when instance-ip created
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnet_vncs:
                    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                                      subnet_vnc.subnet.get_ip_prefix_len())
                    if IPAddress(ip_addr) in IPSet([cidr]):
                        subnet_key = self._subnet_vnc_get_key(subnet_vnc,
                                                              net_obj.uuid)
                        subnet_id = self._subnet_vnc_read_or_create_mapping(
                            key=subnet_key)
                        return subnet_id
        return None
    #end _ip_address_to_subnet_id
    # Returns a list of dicts of subnet-id:cidr for a VN
    def _virtual_network_to_subnets(self, net_obj):
        """Summarize net_obj's subnets as [{'id': ..., 'cidr': ...}],
        minting id mappings lazily for subnets unknown to neutron."""
        ret_subnets = []
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                for subnet_vnc in subnet_vncs:
                    subnet_key = self._subnet_vnc_get_key(subnet_vnc,
                                                          net_obj.uuid)
                    subnet_id = self._subnet_vnc_read_or_create_mapping(
                        key=subnet_key)
                    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                                      subnet_vnc.subnet.get_ip_prefix_len())
                    ret_subnets.append({'id': subnet_id, 'cidr': cidr})
        return ret_subnets
    # end _virtual_network_to_subnets
    # Conversion routines between VNC and Quantum objects
    def _svc_instance_neutron_to_vnc(self, si_q, oper):
        """Build a VNC ServiceInstance from a neutron SI dict.

        Only the CREATE path is implemented; other opers fall through
        and return None.  The instance is an auto-policy NAT with the
        given external network on its right interface and single-VM
        scale-out.
        """
        if oper == CREATE:
            project_id = str(uuid.UUID(si_q['tenant_id']))
            project_obj = self._project_read(proj_id=project_id)
            net_id = si_q['external_net']
            ext_vn = self._vnc_lib.virtual_network_read(id=net_id)
            scale_out = ServiceScaleOutType(max_instances=1, auto_scale=False)
            si_prop = ServiceInstanceType(
                      auto_policy=True,
                      left_virtual_network="",
                      right_virtual_network=ext_vn.get_fq_name_str(),
                      scale_out=scale_out)
            si_prop.set_scale_out(scale_out)
            si_vnc = ServiceInstance(name=si_q['name'],
                                     parent_obj=project_obj,
                                     service_instance_properties=si_prop)
            return si_vnc
    #end _svc_instance_neutron_to_vnc
    def _svc_instance_vnc_to_neutron(self, si_obj):
        """Convert a VNC ServiceInstance into a neutron-style dict,
        resolving the right virtual network into 'external_net'."""
        si_q_dict = self._obj_to_dict(si_obj)
        # replace field names
        si_q_dict['id'] = si_obj.uuid
        si_q_dict['tenant_id'] = si_obj.parent_uuid.replace('-', '')
        si_q_dict['name'] = si_obj.name
        si_props = si_obj.get_service_instance_properties()
        if si_props:
            vn_fq_name = si_props.get_right_virtual_network()
            vn_obj = self._vnc_lib.virtual_network_read(fq_name_str=vn_fq_name)
            si_q_dict['external_net'] = str(vn_obj.uuid) + ' ' + vn_obj.name
            si_q_dict['internal_net'] = ''
        return si_q_dict
    #end _svc_instance_vnc_to_neutron
    def _route_table_neutron_to_vnc(self, rt_q, oper):
        """Build/refresh a VNC RouteTable from a neutron dict.

        For each route whose next_hop is a virtual-machine uuid, the
        hop is rewritten to the VM's first service-instance fq_name
        (lookup failures are silently ignored and the hop left as-is).
        CREATE constructs a new table; any other oper reads rt_q['id']
        and replaces its routes.
        """
        if oper == CREATE:
            project_id = str(uuid.UUID(rt_q['tenant_id']))
            project_obj = self._project_read(proj_id=project_id)
            rt_vnc = RouteTable(name=rt_q['name'],
                                parent_obj=project_obj)
            if not rt_q['routes']:
                return rt_vnc
            for route in rt_q['routes']['route']:
                try:
                    vm_obj = self._vnc_lib.virtual_machine_read(
                        id=route['next_hop'])
                    si_list = vm_obj.get_service_instance_refs()
                    if si_list:
                        fq_name = si_list[0]['to']
                        si_obj = self._vnc_lib.service_instance_read(
                            fq_name=fq_name)
                        route['next_hop'] = si_obj.get_fq_name_str()
                except Exception as e:
                    # best-effort: keep the original next_hop value
                    pass
            rt_vnc.set_routes(RouteTableType.factory(**rt_q['routes']))
        else:
            rt_vnc = self._vnc_lib.route_table_read(id=rt_q['id'])
            # same next-hop resolution as the CREATE path above
            for route in rt_q['routes']['route']:
                try:
                    vm_obj = self._vnc_lib.virtual_machine_read(
                        id=route['next_hop'])
                    si_list = vm_obj.get_service_instance_refs()
                    if si_list:
                        fq_name = si_list[0]['to']
                        si_obj = self._vnc_lib.service_instance_read(
                            fq_name=fq_name)
                        route['next_hop'] = si_obj.get_fq_name_str()
                except Exception as e:
                    pass
            rt_vnc.set_routes(RouteTableType.factory(**rt_q['routes']))
        return rt_vnc
    #end _route_table_neutron_to_vnc
    def _route_table_vnc_to_neutron(self, rt_obj):
        """Convert a VNC RouteTable into a neutron-style dict.

        Routes with a next_hop_type get their next_hop replaced by that
        type string.
        """
        rt_q_dict = self._obj_to_dict(rt_obj)
        # replace field names
        rt_q_dict['id'] = rt_obj.uuid
        rt_q_dict['tenant_id'] = rt_obj.parent_uuid.replace('-', '')
        rt_q_dict['name'] = rt_obj.name
        rt_q_dict['fq_name'] = rt_obj.fq_name
        # get route table routes
        rt_q_dict['routes'] = rt_q_dict.pop('routes', None)
        if rt_q_dict['routes']:
            for route in rt_q_dict['routes']['route']:
                if route['next_hop_type']:
                    route['next_hop'] = route['next_hop_type']
        return rt_q_dict
    #end _route_table_vnc_to_neutron
    def _security_group_vnc_to_neutron(self, sg_obj):
        """Convert a VNC SecurityGroup into a neutron-style dict,
        inlining its rules; contrail extension fields are included only
        when extensions are enabled."""
        sg_q_dict = {}
        extra_dict = {}
        extra_dict['contrail:fq_name'] = sg_obj.get_fq_name()
        # replace field names
        sg_q_dict['id'] = sg_obj.uuid
        sg_q_dict['tenant_id'] = sg_obj.parent_uuid.replace('-', '')
        if not sg_obj.display_name:
            # for security groups created directly via vnc_api
            sg_q_dict['name'] = sg_obj.get_fq_name()[-1]
        else:
            sg_q_dict['name'] = sg_obj.display_name
        sg_q_dict['description'] = sg_obj.get_id_perms().get_description()
        # get security group rules
        sg_q_dict['security_group_rules'] = []
        rule_list = self.security_group_rules_read(sg_obj.uuid, sg_obj)
        if rule_list:
            for rule in rule_list:
                sg_q_dict['security_group_rules'].append(rule)
        if self._contrail_extensions_enabled:
            sg_q_dict.update(extra_dict)
        return sg_q_dict
    #end _security_group_vnc_to_neutron
    def _security_group_neutron_to_vnc(self, sg_q, oper):
        """Build (CREATE) or refresh (other opers) a VNC SecurityGroup
        from a neutron dict.

        CREATE retries the project read up to 10 times with 2s gevent
        sleeps to wait out keystone->vnc project sync.
        NOTE(review): if the project never appears, project_obj may be
        None (or sleep runs after the last attempt) -- confirm intended.
        """
        if oper == CREATE:
            project_id = str(uuid.UUID(sg_q['tenant_id']))
            def project_read(id):
                """Return the project or None instead of raising NoIdError."""
                try:
                    return self._project_read(proj_id=id)
                except NoIdError:
                    return None
            for i in range(10):
                project_obj = project_read(project_id)
                if project_obj:
                    break
                gevent.sleep(2)
            id_perms = IdPermsType(enable=True,
                                   description=sg_q.get('description'))
            sg_vnc = SecurityGroup(name=sg_q['name'],
                                   parent_obj=project_obj,
                                   id_perms=id_perms)
        else:
            sg_vnc = self._vnc_lib.security_group_read(id=sg_q['id'])
            if 'name' in sg_q and sg_q['name']:
                sg_vnc.display_name = sg_q['name']
            if 'description' in sg_q:
                id_perms = sg_vnc.get_id_perms()
                id_perms.set_description(sg_q['description'])
                sg_vnc.set_id_perms(id_perms)
        return sg_vnc
    #end _security_group_neutron_to_vnc
    def _security_group_rule_vnc_to_neutron(self, sg_id, sg_rule, sg_obj=None):
        """Convert a VNC policy rule into a neutron security-group-rule
        dict.

        Direction is derived from which endpoint is the 'local' group:
        local source => egress, local destination => ingress; anything
        else aborts 404.  The non-local endpoint yields either a
        remote_ip_prefix (subnet) or a remote_group_id.  Ethertype is
        hard-coded to IPv4.
        """
        sgr_q_dict = {}
        if sg_id == None:
            return sgr_q_dict
        if not sg_obj:
            try:
                sg_obj = self._vnc_lib.security_group_read(id=sg_id)
            except NoIdError:
                self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))
        remote_cidr = None
        remote_sg_uuid = None
        saddr = sg_rule.get_src_addresses()[0]
        daddr = sg_rule.get_dst_addresses()[0]
        if saddr.get_security_group() == 'local':
            direction = 'egress'
            addr = daddr
        elif daddr.get_security_group() == 'local':
            direction = 'ingress'
            addr = saddr
        else:
            self._raise_contrail_exception(404, SecurityGroupRuleNotFound(id=sg_rule.get_rule_uuid()))
        if addr.get_subnet():
            remote_cidr = '%s/%s' % (addr.get_subnet().get_ip_prefix(),
                                     addr.get_subnet().get_ip_prefix_len())
        elif addr.get_security_group():
            if addr.get_security_group() != 'any' and \
                addr.get_security_group() != 'local':
                remote_sg = addr.get_security_group()
                try:
                    if remote_sg != ':'.join(sg_obj.get_fq_name()):
                        remote_sg_obj = self._vnc_lib.security_group_read(fq_name_str=remote_sg)
                    else:
                        # rule references its own group
                        remote_sg_obj = sg_obj
                    remote_sg_uuid = remote_sg_obj.uuid
                except NoIdError:
                    # remote group vanished: leave remote_group_id None
                    pass
        sgr_q_dict['id'] = sg_rule.get_rule_uuid()
        sgr_q_dict['tenant_id'] = sg_obj.parent_uuid.replace('-', '')
        sgr_q_dict['security_group_id'] = sg_obj.uuid
        sgr_q_dict['ethertype'] = 'IPv4'
        sgr_q_dict['direction'] = direction
        sgr_q_dict['protocol'] = sg_rule.get_protocol()
        sgr_q_dict['port_range_min'] = sg_rule.get_dst_ports()[0].\
            get_start_port()
        sgr_q_dict['port_range_max'] = sg_rule.get_dst_ports()[0].\
            get_end_port()
        sgr_q_dict['remote_ip_prefix'] = remote_cidr
        sgr_q_dict['remote_group_id'] = remote_sg_uuid
        return sgr_q_dict
    #end _security_group_rule_vnc_to_neutron
    def _security_group_rule_neutron_to_vnc(self, sgr_q, oper):
        """Build a VNC PolicyRuleType from a neutron security-group-rule
        dict (CREATE only; other opers return None).

        The remote endpoint is, in priority order: a subnet from
        remote_ip_prefix, the fq_name of remote_group_id, or 'any'.
        For ingress the remote endpoint becomes the rule's source, for
        egress its destination; the 'local' group fills the other side.
        NOTE(review): both branches set dir to '>' -- confirm that is
        the intended encoding for both directions.
        """
        if oper == CREATE:
            # default to the full port range when min/max are falsy
            port_min = 0
            port_max = 65535
            if sgr_q['port_range_min']:
                port_min = sgr_q['port_range_min']
            if sgr_q['port_range_max']:
                port_max = sgr_q['port_range_max']
            endpt = [AddressType(security_group='any')]
            if sgr_q['remote_ip_prefix']:
                cidr = sgr_q['remote_ip_prefix'].split('/')
                pfx = cidr[0]
                pfx_len = int(cidr[1])
                endpt = [AddressType(subnet=SubnetType(pfx, pfx_len))]
            elif sgr_q['remote_group_id']:
                sg_obj = self._vnc_lib.security_group_read(
                    id=sgr_q['remote_group_id'])
                endpt = [AddressType(security_group=sg_obj.get_fq_name_str())]
            if sgr_q['direction'] == 'ingress':
                dir = '>'
                local = endpt
                remote = [AddressType(security_group='local')]
            else:
                dir = '>'
                remote = endpt
                local = [AddressType(security_group='local')]
            if not sgr_q['protocol']:
                sgr_q['protocol'] = 'any'
            sgr_uuid = str(uuid.uuid4())
            rule = PolicyRuleType(rule_uuid=sgr_uuid, direction=dir,
                                  protocol=sgr_q['protocol'],
                                  src_addresses=local,
                                  src_ports=[PortType(0, 65535)],
                                  dst_addresses=remote,
                                  dst_ports=[PortType(port_min, port_max)])
            return rule
    #end _security_group_rule_neutron_to_vnc
    def _network_neutron_to_vnc(self, network_q, oper):
        """Build (CREATE) or refresh (READ/UPDATE/DELETE) a VNC
        VirtualNetwork from a neutron network dict.

        CREATE waits up to 10 tries (2s gevent sleeps) for the parent
        project to sync from keystone, then sets router:external and
        shared flags (defaulting both to False).  UPDATE additionally
        applies name, admin_state_up (via id_perms.enable), the
        contrail:policys extension list, and the vpc:route_table ref.
        """
        net_name = network_q.get('name', None)
        try:
            external_attr = network_q['router:external']
        except KeyError:
            external_attr = attr.ATTR_NOT_SPECIFIED
        if oper == CREATE:
            project_id = str(uuid.UUID(network_q['tenant_id']))
            def project_read(id):
                """Return the project or None instead of raising NoIdError."""
                try:
                    return self._project_read(proj_id=id)
                except NoIdError:
                    return None
            for i in range(10):
                project_obj = project_read(project_id)
                if project_obj:
                    break
                gevent.sleep(2)
            id_perms = IdPermsType(enable=True)
            net_obj = VirtualNetwork(net_name, project_obj, id_perms=id_perms)
            if external_attr == attr.ATTR_NOT_SPECIFIED:
                net_obj.router_external = False
            else:
                net_obj.router_external = external_attr
            if 'shared' in network_q:
                net_obj.is_shared = network_q['shared']
            else:
                net_obj.is_shared = False
        else:  # READ/UPDATE/DELETE
            net_obj = self._virtual_network_read(net_id=network_q['id'])
            if oper == UPDATE:
                if 'shared' in network_q:
                    net_obj.is_shared = network_q['shared']
                if external_attr is not attr.ATTR_NOT_SPECIFIED:
                    net_obj.router_external = external_attr
        if 'name' in network_q and network_q['name']:
            net_obj.display_name = network_q['name']
        id_perms = net_obj.get_id_perms()
        if 'admin_state_up' in network_q:
            id_perms.enable = network_q['admin_state_up']
            net_obj.set_id_perms(id_perms)
        if 'contrail:policys' in network_q:
            policy_fq_names = network_q['contrail:policys']
            # reset and add with newly specified list
            net_obj.set_network_policy_list([], [])
            seq = 0
            for p_fq_name in policy_fq_names:
                domain_name, project_name, policy_name = p_fq_name
                domain_obj = Domain(domain_name)
                project_obj = Project(project_name, domain_obj)
                policy_obj = NetworkPolicy(policy_name, project_obj)
                net_obj.add_network_policy(policy_obj,
                                           VirtualNetworkPolicyType(
                                           sequence=SequenceType(seq, 0)))
                seq = seq + 1
        if 'vpc:route_table' in network_q:
            rt_fq_name = network_q['vpc:route_table']
            if rt_fq_name:
                try:
                    rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
                    net_obj.set_route_table(rt_obj)
                except NoIdError:
                    # TODO add route table specific exception
                    self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=net_obj.uuid))
        return net_obj
    #end _network_neutron_to_vnc
def _network_vnc_to_neutron(self, net_obj, net_repr='SHOW'):
    """Convert a VNC VirtualNetwork object into a neutron network dict.

    :param net_obj: VNC VirtualNetwork instance.
    :param net_repr: 'SHOW' or 'LIST'; either adds the contrail
        instance-count extension attribute.
    :returns: neutron-style network dict; contrail extension keys are
        merged in only when contrail extensions are enabled.
    """
    id_perms = net_obj.get_id_perms()
    perms = id_perms.permissions  # attribute read kept from original; value unused
    fq_name = net_obj.get_fq_name()

    net_q_dict = {
        'id': net_obj.uuid,
        # nets created directly via vnc_api may have no display name
        'name': net_obj.display_name or fq_name[-1],
        'tenant_id': net_obj.parent_uuid.replace('-', ''),
        'admin_state_up': id_perms.enable,
        'shared': bool(net_obj.is_shared),
        'status': (constants.NET_STATUS_ACTIVE if id_perms.enable
                   else constants.NET_STATUS_DOWN),
        'router:external': bool(net_obj.router_external),
        'subnets': [],
    }
    extra_dict = {'contrail:fq_name': fq_name}

    if net_repr in ('SHOW', 'LIST'):
        extra_dict['contrail:instance_count'] = 0

    policy_refs = net_obj.get_network_policy_refs()
    if policy_refs:
        # order attached policies by their (major, minor) sequence number
        by_sequence = sorted(
            policy_refs,
            key=lambda ref: (ref['attr'].sequence.major,
                             ref['attr'].sequence.minor))
        extra_dict['contrail:policys'] = [ref['to'] for ref in by_sequence]

    rt_refs = net_obj.get_route_table_refs()
    if rt_refs:
        extra_dict['vpc:route_table'] = [ref['to'] for ref in rt_refs]

    ipam_refs = net_obj.get_network_ipam_refs()
    if ipam_refs:
        extra_dict['contrail:subnet_ipam'] = []
        for ipam_ref in ipam_refs:
            for subnet in ipam_ref['attr'].get_ipam_subnets():
                sn_dict = self._subnet_vnc_to_neutron(subnet, net_obj,
                                                      ipam_ref['to'])
                net_q_dict['subnets'].append(sn_dict['id'])
                extra_dict['contrail:subnet_ipam'].append(
                    {'subnet_cidr': sn_dict['cidr'],
                     'ipam_fq_name': ipam_ref['to']})

    if self._contrail_extensions_enabled:
        net_q_dict.update(extra_dict)
    return net_q_dict
#end _network_vnc_to_neutron
def _subnet_neutron_to_vnc(self, subnet_q):
    """Build a VNC IpamSubnetType from a neutron subnet request dict.

    Only IPv4 is accepted; an IPv6 cidr aborts the request with HTTP 400.
    Optional fields default as follows: gateway -> first usable address,
    allocation pools -> None (assigned by the address manager),
    enable_dhcp -> None.
    """
    cidr = IPNetwork(subnet_q['cidr'])
    if cidr.version != 4:
        exc_info = {'type': 'BadRequest',
                    'message': "Bad subnet request: IPv6 is not supported"}
        bottle.abort(400, json.dumps(exc_info))
    pfx = str(cidr.network)
    pfx_len = int(cidr.prefixlen)

    # gateway defaults to the first address after the network address
    default_gw = subnet_q.get('gateway_ip', str(IPAddress(cidr.first + 1)))

    # None -> pools get assigned by the address manager
    alloc_pools = subnet_q.get('allocation_pools')

    # DNS servers travel as DHCP option '6'
    dhcp_option_list = None
    dns_servers = subnet_q.get('dns_nameservers')
    if dns_servers:
        options = [DhcpOptionType(dhcp_option_name='6',
                                  dhcp_option_value=srv)
                   for srv in dns_servers]
        if options:
            dhcp_option_list = DhcpOptionsListType(options)

    host_route_list = None
    routes_q = subnet_q.get('host_routes')
    if routes_q:
        routes = [RouteType(prefix=route['destination'],
                            next_hop=route['nexthop'])
                  for route in routes_q]
        if routes:
            host_route_list = RouteTableType(routes)

    return IpamSubnetType(subnet=SubnetType(pfx, pfx_len),
                          default_gateway=default_gw,
                          enable_dhcp=subnet_q.get('enable_dhcp'),
                          dns_nameservers=None,
                          allocation_pools=alloc_pools,
                          addr_from_start=True,
                          dhcp_option_list=dhcp_option_list,
                          host_routes=host_route_list,
                          subnet_name=subnet_q.get('name'))
#end _subnet_neutron_to_vnc
def _subnet_vnc_to_neutron(self, subnet_vnc, net_obj, ipam_fq_name):
    """Convert a VNC IpamSubnetType into a neutron subnet dict.

    :param subnet_vnc: IpamSubnetType to convert.
    :param net_obj: owning VirtualNetwork (supplies tenant/network id
        and the shared flag).
    :param ipam_fq_name: fq_name of the ipam the subnet is linked under
        (unused here, kept for signature compatibility).
    :returns: neutron-style subnet dict.
    """
    sn_q_dict = {}
    sn_name = subnet_vnc.get_subnet_name()
    sn_q_dict['name'] = sn_name if sn_name is not None else ''
    sn_q_dict['tenant_id'] = net_obj.parent_uuid.replace('-', '')
    sn_q_dict['network_id'] = net_obj.uuid
    sn_q_dict['ip_version'] = 4  # TODO ipv6?
    sn_q_dict['ipv6_ra_mode'] = None
    sn_q_dict['ipv6_address_mode'] = None

    cidr = '%s/%s' % (subnet_vnc.subnet.get_ip_prefix(),
                      subnet_vnc.subnet.get_ip_prefix_len())
    sn_q_dict['cidr'] = cidr

    # subnet ids are allocated lazily and kept in an api-server mapping
    subnet_key = self._subnet_vnc_get_key(subnet_vnc, net_obj.uuid)
    sn_id = self._subnet_vnc_read_or_create_mapping(key=subnet_key)
    sn_q_dict['id'] = sn_id

    sn_q_dict['gateway_ip'] = subnet_vnc.default_gateway

    # BUG FIX: get_allocation_pools() may return None; iterating it
    # directly raised TypeError.
    alloc_obj_list = subnet_vnc.get_allocation_pools() or []
    allocation_pools = [{'first_ip': alloc_obj.get_start(),
                         'last_ip': alloc_obj.get_end()}
                        for alloc_obj in alloc_obj_list]
    if not allocation_pools:
        # No explicit pools: synthesize one spanning the usable range,
        # skipping the first host address when the gateway sits on it.
        if (int(IPNetwork(sn_q_dict['gateway_ip']).network) ==
                int(IPNetwork(cidr).network + 1)):
            first_ip = str(IPNetwork(cidr).network + 2)
        else:
            first_ip = str(IPNetwork(cidr).network + 1)
        last_ip = str(IPNetwork(cidr).broadcast - 1)
        allocation_pools.append({'first_ip': first_ip, 'last_ip': last_ip})
    sn_q_dict['allocation_pools'] = allocation_pools

    sn_q_dict['enable_dhcp'] = subnet_vnc.get_enable_dhcp()

    # DNS servers are stored as DHCP option '6'
    nameserver_dict_list = []
    dhcp_option_list = subnet_vnc.get_dhcp_option_list()
    if dhcp_option_list:
        for dhcp_option in dhcp_option_list.dhcp_option:
            if dhcp_option.get_dhcp_option_name() == '6':
                nameserver_dict_list.append(
                    {'address': dhcp_option.get_dhcp_option_value(),
                     'subnet_id': sn_id})
    sn_q_dict['dns_nameservers'] = nameserver_dict_list

    host_route_dict_list = []
    host_routes = subnet_vnc.get_host_routes()
    if host_routes:
        for host_route in host_routes.route:
            host_route_dict_list.append(
                {'destination': host_route.get_prefix(),
                 'nexthop': host_route.get_next_hop(),
                 'subnet_id': sn_id})
    sn_q_dict['routes'] = host_route_dict_list

    sn_q_dict['shared'] = bool(net_obj.is_shared)
    return sn_q_dict
#end _subnet_vnc_to_neutron
def _ipam_neutron_to_vnc(self, ipam_q, oper):
    """Convert a neutron IPAM request dict into a VNC NetworkIpam.

    :param ipam_q: request dict; 'tenant_id' is required for CREATE,
        'id' for the other operations; 'mgmt' is optional.
    :param oper: CREATE or READ/UPDATE/DELETE.
    :returns: NetworkIpam (new for CREATE, read back otherwise) with
        the mgmt section applied when supplied.
    """
    ipam_name = ipam_q.get('name', None)
    if oper == CREATE:
        project_id = str(uuid.UUID(ipam_q['tenant_id']))
        project_obj = self._project_read(proj_id=project_id)
        ipam_obj = NetworkIpam(ipam_name, project_obj)
    else:  # READ/UPDATE/DELETE
        ipam_obj = self._vnc_lib.network_ipam_read(id=ipam_q['id'])
    options_vnc = DhcpOptionsListType()
    # BUG FIX: use .get() — a request without a 'mgmt' key used to
    # raise KeyError here (sibling converters use .get for optionals).
    if ipam_q.get('mgmt'):
        #for opt_q in ipam_q['mgmt'].get('options', []):
        #    options_vnc.add_dhcp_option(DhcpOptionType(opt_q['option'],
        #                                               opt_q['value']))
        #ipam_mgmt_vnc = IpamType.factory(
        #                    ipam_method = ipam_q['mgmt']['method'],
        #                    dhcp_option_list = options_vnc)
        ipam_obj.set_network_ipam_mgmt(IpamType.factory(**ipam_q['mgmt']))
    return ipam_obj
#end _ipam_neutron_to_vnc
def _ipam_vnc_to_neutron(self, ipam_obj):
ipam_q_dict = self._obj_to_dict(ipam_obj)
# replace field names
ipam_q_dict['id'] = ipam_q_dict.pop('uuid')
ipam_q_dict['name'] = ipam_obj.name
ipam_q_dict['tenant_id'] = ipam_obj.parent_uuid.replace('-', '')
ipam_q_dict['mgmt'] = ipam_q_dict.pop('network_ipam_mgmt', None)
net_back_refs = ipam_q_dict.pop('virtual_network_back_refs', None)
if net_back_refs:
ipam_q_dict['nets_using'] = []
for net_back_ref in net_back_refs:
net_fq_name = net_back_ref['to']
ipam_q_dict['nets_using'].append(net_fq_name)
return ipam_q_dict
#end _ipam_vnc_to_neutron
def _policy_neutron_to_vnc(self, policy_q, oper):
    """Convert a neutron policy request dict into a VNC NetworkPolicy.

    For CREATE a new policy is built under the request's project;
    otherwise the existing policy is read back by id. The policy
    entries are (re)set from the request in both cases.
    """
    policy_name = policy_q.get('name', None)
    if oper == CREATE:
        project_id = str(uuid.UUID(policy_q['tenant_id']))
        project_obj = self._project_read(proj_id=project_id)
        policy_obj = NetworkPolicy(policy_name, project_obj)
    else:  # READ/UPDATE/DELETE
        policy_obj = self._vnc_lib.network_policy_read(id=policy_q['id'])
    # NOTE(review): 'entries' is accessed unconditionally — a request
    # without it raises KeyError; confirm callers always supply it.
    policy_obj.set_network_policy_entries(
        PolicyEntriesType.factory(**policy_q['entries']))
    return policy_obj
#end _policy_neutron_to_vnc
def _policy_vnc_to_neutron(self, policy_obj):
policy_q_dict = self._obj_to_dict(policy_obj)
# replace field names
policy_q_dict['id'] = policy_q_dict.pop('uuid')
policy_q_dict['name'] = policy_obj.name
policy_q_dict['tenant_id'] = policy_obj.parent_uuid.replace('-', '')
policy_q_dict['entries'] = policy_q_dict.pop('network_policy_entries',
None)
net_back_refs = policy_obj.get_virtual_network_back_refs()
if net_back_refs:
policy_q_dict['nets_using'] = []
for net_back_ref in net_back_refs:
net_fq_name = net_back_ref['to']
policy_q_dict['nets_using'].append(net_fq_name)
return policy_q_dict
#end _policy_vnc_to_neutron
def _router_neutron_to_vnc(self, router_q, oper):
    """Convert a neutron router request dict into a VNC LogicalRouter.

    On CREATE the owning project is polled for (it may lag behind
    keystone), then a new LogicalRouter is built under it; otherwise the
    existing router is read back. admin_state_up and name are applied
    from the request when present.
    """
    rtr_name = router_q.get('name', None)
    if oper == CREATE:
        project_id = str(uuid.UUID(router_q['tenant_id']))
        def project_read(id):
            # return None instead of raising so the retry loop below
            # can keep polling for the project to appear
            try:
                return self._project_read(proj_id=id)
            except NoIdError:
                return None
        # poll up to 10 times, 2s apart, for the project to be synced
        for i in range(10):
            project_obj = project_read(project_id)
            if project_obj:
                break
            gevent.sleep(2)
        # NOTE(review): if the project never appears, project_obj is
        # None here and the router gets a None parent — confirm intended.
        id_perms = IdPermsType(enable=True)
        rtr_obj = LogicalRouter(rtr_name, project_obj, id_perms=id_perms)
    else:  # READ/UPDATE/DELETE
        rtr_obj = self._logical_router_read(rtr_id=router_q['id'])
    id_perms = rtr_obj.get_id_perms()
    if 'admin_state_up' in router_q:
        id_perms.enable = router_q['admin_state_up']
        rtr_obj.set_id_perms(id_perms)
    if 'name' in router_q and router_q['name']:
        rtr_obj.display_name = router_q['name']
    return rtr_obj
#end _router_neutron_to_vnc
def _router_vnc_to_neutron(self, rtr_obj, rtr_repr='SHOW'):
    """Convert a VNC LogicalRouter object into a neutron router dict.

    :param rtr_obj: LogicalRouter to convert.
    :param rtr_repr: kept for signature compatibility; not used.
    """
    fq_name = rtr_obj.get_fq_name()
    rtr_q_dict = {
        'id': rtr_obj.uuid,
        # routers created directly via vnc_api may have no display name
        'name': rtr_obj.display_name or fq_name[-1],
        'tenant_id': rtr_obj.parent_uuid.replace('-', ''),
        'admin_state_up': rtr_obj.get_id_perms().enable,
        'shared': False,
        'status': constants.NET_STATUS_ACTIVE,
        'gw_port_id': None,
        'external_gateway_info': None,
    }
    vn_refs = rtr_obj.get_virtual_network_refs()
    if vn_refs:
        # the first linked network acts as the external gateway
        rtr_q_dict['external_gateway_info'] = {
            'network_id': vn_refs[0]['uuid']}
    if self._contrail_extensions_enabled:
        rtr_q_dict.update({'contrail:fq_name': fq_name})
    return rtr_q_dict
#end _router_vnc_to_neutron
def _floatingip_neutron_to_vnc(self, fip_q, oper):
    """Convert a neutron floating-ip request dict into a VNC FloatingIp.

    On CREATE the ip is allocated out of the network's first (default)
    floating-ip pool; otherwise the existing FloatingIp is read back.
    port_id / fixed_ip_address from the request control (dis)association.
    """
    if oper == CREATE:
        # TODO for now create from default pool, later
        # use first available pool on net
        net_id = fip_q['floating_network_id']
        try:
            fq_name = self._fip_pool_list_network(net_id)[0]['fq_name']
        except IndexError:
            # IndexError could happens when an attempt to
            # retrieve a floating ip pool from a private network.
            # NOTE(review): the %s placeholder is never interpolated —
            # net_id is passed as a second Exception arg; confirm wanted.
            self._raise_contrail_exception(404, Exception(
                "Network %s doesn't provide a floatingip pool", net_id))
        fip_pool_obj = self._vnc_lib.floating_ip_pool_read(fq_name=fq_name)
        fip_name = str(uuid.uuid4())
        fip_obj = FloatingIp(fip_name, fip_pool_obj)
        fip_obj.uuid = fip_name  # name and uuid deliberately identical
        proj_id = str(uuid.UUID(fip_q['tenant_id']))
        proj_obj = self._project_read(proj_id=proj_id)
        fip_obj.set_project(proj_obj)
    else:  # READ/UPDATE/DELETE
        fip_obj = self._vnc_lib.floating_ip_read(id=fip_q['id'])
    # associate with a port, or clear the association entirely
    if fip_q.get('port_id'):
        port_obj = self._virtual_machine_interface_read(
            port_id=fip_q['port_id'])
        fip_obj.set_virtual_machine_interface(port_obj)
    else:
        fip_obj.set_virtual_machine_interface_list([])
    if fip_q.get('fixed_ip_address'):
        fip_obj.set_floating_ip_fixed_ip_address(fip_q['fixed_ip_address'])
    else:
        # fixed_ip_address not specified, pick from port_obj in create,
        # reset in case of disassociate
        port_refs = fip_obj.get_virtual_machine_interface_refs()
        if not port_refs:
            fip_obj.set_floating_ip_fixed_ip_address(None)
        else:
            port_obj = self._virtual_machine_interface_read(
                port_id=port_refs[0]['uuid'],
                fields=['instance_ip_back_refs'])
            iip_refs = port_obj.get_instance_ip_back_refs()
            if iip_refs:
                # use the port's first instance-ip as the fixed address
                iip_obj = self._instance_ip_read(
                    instance_ip_id=iip_refs[0]['uuid'])
                fip_obj.set_floating_ip_fixed_ip_address(
                    iip_obj.get_instance_ip_address())
    return fip_obj
#end _floatingip_neutron_to_vnc
def _floatingip_vnc_to_neutron(self, fip_obj):
    """Convert a VNC FloatingIp object into a neutron floating-ip dict.

    Derives the floating network from the FloatingIp's fq_name (the ip
    lives two levels under its network) and, when associated with a
    port, walks the port's network to find a router attached to it.
    """
    fip_q_dict = {}
    # fq_name is <...>/<network>/<pool>/<fip>; strip pool+fip to get net
    floating_net_id = self._vnc_lib.fq_name_to_id(
        'virtual-network', fip_obj.get_fq_name()[:-2])
    tenant_id = fip_obj.get_project_refs()[0]['uuid'].replace('-', '')
    port_id = None
    fixed_ip = None
    router_id = None
    port_refs = fip_obj.get_virtual_machine_interface_refs()
    if port_refs:
        port_id = port_refs[0]['uuid']
        port_obj = self._virtual_machine_interface_read(
            port_id=port_id, fields=['instance_ip_back_refs'])
        # find router_id from port: scan every port on the associated
        # port's network for one that belongs to a logical router
        internal_net_obj = self._virtual_network_read(
            net_id=port_obj.get_virtual_network_refs()[0]['uuid'])
        net_port_objs = [
            self._virtual_machine_interface_read(port_id=port['uuid'])
            for port in
            internal_net_obj.get_virtual_machine_interface_back_refs()]
        for net_port_obj in net_port_objs:
            routers = net_port_obj.get_logical_router_back_refs()
            if routers:
                router_id = routers[0]['uuid']
                break
    fip_q_dict['id'] = fip_obj.uuid
    fip_q_dict['tenant_id'] = tenant_id
    fip_q_dict['floating_ip_address'] = fip_obj.get_floating_ip_address()
    fip_q_dict['floating_network_id'] = floating_net_id
    fip_q_dict['router_id'] = router_id
    fip_q_dict['port_id'] = port_id
    fip_q_dict['fixed_ip_address'] = \
        fip_obj.get_floating_ip_fixed_ip_address()
    fip_q_dict['status'] = constants.PORT_STATUS_ACTIVE
    return fip_q_dict
#end _floatingip_vnc_to_neutron
def _port_neutron_to_vnc(self, port_q, net_obj, oper):
    """Convert a neutron port dict into a VNC VirtualMachineInterface.

    For CREATE a new interface is built under the request's project and
    linked to net_obj; otherwise the existing interface is read back.
    The supplied optional fields (device, security groups, dhcp opts,
    allowed address pairs, fixed ips) are then applied onto it.
    """
    if oper == CREATE:
        project_id = str(uuid.UUID(port_q['tenant_id']))
        proj_obj = self._project_read(proj_id=project_id)
        id_perms = IdPermsType(enable=True)
        port_uuid = str(uuid.uuid4())
        # name falls back to the freshly generated uuid
        if port_q.get('name'):
            port_name = port_q['name']
        else:
            port_name = port_uuid
        port_obj = VirtualMachineInterface(port_name, proj_obj,
                                           id_perms=id_perms)
        port_obj.uuid = port_uuid
        port_obj.set_virtual_network(net_obj)
        if ('mac_address' in port_q and port_q['mac_address']):
            mac_addrs_obj = MacAddressesType()
            mac_addrs_obj.set_mac_address([port_q['mac_address']])
            port_obj.set_virtual_machine_interface_mac_addresses(
                mac_addrs_obj)
        port_obj.set_security_group_list([])
        # NOTE(review): the __class__-is-object test presumably detects
        # an "attribute not specified" sentinel — confirm against caller
        if ('security_groups' not in port_q or
                port_q['security_groups'].__class__ is object):
            # no explicit groups: attach the project's default group
            sg_obj = SecurityGroup("default", proj_obj)
            port_obj.add_security_group(sg_obj)
    else:  # READ/UPDATE/DELETE
        port_obj = self._virtual_machine_interface_read(
            port_id=port_q['id'])
    if 'name' in port_q and port_q['name']:
        port_obj.display_name = port_q['name']
    # bind to a VM unless the port is a router interface
    if port_q.get('device_owner') != constants.DEVICE_OWNER_ROUTER_INTF:
        instance_name = port_q.get('device_id')
        if instance_name:
            try:
                instance_obj = self._ensure_instance_exists(instance_name)
                port_obj.set_virtual_machine(instance_obj)
            except RefsExistError as e:
                exc_info = {'type': 'BadRequest', 'message': str(e)}
                bottle.abort(400, json.dumps(exc_info))
    if 'device_owner' in port_q:
        port_obj.set_virtual_machine_interface_device_owner(
            port_q.get('device_owner'))
    if 'security_groups' in port_q:
        # replace the whole security-group list with the one supplied
        port_obj.set_security_group_list([])
        for sg_id in port_q.get('security_groups') or []:
            # TODO optimize to not read sg (only uuid/fqn needed)
            sg_obj = self._vnc_lib.security_group_read(id=sg_id)
            port_obj.add_security_group(sg_obj)
    id_perms = port_obj.get_id_perms()
    if 'admin_state_up' in port_q:
        id_perms.enable = port_q['admin_state_up']
        port_obj.set_id_perms(id_perms)
    if ('extra_dhcp_opts' in port_q):
        dhcp_options = []
        if port_q['extra_dhcp_opts']:
            for option_pair in port_q['extra_dhcp_opts']:
                option = \
                    DhcpOptionType(dhcp_option_name=option_pair['opt_name'],
                                   dhcp_option_value=option_pair['opt_value'])
                dhcp_options.append(option)
        if dhcp_options:
            olist = DhcpOptionsListType(dhcp_options)
            port_obj.set_virtual_machine_interface_dhcp_option_list(olist)
        else:
            # an empty/None payload clears the option list
            port_obj.set_virtual_machine_interface_dhcp_option_list(None)
    if ('allowed_address_pairs' in port_q):
        aap_array = []
        if port_q['allowed_address_pairs']:
            for address_pair in port_q['allowed_address_pairs']:
                mac_refs = \
                    port_obj.get_virtual_machine_interface_mac_addresses()
                mode = u'active-standby';
                # default the pair's mac to the port's own mac.
                # NOTE(review): if mac_refs is None and no mac_address
                # was supplied, the lookups below raise — confirm callers
                # prevent that combination.
                if 'mac_address' not in address_pair:
                    if mac_refs:
                        address_pair['mac_address'] = mac_refs.mac_address[0]
                # ip_address may carry an optional /prefix-len suffix
                cidr = address_pair['ip_address'].split('/')
                if len(cidr) == 1:
                    subnet=SubnetType(cidr[0], 32);
                elif len(cidr) == 2:
                    subnet=SubnetType(cidr[0], int(cidr[1]));
                else:
                    self._raise_contrail_exception(400,
                        exceptions.BadRequest(resource='port',
                            msg='Invalid address pair argument'))
                # reject a pair duplicating the port's own fixed ip+mac
                ip_back_refs = port_obj.get_instance_ip_back_refs()
                if ip_back_refs:
                    for ip_back_ref in ip_back_refs:
                        iip_uuid = ip_back_ref['uuid']
                        try:
                            ip_obj = self._instance_ip_read(instance_ip_id=\
                                ip_back_ref['uuid'])
                        except NoIdError:
                            continue
                        ip_addr = ip_obj.get_instance_ip_address()
                        if ((ip_addr == address_pair['ip_address']) and
                                (mac_refs.mac_address[0] ==
                                 address_pair['mac_address'])):
                            self._raise_contrail_exception(400,
                                AddressPairMatchesPortFixedIPAndMac())
                aap = AllowedAddressPair(subnet,
                                         address_pair['mac_address'], mode)
                aap_array.append(aap)
        if aap_array:
            aaps = AllowedAddressPairs()
            aaps.set_allowed_address_pair(aap_array)
            port_obj.set_virtual_machine_interface_allowed_address_pairs(
                aaps)
        else:
            # empty/None payload clears the allowed pairs
            port_obj.set_virtual_machine_interface_allowed_address_pairs(
                None)
    if 'fixed_ips' in port_q:
        net_id = (port_q.get('network_id') or
                  port_obj.get_virtual_network_refs()[0]['uuid'])
        for fixed_ip in port_q.get('fixed_ips', []):
            if 'ip_address' in fixed_ip:
                ip_addr = fixed_ip['ip_address']
                # 409 when the requested address is already taken
                if self._ip_addr_in_net_id(ip_addr, net_id):
                    self._raise_contrail_exception(
                        409, exceptions.IpAddressInUse(net_id=net_id,
                                                       ip_address=ip_addr))
    return port_obj
#end _port_neutron_to_vnc
def _port_vnc_to_neutron(self, port_obj, port_req_memo=None):
    """Convert a VNC VirtualMachineInterface into a neutron port dict.

    :param port_obj: interface to convert.
    :param port_req_memo: optional per-request cache of networks /
        subnets / instance-ips shared across conversions; a private one
        is created when omitted.
    """
    port_q_dict = {}
    extra_dict = {}
    extra_dict['contrail:fq_name'] = port_obj.get_fq_name()
    if not port_obj.display_name:
        # for ports created directly via vnc_api
        port_q_dict['name'] = port_obj.get_fq_name()[-1]
    else:
        port_q_dict['name'] = port_obj.display_name
    port_q_dict['id'] = port_obj.uuid
    net_refs = port_obj.get_virtual_network_refs()
    if net_refs:
        net_id = net_refs[0]['uuid']
    else:
        # TODO hack to force network_id on default port
        # as neutron needs it
        net_id = self._vnc_lib.obj_to_id(VirtualNetwork())
    if port_req_memo is None:
        # create a memo only for this port's conversion in this method
        port_req_memo = {}
    if 'networks' not in port_req_memo:
        port_req_memo['networks'] = {}
    if 'subnets' not in port_req_memo:
        port_req_memo['subnets'] = {}
    try:
        net_obj = port_req_memo['networks'][net_id]
    except KeyError:
        # cache miss: read the network once and remember its subnets too
        net_obj = self._virtual_network_read(net_id=net_id)
        port_req_memo['networks'][net_id] = net_obj
        subnets_info = self._virtual_network_to_subnets(net_obj)
        port_req_memo['subnets'][net_id] = subnets_info
    # tenant comes from the port's project parent when it has one,
    # otherwise from the network's parent project
    if port_obj.parent_type != "project":
        proj_id = net_obj.parent_uuid.replace('-', '')
    else:
        proj_id = port_obj.parent_uuid.replace('-', '')
    self._set_obj_tenant_id(port_obj.uuid, proj_id)
    port_q_dict['tenant_id'] = proj_id
    port_q_dict['network_id'] = net_id
    # TODO RHS below may need fixing
    port_q_dict['mac_address'] = ''
    mac_refs = port_obj.get_virtual_machine_interface_mac_addresses()
    if mac_refs:
        port_q_dict['mac_address'] = mac_refs.mac_address[0]
    dhcp_options_list = \
        port_obj.get_virtual_machine_interface_dhcp_option_list()
    if dhcp_options_list and dhcp_options_list.dhcp_option:
        dhcp_options = []
        for dhcp_option in dhcp_options_list.dhcp_option:
            pair = {"opt_value": dhcp_option.dhcp_option_value,
                    "opt_name": dhcp_option.dhcp_option_name}
            dhcp_options.append(pair)
        port_q_dict['extra_dhcp_opts'] = dhcp_options
    allowed_address_pairs = \
        port_obj.get_virtual_machine_interface_allowed_address_pairs()
    if allowed_address_pairs and allowed_address_pairs.allowed_address_pair:
        address_pairs = []
        for aap in allowed_address_pairs.allowed_address_pair:
            pair = {"ip_address": '%s/%s' % (aap.ip.get_ip_prefix(),
                                             aap.ip.get_ip_prefix_len()),
                    "mac_address": aap.mac}
            address_pairs.append(pair)
        port_q_dict['allowed_address_pairs'] = address_pairs
    port_q_dict['fixed_ips'] = []
    ip_back_refs = getattr(port_obj, 'instance_ip_back_refs', None)
    if ip_back_refs:
        for ip_back_ref in ip_back_refs:
            iip_uuid = ip_back_ref['uuid']
            # fetch it from request context cache/memo if there
            try:
                ip_obj = port_req_memo['instance-ips'][iip_uuid]
            except KeyError:
                try:
                    ip_obj = self._instance_ip_read(
                        instance_ip_id=ip_back_ref['uuid'])
                except NoIdError:
                    # stale back-ref: skip it
                    continue
            ip_addr = ip_obj.get_instance_ip_address()
            ip_q_dict = {}
            ip_q_dict['ip_address'] = ip_addr
            ip_q_dict['subnet_id'] = self._ip_address_to_subnet_id(ip_addr,
                                                                   net_obj)
            port_q_dict['fixed_ips'].append(ip_q_dict)
    port_q_dict['security_groups'] = []
    sg_refs = port_obj.get_security_group_refs()
    for sg_ref in sg_refs or []:
        port_q_dict['security_groups'].append(sg_ref['uuid'])
    port_q_dict['admin_state_up'] = port_obj.get_id_perms().enable
    # port can be router interface or vm interface
    # for perf read logical_router_back_ref only when we have to
    port_parent_name = port_obj.parent_name
    router_refs = getattr(port_obj, 'logical_router_back_refs', None)
    if router_refs is not None:
        port_q_dict['device_id'] = router_refs[0]['uuid']
    elif port_obj.parent_type == 'virtual-machine':
        port_q_dict['device_id'] = port_obj.parent_name
    elif port_obj.get_virtual_machine_refs() is not None:
        port_q_dict['device_id'] = \
            port_obj.get_virtual_machine_refs()[0]['to'][-1]
    else:
        port_q_dict['device_id'] = ''
    port_q_dict['device_owner'] = \
        port_obj.get_virtual_machine_interface_device_owner();
    # a port with a device is considered ACTIVE, otherwise DOWN
    if port_q_dict['device_id']:
        port_q_dict['status'] = constants.PORT_STATUS_ACTIVE
    else:
        port_q_dict['status'] = constants.PORT_STATUS_DOWN
    if self._contrail_extensions_enabled:
        port_q_dict.update(extra_dict)
    return port_q_dict
#end _port_vnc_to_neutron
# public methods
# network api handlers
def network_create(self, network_q):
    """Create a virtual network from a neutron network dict.

    Aborts with 400 when the network already exists; an external
    network additionally gets a default floating-ip pool. The result is
    cached and returned in neutron 'SHOW' form.
    """
    net_obj = self._network_neutron_to_vnc(network_q, CREATE)
    try:
        net_uuid = self._resource_create('virtual_network', net_obj)
    except RefsExistError:
        self._raise_contrail_exception(400, exceptions.BadRequest(
            resource='network', msg='Network Already exists'))
    if net_obj.router_external:
        # external networks get a default floating-ip pool
        self._floating_ip_pool_create(
            FloatingIpPool('floating-ip-pool', net_obj))
    result = self._network_vnc_to_neutron(net_obj, net_repr='SHOW')
    self._db_cache['q_networks'][net_uuid] = result
    return result
#end network_create
def network_read(self, net_uuid, fields=None):
    """Return the neutron 'SHOW' dict for one network.

    :param net_uuid: id of the network to read.
    :param fields: accepted for API compatibility; not used.
    :raises: contrail 404 exception when the id is unknown.
    """
    try:
        obj = self._network_read(net_uuid)
    except NoIdError:
        self._raise_contrail_exception(
            404, exceptions.NetworkNotFound(net_id=net_uuid))
    return self._network_vnc_to_neutron(obj, net_repr='SHOW')
#end network_read
def network_update(self, net_id, network_q):
    """Update a network, managing its floating-ip pool on
    router:external transitions.

    Becoming external creates a default pool; ceasing to be external
    deletes the pools (409 when a pool is still referenced). Returns
    the updated network in neutron 'SHOW' form and refreshes the cache.
    """
    net_obj = self._virtual_network_read(net_id=net_id)
    router_external = net_obj.get_router_external()  # flag before update
    network_q['id'] = net_id
    net_obj = self._network_neutron_to_vnc(network_q, UPDATE)
    if net_obj.router_external and not router_external:
        # transition to external: create the default floating-ip pool
        fip_pools = net_obj.get_floating_ip_pools()
        fip_pool_obj = FloatingIpPool('floating-ip-pool', net_obj)
        self._floating_ip_pool_create(fip_pool_obj)
    if router_external and not net_obj.router_external:
        # transition away from external: tear down its pools
        fip_pools = net_obj.get_floating_ip_pools()
        if fip_pools:
            for fip_pool in fip_pools:
                try:
                    pool_id = fip_pool['uuid']
                    self._floating_ip_pool_delete(fip_pool_id=pool_id)
                except RefsExistError:
                    # floating ips still allocated from this pool
                    self._raise_contrail_exception(
                        409, exceptions.NetworkInUse(net_id=net_id))
    self._virtual_network_update(net_obj)
    ret_network_q = self._network_vnc_to_neutron(net_obj, net_repr='SHOW')
    self._db_cache['q_networks'][net_id] = ret_network_q
    return ret_network_q
#end network_update
def network_delete(self, net_id):
    """Delete a network and drop it from the local cache.

    The preliminary read surfaces an unknown id before the delete call.
    A cache miss is tolerated silently.
    """
    self._virtual_network_read(net_id=net_id)
    self._virtual_network_delete(net_id=net_id)
    self._db_cache['q_networks'].pop(net_id, None)
#end network_delete
# TODO request based on filter contents
def network_list(self, context=None, filters=None):
    """List networks visible under *context*, narrowed by *filters*.

    Two phases: 'collect' gathers candidate network objects (scoped by
    role and by which filter keys are present), then 'prune' converts
    them while applying name/fq_name/shared filters. Networks read
    directly by id skip the prune. Returns a list of neutron dicts.
    """
    ret_dict = {}
    def _collect_without_prune(net_ids):
        # read the given ids directly into ret_dict; unknown ids are
        # silently skipped
        for net_id in net_ids:
            try:
                net_obj = self._network_read(net_id)
                net_info = self._network_vnc_to_neutron(net_obj,
                                                        net_repr='LIST')
                ret_dict[net_id] = net_info
            except NoIdError:
                pass
    #end _collect_without_prune
    # collect phase
    all_net_objs = []  # all n/ws in all projects
    if context and not context['is_admin']:
        # non-admin: restrict to own project plus shared/external nets
        if filters and 'id' in filters:
            _collect_without_prune(filters['id'])
        elif filters and 'name' in filters:
            net_objs = self._network_list_project(context['tenant'])
            all_net_objs.extend(net_objs)
            all_net_objs.extend(self._network_list_shared())
            all_net_objs.extend(self._network_list_router_external())
        elif (filters and 'shared' in filters and filters['shared'][0] and
                'router:external' not in filters):
            all_net_objs.extend(self._network_list_shared())
        elif (filters and 'router:external' in filters and
                'shared' not in filters):
            all_net_objs.extend(self._network_list_router_external())
        elif (filters and 'router:external' in filters and
                'shared' in filters):
            all_net_objs.extend(self._network_list_shared_and_ext())
        else:
            project_uuid = str(uuid.UUID(context['tenant']))
            if not filters:
                all_net_objs.extend(self._network_list_router_external())
                all_net_objs.extend(self._network_list_shared())
            all_net_objs.extend(self._network_list_project(project_uuid))
    # admin role from here on
    elif filters and 'tenant_id' in filters:
        # project-id is present
        if 'id' in filters:
            # required networks are also specified,
            # just read and populate ret_dict
            # prune is skipped because all_net_objs is empty
            _collect_without_prune(filters['id'])
        else:
            # read all networks in project, and prune below
            proj_ids = self._validate_project_ids(context,
                                                  filters['tenant_id'])
            for p_id in proj_ids:
                all_net_objs.extend(self._network_list_project(p_id))
            if 'router:external' in filters:
                all_net_objs.extend(self._network_list_router_external())
    elif filters and 'id' in filters:
        # required networks are specified, just read and populate ret_dict
        # prune is skipped because all_net_objs is empty
        _collect_without_prune(filters['id'])
    elif filters and 'name' in filters:
        net_objs = self._network_list_project(None)
        all_net_objs.extend(net_objs)
    elif filters and 'shared' in filters:
        if filters['shared'][0] == True:
            nets = self._network_list_shared()
            for net in nets:
                net_info = self._network_vnc_to_neutron(net,
                                                        net_repr='LIST')
                ret_dict[net.uuid] = net_info
    elif filters and 'router:external' in filters:
        nets = self._network_list_router_external()
        if filters['router:external'][0] == True:
            for net in nets:
                net_info = self._network_vnc_to_neutron(net,
                                                        net_repr='LIST')
                ret_dict[net.uuid] = net_info
    else:
        # read all networks in all projects
        all_net_objs.extend(self._virtual_network_list(detail=True))
    # prune phase: convert candidates that pass the remaining filters
    for net_obj in all_net_objs:
        if net_obj.uuid in ret_dict:
            continue  # already collected above
        net_fq_name = unicode(net_obj.get_fq_name())
        if not self._filters_is_present(filters, 'contrail:fq_name',
                                        net_fq_name):
            continue
        if not self._filters_is_present(filters, 'name',
                                        net_obj.get_display_name()):
            continue
        # an unset shared flag is treated as not shared
        if net_obj.is_shared == None:
            is_shared = False
        else:
            is_shared = net_obj.is_shared
        if not self._filters_is_present(filters, 'shared',
                                        is_shared):
            continue
        try:
            net_info = self._network_vnc_to_neutron(net_obj,
                                                    net_repr='LIST')
        except NoIdError:
            continue  # network vanished while listing
        ret_dict[net_obj.uuid] = net_info
    ret_list = []
    for net in ret_dict.values():
        ret_list.append(net)
    return ret_list
#end network_list
def network_count(self, filters=None):
    """Return the number of networks visible under *filters*."""
    return len(self.network_list(filters=filters))
#end network_count
# subnet api handlers
def subnet_create(self, subnet_q):
    """Create a subnet on a network, linking it under an ipam.

    The ipam comes from the request's contrail:ipam_fq_name, else the
    project's default ipam, else the global default. A duplicate cidr
    under the same ipam aborts with 400. Returns the neutron subnet dict
    read back from the server.
    """
    net_id = subnet_q['network_id']
    net_obj = self._virtual_network_read(net_id=net_id)
    ipam_fq_name = subnet_q.get('contrail:ipam_fq_name')
    if ipam_fq_name:
        domain_name, project_name, ipam_name = ipam_fq_name
        domain_obj = Domain(domain_name)
        project_obj = Project(project_name, domain_obj)
        netipam_obj = NetworkIpam(ipam_name, project_obj)
    else:  # link with project's default ipam or global default ipam
        try:
            ipam_fq_name = net_obj.get_fq_name()[:-1]
            ipam_fq_name.append('default-network-ipam')
            netipam_obj = self._vnc_lib.network_ipam_read(
                fq_name=ipam_fq_name)
        except NoIdError:
            # fall back to the global default ipam
            netipam_obj = NetworkIpam()
        ipam_fq_name = netipam_obj.get_fq_name()
    subnet_vnc = self._subnet_neutron_to_vnc(subnet_q)
    subnet_key = self._subnet_vnc_get_key(subnet_vnc, net_id)
    # Locate list of subnets to which this subnet has to be appended
    net_ipam_ref = None
    ipam_refs = net_obj.get_network_ipam_refs()
    if ipam_refs:
        for ipam_ref in ipam_refs:
            if ipam_ref['to'] == ipam_fq_name:
                net_ipam_ref = ipam_ref
                break
    if not net_ipam_ref:
        # First link from net to this ipam
        vnsn_data = VnSubnetsType([subnet_vnc])
        net_obj.add_network_ipam(netipam_obj, vnsn_data)
    else:  # virtual-network already linked to this ipam
        for subnet in net_ipam_ref['attr'].get_ipam_subnets():
            if subnet_key == self._subnet_vnc_get_key(subnet, net_id):
                existing_sn_id = self._subnet_vnc_read_mapping(
                    key=subnet_key)
                # duplicate !!
                data = {'subnet_cidr': subnet_q['cidr'],
                        'sub_id': existing_sn_id}
                msg = (_("Cidr %(subnet_cidr)s "
                         "overlaps with another subnet"
                         "of subnet %(sub_id)s") % data)
                exc_info = {'type': 'BadRequest',
                            'message': msg}
                bottle.abort(400, json.dumps(exc_info))
                # NOTE(review): bottle.abort raises, so the two
                # statements below are unreachable dead code.
                subnet_info = self._subnet_vnc_to_neutron(subnet,
                                                          net_obj,
                                                          ipam_fq_name)
                return subnet_info
        vnsn_data = net_ipam_ref['attr']
        vnsn_data.ipam_subnets.append(subnet_vnc)
        # TODO: Add 'ref_update' API that will set this field
        net_obj._pending_field_updates.add('network_ipam_refs')
    self._virtual_network_update(net_obj)
    # allocate an id to the subnet and store mapping with
    # api-server
    subnet_id = str(uuid.uuid4())
    self._subnet_vnc_create_mapping(subnet_id, subnet_key)
    # Read in subnet from server to get updated values for gw etc.
    subnet_vnc = self._subnet_read(net_obj.uuid, subnet_key)
    subnet_info = self._subnet_vnc_to_neutron(subnet_vnc, net_obj,
                                              ipam_fq_name)
    #self._db_cache['q_subnets'][subnet_id] = subnet_info
    return subnet_info
#end subnet_create
def subnet_read(self, subnet_id):
    """Return the neutron subnet dict for *subnet_id*.

    Resolves the id to its "<net-uuid> <cidr>" key, reads the owning
    network (404 when gone), then scans its ipam subnets for the match.
    Returns {} when no subnet with that key is linked to the network.
    """
    subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
    # key format is "<net-uuid> <prefix/len>"
    net_id = subnet_key.split()[0]
    try:
        net_obj = self._network_read(net_id)
    except NoIdError:
        self._raise_contrail_exception(
            404, exceptions.SubnetNotFound(subnet_id=subnet_id))
    for ipam_ref in net_obj.get_network_ipam_refs() or []:
        for candidate in ipam_ref['attr'].get_ipam_subnets():
            if self._subnet_vnc_get_key(candidate, net_id) != subnet_key:
                continue
            result = self._subnet_vnc_to_neutron(
                candidate, net_obj, ipam_ref['to'])
            self._db_cache['q_subnets'][subnet_id] = result
            return result
    return {}
#end subnet_read
def subnet_update(self, subnet_id, subnet_q):
    """Update the mutable fields of a subnet.

    Updating gateway_ip or allocation_pools to a non-None value is
    rejected with 400. name / enable_dhcp / dns_nameservers /
    host_routes are applied onto the matching IpamSubnetType and the
    owning network is written back. Returns the updated neutron dict,
    or {} when the subnet is not found on its network.
    """
    if 'gateway_ip' in subnet_q:
        if subnet_q['gateway_ip'] != None:
            exc_info = {'type': 'BadRequest',
                        'message': "update of gateway is not supported"}
            bottle.abort(400, json.dumps(exc_info))
    if 'allocation_pools' in subnet_q:
        if subnet_q['allocation_pools'] != None:
            exc_info = {'type': 'BadRequest',
                        'message': "update of allocation_pools is not allowed"}
            bottle.abort(400, json.dumps(exc_info))
    subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
    net_id = subnet_key.split()[0]
    net_obj = self._network_read(net_id)
    ipam_refs = net_obj.get_network_ipam_refs()
    subnet_found = False
    if ipam_refs:
        for ipam_ref in ipam_refs:
            subnets = ipam_ref['attr'].get_ipam_subnets()
            for subnet_vnc in subnets:
                if self._subnet_vnc_get_key(subnet_vnc,
                                            net_id) == subnet_key:
                    subnet_found = True
                    break
            if subnet_found:
                # BUG FIX: also break the OUTER loop. Previously only the
                # inner loop broke, so later ipam refs re-bound
                # subnet_vnc/ipam_ref and the wrong subnet was updated.
                break
    if subnet_found:
        if 'name' in subnet_q:
            if subnet_q['name'] != None:
                subnet_vnc.set_subnet_name(subnet_q['name'])
        if 'gateway_ip' in subnet_q:
            if subnet_q['gateway_ip'] != None:
                subnet_vnc.set_default_gateway(subnet_q['gateway_ip'])
        if 'enable_dhcp' in subnet_q:
            if subnet_q['enable_dhcp'] != None:
                subnet_vnc.set_enable_dhcp(subnet_q['enable_dhcp'])
        if 'dns_nameservers' in subnet_q:
            if subnet_q['dns_nameservers'] != None:
                # DNS servers are stored as DHCP option '6'
                dhcp_options = []
                for dns_server in subnet_q['dns_nameservers']:
                    dhcp_options.append(
                        DhcpOptionType(dhcp_option_name='6',
                                       dhcp_option_value=dns_server))
                if dhcp_options:
                    subnet_vnc.set_dhcp_option_list(
                        DhcpOptionsListType(dhcp_options))
                else:
                    subnet_vnc.set_dhcp_option_list(None)
        if 'host_routes' in subnet_q:
            if subnet_q['host_routes'] != None:
                host_routes = []
                for host_route in subnet_q['host_routes']:
                    host_routes.append(
                        RouteType(prefix=host_route['destination'],
                                  next_hop=host_route['nexthop']))
                if host_routes:
                    subnet_vnc.set_host_routes(RouteTableType(host_routes))
                else:
                    subnet_vnc.set_host_routes(None)
        net_obj._pending_field_updates.add('network_ipam_refs')
        self._virtual_network_update(net_obj)
        ret_subnet_q = self._subnet_vnc_to_neutron(
            subnet_vnc, net_obj, ipam_ref['to'])
        self._db_cache['q_subnets'][subnet_id] = ret_subnet_q
        return ret_subnet_q
    return {}
# end subnet_update
    def subnet_delete(self, subnet_id):
        """Delete the neutron subnet *subnet_id* from its virtual network.

        Resolves the subnet's key, strips the matching IpamSubnetType from
        the network's ipam refs and writes the network back.  Raises a 409
        (SubnetInUse) when the api-server refuses the update because
        references to the subnet still exist.
        """
        subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
        # first token of the key is the owning network's uuid
        net_id = subnet_key.split()[0]
        net_obj = self._network_read(net_id)
        ipam_refs = net_obj.get_network_ipam_refs()
        if ipam_refs:
            for ipam_ref in ipam_refs:
                orig_subnets = ipam_ref['attr'].get_ipam_subnets()
                # keep every subnet whose key differs from the one to delete
                new_subnets = [subnet_vnc for subnet_vnc in orig_subnets
                               if self._subnet_vnc_get_key(subnet_vnc,
                                   net_id) != subnet_key]
                if len(orig_subnets) != len(new_subnets):
                    # matched subnet to be deleted
                    ipam_ref['attr'].set_ipam_subnets(new_subnets)
                    net_obj._pending_field_updates.add('network_ipam_refs')
                    try:
                        self._virtual_network_update(net_obj)
                    except RefsExistError:
                        self._raise_contrail_exception(409, exceptions.SubnetInUse(subnet_id=subnet_id))
                    self._subnet_vnc_delete_mapping(subnet_id, subnet_key)
                    # drop any cached neutron view; absence is fine
                    try:
                        del self._db_cache['q_subnets'][subnet_id]
                    except KeyError:
                        pass
        return
    #end subnet_delete
    def subnets_list(self, context, filters=None):
        """List neutron subnets visible to *context*, honoring *filters*.

        With an 'id' filter only the owning networks of the requested
        subnets are read; otherwise all project networks plus shared
        networks are scanned, and every ipam subnet found is converted to
        its neutron form and filtered.
        """
        ret_subnets = []
        all_net_objs = []
        if filters and 'id' in filters:
            # required subnets are specified,
            # just read in corresponding net_ids
            net_ids = []
            for subnet_id in filters['id']:
                subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
                net_id = subnet_key.split()[0]
                net_ids.append(net_id)
            all_net_objs.extend(self._virtual_network_list(obj_uuids=net_ids,
                                                           detail=True))
        else:
            # non-admin callers only see their own project's networks
            if not context['is_admin']:
                proj_id = context['tenant']
            else:
                proj_id = None
            net_objs = self._network_list_project(proj_id)
            all_net_objs.extend(net_objs)
            net_objs = self._network_list_shared()
            all_net_objs.extend(net_objs)
        # ret_dict de-duplicates networks that show up in both lists
        ret_dict = {}
        for net_obj in all_net_objs:
            if net_obj.uuid in ret_dict:
                continue
            ret_dict[net_obj.uuid] = 1
            ipam_refs = net_obj.get_network_ipam_refs()
            if ipam_refs:
                for ipam_ref in ipam_refs:
                    subnet_vncs = ipam_ref['attr'].get_ipam_subnets()
                    for subnet_vnc in subnet_vncs:
                        sn_info = self._subnet_vnc_to_neutron(subnet_vnc,
                                                              net_obj,
                                                              ipam_ref['to'])
                        sn_id = sn_info['id']
                        sn_proj_id = sn_info['tenant_id']
                        sn_net_id = sn_info['network_id']
                        sn_name = sn_info['name']
                        # 'shared' filter short-circuits the other filters
                        if (filters and 'shared' in filters and
                            filters['shared'][0] == True):
                            if not net_obj.is_shared:
                                continue
                        elif filters:
                            if not self._filters_is_present(filters, 'id',
                                                            sn_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'tenant_id',
                                                            sn_proj_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'network_id',
                                                            sn_net_id):
                                continue
                            if not self._filters_is_present(filters,
                                                            'name',
                                                            sn_name):
                                continue
                        ret_subnets.append(sn_info)
        return ret_subnets
    #end subnets_list
def subnets_count(self, context, filters=None):
subnets_info = self.subnets_list(context, filters)
return len(subnets_info)
#end subnets_count
# ipam api handlers
def ipam_create(self, ipam_q):
# TODO remove below once api-server can read and create projects
# from keystone on startup
#self._ensure_project_exists(ipam_q['tenant_id'])
ipam_obj = self._ipam_neutron_to_vnc(ipam_q, CREATE)
ipam_uuid = self._vnc_lib.network_ipam_create(ipam_obj)
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_create
def ipam_read(self, ipam_id):
try:
ipam_obj = self._vnc_lib.network_ipam_read(id=ipam_id)
except NoIdError:
# TODO add ipam specific exception
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=ipam_id))
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_read
def ipam_update(self, ipam_id, ipam_q):
ipam_q['id'] = ipam_id
ipam_obj = self._ipam_neutron_to_vnc(ipam_q, UPDATE)
self._vnc_lib.network_ipam_update(ipam_obj)
return self._ipam_vnc_to_neutron(ipam_obj)
#end ipam_update
def ipam_delete(self, ipam_id):
self._vnc_lib.network_ipam_delete(id=ipam_id)
#end ipam_delete
# TODO request based on filter contents
def ipam_list(self, context=None, filters=None):
ret_list = []
# collect phase
all_ipams = [] # all ipams in all projects
if filters and 'tenant_id' in filters:
project_ids = self._validate_project_ids(context,
filters['tenant_id'])
for p_id in project_ids:
project_ipams = self._ipam_list_project(p_id)
all_ipams.append(project_ipams)
else: # no filters
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_ipams = self._ipam_list_project(proj_id)
all_ipams.append(project_ipams)
# prune phase
for project_ipams in all_ipams:
for proj_ipam in project_ipams:
# TODO implement same for name specified in filter
proj_ipam_id = proj_ipam['uuid']
if not self._filters_is_present(filters, 'id', proj_ipam_id):
continue
ipam_info = self.ipam_read(proj_ipam['uuid'])
ret_list.append(ipam_info)
return ret_list
#end ipam_list
def ipam_count(self, filters=None):
ipam_info = self.ipam_list(filters=filters)
return len(ipam_info)
#end ipam_count
# policy api handlers
def policy_create(self, policy_q):
# TODO remove below once api-server can read and create projects
# from keystone on startup
#self._ensure_project_exists(policy_q['tenant_id'])
policy_obj = self._policy_neutron_to_vnc(policy_q, CREATE)
policy_uuid = self._vnc_lib.network_policy_create(policy_obj)
return self._policy_vnc_to_neutron(policy_obj)
#end policy_create
def policy_read(self, policy_id):
try:
policy_obj = self._vnc_lib.network_policy_read(id=policy_id)
except NoIdError:
raise policy.PolicyNotFound(id=policy_id)
return self._policy_vnc_to_neutron(policy_obj)
#end policy_read
def policy_update(self, policy_id, policy):
policy_q = policy
policy_q['id'] = policy_id
policy_obj = self._policy_neutron_to_vnc(policy_q, UPDATE)
self._vnc_lib.network_policy_update(policy_obj)
return self._policy_vnc_to_neutron(policy_obj)
#end policy_update
def policy_delete(self, policy_id):
self._vnc_lib.network_policy_delete(id=policy_id)
#end policy_delete
# TODO request based on filter contents
def policy_list(self, context=None, filters=None):
ret_list = []
# collect phase
all_policys = [] # all policys in all projects
if filters and 'tenant_id' in filters:
project_ids = self._validate_project_ids(context,
filters['tenant_id'])
for p_id in project_ids:
project_policys = self._policy_list_project(p_id)
all_policys.append(project_policys)
else: # no filters
dom_projects = self._project_list_domain(None)
for project in dom_projects:
proj_id = project['uuid']
project_policys = self._policy_list_project(proj_id)
all_policys.append(project_policys)
# prune phase
for project_policys in all_policys:
for proj_policy in project_policys:
# TODO implement same for name specified in filter
proj_policy_id = proj_policy['uuid']
if not self._filters_is_present(filters, 'id', proj_policy_id):
continue
policy_info = self.policy_read(proj_policy['uuid'])
ret_list.append(policy_info)
return ret_list
#end policy_list
def policy_count(self, filters=None):
policy_info = self.policy_list(filters=filters)
return len(policy_info)
#end policy_count
def _router_add_gateway(self, router_q, rtr_obj):
ext_gateway = router_q.get('external_gateway_info', None)
old_ext_gateway = rtr_obj.get_virtual_network_refs()
if ext_gateway or old_ext_gateway:
network_id = ext_gateway.get('network_id', None)
if network_id:
if old_ext_gateway and network_id == old_ext_gateway[0]['uuid']:
return
try:
net_obj = self._virtual_network_read(net_id=network_id)
if not net_obj.get_router_external():
exc_info = {'type': 'BadRequest',
'message': "Network %s is not a valid external network" % network_id}
bottle.abort(400, json.dumps(exc_info))
except NoIdError:
self._raise_contrail_exception(404, exceptions.NetworkNotFound(net_id=network_id))
self._router_set_external_gateway(rtr_obj, net_obj)
else:
self._router_clear_external_gateway(rtr_obj)
    def _router_set_external_gateway(self, router_obj, ext_net_obj):
        """Wire *router_obj* to external network *ext_net_obj* via netns SNAT.

        Creates or updates a ServiceInstance ('si_<router-uuid>') built on
        the SNAT service template and a RouteTable ('rt_<router-uuid>')
        holding a 0.0.0.0/0 route through it, attaches that route table to
        every private network plugged into the router, then records the
        gateway network on the router itself.  Aborts with 400 when the
        SNAT service template cannot be read.
        """
        project_obj = self._project_read(proj_id=router_obj.parent_uuid)
        # Get netns SNAT service template
        try:
            st_obj = self._vnc_lib.service_template_read(
                fq_name=SNAT_SERVICE_TEMPLATE_FQ_NAME)
        except NoIdError:
            msg = _("Unable to set or clear the default gateway")
            exc_info = {'type': 'BadRequest', 'message': msg}
            bottle.abort(400, json.dumps(exc_info))
        # Get the service instance if it exists
        si_name = 'si_' + router_obj.uuid
        si_fq_name = project_obj.get_fq_name() + [si_name]
        try:
            si_obj = self._vnc_lib.service_instance_read(fq_name=si_fq_name)
            si_uuid = si_obj.uuid
        except NoIdError:
            si_obj = None
        # Get route table for default route it it exists
        rt_name = 'rt_' + router_obj.uuid
        rt_fq_name = project_obj.get_fq_name() + [rt_name]
        try:
            rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
            rt_uuid = rt_obj.uuid
        except NoIdError:
            rt_obj = None
        # Set the service instance
        si_created = False
        if not si_obj:
            si_obj = ServiceInstance(si_name, parent_obj=project_obj)
            si_created = True
        #TODO(ethuleau): For the fail-over SNAT set scale out to 2
        si_prop_obj = ServiceInstanceType(
            right_virtual_network=ext_net_obj.get_fq_name_str(),
            scale_out=ServiceScaleOutType(max_instances=1,
                                          auto_scale=True),
            auto_policy=True)
        si_obj.set_service_instance_properties(si_prop_obj)
        si_obj.set_service_template(st_obj)
        if si_created:
            si_uuid = self._vnc_lib.service_instance_create(si_obj)
        else:
            self._vnc_lib.service_instance_update(si_obj)
        # Set the route table
        route_obj = RouteType(prefix="0.0.0.0/0",
                              next_hop=si_obj.get_fq_name_str())
        rt_created = False
        if not rt_obj:
            rt_obj = RouteTable(name=rt_name, parent_obj=project_obj)
            rt_created = True
        rt_obj.set_routes(RouteTableType.factory([route_obj]))
        if rt_created:
            rt_uuid = self._vnc_lib.route_table_create(rt_obj)
        else:
            self._vnc_lib.route_table_update(rt_obj)
        # Associate route table to all private networks connected onto
        # that router
        for intf in router_obj.get_virtual_machine_interface_refs() or []:
            port_id = intf['uuid']
            net_id = self.port_read(port_id)['network_id']
            try:
                net_obj = self._vnc_lib.virtual_network_read(id=net_id)
            except NoIdError:
                self._raise_contrail_exception(
                    404, exceptions.NetworkNotFound(net_id=net_id))
            net_obj.set_route_table(rt_obj)
            self._vnc_lib.virtual_network_update(net_obj)
        # Add logical gateway virtual network
        router_obj.set_virtual_network(ext_net_obj)
        self._vnc_lib.logical_router_update(router_obj)
    def _router_clear_external_gateway(self, router_obj):
        """Undo _router_set_external_gateway for *router_obj*.

        Detaches and deletes the router's SNAT route table
        ('rt_<router-uuid>') and service instance ('si_<router-uuid>') if
        they exist, then clears the gateway network reference on the
        router itself.
        """
        project_obj = self._project_read(proj_id=router_obj.parent_uuid)
        # Get the service instance if it exists
        si_name = 'si_' + router_obj.uuid
        si_fq_name = project_obj.get_fq_name() + [si_name]
        try:
            si_obj = self._vnc_lib.service_instance_read(fq_name=si_fq_name)
            si_uuid = si_obj.uuid
        except NoIdError:
            si_obj = None
        # Get route table for default route it it exists
        rt_name = 'rt_' + router_obj.uuid
        rt_fq_name = project_obj.get_fq_name() + [rt_name]
        try:
            rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
            rt_uuid = rt_obj.uuid
        except NoIdError:
            rt_obj = None
        # Delete route table
        if rt_obj:
            # Disassociate route table to all private networks connected
            # onto that router
            for net_ref in rt_obj.get_virtual_network_back_refs() or []:
                try:
                    net_obj = self._vnc_lib.virtual_network_read(
                        id=net_ref['uuid'])
                except NoIdError:
                    # network vanished meanwhile; nothing to detach
                    continue
                net_obj.del_route_table(rt_obj)
                self._vnc_lib.virtual_network_update(net_obj)
            self._vnc_lib.route_table_delete(id=rt_obj.uuid)
        # Delete service instance
        if si_obj:
            self._vnc_lib.service_instance_delete(id=si_uuid)
        # Clear logical gateway virtual network
        router_obj.set_virtual_network_list([])
        self._vnc_lib.logical_router_update(router_obj)
def _set_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
rt_uuid = rt_obj.uuid
except NoIdError:
# No route table set with that router ID, the gateway is not set
return
try:
net_obj = self._vnc_lib.virtual_network_read(id=network_id)
except NoIdError:
raise exceptions.NetworkNotFound(net_id=ext_net_id)
net_obj.set_route_table(rt_obj)
self._vnc_lib.virtual_network_update(net_obj)
def _clear_snat_routing_table(self, router_obj, network_id):
project_obj = self._project_read(proj_id=router_obj.parent_uuid)
rt_name = 'rt_' + router_obj.uuid
rt_fq_name = project_obj.get_fq_name() + [rt_name]
try:
rt_obj = self._vnc_lib.route_table_read(fq_name=rt_fq_name)
rt_uuid = rt_obj.uuid
except NoIdError:
# No route table set with that router ID, the gateway is not set
return
try:
net_obj = self._vnc_lib.virtual_network_read(id=network_id)
except NoIdError:
raise exceptions.NetworkNotFound(net_id=ext_net_id)
net_obj.del_route_table(rt_obj)
self._vnc_lib.virtual_network_update(net_obj)
# router api handlers
def router_create(self, router_q):
#self._ensure_project_exists(router_q['tenant_id'])
rtr_obj = self._router_neutron_to_vnc(router_q, CREATE)
rtr_uuid = self._resource_create('logical_router', rtr_obj)
self._router_add_gateway(router_q, rtr_obj)
ret_router_q = self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
self._db_cache['q_routers'][rtr_uuid] = ret_router_q
return ret_router_q
#end router_create
def router_read(self, rtr_uuid, fields=None):
# see if we can return fast...
if fields and (len(fields) == 1) and fields[0] == 'tenant_id':
tenant_id = self._get_obj_tenant_id('router', rtr_uuid)
return {'id': rtr_uuid, 'tenant_id': tenant_id}
try:
rtr_obj = self._logical_router_read(rtr_uuid)
except NoIdError:
self._raise_contrail_exception(404, RouterNotFound(router_id=rtr_uuid))
return self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
#end router_read
def router_update(self, rtr_id, router_q):
router_q['id'] = rtr_id
rtr_obj = self._router_neutron_to_vnc(router_q, UPDATE)
self._logical_router_update(rtr_obj)
self._router_add_gateway(router_q, rtr_obj)
ret_router_q = self._router_vnc_to_neutron(rtr_obj, rtr_repr='SHOW')
self._db_cache['q_routers'][rtr_id] = ret_router_q
return ret_router_q
#end router_update
def router_delete(self, rtr_id):
try:
rtr_obj = self._logical_router_read(rtr_id)
if rtr_obj.get_virtual_machine_interface_refs():
self._raise_contrail_exception(409, RouterInUse(router_id=rtr_id))
except NoIdError:
self._raise_contrail_exception(404, RouterNotFound(router_id=rtr_id))
self._router_clear_external_gateway(rtr_obj)
self._logical_router_delete(rtr_id=rtr_id)
try:
del self._db_cache['q_routers'][rtr_id]
except KeyError:
pass
#end router_delete
# TODO request based on filter contents
    def router_list(self, context=None, filters=None):
        """List logical routers visible under *filters*.

        Shared routers are not supported (the 'shared' filter returns an
        empty list).  When explicit ids are given they are read directly;
        otherwise routers are collected per project (via floating-ip-pool
        refs for 'router:external') and pruned by id / contrail:fq_name /
        name filters.
        """
        ret_list = []
        if filters and 'shared' in filters:
            if filters['shared'][0] == True:
                # no support for shared routers
                return ret_list
        # collect phase
        all_rtrs = []  # all n/ws in all projects
        if filters and 'tenant_id' in filters:
            # project-id is present
            if 'id' in filters:
                # required routers are also specified,
                # just read and populate ret_list
                # prune is skipped because all_rtrs is empty
                for rtr_id in filters['id']:
                    try:
                        rtr_obj = self._logical_router_read(rtr_id)
                        rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                               rtr_repr='LIST')
                        ret_list.append(rtr_info)
                    except NoIdError:
                        pass
            else:
                # read all routers in project, and prune below
                project_ids = self._validate_project_ids(context,
                                                         filters['tenant_id'])
                for p_id in project_ids:
                    if 'router:external' in filters:
                        all_rtrs.append(self._fip_pool_ref_routers(p_id))
                    else:
                        project_rtrs = self._router_list_project(p_id)
                        all_rtrs.append(project_rtrs)
        elif filters and 'id' in filters:
            # required routers are specified, just read and populate ret_list
            # prune is skipped because all_rtrs is empty
            for rtr_id in filters['id']:
                try:
                    rtr_obj = self._logical_router_read(rtr_id)
                    rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                           rtr_repr='LIST')
                    ret_list.append(rtr_info)
                except NoIdError:
                    pass
        else:
            # read all routers in all projects
            project_rtrs = self._router_list_project()
            all_rtrs.append(project_rtrs)
        # prune phase
        for project_rtrs in all_rtrs:
            for proj_rtr in project_rtrs:
                proj_rtr_id = proj_rtr['uuid']
                if not self._filters_is_present(filters, 'id', proj_rtr_id):
                    continue
                # fq_name list compared via its unicode rendering
                proj_rtr_fq_name = unicode(proj_rtr['fq_name'])
                if not self._filters_is_present(filters, 'contrail:fq_name',
                                                proj_rtr_fq_name):
                    continue
                try:
                    rtr_obj = self._logical_router_read(proj_rtr['uuid'])
                    rtr_name = rtr_obj.get_display_name()
                    if not self._filters_is_present(filters, 'name', rtr_name):
                        continue
                    rtr_info = self._router_vnc_to_neutron(rtr_obj,
                                                           rtr_repr='LIST')
                except NoIdError:
                    # router deleted between list and read; skip it
                    continue
                ret_list.append(rtr_info)
        return ret_list
    #end router_list
def router_count(self, filters=None):
rtrs_info = self.router_list(filters=filters)
return len(rtrs_info)
#end router_count
    def _check_for_dup_router_subnet(self, router_id,
                                     network_id, subnet_id, subnet_cidr):
        """Abort (400) when *subnet_id*/*subnet_cidr* conflicts with an
        interface already attached to router *router_id*.

        A conflict is either a second port on the same subnet or a CIDR
        overlap, in either direction, with any already-attached subnet.
        A NoIdError anywhere in the scan is swallowed (no check possible).
        """
        try:
            rports = self.port_list(filters={'device_id': [router_id]})
            # It's possible these ports are on the same network, but
            # different subnets.
            new_ipnet = netaddr.IPNetwork(subnet_cidr)
            for p in rports:
                for ip in p['fixed_ips']:
                    if ip['subnet_id'] == subnet_id:
                        msg = (_("Router %s already has a port "
                                 "on subnet %s") % (router_id, subnet_id))
                        self._raise_contrail_exception(400,
                            exceptions.BadRequest(resource='router', msg=msg))
                    sub_id = ip['subnet_id']
                    subnet = self.subnet_read(sub_id)
                    cidr = subnet['cidr']
                    ipnet = netaddr.IPNetwork(cidr)
                    # overlap check in both directions
                    match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
                    match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
                    if match1 or match2:
                        data = {'subnet_cidr': subnet_cidr,
                                'subnet_id': subnet_id,
                                'cidr': cidr,
                                'sub_id': sub_id}
                        msg = (_("Cidr %(subnet_cidr)s of subnet "
                                 "%(subnet_id)s overlaps with cidr %(cidr)s "
                                 "of subnet %(sub_id)s") % data)
                        exc_info = {'type': 'BadRequest',
                                    'message': msg}
                        bottle.abort(400, json.dumps(exc_info))
        except NoIdError:
            pass
    def add_router_interface(self, context, router_id, port_id=None, subnet_id=None):
        """Attach an interface to router *router_id* and return its info dict.

        Exactly one of *port_id* (an existing port with a single fixed IP)
        or *subnet_id* (a new gateway port is created on that subnet) must
        be given.  Validation failures abort with 400/409; duplicate or
        overlapping subnets are rejected by _check_for_dup_router_subnet.
        """
        router_obj = self._logical_router_read(router_id)
        if port_id:
            port = self.port_read(port_id)
            # a port already bound to another router cannot be reused
            if (port['device_owner'] == constants.DEVICE_OWNER_ROUTER_INTF and
                    port['device_id']):
                self._raise_contrail_exception(409, exceptions.PortInUse(net_id=port['network_id'],
                                               port_id=port['id'],
                                               device_id=port['device_id']))
            fixed_ips = [ip for ip in port['fixed_ips']]
            if len(fixed_ips) != 1:
                msg = _('Router port must have exactly one fixed IP')
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
            subnet_id = fixed_ips[0]['subnet_id']
            subnet = self.subnet_read(subnet_id)
            self._check_for_dup_router_subnet(router_id,
                                              port['network_id'],
                                              subnet['id'],
                                              subnet['cidr'])
        elif subnet_id:
            subnet = self.subnet_read(subnet_id)
            if not subnet['gateway_ip']:
                msg = _('Subnet for router interface must have a gateway IP')
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
            self._check_for_dup_router_subnet(router_id,
                                              subnet['network_id'],
                                              subnet_id,
                                              subnet['cidr'])
            # create the router port on the subnet's gateway address
            fixed_ip = {'ip_address': subnet['gateway_ip'],
                        'subnet_id': subnet['id']}
            port = self.port_create(context, {'tenant_id': subnet['tenant_id'],
                'network_id': subnet['network_id'],
                'fixed_ips': [fixed_ip],
                'admin_state_up': True,
                'device_id': router_id,
                'device_owner': constants.DEVICE_OWNER_ROUTER_INTF,
                'name': ''})
            port_id = port['id']
        else:
            msg = _('Either port or subnet must be specified')
            exc_info = {'type': 'BadRequest', 'message': msg}
            bottle.abort(400, json.dumps(exc_info))
        # plumb the SNAT route table (if a gateway exists) and link the VMI
        self._set_snat_routing_table(router_obj, subnet['network_id'])
        vmi_obj = self._vnc_lib.virtual_machine_interface_read(id=port_id)
        router_obj.add_virtual_machine_interface(vmi_obj)
        self._logical_router_update(router_obj)
        info = {'id': router_id,
                'tenant_id': subnet['tenant_id'],
                'port_id': port_id,
                'subnet_id': subnet_id}
        return info
    # end add_router_interface
    def remove_router_interface(self, router_id, port_id=None, subnet_id=None):
        """Detach an interface (by *port_id* or *subnet_id*) from a router.

        Clears the SNAT route table from the interface's network, unlinks
        the VMI from the router and deletes the port.  Returns the same
        info dict shape as add_router_interface.
        """
        router_obj = self._logical_router_read(router_id)
        subnet = None
        if port_id:
            port_db = self.port_read(port_id)
            # the port must actually be an interface of this router
            if (port_db['device_owner'] != constants.DEVICE_OWNER_ROUTER_INTF
                    or port_db['device_id'] != router_id):
                self._raise_contrail_exception(404, RouterInterfaceNotFound(router_id=router_id,
                                               port_id=port_id))
            port_subnet_id = port_db['fixed_ips'][0]['subnet_id']
            if subnet_id and (port_subnet_id != subnet_id):
                self._raise_contrail_exception(409, exceptions.SubnetMismatchForPort(port_id=port_id,
                                               subnet_id=subnet_id))
            subnet_id = port_subnet_id
            subnet = self.subnet_read(subnet_id)
            network_id = subnet['network_id']
        elif subnet_id:
            subnet = self.subnet_read(subnet_id)
            network_id = subnet['network_id']
            # find the router port sitting on this subnet; the for/else
            # 'else' runs only when no attached port matched
            for intf in router_obj.get_virtual_machine_interface_refs() or []:
                port_id = intf['uuid']
                port_db = self.port_read(port_id)
                if subnet_id == port_db['fixed_ips'][0]['subnet_id']:
                    break
            else:
                msg = _('Subnet %s not connected to router %s') % (subnet_id,
                                                                   router_id)
                exc_info = {'type': 'BadRequest', 'message': msg}
                bottle.abort(400, json.dumps(exc_info))
        self._clear_snat_routing_table(router_obj, subnet['network_id'])
        port_obj = self._virtual_machine_interface_read(port_id)
        router_obj.del_virtual_machine_interface(port_obj)
        self._vnc_lib.logical_router_update(router_obj)
        self.port_delete(port_id)
        info = {'id': router_id,
                'tenant_id': subnet['tenant_id'],
                'port_id': port_id,
                'subnet_id': subnet_id}
        return info
    # end remove_router_interface
# floatingip api handlers
def floatingip_create(self, fip_q):
try:
fip_obj = self._floatingip_neutron_to_vnc(fip_q, CREATE)
except Exception, e:
#logging.exception(e)
msg = _('Internal error when trying to create floating ip. '
'Please be sure the network %s is an external '
'network.') % (fip_q['floating_network_id'])
exc_info = {'type': 'BadRequest', 'message': msg}
bottle.abort(400, json.dumps(exc_info))
try:
fip_uuid = self._vnc_lib.floating_ip_create(fip_obj)
except Exception as e:
self._raise_contrail_exception(409,
exceptions.IpAddressGenerationFailure(net_id=fip_q['floating_network_id']))
fip_obj = self._vnc_lib.floating_ip_read(id=fip_uuid)
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_create
def floatingip_read(self, fip_uuid):
try:
fip_obj = self._vnc_lib.floating_ip_read(id=fip_uuid)
except NoIdError:
self._raise_contrail_exception(404, FloatingIPNotFound(floatingip_id=fip_uuid))
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_read
def floatingip_update(self, fip_id, fip_q):
fip_q['id'] = fip_id
fip_obj = self._floatingip_neutron_to_vnc(fip_q, UPDATE)
self._vnc_lib.floating_ip_update(fip_obj)
return self._floatingip_vnc_to_neutron(fip_obj)
#end floatingip_update
def floatingip_delete(self, fip_id):
self._vnc_lib.floating_ip_delete(id=fip_id)
#end floatingip_delete
def floatingip_list(self, context, filters=None):
# Read in floating ips with either
# - port(s) as anchor
# - project(s) as anchor
# - none as anchor (floating-ip collection)
ret_list = []
proj_ids = None
port_ids = None
if filters:
if 'tenant_id' in filters:
proj_ids = self._validate_project_ids(context,
filters['tenant_id'])
elif 'port_id' in filters:
port_ids = filters['port_id']
else: # no filters
if not context['is_admin']:
proj_ids = [str(uuid.UUID(context['tenant']))]
if port_ids:
fip_objs = self._floatingip_list(back_ref_id=port_ids)
elif proj_ids:
fip_objs = self._floatingip_list(back_ref_id=proj_ids)
else:
fip_objs = self._floatingip_list()
for fip_obj in fip_objs:
if 'floating_ip_address' in filters:
if (fip_obj.get_floating_ip_address() not in
filters['floating_ip_address']):
continue
ret_list.append(self._floatingip_vnc_to_neutron(fip_obj))
return ret_list
#end floatingip_list
def floatingip_count(self, context, filters=None):
floatingip_info = self.floatingip_list(context, filters)
return len(floatingip_info)
#end floatingip_count
def _ip_addr_in_net_id(self, ip_addr, net_id):
"""Checks if ip address is present in net-id."""
net_ip_list = [ipobj.get_instance_ip_address() for ipobj in
self._instance_ip_list(back_ref_id=[net_id])]
return ip_addr in net_ip_list
def _create_instance_ip(self, net_obj, port_obj, ip_addr=None):
ip_name = str(uuid.uuid4())
ip_obj = InstanceIp(name=ip_name)
ip_obj.uuid = ip_name
ip_obj.set_virtual_machine_interface(port_obj)
ip_obj.set_virtual_network(net_obj)
if ip_addr:
ip_obj.set_instance_ip_address(ip_addr)
ip_id = self._instance_ip_create(ip_obj)
return ip_id
# end _create_instance_ip
    def _port_create_instance_ip(self, net_obj, port_obj, port_q):
        """Create instance-ips for each entry of port_q['fixed_ips'].

        On any failure the iips created so far are rolled back and a 409
        IpAddressGenerationFailure is raised.  Afterwards, any iip already
        on the port that was not (re)created here is deleted, so the port
        ends up with exactly the requested set.
        """
        created_iip_ids = []
        fixed_ips = port_q.get('fixed_ips')
        if fixed_ips is None:
            return
        for fixed_ip in fixed_ips:
            try:
                ip_addr = fixed_ip.get('ip_address')
                subnet_id = fixed_ip.get('subnet_id')
                if not ip_addr and 'subnet_id' in fixed_ip:
                    # no explicit address: allocate one from the subnet
                    # (second token of the mapping key is the prefix)
                    subnet_key = self._subnet_vnc_read_mapping(id=subnet_id)
                    ip_addr = self._vnc_lib.virtual_network_ip_alloc(net_obj,
                                 subnet=subnet_key.split()[1])[0]
                ip_id = self._create_instance_ip(net_obj, port_obj, ip_addr)
                created_iip_ids.append(ip_id)
            except Exception as e:
                # Resources are not available
                for iip_id in created_iip_ids:
                    self._instance_ip_delete(instance_ip_id=iip_id)
                self._raise_contrail_exception(409,
                    exceptions.IpAddressGenerationFailure(net_id=net_obj.uuid))
        for iip in getattr(port_obj, 'instance_ip_back_refs', []):
            if iip['uuid'] not in created_iip_ids:
                iip_obj = self._instance_ip_delete(instance_ip_id=iip['uuid'])
    # end _port_create_instance_ip
# port api handlers
    def port_create(self, context, port_q):
        """Create a port on port_q['network_id'] and return its neutron view.

        Allocates fixed IPs when requested (or one automatic address when
        the network has ipam refs), records the port's owning tenant and
        bumps the cached per-tenant port count.
        """
        net_id = port_q['network_id']
        net_obj = self._network_read(net_id)
        tenant_id = self._get_tenant_id_for_create(context, port_q);
        proj_id = str(uuid.UUID(tenant_id))
        # initialize port object
        port_obj = self._port_neutron_to_vnc(port_q, net_obj, CREATE)
        # create the object
        port_id = self._resource_create('virtual_machine_interface', port_obj)
        if 'fixed_ips' in port_q:
            self._port_create_instance_ip(net_obj, port_obj, port_q)
        elif net_obj.get_network_ipam_refs():
            # no fixed_ips requested: let the ipam pick one address
            self._port_create_instance_ip(net_obj, port_obj,
                 {'fixed_ips':[{'ip_address': None}]})
        # TODO below reads back default parent name, fix it
        port_obj = self._virtual_machine_interface_read(port_id=port_id)
        ret_port_q = self._port_vnc_to_neutron(port_obj)
        self._set_obj_tenant_id(port_id, proj_id)
        # update cache on successful creation
        tenant_id = proj_id.replace('-', '')
        if tenant_id not in self._db_cache['q_tenant_port_count']:
            ncurports = self.port_count({'tenant_id': tenant_id})
        else:
            ncurports = self._db_cache['q_tenant_port_count'][tenant_id]
        self._db_cache['q_tenant_port_count'][tenant_id] = ncurports + 1
        return ret_port_q
    #end port_create
# TODO add obj param and let caller use below only as a converter
def port_read(self, port_id):
try:
port_obj = self._virtual_machine_interface_read(port_id=port_id)
except NoIdError:
self._raise_contrail_exception(404, exceptions.PortNotFound(port_id=port_id))
ret_port_q = self._port_vnc_to_neutron(port_obj)
self._db_cache['q_ports'][port_id] = ret_port_q
return ret_port_q
#end port_read
def port_update(self, port_id, port_q):
# if ip address passed then use it
req_ip_addrs = []
req_ip_subnets = []
port_q['id'] = port_id
port_obj = self._port_neutron_to_vnc(port_q, None, UPDATE)
net_id = port_obj.get_virtual_network_refs()[0]['uuid']
net_obj = self._network_read(net_id)
self._virtual_machine_interface_update(port_obj)
self._port_create_instance_ip(net_obj, port_obj, port_q)
ret_port_q = self._port_vnc_to_neutron(port_obj)
port_obj = self._virtual_machine_interface_read(port_id=port_id)
self._db_cache['q_ports'][port_id] = ret_port_q
return ret_port_q
#end port_update
def port_delete(self, port_id):
port_obj = self._port_neutron_to_vnc({'id': port_id}, None, DELETE)
if port_obj.parent_type == 'virtual-machine':
instance_id = port_obj.parent_uuid
else:
vm_refs = port_obj.get_virtual_machine_refs()
if vm_refs:
instance_id = vm_refs[0]['uuid']
else:
instance_id = None
if port_obj.get_logical_router_back_refs():
self._raise_contrail_exception(409, L3PortInUse(port_id=port_id,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF))
if port_obj.get_logical_router_back_refs():
self._raise_contrail_exception(409, L3PortInUse(port_id=port_id,
device_owner=constants.DEVICE_OWNER_ROUTER_INTF))
# release instance IP address
iip_back_refs = getattr(port_obj, 'instance_ip_back_refs', None)
if iip_back_refs:
for iip_back_ref in iip_back_refs:
# if name contains IP address then this is shared ip
iip_obj = self._vnc_lib.instance_ip_read(
id=iip_back_ref['uuid'])
# in case of shared ip only delete the link to the VMI
if len(iip_obj.name.split(' ')) > 1:
iip_obj.del_virtual_machine_interface(port_obj)
self._instance_ip_update(iip_obj)
else:
self._instance_ip_delete(
instance_ip_id=iip_back_ref['uuid'])
# disassociate any floating IP used by instance
fip_back_refs = getattr(port_obj, 'floating_ip_back_refs', None)
if fip_back_refs:
for fip_back_ref in fip_back_refs:
self.floatingip_update(fip_back_ref['uuid'], {'port_id': None})
tenant_id = self._get_obj_tenant_id('port', port_id)
self._virtual_machine_interface_delete(port_id=port_id)
# delete instance if this was the last port
try:
if instance_id:
self._vnc_lib.virtual_machine_delete(id=instance_id)
except RefsExistError:
pass
try:
del self._db_cache['q_ports'][port_id]
except KeyError:
pass
# update cache on successful deletion
try:
self._db_cache['q_tenant_port_count'][tenant_id] -= 1
except KeyError:
pass
self._del_obj_tenant_id(port_id)
#end port_delete
def port_list(self, context=None, filters=None):
project_obj = None
ret_q_ports = []
all_project_ids = []
# TODO used to find dhcp server field. support later...
if (filters.get('device_owner') == 'network:dhcp' or
'network:dhcp' in filters.get('device_owner', [])):
return ret_q_ports
if not 'device_id' in filters:
# Listing from back references
if not filters:
# TODO once vmi is linked to project in schema, use project_id
# to limit scope of list
if not context['is_admin']:
project_id = str(uuid.UUID(context['tenant']))
else:
project_id = None
# read all VMI and IIP in detail one-shot
if self._list_optimization_enabled:
all_port_gevent = gevent.spawn(self._virtual_machine_interface_list,
parent_id=project_id)
else:
all_port_gevent = gevent.spawn(self._virtual_machine_interface_list)
port_iip_gevent = gevent.spawn(self._instance_ip_list)
port_net_gevent = gevent.spawn(self._virtual_network_list,
parent_id=project_id,
detail=True)
gevent.joinall([all_port_gevent, port_iip_gevent, port_net_gevent])
all_port_objs = all_port_gevent.value
port_iip_objs = port_iip_gevent.value
port_net_objs = port_net_gevent.value
ret_q_ports = self._port_list(port_net_objs, all_port_objs,
port_iip_objs)
elif 'tenant_id' in filters:
all_project_ids = self._validate_project_ids(context,
filters['tenant_id'])
elif 'name' in filters:
all_project_ids = [str(uuid.UUID(context['tenant']))]
elif 'id' in filters:
# TODO optimize
for port_id in filters['id']:
try:
port_info = self.port_read(port_id)
except NoIdError:
continue
ret_q_ports.append(port_info)
for proj_id in all_project_ids:
ret_q_ports = self._port_list_project(proj_id)
if 'network_id' in filters:
ret_q_ports = self._port_list_network(filters['network_id'])
# prune phase
ret_list = []
for port_obj in ret_q_ports:
if not self._filters_is_present(filters, 'name',
port_obj['name']):
continue
ret_list.append(port_obj)
return ret_list
# Listing from parent to children
device_ids = filters['device_id']
for dev_id in device_ids:
try:
# TODO optimize
port_objs = self._virtual_machine_interface_list(
parent_id=dev_id,
back_ref_id=dev_id)
if not port_objs:
raise NoIdError(None)
for port_obj in port_objs:
port_info = self._port_vnc_to_neutron(port_obj)
ret_q_ports.append(port_info)
except NoIdError:
try:
router_obj = self._logical_router_read(rtr_id=dev_id)
intfs = router_obj.get_virtual_machine_interface_refs()
for intf in (intfs or []):
try:
port_info = self.port_read(intf['uuid'])
except NoIdError:
continue
ret_q_ports.append(port_info)
except NoIdError:
continue
return ret_q_ports
#end port_list
def port_count(self, filters=None):
    """Return the number of ports matching *filters*.

    DHCP-owned ports are managed internally and always report zero.
    Per-tenant counts are cached in self._db_cache['q_tenant_port_count'].
    """
    # BUG FIX: the declared default is None, but the code below calls
    # filters.get() immediately; normalize to an empty dict.
    filters = filters or {}

    if (filters.get('device_owner') == 'network:dhcp' or
            'network:dhcp' in filters.get('device_owner', [])):
        return 0

    if 'tenant_id' in filters:
        # tenant_id may arrive as a single value or a one-element list
        if isinstance(filters['tenant_id'], list):
            project_id = str(uuid.UUID(filters['tenant_id'][0]))
        else:
            project_id = str(uuid.UUID(filters['tenant_id']))

        try:
            nports = self._db_cache['q_tenant_port_count'][project_id]
            if nports < 0:
                # TBD Hack. fix in case of multiple q servers after 1.03
                nports = 0
                del self._db_cache['q_tenant_port_count'][project_id]
            return nports
        except KeyError:
            # do it the hard way but remember for next time
            nports = len(self._port_list_project(project_id))
            self._db_cache['q_tenant_port_count'][project_id] = nports
    else:
        # across all projects - TODO very expensive,
        # get only a count from api-server!
        nports = len(self.port_list(filters=filters))

    return nports
#end port_count
# security group api handlers
def security_group_create(self, sg_q):
    """Create a security group plus its default allow-all-egress rule."""
    sg_obj = self._security_group_neutron_to_vnc(sg_q, CREATE)
    sg_uuid = self._resource_create('security_group', sg_obj)

    # Every new group starts with a rule permitting all egress traffic.
    def_rule = {
        'port_range_min': 0,
        'port_range_max': 65535,
        'direction': 'egress',
        'remote_ip_prefix': '0.0.0.0/0',
        'remote_group_id': None,
        'protocol': 'any',
    }
    rule = self._security_group_rule_neutron_to_vnc(def_rule, CREATE)
    self._security_group_rule_create(sg_uuid, rule)

    return self._security_group_vnc_to_neutron(sg_obj)
#end security_group_create
def security_group_update(self, sg_id, sg_q):
    """Update security group *sg_id* from the neutron dict *sg_q*."""
    sg_q['id'] = sg_id
    updated_obj = self._security_group_neutron_to_vnc(sg_q, UPDATE)
    self._vnc_lib.security_group_update(updated_obj)
    return self._security_group_vnc_to_neutron(updated_obj)
#end security_group_update
def security_group_read(self, sg_id):
    """Return the neutron dict for security group *sg_id*; 404 when absent."""
    try:
        group = self._vnc_lib.security_group_read(id=sg_id)
    except NoIdError:
        self._raise_contrail_exception(
            404, SecurityGroupNotFound(id=sg_id))
    return self._security_group_vnc_to_neutron(group)
#end security_group_read
def security_group_delete(self, sg_id):
    """Delete a security group; the 'default' group may never be removed."""
    try:
        group = self._vnc_lib.security_group_read(id=sg_id)
        if group.name == 'default':
            self._raise_contrail_exception(
                409, SecurityGroupCannotRemoveDefault())
    except NoIdError:
        # Already gone: deletion is idempotent.
        return

    try:
        self._security_group_delete(sg_id)
    except RefsExistError:
        # Still referenced by ports/rules elsewhere.
        self._raise_contrail_exception(409, SecurityGroupInUse(id=sg_id))

    self._db_cache_flush('q_tenant_to_def_sg')
#end security_group_delete
def security_group_list(self, context, filters=None):
    """List security groups visible to the caller as neutron dicts."""
    ret_list = []

    # collect phase
    all_sgs = [] # all sgs in all projects
    if context and not context['is_admin']:
        # Non-admin callers only see their own project's groups.
        # NOTE(review): this retries up to 10 times with 3-second gevent
        # sleeps, presumably waiting for the tenant's default group to be
        # created asynchronously; a tenant that legitimately has no groups
        # pays the full ~27s delay -- confirm whether this is still needed.
        for i in range(10):
            project_sgs = self._security_group_list_project(str(uuid.UUID(context['tenant'])))
            if project_sgs:
                break
            gevent.sleep(3)
        all_sgs.append(project_sgs)
    else: # admin context
        if filters and 'tenant_id' in filters:
            project_ids = self._validate_project_ids(context,
                                                     filters['tenant_id'])
            for p_id in project_ids:
                project_sgs = self._security_group_list_project(p_id)
                all_sgs.append(project_sgs)
        else: # no filters
            all_sgs.append(self._security_group_list_project(None))

    # prune phase: apply id/name filters and convert survivors to neutron dicts
    for project_sgs in all_sgs:
        for sg_obj in project_sgs:
            if not self._filters_is_present(filters, 'id', sg_obj.uuid):
                continue
            if not self._filters_is_present(filters, 'name',
                                            sg_obj.get_display_name() or sg_obj.name):
                continue
            sg_info = self._security_group_vnc_to_neutron(sg_obj)
            ret_list.append(sg_info)

    return ret_list
#end security_group_list
def _get_ip_proto_number(self, protocol):
if protocol is None:
return
return IP_PROTOCOL_MAP.get(protocol, protocol)
def _validate_port_range(self, rule):
    """Check that the rule's port range is valid for its protocol.

    Raises a contrail 400 exception on any malformed range; returns
    None silently when the range is acceptable (or entirely absent).
    """
    if (rule['port_range_min'] is None and
            rule['port_range_max'] is None):
        return
    if not rule['protocol']:
        self._raise_contrail_exception(
            400, SecurityGroupProtocolRequiredWithPorts())

    ip_proto = self._get_ip_proto_number(rule['protocol'])
    if ip_proto in [constants.PROTO_NUM_TCP, constants.PROTO_NUM_UDP]:
        # Both ends must be present and ordered min <= max.
        # BUG FIX: the old "min <= max" test compared against None, which
        # is a TypeError on Python 3; guard explicitly (same outcome on
        # Python 2, where int-vs-None compared False).
        if (rule['port_range_min'] is None or
                rule['port_range_max'] is None or
                rule['port_range_min'] > rule['port_range_max']):
            self._raise_contrail_exception(
                400, SecurityGroupInvalidPortRange())
    elif ip_proto == constants.PROTO_NUM_ICMP:
        # For ICMP, port_range_min carries the type and port_range_max the
        # code; both must fit in a byte.
        for attr, field in [('port_range_min', 'type'),
                            ('port_range_max', 'code')]:
            # Guard against None before the numeric comparison (py3-safe).
            if rule[attr] is not None and rule[attr] > 255:
                self._raise_contrail_exception(
                    400, SecurityGroupInvalidIcmpValue(
                        field=field, attr=attr, value=rule[attr]))
        if (rule['port_range_min'] is None and
                rule['port_range_max']):
            # A code without a type is meaningless.
            self._raise_contrail_exception(
                400, SecurityGroupMissingIcmpType(
                    value=rule['port_range_max']))
def security_group_rule_create(self, sgr_q):
    """Validate and create a security group rule from neutron dict *sgr_q*."""
    self._validate_port_range(sgr_q)
    group_id = sgr_q['security_group_id']
    vnc_rule = self._security_group_rule_neutron_to_vnc(sgr_q, CREATE)
    self._security_group_rule_create(group_id, vnc_rule)
    return self._security_group_rule_vnc_to_neutron(group_id, vnc_rule)
#end security_group_rule_create
def security_group_rule_read(self, sgr_id):
    """Return the neutron dict for rule *sgr_id*; 404 when not found."""
    group, rule = self._security_group_rule_find(sgr_id)
    if group and rule:
        return self._security_group_rule_vnc_to_neutron(
            group.uuid, rule, group)
    self._raise_contrail_exception(404, SecurityGroupRuleNotFound(id=sgr_id))
#end security_group_rule_read
def security_group_rule_delete(self, sgr_id):
    """Delete rule *sgr_id* from its owning group; 404 when not found."""
    group, rule = self._security_group_rule_find(sgr_id)
    if group and rule:
        return self._security_group_rule_delete(group, rule)
    self._raise_contrail_exception(404, SecurityGroupRuleNotFound(id=sgr_id))
#end security_group_rule_delete
def security_group_rules_read(self, sg_id, sg_obj=None):
    """Return neutron dicts for all rules of security group *sg_id*.

    *sg_obj* may be passed to avoid re-reading the group from the API
    server.  Returns an empty list when the group has no rule entries;
    raises a contrail 404 exception when the group does not exist.
    """
    try:
        if not sg_obj:
            sg_obj = self._vnc_lib.security_group_read(id=sg_id)

        sgr_entries = sg_obj.get_security_group_entries()
        sg_rules = []
        if sgr_entries is None:
            # FIX: return an empty list (was bare None) so the return
            # type is consistent; callers only truthiness-test it.
            return sg_rules

        for sg_rule in sgr_entries.get_policy_rule():
            sg_info = self._security_group_rule_vnc_to_neutron(sg_obj.uuid,
                                                               sg_rule,
                                                               sg_obj)
            sg_rules.append(sg_info)
    except NoIdError:
        self._raise_contrail_exception(404, SecurityGroupNotFound(id=sg_id))

    return sg_rules
#end security_group_rules_read
def security_group_rule_list(self, context=None, filters=None):
    """List rules of all security groups matching *filters*."""
    # collect phase: one list of groups per relevant project
    groups_by_project = []
    if filters and 'tenant_id' in filters:
        for proj_id in self._validate_project_ids(context,
                                                  filters['tenant_id']):
            groups_by_project.append(
                self._security_group_list_project(proj_id))
    else: # no filters
        groups_by_project.append(self._security_group_list_project(None))

    # prune phase: keep rules of groups that pass the id filter
    ret_list = []
    for project_groups in groups_by_project:
        for sg_obj in project_groups:
            # TODO implement same for name specified in filter
            if not self._filters_is_present(filters, 'id', sg_obj.uuid):
                continue
            rules = self.security_group_rules_read(sg_obj.uuid, sg_obj)
            if rules:
                ret_list.extend(rules)

    return ret_list
#end security_group_rule_list
#route table api handlers
def route_table_create(self, rt_q):
    """Create a route table from the neutron dict *rt_q*."""
    rt_obj = self._route_table_neutron_to_vnc(rt_q, CREATE)
    self._route_table_create(rt_obj)
    return self._route_table_vnc_to_neutron(rt_obj)
#end route_table_create
def route_table_read(self, rt_id):
    """Return the neutron dict for route table *rt_id*; 404 when missing."""
    try:
        table = self._vnc_lib.route_table_read(id=rt_id)
    except NoIdError:
        # TODO add route table specific exception
        self._raise_contrail_exception(
            404, exceptions.NetworkNotFound(net_id=rt_id))
    return self._route_table_vnc_to_neutron(table)
#end route_table_read
def route_table_update(self, rt_id, rt_q):
    """Update route table *rt_id* from the neutron dict *rt_q*."""
    rt_q['id'] = rt_id
    updated = self._route_table_neutron_to_vnc(rt_q, UPDATE)
    self._vnc_lib.route_table_update(updated)
    return self._route_table_vnc_to_neutron(updated)
#end route_table_update
def route_table_delete(self, rt_id):
    """Delete route table *rt_id* (delegates to the vnc-side helper)."""
    self._route_table_delete(rt_id)
#end route_table_delete
def route_table_list(self, context, filters=None):
    """List route tables matching *filters* as neutron dicts."""
    # collect phase: one list of route tables per relevant project
    tables_by_project = []
    if filters and 'tenant_id' in filters:
        for proj_id in self._validate_project_ids(context,
                                                  filters['tenant_id']):
            tables_by_project.append(self._route_table_list_project(proj_id))
    elif filters and 'name' in filters:
        proj_id = str(uuid.UUID(context['tenant']))
        tables_by_project.append(self._route_table_list_project(proj_id))
    else: # no filters: walk every project in the domain
        for project in self._project_list_domain(None):
            tables_by_project.append(
                self._route_table_list_project(project['uuid']))

    # prune phase: apply the id and name filters
    ret_list = []
    for project_tables in tables_by_project:
        for table in project_tables:
            # TODO implement same for name specified in filter
            table_id = table['uuid']
            if not self._filters_is_present(filters, 'id', table_id):
                continue
            rt_info = self.route_table_read(table_id)
            if not self._filters_is_present(filters, 'name',
                                            rt_info['name']):
                continue
            ret_list.append(rt_info)

    return ret_list
#end route_table_list
#service instance api handlers
def svc_instance_create(self, si_q):
    """Create a service instance from the neutron dict *si_q*."""
    si_obj = self._svc_instance_neutron_to_vnc(si_q, CREATE)
    self._svc_instance_create(si_obj)
    return self._svc_instance_vnc_to_neutron(si_obj)
#end svc_instance_create
def svc_instance_read(self, si_id):
    """Return the neutron dict for service instance *si_id*; 404 when missing."""
    try:
        instance = self._vnc_lib.service_instance_read(id=si_id)
    except NoIdError:
        # TODO add svc instance specific exception
        self._raise_contrail_exception(
            404, exceptions.NetworkNotFound(net_id=si_id))
    return self._svc_instance_vnc_to_neutron(instance)
#end svc_instance_read
def svc_instance_delete(self, si_id):
    """Delete service instance *si_id* (delegates to the vnc-side helper)."""
    self._svc_instance_delete(si_id)
#end svc_instance_delete
def svc_instance_list(self, context, filters=None):
    """List service instances matching *filters* as neutron dicts."""
    # collect phase: one list of instances per relevant project
    instances_by_project = []
    if filters and 'tenant_id' in filters:
        for proj_id in self._validate_project_ids(context,
                                                  filters['tenant_id']):
            instances_by_project.append(self._svc_instance_list_project(proj_id))
    elif filters and 'name' in filters:
        proj_id = str(uuid.UUID(context['tenant']))
        instances_by_project.append(self._svc_instance_list_project(proj_id))
    else: # no filters: walk every project in the domain
        for project in self._project_list_domain(None):
            instances_by_project.append(
                self._svc_instance_list_project(project['uuid']))

    # prune phase: apply the id and name filters
    ret_list = []
    for project_instances in instances_by_project:
        for instance in project_instances:
            # TODO implement same for name specified in filter
            instance_id = instance['uuid']
            if not self._filters_is_present(filters, 'id', instance_id):
                continue
            si_info = self.svc_instance_read(instance_id)
            if not self._filters_is_present(filters, 'name',
                                            si_info['name']):
                continue
            ret_list.append(si_info)

    return ret_list
#end svc_instance_list
#end class DBInterface
|
from contextlib import contextmanager
import ctypes
from gi.repository import Gst # pylint: disable=E0611
# Here we are using ctypes to call `gst_buffer_map` and `gst_buffer_unmap`
# because PyGObject does not properly expose struct GstMapInfo (see
# [bz #678663]). Apparently this is fixed upstream but we are still awaiting
# an upstream release (Mar 2014). Hopefully this can be removed in the future.
class _GstMapInfo(ctypes.Structure):
    # ctypes mirror of the C ``GstMapInfo`` struct, declaring only the
    # leading fields this module reads.
    # NOTE(review): the real GstMapInfo also has trailing members
    # (user_data, _gst_reserved) beyond these five -- confirm that
    # gst_buffer_map never writes past the fields declared here.
    _fields_ = [("memory", ctypes.c_void_p),                # GstMemory *memory
                ("flags", ctypes.c_int),                    # GstMapFlags flags
                ("data", ctypes.POINTER(ctypes.c_byte)),    # guint8 *data
                ("size", ctypes.c_size_t),                  # gsize size
                ("maxsize", ctypes.c_size_t)]               # gsize maxsize
_GstMapInfo_p = ctypes.POINTER(_GstMapInfo)

# C prototypes for the two entry points called directly:
#   gboolean gst_buffer_map(GstBuffer *, GstMapInfo *, GstMapFlags)
#   void     gst_buffer_unmap(GstBuffer *, GstMapInfo *)
_libgst = ctypes.CDLL("libgstreamer-1.0.so.0")
_libgst.gst_buffer_map.argtypes = [ctypes.c_void_p, _GstMapInfo_p, ctypes.c_int]
_libgst.gst_buffer_map.restype = ctypes.c_int
_libgst.gst_buffer_unmap.argtypes = [ctypes.c_void_p, _GstMapInfo_p]
_libgst.gst_buffer_unmap.restype = None
@contextmanager
def map_gst_buffer(buf, flags):
    """Map a Gst.Buffer and yield its contents as a ctypes byte array.

    The yielded object is a fixed-size ``ctypes.c_byte`` array over the
    mapped region; the buffer is unmapped when the ``with`` block exits.
    Raises TypeError for non-Gst.Buffer input, ValueError when WRITE
    mapping is requested on a non-writable buffer, and RuntimeError when
    the C-level map call fails.
    """
    if not isinstance(buf, Gst.Buffer):
        raise TypeError("map_gst_buffer must take a Gst.Buffer")
    if flags & Gst.MapFlags.WRITE and not buf.mini_object.is_writable():
        raise ValueError(
            "Writable array requested but buffer is not writeable")

    # hashing a GObject actually gives the address (pointer) of the C struct
    # that backs it!:
    pbuffer = hash(buf)
    mapping = _GstMapInfo()
    success = _libgst.gst_buffer_map(pbuffer, mapping, flags)
    if not success:
        raise RuntimeError("Couldn't map buffer")
    try:
        # Expose the mapped bytes as a sized array the caller can index.
        yield ctypes.cast(
            mapping.data, ctypes.POINTER(ctypes.c_byte * mapping.size)).contents
    finally:
        # Always unmap, even if the caller's block raises.
        _libgst.gst_buffer_unmap(pbuffer, mapping)
def test_map_buffer_reading_data():
    # Smoke test: the mapped bytes round-trip to the original string.
    # NOTE(review): new_wrapped("hello") with a str implies Python 2 /
    # older PyGObject; Python 3 requires bytes here -- confirm target env.
    Gst.init([])
    b = Gst.Buffer.new_wrapped("hello")
    with map_gst_buffer(b, Gst.MapFlags.READ) as a:
        assert 'hello' == ''.join(chr(x) for x in a)
def test_map_buffer_modifying_data():
    # A WRITE|READ mapping lets us patch a byte in place; the change must
    # be visible through the buffer's own extract_dup().
    Gst.init([])
    b = Gst.Buffer.new_wrapped("hello")
    with map_gst_buffer(b, Gst.MapFlags.WRITE | Gst.MapFlags.READ) as a:
        a[2] = 1
    assert b.extract_dup(0, 5) == "he\x01lo"
gst_hacks.py compatibility with OS X
Dynamic libraries are ".dylib" not ".so" on OS X.
If I only do `ctypes.CDLL("libgstreamer-1.0.dylib")`, `dlopen` won't
find the library in `/opt/local/lib` (where GStreamer is installed by
the "macports" package manager), even though I'm using
`/opt/local/bin/python`. Adding `/opt/local/lib` to `DYLD_LIBRARY_PATH`
doesn't work either, I get all sorts of "Symbol not found" errors. So
I'm calculating the path from where GStreamer's GObject-Introspection
typelib data is installed. I've only tested this with macports.
from contextlib import contextmanager
import ctypes
from os.path import dirname
import platform
from gi.repository import Gst # pylint: disable=E0611
# Here we are using ctypes to call `gst_buffer_map` and `gst_buffer_unmap`
# because PyGObject does not properly expose struct GstMapInfo (see
# [bz #678663]). Apparently this is fixed upstream but we are still awaiting
# an upstream release (Mar 2014). Hopefully this can be removed in the future.
class _GstMapInfo(ctypes.Structure):
    # ctypes mirror of the C ``GstMapInfo`` struct (leading fields only).
    # NOTE(review): the real struct carries extra trailing members
    # (user_data, _gst_reserved); confirm gst_buffer_map never writes
    # beyond the fields declared here.
    _fields_ = [("memory", ctypes.c_void_p),                # GstMemory *memory
                ("flags", ctypes.c_int),                    # GstMapFlags flags
                ("data", ctypes.POINTER(ctypes.c_byte)),    # guint8 *data
                ("size", ctypes.c_size_t),                  # gsize size
                ("maxsize", ctypes.c_size_t)]               # gsize maxsize
_GstMapInfo_p = ctypes.POINTER(_GstMapInfo)

if platform.system() == "Darwin":
    # On OS X the library is a ".dylib" and a bare CDLL name is not found
    # under the macports prefix, so the path is derived from where the
    # Gst introspection module was loaded.
    # NOTE(review): assumes Gst.__path__ is a path string here (gi
    # override modules differ between versions) -- confirm on a Darwin
    # host.
    _libgst = ctypes.CDLL(dirname(Gst.__path__) + "/../libgstreamer-1.0.dylib")
else:
    _libgst = ctypes.CDLL("libgstreamer-1.0.so.0")

# C prototypes:
#   gboolean gst_buffer_map(GstBuffer *, GstMapInfo *, GstMapFlags)
#   void     gst_buffer_unmap(GstBuffer *, GstMapInfo *)
_libgst.gst_buffer_map.argtypes = [ctypes.c_void_p, _GstMapInfo_p, ctypes.c_int]
_libgst.gst_buffer_map.restype = ctypes.c_int
_libgst.gst_buffer_unmap.argtypes = [ctypes.c_void_p, _GstMapInfo_p]
_libgst.gst_buffer_unmap.restype = None
@contextmanager
def map_gst_buffer(buf, flags):
    """Context manager mapping a Gst.Buffer's memory for direct access.

    Yields a ``ctypes.c_byte`` array over the mapped region and unmaps
    it on exit.  Raises TypeError for non-Gst.Buffer input, ValueError
    when a WRITE mapping is requested on a non-writable buffer, and
    RuntimeError when the C-level map call fails.
    """
    if not isinstance(buf, Gst.Buffer):
        raise TypeError("map_gst_buffer must take a Gst.Buffer")
    if flags & Gst.MapFlags.WRITE and not buf.mini_object.is_writable():
        raise ValueError(
            "Writable array requested but buffer is not writeable")

    # hashing a GObject actually gives the address (pointer) of the C struct
    # that backs it!:
    pbuffer = hash(buf)
    mapping = _GstMapInfo()
    success = _libgst.gst_buffer_map(pbuffer, mapping, flags)
    if not success:
        raise RuntimeError("Couldn't map buffer")
    try:
        # Present the mapped bytes as a sized, indexable array.
        yield ctypes.cast(
            mapping.data, ctypes.POINTER(ctypes.c_byte * mapping.size)).contents
    finally:
        # Unmap unconditionally, even if the caller's block raises.
        _libgst.gst_buffer_unmap(pbuffer, mapping)
def test_map_buffer_reading_data():
    # Smoke test: the mapped bytes round-trip to the original string.
    # NOTE(review): str payloads imply Python 2 / older PyGObject --
    # Python 3 requires bytes for new_wrapped(); confirm target env.
    Gst.init([])
    b = Gst.Buffer.new_wrapped("hello")
    with map_gst_buffer(b, Gst.MapFlags.READ) as a:
        assert 'hello' == ''.join(chr(x) for x in a)
def test_map_buffer_modifying_data():
    # A WRITE|READ mapping lets us patch a byte in place; the change must
    # be visible through the buffer's own extract_dup().
    Gst.init([])
    b = Gst.Buffer.new_wrapped("hello")
    with map_gst_buffer(b, Gst.MapFlags.WRITE | Gst.MapFlags.READ) as a:
        a[2] = 1
    assert b.extract_dup(0, 5) == "he\x01lo"
|
import re
import biothings
from urllib.parse import unquote
def build_column_9(id=None, parent=None, other=None):
    """Return a GFF3 column-9 attribute string for the given ID/Parent.

    At least one of *id* or *parent* must be provided; ID always comes
    first when present.  (*other* is currently unused.)
    """
    if id is None and parent is None:
        raise Exception("ERROR: attempt to build a GFF3 9th column with no ID or Parent attributes")

    parts = []
    if id is not None:
        parts.append("ID={0}".format(id))
    if parent is not None:
        parts.append("Parent={0}".format(parent))
    return ";".join(parts)
def column_9_value(value, key):
    '''
    Pass a case-sensitive key and this function will return the value,
    if present, else None.

    If the attribute carried comma-separated values the return is a list
    of strings; otherwise it is a single string.

    This was borrowed from the URL below and then modified for Python 3
    ftp://ftp.informatics.jax.org/%2Fpub/mgigff/gff3.py
    '''
    SEMI = ';'
    COMMA = ','
    EQ = '='
    WSP_RE = re.compile(r'^\s*$')

    # "." marks an empty column 9: there are no attributes, so there is
    # no value for any key.  BUG FIX: this used to return {}, which broke
    # callers that test "is not None" (and is unhashable in `in` checks).
    if value == ".":
        return None

    c9 = {}
    for t in value.split(SEMI):
        if WSP_RE.match(t):
            continue
        tt = t.split(EQ)
        if len(tt) != 2:
            raise Exception("Bad column 9 format: {0}".format(value))
        n = unquote(tt[0].strip())
        v = [unquote(part) for part in tt[1].strip().split(COMMA)]
        if len(v) == 1:
            # single value: unwrap the list
            v = v[0]
        c9[n] = v

    # this is a list if there were comma-separated values
    return c9.get(key)
def get_gff3_features(gff3_file):
    '''
    Parses the passed GFF3 file and returns two dicts, loaded with biocode.biothings objects:

    1. The first dict are the Assembly objects, keyed on assembly ID.  Each Assembly has all of the
       children populated, so you can fully recover gene, RNA, exon and CDS features iterating on
       the assembly.
    2. The second dict is a flat structure of all the descendent feature objects of the Assemblies
       keyed by the feature IDs.

    Feature rows whose type is not gene/mRNA/rRNA/tRNA/exon/CDS are
    silently ignored.

    See the documentation for each feature type in biocode.biothings for more info
    '''
    assemblies = dict()
    features = dict()

    # these are related to parsing any embedded FASTA
    in_fasta_section = False
    is_assembly_fasta = False
    current_fasta_id = None

    for line in open(gff3_file):
        if in_fasta_section == True:
            # header lines select which record subsequent sequence lines
            # belong to; only records matching a known assembly are kept
            m = re.search('>(\S+)\s*(.*)', line)
            if m:
                current_fasta_id = m.group(1)

                if current_fasta_id in assemblies:
                    is_assembly_fasta = True
                else:
                    is_assembly_fasta = False
            else:
                if is_assembly_fasta == True:
                    # must be a sequence line for an assembly
                    # python 2.6+ makes string concatenation amortized O(n)
                    # http://stackoverflow.com/a/4435752/1368079
                    assemblies[current_fasta_id].residues += str(line.rstrip())
                    assemblies[current_fasta_id].length = len( assemblies[current_fasta_id].residues )
            continue

        elif line.startswith("##FASTA"):
            # all data to the end of the file must be FASTA
            in_fasta_section = True
            continue

        cols = line.split("\t")

        # skip comments, pragmas and malformed rows
        if len(cols) != 9:
            continue

        mol_id = cols[0]

        # initialize this assembly if we haven't seen it yet
        if mol_id not in assemblies:
            assemblies[mol_id] = biothings.Assembly( id=mol_id, residues='' )

        current_assembly = assemblies[mol_id]
        # convert the 1-based inclusive GFF3 start to 0-based interbase
        rfmin = int(cols[3]) - 1
        rfmax = int(cols[4])
        rstrand = None
        feat_id = column_9_value(cols[8], 'ID')
        parent_id = column_9_value(cols[8], 'Parent')
        parent_feat = None

        # shared features is not yet supported
        if isinstance(parent_id, list):
            raise Exception("This line contains a shared feature with multiple parents. This isn't yet supported:\n{0}".format(line))

        if parent_id is not None:
            if parent_id in features:
                parent_feat = features[parent_id]
            else:
                raise Exception("Error in GFF3: Parent {0} referenced by a child feature before it was defined".format(parent_id) )

        # strand column: '+' -> 1, '-' -> -1, anything else -> 0
        if cols[6] == '-':
            rstrand = -1
        elif cols[6] == '+':
            rstrand = 1
        else:
            rstrand = 0

        if cols[2] == 'gene':
            gene = biothings.Gene(id=feat_id)
            gene.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            features[feat_id] = gene
            current_assembly.add_gene(gene)

        elif cols[2] == 'mRNA':
            mRNA = biothings.mRNA(id=feat_id, parent=parent_feat)
            mRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_mRNA(mRNA)
            features[feat_id] = mRNA

        elif cols[2] == 'rRNA':
            rRNA = biothings.rRNA(id=feat_id, parent=parent_feat)
            rRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_rRNA(rRNA)
            features[feat_id] = rRNA

        elif cols[2] == 'tRNA':
            tRNA = biothings.tRNA(id=feat_id, parent=parent_feat)
            tRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_tRNA(tRNA)
            features[feat_id] = tRNA

        elif cols[2] == 'exon':
            exon = biothings.Exon(id=feat_id, parent=parent_feat)
            exon.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_exon(exon)
            features[feat_id] = exon

        elif cols[2] == 'CDS':
            CDS = biothings.CDS(id=feat_id, parent=parent_feat)
            CDS.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_CDS(CDS)
            features[feat_id] = CDS

    return (assemblies, features)
def parse_gff3_by_relationship( gff3_file ):
    '''
    Parses a GFF3 file, caring only about the ID/Parent relationships and returning a dict where
    they keys are the feature IDs and the values are an dict of dicts.  The depth of this depends
    on the depth of parent/child relationships, but every node looks like this:

        $feat_id = { children: [], fmin: 30, cols: [] }

    Every feature must have either an ID or Parent defined, else an error will be thrown.

    The structure returned looks like this:

    {
        $molecule_id = {
            fmin: 30,
            cols: [...],
            children: {
                same as this structure
            }
        }, ...
    }

    WARNING:
    This will currently fail on any GFF that:
      - allows the same IDs on multiple lines, such as those for discontiguous features (allowed in spec)
      - features with shared parents
    '''
    ## might try turning this into a defaultdict at a later point: http://ohuiginn.net/mt/2010/07/nested_dictionaries_in_python.html
    fgraph = dict()
    current_molecule_id = None

    ## key = id, value = parent_id
    parentage = dict()

    ## where features go during processing that have a parent which hasn't been seen yet
    # NOTE(review): purgatory is never appended to in the visible code and
    # _reunite_children() is a stub, so out-of-order parents are not yet
    # actually handled.
    purgatory = list()

    for line in open(gff3_file):
        cols = line.split("\t")

        # only feature rows (9 columns) are considered; strip the newline
        # from the attribute column
        if len(cols) == 9:
            cols[8] = cols[8].rstrip()
        else:
            continue

        mol_id = cols[0]
        # NOTE(review): the name `id` shadows the builtin within this scope.
        id = column_9_value(cols[8], 'ID')
        # shared parents will cause failure here
        parent = column_9_value(cols[8], 'Parent')

        if id:
            parentage[id] = parent

        # molecule changed: flush any deferred children for the previous one
        if mol_id != current_molecule_id:
            if current_molecule_id is not None:
                _reunite_children( fgraph, current_molecule_id, purgatory )
                ## reset for the next molecule
                purgatory = list()

            current_molecule_id = mol_id

            if current_molecule_id not in fgraph:
                fgraph[current_molecule_id] = dict()

        molecule = fgraph[current_molecule_id]

        if parent:
            # attach this row under its ultimate (top-level) ancestor
            # NOTE(review): raises KeyError if the ancestor hasn't been
            # registered as a top-level node yet -- confirm input ordering.
            uparent = _get_ultimate_parent( parentage, parent )
            molecule[uparent]['children'].append( {'id': id, 'cols': cols} )
        else:
            if id is None:
                raise Exception("ERROR: Encountered a line without a Parent or ID assigned: {0}".format(line))

            ## this shouldn't exist already
            if id in molecule:
                raise Exception("ERROR: found duplicate id ({0}) in input file".format(id) )

            molecule[id] = {'fmin': cols[3], 'children': list(), 'cols': cols}

    ## don't forget to handle the last one
    _reunite_children( fgraph, current_molecule_id, purgatory )

    return fgraph
def print_biogene( gene=None, fh=None, source=None, on=None ):
    '''
    Writes a Gene object (from biocode.biothings) to *fh* in GFF3 format,
    emitting the gene row followed by, for each mRNA, its mRNA, CDS and
    exon rows.

    gene   -- biothings.Gene to export (required)
    fh     -- open, writable file handle
    source -- value for GFF3 column 2 (defaults to '.')
    on     -- molecule the coordinates are relative to; auto-detected
              from the gene's location when omitted
    '''
    ## handle defaults
    if source is None:
        source = '.'

    if gene is None:
        raise Exception( "ERROR: The print_biogene() function requires a biogene to be passed via the 'gene' argument" );

    ## we can auto-detect the molecule if the user didn't pass one
    #   and if there's only one.
    if on is None:
        on = gene.location().on

    gene_loc = gene.location_on( on )

    # GFF3 strand column: 1 -> '+', -1 -> '-', anything else stays '0'
    strand = '0'
    if gene_loc.strand == 1:
        strand = '+'
    elif gene_loc.strand == -1:
        strand = '-'

    columns = ['.']*9
    columns[0:3] = [gene_loc.on.id, source, 'gene']
    # fmin is stored 0-based interbase; GFF3 wants 1-based inclusive
    columns[3:7] = [str(gene_loc.fmin + 1), str(gene_loc.fmax), '.', strand]
    columns[8] = build_column_9( id=gene.id, parent=None, other=None )

    ## print the gene line
    fh.write( "\t".join(columns) + "\n" )

    ## modifications for the mRNA
    for mRNA in gene.mRNAs():
        mRNA_loc = mRNA.location_on( on )

        if mRNA_loc is None:
            raise Exception("ERROR: Expected mRNA {0} to be located on {1} but it wasn't".format(mRNA.id, on.id))

        columns[2] = 'mRNA'
        columns[3:5] = [str(mRNA_loc.fmin + 1), str(mRNA_loc.fmax)]
        columns[8] = build_column_9( id=mRNA.id, parent=mRNA.parent.id, other=None )
        fh.write( "\t".join(columns) + "\n" )

        ## handle each CDS for this mRNA
        for CDS in mRNA.CDSs():
            CDS_loc = CDS.location_on( on )

            if CDS_loc is None:
                raise Exception("ERROR: Expected CDS {0} to be located on {1} but it wasn't".format(CDS.id, on.id) )

            columns[2] = 'CDS'
            columns[3:5] = [str(CDS_loc.fmin + 1), str(CDS_loc.fmax)]
            columns[7] = str(CDS_loc.phase)
            columns[8] = build_column_9( id=CDS.id, parent=mRNA.id, other=None )
            fh.write( "\t".join(columns) + "\n" )

        # reset the phase column after the CDS rows; other features don't
        # carry a phase
        columns[7] = '.'

        ## handle each exon for this mRNA
        for exon in mRNA.exons():
            exon_loc = exon.location_on( on )

            if exon_loc is None:
                raise Exception("ERROR: Expected exon {0} to be located on {1} but it wasn't".format(exon.id, on.id))

            columns[2] = 'exon'
            columns[3:5] = [str(exon_loc.fmin + 1), str(exon_loc.fmax)]
            columns[8] = build_column_9( id=exon.id, parent=mRNA.id, other=None )
            fh.write( "\t".join(columns) + "\n" )
def _reunite_children( fg, mol_id, kids ):
    # Stub: intended to attach features whose parent appeared later in the
    # file ("purgatory") back into the graph.
    # NOTE(review): not implemented; callers currently always pass an empty
    # list, so this is a no-op.
    pass
def _get_ultimate_parent( p, id ):
if p is None:
return id
oldest = id
while p[oldest] is not None:
oldest = p[oldest]
return oldest
minor: added missing documentation for print_biogene()
import re
import biothings
from urllib.parse import unquote
def build_column_9(id=None, parent=None, other=None):
    """Compose the GFF3 attribute (9th) column from ID/Parent values.

    Raises when neither *id* nor *parent* is given; ID precedes Parent in
    the output.  (*other* is currently unused.)
    """
    if id is None and parent is None:
        raise Exception("ERROR: attempt to build a GFF3 9th column with no ID or Parent attributes")

    attributes = []
    if id is not None:
        attributes.append("ID={0}".format(id))
    if parent is not None:
        attributes.append("Parent={0}".format(parent))
    return ";".join(attributes)
def column_9_value(value, key):
    '''
    Pass a case-sensitive key and this function will return the value,
    if present, else None.

    If the attribute carried comma-separated values the return is a list
    of strings; otherwise it is a single string.

    This was borrowed from the URL below and then modified for Python 3
    ftp://ftp.informatics.jax.org/%2Fpub/mgigff/gff3.py
    '''
    SEMI = ';'
    COMMA = ','
    EQ = '='
    WSP_RE = re.compile(r'^\s*$')

    # "." marks an empty column 9: there are no attributes, so there is
    # no value for any key.  BUG FIX: this used to return {}, which broke
    # callers that test "is not None" (and is unhashable in `in` checks).
    if value == ".":
        return None

    c9 = {}
    for t in value.split(SEMI):
        if WSP_RE.match(t):
            continue
        tt = t.split(EQ)
        if len(tt) != 2:
            raise Exception("Bad column 9 format: {0}".format(value))
        n = unquote(tt[0].strip())
        v = [unquote(part) for part in tt[1].strip().split(COMMA)]
        if len(v) == 1:
            # single value: unwrap the list
            v = v[0]
        c9[n] = v

    # this is a list if there were comma-separated values
    return c9.get(key)
def get_gff3_features(gff3_file):
    '''
    Parses the passed GFF3 file and returns two dicts, loaded with biocode.biothings objects:

    1. The first dict are the Assembly objects, keyed on assembly ID.  Each Assembly has all of the
       children populated, so you can fully recover gene, RNA, exon and CDS features iterating on
       the assembly.
    2. The second dict is a flat structure of all the descendent feature objects of the Assemblies
       keyed by the feature IDs.

    Feature rows whose type is not gene/mRNA/rRNA/tRNA/exon/CDS are
    silently ignored.

    See the documentation for each feature type in biocode.biothings for more info
    '''
    assemblies = dict()
    features = dict()

    # these are related to parsing any embedded FASTA
    in_fasta_section = False
    is_assembly_fasta = False
    current_fasta_id = None

    for line in open(gff3_file):
        if in_fasta_section == True:
            # header lines select which record subsequent sequence lines
            # belong to; only records matching a known assembly are kept
            m = re.search('>(\S+)\s*(.*)', line)
            if m:
                current_fasta_id = m.group(1)

                if current_fasta_id in assemblies:
                    is_assembly_fasta = True
                else:
                    is_assembly_fasta = False
            else:
                if is_assembly_fasta == True:
                    # must be a sequence line for an assembly
                    # python 2.6+ makes string concatenation amortized O(n)
                    # http://stackoverflow.com/a/4435752/1368079
                    assemblies[current_fasta_id].residues += str(line.rstrip())
                    assemblies[current_fasta_id].length = len( assemblies[current_fasta_id].residues )
            continue

        elif line.startswith("##FASTA"):
            # all data to the end of the file must be FASTA
            in_fasta_section = True
            continue

        cols = line.split("\t")

        # skip comments, pragmas and malformed rows
        if len(cols) != 9:
            continue

        mol_id = cols[0]

        # initialize this assembly if we haven't seen it yet
        if mol_id not in assemblies:
            assemblies[mol_id] = biothings.Assembly( id=mol_id, residues='' )

        current_assembly = assemblies[mol_id]
        # convert the 1-based inclusive GFF3 start to 0-based interbase
        rfmin = int(cols[3]) - 1
        rfmax = int(cols[4])
        rstrand = None
        feat_id = column_9_value(cols[8], 'ID')
        parent_id = column_9_value(cols[8], 'Parent')
        parent_feat = None

        # shared features is not yet supported
        if isinstance(parent_id, list):
            raise Exception("This line contains a shared feature with multiple parents. This isn't yet supported:\n{0}".format(line))

        if parent_id is not None:
            if parent_id in features:
                parent_feat = features[parent_id]
            else:
                raise Exception("Error in GFF3: Parent {0} referenced by a child feature before it was defined".format(parent_id) )

        # strand column: '+' -> 1, '-' -> -1, anything else -> 0
        if cols[6] == '-':
            rstrand = -1
        elif cols[6] == '+':
            rstrand = 1
        else:
            rstrand = 0

        if cols[2] == 'gene':
            gene = biothings.Gene(id=feat_id)
            gene.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            features[feat_id] = gene
            current_assembly.add_gene(gene)

        elif cols[2] == 'mRNA':
            mRNA = biothings.mRNA(id=feat_id, parent=parent_feat)
            mRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_mRNA(mRNA)
            features[feat_id] = mRNA

        elif cols[2] == 'rRNA':
            rRNA = biothings.rRNA(id=feat_id, parent=parent_feat)
            rRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_rRNA(rRNA)
            features[feat_id] = rRNA

        elif cols[2] == 'tRNA':
            tRNA = biothings.tRNA(id=feat_id, parent=parent_feat)
            tRNA.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_tRNA(tRNA)
            features[feat_id] = tRNA

        elif cols[2] == 'exon':
            exon = biothings.Exon(id=feat_id, parent=parent_feat)
            exon.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_exon(exon)
            features[feat_id] = exon

        elif cols[2] == 'CDS':
            CDS = biothings.CDS(id=feat_id, parent=parent_feat)
            CDS.locate_on(target=current_assembly, fmin=rfmin, fmax=rfmax, strand=rstrand)
            parent_feat.add_CDS(CDS)
            features[feat_id] = CDS

    return (assemblies, features)
def parse_gff3_by_relationship( gff3_file ):
    '''
    Parses a GFF3 file, caring only about the ID/Parent relationships and returning a dict where
    they keys are the feature IDs and the values are an dict of dicts.  The depth of this depends
    on the depth of parent/child relationships, but every node looks like this:

        $feat_id = { children: [], fmin: 30, cols: [] }

    Every feature must have either an ID or Parent defined, else an error will be thrown.

    The structure returned looks like this:

    {
        $molecule_id = {
            fmin: 30,
            cols: [...],
            children: {
                same as this structure
            }
        }, ...
    }

    WARNING:
    This will currently fail on any GFF that:
      - allows the same IDs on multiple lines, such as those for discontiguous features (allowed in spec)
      - features with shared parents
    '''
    ## might try turning this into a defaultdict at a later point: http://ohuiginn.net/mt/2010/07/nested_dictionaries_in_python.html
    fgraph = dict()
    current_molecule_id = None

    ## key = id, value = parent_id
    parentage = dict()

    ## where features go during processing that have a parent which hasn't been seen yet
    # NOTE(review): purgatory is never appended to in the visible code and
    # _reunite_children() is a stub, so out-of-order parents are not yet
    # actually handled.
    purgatory = list()

    for line in open(gff3_file):
        cols = line.split("\t")

        # only feature rows (9 columns) are considered; strip the newline
        # from the attribute column
        if len(cols) == 9:
            cols[8] = cols[8].rstrip()
        else:
            continue

        mol_id = cols[0]
        # NOTE(review): the name `id` shadows the builtin within this scope.
        id = column_9_value(cols[8], 'ID')
        # shared parents will cause failure here
        parent = column_9_value(cols[8], 'Parent')

        if id:
            parentage[id] = parent

        # molecule changed: flush any deferred children for the previous one
        if mol_id != current_molecule_id:
            if current_molecule_id is not None:
                _reunite_children( fgraph, current_molecule_id, purgatory )
                ## reset for the next molecule
                purgatory = list()

            current_molecule_id = mol_id

            if current_molecule_id not in fgraph:
                fgraph[current_molecule_id] = dict()

        molecule = fgraph[current_molecule_id]

        if parent:
            # attach this row under its ultimate (top-level) ancestor
            # NOTE(review): raises KeyError if the ancestor hasn't been
            # registered as a top-level node yet -- confirm input ordering.
            uparent = _get_ultimate_parent( parentage, parent )
            molecule[uparent]['children'].append( {'id': id, 'cols': cols} )
        else:
            if id is None:
                raise Exception("ERROR: Encountered a line without a Parent or ID assigned: {0}".format(line))

            ## this shouldn't exist already
            if id in molecule:
                raise Exception("ERROR: found duplicate id ({0}) in input file".format(id) )

            molecule[id] = {'fmin': cols[3], 'children': list(), 'cols': cols}

    ## don't forget to handle the last one
    _reunite_children( fgraph, current_molecule_id, purgatory )

    return fgraph
def print_biogene( gene=None, fh=None, source=None, on=None ):
    '''
    This method accepts a Gene object located on an Assembly object (from biothings.py) and prints
    the feature graph for that gene in GFF3 format, including the gene, mRNA, CDS and exon features.

    gene   -- biothings.Gene to export (required)
    fh     -- open, writable file handle
    source -- value for GFF3 column 2 (defaults to '.')
    on     -- molecule the coordinates are relative to; auto-detected
              from the gene's location when omitted
    '''
    ## handle defaults
    if source is None:
        source = '.'

    if gene is None:
        raise Exception( "ERROR: The print_biogene() function requires a biogene to be passed via the 'gene' argument" );

    ## we can auto-detect the molecule if the user didn't pass one
    #   and if there's only one.
    if on is None:
        on = gene.location().on

    gene_loc = gene.location_on( on )

    # GFF3 strand column: 1 -> '+', -1 -> '-', anything else stays '0'
    strand = '0'
    if gene_loc.strand == 1:
        strand = '+'
    elif gene_loc.strand == -1:
        strand = '-'

    columns = ['.']*9
    columns[0:3] = [gene_loc.on.id, source, 'gene']
    # fmin is stored 0-based interbase; GFF3 wants 1-based inclusive
    columns[3:7] = [str(gene_loc.fmin + 1), str(gene_loc.fmax), '.', strand]
    columns[8] = build_column_9( id=gene.id, parent=None, other=None )

    ## print the gene line
    fh.write( "\t".join(columns) + "\n" )

    ## modifications for the mRNA
    for mRNA in gene.mRNAs():
        mRNA_loc = mRNA.location_on( on )

        if mRNA_loc is None:
            raise Exception("ERROR: Expected mRNA {0} to be located on {1} but it wasn't".format(mRNA.id, on.id))

        columns[2] = 'mRNA'
        columns[3:5] = [str(mRNA_loc.fmin + 1), str(mRNA_loc.fmax)]
        columns[8] = build_column_9( id=mRNA.id, parent=mRNA.parent.id, other=None )
        fh.write( "\t".join(columns) + "\n" )

        ## handle each CDS for this mRNA
        for CDS in mRNA.CDSs():
            CDS_loc = CDS.location_on( on )

            if CDS_loc is None:
                raise Exception("ERROR: Expected CDS {0} to be located on {1} but it wasn't".format(CDS.id, on.id) )

            columns[2] = 'CDS'
            columns[3:5] = [str(CDS_loc.fmin + 1), str(CDS_loc.fmax)]
            columns[7] = str(CDS_loc.phase)
            columns[8] = build_column_9( id=CDS.id, parent=mRNA.id, other=None )
            fh.write( "\t".join(columns) + "\n" )

        # reset the phase column after the CDS rows; other features don't
        # carry a phase
        columns[7] = '.'

        ## handle each exon for this mRNA
        for exon in mRNA.exons():
            exon_loc = exon.location_on( on )

            if exon_loc is None:
                raise Exception("ERROR: Expected exon {0} to be located on {1} but it wasn't".format(exon.id, on.id))

            columns[2] = 'exon'
            columns[3:5] = [str(exon_loc.fmin + 1), str(exon_loc.fmax)]
            columns[8] = build_column_9( id=exon.id, parent=mRNA.id, other=None )
            fh.write( "\t".join(columns) + "\n" )
def _reunite_children( fg, mol_id, kids ):
    # Placeholder, not implemented: presumably intended to re-attach orphaned
    # child features (*kids*) to molecule *mol_id* within feature graph *fg*
    # -- TODO confirm intended semantics before implementing.
    pass
def _get_ultimate_parent( p, id ):
if p is None:
return id
oldest = id
while p[oldest] is not None:
oldest = p[oldest]
return oldest
|
# Tests tests the behavior of a call to send() that is blocking
# after close() is called by a peer, an exception should be thrown
#
# no expected output
def close_it(remoteip, remoteport, sock, thishand, listenhand):
    # Server-side handler: hold the accepted socket open until the timer
    # callback flips the shared flag, then close it so the peer's blocked
    # send() fails with a connection-reset error.
    while not mycontext['close_the_socket']:
        sleep(1)
    sock.close()
def close_the_socket():
    # Timer callback: signals close_it() to close the server-side socket.
    mycontext['close_the_socket'] = True
def stop_test(handle,sock):
    # Tear down the listener and the client socket once the test is done.
    stopcomm(handle)
    sock.close()
if callfunc == "initialize":
    mycontext['close_the_socket'] = False
    ip = '127.0.0.1'
    waitport = 12345
    filename = 'oneKfile.txt'
    handle = waitforconn(ip,waitport,close_it)
    sock = openconn(ip,waitport)
    file_obj = open(filename)
    file_data = file_obj.read()
    # NOTE(review): missing parentheses -- this references the close method
    # without calling it, so the file handle is never closed.
    file_obj.close
    settimer(12,close_the_socket,[])
    for i in range(1024): #send until eventually it blocks
        try:
            sock.send(file_data)
        except Exception,e:
            # Only a peer reset is the expected failure; re-raise anything else.
            if 'Connection reset by peer' not in str(e): raise
            stop_test(handle,sock) #got the exception, stop the test
            break
Stopped the test from checking the exception message
# Tests tests the behavior of a call to send() that is blocking
# after close() is called by a peer, an exception should be thrown
#
# no expected output
def close_it(remoteip, remoteport, sock, thishand, listenhand):
    # Server-side handler: hold the accepted socket open until the timer
    # callback flips the shared flag, then close it so the peer's blocked
    # send() fails with an exception.
    while not mycontext['close_the_socket']:
        sleep(1)
    sock.close()
def close_the_socket():
    # Timer callback: signals close_it() to close the server-side socket.
    mycontext['close_the_socket'] = True
def stop_test(handle,sock):
    # Tear down the listener and the client socket once the test is done.
    stopcomm(handle)
    sock.close()
def fail():
    # Fires only if no exception interrupted the send loop within 15 seconds.
    print 'ERROR: THE SOCKET BLOCKED WITHOUT AN EXCEPTION'
if callfunc == "initialize":
    mycontext['close_the_socket'] = False
    ip = '127.0.0.1'
    waitport = 12345
    filename = 'oneKfile.txt'
    handle = waitforconn(ip,waitport,close_it)
    sock = openconn(ip,waitport)
    file_obj = open(filename)
    file_data = file_obj.read()
    # NOTE(review): missing parentheses -- this references the close method
    # without calling it, so the file handle is never closed.
    file_obj.close
    settimer(12,close_the_socket,[])
    failtimer = settimer(15,fail,[])
    for i in range(1024): #send until eventually it blocks
        try:
            sock.send(file_data)
        except Exception,e:
            # Any exception counts as success here; cancel the failure timer.
            canceltimer(failtimer)
            stop_test(handle,sock) #got the exception, stop the test
            break
|
#
# Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
#
# Dependency-tracker reaction map: for each object type, maps the origin of a
# change ('self' or a linked type) to the list of linked types that must be
# re-evaluated when that change occurs. An empty list means the change does
# not propagate any further from that origin.
REACTION_MAP = {
    "instance_ip": {
        'self': [],
        'virtual_machine_interface': [],
    },
    "floating_ip": {
        'self': [],
    },
    "security_group": {
        'self': [],
        'virtual_machine_interface': [],
    },
    "virtual_network": {
        'self': [],
    },
    "virtual_machine": {
        'self': ['virtual_machine_interface'],
        'virtual_machine_interface': [],
    },
    "virtual_machine_interface": {
        'self': ['virtual_machine', 'security_group',
                 'instance_ip'],
        'security_group': [],
        'virtual_machine': [],
    },
    "project": {
        'self': [],
    },
}
Fixed a service-delete issue in nested mode.
Because the dependency tracker's reaction map
was incorrect, update events were not generated
for floating_ip and instance_ip. As a result, config_db
went out of sync with the real database, which caused the failure
when deleting objects associated with a floating_ip
or an instance_ip.
Change-Id: Ifa446304f2a6cb5356c00fc1afa70e8dee4501bc
Closes-bug: #1723555
#
# Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
#
# Dependency-tracker reaction map: for each object type, maps the origin of a
# change ('self' or a linked type) to the list of linked types that must be
# re-evaluated when that change occurs. instance_ip/floating_ip changes now
# propagate to their linked interfaces and to each other, keeping config_db
# in sync for delete handling.
REACTION_MAP = {
    "instance_ip": {
        'self': ['virtual_machine_interface', 'floating_ip'],
        'virtual_machine_interface': [],
        'floating_ip': [],
    },
    "floating_ip": {
        'self': ['virtual_machine_interface', 'instance_ip'],
        'virtual_machine_interface': [],
        'instance_ip': [],
    },
    "security_group": {
        'self': [],
        'virtual_machine_interface': [],
    },
    "virtual_network": {
        'self': [],
    },
    "virtual_machine": {
        'self': ['virtual_machine_interface'],
        'virtual_machine_interface': [],
    },
    "virtual_machine_interface": {
        'self': ['virtual_machine', 'security_group',
                 'instance_ip', 'floating_ip'],
        'security_group': [],
        'virtual_machine': [],
        'instance_ip': [],
        'floating_ip': [],
    },
    "project": {
        'self': [],
    },
}
|
#!/usr/bin/python
#
# (c) 2013, Nimbis Services
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ansible-doc YAML block. Fixes: removed the duplicated 'version_added' key
# (invalid YAML mapping) and corrected the 'virutalization' typo.
DOCUMENTATION = '''
---
module: ec2_ami_search
short_description: Retrieve AWS AMI information for a given operating system.
deprecated: "in favor of the ec2_ami_find module"
version_added: "1.6"
description:
  - Look up the most recent AMI on AWS for a given operating system.
  - Returns C(ami), C(aki), C(ari), C(serial), C(tag)
  - If there is no AKI or ARI associated with an image, these will be C(null).
  - Only supports images from cloud-images.ubuntu.com
  - 'Example output: C({"ami": "ami-69f5a900", "changed": false, "aki": "aki-88aa75e1", "tag": "release", "ari": null, "serial": "20131024"})'
options:
  distro:
    description: Linux distribution (e.g., C(ubuntu))
    required: true
    choices: ["ubuntu"]
  release:
    description: short name of the release (e.g., C(precise))
    required: true
  stream:
    description: Type of release.
    required: false
    default: "server"
    choices: ["server", "desktop"]
  store:
    description: Back-end store for instance
    required: false
    default: "ebs"
    choices: ["ebs", "ebs-io1", "ebs-ssd", "instance-store"]
  arch:
    description: CPU architecture
    required: false
    default: "amd64"
    choices: ["i386", "amd64"]
  region:
    description: EC2 region
    required: false
    default: us-east-1
    choices: ["ap-northeast-1", "ap-southeast-1", "ap-southeast-2",
              "eu-central-1", "eu-west-1", "sa-east-1", "us-east-1",
              "us-west-1", "us-west-2", "us-gov-west-1"]
  virt:
    description: virtualization type
    required: false
    default: paravirtual
    choices: ["paravirtual", "hvm"]
author: Lorin Hochstein
'''
EXAMPLES = '''
- name: Launch an Ubuntu 12.04 (Precise Pangolin) EC2 instance
hosts: 127.0.0.1
connection: local
tasks:
- name: Get the Ubuntu precise AMI
ec2_ami_search: distro=ubuntu release=precise region=us-west-1 store=instance-store
register: ubuntu_image
- name: Start the EC2 instance
ec2: image={{ ubuntu_image.ami }} instance_type=m1.small key_name=mykey
'''
import csv
import json
import urllib2
import urlparse
# Distributions this module can query (only Ubuntu's cloud-image index).
SUPPORTED_DISTROS = ['ubuntu']
# EC2 regions accepted for the `region` option.
AWS_REGIONS = ['ap-northeast-1',
               'ap-southeast-1',
               'ap-southeast-2',
               'eu-central-1',
               'eu-west-1',
               'sa-east-1',
               'us-east-1',
               'us-west-1',
               'us-west-2',
               "us-gov-west-1"]
def get_url(module, url):
    """ Get url and return response """
    # fetch_url comes from ansible.module_utils.urls (star-imported at file end).
    r, info = fetch_url(module, url)
    if info['status'] != 200:
        # Backwards compat
        info['status_code'] = info['status']
        # fail_json() terminates the module; this path never returns.
        module.fail_json(**info)
    return r
def ubuntu(module):
    """ Get the ami for ubuntu """
    # Parameters were already validated against choices by AnsibleModule.
    release = module.params['release']
    stream = module.params['stream']
    store = module.params['store']
    arch = module.params['arch']
    region = module.params['region']
    virt = module.params['virt']
    url = get_ubuntu_url(release, stream)
    req = get_url(module, url)
    # The released.current.txt index is a tab-separated table.
    reader = csv.reader(req, delimiter='\t')
    try:
        ami, aki, ari, tag, serial = lookup_ubuntu_ami(reader, release, stream,
                                                       store, arch, region, virt)
        module.exit_json(changed=False, ami=ami, aki=aki, ari=ari, tag=tag,
                         serial=serial)
    except KeyError:
        # lookup_ubuntu_ami raises KeyError when no table row matches.
        module.fail_json(msg="No matching AMI found")
def lookup_ubuntu_ami(table, release, stream, store, arch, region, virt):
    """ Look up the Ubuntu AMI that matches query given a table of AMIs
        table: an iterable that returns a row of
               (release, stream, tag, serial, region, ami, aki, ari, virt)
        release: ubuntu release name
        stream: 'server' or 'desktop'
        store: 'ebs', 'ebs-io1', 'ebs-ssd' or 'instance-store'
        arch: 'i386' or 'amd64'
        region: EC2 region
        virt: 'paravirtual' or 'hvm'
        Returns (ami, aki, ari, tag, serial)"""
    wanted = (release, stream, store, arch, region, virt)
    for row in table:
        (row_release, row_stream, tag, serial,
         row_store, row_arch, row_region, ami, aki, ari,
         row_virt) = row
        candidate = (row_release, row_stream, row_store, row_arch,
                     row_region, row_virt)
        if candidate != wanted:
            continue
        # aki and ari are sometimes blank; report them as None instead.
        if aki == '':
            aki = None
        if ari == '':
            ari = None
        return (ami, aki, ari, tag, serial)
    # No row matched the query.
    raise KeyError()
def get_ubuntu_url(release, stream):
    """Build the cloud-images.ubuntu.com index URL for a release/stream pair."""
    base = "https://cloud-images.ubuntu.com/query/{0}/{1}/released.current.txt"
    return base.format(release, stream)
def main():
    """Module entry point: declare the argument spec and dispatch by distro."""
    arg_spec = dict(
        distro=dict(required=True, choices=SUPPORTED_DISTROS),
        release=dict(required=True),
        stream=dict(required=False, default='server',
                    choices=['desktop', 'server']),
        store=dict(required=False, default='ebs',
                   choices=['ebs', 'ebs-io1', 'ebs-ssd', 'instance-store']),
        arch=dict(required=False, default='amd64',
                  choices=['i386', 'amd64']),
        region=dict(required=False, default='us-east-1', choices=AWS_REGIONS),
        virt=dict(required=False, default='paravirtual',
                  choices=['paravirtual', 'hvm']),
    )
    module = AnsibleModule(argument_spec=arg_spec)
    distro = module.params['distro']
    if distro == 'ubuntu':
        ubuntu(module)
    else:
        # Defensive: choices= above already restricts distro to 'ubuntu'.
        module.fail_json(msg="Unsupported distro: %s" % distro)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
# Entry point when Ansible executes this module as a script.
if __name__ == '__main__':
    main()
Remove unneeded urllib2 import
#!/usr/bin/python
#
# (c) 2013, Nimbis Services
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ansible-doc YAML block. Fixes: removed the duplicated 'version_added' key
# (invalid YAML mapping) and corrected the 'virutalization' typo.
DOCUMENTATION = '''
---
module: ec2_ami_search
short_description: Retrieve AWS AMI information for a given operating system.
deprecated: "in favor of the ec2_ami_find module"
version_added: "1.6"
description:
  - Look up the most recent AMI on AWS for a given operating system.
  - Returns C(ami), C(aki), C(ari), C(serial), C(tag)
  - If there is no AKI or ARI associated with an image, these will be C(null).
  - Only supports images from cloud-images.ubuntu.com
  - 'Example output: C({"ami": "ami-69f5a900", "changed": false, "aki": "aki-88aa75e1", "tag": "release", "ari": null, "serial": "20131024"})'
options:
  distro:
    description: Linux distribution (e.g., C(ubuntu))
    required: true
    choices: ["ubuntu"]
  release:
    description: short name of the release (e.g., C(precise))
    required: true
  stream:
    description: Type of release.
    required: false
    default: "server"
    choices: ["server", "desktop"]
  store:
    description: Back-end store for instance
    required: false
    default: "ebs"
    choices: ["ebs", "ebs-io1", "ebs-ssd", "instance-store"]
  arch:
    description: CPU architecture
    required: false
    default: "amd64"
    choices: ["i386", "amd64"]
  region:
    description: EC2 region
    required: false
    default: us-east-1
    choices: ["ap-northeast-1", "ap-southeast-1", "ap-southeast-2",
              "eu-central-1", "eu-west-1", "sa-east-1", "us-east-1",
              "us-west-1", "us-west-2", "us-gov-west-1"]
  virt:
    description: virtualization type
    required: false
    default: paravirtual
    choices: ["paravirtual", "hvm"]
author: Lorin Hochstein
'''
EXAMPLES = '''
- name: Launch an Ubuntu 12.04 (Precise Pangolin) EC2 instance
hosts: 127.0.0.1
connection: local
tasks:
- name: Get the Ubuntu precise AMI
ec2_ami_search: distro=ubuntu release=precise region=us-west-1 store=instance-store
register: ubuntu_image
- name: Start the EC2 instance
ec2: image={{ ubuntu_image.ami }} instance_type=m1.small key_name=mykey
'''
import csv
import json
import urlparse
# Distributions this module can query (only Ubuntu's cloud-image index).
SUPPORTED_DISTROS = ['ubuntu']
# EC2 regions accepted for the `region` option.
AWS_REGIONS = ['ap-northeast-1',
               'ap-southeast-1',
               'ap-southeast-2',
               'eu-central-1',
               'eu-west-1',
               'sa-east-1',
               'us-east-1',
               'us-west-1',
               'us-west-2',
               "us-gov-west-1"]
def get_url(module, url):
    """ Get url and return response """
    # fetch_url comes from ansible.module_utils.urls (star-imported at file end).
    r, info = fetch_url(module, url)
    if info['status'] != 200:
        # Backwards compat
        info['status_code'] = info['status']
        # fail_json() terminates the module; this path never returns.
        module.fail_json(**info)
    return r
def ubuntu(module):
    """ Get the ami for ubuntu """
    # Parameters were already validated against choices by AnsibleModule.
    release = module.params['release']
    stream = module.params['stream']
    store = module.params['store']
    arch = module.params['arch']
    region = module.params['region']
    virt = module.params['virt']
    url = get_ubuntu_url(release, stream)
    req = get_url(module, url)
    # The released.current.txt index is a tab-separated table.
    reader = csv.reader(req, delimiter='\t')
    try:
        ami, aki, ari, tag, serial = lookup_ubuntu_ami(reader, release, stream,
                                                       store, arch, region, virt)
        module.exit_json(changed=False, ami=ami, aki=aki, ari=ari, tag=tag,
                         serial=serial)
    except KeyError:
        # lookup_ubuntu_ami raises KeyError when no table row matches.
        module.fail_json(msg="No matching AMI found")
def lookup_ubuntu_ami(table, release, stream, store, arch, region, virt):
    """ Look up the Ubuntu AMI that matches query given a table of AMIs
        table: an iterable that returns a row of
               (release, stream, tag, serial, region, ami, aki, ari, virt)
        release: ubuntu release name
        stream: 'server' or 'desktop'
        store: 'ebs', 'ebs-io1', 'ebs-ssd' or 'instance-store'
        arch: 'i386' or 'amd64'
        region: EC2 region
        virt: 'paravirtual' or 'hvm'
        Returns (ami, aki, ari, tag, serial)"""
    wanted = (release, stream, store, arch, region, virt)
    for row in table:
        (row_release, row_stream, tag, serial,
         row_store, row_arch, row_region, ami, aki, ari,
         row_virt) = row
        candidate = (row_release, row_stream, row_store, row_arch,
                     row_region, row_virt)
        if candidate != wanted:
            continue
        # aki and ari are sometimes blank; report them as None instead.
        if aki == '':
            aki = None
        if ari == '':
            ari = None
        return (ami, aki, ari, tag, serial)
    # No row matched the query.
    raise KeyError()
def get_ubuntu_url(release, stream):
    """Build the cloud-images.ubuntu.com index URL for a release/stream pair."""
    base = "https://cloud-images.ubuntu.com/query/{0}/{1}/released.current.txt"
    return base.format(release, stream)
def main():
    """Module entry point: declare the argument spec and dispatch by distro."""
    arg_spec = dict(
        distro=dict(required=True, choices=SUPPORTED_DISTROS),
        release=dict(required=True),
        stream=dict(required=False, default='server',
                    choices=['desktop', 'server']),
        store=dict(required=False, default='ebs',
                   choices=['ebs', 'ebs-io1', 'ebs-ssd', 'instance-store']),
        arch=dict(required=False, default='amd64',
                  choices=['i386', 'amd64']),
        region=dict(required=False, default='us-east-1', choices=AWS_REGIONS),
        virt=dict(required=False, default='paravirtual',
                  choices=['paravirtual', 'hvm']),
    )
    module = AnsibleModule(argument_spec=arg_spec)
    distro = module.params['distro']
    if distro == 'ubuntu':
        ubuntu(module)
    else:
        # Defensive: choices= above already restricts distro to 'ubuntu'.
        module.fail_json(msg="Unsupported distro: %s" % distro)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
# Entry point when Ansible executes this module as a script.
if __name__ == '__main__':
    main()
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The ImageNet-v2 image classification dataset."""
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = r"""
@inproceedings{recht2019imagenet,
title={Do ImageNet Classifiers Generalize to ImageNet?},
author={Recht, Benjamin and Roelofs, Rebecca and Schmidt, Ludwig and Shankar, Vaishaal},
booktitle={International Conference on Machine Learning},
pages={5389--5400},
year={2019}
}
"""
_DESCRIPTION = """
ImageNet-v2 is an ImageNet test set (10 per class) collected by closely
following the original labelling protocol. Each image has been labelled by
at least 10 MTurk workers, possibly more, and depending on the strategy used to
select which images to include among the 10 chosen for the given class there are
three different versions of the dataset. Please refer to section four of the
paper for more details on how the different variants were compiled.
The label space is the same as that of ImageNet2012. Each example is
represented as a dictionary with the following keys:
* 'image': The image, a (H, W, 3)-tensor.
* 'label': An integer in the range [0, 1000).
* 'file_name': A unique sting identifying the example within the dataset.
"""
# Note: Bump the version if the links change.
_VERSION = tfds.core.Version('1.0.0')
_ROOT_URL = 'https://s3-us-west-2.amazonaws.com/imagenetv2public'
_IMAGENET_V2_URLS = {
'matched-frequency': _ROOT_URL + '/imagenetv2-matched-frequency.tar.gz',
'threshold-0.7': _ROOT_URL + '/imagenetv2-threshold0.7.tar.gz',
'topimages': _ROOT_URL + '/imagenetv2-topimages.tar.gz',
}
_TAR_TOPDIR = {
'matched-frequency': 'imagenetv2-matched-frequency-format-val',
'threshold-0.7': 'imagenetv2-threshold-0.7-format-val',
'topimages': 'imagenetv2-top-images-format-val',
}
_IMAGENET_LABELS_FILENAME = r'image_classification/imagenet2012_labels.txt'
class ImagenetV2Config(tfds.core.BuilderConfig):
  """Configuration specifying the variant to use."""
  def __init__(self, *, variant, **kwargs):
    """The parameters specifying how the dataset will be processed.
    The dataset comes in three different variants. Please refer to the paper
    on more details how they were collected.
    Args:
      variant: One of 'matched-frequency', 'threshold-0.7', or 'topimages'.
      **kwargs: Passed on to the constructor of `BuilderConfig`.
    """
    super(ImagenetV2Config, self).__init__(**kwargs)
    # Reject unknown variants early, listing the valid options.
    if variant not in _IMAGENET_V2_URLS:
      raise ValueError('Unknown split number {}, must be one of {}'.format(
          variant, list(_IMAGENET_V2_URLS)))
    # Which of the three collections this config selects.
    self.variant = variant
def _create_builder_configs():
  """Yield one ImagenetV2Config per supported dataset variant."""
  for variant_name in _IMAGENET_V2_URLS:
    yield ImagenetV2Config(
        variant=variant_name,
        name=variant_name,
        version=_VERSION,
        description=_DESCRIPTION,
    )
class ImagenetV2(tfds.core.GeneratorBasedBuilder):
  """An ImageNet test set recollected by following the original protocol."""
  # One builder config per dataset variant (see _IMAGENET_V2_URLS).
  BUILDER_CONFIGS = list(_create_builder_configs())
  def _info(self):
    """Returns the DatasetInfo: features, supervised keys and citation."""
    names_file = tfds.core.get_tfds_path(_IMAGENET_LABELS_FILENAME)
    return tfds.core.DatasetInfo(
        builder=self,
        # This is the description that will appear on the datasets page.
        description=_DESCRIPTION,
        # tfds.features.FeatureConnectors
        features=tfds.features.FeaturesDict({
            'image': tfds.features.Image(encoding_format='jpeg'),
            'label': tfds.features.ClassLabel(names_file=names_file),
            'file_name': tfds.features.Text(),
        }),
        # Used if as_supervised=True in builder.as_dataset.
        supervised_keys=('image', 'label'),
        # Homepage of the dataset for documentation
        homepage='https://github.com/modestyachts/ImageNetV2',
        citation=_CITATION,
    )
  def _split_generators(self, dl_manager):
    """Returns a SplitGenerator for the test set."""
    variant_url = _IMAGENET_V2_URLS[self.builder_config.variant]
    # Download/extract the tarball, then descend into its top-level directory.
    imagenet_v2_root = os.path.join(
        dl_manager.download_and_extract(variant_url),
        _TAR_TOPDIR[self.builder_config.variant])
    return [
        tfds.core.SplitGenerator(
            # The dataset provides only a test split.
            name=tfds.Split.TEST,
            # These kwargs will be passed to _generate_examples
            gen_kwargs={'imagenet_v2_root': imagenet_v2_root},
        ),
    ]
  def _generate_examples(self, imagenet_v2_root):
    """Yields the examples."""
    # The directory structure is `class_number/filename_number.jpg`, where
    # class_number is in [0, 1000) and filename_number in [0, 10).
    for class_id in tf.io.gfile.listdir(imagenet_v2_root):
      class_dir = os.path.join(imagenet_v2_root, class_id)
      for image_filename in tf.io.gfile.listdir(class_dir):
        image_path = os.path.join(class_dir, image_filename)
        features = {
            'image': image_path,
            'label': int(class_id),
            # The relative path doubles as a unique key for the example.
            'file_name': image_path,
        }
        yield image_path, features
Update Config Version
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The ImageNet-v2 image classification dataset."""
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = r"""
@inproceedings{recht2019imagenet,
title={Do ImageNet Classifiers Generalize to ImageNet?},
author={Recht, Benjamin and Roelofs, Rebecca and Schmidt, Ludwig and Shankar, Vaishaal},
booktitle={International Conference on Machine Learning},
pages={5389--5400},
year={2019}
}
"""
_DESCRIPTION = """
ImageNet-v2 is an ImageNet test set (10 per class) collected by closely
following the original labelling protocol. Each image has been labelled by
at least 10 MTurk workers, possibly more, and depending on the strategy used to
select which images to include among the 10 chosen for the given class there are
three different versions of the dataset. Please refer to section four of the
paper for more details on how the different variants were compiled.
The label space is the same as that of ImageNet2012. Each example is
represented as a dictionary with the following keys:
* 'image': The image, a (H, W, 3)-tensor.
* 'label': An integer in the range [0, 1000).
* 'file_name': A unique sting identifying the example within the dataset.
"""
# Note: Bump the version if the links change.
_CONFIG_VERSION = {
'matched-frequency': tfds.core.Version('1.0.0'),
'threshold-0.7': tfds.core.Version('0.1.1'),
'topimages': tfds.core.Version('0.1.1'),
}
_ROOT_URL = 'https://s3-us-west-2.amazonaws.com/imagenetv2public'
_IMAGENET_V2_URLS = {
'matched-frequency': _ROOT_URL + '/imagenetv2-matched-frequency.tar.gz',
'threshold-0.7': _ROOT_URL + '/imagenetv2-threshold0.7.tar.gz',
'topimages': _ROOT_URL + '/imagenetv2-topimages.tar.gz',
}
_TAR_TOPDIR = {
'matched-frequency': 'imagenetv2-matched-frequency-format-val',
'threshold-0.7': 'imagenetv2-threshold-0.7-format-val',
'topimages': 'imagenetv2-top-images-format-val',
}
_IMAGENET_LABELS_FILENAME = r'image_classification/imagenet2012_labels.txt'
class ImagenetV2Config(tfds.core.BuilderConfig):
  """Configuration specifying the variant to use."""
  def __init__(self, *, variant, **kwargs):
    """The parameters specifying how the dataset will be processed.
    The dataset comes in three different variants. Please refer to the paper
    on more details how they were collected.
    Args:
      variant: One of 'matched-frequency', 'threshold-0.7', or 'topimages'.
      **kwargs: Passed on to the constructor of `BuilderConfig`.
    """
    super(ImagenetV2Config, self).__init__(**kwargs)
    # Reject unknown variants early, listing the valid options.
    if variant not in _IMAGENET_V2_URLS:
      raise ValueError('Unknown split number {}, must be one of {}'.format(
          variant, list(_IMAGENET_V2_URLS)))
    # Which of the three collections this config selects.
    self.variant = variant
def _create_builder_configs():
  """Yield one ImagenetV2Config per supported variant, with its own version."""
  for variant_name in _IMAGENET_V2_URLS:
    yield ImagenetV2Config(
        variant=variant_name,
        name=variant_name,
        version=_CONFIG_VERSION[variant_name],
        description=_DESCRIPTION,
    )
class ImagenetV2(tfds.core.GeneratorBasedBuilder):
  """An ImageNet test set recollected by following the original protocol."""
  # One builder config per dataset variant (see _IMAGENET_V2_URLS).
  BUILDER_CONFIGS = list(_create_builder_configs())
  def _info(self):
    """Returns the DatasetInfo: features, supervised keys and citation."""
    names_file = tfds.core.get_tfds_path(_IMAGENET_LABELS_FILENAME)
    return tfds.core.DatasetInfo(
        builder=self,
        # This is the description that will appear on the datasets page.
        description=_DESCRIPTION,
        # tfds.features.FeatureConnectors
        features=tfds.features.FeaturesDict({
            'image': tfds.features.Image(encoding_format='jpeg'),
            'label': tfds.features.ClassLabel(names_file=names_file),
            'file_name': tfds.features.Text(),
        }),
        # Used if as_supervised=True in builder.as_dataset.
        supervised_keys=('image', 'label'),
        # Homepage of the dataset for documentation
        homepage='https://github.com/modestyachts/ImageNetV2',
        citation=_CITATION,
    )
  def _split_generators(self, dl_manager):
    """Returns a SplitGenerator for the test set."""
    variant_url = _IMAGENET_V2_URLS[self.builder_config.variant]
    # Download/extract the tarball, then descend into its top-level directory.
    imagenet_v2_root = os.path.join(
        dl_manager.download_and_extract(variant_url),
        _TAR_TOPDIR[self.builder_config.variant])
    return [
        tfds.core.SplitGenerator(
            # The dataset provides only a test split.
            name=tfds.Split.TEST,
            # These kwargs will be passed to _generate_examples
            gen_kwargs={'imagenet_v2_root': imagenet_v2_root},
        ),
    ]
  def _generate_examples(self, imagenet_v2_root):
    """Yields the examples."""
    # The directory structure is `class_number/filename_number.jpg`, where
    # class_number is in [0, 1000) and filename_number in [0, 10).
    for class_id in tf.io.gfile.listdir(imagenet_v2_root):
      class_dir = os.path.join(imagenet_v2_root, class_id)
      for image_filename in tf.io.gfile.listdir(class_dir):
        image_path = os.path.join(class_dir, image_filename)
        features = {
            'image': image_path,
            'label': int(class_id),
            # The relative path doubles as a unique key for the example.
            'file_name': image_path,
        }
        yield image_path, features
|
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@ecdsa.org
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import threading
from . import util
from .networks import NetworkConstants
from .bitcoin import *
class VerifyError(Exception):
    '''Exception used for blockchain verification errors (bad proof of work,
    hash mismatches, wrong fork block, etc.).'''
def bits_to_work(bits):
    """Chain-work contribution of one block with compact difficulty *bits*.

    Defined as 2^256 // (target + 1), computed with exact integer arithmetic.
    """
    target_plus_one = bits_to_target(bits) + 1
    return (1 << 256) // target_plus_one
def bits_to_target(bits):
    """Expand a compact-encoded difficulty ('bits') to the full 256-bit target.

    Compact format: the high byte is a base-256 exponent, the low three
    bytes are the mantissa. Raises AssertionError on out-of-range values.
    """
    if bits == 0:
        return 0
    exponent = bits >> 24
    assert exponent <= 0x1d
    mantissa = bits & 0x00ffffff
    assert 0x8000 <= mantissa <= 0x7fffff
    if exponent <= 3:
        return mantissa >> (8 * (3 - exponent))
    return mantissa << (8 * (exponent - 3))
def target_to_bits(target):
    """Compress a 256-bit target into Bitcoin's compact 'bits' encoding.

    Inverse of bits_to_target() up to the precision lost by truncating the
    target to a 3-byte mantissa. Targets above MAX_TARGET are clamped.
    """
    if target == 0:
        return 0
    # Never encode a target easier than the maximum allowed.
    target = min(target, MAX_TARGET)
    size = (target.bit_length() + 7) // 8
    mask64 = 0xffffffffffffffff
    if size <= 3:
        compact = (target & mask64) << (8 * (3 - size))
    else:
        compact = (target >> (8 * (size - 3))) & mask64
    # If the mantissa's sign bit would be set, shift into the exponent instead
    # (the compact format treats bit 23 as a sign bit).
    if compact & 0x00800000:
        compact >>= 8
        size += 1
    assert compact == (compact & 0x007fffff)
    assert size < 256
    return compact | size << 24
# Easiest allowed difficulty (genesis-block bits) and its expanded target.
MAX_BITS = 0x1d00ffff
MAX_TARGET = bits_to_target(MAX_BITS)
def serialize_header(res):
    """Serialize a header dict into its 80-byte wire format as a hex string.

    Field order: version, prev_block_hash, merkle_root, timestamp, bits,
    nonce. Hashes are byte-reversed (rev_hex) to match wire byte order.
    """
    s = int_to_hex(res.get('version'), 4) \
        + rev_hex(res.get('prev_block_hash')) \
        + rev_hex(res.get('merkle_root')) \
        + int_to_hex(int(res.get('timestamp')), 4) \
        + int_to_hex(int(res.get('bits')), 4) \
        + int_to_hex(int(res.get('nonce')), 4)
    return s
def deserialize_header(s, height):
    """Parse an 80-byte raw header into a dict, tagging it with *height*."""
    # Little-endian bytes -> int (reverse, hex-encode, parse base 16).
    hex_to_int = lambda s: int('0x' + bh2u(s[::-1]), 16)
    h = {}
    h['version'] = hex_to_int(s[0:4])
    h['prev_block_hash'] = hash_encode(s[4:36])
    h['merkle_root'] = hash_encode(s[36:68])
    h['timestamp'] = hex_to_int(s[68:72])
    h['bits'] = hex_to_int(s[72:76])
    h['nonce'] = hex_to_int(s[76:80])
    h['block_height'] = height
    return h
def hash_header(header):
    """Return the block hash (hex string) of *header*; all-zeros for None."""
    if header is None:
        return '0' * 64
    # A missing prev hash (genesis) is treated as 32 zero bytes.
    if header.get('prev_block_hash') is None:
        header['prev_block_hash'] = '00'*32
    return hash_encode(Hash(bfh(serialize_header(header))))
# Registry of all known chains (main chain + forks), keyed by checkpoint height.
blockchains = {}
def read_blockchains(config):
    """Load the main chain plus any saved fork files into `blockchains`.

    Fork files are named 'fork_<parent_id>_<checkpoint>' under the headers
    directory; they are loaded in ascending parent order.
    """
    blockchains[0] = Blockchain(config, 0, None)
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    if not os.path.exists(fdir):
        os.mkdir(fdir)
    l = filter(lambda x: x.startswith('fork_'), os.listdir(fdir))
    # Sort by parent id so parents are registered before their children.
    l = sorted(l, key = lambda x: int(x.split('_')[1]))
    for filename in l:
        checkpoint = int(filename.split('_')[2])
        parent_id = int(filename.split('_')[1])
        b = Blockchain(config, checkpoint, parent_id)
        blockchains[b.checkpoint] = b
    return blockchains
def check_header(header):
if type(header) is not dict:
return False
for b in blockchains.values():
if b.check_header(header):
return b
return False
def can_connect(header):
    """Return the first known chain that *header* can extend, else False."""
    return next((chain for chain in blockchains.values()
                 if chain.can_connect(header)), False)
class Blockchain(util.PrintError):
    """
    Manages blockchain headers and their verification.
    Each instance represents one chain of headers backed by an on-disk file,
    either the main chain or a fork hanging off a parent at its checkpoint.
    """
    def __init__(self, config, checkpoint, parent_id):
        """Chain rooted at height *checkpoint*; parent_id is None for the main chain."""
        self.config = config
        self.catch_up = None # interface catching up
        # Raw chunk currently being verified, so read_header() can serve
        # heights from data not yet written to disk (see verify_chunk()).
        self.cur_chunk = None
        self.checkpoint = checkpoint
        self.parent_id = parent_id
        self.lock = threading.Lock()
        with self.lock:
            self.update_size()
    def parent(self):
        """Return the chain this fork hangs off (KeyError for the main chain)."""
        return blockchains[self.parent_id]
    def get_max_child(self):
        # Highest checkpoint among forks whose parent is this chain, or None.
        children = list(filter(lambda y: y.parent_id==self.checkpoint, blockchains.values()))
        return max([x.checkpoint for x in children]) if children else None
    def get_checkpoint(self):
        # Effective checkpoint: a chain is only authoritative below its
        # highest child's fork point.
        mc = self.get_max_child()
        return mc if mc is not None else self.checkpoint
    def get_branch_size(self):
        # Number of headers past the effective checkpoint.
        return self.height() - self.get_checkpoint() + 1
    def get_name(self):
        # Short display id derived from the checkpoint block hash.
        # NOTE(review): lstrip('00') strips *all* leading '0' characters,
        # not zero pairs -- presumably acceptable for display; confirm.
        return self.get_hash(self.get_checkpoint()).lstrip('00')[0:10]
    def check_header(self, header):
        """True if this chain's stored hash at the header's height matches."""
        header_hash = hash_header(header)
        height = header.get('block_height')
        return header_hash == self.get_hash(height)
    def fork(parent, header):
        """Create a new fork chain off *parent* starting at *header*.

        NOTE(review): declared without 'self' -- called with the parent chain
        as the first argument; confirm call sites before converting to a
        staticmethod.
        """
        checkpoint = header.get('block_height')
        self = Blockchain(parent.config, checkpoint, parent.checkpoint)
        # Create an empty headers file for the fork, then store its first header.
        open(self.path(), 'w+').close()
        self.save_header(header)
        return self
    def height(self):
        """Height of this chain's tip (checkpoint + stored headers - 1)."""
        return self.checkpoint + self.size() - 1
    def size(self):
        """Number of headers stored in this chain's file (thread-safe read)."""
        with self.lock:
            return self._size
    def update_size(self):
        # Recompute the cached header count from the file size (80 bytes each).
        p = self.path()
        self._size = os.path.getsize(p)//80 if os.path.exists(p) else 0
    def verify_header(self, header, prev_header, bits):
        """Validate *header* against its predecessor and the expected bits.

        Raises VerifyError on: prev-hash mismatch, wrong BitcoinCash fork
        block, difficulty-bits mismatch, or insufficient proof of work.
        """
        prev_hash = hash_header(prev_header)
        _hash = hash_header(header)
        if prev_hash != header.get('prev_block_hash'):
            raise VerifyError("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
        # checkpoint BitcoinCash fork block
        if (header.get('block_height') == NetworkConstants.BITCOIN_CASH_FORK_BLOCK_HEIGHT and hash_header(header) != NetworkConstants.BITCOIN_CASH_FORK_BLOCK_HASH):
            err_str = "block at height %i is not cash chain fork block. hash %s" % (header.get('block_height'), hash_header(header))
            raise VerifyError(err_str)
        if bits != header.get('bits'):
            raise VerifyError("bits mismatch: %s vs %s" % (bits, header.get('bits')))
        target = bits_to_target(bits)
        # A valid block hash, read as a 256-bit integer, must not exceed the target.
        if int('0x' + _hash, 16) > target:
            raise VerifyError("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
    def verify_chunk(self, index, data):
        """Verify a raw chunk of 2016 consecutive headers starting at chunk *index*."""
        # Expose the in-flight chunk so read_header() can serve heights from it
        # before the data is written to disk.
        self.cur_chunk = data
        self.cur_chunk_index = index
        num = len(data) // 80  # headers are 80 bytes each
        prev_header = None
        if index != 0:
            prev_header = self.read_header(index*2016 - 1)
        for i in range(num):
            raw_header = data[i*80:(i+1) * 80]
            header = deserialize_header(raw_header, index*2016 + i)
            bits = self.get_bits(header)
            self.verify_header(header, prev_header, bits)
            prev_header = header
        self.cur_chunk = None
def path(self):
d = util.get_headers_dir(self.config)
filename = 'blockchain_headers' if self.parent_id is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_id, self.checkpoint))
return os.path.join(d, filename)
def save_chunk(self, index, chunk):
    """Write a verified 2016-header chunk at its proper file offset.

    Chunks overlapping the region below our checkpoint are trimmed to
    the part this chain actually stores.  Afterwards the chain may swap
    roles with its parent if it became the longer branch.

    Fix: removed the unused ``filename = self.path()`` dead local.
    """
    offset = (index * 2016 - self.checkpoint) * 80
    if offset < 0:
        # Chunk starts before our checkpoint: keep only the overlapping tail.
        chunk = chunk[-offset:]
        offset = 0
    self.write(chunk, offset)
    self.swap_with_parent()
def swap_with_parent(self):
    """Swap this fork with its parent when the fork becomes the longer branch.

    The invariant is that the longest branch is always stored in the
    parent's file: header bytes, ids, checkpoints and cached sizes are
    exchanged, and every other chain's file is renamed to match its
    possibly-changed identity.
    """
    if self.parent_id is None:
        # Main chain: nothing to swap with.
        return
    parent_branch_size = self.parent().height() - self.checkpoint + 1
    if parent_branch_size >= self.size():
        # Parent's branch is still at least as long; keep things as they are.
        return
    self.print_error("swap", self.checkpoint, self.parent_id)
    parent_id = self.parent_id
    checkpoint = self.checkpoint
    parent = self.parent()
    with open(self.path(), 'rb') as f:
        my_data = f.read()
    with open(parent.path(), 'rb') as f:
        # Read only the parent's portion above our fork point.
        f.seek((checkpoint - parent.checkpoint)*80)
        parent_data = f.read(parent_branch_size*80)
    self.write(parent_data, 0)
    parent.write(my_data, (checkpoint - parent.checkpoint)*80)
    # store file path
    for b in blockchains.values():
        b.old_path = b.path()
    # swap parameters
    self.parent_id = parent.parent_id; parent.parent_id = parent_id
    self.checkpoint = parent.checkpoint; parent.checkpoint = checkpoint
    self._size = parent._size; parent._size = parent_branch_size
    # move files
    for b in blockchains.values():
        if b in [self, parent]: continue
        if b.old_path != b.path():
            self.print_error("renaming", b.old_path, b.path())
            os.rename(b.old_path, b.path())
    # update pointers
    blockchains[self.checkpoint] = self
    blockchains[parent.checkpoint] = parent
def write(self, data, offset):
    """Write *data* into the headers file at byte *offset*.

    If the write is not a pure append, the file is truncated at
    *offset* first so any stale headers beyond it are discarded.
    The data is flushed and fsync'ed, then the cached size refreshed.
    """
    filename = self.path()
    with self.lock:
        with open(filename, 'rb+') as f:
            if offset != self._size*80:
                # Rewriting history: drop everything from offset onwards.
                f.seek(offset)
                f.truncate()
            f.seek(offset)
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        self.update_size()
def save_header(self, header):
    """Append a single serialized header at the chain tip."""
    delta = header.get('block_height') - self.checkpoint
    raw = bfh(serialize_header(header))
    # Headers may only be appended in order, one 80-byte record at a time.
    assert delta == self.size()
    assert len(raw) == 80
    self.write(raw, delta * 80)
    self.swap_with_parent()
def read_header(self, height):
    """Return the deserialized header at *height*, or None if unknown.

    Lookup order: the in-memory chunk currently being verified (so
    headers not yet written to disk can still be read), then the parent
    chain for heights below our checkpoint, then our own file.
    """
    if self.cur_chunk and (height // 2016) == self.cur_chunk_index:
        # Fast path: header is inside the chunk under verification.
        n = height % 2016
        h = self.cur_chunk[n * 80: (n + 1) * 80]
        return deserialize_header(h, height)
    assert self.parent_id != self.checkpoint
    if height < 0:
        return
    if height < self.checkpoint:
        # Below our fork point: the parent chain stores it.
        return self.parent().read_header(height)
    if height > self.height():
        return
    delta = height - self.checkpoint
    name = self.path()
    if os.path.exists(name):
        with open(name, 'rb') as f:
            f.seek(delta * 80)
            h = f.read(80)
        return deserialize_header(h, height)
def get_hash(self, height):
    """Hex hash of the header stored at *height* on this chain."""
    header = self.read_header(height)
    return hash_header(header)
def BIP9(self, height, flag):
    """Return True if the header at *height* signals *flag* per BIP9 version bits."""
    version = self.read_header(height)['version']
    # Top three bits must be exactly 001 for version-bits signalling.
    top_bits_ok = (version & 0xE0000000) == 0x20000000
    return top_bits_ok and (version & flag) == flag
def get_median_time_past(self, height):
    """Median timestamp of the (up to) 11 blocks ending at *height*."""
    if height < 0:
        return 0
    start = max(0, height - 10)
    timestamps = sorted(self.read_header(h)['timestamp']
                        for h in range(start, height + 1))
    return timestamps[len(timestamps) // 2]
def get_suitable_block_height(self, suitableheight):
    """Height of the median-timestamp block among the three blocks
    ending at *suitableheight* (Bitcoin-ABC DAA start-point selection).

    Selecting the median avoids giving a block with a badly skewed
    timestamp too much influence on the difficulty calculation.
    Reference: github.com/Bitcoin-ABC/bitcoin-abc/master/src/pow.cpp#L201
    """
    top = self.read_header(suitableheight)
    mid = self.read_header(suitableheight - 1)
    low = self.read_header(suitableheight - 2)
    # Three-element sorting network (strict comparisons preserve the
    # original tie-breaking); afterwards `mid` holds the median.
    if low['timestamp'] > top['timestamp']:
        low, top = top, low
    if low['timestamp'] > mid['timestamp']:
        low, mid = mid, low
    if mid['timestamp'] > top['timestamp']:
        mid, top = top, mid
    return mid['block_height']
def get_bits(self, header):
    '''Return the expected difficulty bits for *header*.

    Three regimes, selected by the previous block's median-time-past:
    the Nov-2017 DAA (work/time over a 144-block window), the legacy
    2016-block retarget, and the BitcoinCash EDA (25% target bump when
    the last 6 blocks took over 12 hours).
    '''
    # Difficulty adjustment interval?
    height = header['block_height']
    # Genesis
    if height == 0:
        return MAX_BITS
    prior = self.read_header(height - 1)
    bits = prior['bits']
    #NOV 13 HF DAA
    prevheight = height -1
    daa_mtp=self.get_median_time_past(prevheight)
    #if (daa_mtp >= 1509559291): #leave this here for testing
    if (daa_mtp >= 1510600000):
        # New difficulty adjustment algorithm (activated Nov 2017).
        if NetworkConstants.TESTNET:
            # testnet 20 minute rule
            if header['timestamp'] - prior['timestamp'] > 20*60:
                return MAX_BITS
        # determine block range
        daa_starting_height=self.get_suitable_block_height(prevheight-144)
        daa_ending_height=self.get_suitable_block_height(prevheight)
        # calculate cumulative work (EXcluding work from block daa_starting_height, INcluding work from block daa_ending_height)
        daa_cumulative_work=0
        for daa_i in range (daa_starting_height+1,daa_ending_height+1):
            daa_prior = self.read_header(daa_i)
            daa_bits_for_a_block=daa_prior['bits']
            daa_work_for_a_block=bits_to_work(daa_bits_for_a_block)
            daa_cumulative_work += daa_work_for_a_block
        # calculate and sanitize elapsed time
        daa_starting_timestamp = self.read_header(daa_starting_height)['timestamp']
        daa_ending_timestamp = self.read_header(daa_ending_height)['timestamp']
        daa_elapsed_time=daa_ending_timestamp-daa_starting_timestamp
        # Clamp elapsed time to [0.5 day, 2 days] to limit adjustment swing.
        if (daa_elapsed_time>172800):
            daa_elapsed_time=172800
        if (daa_elapsed_time<43200):
            daa_elapsed_time=43200
        # calculate and return new target
        daa_Wn= (daa_cumulative_work*600)//daa_elapsed_time
        daa_target= (1 << 256) // daa_Wn -1
        daa_retval = target_to_bits(daa_target)
        daa_retval = int(daa_retval)
        return daa_retval
    #END OF NOV-2017 DAA
    if height % 2016 == 0:
        # Legacy 2016-block retarget boundary.
        return self.get_new_bits(height)
    if NetworkConstants.TESTNET:
        # testnet 20 minute rule
        if header['timestamp'] - prior['timestamp'] > 20*60:
            return MAX_BITS
        return self.read_header(height // 2016 * 2016)['bits']
    # bitcoin cash EDA
    # Can't go below minimum, so early bail
    if bits == MAX_BITS:
        return bits
    mtp_6blocks = (self.get_median_time_past(height - 1)
                   - self.get_median_time_past(height - 7))
    if mtp_6blocks < 12 * 3600:
        return bits
    # If it took over 12hrs to produce the last 6 blocks, increase the
    # target by 25% (reducing difficulty by 20%).
    target = bits_to_target(bits)
    target += target >> 2
    return target_to_bits(target)
def get_new_bits(self, height):
    """Compact bits after a legacy 2016-block retarget at *height*."""
    assert height % 2016 == 0
    if height == 0:
        # Genesis block difficulty.
        return MAX_BITS
    first = self.read_header(height - 2016)
    prior = self.read_header(height - 1)
    prior_target = bits_to_target(prior['bits'])
    two_weeks = 14 * 24 * 60 * 60
    actual_span = prior['timestamp'] - first['timestamp']
    # Clamp the adjustment to a factor of four in either direction.
    actual_span = min(max(actual_span, two_weeks // 4), two_weeks * 4)
    return target_to_bits((prior_target * actual_span) // two_weeks)
def can_connect(self, header, check_height=True):
    """Return True if *header* can be appended to this chain."""
    height = header['block_height']
    if check_height and self.height() != height - 1:
        return False
    if height == 0:
        # The genesis block connects iff its hash matches the known genesis.
        return hash_header(header) == NetworkConstants.GENESIS
    prev = self.read_header(height - 1)
    if not prev:
        return False
    if hash_header(prev) != header.get('prev_block_hash'):
        return False
    bits = self.get_bits(header)
    try:
        self.verify_header(header, prev, bits)
    except VerifyError as e:
        self.print_error('verify header {} failed at height {:d}: {}'
                         .format(hash_header(header), height, e))
        return False
    return True
def connect_chunk(self, idx, hexdata):
    """Verify and persist a hex-encoded header chunk; True on success."""
    try:
        raw = bfh(hexdata)
        self.verify_chunk(idx, raw)
        self.save_chunk(idx, raw)
    except VerifyError as e:
        self.print_error('verify_chunk failed: {}'.format(e))
        return False
    return True
Fix #623. Variable used in code path where it did not get assigned. (#728)
* Fix #623. Variable used in code path where it did not get assigned.
* Slight improvement to the use of a non-existent variable in the wrong scope.
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@ecdsa.org
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import threading
from . import util
from .networks import NetworkConstants
from .bitcoin import *
class VerifyError(Exception):
    '''Raised when a header or chunk fails blockchain verification
    (bad previous hash, wrong bits, or insufficient proof of work).'''
def bits_to_work(bits):
    """Expected amount of work (hash attempts) for the given compact bits."""
    target = bits_to_target(bits)
    return (1 << 256) // (target + 1)
def bits_to_target(bits):
    """Expand a compact 'bits' difficulty encoding into a 256-bit target."""
    if bits == 0:
        return 0
    exponent = bits >> 24          # size byte
    assert exponent <= 0x1d
    mantissa = bits & 0x00ffffff   # 23-bit word
    assert 0x8000 <= mantissa <= 0x7fffff
    # Negative shift means the mantissa is shifted down instead of up.
    shift = 8 * (exponent - 3)
    return mantissa << shift if shift >= 0 else mantissa >> -shift
def target_to_bits(target):
    """Compress a 256-bit target into the compact 'bits' encoding."""
    if target == 0:
        return 0
    # Targets above the consensus maximum are clamped.
    target = min(target, MAX_TARGET)
    n_bytes = (target.bit_length() + 7) // 8
    low64 = 0xffffffffffffffff
    if n_bytes <= 3:
        mantissa = (target & low64) << (8 * (3 - n_bytes))
    else:
        mantissa = (target >> (8 * (n_bytes - 3))) & low64
    if mantissa & 0x00800000:
        # Avoid setting the sign bit: shift mantissa down, bump the exponent.
        mantissa >>= 8
        n_bytes += 1
    assert mantissa == (mantissa & 0x007fffff)
    assert n_bytes < 256
    return mantissa | (n_bytes << 24)
MAX_BITS = 0x1d00ffff  # minimum difficulty (maximum target) in compact form
MAX_TARGET = bits_to_target(MAX_BITS)  # the same limit expanded to a 256-bit target
def serialize_header(res):
    """Serialize a header dict into its 160-hex-char (80-byte) wire form."""
    fields = [
        int_to_hex(res.get('version'), 4),
        rev_hex(res.get('prev_block_hash')),
        rev_hex(res.get('merkle_root')),
        int_to_hex(int(res.get('timestamp')), 4),
        int_to_hex(int(res.get('bits')), 4),
        int_to_hex(int(res.get('nonce')), 4),
    ]
    return ''.join(fields)
def deserialize_header(s, height):
    """Parse an 80-byte raw header into a dict, tagging it with *height*."""
    def read_le_int(raw):
        # Little-endian bytes -> int.
        return int('0x' + bh2u(raw[::-1]), 16)
    return {
        'version': read_le_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': read_le_int(s[68:72]),
        'bits': read_le_int(s[72:76]),
        'nonce': read_le_int(s[76:80]),
        'block_height': height,
    }
def hash_header(header):
    """Double-SHA256 hash (hex) of a header dict; all-zero hash for None."""
    if header is None:
        return '0' * 64
    if header.get('prev_block_hash') is None:
        # Normalize a missing previous hash (genesis) to zeros.
        # NOTE: this mutates the caller's dict.
        header['prev_block_hash'] = '00'*32
    raw = bfh(serialize_header(header))
    return hash_encode(Hash(raw))
blockchains = {}
def read_blockchains(config):
    """Load the main chain and all saved fork chains from disk."""
    blockchains[0] = Blockchain(config, 0, None)
    fork_dir = os.path.join(util.get_headers_dir(config), 'forks')
    if not os.path.exists(fork_dir):
        os.mkdir(fork_dir)
    # Fork files are named fork_<parent_id>_<checkpoint>; load parents first.
    fork_files = [name for name in os.listdir(fork_dir) if name.startswith('fork_')]
    fork_files.sort(key=lambda name: int(name.split('_')[1]))
    for filename in fork_files:
        parts = filename.split('_')
        b = Blockchain(config, int(parts[2]), int(parts[1]))
        blockchains[b.checkpoint] = b
    return blockchains
def check_header(header):
    """Return the blockchain that already contains *header*, else False."""
    # Exact dict check kept deliberately (subclasses are rejected).
    if type(header) is not dict:
        return False
    for chain in blockchains.values():
        if chain.check_header(header):
            return chain
    return False
def can_connect(header):
    """Return the blockchain that *header* extends, or False if none does."""
    for chain in blockchains.values():
        if chain.can_connect(header):
            return chain
    return False
class Blockchain(util.PrintError):
    """
    Manages blockchain headers and their verification

    Headers are stored 80 bytes each in a flat file: the main chain in
    'blockchain_headers', forks in files under 'forks/'.  ``checkpoint``
    is the height of the first locally stored header; ``parent_id``
    identifies the chain this one forked from (None for the main chain).
    """
    def __init__(self, config, checkpoint, parent_id):
        self.config = config
        self.catch_up = None # interface catching up
        self.cur_chunk = None  # raw chunk currently being verified, if any
        self.checkpoint = checkpoint
        self.parent_id = parent_id
        self.lock = threading.Lock()
        with self.lock:
            self.update_size()
    def parent(self):
        """Return the parent Blockchain object."""
        return blockchains[self.parent_id]
    def get_max_child(self):
        """Checkpoint of the highest direct child fork, or None."""
        children = list(filter(lambda y: y.parent_id==self.checkpoint, blockchains.values()))
        return max([x.checkpoint for x in children]) if children else None
    def get_checkpoint(self):
        """Effective checkpoint: the highest child's fork point, else our own."""
        mc = self.get_max_child()
        return mc if mc is not None else self.checkpoint
    def get_branch_size(self):
        """Number of headers above the effective checkpoint."""
        return self.height() - self.get_checkpoint() + 1
    def get_name(self):
        """Short human-readable identifier derived from the checkpoint hash."""
        return self.get_hash(self.get_checkpoint()).lstrip('00')[0:10]
    def check_header(self, header):
        """True if this chain stores exactly *header* at its height."""
        header_hash = hash_header(header)
        height = header.get('block_height')
        return header_hash == self.get_hash(height)
    def fork(parent, header):
        """Create a fork of *parent* whose first header is *header*.
        NOTE: called as an instance method, so *parent* acts as self."""
        checkpoint = header.get('block_height')
        self = Blockchain(parent.config, checkpoint, parent.checkpoint)
        open(self.path(), 'w+').close()
        self.save_header(header)
        return self
    def height(self):
        """Height of this chain's tip."""
        return self.checkpoint + self.size() - 1
    def size(self):
        """Number of locally stored headers (thread-safe)."""
        with self.lock:
            return self._size
    def update_size(self):
        """Refresh self._size from the on-disk file (80 bytes per header)."""
        p = self.path()
        self._size = os.path.getsize(p)//80 if os.path.exists(p) else 0
    def verify_header(self, header, prev_header, bits):
        """Raise VerifyError unless *header* chains onto *prev_header* with
        the expected difficulty *bits* and sufficient proof of work."""
        prev_hash = hash_header(prev_header)
        _hash = hash_header(header)
        if prev_hash != header.get('prev_block_hash'):
            raise VerifyError("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
        # checkpoint BitcoinCash fork block
        if (header.get('block_height') == NetworkConstants.BITCOIN_CASH_FORK_BLOCK_HEIGHT and hash_header(header) != NetworkConstants.BITCOIN_CASH_FORK_BLOCK_HASH):
            err_str = "block at height %i is not cash chain fork block. hash %s" % (header.get('block_height'), hash_header(header))
            raise VerifyError(err_str)
        if bits != header.get('bits'):
            raise VerifyError("bits mismatch: %s vs %s" % (bits, header.get('bits')))
        target = bits_to_target(bits)
        if int('0x' + _hash, 16) > target:
            raise VerifyError("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
    def verify_chunk(self, index, data):
        """Verify a raw chunk of headers starting at index*2016; the chunk
        is cached so read_header() can serve not-yet-saved headers."""
        self.cur_chunk = data
        self.cur_chunk_index = index
        num = len(data) // 80
        prev_header = None
        if index != 0:
            prev_header = self.read_header(index*2016 - 1)
        for i in range(num):
            raw_header = data[i*80:(i+1) * 80]
            header = deserialize_header(raw_header, index*2016 + i)
            bits = self.get_bits(header)
            self.verify_header(header, prev_header, bits)
            prev_header = header
        self.cur_chunk = None
    def path(self):
        """Filesystem path of this chain's headers file."""
        d = util.get_headers_dir(self.config)
        filename = 'blockchain_headers' if self.parent_id is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_id, self.checkpoint))
        return os.path.join(d, filename)
    def save_chunk(self, index, chunk):
        """Write a verified chunk at its file offset, then maybe swap with parent."""
        filename = self.path()
        d = (index * 2016 - self.checkpoint) * 80
        if d < 0:
            # Chunk begins before our checkpoint: keep only the stored tail.
            chunk = chunk[-d:]
            d = 0
        self.write(chunk, d)
        self.swap_with_parent()
    def swap_with_parent(self):
        """If this fork outgrew its parent, exchange data and identities so
        the longest branch always lives in the parent's file."""
        if self.parent_id is None:
            return
        parent_branch_size = self.parent().height() - self.checkpoint + 1
        if parent_branch_size >= self.size():
            return
        self.print_error("swap", self.checkpoint, self.parent_id)
        parent_id = self.parent_id
        checkpoint = self.checkpoint
        parent = self.parent()
        with open(self.path(), 'rb') as f:
            my_data = f.read()
        with open(parent.path(), 'rb') as f:
            f.seek((checkpoint - parent.checkpoint)*80)
            parent_data = f.read(parent_branch_size*80)
        self.write(parent_data, 0)
        parent.write(my_data, (checkpoint - parent.checkpoint)*80)
        # store file path
        for b in blockchains.values():
            b.old_path = b.path()
        # swap parameters
        self.parent_id = parent.parent_id; parent.parent_id = parent_id
        self.checkpoint = parent.checkpoint; parent.checkpoint = checkpoint
        self._size = parent._size; parent._size = parent_branch_size
        # move files
        for b in blockchains.values():
            if b in [self, parent]: continue
            if b.old_path != b.path():
                self.print_error("renaming", b.old_path, b.path())
                os.rename(b.old_path, b.path())
        # update pointers
        blockchains[self.checkpoint] = self
        blockchains[parent.checkpoint] = parent
    def write(self, data, offset):
        """Write *data* at byte *offset*, truncating any stale headers
        beyond it; fsync and refresh the cached size."""
        filename = self.path()
        with self.lock:
            with open(filename, 'rb+') as f:
                if offset != self._size*80:
                    f.seek(offset)
                    f.truncate()
                f.seek(offset)
                f.write(data)
                f.flush()
                os.fsync(f.fileno())
            self.update_size()
    def save_header(self, header):
        """Append one 80-byte header at the chain tip."""
        delta = header.get('block_height') - self.checkpoint
        data = bfh(serialize_header(header))
        assert delta == self.size()
        assert len(data) == 80
        self.write(data, delta*80)
        self.swap_with_parent()
    def read_header(self, height):
        """Return the header dict at *height*, or None if unavailable.
        Prefers the chunk currently under verification, then delegates
        to the parent chain for heights below our checkpoint."""
        if self.cur_chunk and (height // 2016) == self.cur_chunk_index:
            n = height % 2016
            h = self.cur_chunk[n * 80: (n + 1) * 80]
            return deserialize_header(h, height)
        assert self.parent_id != self.checkpoint
        if height < 0:
            return
        if height < self.checkpoint:
            return self.parent().read_header(height)
        if height > self.height():
            return
        delta = height - self.checkpoint
        name = self.path()
        if os.path.exists(name):
            with open(name, 'rb') as f:
                f.seek(delta * 80)
                h = f.read(80)
            return deserialize_header(h, height)
    def get_hash(self, height):
        """Hex hash of the header stored at *height*."""
        return hash_header(self.read_header(height))
    def BIP9(self, height, flag):
        """True if the header at *height* signals *flag* via BIP9 version bits."""
        v = self.read_header(height)['version']
        return ((v & 0xE0000000) == 0x20000000) and ((v & flag) == flag)
    def get_median_time_past(self, height):
        """Median timestamp of the (up to) 11 blocks ending at *height*."""
        if height < 0:
            return 0
        times = [self.read_header(h)['timestamp']
                 for h in range(max(0, height - 10), height + 1)]
        return sorted(times)[len(times) // 2]
    def get_suitable_block_height(self, suitableheight):
        #In order to avoid a block in a very skewed timestamp to have too much
        #influence, we select the median of the 3 top most block as a start point
        #Reference: github.com/Bitcoin-ABC/bitcoin-abc/master/src/pow.cpp#L201
        blocks2 = self.read_header(suitableheight)
        blocks1 = self.read_header(suitableheight-1)
        blocks = self.read_header(suitableheight-2)
        if (blocks['timestamp'] > blocks2['timestamp'] ):
            blocks,blocks2 = blocks2,blocks
        if (blocks['timestamp'] > blocks1['timestamp'] ):
            blocks,blocks1 = blocks1,blocks
        if (blocks1['timestamp'] > blocks2['timestamp'] ):
            blocks1,blocks2 = blocks2,blocks1
        # blocks1 now holds the median-timestamp block of the three.
        return blocks1['block_height']
    def get_bits(self, header):
        '''Return bits for the given height.

        Three regimes, selected by the previous block's median-time-past:
        the Nov-2017 DAA, the legacy 2016-block retarget, and the
        BitcoinCash EDA.
        '''
        # Difficulty adjustment interval?
        height = header['block_height']
        # Genesis
        if height == 0:
            return MAX_BITS
        prior = self.read_header(height - 1)
        bits = prior['bits']
        #NOV 13 HF DAA
        prevheight = height -1
        daa_mtp=self.get_median_time_past(prevheight)
        #if (daa_mtp >= 1509559291): #leave this here for testing
        if (daa_mtp >= 1510600000):
            if NetworkConstants.TESTNET:
                # testnet 20 minute rule
                if header['timestamp'] - prior['timestamp'] > 20*60:
                    return MAX_BITS
            # determine block range
            daa_starting_height=self.get_suitable_block_height(prevheight-144)
            daa_ending_height=self.get_suitable_block_height(prevheight)
            # calculate cumulative work (EXcluding work from block daa_starting_height, INcluding work from block daa_ending_height)
            daa_cumulative_work=0
            for daa_i in range (daa_starting_height+1,daa_ending_height+1):
                daa_prior = self.read_header(daa_i)
                daa_bits_for_a_block=daa_prior['bits']
                daa_work_for_a_block=bits_to_work(daa_bits_for_a_block)
                daa_cumulative_work += daa_work_for_a_block
            # calculate and sanitize elapsed time
            daa_starting_timestamp = self.read_header(daa_starting_height)['timestamp']
            daa_ending_timestamp = self.read_header(daa_ending_height)['timestamp']
            daa_elapsed_time=daa_ending_timestamp-daa_starting_timestamp
            # Clamp elapsed time to [0.5 day, 2 days] to limit the swing.
            if (daa_elapsed_time>172800):
                daa_elapsed_time=172800
            if (daa_elapsed_time<43200):
                daa_elapsed_time=43200
            # calculate and return new target
            daa_Wn= (daa_cumulative_work*600)//daa_elapsed_time
            daa_target= (1 << 256) // daa_Wn -1
            daa_retval = target_to_bits(daa_target)
            daa_retval = int(daa_retval)
            return daa_retval
        #END OF NOV-2017 DAA
        if height % 2016 == 0:
            # Legacy 2016-block retarget boundary.
            return self.get_new_bits(height)
        if NetworkConstants.TESTNET:
            # testnet 20 minute rule
            if header['timestamp'] - prior['timestamp'] > 20*60:
                return MAX_BITS
            return self.read_header(height // 2016 * 2016)['bits']
        # bitcoin cash EDA
        # Can't go below minimum, so early bail
        if bits == MAX_BITS:
            return bits
        mtp_6blocks = (self.get_median_time_past(height - 1)
                       - self.get_median_time_past(height - 7))
        if mtp_6blocks < 12 * 3600:
            return bits
        # If it took over 12hrs to produce the last 6 blocks, increase the
        # target by 25% (reducing difficulty by 20%).
        target = bits_to_target(bits)
        target += target >> 2
        return target_to_bits(target)
    def get_new_bits(self, height):
        """Compact bits after a legacy 2016-block retarget at *height*."""
        assert height % 2016 == 0
        # Genesis
        if height == 0:
            return MAX_BITS
        first = self.read_header(height - 2016)
        prior = self.read_header(height - 1)
        prior_target = bits_to_target(prior['bits'])
        target_span = 14 * 24 * 60 * 60
        span = prior['timestamp'] - first['timestamp']
        # Clamp to a factor of four in either direction.
        span = min(max(span, target_span // 4), target_span * 4)
        new_target = (prior_target * span) // target_span
        return target_to_bits(new_target)
    def can_connect(self, header, check_height=True):
        """True if *header* extends this chain (tip check optional)."""
        height = header['block_height']
        if check_height and self.height() != height - 1:
            return False
        if height == 0:
            return hash_header(header) == NetworkConstants.GENESIS
        previous_header = self.read_header(height -1)
        if not previous_header:
            return False
        prev_hash = hash_header(previous_header)
        if prev_hash != header.get('prev_block_hash'):
            return False
        bits = self.get_bits(header)
        try:
            self.verify_header(header, previous_header, bits)
        except VerifyError as e:
            self.print_error('verify header {} failed at height {:d}: {}'
                             .format(hash_header(header), height, e))
            return False
        return True
    def connect_chunk(self, idx, hexdata):
        """Verify and persist a hex-encoded header chunk; True on success."""
        try:
            data = bfh(hexdata)
            self.verify_chunk(idx, data)
            self.save_chunk(idx, data)
            return True
        except VerifyError as e:
            self.print_error('verify_chunk failed: {}'.format(e))
            return False
|
''' All functions acting on the hits of one DUT are listed here'''
from __future__ import division
import logging
import os.path
import re
import tables as tb
import numpy as np
from scipy.ndimage import median_filter
from pixel_clusterizer.clusterizer import HitClusterizer
from testbeam_analysis.tools import analysis_utils, plot_utils
from testbeam_analysis.tools.plot_utils import plot_masked_pixels, plot_cluster_size
def check_file(input_hits_file, n_pixel, output_check_file=None,
               event_range=1, plot=True, chunk_size=1000000):
    '''Checks the hit table to have proper data.

    The checks include:
      - hit definitions:
          - position has to start at 1 (not 0)
          - position should not exceed number of pixels (n_pixel)
      - event building:
          - event number has to be strictly monotone
          - hit position correlations of consecutive events are
            created. Should be zero for distinctly built events.

    Parameters
    ----------
    input_hits_file : string
        File name of the hit table.
    n_pixel : tuple
        Tuple of the total number of pixels (column/row).
    output_check_file : string
        Filename of the output file with the correlation histograms.
    event_range : integer
        The range of events to correlate.
        E.g.: event_range = 2 correlates to preceding event hits.
    plot : bool
        If True, create additional output plots.
    chunk_size : int
        Chunk size of the data when reading from file.
    '''
    logging.info('=== Check data of hit file %s ===', input_hits_file)
    if output_check_file is None:
        output_check_file = input_hits_file[:-3] + '_check.h5'
    with tb.open_file(output_check_file, mode="w") as out_file_h5:
        with tb.open_file(input_hits_file, 'r') as input_file_h5:
            shape_column = (n_pixel[0], n_pixel[0])
            shape_row = (n_pixel[1], n_pixel[1])
            # FIX: np.int was removed from modern NumPy; the builtin int
            # yields the same default integer dtype.
            col_corr = np.zeros(shape_column, dtype=int)
            row_corr = np.zeros(shape_row, dtype=int)
            last_event = None
            out_dE = out_file_h5.create_earray(out_file_h5.root, name='EventDelta',
                                               title='Change of event number per non empty event',
                                               shape=(0, ),
                                               atom=tb.Atom.from_dtype(np.dtype(np.uint64)),
                                               filters=tb.Filters(complib='blosc',
                                                                  complevel=5,
                                                                  fletcher32=False))
            out_E = out_file_h5.create_earray(out_file_h5.root, name='EventNumber',
                                              title='Event number of non empty event',
                                              shape=(0, ),
                                              atom=tb.Atom.from_dtype(np.dtype(np.uint64)),
                                              filters=tb.Filters(complib='blosc',
                                                                 complevel=5,
                                                                 fletcher32=False))
            for hits, _ in analysis_utils.data_aligned_at_events(
                    input_file_h5.root.Hits,
                    chunk_size=chunk_size):
                if not np.all(np.diff(hits['event_number']) >= 0):
                    raise RuntimeError('The event number does not always increase. '
                                       'The hits cannot be used like this!')
                if np.any(hits['column'] < 1) or np.any(hits['row'] < 1):
                    raise RuntimeError('The column/row definition does not start at 1!')
                if (np.any(hits['column'] > n_pixel[0])
                        or np.any(hits['row'] > n_pixel[1])):
                    # FIX: extra args were passed to RuntimeError instead of
                    # being %-formatted into the message ('nuber' typo fixed too).
                    raise RuntimeError('The column/row definition exceeds the number '
                                       'of pixels (%s/%s)!' % (n_pixel[0], n_pixel[1]))
                analysis_utils.correlate_hits_on_event_range(hits,
                                                             col_corr,
                                                             row_corr,
                                                             event_range)
                event_numbers = np.unique(hits['event_number'])
                event_delta = np.diff(event_numbers)
                # FIX: event number 0 is falsy -- compare against None
                # explicitly so the cross-chunk delta is never skipped.
                if last_event is not None:
                    event_delta = np.concatenate((np.array([event_numbers[0] - last_event]),
                                                  event_delta))
                last_event = event_numbers[-1]
                out_dE.append(event_delta)
                out_E.append(event_numbers)
            out_col = out_file_h5.create_carray(out_file_h5.root, name='CorrelationColumns',
                                                title='Column Correlation with event range=%s' % event_range,
                                                atom=tb.Atom.from_dtype(col_corr.dtype),
                                                shape=col_corr.shape,
                                                filters=tb.Filters(complib='blosc',
                                                                   complevel=5,
                                                                   fletcher32=False))
            out_row = out_file_h5.create_carray(out_file_h5.root, name='CorrelationRows',
                                                title='Row Correlation with event range=%s' % event_range,
                                                atom=tb.Atom.from_dtype(row_corr.dtype),
                                                shape=row_corr.shape,
                                                filters=tb.Filters(complib='blosc',
                                                                   complevel=5,
                                                                   fletcher32=False))
            out_col[:] = col_corr
            out_row[:] = row_corr
    if plot:
        plot_utils.plot_checks(input_corr_file=output_check_file)
def generate_pixel_mask(input_hits_file, n_pixel, pixel_mask_name="NoisyPixelMask", output_mask_file=None, pixel_size=None, threshold=10.0, filter_size=3, dut_name=None, plot=True, chunk_size=1000000):
    '''Generating pixel mask from the hit table.

    Pixels whose occupancy exceeds the locally expected occupancy
    (median-filtered background) by more than *threshold* standard
    deviations are masked.

    Parameters
    ----------
    input_hits_file : string
        File name of the hit table.
    n_pixel : tuple
        Tuple of the total number of pixels (column/row).
    pixel_mask_name : string
        Name of the node containing the mask inside the output file.
    output_mask_file : string
        File name of the output mask file.
    pixel_size : tuple
        Tuple of the pixel size (column/row). If None, assuming square pixels.
    threshold : float
        The threshold for pixel masking. The threshold is given in units of
        sigma of the pixel noise (background subtracted). The lower the value
        the more pixels are masked.
    filter_size : scalar or tuple
        Adjust the median filter size by giving the number of columns and rows.
        The higher the value the more the background is smoothed and more
        pixels are masked.
    dut_name : string
        Name of the DUT. If None, file name of the hit table will be printed.
    plot : bool
        If True, create additional output plots.
    chunk_size : int
        Chunk size of the data when reading from file.

    Returns
    -------
    output_mask_file : string
        Path of the written mask file.
    '''
    # "NoisyPixelMask" -> "noisy pixel mask" for logging / file naming.
    logging.info('=== Generating %s for %s ===', ' '.join(item.lower() for item in re.findall('[A-Z][^A-Z]*', pixel_mask_name)), input_hits_file)
    if output_mask_file is None:
        output_mask_file = os.path.splitext(input_hits_file)[0] + '_' + '_'.join(item.lower() for item in re.findall('[A-Z][^A-Z]*', pixel_mask_name)) + '.h5'
    occupancy = None
    # Calculating occupancy array
    with tb.open_file(input_hits_file, 'r') as input_file_h5:
        for hits, _ in analysis_utils.data_aligned_at_events(input_file_h5.root.Hits, chunk_size=chunk_size):
            col, row = hits['column'], hits['row']
            # Positions are 1-based in the hit table, histogram is 0-based.
            chunk_occ = analysis_utils.hist_2d_index(col - 1, row - 1, shape=n_pixel)
            if occupancy is None:
                occupancy = chunk_occ
            else:
                occupancy = occupancy + chunk_occ
    # Run median filter across data, assuming 0 filling past the edges to get expected occupancy
    blurred = median_filter(occupancy.astype(np.int32), size=filter_size, mode='constant', cval=0.0)
    # Spot noisy pixels maxima by substracting expected occupancy
    difference = np.ma.masked_array(occupancy - blurred)
    std = np.ma.std(difference)
    abs_occ_threshold = threshold * std
    occupancy = np.ma.masked_where(difference > abs_occ_threshold, occupancy)
    logging.info('Masked %d pixels at threshold %.1f in %s', np.ma.count_masked(occupancy), threshold, input_hits_file)
    # Generate tuple col / row array of hot pixels, do not use getmask()
    pixel_mask = np.ma.getmaskarray(occupancy)
    with tb.open_file(output_mask_file, 'w') as out_file_h5:
        # Create occupancy array without masking pixels
        occupancy_array_table = out_file_h5.create_carray(out_file_h5.root, name='HistOcc', title='Occupancy Histogram', atom=tb.Atom.from_dtype(occupancy.dtype), shape=occupancy.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
        occupancy_array_table[:] = np.ma.getdata(occupancy)
        # Create masked pixels array
        masked_pixel_table = out_file_h5.create_carray(out_file_h5.root, name=pixel_mask_name, title='Pixel Mask', atom=tb.Atom.from_dtype(pixel_mask.dtype), shape=pixel_mask.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
        masked_pixel_table[:] = pixel_mask
    if plot:
        plot_masked_pixels(input_mask_file=output_mask_file, pixel_size=pixel_size, dut_name=dut_name)
    return output_mask_file
def cluster_hits(input_hits_file, output_cluster_file=None, create_cluster_hits_table=False, input_disabled_pixel_mask_file=None, input_noisy_pixel_mask_file=None, min_hit_charge=0, max_hit_charge=None, column_cluster_distance=1, row_cluster_distance=1, frame_cluster_distance=1, dut_name=None, plot=True, chunk_size=1000000):
'''Clusters the hits in the data file containing the hit table.
Parameters
----------
input_hits_file : string
Filename of the input hits file.
output_cluster_file : string
Filename of the output cluster file. If None, the filename will be derived from the input hits file.
create_cluster_hits_table : bool
If True, additionally create cluster hits table.
input_disabled_pixel_mask_file : string
Filename of the input disabled mask file.
input_noisy_pixel_mask_file : string
Filename of the input disabled mask file.
min_hit_charge : uint
Minimum hit charge. Minimum possible hit charge must be given in order to correcly calculate the cluster coordinates.
max_hit_charge : uint
Maximum hit charge. Hits wit charge above the limit will be ignored.
column_cluster_distance : uint
Maximum column distance between hist so that they are assigned to the same cluster. Value of 0 effectively disables the clusterizer in column direction.
row_cluster_distance : uint
Maximum row distance between hist so that they are assigned to the same cluster. Value of 0 effectively disables the clusterizer in row direction.
frame_cluster_distance : uint
Sometimes an event has additional timing information (e.g. bunch crossing ID, frame ID). Value of 0 effectively disables the clusterization in time.
dut_name : string
Name of the DUT. If None, filename of the output cluster file will be used.
plot : bool
If True, create additional output plots.
chunk_size : int
Chunk size of the data when reading from file.
'''
logging.info('=== Clustering hits in %s ===', input_hits_file)
if output_cluster_file is None:
output_cluster_file = os.path.splitext(input_hits_file)[0] + '_clustered.h5'
# Calculate the size in col/row for each cluster
def calc_cluster_dimensions(hits, clusters, cluster_size, cluster_hit_indices, cluster_index, cluster_id, charge_correction, noisy_pixels, disabled_pixels, seed_hit_index):
    # End-of-cluster callback for the clusterizer: stores the cluster's
    # extent in columns / rows into the extra err_cols / err_rows fields.
    # Start the min/max search from the first hit of the cluster.
    min_col = hits[cluster_hit_indices[0]].column
    max_col = hits[cluster_hit_indices[0]].column
    min_row = hits[cluster_hit_indices[0]].row
    max_row = hits[cluster_hit_indices[0]].row
    for i in cluster_hit_indices[1:]:
        if i < 0:  # Not used indeces = -1
            break
        if hits[i].column < min_col:
            min_col = hits[i].column
        if hits[i].column > max_col:
            max_col = hits[i].column
        if hits[i].row < min_row:
            min_row = hits[i].row
        if hits[i].row > max_row:
            max_row = hits[i].row
    # Extent is inclusive of both end pixels, hence the +1.
    clusters[cluster_index].err_cols = max_col - min_col + 1
    clusters[cluster_index].err_rows = max_row - min_row + 1
with tb.open_file(input_hits_file, 'r') as input_file_h5:
with tb.open_file(output_cluster_file, 'w') as output_file_h5:
if input_disabled_pixel_mask_file is not None:
with tb.open_file(input_disabled_pixel_mask_file, 'r') as input_mask_file_h5:
disabled_pixels = np.dstack(np.nonzero(input_mask_file_h5.root.DisabledPixelMask[:]))[0] + 1
input_mask_file_h5.root.DisabledPixelMask._f_copy(newparent=output_file_h5.root)
else:
disabled_pixels = None
if input_noisy_pixel_mask_file is not None:
with tb.open_file(input_noisy_pixel_mask_file, 'r') as input_mask_file_h5:
noisy_pixels = np.dstack(np.nonzero(input_mask_file_h5.root.NoisyPixelMask[:]))[0] + 1
input_mask_file_h5.root.NoisyPixelMask._f_copy(newparent=output_file_h5.root)
else:
noisy_pixels = None
clusterizer = HitClusterizer(column_cluster_distance=column_cluster_distance, row_cluster_distance=row_cluster_distance, frame_cluster_distance=frame_cluster_distance, min_hit_charge=min_hit_charge, max_hit_charge=max_hit_charge)
clusterizer.add_cluster_field(description=('err_cols', '<f4')) # Add an additional field to hold the cluster size in x
clusterizer.add_cluster_field(description=('err_rows', '<f4')) # Add an additional field to hold the cluster size in y
clusterizer.set_end_of_cluster_function(calc_cluster_dimensions) # Set the new function to the clusterizer
cluster_hits_table = None
cluster_table = None
for hits, _ in analysis_utils.data_aligned_at_events(input_file_h5.root.Hits, chunk_size=chunk_size):
if not np.all(np.diff(hits['event_number']) >= 0):
raise RuntimeError('The event number does not always increase. The hits cannot be used like this!')
cluster_hits, clusters = clusterizer.cluster_hits(hits, noisy_pixels=noisy_pixels, disabled_pixels=disabled_pixels) # Cluster hits
if not np.all(np.diff(clusters['event_number']) >= 0):
raise RuntimeError('The event number does not always increase. The cluster cannot be used like this!')
# create cluster hits table dynamically
if create_cluster_hits_table and cluster_hits_table is None:
cluster_hits_table = output_file_h5.create_table(output_file_h5.root, name='ClusterHits', description=cluster_hits.dtype, title='Cluster hits table', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
# create cluster table dynamically
if cluster_table is None:
cluster_table = output_file_h5.create_table(output_file_h5.root, name='Cluster', description=clusters.dtype, title='Cluster table', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
if create_cluster_hits_table:
cluster_hits_table.append(cluster_hits)
cluster_table.append(clusters)
def get_eff_pitch(hist, cluster_size):
    '''Effective pitch to describe the cluster size probability distribution.

    Parameters
    ----------
    hist : array like
        Histogram with cluster size distribution.
    cluster_size : int
        Cluster size to calculate the pitch for.

    Returns
    -------
    float
        sqrt of the fraction of clusters with the given size.
    '''
    # FIX: use the hist parameter -- the previous code read the enclosing-scope
    # variable 'hight' and left the parameter dead. Callers pass hist=hight,
    # so behavior is unchanged. Also avoid np.float (removed in numpy >= 1.24).
    return np.sqrt(hist[int(cluster_size)].astype(float) / hist.sum())
# Calculate cluster size histogram
with tb.open_file(output_cluster_file, 'r') as input_file_h5:
hight = None
n_hits = 0
n_clusters = input_file_h5.root.Cluster.nrows
for start_index in range(0, n_clusters, chunk_size):
cluster_n_hits = input_file_h5.root.Cluster[start_index:start_index + chunk_size]['n_hits']
# calculate cluster size histogram
if hight is None:
max_cluster_size = np.amax(cluster_n_hits)
hight = analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
elif max_cluster_size < np.amax(cluster_n_hits):
max_cluster_size = np.amax(cluster_n_hits)
hight.resize(max_cluster_size + 1)
hight += analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
else:
hight += analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
n_hits += np.sum(cluster_n_hits)
# Calculate cluster size histogram
with tb.open_file(output_cluster_file, 'r+') as io_file_h5:
for start_index in range(0, io_file_h5.root.Cluster.nrows, chunk_size):
clusters = io_file_h5.root.Cluster[start_index:start_index + chunk_size]
# Set errors for small clusters, where charge sharing enhances resolution
for css in [(1, 1), (1, 2), (2, 1), (2, 2)]:
sel = np.logical_and(clusters['err_cols'] == css[0], clusters['err_rows'] == css[1])
clusters['err_cols'][sel] = get_eff_pitch(hist=hight, cluster_size=css[0]) / np.sqrt(12)
clusters['err_rows'][sel] = get_eff_pitch(hist=hight, cluster_size=css[1]) / np.sqrt(12)
# Set errors for big clusters, where delta electrons reduce resolution
sel = np.logical_or(clusters['err_cols'] > 2, clusters['err_rows'] > 2)
clusters['err_cols'][sel] = clusters['err_cols'][sel] / np.sqrt(12)
clusters['err_rows'][sel] = clusters['err_rows'][sel] / np.sqrt(12)
io_file_h5.root.Cluster[start_index:start_index + chunk_size] = clusters
if plot:
plot_cluster_size(hight, n_hits, n_clusters, max_cluster_size,
dut_name=os.path.split(output_cluster_file)[1],
output_pdf_file=os.path.splitext(output_cluster_file)[0] + '_cluster_size.pdf')
return output_cluster_file
ENH: preparations for multithreading pixel masking
''' All functions acting on the hits of one DUT are listed here'''
from __future__ import division
import logging
import os.path
import re
import tables as tb
import numpy as np
from scipy.ndimage import median_filter
from pixel_clusterizer.clusterizer import HitClusterizer
from testbeam_analysis.tools import smc
from testbeam_analysis.tools import analysis_utils, plot_utils
from testbeam_analysis.tools.plot_utils import plot_masked_pixels, plot_cluster_size
def check_file(input_hits_file, n_pixel, output_check_file=None,
               event_range=1, plot=True, chunk_size=1000000):
    '''Checks the hit table to have proper data.

    The checks include:
      - hit definitions:
          - position has to start at 1 (not 0)
          - position should not exceed number of pixels (n_pixel)
      - event building:
          - event number has to be strictly monotone
          - hit position correlations of consecutive events are
            created; should be zero for distinctly built events.

    Parameters
    ----------
    input_hits_file : string
        File name of the hit table.
    n_pixel : tuple
        Tuple of the total number of pixels (column/row).
    output_check_file : string
        Filename of the output file with the correlation histograms.
        If None, derived from input_hits_file.
    event_range : integer
        The range of events to correlate.
        E.g.: event_range = 2 correlates with the preceding event hits.
    plot : bool
        If True, create additional output plots.
    chunk_size : int
        Chunk size of the data when reading from file.

    Raises
    ------
    RuntimeError
        If event numbers are not monotone or hit positions are out of range.
    '''
    logging.info('=== Check data of hit file %s ===', input_hits_file)
    if output_check_file is None:
        output_check_file = input_hits_file[:-3] + '_check.h5'
    with tb.open_file(output_check_file, mode="w") as out_file_h5:
        with tb.open_file(input_hits_file, 'r') as input_file_h5:
            shape_column = (n_pixel[0], n_pixel[0])
            shape_row = (n_pixel[1], n_pixel[1])
            # np.int is a deprecated alias of the builtin int (removed in numpy >= 1.24)
            col_corr = np.zeros(shape_column, dtype=int)
            row_corr = np.zeros(shape_row, dtype=int)
            last_event = None
            out_dE = out_file_h5.create_earray(out_file_h5.root, name='EventDelta',
                                               title='Change of event number per non empty event',
                                               shape=(0, ),
                                               atom=tb.Atom.from_dtype(np.dtype(np.uint64)),
                                               filters=tb.Filters(complib='blosc',
                                                                  complevel=5,
                                                                  fletcher32=False))
            out_E = out_file_h5.create_earray(out_file_h5.root, name='EventNumber',
                                              title='Event number of non empty event',
                                              shape=(0, ),
                                              atom=tb.Atom.from_dtype(np.dtype(np.uint64)),
                                              filters=tb.Filters(complib='blosc',
                                                                 complevel=5,
                                                                 fletcher32=False))
            for hits, _ in analysis_utils.data_aligned_at_events(
                    input_file_h5.root.Hits,
                    chunk_size=chunk_size):
                if not np.all(np.diff(hits['event_number']) >= 0):
                    raise RuntimeError('The event number does not always increase. '
                                       'The hits cannot be used like this!')
                if np.any(hits['column'] < 1) or np.any(hits['row'] < 1):
                    raise RuntimeError('The column/row definition does not '
                                       'start at 1!')
                if (np.any(hits['column'] > n_pixel[0])
                        or np.any(hits['row'] > n_pixel[1])):
                    # FIX: RuntimeError does not %-format its arguments like
                    # logging does -- the previous code raised with a tuple
                    # instead of an interpolated message. Also fixes "nuber".
                    raise RuntimeError('The column/row definition exceeds the number '
                                       'of pixels (%s/%s)!' % (n_pixel[0], n_pixel[1]))
                analysis_utils.correlate_hits_on_event_range(hits,
                                                             col_corr,
                                                             row_corr,
                                                             event_range)
                event_numbers = np.unique(hits['event_number'])
                event_delta = np.diff(event_numbers)
                # FIX: compare against None explicitly -- a legitimate last
                # event number of 0 is falsy and used to skip the chunk seam.
                if last_event is not None:
                    event_delta = np.concatenate((np.array([event_numbers[0] - last_event]),
                                                  event_delta))
                last_event = event_numbers[-1]
                out_dE.append(event_delta)
                out_E.append(event_numbers)
            out_col = out_file_h5.create_carray(out_file_h5.root, name='CorrelationColumns',
                                                title='Column Correlation with event range=%s' % event_range,
                                                atom=tb.Atom.from_dtype(col_corr.dtype),
                                                shape=col_corr.shape,
                                                filters=tb.Filters(complib='blosc',
                                                                   complevel=5,
                                                                   fletcher32=False))
            out_row = out_file_h5.create_carray(out_file_h5.root, name='CorrelationRows',
                                                title='Row Correlation with event range=%s' % event_range,
                                                atom=tb.Atom.from_dtype(row_corr.dtype),
                                                shape=row_corr.shape,
                                                filters=tb.Filters(complib='blosc',
                                                                   complevel=5,
                                                                   fletcher32=False))
            out_col[:] = col_corr
            out_row[:] = row_corr
    if plot:
        plot_utils.plot_checks(input_corr_file=output_check_file)
def generate_pixel_mask(input_hits_file, n_pixel, pixel_mask_name="NoisyPixelMask", output_mask_file=None, pixel_size=None, threshold=10.0, filter_size=3, dut_name=None, plot=True, chunk_size=1000000):
    '''Generating pixel mask from the hit table.

    Parameters
    ----------
    input_hits_file : string
        File name of the hit table.
    n_pixel : tuple
        Tuple of the total number of pixels (column/row).
    pixel_mask_name : string
        Name of the node containing the mask inside the output file.
    output_mask_file : string
        File name of the output mask file.
    pixel_size : tuple
        Tuple of the pixel size (column/row). If None, assuming square pixels.
    threshold : float
        The threshold for pixel masking. The threshold is given in units of
        sigma of the pixel noise (background subtracted). The lower the value
        the more pixels are masked.
    filter_size : scalar or tuple
        Adjust the median filter size by giving the number of columns and rows.
        The higher the value the more the background is smoothed and more
        pixels are masked.
    dut_name : string
        Name of the DUT. If None, file name of the hit table will be printed.
    plot : bool
        If True, create additional output plots.
    chunk_size : int
        Chunk size of the data when reading from file.

    Returns
    -------
    output_mask_file : string
        File name of the written mask file.
    '''
    # Split e.g. "NoisyPixelMask" into ["noisy", "pixel", "mask"] once and
    # reuse the pieces for both the log message and the default file name.
    name_parts = [item.lower() for item in re.findall('[A-Z][^A-Z]*', pixel_mask_name)]
    logging.info('=== Generating %s for %s ===', ' '.join(name_parts), input_hits_file)
    if output_mask_file is None:
        output_mask_file = os.path.splitext(input_hits_file)[0] + '_' + '_'.join(name_parts) + '.h5'
    # Accumulate the occupancy histogram chunk by chunk
    occupancy = None
    with tb.open_file(input_hits_file, 'r') as input_file_h5:
        for hits, _ in analysis_utils.data_aligned_at_events(input_file_h5.root.Hits, chunk_size=chunk_size):
            chunk_occ = analysis_utils.hist_2d_index(hits['column'] - 1, hits['row'] - 1, shape=n_pixel)
            occupancy = chunk_occ if occupancy is None else occupancy + chunk_occ
    # Expected occupancy: median filter across the data, assuming 0 filling
    # past the edges.
    blurred = median_filter(occupancy.astype(np.int32), size=filter_size, mode='constant', cval=0.0)
    # Noisy pixel maxima stick out after subtracting the expected occupancy
    difference = np.ma.masked_array(occupancy - blurred)
    abs_occ_threshold = threshold * np.ma.std(difference)
    occupancy = np.ma.masked_where(difference > abs_occ_threshold, occupancy)
    logging.info('Masked %d pixels at threshold %.1f in %s', np.ma.count_masked(occupancy), threshold, input_hits_file)
    # Boolean mask of hot pixels; getmaskarray (not getmask) guarantees an array
    pixel_mask = np.ma.getmaskarray(occupancy)
    with tb.open_file(output_mask_file, 'w') as out_file_h5:
        # Store the occupancy histogram without the masking applied
        occupancy_array_table = out_file_h5.create_carray(out_file_h5.root, name='HistOcc', title='Occupancy Histogram', atom=tb.Atom.from_dtype(occupancy.dtype), shape=occupancy.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
        occupancy_array_table[:] = np.ma.getdata(occupancy)
        # Store the masked pixels array
        masked_pixel_table = out_file_h5.create_carray(out_file_h5.root, name=pixel_mask_name, title='Pixel Mask', atom=tb.Atom.from_dtype(pixel_mask.dtype), shape=pixel_mask.shape, filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
        masked_pixel_table[:] = pixel_mask
    if plot:
        plot_masked_pixels(input_mask_file=output_mask_file, pixel_size=pixel_size, dut_name=dut_name)
    return output_mask_file
def cluster_hits(input_hits_file, output_cluster_file=None, create_cluster_hits_table=False, input_disabled_pixel_mask_file=None, input_noisy_pixel_mask_file=None, min_hit_charge=0, max_hit_charge=None, column_cluster_distance=1, row_cluster_distance=1, frame_cluster_distance=1, dut_name=None, plot=True, chunk_size=1000000):
    '''Clusters the hits in the data file containing the hit table.

    Parameters
    ----------
    input_hits_file : string
        Filename of the input hits file.
    output_cluster_file : string
        Filename of the output cluster file. If None, the filename will be derived from the input hits file.
    create_cluster_hits_table : bool
        If True, additionally create cluster hits table.
    input_disabled_pixel_mask_file : string
        Filename of the input disabled pixel mask file.
    input_noisy_pixel_mask_file : string
        Filename of the input noisy pixel mask file.
    min_hit_charge : uint
        Minimum hit charge. Minimum possible hit charge must be given in order to correctly calculate the cluster coordinates.
    max_hit_charge : uint
        Maximum hit charge. Hits with charge above the limit will be ignored.
    column_cluster_distance : uint
        Maximum column distance between hits so that they are assigned to the same cluster. Value of 0 effectively disables the clusterizer in column direction.
    row_cluster_distance : uint
        Maximum row distance between hits so that they are assigned to the same cluster. Value of 0 effectively disables the clusterizer in row direction.
    frame_cluster_distance : uint
        Sometimes an event has additional timing information (e.g. bunch crossing ID, frame ID). Value of 0 effectively disables the clusterization in time.
    dut_name : string
        Name of the DUT. If None, filename of the output cluster file will be used.
    plot : bool
        If True, create additional output plots.
    chunk_size : int
        Chunk size of the data when reading from file.

    Returns
    -------
    output_cluster_file : string
        Filename of the written cluster file.
    '''
    logging.info('=== Clustering hits in %s ===', input_hits_file)
    if output_cluster_file is None:
        output_cluster_file = os.path.splitext(input_hits_file)[0] + '_clustered.h5'

    # End-of-cluster callback, automatically called by the clusterizer when a
    # cluster is finished: store the cluster extent in column/row direction in
    # the additional err_cols/err_rows fields.
    def calc_cluster_dimensions(hits, clusters, cluster_size, cluster_hit_indices, cluster_index, cluster_id, charge_correction, noisy_pixels, disabled_pixels, seed_hit_index):
        min_col = hits[cluster_hit_indices[0]].column
        max_col = hits[cluster_hit_indices[0]].column
        min_row = hits[cluster_hit_indices[0]].row
        max_row = hits[cluster_hit_indices[0]].row
        for i in cluster_hit_indices[1:]:
            if i < 0:  # Unused indices are marked with -1
                break
            if hits[i].column < min_col:
                min_col = hits[i].column
            if hits[i].column > max_col:
                max_col = hits[i].column
            if hits[i].row < min_row:
                min_row = hits[i].row
            if hits[i].row > max_row:
                max_row = hits[i].row
        clusters[cluster_index].err_cols = max_col - min_col + 1
        clusters[cluster_index].err_rows = max_row - min_row + 1

    with tb.open_file(input_hits_file, 'r') as input_file_h5:
        with tb.open_file(output_cluster_file, 'w') as output_file_h5:
            if input_disabled_pixel_mask_file is not None:
                with tb.open_file(input_disabled_pixel_mask_file, 'r') as input_mask_file_h5:
                    # Convert the boolean mask to 1-based (column, row) pairs
                    disabled_pixels = np.dstack(np.nonzero(input_mask_file_h5.root.DisabledPixelMask[:]))[0] + 1
                    input_mask_file_h5.root.DisabledPixelMask._f_copy(newparent=output_file_h5.root)
            else:
                disabled_pixels = None
            if input_noisy_pixel_mask_file is not None:
                with tb.open_file(input_noisy_pixel_mask_file, 'r') as input_mask_file_h5:
                    noisy_pixels = np.dstack(np.nonzero(input_mask_file_h5.root.NoisyPixelMask[:]))[0] + 1
                    input_mask_file_h5.root.NoisyPixelMask._f_copy(newparent=output_file_h5.root)
            else:
                noisy_pixels = None
            clusterizer = HitClusterizer(column_cluster_distance=column_cluster_distance, row_cluster_distance=row_cluster_distance, frame_cluster_distance=frame_cluster_distance, min_hit_charge=min_hit_charge, max_hit_charge=max_hit_charge)
            clusterizer.add_cluster_field(description=('err_cols', '<f4'))  # Additional field to hold the cluster size in x
            clusterizer.add_cluster_field(description=('err_rows', '<f4'))  # Additional field to hold the cluster size in y
            clusterizer.set_end_of_cluster_function(calc_cluster_dimensions)  # Register the callback above
            cluster_hits_table = None
            cluster_table = None
            for hits, _ in analysis_utils.data_aligned_at_events(input_file_h5.root.Hits, chunk_size=chunk_size):
                if not np.all(np.diff(hits['event_number']) >= 0):
                    raise RuntimeError('The event number does not always increase. The hits cannot be used like this!')
                cluster_hits, clusters = clusterizer.cluster_hits(hits, noisy_pixels=noisy_pixels, disabled_pixels=disabled_pixels)  # Cluster hits
                if not np.all(np.diff(clusters['event_number']) >= 0):
                    raise RuntimeError('The event number does not always increase. The cluster cannot be used like this!')
                # Create the output tables lazily, once the dtypes are known
                if create_cluster_hits_table and cluster_hits_table is None:
                    cluster_hits_table = output_file_h5.create_table(output_file_h5.root, name='ClusterHits', description=cluster_hits.dtype, title='Cluster hits table', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                if cluster_table is None:
                    cluster_table = output_file_h5.create_table(output_file_h5.root, name='Cluster', description=clusters.dtype, title='Cluster table', filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
                if create_cluster_hits_table:
                    cluster_hits_table.append(cluster_hits)
                cluster_table.append(clusters)

    def get_eff_pitch(hist, cluster_size):
        '''Effective pitch to describe the cluster size probability distribution.

        hist : array like
            Histogram with cluster size distribution.
        cluster_size : int
            Cluster size to calculate the pitch for.
        '''
        # FIX: use the hist parameter -- the previous code read the enclosing
        # 'hight' variable and left the parameter dead (callers pass
        # hist=hight, so behavior is unchanged). Avoid deprecated np.float.
        return np.sqrt(hist[int(cluster_size)].astype(float) / hist.sum())

    # Calculate the cluster size histogram
    with tb.open_file(output_cluster_file, 'r') as input_file_h5:
        hight = None
        n_hits = 0
        n_clusters = input_file_h5.root.Cluster.nrows
        for start_index in range(0, n_clusters, chunk_size):
            cluster_n_hits = input_file_h5.root.Cluster[start_index:start_index + chunk_size]['n_hits']
            if hight is None:
                max_cluster_size = np.amax(cluster_n_hits)
                hight = analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
            elif max_cluster_size < np.amax(cluster_n_hits):
                # Grow the histogram when a larger cluster shows up
                max_cluster_size = np.amax(cluster_n_hits)
                hight.resize(max_cluster_size + 1)
                hight += analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
            else:
                hight += analysis_utils.hist_1d_index(cluster_n_hits, shape=(max_cluster_size + 1,))
            n_hits += np.sum(cluster_n_hits)

    # Assign position errors to the clusters (the previous comment wrongly
    # repeated "Calculate cluster size histogram" here)
    with tb.open_file(output_cluster_file, 'r+') as io_file_h5:
        for start_index in range(0, io_file_h5.root.Cluster.nrows, chunk_size):
            clusters = io_file_h5.root.Cluster[start_index:start_index + chunk_size]
            # Set errors for small clusters, where charge sharing enhances resolution
            for css in [(1, 1), (1, 2), (2, 1), (2, 2)]:
                sel = np.logical_and(clusters['err_cols'] == css[0], clusters['err_rows'] == css[1])
                clusters['err_cols'][sel] = get_eff_pitch(hist=hight, cluster_size=css[0]) / np.sqrt(12)
                clusters['err_rows'][sel] = get_eff_pitch(hist=hight, cluster_size=css[1]) / np.sqrt(12)
            # Set errors for big clusters, where delta electrons reduce resolution
            sel = np.logical_or(clusters['err_cols'] > 2, clusters['err_rows'] > 2)
            clusters['err_cols'][sel] = clusters['err_cols'][sel] / np.sqrt(12)
            clusters['err_rows'][sel] = clusters['err_rows'][sel] / np.sqrt(12)
            io_file_h5.root.Cluster[start_index:start_index + chunk_size] = clusters
    if plot:
        plot_cluster_size(hight, n_hits, n_clusters, max_cluster_size,
                          dut_name=os.path.split(output_cluster_file)[1],
                          output_pdf_file=os.path.splitext(output_cluster_file)[0] + '_cluster_size.pdf')
    return output_cluster_file
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Matt Hite <mhite@hotmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_node
short_description: "Manages F5 BIG-IP LTM nodes"
description:
- "Manages F5 BIG-IP LTM nodes via iControl SOAP API"
version_added: "1.4"
author: Matt Hite
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
default: null
choices: []
aliases: []
user:
description:
- BIG-IP username
required: true
default: null
choices: []
aliases: []
password:
description:
- BIG-IP password
required: true
default: null
choices: []
aliases: []
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.9.1
state:
description:
- Pool member state
required: true
default: present
choices: ['present', 'absent']
aliases: []
partition:
description:
- Partition
required: false
default: 'Common'
choices: []
aliases: []
name:
description:
- "Node name"
required: false
default: null
choices: []
host:
description:
- "Node IP. Required when state=present and node does not exist. Error when state=absent."
required: true
default: null
choices: []
aliases: ['address', 'ip']
description:
description:
- "Node description."
required: false
default: null
choices: []
'''
EXAMPLES = '''
## playbook task examples:
---
# file bigip-test.yml
# ...
- hosts: bigip-test
tasks:
- name: Add node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
name="{{ ansible_default_ipv4["address"] }}"
# Note that the BIG-IP automatically names the node using the
# IP address specified in previous play's host parameter.
# Future plays referencing this node no longer use the host
# parameter but instead use the name parameter.
# Alternatively, you could have specified a name with the
# name parameter when state=present.
- name: Modify node description
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
description="Our best server yet"
- name: Delete node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
'''
try:
import bigsuds
except ImportError:
bigsuds_found = False
else:
bigsuds_found = True
# ==========================
# bigip_node module specific
#
def bigip_api(bigip, user, password):
    """Open an iControl SOAP session against the given BIG-IP host."""
    return bigsuds.BIGIP(hostname=bigip, username=user, password=password)
def disable_ssl_cert_validation():
    """Globally turn off HTTPS certificate verification for this process.

    Intended only for testing against self-signed certificates; never use in
    production. Technique from https://www.python.org/dev/peps/pep-0476/#id29
    """
    import ssl
    ssl._create_default_https_context = ssl._create_unverified_context
def node_exists(api, address):
    """Return True if the named node exists on the BIG-IP.

    iControl has no direct existence query, so probe the node status and
    treat a 'was not found' fault as absence; any other fault is re-raised.
    """
    # FIX: 'except X, e' is Python-2-only syntax (SyntaxError on Python 3);
    # 'except X as e' works on Python 2.6+ and 3.x.
    result = False
    try:
        api.LocalLB.NodeAddressV2.get_object_status(nodes=[address])
        result = True
    except bigsuds.OperationFailed as e:
        if "was not found" in str(e):
            result = False
        else:
            # genuine exception
            raise
    return result
def create_node_address(api, address, name):
    """Create a node with the given IP address and name.

    Returns a (success, description) tuple; success is False with an
    explanatory description when the name or IP is already in use.
    """
    # FIX: Python-3-compatible 'except ... as e' syntax.
    try:
        api.LocalLB.NodeAddressV2.create(nodes=[name], addresses=[address], limits=[0])
        result = True
        desc = ""
    except bigsuds.OperationFailed as e:
        if "already exists" in str(e):
            result = False
            desc = "referenced name or IP already in use"
        else:
            # genuine exception
            raise
    return (result, desc)
def get_node_address(api, name):
    """Return the IP address configured for the named node."""
    addresses = api.LocalLB.NodeAddressV2.get_address(nodes=[name])
    return addresses[0]
def delete_node_address(api, address):
    """Delete the named node.

    Returns a (success, description) tuple; success is False with an
    explanatory description when the node is still referenced by a pool.
    """
    # FIX: Python-3-compatible 'except ... as e' syntax.
    try:
        api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
        result = True
        desc = ""
    except bigsuds.OperationFailed as e:
        if "is referenced by a member of pool" in str(e):
            result = False
            desc = "node referenced by pool"
        else:
            # genuine exception
            raise
    return (result, desc)
def set_node_description(api, name, description):
    """Set the description attribute of the named node."""
    api.LocalLB.NodeAddressV2.set_description(
        nodes=[name], descriptions=[description])
def get_node_description(api, name):
    """Return the description attribute of the named node."""
    descriptions = api.LocalLB.NodeAddressV2.get_description(nodes=[name])
    return descriptions[0]
def main():
    """Entry point: create, delete, or update an F5 BIG-IP LTM node."""
    module = AnsibleModule(
        argument_spec = dict(
            server = dict(type='str', required=True),
            user = dict(type='str', required=True),
            password = dict(type='str', required=True),
            validate_certs = dict(default='yes', type='bool'),
            state = dict(type='str', default='present', choices=['present', 'absent']),
            partition = dict(type='str', default='Common'),
            name = dict(type='str', required=True),
            host = dict(type='str', aliases=['address', 'ip']),
            description = dict(type='str')
        ),
        supports_check_mode=True
    )

    if not bigsuds_found:
        module.fail_json(msg="the python bigsuds module is required")

    server = module.params['server']
    user = module.params['user']
    password = module.params['password']
    validate_certs = module.params['validate_certs']
    state = module.params['state']
    partition = module.params['partition']
    host = module.params['host']
    name = module.params['name']
    # Nodes are addressed by their full path including the partition
    address = "/%s/%s" % (partition, name)
    description = module.params['description']

    if not validate_certs:
        disable_ssl_cert_validation()

    if state == 'absent' and host is not None:
        module.fail_json(msg="host parameter invalid when state=absent")

    try:
        api = bigip_api(server, user, password)
        result = {'changed': False}  # default

        if state == 'absent':
            if node_exists(api, address):
                if not module.check_mode:
                    deleted, desc = delete_node_address(api, address)
                    if not deleted:
                        module.fail_json(msg="unable to delete: %s" % desc)
                    else:
                        result = {'changed': True}
                else:
                    # check-mode return value
                    result = {'changed': True}
        elif state == 'present':
            if not node_exists(api, address):
                if host is None:
                    module.fail_json(msg="host parameter required when "
                                         "state=present and node does not exist")
                if not module.check_mode:
                    created, desc = create_node_address(api, address=host, name=address)
                    if not created:
                        module.fail_json(msg="unable to create: %s" % desc)
                    else:
                        result = {'changed': True}
                    if description is not None:
                        set_node_description(api, address, description)
                        result = {'changed': True}
                else:
                    # check-mode return value
                    result = {'changed': True}
            else:
                # node exists -- potentially modify attributes
                if host is not None:
                    if get_node_address(api, address) != host:
                        module.fail_json(msg="Changing the node address is "
                                             "not supported by the API; "
                                             "delete and recreate the node.")
                if description is not None:
                    if get_node_description(api, address) != description:
                        if not module.check_mode:
                            set_node_description(api, address, description)
                        result = {'changed': True}
    # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works on 2.6+/3.x
    except Exception as e:
        module.fail_json(msg="received exception: %s" % e)

    module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
Add enabled/disabled support to bigip_node
This allows one to enable or disable a node, which is useful when doing
maintenance on a node to prevent new connections from being attempted to it.
Disabling completely disables the node for any pool it might be in.
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Matt Hite <mhite@hotmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_node
short_description: "Manages F5 BIG-IP LTM nodes"
description:
- "Manages F5 BIG-IP LTM nodes via iControl SOAP API"
version_added: "1.4"
author: Matt Hite
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
default: null
choices: []
aliases: []
user:
description:
- BIG-IP username
required: true
default: null
choices: []
aliases: []
password:
description:
- BIG-IP password
required: true
default: null
choices: []
aliases: []
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.9.1
state:
description:
- Pool member state
required: true
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
aliases: []
partition:
description:
- Partition
required: false
default: 'Common'
choices: []
aliases: []
name:
description:
- "Node name. Required when state=enabled/disabled"
required: false
default: null
choices: []
host:
description:
- "Node IP. Required when state=present and node does not exist. Error when state=absent."
required: true
default: null
choices: []
aliases: ['address', 'ip']
description:
description:
- "Node description."
required: false
default: null
choices: []
'''
EXAMPLES = '''
## playbook task examples:
---
# file bigip-test.yml
# ...
- hosts: bigip-test
tasks:
- name: Add node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
name="{{ ansible_default_ipv4["address"] }}"
# Note that the BIG-IP automatically names the node using the
# IP address specified in previous play's host parameter.
# Future plays referencing this node no longer use the host
# parameter but instead use the name parameter.
# Alternatively, you could have specified a name with the
# name parameter when state=present.
- name: Modify node description
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
description="Our best server yet"
- name: Delete node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
- name: Disable node
bigip_node: server=lb.mydomain.com user=admin password=mysecret
state=disabled name=mynodename
delegate_to: localhost
'''
try:
import bigsuds
except ImportError:
bigsuds_found = False
else:
bigsuds_found = True
# ==========================
# bigip_node module specific
#
# Map module 'state' values to iControl session-state constants
STATES = {'enabled': 'STATE_ENABLED',
          'disabled': 'STATE_DISABLED'}

# Map human-readable status names to iControl session-status constants
STATUSES = {'enabled': 'SESSION_STATUS_ENABLED',
            'disabled': 'SESSION_STATUS_DISABLED',
            'offline': 'SESSION_STATUS_FORCED_DISABLED'}
def bigip_api(bigip, user, password):
    """Open an iControl SOAP session against the given BIG-IP host."""
    return bigsuds.BIGIP(hostname=bigip, username=user, password=password)
def disable_ssl_cert_validation():
    """Globally turn off HTTPS certificate verification for this process.

    Intended only for testing against self-signed certificates; never use in
    production. Technique from https://www.python.org/dev/peps/pep-0476/#id29
    """
    import ssl
    ssl._create_default_https_context = ssl._create_unverified_context
def node_exists(api, address):
    """Return True if the named node exists on the BIG-IP.

    iControl has no direct existence query, so probe the node status and
    treat a 'was not found' fault as absence; any other fault is re-raised.
    """
    # FIX: 'except X, e' is Python-2-only syntax (SyntaxError on Python 3).
    result = False
    try:
        api.LocalLB.NodeAddressV2.get_object_status(nodes=[address])
        result = True
    except bigsuds.OperationFailed as e:
        if "was not found" in str(e):
            result = False
        else:
            # genuine exception
            raise
    return result
def create_node_address(api, address, name):
    """Create a node with the given IP address and name.

    Returns a (success, description) tuple; success is False with an
    explanatory description when the name or IP is already in use.
    """
    # FIX: Python-3-compatible 'except ... as e' syntax.
    try:
        api.LocalLB.NodeAddressV2.create(nodes=[name], addresses=[address], limits=[0])
        result = True
        desc = ""
    except bigsuds.OperationFailed as e:
        if "already exists" in str(e):
            result = False
            desc = "referenced name or IP already in use"
        else:
            # genuine exception
            raise
    return (result, desc)
def get_node_address(api, name):
    """Return the IP address configured for the named node."""
    addresses = api.LocalLB.NodeAddressV2.get_address(nodes=[name])
    return addresses[0]
def delete_node_address(api, address):
    """Delete node *address* from the BIG-IP.

    Returns a (deleted, reason) tuple; deletion is refused while the node
    is still referenced by a pool member. Any other OperationFailed is
    re-raised.
    """
    try:
        api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
        result = True
        desc = ""
    # py3-compatible 'as' form of the exception clause (fixes py2-only syntax)
    except bigsuds.OperationFailed as e:
        if "is referenced by a member of pool" in str(e):
            result = False
            desc = "node referenced by pool"
        else:
            # genuine exception
            raise
    return (result, desc)
def set_node_description(api, name, description):
    """Set the free-form description string on node *name*."""
    api.LocalLB.NodeAddressV2.set_description(
        nodes=[name], descriptions=[description])
def get_node_description(api, name):
    """Return the description string configured on node *name*."""
    descriptions = api.LocalLB.NodeAddressV2.get_description(nodes=[name])
    return descriptions[0]
def set_node_disabled(api, name):
    """Force node *name* into the disabled session state.

    Always returns (True, "") to mirror the other mutators' signature.
    """
    set_node_session_enabled_state(api, name, STATES['disabled'])
    return (True, "")
def set_node_enabled(api, name):
    """Put node *name* into the enabled session state.

    Always returns (True, "") to mirror the other mutators' signature.
    """
    set_node_session_enabled_state(api, name, STATES['enabled'])
    return (True, "")
def set_node_session_enabled_state(api, name, state):
    """Apply iControl session-enabled *state* (a STATE_* constant) to *name*."""
    api.LocalLB.NodeAddressV2.set_session_enabled_state(
        nodes=[name], states=[state])
def get_node_session_status(api, name):
    """Return the SESSION_STATUS_* string reported for node *name*."""
    statuses = api.LocalLB.NodeAddressV2.get_session_status(nodes=[name])
    return statuses[0]
def main():
    """Ansible entry point: create, delete, enable or disable a BIG-IP node.

    Parses the module arguments, talks to the BIG-IP through bigsuds and
    exits via module.exit_json()/fail_json(). Supports check mode.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server=dict(type='str', required=True),
            user=dict(type='str', required=True),
            password=dict(type='str', required=True),
            validate_certs=dict(default='yes', type='bool'),
            state=dict(type='str', default='present',
                       choices=['present', 'absent', 'disabled', 'enabled']),
            partition=dict(type='str', default='Common'),
            name=dict(type='str', required=True),
            host=dict(type='str', aliases=['address', 'ip']),
            description=dict(type='str')
        ),
        supports_check_mode=True
    )
    if not bigsuds_found:
        module.fail_json(msg="the python bigsuds module is required")
    server = module.params['server']
    user = module.params['user']
    password = module.params['password']
    validate_certs = module.params['validate_certs']
    state = module.params['state']
    partition = module.params['partition']
    host = module.params['host']
    name = module.params['name']
    # nodes are addressed on the BIG-IP by their full folder path
    address = "/%s/%s" % (partition, name)
    description = module.params['description']
    if not validate_certs:
        disable_ssl_cert_validation()
    if state == 'absent' and host is not None:
        module.fail_json(msg="host parameter invalid when state=absent")
    try:
        api = bigip_api(server, user, password)
        result = {'changed': False}  # default
        if state == 'absent':
            if node_exists(api, address):
                if not module.check_mode:
                    deleted, desc = delete_node_address(api, address)
                    if not deleted:
                        module.fail_json(msg="unable to delete: %s" % desc)
                    else:
                        result = {'changed': True}
                else:
                    # check-mode return value
                    result = {'changed': True}
        elif state == 'present':
            if not node_exists(api, address):
                if host is None:
                    module.fail_json(msg="host parameter required when " \
                                         "state=present and node does not exist")
                if not module.check_mode:
                    created, desc = create_node_address(api, address=host, name=address)
                    if not created:
                        module.fail_json(msg="unable to create: %s" % desc)
                    else:
                        result = {'changed': True}
                    if description is not None:
                        set_node_description(api, address, description)
                        result = {'changed': True}
                else:
                    # check-mode return value
                    result = {'changed': True}
            else:
                # node exists -- potentially modify attributes
                if host is not None:
                    if get_node_address(api, address) != host:
                        module.fail_json(msg="Changing the node address is " \
                                             "not supported by the API; " \
                                             "delete and recreate the node.")
                if description is not None:
                    if get_node_description(api, address) != description:
                        if not module.check_mode:
                            set_node_description(api, address, description)
                        result = {'changed': True}
        elif state in ('disabled', 'enabled'):
            if name is None:
                module.fail_json(msg="name parameter required when " \
                                     "state=enabled/disabled")
            if not module.check_mode:
                if not node_exists(api, name):
                    module.fail_json(msg="node does not exist")
                status = get_node_session_status(api, name)
                if state == 'disabled':
                    # 'offline' already implies disabled, so leave it alone
                    if status not in (STATUSES['disabled'], STATUSES['offline']):
                        disabled, desc = set_node_disabled(api, name)
                        if not disabled:
                            module.fail_json(msg="unable to disable: %s" % desc)
                        else:
                            result = {'changed': True}
                else:
                    if status != STATUSES['enabled']:
                        enabled, desc = set_node_enabled(api, name)
                        if not enabled:
                            module.fail_json(msg="unable to enable: %s" % desc)
                        else:
                            result = {'changed': True}
            else:
                # check-mode return value
                result = {'changed': True}
    # 'except Exception as e' replaces the Python-2-only comma form; the
    # module already requires >= 2.7.9 (ssl._create_unverified_context)
    except Exception as e:
        module.fail_json(msg="received exception: %s" % e)
    module.exit_json(**result)
# import module snippets
# Ansible replaces this wildcard import with the module_utils boilerplate
# (providing AnsibleModule) when shipping the module to the target host;
# old-style modules then invoke main() unconditionally.
from ansible.module_utils.basic import *
main()
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views for Student Project.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
import logging
import time
from django import forms
from django import http
from django.utils.translation import ugettext
from soc.logic import cleaning
from soc.logic import dicts
from soc.views import out_of_band
from soc.views.helper import decorators
from soc.views.helper import dynaform
from soc.views.helper import forms as forms_helper
from soc.views.helper import lists
from soc.views.helper import params as params_helper
from soc.views.helper import redirects
from soc.views.helper import responses
from soc.views.helper import widgets
from soc.views.models import base
from soc.modules.gsoc.logic.models import student as student_logic
from soc.modules.gsoc.logic.models.mentor import logic as mentor_logic
from soc.modules.gsoc.logic.models.org_admin import logic as org_admin_logic
from soc.modules.gsoc.logic.models.organization import logic as org_logic
from soc.modules.gsoc.logic.models.program import logic as program_logic
from soc.modules.gsoc.logic.models.student_project import logic as \
project_logic
from soc.modules.gsoc.views.helper import access
from soc.modules.gsoc.views.models import organization as org_view
class View(base.View):
"""View methods for the Student Project model.
"""
    def __init__(self, params=None):
        """Defines the fields and methods required for the base View class
        to provide the user with list, public, create, edit and delete views.

        Params:
          params: a dict with params for this View
        """
        # Access control: create/edit/delete/list are developer-only; the
        # manage/st_edit/withdraw views layer role- and status-based checks.
        rights = access.GSoCChecker(params)
        rights['any_access'] = ['allow']
        rights['create'] = ['checkIsDeveloper']
        rights['edit'] = ['checkIsDeveloper']
        rights['delete'] = ['checkIsDeveloper']
        rights['show'] = ['allow']
        rights['list'] = ['checkIsDeveloper']
        rights['manage'] = [('checkHasRoleForScope',
                             [org_admin_logic, ['active', 'inactive']]),
            ('checkStudentProjectHasStatus', [['accepted', 'failed', 'completed',
                'withdrawn']])]
        rights['manage_overview'] = [('checkHasRoleForScope', [
            org_admin_logic, ['active', 'inactive']])]
        # TODO: lack of better name here!
        rights['st_edit'] = [
            'checkCanEditStudentProjectAsStudent',
            ('checkStudentProjectHasStatus',
             [['accepted', 'completed']])
            ]
        rights['withdraw'] = ['checkIsHostForProgram']
        rights['withdraw_project'] = ['checkIsHostForStudentProject',
            ('checkStudentProjectHasStatus',
             [['accepted', 'completed']])
            ]
        rights['accept_project'] = ['checkIsHostForStudentProject',
            ('checkStudentProjectHasStatus',
             [['withdrawn']])
            ]
        # Core view configuration consumed by soc.views.models.base.View.
        new_params = {}
        new_params['logic'] = project_logic
        new_params['rights'] = rights
        new_params['name'] = "Student Project"
        new_params['url_name'] = "gsoc/student_project"
        new_params['module_package'] = 'soc.modules.gsoc.views.models'
        new_params['sidebar_grouping'] = 'Students'
        new_params['scope_view'] = org_view
        new_params['scope_redirect'] = redirects.getCreateRedirect
        new_params['no_create_with_key_fields'] = True
        # Fields managed programmatically (in _editPost) rather than by forms.
        new_params['extra_dynaexclude'] = ['program', 'status', 'link_id',
                                           'mentor', 'additional_mentors',
                                           'student', 'passed_evaluations',
                                           'failed_evaluations']
        # Extra form fields and cleaners for the admin create form; student
        # and mentor are entered as link IDs and resolved in _editPost.
        new_params['create_extra_dynaproperties'] = {
            'scope_path': forms.CharField(widget=forms.HiddenInput,
                                          required=True),
            'public_info': forms.fields.CharField(required=True,
                widget=widgets.FullTinyMCE(attrs={'rows': 25, 'cols': 100})),
            'student_id': forms.CharField(label='Student Link ID',
                                          required=True),
            'mentor_id': forms.CharField(label='Mentor Link ID',
                                         required=True),
            'clean_abstract': cleaning.clean_content_length('abstract'),
            'clean_public_info': cleaning.clean_html_content('public_info'),
            'clean_student': cleaning.clean_link_id('student'),
            'clean_mentor': cleaning.clean_link_id('mentor'),
            'clean_additional_info': cleaning.clean_url('additional_info'),
            'clean_feed_url': cleaning.clean_feed_url,
            'clean': cleaning.validate_student_project('scope_path',
                'mentor_id', 'student_id')
            }
        new_params['edit_extra_dynaproperties'] = {
            'link_id': forms.CharField(widget=forms.HiddenInput),
            }
        # URL patterns for the custom manage/withdraw/accept/st_edit views
        # defined on this class (beyond the CRUD set provided by base.View).
        patterns = [
            (r'^%(url_name)s/(?P<access_type>manage_overview)/%(scope)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.manage_overview',
             'Overview of %(name_plural)s to Manage for'),
            (r'^%(url_name)s/(?P<access_type>manage)/%(key_fields)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.manage',
             'Manage %(name)s'),
            (r'^%(url_name)s/(?P<access_type>st_edit)/%(key_fields)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.st_edit',
             'Edit my %(name)s'),
            (r'^%(url_name)s/(?P<access_type>withdraw)/(?P<scope_path>%(ulnp)s)/%(lnp)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.withdraw',
             'Withdraw %(name_plural)s'),
            (r'^%(url_name)s/(?P<access_type>withdraw_project)/%(key_fields)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.withdraw_project',
             'Withdraw a %(name)s'),
            (r'^%(url_name)s/(?P<access_type>accept_project)/%(key_fields)s$',
             'soc.modules.gsoc.views.models.%(module_name)s.accept_project',
             'Accept a %(name)s'),
        ]
        new_params['extra_django_patterns'] = patterns
        new_params['edit_template'] = 'soc/student_project/edit.html'
        new_params['manage_template'] = 'soc/student_project/manage.html'
        new_params['manage_overview_heading'] = \
            'soc/student_project/list/heading_manage.html'
        new_params['manage_overview_row'] = \
            'soc/student_project/list/row_manage.html'
        # caller-supplied params take precedence over the defaults above
        params = dicts.merge(params, new_params)
        super(View, self).__init__(params=params)
        # create the form that students will use to edit their projects
        dynaproperties = {
            'public_info': forms.fields.CharField(required=True,
                widget=widgets.FullTinyMCE(attrs={'rows': 25, 'cols': 100})),
            'clean_abstract': cleaning.clean_content_length('abstract'),
            'clean_public_info': cleaning.clean_html_content('public_info'),
            'clean_additional_info': cleaning.clean_url('additional_info'),
            'clean_feed_url': cleaning.clean_feed_url,
            }
        student_edit_form = dynaform.newDynaForm(
            dynabase = self._params['dynabase'],
            dynamodel = self._params['logic'].getModel(),
            dynaexclude = self._params['create_dynaexclude'],
            dynaproperties = dynaproperties,
        )
        self._params['student_edit_form'] = student_edit_form
def _editGet(self, request, entity, form):
"""See base.View._editGet().
"""
form.fields['link_id'].initial = entity.link_id
form.fields['student_id'].initial = entity.student.link_id
form.fields['mentor_id'].initial = entity.mentor.link_id
return super(View, self)._editGet(request, entity, form)
    def _editPost(self, request, entity, fields):
        """See base.View._editPost().

        Derives link_id, program, student and mentor entities from the
        submitted fields before the project is saved.
        """
        if not entity:
            # new project: generate a unique, time-based link_id
            fields['link_id'] = 't%i' % (int(time.time()*100))
        else:
            fields['link_id'] = entity.link_id
        # fill in the scope via call to super
        super(View, self)._editPost(request, entity, fields)
        # editing a project so set the program, student and mentor field
        if entity:
            organization = entity.scope
        else:
            organization = fields['scope']
        fields['program'] = organization.scope
        # resolve the submitted student link_id to the student entity
        filter = {'scope': fields['program'],
                  'link_id': fields['student_id']}
        fields['student'] = student_logic.logic.getForFields(filter, unique=True)
        # only an active mentor of this organization may be assigned
        filter = {'scope': organization,
                  'link_id': fields['mentor_id'],
                  'status': 'active'}
        fields['mentor'] = mentor_logic.getForFields(filter, unique=True)
def _public(self, request, entity, context):
"""Adds the names of all additional mentors to the context.
For params see base.View._public()
"""
additional_mentors = entity.additional_mentors
if not additional_mentors:
context['additional_mentors'] = []
else:
mentor_names = []
for mentor_key in additional_mentors:
additional_mentor = mentor_logic.getFromKeyName(
mentor_key.id_or_name())
mentor_names.append(additional_mentor.name())
context['additional_mentors'] = ', '.join(mentor_names)
    @decorators.merge_params
    @decorators.check_access
    def withdraw(self, request, access_type,
                 page_name=None, params=None, **kwargs):
        """View that allows Program Admins to accept or withdraw Student Projects.

        For params see base.View().public()
        """
        # TODO: convert this view's list actions to use POST instead of GET
        # links, since following the link mutates project state.
        program = program_logic.getFromKeyFieldsOr404(kwargs)
        fields = {
            'program': program,
            'status': ['accepted', 'completed'],
        }
        # first list: accepted/completed projects whose row action withdraws
        ap_params = params.copy()  # accepted projects
        ap_params['list_action'] = (redirects.getWithdrawProjectRedirect,
                                    ap_params)
        ap_params['list_description'] = ugettext(
            "An overview of accepted and completed Projects. "
            "Click on a project to withdraw it.")
        ap_list = lists.getListContent(
            request, ap_params, fields, idx=0)
        # second list: withdrawn projects whose row action re-accepts
        fields['status'] = ['withdrawn']
        wp_params = params.copy()  # withdrawn projects
        wp_params['list_action'] = (redirects.getAcceptProjectRedirect, wp_params)
        wp_params['list_description'] = ugettext(
            "An overview of withdrawn Projects. "
            "Click on a project to undo the withdrawal.")
        wp_list = lists.getListContent(
            request, wp_params, fields, idx=1)
        # fill contents with all the needed lists
        contents = [ap_list, wp_list]
        # call the _list method from base to display the list
        return self._list(request, params, contents, page_name)
@decorators.merge_params
@decorators.check_access
def withdrawProject(self, request, access_type,
page_name=None, params=None, **kwargs):
"""View that allows Program Admins to withdraw Student Projects.
For params see base.View().public()
"""
logic = params['logic']
entity = logic.getFromKeyFieldsOr404(kwargs)
fields = {
'status': 'withdrawn',
}
logic.updateEntityProperties(entity, fields)
url = redirects.getWithdrawRedirect(entity.program, params)
return http.HttpResponseRedirect(url)
@decorators.merge_params
@decorators.check_access
def acceptProject(self, request, access_type,
page_name=None, params=None, **kwargs):
"""View that allows Program Admins to accept Student Projects.
For params see base.View().public()
"""
logic = params['logic']
entity = logic.getFromKeyFieldsOr404(kwargs)
fields = {
'status': 'accepted',
}
logic.updateEntityProperties(entity, fields)
url = redirects.getWithdrawRedirect(entity.program, params)
return http.HttpResponseRedirect(url)
    @decorators.merge_params
    @decorators.check_access
    def manage(self, request, access_type,
               page_name=None, params=None, **kwargs):
        """View that allows Organization Admins to manage their Student Projects.

        For params see base.View().public()
        """
        try:
            entity = self._logic.getFromKeyFieldsOr404(kwargs)
        # NOTE: 'except X, error' is the Python 2.5 spelling required by this
        # codebase (see the python2.5 shebang); do not modernize to 'as'.
        except out_of_band.Error, error:
            return responses.errorResponse(
                error, request, template=params['error_public'])
        template = params['manage_template']
        # get the context for this webpage
        context = responses.getUniversalContext(request)
        responses.useJavaScript(context, params['js_uses_all'])
        context['page_name'] = "%s '%s' from %s" % (page_name, entity.title,
                                                    entity.student.name())
        context['entity'] = entity
        if project_logic.canChangeMentors(entity):
            # only accepted project can have their mentors managed
            self._enableMentorManagement(entity, params, context)
        context['evaluation_list'] = self._getEvaluationLists(request, params,
                                                              entity)
        # dispatch on HTTP method: POST mutates mentors, GET renders the page
        if request.POST:
            return self.managePost(request, template, context, params, entity,
                                   **kwargs)
        else: #request.GET
            return self.manageGet(request, template, context, params, entity,
                                  **kwargs)
    def _enableMentorManagement(self, entity, params, context):
        """Sets the data required to manage mentors for a StudentProject.

        Builds two forms: one to reassign the primary mentor and one to add
        a co-mentor, and stores them in params for managePost/manageGet.

        Args:
          entity: StudentProject entity to manage
          params: params dict for the manage view
          context: context for the manage view
        """
        context['can_manage_mentors'] = True
        # get all mentors for this organization
        fields = {'scope': entity.scope,
                  'status': 'active'}
        mentors = mentor_logic.getForFields(fields)
        # choice tuples are (link_id, "Name (link_id)")
        choices = [(mentor.link_id,'%s (%s)' %(mentor.name(), mentor.link_id))
                   for mentor in mentors]
        # create the form that org admins will use to reassign a mentor
        dynafields = [
            {'name': 'mentor_id',
             'base': forms.ChoiceField,
             'label': 'Primary Mentor',
             'required': True,
             'passthrough': ['required', 'choices', 'label'],
             'choices': choices,
            },]
        dynaproperties = params_helper.getDynaFields(dynafields)
        mentor_edit_form = dynaform.newDynaForm(
            dynabase = params['dynabase'],
            dynaproperties = dynaproperties,
        )
        params['mentor_edit_form'] = mentor_edit_form
        additional_mentors = entity.additional_mentors
        # we want to show the names of the additional mentors in the context
        # therefore they need to be resolved to entities first
        additional_mentors_context = []
        for mentor_key in additional_mentors:
            mentor_entity = mentor_logic.getFromKeyName(
                mentor_key.id_or_name())
            additional_mentors_context.append(mentor_entity)
        context['additional_mentors'] = additional_mentors_context
        # all mentors who are not already an additional mentor or
        # the primary mentor are allowed to become an additional mentor
        possible_additional_mentors = [m for m in mentors if
            (m.key() not in additional_mentors)
            and (m.key() != entity.mentor.key())]
        # create the information to be shown on the additional mentor form
        additional_mentor_choices = [
            (mentor.link_id,'%s (%s)' %(mentor.name(), mentor.link_id))
            for mentor in possible_additional_mentors]
        dynafields = [
            {'name': 'mentor_id',
             'base': forms.ChoiceField,
             'label': 'Co-Mentor',
             'required': True,
             'passthrough': ['required', 'choices', 'label'],
             'choices': additional_mentor_choices,
            },]
        dynaproperties = params_helper.getDynaFields(dynafields)
        additional_mentor_form = dynaform.newDynaForm(
            dynabase = params['dynabase'],
            dynaproperties = dynaproperties,
        )
        params['additional_mentor_form'] = additional_mentor_form
def _getEvaluationLists(self, request, params, entity):
"""Returns List Object containing the list to be shown on the Student
Project's manage page.
This list contains all Surveys that have at least one record and will also
contain information about the presence (or absence) of a accompanying
record for the given Student Project.
Args:
request: Django HTTP Request Object
params: the params dict for this View
entity: a StudentProject entity for which the Surveys(Records) should be
retrieved
Returns:
A List Object as specified by this method.
"""
import soc.logic.lists
from soc.modules.gsoc.views.helper import list_info
from soc.modules.gsoc.views.models.grading_project_survey import view as \
grading_survey_view
from soc.modules.gsoc.views.models.project_survey import view as \
project_survey_view
fields = {'scope_path': entity.program.key().id_or_name()}
# get the GradingProjectSurvey list
gps_params = grading_survey_view.getParams().copy()
gps_params['list_key_order'] = None
gps_params['list_heading'] = gps_params['manage_student_project_heading']
gps_params['list_row'] = gps_params['manage_student_project_row']
# list all surveys for this Project's Program
fields['scope_path'] = entity.program.key().id_or_name()
gps_params['list_description'] = \
'List of all Mentor Evaluations for this Project'
gps_params['list_action'] = None
gps_list = lists.getListContent(
request, gps_params, fields, idx=0)
list_info.setProjectSurveyInfoForProject(gps_list, entity, gps_params)
# get the ProjectSurvey list
ps_params = project_survey_view.getParams().copy()
ps_params['list_key_order'] = None
ps_params['list_heading'] = ps_params['manage_student_project_heading']
ps_params['list_row'] = ps_params['manage_student_project_row']
ps_params['list_description'] = \
'List of all Student Evaluations for this Project'
ps_params['list_action'] = None
# list all surveys for this Project's Program
fields['scope_path'] = entity.program.key().id_or_name()
ps_list = lists.getListContent(
request, ps_params, fields, idx=1)
list_info.setProjectSurveyInfoForProject(ps_list, entity, ps_params)
# store both lists in the content
content = [gps_list, ps_list]
for list in content:
# remove all the surveys that have no records attached
list['data'] = [i for i in list['data'] if
list['logic'].hasRecord(i)]
# return the List Object with the filtered list content
return soc.logic.lists.Lists(content)
    def manageGet(self, request, template, context, params, entity, **kwargs):
        """Handles the GET request for the project's manage page.

        A ?remove=<link_id> query argument removes that mentor from the
        project's additional mentors (accepted projects only) and redirects.

        Args:
          template: the template used for this view
          entity: the student project entity
          rest: see base.View.public()
        """
        get_dict = request.GET
        if 'remove' in get_dict and entity.status == 'accepted':
            # get the mentor to remove
            fields = {'link_id': get_dict['remove'],
                      'scope': entity.scope}
            mentor = mentor_logic.getForFields(fields, unique=True)
            additional_mentors = entity.additional_mentors
            # pylint: disable-msg=E1103
            if additional_mentors and mentor.key() in additional_mentors:
                # remove the mentor from the additional mentors list
                additional_mentors.remove(mentor.key())
                fields = {'additional_mentors': additional_mentors}
                project_logic.updateEntityProperties(entity, fields)
            # redirect to the same page without GET arguments
            redirect = request.path
            return http.HttpResponseRedirect(redirect)
        if project_logic.canChangeMentors(entity):
            # populate forms with the current mentors set
            initial = {'mentor_id': entity.mentor.link_id}
            context['mentor_edit_form'] = params['mentor_edit_form'](initial=initial)
            context['additional_mentor_form'] = params['additional_mentor_form']()
        return responses.respond(request, template, context)
    def managePost(self, request, template, context, params, entity, **kwargs):
        """Handles the POST request for the project's manage page.

        Dispatches to the set-primary-mentor or add-co-mentor handler based
        on which submit button is present in the POST data.

        Args:
          template: the template used for this view
          entity: the student project entity
          rest: see base.View.public()
        """
        post_dict = request.POST
        if 'set_mentor' in post_dict and project_logic.canChangeMentors(entity):
            form = params['mentor_edit_form'](post_dict)
            return self._manageSetMentor(request, template, context, params, entity,
                                         form)
        elif 'add_additional_mentor' in post_dict and \
            project_logic.canChangeMentors(entity):
            form = params['additional_mentor_form'](post_dict)
            return self._manageAddAdditionalMentor(request, template, context,
                                                   params, entity, form)
        else:
            # unexpected error return the normal page
            logging.warning('Unexpected POST data found')
            return self.manageGet(request, template, context, params, entity)
    def _manageSetMentor(self, request, template, context, params, entity, form):
        """Handles the POST request for changing a Projects's mentor.

        Args:
          template: the template used for this view
          entity: the student project entity
          form: instance of the form used to set the mentor
          rest: see base.View.public()
        """
        if not form.is_valid():
            # re-render the page with the validation errors visible
            context['mentor_edit_form'] = form
            # add an a fresh additional mentors form
            context['additional_mentor_form'] = params['additional_mentor_form']()
            return responses.respond(request, template, context)
        _, fields = forms_helper.collectCleanedFields(form)
        # get the mentor from the form
        fields = {'link_id': fields['mentor_id'],
                  'scope': entity.scope,
                  'status': 'active'}
        mentor = mentor_logic.getForFields(fields, unique=True)
        # update the project with the assigned mentor
        fields = {'mentor': mentor}
        additional_mentors = entity.additional_mentors
        # pylint: disable-msg=E1103
        if additional_mentors and mentor.key() in additional_mentors:
            # remove the mentor that is now becoming the primary mentor
            additional_mentors.remove(mentor.key())
            fields['additional_mentors'] = additional_mentors
        # update the project with the new mentor and possible
        # new set of additional mentors
        project_logic.updateEntityProperties(entity, fields)
        # redirect to the same page
        redirect = request.path
        return http.HttpResponseRedirect(redirect)
    def _manageAddAdditionalMentor(self, request, template,
                                   context, params, entity, form):
        """Handles the POST request for changing a Projects's additional mentors.

        Args:
          template: the template used for this view
          entity: the student project entity
          form: instance of the form used to add an additional mentor
          rest: see base.View.public()
        """
        if not form.is_valid():
            # re-render the page with the validation errors visible
            context['additional_mentor_form'] = form
            # add a fresh edit mentor form
            initial = {'mentor_id': entity.mentor.link_id}
            context['mentor_edit_form'] = params['mentor_edit_form'](initial=initial)
            return responses.respond(request, template, context)
        _, fields = forms_helper.collectCleanedFields(form)
        # get the mentor from the form
        fields = {'link_id': fields['mentor_id'],
                  'scope': entity.scope,
                  'status': 'active'}
        mentor = mentor_logic.getForFields(fields, unique=True)
        # add this mentor to the additional mentors
        if not entity.additional_mentors:
            additional_mentors = [mentor.key()]
        else:
            additional_mentors = entity.additional_mentors
            additional_mentors.append(mentor.key())
        fields = {'additional_mentors': additional_mentors}
        project_logic.updateEntityProperties(entity, fields)
        # redirect to the same page
        redirect = request.path
        return http.HttpResponseRedirect(redirect)
@decorators.merge_params
@decorators.check_access
def manageOverview(self, request, access_type,
page_name=None, params=None, **kwargs):
"""View that allows Organization Admins to see an overview of
their Organization's Student Projects.
For params see base.View().public()
"""
from soc.modules.gsoc.views.helper import list_info
# make sure the organization exists
org_entity = org_logic.getFromKeyNameOr404(kwargs['scope_path'])
fields = {'scope': org_entity}
# get the context for this webpage
context = responses.getUniversalContext(request)
responses.useJavaScript(context, params['js_uses_all'])
context['page_name'] = '%s %s' % (page_name, org_entity.name)
prefetch = ['student', 'mentor']
list_params = params.copy()
list_params['list_heading'] = params['manage_overview_heading']
list_params['list_row'] = params['manage_overview_row']
#list all active projects
fields['status'] = 'accepted'
active_params = list_params.copy()
active_params['list_description'] = \
'List of all active %(name_plural)s' % list_params
active_params['list_action'] = (redirects.getManageRedirect, list_params)
active_list = lists.getListContent(request, active_params, fields, idx=0,
prefetch=prefetch)
# set the needed info
active_list = list_info.setStudentProjectSurveyInfo(active_list,
org_entity.scope)
# list all failed projects
fields['status'] = 'failed'
failed_params = list_params.copy()
failed_params['list_description'] = ('List of all %(name_plural)s who '
'failed the program.') % list_params
failed_params['list_action'] = (redirects.getManageRedirect, list_params)
failed_list = lists.getListContent(request, failed_params, fields, idx=1,
need_content=True, prefetch=prefetch)
# set the needed info
failed_list = list_info.setStudentProjectSurveyInfo(failed_list,
org_entity.scope)
# list all completed projects
fields['status'] = 'completed'
completed_params = list_params.copy()
completed_params['list_description'] = (
'List of %(name_plural)s that have successfully completed the '
'program.' % list_params)
completed_params['list_action'] = (redirects.getManageRedirect, list_params)
completed_list = lists.getListContent(request, completed_params, fields,
idx=2, need_content=True,
prefetch=prefetch)
# set the needed info
completed_list = list_info.setStudentProjectSurveyInfo(completed_list,
org_entity.scope)
# list all withdrawn projects
fields['status'] = 'withdrawn'
withdrawn_params = list_params.copy()
withdrawn_params['list_description'] = (
'List of %(name_plural)s that have withdrawn from the program.' %(
list_params))
withdrawn_params['list_action'] = (redirects.getManageRedirect, list_params)
withdrawn_list = lists.getListContent(request, withdrawn_params, fields,
idx=3, need_content=True,
prefetch=prefetch)
# set the needed info
withdrawn_list = list_info.setStudentProjectSurveyInfo(withdrawn_list,
org_entity.scope)
# always show the list with active projects
content = [active_list]
if failed_list != None:
# do not show empty failed list
content.append(failed_list)
if completed_list != None:
# do not show empty completed list
content.append(completed_list)
if withdrawn_list != None:
# do not show empty withdrawn list
content.append(withdrawn_list)
# call the _list method from base to display the list
return self._list(request, list_params, content,
context['page_name'], context)
    @decorators.merge_params
    @decorators.check_access
    def stEdit(self, request, access_type,
               page_name=None, params=None, **kwargs):
        """View that allows students to edit information about their project.

        For params see base.View().public()
        """
        try:
            entity = self._logic.getFromKeyFieldsOr404(kwargs)
        # NOTE: 'except X, error' is the Python 2.5 spelling required by this
        # codebase (see the python2.5 shebang); do not modernize to 'as'.
        except out_of_band.Error, error:
            return responses.errorResponse(
                error, request, template=params['error_public'])
        # get the context for this webpage
        context = responses.getUniversalContext(request)
        responses.useJavaScript(context, params['js_uses_all'])
        context['page_name'] = page_name
        # cancel should go to the public view
        params['cancel_redirect'] = redirects.getPublicRedirect(entity, params)
        # dispatch on HTTP method: POST saves the form, GET renders it
        if request.POST:
            return self.stEditPost(request, context, params, entity, **kwargs)
        else: #request.GET
            return self.stEditGet(request, context, params, entity, **kwargs)
def stEditGet(self, request, context, params, entity, **kwargs):
"""Handles the GET request for the student's edit page.
Args:
entity: the student project entity
rest: see base.View.public()
"""
# populate form with the existing entity
form = params['student_edit_form'](instance=entity)
return self._constructResponse(request, entity, context, form, params)
def stEditPost(self, request, context, params, entity, **kwargs):
"""Handles the POST request for the student's edit page.
Args:
entity: the student project entity
rest: see base.View.public()
"""
form = params['student_edit_form'](request.POST)
if not form.is_valid():
return self._constructResponse(request, entity, context, form, params)
_, fields = forms_helper.collectCleanedFields(form)
project_logic.updateEntityProperties(entity, fields)
return self.stEditGet(request, context, params, entity, **kwargs)
# Instantiate the view once and expose each public entry point as a Django
# view function; the decorators.view wrapper turns out_of_band errors into
# proper HTTP responses.
view = View()
accept_project = decorators.view(view.acceptProject)
admin = decorators.view(view.admin)
create = decorators.view(view.create)
delete = decorators.view(view.delete)
edit = decorators.view(view.edit)
list = decorators.view(view.list)
manage = decorators.view(view.manage)
manage_overview = decorators.view(view.manageOverview)
public = decorators.view(view.public)
st_edit = decorators.view(view.stEdit)
export = decorators.view(view.export)
pick = decorators.view(view.pick)
withdraw = decorators.view(view.withdraw)
withdraw_project = decorators.view(view.withdrawProject)
Add TODOs to convert student_project.withdraw to use POST
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Views for Student Project.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
import logging
import time
from django import forms
from django import http
from django.utils.translation import ugettext
from soc.logic import cleaning
from soc.logic import dicts
from soc.views import out_of_band
from soc.views.helper import decorators
from soc.views.helper import dynaform
from soc.views.helper import forms as forms_helper
from soc.views.helper import lists
from soc.views.helper import params as params_helper
from soc.views.helper import redirects
from soc.views.helper import responses
from soc.views.helper import widgets
from soc.views.models import base
from soc.modules.gsoc.logic.models import student as student_logic
from soc.modules.gsoc.logic.models.mentor import logic as mentor_logic
from soc.modules.gsoc.logic.models.org_admin import logic as org_admin_logic
from soc.modules.gsoc.logic.models.organization import logic as org_logic
from soc.modules.gsoc.logic.models.program import logic as program_logic
from soc.modules.gsoc.logic.models.student_project import logic as \
project_logic
from soc.modules.gsoc.views.helper import access
from soc.modules.gsoc.views.models import organization as org_view
class View(base.View):
  """View methods for the Student Project model.

  Besides the standard list/public/create/edit/delete views supplied by
  base.View, this View wires up the project-specific manage,
  manage_overview, st_edit, withdraw, withdraw_project and accept_project
  views (see the URL patterns built in __init__).
  """

  def __init__(self, params=None):
    """Defines the fields and methods required for the base View class
    to provide the user with list, public, create, edit and delete views.

    Params:
      params: a dict with params for this View
    """
    # Access-control rules: each key is a view name, each value a list of
    # checks evaluated by access.GSoCChecker before the view runs.
    rights = access.GSoCChecker(params)
    rights['any_access'] = ['allow']
    rights['create'] = ['checkIsDeveloper']
    rights['edit'] = ['checkIsDeveloper']
    rights['delete'] = ['checkIsDeveloper']
    rights['show'] = ['allow']
    rights['list'] = ['checkIsDeveloper']
    rights['manage'] = [('checkHasRoleForScope',
                         [org_admin_logic, ['active', 'inactive']]),
        ('checkStudentProjectHasStatus', [['accepted', 'failed', 'completed',
            'withdrawn']])]
    rights['manage_overview'] = [('checkHasRoleForScope', [
        org_admin_logic, ['active', 'inactive']])]
    # TODO: lack of better name here!
    rights['st_edit'] = [
        'checkCanEditStudentProjectAsStudent',
        ('checkStudentProjectHasStatus',
            [['accepted', 'completed']])
        ]
    rights['withdraw'] = ['checkIsHostForProgram']
    rights['withdraw_project'] = ['checkIsHostForStudentProject',
        ('checkStudentProjectHasStatus',
            [['accepted', 'completed']])
        ]
    rights['accept_project'] = ['checkIsHostForStudentProject',
        ('checkStudentProjectHasStatus',
            [['withdrawn']])
        ]

    # View configuration merged over the caller-supplied params below.
    new_params = {}
    new_params['logic'] = project_logic
    new_params['rights'] = rights
    new_params['name'] = "Student Project"
    new_params['url_name'] = "gsoc/student_project"
    new_params['module_package'] = 'soc.modules.gsoc.views.models'
    new_params['sidebar_grouping'] = 'Students'

    new_params['scope_view'] = org_view
    new_params['scope_redirect'] = redirects.getCreateRedirect

    new_params['no_create_with_key_fields'] = True

    # Fields managed programmatically (in _editPost) rather than via forms.
    new_params['extra_dynaexclude'] = ['program', 'status', 'link_id',
                                      'mentor', 'additional_mentors',
                                      'student', 'passed_evaluations',
                                      'failed_evaluations']

    new_params['create_extra_dynaproperties'] = {
        'scope_path': forms.CharField(widget=forms.HiddenInput,
            required=True),
        'public_info': forms.fields.CharField(required=True,
            widget=widgets.FullTinyMCE(attrs={'rows': 25, 'cols': 100})),
        'student_id': forms.CharField(label='Student Link ID',
            required=True),
        'mentor_id': forms.CharField(label='Mentor Link ID',
            required=True),
        'clean_abstract': cleaning.clean_content_length('abstract'),
        'clean_public_info': cleaning.clean_html_content('public_info'),
        'clean_student': cleaning.clean_link_id('student'),
        'clean_mentor': cleaning.clean_link_id('mentor'),
        'clean_additional_info': cleaning.clean_url('additional_info'),
        'clean_feed_url': cleaning.clean_feed_url,
        'clean': cleaning.validate_student_project('scope_path',
            'mentor_id', 'student_id')
        }

    new_params['edit_extra_dynaproperties'] = {
        'link_id': forms.CharField(widget=forms.HiddenInput),
        }

    # Extra URL patterns beyond the standard CRUD set; the %(...)s tokens
    # are expanded against params by the base params helper machinery.
    patterns = [
        (r'^%(url_name)s/(?P<access_type>manage_overview)/%(scope)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.manage_overview',
        'Overview of %(name_plural)s to Manage for'),
        (r'^%(url_name)s/(?P<access_type>manage)/%(key_fields)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.manage',
        'Manage %(name)s'),
        (r'^%(url_name)s/(?P<access_type>st_edit)/%(key_fields)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.st_edit',
        'Edit my %(name)s'),
        (r'^%(url_name)s/(?P<access_type>withdraw)/(?P<scope_path>%(ulnp)s)/%(lnp)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.withdraw',
        'Withdraw %(name_plural)s'),
        (r'^%(url_name)s/(?P<access_type>withdraw_project)/%(key_fields)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.withdraw_project',
        'Withdraw a %(name)s'),
        (r'^%(url_name)s/(?P<access_type>accept_project)/%(key_fields)s$',
        'soc.modules.gsoc.views.models.%(module_name)s.accept_project',
        'Accept a %(name)s'),
    ]

    new_params['extra_django_patterns'] = patterns

    new_params['edit_template'] = 'soc/student_project/edit.html'
    new_params['manage_template'] = 'soc/student_project/manage.html'
    new_params['manage_overview_heading'] = \
        'soc/student_project/list/heading_manage.html'
    new_params['manage_overview_row'] = \
        'soc/student_project/list/row_manage.html'

    params = dicts.merge(params, new_params)

    super(View, self).__init__(params=params)

    # create the form that students will use to edit their projects
    dynaproperties = {
        'public_info': forms.fields.CharField(required=True,
            widget=widgets.FullTinyMCE(attrs={'rows': 25, 'cols': 100})),
        'clean_abstract': cleaning.clean_content_length('abstract'),
        'clean_public_info': cleaning.clean_html_content('public_info'),
        'clean_additional_info': cleaning.clean_url('additional_info'),
        'clean_feed_url': cleaning.clean_feed_url,
        }

    student_edit_form = dynaform.newDynaForm(
        dynabase = self._params['dynabase'],
        dynamodel = self._params['logic'].getModel(),
        dynaexclude = self._params['create_dynaexclude'],
        dynaproperties = dynaproperties,
    )

    self._params['student_edit_form'] = student_edit_form

  def _editGet(self, request, entity, form):
    """See base.View._editGet().
    """
    # Seed the form with the identifiers of the existing project so the
    # hidden/ID fields round-trip through the edit form unchanged.
    form.fields['link_id'].initial = entity.link_id
    form.fields['student_id'].initial = entity.student.link_id
    form.fields['mentor_id'].initial = entity.mentor.link_id

    return super(View, self)._editGet(request, entity, form)

  def _editPost(self, request, entity, fields):
    """See base.View._editPost().
    """
    if not entity:
      # new project: derive a link_id from the current time (centiseconds)
      fields['link_id'] = 't%i' % (int(time.time()*100))
    else:
      fields['link_id'] = entity.link_id

    # fill in the scope via call to super
    super(View, self)._editPost(request, entity, fields)

    # editing a project so set the program, student and mentor field
    if entity:
      organization = entity.scope
    else:
      organization = fields['scope']

    fields['program'] = organization.scope

    # NOTE(review): 'filter' shadows the builtin of the same name.
    filter = {'scope': fields['program'],
              'link_id': fields['student_id']}
    fields['student'] = student_logic.logic.getForFields(filter, unique=True)

    filter = {'scope': organization,
              'link_id': fields['mentor_id'],
              'status': 'active'}
    fields['mentor'] = mentor_logic.getForFields(filter, unique=True)

  def _public(self, request, entity, context):
    """Adds the names of all additional mentors to the context.

    For params see base.View._public()
    """
    additional_mentors = entity.additional_mentors

    # NOTE(review): context['additional_mentors'] is a list in the empty
    # case but a comma-joined string otherwise; the template presumably
    # handles both — confirm before changing.
    if not additional_mentors:
      context['additional_mentors'] = []
    else:
      # resolve each stored mentor key to its entity to obtain the name
      mentor_names = []
      for mentor_key in additional_mentors:
        additional_mentor = mentor_logic.getFromKeyName(
            mentor_key.id_or_name())
        mentor_names.append(additional_mentor.name())
      context['additional_mentors'] = ', '.join(mentor_names)

  @decorators.merge_params
  @decorators.check_access
  def withdraw(self, request, access_type,
               page_name=None, params=None, **kwargs):
    """View that allows Program Admins to accept or withdraw Student Projects.

    Renders two lists: accepted/completed projects (click to withdraw) and
    withdrawn projects (click to re-accept).

    For params see base.View().public()
    """
    program = program_logic.getFromKeyFieldsOr404(kwargs)

    fields = {
        'program': program,
        'status': ['accepted', 'completed'],
        }

    ap_params = params.copy() # accepted projects

    ap_params['list_action'] = (redirects.getWithdrawProjectRedirect,
                                ap_params)
    ap_params['list_description'] = ugettext(
        "An overview of accepted and completed Projects. "
        "Click on a project to withdraw it.")

    ap_list = lists.getListContent(
        request, ap_params, fields, idx=0)

    fields['status'] = ['withdrawn']

    wp_params = params.copy() # withdrawn projects

    wp_params['list_action'] = (redirects.getAcceptProjectRedirect, wp_params)
    wp_params['list_description'] = ugettext(
        "An overview of withdrawn Projects. "
        "Click on a project to undo the withdrawal.")

    wp_list = lists.getListContent(
        request, wp_params, fields, idx=1)

    # fill contents with all the needed lists
    contents = [ap_list, wp_list]

    # call the _list method from base to display the list
    return self._list(request, params, contents, page_name)

  @decorators.merge_params
  @decorators.check_access
  def withdrawProject(self, request, access_type,
                      page_name=None, params=None, **kwargs):
    """View that allows Program Admins to withdraw Student Projects.

    For params see base.View().public()
    """
    # TODO(POST): this mutates datastore state from a GET request; it
    # should be converted to require POST.
    logic = params['logic']

    entity = logic.getFromKeyFieldsOr404(kwargs)

    fields = {
        'status': 'withdrawn',
        }

    logic.updateEntityProperties(entity, fields)

    url = redirects.getWithdrawRedirect(entity.program, params)
    return http.HttpResponseRedirect(url)

  @decorators.merge_params
  @decorators.check_access
  def acceptProject(self, request, access_type,
                    page_name=None, params=None, **kwargs):
    """View that allows Program Admins to accept Student Projects.

    For params see base.View().public()
    """
    # TODO(POST): this mutates datastore state from a GET request; it
    # should be converted to require POST.
    logic = params['logic']

    entity = logic.getFromKeyFieldsOr404(kwargs)

    fields = {
        'status': 'accepted',
        }

    logic.updateEntityProperties(entity, fields)

    url = redirects.getWithdrawRedirect(entity.program, params)
    return http.HttpResponseRedirect(url)

  @decorators.merge_params
  @decorators.check_access
  def manage(self, request, access_type,
             page_name=None, params=None, **kwargs):
    """View that allows Organization Admins to manage their Student Projects.

    Dispatches to manageGet/managePost after assembling the shared context
    (mentor-management forms and evaluation lists).

    For params see base.View().public()
    """
    try:
      entity = self._logic.getFromKeyFieldsOr404(kwargs)
    except out_of_band.Error, error:
      return responses.errorResponse(
          error, request, template=params['error_public'])

    template = params['manage_template']

    # get the context for this webpage
    context = responses.getUniversalContext(request)
    responses.useJavaScript(context, params['js_uses_all'])
    context['page_name'] = "%s '%s' from %s" % (page_name, entity.title,
                                                entity.student.name())
    context['entity'] = entity

    if project_logic.canChangeMentors(entity):
      # only accepted project can have their mentors managed
      self._enableMentorManagement(entity, params, context)

    context['evaluation_list'] = self._getEvaluationLists(request, params,
                                                          entity)

    if request.POST:
      return self.managePost(request, template, context, params, entity,
                             **kwargs)
    else: #request.GET
      return self.manageGet(request, template, context, params, entity,
                            **kwargs)

  def _enableMentorManagement(self, entity, params, context):
    """Sets the data required to manage mentors for a StudentProject.

    Builds the primary-mentor reassignment form and the additional-mentor
    form, storing both in params (mutated in place) for later use by
    manageGet/managePost.

    Args:
      entity: StudentProject entity to manage
      params: params dict for the manage view
      context: context for the manage view
    """
    context['can_manage_mentors'] = True

    # get all mentors for this organization
    fields = {'scope': entity.scope,
              'status': 'active'}
    mentors = mentor_logic.getForFields(fields)

    choices = [(mentor.link_id,'%s (%s)' %(mentor.name(), mentor.link_id))
                for mentor in mentors]

    # create the form that org admins will use to reassign a mentor
    dynafields = [
        {'name': 'mentor_id',
         'base': forms.ChoiceField,
         'label': 'Primary Mentor',
         'required': True,
         'passthrough': ['required', 'choices', 'label'],
         'choices': choices,
        },]

    dynaproperties = params_helper.getDynaFields(dynafields)

    mentor_edit_form = dynaform.newDynaForm(
        dynabase = params['dynabase'],
        dynaproperties = dynaproperties,
    )

    params['mentor_edit_form'] = mentor_edit_form

    additional_mentors = entity.additional_mentors

    # we want to show the names of the additional mentors in the context
    # therefore they need to be resolved to entities first
    additional_mentors_context = []

    for mentor_key in additional_mentors:
      mentor_entity = mentor_logic.getFromKeyName(
          mentor_key.id_or_name())
      additional_mentors_context.append(mentor_entity)

    context['additional_mentors'] = additional_mentors_context

    # all mentors who are not already an additional mentor or
    # the primary mentor are allowed to become an additional mentor
    possible_additional_mentors = [m for m in mentors if
        (m.key() not in additional_mentors)
        and (m.key() != entity.mentor.key())]

    # create the information to be shown on the additional mentor form
    additional_mentor_choices = [
        (mentor.link_id,'%s (%s)' %(mentor.name(), mentor.link_id))
        for mentor in possible_additional_mentors]

    dynafields = [
        {'name': 'mentor_id',
         'base': forms.ChoiceField,
         'label': 'Co-Mentor',
         'required': True,
         'passthrough': ['required', 'choices', 'label'],
         'choices': additional_mentor_choices,
        },]

    dynaproperties = params_helper.getDynaFields(dynafields)

    additional_mentor_form = dynaform.newDynaForm(
        dynabase = params['dynabase'],
        dynaproperties = dynaproperties,
    )

    params['additional_mentor_form'] = additional_mentor_form

  def _getEvaluationLists(self, request, params, entity):
    """Returns List Object containing the list to be shown on the Student
    Project's manage page.

    This list contains all Surveys that have at least one record and will also
    contain information about the presence (or absence) of a accompanying
    record for the given Student Project.

    Args:
      request: Django HTTP Request Object
      params: the params dict for this View
      entity: a StudentProject entity for which the Surveys(Records) should be
              retrieved

    Returns:
      A List Object as specified by this method.
    """
    # Imported locally — presumably to avoid circular imports at module
    # load time; confirm before hoisting to the top of the file.
    import soc.logic.lists
    from soc.modules.gsoc.views.helper import list_info
    from soc.modules.gsoc.views.models.grading_project_survey import view as \
        grading_survey_view
    from soc.modules.gsoc.views.models.project_survey import view as \
        project_survey_view

    fields = {'scope_path': entity.program.key().id_or_name()}

    # get the GradingProjectSurvey list
    gps_params = grading_survey_view.getParams().copy()
    gps_params['list_key_order'] = None
    gps_params['list_heading'] = gps_params['manage_student_project_heading']
    gps_params['list_row'] = gps_params['manage_student_project_row']

    # list all surveys for this Project's Program
    # NOTE(review): redundant — scope_path was already set to this exact
    # value when fields was initialized above.
    fields['scope_path'] = entity.program.key().id_or_name()
    gps_params['list_description'] = \
        'List of all Mentor Evaluations for this Project'
    gps_params['list_action'] = None

    gps_list = lists.getListContent(
        request, gps_params, fields, idx=0)

    list_info.setProjectSurveyInfoForProject(gps_list, entity, gps_params)

    # get the ProjectSurvey list
    ps_params = project_survey_view.getParams().copy()
    ps_params['list_key_order'] = None
    ps_params['list_heading'] = ps_params['manage_student_project_heading']
    ps_params['list_row'] = ps_params['manage_student_project_row']

    ps_params['list_description'] = \
        'List of all Student Evaluations for this Project'
    ps_params['list_action'] = None

    # list all surveys for this Project's Program
    fields['scope_path'] = entity.program.key().id_or_name()

    ps_list = lists.getListContent(
        request, ps_params, fields, idx=1)

    list_info.setProjectSurveyInfoForProject(ps_list, entity, ps_params)

    # store both lists in the content
    content = [gps_list, ps_list]

    # NOTE(review): the loop variable 'list' shadows the builtin here.
    for list in content:
      # remove all the surveys that have no records attached
      list['data'] = [i for i in list['data'] if
                      list['logic'].hasRecord(i)]

    # return the List Object with the filtered list content
    return soc.logic.lists.Lists(content)

  def manageGet(self, request, template, context, params, entity, **kwargs):
    """Handles the GET request for the project's manage page.

    Args:
      template: the template used for this view
      entity: the student project entity
      rest: see base.View.public()
    """
    get_dict = request.GET

    # NOTE(review): the 'remove' branch mutates the project from a GET
    # request (removes an additional mentor); consider requiring POST.
    if 'remove' in get_dict and entity.status == 'accepted':
      # get the mentor to remove
      fields = {'link_id': get_dict['remove'],
                'scope': entity.scope}
      mentor = mentor_logic.getForFields(fields, unique=True)

      additional_mentors = entity.additional_mentors
      # pylint: disable-msg=E1103
      if additional_mentors and mentor.key() in additional_mentors:
        # remove the mentor from the additional mentors list
        additional_mentors.remove(mentor.key())
        fields = {'additional_mentors': additional_mentors}
        project_logic.updateEntityProperties(entity, fields)

      # redirect to the same page without GET arguments
      redirect = request.path
      return http.HttpResponseRedirect(redirect)

    if project_logic.canChangeMentors(entity):
      # populate forms with the current mentors set
      initial = {'mentor_id': entity.mentor.link_id}
      context['mentor_edit_form'] = params['mentor_edit_form'](initial=initial)

      context['additional_mentor_form'] = params['additional_mentor_form']()

    return responses.respond(request, template, context)

  def managePost(self, request, template, context, params, entity, **kwargs):
    """Handles the POST request for the project's manage page.

    Dispatches on the submitted button name ('set_mentor' or
    'add_additional_mentor'); anything else falls back to the GET page.

    Args:
      template: the template used for this view
      entity: the student project entity
      rest: see base.View.public()
    """
    post_dict = request.POST

    if 'set_mentor' in post_dict and project_logic.canChangeMentors(entity):
      form = params['mentor_edit_form'](post_dict)
      return self._manageSetMentor(request, template, context, params, entity,
                                   form)
    elif 'add_additional_mentor' in post_dict and \
        project_logic.canChangeMentors(entity):
      form = params['additional_mentor_form'](post_dict)
      return self._manageAddAdditionalMentor(request, template, context,
                                             params, entity, form)
    else:
      # unexpected error return the normal page
      logging.warning('Unexpected POST data found')
      return self.manageGet(request, template, context, params, entity)

  def _manageSetMentor(self, request, template, context, params, entity, form):
    """Handles the POST request for changing a Projects's mentor.

    Args:
      template: the template used for this view
      entity: the student project entity
      form: instance of the form used to set the mentor
      rest: see base.View.public()
    """
    if not form.is_valid():
      context['mentor_edit_form'] = form

      # add an a fresh additional mentors form
      context['additional_mentor_form'] = params['additional_mentor_form']()

      return responses.respond(request, template, context)

    _, fields = forms_helper.collectCleanedFields(form)

    # get the mentor from the form
    fields = {'link_id': fields['mentor_id'],
              'scope': entity.scope,
              'status': 'active'}
    mentor = mentor_logic.getForFields(fields, unique=True)

    # update the project with the assigned mentor
    fields = {'mentor': mentor}

    additional_mentors = entity.additional_mentors

    # pylint: disable-msg=E1103
    if additional_mentors and mentor.key() in additional_mentors:
      # remove the mentor that is now becoming the primary mentor
      additional_mentors.remove(mentor.key())
      fields['additional_mentors'] = additional_mentors

    # update the project with the new mentor and possible
    # new set of additional mentors
    project_logic.updateEntityProperties(entity, fields)

    # redirect to the same page
    redirect = request.path
    return http.HttpResponseRedirect(redirect)

  def _manageAddAdditionalMentor(self, request, template,
                                 context, params, entity, form):
    """Handles the POST request for changing a Projects's additional mentors.

    Args:
      template: the template used for this view
      entity: the student project entity
      form: instance of the form used to add an additional mentor
      rest: see base.View.public()
    """
    if not form.is_valid():
      context['additional_mentor_form'] = form

      # add a fresh edit mentor form
      initial = {'mentor_id': entity.mentor.link_id}
      context['mentor_edit_form'] = params['mentor_edit_form'](initial=initial)

      return responses.respond(request, template, context)

    _, fields = forms_helper.collectCleanedFields(form)

    # get the mentor from the form
    fields = {'link_id': fields['mentor_id'],
              'scope': entity.scope,
              'status': 'active'}
    mentor = mentor_logic.getForFields(fields, unique=True)

    # add this mentor to the additional mentors
    if not entity.additional_mentors:
      additional_mentors = [mentor.key()]
    else:
      additional_mentors = entity.additional_mentors
      additional_mentors.append(mentor.key())

    fields = {'additional_mentors': additional_mentors}
    project_logic.updateEntityProperties(entity, fields)

    # redirect to the same page
    redirect = request.path
    return http.HttpResponseRedirect(redirect)

  @decorators.merge_params
  @decorators.check_access
  def manageOverview(self, request, access_type,
                     page_name=None, params=None, **kwargs):
    """View that allows Organization Admins to see an overview of
    their Organization's Student Projects.

    Builds four lists (accepted, failed, completed, withdrawn); only the
    accepted list is always shown, the others only when non-empty.

    For params see base.View().public()
    """
    from soc.modules.gsoc.views.helper import list_info

    # make sure the organization exists
    org_entity = org_logic.getFromKeyNameOr404(kwargs['scope_path'])
    fields = {'scope': org_entity}

    # get the context for this webpage
    context = responses.getUniversalContext(request)
    responses.useJavaScript(context, params['js_uses_all'])
    context['page_name'] = '%s %s' % (page_name, org_entity.name)

    prefetch = ['student', 'mentor']

    list_params = params.copy()
    list_params['list_heading'] = params['manage_overview_heading']
    list_params['list_row'] = params['manage_overview_row']

    #list all active projects
    fields['status'] = 'accepted'
    active_params = list_params.copy()
    active_params['list_description'] = \
        'List of all active %(name_plural)s' % list_params
    active_params['list_action'] = (redirects.getManageRedirect, list_params)

    active_list = lists.getListContent(request, active_params, fields, idx=0,
                                       prefetch=prefetch)
    # set the needed info
    active_list = list_info.setStudentProjectSurveyInfo(active_list,
                                                        org_entity.scope)

    # list all failed projects
    fields['status'] = 'failed'
    failed_params = list_params.copy()
    failed_params['list_description'] = ('List of all %(name_plural)s who '
        'failed the program.') % list_params
    failed_params['list_action'] = (redirects.getManageRedirect, list_params)

    failed_list = lists.getListContent(request, failed_params, fields, idx=1,
                                       need_content=True, prefetch=prefetch)
    # set the needed info
    failed_list = list_info.setStudentProjectSurveyInfo(failed_list,
                                                        org_entity.scope)

    # list all completed projects
    fields['status'] = 'completed'
    completed_params = list_params.copy()
    completed_params['list_description'] = (
        'List of %(name_plural)s that have successfully completed the '
        'program.' % list_params)
    completed_params['list_action'] = (redirects.getManageRedirect, list_params)

    completed_list = lists.getListContent(request, completed_params, fields,
                                          idx=2, need_content=True,
                                          prefetch=prefetch)
    # set the needed info
    completed_list = list_info.setStudentProjectSurveyInfo(completed_list,
                                                           org_entity.scope)

    # list all withdrawn projects
    fields['status'] = 'withdrawn'
    withdrawn_params = list_params.copy()
    withdrawn_params['list_description'] = (
        'List of %(name_plural)s that have withdrawn from the program.' %(
            list_params))
    withdrawn_params['list_action'] = (redirects.getManageRedirect, list_params)

    withdrawn_list = lists.getListContent(request, withdrawn_params, fields,
                                          idx=3, need_content=True,
                                          prefetch=prefetch)
    # set the needed info
    withdrawn_list = list_info.setStudentProjectSurveyInfo(withdrawn_list,
                                                           org_entity.scope)

    # always show the list with active projects
    content = [active_list]

    if failed_list != None:
      # do not show empty failed list
      content.append(failed_list)

    if completed_list != None:
      # do not show empty completed list
      content.append(completed_list)

    if withdrawn_list != None:
      # do not show empty withdrawn list
      content.append(withdrawn_list)

    # call the _list method from base to display the list
    return self._list(request, list_params, content,
                      context['page_name'], context)

  @decorators.merge_params
  @decorators.check_access
  def stEdit(self, request, access_type,
             page_name=None, params=None, **kwargs):
    """View that allows students to edit information about their project.

    For params see base.View().public()
    """
    try:
      entity = self._logic.getFromKeyFieldsOr404(kwargs)
    except out_of_band.Error, error:
      return responses.errorResponse(
          error, request, template=params['error_public'])

    # get the context for this webpage
    context = responses.getUniversalContext(request)
    responses.useJavaScript(context, params['js_uses_all'])
    context['page_name'] = page_name

    # cancel should go to the public view
    params['cancel_redirect'] = redirects.getPublicRedirect(entity, params)

    if request.POST:
      return self.stEditPost(request, context, params, entity, **kwargs)
    else: #request.GET
      return self.stEditGet(request, context, params, entity, **kwargs)

  def stEditGet(self, request, context, params, entity, **kwargs):
    """Handles the GET request for the student's edit page.

    Args:
      entity: the student project entity
      rest: see base.View.public()
    """
    # populate form with the existing entity
    form = params['student_edit_form'](instance=entity)

    return self._constructResponse(request, entity, context, form, params)

  def stEditPost(self, request, context, params, entity, **kwargs):
    """Handles the POST request for the student's edit page.

    Args:
      entity: the student project entity
      rest: see base.View.public()
    """
    form = params['student_edit_form'](request.POST)

    if not form.is_valid():
      # invalid input: re-render the edit page with the bound form's errors
      return self._constructResponse(request, entity, context, form, params)

    _, fields = forms_helper.collectCleanedFields(form)

    project_logic.updateEntityProperties(entity, fields)

    return self.stEditGet(request, context, params, entity, **kwargs)
view = View()

# Module-level Django view callables. decorators.view wraps each bound method
# of the singleton View instance; these names are the targets referenced by
# the URL patterns built in View.__init__ and by base.View's standard
# patterns.
# NOTE(review): the name 'list' shadows the builtin within this module.
accept_project = decorators.view(view.acceptProject)
admin = decorators.view(view.admin)
create = decorators.view(view.create)
delete = decorators.view(view.delete)
edit = decorators.view(view.edit)
list = decorators.view(view.list)
manage = decorators.view(view.manage)
manage_overview = decorators.view(view.manageOverview)
public = decorators.view(view.public)
st_edit = decorators.view(view.stEdit)
export = decorators.view(view.export)
pick = decorators.view(view.pick)
withdraw = decorators.view(view.withdraw)
withdraw_project = decorators.view(view.withdrawProject)
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for finding nodes in a graph based on metadata in meta_graph_def.
This is an internal library for use only by load.py.
"""
from __future__ import absolute_import
from __future__ import division
# Standard __future__ imports
from __future__ import print_function
import collections
# Standard Imports
import tensorflow as tf
from tensorflow_model_analysis import types
from tensorflow_model_analysis.eval_saved_model import constants
from tensorflow_model_analysis.eval_saved_model import encoding
from tensorflow_model_analysis.eval_saved_model import util
from typing import Dict, List, Optional, Text, Tuple, Union
from google.protobuf import any_pb2
from tensorflow.core.protobuf import meta_graph_pb2
# Union of the value types this module handles when reading collection_def
# entries; CamelCase alias name, hence the pylint suppression.
CollectionDefValueType = Union[float, int, bytes, any_pb2.Any]  # pylint: disable=invalid-name
def extract_signature_inputs_or_outputs_with_prefix(
    prefix: Text,
    # Inputs and outputs are not actually Dicts, but behave like them
    signature_inputs_or_outputs: 'Dict[Text, meta_graph_pb2.TensorInfo]',
    key_if_single_element: Optional[Text] = None
) -> 'Dict[Text, meta_graph_pb2.TensorInfo]':
  """Extracts signature outputs with the given prefix.

  This is the reverse of _wrap_and_check_metrics / _wrap_and_check_outputs and
  _prefix_output_keys in tf.estimator.export.ExportOutput.

  This is designed to extract structures from the SignatureDef outputs map.

  Structures of the following form:
    <prefix>/key1
    <prefix>/key2
  will map to dictionary elements like so:
    {key1: value1, key2: value2}

  Structures of the following form:
    <prefix>
    <prefix>_extrastuff
    <prefix>morestuff
  will map to dictionary elements like so:
    {<prefix>: value1, <prefix>_extrastuff: value2, <prefix>morestuff: value3}

  Args:
    prefix: Prefix to extract
    signature_inputs_or_outputs: Signature inputs or outputs to extract from
    key_if_single_element: Key to use in the dictionary if the SignatureDef map
      had only one entry with key <prefix> representing a single tensor.

  Returns:
    Dictionary extracted as described above. The values will be the TensorInfo
    associated with the keys.

  Raises:
    ValueError: There were duplicate keys.
  """
  matched_prefix = False
  result = {}
  for k, v in signature_inputs_or_outputs.items():
    if k.startswith(prefix + '/'):
      # '<prefix>/key' form: strip '<prefix>/' and keep the remainder.
      key = k[len(prefix) + 1:]
    elif k.startswith(prefix):
      if k == prefix:
        matched_prefix = True
      # '<prefix>...' form: the full key is kept as-is.
      key = k
    else:
      continue

    if key in result:
      # Bug fix: the format arguments were previously (prefix, key, ...),
      # which printed the prefix where the key belonged and vice versa.
      raise ValueError(
          'key "%s" already in dictionary. you might have repeated keys. '
          'prefix was "%s", signature_def values were: %s' %
          (key, prefix, signature_inputs_or_outputs))
    result[key] = v

  if key_if_single_element and matched_prefix and len(result) == 1:
    # The map contained exactly one entry, named exactly <prefix>: re-key it.
    return {key_if_single_element: result[prefix]}
  return result
# TODO(b/119308261): Remove once all exported EvalSavedModels are updated.
def load_legacy_inputs(
    meta_graph_def: tf.compat.v1.MetaGraphDef,
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph) -> Tuple[Dict[Text, types.TensorType], types.TensorType]:
  """Loads legacy inputs.

  Args:
    meta_graph_def: MetaGraphDef to lookup nodes in.
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Tuple of (inputs_map, input_refs_node)
  """
  # The legacy format has a single entry in signature_def.inputs; pull its
  # key and TensorInfo in one pass.
  input_key, input_info = next(iter(signature_def.inputs.items()))
  input_node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      input_info, graph)

  try:
    input_refs_node = get_node_in_graph(
        meta_graph_def, encoding.EXAMPLE_REF_COLLECTION, graph)
  except KeyError:
    # No ExampleRef collection: probably a model created before the
    # ExampleRef parameter was introduced to EvalInputReceiver. Default to
    # a tensor of range(0, len(input_example)).
    # TODO(b/117519999): Remove this backwards-compatibility shim once all
    # exported EvalSavedModels have ExampleRef.
    input_refs_node = tf.range(tf.size(input=input_node))

  return (collections.OrderedDict([(input_key, input_node)]), input_refs_node)
# TODO(b/119308261): Remove once all exported EvalSavedModels are updated.
def load_legacy_features_and_labels(
    meta_graph_def: tf.compat.v1.MetaGraphDef, graph: tf.Graph
) -> Tuple[Dict[Text, types.TensorType], Dict[Text, types.TensorType]]:
  """Loads legacy features and labels nodes.

  Args:
    meta_graph_def: MetaGraphDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Tuple of (features_map, labels_map)
  """

  def _unwrap_node_map(collection_name):
    # Each collection entry maps key -> {NODE_SUFFIX: node}; flatten it to
    # key -> node while preserving the original entry order.
    encoded = get_node_map_in_graph(
        meta_graph_def, collection_name, [encoding.NODE_SUFFIX], graph)
    return collections.OrderedDict(
        (key, value[encoding.NODE_SUFFIX]) for key, value in encoded.items())

  features_map = _unwrap_node_map(encoding.FEATURES_COLLECTION)
  labels_map = _unwrap_node_map(encoding.LABELS_COLLECTION)

  # Assume that KeyType is only Text
  # pytype: disable=bad-return-type
  return (features_map, labels_map)
  # pytype: enable=bad-return-type
def load_tfma_version(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> types.TensorType:
  """Loads TFMA version information from signature_def.inputs.

  Args:
    signature_def: SignatureDef to lookup node in.
    graph: TensorFlow graph to lookup the node in.

  Returns:
    TFMA version tensor.

  Raises:
    ValueError: If version not found signature_def.inputs.
  """
  version_key = constants.SIGNATURE_DEF_TFMA_VERSION_KEY
  # Membership test (rather than indexing and catching KeyError) so the
  # proto map is never asked for a key it does not contain.
  if version_key in signature_def.inputs:
    return tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
        signature_def.inputs[version_key], graph)
  raise ValueError('tfma version not found in signature_def: %s' %
                   signature_def)
def load_inputs(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Tuple[Dict[Text, types.TensorType], types.TensorType]:
  """Resolves input tensors named in signature_def.inputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Tuple of (inputs_map, input_refs_node) where inputs_map is an OrderedDict.

  Raises:
    ValueError: If inputs or input_refs not found signature_def.inputs.
  """
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      constants.SIGNATURE_DEF_INPUTS_PREFIX, signature_def.inputs)
  if not tensor_infos:
    raise ValueError('no inputs found in signature_def: %s' % signature_def)
  # Keys are sorted so feed_list ordering is deterministic across runs.
  inputs_map = collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
           tensor_infos[name], graph)) for name in sorted(tensor_infos))
  if constants.SIGNATURE_DEF_INPUT_REFS_KEY not in signature_def.inputs:
    raise ValueError('no input_refs found in signature_def: %s' % signature_def)
  input_refs_node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      signature_def.inputs[constants.SIGNATURE_DEF_INPUT_REFS_KEY], graph)
  return (inputs_map, input_refs_node)
def load_iterator_initializer_name(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Optional[types.TensorType]:
  """Returns the iterator-initializer name tensor, if one was exported.

  Args:
    signature_def: SignatureDef to lookup initializer in.
    graph: TensorFlow graph to lookup the initializer in.

  Returns:
    Tensor containing iterator initializer op name, or None when the model
    does not use an iterator (absence is not an error).
  """
  init_key = constants.SIGNATURE_DEF_ITERATOR_INITIALIZER_KEY
  if init_key not in signature_def.inputs:
    return None
  return tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      signature_def.inputs[init_key], graph)
def load_additional_inputs(
    prefix: Text,
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Dict[Text, types.TensorType]:
  """Resolves prefixed auxiliary input tensors from signature_def.inputs.

  Args:
    prefix: Prefix used for tensors in signature_def.inputs (e.g. features,
      labels, etc)
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    OrderedDict of tensors.
  """
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      prefix, signature_def.inputs, util.default_dict_key(prefix))
  return collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(info, graph))
      for name, info in tensor_infos.items())
def load_predictions(signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
                     graph: tf.Graph) -> Dict[Text, types.TensorType]:
  """Resolves prediction tensors named in signature_def.outputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Predictions map as an OrderedDict.
  """
  # Ordering follows the predictions collection; no additional sorting is
  # applied, matching how features and labels are extracted.
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      constants.PREDICTIONS_NAME, signature_def.outputs,
      util.default_dict_key(constants.PREDICTIONS_NAME))
  return collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(info, graph))
      for name, info in tensor_infos.items())
def load_metrics(signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
                 graph: tf.Graph
                ) -> Dict[types.FPLKeyType, Dict[Text, types.TensorType]]:
  """Resolves metric value/update tensors from signature_def.outputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Map of metric key -> {value_op/update_op suffix -> tensor}.
  """
  metrics = extract_signature_inputs_or_outputs_with_prefix(
      constants.METRICS_NAME, signature_def.outputs)
  metrics_map = collections.defaultdict(dict)
  # Each entry is either <key>/<value suffix> or <key>/<update suffix>.
  suffix_table = (
      (constants.METRIC_VALUE_SUFFIX, encoding.VALUE_OP_SUFFIX),
      (constants.METRIC_UPDATE_SUFFIX, encoding.UPDATE_OP_SUFFIX),
  )
  for name, info in metrics.items():
    node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
        info, graph)
    for metric_suffix, op_suffix in suffix_table:
      tail = '/' + metric_suffix
      if name.endswith(tail):
        metrics_map[name[:-len(tail)]][op_suffix] = node
        break
    else:
      raise ValueError('unrecognised suffix for metric. key was: %s' % name)
  return metrics_map
def get_node_map(meta_graph_def: meta_graph_pb2.MetaGraphDef, prefix: Text,
                 node_suffixes: List[Text]
                ) -> Dict[types.FPLKeyType, Dict[Text, CollectionDefValueType]]:
  """Get node map from meta_graph_def.

  This is designed to extract structures of the following form from the
  meta_graph_def collection_def:

    prefix/key
      key1
      key2
      key3
    prefix/suffix_a
      node1
      node2
      node3
    prefix/suffix_b
      node4
      node5
      node6

  which will become a dictionary:
  {
    key1 : {suffix_a: node1, suffix_b: node4}
    key2 : {suffix_a: node2, suffix_b: node5}
    key3 : {suffix_a: node3, suffix_b: node6}
  }.

  Keys must always be bytes. Values can be any supported CollectionDef type
  (bytes_list, any_list, etc)

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      structure from.
    prefix: Prefix for the CollectionDef names.
    node_suffixes: The suffixes to the prefix to form the names of the
      CollectionDefs to extract the nodes from, e.g. in the example described
      above, node_suffixes would be ['suffix_a', 'suffix_b'].

  Returns:
    A dictionary of dictionaries, as described in the example above.

  Raises:
    ValueError: The length of some node list did not match length of the key
      list.
  """
  node_lists = []
  for node_suffix in node_suffixes:
    collection_def_name = encoding.with_suffix(prefix, node_suffix)
    collection_def = meta_graph_def.collection_def.get(collection_def_name)
    if collection_def is None:
      # If we can't find the CollectionDef, append an empty list.
      #
      # Either all the CollectionDefs are missing, in which case we correctly
      # return an empty dict, or some of the CollectionDefs are non-empty,
      # in which case we raise an exception below.
      node_lists.append([])
    else:
      node_lists.append(
          getattr(collection_def, collection_def.WhichOneof('kind')).value)
  keys = meta_graph_def.collection_def[encoding.with_suffix(
      prefix, encoding.KEY_SUFFIX)].bytes_list.value
  # Use a generator expression; the original materialized a throwaway list
  # solely to pass it to all().
  if not all(len(node_list) == len(keys) for node_list in node_lists):
    raise ValueError('length of each node_list should match length of keys. '
                     'prefix was %s, node_lists were %s, keys was %s' %
                     (prefix, node_lists, keys))
  result = {}
  # zip(*node_lists) transposes per-suffix columns into per-key rows.
  for key, elems in zip(keys, zip(*node_lists)):
    result[encoding.decode_key(key)] = dict(zip(node_suffixes, elems))
  return result
def get_node_map_in_graph(
    meta_graph_def: meta_graph_pb2.MetaGraphDef, prefix: Text,
    node_suffixes: List[Text],
    graph: tf.Graph) -> Dict[types.FPLKeyType, Dict[Text, types.TensorType]]:
  """Like get_node_map, but looks up the nodes in the given graph.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      structure from.
    prefix: Prefix for the CollectionDef names.
    node_suffixes: The suffixes to the prefix to form the names of the
      CollectionDefs to extract the nodes from.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    A dictionary of dictionaries like get_node_map, except the values are
    the actual nodes in the graph.
  """
  raw_map = get_node_map(meta_graph_def, prefix, node_suffixes)
  return {
      key: {
          suffix: encoding.decode_tensor_node(graph, node)
          for suffix, node in suffix_map.items()
      } for key, suffix_map in raw_map.items()
  }
def get_node_wrapped_tensor_info(meta_graph_def: meta_graph_pb2.MetaGraphDef,
                                 path: Text) -> any_pb2.Any:
  """Get the Any-wrapped TensorInfo for the node from the meta_graph_def.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      node name from.
    path: Name of the collection containing the node name.

  Returns:
    The Any-wrapped TensorInfo for the node retrieved from the CollectionDef.

  Raises:
    KeyError: There was no CollectionDef with the given name (path).
    ValueError: The any_list in the CollectionDef with the given name did
      not have length 1.
  """
  collection_defs = meta_graph_def.collection_def
  if path not in collection_defs:
    raise KeyError('could not find path %s in collection defs. meta_graph_def '
                   'was %s' % (path, meta_graph_def))
  any_values = collection_defs[path].any_list.value
  if len(any_values) != 1:
    raise ValueError(
        'any_list should be of length 1. path was %s, any_list was: %s.' %
        (path, any_values))
  return any_values[0]
def get_node_in_graph(meta_graph_def: meta_graph_pb2.MetaGraphDef, path: Text,
                      graph: tf.Graph) -> types.TensorType:
  """Like get_node_wrapped_tensor_info, but looks up the node in the graph.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      node name from.
    path: Name of the collection containing the node name.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    The node in the graph with the name returned by
    get_node_wrapped_tensor_info.

  Raises:
    KeyError: There was no CollectionDef with the given name (path), raised
      by get_node_wrapped_tensor_info.
    ValueError: The any_list in the named CollectionDef did not have
      length 1, raised by get_node_wrapped_tensor_info.
  """
  # Two-step composition: resolve the Any-wrapped TensorInfo from the
  # collection, then decode it to the concrete node in `graph`.
  return encoding.decode_tensor_node(
      graph, get_node_wrapped_tensor_info(meta_graph_def, path))
Fix a typo in the comments.
PiperOrigin-RevId: 252314878
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for finding nodes in a graph based on metadata in meta_graph_def.
This is an internal library for use only by load.py.
"""
from __future__ import absolute_import
from __future__ import division
# Standard __future__ imports
from __future__ import print_function
import collections
# Standard Imports
import tensorflow as tf
from tensorflow_model_analysis import types
from tensorflow_model_analysis.eval_saved_model import constants
from tensorflow_model_analysis.eval_saved_model import encoding
from tensorflow_model_analysis.eval_saved_model import util
from typing import Dict, List, Optional, Text, Tuple, Union
from google.protobuf import any_pb2
from tensorflow.core.protobuf import meta_graph_pb2
CollectionDefValueType = Union[float, int, bytes, any_pb2.Any] # pylint: disable=invalid-name
def extract_signature_inputs_or_outputs_with_prefix(
    prefix: Text,
    # Inputs and outputs are not actually Dicts, but behave like them
    signature_inputs_or_outputs: 'Dict[Text, meta_graph_pb2.TensorInfo]',
    key_if_single_element: Optional[Text] = None
) -> 'Dict[Text, meta_graph_pb2.TensorInfo]':
  """Extracts signature outputs with the given prefix.

  This is the reverse of _wrap_and_check_metrics / _wrap_and_check_outputs and
  _prefix_output_keys in tf.estimator.export.ExportOutput.

  This is designed to extract structures from the SignatureDef outputs map.

  Structures of the following form:

    <prefix>/key1
    <prefix>/key2

  will map to dictionary elements like so:

    {key1: value1, key2: value2}

  Structures of the following form:

    <prefix>
    <prefix>_extrastuff
    <prefix>morestuff

  will map to dictionary elements like so:

    {<prefix>: value1, <prefix>_extrastuff: value2, <prefix>morestuff: value3}

  Args:
    prefix: Prefix to extract
    signature_inputs_or_outputs: Signature inputs or outputs to extract from
    key_if_single_element: Key to use in the dictionary if the SignatureDef map
      had only one entry with key <prefix> representing a single tensor.

  Returns:
    Dictionary extracted as described above. The values will be the TensorInfo
    associated with the keys.

  Raises:
    ValueError: There were duplicate keys.
  """
  matched_prefix = False
  result = {}
  for k, v in signature_inputs_or_outputs.items():
    if k.startswith(prefix + '/'):
      # <prefix>/key form: strip the prefix and separator.
      key = k[len(prefix) + 1:]
    elif k.startswith(prefix):
      if k == prefix:
        matched_prefix = True
      key = k
    else:
      continue
    if key in result:
      # BUG FIX: the format arguments were previously (prefix, key, ...),
      # which swapped the key and prefix in the rendered message.
      raise ValueError(
          'key "%s" already in dictionary. you might have repeated keys. '
          'prefix was "%s", signature_def values were: %s' %
          (key, prefix, signature_inputs_or_outputs))
    result[key] = v
  if key_if_single_element and matched_prefix and len(result) == 1:
    # Exactly one entry, keyed by the bare prefix: rename it.
    return {key_if_single_element: result[prefix]}
  return result
# TODO(b/119308261): Remove once all exported EvalSavedModels are updated.
def load_legacy_inputs(
    meta_graph_def: tf.compat.v1.MetaGraphDef,
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph) -> Tuple[Dict[Text, types.TensorType], types.TensorType]:
  """Loads the single legacy input node plus its example-refs node.

  Args:
    meta_graph_def: MetaGraphDef to lookup nodes in.
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Tuple of (inputs_map, input_refs_node)
  """
  input_key = list(signature_def.inputs.keys())[0]
  input_node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      signature_def.inputs[input_key], graph)
  try:
    input_refs_node = get_node_in_graph(meta_graph_def,
                                        encoding.EXAMPLE_REF_COLLECTION, graph)
  except KeyError:
    # If we can't find the ExampleRef collection, then this is probably a model
    # created before we introduced the ExampleRef parameter to
    # EvalInputReceiver. In that case, we default to a tensor of range(0,
    # len(input_example)).
    # TODO(b/117519999): Remove this backwards-compatibility shim once all
    # exported EvalSavedModels have ExampleRef.
    input_refs_node = tf.range(tf.size(input=input_node))
  inputs_map = collections.OrderedDict([(input_key, input_node)])
  return (inputs_map, input_refs_node)
# TODO(b/119308261): Remove once all exported EvalSavedModels are updated.
def load_legacy_features_and_labels(
    meta_graph_def: tf.compat.v1.MetaGraphDef, graph: tf.Graph
) -> Tuple[Dict[Text, types.TensorType], Dict[Text, types.TensorType]]:
  """Looks up legacy feature and label nodes in the graph.

  Args:
    meta_graph_def: MetaGraphDef whose collections name the nodes.
    graph: TensorFlow graph containing the actual nodes.

  Returns:
    Tuple of (features_map, labels_map), both OrderedDicts.
  """

  def _flatten(collection_name):
    # get_node_map_in_graph yields key -> {NODE_SUFFIX: node};
    # collapse that to key -> node, preserving iteration order.
    encoded = get_node_map_in_graph(meta_graph_def, collection_name,
                                    [encoding.NODE_SUFFIX], graph)
    return collections.OrderedDict(
        (key, value[encoding.NODE_SUFFIX]) for key, value in encoded.items())

  features_map = _flatten(encoding.FEATURES_COLLECTION)
  labels_map = _flatten(encoding.LABELS_COLLECTION)
  # Assume that KeyType is only Text
  # pytype: disable=bad-return-type
  return (features_map, labels_map)
  # pytype: enable=bad-return-type
def load_tfma_version(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> types.TensorType:
  """Returns the TFMA version tensor referenced by signature_def.inputs.

  Args:
    signature_def: SignatureDef whose inputs name the version tensor.
    graph: TensorFlow graph containing the tensor.

  Returns:
    TFMA version tensor.

  Raises:
    ValueError: If no version entry exists in signature_def.inputs.
  """
  if constants.SIGNATURE_DEF_TFMA_VERSION_KEY not in signature_def.inputs:
    raise ValueError('tfma version not found in signature_def: %s' %
                     signature_def)
  tensor_info = signature_def.inputs[constants.SIGNATURE_DEF_TFMA_VERSION_KEY]
  return tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      tensor_info, graph)
def load_inputs(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Tuple[Dict[Text, types.TensorType], types.TensorType]:
  """Resolves input tensors named in signature_def.inputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Tuple of (inputs_map, input_refs_node) where inputs_map is an OrderedDict.

  Raises:
    ValueError: If inputs or input_refs not found signature_def.inputs.
  """
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      constants.SIGNATURE_DEF_INPUTS_PREFIX, signature_def.inputs)
  if not tensor_infos:
    raise ValueError('no inputs found in signature_def: %s' % signature_def)
  # Keys are sorted so feed_list ordering is deterministic across runs.
  inputs_map = collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
           tensor_infos[name], graph)) for name in sorted(tensor_infos))
  if constants.SIGNATURE_DEF_INPUT_REFS_KEY not in signature_def.inputs:
    raise ValueError('no input_refs found in signature_def: %s' % signature_def)
  input_refs_node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      signature_def.inputs[constants.SIGNATURE_DEF_INPUT_REFS_KEY], graph)
  return (inputs_map, input_refs_node)
def load_iterator_initializer_name(
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Optional[types.TensorType]:
  """Returns the iterator-initializer name tensor, if one was exported.

  Args:
    signature_def: SignatureDef to lookup initializer in.
    graph: TensorFlow graph to lookup the initializer in.

  Returns:
    Tensor containing iterator initializer op name, or None when the model
    does not use an iterator (absence is not an error).
  """
  init_key = constants.SIGNATURE_DEF_ITERATOR_INITIALIZER_KEY
  if init_key not in signature_def.inputs:
    return None
  return tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
      signature_def.inputs[init_key], graph)
def load_additional_inputs(
    prefix: Text,
    signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
    graph: tf.Graph,
) -> Dict[Text, types.TensorType]:
  """Resolves prefixed auxiliary input tensors from signature_def.inputs.

  Args:
    prefix: Prefix used for tensors in signature_def.inputs (e.g. features,
      labels, etc)
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    OrderedDict of tensors.
  """
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      prefix, signature_def.inputs, util.default_dict_key(prefix))
  return collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(info, graph))
      for name, info in tensor_infos.items())
def load_predictions(signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
                     graph: tf.Graph) -> Dict[Text, types.TensorType]:
  """Resolves prediction tensors named in signature_def.outputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Predictions map as an OrderedDict.
  """
  # Ordering follows the predictions collection; no additional sorting is
  # applied, matching how features and labels are extracted.
  tensor_infos = extract_signature_inputs_or_outputs_with_prefix(
      constants.PREDICTIONS_NAME, signature_def.outputs,
      util.default_dict_key(constants.PREDICTIONS_NAME))
  return collections.OrderedDict(
      (name,
       tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(info, graph))
      for name, info in tensor_infos.items())
def load_metrics(signature_def: tf.compat.v1.MetaGraphDef.SignatureDefEntry,
                 graph: tf.Graph
                ) -> Dict[types.FPLKeyType, Dict[Text, types.TensorType]]:
  """Resolves metric value/update tensors from signature_def.outputs.

  Args:
    signature_def: SignatureDef to lookup nodes in.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    Map of metric key -> {value_op/update_op suffix -> tensor}.
  """
  metrics = extract_signature_inputs_or_outputs_with_prefix(
      constants.METRICS_NAME, signature_def.outputs)
  metrics_map = collections.defaultdict(dict)
  # Each entry is either <key>/<value suffix> or <key>/<update suffix>.
  suffix_table = (
      (constants.METRIC_VALUE_SUFFIX, encoding.VALUE_OP_SUFFIX),
      (constants.METRIC_UPDATE_SUFFIX, encoding.UPDATE_OP_SUFFIX),
  )
  for name, info in metrics.items():
    node = tf.compat.v1.saved_model.utils.get_tensor_from_tensor_info(
        info, graph)
    for metric_suffix, op_suffix in suffix_table:
      tail = '/' + metric_suffix
      if name.endswith(tail):
        metrics_map[name[:-len(tail)]][op_suffix] = node
        break
    else:
      raise ValueError('unrecognised suffix for metric. key was: %s' % name)
  return metrics_map
def get_node_map(meta_graph_def: meta_graph_pb2.MetaGraphDef, prefix: Text,
                 node_suffixes: List[Text]
                ) -> Dict[types.FPLKeyType, Dict[Text, CollectionDefValueType]]:
  """Get node map from meta_graph_def.

  This is designed to extract structures of the following form from the
  meta_graph_def collection_def:

    prefix/key
      key1
      key2
      key3
    prefix/suffix_a
      node1
      node2
      node3
    prefix/suffix_b
      node4
      node5
      node6

  which will become a dictionary:
  {
    key1 : {suffix_a: node1, suffix_b: node4}
    key2 : {suffix_a: node2, suffix_b: node5}
    key3 : {suffix_a: node3, suffix_b: node6}
  }.

  Keys must always be bytes. Values can be any supported CollectionDef type
  (bytes_list, any_list, etc)

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      structure from.
    prefix: Prefix for the CollectionDef names.
    node_suffixes: The suffixes to the prefix to form the names of the
      CollectionDefs to extract the nodes from, e.g. in the example described
      above, node_suffixes would be ['suffix_a', 'suffix_b'].

  Returns:
    A dictionary of dictionaries, as described in the example above.

  Raises:
    ValueError: The length of some node list did not match length of the key
      list.
  """
  node_lists = []
  for node_suffix in node_suffixes:
    collection_def_name = encoding.with_suffix(prefix, node_suffix)
    collection_def = meta_graph_def.collection_def.get(collection_def_name)
    if collection_def is None:
      # If we can't find the CollectionDef, append an empty list.
      #
      # Either all the CollectionDefs are missing, in which case we correctly
      # return an empty dict, or some of the CollectionDefs are non-empty,
      # in which case we raise an exception below.
      node_lists.append([])
    else:
      node_lists.append(
          getattr(collection_def, collection_def.WhichOneof('kind')).value)
  keys = meta_graph_def.collection_def[encoding.with_suffix(
      prefix, encoding.KEY_SUFFIX)].bytes_list.value
  # Use a generator expression; the original materialized a throwaway list
  # solely to pass it to all().
  if not all(len(node_list) == len(keys) for node_list in node_lists):
    raise ValueError('length of each node_list should match length of keys. '
                     'prefix was %s, node_lists were %s, keys was %s' %
                     (prefix, node_lists, keys))
  result = {}
  # zip(*node_lists) transposes per-suffix columns into per-key rows.
  for key, elems in zip(keys, zip(*node_lists)):
    result[encoding.decode_key(key)] = dict(zip(node_suffixes, elems))
  return result
def get_node_map_in_graph(
    meta_graph_def: meta_graph_pb2.MetaGraphDef, prefix: Text,
    node_suffixes: List[Text],
    graph: tf.Graph) -> Dict[types.FPLKeyType, Dict[Text, types.TensorType]]:
  """Like get_node_map, but looks up the nodes in the given graph.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      structure from.
    prefix: Prefix for the CollectionDef names.
    node_suffixes: The suffixes to the prefix to form the names of the
      CollectionDefs to extract the nodes from.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    A dictionary of dictionaries like get_node_map, except the values are
    the actual nodes in the graph.
  """
  raw_map = get_node_map(meta_graph_def, prefix, node_suffixes)
  return {
      key: {
          suffix: encoding.decode_tensor_node(graph, node)
          for suffix, node in suffix_map.items()
      } for key, suffix_map in raw_map.items()
  }
def get_node_wrapped_tensor_info(meta_graph_def: meta_graph_pb2.MetaGraphDef,
                                 path: Text) -> any_pb2.Any:
  """Get the Any-wrapped TensorInfo for the node from the meta_graph_def.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      node name from.
    path: Name of the collection containing the node name.

  Returns:
    The Any-wrapped TensorInfo for the node retrieved from the CollectionDef.

  Raises:
    KeyError: There was no CollectionDef with the given name (path).
    ValueError: The any_list in the CollectionDef with the given name did
      not have length 1.
  """
  collection_defs = meta_graph_def.collection_def
  if path not in collection_defs:
    raise KeyError('could not find path %s in collection defs. meta_graph_def '
                   'was %s' % (path, meta_graph_def))
  any_values = collection_defs[path].any_list.value
  if len(any_values) != 1:
    raise ValueError(
        'any_list should be of length 1. path was %s, any_list was: %s.' %
        (path, any_values))
  return any_values[0]
def get_node_in_graph(meta_graph_def: meta_graph_pb2.MetaGraphDef, path: Text,
                      graph: tf.Graph) -> types.TensorType:
  """Like get_node_wrapped_tensor_info, but looks up the node in the graph.

  Args:
    meta_graph_def: MetaGraphDef containing the CollectionDefs to extract the
      node name from.
    path: Name of the collection containing the node name.
    graph: TensorFlow graph to lookup the nodes in.

  Returns:
    The node in the graph with the name returned by
    get_node_wrapped_tensor_info.

  Raises:
    KeyError: There was no CollectionDef with the given name (path), raised
      by get_node_wrapped_tensor_info.
    ValueError: The any_list in the named CollectionDef did not have
      length 1, raised by get_node_wrapped_tensor_info.
  """
  # Two-step composition: resolve the Any-wrapped TensorInfo from the
  # collection, then decode it to the concrete node in `graph`.
  return encoding.decode_tensor_node(
      graph, get_node_wrapped_tensor_info(meta_graph_def, path))
|
# Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import re
import sys
import tempfile
import guild
from guild import _test as testlib
from guild import pip_util
from guild import util
INDEX = "tests/uat/README.md"
try:
_workspace_env = os.environ["WORKSPACE"]
except KeyError:
WORKSPACE = None
GUILD_HOME = os.path.abspath(".")
else:
WORKSPACE = os.path.abspath(_workspace_env)
GUILD_HOME = os.path.join(WORKSPACE, ".guild")
TEMP = tempfile.gettempdir()
GUILD_PKG = os.path.abspath(guild.__pkgdir__)
REQUIREMENTS_PATH = os.path.join(GUILD_PKG, "requirements.txt")
EXAMPLES = os.path.abspath(os.getenv("EXAMPLES") or os.path.join(GUILD_PKG, "examples"))
def run():
    """Entry point: run every UAT test listed in the index file."""
    # UAT must never run against (or install into) the system Python.
    if not pip_util.running_under_virtualenv():
        sys.stderr.write("This command must be run in a virtual environment\n")
        sys.exit(1)
    test_names = _tests_for_index()
    _init_workspace()
    _mark_passed_tests()
    _run_tests(test_names)
def _tests_for_index():
    """Return test names parsed from the UAT index file.

    Test names are the targets of markdown links of the form
    ``(<name>.md)`` in INDEX.
    """
    index_path = os.path.join(os.path.dirname(__file__), INDEX)
    # Use a context manager so the file handle is closed deterministically;
    # the original `open(index_path).read()` relied on GC to close it.
    with open(index_path) as f:
        index = f.read()
    return re.findall(r"\((.+?)\.md\)", index)
def _init_workspace():
    """Create the workspace directories used to track UAT state."""
    print("Initializing workspace %s under %s" % (WORKSPACE, sys.executable))
    for subdir in ("passed-tests", ".guild"):
        util.ensure_dir(os.path.join(WORKSPACE, subdir))
def _mark_passed_tests():
    """Pre-mark tests named in the PASS env var (comma-separated) as passed."""
    passed = os.getenv("PASS")
    if not passed:
        return
    names = (part.strip() for part in passed.split(","))
    for name in names:
        _mark_test_passed(name)
def _run_tests(tests):
    """Run each named test file; skip user-requested and already-passed tests.

    Exits the process with status 1 on the first failing test file.
    """
    globs = _test_globals()
    skip_patterns = os.getenv("UAT_SKIP", "").split(",")
    with _UATEnv():
        for name in tests:
            print("Running %s:" % name)
            if _skip_test(name, skip_patterns):
                print(" skipped (user requested)")
                continue
            if _test_passed(name):
                print(" skipped (already passed)")
                continue
            filename = os.path.join("tests", "uat", name + ".md")
            failed, attempted = testlib.run_test_file(filename, globs)
            if failed:
                # Abort the whole run on the first failure.
                sys.exit(1)
            print(" %i test(s) passed" % attempted)
            _mark_test_passed(name)
def _test_globals():
    """Return the globals dict made available to each UAT test file."""
    globs = testlib.test_globals()
    globs.update(_global_vars())
    # Helpers that test files can call by name.
    globs["sample"] = _sample
    globs["example"] = _example_dir
    return globs
def _global_vars():
    """Return public module-level string values as a name -> str dict."""
    return {
        name: str(val)
        for name, val in globals().items()
        if not name.startswith("_") and isinstance(val, str)
    }
def _sample(path):
    """Return the absolute path of the named sample."""
    sample_path = testlib.sample(path)
    return os.path.abspath(sample_path)
def _UATEnv():
    """Return the environment context applied while UAT tests run."""
    env = {
        "COLUMNS": "999",
        "EXAMPLES": EXAMPLES,
        "GUILD_HOME": os.path.join(WORKSPACE, ".guild"),
        "GUILD_PKG": GUILD_PKG,
        "GUILD_PKGDIR": guild.__pkgdir__,
        "LANG": os.getenv("LANG", "en_US.UTF-8"),
        "REQUIREMENTS_PATH": REQUIREMENTS_PATH,
        "TEMP": TEMP,
    }
    return testlib.Env(env)
def _skip_test(name, skip_patterns):
    """Return True if `name` matches any glob pattern in `skip_patterns`.

    Idiom fix: replaced the manual loop-and-return with any(), which
    short-circuits identically.
    """
    return any(fnmatch.fnmatch(name, p) for p in skip_patterns)
def _example_dir(name):
    # Absolute path of the named example under EXAMPLES.
    return os.path.join(EXAMPLES, name)
def _test_passed(name):
    # A test passed previously iff its marker file exists.
    return os.path.exists(_test_passed_marker(name))
def _test_passed_marker(name):
    # Path of the per-test marker file under WORKSPACE/passed-tests.
    return os.path.join(WORKSPACE, "passed-tests", name)
def _mark_test_passed(name):
    # Create (or truncate) the marker file for `name`.
    open(_test_passed_marker(name), "w").close()
Ensure an absolute path is used for WORKSPACE
# Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import re
import sys
import tempfile
import guild
from guild import _test as testlib
from guild import pip_util
from guild import util
INDEX = "tests/uat/README.md"
try:
_workspace_env = os.environ["WORKSPACE"]
except KeyError:
WORKSPACE = None
GUILD_HOME = os.path.abspath(".")
else:
WORKSPACE = os.path.abspath(_workspace_env)
GUILD_HOME = os.path.join(WORKSPACE, ".guild")
TEMP = tempfile.gettempdir()
GUILD_PKG = os.path.abspath(guild.__pkgdir__)
REQUIREMENTS_PATH = os.path.join(GUILD_PKG, "requirements.txt")
EXAMPLES = os.path.abspath(os.getenv("EXAMPLES") or os.path.join(GUILD_PKG, "examples"))
def run():
    """Entry point: run every UAT test listed in the index file."""
    # UAT must never run against (or install into) the system Python.
    if not pip_util.running_under_virtualenv():
        sys.stderr.write("This command must be run in a virtual environment\n")
        sys.exit(1)
    test_names = _tests_for_index()
    _init_workspace()
    _mark_passed_tests()
    _run_tests(test_names)
def _tests_for_index():
    """Return test names parsed from the UAT index file.

    Test names are the targets of markdown links of the form
    ``(<name>.md)`` in INDEX.
    """
    index_path = os.path.join(os.path.dirname(__file__), INDEX)
    # Use a context manager so the file handle is closed deterministically;
    # the original `open(index_path).read()` relied on GC to close it.
    with open(index_path) as f:
        index = f.read()
    return re.findall(r"\((.+?)\.md\)", index)
def _init_workspace():
    """Create the workspace directories used to track UAT state."""
    print("Initializing workspace %s under %s" % (WORKSPACE, sys.executable))
    for subdir in ("passed-tests", ".guild"):
        util.ensure_dir(os.path.join(WORKSPACE, subdir))
def _mark_passed_tests():
    """Pre-mark tests named in the PASS env var (comma-separated) as passed."""
    passed = os.getenv("PASS")
    if not passed:
        return
    names = (part.strip() for part in passed.split(","))
    for name in names:
        _mark_test_passed(name)
def _run_tests(tests):
    """Run each named test file; skip user-requested and already-passed tests.

    Exits the process with status 1 on the first failing test file.
    """
    globs = _test_globals()
    skip_patterns = os.getenv("UAT_SKIP", "").split(",")
    with _UATEnv():
        for name in tests:
            print("Running %s:" % name)
            if _skip_test(name, skip_patterns):
                print(" skipped (user requested)")
                continue
            if _test_passed(name):
                print(" skipped (already passed)")
                continue
            filename = os.path.join("tests", "uat", name + ".md")
            failed, attempted = testlib.run_test_file(filename, globs)
            if failed:
                # Abort the whole run on the first failure.
                sys.exit(1)
            print(" %i test(s) passed" % attempted)
            _mark_test_passed(name)
def _test_globals():
    """Return the globals dict made available to each UAT test file."""
    globs = testlib.test_globals()
    globs.update(_global_vars())
    # Helpers that test files can call by name.
    globs["sample"] = _sample
    globs["example"] = _example_dir
    return globs
def _global_vars():
    """Return public module-level string values as a name -> str dict."""
    return {
        name: str(val)
        for name, val in globals().items()
        if not name.startswith("_") and isinstance(val, str)
    }
def _sample(path):
    """Return the absolute path of the named sample."""
    sample_path = testlib.sample(path)
    return os.path.abspath(sample_path)
def _UATEnv():
    """Return the environment context applied while UAT tests run."""
    env = {
        "COLUMNS": "999",
        "EXAMPLES": EXAMPLES,
        "GUILD_HOME": os.path.join(WORKSPACE, ".guild"),
        "GUILD_PKG": GUILD_PKG,
        "GUILD_PKGDIR": guild.__pkgdir__,
        "LANG": os.getenv("LANG", "en_US.UTF-8"),
        "REQUIREMENTS_PATH": REQUIREMENTS_PATH,
        "TEMP": TEMP,
        "WORKSPACE": WORKSPACE,
    }
    return testlib.Env(env)
def _skip_test(name, skip_patterns):
    """Return True if `name` matches any glob pattern in `skip_patterns`.

    Idiom fix: replaced the manual loop-and-return with any(), which
    short-circuits identically.
    """
    return any(fnmatch.fnmatch(name, p) for p in skip_patterns)
def _example_dir(name):
return os.path.join(EXAMPLES, name)
def _test_passed(name):
return os.path.exists(_test_passed_marker(name))
def _test_passed_marker(name):
return os.path.join(WORKSPACE, "passed-tests", name)
def _mark_test_passed(name):
    """Create (touch) the pass-marker file for *name*."""
    with open(_test_passed_marker(name), "w"):
        pass
|
'''
Created on 8 janv. 2015
@author: Remi Cattiau
'''
from nxdrive.logging_config import get_logger
from nxdrive.engine.workers import EngineWorker
from nxdrive.utils import current_milli_time
from nxdrive.client import NotFound
from time import sleep
from datetime import datetime
from nxdrive.client.common import COLLECTION_SYNC_ROOT_FACTORY_NAME
from nxdrive.client.remote_file_system_client import RemoteFileInfo
from nxdrive.engine.activity import Action
from nxdrive.client.common import safe_filename
from nxdrive.client.base_automation_client import Unauthorized
from nxdrive.utils import path_join
from httplib import BadStatusLine
from urllib2 import HTTPError, URLError
import os
log = get_logger(__name__)
from PyQt4.QtCore import pyqtSignal, pyqtSlot
from nxdrive.engine.workers import ThreadInterrupt
class RemoteWatcher(EngineWorker):
    """Engine worker that polls the remote Nuxeo server for changes.

    Periodically fetches the audit-log change summary and updates the
    local pair-state database (through the DAO) accordingly, falling
    back to full or partial recursive remote scans when the server
    reports too many changes or when no previous full scan exists.

    NOTE: this module is Python 2 code (httplib/urllib2/PyQt4 imports).
    """

    # Qt signals used to notify the rest of the engine / UI.
    initiate = pyqtSignal()             # emitted after the first successful pass
    updated = pyqtSignal()              # emitted after subsequent successful passes
    remoteScanFinished = pyqtSignal()   # emitted when a full remote scan completes
    changesFound = pyqtSignal(object)   # emitted with the number of remote changes
    remoteWatcherStopped = pyqtSignal()

    def __init__(self, engine, dao, delay):
        """Create the watcher.

        :param engine: owning engine; provides the remote/local clients
        :param dao: state DAO used to persist sync state and config
        :param delay: polling interval (seconds) between change checks
        """
        super(RemoteWatcher, self).__init__(engine, dao)
        self.unhandle_fs_event = False
        self.local_full_scan = dict()
        self._full_scan_mode = False
        # Resume incremental polling from the last persisted positions.
        self._last_sync_date = self._dao.get_config('remote_last_sync_date')
        self._last_event_log_id = self._dao.get_config('remote_last_event_log_id')
        self._last_root_definitions = self._dao.get_config('remote_last_root_definitions')
        self._last_remote_full_scan = self._dao.get_config('remote_last_full_scan')
        self._client = None
        # TO_REVIEW Can be removed
        try:
            self._client = engine.get_remote_client()
        except Unauthorized:
            log.error('Got Unauthorized exception while trying to get remote client, setting invalid credentials',
                      exc_info=True)
            self._engine.set_invalid_credentials()
        except (URLError, HTTPError):
            # BUGFIX: was "except URLError, HTTPError:", which in Python 2
            # catches only URLError and rebinds the caught instance to the
            # name HTTPError (and is a SyntaxError in Python 3).  A tuple
            # catches both exception types as intended.
            self._client = None
        except Exception as e:
            log.exception(e)
        self._local_client = engine.get_local_client()
        self._metrics = dict()
        self._metrics['last_remote_scan_time'] = -1
        self._metrics['last_remote_update_time'] = -1
        self._metrics['empty_polls'] = 0
        self.server_interval = delay
        self._current_interval = 0

    def get_metrics(self):
        """Return the base worker metrics merged with watcher counters."""
        metrics = super(RemoteWatcher, self).get_metrics()
        metrics['last_remote_sync_date'] = self._last_sync_date
        metrics['last_event_log_id'] = self._last_event_log_id
        metrics['last_root_definitions'] = self._last_root_definitions
        metrics['last_remote_full_scan'] = self._last_remote_full_scan
        metrics['next_polling'] = self._current_interval
        # Python 2 idiom: dict.items() returns lists, so '+' concatenates.
        return dict(metrics.items() + self._metrics.items())

    @pyqtSlot()
    def invalidate_client_cache(self):
        """Drop the cached remote client; it is re-created on next poll."""
        self._client = None

    def _execute(self):
        """Main worker loop: handle changes every `server_interval` seconds."""
        first_pass = True
        try:
            while (1):
                self._interact()
                if self._current_interval == 0:
                    self._current_interval = self.server_interval
                    if self._handle_changes(first_pass):
                        first_pass = False
                else:
                    self._current_interval = self._current_interval - 1
                sleep(1)
        except ThreadInterrupt:
            self.remoteWatcherStopped.emit()
            raise

    def _scan_remote(self, from_state=None):
        """Recursively scan the bound remote folder looking for updates"""
        start_ms = current_milli_time()
        try:
            if from_state is None:
                from_state = self._dao.get_state_from_local('/')
            self._client = self._engine.get_remote_client()
            remote_info = self._client.get_info(from_state.remote_ref)
            self._dao.update_remote_state(from_state, remote_info, from_state.remote_parent_path)
        except NotFound:
            log.debug("Marking %r as remotely deleted.", from_state)
            # Should unbind ?
            # from_state.update_remote(None)
            self._dao.commit()
            self._metrics['last_remote_scan_time'] = current_milli_time() - start_ms
            return
        self._get_changes()
        self._save_changes_state()
        # recursive update
        self._scan_remote_recursive(from_state, remote_info)
        self._last_remote_full_scan = datetime.utcnow()
        self._dao.update_config('remote_last_full_scan', self._last_remote_full_scan)
        self._dao.clean_scanned()
        self._dao.commit()
        self._metrics['last_remote_scan_time'] = current_milli_time() - start_ms
        log.debug("Remote scan finished in %dms", self._metrics['last_remote_scan_time'])
        self.remoteScanFinished.emit()

    @pyqtSlot(str)
    def scan_pair(self, remote_path):
        """Queue *remote_path* for scanning on the next polling pass."""
        self._dao.add_path_to_scan(str(remote_path))

    def _scan_pair(self, remote_path):
        """Scan a single remote path, creating its pair state if needed."""
        if remote_path is None:
            return
        remote_path = str(remote_path)
        if remote_path[-1:] == '/':
            remote_path = remote_path[0:-1]
        remote_ref = os.path.basename(remote_path)
        parent_path = os.path.dirname(remote_path)
        if parent_path == '/':
            parent_path = ''
        # If pair is present already
        try:
            child_info = self._client.get_info(remote_ref)
        except NotFound:
            # The folder has been deleted
            return
        doc_pair = self._dao.get_state_from_remote_with_path(remote_ref, parent_path)
        if doc_pair is not None:
            log.debug("Remote scan_pair: %s", doc_pair.local_path)
            self._scan_remote_recursive(doc_pair, child_info)
            log.debug("Remote scan_pair ended: %s", doc_pair.local_path)
            return
        log.debug("parent_path: '%s'\t'%s'\t'%s'", parent_path, os.path.basename(parent_path),
                  os.path.dirname(parent_path))
        parent_pair = self._dao.get_state_from_remote_with_path(os.path.basename(parent_path),
                                                                os.path.dirname(parent_path))
        log.debug("scan_pair: parent_pair: %r", parent_pair)
        if parent_pair is None:
            return
        local_path = path_join(parent_pair.local_path, safe_filename(child_info.name))
        remote_parent_path = parent_pair.remote_parent_path + '/' + child_info.uid
        row_id = self._dao.insert_remote_state(child_info, remote_parent_path, local_path, parent_pair.local_path)
        doc_pair = self._dao.get_state_from_id(row_id, from_write=True)
        if child_info.folderish:
            log.debug("Remote scan_pair: %s", doc_pair.local_path)
            self._scan_remote_recursive(doc_pair, child_info)
            log.debug("Remote scan_pair ended: %s", doc_pair.local_path)

    def _check_modified(self, child_pair, child_info):
        """Return True if remote permissions or digest differ from the pair."""
        if child_pair.remote_can_delete != child_info.can_delete:
            return True
        if child_pair.remote_can_rename != child_info.can_rename:
            return True
        if child_pair.remote_can_update != child_info.can_update:
            return True
        if child_pair.remote_can_create_child != child_info.can_create_child:
            return True
        if child_pair.remote_digest != child_info.digest:
            return True
        return False

    def _scan_remote_recursive(self, doc_pair, remote_info,
                               force_recursion=True, mark_unknown=True):
        """Recursively scan the bound remote folder looking for updates

        If force_recursion is True, recursion is done even on
        non newly created children.
        """
        if not remote_info.folderish:
            # No children to align, early stop.
            return
        # Check if synchronization thread was suspended
        self._interact()
        remote_parent_path = doc_pair.remote_parent_path + '/' + remote_info.uid
        if self._dao.is_path_scanned(remote_parent_path):
            log.trace("Skip already remote scanned: %s", doc_pair.local_path)
            return
        if doc_pair.local_path is not None:
            self._action = Action("Remote scanning : " + doc_pair.local_path)
            log.debug("Remote scanning: %s", doc_pair.local_path)
        if remote_info is None:
            raise ValueError("Cannot bind %r to missing remote info" %
                             doc_pair)
        # If a folderish pair state has been remotely updated,
        # recursively unmark its local descendants as 'unsynchronized'
        # by marking them as 'unknown'.
        # This is needed to synchronize unsynchronized items back.
        if mark_unknown:
            # TODO Should be DAO method
            pass
        # Detect recently deleted children
        children_info = self._client.get_children_info(remote_info.uid)
        db_children = self._dao.get_remote_children(doc_pair.remote_ref)
        children = dict()
        to_scan = []
        for child in db_children:
            children[child.remote_ref] = child
        for child_info in children_info:
            log.trace('Scanning remote child: %r', child_info)
            child_pair = None
            new_pair = False
            if child_info.uid in children:
                child_pair = children.pop(child_info.uid)
                if self._check_modified(child_pair, child_info):
                    child_pair.remote_state = 'modified'
                self._dao.update_remote_state(child_pair, child_info, remote_parent_path)
            else:
                child_pair, new_pair = self._find_remote_child_match_or_create(doc_pair, child_info)
            if ((new_pair or force_recursion) and remote_info.folderish):
                to_scan.append((child_pair, child_info))
        # Delete remaining
        for deleted in children.values():
            # TODO Should be DAO
            # self._dao.mark_descendants_remotely_deleted(deleted)
            self._dao.delete_remote_state(deleted)
        for folder in to_scan:
            # TODO Optimize by multithreading this too ?
            self._scan_remote_recursive(folder[0], folder[1],
                                        mark_unknown=False, force_recursion=force_recursion)
        self._dao.add_path_scanned(remote_parent_path)

    def _find_remote_child_match_or_create(self, parent_pair, child_info):
        """Match *child_info* to an existing local pair or create a new one.

        Returns a ``(child_pair, new_pair)`` tuple where *new_pair* is
        True when a fresh pair-state row was inserted.
        """
        local_path = path_join(parent_pair.local_path, safe_filename(child_info.name))
        remote_parent_path = parent_pair.remote_parent_path + '/' + parent_pair.remote_ref
        # Try to get the local definition if not linked
        child_pair = self._dao.get_state_from_local(local_path)
        if child_pair is not None:
            # Should compare to xattr remote uid
            if child_pair.remote_ref is not None:
                child_pair = None
            else:
                self._dao.update_remote_state(child_pair, child_info, remote_parent_path)
                if (child_pair.folderish == child_info.folderish and child_pair.local_digest == child_info.digest):
                    # Use version+1 as we just update the remote info
                    self._dao.synchronize_state(child_pair, version=child_pair.version + 1)
                # Push the remote_Id
                self._local_client.set_remote_id(local_path, child_info.uid)
                if child_pair.folderish:
                    self._dao.queue_children(child_pair)
                child_pair = self._dao.get_state_from_id(child_pair.id, from_write=True)
                return child_pair, False
        row_id = self._dao.insert_remote_state(child_info, remote_parent_path, local_path, parent_pair.local_path)
        child_pair = self._dao.get_state_from_id(row_id, from_write=True)
        return child_pair, True

    def _partial_full_scan(self, path):
        """Resume an interrupted full scan starting at *path*."""
        log.debug("Continue full scan of %s", path)
        if path == '/':
            self._scan_remote()
        else:
            self._scan_pair(path)
        self._dao.delete_path_to_scan(path)
        self._dao.delete_config('remote_need_full_scan')
        self._dao.clean_scanned()

    def _check_offline(self):
        """Refresh the remote client and maintain the engine offline flag.

        Returns the usable remote client, or None when the server is
        unreachable (the engine is then flagged offline).
        """
        try:
            self._client = self._engine.get_remote_client()
        except HTTPError as e:
            if e.code == 401 or e.code == 403:
                if not self._engine.has_invalid_credentials():
                    self._engine.set_invalid_credentials()
        except Unauthorized:
            log.debug("Unauthorized caugt")
            if not self._engine.has_invalid_credentials():
                self._engine.set_invalid_credentials()
        except:
            # NOTE(review): bare except also swallows ThreadInterrupt here,
            # deferring shutdown to the next _interact() call — confirm
            # this is intended before narrowing it.
            pass
        if self._client is None:
            if not self._engine.is_offline():
                self._engine.set_offline()
            return None
        if self._engine.is_offline():
            try:
                # Try to get the api
                self._client.fetch_api()
                # if retrieved
                self._engine.set_offline(False)
                return self._client
            except ThreadInterrupt as e:
                raise e
            except:
                return None
        return self._client

    def _handle_changes(self, first_pass=False):
        """Run one polling pass; return True when a pass fully succeeded."""
        log.debug("Handle remote changes, first_pass=%r", first_pass)
        self._client = self._check_offline()
        if self._client is None:
            return False
        try:
            if self._last_remote_full_scan is None:
                log.debug("Remote full scan")
                self._action = Action("Remote scanning")
                self._scan_remote()
                self._end_action()
                if first_pass:
                    self.initiate.emit()
                return True
            full_scan = self._dao.get_config('remote_need_full_scan', None)
            if full_scan is not None:
                self._partial_full_scan(full_scan)
                return
            else:
                paths = self._dao.get_paths_to_scan()
                if len(paths) > 0:
                    remote_ref = paths[0].path
                    self._dao.update_config('remote_need_full_scan', remote_ref)
                    self._partial_full_scan(remote_ref)
                    return
            self._action = Action("Handle remote changes")
            self._update_remote_states()
            self._save_changes_state()
            if first_pass:
                self.initiate.emit()
            else:
                self.updated.emit()
            return True
        except HTTPError as e:
            if e.code == 401 or e.code == 403:
                log.error('Got 401 HTTPError while trying to handle remote changes, setting invalid credentials',
                          exc_info=True)
                self._engine.set_invalid_credentials()
            else:
                log.exception(e)
                self._engine.set_offline()
        except (BadStatusLine, URLError) as e:
            # Pause the rest of the engine
            self._engine.set_offline()
        except ThreadInterrupt as e:
            raise e
        except Exception as e:
            log.exception(e)
        finally:
            self._end_action()
        return False

    def _save_changes_state(self):
        """Persist the incremental polling positions to the config store."""
        self._dao.update_config('remote_last_sync_date', self._last_sync_date)
        self._dao.update_config('remote_last_event_log_id', self._last_event_log_id)
        self._dao.update_config('remote_last_root_definitions', self._last_root_definitions)

    def _get_changes(self):
        """Fetch incremental change summary from the server"""
        summary = self._client.get_changes(self._last_root_definitions, self._last_event_log_id, self._last_sync_date)
        self._last_root_definitions = summary['activeSynchronizationRootDefinitions']
        self._last_sync_date = summary['syncDate']
        if self._client.is_event_log_id_available():
            # If available, read 'upperBound' key as last event log id
            # according to the new implementation of the audit change finder,
            # see https://jira.nuxeo.com/browse/NXP-14826.
            self._last_event_log_id = summary['upperBound']
        else:
            self._last_event_log_id = None
        return summary

    def _force_scan_recursive(self, doc_pair, remote_info, remote_path=None, force_recursion=True):
        """Scan *doc_pair* recursively, recording a resumable scan marker."""
        if remote_path is None:
            remote_path = remote_info.path
        self._dao.update_config('remote_need_full_scan', remote_path)
        self._scan_remote_recursive(doc_pair, remote_info, force_recursion)
        self._dao.delete_config('remote_need_full_scan')
        self._dao.clean_scanned()

    def _update_remote_states(self):
        """Incrementally update the state of documents from a change summary"""
        summary = self._get_changes()
        if summary['hasTooManyChanges']:
            log.debug("Forced full scan by server")
            remote_path = '/'
            self._dao.add_path_to_scan(remote_path)
            self._dao.update_config('remote_need_full_scan', remote_path)
            return
        # Fetch all events and consider the most recent first
        sorted_changes = sorted(summary['fileSystemChanges'],
                                key=lambda x: x['eventDate'], reverse=True)
        n_changes = len(sorted_changes)
        if n_changes > 0:
            log.debug("%d remote changes detected", n_changes)
            self._metrics['last_changes'] = n_changes
            self._metrics['empty_polls'] = 0
            self.changesFound.emit(n_changes)
        else:
            self._metrics['empty_polls'] = self._metrics['empty_polls'] + 1
        # Scan events and update the related pair states
        refreshed = set()
        for change in sorted_changes:
            # Check if synchronization thread was suspended
            # TODO In case of pause or stop: save the last event id
            self._interact()
            eventId = change.get('eventId')
            remote_ref = change['fileSystemItemId']
            if remote_ref in refreshed:
                # A more recent version was already processed
                continue
            fs_item = change.get('fileSystemItem')
            new_info = self._client.file_to_info(fs_item) if fs_item else None
            # Possibly fetch multiple doc pairs as the same doc can be synchronized at 2 places,
            # typically if under a sync root and locally edited.
            # See https://jira.nuxeo.com/browse/NXDRIVE-125
            doc_pairs = self._dao.get_states_from_remote(remote_ref)
            if not doc_pairs:
                # Relax constraint on factory name in FileSystemItem id to
                # match 'deleted' or 'securityUpdated' events.
                # See https://jira.nuxeo.com/browse/NXDRIVE-167
                doc_pairs = self._dao.get_states_from_partial_remote(remote_ref)
            updated = False
            if doc_pairs:
                for doc_pair in doc_pairs:
                    doc_pair_repr = doc_pair.local_path if doc_pair.local_path is not None else doc_pair.remote_name
                    # This change has no fileSystemItem, it can be either
                    # a "deleted" event or a "securityUpdated" event
                    if fs_item is None:
                        if eventId == 'deleted':
                            log.debug("Marking doc_pair '%s' as deleted",
                                      doc_pair_repr)
                            self._dao.delete_remote_state(doc_pair)
                        elif eventId == 'securityUpdated':
                            log.debug("Security has been updated for"
                                      " doc_pair '%s' denying Read access,"
                                      " marking it as deleted",
                                      doc_pair_repr)
                            self._dao.delete_remote_state(doc_pair)
                        else:
                            log.debug("Unknow event: '%s'", eventId)
                    else:
                        remote_parent_factory = doc_pair.remote_parent_ref.split('#', 1)[0]
                        new_info_parent_factory = new_info.parent_uid.split('#', 1)[0]
                        # Specific cases of a move on a locally edited doc
                        if (eventId == 'documentMoved' and remote_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME):
                            # If moved from a non sync root to a sync root, break to creation case
                            # (updated is False).
                            # If moved from a sync root to a non sync root, break to noop
                            # (updated is True).
                            break
                        elif (eventId == 'documentMoved'
                              and new_info_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME):
                            # If moved from a sync root to a non sync root, delete from local sync root
                            log.debug("Marking doc_pair '%s' as deleted", doc_pair_repr)
                            self._dao.delete_remote_state(doc_pair)
                        else:
                            # Make new_info consistent with actual doc pair parent path for a doc member of a
                            # collection (typically the Locally Edited one) that is also under a sync root.
                            # Indeed, in this case, when adapted as a FileSystemItem, its parent path will be the one
                            # of the sync root because it takes precedence over the collection,
                            # see AbstractDocumentBackedFileSystemItem constructor.
                            consistent_new_info = new_info
                            if remote_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME:
                                new_info_parent_uid = doc_pair.remote_parent_ref
                                new_info_path = (doc_pair.remote_parent_path + '/' + remote_ref)
                                consistent_new_info = RemoteFileInfo(new_info.name, new_info.uid, new_info_parent_uid,
                                                                     new_info_path, new_info.folderish,
                                                                     new_info.last_modification_time,
                                                                     new_info.last_contributor, new_info.digest,
                                                                     new_info.digest_algorithm, new_info.download_url,
                                                                     new_info.can_rename, new_info.can_delete,
                                                                     new_info.can_update, new_info.can_create_child)
                            # Perform a regular document update on a document
                            # that has been updated, renamed or moved
                            eventId = change.get('eventId')
                            log.debug("Refreshing remote state info"
                                      " for doc_pair '%s' (force_recursion:%d)", doc_pair_repr,
                                      (eventId == "securityUpdated"))
                            remote_parent_path = doc_pair.remote_parent_path
                            # if (new_info.digest != doc_pair.local_digest or
                            #     safe_filename(new_info.name) != doc_pair.local_name
                            #     or new_info.parent_uid != doc_pair.remote_parent_ref):
                            if doc_pair.remote_state != 'created':
                                doc_pair.remote_state = 'modified'
                                remote_parent_path = os.path.dirname(new_info.path)
                            else:
                                remote_parent_path = os.path.dirname(new_info.path)
                                # TODO Add modify local_path and local_parent_path if needed
                            self._dao.update_remote_state(doc_pair, new_info, remote_parent_path)
                            self._force_scan_recursive(doc_pair, consistent_new_info, remote_path=new_info.path,
                                                       force_recursion=(eventId == "securityUpdated"))
                    updated = True
                    refreshed.add(remote_ref)
            if new_info and not updated:
                # Handle new document creations
                created = False
                parent_pairs = self._dao.get_states_from_remote(new_info.parent_uid)
                for parent_pair in parent_pairs:
                    child_pair, new_pair = (self._find_remote_child_match_or_create(parent_pair, new_info))
                    if new_pair:
                        log.debug("Marked doc_pair '%s' as remote creation",
                                  child_pair.remote_name)
                    if child_pair.folderish and new_pair:
                        log.debug('Remote recursive scan of the content of %s',
                                  child_pair.remote_name)
                        remote_path = child_pair.remote_parent_path + "/" + new_info.uid
                        self._force_scan_recursive(child_pair, new_info, remote_path)
                    created = True
                    refreshed.add(remote_ref)
                    break
                if not created:
                    log.debug("Could not match changed document to a bound local folder: %r", new_info)
NXDRIVE-265: Don't load the remote client on __init__
'''
Created on 8 janv. 2015
@author: Remi Cattiau
'''
from nxdrive.logging_config import get_logger
from nxdrive.engine.workers import EngineWorker
from nxdrive.utils import current_milli_time
from nxdrive.client import NotFound
from time import sleep
from datetime import datetime
from nxdrive.client.common import COLLECTION_SYNC_ROOT_FACTORY_NAME
from nxdrive.client.remote_file_system_client import RemoteFileInfo
from nxdrive.engine.activity import Action
from nxdrive.client.common import safe_filename
from nxdrive.client.base_automation_client import Unauthorized
from nxdrive.utils import path_join
from httplib import BadStatusLine
from urllib2 import HTTPError, URLError
import os
log = get_logger(__name__)
from PyQt4.QtCore import pyqtSignal, pyqtSlot
from nxdrive.engine.workers import ThreadInterrupt
class RemoteWatcher(EngineWorker):
initiate = pyqtSignal()
updated = pyqtSignal()
remoteScanFinished = pyqtSignal()
changesFound = pyqtSignal(object)
remoteWatcherStopped = pyqtSignal()
def __init__(self, engine, dao, delay):
super(RemoteWatcher, self).__init__(engine, dao)
self.unhandle_fs_event = False
self.local_full_scan = dict()
self._full_scan_mode = False
self._last_sync_date = self._dao.get_config('remote_last_sync_date')
self._last_event_log_id = self._dao.get_config('remote_last_event_log_id')
self._last_root_definitions = self._dao.get_config('remote_last_root_definitions')
self._last_remote_full_scan = self._dao.get_config('remote_last_full_scan')
self._client = None
self._local_client = engine.get_local_client()
self._metrics = dict()
self._metrics['last_remote_scan_time'] = -1
self._metrics['last_remote_update_time'] = -1
self._metrics['empty_polls'] = 0
self.server_interval = delay
self._current_interval = 0
def get_metrics(self):
metrics = super(RemoteWatcher, self).get_metrics()
metrics['last_remote_sync_date'] = self._last_sync_date
metrics['last_event_log_id'] = self._last_event_log_id
metrics['last_root_definitions'] = self._last_root_definitions
metrics['last_remote_full_scan'] = self._last_remote_full_scan
metrics['next_polling'] = self._current_interval
return dict(metrics.items() + self._metrics.items())
@pyqtSlot()
def invalidate_client_cache(self):
self._client = None
def _execute(self):
first_pass = True
try:
while (1):
self._interact()
if self._current_interval == 0:
self._current_interval = self.server_interval
if self._handle_changes(first_pass):
first_pass = False
else:
self._current_interval = self._current_interval - 1
sleep(1)
except ThreadInterrupt:
self.remoteWatcherStopped.emit()
raise
def _scan_remote(self, from_state=None):
"""Recursively scan the bound remote folder looking for updates"""
start_ms = current_milli_time()
try:
if from_state is None:
from_state = self._dao.get_state_from_local('/')
self._client = self._engine.get_remote_client()
remote_info = self._client.get_info(from_state.remote_ref)
self._dao.update_remote_state(from_state, remote_info, from_state.remote_parent_path)
except NotFound:
log.debug("Marking %r as remotely deleted.", from_state)
# Should unbind ?
# from_state.update_remote(None)
self._dao.commit()
self._metrics['last_remote_scan_time'] = current_milli_time() - start_ms
return
self._get_changes()
self._save_changes_state()
# recursive update
self._scan_remote_recursive(from_state, remote_info)
self._last_remote_full_scan = datetime.utcnow()
self._dao.update_config('remote_last_full_scan', self._last_remote_full_scan)
self._dao.clean_scanned()
self._dao.commit()
self._metrics['last_remote_scan_time'] = current_milli_time() - start_ms
log.debug("Remote scan finished in %dms", self._metrics['last_remote_scan_time'])
self.remoteScanFinished.emit()
@pyqtSlot(str)
def scan_pair(self, remote_path):
self._dao.add_path_to_scan(str(remote_path))
def _scan_pair(self, remote_path):
if remote_path is None:
return
remote_path = str(remote_path)
if remote_path[-1:] == '/':
remote_path = remote_path[0:-1]
remote_ref = os.path.basename(remote_path)
parent_path = os.path.dirname(remote_path)
if parent_path == '/':
parent_path = ''
# If pair is present already
try:
child_info = self._client.get_info(remote_ref)
except NotFound:
# The folder has been deleted
return
doc_pair = self._dao.get_state_from_remote_with_path(remote_ref, parent_path)
if doc_pair is not None:
log.debug("Remote scan_pair: %s", doc_pair.local_path)
self._scan_remote_recursive(doc_pair, child_info)
log.debug("Remote scan_pair ended: %s", doc_pair.local_path)
return
log.debug("parent_path: '%s'\t'%s'\t'%s'", parent_path, os.path.basename(parent_path),
os.path.dirname(parent_path))
parent_pair = self._dao.get_state_from_remote_with_path(os.path.basename(parent_path),
os.path.dirname(parent_path))
log.debug("scan_pair: parent_pair: %r", parent_pair)
if parent_pair is None:
return
local_path = path_join(parent_pair.local_path, safe_filename(child_info.name))
remote_parent_path = parent_pair.remote_parent_path + '/' + child_info.uid
row_id = self._dao.insert_remote_state(child_info, remote_parent_path, local_path, parent_pair.local_path)
doc_pair = self._dao.get_state_from_id(row_id, from_write=True)
if child_info.folderish:
log.debug("Remote scan_pair: %s", doc_pair.local_path)
self._scan_remote_recursive(doc_pair, child_info)
log.debug("Remote scan_pair ended: %s", doc_pair.local_path)
def _check_modified(self, child_pair, child_info):
if child_pair.remote_can_delete != child_info.can_delete:
return True
if child_pair.remote_can_rename != child_info.can_rename:
return True
if child_pair.remote_can_update != child_info.can_update:
return True
if child_pair.remote_can_create_child != child_info.can_create_child:
return True
if child_pair.remote_digest != child_info.digest:
return True
return False
def _scan_remote_recursive(self, doc_pair, remote_info,
force_recursion=True, mark_unknown=True):
"""Recursively scan the bound remote folder looking for updates
If force_recursion is True, recursion is done even on
non newly created children.
"""
if not remote_info.folderish:
# No children to align, early stop.
return
# Check if synchronization thread was suspended
self._interact()
remote_parent_path = doc_pair.remote_parent_path + '/' + remote_info.uid
if self._dao.is_path_scanned(remote_parent_path):
log.trace("Skip already remote scanned: %s", doc_pair.local_path)
return
if doc_pair.local_path is not None:
self._action = Action("Remote scanning : " + doc_pair.local_path)
log.debug("Remote scanning: %s", doc_pair.local_path)
if remote_info is None:
raise ValueError("Cannot bind %r to missing remote info" %
doc_pair)
# If a folderish pair state has been remotely updated,
# recursively unmark its local descendants as 'unsynchronized'
# by marking them as 'unknown'.
# This is needed to synchronize unsynchronized items back.
if mark_unknown:
# TODO Should be DAO method
pass
# Detect recently deleted children
children_info = self._client.get_children_info(remote_info.uid)
db_children = self._dao.get_remote_children(doc_pair.remote_ref)
children = dict()
to_scan = []
for child in db_children:
children[child.remote_ref] = child
for child_info in children_info:
log.trace('Scanning remote child: %r', child_info)
child_pair = None
new_pair = False
if child_info.uid in children:
child_pair = children.pop(child_info.uid)
if self._check_modified(child_pair, child_info):
child_pair.remote_state = 'modified'
self._dao.update_remote_state(child_pair, child_info, remote_parent_path)
else:
child_pair, new_pair = self._find_remote_child_match_or_create(doc_pair, child_info)
if ((new_pair or force_recursion) and remote_info.folderish):
to_scan.append((child_pair, child_info))
# Delete remaining
for deleted in children.values():
# TODO Should be DAO
# self._dao.mark_descendants_remotely_deleted(deleted)
self._dao.delete_remote_state(deleted)
for folder in to_scan:
# TODO Optimize by multithreading this too ?
self._scan_remote_recursive(folder[0], folder[1],
mark_unknown=False, force_recursion=force_recursion)
self._dao.add_path_scanned(remote_parent_path)
def _find_remote_child_match_or_create(self, parent_pair, child_info):
local_path = path_join(parent_pair.local_path, safe_filename(child_info.name))
remote_parent_path = parent_pair.remote_parent_path + '/' + parent_pair.remote_ref
# Try to get the local definition if not linked
child_pair = self._dao.get_state_from_local(local_path)
if child_pair is not None:
# Should compare to xattr remote uid
if child_pair.remote_ref is not None:
child_pair = None
else:
self._dao.update_remote_state(child_pair, child_info, remote_parent_path)
if (child_pair.folderish == child_info.folderish and child_pair.local_digest == child_info.digest):
# Use version+1 as we just update the remote info
self._dao.synchronize_state(child_pair, version=child_pair.version + 1)
# Push the remote_Id
self._local_client.set_remote_id(local_path, child_info.uid)
if child_pair.folderish:
self._dao.queue_children(child_pair)
child_pair = self._dao.get_state_from_id(child_pair.id, from_write=True)
return child_pair, False
row_id = self._dao.insert_remote_state(child_info, remote_parent_path, local_path, parent_pair.local_path)
child_pair = self._dao.get_state_from_id(row_id, from_write=True)
return child_pair, True
def _partial_full_scan(self, path):
log.debug("Continue full scan of %s", path)
if path == '/':
self._scan_remote()
else:
self._scan_pair(path)
self._dao.delete_path_to_scan(path)
self._dao.delete_config('remote_need_full_scan')
self._dao.clean_scanned()
def _check_offline(self):
try:
self._client = self._engine.get_remote_client()
except HTTPError as e:
if e.code == 401 or e.code == 403:
if not self._engine.has_invalid_credentials():
self._engine.set_invalid_credentials()
except Unauthorized:
log.debug("Unauthorized caugt")
if not self._engine.has_invalid_credentials():
self._engine.set_invalid_credentials()
except:
pass
if self._client is None:
if not self._engine.is_offline():
self._engine.set_offline()
return None
if self._engine.is_offline():
try:
# Try to get the api
self._client.fetch_api()
# if retrieved
self._engine.set_offline(False)
return self._client
except ThreadInterrupt as e:
raise e
except:
return None
return self._client
def _handle_changes(self, first_pass=False):
log.debug("Handle remote changes, first_pass=%r", first_pass)
self._client = self._check_offline()
if self._client is None:
return False
try:
if self._last_remote_full_scan is None:
log.debug("Remote full scan")
self._action = Action("Remote scanning")
self._scan_remote()
self._end_action()
if first_pass:
self.initiate.emit()
return True
full_scan = self._dao.get_config('remote_need_full_scan', None)
if full_scan is not None:
self._partial_full_scan(full_scan)
return
else:
paths = self._dao.get_paths_to_scan()
if len(paths) > 0:
remote_ref = paths[0].path
self._dao.update_config('remote_need_full_scan', remote_ref)
self._partial_full_scan(remote_ref)
return
self._action = Action("Handle remote changes")
self._update_remote_states()
self._save_changes_state()
if first_pass:
self.initiate.emit()
else:
self.updated.emit()
return True
except HTTPError as e:
if e.code == 401 or e.code == 403:
log.error('Got 401 HTTPError while trying to handle remote changes, setting invalid credentials',
exc_info=True)
self._engine.set_invalid_credentials()
else:
log.exception(e)
self._engine.set_offline()
except (BadStatusLine, URLError) as e:
# Pause the rest of the engine
self._engine.set_offline()
except ThreadInterrupt as e:
raise e
except Exception as e:
log.exception(e)
finally:
self._end_action()
return False
def _save_changes_state(self):
self._dao.update_config('remote_last_sync_date', self._last_sync_date)
self._dao.update_config('remote_last_event_log_id', self._last_event_log_id)
self._dao.update_config('remote_last_root_definitions', self._last_root_definitions)
def _get_changes(self):
"""Fetch incremental change summary from the server"""
summary = self._client.get_changes(self._last_root_definitions, self._last_event_log_id, self._last_sync_date)
self._last_root_definitions = summary['activeSynchronizationRootDefinitions']
self._last_sync_date = summary['syncDate']
if self._client.is_event_log_id_available():
# If available, read 'upperBound' key as last event log id
# according to the new implementation of the audit change finder,
# see https://jira.nuxeo.com/browse/NXP-14826.
self._last_event_log_id = summary['upperBound']
else:
self._last_event_log_id = None
return summary
def _force_scan_recursive(self, doc_pair, remote_info, remote_path=None, force_recursion=True):
if remote_path is None:
remote_path = remote_info.path
self._dao.update_config('remote_need_full_scan', remote_path)
self._scan_remote_recursive(doc_pair, remote_info, force_recursion)
self._dao.delete_config('remote_need_full_scan')
self._dao.clean_scanned()
def _update_remote_states(self):
    """Incrementally update the state of documents from a change summary.

    Fetches the change summary via _get_changes(), then replays the events
    most-recent-first against the local pair states: deletions, security
    updates, moves, regular updates, and finally creations for changes
    that matched no existing pair.
    """
    summary = self._get_changes()
    if summary['hasTooManyChanges']:
        # The server gave up computing a precise diff: schedule a full
        # remote scan from the root instead of replaying events.
        log.debug("Forced full scan by server")
        remote_path = '/'
        self._dao.add_path_to_scan(remote_path)
        self._dao.update_config('remote_need_full_scan', remote_path)
        return
    # Fetch all events and consider the most recent first
    sorted_changes = sorted(summary['fileSystemChanges'],
                            key=lambda x: x['eventDate'], reverse=True)
    n_changes = len(sorted_changes)
    if n_changes > 0:
        log.debug("%d remote changes detected", n_changes)
        self._metrics['last_changes'] = n_changes
        self._metrics['empty_polls'] = 0
        self.changesFound.emit(n_changes)
    else:
        self._metrics['empty_polls'] = self._metrics['empty_polls'] + 1
    # Scan events and update the related pair states
    refreshed = set()
    for change in sorted_changes:
        # Check if synchronization thread was suspended
        # TODO In case of pause or stop: save the last event id
        self._interact()
        eventId = change.get('eventId')
        remote_ref = change['fileSystemItemId']
        if remote_ref in refreshed:
            # A more recent version was already processed
            continue
        fs_item = change.get('fileSystemItem')
        new_info = self._client.file_to_info(fs_item) if fs_item else None
        # Possibly fetch multiple doc pairs as the same doc can be synchronized at 2 places,
        # typically if under a sync root and locally edited.
        # See https://jira.nuxeo.com/browse/NXDRIVE-125
        doc_pairs = self._dao.get_states_from_remote(remote_ref)
        if not doc_pairs:
            # Relax constraint on factory name in FileSystemItem id to
            # match 'deleted' or 'securityUpdated' events.
            # See https://jira.nuxeo.com/browse/NXDRIVE-167
            doc_pairs = self._dao.get_states_from_partial_remote(remote_ref)
        updated = False
        if doc_pairs:
            for doc_pair in doc_pairs:
                doc_pair_repr = doc_pair.local_path if doc_pair.local_path is not None else doc_pair.remote_name
                # This change has no fileSystemItem, it can be either
                # a "deleted" event or a "securityUpdated" event
                if fs_item is None:
                    if eventId == 'deleted':
                        log.debug("Marking doc_pair '%s' as deleted",
                                  doc_pair_repr)
                        self._dao.delete_remote_state(doc_pair)
                    elif eventId == 'securityUpdated':
                        log.debug("Security has been updated for"
                                  " doc_pair '%s' denying Read access,"
                                  " marking it as deleted",
                                  doc_pair_repr)
                        self._dao.delete_remote_state(doc_pair)
                    else:
                        log.debug("Unknow event: '%s'", eventId)
                else:
                    # Factory prefix of the FileSystemItem ids ("factory#uid")
                    # tells whether the doc sits under a collection sync root.
                    remote_parent_factory = doc_pair.remote_parent_ref.split('#', 1)[0]
                    new_info_parent_factory = new_info.parent_uid.split('#', 1)[0]
                    # Specific cases of a move on a locally edited doc
                    if (eventId == 'documentMoved' and remote_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME):
                        # If moved from a non sync root to a sync root, break to creation case
                        # (updated is False).
                        # If moved from a sync root to a non sync root, break to noop
                        # (updated is True).
                        break
                    elif (eventId == 'documentMoved'
                          and new_info_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME):
                        # If moved from a sync root to a non sync root, delete from local sync root
                        log.debug("Marking doc_pair '%s' as deleted", doc_pair_repr)
                        self._dao.delete_remote_state(doc_pair)
                    else:
                        # Make new_info consistent with actual doc pair parent path for a doc member of a
                        # collection (typically the Locally Edited one) that is also under a sync root.
                        # Indeed, in this case, when adapted as a FileSystemItem, its parent path will be the one
                        # of the sync root because it takes precedence over the collection,
                        # see AbstractDocumentBackedFileSystemItem constructor.
                        consistent_new_info = new_info
                        if remote_parent_factory == COLLECTION_SYNC_ROOT_FACTORY_NAME:
                            new_info_parent_uid = doc_pair.remote_parent_ref
                            new_info_path = (doc_pair.remote_parent_path + '/' + remote_ref)
                            consistent_new_info = RemoteFileInfo(new_info.name, new_info.uid, new_info_parent_uid,
                                                                 new_info_path, new_info.folderish,
                                                                 new_info.last_modification_time,
                                                                 new_info.last_contributor, new_info.digest,
                                                                 new_info.digest_algorithm, new_info.download_url,
                                                                 new_info.can_rename, new_info.can_delete,
                                                                 new_info.can_update, new_info.can_create_child)
                        # Perform a regular document update on a document
                        # that has been updated, renamed or moved
                        # NOTE(review): eventId was already read at the top of
                        # the loop; this re-read is redundant but harmless.
                        eventId = change.get('eventId')
                        log.debug("Refreshing remote state info"
                                  " for doc_pair '%s' (force_recursion:%d)", doc_pair_repr,
                                  (eventId == "securityUpdated"))
                        remote_parent_path = doc_pair.remote_parent_path
                        # if (new_info.digest != doc_pair.local_digest or
                        #     safe_filename(new_info.name) != doc_pair.local_name
                        #     or new_info.parent_uid != doc_pair.remote_parent_ref):
                        # NOTE(review): both branches below assign the same
                        # value to remote_parent_path; only remote_state
                        # differs - confirm that is intentional.
                        if doc_pair.remote_state != 'created':
                            doc_pair.remote_state = 'modified'
                            remote_parent_path = os.path.dirname(new_info.path)
                        else:
                            remote_parent_path = os.path.dirname(new_info.path)
                        # TODO Add modify local_path and local_parent_path if needed
                        self._dao.update_remote_state(doc_pair, new_info, remote_parent_path)
                        self._force_scan_recursive(doc_pair, consistent_new_info, remote_path=new_info.path,
                                                   force_recursion=(eventId == "securityUpdated"))
                updated = True
                refreshed.add(remote_ref)
        if new_info and not updated:
            # Handle new document creations
            created = False
            parent_pairs = self._dao.get_states_from_remote(new_info.parent_uid)
            for parent_pair in parent_pairs:
                child_pair, new_pair = (self._find_remote_child_match_or_create(parent_pair, new_info))
                if new_pair:
                    log.debug("Marked doc_pair '%s' as remote creation",
                              child_pair.remote_name)
                if child_pair.folderish and new_pair:
                    log.debug('Remote recursive scan of the content of %s',
                              child_pair.remote_name)
                    remote_path = child_pair.remote_parent_path + "/" + new_info.uid
                    self._force_scan_recursive(child_pair, new_info, remote_path)
                created = True
                refreshed.add(remote_ref)
                break
            if not created:
                log.debug("Could not match changed document to a bound local folder: %r", new_info)
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Setups a local Rietveld instance to test against a live server for
integration tests.
It makes sure Google AppEngine SDK is found, download Rietveld and Django code
if necessary and starts the server on a free inbound TCP port.
"""
import optparse
import os
import socket
import sys
import time
import subprocess2
class Failure(Exception):
  """Raised when setting up or running the local Rietveld instance fails."""
  pass
def test_port(port):
s = socket.socket()
try:
return s.connect_ex(('127.0.0.1', port)) == 0
finally:
s.close()
def find_free_port():
  """Returns the first TCP port at or above 8080 that is not in use.

  Raises:
    Failure: if every port in [8080, 65535] is busy.
  """
  # Test to find an available port starting at 8080.
  port = 8080
  # TCP ports are 16 bits wide, so 65535 is the highest valid port.  The
  # previous bound (2<<16 == 131072) allowed probing invalid port numbers.
  max_val = 65536
  # Check the bound before probing so test_port() is never called with an
  # out-of-range port.
  while port < max_val and test_port(port):
    port += 1
  if port == max_val:
    raise Failure('Having issues finding an available port')
  return port
class LocalRietveld(object):
  """Downloads everything needed to run a local instance of Rietveld."""

  def __init__(self, base_dir=None):
    # Paths
    self.base_dir = base_dir
    if not self.base_dir:
      self.base_dir = os.path.dirname(os.path.abspath(__file__))
    # TODO(maruel): This should be in /tmp but that would mean having to fetch
    # everytime. This test is already annoyingly slow.
    self.rietveld = os.path.join(self.base_dir, '_rietveld')
    self.test_server = None
    self.port = None

    # Find the GAE SDK: walk up the directory tree looking for a
    # 'google_appengine' directory containing a VERSION file.
    previous_dir = ''
    self.sdk_path = ''
    base_dir = self.base_dir
    while base_dir != previous_dir:
      previous_dir = base_dir
      self.sdk_path = os.path.join(base_dir, 'google_appengine')
      if not os.path.isfile(os.path.join(self.sdk_path, 'VERSION')):
        base_dir = os.path.dirname(base_dir)
    self.dev_app = os.path.join(self.sdk_path, 'dev_appserver.py')

  def install_prerequisites(self):
    """Verifies the GAE SDK is present and checks out or syncs Rietveld."""
    # First, verify the Google AppEngine SDK is available.
    if not os.path.isfile(self.dev_app):
      raise Failure(
          'Install google_appengine sdk in %s or higher up' % self.base_dir)

    # Second, checkout rietveld if not available.
    if not os.path.isdir(self.rietveld):
      print('Checking out rietveld...')
      try:
        subprocess2.check_call(
            ['svn', 'co', '-q', 'http://rietveld.googlecode.com/svn/trunk@681',
             self.rietveld])
      except (OSError, subprocess2.CalledProcessError), e:
        raise Failure('Failed to checkout rietveld\n%s' % e)
    else:
      print('Syncing rietveld...')
      try:
        subprocess2.check_call(
            ['svn', 'up', '-q', '-r', '681'], cwd=self.rietveld)
      except (OSError, subprocess2.CalledProcessError), e:
        raise Failure('Failed to sync rietveld\n%s' % e)

  def start_server(self, verbose=False):
    """Starts dev_appserver.py on a free port and waits until it serves."""
    self.install_prerequisites()
    self.port = find_free_port()
    if verbose:
      pipe = None
    else:
      pipe = subprocess2.VOID
    cmd = [
        sys.executable,
        self.dev_app,
        '--skip_sdk_update_check',
        '.',
        '--port=%d' % self.port,
        '--datastore_path=' + os.path.join(self.rietveld, 'tmp.db'),
        '-c']

    # CHEAP TRICK
    # By default you only want to bind on loopback but I'm testing over a
    # headless computer so it's useful to be able to access the test instance
    # remotely.
    if os.environ.get('GAE_LISTEN_ALL', '') == 'true':
      cmd.extend(('-a', '0.0.0.0'))

    self.test_server = subprocess2.Popen(
        cmd, stdout=pipe, stderr=pipe, cwd=self.rietveld)
    # Loop until port 127.0.0.1:port opens or the process dies.
    while not test_port(self.port):
      self.test_server.poll()
      if self.test_server.returncode is not None:
        raise Failure(
            'Test rietveld instance failed early on port %s' %
            self.port)
      time.sleep(0.01)

  def stop_server(self):
    """Kills the dev_appserver child process, if any, and resets state."""
    if self.test_server:
      self.test_server.kill()
      self.test_server.wait()
      self.test_server = None
      self.port = None
def main():
  """Starts a local Rietveld instance and serves until interrupted."""
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='store_true')
  options, args = parser.parse_args()
  if args:
    parser.error('Unknown arguments: %s' % ' '.join(args))
  instance = LocalRietveld()
  try:
    instance.start_server(verbose=options.verbose)
    print 'Local rietveld instance started on port %d' % instance.port
    # Idle until the user hits Ctrl-C; the finally clause cleans up.
    while True:
      time.sleep(0.1)
  finally:
    instance.stop_server()
# Allow running this helper directly as a script.
if __name__ == '__main__':
  main()
Use a newer version of Rietveld; this is required in order to update upload.py.
R=cmp@chromium.org
BUG=
TEST=
Review URL: http://codereview.chromium.org/9212062
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@119064 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Setups a local Rietveld instance to test against a live server for
integration tests.
It makes sure Google AppEngine SDK is found, download Rietveld and Django code
if necessary and starts the server on a free inbound TCP port.
"""
import optparse
import os
import shutil
import socket
import sys
import time
try:
import subprocess2
except ImportError:
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
import subprocess2
class Failure(Exception):
  """Raised when setting up or running the local Rietveld instance fails."""
  pass
def test_port(port):
s = socket.socket()
try:
return s.connect_ex(('127.0.0.1', port)) == 0
finally:
s.close()
def find_free_port():
  """Returns the first TCP port at or above 8080 that is not in use.

  Raises:
    Failure: if every port in [8080, 65535] is busy.
  """
  # Test to find an available port starting at 8080.
  port = 8080
  # TCP ports are 16 bits wide, so 65535 is the highest valid port.  The
  # previous bound (2<<16 == 131072) allowed probing invalid port numbers.
  max_val = 65536
  # Check the bound before probing so test_port() is never called with an
  # out-of-range port.
  while port < max_val and test_port(port):
    port += 1
  if port == max_val:
    raise Failure('Having issues finding an available port')
  return port
class LocalRietveld(object):
  """Downloads everything needed to run a local instance of Rietveld."""

  def __init__(self, base_dir=None):
    # Paths
    self.base_dir = base_dir
    if not self.base_dir:
      self.base_dir = os.path.dirname(os.path.abspath(__file__))
    # TODO(maruel): This should be in /tmp but that would mean having to fetch
    # everytime. This test is already annoyingly slow.
    self.rietveld = os.path.join(self.base_dir, '_rietveld')
    self.test_server = None
    self.port = None

    # Find the GAE SDK: walk up the directory tree looking for a
    # 'google_appengine' directory containing a VERSION file.
    previous_dir = ''
    self.sdk_path = ''
    base_dir = self.base_dir
    while base_dir != previous_dir:
      previous_dir = base_dir
      self.sdk_path = os.path.join(base_dir, 'google_appengine')
      if not os.path.isfile(os.path.join(self.sdk_path, 'VERSION')):
        base_dir = os.path.dirname(base_dir)
    self.dev_app = os.path.join(self.sdk_path, 'dev_appserver.py')

  def install_prerequisites(self):
    """Verifies the GAE SDK is present and clones or syncs Rietveld via hg."""
    # First, verify the Google AppEngine SDK is available.
    if not os.path.isfile(self.dev_app):
      raise Failure(
          'Install google_appengine sdk in %s or higher up' % self.base_dir)

    if os.path.isdir(os.path.join(self.rietveld, '.svn')):
      # Left over from subversion. Delete it.
      shutil.rmtree(self.rietveld)

    # Second, checkout rietveld if not available.
    rev = '9349cab9a3bb'
    if not os.path.isdir(self.rietveld):
      print('Checking out rietveld...')
      try:
        subprocess2.check_call(
            [ 'hg', 'clone', '-q', '-u', rev, '-r', rev,
              'https://code.google.com/p/rietveld/', self.rietveld])
      except (OSError, subprocess2.CalledProcessError), e:
        raise Failure(
            'Failed to checkout rietveld. Do you have mercurial installed?\n'
            '%s' % e)
    else:
      print('Syncing rietveld...')
      try:
        subprocess2.check_call(
            ['hg', 'co', '-q', '-C', rev], cwd=self.rietveld)
      except (OSError, subprocess2.CalledProcessError), e:
        raise Failure('Failed to sync rietveld\n%s' % e)

  def start_server(self, verbose=False):
    """Starts dev_appserver.py on a free port and waits until it serves."""
    self.install_prerequisites()
    self.port = find_free_port()
    if verbose:
      pipe = None
    else:
      pipe = subprocess2.VOID
    cmd = [
        sys.executable,
        self.dev_app,
        '--skip_sdk_update_check',
        '.',
        '--port=%d' % self.port,
        '--datastore_path=' + os.path.join(self.rietveld, 'tmp.db'),
        '-c']

    # CHEAP TRICK
    # By default you only want to bind on loopback but I'm testing over a
    # headless computer so it's useful to be able to access the test instance
    # remotely.
    if os.environ.get('GAE_LISTEN_ALL', '') == 'true':
      cmd.extend(('-a', '0.0.0.0'))

    self.test_server = subprocess2.Popen(
        cmd, stdout=pipe, stderr=pipe, cwd=self.rietveld)
    # Loop until port 127.0.0.1:port opens or the process dies.
    while not test_port(self.port):
      self.test_server.poll()
      if self.test_server.returncode is not None:
        raise Failure(
            'Test rietveld instance failed early on port %s' %
            self.port)
      time.sleep(0.01)

  def stop_server(self):
    """Kills the dev_appserver child process, if any, and resets state."""
    if self.test_server:
      self.test_server.kill()
      self.test_server.wait()
      self.test_server = None
      self.port = None
def main():
  """Starts a local Rietveld instance and serves until interrupted."""
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='store_true')
  options, args = parser.parse_args()
  if args:
    parser.error('Unknown arguments: %s' % ' '.join(args))
  instance = LocalRietveld()
  try:
    instance.start_server(verbose=options.verbose)
    print 'Local rietveld instance started on port %d' % instance.port
    # Idle until the user hits Ctrl-C; the finally clause cleans up.
    while True:
      time.sleep(0.1)
  finally:
    instance.stop_server()
# Allow running this helper directly as a script.
if __name__ == '__main__':
  main()
|
# -*- mode: python -*-
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation rendered by `ansible-doc include_vars`.
# Fix: "into which assign" -> "into which to assign" (grammar).
DOCUMENTATION = '''
---
author: "Benno Joy (@bennojoy)"
module: include_vars
short_description: Load variables from files, dynamically within a task.
description:
     - Loads variables from a YAML/JSON file dynamically during task runtime. It can work with conditionals, or use host specific variables to determine the path name to load from.
options:
  file:
    version_added: "2.2"
    description:
      - The file name from which variables should be loaded.
      - If the path is relative, it will look for the file in vars/ subdirectory of a role or relative to playbook.
  name:
    version_added: "2.2"
    description:
      - The name of a variable into which to assign the included vars, if omitted (null) they will be made top level vars.
    default: null
  free-form:
    description:
      - This module allows you to specify the 'file' option directly w/o any other options.
notes:
  - The file is always required either as the explicit option or using the free-form.
version_added: "1.4"
'''
EXAMPLES = """
# Include vars of stuff.yml into the 'stuff' variable (2.2).
- include_vars:
file: stuff.yml
name: stuff
# Conditionally decide to load in variables into 'plans' when x is 0, otherwise do not. (2.2)
- include_vars: file=contingency_plan.yml name=plans
when: x == 0
# Load a variable file based on the OS type, or a default if not found.
- include_vars: "{{ item }}"
with_first_found:
- "{{ ansible_distribution }}.yml"
- "{{ ansible_os_family }}.yml"
- "default.yml"
# bare include (free-form)
- include_vars: myvars.yml
"""
Updated documentation for PR https://github.com/ansible/ansible/pull/17207
# -*- mode: python -*-
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation rendered by `ansible-doc include_vars`.
# Fixes: "from a YAML/JSON files" -> "from YAML/JSON files" and
# "into which assign" -> "into which to assign" (grammar).
DOCUMENTATION = '''
---
author: "Allen Sanabria (@linuxdynasty)"
module: include_vars
short_description: Load variables from files, dynamically within a task.
description:
    - Loads variables from YAML/JSON files dynamically from within a file or
      from a directory recursively during task runtime. If loading a directory, the files are sorted alphabetically before being loaded.
options:
  file:
    version_added: "2.2"
    description:
      - The file name from which variables should be loaded.
      - If the path is relative, it will look for the file in vars/ subdirectory of a role or relative to playbook.
  dir:
    version_added: "2.2"
    description:
      - The directory name from which the variables should be loaded.
      - If the path is relative, it will look for the file in vars/ subdirectory of a role or relative to playbook.
    default: null
  name:
    version_added: "2.2"
    description:
      - The name of a variable into which to assign the included vars, if omitted (null) they will be made top level vars.
    default: null
  depth:
    version_added: "2.2"
    description:
      - By default, this module will recursively go through each sub directory and load up the variables. By explicitly setting the depth, this module will only go as deep as the depth.
    default: 0
  files_matching:
    version_added: "2.2"
    description:
      - Limit the variables that are loaded within any directory to this regular expression.
    default: null
  ignore_files:
    version_added: "2.2"
    description:
      - List of file names to ignore. The defaults can not be overridden, but can be extended.
    default: null
  free-form:
    description:
      - This module allows you to specify the 'file' option directly w/o any other options.
'''
# Usage examples rendered by `ansible-doc include_vars`.
# Fix: comment typo "exception bastion.yml" -> "except bastion.yml".
EXAMPLES = """
# Include vars of stuff.yml into the 'stuff' variable (2.2).
- include_vars:
    file: stuff.yml
    name: stuff

# Conditionally decide to load in variables into 'plans' when x is 0, otherwise do not. (2.2)
- include_vars: file=contingency_plan.yml name=plans
  when: x == 0

# Load a variable file based on the OS type, or a default if not found.
- include_vars: "{{ item }}"
  with_first_found:
    - "{{ ansible_distribution }}.yml"
    - "{{ ansible_os_family }}.yml"
    - "default.yml"

# bare include (free-form)
- include_vars: myvars.yml

# Include all yml files in vars/all and all nested directories
- include_vars:
    dir: 'vars/all'

# Include all yml files in vars/all and all nested directories and save the output in test.
- include_vars:
    dir: 'vars/all'
    name: test

# Include all yml files in vars/services
- include_vars:
    dir: 'vars/services'
    depth: 1

# Include only bastion.yml files
- include_vars:
    dir: 'vars'
    files_matching: 'bastion.yml'

# Include all yml files except bastion.yml
- include_vars:
    dir: 'vars'
    ignore_files: 'bastion.yml'
"""
|
"""
follow_statement -> follow_call -> follow_paths -> follow_path
'follow_import'
`get_names_for_scope` and `get_scopes_for_name` are search functions
TODO doc
TODO list comprehensions, priority?
TODO evaluate asserts (type safety)
TODO generators
python 3 stuff:
TODO class decorators
TODO annotations ? how ? type evaluation and return?
TODO nonlocal statement
"""
from _compatibility import next
import itertools
import copy
import parsing
import modules
import debug
import builtin
# Registry of every memo dict created by memoize_default(), so that
# clear_caches() can empty them all between evaluation runs.
memoize_caches = []
class MultiLevelStopIteration(Exception):
    """
    Raised in place of StopIteration inside generators (see
    get_names_for_scope), so the exception propagates through nested
    iteration levels instead of silently ending the current generator.
    """
    pass
def clear_caches():
    """Empty every memoization cache registered in `memoize_caches`."""
    for cache in memoize_caches:
        cache.clear()
def memoize_default(default=None):
    """
    A memoization decorator factory with one twist: before computing a
    result it stores `default` under the key, so a recursive re-entry with
    the same arguments returns `default` instead of recursing forever
    (think of ``a = b; b = a``).  The speed-up is secondary; breaking
    recursion is the real purpose.
    """
    def decorator(function):
        memo = {}
        # Register the cache globally so clear_caches() can reset it.
        memoize_caches.append(memo)

        def wrapper(*args, **kwargs):
            key = (args, frozenset(kwargs.items()))
            try:
                return memo[key]
            except KeyError:
                # Pre-seed with the default to short-circuit recursion.
                memo[key] = default
                result = function(*args, **kwargs)
                memo[key] = result
                return result
        return wrapper
    return decorator
class Executable(object):
    """ An instance is also an executable - because __init__ is called """
    def __init__(self, base, var_args=[]):
        # NOTE(review): mutable default argument; every call omitting
        # var_args shares one list object - confirm nothing mutates it.
        self.base = base
        # the param input array
        self.var_args = var_args
        self.func = None

    def get_parent_until(self, *args):
        # Delegate to the wrapped parser object.
        return self.base.get_parent_until(*args)

    @property
    def scope(self):
        """ Just try through the whole param array to find the own scope """
        for param in self.var_args:
            for call in param:
                try:
                    return call.parent_stmt.parent
                except AttributeError: # if operators are there
                    pass
        raise IndexError('No params available')

    @memoize_default(default=[])
    def get_params(self):
        """
        This returns the params for an Execution/Instance and is injected as a
        'hack' into the parsing.Function class.
        This needs to be here, because Instance can have __init__ functions,
        which act the same way as normal functions
        """
        def gen_param_name_copy(param, keys=[], values=[], array_type=None):
            # Build a fake assignment array so the copied param name
            # resolves to the caller-supplied values.
            calls = parsing.Array(parsing.Array.NOARRAY,
                                  self.var_args.parent_stmt)
            calls.values = values
            calls.keys = keys
            calls.type = array_type
            new_param = copy.copy(param)
            new_param._assignment_calls_calculated = True
            new_param._assignment_calls = calls
            name = copy.copy(param.get_name())
            name.parent = new_param
            #print 'insert', i, name, calls.values, value, self.func.params
            return name

        result = []
        start_offset = 0
        #print '\n\nfunc_params', self.func, self.func.parent, self.func
        if isinstance(self.func, InstanceElement):
            # care for self -> just exclude it and add the instance
            start_offset = 1
            self_name = copy.copy(self.func.params[0].get_name())
            self_name.parent = self.func.instance
            result.append(self_name)

        param_dict = {}
        for param in self.func.params:
            param_dict[str(param.get_name())] = param
        # There may be calls, which don't fit all the params, this just ignores
        # it.
        var_arg_iterator = self.get_var_args_iterator()

        non_matching_keys = []
        for param in self.func.params[start_offset:]:
            # The value and key can both be null. There, the defaults apply.
            # args / kwargs will just be empty arrays / dicts, respectively.
            key, value = next(var_arg_iterator, (None, None))
            # Consume every named argument first; positional matching
            # resumes once an unnamed one is seen.
            while key:
                try:
                    key_param = param_dict[str(key)]
                except KeyError:
                    non_matching_keys.append((key, value))
                else:
                    result.append(gen_param_name_copy(key_param,
                                                      values=[value]))
                key, value = next(var_arg_iterator, (None, None))
                #debug.warning('Too many arguments given.', value)

            assignment = param.get_assignment_calls().values[0]
            keys = []
            values = []
            array_type = None
            if assignment[0] == '*':
                # *args param
                array_type = parsing.Array.TUPLE
                if value:
                    values.append(value)
                for key, value in var_arg_iterator:
                    # iterate until a key argument is found
                    if key:
                        var_arg_iterator.push_back(key, value)
                        break
                    values.append(value)
            elif assignment[0] == '**':
                # **kwargs param
                array_type = parsing.Array.DICT
                if non_matching_keys:
                    keys, values = zip(*non_matching_keys)
            else:
                # normal param
                if value:
                    values = [value]
            result.append(gen_param_name_copy(param, keys=keys, values=values,
                                              array_type=array_type))
        return result

    def get_var_args_iterator(self):
        """
        Yields a key/value pair, the key is None, if its not a named arg.
        """
        def iterate():
            # var_args is typically an Array, and not a list
            for var_arg in self.var_args:
                # *args
                if var_arg[0] == '*':
                    arrays = follow_call_list(self.scope, [var_arg[1:]])
                    for array in arrays:
                        for field in array.get_contents():
                            yield None, field
                # **kwargs
                elif var_arg[0] == '**':
                    arrays = follow_call_list(self.scope, [var_arg[1:]])
                    for array in arrays:
                        for key, field in array.get_contents():
                            # take the first index
                            if isinstance(key, parsing.Name):
                                name = key
                            else:
                                name = key[0].name
                            yield name, field
                    yield var_arg
                # normal arguments (including key arguments)
                else:
                    if len(var_arg) > 1 and var_arg[1] == '=':
                        # this is a named parameter
                        yield var_arg[0].name, var_arg[2:]
                    else:
                        yield None, var_arg

        class PushBackIterator(object):
            # Wrapper that lets get_params() "un-read" a (key, value)
            # pair when it overshoots while collecting *args.
            def __init__(self, iterator):
                self.pushes = []
                self.iterator = iterator

            def push_back(self, key, value):
                self.pushes.append((key,value))

            def __iter__(self):
                return self

            def next(self):
                """ Python 2 Compatibility """
                return self.__next__()

            def __next__(self):
                try:
                    return self.pushes.pop()
                except IndexError:
                    return next(self.iterator)

        return iter(PushBackIterator(iterate()))

    def set_param_cb(self, func):
        # Hook get_params into the parser function so param resolution
        # sees the call-site arguments.
        self.func = func
        func.param_cb = self.get_params
class Instance(Executable):
    """ This class is used to evaluate instances. """
    def __init__(self, base, var_args=[]):
        super(Instance, self).__init__(base, var_args)
        if var_args:
            self.set_init_params()

    def set_init_params(self):
        # Wire the constructor arguments into __init__, if the class has one.
        for sub in self.base.subscopes:
            if isinstance(sub, parsing.Function) \
                    and sub.name.get_code() == '__init__':
                self.set_param_cb(InstanceElement(self, sub))

    def get_func_self_name(self, func):
        """
        Returns the name of the first param in a class method (which is
        normally self
        """
        # NOTE(review): bare except - presumably guards IndexError /
        # AttributeError on funcs without params; consider narrowing.
        try:
            return func.params[0].used_vars[0].names[0]
        except:
            return None

    def get_defined_names(self):
        """
        Get the instance vars of a class. This includes the vars of all
        classes
        """
        def add_self_name(name):
            n = copy.copy(name)
            n.names = n.names[1:]
            names.append(InstanceElement(self, n))

        names = []
        # this loop adds the names of the self object, copies them and removes
        # the self.
        for s in self.base.subscopes:
            # get the self name, if there's one
            self_name = self.get_func_self_name(s)
            if self_name:
                for n in s.get_set_vars():
                    # Only names with the selfname are being added.
                    # It is also important, that they have a len() of 2,
                    # because otherwise, they are just something else
                    if n.names[0] == self_name and len(n.names) == 2:
                        add_self_name(n)

        for var in self.base.get_defined_names(as_instance=True):
            # functions are also instance elements
            if isinstance(var.parent, (parsing.Function)):
                var = InstanceElement(self, var)
            names.append(var)
        return names

    def parent(self):
        # NOTE(review): plain method here, but a property on
        # InstanceElement - confirm the asymmetry is intentional.
        return self.base.parent

    def __repr__(self):
        return "<p%s of %s (var_args: %s)>" % \
            (self.__class__.__name__, self.base, len(self.var_args or []))
class InstanceElement(object):
    """
    Proxy that binds a parser object (`var`) to an `Instance`, forwarding
    unknown attribute access to the wrapped object via __getattr__.
    """
    def __init__(self, instance, var):
        super(InstanceElement, self).__init__()
        self.instance = instance
        self.var = var

    @property
    def parent(self):
        # Keep the instance binding when walking up the parent chain.
        return InstanceElement(self.instance, self.var.parent)

    @property
    def param_cb(self):
        return self.var.param_cb

    @param_cb.setter
    def param_cb(self, value):
        self.var.param_cb = value

    def __getattr__(self, name):
        # Everything not defined here is looked up on the wrapped object.
        return getattr(self.var, name)

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.var)
class Class(object):
    """
    Wrapper around a parser class scope that merges in names inherited
    from super classes; other attribute access is forwarded to `base`.
    """
    def __init__(self, base):
        self.base = base

    def get_defined_names(self, as_instance=False):
        def in_iterable(name, iterable):
            # Membership test by last name component, so an inherited
            # name is skipped when the subclass overrides it.
            for i in iterable:
                # only the last name is important, because these names have a
                # maximal length of 2, with the first one being `self`.
                if i.names[-1] == name.names[-1]:
                    return True
            return False

        names = self.base.get_defined_names()
        # check super classes:
        for s in self.base.supers:
            for cls in follow_statement(s):
                # get the inherited names
                if as_instance:
                    cls = Instance(cls)
                for i in cls.get_defined_names():
                    if not in_iterable(i, names):
                        names.append(i)
        return names

    def __getattr__(self, name):
        return getattr(self.base, name)

    def __repr__(self):
        return "<p%s of %s>" % (self.__class__.__name__, self.base)
class Execution(Executable):
    """
    This class is used to evaluate functions and their returns.
    """
    cache = {}

    def process_decorators(self):
        """ Returns the function, that is to be executed in the end """
        func = self.base
        # only enter it, if has not already been processed
        if hasattr(func, 'is_decorated') and not func.is_decorated:
            # Apply decorators innermost-first, executing each wrapper
            # with the current func as its single argument.
            for dec in reversed(self.base.decorators):
                debug.dbg('decorator:', dec, func)
                dec_results = follow_statement(dec)
                if not len(dec_results):
                    debug.warning('decorator func not found', self.base)
                    return []
                if len(dec_results) > 1:
                    debug.warning('multiple decorators found', self.base,
                                  dec_results)
                decorator = dec_results.pop()
                # create param array
                params = parsing.Array(parsing.Array.NOARRAY, func)
                params.values = [[func]]
                wrappers = Execution(decorator, params).get_return_types()
                if not len(wrappers):
                    debug.warning('no wrappers found', self.base)
                    return []
                if len(wrappers) > 1:
                    debug.warning('multiple wrappers found', self.base,
                                  wrappers)
                # this is here, that the wrapper gets executed
                func = wrappers[0]
            debug.dbg('decorator end')
            #print dec.parent
        return func

    @memoize_default(default=[])
    def get_return_types(self):
        """
        Get the return vars of a function.
        """
        stmts = []
        #print '\n\n', self.var_args, self.var_args.values, self.var_args.parent_stmt
        if isinstance(self.base, Class):
            # there maybe executions of executions
            stmts = [Instance(self.base, self.var_args)]
        else:
            func = self.process_decorators()
            # set the callback function to get the var_args
            # don't do this with exceptions, as usual, because some deeper
            # exceptions could be catched - and I wouldn't know what happened.
            if hasattr(func, 'returns'):
                self.set_param_cb(func)
                self.base.is_decorated = True

                ret = func.returns
                for s in ret:
                    #temp, s.parent = s.parent, self
                    stmts += follow_statement(s)
                    #s.parent = temp

                # reset the callback function on exit
                # TODO how can we deactivate this again?
                #self.base.param_cb = None

                # func could have changed because of decorators, so clear them
                # again
                self.base.is_decorated = False
            else:
                debug.warning("no execution possible", func)

        debug.dbg('exec results:', stmts, self.base, repr(self))
        return strip_imports(stmts)

    def __repr__(self):
        return "<%s of %s>" % \
            (self.__class__.__name__, self.base)
class Array(object):
    """
    Used as a mirror to parsing.Array, if needed. It defines some getter
    methods which are important in this module.
    """
    def __init__(self, array):
        self._array = array

    def get_index_types(self, index=None):
        # Returns the possible types of self[index]; falls back to the
        # union of all contained values when the index can't be pinned.
        values = self._array.values
        if index is not None:
            if [x for x in index if ':' in x]:
                # A slice expression: slicing yields the array itself.
                return [self]
            else:
                # This is indexing only one element, with a fixed index number,
                # otherwise it just ignores the index (e.g. [1+1])
                try:
                    # multiple elements in the array
                    i = index.get_only_subelement().name
                except AttributeError:
                    pass
                else:
                    try:
                        return self.get_exact_index_types(i)
                    except (IndexError, KeyError):
                        pass
        return self.follow_values(values)

    def get_exact_index_types(self, index):
        # For dicts, translate a string key into its positional index.
        if self._array.type == parsing.Array.DICT:
            old_index = index
            index = None
            for i, key_elements in enumerate(self._array.keys):
                # because we only want the key to be a string
                if len(key_elements) == 1:
                    try:
                        str_key = key_elements.get_code()
                    except AttributeError:
                        try:
                            str_key = key_elements[0].name
                        except AttributeError:
                            str_key = None
                    if old_index == str_key:
                        index = i
                        break
            if index is None:
                raise KeyError('No key found in dictionary')
        values = [self._array[index]]
        return self.follow_values(values)

    def follow_values(self, values):
        """ helper function for the index getters """
        scope = self._array.parent_stmt.parent
        return follow_call_list(scope, values)

    def get_defined_names(self):
        """ This method generates all ArrayElements for one parsing.Array. """
        # array.type is a string with the type, e.g. 'list'
        scope = get_scopes_for_name(builtin.Builtin.scope, self._array.type)[0]
        names = scope.get_defined_names()
        return [ArrayElement(n) for n in names]

    def get_contents(self):
        return self._array

    def __repr__(self):
        return "<p%s of %s>" % (self.__class__.__name__, self._array)
class ArrayElement(object):
    """
    Wraps a builtin-scope name (e.g. a `list` method name) as produced by
    Array.get_defined_names().
    """
    def __init__(self, name):
        super(ArrayElement, self).__init__()
        self.name = name

    @property
    def parent(self):
        # Array elements are leaves; nothing should ask for their parent.
        raise NotImplementedError("This shouldn't happen")

    @property
    def returns(self):
        return self.name.parent.returns

    @property
    def names(self):
        return self.name.names

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.name)
def get_defined_names_for_position(obj, position):
    """
    Return the names of `obj`, restricted to those defined before
    `position` (a (line, indent) tuple).  Without a position all names
    are returned.
    """
    names = obj.get_defined_names()
    if not position:
        return names
    return [n for n in names if (n.line_nr, n.indent) < position]
def get_names_for_scope(scope, position=None, star_search=True):
    """
    Get all completions possible for the current scope.
    The star search option is only here to provide an optimization. Otherwise
    the whole thing would probably start a little recursive madness.

    Generator yielding ``(scope, name_list)`` pairs, innermost scope
    first, builtins last.
    """
    start_scope = scope
    while scope:
        # class variables/functions are only available
        # in the class scope itself, not in nested scopes
        if (not isinstance(scope, Class) or scope == start_scope) \
                and not isinstance(scope, parsing.Flow):
            try:
                yield scope, get_defined_names_for_position(scope, position)
            except StopIteration:
                # a raw StopIteration would silently end this generator,
                # so convert it into something the caller can see
                raise MultiLevelStopIteration('StopIteration raised somewhere')
        scope = scope.parent
    # add star imports
    if star_search:
        for s in remove_star_imports(start_scope.get_parent_until()):
            for g in get_names_for_scope(s, star_search=False):
                yield g
        # add builtins to the global scope
        builtin_scope = builtin.Builtin.scope
        yield builtin_scope, builtin_scope.get_defined_names()
def get_scopes_for_name(scope, name_str, position=None, search_global=False):
    """
    Resolve `name_str` within `scope` to the scopes/types it stands for.

    :param position: Position of the last statement ->tuple of line, indent
    :return: List of Names. Their parents are the scopes, they are defined in.
    :rtype: list
    """
    def remove_statements(result):
        """
        This is the part where statements are being stripped.
        Due to lazy evaluation, statements like a = func; b = a; b() have to be
        evaluated.
        """
        res_new = []
        for r in result:
            if isinstance(r, parsing.Statement) \
                    or isinstance(r, InstanceElement) \
                    and isinstance(r.var, parsing.Statement):
                # global variables handling
                if r.is_global():
                    for token_name in r.token_list[1:]:
                        if isinstance(token_name, parsing.Name):
                            res_new += get_scopes_for_name(r.parent,
                                                           str(token_name))
                else:
                    # follow the statement to its actual value(s)
                    scopes = follow_statement(r, seek_name=name_str)
                    res_new += remove_statements(scopes)
            else:
                if isinstance(r, parsing.Class):
                    # wrap raw classes so super classes are respected
                    r = Class(r)
                res_new.append(r)
        debug.dbg('sfn remove, new: %s, old: %s' % (res_new, result))
        return res_new

    def filter_name(scope_generator):
        def handle_non_arrays(name):
            result = []
            par = name.parent
            if isinstance(par, parsing.Flow):
                if par.command == 'for':
                    # take the first statement (for has always only
                    # one, remember `in`). And follow it. After that,
                    # get the types which are in the array
                    arrays = follow_statement(par.inits[0])
                    for array in arrays:
                        in_vars = array.get_index_types()
                        if len(par.set_vars) > 1:
                            var_arr = par.set_stmt.get_assignment_calls()
                            result += assign_tuples(var_arr, in_vars, name_str)
                        else:
                            result += in_vars
                else:
                    debug.warning('Flow: Why are you here? %s' % par.command)
            elif isinstance(par, parsing.Param) \
                    and isinstance(par.parent.parent, parsing.Class) \
                    and par.position == 0:
                # this is where self gets added - this happens at another
                # place, if the var_args are clear. But some times the class is
                # not known. Therefore set self.
                result.append(Instance(Class(par.parent.parent)))
                result.append(par)
            else:
                result.append(par)
            return result

        result = []
        # compare func uses the tuple of line/indent = row/column
        comparison_func = lambda name: (name.line_nr, name.indent)
        for scope, name_list in scope_generator:
            # here is the position stuff happening (sorting of variables)
            for name in sorted(name_list, key=comparison_func, reverse=True):
                if name_str == name.get_code():
                    result += handle_non_arrays(name)
                    # this means that a definition was found and is not e.g.
                    # in if/else.
                    if name.parent.parent == scope:
                        break
            # if there are results, ignore the other scopes
            if result:
                break
        debug.dbg('sfn filter', name_str, result)
        return result

    if search_global:
        scope_generator = get_names_for_scope(scope, position=position)
    else:
        if position:
            names = get_defined_names_for_position(scope, position)
        else:
            names = scope.get_defined_names()
        scope_generator = iter([(scope, names)])
    return remove_statements(filter_name(scope_generator))
def strip_imports(scopes):
    """
    Resolve every parsing.Import in `scopes` to the module(s) it points
    at; everything else is passed through unchanged.
    """
    result = []
    for scope in scopes:
        if not isinstance(scope, parsing.Import):
            result.append(scope)
            continue
        try:
            result += follow_import(scope)
        except modules.ModuleNotFound:
            debug.warning('Module not found: ' + str(scope))
    return result
def assign_tuples(tup, results, seek_name):
    """
    This is a normal assignment checker. In python functions and other things
    can return tuples:
    >>> a, b = 1, ""
    >>> a, (b, c) = 1, ("", 1.0)
    Here, if seek_name is "a", the number type will be returned.
    The first part (before `=`) is the param tuples, the second one result.
    :type tup: parsing.Array
    """
    def eval_results(index):
        # collect the types that `results` carry at tuple position `index`
        types = []
        for r in results:
            if hasattr(r, "get_exact_index_types"):
                types += r.get_exact_index_types(index)
            else:
                debug.warning("assign tuples: invalid tuple lookup")
        return types

    result = []
    if tup.type == parsing.Array.NOARRAY:
        # here we have unnecessary braces, which we just remove
        arr = tup.get_only_subelement()
        result = assign_tuples(arr, results, seek_name)
    else:
        for i, t in enumerate(tup):
            # used in assignments. there is just one call and no other things,
            # therefor we can just assume, that the first part is important.
            if len(t) != 1:
                raise AttributeError('Array length should be 1')
            t = t[0]
            # check the left part, if it's still tuples in it or a Call
            if isinstance(t, parsing.Array):
                # these are "sub" tuples
                result += assign_tuples(t, eval_results(i), seek_name)
            else:
                if t.name.names[-1] == seek_name:
                    result += eval_results(i)
    return result
@memoize_default(default=[])
def follow_statement(stmt, scope=None, seek_name=None):
    """
    Evaluate a statement to the list of types/scopes it can stand for.

    :param stmt: contains a statement
    :param scope: contains a scope. If not given, takes the parent of stmt.
    :param seek_name: left-hand-side name whose types are wanted when the
        statement assigns to multiple variables.
    """
    if scope is None:
        scope = stmt.get_parent_until(parsing.Function, Execution,
                                      parsing.Class, Instance,
                                      InstanceElement)
    debug.dbg('follow_stmt', stmt, 'in', scope, seek_name)
    call_list = stmt.get_assignment_calls()
    debug.dbg('calls', call_list, call_list.values)
    result = follow_call_list(scope, call_list)
    # assignment checking is only important if the statement defines multiple
    # variables
    if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
        # TODO this should have its own call_list, because call_list can also
        # return 3 results for 2 variables.
        new_result = []
        for op, set_vars in stmt.assignment_details:
            new_result += assign_tuples(set_vars, result, seek_name)
        result = new_result
    return result
def follow_call_list(scope, call_list):
    """
    The call_list has a special structure.
    This can be either `parsing.Array` or `list of list`.
    It is used to evaluate a two dimensional object, that has calls, arrays and
    operators in it.

    :return: set of the results all contained calls evaluate to.
    """
    if parsing.Array.is_type(call_list, parsing.Array.TUPLE,
                             parsing.Array.DICT):
        # Tuples can stand just alone without any braces. These would be
        # recognized as separate calls, but actually are a tuple.
        result = follow_call(scope, call_list)
    else:
        result = []
        for calls in call_list:
            for call in calls:
                if parsing.Array.is_type(call, parsing.Array.NOARRAY):
                    # plain parentheses only group -> recurse into them
                    result += follow_call_list(scope, call)
                else:
                    # with things like params, these can also be functions, etc
                    if isinstance(call, (parsing.Function, parsing.Class)):
                        result.append(call)
                    elif not isinstance(call, str):
                        # The string tokens are just operations (+, -, etc.)
                        result += follow_call(scope, call)
    return set(result)
def follow_call(scope, call):
    """
    Follow a call is following a function, variable, string, etc.

    :param scope: the scope the call happens in.
    :param call: a parsing.Call; its generated path is walked element by
        element via `follow_paths`.
    :return: list of resulting scopes/types.
    """
    path = call.generate_call_list()
    position = (call.parent_stmt.line_nr, call.parent_stmt.indent)
    current = next(path)
    if isinstance(current, parsing.Array):
        result = [Array(current)]
    else:
        # TODO add better care for int/unicode, now str/float are just used
        # instead
        if not isinstance(current, parsing.NamePart):
            if current.type == parsing.Call.STRING:
                scopes = get_scopes_for_name(builtin.Builtin.scope, 'str')
            elif current.type == parsing.Call.NUMBER:
                scopes = get_scopes_for_name(builtin.Builtin.scope, 'float')
            else:
                # BUG FIX: `scopes` was previously left unbound on this
                # branch, which raised a NameError right after the warning.
                scopes = []
                debug.warning('unknown type:', current.type, current)
            # make instances of those number/string objects
            scopes = [Instance(s) for s in scopes]
        else:
            # this is the first global lookup
            scopes = get_scopes_for_name(scope, current, position=position,
                                         search_global=True)
        result = strip_imports(scopes)
        if result != scopes:
            # reset the position, when imports were stripped
            position = None
    debug.dbg('call before result %s, current %s, scope %s'
              % (result, current, scope))
    result = follow_paths(path, result, position=position)
    return result
def follow_paths(path, results, position=None):
    """
    Apply the remaining call path to every entry of `results` in parallel.

    If any branch signals exhaustion (follow_path returns None), the
    current `results` are returned unchanged.
    """
    if not results:
        return []
    if len(results) > 1:
        sub_paths = itertools.tee(path, len(results))
    else:
        sub_paths = [path]
    new_results = []
    for sub_path, entry in zip(sub_paths, results):
        followed = follow_path(sub_path, entry, position=position)
        if followed is None:
            # the path generator is exhausted -> stop here
            return results
        new_results += followed
    return new_results
def follow_path(path, scope, position=None):
    """
    Takes a generator and tries to complete the path.

    :return: list of results, or None when the path generator is already
        exhausted (the caller treats None as "stop iteration").
    """
    # current is either an Array or a Scope
    try:
        current = next(path)
    except StopIteration:
        return None
    debug.dbg('follow', current, scope)
    # BUG FIX: removed a leftover Python-2 debug `print` statement here;
    # it spammed stdout and is a SyntaxError under Python 3.
    result = []
    if isinstance(current, parsing.Array):
        # this must be an execution, either () or []
        if current.type == parsing.Array.LIST:
            result = scope.get_index_types(current)
        elif current.type not in [parsing.Array.DICT]:
            # scope must be a class or func - make an instance or execution
            debug.dbg('exec', scope)
            result = Execution(scope, current).get_return_types()
        else:
            # curly braces are not allowed, because they make no sense
            debug.warning('strange function call with {}', current, scope)
    else:
        if isinstance(scope, parsing.Function):
            # TODO check default function methods and return them
            result = []
        else:
            # TODO check magic class methods and return them also
            # this is the typical lookup while chaining things
            result = strip_imports(get_scopes_for_name(scope, current,
                                                       position=position))
    return follow_paths(path, result, position=position)
def follow_import(_import):
    """
    follows a module name and returns the parser.
    :param _import: The import statement.
    :type _import: parsing.Import
    """
    # set path together
    ns_list = []
    if _import.from_ns:
        ns_list += _import.from_ns.names
    if _import.namespace:
        ns_list += _import.namespace.names
    loaded_in = _import.get_parent_until()
    # resolve as much of the dotted path as possible to a module; `rest`
    # is whatever still has to be looked up inside that module
    scope, rest = modules.find_module(loaded_in, ns_list)
    if rest:
        scopes = follow_path(iter(rest), scope)
    else:
        scopes = [scope]
    # star imports of the found modules are visible as well
    new = []
    for scope in scopes:
        new += remove_star_imports(scope)
    scopes += new
    debug.dbg('after import', scopes, rest)
    return scopes
def remove_star_imports(scope):
    """
    Return the set of all modules that `scope` pulls in via star imports
    (``from x import *``), following nested star imports recursively.
    """
    star_modules = strip_imports(i for i in scope.get_imports() if i.star)
    found = []
    for module in star_modules:
        found += remove_star_imports(module)
    star_modules += found
    # a set filters the duplicates that mutual star imports produce
    return set(star_modules)
prints deleted
"""
follow_statement -> follow_call -> follow_paths -> follow_path
'follow_import'
`get_names_for_scope` and `get_scopes_for_name` are search functions
TODO doc
TODO list comprehensions, priority?
TODO evaluate asserts (type safety)
TODO generators
python 3 stuff:
TODO class decorators
TODO annotations ? how ? type evaluation and return?
TODO nonlocal statement
"""
from _compatibility import next
import itertools
import copy
import parsing
import modules
import debug
import builtin
# every memo dict created by `memoize_default`; emptied via `clear_caches()`
memoize_caches = []
class MultiLevelStopIteration(Exception):
    """
    Replacement for a raw StopIteration inside generators, so that it is
    not silently swallowed by the generator machinery.
    """
    pass
def clear_caches():
    """Empty every memo dict registered by `memoize_default`."""
    for cache in memoize_caches:
        cache.clear()
def memoize_default(default=None):
    """
    This is a typical memoization decorator, BUT there is one difference:
    To prevent recursion it sets defaults.
    Preventing recursion is in this case the much bigger use than speed. I
    don't think, that there is a big speed difference, but there are many cases
    where recursion could happen (think about a = b; b = a).
    """
    def decorator(function):
        memo = {}
        # register so clear_caches() can wipe this cache as well
        memoize_caches.append(memo)

        def wrapper(*args, **kwargs):
            key = (args, frozenset(kwargs.items()))
            try:
                return memo[key]
            except KeyError:
                # pre-seed with the default so a recursive call returns
                # it instead of recursing forever
                memo[key] = default
                value = function(*args, **kwargs)
                memo[key] = value
                return value
        return wrapper
    return decorator
class Executable(object):
    """ An instance is also an executable - because __init__ is called """
    def __init__(self, base, var_args=[]):
        # NOTE(review): mutable default argument; safe only while
        # `var_args` is never mutated in place — verify.
        self.base = base
        # the param input array
        self.var_args = var_args
        # the function whose `param_cb` points back here (see set_param_cb)
        self.func = None

    def get_parent_until(self, *args):
        # delegate to the wrapped scope
        return self.base.get_parent_until(*args)

    @property
    def scope(self):
        """ Just try through the whole param array to find the own scope """
        for param in self.var_args:
            for call in param:
                try:
                    return call.parent_stmt.parent
                except AttributeError:  # if operators are there
                    pass
        raise IndexError('No params available')

    @memoize_default(default=[])
    def get_params(self):
        """
        This returns the params for an Execution/Instance and is injected as a
        'hack' into the parsing.Function class.
        This needs to be here, because Instance can have __init__ functions,
        which act the same way as normal functions
        """
        def gen_param_name_copy(param, keys=[], values=[], array_type=None):
            # build a synthetic assignment so the copied param evaluates
            # to the caller-supplied value(s)
            calls = parsing.Array(parsing.Array.NOARRAY,
                                  self.var_args.parent_stmt)
            calls.values = values
            calls.keys = keys
            calls.type = array_type
            new_param = copy.copy(param)
            new_param._assignment_calls_calculated = True
            new_param._assignment_calls = calls
            name = copy.copy(param.get_name())
            name.parent = new_param
            return name

        result = []
        start_offset = 0
        if isinstance(self.func, InstanceElement):
            # care for self -> just exclude it and add the instance
            start_offset = 1
            self_name = copy.copy(self.func.params[0].get_name())
            self_name.parent = self.func.instance
            result.append(self_name)

        param_dict = {}
        for param in self.func.params:
            param_dict[str(param.get_name())] = param
        # There may be calls, which don't fit all the params, this just ignores
        # it.
        var_arg_iterator = self.get_var_args_iterator()

        non_matching_keys = []
        for param in self.func.params[start_offset:]:
            # The value and key can both be null. There, the defaults apply.
            # args / kwargs will just be empty arrays / dicts, respectively.
            key, value = next(var_arg_iterator, (None, None))
            # drain all keyword arguments first
            while key:
                try:
                    key_param = param_dict[str(key)]
                except KeyError:
                    non_matching_keys.append((key, value))
                else:
                    result.append(gen_param_name_copy(key_param,
                                                      values=[value]))
                key, value = next(var_arg_iterator, (None, None))

            assignment = param.get_assignment_calls().values[0]
            keys = []
            values = []
            array_type = None
            if assignment[0] == '*':
                # *args param
                array_type = parsing.Array.TUPLE
                if value:
                    values.append(value)
                for key, value in var_arg_iterator:
                    # iterate until a key argument is found
                    if key:
                        var_arg_iterator.push_back(key, value)
                        break
                    values.append(value)
            elif assignment[0] == '**':
                # **kwargs param
                array_type = parsing.Array.DICT
                if non_matching_keys:
                    keys, values = zip(*non_matching_keys)
            else:
                # normal param
                if value:
                    values = [value]
            result.append(gen_param_name_copy(param, keys=keys, values=values,
                                              array_type=array_type))
        return result

    def get_var_args_iterator(self):
        """
        Yields a key/value pair, the key is None, if its not a named arg.
        """
        def iterate():
            # var_args is typically an Array, and not a list
            for var_arg in self.var_args:
                # *args
                if var_arg[0] == '*':
                    arrays = follow_call_list(self.scope, [var_arg[1:]])
                    for array in arrays:
                        for field in array.get_contents():
                            yield None, field
                # **kwargs
                elif var_arg[0] == '**':
                    arrays = follow_call_list(self.scope, [var_arg[1:]])
                    for array in arrays:
                        for key, field in array.get_contents():
                            # take the first index
                            if isinstance(key, parsing.Name):
                                name = key
                            else:
                                name = key[0].name
                            yield name, field
                    # NOTE(review): placement reconstructed from mangled
                    # source — this bare yield looks like a leftover;
                    # confirm against upstream history.
                    yield var_arg
                # normal arguments (including key arguments)
                else:
                    if len(var_arg) > 1 and var_arg[1] == '=':
                        # this is a named parameter
                        yield var_arg[0].name, var_arg[2:]
                    else:
                        yield None, var_arg

        class PushBackIterator(object):
            # iterator wrapper that lets get_params un-read one pair
            def __init__(self, iterator):
                self.pushes = []
                self.iterator = iterator

            def push_back(self, key, value):
                self.pushes.append((key, value))

            def __iter__(self):
                return self

            def next(self):
                """ Python 2 Compatibility """
                return self.__next__()

            def __next__(self):
                try:
                    return self.pushes.pop()
                except IndexError:
                    return next(self.iterator)

        return iter(PushBackIterator(iterate()))

    def set_param_cb(self, func):
        # wire `func` so that evaluating its params asks this execution
        self.func = func
        func.param_cb = self.get_params
class Instance(Executable):
    """ This class is used to evaluate instances. """
    def __init__(self, base, var_args=()):
        # BUG FIX: default changed from the mutable `[]` to an equally
        # falsy immutable tuple, ruling out shared-state surprises.
        super(Instance, self).__init__(base, var_args)
        if var_args:
            # constructor arguments are known -> wire up __init__
            self.set_init_params()

    def set_init_params(self):
        """Make the class' __init__ params resolve to our var_args."""
        for sub in self.base.subscopes:
            if isinstance(sub, parsing.Function) \
                    and sub.name.get_code() == '__init__':
                self.set_param_cb(InstanceElement(self, sub))

    def get_func_self_name(self, func):
        """
        Returns the name of the first param in a class method (which is
        normally self
        """
        try:
            return func.params[0].used_vars[0].names[0]
        except (AttributeError, IndexError):
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and genuine programming errors.
            return None

    def get_defined_names(self):
        """
        Get the instance vars of a class. This includes the vars of all
        classes
        """
        def add_self_name(name):
            # copy the name and strip the leading `self.` part
            n = copy.copy(name)
            n.names = n.names[1:]
            names.append(InstanceElement(self, n))

        names = []
        # this loop adds the names of the self object, copies them and removes
        # the self.
        for s in self.base.subscopes:
            # get the self name, if there's one
            self_name = self.get_func_self_name(s)
            if self_name:
                for n in s.get_set_vars():
                    # Only names with the selfname are being added.
                    # It is also important, that they have a len() of 2,
                    # because otherwise, they are just something else
                    if n.names[0] == self_name and len(n.names) == 2:
                        add_self_name(n)
        for var in self.base.get_defined_names(as_instance=True):
            # functions are also instance elements
            if isinstance(var.parent, parsing.Function):
                var = InstanceElement(self, var)
            names.append(var)
        return names

    def parent(self):
        # NOTE(review): not a @property, unlike sibling classes here —
        # confirm whether callers expect an attribute or a method.
        return self.base.parent

    def __repr__(self):
        return "<p%s of %s (var_args: %s)>" % \
            (self.__class__.__name__, self.base, len(self.var_args or []))
class InstanceElement(object):
    """
    Couples an attribute/function (`var`) with the `instance` it is
    accessed on, so lookups never lose the instance context.
    """
    def __init__(self, instance, var):
        super(InstanceElement, self).__init__()
        self.instance = instance
        self.var = var

    @property
    def parent(self):
        # walk upwards while keeping the instance binding
        return InstanceElement(self.instance, self.var.parent)

    @property
    def param_cb(self):
        return self.var.param_cb

    @param_cb.setter
    def param_cb(self, value):
        self.var.param_cb = value

    def __getattr__(self, name):
        # everything not defined here is answered by the wrapped object
        return getattr(self.var, name)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.var)
class Class(object):
    """
    Wrapper around parsing.Class that also exposes the names inherited
    from super classes.
    """
    def __init__(self, base):
        self.base = base

    def get_defined_names(self, as_instance=False):
        def in_iterable(name, iterable):
            # True if a name with the same final part is already known
            for i in iterable:
                # only the last name is important, because these names have a
                # maximal length of 2, with the first one being `self`.
                if i.names[-1] == name.names[-1]:
                    return True
            return False

        names = self.base.get_defined_names()
        # check super classes:
        for s in self.base.supers:
            for cls in follow_statement(s):
                # get the inherited names
                if as_instance:
                    cls = Instance(cls)
                for i in cls.get_defined_names():
                    if not in_iterable(i, names):
                        names.append(i)
        return names

    def __getattr__(self, name):
        # everything else behaves like the wrapped parsing.Class
        return getattr(self.base, name)

    def __repr__(self):
        return "<p%s of %s>" % (self.__class__.__name__, self.base)
class Execution(Executable):
    """
    This class is used to evaluate functions and their returns.
    """
    # NOTE(review): appears unused inside this class — confirm.
    cache = {}

    def process_decorators(self):
        """ Returns the function, that is to be executed in the end """
        func = self.base
        # only enter it, if has not already been processed
        if hasattr(func, 'is_decorated') and not func.is_decorated:
            for dec in reversed(self.base.decorators):
                debug.dbg('decorator:', dec, func)
                dec_results = follow_statement(dec)
                if not len(dec_results):
                    debug.warning('decorator func not found', self.base)
                    return []
                if len(dec_results) > 1:
                    debug.warning('multiple decorators found', self.base,
                                  dec_results)
                decorator = dec_results.pop()
                # create param array
                params = parsing.Array(parsing.Array.NOARRAY, func)
                params.values = [[func]]
                wrappers = Execution(decorator, params).get_return_types()
                if not len(wrappers):
                    debug.warning('no wrappers found', self.base)
                    return []
                if len(wrappers) > 1:
                    debug.warning('multiple wrappers found', self.base,
                                  wrappers)
                # this is here, that the wrapper gets executed
                func = wrappers[0]
            debug.dbg('decorator end')
        return func

    @memoize_default(default=[])
    def get_return_types(self):
        """
        Get the return vars of a function.
        """
        stmts = []
        if isinstance(self.base, Class):
            # there maybe executions of executions
            stmts = [Instance(self.base, self.var_args)]
        else:
            func = self.process_decorators()
            # set the callback function to get the var_args
            # don't do this with exceptions, as usual, because some deeper
            # exceptions could be catched - and I wouldn't know what happened.
            if hasattr(func, 'returns'):
                self.set_param_cb(func)
                self.base.is_decorated = True
                ret = func.returns
                for s in ret:
                    stmts += follow_statement(s)
                # reset the callback function on exit
                # TODO how can we deactivate this again?
                # func could have changed because of decorators, so clear them
                # again
                self.base.is_decorated = False
            else:
                debug.warning("no execution possible", func)
        debug.dbg('exec results:', stmts, self.base, repr(self))
        return strip_imports(stmts)

    def __repr__(self):
        return "<%s of %s>" % \
            (self.__class__.__name__, self.base)
class Array(object):
    """
    Used as a mirror to parsing.Array, if needed. It defines some getter
    methods which are important in this module.
    """

    def __init__(self, array):
        self._array = array

    def get_index_types(self, index=None):
        """Return the possible types produced by an index access."""
        if index is not None:
            # a slice (anything containing ':') cannot narrow the result
            if any(':' in sub for sub in index):
                return [self]
            # a single fixed index -> try the exact lookup first
            try:
                only = index.get_only_subelement().name
            except AttributeError:
                pass
            else:
                try:
                    return self.get_exact_index_types(only)
                except (IndexError, KeyError):
                    pass
        # fall back to the union of every contained value
        return self.follow_values(self._array.values)

    def get_exact_index_types(self, index):
        """Return the types at one exact position or dict key."""
        if self._array.type == parsing.Array.DICT:
            wanted_key = index
            index = None
            for pos, key_parts in enumerate(self._array.keys):
                # only single-element keys can match a plain string
                if len(key_parts) != 1:
                    continue
                try:
                    candidate = key_parts.get_code()
                except AttributeError:
                    try:
                        candidate = key_parts[0].name
                    except AttributeError:
                        candidate = None
                if wanted_key == candidate:
                    index = pos
                    break
            if index is None:
                raise KeyError('No key found in dictionary')
        return self.follow_values([self._array[index]])

    def follow_values(self, values):
        """Evaluate `values` in the scope containing this array."""
        surrounding_scope = self._array.parent_stmt.parent
        return follow_call_list(surrounding_scope, values)

    def get_defined_names(self):
        """Mirror the names of the matching builtin type (list, dict, ...)."""
        # array.type is the type name as a string, e.g. 'list'
        builtin_type = get_scopes_for_name(builtin.Builtin.scope,
                                           self._array.type)[0]
        return [ArrayElement(n) for n in builtin_type.get_defined_names()]

    def get_contents(self):
        return self._array

    def __repr__(self):
        return "<p%s of %s>" % (type(self).__name__, self._array)
class ArrayElement(object):
    """Wraps a single builtin name so it can act as an array member."""

    def __init__(self, name):
        super(ArrayElement, self).__init__()
        self.name = name

    @property
    def parent(self):
        # array elements are never resolved through their parent
        raise NotImplementedError("This shouldn't happen")

    @property
    def returns(self):
        # delegate to the function the wrapped name belongs to
        return self.name.parent.returns

    @property
    def names(self):
        return self.name.names

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self.name)
def get_defined_names_for_position(obj, position):
    """
    Return the names of `obj`, restricted to those defined before
    `position` (a (line, indent) tuple).  Without a position all names
    are returned.
    """
    names = obj.get_defined_names()
    if not position:
        return names
    return [n for n in names if (n.line_nr, n.indent) < position]
def get_names_for_scope(scope, position=None, star_search=True):
    """
    Get all completions possible for the current scope.
    The star search option is only here to provide an optimization. Otherwise
    the whole thing would probably start a little recursive madness.

    Generator yielding ``(scope, name_list)`` pairs, innermost scope
    first, builtins last.
    """
    start_scope = scope
    while scope:
        # class variables/functions are only available
        # in the class scope itself, not in nested scopes
        if (not isinstance(scope, Class) or scope == start_scope) \
                and not isinstance(scope, parsing.Flow):
            try:
                yield scope, get_defined_names_for_position(scope, position)
            except StopIteration:
                # a raw StopIteration would silently end this generator,
                # so convert it into something the caller can see
                raise MultiLevelStopIteration('StopIteration raised somewhere')
        scope = scope.parent
    # add star imports
    if star_search:
        for s in remove_star_imports(start_scope.get_parent_until()):
            for g in get_names_for_scope(s, star_search=False):
                yield g
        # add builtins to the global scope
        builtin_scope = builtin.Builtin.scope
        yield builtin_scope, builtin_scope.get_defined_names()
def get_scopes_for_name(scope, name_str, position=None, search_global=False):
    """
    Resolve `name_str` within `scope` to the scopes/types it stands for.

    :param position: Position of the last statement ->tuple of line, indent
    :return: List of Names. Their parents are the scopes, they are defined in.
    :rtype: list
    """
    def remove_statements(result):
        """
        This is the part where statements are being stripped.
        Due to lazy evaluation, statements like a = func; b = a; b() have to be
        evaluated.
        """
        res_new = []
        for r in result:
            if isinstance(r, parsing.Statement) \
                    or isinstance(r, InstanceElement) \
                    and isinstance(r.var, parsing.Statement):
                # global variables handling
                if r.is_global():
                    for token_name in r.token_list[1:]:
                        if isinstance(token_name, parsing.Name):
                            res_new += get_scopes_for_name(r.parent,
                                                           str(token_name))
                else:
                    # follow the statement to its actual value(s)
                    scopes = follow_statement(r, seek_name=name_str)
                    res_new += remove_statements(scopes)
            else:
                if isinstance(r, parsing.Class):
                    # wrap raw classes so super classes are respected
                    r = Class(r)
                res_new.append(r)
        debug.dbg('sfn remove, new: %s, old: %s' % (res_new, result))
        return res_new

    def filter_name(scope_generator):
        def handle_non_arrays(name):
            result = []
            par = name.parent
            if isinstance(par, parsing.Flow):
                if par.command == 'for':
                    # take the first statement (for has always only
                    # one, remember `in`). And follow it. After that,
                    # get the types which are in the array
                    arrays = follow_statement(par.inits[0])
                    for array in arrays:
                        in_vars = array.get_index_types()
                        if len(par.set_vars) > 1:
                            var_arr = par.set_stmt.get_assignment_calls()
                            result += assign_tuples(var_arr, in_vars, name_str)
                        else:
                            result += in_vars
                else:
                    debug.warning('Flow: Why are you here? %s' % par.command)
            elif isinstance(par, parsing.Param) \
                    and isinstance(par.parent.parent, parsing.Class) \
                    and par.position == 0:
                # this is where self gets added - this happens at another
                # place, if the var_args are clear. But some times the class is
                # not known. Therefore set self.
                result.append(Instance(Class(par.parent.parent)))
                result.append(par)
            else:
                result.append(par)
            return result

        result = []
        # compare func uses the tuple of line/indent = row/column
        comparison_func = lambda name: (name.line_nr, name.indent)
        for scope, name_list in scope_generator:
            # here is the position stuff happening (sorting of variables)
            for name in sorted(name_list, key=comparison_func, reverse=True):
                if name_str == name.get_code():
                    result += handle_non_arrays(name)
                    # this means that a definition was found and is not e.g.
                    # in if/else.
                    if name.parent.parent == scope:
                        break
            # if there are results, ignore the other scopes
            if result:
                break
        debug.dbg('sfn filter', name_str, result)
        return result

    if search_global:
        scope_generator = get_names_for_scope(scope, position=position)
    else:
        if position:
            names = get_defined_names_for_position(scope, position)
        else:
            names = scope.get_defined_names()
        scope_generator = iter([(scope, names)])
    return remove_statements(filter_name(scope_generator))
def strip_imports(scopes):
    """
    Resolve every parsing.Import in `scopes` to the module(s) it points
    at; everything else is passed through unchanged.
    """
    result = []
    for scope in scopes:
        if not isinstance(scope, parsing.Import):
            result.append(scope)
            continue
        try:
            result += follow_import(scope)
        except modules.ModuleNotFound:
            debug.warning('Module not found: ' + str(scope))
    return result
def assign_tuples(tup, results, seek_name):
    """
    This is a normal assignment checker. In python functions and other things
    can return tuples:
    >>> a, b = 1, ""
    >>> a, (b, c) = 1, ("", 1.0)
    Here, if seek_name is "a", the number type will be returned.
    The first part (before `=`) is the param tuples, the second one result.
    :type tup: parsing.Array
    """
    def eval_results(index):
        # collect the types that `results` carry at tuple position `index`
        types = []
        for r in results:
            if hasattr(r, "get_exact_index_types"):
                types += r.get_exact_index_types(index)
            else:
                debug.warning("assign tuples: invalid tuple lookup")
        return types

    result = []
    if tup.type == parsing.Array.NOARRAY:
        # here we have unnecessary braces, which we just remove
        arr = tup.get_only_subelement()
        result = assign_tuples(arr, results, seek_name)
    else:
        for i, t in enumerate(tup):
            # used in assignments. there is just one call and no other things,
            # therefor we can just assume, that the first part is important.
            if len(t) != 1:
                raise AttributeError('Array length should be 1')
            t = t[0]
            # check the left part, if it's still tuples in it or a Call
            if isinstance(t, parsing.Array):
                # these are "sub" tuples
                result += assign_tuples(t, eval_results(i), seek_name)
            else:
                if t.name.names[-1] == seek_name:
                    result += eval_results(i)
    return result
@memoize_default(default=[])
def follow_statement(stmt, scope=None, seek_name=None):
    """
    Evaluate a statement to the list of types/scopes it can stand for.

    :param stmt: contains a statement
    :param scope: contains a scope. If not given, takes the parent of stmt.
    :param seek_name: left-hand-side name whose types are wanted when the
        statement assigns to multiple variables.
    """
    if scope is None:
        scope = stmt.get_parent_until(parsing.Function, Execution,
                                      parsing.Class, Instance,
                                      InstanceElement)
    debug.dbg('follow_stmt', stmt, 'in', scope, seek_name)
    call_list = stmt.get_assignment_calls()
    debug.dbg('calls', call_list, call_list.values)
    result = follow_call_list(scope, call_list)
    # assignment checking is only important if the statement defines multiple
    # variables
    if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
        # TODO this should have its own call_list, because call_list can also
        # return 3 results for 2 variables.
        new_result = []
        for op, set_vars in stmt.assignment_details:
            new_result += assign_tuples(set_vars, result, seek_name)
        result = new_result
    return result
def follow_call_list(scope, call_list):
    """
    The call_list has a special structure.
    This can be either `parsing.Array` or `list of list`.
    It is used to evaluate a two dimensional object, that has calls, arrays and
    operators in it.

    :return: set of the results all contained calls evaluate to.
    """
    if parsing.Array.is_type(call_list, parsing.Array.TUPLE,
                             parsing.Array.DICT):
        # Tuples can stand just alone without any braces. These would be
        # recognized as separate calls, but actually are a tuple.
        result = follow_call(scope, call_list)
    else:
        result = []
        for calls in call_list:
            for call in calls:
                if parsing.Array.is_type(call, parsing.Array.NOARRAY):
                    # plain parentheses only group -> recurse into them
                    result += follow_call_list(scope, call)
                else:
                    # with things like params, these can also be functions, etc
                    if isinstance(call, (parsing.Function, parsing.Class)):
                        result.append(call)
                    elif not isinstance(call, str):
                        # The string tokens are just operations (+, -, etc.)
                        result += follow_call(scope, call)
    return set(result)
def follow_call(scope, call):
    """
    Follow a call is following a function, variable, string, etc.

    :param scope: the scope the call happens in.
    :param call: a parsing.Call; its generated path is walked element by
        element via `follow_paths`.
    :return: list of resulting scopes/types.
    """
    path = call.generate_call_list()
    position = (call.parent_stmt.line_nr, call.parent_stmt.indent)
    current = next(path)
    if isinstance(current, parsing.Array):
        result = [Array(current)]
    else:
        # TODO add better care for int/unicode, now str/float are just used
        # instead
        if not isinstance(current, parsing.NamePart):
            if current.type == parsing.Call.STRING:
                scopes = get_scopes_for_name(builtin.Builtin.scope, 'str')
            elif current.type == parsing.Call.NUMBER:
                scopes = get_scopes_for_name(builtin.Builtin.scope, 'float')
            else:
                # BUG FIX: `scopes` was previously left unbound on this
                # branch, which raised a NameError right after the warning.
                scopes = []
                debug.warning('unknown type:', current.type, current)
            # make instances of those number/string objects
            scopes = [Instance(s) for s in scopes]
        else:
            # this is the first global lookup
            scopes = get_scopes_for_name(scope, current, position=position,
                                         search_global=True)
        result = strip_imports(scopes)
        if result != scopes:
            # reset the position, when imports were stripped
            position = None
    debug.dbg('call before result %s, current %s, scope %s'
              % (result, current, scope))
    result = follow_paths(path, result, position=position)
    return result
def follow_paths(path, results, position=None):
    """
    Follow the remaining `path` once for every entry in `results`.

    The path generator is duplicated with ``itertools.tee`` when there is
    more than one result to follow.  Returns the combined results, or the
    untouched `results` when the path was already exhausted.
    """
    if not results:
        return []
    branch_count = len(results)
    branches = itertools.tee(path, branch_count) if branch_count > 1 else [path]
    collected = []
    for branch, result in zip(branches, results):
        followed = follow_path(branch, result, position=position)
        if followed is None:
            # None signals that the path generator had no elements left.
            return results
        collected += followed
    return collected
def follow_path(path, scope, position=None):
    """
    Takes a generator and tries to complete the path.

    Consumes one element from `path`, resolves it against `scope`, then
    recurses via ``follow_paths``.  Returns ``None`` when the path generator
    is exhausted (callers treat that as "stop iteration").
    """
    # current is either an Array or a Scope
    try:
        current = next(path)
    except StopIteration:
        return None
    debug.dbg('follow', current, scope)
    result = []
    if isinstance(current, parsing.Array):
        # this must be an execution, either () or []
        if current.type == parsing.Array.LIST:
            # [] -> subscript: ask the scope for its element types
            result = scope.get_index_types(current)
        elif current.type not in [parsing.Array.DICT]:
            # scope must be a class or func - make an instance or execution
            debug.dbg('exec', scope)
            result = Execution(scope, current).get_return_types()
        else:
            # curly braces are not allowed, because they make no sense
            debug.warning('strange function call with {}', current, scope)
    else:
        if isinstance(scope, parsing.Function):
            # TODO check default function methods and return them
            result = []
        else:
            # TODO check magic class methods and return them also
            # this is the typical lookup while chaining things
            result = strip_imports(get_scopes_for_name(scope, current,
                                                       position=position))
    return follow_paths(path, result, position=position)
def follow_import(_import):
    """
    Follow a module name and return the scope(s) it resolves to.

    :param _import: The import statement.
    :type _import: parsing.Import
    """
    # Assemble the dotted path; "from X import Y" contributes both parts.
    namespace_parts = []
    if _import.from_ns:
        namespace_parts += _import.from_ns.names
    if _import.namespace:
        namespace_parts += _import.namespace.names
    importing_module = _import.get_parent_until()
    scope, rest = modules.find_module(importing_module, namespace_parts)
    # Whatever find_module could not resolve is followed as an attribute path.
    scopes = follow_path(iter(rest), scope) if rest else [scope]
    # Additionally pull in every module reachable through a star import.
    star_scopes = []
    for resolved in scopes:
        star_scopes += remove_star_imports(resolved)
    scopes += star_scopes
    debug.dbg('after import', scopes, rest)
    return scopes
def remove_star_imports(scope):
    """
    Recursively collect every module pulled in via ``from x import *``
    starting from `scope`, following star imports of those modules too.

    Returns a de-duplicated ``set`` of module scopes.
    """
    # Named `star_modules` (not `modules`) to avoid shadowing the
    # module-level `modules` import used elsewhere in this file.
    star_modules = strip_imports(i for i in scope.get_imports() if i.star)
    transitive = []
    for star_module in star_modules:
        transitive += remove_star_imports(star_module)
    star_modules += transitive
    # filter duplicate modules
    return set(star_modules)
|
import re
from seleniumhelpers import SeleniumTestCase
from django.test.utils import override_settings
from django.conf import settings
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException
@override_settings(EXTJS4_DEBUG=False)
class SubjectAdminSeleniumTestCase(SeleniumTestCase):
    """Selenium base test case for the subjectadmin app (non-debug ExtJS build)."""
    def browseTo(self, path):
        # ``path`` is the fragment after '#' in the single-page-app URL.
        self.getPath('/devilry_subjectadmin/#' + path)
    def login(self, username, password='test'):
        """Log in through the regular Django login form."""
        self.selenium.get('%s%s' % (self.live_server_url, settings.LOGIN_URL))
        username_input = self.selenium.find_element_by_name("username")
        username_input.send_keys(username)
        password_input = self.selenium.find_element_by_name("password")
        password_input.send_keys(password)
        self.selenium.find_element_by_xpath('//input[@value="Log in"]').click()
    def get_absolute_url(self, path):
        """Return the absolute URL of ``path`` within the subjectadmin app."""
        return '{live_server_url}/devilry_subjectadmin/#{path}'.format(live_server_url=self.live_server_url,
                                                                       path=path)
    def get_breadcrumbstring(self, expected_contains):
        """Wait until the breadcrumb contains ``expected_contains``; return its parts."""
        self.waitForCssSelector('.devilry_extjsextras_breadcrumb')
        def breadcrumbLoaded(breadcrumb):
            return expected_contains in breadcrumb.text
        breadcrumb = self.selenium.find_element_by_css_selector('.devilry_extjsextras_breadcrumb')
        self.waitFor(breadcrumb, breadcrumbLoaded)
        # NOTE(review): pattern should be a raw string (r'\s*/\s*') to avoid
        # invalid escape sequences on Python 3; matching is unchanged today.
        return re.split('\s*\/\s*', breadcrumb.text)
class RenameBasenodeTestMixin(object):
    """Mixin with helpers for testing the rename-basenode window.

    Subclasses must set :attr:`renamebutton_id`.
    """
    # id of the button that opens the rename window
    renamebutton_id = None
    def _get_field(self, containercls, fieldname):
        # Locate an <input> by name inside the container identified by CSS class.
        field = self.selenium.find_element_by_css_selector('{0} input[name={1}]'.format(containercls, fieldname))
        return field
    def _init_renametest(self):
        """Open the rename window; return (window, short_name, long_name, savebutton)."""
        self.selenium.find_element_by_css_selector('#{0} button'.format(self.renamebutton_id)).click()
        self.waitForCssSelector('.devilry_rename_basenode_window')
        window = self.selenium.find_element_by_css_selector('.devilry_rename_basenode_window')
        short_name = self._get_field('.devilry_rename_basenode_window', 'short_name')
        long_name = self._get_field('.devilry_rename_basenode_window', 'long_name')
        savebutton = window.find_element_by_css_selector('.devilry_extjsextras_savebutton button')
        return window, short_name, long_name, savebutton
    def rename_test_helper(self, basenode):
        """Rename ``basenode`` through the UI and assert the database was updated."""
        window, short_name, long_name, savebutton = self._init_renametest()
        self.assertEquals(short_name.get_attribute('value'), basenode.short_name)
        self.assertEquals(long_name.get_attribute('value'), basenode.long_name)
        short_name.clear()
        # Clearing a required field must disable the save button.
        self.waitForDisabled(savebutton)
        short_name.send_keys('renamed-shortname')
        long_name.clear()
        self.waitForDisabled(savebutton)
        long_name.send_keys('Renamed long name')
        self.waitForEnabled(savebutton)
        savebutton.click()
        self.waitForTitleContains('renamed-shortname')
        # Re-fetch from the database to verify the save round-trip.
        updated = basenode.__class__.objects.get(id=basenode.id)
        self.assertEquals(updated.short_name, 'renamed-shortname')
        self.assertEquals(updated.long_name, 'Renamed long name')
    def rename_test_failure_helper(self):
        """Submit an invalid short name and expect exactly one error alert."""
        window, short_name, long_name, savebutton = self._init_renametest()
        short_name.clear()
        short_name.send_keys('Renamed-shortname')
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_extjsextras_alertmessagelist .alert-error')), 0)
        savebutton.click()
        self.waitForCssSelector('.devilry_extjsextras_alertmessagelist', within=window)
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_extjsextras_alertmessagelist .alert-error')), 1)
class DeleteBasenodeTestMixin(object):
    """Mixin with helpers for testing the delete-basenode confirm dialog.

    Subclasses must set :attr:`deletebutton_id`.  NOTE(review): relies on
    ``self._get_field`` from ``RenameBasenodeTestMixin`` — verify that
    concrete test cases mix both classes in.
    """
    # id of the button that opens the delete dialog
    deletebutton_id = None
    def click_delete_button(self):
        self.selenium.find_element_by_css_selector('#{0} button'.format(self.deletebutton_id)).click()
    def perform_delete(self):
        """Open the confirm dialog, type DELETE and confirm the deletion."""
        self.click_delete_button()
        self.waitForCssSelector('.devilry_confirmdeletedialog')
        window = self.selenium.find_element_by_css_selector('.devilry_confirmdeletedialog')
        inputfield = self._get_field('.devilry_confirmdeletedialog', 'confirm_text')
        deletebutton = window.find_element_by_css_selector('.devilry_deletebutton button')
        inputfield.send_keys('DELETE')
        self.waitForEnabled(deletebutton)
        deletebutton.click()
class EditAdministratorsTestMixin(object):
    """
    Test the Edit/manage administrators window.
    Requires ``self.testhelper = TestHelper()`` in ``setUp()`` of subclass, and
    the subclass must implement browseToTestBasenode().
    """
    def browseToTestBasenode(self):
        """
        Browse to the basenode that is returned by :meth:`.getBasenode`.
        """
        raise NotImplementedError()
    def getBasenode(self):
        """
        Get the basenode that is used to test edit admins.
        """
        raise NotImplementedError()
    def _open_edit_administrators_window(self):
        # Open the manage-admins panel from the admins box.
        self.waitForCssSelector('.devilry_subjectadmin_adminsbox button')
        self.selenium.find_element_by_css_selector('.devilry_subjectadmin_adminsbox button').click()
        self.waitForCssSelector('.devilry_subjectadmin_manageadminspanel')
    def _add_user_via_ui(self, username, query=None):
        """Type ``query`` into the autocomplete and pick ``username`` with RETURN."""
        query = query or username
        textfield = self.selenium.find_element_by_css_selector('.devilry_usersearch_autocompleteuserwidget input[type=text]')
        textfield.send_keys(query)
        self.waitForCssSelector('.autocompleteuserwidget_matchlist .matchlistitem_{username}'.format(username=username))
        textfield.send_keys(Keys.RETURN)
    def assertUserInEditTable(self, username):
        cssquery = '.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell_{username}'.format(username=username)
        try:
            self.waitForCssSelector(cssquery)
        # NOTE(review): Python-2-only except syntax; ``e`` is unused.
        except TimeoutException, e:
            self.fail('User "{username}" not in grid table'.format(username=username))
    def assertUserInAdminsList(self, username):
        cssquery = '.devilry_subjectadmin_administratorlist .administratorlistitem_{username}'.format(username=username)
        try:
            self.waitForCssSelector(cssquery)
        except TimeoutException, e:
            self.fail('User "{username}" not in administrator list'.format(username=username))
    def assertUserNotInEditTable(self, username):
        cssquery = '.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell_{username}'.format(username=username)
        try:
            self.waitFor(self.selenium, lambda s: len(self.selenium.find_elements_by_css_selector(cssquery)) == 0)
        # NOTE(review): failure message has inverted sense — a timeout here
        # means the user is unexpectedly STILL in the grid table.
        except TimeoutException, e:
            self.fail('User "{username}" not in grid table'.format(username=username))
    def assertUserNotInAdminsList(self, username):
        cssquery = '.devilry_subjectadmin_administratorlist .administratorlistitem_{username}'.format(username=username)
        try:
            self.waitFor(self.selenium, lambda s: len(self.selenium.find_elements_by_css_selector(cssquery)) == 0)
        # NOTE(review): same inverted-sense message as assertUserNotInEditTable.
        except TimeoutException, e:
            self.fail('User "{username}" not in administrator list'.format(username=username))
    def test_add_administrators(self):
        """Adding two admins via the UI stores both in the database."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('userone')
        self._add_user_via_ui('userone')
        self.assertUserInEditTable('userone')
        self.assertUserInAdminsList('userone')
        self.assertIn(self.testhelper.userone, basenode.admins.all())
        self.testhelper.create_user('usertwo')
        self._add_user_via_ui('usertwo')
        self.assertUserInEditTable('usertwo')
        self.assertUserInAdminsList('usertwo')
        self.assertIn(self.testhelper.usertwo, basenode.admins.all())
        self.assertEquals(basenode.admins.all().count(), 2)
    def test_add_administrator_by_email(self):
        """Users can be found via an email substring query."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('testuser1')
        self.testhelper.create_user('testuser2')
        user = self.testhelper.create_user('testuser3')
        user.email = 'superman@example.com'
        user.save()
        self._add_user_via_ui('testuser3', query='man@exa')
        self.assertUserInEditTable('testuser3')
        self.assertUserInAdminsList('testuser3')
        self.assertIn(self.testhelper.testuser3, basenode.admins.all())
    def test_add_administrator_by_fullname(self):
        """Users can be found via a full-name substring query."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('testuser1')
        self.testhelper.create_user('testuser2')
        user = self.testhelper.create_user('testuser3')
        user.devilryuserprofile.full_name = 'Superman'
        user.devilryuserprofile.save()
        self._add_user_via_ui('testuser3', query='uperma')
        self.assertUserInEditTable('testuser3')
        self.assertUserInAdminsList('testuser3')
        self.assertIn(self.testhelper.testuser3, basenode.admins.all())
    def _get_remove_button(self):
        return self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .removeButton button')
    def _click_selectall_button(self):
        self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .selectAllButton button').click()
    def _get_gridcell_cssquery(self, username):
        # CSS selector for a user's cell in the manage-admins grid.
        return '.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell_{username}'.format(username=username)
    def _get_gridcell(self, username):
        cssquery = self._get_gridcell_cssquery(username)
        self.waitForCssSelector(cssquery, timeout=5)
        return self.selenium.find_element_by_css_selector(cssquery)
    def _select_user(self, username):
        gridcell = self._get_gridcell(username)
        gridcell.click()
    def _remove_using_ui(self):
        """Click remove and confirm the 'Yes' button in the message box."""
        self._get_remove_button().click()
        self.waitForCssSelector('.x-message-box')
        def click_yes_button():
            for button in self.selenium.find_elements_by_css_selector('.x-message-box button'):
                if button.text.strip() == 'Yes':
                    button.click()
                    return
            self.fail('Could not find the "Yes" button')
        click_yes_button()
    def test_remove_administrator(self):
        """Removing a selected admin updates UI and database."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertIn(self.testhelper.userone, basenode.admins.all())
        self.assertUserInEditTable('userone')
        self.assertUserInAdminsList('userone')
        self._select_user('userone')
        self._remove_using_ui()
        self.assertUserNotInEditTable('userone')
        self.assertUserNotInAdminsList('userone')
        self.assertNotIn(self.testhelper.userone, basenode.admins.all())
    def test_remove_many_administrators(self):
        """Select-all followed by remove clears every admin."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        basenode.admins.add(self.testhelper.create_user('usertwo'))
        basenode.admins.add(self.testhelper.create_user('userthree'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertEquals(basenode.admins.all().count(), 3)
        self.assertUserInEditTable('userone')
        self.assertUserInEditTable('usertwo')
        self.assertUserInEditTable('userthree')
        self._click_selectall_button()
        self._remove_using_ui()
        self.assertUserNotInEditTable('userone')
        self.assertUserNotInEditTable('usertwo')
        self.assertUserNotInEditTable('userthree')
        self.assertEquals(basenode.admins.all().count(), 0)
    def test_remove_disabled_enabled(self):
        """The remove button is enabled only when a user is selected."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertFalse(self._get_remove_button().is_enabled())
        self._select_user('userone')
        self.assertTrue(self._get_remove_button().is_enabled())
    def test_search(self):
        """Typing in the search field filters the grid rows."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        basenode.admins.add(self.testhelper.create_user('usertwo'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        searchfield = self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .searchfield input[type=text]')
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell')), 2)
        searchfield.send_keys('one')
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell')), 1)
devilry_subjectadmin.tests.base: Use waitForCssSelectorNotFound.
import re
from seleniumhelpers import SeleniumTestCase
from django.test.utils import override_settings
from django.conf import settings
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException
@override_settings(EXTJS4_DEBUG=False)
class SubjectAdminSeleniumTestCase(SeleniumTestCase):
    """Selenium base test case for the subjectadmin app (non-debug ExtJS build)."""
    def browseTo(self, path):
        # ``path`` is the fragment after '#' in the single-page-app URL.
        self.getPath('/devilry_subjectadmin/#' + path)
    def login(self, username, password='test'):
        """Log in through the regular Django login form."""
        self.selenium.get('%s%s' % (self.live_server_url, settings.LOGIN_URL))
        username_input = self.selenium.find_element_by_name("username")
        username_input.send_keys(username)
        password_input = self.selenium.find_element_by_name("password")
        password_input.send_keys(password)
        self.selenium.find_element_by_xpath('//input[@value="Log in"]').click()
    def get_absolute_url(self, path):
        """Return the absolute URL of ``path`` within the subjectadmin app."""
        return '{live_server_url}/devilry_subjectadmin/#{path}'.format(live_server_url=self.live_server_url,
                                                                       path=path)
    def get_breadcrumbstring(self, expected_contains):
        """Wait until the breadcrumb contains ``expected_contains``; return its parts."""
        self.waitForCssSelector('.devilry_extjsextras_breadcrumb')
        def breadcrumbLoaded(breadcrumb):
            return expected_contains in breadcrumb.text
        breadcrumb = self.selenium.find_element_by_css_selector('.devilry_extjsextras_breadcrumb')
        self.waitFor(breadcrumb, breadcrumbLoaded)
        # BUGFIX: use a raw string for the regex — the previous '\s*\/\s*'
        # relied on invalid escape sequences (a DeprecationWarning on
        # Python 3, a future SyntaxError).  The compiled pattern is identical.
        return re.split(r'\s*/\s*', breadcrumb.text)
class RenameBasenodeTestMixin(object):
    """Mixin with helpers for testing the rename-basenode window.

    Subclasses must set :attr:`renamebutton_id`.
    """
    # id of the button that opens the rename window
    renamebutton_id = None
    def _get_field(self, containercls, fieldname):
        # Locate an <input> by name inside the container identified by CSS class.
        field = self.selenium.find_element_by_css_selector('{0} input[name={1}]'.format(containercls, fieldname))
        return field
    def _init_renametest(self):
        """Open the rename window; return (window, short_name, long_name, savebutton)."""
        self.selenium.find_element_by_css_selector('#{0} button'.format(self.renamebutton_id)).click()
        self.waitForCssSelector('.devilry_rename_basenode_window')
        window = self.selenium.find_element_by_css_selector('.devilry_rename_basenode_window')
        short_name = self._get_field('.devilry_rename_basenode_window', 'short_name')
        long_name = self._get_field('.devilry_rename_basenode_window', 'long_name')
        savebutton = window.find_element_by_css_selector('.devilry_extjsextras_savebutton button')
        return window, short_name, long_name, savebutton
    def rename_test_helper(self, basenode):
        """Rename ``basenode`` through the UI and assert the database was updated."""
        window, short_name, long_name, savebutton = self._init_renametest()
        self.assertEquals(short_name.get_attribute('value'), basenode.short_name)
        self.assertEquals(long_name.get_attribute('value'), basenode.long_name)
        short_name.clear()
        # Clearing a required field must disable the save button.
        self.waitForDisabled(savebutton)
        short_name.send_keys('renamed-shortname')
        long_name.clear()
        self.waitForDisabled(savebutton)
        long_name.send_keys('Renamed long name')
        self.waitForEnabled(savebutton)
        savebutton.click()
        self.waitForTitleContains('renamed-shortname')
        # Re-fetch from the database to verify the save round-trip.
        updated = basenode.__class__.objects.get(id=basenode.id)
        self.assertEquals(updated.short_name, 'renamed-shortname')
        self.assertEquals(updated.long_name, 'Renamed long name')
    def rename_test_failure_helper(self):
        """Submit an invalid short name and expect exactly one error alert."""
        window, short_name, long_name, savebutton = self._init_renametest()
        short_name.clear()
        short_name.send_keys('Renamed-shortname')
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_extjsextras_alertmessagelist .alert-error')), 0)
        savebutton.click()
        self.waitForCssSelector('.devilry_extjsextras_alertmessagelist', within=window)
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_extjsextras_alertmessagelist .alert-error')), 1)
class DeleteBasenodeTestMixin(object):
    """Mixin with helpers for testing the delete-basenode confirm dialog.

    Subclasses must set :attr:`deletebutton_id`.  NOTE(review): relies on
    ``self._get_field`` from ``RenameBasenodeTestMixin`` — verify that
    concrete test cases mix both classes in.
    """
    # id of the button that opens the delete dialog
    deletebutton_id = None
    def click_delete_button(self):
        self.selenium.find_element_by_css_selector('#{0} button'.format(self.deletebutton_id)).click()
    def perform_delete(self):
        """Open the confirm dialog, type DELETE and confirm the deletion."""
        self.click_delete_button()
        self.waitForCssSelector('.devilry_confirmdeletedialog')
        window = self.selenium.find_element_by_css_selector('.devilry_confirmdeletedialog')
        inputfield = self._get_field('.devilry_confirmdeletedialog', 'confirm_text')
        deletebutton = window.find_element_by_css_selector('.devilry_deletebutton button')
        inputfield.send_keys('DELETE')
        self.waitForEnabled(deletebutton)
        deletebutton.click()
class EditAdministratorsTestMixin(object):
    """
    Test the Edit/manage administrators window.
    Requires ``self.testhelper = TestHelper()`` in ``setUp()`` of subclass, and
    the subclass must implement browseToTestBasenode().
    """
    def browseToTestBasenode(self):
        """
        Browse to the basenode that is returned by :meth:`.getBasenode`.
        """
        raise NotImplementedError()
    def getBasenode(self):
        """
        Get the basenode that is used to test edit admins.
        """
        raise NotImplementedError()
    def _open_edit_administrators_window(self):
        # Open the manage-admins panel from the admins box.
        self.waitForCssSelector('.devilry_subjectadmin_adminsbox button')
        self.selenium.find_element_by_css_selector('.devilry_subjectadmin_adminsbox button').click()
        self.waitForCssSelector('.devilry_subjectadmin_manageadminspanel')
    def _add_user_via_ui(self, username, query=None):
        """Type ``query`` into the autocomplete and pick ``username`` with RETURN."""
        query = query or username
        textfield = self.selenium.find_element_by_css_selector('.devilry_usersearch_autocompleteuserwidget input[type=text]')
        textfield.send_keys(query)
        self.waitForCssSelector('.autocompleteuserwidget_matchlist .matchlistitem_{username}'.format(username=username))
        textfield.send_keys(Keys.RETURN)
    def assertUserInEditTable(self, username):
        """Assert (with waiting) that ``username`` appears in the manage-admins grid."""
        # Consistency: reuse the single definition of the grid-cell selector.
        self.waitForCssSelector(self._get_gridcell_cssquery(username),
                                msg='User "{username}" not in grid table'.format(username=username))
    def assertUserInAdminsList(self, username):
        """Assert (with waiting) that ``username`` appears in the administrator list."""
        cssquery = '.devilry_subjectadmin_administratorlist .administratorlistitem_{username}'.format(username=username)
        self.waitForCssSelector(cssquery,
                                msg='User "{username}" not in administrator list'.format(username=username))
    def assertUserNotInEditTable(self, username):
        """Assert (with waiting) that ``username`` is gone from the manage-admins grid."""
        # BUGFIX: the failure message had inverted sense — a timeout here means
        # the user is unexpectedly STILL present, not missing.
        self.waitForCssSelectorNotFound(self._get_gridcell_cssquery(username),
                                        msg='User "{username}" unexpectedly still in grid table'.format(username=username))
    def assertUserNotInAdminsList(self, username):
        """Assert (with waiting) that ``username`` is gone from the administrator list."""
        cssquery = '.devilry_subjectadmin_administratorlist .administratorlistitem_{username}'.format(username=username)
        # BUGFIX: inverted-sense failure message, as in assertUserNotInEditTable.
        self.waitForCssSelectorNotFound(cssquery,
                                        msg='User "{username}" unexpectedly still in administrator list'.format(username=username))
    def test_add_administrators(self):
        """Adding two admins via the UI stores both in the database."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('userone')
        self._add_user_via_ui('userone')
        self.assertUserInEditTable('userone')
        self.assertUserInAdminsList('userone')
        self.assertIn(self.testhelper.userone, basenode.admins.all())
        self.testhelper.create_user('usertwo')
        self._add_user_via_ui('usertwo')
        self.assertUserInEditTable('usertwo')
        self.assertUserInAdminsList('usertwo')
        self.assertIn(self.testhelper.usertwo, basenode.admins.all())
        self.assertEquals(basenode.admins.all().count(), 2)
    def test_add_administrator_by_email(self):
        """Users can be found via an email substring query."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('testuser1')
        self.testhelper.create_user('testuser2')
        user = self.testhelper.create_user('testuser3')
        user.email = 'superman@example.com'
        user.save()
        self._add_user_via_ui('testuser3', query='man@exa')
        self.assertUserInEditTable('testuser3')
        self.assertUserInAdminsList('testuser3')
        self.assertIn(self.testhelper.testuser3, basenode.admins.all())
    def test_add_administrator_by_fullname(self):
        """Users can be found via a full-name substring query."""
        self.browseToTestBasenode()
        basenode = self.getBasenode()
        self.assertEquals(basenode.admins.all().count(), 0)
        self._open_edit_administrators_window()
        self.testhelper.create_user('testuser1')
        self.testhelper.create_user('testuser2')
        user = self.testhelper.create_user('testuser3')
        user.devilryuserprofile.full_name = 'Superman'
        user.devilryuserprofile.save()
        self._add_user_via_ui('testuser3', query='uperma')
        self.assertUserInEditTable('testuser3')
        self.assertUserInAdminsList('testuser3')
        self.assertIn(self.testhelper.testuser3, basenode.admins.all())
    def _get_remove_button(self):
        return self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .removeButton button')
    def _click_selectall_button(self):
        self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .selectAllButton button').click()
    def _get_gridcell_cssquery(self, username):
        # Single source of truth for a user's cell selector in the grid.
        return '.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell_{username}'.format(username=username)
    def _get_gridcell(self, username):
        cssquery = self._get_gridcell_cssquery(username)
        self.waitForCssSelector(cssquery, timeout=5)
        return self.selenium.find_element_by_css_selector(cssquery)
    def _select_user(self, username):
        gridcell = self._get_gridcell(username)
        gridcell.click()
    def _remove_using_ui(self):
        """Click remove and confirm the 'Yes' button in the message box."""
        self._get_remove_button().click()
        self.waitForCssSelector('.x-message-box')
        def click_yes_button():
            for button in self.selenium.find_elements_by_css_selector('.x-message-box button'):
                if button.text.strip() == 'Yes':
                    button.click()
                    return
            self.fail('Could not find the "Yes" button')
        click_yes_button()
    def test_remove_administrator(self):
        """Removing a selected admin updates UI and database."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertIn(self.testhelper.userone, basenode.admins.all())
        self.assertUserInEditTable('userone')
        self.assertUserInAdminsList('userone')
        self._select_user('userone')
        self._remove_using_ui()
        self.assertUserNotInEditTable('userone')
        self.assertUserNotInAdminsList('userone')
        self.assertNotIn(self.testhelper.userone, basenode.admins.all())
    def test_remove_many_administrators(self):
        """Select-all followed by remove clears every admin."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        basenode.admins.add(self.testhelper.create_user('usertwo'))
        basenode.admins.add(self.testhelper.create_user('userthree'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertEquals(basenode.admins.all().count(), 3)
        self.assertUserInEditTable('userone')
        self.assertUserInEditTable('usertwo')
        self.assertUserInEditTable('userthree')
        self._click_selectall_button()
        self._remove_using_ui()
        self.assertUserNotInEditTable('userone')
        self.assertUserNotInEditTable('usertwo')
        self.assertUserNotInEditTable('userthree')
        self.assertEquals(basenode.admins.all().count(), 0)
    def test_remove_disabled_enabled(self):
        """The remove button is enabled only when a user is selected."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        self.assertFalse(self._get_remove_button().is_enabled())
        self._select_user('userone')
        self.assertTrue(self._get_remove_button().is_enabled())
    def test_search(self):
        """Typing in the search field filters the grid down to matching rows."""
        basenode = self.getBasenode()
        basenode.admins.add(self.testhelper.create_user('userone'))
        basenode.admins.add(self.testhelper.create_user('usertwo'))
        self.browseToTestBasenode()
        self._open_edit_administrators_window()
        searchfield = self.selenium.find_element_by_css_selector('.devilry_subjectadmin_manageadminspanel .searchfield input[type=text]')
        self.assertEquals(len(self.selenium.find_elements_by_css_selector('.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell')), 2)
        searchfield.send_keys('one')
        self.waitFor(self.selenium,
                     lambda s: len(self.selenium.find_elements_by_css_selector('.devilry_subjectadmin_manageadminspanel .x-grid .prettyformattedusercell')) == 1)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from collections import namedtuple
# Requests ====================================================================
class LinkUpdateResponse(namedtuple("LinkUpdateResponse", ["uuid",
                                                           "doc_number",
                                                           "status"])):
    """Immutable response record carrying ``uuid``, ``doc_number`` and ``status``."""
    pass
#3: Parameter ._session_id added to LinkUpdateResponse.
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from collections import namedtuple
# Requests ====================================================================
class LinkUpdateResponse(namedtuple("LinkUpdateResponse", ["uuid",
                                                           "doc_number",
                                                           "status",
                                                           "session_id"])):
    """
    Immutable response record carrying ``uuid``, ``doc_number``, ``status``
    and the session id of the request.

    BUGFIX: namedtuple field names may not start with an underscore —
    ``"_session_id"`` made class creation raise
    ``ValueError: Field names cannot start with an underscore``.
    The field is stored as ``session_id``; the ``_session_id`` property
    keeps the originally intended attribute name working.
    """
    __slots__ = ()

    @property
    def _session_id(self):
        """Compatibility alias for the ``session_id`` field."""
        return self.session_id
|
"""
Fulara Abstract Domain
========================
Generic abstract domain to abstract scalar variables and dictionary contents.
Dictionaries are abstracted by a set of abstract segments.
:Authors: Lowis Engel
"""
from collections import defaultdict
from copy import deepcopy, copy
from enum import Enum
from typing import Tuple, Set, Type, Callable, Dict, Iterator, Optional, List
from lyra.abstract_domains.container.fulara.fulara_lattice import FularaLattice
from lyra.abstract_domains.container.fulara.key_wrapper import KeyWrapper
from lyra.abstract_domains.container.fulara.value_wrapper import ValueWrapper
from lyra.abstract_domains.lattice import Lattice, BottomMixin
from lyra.abstract_domains.state import EnvironmentMixin
from lyra.abstract_domains.state import State
from lyra.abstract_domains.store import Store
from lyra.core.expressions import VariableIdentifier, Expression, Subscription, DictDisplay, \
BinaryComparisonOperation, Keys, Items, Values, TupleDisplay, ExpressionVisitor, \
NegationFreeNormalExpression, Input
from lyra.core.types import DictLyraType, BooleanLyraType, IntegerLyraType, \
FloatLyraType, StringLyraType
from lyra.core.utils import copy_docstring
# special variable names:
# (they start with a digit, which is not a legal Python identifier, so they
# can never collide with variable names from the analyzed program)
k_name = "0v_k"
v_name = "0v_v"
# scalar (non-container) Lyra types handled by the scalar sub-domain
scalar_types = {BooleanLyraType, IntegerLyraType, FloatLyraType, StringLyraType}
class Scope(Enum):
    """Scope type. Either ``Branch`` or ``Loop``."""
    Branch = 0   # conditional (if/else) scope
    Loop = 1     # loop scope; loop targets are overwritten on each iteration
class InRelationState(State, BottomMixin):
"""'In' lattice element
i.e. a set of 3-tuples, covering the in-relationship between variables
and the corresponding dictionary and between the variables introduced by loop or if conditions
The tuples consist of a dictionary variable, a key variable and a value variable,
where either the key or value variable can possibly be None.
(dict, key, value), (dict, key, None), (dict, None, value)
The default element is the empty set (top)
.. document private methods
.. automethod:: InRelationState._less_equal
.. automethod:: InRelationState._meet
.. automethod:: InRelationState._join
.. automethod:: InRelationState._widening
"""
    def __init__(self,
                 tuple_set: Set[Tuple[VariableIdentifier, Optional[VariableIdentifier],
                                      Optional[VariableIdentifier]]] = None,
                 scopes: List[Scope] = None):
        """Create a state from an optional tuple set and scope stack."""
        super().__init__()
        # NOTE: `or` also replaces an explicitly passed *empty* container with
        # a fresh one, so callers never end up sharing their empty containers.
        self._tuple_set = tuple_set or set()
        self._scopes = scopes or list()
    @property
    def tuple_set(self):
        """Current tuple set."""
        # The bottom element exposes an empty view instead of the stored set.
        if self.is_bottom():
            return set()
        return self._tuple_set
    @property
    def scopes(self):
        """Current stack of scope types."""
        return self._scopes
    @property
    def scope(self):
        """Current (innermost) scope type; raises IndexError on an empty stack."""
        return self._scopes[-1]
def __repr__(self):
if self.is_bottom():
return "⊥"
# output tuples sorted by their variable names
str_tuples = map(lambda t: f"({t[0]}, {t[1]}, {t[2]})", self.tuple_set)
str_tuples = sorted(str_tuples)
result = "{" + ", ".join(str_tuples) + "}"
return result
    @copy_docstring(Lattice.top)
    def top(self):
        """The top lattice element is ``{}``."""
        # Replace the whole state with one that has an empty tuple set but
        # keeps the current scope stack.
        self._replace(InRelationState(scopes=self.scopes))
        return self
@copy_docstring(Lattice.is_top)
def is_top(self) -> bool:
return self.tuple_set == set()
@copy_docstring(Lattice._less_equal)
def _less_equal(self, other: 'InRelationState') -> bool:
"""An element is less_equal another,
if its tuple set is a superset of the tuple set of the other"""
return self.tuple_set.issuperset(other.tuple_set)
@copy_docstring(Lattice._join)
def _join(self, other: 'InRelationState') -> 'InRelationState':
"""Intersection of the tuple sets"""
new_set = self.tuple_set.intersection(other.tuple_set)
return self._replace(InRelationState(new_set, scopes=self.scopes))
@copy_docstring(Lattice._meet)
def _meet(self, other: 'InRelationState') -> 'InRelationState':
"""Union of the tuple sets"""
new_set = self.tuple_set.union(other.tuple_set)
return self._replace(InRelationState(new_set, scopes=self.scopes))
    @copy_docstring(Lattice._widening)
    def _widening(self, other: 'InRelationState') -> 'InRelationState':
        # only finitely many variable combinations -> widening not needed?
        # Join (intersection) can only shrink the tuple set, so iteration
        # terminates without a dedicated widening operator.
        return self._join(other)
# helpers
def find_key(self, k: VariableIdentifier) \
-> Iterator[Tuple[VariableIdentifier, VariableIdentifier,
Optional[VariableIdentifier]]]:
"""Returns the tuples from the set that have k at the key position"""
if self.is_bottom():
return iter(()) # empty iterator
return filter(lambda t: (t[1] and t[1] == k), self.tuple_set)
def find_value(self, v: VariableIdentifier) \
-> Iterator[Tuple[VariableIdentifier, Optional[VariableIdentifier],
VariableIdentifier]]:
"""Returns the tuples from the set that have v at the value position"""
if self.is_bottom():
return iter(()) # empty iterator
return filter(lambda t: (t[2] and t[2] == v), self.tuple_set)
def find_var(self, v: VariableIdentifier) \
-> Iterator[Tuple[VariableIdentifier, Optional[VariableIdentifier],
Optional[VariableIdentifier]]]:
"""Returns the tuples from the set that have v at the dict OR key OR value position"""
if self.is_bottom():
return iter(()) # empty iterator
return filter(lambda t: (t[0] == v) or (t[1] and t[1] == v) or (t[2] and t[2] == v),
self.tuple_set)
def k_v_tuples(self) \
-> Iterator[Tuple[VariableIdentifier, VariableIdentifier, VariableIdentifier]]:
"""Returns all tuples without a None (i.e. with a key & a value variable)"""
if self.is_bottom():
return iter(()) # empty iterator
return filter(lambda t: (t[1] is not None) and (t[2] is not None), self.tuple_set)
def forget_variable(self, v: VariableIdentifier):
"""Removes variable from its tuple(s)"""
if self.is_bottom():
return
for v_tuple in self.find_var(v):
self.tuple_set.remove(v_tuple)
if (v != v_tuple[0]) and (v_tuple[1] is not None) and (v_tuple[2] is not None):
# must keep relationship with other variable
if v_tuple[1] == v:
new_tuple = (v_tuple[0], None, v_tuple[2])
else: # left_tuple[2] == v
new_tuple = (v_tuple[0], v_tuple[1], None)
self.tuple_set.add(new_tuple)
    @copy_docstring(State._assign)
    def _assign(self, left: Expression, right: Expression):
        if self.is_bottom():
            return self
        if isinstance(left, VariableIdentifier):
            if not isinstance(right, VariableIdentifier):
                # non-variable RHS: we track no relation for it,
                # so only invalidate left, since overwritten
                self.forget_variable(left)
            else:   # TODO: are there other relevant cases?
                # variable-to-variable copy: left inherits all of right's relations
                if left != right:   # self-assignment changes nothing
                    # invalidate left, since overwritten
                    self.forget_variable(left)
                    # copy tuples of 'right', substituting left at right's position
                    new_tuples = set()
                    for right_tuple in self.find_var(right):
                        if right_tuple[0] == right:
                            new_tuples.add((left, right_tuple[1], right_tuple[2]))
                        elif right_tuple[1] and right_tuple[1] == right:
                            new_tuples.add((right_tuple[0], left, right_tuple[2]))
                        else:   # right_tuple[2] == right
                            new_tuples.add((right_tuple[0], right_tuple[1], left))
                    # safe: tuple_set is only mutated after find_var is exhausted
                    self.tuple_set.update(new_tuples)
        return self
    @copy_docstring(State._assume)
    def _assume(self, condition: Expression) -> 'InRelationState':
        if self.is_bottom():
            return self
        if isinstance(condition, BinaryComparisonOperation):    # TODO: boolean conjunctions of them?
            if condition.operator == BinaryComparisonOperation.Operator.In:
                # 'x in d.keys()/d.values()' -> record the relation as a tuple
                if isinstance(condition.left, VariableIdentifier):
                    if self.scope == Scope.Loop:    # variable gets overwritten
                        self.forget_variable(condition.left)
                    if isinstance(condition.right, Keys):
                        new_tuple = (condition.right.target_dict, condition.left, None)
                        self.tuple_set.add(new_tuple)
                    elif isinstance(condition.right, Values):
                        new_tuple = (condition.right.target_dict, None, condition.left)
                        self.tuple_set.add(new_tuple)
                # '(k, v) in d.items()' -> record a fully populated tuple
                elif isinstance(condition.left, TupleDisplay) \
                        and isinstance(condition.right, Items):
                    left_items = condition.left.items
                    if self.scope == Scope.Loop:    # variables get overwritten
                        self.forget_variable(left_items[0])
                        self.forget_variable(left_items[1])
                    new_tuple = (condition.right.target_dict, left_items[0], left_items[1])
                    self.tuple_set.add(new_tuple)
            elif condition.operator == BinaryComparisonOperation.Operator.NotIn:
                # negative information: drop the matching relation(s), if present
                if isinstance(condition.left, VariableIdentifier):
                    if isinstance(condition.right, Keys):
                        # forget the affected relation
                        del_tuple = (condition.right.target_dict, condition.left, None)
                        self.tuple_set.discard(del_tuple)
                    elif isinstance(condition.right, Values):
                        # forget the affected relation
                        del_tuple = (condition.right.target_dict, None, condition.left)
                        self.tuple_set.discard(del_tuple)
                elif isinstance(condition.left, TupleDisplay) \
                        and isinstance(condition.right, Items):
                    # forget the affected relations: any tuple over d whose key/value
                    # position either matches or is unconstrained (None)
                    d = condition.right.target_dict
                    k = condition.left.items[0]
                    v = condition.left.items[1]
                    remove_set = set()
                    for t in self.tuple_set:
                        if t[0] == d:
                            if ((t[1] is not None) and (t[1] == k)) or t[1] is None:
                                if ((t[2] is not None) and t[2] == v) or t[2] is None:
                                    remove_set.add(t)
                    # mutate only after the iteration over tuple_set is complete
                    self.tuple_set.difference_update(remove_set)
        return self
    @copy_docstring(State.enter_if)
    def enter_if(self) -> 'InRelationState':
        # push a branch scope; _assume consults the current scope type
        self.scopes.append(Scope.Branch)
        return self
    @copy_docstring(State.exit_if)
    def exit_if(self) -> 'InRelationState':
        # pop the branch scope pushed by enter_if
        self.scopes.pop()
        return self   # nothing to be done
    @copy_docstring(State.enter_loop)
    def enter_loop(self) -> 'InRelationState':
        # push a loop scope; in loop scope _assume overwrites loop variables
        self.scopes.append(Scope.Loop)
        return self
    @copy_docstring(State.exit_loop)
    def exit_loop(self) -> 'InRelationState':
        # pop the loop scope pushed by enter_loop
        self.scopes.pop()
        return self   # nothing to be done
    @copy_docstring(State._output)
    def _output(self, output: Expression) -> 'InRelationState':
        # output does not affect 'in'-relations
        return self   # nothing to be done
    @copy_docstring(State._substitute)
    def _substitute(self, left: Expression, right: Expression) -> 'InRelationState':
        # this domain only supports forward analyses
        raise RuntimeError("Unexpected substitute in a forward analysis!")
class BoolLattice(Lattice):
    """Two-valued boolean lattice.

    ``True`` over-approximates concrete ``True`` values, i.e. a ``True``
    element means the concrete value *may* be ``True``::

        (Maybe) True
             |
           False

    The default element is ``True`` (top).

    .. document private methods
    .. automethod:: BoolLattice._less_equal
    .. automethod:: BoolLattice._meet
    .. automethod:: BoolLattice._join
    .. automethod:: BoolLattice._widening
    """
    def __init__(self, value: bool = True):
        super().__init__()
        self._value = value

    @property
    def value(self):
        """Current boolean value of this element."""
        return self._value

    def __repr__(self):
        return repr(self._value)

    @copy_docstring(Lattice.bottom)
    def bottom(self):
        """The bottom lattice element is ``False``."""
        self._replace(BoolLattice(value=False))
        return self

    @copy_docstring(Lattice.top)
    def top(self):
        """The top lattice element is ``True``."""
        self._replace(BoolLattice(value=True))
        return self

    @copy_docstring(Lattice.is_bottom)
    def is_bottom(self) -> bool:
        return not self.value

    @copy_docstring(Lattice.is_top)
    def is_top(self) -> bool:
        return self.value

    @copy_docstring(Lattice._less_equal)
    def _less_equal(self, other: 'BoolLattice') -> bool:
        pass    # fully covered by the top/bottom checks in less_equal

    @copy_docstring(Lattice._join)
    def _join(self, other: 'BoolLattice') -> 'BoolLattice':
        pass    # fully covered by the top/bottom checks in join

    @copy_docstring(Lattice._meet)
    def _meet(self, other: 'BoolLattice') -> 'BoolLattice':
        pass    # fully covered by the top/bottom checks in meet

    @copy_docstring(Lattice._widening)
    def _widening(self, other: 'BoolLattice') -> 'BoolLattice':
        pass    # fully covered by the top/bottom checks in widening

    def forget_variable(self, variable: VariableIdentifier):
        pass    # this lattice stores no variables
class FularaState(State):
"""Dictionary content analysis state.
An element of the dictionary content abstract domain.
It consists of the following 4 elements:
- Abstract state from a given domain A over all scalar variables,
abstracting their values
- Map from each dictionary variables to a FularaLattice-element with a given key domain K
and value domain V, abstracting the contents of the dictionaries
- Map from each dictionary variables to a FularaLattice-element with a given key domain K
and the BoolLattice as value domain,
abstracting the initialization info of the dictionary elements
(True = may be uninitialized, False/Not present = definitely initialized)
- Relational InRelationState to cover relations between variables and dictionaries
introduced by 'in' conditions
Everything is Top by default
.. document private methods
.. automethod:: FularaState._assign
.. automethod:: FularaState._assume
.. automethod:: FularaState._output
.. automethod:: FularaState._substitute
.. automethod:: FularaState._temp_cleanup
.. automethod:: FularaState._update_dict_from_refined_scalar
"""
# here the Union type means a logical AND: Domains should inherit from both Wrapper and State
def __init__(self, scalar_domain: Type[EnvironmentMixin],
key_domain: Type[KeyWrapper],
value_domain: Type[ValueWrapper],
update_key_from_scalar: Callable[[KeyWrapper, EnvironmentMixin], KeyWrapper],
update_val_from_scalar: Callable[[ValueWrapper, EnvironmentMixin], ValueWrapper],
scalar_vars: Set[VariableIdentifier] = None,
dict_vars: Set[VariableIdentifier] = None,
scalar_k_conv: Callable[[EnvironmentMixin], KeyWrapper]
= lambda x: x,
k_scalar_conv: Callable[[KeyWrapper], EnvironmentMixin]
= lambda x: x,
scalar_v_conv: Callable[[EnvironmentMixin], ValueWrapper]
= lambda x: x,
v_scalar_conv: Callable[[ValueWrapper], EnvironmentMixin]
= lambda x: x):
"""
:param scalar_domain: domain for abstraction of scalar variable values,
ranges over the scalar variables (should accept a set of variables in in __init__)
(may have different abstract domains for different types)
:param key_domain: domain for abstraction of dictionary keys,
ranges over the scalar variables and the special key variable v_k
and should therefore have a 'scalar_variables' and a 'k_var' argument in __init__
:param value_domain: domain for abstraction of dictionary values,
ranges over the scalar variables and the special value variable v_v
and should therefore have a 'scalar_variables' and a 'v_var' argument in __init__
:param update_key_from_scalar: Function to update the scalar part of a given key_domain
element to the scalar_domain element, given as second argument
:param update_val_from_scalar: Function to update the scalar part of a given value_domain
element to the scalar_domain element, given as second argument
:param scalar_vars: list of scalar variables, whose values should be abstracted
:param dict_vars: list of dictionary variables, whose values should be abstracted
:param scalar_k_conv: conversion function to convert from scalar domain elements
to key domain elements
:param k_scalar_conv: conversion function to convert from key domain elements
to scalar domain elements
:param scalar_v_conv: conversion function to convert from scalar domain elements
to value domain elements
:param v_scalar_conv: conversion function to convert from value domain elements
to scalar domain elements
"""
super().__init__()
self._s_vars = scalar_vars or set()
self._d_vars = dict_vars or set()
self._k_domain = key_domain
self._v_domain = value_domain
self._s_domain = scalar_domain
self._scalar_state = scalar_domain(scalar_vars) # require as input?
arguments = {}
for dv in dict_vars:
typ = dv.typ
if isinstance(typ, DictLyraType): # should be true
if typ not in arguments:
k_var = VariableIdentifier(typ.key_typ, k_name)
v_var = VariableIdentifier(typ.val_typ, v_name)
arguments[typ] = {'key_domain': key_domain, 'value_domain': value_domain,
'key_d_args': {'scalar_variables': scalar_vars,
'k_var': k_var},
'value_d_args': {'scalar_variables': scalar_vars,
'v_var': v_var}}
else:
raise TypeError("Dictionary variables should be of DictLyraType")
lattices = defaultdict(lambda: FularaLattice)
self._dict_store = Store(dict_vars, lattices, arguments)
for k in arguments.keys():
arguments[k]['value_domain'] = BoolLattice
del arguments[k]['value_d_args']
self._init_store = Store(dict_vars, lattices, arguments)
self._update_k_from_s = update_key_from_scalar
self._update_v_from_s = update_val_from_scalar
self._s_k_conv = scalar_k_conv
self._k_s_conv = k_scalar_conv
self._s_v_conv = scalar_v_conv
self._v_s_conv = v_scalar_conv
self._in_relations = InRelationState()
self._scopes = list() # stack of scope types
    @property
    def scalar_state(self) -> EnvironmentMixin:
        """Abstract state of scalar variable values."""
        return self._scalar_state

    @property
    def dict_store(self) -> Store:
        """Abstract store of dictionary variable contents."""
        return self._dict_store

    @property
    def init_store(self) -> Store:
        """Abstract store of dictionary variable initialization info."""
        return self._init_store

    @property
    def in_relations(self) -> InRelationState:
        """Relational state storing relationships introduced by 'in'-conditions."""
        return self._in_relations

    @property
    def update_k_from_s(self):
        """Function to update the scalar part of a given key abstraction
        to the scalar abstraction, given as second argument"""
        return self._update_k_from_s

    @property
    def update_v_from_s(self):
        """Function to update the scalar part of a given value abstraction
        to the scalar abstraction, given as second argument"""
        return self._update_v_from_s

    @property
    def v_domain(self) -> Type[ValueWrapper]:
        """Domain for dictionary values"""
        return self._v_domain

    @property
    def k_domain(self) -> Type[KeyWrapper]:
        """Domain for dictionary keys"""
        return self._k_domain

    @property
    def s_k_conv(self):
        """Function to convert from scalar domain elements to key domain elements"""
        return self._s_k_conv

    @property
    def k_s_conv(self):
        """Function to convert from key domain elements to scalar domain elements"""
        return self._k_s_conv

    @property
    def s_v_conv(self):
        """Function to convert from scalar domain elements to value domain elements"""
        return self._s_v_conv

    @property
    def v_s_conv(self):
        """Function to convert from value domain elements to scalar domain elements"""
        return self._v_s_conv

    @property
    def scopes(self):
        """Current stack of scope types."""
        return self._scopes

    @property
    def scope(self):
        """Current (innermost) scope type; IndexError if no scope was entered."""
        return self._scopes[-1]

    def __repr__(self):
        # point-wise representation of the four components
        return f"{self.scalar_state}, {self.dict_store}, {self.init_store}, {self.in_relations}"
@copy_docstring(Lattice.bottom)
def bottom(self) -> 'FularaState':
"""The bottom lattice element is defined point-wise."""
self.scalar_state.bottom()
self.dict_store.bottom()
self.init_store.bottom()
self.in_relations.bottom()
return self
@copy_docstring(Lattice.top)
def top(self) -> 'FularaState':
"""The top lattice element is defined point-wise."""
self.scalar_state.top()
self.dict_store.top()
self.init_store.top()
self.in_relations.top()
return self
@copy_docstring(Lattice.is_bottom)
def is_bottom(self) -> bool:
"""The current state is bottom if `any` of its four elements is bottom"""
scalar_b = self.scalar_state.is_bottom()
dict_b = self.dict_store.is_bottom()
init_b = self.init_store.is_bottom()
in_b = self.in_relations.is_bottom()
return scalar_b or dict_b or init_b or in_b
@copy_docstring(Lattice.is_top)
def is_top(self) -> bool:
"""The current state is bottom if `all` of its four elements are top"""
scalar_t = self.scalar_state.is_top()
dict_t = self.dict_store.is_top()
init_t = self.init_store.is_top()
in_t = self.in_relations.is_top()
return scalar_t and dict_t and init_t and in_t
@copy_docstring(Lattice._less_equal)
def _less_equal(self, other: 'FularaState') -> bool:
"""Defined point-wise"""
scalar_le = self.scalar_state.less_equal(other.scalar_state)
dict_le = self.dict_store.less_equal(other.dict_store)
init_le = self.init_store.less_equal(other.init_store)
in_le = self.in_relations.less_equal(other.in_relations)
return scalar_le and dict_le and init_le and in_le
    def join_with_scalar(self, self_store: Store, other_store: Store, value_with_scalar: bool):
        """
        Joins the two dictionary stores, setting the scalar information
        for non-overlapping segments to the current scalar state.
        The result is directly written to self_store.
        (adapted from Store.join and fulara_lattice._join/dnorm,
        adding update of scalar information for non-overlapping segments)

        :param self_store: Store of FularaLattices to be overwritten by join
        :param other_store: Store of FularaLattices to be joined with self_store
        :param value_with_scalar: Indicates, if the value abstract domain
            contains scalar information and so if it should be updated
        """
        if other_store.is_bottom() or self_store.is_top():
            pass    # self_store already is the join
        elif self_store.is_bottom() or other_store.is_top():
            self_store._replace(other_store)
        else:
            for var in self_store.store:
                self_lattice: FularaLattice = self_store.store[var]
                other_lattice: FularaLattice = other_store.store[var]
                # states for var can't be bottom, because then the whole store would be bottom
                if self_lattice.is_top():
                    pass
                elif other_lattice.is_top():
                    self_lattice._replace(other_lattice)
                else:
                    # iterate over the smaller segment set, copy the larger one
                    if len(self_lattice.segments) > len(other_lattice.segments):
                        segment_set = other_lattice.segments
                        result_set = copy(self_lattice.segments)
                    else:
                        segment_set = self_lattice.segments
                        result_set = copy(other_lattice.segments)

                    # segments of result_set that never overlapped with any s
                    unjoined_result = copy(result_set)
                    for s in segment_set:
                        remove_set = set()
                        s_joined = False    # True iff s overlapped some result segment
                        for r in result_set:
                            s_meet_r = deepcopy(s[0]).meet(r[0])
                            if not s_meet_r.key_is_bottom():    # not disjoint -> join segments
                                s = (deepcopy(s[0]).join(deepcopy(r[0])),
                                     deepcopy(s[1]).join(deepcopy(r[1])))
                                unjoined_result.discard(r)
                                s_joined = True
                                remove_set.add(r)
                        result_set.difference_update(remove_set)
                        if s_joined:
                            result_set.add(s)
                        else:
                            # s occurs only on one side -> refresh its scalar part
                            # with the current scalar state before keeping it
                            new_k = self.update_k_from_s(s[0], self.scalar_state)
                            if value_with_scalar:
                                new_v = self.update_v_from_s(s[1], self.scalar_state)
                            else:
                                new_v = deepcopy(s[1])
                            result_set.add((new_k, new_v))

                    # same scalar refresh for result segments that never overlapped
                    result_set.difference_update(unjoined_result)
                    for r in unjoined_result:
                        new_k = self.update_k_from_s(r[0], self.scalar_state)
                        if value_with_scalar:
                            new_v = self.update_v_from_s(r[1], self.scalar_state)
                        else:
                            new_v = deepcopy(r[1])
                        result_set.add((new_k, new_v))

                    self_lattice.segments.clear()
                    self_lattice.segments.update(result_set)
@copy_docstring(Lattice._join)
def _join(self, other: 'FularaState') -> 'FularaState':
"""Defined point-wise"""
self.scalar_state.join(other.scalar_state)
# self.dict_store.join(other.dict_store)
self.join_with_scalar(self.dict_store, other.dict_store, True)
# self.init_store.join(other.init_store)
self.join_with_scalar(self.init_store, other.init_store, False)
self.in_relations.join(other.in_relations)
return self
@copy_docstring(Lattice._meet)
def _meet(self, other: 'FularaState'):
"""Defined point-wise"""
self.scalar_state.meet(other.scalar_state)
self.dict_store.meet(other.dict_store)
self.init_store.meet(other.init_store)
self.in_relations.meet(other.in_relations)
return self
@copy_docstring(Lattice._widening)
def _widening(self, other: 'FularaState'):
"""To avoid imprecise widening of FularaLattice, first widens the scalar state"""
old_scalar = deepcopy(self.scalar_state)
self.scalar_state.widening(other.scalar_state)
if old_scalar != self.scalar_state:
self.dict_store.join(other.dict_store)
self.init_store.join(other.init_store)
self.in_relations.join(other.in_relations)
else:
self.dict_store.widening(other.dict_store)
self.init_store.widening(other.init_store)
self.in_relations.widening(other.in_relations)
return self
# helper
def eval_key(self, key_expr: Expression) -> KeyWrapper:
"""evaluates key_expr in the scalar_state and assigns it to v_k in a key state"""
scalar_copy = deepcopy(self.scalar_state)
v_k = VariableIdentifier(key_expr.typ, k_name) # TODO: type?
scalar_copy.add_variable(v_k)
scalar_copy.assign({v_k}, {key_expr})
return self._s_k_conv(scalar_copy)
# helper
def eval_value(self, value_expr: Expression) -> ValueWrapper:
"""evaluates value_expr in the scalar_state and assigns it to v_v in a value state"""
scalar_copy = deepcopy(self.scalar_state)
v_v = VariableIdentifier(value_expr.typ, v_name) # TODO: type?
scalar_copy.add_variable(v_v)
scalar_copy.assign({v_v}, {value_expr})
return self._s_v_conv(scalar_copy)
    # helper
    def _temp_cleanup(self, evaluation: Dict[Subscription, VariableIdentifier]):
        """Deletes all temporary variables of evaluation
        and assigns them back to the dictionary subscription before that"""
        current_temps = set(evaluation.values())
        for expr, var in evaluation.items():
            # 'assign expr = var' to update relationships
            d = expr.target
            k_abs = self.eval_key(expr.key)
            v_abs = self.eval_value(var)
            # temporary variables not needed in dict abstractions
            # (current_temps shrinks as temps are cleaned up, one per outer iteration)
            for temp in current_temps:      # TODO: better way?
                k_abs.remove_variable(temp)
                v_abs.remove_variable(temp)
            d_lattice: 'FularaLattice' = self.dict_store.store[d]
            if k_abs.is_singleton():
                # STRONG UPDATE: key is exactly known -> overwrite its segment
                d_lattice.partition_add(k_abs, v_abs)
                i_lattice: 'FularaLattice' = self.init_store.store[d]
                i_lattice.partition_add(k_abs, BoolLattice(False))
            else:
                # WEAK UPDATE: key not exact -> refine by meeting with the
                # lattice in which the assignment was applied
                # -> meet
                assign_lattice = deepcopy(d_lattice)
                assign_lattice.partition_update({(k_abs, v_abs)})
                d_lattice.meet(assign_lattice)
            # remove temporary var
            self.scalar_state.remove_variable(var)
            current_temps.remove(var)
def update_dict_from_scalar(self, store: Store, value_with_scalar: bool):
"""Updates the scalar information of the given dictionary store to the current scalar state
:param store: Store of FularaLattices to be updated
:param value_with_scalar: Indicates, if the value abstract domain
contains scalar information and so if it should be updated
"""
d_lattice: FularaLattice
for d_lattice in store.store.values():
updated_segments = set()
for (k, v) in d_lattice.segments:
new_k = self.update_k_from_s(k, self.scalar_state)
if value_with_scalar:
new_v = self.update_v_from_s(v, self.scalar_state)
else:
new_v = deepcopy(v)
updated_segments.add((new_k, new_v))
d_lattice.segments.clear()
d_lattice.segments.update(updated_segments)
@copy_docstring(State._assign)
def _assign(self, left: Expression, right: Expression):
if self.is_bottom(): # unreachable
return self
all_ids = left.ids().union(right.ids())
if all(type(ident.typ) in scalar_types for ident in all_ids): # TODO: use not any?
# completely SCALAR STMT
# update scalar part
self.scalar_state.assign({left}, {right})
# update relations with scalar variables in dict stores
for d_lattice in self.dict_store.store.values():
for (k1, v) in d_lattice.segments:
d_lattice.segments.remove((k1, v)) # needed, because tuple is immutable?
k1.assign({left}, {right})
v.assign({left}, {right})
d_lattice.segments.add((k1, v))
d_lattice.d_norm_own()
for i_lattice in self.init_store.store.values():
for (k2, b) in i_lattice.segments: # b must be True
i_lattice.segments.remove((k2, b)) # needed, because tuple is immutable?
k2.assign({left}, {right})
i_lattice.segments.add((k2, b))
i_lattice.d_norm_own()
elif isinstance(left, VariableIdentifier):
if type(left.typ) in scalar_types: # assignment to scalar variable
evaluation = dict()
scalar_right = self.read_eval.visit(right, self, evaluation)
self.scalar_state.assign({left}, {scalar_right})
self._temp_cleanup(evaluation)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
elif isinstance(left.typ, DictLyraType): # overwrite dictionary
if isinstance(right, VariableIdentifier):
self.dict_store.store[left] = deepcopy(self.dict_store.store[right])
self.init_store.store[left] = deepcopy(self.init_store.store[right])
elif isinstance(right, DictDisplay):
# "NEW DICT"
left_lattice: FularaLattice = self.dict_store.store[left]
left_i_lattice: FularaLattice = self.init_store.store[left]
# erase all dict contents before:
left_lattice.empty()
# everything uninitialized,
# but scalars should conform with scalar state -> copy from scalar state:
v_k = VariableIdentifier(left.typ.key_typ, k_name)
s_state = deepcopy(self.scalar_state)
s_state.add_variable(v_k)
top_state = self.s_k_conv(s_state)
top_segment = (top_state, BoolLattice(True))
left_i_lattice.segments.clear()
left_i_lattice.segments.add(top_segment)
for i in range(len(right.keys)): # similar to write
k_abs = self.eval_key(right.keys[i])
v_abs = self.eval_value(right.values[i])
# k_abs must be a singleton -> 'strong update'
left_lattice.partition_add(k_abs, v_abs)
left_i_lattice.partition_add(k_abs, BoolLattice(False))
elif isinstance(right, Input): # TODO: add special dictinput() function?
# everything set to top,
# but copy from scalar state to have a more precise abstraction of it
left_lattice: FularaLattice = self.dict_store.store[left]
left_i_lattice: FularaLattice = self.init_store.store[left]
v_k = VariableIdentifier(left.typ.key_typ, k_name)
s_state = deepcopy(self.scalar_state)
s_state.add_variable(v_k)
top_k_state = self.s_k_conv(s_state)
v_v = VariableIdentifier(left.typ.val_typ, v_name)
s_state = deepcopy(self.scalar_state)
s_state.add_variable(v_v)
top_v_state = self.s_v_conv(s_state)
left_lattice.segments.clear()
top_segment = (top_k_state, top_v_state)
left_lattice.segments.add(top_segment)
left_i_lattice.segments.clear()
top_bool_segment = (deepcopy(top_k_state), BoolLattice(True))
left_i_lattice.segments.add(top_bool_segment)
else:
raise NotImplementedError(
f"Assignment '{left} = {right}' is not yet supported")
else:
raise NotImplementedError(
f"Assignment '{left} = {right}' is not yet supported")
elif isinstance(left, Subscription) and isinstance(left.target.typ, DictLyraType):
# DICT WRITE
d = left.target
k_abs = self.eval_key(left.key) # TODO: nested subscripts -> read_eval
evaluation = dict()
scalar_right = self.read_eval.visit(right, self, evaluation)
v_abs = self.eval_value(scalar_right)
for temp in evaluation.values():
v_abs.remove_variable(temp)
self._temp_cleanup(evaluation) # TODO: no assign needed?
d_lattice: 'FularaLattice' = self.dict_store.store[d]
if k_abs.is_singleton():
# STRONG UPDATE
d_lattice.partition_add(k_abs, v_abs)
i_lattice: 'FularaLattice' = self.init_store.store[d]
i_lattice.partition_add(k_abs, BoolLattice(False))
else:
# WEAK UPDATE (with partitioning)
d_lattice.partition_update({(k_abs, v_abs)})
else:
raise NotImplementedError(f"Assignment '{left} = {right}' is not yet supported")
# TODO: other stmts
# update relations
self.in_relations.assign({left}, {right})
return self
@copy_docstring(State._assume)
def _assume(self, condition: Expression) -> 'FularaState':
if self.is_bottom(): # unreachable
return self
condition = NegationFreeNormalExpression().visit(condition) # eliminate negations
if isinstance(condition, BinaryComparisonOperation):
if condition.operator == BinaryComparisonOperation.Operator.In:
# refine in_relations
self.in_relations.assume({condition})
# refine variable(s):
if isinstance(condition.right, Keys) \
and isinstance(condition.left, VariableIdentifier):
d = condition.right.target_dict
d_lattice: FularaLattice = self.dict_store.store[d]
k_abs: KeyWrapper = d_lattice.get_keys_joined() # TODO: check if bottom?
v_k = k_abs.k_var
if self.scope == Scope.Loop: # -> overwrite old value
self.scalar_state.add_variable(v_k)
self.scalar_state.meet(self._k_s_conv(k_abs))
self.scalar_state.assign({condition.left}, {v_k})
self.scalar_state.remove_variable(v_k)
else: # meet after assignment -> only refine old value
assign_state = self._k_s_conv(k_abs)
assign_state.assign({condition.left}, {v_k})
assign_state.remove_variable(v_k)
self.scalar_state.meet(assign_state)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
return self
elif isinstance(condition.right, Values) \
and isinstance(condition.left, VariableIdentifier):
d = condition.right.target_dict
d_lattice: FularaLattice = self.dict_store.store[d]
v_abs: ValueWrapper = d_lattice.get_values_joined()
v_v = v_abs.v_var
if self.scope == Scope.Loop: # -> overwrite old value
self.scalar_state.add_variable(v_v)
self.scalar_state.meet(self._v_s_conv(v_abs))
self.scalar_state.assign({condition.left}, {v_v})
self.scalar_state.remove_variable(v_v)
else: # meet after assignment -> only refine old value
assign_state = self._v_s_conv(v_abs)
assign_state.assign({condition.left}, {v_v})
assign_state.remove_variable(v_v)
self.scalar_state.meet(assign_state)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
return self
elif isinstance(condition.right, Items) \
and isinstance(condition.left, TupleDisplay):
d = condition.right.target_dict
d_lattice: FularaLattice = self.dict_store.store[d]
k_abs: KeyWrapper = d_lattice.get_keys_joined()
v_k = k_abs.k_var
v_abs = d_lattice.get_values_joined()
v_v = v_abs.v_var
if self.scope == Scope.Loop: # -> overwrite old value
self.scalar_state.add_variable(v_k)
self.scalar_state.meet(self._k_s_conv(k_abs))
self.scalar_state.assign({condition.left.items[0]}, {v_k})
self.scalar_state.remove_variable(v_k)
self.scalar_state.add_variable(v_v)
self.scalar_state.meet(self._v_s_conv(v_abs))
self.scalar_state.assign({condition.left.items[1]}, {v_v})
self.scalar_state.remove_variable(v_v)
else:
k_s_state = self.k_s_conv(k_abs)
k_s_state.assign({condition.left.items[0]}, {v_k})
k_s_state.remove_variable(v_k)
v_s_state = self.v_s_conv(v_abs)
v_s_state.assign({condition.left.items[1]}, {v_v})
v_s_state.remove_variable(v_v)
assign_state = k_s_state
assign_state.meet(v_s_state)
self.scalar_state.meet(assign_state)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
return self
elif condition.operator == BinaryComparisonOperation.Operator.NotIn:
# refine in_relations
self.in_relations.assume({condition})
if isinstance(condition.right, Keys):
d = condition.right.target_dict
i_lattice: FularaLattice = self.init_store.store[d]
if self.scope == Scope.Loop:
# check for definitely initialized elements:
if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
# v_k not top (don't do partition_update on init_store elements
# -> cannot have multiple segments whose keys add up to top)
# -> loop is definitely executed at least once
# -> loop var can only have values from the dictionary == IN case
d_lattice: FularaLattice = self.dict_store.store[d]
k_abs: KeyWrapper = d_lattice.get_keys_joined()
v_k = k_abs.k_var
self.scalar_state.add_variable(v_k)
self.scalar_state.meet(self._k_s_conv(k_abs))
self.scalar_state.assign({condition.left}, {v_k})
self.scalar_state.remove_variable(v_k)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
# else: can have any value from before or inside the loop -> return self
else:
# get possibly uninitialized keys
k_abs: KeyWrapper = i_lattice.get_keys_joined()
v_k = k_abs.k_var
assign_state = self._k_s_conv(k_abs)
assign_state.assign({condition.left}, {v_k})
assign_state.remove_variable(v_k)
self.scalar_state.meet(assign_state)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
return self
elif isinstance(condition.right, Values):
if self.scope == Scope.Loop:
d = condition.right.target_dict
i_lattice: FularaLattice = self.init_store.store[d]
# check for definitely initialized elements:
if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
# v_k not top (don't do partition_update on init_store elements
# -> cannot have multiple segments whose keys add up to top)
# -> loop is definitely executed at least once
# -> loop var can only have values from the dictionary == IN case
d_lattice: FularaLattice = self.dict_store.store[d]
v_abs = d_lattice.get_values_joined()
v_v = v_abs.v_var
self.scalar_state.add_variable(v_v)
self.scalar_state.meet(self._v_s_conv(v_abs))
self.scalar_state.assign({condition.left.items[1]}, {v_v})
self.scalar_state.remove_variable(v_v)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
# else: TODO: refine value variable abstraction
return self
elif isinstance(condition.right, Items):
d = condition.right.target_dict
i_lattice: FularaLattice = self.init_store.store[d]
if self.scope == Scope.Loop:
# check for definitely initialized elements:
if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
# v_k not top (don't do partition_update on init_store elements
# -> cannot have multiple segments whose keys add up to top)
# -> loop is definitely executed at least once
# -> loop var can only have values from the dictionary == IN case
d_lattice: FularaLattice = self.dict_store.store[d]
k_abs: KeyWrapper = d_lattice.get_keys_joined()
v_k = k_abs.k_var
self.scalar_state.add_variable(v_k)
self.scalar_state.meet(self._k_s_conv(k_abs))
self.scalar_state.assign({condition.left.items[0]}, {v_k})
self.scalar_state.remove_variable(v_k)
v_abs = d_lattice.get_values_joined()
v_v = v_abs.v_var
self.scalar_state.add_variable(v_v)
self.scalar_state.meet(self._v_s_conv(v_abs))
self.scalar_state.assign({condition.left.items[1]}, {v_v})
self.scalar_state.remove_variable(v_v)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
# else: can have any value from before or inside the loop -> return self
else:
# TODO: refine value variable abstraction
# get possibly uninitialized keys
k_abs: KeyWrapper = i_lattice.get_keys_joined()
v_k = k_abs.k_var
assign_state = self._k_s_conv(k_abs)
assign_state.assign({condition.left.items[0]}, {v_k})
assign_state.remove_variable(v_k)
self.scalar_state.meet(assign_state)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
return self
# default: try in scalar domain
evaluation = dict()
scalar_condition = self.read_eval.visit(condition, self, evaluation)
self.scalar_state.assume({scalar_condition})
self._temp_cleanup(evaluation)
# check, if coupled loop variables (from items-loops) get refined
# -> refine their counterpart
cond_vars = condition.ids()
for (d_var, k_var, v_var) in self.in_relations.k_v_tuples():
if k_var in cond_vars:
if v_var in cond_vars:
raise NotImplementedError(
f"Conditions like {condition} containing both the key "
f"and value loop variable of a .items()-loop are not yet supported!")
else:
# refine v_var according to refined k_var
# -> re-evaluate: v_var meet d_var[k_var]
d_lattice: FularaLattice = self.dict_store.store[d_var]
k_abs = self.eval_key(k_var)
if not k_abs.is_top(): # TODO: check for less_equal old?
scalar_vars = self._s_vars.copy()
v_v = VariableIdentifier(d_var.typ.val_typ, v_name)
v_abs = d_lattice.v_domain(scalar_vars, v_v).bottom()
for (k, v) in d_lattice.segments:
key_meet_k = deepcopy(k_abs).meet(k)
if not key_meet_k.key_is_bottom(): # key may be contained in this segment
v_abs.join(deepcopy(v))
scalar_copy = deepcopy(self.scalar_state)
scalar_copy.add_variable(v_v)
scalar_copy.meet(self.v_s_conv(v_abs))
scalar_copy.assign({v_var}, {v_v})
scalar_copy.remove_variable(v_v)
self.scalar_state.meet(scalar_copy)
elif v_var in cond_vars:
# refine k_var according to refined v_bar -> k_var, s.t. d_var[k_var] = v_var
d_lattice: FularaLattice = self.dict_store.store[d_var]
v_abs = self.eval_value(v_var)
if not v_abs.is_top(): # TODO: check for less_equal old?
scalar_vars = self._s_vars.copy()
v_k = VariableIdentifier(d_var.typ.key_typ, k_name)
k_abs = d_lattice.k_domain(scalar_vars, v_k).bottom()
for (k, v) in d_lattice.segments:
value_meet_v = deepcopy(v_abs).meet(v)
if not value_meet_v.key_is_bottom(): # value may be contained in this segment
k_abs.join(deepcopy(k))
scalar_copy = deepcopy(self.scalar_state)
scalar_copy.add_variable(v_k)
scalar_copy.meet(self.k_s_conv(k_abs))
scalar_copy.assign({k_var}, {v_k})
scalar_copy.remove_variable(v_k)
self.scalar_state.meet(scalar_copy)
self.update_dict_from_scalar(self.dict_store, True)
self.update_dict_from_scalar(self.init_store, False)
# no 'in'-condition -> no need to update in_relations
return self
@copy_docstring(State.enter_if)
def enter_if(self) -> 'FularaState':
    """Push a ``Branch`` scope and notify the in-relations sub-state."""
    if self.is_bottom():
        # unreachable / not yet analyzed: no scope bookkeeping required
        return self
    self.scopes.append(Scope.Branch)
    self.in_relations.enter_if()
    return self
@copy_docstring(State.exit_if)
def exit_if(self) -> 'FularaState':
    """Pop the current ``Branch`` scope and notify the in-relations sub-state."""
    if self.is_bottom():
        # unreachable / not yet analyzed: nothing was pushed for this branch
        return self
    assert self.scope == Scope.Branch
    self.scopes.pop()
    self.in_relations.exit_if()
    return self
@copy_docstring(State.enter_loop)
def enter_loop(self) -> 'FularaState':
    """Push a ``Loop`` scope and notify the in-relations sub-state."""
    if self.is_bottom():
        # unreachable / not yet analyzed: no scope bookkeeping required
        return self
    self.scopes.append(Scope.Loop)
    self.in_relations.enter_loop()
    return self
@copy_docstring(State.exit_loop)
def exit_loop(self) -> 'FularaState':
    """Pop the current ``Loop`` scope and notify the in-relations sub-state."""
    if self.is_bottom():
        # unreachable / not yet analyzed: nothing was pushed for this loop
        return self
    assert self.scope == Scope.Loop
    self.scopes.pop()
    self.in_relations.exit_loop()
    return self
@copy_docstring(State._output)
def _output(self, output: Expression) -> 'FularaState':
    # outputting a value does not change any abstract information
    return self  # nothing to be done

@copy_docstring(State._substitute)
def _substitute(self, left: Expression, right: Expression) -> 'FularaState':
    # substitution belongs to backward analyses; this domain is forward-only
    raise RuntimeError("Unexpected substitute in a forward analysis!")
# expression evaluation
class DictReadEvaluation:
    """Visitor that performs the evaluation of dictionary reads in the DictContentDomain
    lattice by replacing them with a temporary (scalar) variable holding
    the corresponding value in the scalar state.

    Adapted from `ExpressionVisitor`.
    """

    @copy_docstring(ExpressionVisitor.visit)
    def visit(self, expr, *args, **kwargs):
        """
        :param expr: current expression
        :keyword arguments:
            * *state* -- current FularaState
            * *evaluation* -- dictionary mapping from dictionary read expressions,
              that already got evaluated to temporary variables (VariableIdentifier)
        :return expression with replaced dictionary reads
        """
        # dynamic dispatch on the concrete Expression subclass, falling back
        # to default_visit for expression kinds without a dedicated handler
        method = 'visit_' + expr.__class__.__name__
        return getattr(self, method, self.default_visit)(expr, *args, **kwargs)

    @copy_docstring(ExpressionVisitor.visit_Subscription)
    def visit_Subscription(self, expr: Subscription, state: 'FularaState' = None,
                           evaluation=None):
        # NOTE(review): `state`/`evaluation` default to None but are dereferenced
        # unconditionally below — callers are expected to always pass both.
        if isinstance(expr.target.typ, DictLyraType):
            if expr in evaluation:  # already evaluated -> reuse its temp variable
                return evaluation[expr]
            else:
                d = expr.target
                d_lattice: FularaLattice = state.dict_store.store[d]
                k_abs = state.eval_key(expr.key)
                for old_temp in evaluation.values():  # remove already added temp vars
                    k_abs.remove_variable(old_temp)
                scalar_vars = state._s_vars.copy()
                v_var = VariableIdentifier(d.typ.val_typ, v_name)
                v_abs = d_lattice.v_domain(scalar_vars, v_var).bottom()
                # weak read: join the values of every segment whose key part
                # may overlap the evaluated key abstraction
                for (k, v) in d_lattice.segments:
                    key_meet_k = deepcopy(k_abs).meet(k)
                    if not key_meet_k.key_is_bottom():  # key may be contained in this segment
                        v_abs.join(deepcopy(v))
                for old_temp in evaluation.values():  # add already added temp vars
                    v_abs.add_variable(old_temp)
                state.scalar_state.add_variable(v_var)
                state.scalar_state.meet(state.v_s_conv(v_abs))
                # use increasing numbers for temp_var names
                # (digit-prefixed, so it cannot clash with program identifiers)
                temp_var = VariableIdentifier(d.typ.val_typ, str(len(evaluation)) + "v")
                state.scalar_state.add_variable(temp_var)
                state.scalar_state.assign({temp_var}, {v_var})
                state.scalar_state.remove_variable(v_var)
                evaluation[expr] = temp_var
                return temp_var
        else:
            # subscription on a non-dictionary: just recurse into children
            return self.default_visit(expr, state, evaluation)

    def default_visit(self, expr: Expression, state: 'FularaState' = None,
                      evaluation=None):
        """default: visit & replace children (adapted from expressions._iter_child_exprs)"""
        # shallow copy, then rewrite any Expression-valued fields in place
        new_expr = copy(expr)
        for name, field in new_expr.__dict__.items():
            if isinstance(field, Expression):
                new_expr.__dict__[name] = self.visit(field, state, evaluation)  # replace
            elif isinstance(field, list):
                for idx, item in enumerate(field):
                    if isinstance(item, Expression):
                        field[idx] = self.visit(item, state, evaluation)
        return new_expr

read_eval = DictReadEvaluation()  # static class member shared between all instances
Fixed a copy-paste bug.
"""
Fulara Abstract Domain
========================
Generic abstract domain to abstract scalar variables and dictionary contents.
Dictionaries are abstracted by a set of abstract segments.
:Authors: Lowis Engel
"""
from collections import defaultdict
from copy import deepcopy, copy
from enum import Enum
from typing import Tuple, Set, Type, Callable, Dict, Iterator, Optional, List
from lyra.abstract_domains.container.fulara.fulara_lattice import FularaLattice
from lyra.abstract_domains.container.fulara.key_wrapper import KeyWrapper
from lyra.abstract_domains.container.fulara.value_wrapper import ValueWrapper
from lyra.abstract_domains.lattice import Lattice, BottomMixin
from lyra.abstract_domains.state import EnvironmentMixin
from lyra.abstract_domains.state import State
from lyra.abstract_domains.store import Store
from lyra.core.expressions import VariableIdentifier, Expression, Subscription, DictDisplay, \
BinaryComparisonOperation, Keys, Items, Values, TupleDisplay, ExpressionVisitor, \
NegationFreeNormalExpression, Input
from lyra.core.types import DictLyraType, BooleanLyraType, IntegerLyraType, \
FloatLyraType, StringLyraType
from lyra.core.utils import copy_docstring
# special variable names:
# they start with a digit, so they can never collide with an identifier
# of the analyzed program
k_name = "0v_k"
v_name = "0v_v"

# Lyra types handled by the scalar domain; membership is checked with
# `type(x.typ) in scalar_types` (exact type, not isinstance)
scalar_types = {BooleanLyraType, IntegerLyraType, FloatLyraType, StringLyraType}
class Scope(Enum):
    """Scope type of a conditional region. Either ``Branch`` (if/else) or ``Loop``."""
    Branch = 0
    Loop = 1
class InRelationState(State, BottomMixin):
    """'In' lattice element
    i.e. a set of 3-tuples, covering the in-relationship between variables
    and the corresponding dictionary and between the variables introduced by loop or if conditions

    The tuples consist of a dictionary variable, a key variable and a value variable,
    where either the key or value variable can possibly be None.
    (dict, key, value), (dict, key, None), (dict, None, value)

    The default element is the empty set (top)

    .. document private methods
    .. automethod:: InRelationState._less_equal
    .. automethod:: InRelationState._meet
    .. automethod:: InRelationState._join
    .. automethod:: InRelationState._widening
    """

    def __init__(self,
                 tuple_set: Set[Tuple[VariableIdentifier, Optional[VariableIdentifier],
                                      Optional[VariableIdentifier]]] = None,
                 scopes: List[Scope] = None):
        """
        :param tuple_set: initial set of (dict, key, value) tuples; empty (= top) if None
        :param scopes: initial stack of scope types; empty if None
        """
        super().__init__()
        self._tuple_set = tuple_set or set()
        self._scopes = scopes or list()

    @property
    def tuple_set(self):
        """Current tuple set (empty if the state is bottom)."""
        if self.is_bottom():
            return set()
        return self._tuple_set

    @property
    def scopes(self):
        """Current stack of scope types."""
        return self._scopes

    @property
    def scope(self):
        """Current (innermost) scope type."""
        return self._scopes[-1]

    def __repr__(self):
        if self.is_bottom():
            return "⊥"
        # output tuples sorted by their variable names for a deterministic representation
        str_tuples = map(lambda t: f"({t[0]}, {t[1]}, {t[2]})", self.tuple_set)
        str_tuples = sorted(str_tuples)
        result = "{" + ", ".join(str_tuples) + "}"
        return result

    @copy_docstring(Lattice.top)
    def top(self):
        """The top lattice element is ``{}`` (no relationships known)."""
        self._replace(InRelationState(scopes=self.scopes))
        return self

    @copy_docstring(Lattice.is_top)
    def is_top(self) -> bool:
        return self.tuple_set == set()

    @copy_docstring(Lattice._less_equal)
    def _less_equal(self, other: 'InRelationState') -> bool:
        """An element is less_equal another,
        if its tuple set is a superset of the tuple set of the other
        (more known relationships = more precise = lower in the lattice)"""
        return self.tuple_set.issuperset(other.tuple_set)

    @copy_docstring(Lattice._join)
    def _join(self, other: 'InRelationState') -> 'InRelationState':
        """Intersection of the tuple sets (keep only relationships holding in both)"""
        new_set = self.tuple_set.intersection(other.tuple_set)
        return self._replace(InRelationState(new_set, scopes=self.scopes))

    @copy_docstring(Lattice._meet)
    def _meet(self, other: 'InRelationState') -> 'InRelationState':
        """Union of the tuple sets"""
        new_set = self.tuple_set.union(other.tuple_set)
        return self._replace(InRelationState(new_set, scopes=self.scopes))

    @copy_docstring(Lattice._widening)
    def _widening(self, other: 'InRelationState') -> 'InRelationState':
        # only finitely many variable combinations exist, so the lattice has
        # finite height and joining is a sound (terminating) widening
        return self._join(other)

    # helpers
    def find_key(self, k: VariableIdentifier) \
            -> Iterator[Tuple[VariableIdentifier, VariableIdentifier,
                              Optional[VariableIdentifier]]]:
        """Returns the tuples from the set that have k at the key position"""
        if self.is_bottom():
            return iter(())  # empty iterator
        return filter(lambda t: (t[1] and t[1] == k), self.tuple_set)

    def find_value(self, v: VariableIdentifier) \
            -> Iterator[Tuple[VariableIdentifier, Optional[VariableIdentifier],
                              VariableIdentifier]]:
        """Returns the tuples from the set that have v at the value position"""
        if self.is_bottom():
            return iter(())  # empty iterator
        return filter(lambda t: (t[2] and t[2] == v), self.tuple_set)

    def find_var(self, v: VariableIdentifier) \
            -> Iterator[Tuple[VariableIdentifier, Optional[VariableIdentifier],
                              Optional[VariableIdentifier]]]:
        """Returns the tuples from the set that have v at the dict OR key OR value position"""
        if self.is_bottom():
            return iter(())  # empty iterator
        return filter(lambda t: (t[0] == v) or (t[1] and t[1] == v) or (t[2] and t[2] == v),
                      self.tuple_set)

    def k_v_tuples(self) \
            -> Iterator[Tuple[VariableIdentifier, VariableIdentifier, VariableIdentifier]]:
        """Returns all tuples without a None (i.e. with a key & a value variable)"""
        if self.is_bottom():
            return iter(())  # empty iterator
        return filter(lambda t: (t[1] is not None) and (t[2] is not None), self.tuple_set)

    def forget_variable(self, v: VariableIdentifier):
        """Removes variable from its tuple(s)"""
        if self.is_bottom():
            return
        # BUGFIX: find_var returns a *lazy* filter over self.tuple_set;
        # mutating the set while consuming that filter raises
        # "RuntimeError: Set changed size during iteration".
        # Materialize the matches before mutating.
        for v_tuple in list(self.find_var(v)):
            self.tuple_set.remove(v_tuple)
            if (v != v_tuple[0]) and (v_tuple[1] is not None) and (v_tuple[2] is not None):
                # must keep relationship with the other (still valid) variable
                if v_tuple[1] == v:
                    new_tuple = (v_tuple[0], None, v_tuple[2])
                else:  # v_tuple[2] == v
                    new_tuple = (v_tuple[0], v_tuple[1], None)
                self.tuple_set.add(new_tuple)

    @copy_docstring(State._assign)
    def _assign(self, left: Expression, right: Expression):
        if self.is_bottom():
            return self
        if isinstance(left, VariableIdentifier):
            if not isinstance(right, VariableIdentifier):
                # invalidate left, since overwritten
                self.forget_variable(left)
            else:  # TODO: are there other relevant cases?
                if left != right:
                    # invalidate left, since overwritten
                    self.forget_variable(left)
                    # copy tuples of 'right' (left now aliases right's relations)
                    new_tuples = set()
                    for right_tuple in self.find_var(right):
                        if right_tuple[0] == right:
                            new_tuples.add((left, right_tuple[1], right_tuple[2]))
                        elif right_tuple[1] and right_tuple[1] == right:
                            new_tuples.add((right_tuple[0], left, right_tuple[2]))
                        else:  # right_tuple[2] == right
                            new_tuples.add((right_tuple[0], right_tuple[1], left))
                    self.tuple_set.update(new_tuples)
        return self

    @copy_docstring(State._assume)
    def _assume(self, condition: Expression) -> 'InRelationState':
        if self.is_bottom():
            return self
        if isinstance(condition, BinaryComparisonOperation):  # TODO: boolean conjunctions of them?
            if condition.operator == BinaryComparisonOperation.Operator.In:
                if isinstance(condition.left, VariableIdentifier):
                    if self.scope == Scope.Loop:  # variable gets overwritten
                        self.forget_variable(condition.left)
                    if isinstance(condition.right, Keys):
                        new_tuple = (condition.right.target_dict, condition.left, None)
                        self.tuple_set.add(new_tuple)
                    elif isinstance(condition.right, Values):
                        new_tuple = (condition.right.target_dict, None, condition.left)
                        self.tuple_set.add(new_tuple)
                elif isinstance(condition.left, TupleDisplay) \
                        and isinstance(condition.right, Items):
                    left_items = condition.left.items
                    if self.scope == Scope.Loop:  # variables get overwritten
                        self.forget_variable(left_items[0])
                        self.forget_variable(left_items[1])
                    new_tuple = (condition.right.target_dict, left_items[0], left_items[1])
                    self.tuple_set.add(new_tuple)
            elif condition.operator == BinaryComparisonOperation.Operator.NotIn:
                if isinstance(condition.left, VariableIdentifier):
                    if isinstance(condition.right, Keys):
                        # forget the affected relation
                        del_tuple = (condition.right.target_dict, condition.left, None)
                        self.tuple_set.discard(del_tuple)
                    elif isinstance(condition.right, Values):
                        # forget the affected relation
                        del_tuple = (condition.right.target_dict, None, condition.left)
                        self.tuple_set.discard(del_tuple)
                elif isinstance(condition.left, TupleDisplay) \
                        and isinstance(condition.right, Items):
                    # forget the affected relations
                    d = condition.right.target_dict
                    k = condition.left.items[0]
                    v = condition.left.items[1]
                    remove_set = set()
                    for t in self.tuple_set:
                        if t[0] == d:
                            if ((t[1] is not None) and (t[1] == k)) or t[1] is None:
                                if ((t[2] is not None) and t[2] == v) or t[2] is None:
                                    remove_set.add(t)
                    self.tuple_set.difference_update(remove_set)
        return self

    @copy_docstring(State.enter_if)
    def enter_if(self) -> 'InRelationState':
        self.scopes.append(Scope.Branch)
        return self

    @copy_docstring(State.exit_if)
    def exit_if(self) -> 'InRelationState':
        self.scopes.pop()
        return self  # nothing to be done

    @copy_docstring(State.enter_loop)
    def enter_loop(self) -> 'InRelationState':
        self.scopes.append(Scope.Loop)
        return self

    @copy_docstring(State.exit_loop)
    def exit_loop(self) -> 'InRelationState':
        self.scopes.pop()
        return self  # nothing to be done

    @copy_docstring(State._output)
    def _output(self, output: Expression) -> 'InRelationState':
        return self  # nothing to be done

    @copy_docstring(State._substitute)
    def _substitute(self, left: Expression, right: Expression) -> 'InRelationState':
        raise RuntimeError("Unexpected substitute in a forward analysis!")
class BoolLattice(Lattice):
    """Boolean lattice element (True/False),
    where True overapproximates concrete True values
    (so True -> may be True in the concrete)

    (Maybe) True
        |
      False

    The default element is True (top)

    .. document private methods
    .. automethod:: BoolLattice._less_equal
    .. automethod:: BoolLattice._meet
    .. automethod:: BoolLattice._join
    .. automethod:: BoolLattice._widening
    """

    def __init__(self, value: bool = True):
        super().__init__()
        self._value = value

    @property
    def value(self):
        """Current boolean value."""
        return self._value

    def __repr__(self):
        return repr(self._value)

    @copy_docstring(Lattice.bottom)
    def bottom(self):
        """The bottom lattice element is ``False``."""
        return self._replace(BoolLattice(False))

    @copy_docstring(Lattice.top)
    def top(self):
        """The top lattice element is ``True``."""
        return self._replace(BoolLattice(True))

    @copy_docstring(Lattice.is_bottom)
    def is_bottom(self) -> bool:
        return not self._value

    @copy_docstring(Lattice.is_top)
    def is_top(self) -> bool:
        return self._value

    # With only two elements, every element is either top or bottom, so the
    # public less_equal/join/meet/widening shortcuts in Lattice already cover
    # all cases and the private hooks never need to do any work:

    @copy_docstring(Lattice._less_equal)
    def _less_equal(self, other: 'BoolLattice') -> bool:
        pass  # already handled by less_equal

    @copy_docstring(Lattice._join)
    def _join(self, other: 'BoolLattice') -> 'BoolLattice':
        pass  # already handled by join

    @copy_docstring(Lattice._meet)
    def _meet(self, other: 'BoolLattice') -> 'BoolLattice':
        pass  # already handled by meet

    @copy_docstring(Lattice._widening)
    def _widening(self, other: 'BoolLattice') -> 'BoolLattice':
        pass  # already handled by widening

    def forget_variable(self, variable: VariableIdentifier):
        pass  # no variables are stored in this lattice
class FularaState(State):
"""Dictionary content analysis state.
An element of the dictionary content abstract domain.
It consists of the following 4 elements:
- Abstract state from a given domain A over all scalar variables,
abstracting their values
- Map from each dictionary variables to a FularaLattice-element with a given key domain K
and value domain V, abstracting the contents of the dictionaries
- Map from each dictionary variables to a FularaLattice-element with a given key domain K
and the BoolLattice as value domain,
abstracting the initialization info of the dictionary elements
(True = may be uninitialized, False/Not present = definitely initialized)
- Relational InRelationState to cover relations between variables and dictionaries
introduced by 'in' conditions
Everything is Top by default
.. document private methods
.. automethod:: FularaState._assign
.. automethod:: FularaState._assume
.. automethod:: FularaState._output
.. automethod:: FularaState._substitute
.. automethod:: FularaState._temp_cleanup
.. automethod:: FularaState._update_dict_from_refined_scalar
"""
# here the Union type means a logical AND: Domains should inherit from both Wrapper and State
def __init__(self, scalar_domain: Type[EnvironmentMixin],
             key_domain: Type[KeyWrapper],
             value_domain: Type[ValueWrapper],
             update_key_from_scalar: Callable[[KeyWrapper, EnvironmentMixin], KeyWrapper],
             update_val_from_scalar: Callable[[ValueWrapper, EnvironmentMixin], ValueWrapper],
             scalar_vars: Set[VariableIdentifier] = None,
             dict_vars: Set[VariableIdentifier] = None,
             scalar_k_conv: Callable[[EnvironmentMixin], KeyWrapper]
             = lambda x: x,
             k_scalar_conv: Callable[[KeyWrapper], EnvironmentMixin]
             = lambda x: x,
             scalar_v_conv: Callable[[EnvironmentMixin], ValueWrapper]
             = lambda x: x,
             v_scalar_conv: Callable[[ValueWrapper], EnvironmentMixin]
             = lambda x: x):
    """
    :param scalar_domain: domain for abstraction of scalar variable values,
        ranges over the scalar variables (should accept a set of variables in in __init__)
        (may have different abstract domains for different types)
    :param key_domain: domain for abstraction of dictionary keys,
        ranges over the scalar variables and the special key variable v_k
        and should therefore have a 'scalar_variables' and a 'k_var' argument in __init__
    :param value_domain: domain for abstraction of dictionary values,
        ranges over the scalar variables and the special value variable v_v
        and should therefore have a 'scalar_variables' and a 'v_var' argument in __init__
    :param update_key_from_scalar: Function to update the scalar part of a given key_domain
        element to the scalar_domain element, given as second argument
    :param update_val_from_scalar: Function to update the scalar part of a given value_domain
        element to the scalar_domain element, given as second argument
    :param scalar_vars: list of scalar variables, whose values should be abstracted
    :param dict_vars: list of dictionary variables, whose values should be abstracted
    :param scalar_k_conv: conversion function to convert from scalar domain elements
        to key domain elements
    :param k_scalar_conv: conversion function to convert from key domain elements
        to scalar domain elements
    :param scalar_v_conv: conversion function to convert from scalar domain elements
        to value domain elements
    :param v_scalar_conv: conversion function to convert from value domain elements
        to scalar domain elements
    """
    super().__init__()
    self._s_vars = scalar_vars or set()
    self._d_vars = dict_vars or set()
    self._k_domain = key_domain
    self._v_domain = value_domain
    self._s_domain = scalar_domain
    self._scalar_state = scalar_domain(scalar_vars)  # require as input?
    # build per-dict-type constructor arguments for the Fulara lattices
    arguments = {}
    # BUGFIX: iterate the normalized set — `dict_vars` itself may be None
    # (its declared default), which would raise a TypeError here
    for dv in self._d_vars:
        typ = dv.typ
        if isinstance(typ, DictLyraType):  # should be true
            if typ not in arguments:
                k_var = VariableIdentifier(typ.key_typ, k_name)
                v_var = VariableIdentifier(typ.val_typ, v_name)
                arguments[typ] = {'key_domain': key_domain, 'value_domain': value_domain,
                                  'key_d_args': {'scalar_variables': scalar_vars,
                                                 'k_var': k_var},
                                  'value_d_args': {'scalar_variables': scalar_vars,
                                                   'v_var': v_var}}
        else:
            raise TypeError("Dictionary variables should be of DictLyraType")
    lattices = defaultdict(lambda: FularaLattice)
    self._dict_store = Store(self._d_vars, lattices, arguments)
    # NOTE(review): `arguments` is mutated below after being passed to the
    # first Store — assumes Store consumes it eagerly in __init__; verify.
    # the init store abstracts initialization info only, so its value domain
    # is the plain BoolLattice (True = may be uninitialized)
    for k in arguments.keys():
        arguments[k]['value_domain'] = BoolLattice
        del arguments[k]['value_d_args']
    self._init_store = Store(self._d_vars, lattices, arguments)
    self._update_k_from_s = update_key_from_scalar
    self._update_v_from_s = update_val_from_scalar
    self._s_k_conv = scalar_k_conv
    self._k_s_conv = k_scalar_conv
    self._s_v_conv = scalar_v_conv
    self._v_s_conv = v_scalar_conv
    self._in_relations = InRelationState()
    self._scopes = list()  # stack of scope types
@property
def scalar_state(self) -> EnvironmentMixin:
    """Abstract state of scalar variable values."""
    return self._scalar_state

@property
def dict_store(self) -> Store:
    """Abstract store of dictionary variable contents."""
    return self._dict_store

@property
def init_store(self) -> Store:
    """Abstract store of dictionary variable initialization info."""
    return self._init_store

@property
def in_relations(self) -> InRelationState:
    """Relational state storing relationships introduced by 'in'-conditions."""
    return self._in_relations

@property
def update_k_from_s(self):
    """Function to update the scalar part of a given key abstraction
    to the scalar abstraction, given as second argument"""
    return self._update_k_from_s

@property
def update_v_from_s(self):
    """Function to update the scalar part of a given value abstraction
    to the scalar abstraction, given as second argument"""
    return self._update_v_from_s

@property
def v_domain(self) -> Type[ValueWrapper]:
    """Domain for dictionary values"""
    return self._v_domain

@property
def k_domain(self) -> Type[KeyWrapper]:
    """Domain for dictionary keys"""
    return self._k_domain

@property
def s_k_conv(self):
    """Function to convert from scalar domain elements to key domain elements"""
    return self._s_k_conv

@property
def k_s_conv(self):
    """Function to convert from key domain elements to scalar domain elements"""
    return self._k_s_conv

@property
def s_v_conv(self):
    """Function to convert from scalar domain elements to value domain elements"""
    return self._s_v_conv

@property
def v_s_conv(self):
    """Function to convert from value domain elements to scalar domain elements"""
    return self._v_s_conv

@property
def scopes(self):
    """Current stack of scope types."""
    return self._scopes

@property
def scope(self):
    """Current (innermost) scope type."""
    return self._scopes[-1]

def __repr__(self):
    # point-wise representation of the four components
    return f"{self.scalar_state}, {self.dict_store}, {self.init_store}, {self.in_relations}"
@copy_docstring(Lattice.bottom)
def bottom(self) -> 'FularaState':
    """The bottom lattice element is defined point-wise."""
    for component in (self.scalar_state, self.dict_store,
                      self.init_store, self.in_relations):
        component.bottom()
    return self
@copy_docstring(Lattice.top)
def top(self) -> 'FularaState':
    """The top lattice element is defined point-wise."""
    for component in (self.scalar_state, self.dict_store,
                      self.init_store, self.in_relations):
        component.top()
    return self
@copy_docstring(Lattice.is_bottom)
def is_bottom(self) -> bool:
    """The current state is bottom if `any` of its four elements is bottom"""
    # evaluate all four checks eagerly (list, not generator) to keep the
    # exact call pattern of the point-wise definition
    flags = [self.scalar_state.is_bottom(),
             self.dict_store.is_bottom(),
             self.init_store.is_bottom(),
             self.in_relations.is_bottom()]
    return any(flags)
@copy_docstring(Lattice.is_top)
def is_top(self) -> bool:
    """The current state is top if `all` of its four elements are top"""
    # evaluate all four checks eagerly (list, not generator) to keep the
    # exact call pattern of the point-wise definition
    flags = [self.scalar_state.is_top(),
             self.dict_store.is_top(),
             self.init_store.is_top(),
             self.in_relations.is_top()]
    return all(flags)
@copy_docstring(Lattice._less_equal)
def _less_equal(self, other: 'FularaState') -> bool:
    """Defined point-wise"""
    # all four comparisons are computed eagerly, mirroring the point-wise definition
    checks = [self.scalar_state.less_equal(other.scalar_state),
              self.dict_store.less_equal(other.dict_store),
              self.init_store.less_equal(other.init_store),
              self.in_relations.less_equal(other.in_relations)]
    return all(checks)
def join_with_scalar(self, self_store: Store, other_store: Store, value_with_scalar: bool):
    """
    Joins the two dictionary stores, setting the scalar information
    for non-overlapping segments to the current scalar state.
    The result is directly written to self_store.

    (adapted from Store.join and fulara_lattice._join/dnorm,
    adding update of scalar information for non-overlapping segments)

    :param self_store: Store of FularaLattices to be overwritten by join
    :param other_store: Store of FulararLattices to be joined with self_store
    :param value_with_scalar: Indicates, if the value abstract domain
        contains scalar information and so if it should be updated
    """
    # trivial cases first: bottom/top absorb the join point-wise
    if other_store.is_bottom() or self_store.is_top():
        pass
    elif self_store.is_bottom() or other_store.is_top():
        self_store._replace(other_store)
    else:
        for var in self_store.store:
            self_lattice: FularaLattice = self_store.store[var]
            other_lattice: FularaLattice = other_store.store[var]
            # states for var can't be bottom, because then the whole store would be bottom
            if self_lattice.is_top():
                pass
            elif other_lattice.is_top():
                self_lattice._replace(other_lattice)
            else:
                # iterate over the smaller segment set and merge into a copy
                # of the larger one (fewer outer iterations)
                if len(self_lattice.segments) > len(other_lattice.segments):
                    segment_set = other_lattice.segments
                    result_set = copy(self_lattice.segments)
                else:
                    segment_set = self_lattice.segments
                    result_set = copy(other_lattice.segments)
                # segments of result_set that never got joined with a segment
                # of segment_set (i.e. only occur in one of the two stores)
                unjoined_result = copy(result_set)
                for s in segment_set:
                    remove_set = set()
                    s_joined = False
                    for r in result_set:
                        s_meet_r = deepcopy(s[0]).meet(r[0])
                        if not s_meet_r.key_is_bottom():  # not disjoint -> join segments
                            # accumulate all overlapping segments into s
                            s = (deepcopy(s[0]).join(deepcopy(r[0])),
                                 deepcopy(s[1]).join(deepcopy(r[1])))
                            unjoined_result.discard(r)
                            s_joined = True
                            remove_set.add(r)
                    result_set.difference_update(remove_set)
                    if s_joined:
                        result_set.add(s)
                    else:
                        # s occurs only in one store: refresh its scalar part
                        # from the (already joined) scalar state
                        new_k = self.update_k_from_s(s[0], self.scalar_state)
                        if value_with_scalar:
                            new_v = self.update_v_from_s(s[1], self.scalar_state)
                        else:
                            new_v = deepcopy(s[1])
                        result_set.add((new_k, new_v))
                # same scalar refresh for result segments that never overlapped
                result_set.difference_update(unjoined_result)
                for r in unjoined_result:
                    new_k = self.update_k_from_s(r[0], self.scalar_state)
                    if value_with_scalar:
                        new_v = self.update_v_from_s(r[1], self.scalar_state)
                    else:
                        new_v = deepcopy(r[1])
                    result_set.add((new_k, new_v))
                self_lattice.segments.clear()
                self_lattice.segments.update(result_set)
@copy_docstring(Lattice._join)
def _join(self, other: 'FularaState') -> 'FularaState':
    """Defined point-wise"""
    self.scalar_state.join(other.scalar_state)
    # the dict/init stores use the scalar-aware join (instead of plain
    # Store.join) so non-overlapping segments pick up the joined scalar info;
    # only the dict store's value domain carries scalar information
    self.join_with_scalar(self.dict_store, other.dict_store, True)
    self.join_with_scalar(self.init_store, other.init_store, False)
    self.in_relations.join(other.in_relations)
    return self
@copy_docstring(Lattice._meet)
def _meet(self, other: 'FularaState'):
    """Defined point-wise"""
    pairs = ((self.scalar_state, other.scalar_state),
             (self.dict_store, other.dict_store),
             (self.init_store, other.init_store),
             (self.in_relations, other.in_relations))
    for mine, theirs in pairs:
        mine.meet(theirs)
    return self
@copy_docstring(Lattice._widening)
def _widening(self, other: 'FularaState'):
    """To avoid imprecise widening of FularaLattice, first widens the scalar state"""
    previous_scalar = deepcopy(self.scalar_state)
    self.scalar_state.widening(other.scalar_state)
    if previous_scalar != self.scalar_state:
        # scalar part is still evolving: only join the other components,
        # postponing their widening until the scalar part stabilizes
        self.dict_store.join(other.dict_store)
        self.init_store.join(other.init_store)
        self.in_relations.join(other.in_relations)
    else:
        # scalar part stable: widen the remaining components point-wise
        self.dict_store.widening(other.dict_store)
        self.init_store.widening(other.init_store)
        self.in_relations.widening(other.in_relations)
    return self
# helper
def eval_key(self, key_expr: Expression) -> KeyWrapper:
    """evaluates key_expr in the scalar_state and assigns it to v_k in a key state"""
    key_env = deepcopy(self.scalar_state)
    v_k = VariableIdentifier(key_expr.typ, k_name)  # TODO: type?
    key_env.add_variable(v_k)
    key_env.assign({v_k}, {key_expr})
    # convert the scalar environment (now containing v_k) to the key domain
    return self._s_k_conv(key_env)
# helper
def eval_value(self, value_expr: Expression) -> ValueWrapper:
    """evaluates value_expr in the scalar_state and assigns it to v_v in a value state"""
    value_env = deepcopy(self.scalar_state)
    v_v = VariableIdentifier(value_expr.typ, v_name)  # TODO: type?
    value_env.add_variable(v_v)
    value_env.assign({v_v}, {value_expr})
    # convert the scalar environment (now containing v_v) to the value domain
    return self._s_v_conv(value_env)
# helper
def _temp_cleanup(self, evaluation: Dict[Subscription, VariableIdentifier]):
    """Deletes all temporary variables of evaluation
    and assigns them back to the dictionary subscription before that"""
    current_temps = set(evaluation.values())
    for expr, var in evaluation.items():
        # 'assign expr = var' to update relationships
        d = expr.target
        k_abs = self.eval_key(expr.key)
        v_abs = self.eval_value(var)
        # temporary variables not needed in dict abstractions
        for temp in current_temps:  # TODO: better way?
            k_abs.remove_variable(temp)
            v_abs.remove_variable(temp)
        d_lattice: 'FularaLattice' = self.dict_store.store[d]
        if k_abs.is_singleton():
            # STRONG UPDATE: single concrete key -> overwrite its segment
            d_lattice.partition_add(k_abs, v_abs)
            i_lattice: 'FularaLattice' = self.init_store.store[d]
            # key now definitely initialized
            i_lattice.partition_add(k_abs, BoolLattice(False))
        else:
            # WEAK UPDATE
            # -> meet with a copy where the write has been applied
            assign_lattice = deepcopy(d_lattice)
            assign_lattice.partition_update({(k_abs, v_abs)})
            d_lattice.meet(assign_lattice)
        # remove temporary var from the scalar state; shrink current_temps so
        # later iterations don't try to strip an already-removed variable
        self.scalar_state.remove_variable(var)
        current_temps.remove(var)
def update_dict_from_scalar(self, store: Store, value_with_scalar: bool):
    """Updates the scalar information of the given dictionary store to the current scalar state

    :param store: Store of FularaLattices to be updated
    :param value_with_scalar: Indicates, if the value abstract domain
        contains scalar information and so if it should be updated
    """
    d_lattice: FularaLattice
    for d_lattice in store.store.values():
        # rebuild every segment with its scalar part refreshed
        if value_with_scalar:
            refreshed = {(self.update_k_from_s(k, self.scalar_state),
                          self.update_v_from_s(v, self.scalar_state))
                         for (k, v) in d_lattice.segments}
        else:
            refreshed = {(self.update_k_from_s(k, self.scalar_state), deepcopy(v))
                         for (k, v) in d_lattice.segments}
        d_lattice.segments.clear()
        d_lattice.segments.update(refreshed)
@copy_docstring(State._assign)
def _assign(self, left: Expression, right: Expression):
    if self.is_bottom():  # unreachable
        return self
    all_ids = left.ids().union(right.ids())
    if all(type(ident.typ) in scalar_types for ident in all_ids):  # TODO: use not any?
        # completely SCALAR STMT
        # update scalar part
        self.scalar_state.assign({left}, {right})
        # update relations with scalar variables in dict stores.
        # BUGFIX: iterate over a snapshot (list) — removing/re-adding elements
        # of a set while iterating it raises
        # "RuntimeError: Set changed size during iteration"
        for d_lattice in self.dict_store.store.values():
            for (k1, v) in list(d_lattice.segments):
                d_lattice.segments.remove((k1, v))  # re-add below: tuple is immutable
                k1.assign({left}, {right})
                v.assign({left}, {right})
                d_lattice.segments.add((k1, v))
            d_lattice.d_norm_own()
        for i_lattice in self.init_store.store.values():
            for (k2, b) in list(i_lattice.segments):  # b must be True
                i_lattice.segments.remove((k2, b))  # re-add below: tuple is immutable
                k2.assign({left}, {right})
                i_lattice.segments.add((k2, b))
            i_lattice.d_norm_own()
    elif isinstance(left, VariableIdentifier):
        if type(left.typ) in scalar_types:  # assignment to scalar variable
            # replace dictionary reads on the right by temp variables first
            evaluation = dict()
            scalar_right = self.read_eval.visit(right, self, evaluation)
            self.scalar_state.assign({left}, {scalar_right})
            self._temp_cleanup(evaluation)
            self.update_dict_from_scalar(self.dict_store, True)
            self.update_dict_from_scalar(self.init_store, False)
        elif isinstance(left.typ, DictLyraType):  # overwrite dictionary
            if isinstance(right, VariableIdentifier):
                self.dict_store.store[left] = deepcopy(self.dict_store.store[right])
                self.init_store.store[left] = deepcopy(self.init_store.store[right])
            elif isinstance(right, DictDisplay):
                # "NEW DICT"
                left_lattice: FularaLattice = self.dict_store.store[left]
                left_i_lattice: FularaLattice = self.init_store.store[left]
                # erase all dict contents before:
                left_lattice.empty()
                # everything uninitialized,
                # but scalars should conform with scalar state -> copy from scalar state:
                v_k = VariableIdentifier(left.typ.key_typ, k_name)
                s_state = deepcopy(self.scalar_state)
                s_state.add_variable(v_k)
                top_state = self.s_k_conv(s_state)
                top_segment = (top_state, BoolLattice(True))
                left_i_lattice.segments.clear()
                left_i_lattice.segments.add(top_segment)
                # write every display entry (similar to a dict write)
                for key_expr, val_expr in zip(right.keys, right.values):
                    k_abs = self.eval_key(key_expr)
                    v_abs = self.eval_value(val_expr)
                    # k_abs must be a singleton -> 'strong update'
                    left_lattice.partition_add(k_abs, v_abs)
                    left_i_lattice.partition_add(k_abs, BoolLattice(False))
            elif isinstance(right, Input):  # TODO: add special dictinput() function?
                # everything set to top,
                # but copy from scalar state to have a more precise abstraction of it
                left_lattice: FularaLattice = self.dict_store.store[left]
                left_i_lattice: FularaLattice = self.init_store.store[left]
                v_k = VariableIdentifier(left.typ.key_typ, k_name)
                s_state = deepcopy(self.scalar_state)
                s_state.add_variable(v_k)
                top_k_state = self.s_k_conv(s_state)
                v_v = VariableIdentifier(left.typ.val_typ, v_name)
                s_state = deepcopy(self.scalar_state)
                s_state.add_variable(v_v)
                top_v_state = self.s_v_conv(s_state)
                left_lattice.segments.clear()
                top_segment = (top_k_state, top_v_state)
                left_lattice.segments.add(top_segment)
                left_i_lattice.segments.clear()
                top_bool_segment = (deepcopy(top_k_state), BoolLattice(True))
                left_i_lattice.segments.add(top_bool_segment)
            else:
                raise NotImplementedError(
                    f"Assignment '{left} = {right}' is not yet supported")
        else:
            raise NotImplementedError(
                f"Assignment '{left} = {right}' is not yet supported")
    elif isinstance(left, Subscription) and isinstance(left.target.typ, DictLyraType):
        # DICT WRITE
        d = left.target
        k_abs = self.eval_key(left.key)  # TODO: nested subscripts -> read_eval
        evaluation = dict()
        scalar_right = self.read_eval.visit(right, self, evaluation)
        v_abs = self.eval_value(scalar_right)
        # temp variables are not part of the dict abstraction
        for temp in evaluation.values():
            v_abs.remove_variable(temp)
        self._temp_cleanup(evaluation)  # TODO: no assign needed?
        d_lattice: 'FularaLattice' = self.dict_store.store[d]
        if k_abs.is_singleton():
            # STRONG UPDATE
            d_lattice.partition_add(k_abs, v_abs)
            i_lattice: 'FularaLattice' = self.init_store.store[d]
            i_lattice.partition_add(k_abs, BoolLattice(False))
        else:
            # WEAK UPDATE (with partitioning)
            d_lattice.partition_update({(k_abs, v_abs)})
    else:
        raise NotImplementedError(f"Assignment '{left} = {right}' is not yet supported")
    # TODO: other stmts
    # update relations
    self.in_relations.assign({left}, {right})
    return self
@copy_docstring(State._assume)
def _assume(self, condition: Expression) -> 'FularaState':
    # Refines this state under `condition`.
    # Special-cases membership tests against dictionary views
    # (k in d.keys() / v in d.values() / (k, v) in d.items() and their
    # negations); everything else is delegated to the scalar domain and
    # then propagated back into the dictionary stores.
    if self.is_bottom():  # unreachable
        return self
    condition = NegationFreeNormalExpression().visit(condition)  # eliminate negations
    if isinstance(condition, BinaryComparisonOperation):
        if condition.operator == BinaryComparisonOperation.Operator.In:
            # refine in_relations
            self.in_relations.assume({condition})
            # refine variable(s):
            if isinstance(condition.right, Keys) \
                    and isinstance(condition.left, VariableIdentifier):
                # k in d.keys(): restrict the loop variable to the join of all keys of d.
                d = condition.right.target_dict
                d_lattice: FularaLattice = self.dict_store.store[d]
                k_abs: KeyWrapper = d_lattice.get_keys_joined()  # TODO: check if bottom?
                v_k = k_abs.k_var
                if self.scope == Scope.Loop:  # -> overwrite old value
                    self.scalar_state.add_variable(v_k)
                    self.scalar_state.meet(self._k_s_conv(k_abs))
                    self.scalar_state.assign({condition.left}, {v_k})
                    self.scalar_state.remove_variable(v_k)
                else:  # meet after assignment -> only refine old value
                    assign_state = self._k_s_conv(k_abs)
                    assign_state.assign({condition.left}, {v_k})
                    assign_state.remove_variable(v_k)
                    self.scalar_state.meet(assign_state)
                self.update_dict_from_scalar(self.dict_store, True)
                self.update_dict_from_scalar(self.init_store, False)
                return self
            elif isinstance(condition.right, Values) \
                    and isinstance(condition.left, VariableIdentifier):
                # v in d.values(): restrict the loop variable to the join of all values of d.
                d = condition.right.target_dict
                d_lattice: FularaLattice = self.dict_store.store[d]
                v_abs: ValueWrapper = d_lattice.get_values_joined()
                v_v = v_abs.v_var
                if self.scope == Scope.Loop:  # -> overwrite old value
                    self.scalar_state.add_variable(v_v)
                    self.scalar_state.meet(self._v_s_conv(v_abs))
                    self.scalar_state.assign({condition.left}, {v_v})
                    self.scalar_state.remove_variable(v_v)
                else:  # meet after assignment -> only refine old value
                    assign_state = self._v_s_conv(v_abs)
                    assign_state.assign({condition.left}, {v_v})
                    assign_state.remove_variable(v_v)
                    self.scalar_state.meet(assign_state)
                self.update_dict_from_scalar(self.dict_store, True)
                self.update_dict_from_scalar(self.init_store, False)
                return self
            elif isinstance(condition.right, Items) \
                    and isinstance(condition.left, TupleDisplay):
                # (k, v) in d.items(): restrict both loop variables.
                d = condition.right.target_dict
                d_lattice: FularaLattice = self.dict_store.store[d]
                k_abs: KeyWrapper = d_lattice.get_keys_joined()
                v_k = k_abs.k_var
                v_abs = d_lattice.get_values_joined()
                v_v = v_abs.v_var
                if self.scope == Scope.Loop:  # -> overwrite old value
                    self.scalar_state.add_variable(v_k)
                    self.scalar_state.meet(self._k_s_conv(k_abs))
                    self.scalar_state.assign({condition.left.items[0]}, {v_k})
                    self.scalar_state.remove_variable(v_k)
                    self.scalar_state.add_variable(v_v)
                    self.scalar_state.meet(self._v_s_conv(v_abs))
                    self.scalar_state.assign({condition.left.items[1]}, {v_v})
                    self.scalar_state.remove_variable(v_v)
                else:  # meet after assignment -> only refine old values
                    k_s_state = self.k_s_conv(k_abs)
                    k_s_state.assign({condition.left.items[0]}, {v_k})
                    k_s_state.remove_variable(v_k)
                    v_s_state = self.v_s_conv(v_abs)
                    v_s_state.assign({condition.left.items[1]}, {v_v})
                    v_s_state.remove_variable(v_v)
                    assign_state = k_s_state
                    assign_state.meet(v_s_state)
                    self.scalar_state.meet(assign_state)
                self.update_dict_from_scalar(self.dict_store, True)
                self.update_dict_from_scalar(self.init_store, False)
                return self
        elif condition.operator == BinaryComparisonOperation.Operator.NotIn:
            # refine in_relations
            self.in_relations.assume({condition})
            if isinstance(condition.right, Keys):
                d = condition.right.target_dict
                i_lattice: FularaLattice = self.init_store.store[d]
                if self.scope == Scope.Loop:
                    # check for definitely initialized elements:
                    if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
                        # v_k not top (don't do partition_update on init_store elements
                        # -> cannot have multiple segments whose keys add up to top)
                        # -> loop is definitely executed at least once
                        # -> loop var can only have values from the dictionary == IN case
                        d_lattice: FularaLattice = self.dict_store.store[d]
                        k_abs: KeyWrapper = d_lattice.get_keys_joined()
                        v_k = k_abs.k_var
                        self.scalar_state.add_variable(v_k)
                        self.scalar_state.meet(self._k_s_conv(k_abs))
                        self.scalar_state.assign({condition.left}, {v_k})
                        self.scalar_state.remove_variable(v_k)
                        self.update_dict_from_scalar(self.dict_store, True)
                        self.update_dict_from_scalar(self.init_store, False)
                    # else: can have any value from before or inside the loop -> return self
                else:
                    # get possibly uninitialized keys
                    k_abs: KeyWrapper = i_lattice.get_keys_joined()
                    v_k = k_abs.k_var
                    assign_state = self._k_s_conv(k_abs)
                    assign_state.assign({condition.left}, {v_k})
                    assign_state.remove_variable(v_k)
                    self.scalar_state.meet(assign_state)
                    self.update_dict_from_scalar(self.dict_store, True)
                    self.update_dict_from_scalar(self.init_store, False)
                return self
            elif isinstance(condition.right, Values):
                if self.scope == Scope.Loop:
                    d = condition.right.target_dict
                    i_lattice: FularaLattice = self.init_store.store[d]
                    # check for definitely initialized elements:
                    if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
                        # v_k not top (don't do partition_update on init_store elements
                        # -> cannot have multiple segments whose keys add up to top)
                        # -> loop is definitely executed at least once
                        # -> loop var can only have values from the dictionary == IN case
                        d_lattice: FularaLattice = self.dict_store.store[d]
                        v_abs = d_lattice.get_values_joined()
                        v_v = v_abs.v_var
                        self.scalar_state.add_variable(v_v)
                        self.scalar_state.meet(self._v_s_conv(v_abs))
                        # Fixed: condition.left is the loop variable itself here
                        # (cf. the Keys branch above, which uses {condition.left});
                        # `.items` was a copy-paste remnant from the Items branch,
                        # where condition.left is a TupleDisplay.
                        self.scalar_state.assign({condition.left}, {v_v})
                        self.scalar_state.remove_variable(v_v)
                        self.update_dict_from_scalar(self.dict_store, True)
                        self.update_dict_from_scalar(self.init_store, False)
                    # else: TODO: refine value variable abstraction
                return self
            elif isinstance(condition.right, Items):
                d = condition.right.target_dict
                i_lattice: FularaLattice = self.init_store.store[d]
                if self.scope == Scope.Loop:
                    # check for definitely initialized elements:
                    if i_lattice.is_bottom() or len(i_lattice.segments) != 1:
                        # v_k not top (don't do partition_update on init_store elements
                        # -> cannot have multiple segments whose keys add up to top)
                        # -> loop is definitely executed at least once
                        # -> loop var can only have values from the dictionary == IN case
                        d_lattice: FularaLattice = self.dict_store.store[d]
                        k_abs: KeyWrapper = d_lattice.get_keys_joined()
                        v_k = k_abs.k_var
                        self.scalar_state.add_variable(v_k)
                        self.scalar_state.meet(self._k_s_conv(k_abs))
                        self.scalar_state.assign({condition.left.items[0]}, {v_k})
                        self.scalar_state.remove_variable(v_k)
                        v_abs = d_lattice.get_values_joined()
                        v_v = v_abs.v_var
                        self.scalar_state.add_variable(v_v)
                        self.scalar_state.meet(self._v_s_conv(v_abs))
                        self.scalar_state.assign({condition.left.items[1]}, {v_v})
                        self.scalar_state.remove_variable(v_v)
                        self.update_dict_from_scalar(self.dict_store, True)
                        self.update_dict_from_scalar(self.init_store, False)
                    # else: can have any value from before or inside the loop -> return self
                else:
                    # TODO: refine value variable abstraction
                    # get possibly uninitialized keys
                    k_abs: KeyWrapper = i_lattice.get_keys_joined()
                    v_k = k_abs.k_var
                    assign_state = self._k_s_conv(k_abs)
                    assign_state.assign({condition.left.items[0]}, {v_k})
                    assign_state.remove_variable(v_k)
                    self.scalar_state.meet(assign_state)
                    self.update_dict_from_scalar(self.dict_store, True)
                    self.update_dict_from_scalar(self.init_store, False)
                return self
    # default: try in scalar domain
    evaluation = dict()
    scalar_condition = self.read_eval.visit(condition, self, evaluation)
    self.scalar_state.assume({scalar_condition})
    self._temp_cleanup(evaluation)
    # check, if coupled loop variables (from items-loops) get refined
    # -> refine their counterpart
    cond_vars = condition.ids()
    for (d_var, k_var, v_var) in self.in_relations.k_v_tuples():
        if k_var in cond_vars:
            if v_var in cond_vars:
                raise NotImplementedError(
                    f"Conditions like {condition} containing both the key "
                    f"and value loop variable of a .items()-loop are not yet supported!")
            else:
                # refine v_var according to refined k_var
                # -> re-evaluate: v_var meet d_var[k_var]
                d_lattice: FularaLattice = self.dict_store.store[d_var]
                k_abs = self.eval_key(k_var)
                if not k_abs.is_top():  # TODO: check for less_equal old?
                    scalar_vars = self._s_vars.copy()
                    v_v = VariableIdentifier(d_var.typ.val_typ, v_name)
                    # join the values of all segments the key may fall into
                    v_abs = d_lattice.v_domain(scalar_vars, v_v).bottom()
                    for (k, v) in d_lattice.segments:
                        key_meet_k = deepcopy(k_abs).meet(k)
                        if not key_meet_k.key_is_bottom():  # key may be contained in this segment
                            v_abs.join(deepcopy(v))
                    scalar_copy = deepcopy(self.scalar_state)
                    scalar_copy.add_variable(v_v)
                    scalar_copy.meet(self.v_s_conv(v_abs))
                    scalar_copy.assign({v_var}, {v_v})
                    scalar_copy.remove_variable(v_v)
                    self.scalar_state.meet(scalar_copy)
        elif v_var in cond_vars:
            # refine k_var according to refined v_bar -> k_var, s.t. d_var[k_var] = v_var
            d_lattice: FularaLattice = self.dict_store.store[d_var]
            v_abs = self.eval_value(v_var)
            if not v_abs.is_top():  # TODO: check for less_equal old?
                scalar_vars = self._s_vars.copy()
                v_k = VariableIdentifier(d_var.typ.key_typ, k_name)
                # join the keys of all segments whose value overlaps v_abs
                k_abs = d_lattice.k_domain(scalar_vars, v_k).bottom()
                for (k, v) in d_lattice.segments:
                    value_meet_v = deepcopy(v_abs).meet(v)
                    if not value_meet_v.key_is_bottom():  # value may be contained in this segment
                        k_abs.join(deepcopy(k))
                scalar_copy = deepcopy(self.scalar_state)
                scalar_copy.add_variable(v_k)
                scalar_copy.meet(self.k_s_conv(k_abs))
                scalar_copy.assign({k_var}, {v_k})
                scalar_copy.remove_variable(v_k)
                self.scalar_state.meet(scalar_copy)
    self.update_dict_from_scalar(self.dict_store, True)
    self.update_dict_from_scalar(self.init_store, False)
    # no 'in'-condition -> no need to update in_relations
    return self
@copy_docstring(State.enter_if)
def enter_if(self) -> 'FularaState':
    # Unreachable (bottom) states are left untouched.
    if self.is_bottom():
        return self
    # Track that subsequent conditions/assignments happen inside a branch.
    self.scopes.append(Scope.Branch)
    self.in_relations.enter_if()
    return self
@copy_docstring(State.exit_if)
def exit_if(self) -> 'FularaState':
    # Unreachable (bottom) states are left untouched.
    if self.is_bottom():
        return self
    assert self.scope == Scope.Branch
    self.scopes.pop()
    self.in_relations.exit_if()
    return self
@copy_docstring(State.enter_loop)
def enter_loop(self) -> 'FularaState':
    # Unreachable (bottom) states are left untouched.
    if self.is_bottom():
        return self
    # Track that subsequent conditions/assignments happen inside a loop.
    self.scopes.append(Scope.Loop)
    self.in_relations.enter_loop()
    return self
@copy_docstring(State.exit_loop)
def exit_loop(self) -> 'FularaState':
    # Unreachable (bottom) states are left untouched.
    if self.is_bottom():
        return self
    assert self.scope == Scope.Loop
    self.scopes.pop()
    self.in_relations.exit_loop()
    return self
@copy_docstring(State._output)
def _output(self, output: Expression) -> 'FularaState':
    # Outputting a value does not modify any variable or dictionary content.
    return self  # nothing to be done
@copy_docstring(State._substitute)
def _substitute(self, left: Expression, right: Expression) -> 'FularaState':
    # Substitution is the backward-analysis counterpart of _assign;
    # this domain only supports forward analysis.
    raise RuntimeError("Unexpected substitute in a forward analysis!")
# expression evaluation
class DictReadEvaluation:
    """Visitor that performs the evaluation of dictionary reads in the DictContentDomain
    lattice by replacing them with a temporary (scalar) variable holding
    the corresponding value in the scalar state.

    Adapted from `ExpressionVisitor`.
    """

    @copy_docstring(ExpressionVisitor.visit)
    def visit(self, expr, *args, **kwargs):
        """
        :param expr: current expression
        :keyword arguments:
            * *state* -- current FularaState
            * *evaluation* -- dictionary mapping from dictionary read expressions,
              that already got evaluated to temporary variables (VariableIdentifier)
        :return expression with replaced dictionary reads
        """
        # Dynamic dispatch on the expression's class name, falling back to
        # default_visit for expression kinds without a dedicated handler.
        method = 'visit_' + expr.__class__.__name__
        return getattr(self, method, self.default_visit)(expr, *args, **kwargs)

    @copy_docstring(ExpressionVisitor.visit_Subscription)
    def visit_Subscription(self, expr: Subscription, state: 'FularaState' = None,
                           evaluation=None):
        # Replace a dictionary read d[k] by a fresh temporary scalar variable
        # constrained to the join of the values of all segments whose key
        # abstraction overlaps the evaluated key.
        if isinstance(expr.target.typ, DictLyraType):
            if expr in evaluation:  # already evaluated
                return evaluation[expr]
            else:
                d = expr.target
                d_lattice: FularaLattice = state.dict_store.store[d]
                k_abs = state.eval_key(expr.key)
                for old_temp in evaluation.values():  # remove already added temp vars
                    k_abs.remove_variable(old_temp)
                scalar_vars = state._s_vars.copy()
                v_var = VariableIdentifier(d.typ.val_typ, v_name)
                # Join values over every segment the key may fall into.
                v_abs = d_lattice.v_domain(scalar_vars, v_var).bottom()
                for (k, v) in d_lattice.segments:
                    key_meet_k = deepcopy(k_abs).meet(k)
                    if not key_meet_k.key_is_bottom():  # key may be contained in this segment
                        v_abs.join(deepcopy(v))
                for old_temp in evaluation.values():  # add already added temp vars
                    v_abs.add_variable(old_temp)
                state.scalar_state.add_variable(v_var)
                state.scalar_state.meet(state.v_s_conv(v_abs))
                # use increasing numbers for temp_var names
                temp_var = VariableIdentifier(d.typ.val_typ, str(len(evaluation)) + "v")
                state.scalar_state.add_variable(temp_var)
                state.scalar_state.assign({temp_var}, {v_var})
                state.scalar_state.remove_variable(v_var)
                evaluation[expr] = temp_var
                return temp_var
        else:
            return self.default_visit(expr, state, evaluation)

    def default_visit(self, expr: Expression, state: 'FularaState' = None,
                      evaluation=None):
        """default: visit & replace children (adapted from expressions._iter_child_exprs)"""
        new_expr = copy(expr)
        for name, field in new_expr.__dict__.items():
            if isinstance(field, Expression):
                new_expr.__dict__[name] = self.visit(field, state, evaluation)  # replace
            elif isinstance(field, list):
                for idx, item in enumerate(field):
                    if isinstance(item, Expression):
                        field[idx] = self.visit(item, state, evaluation)
        return new_expr

read_eval = DictReadEvaluation()  # static class member shared between all instances
|
import logging
import urllib.parse
import decimal
from xml.etree import ElementTree
from discord.ext import commands
from .common import Cog
log = logging.getLogger(__name__)
TAX_PER_CHAR = decimal.Decimal('0.022')
class Translation(Cog):
    """Microsoft's Translation API."""
    def __init__(self, bot):
        super().__init__(bot)
        # Base endpoint of the Microsoft Translator V2 HTTP API.
        self.APIROUTE = 'https://api.microsofttranslator.com/V2/Http.svc'
        self.apicfg = self.bot.config.MSFT_TRANSLATION
        # This one is given by azure
        self.subkey = self.apicfg['key']
        # Sent on every request for authentication against Azure.
        self.subkey_headers = {
            'Ocp-Apim-Subscription-Key': self.subkey,
        }

    async def req(self, method, route, qs_dict: dict) -> 'any':
        """Make a request to the translation API."""
        qs = urllib.parse.urlencode(qs_dict)
        url = f'{self.APIROUTE}{route}?{qs}'
        async with self.bot.session.request(method, url,
                                            headers=self.subkey_headers) as r:
            # NOTE(review): `r` escapes the `async with` block, which normally
            # releases the response; callers later `await r.text()` — confirm
            # the session implementation still allows reading the body here.
            return r

    async def get(self, route: str, qs: dict) -> 'any':
        """Issue a GET request against the translation API."""
        return await self.req('GET', route, qs)

    async def post(self, route: str, qs: dict) -> 'any':
        """Issue a POST request against the translation API."""
        return await self.req('POST', route, qs)

    @commands.command()
    async def translist(self, ctx):
        """List all available languages."""
        resp = await self.get('/GetLanguagesForTranslate', {})
        text = await resp.text()
        if resp.status != 200:
            raise self.SayException(f'\N{WARNING SIGN} API '
                                    f'replied {resp.status}')
        # The API replies with XML; the language codes are its text nodes.
        root = ElementTree.fromstring(text)
        await ctx.send(f"`{', '.join(list(root.itertext()))}`")

    @commands.command()
    async def translate(self, ctx, to_lang: str, *, sentence: str):
        """Translate from one language to another."""
        to_lang = self.bot.clean_content(to_lang).lower()
        sentence = self.bot.clean_content(sentence)

        # Charge the caller proportionally to the sentence length.
        tax = len(sentence) * TAX_PER_CHAR
        await self.coins.pricing(ctx, tax)

        # detect language
        resp_detect = await self.get('/Detect', {
            'text': sentence,
        })
        text_detect = await resp_detect.text()
        if resp_detect.status != 200:
            raise self.SayException(f'\N{WARNING SIGN} Detect failed'
                                    f' with {resp_detect.status}')
        root_detect = ElementTree.fromstring(text_detect)
        detected = root_detect.text

        # translate
        resp = await self.get('/Translate', {
            'to': to_lang,
            'text': sentence,
        })
        text = await resp.text()
        if resp.status != 200:
            log.warning('[trans] got a non-200, %r', text)
            raise self.SayException(f'\N{WARNING SIGN} Translation failed'
                                    f' with {resp.status}')
        # First text node of the XML reply is the translated sentence.
        root = ElementTree.fromstring(text)
        translated = list(root.itertext())[0]
        translated = self.bot.clean_content(translated)

        log.debug('[translate] %r [%s] => %r [%s]',
                  sentence, detected, translated, to_lang)

        res = [
            f'`{sentence}` ({detected})',
            ' > ',
            f'`{translated}` ({to_lang})',
        ]
        await ctx.send('\n'.join(res))
def setup(bot):
    """Register the Translation cog on the bot."""
    cog = Translation(bot)
    bot.add_cog(cog)
translation: don't post the original message
import logging
import urllib.parse
import decimal
from xml.etree import ElementTree
from discord.ext import commands
from .common import Cog
log = logging.getLogger(__name__)
TAX_PER_CHAR = decimal.Decimal('0.022')
class Translation(Cog):
    """Microsoft's Translation API."""
    def __init__(self, bot):
        super().__init__(bot)
        # Base endpoint of the Microsoft Translator V2 HTTP API.
        self.APIROUTE = 'https://api.microsofttranslator.com/V2/Http.svc'
        self.apicfg = self.bot.config.MSFT_TRANSLATION
        # This one is given by azure
        self.subkey = self.apicfg['key']
        # Sent on every request for authentication against Azure.
        self.subkey_headers = {
            'Ocp-Apim-Subscription-Key': self.subkey,
        }

    async def req(self, method, route, qs_dict: dict) -> 'any':
        """Make a request to the translation API."""
        qs = urllib.parse.urlencode(qs_dict)
        url = f'{self.APIROUTE}{route}?{qs}'
        async with self.bot.session.request(method, url,
                                            headers=self.subkey_headers) as r:
            # NOTE(review): `r` escapes the `async with` block, which normally
            # releases the response; callers later `await r.text()` — confirm
            # the session implementation still allows reading the body here.
            return r

    async def get(self, route: str, qs: dict) -> 'any':
        """Issue a GET request against the translation API."""
        return await self.req('GET', route, qs)

    async def post(self, route: str, qs: dict) -> 'any':
        """Issue a POST request against the translation API."""
        return await self.req('POST', route, qs)

    @commands.command()
    async def translist(self, ctx):
        """List all available languages."""
        resp = await self.get('/GetLanguagesForTranslate', {})
        text = await resp.text()
        if resp.status != 200:
            raise self.SayException(f'\N{WARNING SIGN} API '
                                    f'replied {resp.status}')
        # The API replies with XML; the language codes are its text nodes.
        root = ElementTree.fromstring(text)
        await ctx.send(f"`{', '.join(list(root.itertext()))}`")

    @commands.command()
    async def translate(self, ctx, to_lang: str, *, sentence: str):
        """Translate from one language to another."""
        to_lang = self.bot.clean_content(to_lang).lower()
        sentence = self.bot.clean_content(sentence)

        # Charge the caller proportionally to the sentence length.
        tax = len(sentence) * TAX_PER_CHAR
        await self.coins.pricing(ctx, tax)

        # detect language
        resp_detect = await self.get('/Detect', {
            'text': sentence,
        })
        text_detect = await resp_detect.text()
        if resp_detect.status != 200:
            raise self.SayException(f'\N{WARNING SIGN} Detect failed'
                                    f' with {resp_detect.status}')
        root_detect = ElementTree.fromstring(text_detect)
        detected = root_detect.text

        # translate
        resp = await self.get('/Translate', {
            'to': to_lang,
            'text': sentence,
        })
        text = await resp.text()
        if resp.status != 200:
            log.warning('[trans] got a non-200, %r', text)
            raise self.SayException(f'\N{WARNING SIGN} Translation failed'
                                    f' with {resp.status}')
        # First text node of the XML reply is the translated sentence.
        root = ElementTree.fromstring(text)
        translated = list(root.itertext())[0]
        translated = self.bot.clean_content(translated)

        log.debug('[translate] %r [%s] => %r [%s]',
                  sentence, detected, translated, to_lang)

        # Only the detected language and the translation are posted;
        # the original message is intentionally not echoed back.
        res = [
            f'detected language: {detected}',
            f'`{translated}` ({to_lang})',
        ]
        await ctx.send('\n'.join(res))
def setup(bot):
    """Register the Translation cog on the bot."""
    cog = Translation(bot)
    bot.add_cog(cog)
|
import socket, struct, sys, time, random
from viro_veil import * # for the constants
class ViroModule(object):
def __init__(self, my_dpid, my_vid):
self.dpid = my_dpid
self.vid = my_vid
self.L = len(my_vid)
self.routing_table = {}
self.rdv_store = {}
self.neighbors = {}
self.rdv_request_tracker = {}
def update_routing_table_based_on_neighbor(self, neighbor_vid, port):
print "update_routing_table_based_on_neighbor: neighbor_vid =", neighbor_vid, "port =", port
bucket = delta(neighbor_vid, self.vid)
# If we don't have any entries at this bucket -> create a new bucket
if bucket not in self.routing_table:
self.routing_table[bucket] = []
bucket_info = {
'prefix': get_prefix(self.vid, bucket),
'gateway': int(self.vid, 2),
'next_hop': int(neighbor_vid, 2),
'port': port
}
if not is_duplicate_bucket(self.routing_table[bucket], bucket_info):
self.routing_table[bucket].append(bucket_info)
self.recalculate_default_gw_for_bucket(bucket)
print "Updating the Neighbors list..."
self.update_neighbors(neighbor_vid, bucket)
self.print_routing_table()
# Presumably a gateway has just been added to or removed from the list for this bucket,
# so we need to do the following:
# - (Re)compute the logical distance of each gateway
# - Set a gateway having minimal distance to be the default (and all others not to be the default)
# - Limit the number of gateways stored to the maximum allowed
# as defined by MAX_GW_PER_LEVEL parameter (which is assumed to be > 1).
# To do that we remove a gateway whose distance is maximal,
# and which was not selected as the default (in the case of all gateways being equidistant)
def recalculate_default_gw_for_bucket(self, bucket):
print "Recalculating default gateway for bucket", bucket
entries = self.routing_table[bucket]
min_distance = float("inf")
min_entry = None
max_distance = -1
max_entry = None
for entry in entries:
# Clear default flag -- will set again once all distances have been computed
entry['default'] = False
# Compute distance
gw = bin2str(entry['gateway'], self.L)
distance = delta(gw, self.vid)
# Update min/max pointers
if distance > max_distance:
max_distance = distance
max_entry = entry
if distance < min_distance:
min_distance = distance
min_entry = entry
if min_entry is None or max_entry is None:
print "recalculate_default_gw_for_bucket did not find a min and max distance gateways (no gateways)"
return
# DEBUG
# print "min_distance =", min_distance, "min_entry =", min_entry
# print "max_distance =", max_distance, "max_entry =", max_entry
# Set (possibly new) default gateway for this bucket to be one having minimal distance
min_entry['default'] = True
# Limit number of entries (assume for now that there will be at most 1 too many)
if len(entries) > MAX_GW_PER_LEVEL:
max_gw_index = entries.index(max_entry)
if not max_entry['default']:
# Delete gateway at maximal distance (non-equidistant case)
del entries[max_gw_index]
else:
# max_distance == min_distance (equidistant case)
# So just delete any non-default gateway
next_gw_index = (max_gw_index + 1) % len(entries)
del entries[next_gw_index]
# In case somehow there were more than 1 too many gateways then do this again.
# If this were expected to happen often then we could do something more efficient for that case,
# such as sort the entries in order of increasing distance then removing all beyond maximum,
# but this is not expected to happen. We just have this check here to ensure correctness in case
# of this unexpected scenario where there is more than 1 gateway that needs to be removed
# (since this function should be called each time a gateway is added or removed).
if len(entries) > MAX_GW_PER_LEVEL:
print "WARNING: Recursively calling recalculate_default_gw_for_bucket; unexpected situation"
self.recalculate_default_gw_for_bucket(bucket)
def update_neighbors(self, neighbor_vid, distance):
if neighbor_vid not in self.neighbors:
self.neighbors[neighbor_vid] = {}
self.neighbors[neighbor_vid][distance] = time.time()
def print_routing_table(self):
print '\n----> Routing Table at :', self.vid, '|', self.dpid, ' <----'
for distance in range(1, self.L + 1):
if distance in self.routing_table:
for entry in self.routing_table[distance]:
print 'Bucket:', distance, \
'Port:', entry['port'], \
'Prefix:', entry['prefix'],\
'Gateway:', bin2str(entry['gateway'], self.L), \
'Next hop:', bin2str(entry['next_hop'], self.L), \
'Default:', entry['default']
else:
print 'Bucket:', distance, '--- E M P T Y ---'
print 'RDV STORE: ', self.rdv_store, "\n"
def remove_failed_gw(self, packet, gw=None):
if gw is None:
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
payload = int(payload, 2)
else:
payload = int(gw, 2)
to_be_deleted = {}
for level in self.routing_table:
to_be_deleted[level] = []
for idx in xrange(0, len(self.routing_table[level])):
entry = self.routing_table[level][idx]
if entry['gateway'] == payload or entry['next_hop'] == payload:
to_be_deleted[level].append(idx)
for level in to_be_deleted:
for index in to_be_deleted[level]:
del self.routing_table[level][index]
bucket_ = []
for level in self.routing_table:
if len(self.routing_table[level]) == 0:
bucket_.append(level)
for level in bucket_:
del self.routing_table[level]
return
def publish(self, bucket, k):
dst = get_rdv_id(k, self.vid)
packet = create_RDV_PUBLISH(bucket, self.vid, dst)
print 'Node:', self.vid, 'is publishing neighbor', bin2str(bucket['next_hop'], self.L), 'to rdv:', dst
return (packet, dst)
def withdraw(self, failedNode, RDV_level):
dst = get_rdv_id(RDV_level, self.vid)
if dst != failedNode:
packet = create_RDV_WITHDRAW(int(failedNode, 2), self.vid, '00')
print 'Node: ', self.vid, 'is withdrawing neighbor', failedNode, 'to rdv:', dst
return packet
# FIXME: Not used?
def withdraw_gw(self, failed_gw, vid, dst):
print "Creating GW_WITHDRAW packet"
packet = create_GW_WITHDRAW(failed_gw, vid, dst)
print self.vid, '- RDV gateway withdraw:', failed_gw, 'to dst:', dst
return packet
def query(self, k):
dst = get_rdv_id(k, self.vid)
packet = create_RDV_QUERY(k, self.vid, dst)
print 'Node:', self.vid, 'is querying to reach bucket:', k, 'to rdv:', dst
return (packet, dst)
def get_next_hop(self, packet):
dst_vid = get_dest(packet, self.L)
next_hop = ''
packet_type = get_operation(packet)
port = ''
while next_hop == '':
distance = delta(self.vid, dst_vid)
if distance == 0:
break
if distance in self.routing_table and len(self.routing_table[distance]) > 0:
for entry in self.routing_table[distance]:
if entry['default']:
next_hop = str(entry['next_hop'])
port = int(entry['port'])
break
if next_hop != '':
break
if (packet_type != RDV_PUBLISH) and (packet_type != RDV_QUERY):
break
print 'No next hop for destination: ', dst_vid, 'distance: ', distance
# flip the distance bit to
dst_vid = flip_bit(dst_vid, distance)
if next_hop == '':
print 'No route to destination', 'MyVID: ', self.vid, 'DEST: ', dst_vid
return ('', '')
return (next_hop, port)
# Adds an entry to rdv_store, and also ensures that there are no duplicates
def add_if_no_duplicate_rdv_entry(self, distance, new_entry):
for x in self.rdv_store[distance]:
if x[0] == new_entry[0] and x[1] == new_entry[1]:
return
self.rdv_store[distance].append(new_entry)
# Adds an entry to rdv_store, and also ensures that there are no duplicates
def add_if_no_duplicate_gw_entry(self, gw, new_entry):
for x in self.rdv_request_tracker[gw]:
if x == new_entry:
return
self.rdv_request_tracker[gw].append(new_entry)
def process_rdv_publish(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
next_hop = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
print "RDV_PUBLISH message received from: ", src_vid
distance = delta(self.vid, next_hop)
if distance not in self.rdv_store:
self.rdv_store[distance] = []
new_entry = [src_vid, next_hop]
self.add_if_no_duplicate_rdv_entry(distance, new_entry)
def process_rdv_query(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
k = int(payload, 2)
print "RDV_QUERY message received from: ", src_vid
# search in rdv store for the logically closest gateway to reach kth distance away neighbor
gw_str_list = self.find_gateways_in_rdv_store(k, src_vid)
# if found then form the reply packet and send to src_vid
if len(gw_str_list) < 1:
# No gateway found
print 'Node: ', self.vid, 'has no gateway for the rdv_query packet to reach bucket: ', k, 'for node: ', src_vid
return ''
gw_list = []
for gw_str in gw_str_list:
gw_list.append(int(gw_str,2))
# create a RDV_REPLY packet and send it
reply_packet = create_RDV_REPLY(gw_list, k, self.vid, src_vid)
# Keeps track of the Nodes that requests each Gateways at
# specific level
for gw_str in gw_str_list:
if gw_str not in self.rdv_request_tracker:
self.rdv_request_tracker[gw_str] = []
self.add_if_no_duplicate_gw_entry(gw_str, src_vid)
return reply_packet
def process_self_rdv_query(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
[k] = struct.unpack("!I", packet[24:28])
# search in rdv store for the logically closest gateway to reach kth distance away neighbor
gw_str_list = self.find_gateways_in_rdv_store(k, src_vid)
# if found then form the reply packet and send to src_vid
if len(gw_str_list) < 1:
# No gateway found
print 'Node:', self.vid, 'has no gateway for the rdv_query packet to reach bucket: ', k, 'for node: ', src_vid
return ''
for gw_str in gw_str_list:
if not k in self.routing_table:
self.routing_table[k] = []
if len(self.routing_table[k]) >= MAX_GW_PER_LEVEL:
print 'Node:', self.vid, 'already has the maximum number of routing entries allowed for level: ', k
return
next_hop, port = self.get_next_hop_rdv(gw_str)
if next_hop == '':
print 'No next_hop found for the gateway:', gw_str
print 'New routing information couldnt be added! '
return
# Destination Subtree-k
bucket_info = {
'prefix': get_prefix(self.vid, k),
'gateway': int(gw_str, 2),
'next_hop': int(next_hop, 2),
'port': port
}
self.routing_table[k].append(bucket_info)
self.recalculate_default_gw_for_bucket(k)
def find_gateways_in_rdv_store(self, k, src_vid):
gw_list = []
if k not in self.rdv_store:
return []
# Look through rdv store for next_hop entries
for t in self.rdv_store[k]:
gw_vid = t[0]
distance = delta(gw_vid, src_vid)
gw_list.append({'gw_vid': gw_vid, 'distance': distance})
if len(gw_list) < 1:
return []
# Sort the list of available gateways by distance (closest first)
gw_list.sort(key=lambda gw: gw['distance'])
# print "find_gateways_in_rdv_store found these gateways:", gw_list
# Truncate list so that it has at most MAX_GW_PER_RDV_REPLY entries
gw_list = gw_list[:MAX_GW_PER_RDV_REPLY]
# Remove the distance information from the list so it's a list of VIDs again instead of a list of dictionaries
gw_list = map(lambda x: x['gw_vid'], gw_list)
return gw_list
def process_rdv_reply(self, packet):
# Fill my routing table using this new information
[k] = struct.unpack("!I", packet[24:28])
gw_offset = 28
num_of_gw = (len(packet) - gw_offset)/4
gw_list = struct.unpack("!" + "I"*num_of_gw, packet[28:(28+4*num_of_gw)])
print "RDV_REPLY contained", num_of_gw, "gateway(s):", map(lambda s: bin2str(s, self.L), gw_list)
for gw in gw_list:
gw_str = bin2str(gw, self.L)
if not k in self.routing_table:
self.routing_table[k] = []
if len(self.routing_table[k]) >= MAX_GW_PER_LEVEL:
print 'Node:', self.vid, 'already has the maximum number of routing entries allowed for level', k
return
next_hop, port = self.get_next_hop_rdv(gw_str)
if next_hop == '':
print 'ERROR: no next_hop found for the gateway:', gw_str
print "New routing information couldn't be added!"
return
next_hop_int = int(next_hop, 2)
bucket_info = {
'prefix': get_prefix(self.vid, k),
'gateway': gw,
'next_hop': next_hop_int,
'port': port
}
self.routing_table[k].append(bucket_info)
self.recalculate_default_gw_for_bucket(k)
def get_next_hop_rdv(self, gw_str):
next_hop = ''
port = ''
distance = delta(self.vid, gw_str)
if distance in self.routing_table:
for entry in self.routing_table[distance]:
if entry['default']:
next_hop = bin2str(entry['next_hop'], self.L)
port = str(entry['port'])
return (next_hop, port)
# FIXME: Not used?
def process_rdv_withdraw(self, packet):
print "WARNING: process_rdv_withdraw called but implementation not verified yet"
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
print 'Node:', self.vid, 'has received process_rdv_withdraw from ', src_vid
gw = {}
print self.rdv_store
for level in self.rdv_store:
delete = []
for idx in range(0, len(self.rdv_store[level])):
entry = self.rdv_store[level][idx]
if (entry[0] == payload) or (entry[1] == payload):
delete.append(idx)
# Save the list of Removed Gateways and delete them from rdv Store
if not level in gw:
gw[level] = []
gw[level].append(entry[0]) # saves the removed GWs
for index in delete:
del self.rdv_store[level][index]
if self.vid != src_vid: # only need to update routing table if this came from someone else
self.remove_failed_gw(packet) # update the Routing Table
else:
print "I am the rdv point. My routing table is already updated."
return gw
# TODO: Dead code -- may be incorrect
def get_gw_list(self, next_hop):
print 'FIXME: get_gw_list should not be called yet -- implementation may not be correct'
gw_list = []
# calculate logical distance
print "Finding the gateways..."
entries = self.find_entries_with_neighbor_as_next_hop(next_hop)
for level in entries:
if level != 1 or level != -1:
bucket = entries[level]
# return gateway from routing_table with distance = bucket
gw = bin2str(self.routing_table[level][bucket]['gateway'], self.L)
gw_list.append(gw)
return gw_list
# Returns a dictionary that is like a copy of the routing table except:
# - There is exactly 1 entry for each bucket
# - If the next hop in a routing table entry matches this neighbor_vid
# then that entry is copied into this dictionary
# - Otherwise (e.g. no matching entry found for bucket/level) the corresponding entry is set to -1
# TODO: Dead code -- may be incorrect
def find_entries_with_neighbor_as_next_hop(self, neighbor_vid):
print 'FIXME: find_entries_with_neighbor_as_next_hop should not be called yet -- implementation may not be correct'
# Note: removed dead code from original implementation (may need to add back later when needed)
result = {}
for bucket in self.routing_table:
result[bucket] = -1
for entry in self.routing_table[bucket]:
next_hop = bin2str(self.routing_table[bucket][entry]['next_hop'], self.L)
if next_hop == neighbor_vid:
result[bucket] = entry
return result
Let recalculate_default_gw_for_bucket enforce MAX_GW_PER_LEVEL (otherwise a distant gateway might be able to block a close one from being added)
import socket, struct, sys, time, random
from viro_veil import * # for the constants
class ViroModule(object):
def __init__(self, my_dpid, my_vid):
self.dpid = my_dpid
self.vid = my_vid
self.L = len(my_vid)
self.routing_table = {}
self.rdv_store = {}
self.neighbors = {}
self.rdv_request_tracker = {}
def update_routing_table_based_on_neighbor(self, neighbor_vid, port):
print "update_routing_table_based_on_neighbor: neighbor_vid =", neighbor_vid, "port =", port
bucket = delta(neighbor_vid, self.vid)
# If we don't have any entries at this bucket -> create a new bucket
if bucket not in self.routing_table:
self.routing_table[bucket] = []
bucket_info = {
'prefix': get_prefix(self.vid, bucket),
'gateway': int(self.vid, 2),
'next_hop': int(neighbor_vid, 2),
'port': port
}
if not is_duplicate_bucket(self.routing_table[bucket], bucket_info):
self.routing_table[bucket].append(bucket_info)
self.recalculate_default_gw_for_bucket(bucket)
print "Updating the Neighbors list..."
self.update_neighbors(neighbor_vid, bucket)
self.print_routing_table()
# Presumably a gateway has just been added to or removed from the list for this bucket,
# so we need to do the following:
# - (Re)compute the logical distance of each gateway
# - Set a gateway having minimal distance to be the default (and all others not to be the default)
# - Limit the number of gateways stored to the maximum allowed
# as defined by MAX_GW_PER_LEVEL parameter (which is assumed to be > 1).
# To do that we remove a gateway whose distance is maximal,
# and which was not selected as the default (in the case of all gateways being equidistant)
def recalculate_default_gw_for_bucket(self, bucket):
print "Recalculating default gateway for bucket", bucket
entries = self.routing_table[bucket]
min_distance = float("inf")
min_entry = None
max_distance = -1
max_entry = None
for entry in entries:
# Clear default flag -- will set again once all distances have been computed
entry['default'] = False
# Compute distance
gw = bin2str(entry['gateway'], self.L)
distance = delta(gw, self.vid)
# Update min/max pointers
if distance > max_distance:
max_distance = distance
max_entry = entry
if distance < min_distance:
min_distance = distance
min_entry = entry
if min_entry is None or max_entry is None:
print "recalculate_default_gw_for_bucket did not find a min and max distance gateways (no gateways)"
return
# DEBUG
# print "min_distance =", min_distance, "min_entry =", min_entry
# print "max_distance =", max_distance, "max_entry =", max_entry
# Set (possibly new) default gateway for this bucket to be one having minimal distance
min_entry['default'] = True
# Limit number of entries (assume for now that there will be at most 1 too many)
if len(entries) > MAX_GW_PER_LEVEL:
max_gw_index = entries.index(max_entry)
if not max_entry['default']:
# Delete gateway at maximal distance (non-equidistant case)
del entries[max_gw_index]
else:
# max_distance == min_distance (equidistant case)
# So just delete any non-default gateway
next_gw_index = (max_gw_index + 1) % len(entries)
del entries[next_gw_index]
# In case somehow there were more than 1 too many gateways then do this again.
# If this were expected to happen often then we could do something more efficient for that case,
# such as sort the entries in order of increasing distance then removing all beyond maximum,
# but this is not expected to happen. We just have this check here to ensure correctness in case
# of this unexpected scenario where there is more than 1 gateway that needs to be removed
# (since this function should be called each time a gateway is added or removed).
if len(entries) > MAX_GW_PER_LEVEL:
print "WARNING: Recursively calling recalculate_default_gw_for_bucket; unexpected situation"
self.recalculate_default_gw_for_bucket(bucket)
def update_neighbors(self, neighbor_vid, distance):
if neighbor_vid not in self.neighbors:
self.neighbors[neighbor_vid] = {}
self.neighbors[neighbor_vid][distance] = time.time()
def print_routing_table(self):
print '\n----> Routing Table at :', self.vid, '|', self.dpid, ' <----'
for distance in range(1, self.L + 1):
if distance in self.routing_table:
for entry in self.routing_table[distance]:
print 'Bucket:', distance, \
'Port:', entry['port'], \
'Prefix:', entry['prefix'],\
'Gateway:', bin2str(entry['gateway'], self.L), \
'Next hop:', bin2str(entry['next_hop'], self.L), \
'Default:', entry['default']
else:
print 'Bucket:', distance, '--- E M P T Y ---'
print 'RDV STORE: ', self.rdv_store, "\n"
def remove_failed_gw(self, packet, gw=None):
if gw is None:
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
payload = int(payload, 2)
else:
payload = int(gw, 2)
to_be_deleted = {}
for level in self.routing_table:
to_be_deleted[level] = []
for idx in xrange(0, len(self.routing_table[level])):
entry = self.routing_table[level][idx]
if entry['gateway'] == payload or entry['next_hop'] == payload:
to_be_deleted[level].append(idx)
for level in to_be_deleted:
for index in to_be_deleted[level]:
del self.routing_table[level][index]
bucket_ = []
for level in self.routing_table:
if len(self.routing_table[level]) == 0:
bucket_.append(level)
for level in bucket_:
del self.routing_table[level]
return
def publish(self, bucket, k):
dst = get_rdv_id(k, self.vid)
packet = create_RDV_PUBLISH(bucket, self.vid, dst)
print 'Node:', self.vid, 'is publishing neighbor', bin2str(bucket['next_hop'], self.L), 'to rdv:', dst
return (packet, dst)
def withdraw(self, failedNode, RDV_level):
dst = get_rdv_id(RDV_level, self.vid)
if dst != failedNode:
packet = create_RDV_WITHDRAW(int(failedNode, 2), self.vid, '00')
print 'Node: ', self.vid, 'is withdrawing neighbor', failedNode, 'to rdv:', dst
return packet
# FIXME: Not used?
def withdraw_gw(self, failed_gw, vid, dst):
print "Creating GW_WITHDRAW packet"
packet = create_GW_WITHDRAW(failed_gw, vid, dst)
print self.vid, '- RDV gateway withdraw:', failed_gw, 'to dst:', dst
return packet
def query(self, k):
dst = get_rdv_id(k, self.vid)
packet = create_RDV_QUERY(k, self.vid, dst)
print 'Node:', self.vid, 'is querying to reach bucket:', k, 'to rdv:', dst
return (packet, dst)
def get_next_hop(self, packet):
dst_vid = get_dest(packet, self.L)
next_hop = ''
packet_type = get_operation(packet)
port = ''
while next_hop == '':
distance = delta(self.vid, dst_vid)
if distance == 0:
break
if distance in self.routing_table and len(self.routing_table[distance]) > 0:
for entry in self.routing_table[distance]:
if entry['default']:
next_hop = str(entry['next_hop'])
port = int(entry['port'])
break
if next_hop != '':
break
if (packet_type != RDV_PUBLISH) and (packet_type != RDV_QUERY):
break
print 'No next hop for destination: ', dst_vid, 'distance: ', distance
# flip the distance bit to
dst_vid = flip_bit(dst_vid, distance)
if next_hop == '':
print 'No route to destination', 'MyVID: ', self.vid, 'DEST: ', dst_vid
return ('', '')
return (next_hop, port)
# Adds an entry to rdv_store, and also ensures that there are no duplicates
def add_if_no_duplicate_rdv_entry(self, distance, new_entry):
for x in self.rdv_store[distance]:
if x[0] == new_entry[0] and x[1] == new_entry[1]:
return
self.rdv_store[distance].append(new_entry)
# Adds an entry to rdv_store, and also ensures that there are no duplicates
def add_if_no_duplicate_gw_entry(self, gw, new_entry):
for x in self.rdv_request_tracker[gw]:
if x == new_entry:
return
self.rdv_request_tracker[gw].append(new_entry)
def process_rdv_publish(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
next_hop = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
print "RDV_PUBLISH message received from: ", src_vid
distance = delta(self.vid, next_hop)
if distance not in self.rdv_store:
self.rdv_store[distance] = []
new_entry = [src_vid, next_hop]
self.add_if_no_duplicate_rdv_entry(distance, new_entry)
def process_rdv_query(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
k = int(payload, 2)
print "RDV_QUERY message received from: ", src_vid
# search in rdv store for the logically closest gateway to reach kth distance away neighbor
gw_str_list = self.find_gateways_in_rdv_store(k, src_vid)
# if found then form the reply packet and send to src_vid
if len(gw_str_list) < 1:
# No gateway found
print 'Node: ', self.vid, 'has no gateway for the rdv_query packet to reach bucket: ', k, 'for node: ', src_vid
return ''
gw_list = []
for gw_str in gw_str_list:
gw_list.append(int(gw_str,2))
# create a RDV_REPLY packet and send it
reply_packet = create_RDV_REPLY(gw_list, k, self.vid, src_vid)
# Keeps track of the Nodes that requests each Gateways at
# specific level
for gw_str in gw_str_list:
if gw_str not in self.rdv_request_tracker:
self.rdv_request_tracker[gw_str] = []
self.add_if_no_duplicate_gw_entry(gw_str, src_vid)
return reply_packet
def process_self_rdv_query(self, packet):
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
[k] = struct.unpack("!I", packet[24:28])
# search in rdv store for the logically closest gateway to reach kth distance away neighbor
gw_str_list = self.find_gateways_in_rdv_store(k, src_vid)
# if found then form the reply packet and send to src_vid
if len(gw_str_list) < 1:
# No gateway found
print 'Node:', self.vid, 'has no gateway for the rdv_query packet to reach bucket: ', k, 'for node: ', src_vid
return ''
for gw_str in gw_str_list:
if not k in self.routing_table:
self.routing_table[k] = []
next_hop, port = self.get_next_hop_rdv(gw_str)
if next_hop == '':
print 'No next_hop found for the gateway:', gw_str
print 'New routing information couldnt be added! '
return
# Destination Subtree-k
bucket_info = {
'prefix': get_prefix(self.vid, k),
'gateway': int(gw_str, 2),
'next_hop': int(next_hop, 2),
'port': port
}
self.routing_table[k].append(bucket_info)
self.recalculate_default_gw_for_bucket(k)
def find_gateways_in_rdv_store(self, k, src_vid):
gw_list = []
if k not in self.rdv_store:
return []
# Look through rdv store for next_hop entries
for t in self.rdv_store[k]:
gw_vid = t[0]
distance = delta(gw_vid, src_vid)
gw_list.append({'gw_vid': gw_vid, 'distance': distance})
if len(gw_list) < 1:
return []
# Sort the list of available gateways by distance (closest first)
gw_list.sort(key=lambda gw: gw['distance'])
# print "find_gateways_in_rdv_store found these gateways:", gw_list
# Truncate list so that it has at most MAX_GW_PER_RDV_REPLY entries
gw_list = gw_list[:MAX_GW_PER_RDV_REPLY]
# Remove the distance information from the list so it's a list of VIDs again instead of a list of dictionaries
gw_list = map(lambda x: x['gw_vid'], gw_list)
return gw_list
def process_rdv_reply(self, packet):
# Fill my routing table using this new information
[k] = struct.unpack("!I", packet[24:28])
gw_offset = 28
num_of_gw = (len(packet) - gw_offset)/4
gw_list = struct.unpack("!" + "I"*num_of_gw, packet[28:(28+4*num_of_gw)])
print "RDV_REPLY contained", num_of_gw, "gateway(s):", map(lambda s: bin2str(s, self.L), gw_list)
for gw in gw_list:
gw_str = bin2str(gw, self.L)
if not k in self.routing_table:
self.routing_table[k] = []
next_hop, port = self.get_next_hop_rdv(gw_str)
if next_hop == '':
print 'ERROR: no next_hop found for the gateway:', gw_str
print "New routing information couldn't be added!"
return
next_hop_int = int(next_hop, 2)
bucket_info = {
'prefix': get_prefix(self.vid, k),
'gateway': gw,
'next_hop': next_hop_int,
'port': port
}
self.routing_table[k].append(bucket_info)
self.recalculate_default_gw_for_bucket(k)
def get_next_hop_rdv(self, gw_str):
next_hop = ''
port = ''
distance = delta(self.vid, gw_str)
if distance in self.routing_table:
for entry in self.routing_table[distance]:
if entry['default']:
next_hop = bin2str(entry['next_hop'], self.L)
port = str(entry['port'])
return (next_hop, port)
# FIXME: Not used?
def process_rdv_withdraw(self, packet):
print "WARNING: process_rdv_withdraw called but implementation not verified yet"
src_vid = bin2str((struct.unpack("!I", packet[16:20]))[0], self.L)
payload = bin2str((struct.unpack("!I", packet[24:28]))[0], self.L)
print 'Node:', self.vid, 'has received process_rdv_withdraw from ', src_vid
gw = {}
print self.rdv_store
for level in self.rdv_store:
delete = []
for idx in range(0, len(self.rdv_store[level])):
entry = self.rdv_store[level][idx]
if (entry[0] == payload) or (entry[1] == payload):
delete.append(idx)
# Save the list of Removed Gateways and delete them from rdv Store
if not level in gw:
gw[level] = []
gw[level].append(entry[0]) # saves the removed GWs
for index in delete:
del self.rdv_store[level][index]
if self.vid != src_vid: # only need to update routing table if this came from someone else
self.remove_failed_gw(packet) # update the Routing Table
else:
print "I am the rdv point. My routing table is already updated."
return gw
# TODO: Dead code -- may be incorrect
def get_gw_list(self, next_hop):
print 'FIXME: get_gw_list should not be called yet -- implementation may not be correct'
gw_list = []
# calculate logical distance
print "Finding the gateways..."
entries = self.find_entries_with_neighbor_as_next_hop(next_hop)
for level in entries:
if level != 1 or level != -1:
bucket = entries[level]
# return gateway from routing_table with distance = bucket
gw = bin2str(self.routing_table[level][bucket]['gateway'], self.L)
gw_list.append(gw)
return gw_list
# Returns a dictionary that is like a copy of the routing table except:
# - There is exactly 1 entry for each bucket
# - If the next hop in a routing table entry matches this neighbor_vid
# then that entry is copied into this dictionary
# - Otherwise (e.g. no matching entry found for bucket/level) the corresponding entry is set to -1
# TODO: Dead code -- may be incorrect
def find_entries_with_neighbor_as_next_hop(self, neighbor_vid):
print 'FIXME: find_entries_with_neighbor_as_next_hop should not be called yet -- implementation may not be correct'
# Note: removed dead code from original implementation (may need to add back later when needed)
result = {}
for bucket in self.routing_table:
result[bucket] = -1
for entry in self.routing_table[bucket]:
next_hop = bin2str(self.routing_table[bucket][entry]['next_hop'], self.L)
if next_hop == neighbor_vid:
result[bucket] = entry
return result
|
"""
fs.expose.ftp
==============
Expose an FS object over FTP (via pyftpdlib).
This module provides the necessary interfaces to expose an FS object over
FTP, plugging into the infrastructure provided by the 'pyftpdlib' module.
"""
from __future__ import with_statement
import os
import stat as statinfo
import time
import threading
from pyftpdlib import ftpserver
from fs.base import flags_to_mode
from fs.path import *
from fs.errors import *
from fs.local_functools import wraps
from fs.filelike import StringIO
from fs.utils import isdir
from fs.osfs import OSFS
class FTPFS(ftpserver.AbstractedFS):
    """Bridge between a pyfs filesystem and pyftpdlib's AbstractedFS.

    pyftpdlib creates one instance per FTP session; many sessions may
    share the same underlying fs object.
    """
    def __init__(self, fs, root, cmd_channel):
        # fs: the pyfs filesystem to expose; root/cmd_channel are
        # pyftpdlib's standard AbstractedFS constructor arguments.
        self.fs = fs
        super(FTPFS, self).__init__(root, cmd_channel)

    def validpath(self, path):
        # All paths are valid; sandboxing is delegated to the fs layer.
        return True

    def open(self, path, mode):
        """Open and return a file-like object from the wrapped fs."""
        return self.fs.open(path, mode)

    def chdir(self, path):
        self._cwd = self.ftp2fs(path)

    def mkdir(self, path):
        """Create a directory via the wrapped fs.

        BUG FIX: the FS API method is makedir(), not createdir(); the
        previous unicode(path, sys.getfilesystemencoding()) conversion
        also raised NameError because sys was never imported.
        """
        self.fs.makedir(path)

    def listdir(self, path):
        # Encode names to UTF-8 byte strings for the FTP channel.
        return map(lambda x: x.encode('utf8'), self.fs.listdir(path))

    def rmdir(self, path):
        self.fs.removedir(path)

    def remove(self, path):
        self.fs.remove(path)

    def rename(self, src, dst):
        self.fs.rename(src, dst)

    def chmod(self, path, mode):
        # Permission bits are not supported through the generic fs layer.
        raise NotImplementedError()

    def stat(self, path):
        # TODO: stat needs to be handled using fs.getinfo() method, so
        # filesystems without a real system path can be served too.
        return super(FTPFS, self).stat(self.fs.getsyspath(path))

    def lstat(self, path):
        # No symlink support; lstat behaves exactly like stat.
        return self.stat(path)

    def isfile(self, path):
        return self.fs.isfile(path)

    def isdir(self, path):
        return self.fs.isdir(path)

    def getsize(self, path):
        return self.fs.getsize(path)

    def getmtime(self, path):
        # NOTE(review): assumes getinfo() returns an object with a .time
        # attribute -- fs.getinfo conventionally returns a dict; confirm.
        return self.fs.getinfo(path).time

    def realpath(self, path):
        # No symlinks, so every path is already "real".
        return path

    def lexists(self, path):
        return True
class FTPFSFactory(object):
    """
    Callable factory binding a shared fs instance to new FTPFS sessions.

    pyftpdlib instantiates a fresh abstracted-FS object for every FTP
    session; routing that construction through this factory lets all
    sessions serve one and the same fs.
    """
    def __init__(self, fs):
        """
        Remember the fs instance every produced FTPFS will serve.
        """
        self.fs = fs

    def __call__(self, root, cmd_channel):
        """
        pyftpdlib entry point: build an FTPFS for a new session, injecting
        the shared fs alongside pyftpdlib's own (root, cmd_channel) args.
        """
        return FTPFS(self.fs, root, cmd_channel)
class HomeFTPFS(FTPFS):
    """
    A file system which serves a user's home directory.
    """
    def __init__(self, root, cmd_channel):
        """
        Use the provided user's home directory to create an FTPFS that serves an OSFS
        rooted at the home directory.
        """
        # BUG FIX: the original called super(DemoFS, self), naming an
        # undefined class; Python 2 super() must name this class.
        super(HomeFTPFS, self).__init__(OSFS(root_path=root), '/', cmd_channel)
def serve_fs(fs, addr, port):
    """Serve the given FS object over anonymous FTP on (addr, port).

    Blocks forever in pyftpdlib's event loop.  The previous unused
    UnixAuthorizer import has been removed; anonymous access is
    configured via DummyAuthorizer below.
    """
    ftp_handler = ftpserver.FTPHandler
    ftp_handler.authorizer = ftpserver.DummyAuthorizer()
    ftp_handler.authorizer.add_anonymous('/')
    # Hand every new session an FTPFS wrapping the shared fs.
    ftp_handler.abstracted_fs = FTPFSFactory(fs)
    s = ftpserver.FTPServer((addr, port), ftp_handler)
    s.serve_forever()
def main():
    # NOTE(review): serve_fs expects an FS *instance* (it wraps its first
    # argument in FTPFSFactory), but HomeFTPFS is a class -- presumably
    # this should pass something like an OSFS of the home directory, or
    # assign HomeFTPFS as the abstracted_fs directly; confirm before use.
    serve_fs(HomeFTPFS, '127.0.0.1', 21)
# When called from the command-line, expose a DemoFS for testing purposes
if __name__ == "__main__":
    main()
Documentation, fixed makedir
"""
fs.expose.ftp
==============
Expose an FS object over FTP (via pyftpdlib).
This module provides the necessary interfaces to expose an FS object over
FTP, plugging into the infrastructure provided by the 'pyftpdlib' module.
To use this in combination with fsserve, do the following:
$ fsserve -t 'ftp' $HOME
The above will serve your home directory in read-only mode via anonymous FTP on the
loopback address.
"""
import os
import stat
from pyftpdlib import ftpserver
from fs.osfs import OSFS
class FTPFS(ftpserver.AbstractedFS):
    """
    The basic FTP Filesystem. This is a bridge between a pyfs filesystem and pyftpdlib's
    AbstractedFS. This class will cause the FTP server to service the given fs instance.
    """
    def __init__(self, fs, root, cmd_channel):
        # fs: the pyfs filesystem to expose; root/cmd_channel are
        # pyftpdlib's standard AbstractedFS constructor arguments.
        self.fs = fs
        super(FTPFS, self).__init__(root, cmd_channel)
    def validpath(self, path):
        # All paths are valid because we offload chrooting to pyfs.
        return True
    def open(self, path, mode):
        """Open and return a file-like object from the wrapped fs."""
        return self.fs.open(path, mode)
    def chdir(self, path):
        # Put the user into the requested directory, again, all paths
        # are valid.
        self._cwd = self.ftp2fs(path)
    def mkdir(self, path):
        """Create a directory via the wrapped fs."""
        self.fs.makedir(path)
    def listdir(self, path):
        """List a directory, encoding names to UTF-8 byte strings for FTP."""
        return map(lambda x: x.encode('utf8'), self.fs.listdir(path))
    def rmdir(self, path):
        """Remove a directory."""
        self.fs.removedir(path)
    def remove(self, path):
        """Remove a file."""
        self.fs.remove(path)
    def rename(self, src, dst):
        """Rename/move src to dst."""
        self.fs.rename(src, dst)
    def chmod(self, path, mode):
        # Permission bits are not supported through the generic fs layer.
        raise NotImplementedError()
    def stat(self, path):
        # TODO: stat needs to be handled using fs.getinfo() method.
        # Currently this requires the fs to expose a real system path.
        return super(FTPFS, self).stat(self.fs.getsyspath(path))
    def lstat(self, path):
        # No symlink handling: lstat behaves exactly like stat.
        return self.stat(path)
    def isfile(self, path):
        return self.fs.isfile(path)
    def isdir(self, path):
        return self.fs.isdir(path)
    def getsize(self, path):
        return self.fs.getsize(path)
    def getmtime(self, path):
        # NOTE(review): assumes getinfo() returns an object with a .time
        # attribute -- fs.getinfo conventionally returns a dict; confirm.
        return self.fs.getinfo(path).time
    def realpath(self, path):
        # No symlinks, so every path is already "real".
        return path
    def lexists(self, path):
        # NOTE(review): unconditionally True -- presumably acceptable for
        # the FTP commands that consult it, but verify.
        return True
class FTPFSFactory(object):
    """
    Callable factory binding a shared fs instance to new FTPFS sessions.

    pyftpdlib instantiates a fresh abstracted-FS object for every FTP
    session; routing that construction through this factory lets all
    sessions serve one and the same fs.
    """
    def __init__(self, fs):
        """
        Remember the fs instance every produced FTPFS will serve.
        """
        self.fs = fs

    def __call__(self, root, cmd_channel):
        """
        pyftpdlib entry point: build an FTPFS for a new session, injecting
        the shared fs alongside pyftpdlib's own (root, cmd_channel) args.
        """
        return FTPFS(self.fs, root, cmd_channel)
class HomeFTPFS(FTPFS):
    """
    A file system which serves a user's home directory.
    """
    def __init__(self, root, cmd_channel):
        """
        Use the provided user's home directory to create an FTPFS that serves an OSFS
        rooted at the home directory.
        """
        # BUG FIX: the original called super(DemoFS, self), naming an
        # undefined class; Python 2 super() must name this class.
        super(HomeFTPFS, self).__init__(OSFS(root_path=root), '/', cmd_channel)
def serve_fs(fs, addr, port):
    """
    Creates a basic anonymous FTP server serving the given FS on the given address/port
    combo. Blocks forever in pyftpdlib's event loop.
    """
    # The previously imported UnixAuthorizer was never used and has been
    # removed; anonymous access is configured via DummyAuthorizer below.
    ftp_handler = ftpserver.FTPHandler
    ftp_handler.authorizer = ftpserver.DummyAuthorizer()
    ftp_handler.authorizer.add_anonymous('/')
    # Hand every new session an FTPFS wrapping the shared fs.
    ftp_handler.abstracted_fs = FTPFSFactory(fs)
    s = ftpserver.FTPServer((addr, port), ftp_handler)
    s.serve_forever()
|
import os
# Version strings; empty here and presumably filled in by the editor
# plugin at load time -- confirm against the plugin bootstrap code.
__VERSION__ = ''
__PLUGIN_VERSION__ = ''
# Config settings
AUTH = {}
DEBUG = False
SOCK_DEBUG = False
SOCK_SINGLE_READ = False
EXPERT_MODE = False
ALERT_ON_MSG = True
LOG_TO_CONSOLE = False
# Default local workspace root: ~/floobits
BASE_DIR = os.path.expanduser(os.path.join('~', 'floobits'))
# Shared globals
DEFAULT_HOST = 'floobits.com'
DEFAULT_PORT = 3448
SECURE = True
ERROR_COUNT = 0
ERRORS_SENT = 0
# Don't spam us with error reports
MAX_ERROR_REPORTS = 3
# For people who have outbound ports blocked (schools and BigCos)
OUTBOUND_FILTER_PROXY_HOST = 'proxy.floobits.com'
OUTBOUND_FILTER_PROXY_PORT = 443
OUTBOUND_FILTERING = False
PROXY_PORT = 0  # Random port
SHARE_DIR = None
COLAB_DIR = ''
PROJECT_PATH = ''
WORKSPACE_WINDOW = None
PERMS = []
FOLLOW_MODE = False
FOLLOW_USERS = set()
SPLIT_MODE = False
AUTO_GENERATED_ACCOUNT = False
PLUGIN_PATH = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
# Main-loop tick interval -- presumably milliseconds; confirm in the
# code that consumes it.
TICK_TIME = 100
AGENT = None
IGNORE = None
VIEW_TO_HASH = {}
# Plain-text and JSON config file locations in the user's home directory.
FLOORC_PATH = os.path.expanduser(os.path.join('~', '.floorc'))
FLOORC_JSON_PATH = os.path.expanduser(os.path.join('~', '.floorc.json'))
Add optional flootty_safe key in floorc to override default behavior.
import os
# Version strings; empty here and presumably filled in by the editor
# plugin at load time -- confirm against the plugin bootstrap code.
__VERSION__ = ''
__PLUGIN_VERSION__ = ''
# Config settings
AUTH = {}
DEBUG = False
SOCK_DEBUG = False
SOCK_SINGLE_READ = False
EXPERT_MODE = False
# Default for the optional flootty_safe floorc key, which can override
# this behavior (per the accompanying change description).
FLOOTTY_SAFE = True
ALERT_ON_MSG = True
LOG_TO_CONSOLE = False
# Default local workspace root: ~/floobits
BASE_DIR = os.path.expanduser(os.path.join('~', 'floobits'))
# Shared globals
DEFAULT_HOST = 'floobits.com'
DEFAULT_PORT = 3448
SECURE = True
ERROR_COUNT = 0
ERRORS_SENT = 0
# Don't spam us with error reports
MAX_ERROR_REPORTS = 3
# For people who have outbound ports blocked (schools and BigCos)
OUTBOUND_FILTER_PROXY_HOST = 'proxy.floobits.com'
OUTBOUND_FILTER_PROXY_PORT = 443
OUTBOUND_FILTERING = False
PROXY_PORT = 0  # Random port
SHARE_DIR = None
COLAB_DIR = ''
PROJECT_PATH = ''
WORKSPACE_WINDOW = None
PERMS = []
FOLLOW_MODE = False
FOLLOW_USERS = set()
SPLIT_MODE = False
AUTO_GENERATED_ACCOUNT = False
PLUGIN_PATH = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
# Main-loop tick interval -- presumably milliseconds; confirm in the
# code that consumes it.
TICK_TIME = 100
AGENT = None
IGNORE = None
VIEW_TO_HASH = {}
# Plain-text and JSON config file locations in the user's home directory.
FLOORC_PATH = os.path.expanduser(os.path.join('~', '.floorc'))
FLOORC_JSON_PATH = os.path.expanduser(os.path.join('~', '.floorc.json'))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# CowBots -- Error detection bots for CKAN-of-Worms
# By: Emmanuel Raviart <emmanuel@raviart.com>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/cowbots
#
# This file is part of CowBots.
#
# CowBots is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CowBots is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Check CKAN-of-Worms datasets for errors in fields and send result to CKAN-of-Worms."""
import argparse
import ConfigParser
import json
import logging
import os
import re
import sys
import urllib2
import urlparse
from biryani1.baseconv import (
check,
cleanup_line,
empty_to_none,
function,
input_to_email,
make_input_to_url,
noop,
not_none,
pipe,
struct,
test,
test_equals,
test_greater_or_equal,
test_in,
test_isinstance,
test_none,
test_not_in,
uniform_sequence,
)
from biryani1.datetimeconv import (
date_to_iso8601_str,
datetime_to_iso8601_str,
iso8601_input_to_date,
iso8601_input_to_datetime,
)
from biryani1.jsonconv import (
make_input_to_json,
)
from biryani1.states import default_state
#import fedmsg
# Script name without extension; used as the logger name below.
app_name = os.path.splitext(os.path.basename(__file__))[0]
conf = None  # configuration dict, populated in main()
headers = None  # HTTP request headers (User-Agent), populated in main()
log = logging.getLogger(app_name)
# gettext-style marker: tags a string for translation without translating it.
N_ = lambda message: message
# Lowercase slug: only a-z, digits, "-" and "_".
name_re = re.compile(ur'[-_\da-z]+$')
# Lowercase hexadecimal UUID (8-4-4-4-12).
uuid_re = re.compile(ur'[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$')
# A 19xx/20xx year not embedded inside a longer digit run.
year_re = re.compile(ur'(^|[^\d])(19|20)\d\d([^\d]|$)')
# Level-1 Converters
# Validate an ISO-8601 date string and normalize it to canonical form.
cow_json_to_iso8601_date_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_date,
    date_to_iso8601_str,
    )
# Validate an ISO-8601 datetime string and normalize it to canonical form.
cow_json_to_iso8601_datetime_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_datetime,
    datetime_to_iso8601_str,
    )
# Markdown text field: just strip surrounding whitespace, empty -> None.
cow_json_to_markdown = pipe(
    test_isinstance(basestring),
    cleanup_line,
    )
# Machine-readable name (slug): lowercase, no leading/trailing or doubled
# "-"/"_", no surrounding spaces, and only [-_0-9a-z] characters.
cow_json_to_name = pipe(
    test_isinstance(basestring),
    test(lambda name: name == name.strip(), error = N_(u'String begins or ends with spaces')),
    test(lambda name: name == name.strip('-'), error = N_(u'String begins or ends with "-"')),
    test(lambda name: name == name.strip('_'), error = N_(u'String begins or ends with "_"')),
    test(lambda name: '--' not in name, error = N_(u'String contains duplicate "-"')),
    test(lambda name: '__' not in name, error = N_(u'String contains duplicate "_"')),
    test(lambda name: name.islower(), error = N_(u'String must contain only lowercase characters')),
    test(name_re.match, error = N_(u'String must contain only "a"-"z", "-" & "_"')),
    )
# Human-readable title: no surrounding spaces, non-empty, and not
# starting with a lowercase character.
cow_json_to_title = pipe(
    test_isinstance(basestring),
    test(lambda title: title == title.strip(), error = N_(u'String begins or ends with spaces')),
    empty_to_none,
    test(lambda title: not title[0].islower(), error = N_(u'String must begin with an uppercase character')),
    )
# Lowercase hexadecimal UUID string.
cow_json_to_uuid = pipe(
    test_isinstance(basestring),
    test(uuid_re.match, error = N_(u'Invalid ID')),
    )
# Date-like string, normalized through the ISO-8601 date converters.
# NOTE(review): identical to cow_json_to_iso8601_date_str despite the
# name suggesting year/month precision is also accepted -- confirm.
cow_json_to_year_or_month_or_day_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_date,
    date_to_iso8601_str,
    )
# Parse a CKAN-of-Worms API response envelope and extract its "value".
cow_response_to_value = pipe(
    make_input_to_json(),
    not_none,
    test_isinstance(dict),
    struct(
        dict(
            apiVersion = pipe(
                test_equals('1.0'),
                not_none,
                ),
            context = noop,
            method = pipe(
                test_isinstance(basestring),
                not_none,
                ),
            params = test_isinstance(dict),
            url = pipe(
                make_input_to_url(full = True),
                not_none,
                ),
            value = noop,
            ),
        ),
    function(lambda response: response['value']),
    )
# Level-2 Converters
# Dataset JSON: require valid draft_id and id UUIDs, pass every other
# field through unchanged.
cow_json_to_dataset = pipe(
    test_isinstance(dict),
    struct(
        dict(
            draft_id = pipe(
                cow_json_to_uuid,
                not_none,
                ),
            id = pipe(
                cow_json_to_uuid,
                not_none,
                ),
            ),
        default = noop,
        ),
    )
# A JSON list of non-null UUID strings.
cow_json_to_ids = pipe(
    test_isinstance(list),
    uniform_sequence(
        pipe(
            cow_json_to_uuid,
            not_none,
            ),
        ),
    )
# Functions
def main():
    """Validate every CKAN-of-Worms dataset and send back the errors found.

    Reads the configuration file given on the command line, fetches the list
    of dataset IDs from the CKAN-of-Worms API, re-validates each dataset's
    fields with the module's converters, and POSTs the resulting errors to
    ``/api/1/datasets/<id>/errors`` whenever they differ from the errors
    already recorded for this bot (``app_name``).  Returns 0 on success.
    """
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
    config_parser = ConfigParser.SafeConfigParser(dict(here = os.path.dirname(args.config)))
    config_parser.read(args.config)
    global conf
    # Validate the [CowBots-Check-Datasets] section; unknown keys are dropped.
    conf = check(pipe(
        test_isinstance(dict),
        struct(
            {
                'ckan_of_worms.api_key': pipe(
                    cleanup_line,
                    not_none,
                    ),
                'ckan_of_worms.site_url': pipe(
                    make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
                        full = True),
                    not_none,
                    ),
                'user_agent': pipe(
                    cleanup_line,
                    not_none,
                    ),
                },
            default = 'drop',
            ),
        not_none,
        ))(dict(config_parser.items('CowBots-Check-Datasets')), default_state)
    # Disabled fedmsg-based incremental mode (kept for reference):
#    fedmsg_conf = check(struct(
#        dict(
#            environment = pipe(
#                empty_to_none,
#                test_in(['dev', 'prod', 'stg']),
#                ),
#            modname = pipe(
#                empty_to_none,
#                test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
#                default('ckan_of_worms'),
#                ),
##            name = pipe(
##                empty_to_none,
##                default('ckan_of_worms.{}'.format(hostname)),
##                ),
#            topic_prefix = pipe(
#                empty_to_none,
#                test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
#                ),
#            ),
#        default = 'drop',
#        ))(dict(config_parser.items('fedmsg')))
    global headers
    headers = {
        'User-Agent': conf['user_agent'],
        }
#    # Read in the config from /etc/fedmsg.d/.
#    fedmsg_config = fedmsg.config.load_config([], None)
#    # Disable a warning about not sending.  We know.  We only want to tail.
#    fedmsg_config['mute'] = True
#    # Disable timing out so that we can tail forever.  This is deprecated
#    # and will disappear in future versions.
#    fedmsg_config['timeout'] = 0
#    # For the time being, don't require message to be signed.
#    fedmsg_config['validate_signatures'] = False
#    for key, value in fedmsg_conf.iteritems():
#        if value is not None:
#            fedmsg_config[key] = value
#    expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment'])
#    for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
#        if not topic.startswith(expected_topic_prefix):
#            log.debug(u'Ignoring message: {}, {}'.format(topic, name))
#            continue
#        kind, action = topic[len(expected_topic_prefix):].split('.')
#        if kind == 'dataset':
#            if action in ('create', 'update'):
#                while len(pool) >= args.thread_count:
#                    time.sleep(0.1)
#                pool.add(thread.start_new_thread(check_dataset_urls, (message['msg'],)))
#            else:
#                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
#        else:
#            log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    # Batch mode: fetch the full list of dataset IDs, then each dataset.
    request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/datasets'), headers = headers)
    response = urllib2.urlopen(request)
    datasets_id = check(pipe(
        cow_response_to_value,
        cow_json_to_ids,
        not_none,
        ))(response.read(), state = default_state)
    for dataset_id in datasets_id:
        request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
            'api/1/datasets/{}'.format(dataset_id)), headers = headers)
        response = urllib2.urlopen(request)
        dataset = check(pipe(
            cow_response_to_value,
            cow_json_to_dataset,
            not_none,
            ))(response.read(), state = default_state)
        # Field-by-field validation; ``errors`` is None when everything is
        # valid, otherwise a dict keyed like the dataset.
        verified_dataset, errors = struct(
            dict(
                author = cow_json_to_title,
                author_email = input_to_email,
                draft_id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                errors = test_isinstance(dict),
                extras = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    key = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    value = pipe(
                                        test_isinstance(basestring),
                                        cleanup_line,
                                        not_none,
                                        ),
                                    ),
                                default = noop,
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    ),
                groups = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    name = pipe(
                                        cow_json_to_name,
                                        not_none,
                                        ),
                                    title = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    ),
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                isopen = pipe(
                    test_isinstance(bool),
                    test_equals(True),
                    not_none,
                    ),
                license_id = pipe(
                    test_isinstance(basestring),
                    test_in([
                        'cc-by',  # Creative Commons Attribution
                        'cc-by-sa',  # Creative Commons Attribution Share-Alike
                        'cc-zero',  # Creative Commons CCZero
                        'fr-lo',  # Licence Ouverte / Open Licence
                        'odc-by',  # Open Data Commons Attribution License
                        'odc-odbl',  # Open Data Commons Open Database License (ODbL)
                        'odc-pddl',  # Open Data Commons Public Domain Dedication and Licence (PDDL)
                        'other-at',  # Other (Attribution)
                        'other-open',  # Other (Open)
                        'other-pd',  # Other (Public Domain)
                        ]),
                    not_none,
                    ),
                license_title = pipe(
                    test_isinstance(basestring),
                    cleanup_line,
                    not_none,
                    ),
                license_url = pipe(
                    test_isinstance(basestring),
                    make_input_to_url(full = True),
                    not_none,
                    ),
                maintainer = cow_json_to_title,
                maintainer_email = input_to_email,
                metadata_created = pipe(
                    cow_json_to_iso8601_date_str,
                    not_none,
                    ),
                metadata_modified = pipe(
                    cow_json_to_iso8601_date_str,
                    not_none,
                    ),
                name = pipe(
                    cow_json_to_name,
                    not_none,
                    ),
                num_resources = pipe(
                    test_isinstance(int),
                    test_greater_or_equal(0),
                    ),
                num_tags = pipe(
                    test_isinstance(int),
                    test_greater_or_equal(0),
                    ),
                notes = pipe(
                    cow_json_to_markdown,
                    not_none,
                    ),
                organization = test_isinstance(dict),
                owner_org = cow_json_to_uuid,
                private = pipe(
                    test_isinstance(bool),
                    test_equals(False),
                    ),
                resources = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    created = pipe(
                                        cow_json_to_iso8601_date_str,
                                        not_none,
                                        ),
                                    description = pipe(
                                        cow_json_to_markdown,
                                        not_none,
                                        ),
                                    format = pipe(
                                        test_isinstance(basestring),
                                        test(lambda format: format == format.lower(),
                                            error = N_(u'Format must contain only lowercase characters')),
                                        test_not_in(['xlsx'], error = N_(u'Invalid format; use "xls" instead')),
#                                        test_in([
#                                            'XLS',
#                                            ]),
                                        not_none,
                                        ),
                                    id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    last_modified = cow_json_to_iso8601_date_str,
                                    name = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    position = pipe(
                                        test_isinstance(int),
                                        test_greater_or_equal(0),
                                        not_none,
                                        ),
                                    resource_group_id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    revision_id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    revision_timestamp = pipe(
                                        cow_json_to_iso8601_datetime_str,
                                        not_none,
                                        ),
                                    state = pipe(
                                        test_isinstance(basestring),
                                        test_equals('active'),
                                        ),
                                    tracking_summary = pipe(
                                        test_isinstance(dict),
                                        struct(
                                            dict(
                                                recent = pipe(
                                                    test_isinstance(int),
                                                    test_greater_or_equal(0),
                                                    not_none,
                                                    ),
                                                total = pipe(
                                                    test_isinstance(int),
                                                    test_greater_or_equal(0),
                                                    not_none,
                                                    ),
                                                ),
                                            ),
                                        not_none,
                                        ),
                                    url = pipe(
                                        test_isinstance(basestring),
                                        make_input_to_url(full = True),
                                        not_none,
                                        ),
                                    ),
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
#                    not_none,
                    ),
                revision_id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                revision_timestamp = pipe(
                    cow_json_to_iso8601_datetime_str,
                    not_none,
                    ),
                state = pipe(
                    test_isinstance(basestring),
                    test_equals('active'),
                    ),
                supplier = test_isinstance(dict),
                supplier_id = cow_json_to_uuid,
                tags = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    name = pipe(
                                        cow_json_to_name,
                                        not_none,
                                        ),
                                    ),
                                default = noop,
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                temporal_coverage_from = pipe(
                    cow_json_to_year_or_month_or_day_str,
                    not_none,
                    ),
                temporal_coverage_to = pipe(
                    cow_json_to_year_or_month_or_day_str,
                    not_none,
                    ),
                # Comma-separated list of "<TerritoryType>/<territory-code>".
                territorial_coverage = pipe(
                    test_isinstance(basestring),
                    function(lambda value: value.split(',')),
                    uniform_sequence(
                        pipe(
                            empty_to_none,
                            test(lambda value: value.count('/') == 1, error = N_(u'Invalid territory')),
                            function(lambda value: value.split('/')),
                            struct(
                                [
                                    pipe(
                                        empty_to_none,
                                        test_in(
                                            [
                                                u'ArrondissementOfFrance',
                                                u'AssociatedCommuneOfFrance',
                                                u'CantonalFractionOfCommuneOfFrance',
                                                u'CantonCityOfFrance',
                                                u'CantonOfFrance',
                                                u'CatchmentAreaOfFrance',
                                                u'CommuneOfFrance',
                                                u'Country',
                                                u'DepartmentOfFrance',
                                                u'EmploymentAreaOfFrance',
                                                u'IntercommunalityOfFrance',
                                                u'InternationalOrganization',
                                                u'JusticeAreaOfFrance',
                                                u'MetropoleOfCountry',
                                                u'Mountain',
                                                u'OverseasCollectivityOfFrance',
                                                u'OverseasOfCountry',
                                                u'PaysOfFrance',
                                                u'RegionalNatureParkOfFrance',
                                                u'RegionOfFrance',
                                                u'UrbanAreaOfFrance',
                                                u'UrbanTransportsPerimeterOfFrance',
                                                u'UrbanUnitOfFrance',
                                                ],
                                            error = N_(u'Invalid territory type'),
                                            ),
                                        not_none
                                        ),
                                    pipe(
                                        empty_to_none,
                                        not_none
                                        ),
                                    ],
                                ),
                            not_none
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                territorial_coverage_granularity = pipe(
                    test_isinstance(basestring),
                    test_in([
                        u'canton',
                        u'commune',
                        u'department',
                        u'epci',
                        u'france',
                        u'iris',
                        u'poi',
                        u'region',
                        ]),
                    not_none,
                    ),
                title = pipe(
                    cow_json_to_title,
                    test(lambda title: len(title) >= 8, error = N_(u'String is too short')),
                    test(lambda title: year_re.search(title) is None, error = N_(u'String contains a year')),
                    not_none,
                    ),
                tracking_summary = test_isinstance(dict),
                type = pipe(
                    test_isinstance(basestring),
                    test_equals('dataset'),
                    ),
                url = pipe(
                    test_isinstance(basestring),
                    make_input_to_url(full = True),
                    test_none(),
                    ),
                version = pipe(
                    test_isinstance(basestring),
                    cleanup_line,
                    test_none(),
                    ),
                ),
            )(dataset, state = default_state)
        # Only POST when the errors differ from what is already recorded for
        # this bot on the dataset.
        if ((dataset.get('errors') or {}).get(app_name) or {}).get('error') != errors:
            request_headers = headers.copy()
            request_headers['Content-Type'] = 'application/json'
            request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
                'api/1/datasets/{}/errors'.format(dataset['id'])), headers = request_headers)
            try:
                response = urllib2.urlopen(request, json.dumps(dict(
                    api_key = conf['ckan_of_worms.api_key'],
                    author = app_name,
                    draft_id = dataset['draft_id'],
                    value = errors,
                    )))
            except urllib2.HTTPError as response:
                # ``as response`` deliberately rebinds the name: HTTPError is a
                # file-like response whose body is read below.
                if response.code == 409:
                    # The dataset has been modified: don't submit errors,
                    # because we will be notified of the new dataset version.
                    log.info(u'Dataset "{}" has been modified. Errors are ignored.'.format(dataset['name']))
                    # NOTE(review): ``return`` aborts the whole run here, skipping
                    # every remaining dataset; ``continue`` looks intended — TODO confirm.
                    return
                log.error(u'An error occured while setting dataset "{}" errors: {}'.format(dataset['name'], errors))
                response_text = response.read()
                try:
                    response_dict = json.loads(response_text)
                except ValueError:
                    log.error(response_text)
                    raise
                for key, value in response_dict.iteritems():
                    print '{} = {}'.format(key, value)
                raise
            else:
                assert response.code == 200
                check(cow_response_to_value)(response.read(), state = default_state)
    return 0


if __name__ == '__main__':
    sys.exit(main())
Add verification of related items in datasets.
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# CowBots -- Error detection bots for CKAN-of-Worms
# By: Emmanuel Raviart <emmanuel@raviart.com>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/cowbots
#
# This file is part of CowBots.
#
# CowBots is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CowBots is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Check CKAN-of-Worms datasets for errors in fields and send result to CKAN-of-Worms."""
import argparse
import ConfigParser
import json
import logging
import os
import re
import sys
import urllib2
import urlparse
from biryani1.baseconv import (
check,
cleanup_line,
empty_to_none,
function,
input_to_email,
make_input_to_url,
noop,
not_none,
pipe,
struct,
test,
test_equals,
test_greater_or_equal,
test_in,
test_isinstance,
test_none,
test_not_in,
uniform_sequence,
)
from biryani1.datetimeconv import (
date_to_iso8601_str,
datetime_to_iso8601_str,
iso8601_input_to_date,
iso8601_input_to_datetime,
)
from biryani1.jsonconv import (
make_input_to_json,
)
from biryani1.states import default_state
#import fedmsg
# Module globals.  NOTE: Python 2 code (``ur''`` literals, ``basestring``).
app_name = os.path.splitext(os.path.basename(__file__))[0]  # bot name, derived from the script file name
conf = None  # configuration dict, filled by main()
headers = None  # common HTTP headers (User-Agent), filled by main()
log = logging.getLogger(app_name)
N_ = lambda message: message  # marks strings for later translation (gettext convention)
name_re = re.compile(ur'[-_\da-z]+$')  # allowed characters of a CKAN slug
uuid_re = re.compile(ur'[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$')  # dashed lowercase UUID
year_re = re.compile(ur'(^|[^\d])(19|20)\d\d([^\d]|$)')  # detects a 19xx/20xx year inside a title
# Level-1 Converters
# --- Level-1 converters: validate individual JSON field values -------------
# Each converter is a biryani1 ``pipe`` whose elementary converters apply
# left to right; ``N_`` only marks error messages for translation.

# ISO 8601 date string, parsed then normalized back to canonical form.
cow_json_to_iso8601_date_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_date,
    date_to_iso8601_str,
    )

# ISO 8601 datetime string, parsed then normalized back to canonical form.
cow_json_to_iso8601_datetime_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_datetime,
    datetime_to_iso8601_str,
    )

# Markdown text: must be a string; surrounding whitespace is stripped and the
# empty string becomes None.
cow_json_to_markdown = pipe(
    test_isinstance(basestring),
    cleanup_line,
    )

# Slug-like CKAN name: lowercase "a"-"z", digits, "-" and "_" only, with no
# leading/trailing or doubled separators.
cow_json_to_name = pipe(
    test_isinstance(basestring),
    test(lambda name: name == name.strip(), error = N_(u'String begins or ends with spaces')),
    test(lambda name: name == name.strip('-'), error = N_(u'String begins or ends with "-"')),
    test(lambda name: name == name.strip('_'), error = N_(u'String begins or ends with "_"')),
    test(lambda name: '--' not in name, error = N_(u'String contains duplicate "-"')),
    test(lambda name: '__' not in name, error = N_(u'String contains duplicate "_"')),
    test(lambda name: name.islower(), error = N_(u'String must contain only lowercase characters')),
    test(name_re.match, error = N_(u'String must contain only "a"-"z", "-" & "_"')),
    )

# Human-readable title: no surrounding spaces, first character must not be
# lowercase (empty titles are converted to None before the first-char test).
cow_json_to_title = pipe(
    test_isinstance(basestring),
    test(lambda title: title == title.strip(), error = N_(u'String begins or ends with spaces')),
    empty_to_none,
    test(lambda title: not title[0].islower(), error = N_(u'String must begin with an uppercase character')),
    )

# Dashed lowercase hexadecimal UUID, checked against the module-level uuid_re.
cow_json_to_uuid = pipe(
    test_isinstance(basestring),
    test(uuid_re.match, error = N_(u'Invalid ID')),
    )

# ISO 8601 date (possibly partial, e.g. only a year or year-month), parsed
# then normalized back to an ISO 8601 date string.
cow_json_to_year_or_month_or_day_str = pipe(
    test_isinstance(basestring),
    iso8601_input_to_date,
    date_to_iso8601_str,
    )

# Parse a CKAN-of-Worms API envelope and extract its "value" payload.
cow_response_to_value = pipe(
    make_input_to_json(),
    not_none,
    test_isinstance(dict),
    struct(
        dict(
            apiVersion = pipe(
                test_equals('1.0'),
                not_none,
                ),
            context = noop,
            method = pipe(
                test_isinstance(basestring),
                not_none,
                ),
            params = test_isinstance(dict),
            url = pipe(
                make_input_to_url(full = True),
                not_none,
                ),
            value = noop,
            ),
        ),
    function(lambda response: response['value']),
    )

# Level-2 Converters

# Minimal dataset check: only draft_id & id are validated; every other key is
# passed through unchanged (default = noop).
cow_json_to_dataset = pipe(
    test_isinstance(dict),
    struct(
        dict(
            draft_id = pipe(
                cow_json_to_uuid,
                not_none,
                ),
            id = pipe(
                cow_json_to_uuid,
                not_none,
                ),
            ),
        default = noop,
        ),
    )

# List of non-null UUIDs (the /api/1/datasets listing).
cow_json_to_ids = pipe(
    test_isinstance(list),
    uniform_sequence(
        pipe(
            cow_json_to_uuid,
            not_none,
            ),
        ),
    )
# Functions
def main():
    """Validate every CKAN-of-Worms dataset (including related items) and send back the errors found.

    Reads the configuration file given on the command line, fetches the list
    of dataset IDs from the CKAN-of-Worms API, re-validates each dataset's
    fields with the module's converters, and POSTs the resulting errors to
    ``/api/1/datasets/<id>/errors`` whenever they differ from the errors
    already recorded for this bot (``app_name``).  Returns 0 on success.
    """
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
    config_parser = ConfigParser.SafeConfigParser(dict(here = os.path.dirname(args.config)))
    config_parser.read(args.config)
    global conf
    # Validate the [CowBots-Check-Datasets] section; unknown keys are dropped.
    conf = check(pipe(
        test_isinstance(dict),
        struct(
            {
                'ckan_of_worms.api_key': pipe(
                    cleanup_line,
                    not_none,
                    ),
                'ckan_of_worms.site_url': pipe(
                    make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
                        full = True),
                    not_none,
                    ),
                'user_agent': pipe(
                    cleanup_line,
                    not_none,
                    ),
                },
            default = 'drop',
            ),
        not_none,
        ))(dict(config_parser.items('CowBots-Check-Datasets')), default_state)
    # Disabled fedmsg-based incremental mode (kept for reference):
#    fedmsg_conf = check(struct(
#        dict(
#            environment = pipe(
#                empty_to_none,
#                test_in(['dev', 'prod', 'stg']),
#                ),
#            modname = pipe(
#                empty_to_none,
#                test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
#                default('ckan_of_worms'),
#                ),
##            name = pipe(
##                empty_to_none,
##                default('ckan_of_worms.{}'.format(hostname)),
##                ),
#            topic_prefix = pipe(
#                empty_to_none,
#                test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
#                ),
#            ),
#        default = 'drop',
#        ))(dict(config_parser.items('fedmsg')))
    global headers
    headers = {
        'User-Agent': conf['user_agent'],
        }
#    # Read in the config from /etc/fedmsg.d/.
#    fedmsg_config = fedmsg.config.load_config([], None)
#    # Disable a warning about not sending.  We know.  We only want to tail.
#    fedmsg_config['mute'] = True
#    # Disable timing out so that we can tail forever.  This is deprecated
#    # and will disappear in future versions.
#    fedmsg_config['timeout'] = 0
#    # For the time being, don't require message to be signed.
#    fedmsg_config['validate_signatures'] = False
#    for key, value in fedmsg_conf.iteritems():
#        if value is not None:
#            fedmsg_config[key] = value
#    expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment'])
#    for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
#        if not topic.startswith(expected_topic_prefix):
#            log.debug(u'Ignoring message: {}, {}'.format(topic, name))
#            continue
#        kind, action = topic[len(expected_topic_prefix):].split('.')
#        if kind == 'dataset':
#            if action in ('create', 'update'):
#                while len(pool) >= args.thread_count:
#                    time.sleep(0.1)
#                pool.add(thread.start_new_thread(check_dataset_urls, (message['msg'],)))
#            else:
#                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
#        else:
#            log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    # Batch mode: fetch the full list of dataset IDs, then each dataset.
    request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/datasets'), headers = headers)
    response = urllib2.urlopen(request)
    datasets_id = check(pipe(
        cow_response_to_value,
        cow_json_to_ids,
        not_none,
        ))(response.read(), state = default_state)
    for dataset_id in datasets_id:
        request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
            'api/1/datasets/{}'.format(dataset_id)), headers = headers)
        response = urllib2.urlopen(request)
        dataset = check(pipe(
            cow_response_to_value,
            cow_json_to_dataset,
            not_none,
            ))(response.read(), state = default_state)
        # Field-by-field validation; ``errors`` is None when everything is
        # valid, otherwise a dict keyed like the dataset.
        verified_dataset, errors = struct(
            dict(
                author = cow_json_to_title,
                author_email = input_to_email,
                draft_id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                errors = test_isinstance(dict),
                extras = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    key = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    value = pipe(
                                        test_isinstance(basestring),
                                        cleanup_line,
                                        not_none,
                                        ),
                                    ),
                                default = noop,
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    ),
                groups = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    name = pipe(
                                        cow_json_to_name,
                                        not_none,
                                        ),
                                    title = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    ),
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                isopen = pipe(
                    test_isinstance(bool),
                    test_equals(True),
                    not_none,
                    ),
                license_id = pipe(
                    test_isinstance(basestring),
                    test_in([
                        'cc-by',  # Creative Commons Attribution
                        'cc-by-sa',  # Creative Commons Attribution Share-Alike
                        'cc-zero',  # Creative Commons CCZero
                        'fr-lo',  # Licence Ouverte / Open Licence
                        'odc-by',  # Open Data Commons Attribution License
                        'odc-odbl',  # Open Data Commons Open Database License (ODbL)
                        'odc-pddl',  # Open Data Commons Public Domain Dedication and Licence (PDDL)
                        'other-at',  # Other (Attribution)
                        'other-open',  # Other (Open)
                        'other-pd',  # Other (Public Domain)
                        ]),
                    not_none,
                    ),
                license_title = pipe(
                    test_isinstance(basestring),
                    cleanup_line,
                    not_none,
                    ),
                license_url = pipe(
                    test_isinstance(basestring),
                    make_input_to_url(full = True),
                    not_none,
                    ),
                maintainer = cow_json_to_title,
                maintainer_email = input_to_email,
                metadata_created = pipe(
                    cow_json_to_iso8601_date_str,
                    not_none,
                    ),
                metadata_modified = pipe(
                    cow_json_to_iso8601_date_str,
                    not_none,
                    ),
                name = pipe(
                    cow_json_to_name,
                    not_none,
                    ),
                num_resources = pipe(
                    test_isinstance(int),
                    test_greater_or_equal(0),
                    ),
                num_tags = pipe(
                    test_isinstance(int),
                    test_greater_or_equal(0),
                    ),
                notes = pipe(
                    cow_json_to_markdown,
                    not_none,
                    ),
                organization = test_isinstance(dict),
                owner_org = cow_json_to_uuid,
                private = pipe(
                    test_isinstance(bool),
                    test_equals(False),
                    ),
                # Related items (applications, visualizations, articles…)
                # attached to the dataset.
                related = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    created = pipe(
                                        cow_json_to_iso8601_datetime_str,
                                        not_none,
                                        ),
                                    description = pipe(
                                        cow_json_to_markdown,
                                        not_none,
                                        ),
                                    featured = pipe(
                                        test_isinstance(bool),
                                        test_equals(False),
                                        not_none,
                                        ),
                                    id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    image_url = pipe(
                                        test_isinstance(basestring),
                                        make_input_to_url(full = True),
                                        not_none,
                                        ),
                                    owner_id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    title = pipe(
                                        cow_json_to_title,
                                        test(lambda title: len(title) >= 8, error = N_(u'String is too short')),
                                        not_none,
                                        ),
                                    type = pipe(
                                        test_isinstance(basestring),
                                        cleanup_line,
                                        test_in([
                                            u'api',
                                            u'application',
                                            u'idea',
                                            u'news_article',
                                            u'paper',
                                            u'post',
                                            u'visualization',
                                            ]),
                                        ),
                                    url = pipe(
                                        test_isinstance(basestring),
                                        make_input_to_url(full = True),
                                        not_none,
                                        ),
                                    view_count = pipe(
                                        test_isinstance(int),
                                        test_greater_or_equal(0),
                                        not_none,
                                        ),
                                    ),
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    ),
                resources = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    created = pipe(
                                        cow_json_to_iso8601_date_str,
                                        not_none,
                                        ),
                                    description = pipe(
                                        cow_json_to_markdown,
                                        not_none,
                                        ),
                                    format = pipe(
                                        test_isinstance(basestring),
                                        test(lambda format: format == format.lower(),
                                            error = N_(u'Format must contain only lowercase characters')),
                                        test_not_in(['xlsx'], error = N_(u'Invalid format; use "xls" instead')),
#                                        test_in([
#                                            'XLS',
#                                            ]),
                                        not_none,
                                        ),
                                    id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    last_modified = cow_json_to_iso8601_date_str,
                                    name = pipe(
                                        cow_json_to_title,
                                        not_none,
                                        ),
                                    position = pipe(
                                        test_isinstance(int),
                                        test_greater_or_equal(0),
                                        not_none,
                                        ),
                                    resource_group_id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    revision_id = pipe(
                                        cow_json_to_uuid,
                                        not_none,
                                        ),
                                    revision_timestamp = pipe(
                                        cow_json_to_iso8601_datetime_str,
                                        not_none,
                                        ),
                                    state = pipe(
                                        test_isinstance(basestring),
                                        test_equals('active'),
                                        ),
                                    tracking_summary = pipe(
                                        test_isinstance(dict),
                                        struct(
                                            dict(
                                                recent = pipe(
                                                    test_isinstance(int),
                                                    test_greater_or_equal(0),
                                                    not_none,
                                                    ),
                                                total = pipe(
                                                    test_isinstance(int),
                                                    test_greater_or_equal(0),
                                                    not_none,
                                                    ),
                                                ),
                                            ),
                                        not_none,
                                        ),
                                    url = pipe(
                                        test_isinstance(basestring),
                                        make_input_to_url(full = True),
                                        not_none,
                                        ),
                                    ),
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
#                    not_none,
                    ),
                revision_id = pipe(
                    cow_json_to_uuid,
                    not_none,
                    ),
                revision_timestamp = pipe(
                    cow_json_to_iso8601_datetime_str,
                    not_none,
                    ),
                state = pipe(
                    test_isinstance(basestring),
                    test_equals('active'),
                    ),
                supplier = test_isinstance(dict),
                supplier_id = cow_json_to_uuid,
                tags = pipe(
                    test_isinstance(list),
                    uniform_sequence(
                        pipe(
                            test_isinstance(dict),
                            struct(
                                dict(
                                    name = pipe(
                                        cow_json_to_name,
                                        not_none,
                                        ),
                                    ),
                                default = noop,
                                ),
                            not_none,
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                temporal_coverage_from = pipe(
                    cow_json_to_year_or_month_or_day_str,
                    not_none,
                    ),
                temporal_coverage_to = pipe(
                    cow_json_to_year_or_month_or_day_str,
                    not_none,
                    ),
                # Comma-separated list of "<TerritoryType>/<territory-code>".
                territorial_coverage = pipe(
                    test_isinstance(basestring),
                    function(lambda value: value.split(',')),
                    uniform_sequence(
                        pipe(
                            empty_to_none,
                            test(lambda value: value.count('/') == 1, error = N_(u'Invalid territory')),
                            function(lambda value: value.split('/')),
                            struct(
                                [
                                    pipe(
                                        empty_to_none,
                                        test_in(
                                            [
                                                u'ArrondissementOfFrance',
                                                u'AssociatedCommuneOfFrance',
                                                u'CantonalFractionOfCommuneOfFrance',
                                                u'CantonCityOfFrance',
                                                u'CantonOfFrance',
                                                u'CatchmentAreaOfFrance',
                                                u'CommuneOfFrance',
                                                u'Country',
                                                u'DepartmentOfFrance',
                                                u'EmploymentAreaOfFrance',
                                                u'IntercommunalityOfFrance',
                                                u'InternationalOrganization',
                                                u'JusticeAreaOfFrance',
                                                u'MetropoleOfCountry',
                                                u'Mountain',
                                                u'OverseasCollectivityOfFrance',
                                                u'OverseasOfCountry',
                                                u'PaysOfFrance',
                                                u'RegionalNatureParkOfFrance',
                                                u'RegionOfFrance',
                                                u'UrbanAreaOfFrance',
                                                u'UrbanTransportsPerimeterOfFrance',
                                                u'UrbanUnitOfFrance',
                                                ],
                                            error = N_(u'Invalid territory type'),
                                            ),
                                        not_none
                                        ),
                                    pipe(
                                        empty_to_none,
                                        not_none
                                        ),
                                    ],
                                ),
                            not_none
                            ),
                        ),
                    empty_to_none,
                    not_none,
                    ),
                territorial_coverage_granularity = pipe(
                    test_isinstance(basestring),
                    test_in([
                        u'canton',
                        u'commune',
                        u'department',
                        u'epci',
                        u'france',
                        u'iris',
                        u'poi',
                        u'region',
                        ]),
                    not_none,
                    ),
                title = pipe(
                    cow_json_to_title,
                    test(lambda title: len(title) >= 8, error = N_(u'String is too short')),
                    test(lambda title: year_re.search(title) is None, error = N_(u'String contains a year')),
                    not_none,
                    ),
                tracking_summary = test_isinstance(dict),
                type = pipe(
                    test_isinstance(basestring),
                    test_equals('dataset'),
                    ),
                url = pipe(
                    test_isinstance(basestring),
                    make_input_to_url(full = True),
                    test_none(),
                    ),
                version = pipe(
                    test_isinstance(basestring),
                    cleanup_line,
                    test_none(),
                    ),
                ),
            )(dataset, state = default_state)
        # Only POST when the errors differ from what is already recorded for
        # this bot on the dataset.
        if ((dataset.get('errors') or {}).get(app_name) or {}).get('error') != errors:
            request_headers = headers.copy()
            request_headers['Content-Type'] = 'application/json'
            request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
                'api/1/datasets/{}/errors'.format(dataset['id'])), headers = request_headers)
            try:
                response = urllib2.urlopen(request, json.dumps(dict(
                    api_key = conf['ckan_of_worms.api_key'],
                    author = app_name,
                    draft_id = dataset['draft_id'],
                    value = errors,
                    )))
            except urllib2.HTTPError as response:
                # ``as response`` deliberately rebinds the name: HTTPError is a
                # file-like response whose body is read below.
                if response.code == 409:
                    # The dataset has been modified: don't submit errors,
                    # because we will be notified of the new dataset version.
                    log.info(u'Dataset "{}" has been modified. Errors are ignored.'.format(dataset['name']))
                    # NOTE(review): ``return`` aborts the whole run here, skipping
                    # every remaining dataset; ``continue`` looks intended — TODO confirm.
                    return
                log.error(u'An error occured while setting dataset "{}" errors: {}'.format(dataset['name'], errors))
                response_text = response.read()
                try:
                    response_dict = json.loads(response_text)
                except ValueError:
                    log.error(response_text)
                    raise
                for key, value in response_dict.iteritems():
                    print '{} = {}'.format(key, value)
                raise
            else:
                assert response.code == 200
                check(cow_response_to_value)(response.read(), state = default_state)
    return 0


if __name__ == '__main__':
    sys.exit(main())
|
# -*- coding: utf-8 -*-
from collections import OrderedDict
from itertools import chain
from dmapiclient import HTTPError
from flask import request
from werkzeug.datastructures import MultiDict
from app.main.forms.frameworks import ReuseDeclarationForm
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
import mock
import pytest
from six.moves.urllib.parse import urljoin
from flask import session
from lxml import html
from dmapiclient import APIError
from dmapiclient.audit import AuditTypes
from dmutils.email.exceptions import EmailError
from dmutils.s3 import S3ResponseError
from ..helpers import BaseApplicationTest, FULL_G7_SUBMISSION, FakeMail, valid_g9_declaration_base
def _return_fake_s3_file_dict(directory, filename, ext, last_modified=None, size=None):
return {
'path': '{}{}.{}'.format(directory, filename, ext),
'filename': filename,
'ext': ext,
'last_modified': last_modified or '2015-08-17T14:00:00.000Z',
'size': size if size is not None else 1
}
def get_g_cloud_8():
    """Return a stub G-Cloud 8 framework response (standstill, agreement v1.0)."""
    framework_kwargs = {
        'status': 'standstill',
        'name': 'G-Cloud 8',
        'slug': 'g-cloud-8',
        'framework_agreement_version': 'v1.0',
    }
    return BaseApplicationTest.framework(**framework_kwargs)
def _assert_args_and_raise(e, *args, **kwargs):
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
raise e
return _inner
def _assert_args_and_return(retval, *args, **kwargs):
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
return retval
return _inner
# Parametrized fixture: each test using it runs twice, once with "GET" and
# once with "POST" as the HTTP method name.
@pytest.fixture(params=("GET", "POST"))
def get_or_post(request):
    return request.param
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworksDashboard(BaseApplicationTest):
@staticmethod
def _extract_guidance_links(doc):
return OrderedDict(
(
section_li.xpath("normalize-space(string(.//h2))"),
tuple(
(
item_li.xpath("normalize-space(string(.//a))") or None,
item_li.xpath("string(.//a/@href)") or None,
item_li.xpath("normalize-space(string(.//time))") or None,
item_li.xpath("string(.//time/@datetime)") or None,
)
for item_li in section_li.xpath(".//p[.//a]")
),
)
for section_li in doc.xpath("//main//*[./h2][.//p//a]")
)
@staticmethod
def _extract_signing_details_table_rows(doc):
return tuple(
tuple(
td_th_elem.xpath("normalize-space(string())")
for td_th_elem in tr_elem.xpath("td|th")
)
for tr_elem in doc.xpath(
"//main//table[normalize-space(string(./caption))=$b]/tbody/tr",
b="Agreement details",
)
)
@property
def _boring_agreement_details(self):
# property so we always get a clean copy
return {
'frameworkAgreementVersion': 'v1.0',
'signerName': 'Martin Cunningham',
'signerRole': 'Foreman',
'uploaderUserId': 123,
'uploaderUserName': 'User',
'uploaderUserEmail': 'email@email.com',
}
_boring_agreement_returned_at = "2016-07-10T21:20:00.000000Z"
@property
def _boring_agreement_details_expected_table_results(self):
# property so we always get a clean copy
return (
(
'Person who signed',
'Martin Cunningham Foreman'
),
(
'Submitted by',
'User email@email.com Sunday 10 July 2016 at 22:20'
),
(
'Countersignature',
'Waiting for CCS to countersign'
),
)
def test_framework_dashboard_shows_for_pending_if_declaration_exists(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(
"//h1[normalize-space(string())=$b]",
b="Your G-Cloud 7 application",
)) == 1
def test_framework_dashboard_shows_for_live_if_declaration_exists(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='live')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(
"//h1[normalize-space(string())=$b]",
b="G-Cloud 7 documents",
)) == 1
def test_does_not_show_for_live_if_no_declaration(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='live')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 404
@mock.patch('app.main.views.frameworks.send_email')
def test_interest_registered_in_framework_on_post(self, send_email, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.post("/suppliers/frameworks/digital-outcomes-and-specialists")
assert res.status_code == 200
data_api_client.register_framework_interest.assert_called_once_with(
1234,
"digital-outcomes-and-specialists",
"email@email.com"
)
@mock.patch('app.main.views.frameworks.send_email')
def test_email_sent_when_interest_registered_in_framework(self, send_email, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
data_api_client.find_users.return_value = {'users': [
{'emailAddress': 'email1', 'active': True},
{'emailAddress': 'email2', 'active': True},
{'emailAddress': 'email3', 'active': False}
]}
res = self.client.post("/suppliers/frameworks/digital-outcomes-and-specialists")
assert res.status_code == 200
send_email.assert_called_once_with(
['email1', 'email2'],
mock.ANY,
'MANDRILL',
'You started a G-Cloud 7 application',
'do-not-reply@digitalmarketplace.service.gov.uk',
'Digital Marketplace Admin',
['digital-outcomes-and-specialists-application-started']
)
def test_interest_not_registered_in_framework_on_get(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/digital-outcomes-and-specialists")
assert res.status_code == 200
assert data_api_client.register_framework_interest.called is False
def test_interest_set_but_no_declaration(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
def test_shows_gcloud_7_closed_message_if_pending_and_no_application_done(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'not-submitted'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
assert len(heading) > 0
assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
assert u"You didn't submit an application." in heading[0].xpath('../p[1]/text()')[0]
def test_shows_gcloud_7_closed_message_if_pending_and_application(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
assert len(heading) > 0
assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
lede = doc.xpath('//div[@class="summary-item-lede"]')
assert u"You made your supplier declaration and submitted 1 service for consideration." in \
lede[0].xpath('./p[1]/text()')[0]
assert u"We’ll let you know the result of your application by " in \
lede[0].xpath('./p[2]/text()')[0]
def test_declaration_status_when_complete(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(u'//p/strong[contains(text(), "You’ve made the supplier declaration")]')) == 1
def test_declaration_status_when_started(self, data_api_client, s3):
with self.app.test_client():
self.login()
submission = FULL_G7_SUBMISSION.copy()
# User has not yet submitted page 3 of the declaration
del submission['SQ2-1abcd']
del submission['SQ2-1e']
del submission['SQ2-1f']
del submission['SQ2-1ghijklmn']
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
declaration=submission, status='started')
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath('//p[contains(text(), "You need to finish making the supplier declaration")]')) == 1
def test_declaration_status_when_not_complete(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath('//p[contains(text(), "You need to make the supplier declaration")]')) == 1
    def test_downloads_shown_open_framework(self, data_api_client, s3):
        """While the framework is open, the expected document links are listed and others hidden.

        Seeds the fake S3 bucket with communications and framework documents,
        then checks the exact guidance/legal/communications/reporting links
        rendered on the framework dashboard, including 'last modified' dates.
        """
        # (S3 prefix under communications/, base filename, extension, last-modified timestamp)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            # superfluous file that shouldn't be shown
            ('', 'g-cloud-7-supplier-pack', 'zip', '2015-01-01T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        "Download the proposed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-framework-agreement.pdf",
                        "Wednesday 1 June 2016",
                        "2016-06-01T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # Files that must not be linked while the framework is still open.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-final-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            # Clarification questions are open, so the deadline copy appears exactly once.
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
    def test_downloads_shown_open_framework_clarification_questions_closed(self, data_api_client, s3):
        """Same listing as the open-framework case, but with clarification questions closed.

        With clarification questions closed the communications link text drops
        'ask' and the clarification deadline copy is no longer shown.
        """
        # (S3 prefix under communications/, base filename, extension, last-modified timestamp)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            # superfluous file that shouldn't be shown
            ('', 'g-cloud-7-supplier-pack', 'zip', '2015-01-01T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(
                status="open",
                clarification_questions_open=False,
            )
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        "Download the proposed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-framework-agreement.pdf",
                        "Wednesday 1 June 2016",
                        "2016-06-01T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        # note: no 'ask' — questions are closed
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # Files that must not be linked while the framework is still open.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-final-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            # Clarification questions are closed, so the deadline copy is absent.
            assert not doc.xpath(
                "//main[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
    def test_final_agreement_download_shown_open_framework(self, data_api_client, s3):
        """When a final framework agreement file exists, it supersedes the proposed one.

        Both agreement files are present in the fake bucket; only the final one
        should be linked, even though the proposed file has a later timestamp.
        """
        # (S3 prefix under communications/, base filename, extension, last-modified timestamp)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            ('', 'g-cloud-7-final-framework-agreement', 'pdf', '2016-06-02T14:00:00.000Z'),
            # present but should be overridden by final agreement file
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-11T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        # the final agreement, not the proposed one
                        "Download the framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-framework-agreement.pdf",
                        "Thursday 2 June 2016",
                        "2016-06-02T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # The overridden proposed agreement and the supplier pack must not be linked.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-proposed-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            # Clarification questions are open, so the deadline copy appears exactly once.
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
def test_no_updates_open_framework(self, data_api_client, s3):
files = [
('', 'g-cloud-7-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
]
s3.return_value.list.return_value = [
_return_fake_s3_file_dict(
'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
) for section, filename, ext, last_modified in files
]
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
extracted_guidance_links = self._extract_guidance_links(doc)
assert (
"View communications and ask clarification questions",
"/suppliers/frameworks/g-cloud-7/updates",
None,
None,
) in extracted_guidance_links["Communications"]
assert len(doc.xpath(
"//main//p[contains(normalize-space(string()), $a)]",
a="until 5pm BST, 22 September 2015",
)) == 1
assert not doc.xpath(
"//main//table[normalize-space(string(./caption))=$b]",
b="Agreement details",
)
    def test_no_files_exist_open_framework(self, data_api_client, s3):
        """With an empty bucket only the external guidance link and updates link appear.

        None of the per-file download links (invitation, agreements, call-off,
        reporting template, result letter) should be rendered.
        """
        s3.return_value.list.return_value = []
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        None,
                        None,
                    ),
                )),
            ))
            # No download link should be present, matched by href or label.
            assert not any(
                doc.xpath(
                    "//a[contains(@href, $href_part) or normalize-space(string())=$label]",
                    href_part=href_part,
                    label=label,
                ) for href_part, label in (
                    (
                        "g-cloud-7-invitation.pdf",
                        "Download the invitation to apply",
                    ),
                    (
                        "g-cloud-7-proposed-framework-agreement.pdf",
                        "Download the proposed framework agreement",
                    ),
                    (
                        "g-cloud-7-call-off.pdf",
                        u"Download the proposed \u2018call-off\u2019 contract",
                    ),
                    (
                        "g-cloud-7-reporting-template.xls",
                        "Download the reporting template",
                    ),
                    (
                        "result-letter.pdf",
                        "Download your application result letter",
                    ),
                )
            )
            # Clarification questions are open, so the deadline copy appears exactly once.
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
def test_returns_404_if_framework_does_not_exist(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.side_effect = APIError(mock.Mock(status_code=404))
res = self.client.get('/suppliers/frameworks/does-not-exist')
assert res.status_code == 404
def test_result_letter_is_shown_when_is_in_standstill(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' in data
def test_result_letter_is_not_shown_when_not_in_standstill(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' not in data
def test_result_letter_is_not_shown_when_no_application(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'not-submitted'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' not in data
def test_link_to_unsigned_framework_agreement_is_shown_if_supplier_is_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=True)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Sign and return your framework agreement' in data
assert u'Download your countersigned framework agreement' not in data
def test_pending_success_message_is_explicit_if_supplier_is_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=True)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
data = res.get_data(as_text=True)
for success_message in [
u'Your application was successful. '
u'You must return a signed framework agreement signature page before you can '
u'sell services on the Digital Marketplace.',
u'Download your application award letter (.pdf)',
u'This letter is a record of your successful G-Cloud 7 application.'
]:
assert success_message in data
for equivocal_message in [
u'You made your supplier declaration and submitted 1 service.',
u'Download your application result letter (.pdf)',
u'This letter informs you if your G-Cloud 7 application has been successful.'
]:
assert equivocal_message not in data
def test_link_to_framework_agreement_is_not_shown_if_supplier_is_not_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=False)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Sign and return your framework agreement' not in data
def test_pending_success_message_is_equivocal_if_supplier_is_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=False)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
data = res.get_data(as_text=True)
for success_message in [
u'Your application was successful. You\'ll be able to sell services when the G-Cloud 7 framework is live',
u'Download your application award letter (.pdf)',
u'This letter is a record of your successful G-Cloud 7 application.'
]:
assert success_message not in data
for equivocal_message in [
u'You made your supplier declaration and submitted 1 service.',
u'Download your application result letter (.pdf)',
u'This letter informs you if your G-Cloud 7 application has been successful.'
]:
assert equivocal_message in data
    def test_countersigned_framework_agreement_non_fav_framework(self, data_api_client, s3):
        """Countersigned agreement view for a pre-frameworkAgreementVersion ('fav') framework.

        Checks the full set of dashboard links once both the signed and the
        countersigned agreements exist, and that none of the fav-era signature
        page copy is shown.
        """
        # "fav" being "frameworkAgreementVersion"
        files = [
            ('', 'g-cloud-7-final-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-final-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='pathy/mc/path.face',
                countersigned=True,
                countersigned_path='g-cloud-7/agreements/1234/1234-countersigned-agreement.pdf',
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # Agreement already returned, so no sign-and-return prompt.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-7/agreement",
                label="Sign and return your framework agreement",
            )
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-7/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-7/declaration",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        'Download the standard framework agreement',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-framework-agreement.pdf',
                        None,
                        None,
                    ),
                    (
                        "Download your signed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/agreements/pathy/mc/path.face",
                        None,
                        None,
                    ),
                    (
                        u"Download your countersigned framework agreement",
                        "/suppliers/frameworks/g-cloud-7/agreements/countersigned-agreement.pdf",
                        None,
                        None,
                    ),
                    (
                        'Download your application result letter',
                        '/suppliers/frameworks/g-cloud-7/agreements/result-letter.pdf',
                        None,
                        None,
                    ),
                    (
                        'Download the call-off contract template',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-call-off.pdf',
                        None,
                        None,
                    ),
                )),
                ("Guidance", (
                    (
                        'Download the invitation to apply',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf',
                        None,
                        None,
                    ),
                    (
                        "Read about how to sell your services",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        None,
                        None,
                    ),
                )),
                ('Reporting', (
                    (
                        'Download the reporting template',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls',
                        None,
                        None,
                    ),
                )),
            ))
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # neither of these should exist because it's a pre-frameworkAgreementVersion framework
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_countersigned_framework_agreement_fav_framework(self, data_api_client, s3):
        """Countersigned agreement view for a frameworkAgreementVersion ('fav') framework (G-Cloud 8).

        Fav frameworks link 'original'/'counterpart' signature pages instead of
        the full signed/countersigned agreements, use the external GOV.UK
        standard agreement page, and show the signature-page copy.
        """
        # "fav" being "frameworkAgreementVersion"
        files = [
            ('', 'g-cloud-8-final-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-8-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-8-final-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-8-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-8/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='pathy/mc/path.face',
                agreement_returned_at=self._boring_agreement_returned_at,
                countersigned=True,
                countersigned_path='g-cloud-8/agreements/1234/1234-countersigned-agreement.pdf',
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # Agreement already returned, so no sign-and-return prompt.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # Fav frameworks don't expose the result letter download.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/suppliers/frameworks/g-cloud-7/agreements/result-letter.pdf",
                label="Download your application result letter",
            )
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u"Download your \u2018original\u2019 framework agreement signature page",
                        "/suppliers/frameworks/g-cloud-8/agreements/pathy/mc/path.face",
                        None,
                        None,
                    ),
                    (
                        u"Download your \u2018counterpart\u2019 framework agreement signature page",
                        "/suppliers/frameworks/g-cloud-8/agreements/countersigned-agreement.pdf",
                        None,
                        None,
                    ),
                    (
                        'Download the call-off contract template',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-final-call-off.pdf',
                        None,
                        None,
                    ),
                )),
                ("Guidance", (
                    (
                        'Download the invitation to apply',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-invitation.pdf',
                        None,
                        None,
                    ),
                    (
                        "Read about how to sell your services",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-8/updates",
                        None,
                        None,
                    ),
                )),
                ('Reporting', (
                    (
                        'Download the reporting template',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-reporting-template.xls',
                        None,
                        None,
                    ),
                )),
            ))
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # Countersigned, so the signature-pages copy is shown...
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            # ...and the awaiting-countersignature copy is not.
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_shows_returned_agreement_details(self, data_api_client, s3):
        """After the agreement is returned (not yet countersigned) the signing details are shown.

        Verifies the dashboard links, the signing-details table contents, the
        page heading, and the 'sent to CCS' copy for a G-Cloud 8 supplier.
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
                agreement_returned_at=self._boring_agreement_returned_at,
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # Agreement already returned, so no sign-and-return prompt.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # Fav frameworks don't expose the result letter download.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/suppliers/frameworks/g-cloud-8/agreements/result-letter.pdf",
                label="Download your application result letter",
            )
            extracted_guidance_links = self._extract_guidance_links(doc)
            # Each entry: (link text, href, visible date text, datetime attribute).
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ('Legal documents', (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u'Download your \u2018original\u2019 framework agreement signature page',
                        '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                        None,
                        None,
                    ),
                )),
                ('Guidance', (
                    (
                        'Read about how to sell your services',
                        'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                        None,
                        None,
                    ),
                )),
                ('Communications', (
                    (
                        'View communications and clarification questions',
                        '/suppliers/frameworks/g-cloud-8/updates',
                        None,
                        None,
                    ),
                )),
            ))
            # The signing-details table reflects the returned agreement details.
            extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
            assert extracted_signing_details_table_rows == \
                self._boring_agreement_details_expected_table_results
            assert len(doc.xpath(
                "//main//h1[normalize-space(string())=$b]",
                b="Your G-Cloud 8 application",
            )) == 1
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # Not countersigned yet, so signature-pages copy is absent...
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            # ...and the awaiting-countersignature copy is shown.
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_countersigned_but_no_countersigned_path(self, data_api_client, s3):
        """Dashboard must not show the countersigned state without a countersigned document path.

        ``countersigned`` is True but ``countersigned_path`` is deliberately unset,
        so the page should render exactly as if the agreement had merely been
        returned — proving the view keys off the path, not the boolean flag.
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
                agreement_returned_at=self._boring_agreement_returned_at,
                countersigned=True,
                # note `countersigned_path` is not set: we're testing that the view behaves as though not countersigned
                # i.e. is not depending on the `countersigned` property
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # once the agreement is returned the "sign and return" call-to-action must be gone
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # and the result-letter download must not be offered either
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/suppliers/frameworks/g-cloud-8/agreements/result-letter.pdf",
                label="Download your application result letter",
            )
            # guidance sections as (label, href, ?, ?) 4-tuples per section heading
            # NOTE(review): the two trailing Nones per entry come from
            # _extract_guidance_links — confirm their meaning against that helper
            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ('Legal documents', (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u'Download your \u2018original\u2019 framework agreement signature page',
                        '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                        None,
                        None,
                    ),
                )),
                ('Guidance', (
                    (
                        'Read about how to sell your services',
                        'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                        None,
                        None,
                    ),
                )),
                ('Communications', (
                    (
                        'View communications and clarification questions',
                        '/suppliers/frameworks/g-cloud-8/updates',
                        None,
                        None,
                    ),
                )),
            ))
            extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
            assert extracted_signing_details_table_rows == \
                self._boring_agreement_details_expected_table_results
            assert len(doc.xpath(
                "//main//h1[normalize-space(string())=$b]",
                b="Your G-Cloud 8 application",
            )) == 1
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # countersigned wording must be absent: still in the "sent to CCS" state
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_shows_contract_variation_link_after_agreement_returned(self, data_api_client, s3):
        """With an agreement returned and a framework variation present, the
        'Read the proposed contract variation' link appears under Legal documents."""
        with self.app.test_client():
            self.login()
            g8_with_variation = get_g_cloud_8()
            g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
            data_api_client.get_framework.return_value = g8_with_variation
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
                agreement_returned_at=self._boring_agreement_returned_at,
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # "sign and return" call-to-action must be gone once the agreement is returned
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # guidance sections as (label, href, ?, ?) 4-tuples; Legal documents now
            # includes the proposed contract variation link
            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ('Legal documents', (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u'Download your \u2018original\u2019 framework agreement signature page',
                        '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                        None,
                        None,
                    ),
                    (
                        'Read the proposed contract variation',
                        '/suppliers/frameworks/g-cloud-8/contract-variation/1',
                        None,
                        None,
                    ),
                )),
                ('Guidance', (
                    (
                        'Read about how to sell your services',
                        'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                        None,
                        None,
                    ),
                )),
                ('Communications', (
                    (
                        'View communications and clarification questions',
                        '/suppliers/frameworks/g-cloud-8/updates',
                        None,
                        None,
                    ),
                )),
            ))
            extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
            assert extracted_signing_details_table_rows == \
                self._boring_agreement_details_expected_table_results
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # not countersigned yet, so no countersigned wording
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
def test_does_not_show_contract_variation_link_if_feature_flagged_off(self, data_api_client, s3):
with self.app.test_client():
self.app.config['FEATURE_FLAGS_CONTRACT_VARIATION'] = False
self.login()
g8_with_variation = get_g_cloud_8()
g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
data_api_client.get_framework.return_value = g8_with_variation
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=True,
agreement_returned=True,
agreement_details=self._boring_agreement_details,
agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
agreement_returned_at=self._boring_agreement_returned_at,
)
res = self.client.get("/suppliers/frameworks/g-cloud-8")
assert res.status_code == 200
data = res.get_data(as_text=True)
doc = html.fromstring(data)
assert not doc.xpath(
"//main//a[@href=$href or normalize-space(string())=$label]",
href="/frameworks/g-cloud-7/agreement",
label="Sign and return your framework agreement",
)
assert not doc.xpath(
"//main//a[contains(@href, $href_part) or normalize-space(string())=$label]",
href_part="contract-variation/1",
label="Read the proposed contract variation",
)
extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
assert extracted_signing_details_table_rows == \
self._boring_agreement_details_expected_table_results
assert doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="You can start selling your",
)
assert not doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your original and counterpart signature pages",
)
assert doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your framework agreement signature page has been sent to the Crown Commercial Service",
)
def test_does_not_show_contract_variation_link_if_no_variation(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = get_g_cloud_8()
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=True,
agreement_returned=True,
agreement_details=self._boring_agreement_details,
agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
agreement_returned_at=self._boring_agreement_returned_at,
)
res = self.client.get("/suppliers/frameworks/g-cloud-8")
assert res.status_code == 200
data = res.get_data(as_text=True)
doc = html.fromstring(data)
assert not doc.xpath(
"//main//a[@href=$href or normalize-space(string())=$label]",
href="/frameworks/g-cloud-7/agreement",
label="Sign and return your framework agreement",
)
assert not doc.xpath(
"//main//a[normalize-space(string())=$label]",
label="Read the proposed contract variation",
)
extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
assert extracted_signing_details_table_rows == \
self._boring_agreement_details_expected_table_results
assert doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="You can start selling your",
)
assert not doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your original and counterpart signature pages",
)
assert doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your framework agreement signature page has been sent to the Crown Commercial Service",
)
def test_does_not_show_contract_variation_link_if_agreement_not_returned(self, data_api_client, s3):
with self.app.test_client():
self.login()
g8_with_variation = get_g_cloud_8()
g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
data_api_client.get_framework.return_value = g8_with_variation
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-8")
assert res.status_code == 200
data = res.get_data(as_text=True)
doc = html.fromstring(data)
assert not doc.xpath(
"//main//a[@href=$href or normalize-space(string())=$label]",
href="/frameworks/g-cloud-7/agreement",
label="Sign and return your framework agreement",
)
assert not doc.xpath(
"//main//a[contains(@href, $href_part) or normalize-space(string())=$label]",
href_part="contract-variation/1",
label="Read the proposed contract variation",
)
assert not doc.xpath(
"//main//table[normalize-space(string(./caption))=$b]",
b="Agreement details",
)
assert not doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="You can start selling your",
)
assert not doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your original and counterpart signature pages",
)
assert not doc.xpath(
"//main//p[contains(normalize-space(string()), $b)]",
b="Your framework agreement signature page has been sent to the Crown Commercial Service",
)
    def test_shows_contract_variation_alternate_link_text_after_agreed_by_ccs(self, data_api_client, s3):
        """Once CCS has countersigned the variation the supplier already agreed to,
        the link text switches to 'View the signed contract variation'."""
        with self.app.test_client():
            self.login()
            g8_with_variation = get_g_cloud_8()
            # variation countersigned by CCS...
            g8_with_variation['frameworks']['variations'] = {
                "1": {
                    "createdAt": "2018-08-16",
                    "countersignedAt": "2018-10-01",
                    "countersignerName": "A.N. Other",
                    "countersignerRole": "Head honcho",
                },
            }
            data_api_client.get_framework.return_value = g8_with_variation
            # ...and already agreed by the supplier
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_returned_at=self._boring_agreement_returned_at,
                agreement_path='g-cloud-8/agreements/1234/1234-signed-agreement.pdf',
                agreed_variations={
                    "1": {
                        "agreedAt": "2016-08-19T15:47:08.116613Z",
                        "agreedUserId": 1,
                        "agreedUserEmail": "agreed@email.com",
                        "agreedUserName": u"William Drăyton",
                    },
                },
            )
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            # "sign and return" call-to-action must be gone once the agreement is returned
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # guidance sections as (label, href, ?, ?) 4-tuples; Legal documents now
            # carries the "View the signed contract variation" wording
            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ('Legal documents', (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u'Download your \u2018original\u2019 framework agreement signature page',
                        '/suppliers/frameworks/g-cloud-8/agreements/signed-agreement.pdf',
                        None,
                        None,
                    ),
                    (
                        'View the signed contract variation',
                        '/suppliers/frameworks/g-cloud-8/contract-variation/1',
                        None,
                        None,
                    ),
                )),
                ('Guidance', (
                    (
                        'Read about how to sell your services',
                        'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                        None,
                        None,
                    ),
                )),
                ('Communications', (
                    (
                        'View communications and clarification questions',
                        '/suppliers/frameworks/g-cloud-8/updates',
                        None,
                        None,
                    ),
                )),
            ))
            # the pre-countersignature wording must no longer be offered
            assert not doc.xpath(
                "//main//a[normalize-space(string())=$label]",
                label="Read the proposed contract variation",
            )
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
@pytest.mark.parametrize('supplier_framework_kwargs,link_label,link_href', (
({'declaration': None}, 'Make supplier declaration', '/suppliers/frameworks/g-cloud-7/declaration/start'),
({}, 'Edit supplier declaration', '/suppliers/frameworks/g-cloud-7/declaration'),
))
def test_make_or_edit_supplier_declaration_shows_correct_page(self, data_api_client, s3, supplier_framework_kwargs,
link_label, link_href):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
**supplier_framework_kwargs)
response = self.client.get('/suppliers/frameworks/g-cloud-7')
document = html.fromstring(response.get_data(as_text=True))
assert document.xpath("//a[normalize-space(string())=$link_label]/@href", link_label=link_label)[0] \
== link_href
def test_dashboard_does_not_show_use_of_service_data_if_not_available(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(slug="g-cloud-8", name="G-Cloud 8",
status="open")
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-8")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
add_edit_complete_services = doc.xpath('//div[contains(@class, "framework-dashboard")]/div/li')[1]
use_of_data = add_edit_complete_services.xpath('//div[@class="browse-list-item-body"]')
assert len(use_of_data) == 0
def test_dashboard_shows_use_of_service_data_if_available(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(slug="g-cloud-9", name="G-Cloud 9",
status="open")
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-9")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
add_edit_complete_services = doc.xpath('//div[contains(@class, "framework-dashboard")]/div/li')[1]
use_of_data = add_edit_complete_services.xpath('//div[@class="browse-list-item-body"]')
assert len(use_of_data) == 1
assert 'The service information you provide here:' in use_of_data[0].text_content()
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworksDashboardConfidenceBannerOnPage(BaseApplicationTest):
    """Tests for the confidence banner on the declaration page."""

    # Banner copy shown while a complete application can still be edited.
    expected = (
        'Your application will be submitted at 5pm BST, 23 June 2016. <br> '
        'You can edit your declaration and services at any time before the deadline.'
    )

    def test_confidence_banner_on_page(self, data_api_client, _):
        """Test confidence banner appears on page happy path."""
        data_api_client.get_framework.return_value = self.framework(status='open')
        data_api_client.find_draft_services.return_value = {
            "services": [
                {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'foo'}
            ]
        }
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(status='complete')
        with self.app.test_client():
            self.login()
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            assert self.expected in str(res.data)

    def test_confidence_banner_not_on_page(self, data_api_client, _):
        """Change value and assert that confidence banner is not displayed."""
        data_api_client.get_framework.return_value = self.framework(status='open')
        # 'not-submitted' (vs 'submitted' above) is the value change that hides the banner
        data_api_client.find_draft_services.return_value = {
            "services": [
                {'serviceName': 'A service', 'status': 'not-submitted', 'lotSlug': 'foo'}
            ]
        }
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(status='complete')
        with self.app.test_client():
            self.login()
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            assert self.expected not in str(res.data)
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworkAgreement(BaseApplicationTest):
    """Tests for the GET side of the framework agreement page."""

    def test_page_renders_if_all_ok(self, data_api_client):
        """Standstill framework + supplier on framework renders the legacy upload page."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            data = res.get_data(as_text=True)
            assert res.status_code == 200
            # legacy (no frameworkAgreementVersion) flow, not the signature-page flow
            assert u'Send document to CCS' in data
            assert u'Return your signed signature page' not in data

    def test_page_returns_404_if_framework_in_wrong_state(self, data_api_client):
        """The page only exists once the framework has left the 'open' state."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            assert res.status_code == 404

    def test_page_returns_404_if_supplier_not_on_framework(self, data_api_client):
        """Suppliers not awarded a place on the framework get a 404."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=False)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            assert res.status_code == 404

    @mock.patch('dmutils.s3.S3')
    def test_upload_message_if_agreement_is_returned(self, s3, data_api_client):
        """A returned agreement shows the upload timestamp and confirmation copy."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True, agreement_returned=True, agreement_returned_at='2015-11-02T15:25:56.000000Z'
            )
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreement')
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            assert res.status_code == 200
            assert u'/suppliers/frameworks/g-cloud-7/agreement' == doc.xpath('//form')[0].action
            # the ISO timestamp above rendered in the page's human-readable form
            assert u'Document uploaded Monday 2 November 2015 at 15:25' in data
            assert u'Your document has been uploaded' in data

    def test_upload_message_if_agreement_is_not_returned(self, data_api_client):
        """No upload confirmation copy before an agreement has been returned."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreement')
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            assert res.status_code == 200
            assert u'/suppliers/frameworks/g-cloud-7/agreement' == doc.xpath('//form')[0].action
            assert u'Document uploaded' not in data
            assert u'Your document has been uploaded' not in data

    def test_loads_contract_start_page_if_framework_agreement_version_exists(self, data_api_client):
        """G-Cloud 8 (which has a frameworkAgreementVersion) uses the signature-page flow."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-8/agreement")
            data = res.get_data(as_text=True)
            assert res.status_code == 200
            assert u'Return your signed signature page' in data
            assert u'Send document to CCS' not in data

    def test_two_lots_passed_on_contract_start_page(self, data_api_client):
        """Per-lot application outcomes are tabulated on the contract start page.

        NOTE(review): despite the name, the table covers all four G-Cloud 8 lots —
        two of them 'Successful' — confirm the intended reading of "two lots".
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'lotSlug': 'saas', 'status': 'submitted'},
                    {'lotSlug': 'saas', 'status': 'not-submitted'},
                    {'lotSlug': 'paas', 'status': 'failed'},
                    {'lotSlug': 'scs', 'status': 'submitted'}
                ]
            }
            # one row per lot: submitted -> Successful, failed -> Unsuccessful,
            # no draft at all (iaas) -> No application
            expected_lots_and_statuses = [
                ('Software as a Service', 'Successful'),
                ('Platform as a Service', 'Unsuccessful'),
                ('Infrastructure as a Service', 'No application'),
                ('Specialist Cloud Services', 'Successful'),
            ]
            res = self.client.get("/suppliers/frameworks/g-cloud-8/agreement")
            doc = html.fromstring(res.get_data(as_text=True))
            assert res.status_code == 200
            lots_and_statuses = []
            lot_table_rows = doc.xpath('//*[@id="content"]//table/tbody/tr')
            for row in lot_table_rows:
                cells = row.findall('./td')
                lots_and_statuses.append(
                    (cells[0].text_content().strip(), cells[1].text_content().strip())
                )
            assert len(lots_and_statuses) == len(expected_lots_and_statuses)
            for lot_and_status in lots_and_statuses:
                assert lot_and_status in expected_lots_and_statuses
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworkAgreementUpload(BaseApplicationTest):
    """Tests for POSTing a signed framework agreement document.

    The happy path is: upload to S3, create a framework agreement record via the
    API, update it with the uploaded path, sign it, then email confirmation —
    each failure test below knocks out one step and checks nothing later runs.

    NOTE(review): ``StringIO(b'...')`` mixes a bytes literal with StringIO —
    fine on Python 2; under Python 3 this would need BytesIO. Confirm the
    project's Python version before touching.
    """

    def test_page_returns_404_if_framework_in_wrong_state(self, data_api_client, send_email, s3):
        """Uploads are rejected while the framework is still 'open'."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 404

    def test_page_returns_404_if_supplier_not_on_framework(self, data_api_client, send_email, s3):
        """Suppliers without a framework place cannot upload an agreement."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=False)
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 404

    @mock.patch('app.main.views.frameworks.file_is_less_than_5mb')
    def test_page_returns_400_if_file_is_too_large(self, file_is_less_than_5mb, data_api_client, send_email, s3):
        """Files over the 5MB limit are rejected with a validation message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            file_is_less_than_5mb.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert u'Document must be less than 5MB' in res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.file_is_empty')
    def test_page_returns_400_if_file_is_empty(self, file_is_empty, data_api_client, send_email, s3):
        """Empty uploads are rejected with a validation message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            file_is_empty.return_value = True
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b''), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert u'Document must not be empty' in res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_api_is_not_updated_and_email_not_sent_if_upload_fails(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """An S3 failure aborts the flow before any API write or email."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            s3.return_value.save.side_effect = S3ResponseError(500, 'All fail')
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 503
            # the save was attempted with the generated path...
            # NOTE(review): 'Supplier_Nme' matches the supplier-name fixture used
            # elsewhere in these tests — confirm against BaseApplicationTest
            s3.return_value.save.assert_called_with(
                'my/path.pdf',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.pdf'
            )
            # ...but nothing downstream of the failed upload ran
            assert data_api_client.create_framework_agreement.called is False
            assert data_api_client.update_framework_agreement.called is False
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_create_framework_agreement_fails(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """Failure at the 'create' API step stops the update/sign/email steps."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.create_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is False
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_update_framework_agreement_fails(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """Failure at the 'update' API step stops the sign/email steps."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.update_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is True
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_sign_framework_agreement_fails(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """Failure at the 'sign' API step stops the confirmation email."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.sign_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is True
            assert data_api_client.sign_framework_agreement.called is True
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_failure(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """An email-sending failure surfaces as a 503."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            send_email.side_effect = EmailError()
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 503
            assert send_email.called is True

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_agreement_document(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """Happy path: S3 save, create/update/sign API calls, then redirect back."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            data_api_client.create_framework_agreement.return_value = {
                "agreement": {"id": 20}
            }
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            generate_timestamped_document_upload_path.assert_called_once_with(
                'g-cloud-7',
                1234,
                'agreements',
                'signed-framework-agreement.pdf'
            )
            s3.return_value.save.assert_called_with(
                'my/path.pdf',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.pdf'
            )
            data_api_client.create_framework_agreement.assert_called_with(
                1234, 'g-cloud-7', 'email@email.com'
            )
            # the agreement id (20) returned from create feeds the next two calls
            data_api_client.update_framework_agreement.assert_called_with(
                20,
                {"signedAgreementPath": 'my/path.pdf'},
                'email@email.com'
            )
            data_api_client.sign_framework_agreement.assert_called_with(
                20, 'email@email.com', {"uploaderUserId": 123}
            )
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/agreement'

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_jpeg_agreement_document(
            self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """A .jpg upload keeps its extension in the stored path and download name."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.jpg'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.jpg'),
                }
            )
            s3.return_value.save.assert_called_with(
                'my/path.jpg',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.jpg'
            )
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/agreement'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
@mock.patch('dmutils.s3.S3')
class TestFrameworkAgreementDocumentDownload(BaseApplicationTest):
    """Tests for downloading the supplier's own framework agreement documents."""

    def test_download_document_fails_if_no_supplier_framework(self, S3, data_api_client):
        """A 404 from the supplier-framework lookup surfaces as a 404 page."""
        data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert response.status_code == 404

    def test_download_document_fails_if_no_supplier_declaration(self, S3, data_api_client):
        """Suppliers without a declaration cannot download agreement documents."""
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert response.status_code == 404

    def test_download_document(self, S3, data_api_client):
        """Happy path: redirect to the signed URL, rewritten onto the asset host."""
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        bucket = S3.return_value
        bucket.get_signed_url.return_value = 'http://url/path?param=value'
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert response.status_code == 302
            assert response.location == 'http://asset-host/path?param=value'
            bucket.get_signed_url.assert_called_with(
                'g-cloud-7/agreements/1234/1234-example.pdf')

    def test_download_document_with_asset_url(self, S3, data_api_client):
        """DM_ASSETS_URL overrides the host of the signed URL in the redirect."""
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        bucket = S3.return_value
        bucket.get_signed_url.return_value = 'http://url/path?param=value'
        with self.app.test_client():
            self.app.config['DM_ASSETS_URL'] = 'https://example'
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert response.status_code == 302
            assert response.location == 'https://example/path?param=value'
            bucket.get_signed_url.assert_called_with(
                'g-cloud-7/agreements/1234/1234-example.pdf')
@mock.patch('dmutils.s3.S3')
class TestFrameworkDocumentDownload(BaseApplicationTest):
    """Tests for the framework communications file download view."""

    def test_download_document(self, S3):
        # A resolvable file 302s to its signed URL, rewritten onto the asset host.
        bucket = mock.Mock()
        bucket.get_signed_url.return_value = 'http://url/path?param=value'
        S3.return_value = bucket

        with self.app.test_client():
            self.login()

            res = self.client.get('/suppliers/frameworks/g-cloud-7/files/example.pdf')

            bucket.get_signed_url.assert_called_with('g-cloud-7/communications/example.pdf')
            assert res.status_code == 302
            assert res.location == 'http://asset-host/path?param=value'

    def test_download_document_returns_404_if_url_is_None(self, S3):
        # If S3 cannot produce a signed URL the view responds 404.
        bucket = mock.Mock()
        bucket.get_signed_url.return_value = None
        S3.return_value = bucket

        with self.app.test_client():
            self.login()

            res = self.client.get('/suppliers/frameworks/g-cloud-7/files/example.pdf')

            assert res.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestStartSupplierDeclaration(BaseApplicationTest):
    """Tests for the declaration "start" page."""

    def test_start_declaration_goes_to_declaration_overview_page(self, data_api_client):
        # The "Start your declaration" call-to-action should point at the
        # declaration reuse page.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            res = self.client.get('/suppliers/frameworks/g-cloud-7/declaration/start')
            doc = html.fromstring(res.get_data(as_text=True))

            start_hrefs = doc.xpath("//a[normalize-space(string(.))='Start your declaration']/@href")
            assert start_hrefs[0] == '/suppliers/frameworks/g-cloud-7/declaration/reuse'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationOverviewSubmit(BaseApplicationTest):
    """
    Behaviour common to both GET and POST views on path /suppliers/frameworks/g-cloud-7/declaration
    """
    # NOTE(review): each test takes a `get_or_post` argument whose parametrization
    # ("GET"/"POST" presumably) is not visible in this chunk — confirm it is
    # supplied by a class-level parametrize/fixture elsewhere in the file.

    def test_supplier_not_interested(self, data_api_client, get_or_post):
        # Supplier has no framework-interest record -> both verbs 404.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(status="open"),
                "g-cloud-7",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                1234,
                "g-cloud-7",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            response = getattr(self.client, get_or_post.lower())("/suppliers/frameworks/g-cloud-7/declaration")
            assert response.status_code == 404

    def test_framework_coming(self, data_api_client, get_or_post):
        # Framework not yet open ("coming") -> both verbs 404.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(status="coming"),
                "g-cloud-7",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
                self.supplier_framework(framework_slug="g-cloud-7"),
                1234,
                "g-cloud-7",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            response = getattr(self.client, get_or_post.lower())("/suppliers/frameworks/g-cloud-7/declaration")
            assert response.status_code == 404

    def test_framework_unknown(self, data_api_client, get_or_post):
        # Nonexistent framework slug -> both verbs 404.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                "muttoning-clouds",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                1234,
                "muttoning-clouds",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            response = getattr(self.client, get_or_post.lower())("/suppliers/frameworks/muttoning-clouds/declaration")
            assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationOverview(BaseApplicationTest):
    """Tests for the declaration overview page in open and closed framework states."""

    @staticmethod
    def _extract_section_information(doc, section_title, expect_edit_link=True):
        """
        given a section (full text) name, returns that section's relevant information in a tuple (format described
        in comments)
        """
        # Find the (single) table immediately following the section's h2 heading.
        tables = doc.xpath(
            "//table[preceding::h2[1][normalize-space(string())=$section_title]]",
            section_title=section_title,
        )
        assert len(tables) == 1
        table = tables[0]

        # An "Edit" link must be present iff expect_edit_link.
        edit_as = doc.xpath(
            "//a[@class='summary-change-link'][preceding::h2[1][normalize-space(string())=$section_title]]",
            section_title=section_title,
        )
        assert ([a.xpath("normalize-space(string())") for a in edit_as] == ["Edit"]) is expect_edit_link

        return (
            # table caption text
            table.xpath("normalize-space(string(./caption))"),
            # "Edit" link href
            edit_as[0].xpath("@href")[0] if expect_edit_link else None,
            tuple(
                (
                    # contents of row heading
                    row.xpath("normalize-space(string(./td[@class='summary-item-field-first']))"),
                    # full text contents of row "value"
                    row.xpath("normalize-space(string(./td[@class='summary-item-field']))"),
                    # full text contents of each a element in row value
                    tuple(a.xpath("normalize-space(string())") for a in row.xpath(
                        "./td[@class='summary-item-field']//a"
                    )),
                    # href of each a element in row value
                    tuple(row.xpath("./td[@class='summary-item-field']//a/@href")),
                    # full text contents of each li element in row value
                    tuple(li.xpath("normalize-space(string())") for li in row.xpath(
                        "./td[@class='summary-item-field']//li"
                    )),
                ) for row in table.xpath(".//tr[contains(@class,'summary-item-row')]")
            )
        )
@staticmethod
def _section_information_strip_edit_href(section_information):
row_heading, edit_href, rows = section_information
return row_heading, None, rows
    def _setup_data_api_client(self, data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug):
        # Common mock wiring: framework + supplier-framework lookups succeed with
        # the given fixtures (and assert they are called with the expected args);
        # the declaration must never be written by these overview views.
        data_api_client.get_framework.side_effect = _assert_args_and_return(
            self.framework(slug=framework_slug, name="F-Cumulus 0", status=framework_status),
            framework_slug,
        )
        data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
            self.supplier_framework(
                framework_slug=framework_slug,
                declaration=declaration,
                prefill_declaration_from_framework_slug=prefill_fw_slug,
            ),
            1234,
            framework_slug,
        )
        data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")
# corresponds to the parametrization args:
# "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections"
_common_parametrization = tuple(
chain.from_iterable(chain(
(( # noqa
"g-cloud-9",
empty_declaration,
False,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",),
(),
),
(
"Services in scope for G-Cloud",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#servicesDoNotInclude",),
(),
),
(
"Buyers pay for what they use",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#payForWhatUse",),
(),
),
(
"What your team will deliver",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#offerServicesYourselves",),
(),
),
(
"Contractual responsibility and accountability",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#fullAccountability",),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",),
(),
),
(
"Bribery or corruption convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#corruptionBribery",),
(),
),
(
"Fraud convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#fraudAndTheft",),
(),
),
(
"Terrorism convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#terrorism",),
(),
),
(
"Organised crime convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#organisedCrime",),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-"
"services",),
(),
),
),
),
),
) for empty_declaration in (None, {})), # two possible ways of specifying a "empty" declaration - test both
(( # noqa
"g-cloud-9",
{
"status": "started",
"conspiracy": True,
"corruptionBribery": False,
"fraudAndTheft": True,
"terrorism": False,
"organisedCrime": True,
"subcontracting": [
"yourself without the use of third parties (subcontractors)",
"as a prime contractor, using third parties (subcontractors) to provide all services",
],
},
False,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",),
(),
),
(
"Services in scope for G-Cloud",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#servicesDoNotInclude",),
(),
),
(
"Buyers pay for what they use",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#payForWhatUse",),
(),
),
(
"What your team will deliver",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#offerServicesYourselves",),
(),
),
(
"Contractual responsibility and accountability",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#fullAccountability",),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
"Yes",
(),
(),
(),
),
(
"Bribery or corruption convictions",
"No",
(),
(),
(),
),
(
"Fraud convictions",
"Yes",
(),
(),
(),
),
(
"Terrorism convictions",
"No",
(),
(),
(),
),
(
"Organised crime convictions",
"Yes",
(),
(),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
"yourself without the use of third parties (subcontractors) as a prime contractor, using "
"third parties (subcontractors) to provide all services", # noqa
(),
(),
(
"yourself without the use of third parties (subcontractors)",
"as a prime contractor, using third parties (subcontractors) to provide all services",
),
),
),
),
),
),),
(( # noqa
"g-cloud-9",
dict(status=declaration_status, **(valid_g9_declaration_base())),
True,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Yes",
(),
(),
(),
),
(
"Services in scope for G-Cloud",
"Yes",
(),
(),
(),
),
(
"Buyers pay for what they use",
"Yes",
(),
(),
(),
),
(
"What your team will deliver",
"No",
(),
(),
(),
),
(
"Contractual responsibility and accountability",
"Yes",
(),
(),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
"No",
(),
(),
(),
),
(
"Bribery or corruption convictions",
"Yes",
(),
(),
(),
),
(
"Fraud convictions",
"No",
(),
(),
(),
),
(
"Terrorism convictions",
"Yes",
(),
(),
(),
),
(
"Organised crime convictions",
"No",
(),
(),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
"yourself without the use of third parties (subcontractors)",
(),
(),
(),
),
),
),
),
) for declaration_status in ("started", "complete",)),
) for prefill_fw_slug, q_link_text_prefillable_section in (
# test all of the previous combinations with two possible values of prefill_fw_slug
(None, "Answer question",),
("some-previous-framework", "Review answer",),
)))
# corresponds to the parametrization args:
# "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections"
#
# this is more straightforward than _common_parametrization because we only have to care about non-open frameworks
_g7_parametrization = tuple(
(
"g-cloud-7",
declaration,
decl_valid,
None,
# G7 doesn't (yet?) have any "short names" for questions and so will be listing the answers in the
# overview against their full verbose questions so any sections that we wanted to assert the content of
# would require a reference copy of all its full question texts kept here. we don't want to do this so for
# now don't assert any G7 sections...
(),
) for declaration, decl_valid in chain(
(
(dict(FULL_G7_SUBMISSION, status=decl_status), True)
for decl_status in ("started", "complete",)
),
(
(empty_decl, False)
for empty_decl in (None, {})
),
)
)
    @pytest.mark.parametrize(
        "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
        _common_parametrization,
    )
    def test_display_open(
        self,
        data_api_client,
        framework_slug,
        declaration,
        decl_valid,
        prefill_fw_slug,
        expected_sections,
    ):
        """While the framework is open the overview is editable: breadcrumbs say
        "Apply", question/edit links appear while the declaration is incomplete,
        and "Make declaration" forms appear once it is valid but not yet complete.
        """
        self._setup_data_api_client(data_api_client, "open", framework_slug, declaration, prefill_fw_slug)

        with self.app.test_client():
            self.login()

            response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
            assert response.status_code == 200
            doc = html.fromstring(response.get_data(as_text=True))

            assert [e.xpath("normalize-space(string())") for e in doc.xpath(
                "//nav//*[@role='breadcrumbs']//a",
            )] == [
                "Digital Marketplace",
                "Your account",
                "Apply to F-Cumulus 0",
            ]
            assert doc.xpath(
                "//nav//*[@role='breadcrumbs']//a/@href",
            ) == [
                "/",
                "/suppliers",
                "/suppliers/frameworks/{}".format(framework_slug),
            ]

            # "answer all questions" warning shown only while the declaration is invalid
            assert bool(doc.xpath(
                "//p[contains(normalize-space(string()), $t)][contains(normalize-space(string()), $f)]",
                t="You must answer all questions and make your declaration before",
                f="F-Cumulus 0",
            )) is (not decl_valid)

            # "make your declaration" prompt shown once valid but not yet complete
            assert bool(doc.xpath(
                "//p[contains(normalize-space(string()), $t)][contains(normalize-space(string()), $f)]",
                t="You must make your declaration before",
                f="F-Cumulus 0",
            )) is (decl_valid and declaration.get("status") != "complete")

            assert len(doc.xpath(
                "//p[contains(normalize-space(string()), $t)]",
                t="You can come back and edit your answers at any time before the deadline.",
            )) == (2 if decl_valid and declaration.get("status") != "complete" else 0)

            assert len(doc.xpath(
                "//p[contains(normalize-space(string()), $t)][not(contains(normalize-space(string()), $d))]",
                t="You can come back and edit your answers at any time",
                d="deadline",
            )) == (2 if decl_valid and declaration.get("status") == "complete" else 0)

            # "Review answer" labels only appear when prefilling from a previous framework
            if prefill_fw_slug is None:
                assert not doc.xpath("//a[normalize-space(string())=$t]", t="Review answer")

            assert bool(doc.xpath(
                "//a[normalize-space(string())=$a or normalize-space(string())=$b]",
                a="Answer question",
                b="Review answer",
            )) is (not decl_valid)

            if not decl_valid:
                # assert that all links with the label "Answer question" or "Review answer" link to some subpage (by
                # asserting that there are none that don't, having previously determined that such-labelled links exist)
                assert not doc.xpath(
                    # we want the href to *contain* $u but not *be* $u
                    "//a[normalize-space(string())=$a or normalize-space(string())=$b]"
                    "[not(starts-with(@href, $u)) or @href=$u]",
                    a="Answer question",
                    b="Review answer",
                    u="/suppliers/frameworks/{}/declaration/".format(framework_slug),
                )

            if decl_valid and declaration.get("status") != "complete":
                # exactly two CSRF-protected "Make declaration" forms, both posting back here
                mdf_actions = doc.xpath(
                    "//form[@method='POST'][.//input[@value=$t][@type='submit']][.//input[@name='csrf_token']]/@action",
                    t="Make declaration",
                )
                assert len(mdf_actions) == 2
                assert all(
                    urljoin("/suppliers/frameworks/{}/declaration".format(framework_slug), action) ==
                    "/suppliers/frameworks/{}/declaration".format(framework_slug)
                    for action in mdf_actions
                )
            else:
                assert not doc.xpath("//input[@value=$t]", t="Make declaration")

            assert doc.xpath(
                "//a[normalize-space(string())=$t][@href=$u]",
                t="Return to application",
                u="/suppliers/frameworks/{}".format(framework_slug),
            )

            for expected_section in expected_sections:
                assert self._extract_section_information(doc, expected_section[0]) == expected_section
    @pytest.mark.parametrize(
        "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
        # only the cases with a *complete* declaration - incomplete ones 410 (see test_error_closed)
        tuple(
            (
                framework_slug,
                declaration,
                decl_valid,
                prefill_fw_slug,
                expected_sections,
            )
            for framework_slug, declaration, decl_valid, prefill_fw_slug, expected_sections
            in chain(_common_parametrization, _g7_parametrization)
            if (declaration or {}).get("status") == "complete"
        )
    )
    @pytest.mark.parametrize("framework_status", ("pending", "standstill", "live", "expired",))
    def test_display_closed(
        self,
        data_api_client,
        framework_status,
        framework_slug,
        declaration,
        decl_valid,
        prefill_fw_slug,
        expected_sections,
    ):
        """Once the framework has closed, a complete declaration is shown
        read-only: no edit links, no submit forms, no prompts.
        """
        self._setup_data_api_client(data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug)

        with self.app.test_client():
            self.login()

            response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
            assert response.status_code == 200
            doc = html.fromstring(response.get_data(as_text=True))

            assert [e.xpath("normalize-space(string())") for e in doc.xpath(
                "//nav//*[@role='breadcrumbs']//a",
            )] == [
                "Digital Marketplace",
                "Your account",
                "Your F-Cumulus 0 application",
            ]
            assert doc.xpath(
                "//nav//*[@role='breadcrumbs']//a/@href",
            ) == [
                "/",
                "/suppliers",
                "/suppliers/frameworks/{}".format(framework_slug),
            ]

            # there shouldn't be any links to the "edit" page
            assert not any(
                urljoin("/suppliers/frameworks/{}/declaration".format(framework_slug), a.attrib["href"]).startswith(
                    "/suppliers/frameworks/{}/declaration/edit/".format(framework_slug)
                )
                for a in doc.xpath("//a[@href]")
            )
            # no submittable forms should be pointing at ourselves
            assert not any(
                urljoin(
                    "/suppliers/frameworks/{}/declaration".format(framework_slug),
                    form.attrib["action"],
                ) == "/suppliers/frameworks/{}/declaration".format(framework_slug)
                for form in doc.xpath("//form[.//input[@type='submit']]")
            )

            assert not doc.xpath("//a[@href][normalize-space(string())=$label]", label="Answer question")
            assert not doc.xpath("//a[@href][normalize-space(string())=$label]", label="Review answer")
            assert not doc.xpath("//p[contains(normalize-space(string()), $t)]", t="make your declaration")
            assert not doc.xpath("//p[contains(normalize-space(string()), $t)]", t="edit your answers")

            for expected_section in expected_sections:
                assert self._extract_section_information(
                    doc,
                    expected_section[0],
                    expect_edit_link=False,
                ) == self._section_information_strip_edit_href(expected_section)
    @pytest.mark.parametrize(
        "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
        # only the cases with an *incomplete* declaration - complete ones display (see test_display_closed)
        tuple(
            (
                framework_slug,
                declaration,
                decl_valid,
                prefill_fw_slug,
                expected_sections,
            )
            for framework_slug, declaration, decl_valid, prefill_fw_slug, expected_sections
            in chain(_common_parametrization, _g7_parametrization)
            if (declaration or {}).get("status") != "complete"
        )
    )
    @pytest.mark.parametrize("framework_status", ("pending", "standstill", "live", "expired",))
    def test_error_closed(
        self,
        data_api_client,
        framework_status,
        framework_slug,
        declaration,
        decl_valid,
        prefill_fw_slug,
        expected_sections,
    ):
        """An incomplete declaration on a closed framework responds 410 Gone."""
        self._setup_data_api_client(data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug)

        with self.app.test_client():
            self.login()

            response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
            assert response.status_code == 410
@pytest.mark.parametrize("framework_status", ("coming", "open", "pending", "standstill", "live", "expired",))
def test_error_nonexistent_framework(self, data_api_client, framework_status):
self._setup_data_api_client(data_api_client, framework_status, "g-cloud-31415", {"status": "complete"}, None)
with self.app.test_client():
self.login()
response = self.client.get("/suppliers/frameworks/g-cloud-31415/declaration")
assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationSubmit(BaseApplicationTest):
    """Tests for POSTing (submitting) the supplier declaration."""

    @pytest.mark.parametrize("prefill_fw_slug", (None, "some-previous-framework",))
    @pytest.mark.parametrize("invalid_declaration", (
        None,
        {},
        {
            # not actually complete - only first section is
            "status": "complete",
            "unfairCompetition": False,
            "skillsAndResources": False,
            "offerServicesYourselves": False,
            "fullAccountability": True,
        },
    ))
    def test_invalid_declaration(self, data_api_client, invalid_declaration, prefill_fw_slug):
        # An invalid declaration must be rejected (400) without ever writing it.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(slug="g-cloud-9", name="G-Cloud 9", status="open"),
                "g-cloud-9",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
                self.supplier_framework(
                    framework_slug="g-cloud-9",
                    declaration=invalid_declaration,
                    prefill_declaration_from_framework_slug=prefill_fw_slug,  # should have zero effect
                ),
                1234,
                "g-cloud-9",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            response = self.client.post("/suppliers/frameworks/g-cloud-9/declaration")
            assert response.status_code == 400

    @pytest.mark.parametrize("prefill_fw_slug", (None, "some-previous-framework",))
    @pytest.mark.parametrize("declaration_status", ("started", "complete",))
    @mock.patch("dmutils.s3.S3")  # needed by the framework dashboard which our request gets redirected to
    def test_valid_declaration(self, s3, data_api_client, prefill_fw_slug, declaration_status):
        # A valid declaration is written as "complete" and the user is
        # redirected to the framework dashboard (tracked as declaration_complete).
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(slug="g-cloud-9", name="G-Cloud 9", status="open"),
                "g-cloud-9",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
                self.supplier_framework(
                    framework_slug="g-cloud-9",
                    declaration=dict(status=declaration_status, **(valid_g9_declaration_base())),
                    prefill_declaration_from_framework_slug=prefill_fw_slug,  # should have zero effect
                ),
                1234,
                "g-cloud-9",
            )
            data_api_client.set_supplier_declaration.side_effect = _assert_args_and_return(
                dict(status="complete", **(valid_g9_declaration_base())),
                1234,
                "g-cloud-9",
                dict(status="complete", **(valid_g9_declaration_base())),
                "email@email.com",
            )

            response = self.client.post("/suppliers/frameworks/g-cloud-9/declaration", follow_redirects=True)
            # args of call are asserted by mock's side_effect
            assert data_api_client.set_supplier_declaration.called is True
            # this will be the response from the redirected-to view
            assert response.status_code == 200
            doc = html.fromstring(response.get_data(as_text=True))
            assert doc.xpath(
                "//*[@data-analytics='trackPageView'][@data-url=$k]",
                k="/suppliers/frameworks/g-cloud-9/declaration_complete",
            )

    @pytest.mark.parametrize("framework_status", ("standstill", "pending", "live", "expired",))
    def test_closed_framework_state(self, data_api_client, framework_status):
        # Submitting after the framework closes is a 404; nothing is written.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(status=framework_status),
                "g-cloud-7",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
                self.supplier_framework(framework_slug="g-cloud-7"),
                1234,
                "g-cloud-7",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            response = self.client.post("/suppliers/frameworks/g-cloud-7/declaration")
            assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestSupplierDeclaration(BaseApplicationTest):
    """Tests for the declaration section edit pages, including pre-filling
    answers from a previous framework's declaration."""

    @pytest.mark.parametrize("empty_declaration", ({}, None,))
    def test_get_with_no_previous_answers(self, data_api_client, empty_declaration):
        # With no saved answers (either spelling of "empty"), no radio button
        # is pre-checked.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-7",
                declaration=empty_declaration,
            )
            data_api_client.get_supplier_declaration.side_effect = APIError(mock.Mock(status_code=404))

            res = self.client.get(
                '/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials')
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            assert doc.xpath('//input[@id="PR-1-yes"]/@checked') == []
            assert doc.xpath('//input[@id="PR-1-no"]/@checked') == []
def test_get_with_with_previous_answers(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started", "PR1": False}
)
res = self.client.get(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials')
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath('//input[@id="input-PR1-2"]/@checked')) == 1
    def test_get_with_with_prefilled_answers(self, data_api_client):
        """Answers from an earlier framework's declaration pre-fill a prefillable
        section, with banner and per-question review messages shown."""
        with self.app.test_client():
            self.login()
            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]
            # Current framework application information
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started"},
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )
            # The previous declaration to prefill from
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "conspiracy": True,
                                 "corruptionBribery": False,
                                 "fraudAndTheft": True,
                                 "terrorism": False,
                                 "organisedCrime": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }
            # The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
            assert res.status_code == 200
            data_api_client.get_supplier_declaration.assert_called_once_with(1234, "digital-outcomes-and-specialists-2")
            doc = html.fromstring(res.get_data(as_text=True))

            # Radio buttons have been pre-filled with the correct answers
            assert len(doc.xpath('//input[@id="input-conspiracy-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-corruptionBribery-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-fraudAndTheft-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-terrorism-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1

            # Blue banner message is shown at top of page
            assert doc.xpath('normalize-space(string(//div[@class="banner-information-without-action"]))') == \
                "Answers on this page are from an earlier declaration and need review."

            # Blue information messages are shown next to each question
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 5
            for message in info_messages:
                assert self.strip_all_whitespace(message.text) == self.strip_all_whitespace(
                    "This answer is from your Digital Stuff 2 declaration"
                )
    def test_get_with_with_partially_prefilled_answers(self, data_api_client):
        """Only questions present in the previous declaration are pre-filled;
        missing ones render unchecked and get no review message."""
        with self.app.test_client():
            self.login()
            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]
            # Current framework application information
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started"},
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )
            # The previous declaration to prefill from - missing "corruptionBribery" and "terrorism" keys
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "conspiracy": True,
                                 "fraudAndTheft": True,
                                 "organisedCrime": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }
            # The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
            assert res.status_code == 200
            data_api_client.get_supplier_declaration.assert_called_once_with(1234, "digital-outcomes-and-specialists-2")
            doc = html.fromstring(res.get_data(as_text=True))

            # Radio buttons have been pre-filled with the correct answers
            assert len(doc.xpath('//input[@id="input-conspiracy-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-fraudAndTheft-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1

            # Radio buttons for missing keys exist but have not been pre-filled
            assert len(doc.xpath('//input[@id="input-corruptionBribery-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-corruptionBribery-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-corruptionBribery-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-corruptionBribery-2"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-terrorism-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-terrorism-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-terrorism-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-terrorism-2"]/@checked')) == 0

            # Blue banner message is shown at top of page
            assert doc.xpath('normalize-space(string(//div[@class="banner-information-without-action"]))') == \
                "Answers on this page are from an earlier declaration and need review."

            # Blue information messages are shown next to pre-filled questions only
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 3
            for message in info_messages:
                assert self.strip_all_whitespace(message.text) == self.strip_all_whitespace(
                    "This answer is from your Digital Stuff 2 declaration"
                )
    def test_answers_not_prefilled_if_section_has_already_been_saved(self, data_api_client):
        """Once the current declaration already has answers for a section, prefill is skipped.

        The pre-fill source framework (and its declaration) must not even be fetched,
        and the radio buttons must reflect the *current* answers.
        """
        with self.app.test_client():
            self.login()

            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]

            # Current framework application information with the grounds-for-mandatory-exclusion section complete
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started",
                             "conspiracy": False,
                             "corruptionBribery": True,
                             "fraudAndTheft": False,
                             "terrorism": True,
                             "organisedCrime": False,
                             },
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )

            # The previous declaration to prefill from - has relevant answers but should not ever be called
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "conspiracy": True,
                                 "corruptionBribery": False,
                                 "fraudAndTheft": True,
                                 "terrorism": False,
                                 "organisedCrime": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }

            # The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            # Previous framework and declaration have not been fetched
            data_api_client.get_framework.assert_called_once_with('g-cloud-9')
            assert data_api_client.get_supplier_declaration.called is False

            # Radio buttons have been filled with the current answers; not those from previous declaration
            assert len(doc.xpath('//input[@id="input-conspiracy-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-corruptionBribery-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-fraudAndTheft-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-terrorism-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1

            # No blue banner message is shown at top of page
            assert len(doc.xpath('//div[@class="banner-information-without-action"]')) == 0

            # No blue information messages are shown next to each question
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 0
    def test_answers_not_prefilled_if_section_marked_as_prefill_false(self, data_api_client):
        """Sections whose declaration manifest sets "prefill: False" never copy answers forward.

        Even though a pre-fill source framework is configured, it must not be fetched
        and all radio buttons must render unselected.
        """
        with self.app.test_client():
            self.login()

            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]

            # Current framework application information
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started"},
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )

            # The previous declaration to prefill from - has relevant answers but should not ever be called
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "readUnderstoodGuidance": True,
                                 "understandTool": True,
                                 "understandHowToAskQuestions": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }

            # The how-you-apply section has "prefill: False" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/how-you-apply')
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            # Previous framework and declaration have not been fetched
            data_api_client.get_framework.assert_called_once_with('g-cloud-9')
            assert data_api_client.get_supplier_declaration.called is False

            # Radio buttons exist on page but have not been populated at all
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-2"]/@checked')) == 0

            assert len(doc.xpath('//input[@id="input-understandTool-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandTool-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandTool-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandTool-2"]/@checked')) == 0

            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-2"]/@checked')) == 0

            # No blue banner message is shown at top of page
            assert len(doc.xpath('//div[@class="banner-information-without-action"]')) == 0

            # No blue information messages are shown next to each question
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 0
def test_post_valid_data(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started"}
)
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 302
assert data_api_client.set_supplier_declaration.called is True
def test_post_valid_data_to_complete_declaration(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration=FULL_G7_SUBMISSION
)
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/grounds-for-discretionary-exclusion',
data=FULL_G7_SUBMISSION)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/declaration'
assert data_api_client.set_supplier_declaration.called is True
assert data_api_client.set_supplier_declaration.call_args[0][2]['status'] == 'complete'
def test_post_valid_data_with_api_failure(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started"}
)
data_api_client.set_supplier_declaration.side_effect = APIError(mock.Mock(status_code=400))
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 400
@mock.patch('app.main.helpers.validation.G7Validator.get_error_messages_for_page')
def test_post_with_validation_errors(self, get_error_messages_for_page, data_api_client):
"""Test that answers are not saved if there are errors
For unit tests of the validation see :mod:`tests.app.main.helpers.test_frameworks`
"""
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
get_error_messages_for_page.return_value = {'PR1': {'input_name': 'PR1', 'message': 'this is invalid'}}
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 400
assert data_api_client.set_supplier_declaration.called is False
doc = html.fromstring(res.get_data(as_text=True))
elems = doc.cssselect('#input-PR1-1')
assert elems[0].value == 'True'
def test_post_invalidating_previously_valid_page(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(slug='g-cloud-9', status='open')
mock_supplier_framework = self.supplier_framework(
framework_slug="g-cloud-9",
declaration={
"status": "started",
"establishedInTheUK": False,
"appropriateTradeRegisters": True,
"appropriateTradeRegistersNumber": "242#353",
"licenceOrMemberRequired": "licensed",
"licenceOrMemberRequiredDetails": "Foo Bar",
},
)
data_api_client.get_supplier_framework_info.return_value = mock_supplier_framework
data_api_client.get_supplier_declaration.return_value = {
"declaration": mock_supplier_framework["frameworkInterest"]["declaration"],
}
res = self.client.post(
'/suppliers/frameworks/g-cloud-9/declaration/edit/established-outside-the-uk',
data={
"establishedInTheUK": "False",
"appropriateTradeRegisters": "True",
"appropriateTradeRegistersNumber": "242#353",
"licenceOrMemberRequired": "licensed",
# deliberately missing:
"licenceOrMemberRequiredDetails": "",
},
)
assert res.status_code == 400
assert data_api_client.set_supplier_declaration.called is False
def test_cannot_post_data_if_not_open(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = {
'frameworks': {'status': 'pending'}
}
data_api_client.get_supplier_declaration.return_value = {
"declaration": {"status": "started"}
}
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 404
assert data_api_client.set_supplier_declaration.called is False
@mock.patch('app.main.views.frameworks.data_api_client')
@mock.patch('dmutils.s3.S3')
class TestFrameworkUpdatesPage(BaseApplicationTest):
    """Tests for the framework updates page (communications and clarification Q&A documents).

    Note the decorators are applied bottom-up, so test methods receive ``(s3, data_api_client)``.
    """

    def _assert_page_title_and_table_headings(self, doc, tables_exist=True):
        # Shared assertion helper: page h1 plus the two document-table section headings
        # (and, when files are listed, the matching table captions).
        assert self.strip_all_whitespace('G-Cloud 7 updates') in self.strip_all_whitespace(doc.xpath('//h1')[0].text)

        section_names = [
            'Communications',
            'Clarification questions and answers',
        ]

        headers = doc.xpath('//div[contains(@class, "updates-document-tables")]/h2[@class="summary-item-heading"]')
        assert len(headers) == 2
        for index, section_name in enumerate(section_names):
            assert self.strip_all_whitespace(section_name) in self.strip_all_whitespace(headers[index].text)

        if tables_exist:
            table_captions = doc.xpath('//div[contains(@class, "updates-document-tables")]/table/caption')
            assert len(table_captions) == 2
            for index, section_name in enumerate(section_names):
                assert self.strip_all_whitespace(section_name) in self.strip_all_whitespace(table_captions[index].text)

    def test_should_be_a_503_if_connecting_to_amazon_fails(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')
        # if s3 throws a 500-level error
        s3.side_effect = S3ResponseError(500, 'Amazon has collapsed. The internet is over.')

        with self.app.test_client():
            self.login()

            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )

            assert response.status_code == 503
            assert self.strip_all_whitespace(u"<h1>Sorry, we’re experiencing technical difficulties</h1>") in \
                self.strip_all_whitespace(response.get_data(as_text=True))

    def test_empty_messages_exist_if_no_files_returned(self, s3, data_api_client):
        # The mocked S3 bucket lists no files, so both sections show their empty-state copy.
        data_api_client.get_framework.return_value = self.framework('open')

        with self.app.test_client():
            self.login()

            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )

            assert response.status_code == 200
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc, tables_exist=False)

            for empty_message in [
                '<p class="summary-item-no-content">No communications have been sent out.</p>',
                '<p class="summary-item-no-content">No clarification questions and answers have been posted yet.</p>',
            ]:
                assert self.strip_all_whitespace(empty_message) in \
                    self.strip_all_whitespace(response.get_data(as_text=True))

    def test_dates_for_open_framework_closed_for_questions(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', clarification_questions_open=False)

        with self.app.test_client():
            self.login()

            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)

            assert response.status_code == 200
            assert 'All clarification questions and answers will be published by 5pm BST, 29 September 2015.' in data
            assert "The deadline for clarification questions is" not in data

    def test_dates_for_open_framework_open_for_questions(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', clarification_questions_open=True)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

        with self.app.test_client():
            self.login()

            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)

            assert response.status_code == 200
            assert "All clarification questions and answers will be published by" not in data
            assert 'The deadline for clarification questions is 5pm BST, 22 September 2015.' in data

    def test_the_tables_should_be_displayed_correctly(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')

        files = [
            ('updates/communications/', 'file 1', 'odt'),
            ('updates/communications/', 'file 2', 'odt'),
            ('updates/clarifications/', 'file 3', 'odt'),
            ('updates/clarifications/', 'file 4', 'odt'),
        ]

        # the communications table is always before the clarifications table
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                "g-cloud-7/communications/{}".format(section), filename, ext
            ) for section, filename, ext in files
        ]

        with self.app.test_client():
            self.login()

            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc)

            tables = doc.xpath('//div[contains(@class, "updates-document-tables")]/table')

            # test that for each table, we have the right number of rows
            for table in tables:
                item_rows = table.findall('.//tr[@class="summary-item-row"]')
                assert len(item_rows) == 2

                # test that the file names and urls are right
                for row in item_rows:
                    # files.pop(0) relies on rows rendering in the same order as `files`
                    section, filename, ext = files.pop(0)
                    filename_link = row.find('.//a[@class="document-link-with-icon"]')

                    assert filename in filename_link.text_content()
                    assert filename_link.get('href') == '/suppliers/frameworks/g-cloud-7/files/{}{}.{}'.format(
                        section,
                        filename.replace(' ', '%20'),
                        ext,
                    )

    def test_names_with_the_section_name_in_them_will_display_correctly(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')

        # for example: 'g-cloud-7-updates/clarifications/communications%20file.odf'
        files = [
            ('updates/communications/', 'clarifications file', 'odt'),
            ('updates/clarifications/', 'communications file', 'odt')
        ]

        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                "g-cloud-7/communications/{}".format(section), filename, ext
            ) for section, filename, ext in files
        ]

        with self.app.test_client():
            self.login()

            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc)

            tables = doc.xpath('//div[contains(@class, "updates-document-tables")]/table')

            # test that for each table, we have the right number of rows
            for table in tables:
                item_rows = table.findall('.//tr[@class="summary-item-row"]')
                assert len(item_rows) == 1

                # test that the file names and urls are right
                for row in item_rows:
                    section, filename, ext = files.pop(0)
                    filename_link = row.find('.//a[@class="document-link-with-icon"]')

                    assert filename in filename_link.text_content()
                    assert filename_link.get('href') == '/suppliers/frameworks/g-cloud-7/files/{}{}.{}'.format(
                        section,
                        filename.replace(' ', '%20'),
                        ext,
                    )

    def test_question_box_is_shown_if_countersigned_agreement_is_not_yet_returned(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('live', clarification_questions_open=False)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

        with self.app.test_client():
            self.login()

            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)

            assert response.status_code == 200
            assert u'Ask a question about your G-Cloud 7 application' in data

    def test_no_question_box_shown_if_countersigned_agreement_is_returned(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('live', clarification_questions_open=False)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(countersigned_path="path")

        with self.app.test_client():
            self.login()

            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)

            assert response.status_code == 200
            assert u'Ask a question about your G-Cloud 7 application' not in data
class TestSendClarificationQuestionEmail(BaseApplicationTest):
    """Tests for POSTing a question from the framework updates page.

    While clarification questions are open the question is sent as a
    "clarification question" (two emails: one to the Digital Marketplace team
    and a confirmation back to the supplier). Once clarification questions are
    closed it is sent as an "application question" (one email, no confirmation).
    """

    def _send_email(self, clarification_question):
        # POST the given question text to the updates page as a logged-in supplier.
        with self.app.test_client():
            self.login()

            return self.client.post(
                "/suppliers/frameworks/g-cloud-7/updates",
                data={
                    'clarification_question': clarification_question,
                }
            )

    def _assert_clarification_email(self, send_email, is_called=True, succeeds=True):
        # succeeds implies is_called: a successful clarification question sends the
        # team email AND the supplier confirmation (2 calls); a failure after the
        # first send leaves exactly 1 call; a validation failure sends nothing.
        if succeeds:
            assert send_email.call_count == 2
        elif is_called:
            assert send_email.call_count == 1
        else:
            assert send_email.call_count == 0
        if is_called:
            send_email.assert_any_call(
                "digitalmarketplace@mailinator.com",
                FakeMail('Supplier ID:'),
                "MANDRILL",
                "Test Framework clarification question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Test Framework Supplier",
                ["clarification-question"],
                reply_to="suppliers+g-cloud-7@digitalmarketplace.service.gov.uk",
            )
        if succeeds:
            send_email.assert_any_call(
                "email@email.com",
                FakeMail('Thanks for sending your Test Framework clarification', 'Test Framework updates page'),
                "MANDRILL",
                "Thanks for your clarification question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Digital Marketplace Admin",
                ["clarification-question-confirm"]
            )

    def _assert_application_email(self, send_email, succeeds=True):
        # Application questions only email the team; the supplier's own address
        # is set as reply_to instead of receiving a confirmation email.
        if succeeds:
            assert send_email.call_count == 1
        else:
            assert send_email.call_count == 0
        if succeeds:
            send_email.assert_called_with(
                "digitalmarketplace@mailinator.com",
                FakeMail('Test Framework question asked'),
                "MANDRILL",
                "Test Framework application question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Test Framework Supplier",
                ["application-question"],
                reply_to="email@email.com",
            )

    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_not_send_email_if_invalid_clarification_question(self, send_email, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

        for invalid_clarification_question in [
            {
                'question': '',  # empty question
                'error_message': 'Add text if you want to ask a question.'
            }, {
                'question': '\t \n\n\n',  # whitespace-only question
                'error_message': 'Add text if you want to ask a question.'
            },
            {
                'question': ('ten__chars' * 500) + '1',  # 5000+ char question
                'error_message': 'Question cannot be longer than 5000 characters'
            }
        ]:
            response = self._send_email(invalid_clarification_question['question'])
            # No email of either kind is sent for an invalid question.
            self._assert_clarification_email(send_email, is_called=False, succeeds=False)

            assert response.status_code == 400
            assert self.strip_all_whitespace('There was a problem with your submitted question') in \
                self.strip_all_whitespace(response.get_data(as_text=True))
            assert self.strip_all_whitespace(invalid_clarification_question['error_message']) in \
                self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_call_send_email_with_correct_params(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')

        clarification_question = 'This is a clarification question.'
        response = self._send_email(clarification_question)
        self._assert_clarification_email(send_email)

        assert response.status_code == 200
        assert self.strip_all_whitespace(
            '<p class="banner-message">Your clarification question has been sent. Answers to all ' +
            'clarification questions will be published on this page.</p>'
        ) in self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_call_send_g7_email_with_correct_params(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework',
                                                                    clarification_questions_open=False)
        clarification_question = 'This is a G7 question.'
        response = self._send_email(clarification_question)
        self._assert_application_email(send_email)

        assert response.status_code == 200
        # NOTE(review): the original literal was single-quoted with an unescaped
        # apostrophe ("You'll"), which is a SyntaxError. A typographic apostrophe
        # is used here to match the page copy style seen elsewhere in this file
        # ("we’re") — confirm against the rendered template.
        assert self.strip_all_whitespace(
            '<p class="banner-message">Your question has been sent. You’ll get a reply from ' +
            'the Crown Commercial Service soon.</p>'
        ) in self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_create_audit_event(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')

        clarification_question = 'This is a clarification question'
        response = self._send_email(clarification_question)
        self._assert_clarification_email(send_email)

        assert response.status_code == 200
        data_api_client.create_audit_event.assert_called_with(
            audit_type=AuditTypes.send_clarification_question,
            user="email@email.com",
            object_type="suppliers",
            object_id=1234,
            data={"question": clarification_question, 'framework': 'g-cloud-7'})

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_create_g7_question_audit_event(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework',
                                                                    clarification_questions_open=False)
        clarification_question = 'This is a G7 question'
        response = self._send_email(clarification_question)
        self._assert_application_email(send_email)

        assert response.status_code == 200
        data_api_client.create_audit_event.assert_called_with(
            audit_type=AuditTypes.send_application_question,
            user="email@email.com",
            object_type="suppliers",
            object_id=1234,
            data={"question": clarification_question, 'framework': 'g-cloud-7'})

    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_be_a_503_if_email_fails(self, send_email, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')
        send_email.side_effect = EmailError("Arrrgh")

        clarification_question = 'This is a clarification question.'
        response = self._send_email(clarification_question)
        # The first send attempt happened but failed, so no confirmation followed.
        self._assert_clarification_email(send_email, succeeds=False)

        assert response.status_code == 503
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
@mock.patch('app.main.views.frameworks.count_unanswered_questions')
class TestG7ServicesList(BaseApplicationTest):
    """Tests for the per-framework draft-service submission lists.

    Decorators are applied bottom-up, so test methods receive
    ``(count_unanswered, data_api_client)``. ``count_unanswered`` is mocked to
    return a ``(required_unanswered, optional_unanswered)`` pair where relevant.
    """

    def test_404_when_g7_pending_and_no_complete_services(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='pending')
            data_api_client.find_draft_services.return_value = {'services': []}
            count_unanswered.return_value = 0

            response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
            assert response.status_code == 404

    def test_404_when_g7_pending_and_no_declaration(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='pending')
            data_api_client.get_supplier_declaration.return_value = {
                "declaration": {"status": "started"}
            }

            response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
            assert response.status_code == 404

    def test_no_404_when_g7_open_and_no_complete_services(self, count_unanswered, data_api_client):
        # While the framework is still open the lot page is reachable even with nothing submitted.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.find_draft_services.return_value = {'services': []}
            count_unanswered.return_value = 0

            response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
            assert response.status_code == 200

    def test_no_404_when_g7_open_and_no_declaration(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_declaration.return_value = {
                "declaration": {"status": "started"}
            }

            response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
            assert response.status_code == 200

    def test_shows_g7_message_if_pending_and_application_made(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='pending')
            data_api_client.get_supplier_declaration.return_value = {'declaration': FULL_G7_SUBMISSION}  # noqa
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
                ]
            }
            count_unanswered.return_value = 0, 1

            response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
            doc = html.fromstring(response.get_data(as_text=True))

            assert response.status_code == 200
            heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
            assert len(heading) > 0
            assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
            assert u"You made your supplier declaration and submitted 1 complete service." in \
                heading[0].xpath('../p[1]/text()')[0]
            # No "make your declaration" banner when the declaration was completed
            assert not doc.xpath(
                "//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
                t="declaration before any services can be submitted",
            )

    def test_drafts_list_progress_count(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            # 3 required + 1 optional unanswered questions
            count_unanswered.return_value = 3, 1

            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
            lot_page = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')

            assert u'Service can be moved to complete' not in lot_page.get_data(as_text=True)
            assert u'4 unanswered questions' in lot_page.get_data(as_text=True)

            assert u'1 draft service' in submissions.get_data(as_text=True)
            assert u'complete service' not in submissions.get_data(as_text=True)

    def test_drafts_list_can_be_completed(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()
            # All required questions answered; only 1 optional question outstanding
            count_unanswered.return_value = 0, 1

            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
                ]
            }

            res = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')

            assert u'Service can be marked as complete' in res.get_data(as_text=True)
            assert u'1 optional question unanswered' in res.get_data(as_text=True)

    @pytest.mark.parametrize("incomplete_declaration", ({}, {"status": "started"},))
    def test_drafts_list_completed(self, count_unanswered, data_api_client, incomplete_declaration):
        # With a submitted service but an incomplete declaration, a warning banner must
        # link to the declaration (resume if started, otherwise start).
        with self.app.test_client():
            self.login()
            count_unanswered.return_value = 0, 1

            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': incomplete_declaration,
            }
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
            lot_page = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
            submissions_doc, lot_page_doc = (html.fromstring(r.get_data(as_text=True)) for r in (submissions, lot_page))

            assert u'Service can be moved to complete' not in lot_page.get_data(as_text=True)
            assert u'1 optional question unanswered' in lot_page.get_data(as_text=True)

            assert u'1 service marked as complete' in submissions.get_data(as_text=True)
            assert u'draft service' not in submissions.get_data(as_text=True)

            for doc in (submissions_doc, lot_page_doc,):
                assert doc.xpath(
                    "//*[@class='banner-warning-without-action'][normalize-space(string())=$t][.//a[@href=$u]]",
                    t=u"You need to make the supplier\u00a0declaration before any services can be submitted",
                    u=(
                        "/suppliers/frameworks/g-cloud-7/declaration"
                        if incomplete_declaration.get("status") == "started" else
                        "/suppliers/frameworks/g-cloud-7/declaration/start"
                    ),
                )

    def test_drafts_list_completed_with_declaration_status(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': {
                    'status': 'complete'
                }
            }
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
            doc = html.fromstring(submissions.get_data(as_text=True))

            assert u'1 service will be submitted' in submissions.get_data(as_text=True)
            assert u'1 complete service was submitted' not in submissions.get_data(as_text=True)
            assert u'browse-list-item-status-happy' in submissions.get_data(as_text=True)

            assert not doc.xpath(
                "//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
                t="declaration before any services can be submitted",
            )

    def test_drafts_list_services_were_submitted(self, count_unanswered, data_api_client):
        # After the framework closes (standstill) only submitted services count.
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': {
                    'status': 'complete'
                }
            }
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
                    {'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')

            assert u'1 complete service was submitted' in submissions.get_data(as_text=True)

    def test_dos_drafts_list_with_open_framework(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(slug='digital-outcomes-and-specialists',
                                                                        status='open')
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': {
                    'status': 'complete'
                }
            }
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/digital-outcomes-and-specialists/submissions')
            doc = html.fromstring(submissions.get_data(as_text=True))

            assert u'This will be submitted' in submissions.get_data(as_text=True)
            assert u'browse-list-item-status-happy' in submissions.get_data(as_text=True)
            assert u'Apply to provide' in submissions.get_data(as_text=True)

            assert not doc.xpath(
                "//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
                t="declaration before any services can be submitted",
            )

    def test_dos_drafts_list_with_closed_framework(self, count_unanswered, data_api_client):
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(slug="digital-outcomes-and-specialists",
                                                                        status='pending')
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': {
                    'status': 'complete'
                }
            }
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'not-submitted'},
                    {'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'submitted'},
                ]
            }

            submissions = self.client.get('/suppliers/frameworks/digital-outcomes-and-specialists/submissions')

            assert submissions.status_code == 200
            assert u'Submitted' in submissions.get_data(as_text=True)
            assert u'Apply to provide' not in submissions.get_data(as_text=True)
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestCreateFrameworkAgreement(BaseApplicationTest):
    """Tests for POST /suppliers/frameworks/<slug>/create-agreement."""

    def test_creates_framework_agreement_and_redirects_to_signer_details_page(self, data_api_client):
        """A new agreement is created via the API and the user is sent to signer-details."""
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(
                slug='g-cloud-8', status='standstill', framework_agreement_version="1.0"
            )
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=True)
            data_api_client.create_framework_agreement.return_value = {"agreement": {"id": 789}}

            response = self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement")

            # Agreement is created for the logged-in supplier (id 1234) ...
            data_api_client.create_framework_agreement.assert_called_once_with(1234, 'g-cloud-8', 'email@email.com')
            # ... and the new agreement id (789) appears in the redirect target.
            assert response.status_code == 302
            assert response.location == 'http://localhost/suppliers/frameworks/g-cloud-8/789/signer-details'

    def test_404_if_supplier_not_on_framework(self, data_api_client):
        """Suppliers not on the framework cannot create an agreement."""
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=False)

            assert self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement").status_code == 404

    def test_404_if_framework_in_wrong_state(self, data_api_client):
        """Agreements can only be created in signable lifecycle states."""
        with self.app.test_client():
            self.login()

            # Suppliers can only sign agreements in 'standstill' and 'live' lifecycle statuses
            for status in ('coming', 'open', 'pending', 'expired'):
                data_api_client.get_framework.return_value = self.framework(status=status)
                data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=True)

                assert self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement").status_code == 404
@mock.patch("app.main.views.frameworks.data_api_client", autospec=True)
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestSignerDetailsPage(BaseApplicationTest):
    """Tests for /suppliers/frameworks/g-cloud-8/<agreement_id>/signer-details.

    GET renders the form asking for the details of the person signing the
    framework agreement; POST validates and saves them on the agreement and
    redirects on towards signature upload / contract review.
    """

    def test_signer_details_shows_company_name(self, return_supplier_framework, data_api_client):
        """Heading includes the organisation name (non-ASCII to exercise unicode handling)."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        supplier_framework['declaration']['nameOfOrganisation'] = u'£unicodename'
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        page = res.get_data(as_text=True)
        assert res.status_code == 200
        assert u'Details of the person who is signing on behalf of £unicodename' in page

    def test_signer_details_shows_existing_signer_details(self, return_supplier_framework, data_api_client):
        """Previously saved signer name and role are shown on the form."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={
                "signerName": "Sid James",
                "signerRole": "Ex funny man"
            }
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        page = res.get_data(as_text=True)
        assert res.status_code == 200
        assert "Sid James" in page
        assert "Ex funny man" in page

    def test_404_if_framework_in_wrong_state(self, return_supplier_framework, data_api_client):
        """Page is unavailable outside signable framework lifecycle states."""
        self.login()
        # Suppliers can only sign agreements in 'standstill' and 'live' lifecycle statuses
        data_api_client.get_framework.return_value = self.framework(status='pending')
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        assert res.status_code == 404

    @mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
    def test_we_abort_if_agreement_does_not_match_supplier_framework(
        self, check_agreement_is_related_to_supplier_framework_or_abort, return_supplier_framework, data_api_client
    ):
        """The ownership guard is called with the agreement and the supplier framework."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        # This call will abort because supplier_framework has mismatched supplier_id 1234
        check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
            self.framework_agreement(supplier_id=2345)['agreement'],
            supplier_framework
        )

    def test_should_be_an_error_if_no_full_name(self, return_supplier_framework, data_api_client):
        """Missing signerName is a validation error (400)."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerRole': "The Boss"
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide the full name of the person signing on behalf of the company" in page

    def test_should_be_an_error_if_no_role(self, return_supplier_framework, data_api_client):
        """Missing signerRole is a validation error (400)."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "Josh Moss"
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide the role of the person signing on behalf of the company" in page

    def test_should_be_an_error_if_signer_details_fields_more_than_255_characters(
        self, return_supplier_framework, data_api_client
    ):
        """255 characters are accepted for name/role; 256 are rejected with per-field errors."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        # 255 characters should be fine
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "J" * 255,
                'signerRole': "J" * 255
            }
        )
        assert res.status_code == 302
        # 256 characters should be an error
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "J" * 256,
                'signerRole': "J" * 256
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide a name under 256 characters" in page
        assert "You must provide a role under 256 characters" in page

    def test_should_strip_whitespace_on_signer_details_fields(self, return_supplier_framework, data_api_client):
        """Leading/trailing whitespace is stripped before the details are saved."""
        signer_details = {
            'signerName': "  Josh Moss  ",
            'signerRole': "  The Boss  "
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        self.login()
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data=signer_details
        )
        assert res.status_code == 302
        data_api_client.update_framework_agreement.assert_called_with(
            234,
            {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
            'email@email.com'
        )

    def test_provide_signer_details_form_with_valid_input_redirects_to_upload_page(
        self, return_supplier_framework, data_api_client
    ):
        """Valid details are saved and the user is redirected to signature upload."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss"
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/signature-upload" in res.location
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
                'email@email.com'
            )

    def test_provide_signer_details_form_with_valid_input_redirects_to_contract_review_page_if_file_already_uploaded(
        self, return_supplier_framework, data_api_client
    ):
        """If a signature page is already uploaded, skip straight to contract review."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss",
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={'signerName': 'existing name', 'signerRole': 'existing role'},
            signed_agreement_path='existing/path.pdf'
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            with self.client.session_transaction() as sess:
                # An already uploaded file will also have set a filename in the session
                sess['signature_page'] = 'test.pdf'
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/contract-review" in res.location
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
                'email@email.com'
            )

    def test_signer_details_form_redirects_to_signature_upload_page_if_file_in_session_but_no_signed_agreement_path(
        self, return_supplier_framework, data_api_client
    ):
        """A stale session filename without a signedAgreementPath still requires an upload."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss",
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={'signerName': 'existing name', 'signerRole': 'existing role'}
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            with self.client.session_transaction() as sess:
                # We set a file name that could be from a previous framework agreement signing attempt but this
                # agreement does not have a signedAgreementPath
                sess['signature_page'] = 'test.pdf'
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/signature-upload" in res.location
@mock.patch("app.main.views.frameworks.data_api_client", autospec=True)
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestSignatureUploadPage(BaseApplicationTest):
    """Tests for /suppliers/frameworks/g-cloud-8/<agreement_id>/signature-upload.

    GET shows any previously uploaded signature page; POST validates and
    uploads the file to S3, records the path on the agreement, and redirects
    to contract review.
    """

    @mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_we_abort_if_agreement_does_not_match_supplier_framework(
        self,
        generate_timestamped_document_upload_path,
        s3,
        check_agreement_is_related_to_supplier_framework_or_abort,
        return_supplier_framework,
        data_api_client
    ):
        """The ownership guard is called with the agreement and the supplier framework."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        s3.return_value.get_key.return_value = None
        self.client.get("/suppliers/frameworks/g-cloud-8/234/signature-upload")
        # This call will abort because supplier_framework has mismatched supplier_id 1234
        check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
            self.framework_agreement(supplier_id=2345)['agreement'],
            supplier_framework
        )

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_signature_page(
        self, generate_timestamped_document_upload_path, s3, return_supplier_framework, data_api_client
    ):
        """Happy path: file saved to S3, agreement updated, filename stored in
        session, redirect to contract review."""
        with self.client:
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            generate_timestamped_document_upload_path.return_value = 'my/path.jpg'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.jpg'),
                }
            )
            generate_timestamped_document_upload_path.assert_called_once_with(
                'g-cloud-8',
                1234,
                'agreements',
                'signed-framework-agreement.jpg'
            )
            # Download filename uses the (slugified) supplier name and id.
            s3.return_value.save.assert_called_with(
                'my/path.jpg',
                mock.ANY,
                download_filename='Supplier_Nme-1234-signed-signature-page.jpg',
                acl='private',
                disposition_type='inline'
            )
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {"signedAgreementPath": 'my/path.jpg'},
                'email@email.com'
            )
            # `with self.client` keeps the request context alive so `session`
            # is readable after the request completes.
            assert session['signature_page'] == 'test.jpg'
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/234/contract-review'

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_empty')
    def test_signature_upload_returns_400_if_file_is_empty(
        self, file_is_empty, s3, return_supplier_framework, data_api_client
    ):
        """Empty files are rejected with a validation message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_empty.return_value = True
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b''), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert 'The file must not be empty' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_image')
    def test_signature_upload_returns_400_if_file_is_not_image_or_pdf(
        self, file_is_image, s3, return_supplier_framework, data_api_client
    ):
        """Non-PDF/JPG/PNG files are rejected with a validation message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_image.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.txt'),
                }
            )
            assert res.status_code == 400
            assert 'The file must be a PDF, JPG or PNG' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_less_than_5mb')
    def test_signature_upload_returns_400_if_file_is_larger_than_5mb(
        self, file_is_less_than_5mb, s3, return_supplier_framework, data_api_client
    ):
        """Files over 5MB are rejected with a validation message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_less_than_5mb.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.jpg'),
                }
            )
            assert res.status_code == 400
            assert 'The file must be less than 5MB' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_displays_uploaded_filename_and_timestamp(
        self, s3, return_supplier_framework, data_api_client
    ):
        """With a filename in session, both the filename and the S3 timestamp are shown."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                with self.client.session_transaction() as sess:
                    sess['signature_page'] = 'test.pdf'
                res = c.get(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload'
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 200
                # UTC timestamp is rendered in UK local time (BST here, +1 hour).
                assert "test.pdf, uploaded Sunday 10 July 2016 at 22:18" in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_displays_file_upload_timestamp_if_no_filename_in_session(
        self, s3, return_supplier_framework, data_api_client
    ):
        """Without a session filename, only the upload timestamp is shown."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                res = c.get(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload'
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 200
                assert "Uploaded Sunday 10 July 2016 at 22:18" in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_allows_continuation_without_file_chosen_to_be_uploaded_if_an_uploaded_file_already_exists(
        self, s3, return_supplier_framework, data_api_client
    ):
        """POSTing an empty file field is fine when a file was already uploaded."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                res = c.post(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                    data={
                        'signature_page': (StringIO(b''), ''),
                    }
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 302
                assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/234/contract-review'
@mock.patch("app.main.views.frameworks.data_api_client")
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestContractReviewPage(BaseApplicationTest):
@mock.patch('dmutils.s3.S3')
def test_contract_review_page_loads_with_correct_supplier_and_signer_details_and_filename(
self, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = get_g_cloud_8()
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email@email.com",
"nameOfOrganisation": u"£unicodename"
},
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
with self.client.session_transaction() as sess:
sess['signature_page'] = 'test.pdf'
res = self.client.get(
"/suppliers/frameworks/g-cloud-8/234/contract-review"
)
assert res.status_code == 200
s3.return_value.get_key.assert_called_with('I/have/returned/my/agreement.pdf')
page = res.get_data(as_text=True)
page_without_whitespace = self.strip_all_whitespace(page)
assert u'Check the details you’ve given before returning the signature page for £unicodename' in page
assert '<tdclass="summary-item-field"><span><p>signer_name</p><p>signer_role</p></span></td>' \
in page_without_whitespace
assert u"I have the authority to return this agreement on behalf of £unicodename" in page
assert "Returning the signature page will notify the Crown Commercial Service and the primary contact you "
"gave in your G-Cloud 8 application, contact name at email@email.com." in page
assert '<tdclass="summary-item-field-first"><span>test.pdf</span></td>' in page_without_whitespace
    @mock.patch('dmutils.s3.S3')
    def test_contract_review_page_loads_with_uploaded_time_of_file_if_no_filename_in_session(
        self, s3, return_supplier_framework, data_api_client
    ):
        """Without a session filename, the upload row falls back to the S3 timestamp."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.get(
                "/suppliers/frameworks/g-cloud-8/234/contract-review"
            )
            assert res.status_code == 200
            page = res.get_data(as_text=True)
            assert u'Check the details you’ve given before returning the signature page for £unicodename' in page
            # UTC timestamp is rendered in UK local time (BST here, +1 hour).
            assert '<tdclass="summary-item-field-first"><span>UploadedSunday10July2016at22:18</span></td>' in self.strip_all_whitespace(page)  # noqa
    @mock.patch('dmutils.s3.S3')
    def test_contract_review_page_aborts_if_visited_when_information_required_to_return_agreement_does_not_exist(
        self, s3, return_supplier_framework, data_api_client
    ):
        """404 when there is no uploaded signature page to review yet."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"}
                # no signed_agreement_path: no file has been uploaded ...
            )
            # ... so S3 reports no key either
            s3.return_value.get_key.return_value = None
            res = self.client.get(
                "/suppliers/frameworks/g-cloud-8/234/contract-review"
            )
            assert res.status_code == 404
@mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
@mock.patch('dmutils.s3.S3')
def test_we_abort_if_agreement_does_not_match_supplier_framework(
self, s3, check_agreement_is_related_to_supplier_framework_or_abort, return_supplier_framework, data_api_client
):
self.login()
data_api_client.get_framework.return_value = get_g_cloud_8()
data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
return_supplier_framework.return_value = supplier_framework
s3.return_value.get_key.return_value = None
self.client.get("/suppliers/frameworks/g-cloud-8/234/contract-review")
# This call will abort because supplier_framework has mismatched supplier_id 1234
check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
self.framework_agreement(supplier_id=2345)['agreement'],
supplier_framework
)
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_return_400_response_and_no_email_sent_if_authorisation_not_checked(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """POSTing without ticking the authorisation box returns 400 and sends no email."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={}
            )
            assert res.status_code == 400
            page = res.get_data(as_text=True)
            assert send_email.called is False
            assert "You must confirm you have the authority to return the agreement" in page
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_valid_framework_agreement_returned_updates_api_and_sends_confirmation_emails_and_unsets_session(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """Signing the agreement calls the API, emails both contacts, and clears the session."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email2@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client.session_transaction() as sess:
                sess['signature_page'] = 'test.pdf'
            self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            data_api_client.sign_framework_agreement.assert_called_once_with(
                234,
                'email@email.com',
                {'uploaderUserId': 123}
            )
            # Declaration primaryContactEmail and current_user.email_address are different so expect two recipients
            send_email.assert_called_once_with(
                ['email2@email.com', 'email@email.com'],
                mock.ANY,
                'MANDRILL',
                'Your G-Cloud 8 signature page has been received',
                'do-not-reply@digitalmarketplace.service.gov.uk',
                'Digital Marketplace Admin',
                ['g-cloud-8-framework-agreement']
            )
            # Check 'signature_page' has been removed from session
            with self.client.session_transaction() as sess:
                assert 'signature_page' not in sess
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_valid_framework_agreement_returned_sends_only_one_confirmation_email_if_contact_email_addresses_are_equal(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """When the declaration contact equals the logged-in user, recipients are deduplicated."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            # Only one recipient because both addresses are 'email@email.com'.
            send_email.assert_called_once_with(
                ['email@email.com'],
                mock.ANY,
                'MANDRILL',
                'Your G-Cloud 8 signature page has been received',
                'do-not-reply@digitalmarketplace.service.gov.uk',
                'Digital Marketplace Admin',
                ['g-cloud-8-framework-agreement']
            )
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_return_503_response_if_mandrill_exception_raised_by_send_email(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = get_g_cloud_8()
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email@email.com",
"nameOfOrganisation": u"£unicodename"
},
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
send_email.side_effect = EmailError()
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 503
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_email_not_sent_if_api_call_fails(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """If sign_framework_agreement errors (500), no confirmation email is sent."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.sign_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            # The API was attempted, failed with 500, and the email step was never reached.
            assert data_api_client.sign_framework_agreement.called is True
            assert res.status_code == 500
            assert send_email.called is False
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_having_signed_contract_variation_redirects_to_framework_dashboard(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
framework = get_g_cloud_8()
framework['variations'] = {
"1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email2@email.com",
"nameOfOrganisation": u"£unicodename"
},
agreed_variations={
'1': {
"agreedUserId": 2,
"agreedAt": "2016-06-06T00:00:00.000000Z",
}
}
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_with_feature_flag_off_redirects_to_framework_dashboard(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
self.app.config['FEATURE_FLAGS_CONTRACT_VARIATION'] = False
framework = get_g_cloud_8()
framework['frameworks']['variations'] = {
"1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email@email.com",
"nameOfOrganisation": u"£unicodename"
},
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_having_not_signed_contract_variation_redirects_to_variation(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
framework = get_g_cloud_8()
framework['frameworks']['variations'] = {
"1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email2@email.com",
"nameOfOrganisation": u"£unicodename"
},
agreed_variations={}
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/contract-variation/1'
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_for_framework_with_no_variations_redirects_to_framework_dashboard(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
framework = get_g_cloud_8()
framework['variations'] = {}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email@email.com",
"nameOfOrganisation": u"£unicodename"
},
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestContractVariation(BaseApplicationTest):
    """Tests for the /suppliers/frameworks/<slug>/contract-variation/<num> view."""

    def setup_method(self, method):
        super(TestContractVariation, self).setup_method(method)

        # Baseline fixtures: a supplier on the framework with a returned
        # agreement, and a live G-Cloud 8 framework carrying one variation.
        self.good_supplier_framework = self.supplier_framework(
            declaration={'nameOfOrganisation': 'A.N. Supplier',
                         'primaryContactEmail': 'bigboss@email.com'},
            on_framework=True,
            agreement_returned=True,
            agreement_details={}
        )
        self.g8_framework = self.framework(
            name='G-Cloud 8',
            slug='g-cloud-8',
            status='live',
            framework_agreement_version='3.1'
        )
        self.g8_framework['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}

        with self.app.test_client():
            self.login()

    def test_get_page_renders_if_all_ok(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(doc.xpath('//h1[contains(text(), "G-Cloud 8: proposed contract variation")]')) == 1

    def test_supplier_must_be_on_framework(self, data_api_client):
        # NOTE: .copy() is shallow, so the nested mutation also touches
        # self.good_supplier_framework — harmless because setup_method
        # rebuilds the fixture for every test.
        supplier_not_on_framework = self.good_supplier_framework.copy()
        supplier_not_on_framework['frameworkInterest']['onFramework'] = False
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = supplier_not_on_framework

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")

        assert res.status_code == 404

    def test_variation_must_exist(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        # There is no variation number 2
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/2")

        assert res.status_code == 404

    def test_agreement_must_be_returned_already(self, data_api_client):
        agreement_not_returned = self.good_supplier_framework.copy()
        agreement_not_returned['frameworkInterest']['agreementReturned'] = False
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = agreement_not_returned

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")

        assert res.status_code == 404

    def test_shows_form_if_not_yet_agreed(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 1
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 1

    def test_shows_signer_details_and_no_form_if_already_agreed(self, data_api_client):
        # Once agreed, the page shows who agreed and when, and hides the form.
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": u"William Drăyton",
            }}
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = already_agreed

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        page_text = res.get_data(as_text=True)
        doc = html.fromstring(page_text)

        assert res.status_code == 200
        assert len(doc.xpath('//h2[contains(text(), "Contract variation status")]')) == 1
        assert u"<span>William Drăyton<br />agreed@email.com<br />Friday 19 August 2016 at 16:47</span>" in page_text
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 0
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 0

    def test_shows_updated_heading_and_countersigner_details_but_no_form_if_countersigned(self, data_api_client):
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": u"William Drăyton",
            }}
        g8_with_countersigned_variation = self.framework(
            status='live',
            name='G-Cloud 8'
        )
        g8_with_countersigned_variation['frameworks']['variations'] = {"1": {
            "createdAt": "2016-08-01T12:30:00.000000Z",
            "countersignedAt": "2016-10-01T02:00:00.000000Z",
            "countersignerName": "A.N. Other",
            "countersignerRole": "Head honcho",
        }
        }
        data_api_client.get_framework.return_value = g8_with_countersigned_variation
        data_api_client.get_supplier_framework_info.return_value = already_agreed

        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        page_text = res.get_data(as_text=True)
        doc = html.fromstring(page_text)

        assert res.status_code == 200
        assert len(doc.xpath('//h1[contains(text(), "G-Cloud 8: contract variation")]')) == 1
        assert len(doc.xpath('//h2[contains(text(), "Contract variation status")]')) == 1
        assert u"<span>A.N. Other<br />Head honcho<br />Saturday 1 October 2016</span>" in page_text
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 0
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 0

    def test_api_is_called_to_agree(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"}
                               )

        assert res.status_code == 302
        assert res.location == "http://localhost/suppliers/frameworks/g-cloud-8/contract-variation/1"
        data_api_client.agree_framework_variation.assert_called_once_with(
            1234, 'g-cloud-8', '1', 123, 'email@email.com'
        )

    @mock.patch('app.main.views.frameworks.send_email')
    def test_email_is_sent_to_correct_users(self, send_email, data_api_client):
        # Both the framework contact and the logged-in user get the email.
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                         data={"accept_changes": "Yes"}
                         )

        send_email.assert_called_once_with(
            ['bigboss@email.com', 'email@email.com'],
            mock.ANY,
            'MANDRILL',
            'G-Cloud 8: you have accepted the proposed contract variation',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-variation-accepted']
        )

    @mock.patch('app.main.views.frameworks.send_email')
    def test_only_one_email_sent_if_user_is_framework_contact(self, send_email, data_api_client):
        # The recipient list is deduplicated when the logged-in user is also
        # the framework's primary contact.
        same_email_as_current_user = self.good_supplier_framework.copy()
        same_email_as_current_user['frameworkInterest']['declaration']['primaryContactEmail'] = 'email@email.com'
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = same_email_as_current_user

        self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                         data={"accept_changes": "Yes"}
                         )

        send_email.assert_called_once_with(
            ['email@email.com'],
            mock.ANY,
            'MANDRILL',
            'G-Cloud 8: you have accepted the proposed contract variation',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-variation-accepted']
        )

    def test_success_message_is_displayed_on_success(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"},
                               follow_redirects=True
                               )
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(
            doc.xpath('//p[@class="banner-message"][contains(text(), "You have accepted the proposed changes.")]')
        ) == 1, res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.send_email')
    def test_api_is_not_called_and_no_email_sent_for_subsequent_posts(self, send_email, data_api_client):
        # Re-posting after the variation has been agreed is a no-op.
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": "William Drayton",
            }}
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = already_agreed

        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"}
                               )

        assert res.status_code == 200
        assert data_api_client.agree_framework_variation.called is False
        assert send_email.called is False

    def test_error_if_box_not_ticked(self, data_api_client):
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1", data={})
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 400
        assert len(
            doc.xpath('//span[@class="validation-message"][contains(text(), "You can only save and continue if you agree to the proposed changes")]')  # noqa
        ) == 1
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestReuseFrameworkSupplierDeclaration(BaseApplicationTest):
    """Tests for frameworks/<framework_slug>/declaration/reuse view."""

    def setup_method(self, method):
        super(TestReuseFrameworkSupplierDeclaration, self).setup_method(method)
        with self.app.test_client():
            self.login()

    def test_reusable_declaration_framework_slug_param(self, data_api_client):
        """Ensure that when using the param to specify declaration we collect the correct declaration."""
        # Modify the data client.
        t07 = '2009-12-03T01:01:01.000000Z'
        framework = {
            'x_field': 'foo',
            'allowDeclarationReuse': True,
            'applicationCloseDate': t07,
            'slug': 'g-cloud-8',
            'name': 'g-cloud-8'
        }
        data_api_client.get_framework.return_value = {'frameworks': framework}
        data_api_client.get_supplier_framework_info.return_value = {
            'frameworkInterest': {
                'declaration': {'status': 'complete'},
                'onFramework': True
            }
        }
        # Do the get.
        with self.client as cont:
            resp = cont.get(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse?reusable_declaration_framework_slug=g-cloud-8'
            )
            # Assert request arg inside context manager.
            assert request.args['reusable_declaration_framework_slug'] == 'g-cloud-8'
        # Assert response OK.
        assert resp.status_code == 200
        # Assert expected api calls.
        data_api_client.get_framework.assert_has_calls([mock.call('g-cloud-9'), mock.call('g-cloud-8')])
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'g-cloud-8')

    def test_404_when_specified_declaration_not_found(self, data_api_client):
        """Fail on a 404 if declaration is specified but not found."""
        # Modify the data client.
        framework = {}
        data_api_client.get_framework.return_value = {'frameworks': framework}
        data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
        # Do the get.
        resp = self.client.get(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse?reusable_declaration_framework_slug=g-cloud-8'
        )
        # Assert the 404
        assert resp.status_code == 404
        # Assert expected api calls.
        data_api_client.get_framework.assert_called_once_with('g-cloud-9')
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'g-cloud-8')

    def test_redirect_when_declaration_not_found(self, data_api_client):
        """Redirect if a reusable declaration is not found."""
        # Modify the data client: no past declarations to reuse.
        t09 = '2009-03-03T01:01:01.000000Z'
        frameworks = [
            {'x_field': 'foo', 'allowDeclarationReuse': True, 'applicationCloseDate': t09, 'slug': 'ben-cloud-2'},
        ]
        supplier_declarations = []
        data_api_client.find_frameworks.return_value = {'frameworks': frameworks}
        data_api_client.find_supplier_declarations.return_value = dict(
            frameworkInterest=supplier_declarations
        )
        # Do the get.
        with self.client:
            resp = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            )
            # Assert the redirect
            assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
            # Assert expected api calls.
            data_api_client.get_framework.assert_called_once_with('g-cloud-9')
            data_api_client.find_supplier_declarations.assert_called_once_with(1234)

    def test_success_reuse_g_cloud_7_for_8(self, data_api_client):
        """Test success path."""
        # Modify the data client.  G-Cloud 7 is the most recent reusable
        # framework with a complete declaration (dos closed earlier; g-cloud-6
        # disallows reuse), so it should be the one offered.
        t09 = '2009-03-03T01:01:01.000000Z'
        t10 = '2010-03-03T01:01:01.000000Z'
        t11 = '2011-03-03T01:01:01.000000Z'
        t12 = '2012-03-03T01:01:01.000000Z'
        frameworks_response = [
            {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t12,
                'slug': 'g-cloud-8',
                'name': 'G-cloud 8'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t11,
                'slug': 'g-cloud-7',
                'name': 'G-cloud 7'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t10,
                'slug': 'dos',
                'name': 'Digital'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': False,
                'applicationCloseDate': t09,
                'slug': 'g-cloud-6',
                'name': 'G-cloud 6'
            },
        ]
        framework_response = {
            'x_field': 'foo',
            'allowDeclarationReuse': True,
            'applicationCloseDate': t09,
            'slug': 'g-cloud-8',
            'name': 'G-cloud 8'
        }
        supplier_declarations_response = [
            {'x': 'foo', 'frameworkSlug': 'g-cloud-6', 'declaration': {'status': 'complete'}, 'onFramework': True},
            {'x': 'foo', 'frameworkSlug': 'g-cloud-7', 'declaration': {'status': 'complete'}, 'onFramework': True},
            {'x': 'foo', 'frameworkSlug': 'dos', 'declaration': {'status': 'complete'}, 'onFramework': True}
        ]
        data_api_client.find_frameworks.return_value = {'frameworks': frameworks_response}
        data_api_client.get_framework.return_value = {'frameworks': framework_response}
        data_api_client.find_supplier_declarations.return_value = {'frameworkInterest': supplier_declarations_response}
        # Do the get.
        resp = self.client.get(
            '/suppliers/frameworks/g-cloud-8/declaration/reuse',
        )
        # Assert the success.
        assert resp.status_code == 200
        expected = 'In March 2011, your organisation completed a declaration for G-cloud 7.'
        assert expected in str(resp.data)
        # Assert expected api calls.
        data_api_client.get_framework.assert_called_once_with('g-cloud-8')
        data_api_client.find_supplier_declarations.assert_called_once_with(1234)
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestReuseFrameworkSupplierDeclarationPost(BaseApplicationTest):
    """Tests for frameworks/<framework_slug>/declaration/reuse POST view."""

    def setup_method(self, method):
        super(TestReuseFrameworkSupplierDeclarationPost, self).setup_method(method)
        with self.app.test_client():
            self.login()

    def test_reuse_false(self, data_api_client):
        """Assert that the redirect happens and the client sets the prefill pref to None."""
        data = {'reuse': 'False', 'old_framework_slug': 'should-not-be-used'}
        with self.client:
            resp = self.client.post(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
                data=data
            )
            # Assert the redirect
            assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
            data_api_client.set_supplier_framework_prefill_declaration.assert_called_once_with(
                1234,
                'g-cloud-9',
                None,
                'email@email.com'
            )

    def test_reuse_true(self, data_api_client):
        """Assert that the redirect happens and the client sets the prefill pref to the desired framework slug."""
        data = {'reuse': True, 'old_framework_slug': 'digital-outcomes-and-specialists-2'}
        data_api_client.get_supplier_framework_info.return_value = {
            'frameworkInterest': {
                'x_field': 'foo',
                'frameworkSlug': 'digital-outcomes-and-specialists-2',
                'declaration': {'status': 'complete'},
                'onFramework': True
            }
        }
        framework_response = {'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': True}}
        data_api_client.get_framework.return_value = framework_response
        with self.client:
            resp = self.client.post(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
                data=data
            )
            # Assert the redirect
            assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
            # These api calls need to be made so that we can verify the declaration.
            data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-2')
            data_api_client.get_supplier_framework_info.assert_called_once_with(
                1234,
                'digital-outcomes-and-specialists-2'
            )
            # This end point sets the prefill preference.
            data_api_client.set_supplier_framework_prefill_declaration.assert_called_once_with(
                1234,
                'g-cloud-9',
                'digital-outcomes-and-specialists-2',
                'email@email.com'
            )

    def test_reuse_invalid_framework_post(self, data_api_client):
        """Assert 404 for non reusable framework."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists'}
        # A framework with allowDeclarationReuse as False
        data_api_client.get_framework.return_value = {
            'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': False}
        }
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should get the framework and error on allowDeclarationReuse as False.
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists')
        # Should not do the declaration call if the framework is invalid
        assert not data_api_client.get_supplier_framework_info.called
        # Should 404
        assert resp.status_code == 404

    def test_reuse_non_existent_framework_post(self, data_api_client):
        """Assert 404 for non existent framework."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists-1000000'}
        # Attach does not exist.
        data_api_client.get_framework.side_effect = HTTPError()
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should error on get.
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-1000000')
        # Should not do the declaration call if the framework is invalid.
        assert not data_api_client.get_supplier_framework_info.called
        # Should 404.
        assert resp.status_code == 404

    def test_reuse_non_existent_declaration_post(self, data_api_client):
        """Assert 404 for non existent declaration."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists-2'}
        framework_response = {'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': True}}
        data_api_client.get_framework.return_value = framework_response
        # Attach does not exist.
        data_api_client.get_supplier_framework_info.side_effect = HTTPError()
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should get the framework
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-2')
        # Should error getting declaration.
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'digital-outcomes-and-specialists-2')
        # Should 404.
        assert resp.status_code == 404
class TestReuseFrameworkSupplierDeclarationForm(BaseApplicationTest):
    """Tests for app.main.forms.frameworks.ReuseDeclarationForm form."""

    @pytest.mark.parametrize('falsey_value', ('False', '', 'false'))
    def test_false_values(self, falsey_value):
        # Each falsey string representation must coerce the boolean field to False.
        with self.app.test_request_context():
            form_data = MultiDict(
                {'framework_slug': 'digital-outcomes-and-specialists', 'reuse': falsey_value}
            )
            form = ReuseDeclarationForm(form_data)
            assert form.reuse.data is False
Update S3ResponseError constructor
Some of our tests were importing an S3ResponseError from
boto; that import has since been updated in v27 of the utils
library.
The old arguments we used to pass into an S3 error were not
being accepted anymore so I've updated them.
Our tests arguably shouldn't rely on this implementation detail,
but this was a relatively easy fix.
# -*- coding: utf-8 -*-
from collections import OrderedDict
from itertools import chain
from dmapiclient import HTTPError
from flask import request
from werkzeug.datastructures import MultiDict
from app.main.forms.frameworks import ReuseDeclarationForm
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
import mock
import pytest
from six.moves.urllib.parse import urljoin
from flask import session
from lxml import html
from dmapiclient import APIError
from dmapiclient.audit import AuditTypes
from dmutils.email.exceptions import EmailError
from dmutils.s3 import S3ResponseError
from ..helpers import BaseApplicationTest, FULL_G7_SUBMISSION, FakeMail, valid_g9_declaration_base
def _return_fake_s3_file_dict(directory, filename, ext, last_modified=None, size=None):
return {
'path': '{}{}.{}'.format(directory, filename, ext),
'filename': filename,
'ext': ext,
'last_modified': last_modified or '2015-08-17T14:00:00.000Z',
'size': size if size is not None else 1
}
def get_g_cloud_8():
    """Return a G-Cloud 8 framework fixture in the 'standstill' state."""
    fixture_kwargs = {
        'status': 'standstill',
        'name': 'G-Cloud 8',
        'slug': 'g-cloud-8',
        'framework_agreement_version': 'v1.0',
    }
    return BaseApplicationTest.framework(**fixture_kwargs)
def _assert_args_and_raise(e, *args, **kwargs):
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
raise e
return _inner
def _assert_args_and_return(retval, *args, **kwargs):
def _inner(*inner_args, **inner_kwargs):
assert args == inner_args
assert kwargs == inner_kwargs
return retval
return _inner
@pytest.fixture(params=("GET", "POST"))
def get_or_post(request):
return request.param
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworksDashboard(BaseApplicationTest):
    @staticmethod
    def _extract_guidance_links(doc):
        # Map each section heading on the page to a tuple of
        # (link text, href, time text, datetime attribute) for every link
        # paragraph in that section.  xpath string() of a missing node is "",
        # hence the `or None` normalization on each field.
        return OrderedDict(
            (
                section_li.xpath("normalize-space(string(.//h2))"),
                tuple(
                    (
                        item_li.xpath("normalize-space(string(.//a))") or None,
                        item_li.xpath("string(.//a/@href)") or None,
                        item_li.xpath("normalize-space(string(.//time))") or None,
                        item_li.xpath("string(.//time/@datetime)") or None,
                    )
                    for item_li in section_li.xpath(".//p[.//a]")
                ),
            )
            for section_li in doc.xpath("//main//*[./h2][.//p//a]")
        )
@staticmethod
def _extract_signing_details_table_rows(doc):
return tuple(
tuple(
td_th_elem.xpath("normalize-space(string())")
for td_th_elem in tr_elem.xpath("td|th")
)
for tr_elem in doc.xpath(
"//main//table[normalize-space(string(./caption))=$b]/tbody/tr",
b="Agreement details",
)
)
@property
def _boring_agreement_details(self):
# property so we always get a clean copy
return {
'frameworkAgreementVersion': 'v1.0',
'signerName': 'Martin Cunningham',
'signerRole': 'Foreman',
'uploaderUserId': 123,
'uploaderUserName': 'User',
'uploaderUserEmail': 'email@email.com',
}
_boring_agreement_returned_at = "2016-07-10T21:20:00.000000Z"
@property
def _boring_agreement_details_expected_table_results(self):
# property so we always get a clean copy
return (
(
'Person who signed',
'Martin Cunningham Foreman'
),
(
'Submitted by',
'User email@email.com Sunday 10 July 2016 at 22:20'
),
(
'Countersignature',
'Waiting for CCS to countersign'
),
)
def test_framework_dashboard_shows_for_pending_if_declaration_exists(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(
"//h1[normalize-space(string())=$b]",
b="Your G-Cloud 7 application",
)) == 1
def test_framework_dashboard_shows_for_live_if_declaration_exists(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='live')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(
"//h1[normalize-space(string())=$b]",
b="G-Cloud 7 documents",
)) == 1
def test_does_not_show_for_live_if_no_declaration(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='live')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 404
@mock.patch('app.main.views.frameworks.send_email')
def test_interest_registered_in_framework_on_post(self, send_email, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.post("/suppliers/frameworks/digital-outcomes-and-specialists")
assert res.status_code == 200
data_api_client.register_framework_interest.assert_called_once_with(
1234,
"digital-outcomes-and-specialists",
"email@email.com"
)
    @mock.patch('app.main.views.frameworks.send_email')
    def test_email_sent_when_interest_registered_in_framework(self, send_email, data_api_client, s3):
        # Registering interest emails every *active* supplier user; email3 is
        # inactive and must be excluded from the recipient list.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
            data_api_client.find_users.return_value = {'users': [
                {'emailAddress': 'email1', 'active': True},
                {'emailAddress': 'email2', 'active': True},
                {'emailAddress': 'email3', 'active': False}
            ]}
            res = self.client.post("/suppliers/frameworks/digital-outcomes-and-specialists")

            assert res.status_code == 200
            # NOTE(review): the subject says 'G-Cloud 7' while the URL and tag
            # are DOS — presumably self.framework() defaults to a G-Cloud 7
            # fixture and the view takes the name from it; confirm this
            # mismatch is intentional.
            send_email.assert_called_once_with(
                ['email1', 'email2'],
                mock.ANY,
                'MANDRILL',
                'You started a G-Cloud 7 application',
                'do-not-reply@digitalmarketplace.service.gov.uk',
                'Digital Marketplace Admin',
                ['digital-outcomes-and-specialists-application-started']
            )
def test_interest_not_registered_in_framework_on_get(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/digital-outcomes-and-specialists")
assert res.status_code == 200
assert data_api_client.register_framework_interest.called is False
def test_interest_set_but_no_declaration(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
def test_shows_gcloud_7_closed_message_if_pending_and_no_application_done(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'not-submitted'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
assert len(heading) > 0
assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
assert u"You didn't submit an application." in heading[0].xpath('../p[1]/text()')[0]
def test_shows_gcloud_7_closed_message_if_pending_and_application(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_framework_interest.return_value = {'frameworks': ['g-cloud-7']}
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
assert len(heading) > 0
assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
lede = doc.xpath('//div[@class="summary-item-lede"]')
assert u"You made your supplier declaration and submitted 1 service for consideration." in \
lede[0].xpath('./p[1]/text()')[0]
assert u"We’ll let you know the result of your application by " in \
lede[0].xpath('./p[2]/text()')[0]
def test_declaration_status_when_complete(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath(u'//p/strong[contains(text(), "You’ve made the supplier declaration")]')) == 1
def test_declaration_status_when_started(self, data_api_client, s3):
with self.app.test_client():
self.login()
submission = FULL_G7_SUBMISSION.copy()
# User has not yet submitted page 3 of the declaration
del submission['SQ2-1abcd']
del submission['SQ2-1e']
del submission['SQ2-1f']
del submission['SQ2-1ghijklmn']
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
declaration=submission, status='started')
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath('//p[contains(text(), "You need to finish making the supplier declaration")]')) == 1
def test_declaration_status_when_not_complete(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert len(doc.xpath('//p[contains(text(), "You need to make the supplier declaration")]')) == 1
    def test_downloads_shown_open_framework(self, data_api_client, s3):
        """Open framework: download links are derived from the faked S3 listing.

        Asserts the guidance/legal/communications/reporting link structure
        (label, href, visible date, datetime attribute), that irrelevant files
        are not linked, that the clarification deadline message is present and
        that no agreement-details table is rendered.
        """
        # (key prefix under communications/, base filename, extension, last-modified)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            # superfluous file that shouldn't be shown
            ('', 'g-cloud-7-supplier-pack', 'zip', '2015-01-01T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            # Section heading -> tuples of (label, href, displayed date, datetime attr).
            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        "Download the proposed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-framework-agreement.pdf",
                        "Wednesday 1 June 2016",
                        "2016-06-01T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # Files not relevant while the framework is open must not be linked.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-final-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            # Clarification questions are open, so the deadline message appears once.
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            # No agreement-details table before an agreement has been returned.
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
    def test_downloads_shown_open_framework_clarification_questions_closed(self, data_api_client, s3):
        """Open framework with clarification questions closed.

        Same download links as the open case, but the communications label
        drops the 'ask' wording and the clarification deadline message is gone.
        """
        # (key prefix under communications/, base filename, extension, last-modified)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            # superfluous file that shouldn't be shown
            ('', 'g-cloud-7-supplier-pack', 'zip', '2015-01-01T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(
                status="open",
                clarification_questions_open=False,
            )
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        "Download the proposed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-framework-agreement.pdf",
                        "Wednesday 1 June 2016",
                        "2016-06-01T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        # note: no "ask" - questions are closed
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # Files not relevant while the framework is open must not be linked.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-final-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            # Deadline message absent once clarification questions have closed.
            assert not doc.xpath(
                "//main[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
    def test_final_agreement_download_shown_open_framework(self, data_api_client, s3):
        """When a final framework agreement file exists it replaces the proposed one.

        Even though the proposed agreement file is still in the bucket (with a
        later last-modified date), only the final agreement should be linked.
        """
        # (key prefix under communications/, base filename, extension, last-modified)
        files = [
            ('updates/communications/', 'file 1', 'odt', '2015-01-01T14:00:00.000Z'),
            ('updates/clarifications/', 'file 2', 'odt', '2015-02-02T14:00:00.000Z'),
            ('', 'g-cloud-7-proposed-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
            ('', 'g-cloud-7-final-framework-agreement', 'pdf', '2016-06-02T14:00:00.000Z'),
            # present but should be overridden by final agreement file
            ('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-11T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Download the invitation to apply",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf",
                        None,
                        None,
                    ),
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        # the *final* agreement, not the proposed one
                        "Download the framework agreement",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-framework-agreement.pdf",
                        "Thursday 2 June 2016",
                        "2016-06-02T14:00:00.000Z",
                    ),
                    (
                        u"Download the proposed \u2018call-off\u2019 contract",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-proposed-call-off.pdf",
                        "Sunday 1 May 2016",
                        "2016-05-01T14:00:00.000Z",
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        "Monday 2 February 2015",
                        "2015-02-02T14:00:00.000Z",
                    ),
                )),
                ("Reporting", (
                    (
                        "Download the reporting template",
                        "/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls",
                        None,
                        None,
                    ),
                )),
            ))
            # The superseded proposed agreement (and supplier pack) must not be linked.
            assert not any(
                doc.xpath("//main//a[contains(@href, $href_part)]", href_part=href_part)
                for href_part in (
                    "g-cloud-7-proposed-framework-agreement.pdf",
                    "g-cloud-7-supplier-pack.zip",
                )
            )
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
def test_no_updates_open_framework(self, data_api_client, s3):
files = [
('', 'g-cloud-7-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
('', 'g-cloud-7-proposed-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
]
s3.return_value.list.return_value = [
_return_fake_s3_file_dict(
'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
) for section, filename, ext, last_modified in files
]
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
extracted_guidance_links = self._extract_guidance_links(doc)
assert (
"View communications and ask clarification questions",
"/suppliers/frameworks/g-cloud-7/updates",
None,
None,
) in extracted_guidance_links["Communications"]
assert len(doc.xpath(
"//main//p[contains(normalize-space(string()), $a)]",
a="until 5pm BST, 22 September 2015",
)) == 1
assert not doc.xpath(
"//main//table[normalize-space(string(./caption))=$b]",
b="Agreement details",
)
    def test_no_files_exist_open_framework(self, data_api_client, s3):
        """With an empty bucket only the non-file links (how to apply, updates) appear."""
        s3.return_value.list.return_value = []
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("Guidance", (
                    (
                        "Read about how to apply",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and ask clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        None,
                        None,
                    ),
                )),
            ))
            # None of the per-file download links should be rendered, by href or label.
            assert not any(
                doc.xpath(
                    "//a[contains(@href, $href_part) or normalize-space(string())=$label]",
                    href_part=href_part,
                    label=label,
                ) for href_part, label in (
                    (
                        "g-cloud-7-invitation.pdf",
                        "Download the invitation to apply",
                    ),
                    (
                        "g-cloud-7-proposed-framework-agreement.pdf",
                        "Download the proposed framework agreement",
                    ),
                    (
                        "g-cloud-7-call-off.pdf",
                        u"Download the proposed \u2018call-off\u2019 contract",
                    ),
                    (
                        "g-cloud-7-reporting-template.xls",
                        "Download the reporting template",
                    ),
                    (
                        "result-letter.pdf",
                        "Download your application result letter",
                    ),
                )
            )
            # Clarification deadline message still shown while the framework is open.
            assert len(doc.xpath(
                "//main//p[contains(normalize-space(string()), $a)]",
                a="until 5pm BST, 22 September 2015",
            )) == 1
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
def test_returns_404_if_framework_does_not_exist(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.side_effect = APIError(mock.Mock(status_code=404))
res = self.client.get('/suppliers/frameworks/does-not-exist')
assert res.status_code == 404
def test_result_letter_is_shown_when_is_in_standstill(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' in data
def test_result_letter_is_not_shown_when_not_in_standstill(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' not in data
def test_result_letter_is_not_shown_when_no_application(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'not-submitted'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Download your application result letter' not in data
def test_link_to_unsigned_framework_agreement_is_shown_if_supplier_is_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=True)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Sign and return your framework agreement' in data
assert u'Download your countersigned framework agreement' not in data
def test_pending_success_message_is_explicit_if_supplier_is_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=True)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
assert res.status_code == 200
data = res.get_data(as_text=True)
for success_message in [
u'Your application was successful. '
u'You must return a signed framework agreement signature page before you can '
u'sell services on the Digital Marketplace.',
u'Download your application award letter (.pdf)',
u'This letter is a record of your successful G-Cloud 7 application.'
]:
assert success_message in data
for equivocal_message in [
u'You made your supplier declaration and submitted 1 service.',
u'Download your application result letter (.pdf)',
u'This letter informs you if your G-Cloud 7 application has been successful.'
]:
assert equivocal_message not in data
def test_link_to_framework_agreement_is_not_shown_if_supplier_is_not_on_framework(self, data_api_client, s3):
with self.app.test_client():
self.login()
data_api_client.find_draft_services.return_value = {
"services": [
{'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
]
}
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
on_framework=False)
res = self.client.get("/suppliers/frameworks/g-cloud-7")
data = res.get_data(as_text=True)
assert u'Sign and return your framework agreement' not in data
    def test_pending_success_message_is_equivocal_if_supplier_is_on_framework(self, data_api_client, s3):
        # NOTE(review): despite the name, the fixture sets on_framework=False -
        # this verifies the *equivocal* messaging shown when the supplier is NOT
        # (yet) on the framework. The name presumably should end
        # "...is_not_on_framework"; renaming would change the test id, so only
        # flagging it here - confirm before renaming.
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(on_framework=False)

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            data = res.get_data(as_text=True)

            # Definite-success wording must NOT appear...
            for success_message in [
                u'Your application was successful. You\'ll be able to sell services when the G-Cloud 7 framework is live',
                u'Download your application award letter (.pdf)',
                u'This letter is a record of your successful G-Cloud 7 application.'
            ]:
                assert success_message not in data
            # ...while the equivocal wording must.
            for equivocal_message in [
                u'You made your supplier declaration and submitted 1 service.',
                u'Download your application result letter (.pdf)',
                u'This letter informs you if your G-Cloud 7 application has been successful.'
            ]:
                assert equivocal_message in data
    def test_countersigned_framework_agreement_non_fav_framework(self, data_api_client, s3):
        """Countersigned agreement on a pre-frameworkAgreementVersion framework (G7).

        Expects signed + countersigned + result-letter download links and no
        signature-page messaging (which only exists for "fav" frameworks).
        """
        # "fav" being "frameworkAgreementVersion"
        files = [
            ('', 'g-cloud-7-final-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-7-final-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-7-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-7/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='pathy/mc/path.face',
                countersigned=True,
                countersigned_path='g-cloud-7/agreements/1234/1234-countersigned-agreement.pdf',
            )

            res = self.client.get("/suppliers/frameworks/g-cloud-7")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)

            # Agreement already returned, so no sign-and-return prompt.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-7/agreement",
                label="Sign and return your framework agreement",
            )

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-7/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-7/declaration",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        'Download the standard framework agreement',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-framework-agreement.pdf',
                        None,
                        None,
                    ),
                    (
                        "Download your signed framework agreement",
                        "/suppliers/frameworks/g-cloud-7/agreements/pathy/mc/path.face",
                        None,
                        None,
                    ),
                    (
                        u"Download your countersigned framework agreement",
                        "/suppliers/frameworks/g-cloud-7/agreements/countersigned-agreement.pdf",
                        None,
                        None,
                    ),
                    (
                        'Download your application result letter',
                        '/suppliers/frameworks/g-cloud-7/agreements/result-letter.pdf',
                        None,
                        None,
                    ),
                    (
                        'Download the call-off contract template',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-final-call-off.pdf',
                        None,
                        None,
                    ),
                )),
                ("Guidance", (
                    (
                        'Download the invitation to apply',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-invitation.pdf',
                        None,
                        None,
                    ),
                    (
                        "Read about how to sell your services",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-7/updates",
                        None,
                        None,
                    ),
                )),
                ('Reporting', (
                    (
                        'Download the reporting template',
                        '/suppliers/frameworks/g-cloud-7/files/g-cloud-7-reporting-template.xls',
                        None,
                        None,
                    ),
                )),
            ))
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # neither of these should exist because it's a pre-frameworkAgreementVersion framework
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_countersigned_framework_agreement_fav_framework(self, data_api_client, s3):
        """Countersigned agreement on a frameworkAgreementVersion framework (G8).

        Expects 'original'/'counterpart' signature page links, the gov.uk
        standard-agreement link instead of a file download, no result letter,
        and the signature-pages message.
        """
        # "fav" being "frameworkAgreementVersion"
        files = [
            ('', 'g-cloud-8-final-call-off', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-8-invitation', 'pdf', '2016-05-01T14:00:00.000Z'),
            ('', 'g-cloud-8-final-framework-agreement', 'pdf', '2016-06-01T14:00:00.000Z'),
            ('', 'g-cloud-8-reporting-template', 'xls', '2016-06-06T14:00:00.000Z'),
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                'g-cloud-8/communications/{}'.format(section), filename, ext, last_modified=last_modified
            ) for section, filename, ext, last_modified in files
        ]
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='pathy/mc/path.face',
                agreement_returned_at=self._boring_agreement_returned_at,
                countersigned=True,
                countersigned_path='g-cloud-8/agreements/1234/1234-countersigned-agreement.pdf',
            )

            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)

            # Agreement already returned, so no sign-and-return prompt.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            # "fav" frameworks have no result letter link.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/suppliers/frameworks/g-cloud-7/agreements/result-letter.pdf",
                label="Download your application result letter",
            )

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ("Legal documents", (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u"Download your \u2018original\u2019 framework agreement signature page",
                        "/suppliers/frameworks/g-cloud-8/agreements/pathy/mc/path.face",
                        None,
                        None,
                    ),
                    (
                        u"Download your \u2018counterpart\u2019 framework agreement signature page",
                        "/suppliers/frameworks/g-cloud-8/agreements/countersigned-agreement.pdf",
                        None,
                        None,
                    ),
                    (
                        'Download the call-off contract template',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-final-call-off.pdf',
                        None,
                        None,
                    ),
                )),
                ("Guidance", (
                    (
                        'Download the invitation to apply',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-invitation.pdf',
                        None,
                        None,
                    ),
                    (
                        "Read about how to sell your services",
                        "https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply",
                        None,
                        None,
                    ),
                )),
                ("Communications", (
                    (
                        "View communications and clarification questions",
                        "/suppliers/frameworks/g-cloud-8/updates",
                        None,
                        None,
                    ),
                )),
                ('Reporting', (
                    (
                        'Download the reporting template',
                        '/suppliers/frameworks/g-cloud-8/files/g-cloud-8-reporting-template.xls',
                        None,
                        None,
                    ),
                )),
            ))
            assert not doc.xpath(
                "//main//table[normalize-space(string(./caption))=$b]",
                b="Agreement details",
            )
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # Countersigned, so the signature-pages message IS shown...
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            # ...and the interim "sent to CCS" message is not.
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
    def test_shows_returned_agreement_details(self, data_api_client, s3):
        """Returned (but not yet countersigned) G8 agreement shows the signing details.

        Expects the 'original' signature page link, the agreement-details table
        rows, and the "sent to the Crown Commercial Service" message.
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.find_draft_services.return_value = {
                "services": [
                    {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
                ]
            }
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True,
                agreement_returned=True,
                agreement_details=self._boring_agreement_details,
                agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
                agreement_returned_at=self._boring_agreement_returned_at,
            )

            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)

            # Already returned: no sign-and-return prompt, no result letter.
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/frameworks/g-cloud-8/agreement",
                label="Sign and return your framework agreement",
            )
            assert not doc.xpath(
                "//main//a[@href=$href or normalize-space(string())=$label]",
                href="/suppliers/frameworks/g-cloud-8/agreements/result-letter.pdf",
                label="Download your application result letter",
            )

            extracted_guidance_links = self._extract_guidance_links(doc)
            assert extracted_guidance_links == OrderedDict((
                ("You submitted:", (
                    (
                        'View submitted services',
                        '/suppliers/frameworks/g-cloud-8/submissions',
                        None,
                        None,
                    ),
                    (
                        "View your declaration",
                        "/suppliers/frameworks/g-cloud-8/declaration",
                        None,
                        None,
                    ),
                )),
                ('Legal documents', (
                    (
                        'Read the standard framework agreement',
                        'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                        None,
                        None,
                    ),
                    (
                        u'Download your \u2018original\u2019 framework agreement signature page',
                        '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                        None,
                        None,
                    ),
                )),
                ('Guidance', (
                    (
                        'Read about how to sell your services',
                        'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                        None,
                        None,
                    ),
                )),
                ('Communications', (
                    (
                        'View communications and clarification questions',
                        '/suppliers/frameworks/g-cloud-8/updates',
                        None,
                        None,
                    ),
                )),
            ))
            # The signing-details table mirrors the fixture agreement details.
            extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
            assert extracted_signing_details_table_rows == \
                self._boring_agreement_details_expected_table_results
            assert len(doc.xpath(
                "//main//h1[normalize-space(string())=$b]",
                b="Your G-Cloud 8 application",
            )) == 1
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="You can start selling your",
            )
            # Not countersigned yet: signature-pages message absent...
            assert not doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your original and counterpart signature pages",
            )
            # ...but the "sent to CCS" interim message present.
            assert doc.xpath(
                "//main//p[contains(normalize-space(string()), $b)]",
                b="Your framework agreement signature page has been sent to the Crown Commercial Service",
            )
def test_countersigned_but_no_countersigned_path(self, data_api_client, s3):
    """Dashboard treats the agreement as not countersigned when no countersigned_path is set.

    Regression-style test: the API reports ``countersigned=True`` but supplies no
    ``countersigned_path``; the view must behave as though the agreement is NOT
    countersigned, i.e. it must key off the path rather than the boolean flag.
    """
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.find_draft_services.return_value = {
            "services": [
                {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'iaas'}
            ]
        }
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            on_framework=True,
            agreement_returned=True,
            agreement_details=self._boring_agreement_details,
            agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
            agreement_returned_at=self._boring_agreement_returned_at,
            countersigned=True,
            # note `countersigned_path` is not set: we're testing that the view behaves as though not countersigned
            # i.e. is not depending on the `countersigned` property
        )
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        # the sign-and-return call-to-action must be gone once the agreement is returned
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-8/agreement",
            label="Sign and return your framework agreement",
        )
        # no countersigned agreement ("result letter") link should appear without a path
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/suppliers/frameworks/g-cloud-8/agreements/result-letter.pdf",
            label="Download your application result letter",
        )
        # full expected set of guidance links: no countersigned-document entry
        extracted_guidance_links = self._extract_guidance_links(doc)
        assert extracted_guidance_links == OrderedDict((
            ("You submitted:", (
                (
                    'View submitted services',
                    '/suppliers/frameworks/g-cloud-8/submissions',
                    None,
                    None,
                ),
                (
                    "View your declaration",
                    "/suppliers/frameworks/g-cloud-8/declaration",
                    None,
                    None,
                ),
            )),
            ('Legal documents', (
                (
                    'Read the standard framework agreement',
                    'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                    None,
                    None,
                ),
                (
                    u'Download your \u2018original\u2019 framework agreement signature page',
                    '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                    None,
                    None,
                ),
            )),
            ('Guidance', (
                (
                    'Read about how to sell your services',
                    'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                    None,
                    None,
                ),
            )),
            ('Communications', (
                (
                    'View communications and clarification questions',
                    '/suppliers/frameworks/g-cloud-8/updates',
                    None,
                    None,
                ),
            )),
        ))
        extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
        assert extracted_signing_details_table_rows == \
            self._boring_agreement_details_expected_table_results
        assert len(doc.xpath(
            "//main//h1[normalize-space(string())=$b]",
            b="Your G-Cloud 8 application",
        )) == 1
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        # countersigned-only copy must not appear...
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        # ...but the "sent to CCS" (awaiting countersignature) copy must
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
def test_shows_contract_variation_link_after_agreement_returned(self, data_api_client, s3):
    """Once the agreement is returned and the framework has a variation, link to the proposed variation."""
    with self.app.test_client():
        self.login()
        g8_with_variation = get_g_cloud_8()
        # a single un-countersigned variation, keyed "1"
        g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
        data_api_client.get_framework.return_value = g8_with_variation
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            on_framework=True,
            agreement_returned=True,
            agreement_details=self._boring_agreement_details,
            agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
            agreement_returned_at=self._boring_agreement_returned_at,
        )
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        # sign-and-return call-to-action no longer shown after the agreement is returned
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-8/agreement",
            label="Sign and return your framework agreement",
        )
        # 'Legal documents' gains a third entry: the proposed contract variation
        extracted_guidance_links = self._extract_guidance_links(doc)
        assert extracted_guidance_links == OrderedDict((
            ("You submitted:", (
                (
                    'View submitted services',
                    '/suppliers/frameworks/g-cloud-8/submissions',
                    None,
                    None,
                ),
                (
                    "View your declaration",
                    "/suppliers/frameworks/g-cloud-8/declaration",
                    None,
                    None,
                ),
            )),
            ('Legal documents', (
                (
                    'Read the standard framework agreement',
                    'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                    None,
                    None,
                ),
                (
                    u'Download your \u2018original\u2019 framework agreement signature page',
                    '/suppliers/frameworks/g-cloud-8/agreements/framework-agreement.pdf',
                    None,
                    None,
                ),
                (
                    'Read the proposed contract variation',
                    '/suppliers/frameworks/g-cloud-8/contract-variation/1',
                    None,
                    None,
                ),
            )),
            ('Guidance', (
                (
                    'Read about how to sell your services',
                    'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                    None,
                    None,
                ),
            )),
            ('Communications', (
                (
                    'View communications and clarification questions',
                    '/suppliers/frameworks/g-cloud-8/updates',
                    None,
                    None,
                ),
            )),
        ))
        extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
        assert extracted_signing_details_table_rows == \
            self._boring_agreement_details_expected_table_results
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        # countersigned-only copy absent; "sent to CCS" copy present
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
def test_does_not_show_contract_variation_link_if_feature_flagged_off(self, data_api_client, s3):
    """With FEATURE_FLAGS_CONTRACT_VARIATION off, no variation link is shown even when one exists."""
    with self.app.test_client():
        self.app.config['FEATURE_FLAGS_CONTRACT_VARIATION'] = False
        self.login()
        g8_with_variation = get_g_cloud_8()
        g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
        data_api_client.get_framework.return_value = g8_with_variation
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            on_framework=True,
            agreement_returned=True,
            agreement_details=self._boring_agreement_details,
            agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
            agreement_returned_at=self._boring_agreement_returned_at,
        )
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        # Fixed copy-paste bug: this is the g-cloud-8 dashboard, so assert against
        # the g-cloud-8 agreement href (the old "g-cloud-7" href could never appear
        # on this page, making the href half of the check vacuous; the analogous
        # agreement-returned test asserts the g-cloud-8 href).
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-8/agreement",
            label="Sign and return your framework agreement",
        )
        # variation link must be suppressed by the feature flag
        assert not doc.xpath(
            "//main//a[contains(@href, $href_part) or normalize-space(string())=$label]",
            href_part="contract-variation/1",
            label="Read the proposed contract variation",
        )
        extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
        assert extracted_signing_details_table_rows == \
            self._boring_agreement_details_expected_table_results
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
def test_does_not_show_contract_variation_link_if_no_variation(self, data_api_client, s3):
    """No variation link is shown when the framework defines no variations."""
    with self.app.test_client():
        self.login()
        # plain g-cloud-8 fixture: no 'variations' entry added
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            on_framework=True,
            agreement_returned=True,
            agreement_details=self._boring_agreement_details,
            agreement_path='g-cloud-8/agreements/123-framework-agreement.pdf',
            agreement_returned_at=self._boring_agreement_returned_at,
        )
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        # Fixed copy-paste bug: this is the g-cloud-8 dashboard, so assert against
        # the g-cloud-8 agreement href (the old "g-cloud-7" href could never appear
        # on this page, making the href half of the check vacuous; the analogous
        # agreement-returned test asserts the g-cloud-8 href).
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-8/agreement",
            label="Sign and return your framework agreement",
        )
        assert not doc.xpath(
            "//main//a[normalize-space(string())=$label]",
            label="Read the proposed contract variation",
        )
        extracted_signing_details_table_rows = self._extract_signing_details_table_rows(doc)
        assert extracted_signing_details_table_rows == \
            self._boring_agreement_details_expected_table_results
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
def test_does_not_show_contract_variation_link_if_agreement_not_returned(self, data_api_client, s3):
    """Before the agreement is returned there is no variation link nor any agreement detail copy."""
    with self.app.test_client():
        self.login()
        g8_with_variation = get_g_cloud_8()
        g8_with_variation['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}
        data_api_client.get_framework.return_value = g8_with_variation
        # default supplier_framework(): agreement has not been returned
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        # NOTE(review): this href says "g-cloud-7" on the g-cloud-8 dashboard, so the
        # href half of the check is vacuous (sibling tests use g-cloud-8) — likely a
        # copy-paste slip; confirm whether the CTA is expected here before changing it.
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-7/agreement",
            label="Sign and return your framework agreement",
        )
        assert not doc.xpath(
            "//main//a[contains(@href, $href_part) or normalize-space(string())=$label]",
            href_part="contract-variation/1",
            label="Read the proposed contract variation",
        )
        # no agreement-details table before the agreement is returned
        assert not doc.xpath(
            "//main//table[normalize-space(string(./caption))=$b]",
            b="Agreement details",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
def test_shows_contract_variation_alternate_link_text_after_agreed_by_ccs(self, data_api_client, s3):
    """After both supplier and CCS agree the variation, the link text changes to 'View the signed...'."""
    with self.app.test_client():
        self.login()
        g8_with_variation = get_g_cloud_8()
        # variation countersigned by CCS
        g8_with_variation['frameworks']['variations'] = {
            "1": {
                "createdAt": "2018-08-16",
                "countersignedAt": "2018-10-01",
                "countersignerName": "A.N. Other",
                "countersignerRole": "Head honcho",
            },
        }
        data_api_client.get_framework.return_value = g8_with_variation
        # supplier has also agreed the variation (agreed_variations entry "1")
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            on_framework=True,
            agreement_returned=True,
            agreement_details=self._boring_agreement_details,
            agreement_returned_at=self._boring_agreement_returned_at,
            agreement_path='g-cloud-8/agreements/1234/1234-signed-agreement.pdf',
            agreed_variations={
                "1": {
                    "agreedAt": "2016-08-19T15:47:08.116613Z",
                    "agreedUserId": 1,
                    "agreedUserEmail": "agreed@email.com",
                    "agreedUserName": u"William Dr\u0103yton" if False else u"William Drăyton",
                },
            },
        )
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        data = res.get_data(as_text=True)
        doc = html.fromstring(data)
        assert not doc.xpath(
            "//main//a[@href=$href or normalize-space(string())=$label]",
            href="/frameworks/g-cloud-8/agreement",
            label="Sign and return your framework agreement",
        )
        # 'Legal documents' third entry now uses the "View the signed..." label
        extracted_guidance_links = self._extract_guidance_links(doc)
        assert extracted_guidance_links == OrderedDict((
            ("You submitted:", (
                (
                    'View submitted services',
                    '/suppliers/frameworks/g-cloud-8/submissions',
                    None,
                    None,
                ),
                (
                    "View your declaration",
                    "/suppliers/frameworks/g-cloud-8/declaration",
                    None,
                    None,
                ),
            )),
            ('Legal documents', (
                (
                    'Read the standard framework agreement',
                    'https://www.gov.uk/government/publications/g-cloud-8-framework-agreement',
                    None,
                    None,
                ),
                (
                    u'Download your \u2018original\u2019 framework agreement signature page',
                    '/suppliers/frameworks/g-cloud-8/agreements/signed-agreement.pdf',
                    None,
                    None,
                ),
                (
                    'View the signed contract variation',
                    '/suppliers/frameworks/g-cloud-8/contract-variation/1',
                    None,
                    None,
                ),
            )),
            ('Guidance', (
                (
                    'Read about how to sell your services',
                    'https://www.gov.uk/guidance/g-cloud-suppliers-guide#how-to-apply',
                    None,
                    None,
                ),
            )),
            ('Communications', (
                (
                    'View communications and clarification questions',
                    '/suppliers/frameworks/g-cloud-8/updates',
                    None,
                    None,
                ),
            )),
        ))
        # the pre-agreement label must no longer appear anywhere
        assert not doc.xpath(
            "//main//a[normalize-space(string())=$label]",
            label="Read the proposed contract variation",
        )
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="You can start selling your",
        )
        assert not doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your original and counterpart signature pages",
        )
        assert doc.xpath(
            "//main//p[contains(normalize-space(string()), $b)]",
            b="Your framework agreement signature page has been sent to the Crown Commercial Service",
        )
@pytest.mark.parametrize('supplier_framework_kwargs,link_label,link_href', (
    ({'declaration': None}, 'Make supplier declaration', '/suppliers/frameworks/g-cloud-7/declaration/start'),
    ({}, 'Edit supplier declaration', '/suppliers/frameworks/g-cloud-7/declaration'),
))
def test_make_or_edit_supplier_declaration_shows_correct_page(self, data_api_client, s3, supplier_framework_kwargs,
                                                              link_label, link_href):
    """Dashboard links to the declaration 'start' page when none exists, otherwise to the edit page."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = self.framework(status='open')
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
            **supplier_framework_kwargs)
        res = self.client.get('/suppliers/frameworks/g-cloud-7')
        doc = html.fromstring(res.get_data(as_text=True))
        # first link carrying the expected label must point at the expected href
        matching_hrefs = doc.xpath(
            "//a[normalize-space(string())=$link_label]/@href", link_label=link_label)
        assert matching_hrefs[0] == link_href
def test_dashboard_does_not_show_use_of_service_data_if_not_available(self, data_api_client, s3):
    """The G-Cloud 8 dashboard renders no 'use of service data' body in the services item."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = self.framework(slug="g-cloud-8", name="G-Cloud 8",
                                                                    status="open")
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        res = self.client.get("/suppliers/frameworks/g-cloud-8")
        assert res.status_code == 200
        doc = html.fromstring(res.get_data(as_text=True))
        # second list item of the dashboard is the add/edit/complete-services entry
        services_item = doc.xpath('//div[contains(@class, "framework-dashboard")]/div/li')[1]
        body_divs = services_item.xpath('//div[@class="browse-list-item-body"]')
        assert not body_divs
def test_dashboard_shows_use_of_service_data_if_available(self, data_api_client, s3):
    """The G-Cloud 9 dashboard renders exactly one 'use of service data' body with the expected copy."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = self.framework(slug="g-cloud-9", name="G-Cloud 9",
                                                                    status="open")
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        res = self.client.get("/suppliers/frameworks/g-cloud-9")
        assert res.status_code == 200
        doc = html.fromstring(res.get_data(as_text=True))
        # second list item of the dashboard is the add/edit/complete-services entry
        services_item = doc.xpath('//div[contains(@class, "framework-dashboard")]/div/li')[1]
        body_divs = services_item.xpath('//div[@class="browse-list-item-body"]')
        assert len(body_divs) == 1
        assert 'The service information you provide here:' in body_divs[0].text_content()
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworksDashboardConfidenceBannerOnPage(BaseApplicationTest):
    """Tests for the confidence banner on the framework dashboard page."""

    # exact banner copy the dashboard is expected to render for a complete application
    expected = (
        'Your application will be submitted at 5pm BST, 23 June 2016. <br> '
        'You can edit your declaration and services at any time before the deadline.'
    )

    def test_confidence_banner_on_page(self, data_api_client, _):
        """Test confidence banner appears on page happy path."""
        data_api_client.get_framework.return_value = self.framework(status='open')
        # at least one submitted draft service + complete declaration -> banner shown
        data_api_client.find_draft_services.return_value = {
            "services": [
                {'serviceName': 'A service', 'status': 'submitted', 'lotSlug': 'foo'}
            ]
        }
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(status='complete')
        with self.app.test_client():
            self.login()
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            assert self.expected in str(res.data)

    def test_confidence_banner_not_on_page(self, data_api_client, _):
        """Change value and assert that confidence banner is not displayed."""
        data_api_client.get_framework.return_value = self.framework(status='open')
        # only a not-submitted draft service -> banner suppressed
        data_api_client.find_draft_services.return_value = {
            "services": [
                {'serviceName': 'A service', 'status': 'not-submitted', 'lotSlug': 'foo'}
            ]
        }
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(status='complete')
        with self.app.test_client():
            self.login()
            res = self.client.get("/suppliers/frameworks/g-cloud-8")
            assert res.status_code == 200
            assert self.expected not in str(res.data)
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworkAgreement(BaseApplicationTest):
    """GET /suppliers/frameworks/<slug>/agreement — the agreement upload / contract start view."""

    def test_page_renders_if_all_ok(self, data_api_client):
        # g-cloud-7 fixture (no agreement version) -> legacy "Send document to CCS" page
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            data = res.get_data(as_text=True)
            assert res.status_code == 200
            assert u'Send document to CCS' in data
            assert u'Return your signed signature page' not in data

    def test_page_returns_404_if_framework_in_wrong_state(self, data_api_client):
        # agreement page is not available while the framework is still 'open'
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            assert res.status_code == 404

    def test_page_returns_404_if_supplier_not_on_framework(self, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=False)
            res = self.client.get("/suppliers/frameworks/g-cloud-7/agreement")
            assert res.status_code == 404

    @mock.patch('dmutils.s3.S3')
    def test_upload_message_if_agreement_is_returned(self, s3, data_api_client):
        # an already-returned agreement shows the upload timestamp and confirmation
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True, agreement_returned=True, agreement_returned_at='2015-11-02T15:25:56.000000Z'
            )
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreement')
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            assert res.status_code == 200
            # the upload form posts back to the same agreement URL
            assert u'/suppliers/frameworks/g-cloud-7/agreement' == doc.xpath('//form')[0].action
            assert u'Document uploaded Monday 2 November 2015 at 15:25' in data
            assert u'Your document has been uploaded' in data

    def test_upload_message_if_agreement_is_not_returned(self, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreement')
            data = res.get_data(as_text=True)
            doc = html.fromstring(data)
            assert res.status_code == 200
            assert u'/suppliers/frameworks/g-cloud-7/agreement' == doc.xpath('//form')[0].action
            # no upload confirmation copy before anything has been uploaded
            assert u'Document uploaded' not in data
            assert u'Your document has been uploaded' not in data

    def test_loads_contract_start_page_if_framework_agreement_version_exists(self, data_api_client):
        # g-cloud-8 fixture (has an agreement version) -> newer "signature page" flow
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            res = self.client.get("/suppliers/frameworks/g-cloud-8/agreement")
            data = res.get_data(as_text=True)
            assert res.status_code == 200
            assert u'Return your signed signature page' in data
            assert u'Send document to CCS' not in data

    def test_two_lots_passed_on_contract_start_page(self, data_api_client):
        """Per-lot application outcomes are tabulated: submitted->Successful, failed->Unsuccessful, none->No application."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            data_api_client.find_draft_services.return_value = {
                'services': [
                    {'lotSlug': 'saas', 'status': 'submitted'},
                    {'lotSlug': 'saas', 'status': 'not-submitted'},
                    {'lotSlug': 'paas', 'status': 'failed'},
                    {'lotSlug': 'scs', 'status': 'submitted'}
                ]
            }
            expected_lots_and_statuses = [
                ('Software as a Service', 'Successful'),
                ('Platform as a Service', 'Unsuccessful'),
                ('Infrastructure as a Service', 'No application'),
                ('Specialist Cloud Services', 'Successful'),
            ]
            res = self.client.get("/suppliers/frameworks/g-cloud-8/agreement")
            doc = html.fromstring(res.get_data(as_text=True))
            assert res.status_code == 200
            lots_and_statuses = []
            lot_table_rows = doc.xpath('//*[@id="content"]//table/tbody/tr')
            for row in lot_table_rows:
                cells = row.findall('./td')
                lots_and_statuses.append(
                    (cells[0].text_content().strip(), cells[1].text_content().strip())
                )
            # same rows, order not asserted
            assert len(lots_and_statuses) == len(expected_lots_and_statuses)
            for lot_and_status in lots_and_statuses:
                assert lot_and_status in expected_lots_and_statuses
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestFrameworkAgreementUpload(BaseApplicationTest):
    """POST /suppliers/frameworks/<slug>/agreement — uploading a signed agreement.

    Mock arguments are injected bottom-up: data_api_client, send_email, s3.
    NOTE(review): ``StringIO(b'doc')`` implies a bytes-capable StringIO
    (py2 / ``six``-style import) — confirm against the file's imports.
    """

    def test_page_returns_404_if_framework_in_wrong_state(self, data_api_client, send_email, s3):
        # framework still 'open' -> upload endpoint is not available
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 404

    def test_page_returns_404_if_supplier_not_on_framework(self, data_api_client, send_email, s3):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=False)
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 404

    @mock.patch('app.main.views.frameworks.file_is_less_than_5mb')
    def test_page_returns_400_if_file_is_too_large(self, file_is_less_than_5mb, data_api_client, send_email, s3):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            # size check stubbed to fail regardless of the actual payload
            file_is_less_than_5mb.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert u'Document must be less than 5MB' in res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.file_is_empty')
    def test_page_returns_400_if_file_is_empty(self, file_is_empty, data_api_client, send_email, s3):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            file_is_empty.return_value = True
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b''), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert u'Document must not be empty' in res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_api_is_not_updated_and_email_not_sent_if_upload_fails(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # S3 failure aborts the flow before any API writes or email
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            s3.return_value.save.side_effect = S3ResponseError(
                {'Error': {'Code': 500, 'Message': 'All fail'}},
                'test_api_is_not_updated_and_email_not_sent_if_upload_fails'
            )
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 503
            s3.return_value.save.assert_called_with(
                'my/path.pdf',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.pdf'
            )
            # nothing past the upload step may have run
            assert data_api_client.create_framework_agreement.called is False
            assert data_api_client.update_framework_agreement.called is False
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_create_framework_agreement_fails(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # failure at step 1 (create) stops steps 2-4
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.create_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is False
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_update_framework_agreement_fails(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # failure at step 2 (update) stops steps 3-4
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.update_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is True
            assert data_api_client.sign_framework_agreement.called is False
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_is_not_sent_if_api_sign_framework_agreement_fails(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # failure at step 3 (sign) stops the confirmation email
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            data_api_client.sign_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 500
            assert data_api_client.create_framework_agreement.called is True
            assert data_api_client.update_framework_agreement.called is True
            assert data_api_client.sign_framework_agreement.called is True
            assert send_email.called is False

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_email_failure(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # email errors surface as 503 (service unavailable)
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            send_email.side_effect = EmailError()
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            assert res.status_code == 503
            assert send_email.called is True

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_agreement_document(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        """Happy path: save to S3, create + update + sign the agreement, then redirect back."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            data_api_client.create_framework_agreement.return_value = {
                "agreement": {"id": 20}
            }
            generate_timestamped_document_upload_path.return_value = 'my/path.pdf'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.pdf'),
                }
            )
            generate_timestamped_document_upload_path.assert_called_once_with(
                'g-cloud-7',
                1234,
                'agreements',
                'signed-framework-agreement.pdf'
            )
            s3.return_value.save.assert_called_with(
                'my/path.pdf',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.pdf'
            )
            data_api_client.create_framework_agreement.assert_called_with(
                1234, 'g-cloud-7', 'email@email.com'
            )
            # agreement id 20 comes from the create_framework_agreement response above
            data_api_client.update_framework_agreement.assert_called_with(
                20,
                {"signedAgreementPath": 'my/path.pdf'},
                'email@email.com'
            )
            data_api_client.sign_framework_agreement.assert_called_with(
                20, 'email@email.com', {"uploaderUserId": 123}
            )
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/agreement'

    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_jpeg_agreement_document(
        self, generate_timestamped_document_upload_path, data_api_client, send_email, s3
    ):
        # the uploaded file's extension is preserved in the stored filename
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            generate_timestamped_document_upload_path.return_value = 'my/path.jpg'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/agreement',
                data={
                    'agreement': (StringIO(b'doc'), 'test.jpg'),
                }
            )
            s3.return_value.save.assert_called_with(
                'my/path.jpg',
                mock.ANY,
                acl='private',
                download_filename='Supplier_Nme-1234-signed-framework-agreement.jpg'
            )
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/agreement'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
@mock.patch('dmutils.s3.S3')
class TestFrameworkAgreementDocumentDownload(BaseApplicationTest):
    """Downloads of a supplier's own framework-agreement documents via signed S3 URLs."""

    def test_download_document_fails_if_no_supplier_framework(self, S3, data_api_client):
        # an API 404 for the supplier framework propagates to the user
        data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
        with self.app.test_client():
            self.login()
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert res.status_code == 404

    def test_download_document_fails_if_no_supplier_declaration(self, S3, data_api_client):
        # a supplier without a declaration gets a 404 for agreement documents
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(declaration=None)
        with self.app.test_client():
            self.login()
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert res.status_code == 404

    def test_download_document(self, S3, data_api_client):
        # signed URL host is rewritten to the configured asset host
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        signed_url_bucket = mock.Mock()
        signed_url_bucket.get_signed_url.return_value = 'http://url/path?param=value'
        S3.return_value = signed_url_bucket
        with self.app.test_client():
            self.login()
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert res.status_code == 302
            assert res.location == 'http://asset-host/path?param=value'
            signed_url_bucket.get_signed_url.assert_called_with(
                'g-cloud-7/agreements/1234/1234-example.pdf')

    def test_download_document_with_asset_url(self, S3, data_api_client):
        # an explicit DM_ASSETS_URL overrides the default asset host
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        signed_url_bucket = mock.Mock()
        signed_url_bucket.get_signed_url.return_value = 'http://url/path?param=value'
        S3.return_value = signed_url_bucket
        with self.app.test_client():
            self.app.config['DM_ASSETS_URL'] = 'https://example'
            self.login()
            res = self.client.get('/suppliers/frameworks/g-cloud-7/agreements/example.pdf')
            assert res.status_code == 302
            assert res.location == 'https://example/path?param=value'
            signed_url_bucket.get_signed_url.assert_called_with(
                'g-cloud-7/agreements/1234/1234-example.pdf')
@mock.patch('dmutils.s3.S3')
class TestFrameworkDocumentDownload(BaseApplicationTest):
    """Downloading general framework communications files."""

    def test_download_document(self, S3):
        comms_bucket = mock.Mock()
        S3.return_value = comms_bucket
        comms_bucket.get_signed_url.return_value = 'http://url/path?param=value'

        with self.app.test_client():
            self.login()

            res = self.client.get('/suppliers/frameworks/g-cloud-7/files/example.pdf')

            assert res.status_code == 302
            assert res.location == 'http://asset-host/path?param=value'
            comms_bucket.get_signed_url.assert_called_with('g-cloud-7/communications/example.pdf')

    def test_download_document_returns_404_if_url_is_None(self, S3):
        comms_bucket = mock.Mock()
        S3.return_value = comms_bucket
        comms_bucket.get_signed_url.return_value = None

        with self.app.test_client():
            self.login()

            res = self.client.get('/suppliers/frameworks/g-cloud-7/files/example.pdf')

            assert res.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestStartSupplierDeclaration(BaseApplicationTest):
    """The 'start your declaration' page."""

    def test_start_declaration_goes_to_declaration_overview_page(self, data_api_client):
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()

            response = self.client.get('/suppliers/frameworks/g-cloud-7/declaration/start')
            document = html.fromstring(response.get_data(as_text=True))

            # The call-to-action button points at the answer-reuse page first.
            start_links = document.xpath(
                "//a[normalize-space(string(.))='Start your declaration']/@href"
            )
            assert start_links[0] == '/suppliers/frameworks/g-cloud-7/declaration/reuse'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationOverviewSubmit(BaseApplicationTest):
    """
    Behaviour common to both GET and POST views on path /suppliers/frameworks/g-cloud-7/declaration
    """

    def test_supplier_not_interested(self, data_api_client, get_or_post):
        # No framework-interest record for the supplier: the view must 404 and
        # must never attempt to save a declaration.
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(status="open"),
                "g-cloud-7",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                1234,
                "g-cloud-7",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            make_request = getattr(self.client, get_or_post.lower())
            response = make_request("/suppliers/frameworks/g-cloud-7/declaration")

            assert response.status_code == 404

    def test_framework_coming(self, data_api_client, get_or_post):
        # A framework in "coming" state is not yet open for declarations: 404.
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.side_effect = _assert_args_and_return(
                self.framework(status="coming"),
                "g-cloud-7",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
                self.supplier_framework(framework_slug="g-cloud-7"),
                1234,
                "g-cloud-7",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            make_request = getattr(self.client, get_or_post.lower())
            response = make_request("/suppliers/frameworks/g-cloud-7/declaration")

            assert response.status_code == 404

    def test_framework_unknown(self, data_api_client, get_or_post):
        # Unknown framework slug: 404 straight from the framework lookup.
        with self.app.test_client():
            self.login()

            data_api_client.get_framework.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                "muttoning-clouds",
            )
            data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_raise(
                APIError(mock.Mock(status_code=404)),
                1234,
                "muttoning-clouds",
            )
            data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")

            make_request = getattr(self.client, get_or_post.lower())
            response = make_request("/suppliers/frameworks/muttoning-clouds/declaration")

            assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationOverview(BaseApplicationTest):
# Tests for the declaration overview page at /suppliers/frameworks/<slug>/declaration.
@staticmethod
def _extract_section_information(doc, section_title, expect_edit_link=True):
"""
given a section (full text) name, returns that section's relevant information in a tuple (format described
in comments)
"""
tables = doc.xpath(
"//table[preceding::h2[1][normalize-space(string())=$section_title]]",
section_title=section_title,
)
# exactly one summary table is expected per section heading
assert len(tables) == 1
table = tables[0]
edit_as = doc.xpath(
"//a[@class='summary-change-link'][preceding::h2[1][normalize-space(string())=$section_title]]",
section_title=section_title,
)
# an "Edit" link must be present exactly when the caller expects one
assert ([a.xpath("normalize-space(string())") for a in edit_as] == ["Edit"]) is expect_edit_link
return (
# table caption text
table.xpath("normalize-space(string(./caption))"),
# "Edit" link href
edit_as[0].xpath("@href")[0] if expect_edit_link else None,
tuple(
(
# contents of row heading
row.xpath("normalize-space(string(./td[@class='summary-item-field-first']))"),
# full text contents of row "value"
row.xpath("normalize-space(string(./td[@class='summary-item-field']))"),
# full text contents of each a element in row value
tuple(a.xpath("normalize-space(string())") for a in row.xpath(
"./td[@class='summary-item-field']//a"
)),
# href of each a element in row value
tuple(row.xpath("./td[@class='summary-item-field']//a/@href")),
# full text contents of each li element in row value
tuple(li.xpath("normalize-space(string())") for li in row.xpath(
"./td[@class='summary-item-field']//li"
)),
) for row in table.xpath(".//tr[contains(@class,'summary-item-row')]")
)
)
@staticmethod
def _section_information_strip_edit_href(section_information):
# Return a copy of a section-information tuple with the "Edit" href replaced
# by None, for comparison against pages where no edit link is expected.
row_heading, edit_href, rows = section_information
return row_heading, None, rows
def _setup_data_api_client(self, data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug):
# Common mock wiring: framework and supplier-framework lookups succeed with
# the given fixtures; set_supplier_declaration must never be hit by the view under test.
data_api_client.get_framework.side_effect = _assert_args_and_return(
self.framework(slug=framework_slug, name="F-Cumulus 0", status=framework_status),
framework_slug,
)
data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
self.supplier_framework(
framework_slug=framework_slug,
declaration=declaration,
prefill_declaration_from_framework_slug=prefill_fw_slug,
),
1234,
framework_slug,
)
data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")
# corresponds to the parametrization args:
# "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections"
# each entry of "expected_sections" follows the tuple layout produced by
# _extract_section_information above
_common_parametrization = tuple(
chain.from_iterable(chain(
(( # noqa
"g-cloud-9",
empty_declaration,
False,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",),
(),
),
(
"Services in scope for G-Cloud",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#servicesDoNotInclude",),
(),
),
(
"Buyers pay for what they use",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#payForWhatUse",),
(),
),
(
"What your team will deliver",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#offerServicesYourselves",),
(),
),
(
"Contractual responsibility and accountability",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#fullAccountability",),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",),
(),
),
(
"Bribery or corruption convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#corruptionBribery",),
(),
),
(
"Fraud convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#fraudAndTheft",),
(),
),
(
"Terrorism convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#terrorism",),
(),
),
(
"Organised crime convictions",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-"
"exclusion#organisedCrime",),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
q_link_text_prefillable_section,
(q_link_text_prefillable_section,),
("/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-"
"services",),
(),
),
),
),
),
) for empty_declaration in (None, {})), # two possible ways of specifying a "empty" declaration - test both
(( # noqa
"g-cloud-9",
{
"status": "started",
"conspiracy": True,
"corruptionBribery": False,
"fraudAndTheft": True,
"terrorism": False,
"organisedCrime": True,
"subcontracting": [
"yourself without the use of third parties (subcontractors)",
"as a prime contractor, using third parties (subcontractors) to provide all services",
],
},
False,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",),
(),
),
(
"Services in scope for G-Cloud",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#servicesDoNotInclude",),
(),
),
(
"Buyers pay for what they use",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#payForWhatUse",),
(),
),
(
"What your team will deliver",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#offerServicesYourselves",),
(),
),
(
"Contractual responsibility and accountability",
"Answer question",
("Answer question",),
("/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-"
"services#fullAccountability",),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
"Yes",
(),
(),
(),
),
(
"Bribery or corruption convictions",
"No",
(),
(),
(),
),
(
"Fraud convictions",
"Yes",
(),
(),
(),
),
(
"Terrorism convictions",
"No",
(),
(),
(),
),
(
"Organised crime convictions",
"Yes",
(),
(),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
"yourself without the use of third parties (subcontractors) as a prime contractor, using "
"third parties (subcontractors) to provide all services", # noqa
(),
(),
(
"yourself without the use of third parties (subcontractors)",
"as a prime contractor, using third parties (subcontractors) to provide all services",
),
),
),
),
),
),),
(( # noqa
"g-cloud-9",
dict(status=declaration_status, **(valid_g9_declaration_base())),
True,
prefill_fw_slug,
(
( # expected result for "Providing suitable services" section as returned by
# _extract_section_information
"Providing suitable services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/providing-suitable-services",
(
(
"Services are cloud-related",
"Yes",
(),
(),
(),
),
(
"Services in scope for G-Cloud",
"Yes",
(),
(),
(),
),
(
"Buyers pay for what they use",
"Yes",
(),
(),
(),
),
(
"What your team will deliver",
"No",
(),
(),
(),
),
(
"Contractual responsibility and accountability",
"Yes",
(),
(),
(),
),
),
),
( # expected result for "Grounds for mandatory exclusion" section as returned by
# _extract_section_information
"Grounds for mandatory exclusion",
"/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion",
(
(
"Organised crime or conspiracy convictions",
"No",
(),
(),
(),
),
(
"Bribery or corruption convictions",
"Yes",
(),
(),
(),
),
(
"Fraud convictions",
"No",
(),
(),
(),
),
(
"Terrorism convictions",
"Yes",
(),
(),
(),
),
(
"Organised crime convictions",
"No",
(),
(),
(),
),
),
),
( # expected result for "How you’ll deliver your services" section as returned by
# _extract_section_information
u"How you’ll deliver your services",
"/suppliers/frameworks/g-cloud-9/declaration/edit/how-youll-deliver-your-services",
(
(
"Subcontractors or consortia",
"yourself without the use of third parties (subcontractors)",
(),
(),
(),
),
),
),
),
) for declaration_status in ("started", "complete",)),
) for prefill_fw_slug, q_link_text_prefillable_section in (
# test all of the previous combinations with two possible values of prefill_fw_slug
(None, "Answer question",),
("some-previous-framework", "Review answer",),
)))
# corresponds to the parametrization args:
# "framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections"
#
# this is more straightforward than _common_parametrization because we only have to care about non-open frameworks
_g7_parametrization = tuple(
(
"g-cloud-7",
declaration,
decl_valid,
None,
# G7 doesn't (yet?) have any "short names" for questions and so will be listing the answers in the
# overview against their full verbose questions so any sections that we wanted to assert the content of
# would require a reference copy of all its full question texts kept here. we don't want to do this so for
# now don't assert any G7 sections...
(),
) for declaration, decl_valid in chain(
(
(dict(FULL_G7_SUBMISSION, status=decl_status), True)
for decl_status in ("started", "complete",)
),
(
(empty_decl, False)
for empty_decl in (None, {})
),
)
)
@pytest.mark.parametrize(
"framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
_common_parametrization,
)
def test_display_open(
self,
data_api_client,
framework_slug,
declaration,
decl_valid,
prefill_fw_slug,
expected_sections,
):
"""Overview page for an *open* framework: breadcrumbs, prompts, edit links and 'Make declaration' forms."""
self._setup_data_api_client(data_api_client, "open", framework_slug, declaration, prefill_fw_slug)
with self.app.test_client():
self.login()
response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
assert response.status_code == 200
doc = html.fromstring(response.get_data(as_text=True))
# breadcrumb labels for an open framework end in "Apply to ..."
assert [e.xpath("normalize-space(string())") for e in doc.xpath(
"//nav//*[@role='breadcrumbs']//a",
)] == [
"Digital Marketplace",
"Your account",
"Apply to F-Cumulus 0",
]
assert doc.xpath(
"//nav//*[@role='breadcrumbs']//a/@href",
) == [
"/",
"/suppliers",
"/suppliers/frameworks/{}".format(framework_slug),
]
# incomplete declarations show the "answer all questions" warning
assert bool(doc.xpath(
"//p[contains(normalize-space(string()), $t)][contains(normalize-space(string()), $f)]",
t="You must answer all questions and make your declaration before",
f="F-Cumulus 0",
)) is (not decl_valid)
# valid-but-unmade declarations show the "make your declaration" prompt
assert bool(doc.xpath(
"//p[contains(normalize-space(string()), $t)][contains(normalize-space(string()), $f)]",
t="You must make your declaration before",
f="F-Cumulus 0",
)) is (decl_valid and declaration.get("status") != "complete")
assert len(doc.xpath(
"//p[contains(normalize-space(string()), $t)]",
t="You can come back and edit your answers at any time before the deadline.",
)) == (2 if decl_valid and declaration.get("status") != "complete" else 0)
assert len(doc.xpath(
"//p[contains(normalize-space(string()), $t)][not(contains(normalize-space(string()), $d))]",
t="You can come back and edit your answers at any time",
d="deadline",
)) == (2 if decl_valid and declaration.get("status") == "complete" else 0)
if prefill_fw_slug is None:
# with no framework to prefill from there should never be "Review answer" links
assert not doc.xpath("//a[normalize-space(string())=$t]", t="Review answer")
assert bool(doc.xpath(
"//a[normalize-space(string())=$a or normalize-space(string())=$b]",
a="Answer question",
b="Review answer",
)) is (not decl_valid)
if not decl_valid:
# assert that all links with the label "Answer question" or "Review answer" link to some subpage (by
# asserting that there are none that don't, having previously determined that such-labelled links exist)
assert not doc.xpath(
# we want the href to *contain* $u but not *be* $u
"//a[normalize-space(string())=$a or normalize-space(string())=$b]"
"[not(starts-with(@href, $u)) or @href=$u]",
a="Answer question",
b="Review answer",
u="/suppliers/frameworks/{}/declaration/".format(framework_slug),
)
if decl_valid and declaration.get("status") != "complete":
# two "Make declaration" forms (top and bottom of page), both POSTing back to this URL
mdf_actions = doc.xpath(
"//form[@method='POST'][.//input[@value=$t][@type='submit']][.//input[@name='csrf_token']]/@action",
t="Make declaration",
)
assert len(mdf_actions) == 2
assert all(
urljoin("/suppliers/frameworks/{}/declaration".format(framework_slug), action) ==
"/suppliers/frameworks/{}/declaration".format(framework_slug)
for action in mdf_actions
)
else:
assert not doc.xpath("//input[@value=$t]", t="Make declaration")
assert doc.xpath(
"//a[normalize-space(string())=$t][@href=$u]",
t="Return to application",
u="/suppliers/frameworks/{}".format(framework_slug),
)
for expected_section in expected_sections:
assert self._extract_section_information(doc, expected_section[0]) == expected_section
@pytest.mark.parametrize(
"framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
tuple(
(
framework_slug,
declaration,
decl_valid,
prefill_fw_slug,
expected_sections,
)
for framework_slug, declaration, decl_valid, prefill_fw_slug, expected_sections
in chain(_common_parametrization, _g7_parametrization)
if (declaration or {}).get("status") == "complete"
)
)
@pytest.mark.parametrize("framework_status", ("pending", "standstill", "live", "expired",))
def test_display_closed(
self,
data_api_client,
framework_status,
framework_slug,
declaration,
decl_valid,
prefill_fw_slug,
expected_sections,
):
"""A *complete* declaration on a closed framework is shown read-only: no edit links, prompts or forms."""
self._setup_data_api_client(data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug)
with self.app.test_client():
self.login()
response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
assert response.status_code == 200
doc = html.fromstring(response.get_data(as_text=True))
# breadcrumb labels for a closed framework end in "Your ... application"
assert [e.xpath("normalize-space(string())") for e in doc.xpath(
"//nav//*[@role='breadcrumbs']//a",
)] == [
"Digital Marketplace",
"Your account",
"Your F-Cumulus 0 application",
]
assert doc.xpath(
"//nav//*[@role='breadcrumbs']//a/@href",
) == [
"/",
"/suppliers",
"/suppliers/frameworks/{}".format(framework_slug),
]
# there shouldn't be any links to the "edit" page
assert not any(
urljoin("/suppliers/frameworks/{}/declaration".format(framework_slug), a.attrib["href"]).startswith(
"/suppliers/frameworks/{}/declaration/edit/".format(framework_slug)
)
for a in doc.xpath("//a[@href]")
)
# no submittable forms should be pointing at ourselves
assert not any(
urljoin(
"/suppliers/frameworks/{}/declaration".format(framework_slug),
form.attrib["action"],
) == "/suppliers/frameworks/{}/declaration".format(framework_slug)
for form in doc.xpath("//form[.//input[@type='submit']]")
)
assert not doc.xpath("//a[@href][normalize-space(string())=$label]", label="Answer question")
assert not doc.xpath("//a[@href][normalize-space(string())=$label]", label="Review answer")
assert not doc.xpath("//p[contains(normalize-space(string()), $t)]", t="make your declaration")
assert not doc.xpath("//p[contains(normalize-space(string()), $t)]", t="edit your answers")
for expected_section in expected_sections:
assert self._extract_section_information(
doc,
expected_section[0],
expect_edit_link=False,
) == self._section_information_strip_edit_href(expected_section)
@pytest.mark.parametrize(
"framework_slug,declaration,decl_valid,prefill_fw_slug,expected_sections",
tuple(
(
framework_slug,
declaration,
decl_valid,
prefill_fw_slug,
expected_sections,
)
for framework_slug, declaration, decl_valid, prefill_fw_slug, expected_sections
in chain(_common_parametrization, _g7_parametrization)
if (declaration or {}).get("status") != "complete"
)
)
@pytest.mark.parametrize("framework_status", ("pending", "standstill", "live", "expired",))
def test_error_closed(
self,
data_api_client,
framework_status,
framework_slug,
declaration,
decl_valid,
prefill_fw_slug,
expected_sections,
):
"""An *incomplete* declaration on a closed framework responds 410 Gone."""
self._setup_data_api_client(data_api_client, framework_status, framework_slug, declaration, prefill_fw_slug)
with self.app.test_client():
self.login()
response = self.client.get("/suppliers/frameworks/{}/declaration".format(framework_slug))
assert response.status_code == 410
@pytest.mark.parametrize("framework_status", ("coming", "open", "pending", "standstill", "live", "expired",))
def test_error_nonexistent_framework(self, data_api_client, framework_status):
"""A framework slug the API doesn't know about responds 404 regardless of status."""
self._setup_data_api_client(data_api_client, framework_status, "g-cloud-31415", {"status": "complete"}, None)
with self.app.test_client():
self.login()
response = self.client.get("/suppliers/frameworks/g-cloud-31415/declaration")
assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestDeclarationSubmit(BaseApplicationTest):
# Tests for POSTing (submitting) the declaration at /suppliers/frameworks/<slug>/declaration.
@pytest.mark.parametrize("prefill_fw_slug", (None, "some-previous-framework",))
@pytest.mark.parametrize("invalid_declaration", (
None,
{},
{
# not actually complete - only first section is
"status": "complete",
"unfairCompetition": False,
"skillsAndResources": False,
"offerServicesYourselves": False,
"fullAccountability": True,
},
))
def test_invalid_declaration(self, data_api_client, invalid_declaration, prefill_fw_slug):
"""Submitting while the declaration is invalid responds 400 and never saves."""
with self.app.test_client():
self.login()
data_api_client.get_framework.side_effect = _assert_args_and_return(
self.framework(slug="g-cloud-9", name="G-Cloud 9", status="open"),
"g-cloud-9",
)
data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
self.supplier_framework(
framework_slug="g-cloud-9",
declaration=invalid_declaration,
prefill_declaration_from_framework_slug=prefill_fw_slug, # should have zero effect
),
1234,
"g-cloud-9",
)
data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")
response = self.client.post("/suppliers/frameworks/g-cloud-9/declaration")
assert response.status_code == 400
@pytest.mark.parametrize("prefill_fw_slug", (None, "some-previous-framework",))
@pytest.mark.parametrize("declaration_status", ("started", "complete",))
@mock.patch("dmutils.s3.S3") # needed by the framework dashboard which our request gets redirected to
def test_valid_declaration(self, s3, data_api_client, prefill_fw_slug, declaration_status):
"""A valid declaration is saved with status "complete" and redirects to the dashboard."""
with self.app.test_client():
self.login()
data_api_client.get_framework.side_effect = _assert_args_and_return(
self.framework(slug="g-cloud-9", name="G-Cloud 9", status="open"),
"g-cloud-9",
)
data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
self.supplier_framework(
framework_slug="g-cloud-9",
declaration=dict(status=declaration_status, **(valid_g9_declaration_base())),
prefill_declaration_from_framework_slug=prefill_fw_slug, # should have zero effect
),
1234,
"g-cloud-9",
)
data_api_client.set_supplier_declaration.side_effect = _assert_args_and_return(
dict(status="complete", **(valid_g9_declaration_base())),
1234,
"g-cloud-9",
dict(status="complete", **(valid_g9_declaration_base())),
"email@email.com",
)
response = self.client.post("/suppliers/frameworks/g-cloud-9/declaration", follow_redirects=True)
# args of call are asserted by mock's side_effect
assert data_api_client.set_supplier_declaration.called is True
# this will be the response from the redirected-to view
assert response.status_code == 200
doc = html.fromstring(response.get_data(as_text=True))
# analytics tracking element for declaration completion is rendered
assert doc.xpath(
"//*[@data-analytics='trackPageView'][@data-url=$k]",
k="/suppliers/frameworks/g-cloud-9/declaration_complete",
)
@pytest.mark.parametrize("framework_status", ("standstill", "pending", "live", "expired",))
def test_closed_framework_state(self, data_api_client, framework_status):
"""Submitting against a framework that is no longer open responds 404 and never saves."""
with self.app.test_client():
self.login()
data_api_client.get_framework.side_effect = _assert_args_and_return(
self.framework(status=framework_status),
"g-cloud-7",
)
data_api_client.get_supplier_framework_info.side_effect = _assert_args_and_return(
self.supplier_framework(framework_slug="g-cloud-7"),
1234,
"g-cloud-7",
)
data_api_client.set_supplier_declaration.side_effect = AssertionError("This shouldn't be called")
response = self.client.post("/suppliers/frameworks/g-cloud-7/declaration")
assert response.status_code == 404
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestSupplierDeclaration(BaseApplicationTest):
# Tests for the per-section declaration edit pages.
@pytest.mark.parametrize("empty_declaration", ({}, None,))
def test_get_with_no_previous_answers(self, data_api_client, empty_declaration):
"""With no stored declaration, no radio buttons come back pre-checked."""
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration=empty_declaration,
)
data_api_client.get_supplier_declaration.side_effect = APIError(mock.Mock(status_code=404))
res = self.client.get(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials')
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
assert doc.xpath('//input[@id="PR-1-yes"]/@checked') == []
assert doc.xpath('//input[@id="PR-1-no"]/@checked') == []
# NOTE(review): doubled "with" in this test name looks like a typo, but renaming
# would change the test's identity in CI history, so it is left as-is.
def test_get_with_with_previous_answers(self, data_api_client):
"""Answers already saved against this framework pre-check the matching radio button."""
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started", "PR1": False}
)
res = self.client.get(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials')
assert res.status_code == 200
doc = html.fromstring(res.get_data(as_text=True))
# PR1 == False corresponds to the second ("no") radio input
assert len(doc.xpath('//input[@id="input-PR1-2"]/@checked')) == 1
def test_get_with_with_prefilled_answers(self, data_api_client):
"""Answers from an earlier framework's declaration are pre-filled, with review banners shown."""
with self.app.test_client():
self.login()
# Handle calls for both the current framework and for the framework to pre-fill from
data_api_client.get_framework.side_effect = lambda framework_slug: {
"g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
"digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
name='Digital Stuff 2', status='live'),
}[framework_slug]
# Current framework application information
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-9",
declaration={"status": "started"},
prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
)
# The previous declaration to prefill from
data_api_client.get_supplier_declaration.return_value = {
'declaration': self.supplier_framework(
framework_slug="digital-outcomes-and-specialists-2",
declaration={"status": "complete",
"conspiracy": True,
"corruptionBribery": False,
"fraudAndTheft": True,
"terrorism": False,
"organisedCrime": False,
},
)["frameworkInterest"]["declaration"]
}
# The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
res = self.client.get(
'/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
assert res.status_code == 200
data_api_client.get_supplier_declaration.assert_called_once_with(1234, "digital-outcomes-and-specialists-2")
doc = html.fromstring(res.get_data(as_text=True))
# Radio buttons have been pre-filled with the correct answers
assert len(doc.xpath('//input[@id="input-conspiracy-1"][@value="True"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-corruptionBribery-2"][@value="False"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-fraudAndTheft-1"][@value="True"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-terrorism-2"][@value="False"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1
# Blue banner message is shown at top of page
assert doc.xpath('normalize-space(string(//div[@class="banner-information-without-action"]))') == \
"Answers on this page are from an earlier declaration and need review."
# Blue information messages are shown next to each question
info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
assert len(info_messages) == 5
for message in info_messages:
assert self.strip_all_whitespace(message.text) == self.strip_all_whitespace(
"This answer is from your Digital Stuff 2 declaration"
)
def test_get_with_with_partially_prefilled_answers(self, data_api_client):
"""Only answers present in the earlier declaration are pre-filled; missing keys stay blank."""
with self.app.test_client():
self.login()
# Handle calls for both the current framework and for the framework to pre-fill from
data_api_client.get_framework.side_effect = lambda framework_slug: {
"g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
"digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
name='Digital Stuff 2', status='live'),
}[framework_slug]
# Current framework application information
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-9",
declaration={"status": "started"},
prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
)
# The previous declaration to prefill from - missing "corruptionBribery" and "terrorism" keys
data_api_client.get_supplier_declaration.return_value = {
'declaration': self.supplier_framework(
framework_slug="digital-outcomes-and-specialists-2",
declaration={"status": "complete",
"conspiracy": True,
"fraudAndTheft": True,
"organisedCrime": False,
},
)["frameworkInterest"]["declaration"]
}
# The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
res = self.client.get(
'/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
assert res.status_code == 200
data_api_client.get_supplier_declaration.assert_called_once_with(1234, "digital-outcomes-and-specialists-2")
doc = html.fromstring(res.get_data(as_text=True))
# Radio buttons have been pre-filled with the correct answers
assert len(doc.xpath('//input[@id="input-conspiracy-1"][@value="True"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-fraudAndTheft-1"][@value="True"]/@checked')) == 1
assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1
# Radio buttons for missing keys exist but have not been pre-filled
assert len(doc.xpath('//input[@id="input-corruptionBribery-1"]')) == 1
assert len(doc.xpath('//input[@id="input-corruptionBribery-2"]')) == 1
assert len(doc.xpath('//input[@id="input-corruptionBribery-1"]/@checked')) == 0
assert len(doc.xpath('//input[@id="input-corruptionBribery-2"]/@checked')) == 0
assert len(doc.xpath('//input[@id="input-terrorism-1"]')) == 1
assert len(doc.xpath('//input[@id="input-terrorism-2"]')) == 1
assert len(doc.xpath('//input[@id="input-terrorism-1"]/@checked')) == 0
assert len(doc.xpath('//input[@id="input-terrorism-2"]/@checked')) == 0
# Blue banner message is shown at top of page
assert doc.xpath('normalize-space(string(//div[@class="banner-information-without-action"]))') == \
"Answers on this page are from an earlier declaration and need review."
# Blue information messages are shown next to pre-filled questions only
info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
assert len(info_messages) == 3
for message in info_messages:
assert self.strip_all_whitespace(message.text) == self.strip_all_whitespace(
"This answer is from your Digital Stuff 2 declaration"
)
    def test_answers_not_prefilled_if_section_has_already_been_saved(self, data_api_client):
        """Once a section has saved answers, those answers win over any pre-fill source.

        The supplier framework info already contains answers for every
        grounds-for-mandatory-exclusion question, so the view must render those
        and must never fetch the previous framework's declaration.
        """
        with self.app.test_client():
            self.login()
            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]
            # Current framework application information with the grounds-for-mandatory-exclusion section complete
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started",
                             "conspiracy": False,
                             "corruptionBribery": True,
                             "fraudAndTheft": False,
                             "terrorism": True,
                             "organisedCrime": False,
                             },
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )
            # The previous declaration to prefill from - has relevant answers but should not ever be called
            # (note the answers deliberately differ from the current ones so a wrong
            # pre-fill would be caught by the radio-button assertions below)
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "conspiracy": True,
                                 "corruptionBribery": False,
                                 "fraudAndTheft": True,
                                 "terrorism": False,
                                 "organisedCrime": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }
            # The grounds-for-mandatory-exclusion section has "prefill: True" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/grounds-for-mandatory-exclusion')
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            # Previous framework and declaration have not been fetched
            data_api_client.get_framework.assert_called_once_with('g-cloud-9')
            assert data_api_client.get_supplier_declaration.called is False
            # Radio buttons have been filled with the current answers; not those from previous declaration
            assert len(doc.xpath('//input[@id="input-conspiracy-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-corruptionBribery-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-fraudAndTheft-2"][@value="False"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-terrorism-1"][@value="True"]/@checked')) == 1
            assert len(doc.xpath('//input[@id="input-organisedCrime-2"][@value="False"]/@checked')) == 1
            # No blue banner message is shown at top of page
            assert len(doc.xpath('//div[@class="banner-information-without-action"]')) == 0
            # No blue information messages are shown next to each question
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 0
    def test_answers_not_prefilled_if_section_marked_as_prefill_false(self, data_api_client):
        """Sections whose manifest sets "prefill: False" never pre-fill, even when a source exists.

        The how-you-apply section is opted out of pre-filling, so its radio
        buttons must render unpopulated and the previous declaration must not
        be fetched at all.
        """
        with self.app.test_client():
            self.login()
            # Handle calls for both the current framework and for the framework to pre-fill from
            data_api_client.get_framework.side_effect = lambda framework_slug: {
                "g-cloud-9": self.framework(slug='g-cloud-9', name='G-Cloud 9', status='open'),
                "digital-outcomes-and-specialists-2": self.framework(slug='digital-outcomes-and-specialists-2',
                                                                     name='Digital Stuff 2', status='live'),
            }[framework_slug]
            # Current framework application information
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={"status": "started"},
                prefill_declaration_from_framework_slug="digital-outcomes-and-specialists-2"
            )
            # The previous declaration to prefill from - has relevant answers but should not ever be called
            data_api_client.get_supplier_declaration.return_value = {
                'declaration': self.supplier_framework(
                    framework_slug="digital-outcomes-and-specialists-2",
                    declaration={"status": "complete",
                                 "readUnderstoodGuidance": True,
                                 "understandTool": True,
                                 "understandHowToAskQuestions": False,
                                 },
                )["frameworkInterest"]["declaration"]
            }
            # The how-you-apply section has "prefill: False" in the declaration manifest
            res = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/how-you-apply')
            assert res.status_code == 200
            doc = html.fromstring(res.get_data(as_text=True))
            # Previous framework and declaration have not been fetched
            data_api_client.get_framework.assert_called_once_with('g-cloud-9')
            assert data_api_client.get_supplier_declaration.called is False
            # Radio buttons exist on page but have not been populated at all
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-readUnderstoodGuidance-2"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandTool-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandTool-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandTool-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandTool-2"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-1"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-2"]')) == 1
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-1"]/@checked')) == 0
            assert len(doc.xpath('//input[@id="input-understandHowToAskQuestions-2"]/@checked')) == 0
            # No blue banner message is shown at top of page
            assert len(doc.xpath('//div[@class="banner-information-without-action"]')) == 0
            # No blue information messages are shown next to each question
            info_messages = doc.xpath('//div[@class="message-wrapper"]//span[@class="message-content"]')
            assert len(info_messages) == 0
def test_post_valid_data(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started"}
)
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 302
assert data_api_client.set_supplier_declaration.called is True
def test_post_valid_data_to_complete_declaration(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration=FULL_G7_SUBMISSION
)
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/grounds-for-discretionary-exclusion',
data=FULL_G7_SUBMISSION)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-7/declaration'
assert data_api_client.set_supplier_declaration.called is True
assert data_api_client.set_supplier_declaration.call_args[0][2]['status'] == 'complete'
def test_post_valid_data_with_api_failure(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
framework_slug="g-cloud-7",
declaration={"status": "started"}
)
data_api_client.set_supplier_declaration.side_effect = APIError(mock.Mock(status_code=400))
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 400
    @mock.patch('app.main.helpers.validation.G7Validator.get_error_messages_for_page')
    def test_post_with_validation_errors(self, get_error_messages_for_page, data_api_client):
        """Test that answers are not saved if there are errors

        For unit tests of the validation see :mod:`tests.app.main.helpers.test_frameworks`
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='open')
            # Force the validator to report an error for question PR1.
            get_error_messages_for_page.return_value = {'PR1': {'input_name': 'PR1', 'message': 'this is invalid'}}
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
                data=FULL_G7_SUBMISSION)
            # Validation failure: nothing is saved and the form is re-rendered.
            assert res.status_code == 400
            assert data_api_client.set_supplier_declaration.called is False
            doc = html.fromstring(res.get_data(as_text=True))
            # The submitted answer is echoed back into the re-rendered form.
            elems = doc.cssselect('#input-PR1-1')
            assert elems[0].value == 'True'
    def test_post_invalidating_previously_valid_page(self, data_api_client):
        """A POST that removes a previously-given required answer must fail validation.

        The saved declaration has licenceOrMemberRequiredDetails filled in; the
        new submission blanks it while keeping licenceOrMemberRequired set to
        "licensed", which makes the details field required again.
        """
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(slug='g-cloud-9', status='open')
            mock_supplier_framework = self.supplier_framework(
                framework_slug="g-cloud-9",
                declaration={
                    "status": "started",
                    "establishedInTheUK": False,
                    "appropriateTradeRegisters": True,
                    "appropriateTradeRegistersNumber": "242#353",
                    "licenceOrMemberRequired": "licensed",
                    "licenceOrMemberRequiredDetails": "Foo Bar",
                },
            )
            data_api_client.get_supplier_framework_info.return_value = mock_supplier_framework
            data_api_client.get_supplier_declaration.return_value = {
                "declaration": mock_supplier_framework["frameworkInterest"]["declaration"],
            }
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-9/declaration/edit/established-outside-the-uk',
                data={
                    "establishedInTheUK": "False",
                    "appropriateTradeRegisters": "True",
                    "appropriateTradeRegistersNumber": "242#353",
                    "licenceOrMemberRequired": "licensed",
                    # deliberately missing:
                    "licenceOrMemberRequiredDetails": "",
                },
            )
            # The now-invalid page is rejected and nothing is saved.
            assert res.status_code == 400
            assert data_api_client.set_supplier_declaration.called is False
def test_cannot_post_data_if_not_open(self, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = {
'frameworks': {'status': 'pending'}
}
data_api_client.get_supplier_declaration.return_value = {
"declaration": {"status": "started"}
}
res = self.client.post(
'/suppliers/frameworks/g-cloud-7/declaration/edit/g-cloud-7-essentials',
data=FULL_G7_SUBMISSION)
assert res.status_code == 404
assert data_api_client.set_supplier_declaration.called is False
@mock.patch('app.main.views.frameworks.data_api_client')
@mock.patch('dmutils.s3.S3')
class TestFrameworkUpdatesPage(BaseApplicationTest):
    """Tests for the framework 'updates' page: S3-hosted documents, key dates
    and the question box. The S3 client and the data API client are patched
    at class level, so every test receives (self, s3, data_api_client)."""

    def _assert_page_title_and_table_headings(self, doc, tables_exist=True):
        # Shared assertions: page h1 plus the two document-section headings
        # (and, when files are listed, the matching table captions).
        assert self.strip_all_whitespace('G-Cloud 7 updates') in self.strip_all_whitespace(doc.xpath('//h1')[0].text)

        section_names = [
            'Communications',
            'Clarification questions and answers',
        ]

        headers = doc.xpath('//div[contains(@class, "updates-document-tables")]/h2[@class="summary-item-heading"]')
        assert len(headers) == 2
        for index, section_name in enumerate(section_names):
            assert self.strip_all_whitespace(section_name) in self.strip_all_whitespace(headers[index].text)

        if tables_exist:
            table_captions = doc.xpath('//div[contains(@class, "updates-document-tables")]/table/caption')
            assert len(table_captions) == 2
            for index, section_name in enumerate(section_names):
                assert self.strip_all_whitespace(section_name) in self.strip_all_whitespace(table_captions[index].text)

    def test_should_be_a_503_if_connecting_to_amazon_fails(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')
        # if s3 throws a 500-level error
        s3.side_effect = S3ResponseError(
            {'Error': {'Code': 500, 'Message': 'Amazon has collapsed. The internet is over.'}},
            'test_should_be_a_503_if_connecting_to_amazon_fails'
        )
        with self.app.test_client():
            self.login()
            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            assert response.status_code == 503
            assert self.strip_all_whitespace(u"<h1>Sorry, we’re experiencing technical difficulties</h1>") in \
                self.strip_all_whitespace(response.get_data(as_text=True))

    def test_empty_messages_exist_if_no_files_returned(self, s3, data_api_client):
        # With no files in S3, both sections show their "no content" message.
        data_api_client.get_framework.return_value = self.framework('open')
        with self.app.test_client():
            self.login()
            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            assert response.status_code == 200
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc, tables_exist=False)
            for empty_message in [
                '<p class="summary-item-no-content">No communications have been sent out.</p>',
                '<p class="summary-item-no-content">No clarification questions and answers have been posted yet.</p>',
            ]:
                assert self.strip_all_whitespace(empty_message) in \
                    self.strip_all_whitespace(response.get_data(as_text=True))

    def test_dates_for_open_framework_closed_for_questions(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', clarification_questions_open=False)
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)
            assert response.status_code == 200
            assert 'All clarification questions and answers will be published by 5pm BST, 29 September 2015.' in data
            assert "The deadline for clarification questions is" not in data

    def test_dates_for_open_framework_open_for_questions(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', clarification_questions_open=True)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)
            assert response.status_code == 200
            assert "All clarification questions and answers will be published by" not in data
            assert 'The deadline for clarification questions is 5pm BST, 22 September 2015.' in data

    def test_the_tables_should_be_displayed_correctly(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')
        files = [
            ('updates/communications/', 'file 1', 'odt'),
            ('updates/communications/', 'file 2', 'odt'),
            ('updates/clarifications/', 'file 3', 'odt'),
            ('updates/clarifications/', 'file 4', 'odt'),
        ]
        # the communications table is always before the clarifications table
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                "g-cloud-7/communications/{}".format(section), filename, ext
            ) for section, filename, ext in files
        ]
        with self.app.test_client():
            self.login()
            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc)
            tables = doc.xpath('//div[contains(@class, "updates-document-tables")]/table')
            # test that for each table, we have the right number of rows
            for table in tables:
                item_rows = table.findall('.//tr[@class="summary-item-row"]')
                assert len(item_rows) == 2
                # test that the file names and urls are right
                for row in item_rows:
                    section, filename, ext = files.pop(0)
                    filename_link = row.find('.//a[@class="document-link-with-icon"]')
                    assert filename in filename_link.text_content()
                    assert filename_link.get('href') == '/suppliers/frameworks/g-cloud-7/files/{}{}.{}'.format(
                        section,
                        filename.replace(' ', '%20'),
                        ext,
                    )

    def test_names_with_the_section_name_in_them_will_display_correctly(self, s3, data_api_client):
        # Files whose names contain the *other* section's name must still be
        # bucketed by path, not by filename.
        data_api_client.get_framework.return_value = self.framework('open')
        # for example: 'g-cloud-7-updates/clarifications/communications%20file.odf'
        files = [
            ('updates/communications/', 'clarifications file', 'odt'),
            ('updates/clarifications/', 'communications file', 'odt')
        ]
        s3.return_value.list.return_value = [
            _return_fake_s3_file_dict(
                "g-cloud-7/communications/{}".format(section), filename, ext
            ) for section, filename, ext in files
        ]
        with self.app.test_client():
            self.login()
            response = self.client.get(
                '/suppliers/frameworks/g-cloud-7/updates'
            )
            doc = html.fromstring(response.get_data(as_text=True))
            self._assert_page_title_and_table_headings(doc)
            tables = doc.xpath('//div[contains(@class, "updates-document-tables")]/table')
            # test that for each table, we have the right number of rows
            for table in tables:
                item_rows = table.findall('.//tr[@class="summary-item-row"]')
                assert len(item_rows) == 1
                # test that the file names and urls are right
                for row in item_rows:
                    section, filename, ext = files.pop(0)
                    filename_link = row.find('.//a[@class="document-link-with-icon"]')
                    assert filename in filename_link.text_content()
                    assert filename_link.get('href') == '/suppliers/frameworks/g-cloud-7/files/{}{}.{}'.format(
                        section,
                        filename.replace(' ', '%20'),
                        ext,
                    )

    def test_question_box_is_shown_if_countersigned_agreement_is_not_yet_returned(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('live', clarification_questions_open=False)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)
            assert response.status_code == 200
            assert u'Ask a question about your G-Cloud 7 application' in data

    def test_no_question_box_shown_if_countersigned_agreement_is_returned(self, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('live', clarification_questions_open=False)
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(countersigned_path="path")
        with self.app.test_client():
            self.login()
            response = self.client.get('/suppliers/frameworks/g-cloud-7/updates')
            data = response.get_data(as_text=True)
            assert response.status_code == 200
            assert u'Ask a question about your G-Cloud 7 application' not in data
class TestSendClarificationQuestionEmail(BaseApplicationTest):
    """Tests for POSTing a question from the framework updates page.

    While clarification questions are open the question is treated as a
    clarification question (two emails: one to the admin mailbox, one
    confirmation to the supplier); once they are closed it is treated as an
    application question (a single email to the admin mailbox).
    """

    def _send_email(self, clarification_question):
        # Log in and submit the question form on the updates page.
        with self.app.test_client():
            self.login()
            return self.client.post(
                "/suppliers/frameworks/g-cloud-7/updates",
                data={
                    'clarification_question': clarification_question,
                }
            )

    def _assert_clarification_email(self, send_email, is_called=True, succeeds=True):
        # succeeds=True  -> both admin email and supplier confirmation sent.
        # is_called=True -> at least the admin email was attempted.
        if succeeds:
            assert send_email.call_count == 2
        elif is_called:
            assert send_email.call_count == 1
        else:
            assert send_email.call_count == 0
        if is_called:
            send_email.assert_any_call(
                "digitalmarketplace@mailinator.com",
                FakeMail('Supplier ID:'),
                "MANDRILL",
                "Test Framework clarification question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Test Framework Supplier",
                ["clarification-question"],
                reply_to="suppliers+g-cloud-7@digitalmarketplace.service.gov.uk",
            )
        if succeeds:
            send_email.assert_any_call(
                "email@email.com",
                FakeMail('Thanks for sending your Test Framework clarification', 'Test Framework updates page'),
                "MANDRILL",
                "Thanks for your clarification question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Digital Marketplace Admin",
                ["clarification-question-confirm"]
            )

    def _assert_application_email(self, send_email, succeeds=True):
        # Application questions produce exactly one email, reply-to the supplier.
        if succeeds:
            assert send_email.call_count == 1
        else:
            assert send_email.call_count == 0
        if succeeds:
            send_email.assert_called_with(
                "digitalmarketplace@mailinator.com",
                FakeMail('Test Framework question asked'),
                "MANDRILL",
                "Test Framework application question",
                "do-not-reply@digitalmarketplace.service.gov.uk",
                "Test Framework Supplier",
                ["application-question"],
                reply_to="email@email.com",
            )

    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_not_send_email_if_invalid_clarification_question(self, send_email, s3, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open')
        data_api_client.get_supplier_framework_info.return_value = self.supplier_framework()
        for invalid_clarification_question in [
            {
                'question': '',  # empty question
                'error_message': 'Add text if you want to ask a question.'
            }, {
                'question': '\t \n\n\n',  # whitespace-only question
                'error_message': 'Add text if you want to ask a question.'
            },
            {
                'question': ('ten__chars' * 500) + '1',  # 5000+ char question
                'error_message': 'Question cannot be longer than 5000 characters'
            }
        ]:
            response = self._send_email(invalid_clarification_question['question'])
            self._assert_clarification_email(send_email, is_called=False, succeeds=False)
            assert response.status_code == 400
            assert self.strip_all_whitespace('There was a problem with your submitted question') in \
                self.strip_all_whitespace(response.get_data(as_text=True))
            assert self.strip_all_whitespace(invalid_clarification_question['error_message']) in \
                self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_call_send_email_with_correct_params(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')
        clarification_question = 'This is a clarification question.'
        response = self._send_email(clarification_question)
        self._assert_clarification_email(send_email)
        assert response.status_code == 200
        assert self.strip_all_whitespace(
            '<p class="banner-message">Your clarification question has been sent. Answers to all ' +
            'clarification questions will be published on this page.</p>'
        ) in self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_call_send_g7_email_with_correct_params(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework',
                                                                    clarification_questions_open=False)
        clarification_question = 'This is a G7 question.'
        response = self._send_email(clarification_question)
        self._assert_application_email(send_email)
        assert response.status_code == 200
        # FIX: the apostrophe in "You'll" previously terminated the single-quoted
        # literal (a SyntaxError); it is now escaped. The banner text is unchanged.
        # NOTE(review): confirm the rendered page uses a straight apostrophe and
        # not a typographic one (\u2019) - adjust the expected string if not.
        assert self.strip_all_whitespace(
            '<p class="banner-message">Your question has been sent. You\'ll get a reply from ' +
            'the Crown Commercial Service soon.</p>'
        ) in self.strip_all_whitespace(response.get_data(as_text=True))

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_create_audit_event(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')
        clarification_question = 'This is a clarification question'
        response = self._send_email(clarification_question)
        self._assert_clarification_email(send_email)
        assert response.status_code == 200
        data_api_client.create_audit_event.assert_called_with(
            audit_type=AuditTypes.send_clarification_question,
            user="email@email.com",
            object_type="suppliers",
            object_id=1234,
            data={"question": clarification_question, 'framework': 'g-cloud-7'})

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_create_g7_question_audit_event(self, send_email, data_api_client, s3):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework',
                                                                    clarification_questions_open=False)
        clarification_question = 'This is a G7 question'
        response = self._send_email(clarification_question)
        self._assert_application_email(send_email)
        assert response.status_code == 200
        data_api_client.create_audit_event.assert_called_with(
            audit_type=AuditTypes.send_application_question,
            user="email@email.com",
            object_type="suppliers",
            object_id=1234,
            data={"question": clarification_question, 'framework': 'g-cloud-7'})

    @mock.patch('app.main.views.frameworks.data_api_client')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_should_be_a_503_if_email_fails(self, send_email, data_api_client):
        data_api_client.get_framework.return_value = self.framework('open', name='Test Framework')
        send_email.side_effect = EmailError("Arrrgh")
        clarification_question = 'This is a clarification question.'
        response = self._send_email(clarification_question)
        self._assert_clarification_email(send_email, succeeds=False)
        assert response.status_code == 503
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
@mock.patch('app.main.views.frameworks.count_unanswered_questions')
class TestG7ServicesList(BaseApplicationTest):
def test_404_when_g7_pending_and_no_complete_services(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.find_draft_services.return_value = {'services': []}
count_unanswered.return_value = 0
response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
assert response.status_code == 404
def test_404_when_g7_pending_and_no_declaration(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_declaration.return_value = {
"declaration": {"status": "started"}
}
response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
assert response.status_code == 404
def test_no_404_when_g7_open_and_no_complete_services(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.find_draft_services.return_value = {'services': []}
count_unanswered.return_value = 0
response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
assert response.status_code == 200
def test_no_404_when_g7_open_and_no_declaration(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_declaration.return_value = {
"declaration": {"status": "started"}
}
response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/iaas')
assert response.status_code == 200
def test_shows_g7_message_if_pending_and_application_made(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='pending')
data_api_client.get_supplier_declaration.return_value = {'declaration': FULL_G7_SUBMISSION} # noqa
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
]
}
count_unanswered.return_value = 0, 1
response = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
doc = html.fromstring(response.get_data(as_text=True))
assert response.status_code == 200
heading = doc.xpath('//div[@class="summary-item-lede"]//h2[@class="summary-item-heading"]')
assert len(heading) > 0
assert u"G-Cloud 7 is closed for applications" in heading[0].xpath('text()')[0]
assert u"You made your supplier declaration and submitted 1 complete service." in \
heading[0].xpath('../p[1]/text()')[0]
assert not doc.xpath(
"//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
t="declaration before any services can be submitted",
)
def test_drafts_list_progress_count(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
count_unanswered.return_value = 3, 1
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
]
}
submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
lot_page = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
assert u'Service can be moved to complete' not in lot_page.get_data(as_text=True)
assert u'4 unanswered questions' in lot_page.get_data(as_text=True)
assert u'1 draft service' in submissions.get_data(as_text=True)
assert u'complete service' not in submissions.get_data(as_text=True)
def test_drafts_list_can_be_completed(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
count_unanswered.return_value = 0, 1
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
]
}
res = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
assert u'Service can be marked as complete' in res.get_data(as_text=True)
assert u'1 optional question unanswered' in res.get_data(as_text=True)
@pytest.mark.parametrize("incomplete_declaration", ({}, {"status": "started"},))
def test_drafts_list_completed(self, count_unanswered, data_api_client, incomplete_declaration):
with self.app.test_client():
self.login()
count_unanswered.return_value = 0, 1
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_declaration.return_value = {
'declaration': incomplete_declaration,
}
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
]
}
submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
lot_page = self.client.get('/suppliers/frameworks/g-cloud-7/submissions/scs')
submissions_doc, lot_page_doc = (html.fromstring(r.get_data(as_text=True)) for r in (submissions, lot_page))
assert u'Service can be moved to complete' not in lot_page.get_data(as_text=True)
assert u'1 optional question unanswered' in lot_page.get_data(as_text=True)
assert u'1 service marked as complete' in submissions.get_data(as_text=True)
assert u'draft service' not in submissions.get_data(as_text=True)
for doc in (submissions_doc, lot_page_doc,):
assert doc.xpath(
"//*[@class='banner-warning-without-action'][normalize-space(string())=$t][.//a[@href=$u]]",
t=u"You need to make the supplier\u00a0declaration before any services can be submitted",
u=(
"/suppliers/frameworks/g-cloud-7/declaration"
if incomplete_declaration.get("status") == "started" else
"/suppliers/frameworks/g-cloud-7/declaration/start"
),
)
def test_drafts_list_completed_with_declaration_status(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='open')
data_api_client.get_supplier_declaration.return_value = {
'declaration': {
'status': 'complete'
}
}
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
]
}
submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
doc = html.fromstring(submissions.get_data(as_text=True))
assert u'1 service will be submitted' in submissions.get_data(as_text=True)
assert u'1 complete service was submitted' not in submissions.get_data(as_text=True)
assert u'browse-list-item-status-happy' in submissions.get_data(as_text=True)
assert not doc.xpath(
"//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
t="declaration before any services can be submitted",
)
def test_drafts_list_services_were_submitted(self, count_unanswered, data_api_client):
with self.app.test_client():
self.login()
data_api_client.get_framework.return_value = self.framework(status='standstill')
data_api_client.get_supplier_declaration.return_value = {
'declaration': {
'status': 'complete'
}
}
data_api_client.find_draft_services.return_value = {
'services': [
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'not-submitted'},
{'serviceName': 'draft', 'lotSlug': 'scs', 'status': 'submitted'},
]
}
submissions = self.client.get('/suppliers/frameworks/g-cloud-7/submissions')
assert u'1 complete service was submitted' in submissions.get_data(as_text=True)
def test_dos_drafts_list_with_open_framework(self, count_unanswered, data_api_client):
    """For an open DOS framework, a submitted draft shows the future-tense
    submission copy and no declaration-warning banner."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = self.framework(slug='digital-outcomes-and-specialists',
                                                                    status='open')
        data_api_client.get_supplier_declaration.return_value = {
            'declaration': {'status': 'complete'}
        }
        data_api_client.find_draft_services.return_value = {
            'services': [{'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'submitted'}]
        }

        response = self.client.get('/suppliers/frameworks/digital-outcomes-and-specialists/submissions')
        page_text = response.get_data(as_text=True)
        page_doc = html.fromstring(page_text)

        assert u'This will be submitted' in page_text
        assert u'browse-list-item-status-happy' in page_text
        assert u'Apply to provide' in page_text
        # The declaration banner must not appear when the declaration is complete.
        assert not page_doc.xpath(
            "//*[contains(@class,'banner')][contains(normalize-space(string()),$t)]",
            t="declaration before any services can be submitted",
        )
def test_dos_drafts_list_with_closed_framework(self, count_unanswered, data_api_client):
    """Once the DOS framework is pending, the list shows past-tense 'Submitted'
    copy and no longer invites the supplier to apply."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = self.framework(slug="digital-outcomes-and-specialists",
                                                                    status='pending')
        data_api_client.get_supplier_declaration.return_value = {
            'declaration': {'status': 'complete'}
        }
        draft_services = [
            {'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'not-submitted'},
            {'serviceName': 'draft', 'lotSlug': 'digital-specialists', 'status': 'submitted'},
        ]
        data_api_client.find_draft_services.return_value = {'services': draft_services}

        response = self.client.get('/suppliers/frameworks/digital-outcomes-and-specialists/submissions')
        page_text = response.get_data(as_text=True)

        assert response.status_code == 200
        assert u'Submitted' in page_text
        assert u'Apply to provide' not in page_text
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestCreateFrameworkAgreement(BaseApplicationTest):
    """Tests for POST /suppliers/frameworks/<slug>/create-agreement."""

    def test_creates_framework_agreement_and_redirects_to_signer_details_page(self, data_api_client):
        """Creating an agreement calls the API with the supplier id and
        redirects to that agreement's signer-details page."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(
                slug='g-cloud-8', status='standstill', framework_agreement_version="1.0"
            )
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=True)
            data_api_client.create_framework_agreement.return_value = {"agreement": {"id": 789}}

            response = self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement")

            data_api_client.create_framework_agreement.assert_called_once_with(1234, 'g-cloud-8', 'email@email.com')
            assert response.status_code == 302
            assert response.location == 'http://localhost/suppliers/frameworks/g-cloud-8/789/signer-details'

    def test_404_if_supplier_not_on_framework(self, data_api_client):
        """Suppliers who did not get onto the framework cannot create an agreement."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = self.framework(status='standstill')
            data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                on_framework=False)

            response = self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement")
            assert response.status_code == 404

    def test_404_if_framework_in_wrong_state(self, data_api_client):
        """Agreement creation is refused in every lifecycle status other than
        'standstill' and 'live'."""
        with self.app.test_client():
            self.login()
            # Suppliers can only sign agreements in 'standstill' and 'live' lifecycle statuses
            for framework_status in ('coming', 'open', 'pending', 'expired'):
                data_api_client.get_framework.return_value = self.framework(status=framework_status)
                data_api_client.get_supplier_framework_info.return_value = self.supplier_framework(
                    on_framework=True)

                response = self.client.post("/suppliers/frameworks/g-cloud-8/create-agreement")
                assert response.status_code == 404
@mock.patch("app.main.views.frameworks.data_api_client", autospec=True)
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestSignerDetailsPage(BaseApplicationTest):
    """Tests for the signer-details step of the agreement-signing flow.

    ``mock.patch`` decorators are applied bottom-up, so each test method
    receives ``(return_supplier_framework, data_api_client)`` in that order.
    """

    def test_signer_details_shows_company_name(self, return_supplier_framework, data_api_client):
        """The page heading includes the (possibly non-ASCII) organisation name from the declaration."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        supplier_framework['declaration']['nameOfOrganisation'] = u'£unicodename'
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        page = res.get_data(as_text=True)
        assert res.status_code == 200
        assert u'Details of the person who is signing on behalf of £unicodename' in page

    def test_signer_details_shows_existing_signer_details(self, return_supplier_framework, data_api_client):
        """Signer details already stored on the agreement are rendered on the page."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={
                "signerName": "Sid James",
                "signerRole": "Ex funny man"
            }
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        page = res.get_data(as_text=True)
        assert res.status_code == 200
        assert "Sid James" in page
        assert "Ex funny man" in page

    def test_404_if_framework_in_wrong_state(self, return_supplier_framework, data_api_client):
        """The page is unavailable outside the signable framework statuses."""
        self.login()
        # Suppliers can only sign agreements in 'standstill' and 'live' lifecycle statuses
        data_api_client.get_framework.return_value = self.framework(status='pending')
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        assert res.status_code == 404

    @mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
    def test_we_abort_if_agreement_does_not_match_supplier_framework(
        self, check_agreement_is_related_to_supplier_framework_or_abort, return_supplier_framework, data_api_client
    ):
        """The view passes the fetched agreement and supplier framework to the mismatch guard."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        self.client.get("/suppliers/frameworks/g-cloud-8/234/signer-details")
        # This call will abort because supplier_framework has mismatched supplier_id 1234
        check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
            self.framework_agreement(supplier_id=2345)['agreement'],
            supplier_framework
        )

    def test_should_be_an_error_if_no_full_name(self, return_supplier_framework, data_api_client):
        """Posting without a signerName produces a 400 with a validation message."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerRole': "The Boss"
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide the full name of the person signing on behalf of the company" in page

    def test_should_be_an_error_if_no_role(self, return_supplier_framework, data_api_client):
        """Posting without a signerRole produces a 400 with a validation message."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "Josh Moss"
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide the role of the person signing on behalf of the company" in page

    def test_should_be_an_error_if_signer_details_fields_more_than_255_characters(
        self, return_supplier_framework, data_api_client
    ):
        """Both signer fields accept up to 255 characters; 256 is rejected with a 400."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        # 255 characters should be fine
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "J" * 255,
                'signerRole': "J" * 255
            }
        )
        assert res.status_code == 302
        # 256 characters should be an error
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data={
                'signerName': "J" * 256,
                'signerRole': "J" * 256
            }
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert "You must provide a name under 256 characters" in page
        assert "You must provide a role under 256 characters" in page

    def test_should_strip_whitespace_on_signer_details_fields(self, return_supplier_framework, data_api_client):
        """Leading/trailing whitespace is stripped before the agreement update API call."""
        signer_details = {
            'signerName': "  Josh Moss  ",
            'signerRole': "  The Boss  "
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        self.login()
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/signer-details",
            data=signer_details
        )
        assert res.status_code == 302
        data_api_client.update_framework_agreement.assert_called_with(
            234,
            {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
            'email@email.com'
        )

    def test_provide_signer_details_form_with_valid_input_redirects_to_upload_page(
        self, return_supplier_framework, data_api_client
    ):
        """Valid signer details are saved and the user is sent to signature-upload."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss"
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement()
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/signature-upload" in res.location
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
                'email@email.com'
            )

    def test_provide_signer_details_form_with_valid_input_redirects_to_contract_review_page_if_file_already_uploaded(
        self, return_supplier_framework, data_api_client
    ):
        """If a signature page was already uploaded (path set and filename in session),
        valid signer details redirect straight to contract-review."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss",
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={'signerName': 'existing name', 'signerRole': 'existing role'},
            signed_agreement_path='existing/path.pdf'
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            with self.client.session_transaction() as sess:
                # An already uploaded file will also have set a filename in the session
                sess['signature_page'] = 'test.pdf'
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/contract-review" in res.location
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {'signedAgreementDetails': {'signerName': 'Josh Moss', 'signerRole': 'The Boss'}},
                'email@email.com'
            )

    def test_signer_details_form_redirects_to_signature_upload_page_if_file_in_session_but_no_signed_agreement_path(
        self, return_supplier_framework, data_api_client
    ):
        """A stale session filename without a signedAgreementPath on the agreement
        still routes to signature-upload rather than contract-review."""
        signer_details = {
            'signerName': "Josh Moss",
            'signerRole': "The Boss",
        }
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={'signerName': 'existing name', 'signerRole': 'existing role'}
        )
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        with self.client as c:
            self.login()
            with self.client.session_transaction() as sess:
                # We set a file name that could be from a previous framework agreement signing attempt but this
                # agreement does not have a signedAgreementPath
                sess['signature_page'] = 'test.pdf'
            res = c.post(
                "/suppliers/frameworks/g-cloud-8/234/signer-details",
                data=signer_details
            )
            assert res.status_code == 302
            assert "suppliers/frameworks/g-cloud-8/234/signature-upload" in res.location
@mock.patch("app.main.views.frameworks.data_api_client", autospec=True)
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestSignatureUploadPage(BaseApplicationTest):
    """Tests for the signature-upload step of the agreement-signing flow.

    ``mock.patch`` decorators are applied bottom-up, so each test method
    receives its method-level mocks first, then
    ``(return_supplier_framework, data_api_client)``.
    """

    @mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_we_abort_if_agreement_does_not_match_supplier_framework(
        self,
        generate_timestamped_document_upload_path,
        s3,
        check_agreement_is_related_to_supplier_framework_or_abort,
        return_supplier_framework,
        data_api_client
    ):
        """The view passes the fetched agreement and supplier framework to the mismatch guard."""
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
        supplier_framework = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']
        return_supplier_framework.return_value = supplier_framework
        s3.return_value.get_key.return_value = None
        self.client.get("/suppliers/frameworks/g-cloud-8/234/signature-upload")
        # This call will abort because supplier_framework has mismatched supplier_id 1234
        check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
            self.framework_agreement(supplier_id=2345)['agreement'],
            supplier_framework
        )

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.generate_timestamped_document_upload_path')
    def test_upload_signature_page(
        self, generate_timestamped_document_upload_path, s3, return_supplier_framework, data_api_client
    ):
        """A valid upload is saved to S3, recorded on the agreement, stored in the
        session, and redirects to contract-review."""
        with self.client:
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            generate_timestamped_document_upload_path.return_value = 'my/path.jpg'
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.jpg'),
                }
            )
            # The upload path is derived from framework slug, supplier id and the
            # uploaded file's extension.
            generate_timestamped_document_upload_path.assert_called_once_with(
                'g-cloud-8',
                1234,
                'agreements',
                'signed-framework-agreement.jpg'
            )
            s3.return_value.save.assert_called_with(
                'my/path.jpg',
                mock.ANY,
                download_filename='Supplier_Nme-1234-signed-signature-page.jpg',
                acl='private',
                disposition_type='inline'
            )
            data_api_client.update_framework_agreement.assert_called_with(
                234,
                {"signedAgreementPath": 'my/path.jpg'},
                'email@email.com'
            )
            # The original filename is kept in the session for later display.
            assert session['signature_page'] == 'test.jpg'
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/234/contract-review'

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_empty')
    def test_signature_upload_returns_400_if_file_is_empty(
        self, file_is_empty, s3, return_supplier_framework, data_api_client
    ):
        """An empty uploaded file is rejected with a 400 and an error message."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_empty.return_value = True
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b''), 'test.pdf'),
                }
            )
            assert res.status_code == 400
            assert 'The file must not be empty' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_image')
    def test_signature_upload_returns_400_if_file_is_not_image_or_pdf(
        self, file_is_image, s3, return_supplier_framework, data_api_client
    ):
        """Files that are neither PDF nor image are rejected with a 400."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_image.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.txt'),
                }
            )
            assert res.status_code == 400
            assert 'The file must be a PDF, JPG or PNG' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.file_is_less_than_5mb')
    def test_signature_upload_returns_400_if_file_is_larger_than_5mb(
        self, file_is_less_than_5mb, s3, return_supplier_framework, data_api_client
    ):
        """Files over the 5MB limit are rejected with a 400."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement()
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = None
            file_is_less_than_5mb.return_value = False
            res = self.client.post(
                '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                data={
                    'signature_page': (StringIO(b'asdf'), 'test.jpg'),
                }
            )
            assert res.status_code == 400
            assert 'The file must be less than 5MB' in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_displays_uploaded_filename_and_timestamp(
        self, s3, return_supplier_framework, data_api_client
    ):
        """With a filename in session and an uploaded file in S3, both the name and
        the (localised) upload time are shown."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                with self.client.session_transaction() as sess:
                    sess['signature_page'] = 'test.pdf'
                res = c.get(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload'
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 200
                # 21:18 UTC is rendered as 22:18 — presumably UK local time (BST); confirm in the view.
                assert "test.pdf, uploaded Sunday 10 July 2016 at 22:18" in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_displays_file_upload_timestamp_if_no_filename_in_session(
        self, s3, return_supplier_framework, data_api_client
    ):
        """Without a session filename, only the upload timestamp is displayed."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                res = c.get(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload'
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 200
                assert "Uploaded Sunday 10 July 2016 at 22:18" in res.get_data(as_text=True)

    @mock.patch('dmutils.s3.S3')
    def test_signature_page_allows_continuation_without_file_chosen_to_be_uploaded_if_an_uploaded_file_already_exists(
        self, s3, return_supplier_framework, data_api_client
    ):
        """Posting an empty file field still continues to contract-review when a
        previously uploaded file already exists."""
        with self.app.test_client():
            data_api_client.get_framework.return_value = get_g_cloud_8()
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_path='already/uploaded/file/path.pdf'
            )
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True
            )['frameworkInterest']
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            with self.client as c:
                self.login()
                res = c.post(
                    '/suppliers/frameworks/g-cloud-8/234/signature-upload',
                    data={
                        'signature_page': (StringIO(b''), ''),
                    }
                )
                s3.return_value.get_key.assert_called_with('already/uploaded/file/path.pdf')
                assert res.status_code == 302
                assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/234/contract-review'
@mock.patch("app.main.views.frameworks.data_api_client")
@mock.patch("app.main.views.frameworks.return_supplier_framework_info_if_on_framework_or_abort")
class TestContractReviewPage(BaseApplicationTest):
@mock.patch('dmutils.s3.S3')
def test_contract_review_page_loads_with_correct_supplier_and_signer_details_and_filename(
    self, s3, return_supplier_framework, data_api_client
):
    """The contract-review page shows the organisation name, the stored signer
    details, the notification copy and the session's uploaded filename.

    Bug fix: the 'Returning the signature page will notify...' assertion was
    previously split across two statements without parentheses, so it asserted
    only a non-empty string literal (always true) and the second line — with
    the actual ``in page`` check — was a no-op expression. The two literals are
    now parenthesised into one implicitly-concatenated string so the membership
    check really runs.
    """
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        with self.client.session_transaction() as sess:
            sess['signature_page'] = 'test.pdf'
        res = self.client.get(
            "/suppliers/frameworks/g-cloud-8/234/contract-review"
        )
        assert res.status_code == 200
        s3.return_value.get_key.assert_called_with('I/have/returned/my/agreement.pdf')
        page = res.get_data(as_text=True)
        page_without_whitespace = self.strip_all_whitespace(page)
        assert u'Check the details you’ve given before returning the signature page for £unicodename' in page
        assert '<tdclass="summary-item-field"><span><p>signer_name</p><p>signer_role</p></span></td>' \
            in page_without_whitespace
        assert u"I have the authority to return this agreement on behalf of £unicodename" in page
        assert (
            "Returning the signature page will notify the Crown Commercial Service and the primary contact you "
            "gave in your G-Cloud 8 application, contact name at email@email.com."
        ) in page
        assert '<tdclass="summary-item-field-first"><span>test.pdf</span></td>' in page_without_whitespace
@mock.patch('dmutils.s3.S3')
def test_contract_review_page_loads_with_uploaded_time_of_file_if_no_filename_in_session(
    self, s3, return_supplier_framework, data_api_client
):
    """Without a session filename, the page falls back to showing the S3 file's
    upload timestamp in the summary table."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        res = self.client.get(
            "/suppliers/frameworks/g-cloud-8/234/contract-review"
        )
        assert res.status_code == 200
        page = res.get_data(as_text=True)
        assert u'Check the details you’ve given before returning the signature page for £unicodename' in page
        assert '<tdclass="summary-item-field-first"><span>UploadedSunday10July2016at22:18</span></td>' in self.strip_all_whitespace(page)  # noqa
@mock.patch('dmutils.s3.S3')
def test_contract_review_page_aborts_if_visited_when_information_required_to_return_agreement_does_not_exist(
    self, s3, return_supplier_framework, data_api_client
):
    """The page 404s when the agreement has no uploaded signature page."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"}
            # No file has been uploaded
        )
        # no file has been uploaded
        s3.return_value.get_key.return_value = None
        res = self.client.get(
            "/suppliers/frameworks/g-cloud-8/234/contract-review"
        )
        assert res.status_code == 404
@mock.patch('app.main.views.frameworks.check_agreement_is_related_to_supplier_framework_or_abort')
@mock.patch('dmutils.s3.S3')
def test_we_abort_if_agreement_does_not_match_supplier_framework(
    self, s3, check_agreement_is_related_to_supplier_framework_or_abort, return_supplier_framework, data_api_client
):
    """The contract-review view must hand the fetched agreement and the
    supplier's framework record to the mismatch guard, which aborts when the
    agreement's supplier id (2345) differs from the supplier framework's (1234)."""
    self.login()

    framework_interest = self.supplier_framework(framework_slug='g-cloud-8', on_framework=True)['frameworkInterest']

    data_api_client.get_framework.return_value = get_g_cloud_8()
    data_api_client.get_framework_agreement.return_value = self.framework_agreement(supplier_id=2345)
    return_supplier_framework.return_value = framework_interest
    s3.return_value.get_key.return_value = None

    self.client.get("/suppliers/frameworks/g-cloud-8/234/contract-review")

    check_agreement_is_related_to_supplier_framework_or_abort.assert_called_with(
        self.framework_agreement(supplier_id=2345)['agreement'],
        framework_interest
    )
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_return_400_response_and_no_email_sent_if_authorisation_not_checked(
    self, send_email, s3, return_supplier_framework, data_api_client
):
    """Posting without the authorisation checkbox gives a 400 and sends no email."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/contract-review",
            data={}
        )
        assert res.status_code == 400
        page = res.get_data(as_text=True)
        assert send_email.called is False
        assert "You must confirm you have the authority to return the agreement" in page
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_valid_framework_agreement_returned_updates_api_and_sends_confirmation_emails_and_unsets_session(
    self, send_email, s3, return_supplier_framework, data_api_client
):
    """A valid return signs the agreement via the API, emails both distinct
    contacts, and clears the session's signature-page filename."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email2@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        with self.client.session_transaction() as sess:
            sess['signature_page'] = 'test.pdf'
        self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/contract-review",
            data={
                'authorisation': 'I have the authority to return this agreement on behalf of company name'
            }
        )
        data_api_client.sign_framework_agreement.assert_called_once_with(
            234,
            'email@email.com',
            {'uploaderUserId': 123}
        )
        # Declaration primaryContactEmail and current_user.email_address are different so expect two recipients
        send_email.assert_called_once_with(
            ['email2@email.com', 'email@email.com'],
            mock.ANY,
            'MANDRILL',
            'Your G-Cloud 8 signature page has been received',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-framework-agreement']
        )
        # Check 'signature_page' has been removed from session
        with self.client.session_transaction() as sess:
            assert 'signature_page' not in sess
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_valid_framework_agreement_returned_sends_only_one_confirmation_email_if_contact_email_addresses_are_equal(
    self, send_email, s3, return_supplier_framework, data_api_client
):
    """When the declaration contact email equals the logged-in user's email,
    the recipient list is de-duplicated to a single address."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/contract-review",
            data={
                'authorisation': 'I have the authority to return this agreement on behalf of company name'
            }
        )
        send_email.assert_called_once_with(
            ['email@email.com'],
            mock.ANY,
            'MANDRILL',
            'Your G-Cloud 8 signature page has been received',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-framework-agreement']
        )
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_return_503_response_if_mandrill_exception_raised_by_send_email(
    self, send_email, s3, return_supplier_framework, data_api_client
):
    """An EmailError from the mail provider surfaces as a 503 response."""
    with self.app.test_client():
        self.login()
        data_api_client.get_framework.return_value = get_g_cloud_8()
        return_supplier_framework.return_value = self.supplier_framework(
            framework_slug='g-cloud-8',
            on_framework=True,
            declaration={
                "primaryContact": "contact name",
                "primaryContactEmail": "email@email.com",
                "nameOfOrganisation": u"£unicodename"
            },
        )['frameworkInterest']
        data_api_client.get_framework_agreement.return_value = self.framework_agreement(
            signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
            signed_agreement_path="I/have/returned/my/agreement.pdf"
        )
        s3.return_value.get_key.return_value = {
            'last_modified': '2016-07-10T21:18:00.000000Z'
        }
        send_email.side_effect = EmailError()
        res = self.client.post(
            "/suppliers/frameworks/g-cloud-8/234/contract-review",
            data={
                'authorisation': 'I have the authority to return this agreement on behalf of company name'
            }
        )
        assert res.status_code == 503
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_email_not_sent_if_api_call_fails(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """When the sign-agreement API call errors, no confirmation email is sent."""
        with self.app.test_client():
            self.login()
            data_api_client.get_framework.return_value = get_g_cloud_8()
            # API rejects the signature with a 500; the view should propagate it.
            data_api_client.sign_framework_agreement.side_effect = APIError(mock.Mock(status_code=500))
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            # The API was attempted, failed, and the email step never ran.
            assert data_api_client.sign_framework_agreement.called is True
            assert res.status_code == 500
            assert send_email.called is False
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_having_signed_contract_variation_redirects_to_framework_dashboard(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
framework = get_g_cloud_8()
framework['variations'] = {
"1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email2@email.com",
"nameOfOrganisation": u"£unicodename"
},
agreed_variations={
'1': {
"agreedUserId": 2,
"agreedAt": "2016-06-06T00:00:00.000000Z",
}
}
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_framework_agreement_returned_with_feature_flag_off_redirects_to_framework_dashboard(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """With the contract-variation feature flag off, the variation is ignored."""
        with self.app.test_client():
            self.login()
            # Disable the variation feature even though a variation exists.
            self.app.config['FEATURE_FLAGS_CONTRACT_VARIATION'] = False
            framework = get_g_cloud_8()
            framework['frameworks']['variations'] = {
                "1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
            }
            data_api_client.get_framework.return_value = framework
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            # Redirects to the framework dashboard, not the variation page.
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
    @mock.patch('dmutils.s3.S3')
    @mock.patch('app.main.views.frameworks.send_email')
    def test_framework_agreement_returned_having_not_signed_contract_variation_redirects_to_variation(
        self, send_email, s3, return_supplier_framework, data_api_client
    ):
        """A supplier with an unsigned variation is redirected to the variation page."""
        with self.app.test_client():
            self.login()
            framework = get_g_cloud_8()
            framework['frameworks']['variations'] = {
                "1": {"createdAt": "2016-06-06T20:01:34.000000Z"}
            }
            data_api_client.get_framework.return_value = framework
            return_supplier_framework.return_value = self.supplier_framework(
                framework_slug='g-cloud-8',
                on_framework=True,
                declaration={
                    "primaryContact": "contact name",
                    "primaryContactEmail": "email2@email.com",
                    "nameOfOrganisation": u"£unicodename"
                },
                # No variations agreed yet.
                agreed_variations={}
            )['frameworkInterest']
            data_api_client.get_framework_agreement.return_value = self.framework_agreement(
                signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
                signed_agreement_path="I/have/returned/my/agreement.pdf"
            )
            s3.return_value.get_key.return_value = {
                'last_modified': '2016-07-10T21:18:00.000000Z'
            }
            res = self.client.post(
                "/suppliers/frameworks/g-cloud-8/234/contract-review",
                data={
                    'authorisation': 'I have the authority to return this agreement on behalf of company name'
                }
            )
            assert res.status_code == 302
            assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8/contract-variation/1'
@mock.patch('dmutils.s3.S3')
@mock.patch('app.main.views.frameworks.send_email')
def test_framework_agreement_returned_for_framework_with_no_variations_redirects_to_framework_dashboard(
self, send_email, s3, return_supplier_framework, data_api_client
):
with self.app.test_client():
self.login()
framework = get_g_cloud_8()
framework['variations'] = {}
data_api_client.get_framework.return_value = framework
return_supplier_framework.return_value = self.supplier_framework(
framework_slug='g-cloud-8',
on_framework=True,
declaration={
"primaryContact": "contact name",
"primaryContactEmail": "email@email.com",
"nameOfOrganisation": u"£unicodename"
},
)['frameworkInterest']
data_api_client.get_framework_agreement.return_value = self.framework_agreement(
signed_agreement_details={"signerName": "signer_name", "signerRole": "signer_role"},
signed_agreement_path="I/have/returned/my/agreement.pdf"
)
s3.return_value.get_key.return_value = {
'last_modified': '2016-07-10T21:18:00.000000Z'
}
res = self.client.post(
"/suppliers/frameworks/g-cloud-8/234/contract-review",
data={
'authorisation': 'I have the authority to return this agreement on behalf of company name'
}
)
assert res.status_code == 302
assert res.location == 'http://localhost/suppliers/frameworks/g-cloud-8'
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestContractVariation(BaseApplicationTest):
    """Tests for the /suppliers/frameworks/<slug>/contract-variation/<num> views."""

    def setup_method(self, method):
        super(TestContractVariation, self).setup_method(method)
        # Supplier who is on the framework and has returned the signed
        # agreement -- the precondition for seeing contract variations.
        self.good_supplier_framework = self.supplier_framework(
            declaration={'nameOfOrganisation': 'A.N. Supplier',
                         'primaryContactEmail': 'bigboss@email.com'},
            on_framework=True,
            agreement_returned=True,
            agreement_details={}
        )
        self.g8_framework = self.framework(
            name='G-Cloud 8',
            slug='g-cloud-8',
            status='live',
            framework_agreement_version='3.1'
        )
        # One proposed contract variation, keyed "1".
        self.g8_framework['frameworks']['variations'] = {"1": {"createdAt": "2018-08-16"}}

        with self.app.test_client():
            self.login()

    def test_get_page_renders_if_all_ok(self, data_api_client):
        """Happy path: the variation page renders with its heading."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(doc.xpath('//h1[contains(text(), "G-Cloud 8: proposed contract variation")]')) == 1

    def test_supplier_must_be_on_framework(self, data_api_client):
        """Suppliers not on the framework get a 404."""
        supplier_not_on_framework = self.good_supplier_framework.copy()
        supplier_not_on_framework['frameworkInterest']['onFramework'] = False
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = supplier_not_on_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")

        assert res.status_code == 404

    def test_variation_must_exist(self, data_api_client):
        """Requesting an unknown variation number gives a 404."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        # There is no variation number 2
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/2")

        assert res.status_code == 404

    def test_agreement_must_be_returned_already(self, data_api_client):
        """Variation page is unavailable until the framework agreement is returned."""
        agreement_not_returned = self.good_supplier_framework.copy()
        agreement_not_returned['frameworkInterest']['agreementReturned'] = False
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = agreement_not_returned
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")

        assert res.status_code == 404

    def test_shows_form_if_not_yet_agreed(self, data_api_client):
        """The acceptance checkbox and submit button are shown before agreement."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 1
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 1

    def test_shows_signer_details_and_no_form_if_already_agreed(self, data_api_client):
        """Once agreed, the page shows who agreed and hides the form."""
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": u"William Drăyton",
            }}
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = already_agreed
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        page_text = res.get_data(as_text=True)
        doc = html.fromstring(page_text)

        assert res.status_code == 200
        assert len(doc.xpath('//h2[contains(text(), "Contract variation status")]')) == 1
        assert u"<span>William Drăyton<br />agreed@email.com<br />Friday 19 August 2016 at 16:47</span>" in page_text
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 0
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 0

    def test_shows_updated_heading_and_countersigner_details_but_no_form_if_countersigned(self, data_api_client):
        """Once countersigned, the heading changes and countersigner details show."""
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": u"William Drăyton",
            }}
        g8_with_countersigned_variation = self.framework(
            status='live',
            name='G-Cloud 8'
        )
        g8_with_countersigned_variation['frameworks']['variations'] = {"1": {
            "createdAt": "2016-08-01T12:30:00.000000Z",
            "countersignedAt": "2016-10-01T02:00:00.000000Z",
            "countersignerName": "A.N. Other",
            "countersignerRole": "Head honcho",
        }
        }
        data_api_client.get_framework.return_value = g8_with_countersigned_variation
        data_api_client.get_supplier_framework_info.return_value = already_agreed
        res = self.client.get("/suppliers/frameworks/g-cloud-8/contract-variation/1")
        page_text = res.get_data(as_text=True)
        doc = html.fromstring(page_text)

        assert res.status_code == 200
        assert len(doc.xpath('//h1[contains(text(), "G-Cloud 8: contract variation")]')) == 1
        assert len(doc.xpath('//h2[contains(text(), "Contract variation status")]')) == 1
        assert u"<span>A.N. Other<br />Head honcho<br />Saturday 1 October 2016</span>" in page_text
        assert len(doc.xpath('//label[contains(text(), "I accept these proposed changes")]')) == 0
        assert len(doc.xpath('//input[@value="Save and continue"]')) == 0

    def test_api_is_called_to_agree(self, data_api_client):
        """A valid POST records the agreement through the API and redirects back."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework
        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"}
                               )

        assert res.status_code == 302
        assert res.location == "http://localhost/suppliers/frameworks/g-cloud-8/contract-variation/1"
        data_api_client.agree_framework_variation.assert_called_once_with(
            1234, 'g-cloud-8', '1', 123, 'email@email.com'
        )

    @mock.patch('app.main.views.frameworks.send_email')
    def test_email_is_sent_to_correct_users(self, send_email, data_api_client):
        """Confirmation goes to the framework contact and the logged-in user."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework
        self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                         data={"accept_changes": "Yes"}
                         )

        send_email.assert_called_once_with(
            ['bigboss@email.com', 'email@email.com'],
            mock.ANY,
            'MANDRILL',
            'G-Cloud 8: you have accepted the proposed contract variation',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-variation-accepted']
        )

    @mock.patch('app.main.views.frameworks.send_email')
    def test_only_one_email_sent_if_user_is_framework_contact(self, send_email, data_api_client):
        """Duplicate recipients are collapsed when the user is the contact."""
        same_email_as_current_user = self.good_supplier_framework.copy()
        same_email_as_current_user['frameworkInterest']['declaration']['primaryContactEmail'] = 'email@email.com'
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = same_email_as_current_user
        self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                         data={"accept_changes": "Yes"}
                         )

        send_email.assert_called_once_with(
            ['email@email.com'],
            mock.ANY,
            'MANDRILL',
            'G-Cloud 8: you have accepted the proposed contract variation',
            'do-not-reply@digitalmarketplace.service.gov.uk',
            'Digital Marketplace Admin',
            ['g-cloud-8-variation-accepted']
        )

    def test_success_message_is_displayed_on_success(self, data_api_client):
        """Following the redirect shows the acceptance banner message."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework
        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"},
                               follow_redirects=True
                               )
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 200
        assert len(
            doc.xpath('//p[@class="banner-message"][contains(text(), "You have accepted the proposed changes.")]')
        ) == 1, res.get_data(as_text=True)

    @mock.patch('app.main.views.frameworks.send_email')
    def test_api_is_not_called_and_no_email_sent_for_subsequent_posts(self, send_email, data_api_client):
        """Re-posting after agreement is a no-op: no API call, no email."""
        already_agreed = self.good_supplier_framework.copy()
        already_agreed['frameworkInterest']['agreedVariations'] = {
            "1": {
                "agreedAt": "2016-08-19T15:47:08.116613Z",
                "agreedUserId": 1,
                "agreedUserEmail": "agreed@email.com",
                "agreedUserName": "William Drayton",
            }}
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = already_agreed
        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1",
                               data={"accept_changes": "Yes"}
                               )

        assert res.status_code == 200
        assert data_api_client.agree_framework_variation.called is False
        assert send_email.called is False

    def test_error_if_box_not_ticked(self, data_api_client):
        """Submitting without the acceptance checkbox yields a 400 with a message."""
        data_api_client.get_framework.return_value = self.g8_framework
        data_api_client.get_supplier_framework_info.return_value = self.good_supplier_framework

        res = self.client.post("/suppliers/frameworks/g-cloud-8/contract-variation/1", data={})
        doc = html.fromstring(res.get_data(as_text=True))

        assert res.status_code == 400
        assert len(
            doc.xpath('//span[@class="validation-message"][contains(text(), "You can only save and continue if you agree to the proposed changes")]')  # noqa
        ) == 1
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestReuseFrameworkSupplierDeclaration(BaseApplicationTest):
    """Tests for frameworks/<framework_slug>/declaration/reuse view."""

    def setup_method(self, method):
        super(TestReuseFrameworkSupplierDeclaration, self).setup_method(method)
        with self.app.test_client():
            self.login()

    def test_reusable_declaration_framework_slug_param(self, data_api_client):
        """Ensure that when using the param to specify declaration we collect the correct declaration."""
        # Modify the data client.
        t07 = '2009-12-03T01:01:01.000000Z'
        framework = {
            'x_field': 'foo',
            'allowDeclarationReuse': True,
            'applicationCloseDate': t07,
            'slug': 'g-cloud-8',
            'name': 'g-cloud-8'
        }
        data_api_client.get_framework.return_value = {'frameworks': framework}
        data_api_client.get_supplier_framework_info.return_value = {
            'frameworkInterest': {
                'declaration': {'status': 'complete'},
                'onFramework': True
            }
        }
        # Do the get.
        with self.client as cont:
            resp = cont.get(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse?reusable_declaration_framework_slug=g-cloud-8'
            )
            # Assert request arg inside context manager.
            assert request.args['reusable_declaration_framework_slug'] == 'g-cloud-8'
        # Assert response OK.
        assert resp.status_code == 200
        # Assert expected api calls.
        data_api_client.get_framework.assert_has_calls([mock.call('g-cloud-9'), mock.call('g-cloud-8')])
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'g-cloud-8')

    def test_404_when_specified_declaration_not_found(self, data_api_client):
        """Fail on a 404 if declaration is specified but not found."""
        # Modify the data client.
        framework = {}
        data_api_client.get_framework.return_value = {'frameworks': framework}
        data_api_client.get_supplier_framework_info.side_effect = APIError(mock.Mock(status_code=404))
        # Do the get.
        resp = self.client.get(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse?reusable_declaration_framework_slug=g-cloud-8'
        )
        # Assert the 404
        assert resp.status_code == 404
        # Assert expected api calls.
        data_api_client.get_framework.assert_called_once_with('g-cloud-9')
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'g-cloud-8')

    def test_redirect_when_declaration_not_found(self, data_api_client):
        """Redirect if a reusable declaration is not found."""
        # Modify the data client.
        t09 = '2009-03-03T01:01:01.000000Z'
        frameworks = [
            {'x_field': 'foo', 'allowDeclarationReuse': True, 'applicationCloseDate': t09, 'slug': 'ben-cloud-2'},
        ]
        supplier_declarations = []
        data_api_client.find_frameworks.return_value = {'frameworks': frameworks}
        data_api_client.find_supplier_declarations.return_value = dict(
            frameworkInterest=supplier_declarations
        )
        # Do the get.
        with self.client:
            resp = self.client.get(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            )
        # Assert the redirect
        assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
        # Assert expected api calls.
        data_api_client.get_framework.assert_called_once_with('g-cloud-9')
        data_api_client.find_supplier_declarations.assert_called_once_with(1234)

    def test_success_reuse_g_cloud_7_for_8(self, data_api_client):
        """Test success path."""
        # Modify the data client.
        # Four frameworks with descending close dates; only g-cloud-7 is both
        # reusable and has a completed declaration newer than the others.
        t09 = '2009-03-03T01:01:01.000000Z'
        t10 = '2010-03-03T01:01:01.000000Z'
        t11 = '2011-03-03T01:01:01.000000Z'
        t12 = '2012-03-03T01:01:01.000000Z'
        frameworks_response = [
            {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t12,
                'slug': 'g-cloud-8',
                'name': 'G-cloud 8'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t11,
                'slug': 'g-cloud-7',
                'name': 'G-cloud 7'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': True,
                'applicationCloseDate': t10,
                'slug': 'dos',
                'name': 'Digital'
            }, {
                'x_field': 'foo',
                'allowDeclarationReuse': False,
                'applicationCloseDate': t09,
                'slug': 'g-cloud-6',
                'name': 'G-cloud 6'
            },
        ]
        framework_response = {
            'x_field': 'foo',
            'allowDeclarationReuse': True,
            'applicationCloseDate': t09,
            'slug': 'g-cloud-8',
            'name': 'G-cloud 8'
        }
        supplier_declarations_response = [
            {'x': 'foo', 'frameworkSlug': 'g-cloud-6', 'declaration': {'status': 'complete'}, 'onFramework': True},
            {'x': 'foo', 'frameworkSlug': 'g-cloud-7', 'declaration': {'status': 'complete'}, 'onFramework': True},
            {'x': 'foo', 'frameworkSlug': 'dos', 'declaration': {'status': 'complete'}, 'onFramework': True}
        ]
        data_api_client.find_frameworks.return_value = {'frameworks': frameworks_response}
        data_api_client.get_framework.return_value = {'frameworks': framework_response}
        data_api_client.find_supplier_declarations.return_value = {'frameworkInterest': supplier_declarations_response}
        # Do the get.
        resp = self.client.get(
            '/suppliers/frameworks/g-cloud-8/declaration/reuse',
        )
        # Assert the success.
        assert resp.status_code == 200
        expected = 'In March 2011, your organisation completed a declaration for G-cloud 7.'
        assert expected in str(resp.data)
        # Assert expected api calls.
        data_api_client.get_framework.assert_called_once_with('g-cloud-8')
        data_api_client.find_supplier_declarations.assert_called_once_with(1234)
@mock.patch('app.main.views.frameworks.data_api_client', autospec=True)
class TestReuseFrameworkSupplierDeclarationPost(BaseApplicationTest):
    """Tests for frameworks/<framework_slug>/declaration/reuse POST view."""

    def setup_method(self, method):
        super(TestReuseFrameworkSupplierDeclarationPost, self).setup_method(method)
        with self.app.test_client():
            self.login()

    def test_reuse_false(self, data_api_client):
        """Assert that the redirect happens and the client sets the prefill pref to None."""
        data = {'reuse': 'False', 'old_framework_slug': 'should-not-be-used'}
        with self.client:
            resp = self.client.post(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
                data=data
            )
        # Assert the redirect
        assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
        data_api_client.set_supplier_framework_prefill_declaration.assert_called_once_with(
            1234,
            'g-cloud-9',
            None,
            'email@email.com'
        )

    def test_reuse_true(self, data_api_client):
        """Assert that the redirect happens and the client sets the prefill pref to the desired framework slug."""
        data = {'reuse': True, 'old_framework_slug': 'digital-outcomes-and-specialists-2'}
        data_api_client.get_supplier_framework_info.return_value = {
            'frameworkInterest': {
                'x_field': 'foo',
                'frameworkSlug': 'digital-outcomes-and-specialists-2',
                'declaration': {'status': 'complete'},
                'onFramework': True
            }
        }
        framework_response = {'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': True}}
        data_api_client.get_framework.return_value = framework_response
        with self.client:
            resp = self.client.post(
                '/suppliers/frameworks/g-cloud-9/declaration/reuse',
                data=data
            )
        # Assert the redirect
        assert resp.location.endswith('/suppliers/frameworks/g-cloud-9/declaration')
        # These api calls need to be made so that we can verify the declaration.
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-2')
        data_api_client.get_supplier_framework_info.assert_called_once_with(
            1234,
            'digital-outcomes-and-specialists-2'
        )
        # This end point sets the prefill preference.
        data_api_client.set_supplier_framework_prefill_declaration.assert_called_once_with(
            1234,
            'g-cloud-9',
            'digital-outcomes-and-specialists-2',
            'email@email.com'
        )

    def test_reuse_invalid_framework_post(self, data_api_client):
        """Assert 404 for non reusable framework."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists'}
        # A framework with allowDeclarationReuse as False
        data_api_client.get_framework.return_value = {
            'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': False}
        }
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should get the framework and error on allowDeclarationReuse as False.
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists')
        # Should not do the declaration call if the framework is invalid
        assert not data_api_client.get_supplier_framework_info.called
        # Should 404
        assert resp.status_code == 404

    def test_reuse_non_existent_framework_post(self, data_api_client):
        """Assert 404 for non existent framework."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists-1000000'}
        # Attach does not exist.
        data_api_client.get_framework.side_effect = HTTPError()
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should error on get.
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-1000000')
        # Should not do the declaration call if the framework is invalid.
        assert not data_api_client.get_supplier_framework_info.called
        # Should 404.
        assert resp.status_code == 404

    def test_reuse_non_existent_declaration_post(self, data_api_client):
        """Assert 404 for non existent declaration."""
        data = {'reuse': 'true', 'old_framework_slug': 'digital-outcomes-and-specialists-2'}
        framework_response = {'frameworks': {'x_field': 'foo', 'allowDeclarationReuse': True}}
        data_api_client.get_framework.return_value = framework_response
        # Attach does not exist.
        data_api_client.get_supplier_framework_info.side_effect = HTTPError()
        # Do the post.
        resp = self.client.post(
            '/suppliers/frameworks/g-cloud-9/declaration/reuse',
            data=data
        )
        # Should get the framework
        data_api_client.get_framework.assert_called_once_with('digital-outcomes-and-specialists-2')
        # Should error getting declaration.
        data_api_client.get_supplier_framework_info.assert_called_once_with(1234, 'digital-outcomes-and-specialists-2')
        # Should 404.
        assert resp.status_code == 404
class TestReuseFrameworkSupplierDeclarationForm(BaseApplicationTest):
    """Tests for app.main.forms.frameworks.ReuseDeclarationForm form."""

    @pytest.mark.parametrize('falsey_value', ('False', '', 'false'))
    def test_false_values(self, falsey_value):
        """Every accepted falsey string must coerce the ``reuse`` field to False."""
        with self.app.test_request_context():
            submission = MultiDict(
                {'framework_slug': 'digital-outcomes-and-specialists', 'reuse': falsey_value}
            )
            assert ReuseDeclarationForm(submission).reuse.data is False
|
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
#from itertools import tee, izip
from datetime import datetime
from django.contrib.gis.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.measure import Distance as D
from django.contrib.gis.geos import *
from django.db.models import Q,F
from checkin.conf import settings
from itertools import chain
class CheckinManager(models.GeoManager):
    """Manager for CheckinCampaign with geo-aware nearby-place lookup."""

    def nearby_places(self, lat=None, lng=None, accuracy=None, campaigns=None):
        """Return active CheckinPlace objects near (lat, lng).

        Only campaigns whose pk is in ``campaigns`` and whose start/end date
        window includes now are considered.  Places without a custom
        ``proximity`` use the campaign-wide radius; places with one are
        re-checked against their own radius.  Results are ordered nearest
        first within each campaign.

        Fixes: removed a stray line-continuation backslash after
        ``.order_by('distance')`` that made this method a SyntaxError, and
        made ``campaigns=None`` safe (previously ``pk__in=None`` would raise).
        """
        out = []
        now = datetime.now()
        position = Point(lng, lat)
        for campaign in self.get_query_set().filter(
                Q(date_start__isnull=True) | Q(date_start__lte=now),
                Q(date_end__isnull=True) | Q(date_end__gte=now),
                pk__in=campaigns or []):
            rs = campaign.checkinplace_set.filter(
                Q(date_start__isnull=True) | Q(date_start__lte=now),  # start date
                Q(date_end__isnull=True) | Q(date_end__gte=now),  # end date
                is_active=True)\
                .distance(position)\
                .order_by('distance')
            # Places relying on the campaign-wide proximity radius.
            res_1 = rs.filter(proximity=None,
                              point__distance_lte=(
                                  position,
                                  D(m=campaign.proximity + settings.EXTENDED_RADIUS_LIMIT)
                              ),
                              )
            # Places with a custom proximity: re-check each against its own radius.
            res_2 = rs.exclude(proximity__isnull=True)
            rs = []
            for u in res_2:
                try:
                    rs.append(CheckinPlace.objects.get(
                        pk=u.pk,
                        point__distance_lte=(
                            position,
                            D(m=u.proximity + settings.EXTENDED_RADIUS_LIMIT)
                        ),
                    ))
                except CheckinPlace.DoesNotExist:
                    # Outside its custom radius -- not a nearby place.
                    pass
            if len(res_1) > 0:
                out.append([u for u in res_1])
            if len(rs) > 0:
                out.append([u for u in rs])
        return list(chain(*out))

    def get_query_set(self):
        return super(CheckinManager, self).get_query_set()
class CheckinCampaign(models.Model):
    """A check-in campaign owned by a user, grouping CheckinPlace rows."""
    owner = models.ForeignKey(User)
    name = models.CharField(_('Name'), max_length=100)
    date_start = models.DateTimeField(_('Date start'), blank=True, null=True)
    date_end = models.DateTimeField(_('Date end'), blank=True, null=True)
    date_created = models.DateTimeField(_('Date created'), auto_now_add=True)
    # allow_multi_ci = models.BooleanField(_('Allow overlaping checkins'), default=True)
    proximity = models.IntegerField(_('Minimum required proximity'), default=settings.DEFAULT_PROXIMITY)
    # NOTE(review): default reuses DEFAULT_PROXIMITY -- looks like it should be
    # a dedicated accuracy default; confirm against settings.
    min_accuracy = models.IntegerField(_('Minimum required accuracy'), default=settings.DEFAULT_PROXIMITY)
    is_active = models.BooleanField(_('Is active'), default=True)

    objects = CheckinManager()

    def checkin(self, lng, lat, place_id=None):
        """Return the queryset of places matching a check-in at (lng, lat), or False.

        With ``place_id`` the lookup is restricted to that place, using its own
        proximity when set (falling back to the campaign proximity); otherwise
        any active place within the campaign proximity matches.

        Fix: removed leftover debug ``print`` statements and collapsed the
        redundant ``if place_id`` / ``if not place_id`` double test.
        """
        q = {'is_active': True}
        if place_id:
            q['id'] = place_id
            q['point__distance_lte'] = (
                Point(lng, lat),
                D(m=self.checkinplace_set.get(id=place_id).proximity or self.proximity)
            )
        else:
            q['point__distance_lte'] = (Point(lng, lat), D(m=self.proximity))
        qs = self.checkinplace_set.filter(**q)
        # TODO: second pass for checkin places that have custom proximity set
        if qs.count() > 0:
            # TODO: account for allow_multi_ci
            return qs
        else:
            return False

    def __unicode__(self):
        return u"%s" % self.name
class CheckinPlace(models.Model):
    """A geolocated place within a campaign that users can check in to."""
    campaign = models.ForeignKey(CheckinCampaign)
    name = models.CharField(_('Name'), max_length=100)
    address = models.CharField(_('Address'), max_length=250, blank=True, null=True)
    city = models.CharField(_('City'), max_length=100, blank=True, null=True)
    state = models.CharField(_('State or province'), max_length=100, blank=True, null=True)
    zipcode = models.CharField(_('Zip code'), max_length=10, blank=True, null=True)
    phone = models.CharField(_('Phone'), max_length=15, blank=True, null=True)
    lng = models.FloatField(_('Longitude'), blank=True, null=True)
    lat = models.FloatField(_('Latitude'), blank=True, null=True)
    distances_unit = models.CharField(_('Distance unit'), max_length=3, choices=settings.DISTANCE_CHOICES, default=settings.DEFAULT_DISTANCE_UNIT)
    # When null, the campaign-wide proximity applies.
    proximity = models.IntegerField(_('Minimum required proximity'), blank=True, null=True)
    min_accuracy = models.IntegerField(_('Minimum required accuracy'), blank=True, null=True)
    date_start = models.DateTimeField(_('Date start'), blank=True, null=True)
    date_end = models.DateTimeField(_('Date end'), blank=True, null=True)
    date_created = models.DateTimeField(_('Date created'), auto_now_add=True)
    is_active = models.BooleanField(_('Is active'), default=True)
    point = models.PointField(srid=4326, geography=True, blank=True, null=True)

    objects = models.GeoManager()

    def save(self, *args, **kwargs):
        """Keep ``point`` in sync with lng/lat on every save.

        Fix: lng/lat are nullable, so only build the Point when both are
        present -- ``Point(None, None)`` raises.
        """
        if self.lng is not None and self.lat is not None:
            self.point = Point(self.lng, self.lat)
        super(CheckinPlace, self).save(*args, **kwargs)

    def __unicode__(self):
        return u"%s" % self.name

    class Meta:
        unique_together = (("campaign", "lat", "lng"),)
        ordering = ("date_created", "name")
class Checkin(models.Model):
    """A single user check-in attempt, valid or not, with raw client data."""
    date = models.DateTimeField(_('Checkin date'), auto_now_add=True)
    place = models.ForeignKey(CheckinPlace, blank=True, null=True)
    user = models.ForeignKey(User)
    is_valid = models.BooleanField(default=False)

    # Checkin infos
    lng = models.FloatField()
    lat = models.FloatField()
    accuracy = models.FloatField(default=20000)
    timestamp = models.DateTimeField(_('Checkin date'), auto_now_add=True)
    useragent = models.CharField(max_length=250, default="Unknown")
    visitor_ip = models.IPAddressField(blank=True, null=True)
    extra_data = models.TextField(blank=True, null=True)

    def distance(self):
        """Human-readable distance between the check-in coords and its place.

        Falls back to "Unknown" when the place is missing or the lookup fails.
        Fix: bare ``except:`` narrowed to ``except Exception`` so SystemExit /
        KeyboardInterrupt are no longer swallowed.
        """
        try:
            place = CheckinPlace.objects.filter(pk=self.place.id).distance(Point(self.lng, self.lat))[0]
            if place.distance.m > 999:
                return '~%0.3f km' % place.distance.km
            else:
                return '~%0.3f m' % place.distance.m
        except Exception:
            # self.place may be None or deleted; report rather than crash.
            return "Unknown"

    def __unicode__(self):
        title = 'Valid checkin' if self.is_valid else 'Invalid checkin'
        return u"%s at %s" % (title, self.place)

    class Meta:
        ordering = ('-date',)
Checkin place list
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
#from itertools import tee, izip
from datetime import datetime
from django.contrib.gis.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.measure import Distance as D
from django.contrib.gis.geos import *
from django.db.models import Q,F
from checkin.conf import settings
from itertools import chain
class CheckinManager(models.GeoManager):
    """Manager for CheckinCampaign adding geographic lookup helpers."""

    def nearby_places(self, lat=None, lng=None, accuracy=None, campaigns=None):
        """Return every active CheckinPlace close enough to (lat, lng).

        Only campaigns whose pk is in ``campaigns`` and whose date window
        contains "now" are considered.  Places with no per-place proximity
        use the campaign-wide radius; places that define their own proximity
        are re-checked individually against it.  ``accuracy`` is accepted but
        currently unused (kept for interface compatibility).

        Fixes vs. original: a stray line-continuation backslash after
        ``.order_by('distance')`` made the original a SyntaxError, and
        ``campaigns=[]`` was a shared mutable default.
        """
        if campaigns is None:
            campaigns = []
        out = []
        now = datetime.now()
        position = Point(lng, lat)
        for campaign in self.get_query_set().filter(
                # We filter only active campaigns
                Q(date_start__isnull=True) | Q(date_start__lte=now),
                Q(date_end__isnull=True) | Q(date_end__gte=now),
                pk__in=campaigns):
            # Active places of this campaign within their own date window,
            # annotated with the distance to the visitor's position.
            rs = campaign.checkinplace_set.filter(
                Q(date_start__isnull=True) | Q(date_start__lte=now),  # date debut
                Q(date_end__isnull=True) | Q(date_end__gte=now),      # date fin
                is_active=True,
            ).distance(position).order_by('distance')
            # Places relying on the campaign-wide proximity.
            res_1 = rs.filter(
                proximity=None,
                point__distance_lte=(
                    position,
                    D(m=campaign.proximity + settings.EXTENDED_RADIUS_LIMIT),
                ),
            )
            # Places defining their own proximity: check each one's radius.
            res_2 = rs.exclude(proximity__isnull=True)
            custom_matches = []
            for place in res_2:
                try:
                    custom_matches.append(CheckinPlace.objects.get(
                        pk=place.pk,
                        point__distance_lte=(
                            position,
                            D(m=place.proximity + settings.EXTENDED_RADIUS_LIMIT),
                        ),
                    ))
                except CheckinPlace.DoesNotExist:
                    # Too far away for its own radius: skip it.
                    pass
            if len(res_1) > 0:
                out.append(list(res_1))
            if len(custom_matches) > 0:
                out.append(custom_matches)
        return list(chain(*out))

    def get_query_set(self):
        return super(CheckinManager, self).get_query_set()
class CheckinCampaign(models.Model):
    """A check-in campaign owned by a user, grouping check-in places."""
    owner = models.ForeignKey(User)
    name = models.CharField(_('Name'), max_length=100)
    date_start = models.DateTimeField(_('Date start'), blank=True, null=True)
    date_end = models.DateTimeField(_('Date end'), blank=True, null=True)
    date_created = models.DateTimeField(_('Date created'), auto_now_add=True)
    # allow_multi_ci = models.BooleanField(_('Allow overlaping checkins'), default=True)
    proximity = models.IntegerField(_('Minimum required proximity'), default=settings.DEFAULT_PROXIMITY)
    # NOTE(review): default reuses DEFAULT_PROXIMITY — presumably a dedicated
    # accuracy setting was intended; confirm before changing.
    min_accuracy = models.IntegerField(_('Minimum required accuracy'), default=settings.DEFAULT_PROXIMITY)
    is_active = models.BooleanField(_('Is active'), default=True)
    objects = CheckinManager()

    def checkin(self, lng, lat, place_id=None):
        """Return the queryset of places a visitor at (lng, lat) may check in
        to, or False when none is in range.

        When ``place_id`` is given only that place is considered, and its own
        proximity (if set) overrides the campaign-wide one.  Raises
        CheckinPlace.DoesNotExist when ``place_id`` is unknown for this
        campaign.  (Leftover debug ``print`` statements removed.)
        """
        q = {'is_active': True}
        if place_id:
            q['id'] = place_id
            q['point__distance_lte'] = (
                Point(lng, lat),
                D(m=self.checkinplace_set.get(id=place_id).proximity or self.proximity),
            )
        else:
            q['point__distance_lte'] = (Point(lng, lat), D(m=self.proximity))
        qs = self.checkinplace_set.filter(**q)
        # TODO: second pass for checkin places that have custom proximity set
        if qs.count() > 0:
            # TODO: account for allow_multi_ci
            return qs
        return False

    def __unicode__(self):
        return u"%s" % self.name
class CheckinPlace(models.Model):
    """A geographic place belonging to a campaign, where users may check in."""
    campaign = models.ForeignKey(CheckinCampaign)
    name = models.CharField(_('Name'), max_length=100)
    address = models.CharField(_('Address'), max_length=250, blank=True, null=True)
    city = models.CharField(_('City'), max_length=100, blank=True, null=True)
    state = models.CharField(_('State or province'), max_length=100, blank=True, null=True)
    zipcode = models.CharField(_('Zip code'), max_length=10, blank=True, null=True)
    phone = models.CharField(_('Phone'), max_length=15, blank=True, null=True)
    lng = models.FloatField(_('Longitude'), blank=True, null=True)
    lat = models.FloatField(_('Latitude'), blank=True, null=True)
    distances_unit = models.CharField(_('Distance unit'), max_length=3, choices=settings.DISTANCE_CHOICES, default=settings.DEFAULT_DISTANCE_UNIT)
    # Per-place radius; when NULL the campaign-wide proximity applies.
    proximity = models.IntegerField(_('Minimum required proximity'), blank=True, null=True)
    min_accuracy = models.IntegerField(_('Minimum required accuracy'), blank=True, null=True)
    date_start = models.DateTimeField(_('Date start'), blank=True, null=True)
    date_end = models.DateTimeField(_('Date end'), blank=True, null=True)
    date_created = models.DateTimeField(_('Date created'), auto_now_add=True)
    is_active = models.BooleanField(_('Is active'), default=True)
    point = models.PointField(srid=4326, geography=True, blank=True, null=True)
    objects = models.GeoManager()

    def save(self, *args, **kwargs):
        """Keep the PostGIS ``point`` in sync with the plain lng/lat fields.

        lng/lat are nullable, so only build the Point when both are present —
        the original unconditionally called Point(None, None) and crashed.
        """
        if self.lng is not None and self.lat is not None:
            self.point = Point(self.lng, self.lat)
        super(CheckinPlace, self).save(*args, **kwargs)

    def __unicode__(self):
        return u"%s" % self.name

    class Meta:
        # A campaign may not contain two places at the same coordinates.
        unique_together = (("campaign", "lat", "lng"),)
        ordering = ("date_created", "name")
class Checkin(models.Model):
    """One check-in attempt by a user, with the raw data reported by the client."""
    date = models.DateTimeField(_('Checkin date'), auto_now_add=True)
    place = models.ForeignKey(CheckinPlace, blank=True, null=True)
    user = models.ForeignKey(User)
    is_valid = models.BooleanField(default=False)
    # Checkin infos (as reported by the visitor's device)
    lng = models.FloatField()
    lat = models.FloatField()
    accuracy = models.FloatField(default=20000)
    timestamp = models.DateTimeField(_('Checkin date'), auto_now_add=True)
    useragent = models.CharField(max_length=250, default="Unknown")
    visitor_ip = models.IPAddressField(blank=True, null=True)
    extra_data = models.TextField(blank=True, null=True)

    def distance(self):
        """Return a human-readable distance between the check-in position and
        its place, or "Unknown" when it cannot be computed (e.g. no place set).
        """
        try:
            place = CheckinPlace.objects.filter(pk=self.place.id).distance(Point(self.lng, self.lat))[0]
            if place.distance.m > 999:
                return '~%0.3f km' % place.distance.km
            else:
                return '~%0.3f m' % place.distance.m
        except Exception:
            # Best effort: missing place, bad coordinates, GEOS errors, ...
            # (narrowed from a bare except so KeyboardInterrupt/SystemExit escape)
            return "Unknown"

    def __unicode__(self):
        title = 'Valid checkin' if self.is_valid else 'Invalid checkin'
        return u"%s at %s" % (title, self.place)

    class Meta:
        # Newest check-ins first.
        ordering = ('-date',)
|
import json
import uuid
from datetime import datetime, timezone
import pytest
from flask import url_for
from freezegun import freeze_time
from app.main.views.jobs import get_time_left
from tests import job_json, notification_json, sample_uuid
from tests.conftest import (
SERVICE_ONE_ID,
create_active_caseworking_user,
create_active_user_with_permissions,
create_notifications,
create_template,
normalize_spaces,
)
@pytest.mark.parametrize('user, expected_rows', [
    (create_active_user_with_permissions(), (
        (
            'File Sending Delivered Failed'
        ),
        (
            'export 1/1/2016.xls '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'all email addresses.xlsx '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'applicants.ods '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'thisisatest.csv '
            'Sent today at 12:12pm 1 0 0'
        ),
    )),
    (create_active_caseworking_user(), (
        (
            'File Messages to be sent'
        ),
        (
            'send_me_later.csv '
            'Sending 1 January 2016 at 11:09am 1'
        ),
        (
            'even_later.csv '
            'Sending 1 January 2016 at 11:09pm 1'
        ),
        (
            'File Sending Delivered Failed'
        ),
        (
            'export 1/1/2016.xls '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'all email addresses.xlsx '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'applicants.ods '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'thisisatest.csv '
            'Sent today at 12:12pm 1 0 0'
        ),
    )),
])
@freeze_time("2012-12-12 12:12")
def test_jobs_page_shows_scheduled_jobs_if_user_doesnt_have_dashboard(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_jobs,
    user,
    expected_rows,
):
    """The caseworking user's expected rows include a scheduled-jobs table
    ('Messages to be sent') before the sent-jobs table; the regular user's do not."""
    client_request.login(user)
    page = client_request.get('main.view_jobs', service_id=service_one['id'])
    for index, row in enumerate(expected_rows):
        assert normalize_spaces(page.select('tr')[index].text) == row
@pytest.mark.parametrize('user', [
    create_active_user_with_permissions(),
    create_active_caseworking_user(),
])
def test_get_jobs_shows_page_links(
    client_request,
    active_user_with_permissions,
    mock_get_jobs,
    user,
):
    """The jobs list is paginated: both user types get next/previous links."""
    client_request.login(user)
    page = client_request.get('main.view_jobs', service_id=SERVICE_ONE_ID)
    assert 'Next page' in page.find('li', {'class': 'next-page'}).text
    assert 'Previous page' in page.find('li', {'class': 'previous-page'}).text
@pytest.mark.parametrize('user', [
    create_active_user_with_permissions(),
    create_active_caseworking_user(),
])
@freeze_time("2012-12-12 12:12")
def test_jobs_page_doesnt_show_scheduled_on_page_2(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_jobs,
    user,
):
    """Page 2 of the jobs list shows only sent jobs — no scheduled-jobs table,
    even for caseworking users."""
    client_request.login(user)
    page = client_request.get('main.view_jobs', service_id=service_one['id'], page=2)
    for index, row in enumerate((
        (
            'File Sending Delivered Failed'
        ),
        (
            'export 1/1/2016.xls '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'all email addresses.xlsx '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'applicants.ods '
            'Sent today at 12:12pm 1 0 0'
        ),
        (
            'thisisatest.csv '
            'Sent today at 12:12pm 1 0 0'
        ),
    )):
        assert normalize_spaces(page.select('tr')[index].text) == row
@pytest.mark.parametrize('user', [
    create_active_user_with_permissions(),
    create_active_caseworking_user(),
])
@pytest.mark.parametrize(
    "status_argument, expected_api_call", [
        (
            '',
            [
                'created', 'pending', 'sending', 'pending-virus-check',
                'delivered', 'sent', 'returned-letter',
                'failed', 'temporary-failure', 'permanent-failure', 'technical-failure',
                'virus-scan-failed', 'validation-failed'
            ]
        ),
        (
            'sending',
            ['sending', 'created', 'pending', 'pending-virus-check']
        ),
        (
            'delivered',
            ['delivered', 'sent', 'returned-letter']
        ),
        (
            'failed',
            [
                'failed', 'temporary-failure', 'permanent-failure', 'technical-failure', 'virus-scan-failed',
                'validation-failed'
            ]
        )
    ]
)
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_page_for_one_job(
    client_request,
    mock_get_service_template,
    mock_get_job,
    mocker,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
    status_argument,
    expected_api_call,
    user,
):
    """The single-job page maps the ?status= filter to the right API statuses
    and renders the job's notifications, download link, and retention notice.

    NOTE(review): the `user` parameter is parametrized but never passed to
    client_request.login(), so both user cases exercise the default user —
    confirm whether a login call is missing.
    """
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        status=status_argument
    )
    assert page.h1.text.strip() == 'thisisatest.csv'
    assert ' '.join(page.find('tbody').find('tr').text.split()) == (
        '07123456789 template content Delivered 1 January at 11:10am'
    )
    # The notifications table auto-refreshes from this resource URL.
    assert page.find('div', {'data-key': 'notifications'})['data-resource'] == url_for(
        'main.view_job_updates',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        status=status_argument,
    )
    csv_link = page.select_one('a[download]')
    assert csv_link['href'] == url_for(
        'main.view_job_csv',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        status=status_argument
    )
    assert csv_link.text == 'Download this report'
    assert page.find('span', {'id': 'time-left'}).text == 'Data available for 7 days'
    # NOTE(review): this re-asserts the first tbody row already checked above.
    assert normalize_spaces(page.select_one('tbody tr').text) == normalize_spaces(
        '07123456789 '
        'template content '
        'Delivered 1 January at 11:10am'
    )
    assert page.select_one('tbody tr a')['href'] == url_for(
        'main.view_notification',
        service_id=SERVICE_ONE_ID,
        notification_id=sample_uuid(),
        from_job=fake_uuid,
    )
    mock_get_notifications.assert_called_with(
        SERVICE_ONE_ID,
        fake_uuid,
        status=expected_api_call
    )
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_page_for_one_job_with_flexible_data_retention(
    client_request,
    active_user_with_permissions,
    mock_get_service_template,
    mock_get_job,
    mocker,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """A service-specific SMS retention period (10 days) overrides the default
    7-day notice on the job page."""
    mock_get_service_data_retention.side_effect = [[{"days_of_retention": 10, "notification_type": "sms"}]]
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        status='delivered'
    )
    assert page.find('span', {'id': 'time-left'}).text == 'Data available for 10 days'
    assert "Cancel sending these letters" not in page
def test_get_jobs_should_tell_user_if_more_than_one_page(
    client_request,
    fake_uuid,
    service_one,
    mock_get_job,
    mock_get_service_template,
    mock_get_notifications_with_previous_next,
    mock_get_service_data_retention,
):
    """When the notifications API reports more pages, the job page says it is
    only showing the first 50 rows."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
        status='',
    )
    assert page.find('p', {'class': 'table-show-more-link'}).text.strip() == 'Only showing the first 50 rows'
def test_should_show_job_in_progress(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mock_get_job_in_progress,
    mocker,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """An in-progress job shows status pills and a percent-complete hint."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
    )
    assert [
        normalize_spaces(link.text)
        for link in page.select('.pill a')
    ] == [
        '10 sending', '0 delivered', '0 failed'
    ]
    assert page.select_one('p.hint').text.strip() == 'Report is 50% complete…'
def test_should_show_job_without_notifications(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mock_get_job_in_progress,
    mocker,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """An in-progress job with no notifications yet still shows the pills and
    hint, plus an empty-table placeholder message."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
    )
    assert [
        normalize_spaces(link.text)
        for link in page.select('.pill a')
    ] == [
        '10 sending', '0 delivered', '0 failed'
    ]
    assert page.select_one('p.hint').text.strip() == 'Report is 50% complete…'
    assert page.select_one('tbody').text.strip() == 'No messages to show yet…'
@freeze_time("2020-01-10 0:0:0")
@pytest.mark.parametrize('created_at, processing_started, expected_message', (
    # Recently created, not yet started
    (datetime(2020, 1, 10, 0, 0, 0), None, (
        'No messages to show yet…'
    )),
    # Just started
    (datetime(2020, 1, 10, 0, 0, 0), datetime(2020, 1, 10, 0, 0, 1), (
        'No messages to show yet…'
    )),
    # Created a while ago, just started
    (datetime(2020, 1, 1, 0, 0, 0), datetime(2020, 1, 10, 0, 0, 1), (
        'No messages to show yet…'
    )),
    # Created a while ago, started a couple of days ago
    (datetime(2020, 1, 1, 0, 0, 0), datetime(2020, 1, 8, 0, 0, 1), (
        'These messages have been deleted because they were sent more than 7 days ago'
    )),
))
def test_should_show_old_job(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mocker,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    fake_uuid,
    created_at,
    processing_started,
    expected_message,
):
    """Old jobs lose their pills, hint and download link; the table explains
    whether messages are pending or already deleted by retention."""
    mocker.patch('app.job_api_client.get_job', return_value={
        "data": job_json(
            SERVICE_ONE_ID,
            active_user_with_permissions,
            created_at=created_at.replace(tzinfo=timezone.utc).isoformat(),
            processing_started=(
                processing_started.replace(tzinfo=timezone.utc).isoformat()
                if processing_started else None
            ),
        ),
    })
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert not page.select('.pill a')
    assert not page.select('p.hint')
    assert not page.select('a[download]')
    assert page.select_one('tbody').text.strip() == expected_message
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_service_data_retention,
    fake_uuid,
    mocker,
):
    """A letter job shows printing/delivery estimates instead of status pills,
    and requests every notification status from the API."""
    notifications = create_notifications(template_type='letter', subject='template subject')
    get_notifications = mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications,
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.h1.text) == 'thisisatest.csv'
    assert normalize_spaces(page.select('p.bottom-gutter')[0].text) == (
        'Sent by Test User on 1 January at 11:09am Printing starts today at 5:30pm'
    )
    # No "just sent" banner unless just_sent=yes is passed.
    assert page.select('.banner-default-with-tick') == []
    assert normalize_spaces(page.select('tbody tr')[0].text) == (
        '1 Example Street template subject 1 January at 11:09am'
    )
    assert normalize_spaces(page.select('.keyline-block')[0].text) == (
        '1 Letter'
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == (
        '6 January Estimated delivery date'
    )
    # Letter jobs have no status filter on the CSV download link.
    assert page.select_one('a[download]')['href'] == url_for(
        'main.view_job_csv',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert page.select('.hint') == []
    get_notifications.assert_called_with(
        SERVICE_ONE_ID,
        fake_uuid,
        status=[
            'created',
            'pending',
            'sending',
            'pending-virus-check',
            'delivered',
            'sent',
            'returned-letter',
            'failed',
            'temporary-failure',
            'permanent-failure',
            'technical-failure',
            'virus-scan-failed',
            'validation-failed'
        ],
    )
@freeze_time("2016-01-01 11:09:00")
def test_should_show_letter_job_with_banner_after_sending_before_1730(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """Before the 5:30pm cutoff, a just-sent letter's banner says printing
    starts today (singular 'letter': one CSV row)."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letter has been sent. Printing starts today at 5:30pm.'
    )
@freeze_time("2016-01-01 11:09:00")
def test_should_show_letter_job_with_banner_when_there_are_multiple_CSV_rows(
    client_request,
    mock_get_service_letter_template,
    mock_get_job_in_progress,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """With multiple CSV rows the just-sent banner is pluralised ('letters')."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letters have been sent. Printing starts today at 5:30pm.'
    )
@freeze_time("2016-01-01 18:09:00")
def test_should_show_letter_job_with_banner_after_sending_after_1730(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """After the 5:30pm cutoff, the banner says printing starts tomorrow."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letter has been sent. Printing starts tomorrow at 5:30pm.'
    )
@freeze_time("2016-01-01T00:00:00.061258")
def test_should_show_scheduled_job(
    client_request,
    mock_get_service_template,
    mock_get_scheduled_job,
    mock_get_service_data_retention,
    mock_get_notifications,
    fake_uuid,
):
    """A scheduled job shows when it will send, links to the exact template
    version it will use, and offers a cancel button."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('main p')[1].text) == (
        'Sending Two week reminder today at midnight'
    )
    assert page.select('main p a')[0]['href'] == url_for(
        'main.view_template_version',
        service_id=SERVICE_ONE_ID,
        template_id='5d729fbd-239c-44ab-b498-75a985f3198f',
        version=1,
    )
    assert page.select_one('button[type=submit]').text.strip() == 'Cancel sending'
def test_should_cancel_job(
    client_request,
    fake_uuid,
    mock_get_job,
    mock_get_service_template,
    mocker,
):
    """Cancelling a job calls the API once and redirects to the dashboard."""
    mock_cancel = mocker.patch('app.job_api_client.cancel_job')
    client_request.post(
        'main.cancel_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        )
    )
    mock_cancel.assert_called_once_with(SERVICE_ONE_ID, fake_uuid)
def test_should_not_show_cancelled_job(
    client_request,
    active_user_with_permissions,
    mock_get_cancelled_job,
    fake_uuid,
):
    """A cancelled job's page returns 404."""
    client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        _expected_status=404,
    )
def test_should_cancel_letter_job(
    client_request,
    mocker,
    mock_get_service_letter_template,
    active_user_with_permissions
):
    """Cancelling a finished letter job (within the cancellation window) calls
    the cancel API once and redirects to the dashboard."""
    job_id = str(uuid.uuid4())
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status="finished"
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    # Patch get_job once: the original patched it twice with the same
    # single-item side_effect, the second patch silently overriding the first.
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications_json)
    mocker.patch('app.notification_api_client.get_notification_count_for_job_id', return_value=5)
    mock_cancel = mocker.patch('app.job_api_client.cancel_letter_job', return_value=5)
    client_request.post(
        'main.cancel_letter_job',
        service_id=SERVICE_ONE_ID,
        job_id=job_id,
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        )
    )
    mock_cancel.assert_called_once_with(SERVICE_ONE_ID, job_id)
@freeze_time("2019-06-20 17:30:00.000001")
@pytest.mark.parametrize("job_created_at, expected_fragment", [
    ("2019-06-20T15:30:00.000001+00:00", "today"),
    ("2019-06-19T15:30:00.000001+00:00", "yesterday"),
    ("2019-06-18T15:30:00.000001+00:00", "on 18 June"),
])
def test_should_not_show_cancel_link_for_letter_job_if_too_late(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_created_at,
    expected_fragment,
):
    """After printing has started, the cancel link disappears and the page
    says when the letters were printed."""
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID, active_user_with_permissions, job_id=job_id, created_at=job_created_at
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications_json
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id)
    )
    assert "Cancel sending these letters" not in page
    assert page.find('p', {'id': 'printing-info'}).text.strip() == "Printed {} at 5:30pm".format(expected_fragment)
@freeze_time("2019-06-20 15:32:00.000001")
@pytest.mark.parametrize(" job_status", [
    "finished", "in progress"
])
def test_should_show_cancel_link_for_letter_job(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_status,
):
    """Before printing starts, a letter job offers a cancel link pointing at
    the cancel endpoint, whatever the job status."""
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status=job_status
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications_json,
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id)
    )
    assert page.find('a', text='Cancel sending these letters').attrs["href"] == url_for(
        "main.cancel_letter_job", service_id=SERVICE_ONE_ID, job_id=job_id
    )
    assert page.find('p', {'id': 'printing-info'}).text.strip() == "Printing starts today at 5:30pm"
@freeze_time("2019-06-20 15:31:00.000001")
@pytest.mark.parametrize('job_status,number_of_processed_notifications', [['in progress', 2], ['finished', 1]])
def test_dont_cancel_letter_job_when_to_early_to_cancel(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_status,
    number_of_processed_notifications,
):
    """While notifications are still being processed, cancellation is refused
    with a flash message and the cancel API is never called."""
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status=job_status,
        notification_count=2
    )
    # Two get_job calls happen: one for the cancel view, one re-rendering the page.
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}, {"data": job}])
    notifications_json = notification_json(
        SERVICE_ONE_ID, job=job, status="created", template_type="letter", rows=number_of_processed_notifications
    )
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications_json)
    mocker.patch(
        'app.notification_api_client.get_notification_count_for_job_id', return_value=number_of_processed_notifications
    )
    mock_cancel = mocker.patch('app.job_api_client.cancel_letter_job')
    page = client_request.post(
        'main.cancel_letter_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id),
        _expected_status=200,
    )
    mock_cancel.assert_not_called()
    flash_message = normalize_spaces(page.find('div', class_='banner-dangerous').text)
    assert 'We are still processing these letters, please try again in a minute.' in flash_message
@freeze_time("2016-01-01 00:00:00.000001")
def test_should_show_updates_for_one_job_as_json(
    logged_in_client,
    service_one,
    active_user_with_permissions,
    mock_get_notifications,
    mock_get_service_template,
    mock_get_job,
    mock_get_service_data_retention,
    mocker,
    fake_uuid,
):
    """The AJAX updates endpoint returns JSON fragments for counts,
    notifications table and status line."""
    response = logged_in_client.get(url_for('main.view_job_updates', service_id=service_one['id'], job_id=fake_uuid))
    assert response.status_code == 200
    content = json.loads(response.get_data(as_text=True))
    assert 'sending' in content['counts']
    assert 'delivered' in content['counts']
    assert 'failed' in content['counts']
    assert 'Recipient' in content['notifications']
    assert '07123456789' in content['notifications']
    assert 'Status' in content['notifications']
    assert 'Delivered' in content['notifications']
    assert '12:01am' in content['notifications']
    assert 'Sent by Test User on 1 January at midnight' in content['status']
@pytest.mark.parametrize(
    "job_created_at, expected_message", [
        ("2016-01-10 11:09:00.000000+00:00", "Data available for 7 days"),
        ("2016-01-04 11:09:00.000000+00:00", "Data available for 1 day"),
        ("2016-01-03 11:09:00.000000+00:00", "Data available for 12 hours"),
        ("2016-01-02 23:59:59.000000+00:00", "Data no longer available")
    ]
)
@freeze_time("2016-01-10 12:00:00.000000")
def test_time_left(job_created_at, expected_message):
    """get_time_left counts down the 7-day retention window from creation."""
    assert get_time_left(job_created_at) == expected_message
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job_with_first_class_if_notifications_are_first_class(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_service_data_retention,
    fake_uuid,
    mocker,
):
    """First-class postage on the notifications shortens the estimated
    delivery date (5 January instead of 6)."""
    notifications = create_notifications(template_type='letter', postage='first')
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications)
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == '5 January Estimated delivery date'
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job_with_first_class_if_no_notifications(
    client_request,
    service_one,
    mock_get_job,
    fake_uuid,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    mocker
):
    """With no notifications yet, the template's own first-class postage
    drives the estimated delivery date."""
    mocker.patch(
        'app.service_api_client.get_service_template',
        return_value={'data': create_template(template_type='letter', postage='first')}
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == '5 January Estimated delivery date'
Don’t mock job processing in the future
It could hide bugs or make the tests harder to understand later on if
what they’re testing is an impossible scenario.
import json
import uuid
from datetime import datetime, timezone
import pytest
from flask import url_for
from freezegun import freeze_time
from app.main.views.jobs import get_time_left
from tests import job_json, notification_json, sample_uuid
from tests.conftest import (
SERVICE_ONE_ID,
create_active_caseworking_user,
create_active_user_with_permissions,
create_notifications,
create_template,
normalize_spaces,
)
@pytest.mark.parametrize('user, expected_rows', [
(create_active_user_with_permissions(), (
(
'File Sending Delivered Failed'
),
(
'export 1/1/2016.xls '
'Sent today at 12:12pm 1 0 0'
),
(
'all email addresses.xlsx '
'Sent today at 12:12pm 1 0 0'
),
(
'applicants.ods '
'Sent today at 12:12pm 1 0 0'
),
(
'thisisatest.csv '
'Sent today at 12:12pm 1 0 0'
),
)),
(create_active_caseworking_user(), (
(
'File Messages to be sent'
),
(
'send_me_later.csv '
'Sending 1 January 2016 at 11:09am 1'
),
(
'even_later.csv '
'Sending 1 January 2016 at 11:09pm 1'
),
(
'File Sending Delivered Failed'
),
(
'export 1/1/2016.xls '
'Sent today at 12:12pm 1 0 0'
),
(
'all email addresses.xlsx '
'Sent today at 12:12pm 1 0 0'
),
(
'applicants.ods '
'Sent today at 12:12pm 1 0 0'
),
(
'thisisatest.csv '
'Sent today at 12:12pm 1 0 0'
),
)),
])
@freeze_time("2012-12-12 12:12")
def test_jobs_page_shows_scheduled_jobs_if_user_doesnt_have_dashboard(
client_request,
service_one,
active_user_with_permissions,
mock_get_jobs,
user,
expected_rows,
):
client_request.login(user)
page = client_request.get('main.view_jobs', service_id=service_one['id'])
for index, row in enumerate(expected_rows):
assert normalize_spaces(page.select('tr')[index].text) == row
@pytest.mark.parametrize('user', [
create_active_user_with_permissions(),
create_active_caseworking_user(),
])
def test_get_jobs_shows_page_links(
client_request,
active_user_with_permissions,
mock_get_jobs,
user,
):
client_request.login(user)
page = client_request.get('main.view_jobs', service_id=SERVICE_ONE_ID)
assert 'Next page' in page.find('li', {'class': 'next-page'}).text
assert 'Previous page' in page.find('li', {'class': 'previous-page'}).text
@pytest.mark.parametrize('user', [
create_active_user_with_permissions(),
create_active_caseworking_user(),
])
@freeze_time("2012-12-12 12:12")
def test_jobs_page_doesnt_show_scheduled_on_page_2(
client_request,
service_one,
active_user_with_permissions,
mock_get_jobs,
user,
):
client_request.login(user)
page = client_request.get('main.view_jobs', service_id=service_one['id'], page=2)
for index, row in enumerate((
(
'File Sending Delivered Failed'
),
(
'export 1/1/2016.xls '
'Sent today at 12:12pm 1 0 0'
),
(
'all email addresses.xlsx '
'Sent today at 12:12pm 1 0 0'
),
(
'applicants.ods '
'Sent today at 12:12pm 1 0 0'
),
(
'thisisatest.csv '
'Sent today at 12:12pm 1 0 0'
),
)):
assert normalize_spaces(page.select('tr')[index].text) == row
@pytest.mark.parametrize('user', [
create_active_user_with_permissions(),
create_active_caseworking_user(),
])
@pytest.mark.parametrize(
"status_argument, expected_api_call", [
(
'',
[
'created', 'pending', 'sending', 'pending-virus-check',
'delivered', 'sent', 'returned-letter',
'failed', 'temporary-failure', 'permanent-failure', 'technical-failure',
'virus-scan-failed', 'validation-failed'
]
),
(
'sending',
['sending', 'created', 'pending', 'pending-virus-check']
),
(
'delivered',
['delivered', 'sent', 'returned-letter']
),
(
'failed',
[
'failed', 'temporary-failure', 'permanent-failure', 'technical-failure', 'virus-scan-failed',
'validation-failed'
]
)
]
)
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_page_for_one_job(
client_request,
mock_get_service_template,
mock_get_job,
mocker,
mock_get_notifications,
mock_get_service_data_retention,
fake_uuid,
status_argument,
expected_api_call,
user,
):
page = client_request.get(
'main.view_job',
service_id=SERVICE_ONE_ID,
job_id=fake_uuid,
status=status_argument
)
assert page.h1.text.strip() == 'thisisatest.csv'
assert ' '.join(page.find('tbody').find('tr').text.split()) == (
'07123456789 template content Delivered 1 January at 11:10am'
)
assert page.find('div', {'data-key': 'notifications'})['data-resource'] == url_for(
'main.view_job_updates',
service_id=SERVICE_ONE_ID,
job_id=fake_uuid,
status=status_argument,
)
csv_link = page.select_one('a[download]')
assert csv_link['href'] == url_for(
'main.view_job_csv',
service_id=SERVICE_ONE_ID,
job_id=fake_uuid,
status=status_argument
)
assert csv_link.text == 'Download this report'
assert page.find('span', {'id': 'time-left'}).text == 'Data available for 7 days'
assert normalize_spaces(page.select_one('tbody tr').text) == normalize_spaces(
'07123456789 '
'template content '
'Delivered 1 January at 11:10am'
)
assert page.select_one('tbody tr a')['href'] == url_for(
'main.view_notification',
service_id=SERVICE_ONE_ID,
notification_id=sample_uuid(),
from_job=fake_uuid,
)
mock_get_notifications.assert_called_with(
SERVICE_ONE_ID,
fake_uuid,
status=expected_api_call
)
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_page_for_one_job_with_flexible_data_retention(
client_request,
active_user_with_permissions,
mock_get_service_template,
mock_get_job,
mocker,
mock_get_notifications,
mock_get_service_data_retention,
fake_uuid,
):
mock_get_service_data_retention.side_effect = [[{"days_of_retention": 10, "notification_type": "sms"}]]
page = client_request.get(
'main.view_job',
service_id=SERVICE_ONE_ID,
job_id=fake_uuid,
status='delivered'
)
assert page.find('span', {'id': 'time-left'}).text == 'Data available for 10 days'
assert "Cancel sending these letters" not in page
def test_get_jobs_should_tell_user_if_more_than_one_page(
    client_request,
    fake_uuid,
    service_one,
    mock_get_job,
    mock_get_service_template,
    mock_get_notifications_with_previous_next,
    mock_get_service_data_retention,
):
    """A job with paginated notifications shows the 'first 50 rows' hint."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
        status='',
    )
    assert page.find('p', {'class': 'table-show-more-link'}).text.strip() == 'Only showing the first 50 rows'
def test_should_show_job_in_progress(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mock_get_job_in_progress,
    mocker,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """An in-progress job shows the status pills and a completion hint."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
    )
    assert [
        normalize_spaces(link.text)
        for link in page.select('.pill a')
    ] == [
        '10 sending', '0 delivered', '0 failed'
    ]
    assert page.select_one('p.hint').text.strip() == 'Report is 50% complete…'
def test_should_show_job_without_notifications(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mock_get_job_in_progress,
    mocker,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """An in-progress job with no notifications yet shows a placeholder row."""
    page = client_request.get(
        'main.view_job',
        service_id=service_one['id'],
        job_id=fake_uuid,
    )
    assert [
        normalize_spaces(link.text)
        for link in page.select('.pill a')
    ] == [
        '10 sending', '0 delivered', '0 failed'
    ]
    assert page.select_one('p.hint').text.strip() == 'Report is 50% complete…'
    assert page.select_one('tbody').text.strip() == 'No messages to show yet…'
@freeze_time("2020-01-10 1:0:0")
@pytest.mark.parametrize('created_at, processing_started, expected_message', (
    # Recently created, not yet started
    (datetime(2020, 1, 10, 0, 0, 0), None, (
        'No messages to show yet…'
    )),
    # Just started
    (datetime(2020, 1, 10, 0, 0, 0), datetime(2020, 1, 10, 0, 0, 1), (
        'No messages to show yet…'
    )),
    # Created a while ago, just started
    (datetime(2020, 1, 1, 0, 0, 0), datetime(2020, 1, 10, 0, 0, 1), (
        'No messages to show yet…'
    )),
    # Created a while ago, started a couple of days ago
    (datetime(2020, 1, 1, 0, 0, 0), datetime(2020, 1, 8, 0, 0, 1), (
        'These messages have been deleted because they were sent more than 7 days ago'
    )),
))
def test_should_show_old_job(
    client_request,
    service_one,
    active_user_with_permissions,
    mock_get_service_template,
    mocker,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    fake_uuid,
    created_at,
    processing_started,
    expected_message,
):
    """Old jobs hide the pills/hints/download link and explain deleted rows."""
    mocker.patch('app.job_api_client.get_job', return_value={
        "data": job_json(
            SERVICE_ONE_ID,
            active_user_with_permissions,
            created_at=created_at.replace(tzinfo=timezone.utc).isoformat(),
            processing_started=(
                processing_started.replace(tzinfo=timezone.utc).isoformat()
                if processing_started else None
            ),
        ),
    })
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert not page.select('.pill a')
    assert not page.select('p.hint')
    assert not page.select('a[download]')
    assert page.select_one('tbody').text.strip() == expected_message
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_service_data_retention,
    fake_uuid,
    mocker,
):
    """A letter job page shows print timing, delivery estimate and CSV link."""
    notifications = create_notifications(template_type='letter', subject='template subject')
    get_notifications = mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications,
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.h1.text) == 'thisisatest.csv'
    assert normalize_spaces(page.select('p.bottom-gutter')[0].text) == (
        'Sent by Test User on 1 January at 11:09am Printing starts today at 5:30pm'
    )
    # No "just sent" banner when visiting the page normally.
    assert page.select('.banner-default-with-tick') == []
    assert normalize_spaces(page.select('tbody tr')[0].text) == (
        '1 Example Street template subject 1 January at 11:09am'
    )
    assert normalize_spaces(page.select('.keyline-block')[0].text) == (
        '1 Letter'
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == (
        '6 January Estimated delivery date'
    )
    assert page.select_one('a[download]')['href'] == url_for(
        'main.view_job_csv',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert page.select('.hint') == []
    # Letter jobs request every letter-relevant status from the API.
    get_notifications.assert_called_with(
        SERVICE_ONE_ID,
        fake_uuid,
        status=[
            'created',
            'pending',
            'sending',
            'pending-virus-check',
            'delivered',
            'sent',
            'returned-letter',
            'failed',
            'temporary-failure',
            'permanent-failure',
            'technical-failure',
            'virus-scan-failed',
            'validation-failed'
        ],
    )
@freeze_time("2016-01-01 11:09:00")
def test_should_show_letter_job_with_banner_after_sending_before_1730(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """A just-sent letter job before 5:30pm shows a 'printing today' banner."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    # The banner replaces the usual "Sent by ..." paragraph.
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letter has been sent. Printing starts today at 5:30pm.'
    )
@freeze_time("2016-01-01 11:09:00")
def test_should_show_letter_job_with_banner_when_there_are_multiple_CSV_rows(
    client_request,
    mock_get_service_letter_template,
    mock_get_job_in_progress,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """A multi-row letter job pluralises the banner text ('letters')."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letters have been sent. Printing starts today at 5:30pm.'
    )
@freeze_time("2016-01-01 18:09:00")
def test_should_show_letter_job_with_banner_after_sending_after_1730(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_notifications,
    mock_get_service_data_retention,
    fake_uuid,
):
    """After the 5:30pm cutoff the banner says printing starts tomorrow."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        just_sent='yes',
    )
    assert page.select('p.bottom-gutter') == []
    assert normalize_spaces(page.select('.banner-default-with-tick')[0].text) == (
        'Your letter has been sent. Printing starts tomorrow at 5:30pm.'
    )
@freeze_time("2016-01-01T00:00:00.061258")
def test_should_show_scheduled_job(
    client_request,
    mock_get_service_template,
    mock_get_scheduled_job,
    mock_get_service_data_retention,
    mock_get_notifications,
    fake_uuid,
):
    """A scheduled job shows its send time, template link and cancel button."""
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('main p')[1].text) == (
        'Sending Two week reminder today at midnight'
    )
    # The template name links to the pinned template version.
    assert page.select('main p a')[0]['href'] == url_for(
        'main.view_template_version',
        service_id=SERVICE_ONE_ID,
        template_id='5d729fbd-239c-44ab-b498-75a985f3198f',
        version=1,
    )
    assert page.select_one('button[type=submit]').text.strip() == 'Cancel sending'
def test_should_cancel_job(
    client_request,
    fake_uuid,
    mock_get_job,
    mock_get_service_template,
    mocker,
):
    """Cancelling a job calls the API and redirects to the dashboard."""
    mock_cancel = mocker.patch('app.job_api_client.cancel_job')
    client_request.post(
        'main.cancel_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        )
    )
    mock_cancel.assert_called_once_with(SERVICE_ONE_ID, fake_uuid)
def test_should_not_show_cancelled_job(
    client_request,
    active_user_with_permissions,
    mock_get_cancelled_job,
    fake_uuid,
):
    """Viewing a cancelled job responds with 404."""
    client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
        _expected_status=404,
    )
def test_should_cancel_letter_job(
    client_request,
    mocker,
    mock_get_service_letter_template,
    active_user_with_permissions
):
    """Cancelling a finished letter job calls the API and redirects.

    Fix: ``app.job_api_client.get_job`` was patched twice with the same
    single-element ``side_effect``; the second patch simply replaced the
    first, so the duplicate has been removed.
    """
    job_id = str(uuid.uuid4())
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status="finished"
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications_json)
    mocker.patch('app.notification_api_client.get_notification_count_for_job_id', return_value=5)
    mock_cancel = mocker.patch('app.job_api_client.cancel_letter_job', return_value=5)
    client_request.post(
        'main.cancel_letter_job',
        service_id=SERVICE_ONE_ID,
        job_id=job_id,
        _expected_status=302,
        _expected_redirect=url_for(
            'main.service_dashboard',
            service_id=SERVICE_ONE_ID,
            _external=True,
        )
    )
    mock_cancel.assert_called_once_with(SERVICE_ONE_ID, job_id)
@freeze_time("2019-06-20 17:30:00.000001")
@pytest.mark.parametrize("job_created_at, expected_fragment", [
    ("2019-06-20T15:30:00.000001+00:00", "today"),
    ("2019-06-19T15:30:00.000001+00:00", "yesterday"),
    ("2019-06-18T15:30:00.000001+00:00", "on 18 June"),
])
def test_should_not_show_cancel_link_for_letter_job_if_too_late(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_created_at,
    expected_fragment,
):
    """After the print deadline the cancel link disappears and past tense is used."""
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID, active_user_with_permissions, job_id=job_id, created_at=job_created_at
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications_json
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id)
    )
    # NOTE(review): `page` is a BeautifulSoup object, so this `not in`
    # membership test does not search the rendered text — it looks vacuous;
    # consider `not in page.text` instead.
    assert "Cancel sending these letters" not in page
    assert page.find('p', {'id': 'printing-info'}).text.strip() == "Printed {} at 5:30pm".format(expected_fragment)
@freeze_time("2019-06-20 15:32:00.000001")
@pytest.mark.parametrize("job_status", [
    "finished", "in progress"
])
def test_should_show_cancel_link_for_letter_job(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_status,
):
    """Before the print deadline a letter job shows the cancel link.

    Fix: the parametrize argnames string had a stray leading space
    (" job_status"); pytest tolerates it, but it is corrected here for
    consistency with the other parametrize declarations in this file.
    """
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status=job_status
    )
    notifications_json = notification_json(SERVICE_ONE_ID, job=job, status="created", template_type="letter")
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}])
    mocker.patch(
        'app.notification_api_client.get_notifications_for_service',
        return_value=notifications_json,
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id)
    )
    assert page.find('a', text='Cancel sending these letters').attrs["href"] == url_for(
        "main.cancel_letter_job", service_id=SERVICE_ONE_ID, job_id=job_id
    )
    assert page.find('p', {'id': 'printing-info'}).text.strip() == "Printing starts today at 5:30pm"
@freeze_time("2019-06-20 15:31:00.000001")
@pytest.mark.parametrize('job_status,number_of_processed_notifications', [['in progress', 2], ['finished', 1]])
def test_dont_cancel_letter_job_when_to_early_to_cancel(
    client_request,
    mocker,
    mock_get_service_letter_template,
    mock_get_service_data_retention,
    active_user_with_permissions,
    job_status,
    number_of_processed_notifications,
):
    """Cancellation is refused while letters are still being processed.

    (Name typo: "to early" should read "too early"; left unchanged so that
    existing test selections keep working.)
    """
    job_id = uuid.uuid4()
    job = job_json(
        SERVICE_ONE_ID,
        active_user_with_permissions,
        job_id=job_id,
        created_at="2019-06-20T15:30:00.000001+00:00",
        job_status=job_status,
        notification_count=2
    )
    # Two queued get_job responses — presumably the cancel view fetches the
    # job twice (check + re-render); confirm against the view code.
    mocker.patch('app.job_api_client.get_job', side_effect=[{"data": job}, {"data": job}])
    notifications_json = notification_json(
        SERVICE_ONE_ID, job=job, status="created", template_type="letter", rows=number_of_processed_notifications
    )
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications_json)
    mocker.patch(
        'app.notification_api_client.get_notification_count_for_job_id', return_value=number_of_processed_notifications
    )
    mock_cancel = mocker.patch('app.job_api_client.cancel_letter_job')
    page = client_request.post(
        'main.cancel_letter_job',
        service_id=SERVICE_ONE_ID,
        job_id=str(job_id),
        _expected_status=200,
    )
    mock_cancel.assert_not_called()
    flash_message = normalize_spaces(page.find('div', class_='banner-dangerous').text)
    assert 'We are still processing these letters, please try again in a minute.' in flash_message
@freeze_time("2016-01-01 00:00:00.000001")
def test_should_show_updates_for_one_job_as_json(
    logged_in_client,
    service_one,
    active_user_with_permissions,
    mock_get_notifications,
    mock_get_service_template,
    mock_get_job,
    mock_get_service_data_retention,
    mocker,
    fake_uuid,
):
    """The AJAX updates endpoint returns counts, rows and status as JSON."""
    response = logged_in_client.get(url_for('main.view_job_updates', service_id=service_one['id'], job_id=fake_uuid))
    assert response.status_code == 200
    content = json.loads(response.get_data(as_text=True))
    assert 'sending' in content['counts']
    assert 'delivered' in content['counts']
    assert 'failed' in content['counts']
    # 'notifications' and 'status' are rendered HTML fragments, so these are
    # substring checks rather than structural assertions.
    assert 'Recipient' in content['notifications']
    assert '07123456789' in content['notifications']
    assert 'Status' in content['notifications']
    assert 'Delivered' in content['notifications']
    assert '12:01am' in content['notifications']
    assert 'Sent by Test User on 1 January at midnight' in content['status']
@pytest.mark.parametrize(
    "job_created_at, expected_message", [
        ("2016-01-10 11:09:00.000000+00:00", "Data available for 7 days"),
        ("2016-01-04 11:09:00.000000+00:00", "Data available for 1 day"),
        ("2016-01-03 11:09:00.000000+00:00", "Data available for 12 hours"),
        ("2016-01-02 23:59:59.000000+00:00", "Data no longer available")
    ]
)
@freeze_time("2016-01-10 12:00:00.000000")
def test_time_left(job_created_at, expected_message):
    """get_time_left counts down from the default 7-day retention window."""
    assert get_time_left(job_created_at) == expected_message
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job_with_first_class_if_notifications_are_first_class(
    client_request,
    mock_get_service_letter_template,
    mock_get_job,
    mock_get_service_data_retention,
    fake_uuid,
    mocker,
):
    """First-class letters get an earlier estimated delivery date."""
    notifications = create_notifications(template_type='letter', postage='first')
    mocker.patch('app.notification_api_client.get_notifications_for_service', return_value=notifications)
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == '5 January Estimated delivery date'
@freeze_time("2016-01-01 11:09:00.061258")
def test_should_show_letter_job_with_first_class_if_no_notifications(
    client_request,
    service_one,
    mock_get_job,
    fake_uuid,
    mock_get_notifications_with_no_notifications,
    mock_get_service_data_retention,
    mocker
):
    """With no notifications yet, postage is taken from the template."""
    mocker.patch(
        'app.service_api_client.get_service_template',
        return_value={'data': create_template(template_type='letter', postage='first')}
    )
    page = client_request.get(
        'main.view_job',
        service_id=SERVICE_ONE_ID,
        job_id=fake_uuid,
    )
    assert normalize_spaces(page.select('.keyline-block')[1].text) == '5 January Estimated delivery date'
|
"""f90nml
======
A Fortran 90 namelist parser and generator.
:copyright: Copyright 2014 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
__version__ = '0.9.2-dev'
from f90nml.parser import Parser
# Legacy API functions
def read(nml_fname, verbose=False):
    """Parse a Fortran 90 namelist file (data.nml) and store its contents.

    :param nml_fname: path of the namelist file to parse.
    :param verbose: enable verbose parser output.

    >>> nml = f90nml.read('data.nml')
    """
    return Parser(verbose).read(nml_fname)
def write(nml, nml_fname, force=False):
    """Output namelist (nml) to a Fortran 90 namelist file (data.nml).

    :param nml: namelist object to serialise.
    :param nml_fname: path of the output file.
    :param force: overwrite an existing file.

    >>> f90nml.write(nml, 'data.nml')
    """
    nml.write(nml_fname, force)
def patch(nml_fname, nml_patch, out_fname=None, verbose=False):
    """Create a new namelist based on an input namelist and reference dict.

    :param nml_fname: path of the source namelist file.
    :param nml_patch: dict of values applied over the source namelist.
    :param out_fname: optional path of the patched output file.
    :param verbose: enable verbose parser output.

    >>> f90nml.patch('data.nml', nml_patch, 'patched_data.nml')
    """
    return Parser(verbose).read(nml_fname, nml_patch, out_fname)
Version update
"""f90nml
======
A Fortran 90 namelist parser and generator.
:copyright: Copyright 2014 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
__version__ = '0.10'
from f90nml.parser import Parser
# Legacy API functions
def read(nml_fname, verbose=False):
    """Parse a Fortran 90 namelist file (data.nml) and store its contents.

    :param nml_fname: path of the namelist file to parse.
    :param verbose: enable verbose parser output.

    >>> nml = f90nml.read('data.nml')
    """
    return Parser(verbose).read(nml_fname)
def write(nml, nml_fname, force=False):
    """Output namelist (nml) to a Fortran 90 namelist file (data.nml).

    :param nml: namelist object to serialise.
    :param nml_fname: path of the output file.
    :param force: overwrite an existing file.

    >>> f90nml.write(nml, 'data.nml')
    """
    nml.write(nml_fname, force)
def patch(nml_fname, nml_patch, out_fname=None, verbose=False):
    """Create a new namelist based on an input namelist and reference dict.

    :param nml_fname: path of the source namelist file.
    :param nml_patch: dict of values applied over the source namelist.
    :param out_fname: optional path of the patched output file.
    :param verbose: enable verbose parser output.

    >>> f90nml.patch('data.nml', nml_patch, 'patched_data.nml')
    """
    return Parser(verbose).read(nml_fname, nml_patch, out_fname)
|
"""Fortran namelist interface.
The ``Namelist`` is a representation of a Fortran namelist and its contents in
a Python environment.
:copyright: Copyright 2014 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
from __future__ import print_function
import itertools
import copy
import numbers
import os
import platform
try:
from StringIO import StringIO # Python 2.x
except ImportError:
from io import StringIO # Python 3.x
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
try:
basestring # Python 2.x
except NameError:
basestring = str # Python 3.x
class Namelist(OrderedDict):
"""Representation of Fortran namelist in a Python environment.
Namelists can be initialised as empty or with a pre-defined `dict` of
`items`. If an explicit default start index is required for `items`, then
it can be initialised with the `default_start_index` input argument.
In addition to the standard methods supported by `dict`, several additional
methods and properties are provided for working with Fortran namelists.
"""
    class RepeatValue(object):
        """Container class for output using repeat counters.

        Pairs a repeat count with the value it applies to, so the writer can
        emit Fortran repeat tokens such as ``3*2``.
        """

        def __init__(self, n, value):
            """Create the RepeatValue object."""
            self.repeats = n    # number of consecutive occurrences
            self.value = value  # the repeated value itself
    def __init__(self, *args, **kwds):
        """Create the Namelist object.

        Accepts the same positional/keyword arguments as ``OrderedDict``,
        plus the optional ``default_start_index`` keyword.  Reserved keys
        (``_start_index``, ``_complex``, ``_indent``) are popped from the
        contents and used to initialise the corresponding attributes.
        """
        s_args = list(args)
        # If using (unordered) dict, then resort the keys for reproducibility
        # NOTE: Python 3.7+ dicts are order-preserving.
        if (args and not isinstance(args[0], OrderedDict) and
                isinstance(args[0], dict)):
            s_args[0] = sorted(args[0].items())
        # Assign the default start index
        try:
            self._default_start_index = kwds.pop('default_start_index')
        except KeyError:
            self._default_start_index = None
        super(Namelist, self).__init__(*s_args, **kwds)
        # XXX: Testing a "co-group" feature, where a namelist group tracks
        # its other entries with the same name.
        self._cogroups = []
        # NOTE: Must run after super().__init__ so the setter can validate.
        self.start_index = self.pop('_start_index', {})
        # Update the complex tuples as intrinsics
        # TODO: We are effectively setting these twice. Instead, fetch these
        # from s_args rather than relying on Namelist to handle the content.
        if '_complex' in self:
            for key in self['_complex']:
                if all(isinstance(v, list) for v in self[key]):
                    self[key] = [complex(*v) for v in self[key]]
                else:
                    self[key] = complex(*self[key])
            self.pop('_complex')
        # Formatting properties
        self._column_width = 72
        self._indent = 4 * ' '
        self._end_comma = False
        self._uppercase = False
        self._float_format = ''
        self._logical_repr = {False: '.false.', True: '.true.'}
        self._index_spacing = False
        self._repeat_counter = False
        self._split_strings = False
        # Namelist group spacing flag
        self._newline = False
        # Check for pre-set indentation
        self.indent = self.pop('_indent', self.indent)
        # PyPy 2 is dumb and does not use __setitem__() inside __init__()
        # This loop will explicitly convert any internal dicts to Namelists.
        if (platform.python_implementation() == 'PyPy' and
                platform.python_version_tuple()[0] == '2'):
            for key, value in self.items():
                self[key] = value
def __contains__(self, key):
"""Case-insensitive interface to OrderedDict."""
return super(Namelist, self).__contains__(key.lower())
def __delitem__(self, key):
"""Case-insensitive interface to OrderedDict."""
return super(Namelist, self).__delitem__(key.lower())
    def __getitem__(self, key):
        """Case-insensitive interface to OrderedDict.

        A string key is looked up directly (cogroup keys return a ``Cogroup``
        view of the repeated groups).  A non-string key is treated as a
        ``(group, variable)`` pair and resolved via two nested lookups.
        """
        if isinstance(key, basestring):
            lkey = key.lower()
            val = super(Namelist, self).__getitem__(lkey)
            if lkey in self._cogroups:
                return Cogroup(self, lkey)
            else:
                return val
        else:
            keyiter = iter(key)
            grp, var = next(keyiter).lower(), next(keyiter).lower()
            return super(Namelist, self).__getitem__(grp).__getitem__(var)
    def __setitem__(self, key, value):
        """Case-insensitive interface to OrderedDict.

        Python dict inputs to the Namelist, such as derived types, are also
        converted into Namelists.  Keys are stored lowercased.
        """
        # Promote dicts to Namelists
        if isinstance(value, dict) and not isinstance(value, Namelist):
            value = Namelist(
                value,
                default_start_index=self.default_start_index
            )
        # Convert list of dicts to list of namelists
        # (NOTE: This may be for legacy cogroup support? Can it be dropped?)
        elif is_nullable_list(value, dict):
            for i, v in enumerate(value):
                if isinstance(v, Namelist) or v is None:
                    value[i] = v
                else:
                    # value is a non-Namelist dict
                    value[i] = Namelist(
                        v,
                        default_start_index=self.default_start_index
                    )
        lkey = key.lower()
        super(Namelist, self).__setitem__(lkey, value)
    def __str__(self):
        """Print the Fortran representation of the namelist.

        Currently this can only be applied to the full contents of the
        namelist file.  Individual namelist groups or values may not render
        correctly.
        """
        output = StringIO()
        # Only a dict-of-groups renders as namelist text; anything else
        # falls back to the plain dict repr.
        if all(isinstance(v, Namelist) for v in self.values()):
            self._writestream(output)
        else:
            print(repr(self), file=output)
        nml_string = output.getvalue().rstrip()
        output.close()
        return nml_string
# Format configuration
@property
def column_width(self):
"""Set the maximum number of characters per line of the namelist file.
:type: ``int``
:default: 72
Tokens longer than ``column_width`` are allowed to extend past this
limit.
"""
return self._column_width
@column_width.setter
def column_width(self, width):
"""Validate and set the column width."""
if isinstance(width, int):
if width >= 0:
self._column_width = width
else:
raise ValueError('Column width must be nonnegative.')
else:
raise TypeError('Column width must be a nonnegative integer.')
@property
def default_start_index(self):
"""Set the default start index for vectors with no explicit index.
:type: ``int``, ``None``
:default: ``None``
When the `default_start_index` is set, all vectors without an explicit
start index are assumed to begin with `default_start_index`. This
index is shown when printing the namelist output.
If set to `None`, then no start index is assumed and is left as
implicit for any vectors undefined in `start_index`.
"""
return self._default_start_index
@default_start_index.setter
def default_start_index(self, value):
if not isinstance(value, int):
raise TypeError('default_start_index must be an integer.')
self._default_start_index = value
@property
def end_comma(self):
"""Append commas to the end of namelist variable entries.
:type: ``bool``
:default: ``False``
Fortran will generally disregard any commas separating variable
assignments, and the default behaviour is to omit these commas from the
output. Enabling this flag will append commas at the end of the line
for each variable assignment.
"""
return self._end_comma
@end_comma.setter
def end_comma(self, value):
"""Validate and set the comma termination flag."""
if not isinstance(value, bool):
raise TypeError('end_comma attribute must be a logical type.')
self._end_comma = value
@property
def false_repr(self):
"""Set the string representation of logical false values.
:type: ``str``
:default: ``'.false.'``
This is equivalent to the first element of ``logical_repr``.
"""
return self._logical_repr[0]
@false_repr.setter
def false_repr(self, value):
"""Validate and set the logical false representation."""
if isinstance(value, str):
if not (value.lower().startswith('f') or
value.lower().startswith('.f')):
raise ValueError("Logical false representation must start "
"with 'F' or '.F'.")
else:
self._logical_repr[0] = value
else:
raise TypeError('Logical false representation must be a string.')
@property
def float_format(self):
"""Set the namelist floating point format.
:type: ``str``
:default: ``''``
The property sets the format string for floating point numbers,
following the format expected by the Python ``format()`` function.
"""
return self._float_format
@float_format.setter
def float_format(self, value):
"""Validate and set the upper case flag."""
if isinstance(value, str):
# Duck-test the format string; raise ValueError on fail
'{0:{1}}'.format(1.23, value)
self._float_format = value
else:
raise TypeError('Floating point format code must be a string.')
@property
def indent(self):
r"""Set the whitespace indentation of namelist entries.
:type: ``int``, ``str``
:default: ``' '`` (four spaces)
This can be set to an integer, denoting the number of spaces, or to an
explicit whitespace character, such as a tab (``\t``).
"""
return self._indent
@indent.setter
def indent(self, value):
"""Validate and set the indent width."""
# Explicit indent setting
if isinstance(value, str):
if value.isspace() or len(value) == 0:
self._indent = value
else:
raise ValueError('String indentation can only contain '
'whitespace.')
# Set indent width
elif isinstance(value, int):
if value >= 0:
self._indent = value * ' '
else:
raise ValueError('Indentation spacing must be nonnegative.')
else:
raise TypeError('Indentation must be specified by string or space '
'width.')
@property
def index_spacing(self):
"""Apply a space between indexes of multidimensional vectors.
:type: ``bool``
:default: ``False``
"""
return self._index_spacing
@index_spacing.setter
def index_spacing(self, value):
"""Validate and set the index_spacing flag."""
if not isinstance(value, bool):
raise TypeError('index_spacing attribute must be a logical type.')
self._index_spacing = value
# NOTE: This presumes that bools and ints are identical as dict keys
@property
def logical_repr(self):
"""Set the string representation of logical values.
:type: ``dict``
:default: ``{False: '.false.', True: '.true.'}``
There are multiple valid representations of True and False values in
Fortran. This property sets the preferred representation in the
namelist output.
The properties ``true_repr`` and ``false_repr`` are also provided as
interfaces to the elements of ``logical_repr``.
"""
return self._logical_repr
@logical_repr.setter
def logical_repr(self, value):
"""Set the string representation of logical values."""
if not any(isinstance(value, t) for t in (list, tuple)):
raise TypeError("Logical representation must be a tuple with "
"a valid true and false value.")
if not len(value) == 2:
raise ValueError("List must contain two values.")
self.false_repr = value[0]
self.true_repr = value[1]
@property
def repeat_counter(self):
"""Return whether the namelist uses repeat counters for arrays.
If True, then arrays with repeated values will use repeat tokens. For
example, the array ``[1, 2, 2, 2]`` will be written as ``1, 3*2``.
:type: ``bool``
:default: ``False``
"""
return self._repeat_counter
@repeat_counter.setter
def repeat_counter(self, value):
"""Set whether array output should be done in repeat form."""
if isinstance(value, bool):
self._repeat_counter = value
else:
raise TypeError(r"repeat must be of type ``bool``")
@property
def split_strings(self):
"""Split strings at the ``column_width`` over multiple lines.
:type: ``bool``
:default: ``False``
"""
return self._split_strings
@split_strings.setter
def split_strings(self, value):
"""Validate and set the split_strings flag."""
if not isinstance(value, bool):
raise TypeError('split_strings attribute must be a logical type.')
self._split_strings = value
@property
def start_index(self):
"""Set the starting index for each vector in the namelist.
:type: ``dict``
:default: ``{}``
``start_index`` is stored as a dict which contains the starting index
for each vector saved in the namelist. For the namelist ``vec.nml``
shown below,
.. code-block:: fortran
&vec_nml
a = 1, 2, 3
b(0:2) = 0, 1, 2
c(3:5) = 3, 4, 5
d(:,:) = 1, 2, 3, 4
/
the ``start_index`` contents are
.. code:: python
>>> import f90nml
>>> nml = f90nml.read('vec.nml')
>>> nml['vec_nml'].start_index
{'b': [0], 'c': [3], 'd': [None, None]}
The starting index of ``a`` is absent from ``start_index``, since its
starting index is unknown and its values cannot be assigned without
referring to the corresponding Fortran source.
"""
return self._start_index
@start_index.setter
def start_index(self, value):
"""Validate and set the vector start index."""
# TODO: Validate contents? (May want to set before adding the data.)
if not isinstance(value, dict):
raise TypeError('start_index attribute must be a dict.')
self._start_index = value
@property
def true_repr(self):
"""Set the string representation of logical true values.
:type: ``str``
:default: ``.true.``
This is equivalent to the second element of ``logical_repr``.
"""
return self._logical_repr[1]
@true_repr.setter
def true_repr(self, value):
"""Validate and set the logical true representation."""
if isinstance(value, str):
if not (value.lower().startswith('t') or
value.lower().startswith('.t')):
raise ValueError("Logical true representation must start with "
"'T' or '.T'.")
else:
self._logical_repr[1] = value
else:
raise TypeError('Logical true representation must be a string.')
    @property
    def uppercase(self):
        """Print group and variable names in uppercase.

        :type: ``bool``
        :default: ``False``

        When enabled, group and variable names are written in uppercase
        rather than the lowercase form used for internal storage.
        """
        return self._uppercase

    @uppercase.setter
    def uppercase(self, value):
        """Validate and set the uppercase flag."""
        if not isinstance(value, bool):
            raise TypeError('uppercase attribute must be a logical type.')
        self._uppercase = value
    def write(self, nml_path, force=False, sort=False):
        """Write Namelist to a Fortran 90 namelist file.

        :param nml_path: path of the output file, or a writable file-like
            object (detected via the presence of a ``read`` attribute —
            NOTE(review): an odd probe for a write target; confirm intent).
        :param force: overwrite an existing file (default ``False``).
        :param sort: write groups and variables in sorted order.
        :raises IOError: if the target file exists and `force` is not set.

        >>> nml = f90nml.read('input.nml')
        >>> nml.write('out.nml')
        """
        nml_is_file = hasattr(nml_path, 'read')
        if not force and not nml_is_file and os.path.isfile(nml_path):
            raise IOError('File {0} already exists.'.format(nml_path))
        nml_file = nml_path if nml_is_file else open(nml_path, 'w')
        try:
            self._writestream(nml_file, sort)
        finally:
            # Only close handles opened here, not caller-provided streams.
            if not nml_is_file:
                nml_file.close()
def patch(self, nml_patch):
"""Update the namelist from another partial or full namelist.
This is different from the intrinsic `update()` method, which replaces
a namelist section. Rather, it updates the values within a section.
"""
for sec in nml_patch:
if sec not in self:
self[sec] = Namelist()
self[sec].update(nml_patch[sec])
    def add_cogroup(self, key, val):
        """Append a duplicate group to the Namelist as a new group.

        The repeated group is stored under a generated ``_grp_<key>_<n>``
        key; ``key`` is recorded in ``_cogroups`` so that lookups can
        present all copies together.

        TODO: Detailed explanation
        TODO: Integrate into __setitem__?
        """
        # TODO: What to do if it's a new group? Add normally?
        lkey = key.lower()
        assert lkey in self
        grps = self[lkey]
        # Set up the cogroup if it does not yet exist
        if isinstance(grps, Namelist):
            # NOTE: We re-use the key to preserve the original order.
            self._cogroups.append(lkey)
            grps = [grps]
        # Generate the cogroup label and add to the Namelist
        cogrp_id = str(len(grps))
        cogrp_key = '_'.join(['_grp', lkey, cogrp_id])
        self[cogrp_key] = val
def groups(self):
"""Return an iterator that spans values with group and variable names.
Elements of the iterator consist of a tuple containing two values. The
first is internal tuple containing the current namelist group and its
variable name. The second element of the returned tuple is the value
associated with the current group and variable.
"""
for key, value in self.items():
for inner_key, inner_value in value.items():
yield (key, inner_key), inner_value
    def _writestream(self, nml_file, sort=False):
        """Output Namelist to a streamable file object.

        :param nml_file: writable stream receiving the output.
        :param sort: write the groups in sorted order.
        """
        # Reset newline flag
        self._newline = False
        if sort:
            sel = Namelist(sorted(self.items(), key=lambda t: t[0]))
        else:
            sel = self
        for grp_name, grp_vars in sel.items():
            # Check for repeated namelist records (saved as lists)
            if isinstance(grp_vars, list):
                for g_vars in grp_vars:
                    self._write_nmlgrp(grp_name, g_vars, nml_file, sort)
            else:
                self._write_nmlgrp(grp_name, grp_vars, nml_file, sort)
    def _write_nmlgrp(self, grp_name, grp_vars, nml_file, sort=False):
        """Write namelist group to target file."""
        # Separate consecutive groups with a blank line; skipped for the
        # first group of the stream (self._newline is reset in _writestream).
        if self._newline:
            print(file=nml_file)
        self._newline = True
        # Strip metadata label for repeat groups
        if grp_name.startswith('_grp_'):
            grp_name = grp_name[5:].rsplit('_', 1)[0]
        if self.uppercase:
            grp_name = grp_name.upper()
        if sort:
            grp_vars = Namelist(sorted(grp_vars.items(), key=lambda t: t[0]))
        print('&{0}'.format(grp_name), file=nml_file)
        for v_name, v_val in grp_vars.items():
            v_start = grp_vars.start_index.get(v_name, None)
            for v_str in self._var_strings(v_name, v_val, v_start=v_start):
                print(v_str, file=nml_file)
        print('/', file=nml_file)
    def _var_strings(self, v_name, v_values, v_idx=None, v_start=None):
        """Convert namelist variable to list of fixed-width strings.

        Recursively renders the variable `v_name` with value(s) `v_values`
        into one string per output line, honouring the parent Namelist's
        formatting attributes (column width, indentation, repeat counters
        and string splitting).  `v_idx` carries the index path accumulated
        while descending a multidimensional array; `v_start` holds the
        variable's start indices, if known.
        """
        if self.uppercase:
            v_name = v_name.upper()
        var_strs = []
        # Parse a multidimensional array
        if is_nullable_list(v_values, list):
            if not v_idx:
                v_idx = []
            # NOTE(review): start indices appear to be stored innermost-first,
            # hence the reversal before indexing by recursion depth -- confirm.
            i_s = v_start[::-1][len(v_idx)] if v_start else None
            # FIXME: We incorrectly assume 1-based indexing if it is
            # unspecified. This is necessary because our output method always
            # separates the outer axes to one per line. But we cannot do this
            # if we don't know the first index (which we are no longer assuming
            # to be 1-based elsewhere). Unfortunately, the solution needs a
            # rethink of multidimensional output.
            # NOTE: Fixing this would also clean up the output of todict(),
            # which is now incorrectly documenting unspecified indices as 1.
            # For now, we will assume 1-based indexing here, just to keep
            # things working smoothly.
            if i_s is None:
                i_s = 1
            for idx, val in enumerate(v_values, start=i_s):
                v_idx_new = v_idx + [idx]
                v_strs = self._var_strings(v_name, val, v_idx=v_idx_new,
                                           v_start=v_start)
                var_strs.extend(v_strs)
        # Parse derived type contents
        elif isinstance(v_values, Namelist):
            for f_name, f_vals in v_values.items():
                v_title = '%'.join([v_name, f_name])
                v_start_new = v_values.start_index.get(f_name, None)
                v_strs = self._var_strings(v_title, f_vals,
                                           v_start=v_start_new)
                var_strs.extend(v_strs)
        # Parse an array of derived types
        elif is_nullable_list(v_values, Namelist):
            if not v_idx:
                v_idx = []
            i_s = v_start[::-1][len(v_idx)] if v_start else 1
            for idx, val in enumerate(v_values, start=i_s):
                # Skip any empty elements in a list of derived types
                if val is None:
                    continue
                v_title = v_name + '({0})'.format(idx)
                v_strs = self._var_strings(v_title, val)
                var_strs.extend(v_strs)
        else:
            # Scalar (or innermost 1D vector) output
            use_default_start_index = False
            if not isinstance(v_values, list):
                v_values = [v_values]
                use_default_start_index = False
            else:
                use_default_start_index = self.default_start_index is not None
            # Print the index range
            # TODO: Include a check for len(v_values) to determine if vector
            if v_idx or v_start or use_default_start_index:
                v_idx_repr = '('
                if v_start or use_default_start_index:
                    if v_start:
                        i_s = v_start[0]
                    else:
                        i_s = self.default_start_index
                    if i_s is None:
                        v_idx_repr += ':'
                    else:
                        i_e = i_s + len(v_values) - 1
                        if i_s == i_e:
                            v_idx_repr += '{0}'.format(i_s)
                        else:
                            v_idx_repr += '{0}:{1}'.format(i_s, i_e)
                else:
                    v_idx_repr += ':'
                if v_idx:
                    idx_delim = ', ' if self._index_spacing else ','
                    v_idx_repr += idx_delim
                    v_idx_repr += idx_delim.join(str(i) for i in v_idx[::-1])
                v_idx_repr += ')'
            else:
                v_idx_repr = ''
            # Split output across multiple lines (if necessary)
            v_header = self.indent + v_name + v_idx_repr + ' = '
            val_strs = []
            val_line = v_header
            if self._repeat_counter:
                # Collapse runs of equal values into RepeatValue tokens
                # (rendered as 'n*value' by _f90repeat).
                v_values = list(
                    self.RepeatValue(len(list(x)), val)
                    for val, x in itertools.groupby(v_values)
                )
            for i_val, v_val in enumerate(v_values):
                # Increase column width if the header exceeds this value
                if len(v_header) >= self.column_width:
                    column_width = len(v_header) + 1
                else:
                    column_width = self.column_width
                if len(val_line) < column_width:
                    # NOTE: We allow non-strings to extend past the column
                    # limit, but strings will be split as needed.
                    v_str = self._f90repr(v_val)
                    # Set a comma placeholder if needed
                    if i_val < len(v_values) - 1 or self.end_comma:
                        v_comma = ', '
                    else:
                        v_comma = ''
                    if self.split_strings and isinstance(v_val, str):
                        idx = column_width - len(val_line + v_comma.rstrip())
                        # Split the line along idx until we either exceed the
                        # column width, or read the end of the string.
                        v_l, v_r = v_str[:idx], v_str[idx:]
                        if v_r:
                            # Check if string can fit on the next line
                            new_val_line = (
                                ' ' * len(v_header) + v_str + v_comma
                            )
                            if len(new_val_line.rstrip()) <= column_width:
                                val_strs.append(val_line)
                                val_line = ' ' * len(v_header)
                            else:
                                # Split string across multiple lines
                                while v_r:
                                    val_line += v_l
                                    val_strs.append(val_line)
                                    val_line = ''
                                    idx = column_width - len(v_comma.rstrip())
                                    v_l, v_r = v_r[:idx], v_r[idx:]
                                v_str = v_l
                    val_line += v_str + v_comma
                # Line break
                if len(val_line) >= column_width:
                    # Append current line to list of lines
                    val_strs.append(val_line.rstrip())
                    # Start new line with space corresponding to header
                    val_line = ' ' * len(v_header)
            # Append any remaining values
            if val_line and not val_line.isspace():
                val_strs.append(val_line.rstrip())
            # Final null values must always precede a comma
            if val_strs and v_values[-1] is None:
                # NOTE: val_strs has been rstrip-ed so lead with a space
                val_strs[-1] += ' ,'
            # Complete the set of values
            if val_strs:
                var_strs.extend(val_strs)
        return var_strs
    def todict(self, complex_tuple=False):
        """Return a dict equivalent to the namelist.

        Since Fortran variables and names cannot start with the ``_``
        character, any keys starting with this token denote metadata, such as
        starting index.

        The ``complex_tuple`` flag is used to convert complex data into an
        equivalent 2-tuple, with metadata stored to flag the variable as
        complex. This is primarily used to facilitate the storage of the
        namelist into an equivalent format which does not support complex
        numbers, such as JSON or YAML.
        """
        # TODO: Preserve ordering
        nmldict = OrderedDict(self)
        # Search for namelists within the namelist
        # TODO: Move repeated stuff to new functions
        for key, value in self.items():
            if isinstance(value, Namelist):
                # Deep-copy so the returned dict shares no state with self
                nml = copy.deepcopy(value)
                nmldict[key] = nml.todict(complex_tuple)
            elif isinstance(value, complex) and complex_tuple:
                nmldict[key] = [value.real, value.imag]
                # EAFP: create the '_complex' metadata list on first use
                try:
                    nmldict['_complex'].append(key)
                except KeyError:
                    nmldict['_complex'] = [key]
            elif isinstance(value, list):
                complex_list = False
                for idx, entry in enumerate(value):
                    if isinstance(entry, Namelist):
                        nml = copy.deepcopy(entry)
                        nmldict[key][idx] = nml.todict(complex_tuple)
                    elif isinstance(entry, complex) and complex_tuple:
                        nmldict[key][idx] = [entry.real, entry.imag]
                        complex_list = True
                if complex_list:
                    try:
                        nmldict['_complex'].append(key)
                    except KeyError:
                        nmldict['_complex'] = [key]
        # Append the start index if present
        if self.start_index:
            nmldict['_start_index'] = self.start_index
        return nmldict
    def _f90repr(self, value):
        """Convert primitive Python types to equivalent Fortran strings.

        Dispatches on the runtime type of `value`.  ``None`` renders as an
        empty string (a null value in the output); unsupported types raise
        ``ValueError``.
        """
        if isinstance(value, self.RepeatValue):
            return self._f90repeat(value)
        elif isinstance(value, bool):
            # bool must be tested before Integral, since bool is an int
            return self._f90bool(value)
        elif isinstance(value, numbers.Integral):
            return self._f90int(value)
        elif isinstance(value, numbers.Real):
            return self._f90float(value)
        elif isinstance(value, numbers.Complex):
            return self._f90complex(value)
        elif isinstance(value, basestring):
            return self._f90str(value)
        elif value is None:
            return ''
        else:
            raise ValueError('Type {0} of {1} cannot be converted to a Fortran'
                             ' type.'.format(type(value), value))
def _f90repeat(self, value):
"""Return a Fortran 90 representation of a repeated value."""
if value.repeats == 1:
return self._f90repr(value.value)
else:
return "{0}*{1}".format(value.repeats,
self._f90repr(value.value))
    def _f90bool(self, value):
        """Return a Fortran 90 representation of a logical value."""
        # Rendered via the configurable logical_repr mapping
        # (defaults: '.false.' / '.true.').
        return self.logical_repr[value]
def _f90int(self, value):
"""Return a Fortran 90 representation of an integer."""
return str(value)
    def _f90float(self, value):
        """Return a Fortran 90 representation of a floating point number."""
        # float_format is a standard Python format spec (default '' -> str()).
        return '{0:{fmt}}'.format(value, fmt=self.float_format)
    def _f90complex(self, value):
        """Return a Fortran 90 representation of a complex number.

        Rendered as the Fortran literal ``(re, im)`` using float_format for
        both components.
        """
        return '({0:{fmt}}, {1:{fmt}})'.format(value.real, value.imag,
                                               fmt=self.float_format)
def _f90str(self, value):
"""Return a Fortran 90 representation of a string."""
# Replace Python quote escape sequence with Fortran
result = repr(str(value)).replace("\\'", "''").replace('\\"', '""')
# Un-escape the Python backslash escape sequence
result = result.replace('\\\\', '\\')
return result
# TODO: Move to separate file? What about ref to Namelist?
class Cogroup(list):
    """List of Namelist groups which share a common key.

    Although Namelists are organized as associative arrays, access is
    typically through a serial I/O data stream. One consequence is that a
    namelist may contain multiple keys for different values.

    This object returns a list of namelist groups which use the same key.
    Internal keys correspond to the original ordering in the namelist.
    When an element of the list is updated, the corresponding namelist element
    is also updated.
    """
    def __init__(self, nml, key, *args, **kwds):
        """Build the list view over all groups of `nml` sharing `key`."""
        # NOTE(review): `args` is accepted but never forwarded to either
        # base class -- confirm whether it can be dropped.
        self.nml = nml
        self.key = key
        # Fetch members via the dict-level lookup on the parent, bypassing
        # Namelist.__getitem__ (which would recurse back into Cogroup).
        grps = [super(Namelist, self.nml).__getitem__(k) for k in self.keys]
        super(Cogroup, self).__init__(grps, **kwds)
    def __setitem__(self, index, value):
        """Replace the `index`-th member group inside the parent namelist."""
        key = self.keys[index]
        super(Namelist, self.nml).__setitem__(key, value)
    @property
    def keys(self):
        """Return the namelist keys in the cogroup."""
        # Members are either the original key or '_grp_<key>_<n>' labels
        # created by Namelist.add_cogroup().
        cogrp_keys = [
            k for k in self.nml
            if k.startswith('_grp_{}'.format(self.key))
            or k == self.key
        ]
        return cogrp_keys
def is_nullable_list(val, vtype):
    """Return True if `val` is a list of `vtype` values, possibly with Nones.

    At least one element must be an instance of `vtype`; every remaining
    element must be either a `vtype` instance or None.
    """
    if not isinstance(val, list):
        return False
    has_typed_entry = False
    for v in val:
        if isinstance(v, vtype):
            has_typed_entry = True
        elif v is not None:
            return False
    return has_typed_entry
del support; fixed super() errors
del is now supported for cogroups via a __delitem__ implementation.
Also removed some problematic super() usage; super() should only be used
to reference the parent class from within a class. We were using it to
access the parent class of a field of the class. This has been fixed by
explicitly referencing dict.
(In truth, we should not even need super() here at all; it should be
sufficient to reference the parent class explicitly, but oh well.)
"""Fortran namelist interface.
The ``Namelist`` is a representation of a Fortran namelist and its contents in
a Python environment.
:copyright: Copyright 2014 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
from __future__ import print_function
import itertools
import copy
import numbers
import os
import platform
try:
from StringIO import StringIO # Python 2.x
except ImportError:
from io import StringIO # Python 3.x
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
try:
basestring # Python 2.x
except NameError:
basestring = str # Python 3.x
class Namelist(OrderedDict):
"""Representation of Fortran namelist in a Python environment.
Namelists can be initialised as empty or with a pre-defined `dict` of
`items`. If an explicit default start index is required for `items`, then
it can be initialised with the `default_start_index` input argument.
In addition to the standard methods supported by `dict`, several additional
methods and properties are provided for working with Fortran namelists.
"""
    class RepeatValue(object):
        """Container class for output using repeat counters.

        Pairs a value with the number of consecutive times it occurs so
        that output can be rendered in the Fortran ``n*value`` form.
        """
        def __init__(self, n, value):
            """Create the RepeatValue object."""
            # Number of consecutive occurrences of ``value``
            self.repeats = n
            # The repeated value itself
            self.value = value
    def __init__(self, *args, **kwds):
        """Create the Namelist object.

        Accepts the same positional/keyword arguments as ``dict``, plus an
        optional ``default_start_index`` keyword.  Metadata keys
        ('_start_index', '_complex', '_indent') present in the input are
        consumed here and removed from the mapping.
        """
        s_args = list(args)
        # If using (unordered) dict, then resort the keys for reproducibility
        # NOTE: Python 3.7+ dicts are order-preserving.
        if (args and not isinstance(args[0], OrderedDict) and
                isinstance(args[0], dict)):
            s_args[0] = sorted(args[0].items())
        # Assign the default start index
        try:
            self._default_start_index = kwds.pop('default_start_index')
        except KeyError:
            self._default_start_index = None
        super(Namelist, self).__init__(*s_args, **kwds)
        # We internally track the list of cogroups (groups of the same name),
        # although this could be replaced with a per-access search.
        self._cogroups = []
        self.start_index = self.pop('_start_index', {})
        # Update the complex tuples as intrinsics
        # TODO: We are effectively setting these twice. Instead, fetch these
        # from s_args rather than relying on Namelist to handle the content.
        if '_complex' in self:
            for key in self['_complex']:
                # A list-of-lists marks a vector of complex values; a single
                # pair marks a complex scalar.
                if all(isinstance(v, list) for v in self[key]):
                    self[key] = [complex(*v) for v in self[key]]
                else:
                    self[key] = complex(*self[key])
            self.pop('_complex')
        # Formatting properties
        self._column_width = 72
        self._indent = 4 * ' '
        self._end_comma = False
        self._uppercase = False
        self._float_format = ''
        self._logical_repr = {False: '.false.', True: '.true.'}
        self._index_spacing = False
        self._repeat_counter = False
        self._split_strings = False
        # Namelist group spacing flag
        self._newline = False
        # Check for pre-set indentation
        self.indent = self.pop('_indent', self.indent)
        # PyPy 2 is dumb and does not use __setitem__() inside __init__()
        # This loop will explicitly convert any internal dicts to Namelists.
        if (platform.python_implementation() == 'PyPy' and
                platform.python_version_tuple()[0] == '2'):
            for key, value in self.items():
                self[key] = value
    def __contains__(self, key):
        """Case-insensitive interface to OrderedDict.

        Keys are stored lowercased (see `__setitem__`), so the lookup key
        is folded before testing membership.
        """
        # NOTE(review): assumes `key` is a string; a non-string key would
        # raise AttributeError here -- confirm callers only pass strings.
        return super(Namelist, self).__contains__(key.lower())
def __delitem__(self, key):
"""Case-insensitive interface to OrderedDict."""
lkey = key.lower()
# Remove cogroup values
if lkey in self._cogroups:
cogrp = Cogroup(self, lkey)
for grp in cogrp.keys:
super(Namelist, self).__delitem__(grp)
else:
super(Namelist, self).__delitem__(key)
    def __getitem__(self, key):
        """Case-insensitive interface to OrderedDict.

        A string `key` is looked up case-insensitively; if it names a
        cogroup, a `Cogroup` view over its member groups is returned.  A
        non-string iterable `key` is treated as a (group, variable) pair
        and resolves two levels deep.
        """
        if isinstance(key, basestring):
            lkey = key.lower()
            if lkey in self._cogroups:
                return Cogroup(self, lkey)
            else:
                return super(Namelist, self).__getitem__(lkey)
        else:
            # Two-level lookup: first element selects the group, second
            # selects the variable within it.
            keyiter = iter(key)
            grp, var = next(keyiter).lower(), next(keyiter).lower()
            return super(Namelist, self).__getitem__(grp).__getitem__(var)
    def __setitem__(self, key, value):
        """Case-insensitive interface to OrderedDict.

        Python dict inputs to the Namelist, such as derived types, are also
        converted into Namelists.  Keys are stored lowercased so that all
        lookups can be case-insensitive.
        """
        # Promote dicts to Namelists
        if isinstance(value, dict) and not isinstance(value, Namelist):
            value = Namelist(
                value,
                default_start_index=self.default_start_index
            )
        # Convert list of dicts to list of namelists
        # (NOTE: This may be for legacy cogroup support? Can it be dropped?)
        elif is_nullable_list(value, dict):
            for i, v in enumerate(value):
                if isinstance(v, Namelist) or v is None:
                    value[i] = v
                else:
                    # value is a non-Namelist dict
                    value[i] = Namelist(
                        v,
                        default_start_index=self.default_start_index
                    )
        lkey = key.lower()
        super(Namelist, self).__setitem__(lkey, value)
    def __str__(self):
        """Print the Fortran representation of the namelist.

        Currently this can only be applied to the full contents of the namelist
        file. Individual namelist groups or values may not render correctly.
        """
        output = StringIO()
        # Only a top-level namelist (every value is itself a group) can be
        # rendered as Fortran; otherwise fall back to the dict repr.
        if all(isinstance(v, Namelist) for v in self.values()):
            self._writestream(output)
        else:
            print(repr(self), file=output)
        nml_string = output.getvalue().rstrip()
        output.close()
        return nml_string
# Format configuration
    @property
    def column_width(self):
        """Set the maximum number of characters per line of the namelist file.

        :type: ``int``
        :default: 72

        Tokens longer than ``column_width`` are allowed to extend past this
        limit.  Assigning a negative value raises ``ValueError``; assigning
        a non-integer raises ``TypeError``.
        """
        return self._column_width
    @column_width.setter
    def column_width(self, width):
        """Validate and set the column width."""
        if isinstance(width, int):
            if width >= 0:
                self._column_width = width
            else:
                raise ValueError('Column width must be nonnegative.')
        else:
            raise TypeError('Column width must be a nonnegative integer.')
@property
def default_start_index(self):
"""Set the default start index for vectors with no explicit index.
:type: ``int``, ``None``
:default: ``None``
When the `default_start_index` is set, all vectors without an explicit
start index are assumed to begin with `default_start_index`. This
index is shown when printing the namelist output.
If set to `None`, then no start index is assumed and is left as
implicit for any vectors undefined in `start_index`.
"""
return self._default_start_index
@default_start_index.setter
def default_start_index(self, value):
if not isinstance(value, int):
raise TypeError('default_start_index must be an integer.')
self._default_start_index = value
    @property
    def end_comma(self):
        """Append commas to the end of namelist variable entries.

        :type: ``bool``
        :default: ``False``

        Fortran will generally disregard any commas separating variable
        assignments, and the default behaviour is to omit these commas from the
        output. Enabling this flag will append commas at the end of the line
        for each variable assignment.
        """
        return self._end_comma
    @end_comma.setter
    def end_comma(self, value):
        """Validate and set the comma termination flag."""
        # Strict bool check: truthy non-bools are rejected with TypeError.
        if not isinstance(value, bool):
            raise TypeError('end_comma attribute must be a logical type.')
        self._end_comma = value
    @property
    def false_repr(self):
        """Set the string representation of logical false values.

        :type: ``str``
        :default: ``'.false.'``

        This is equivalent to the first element of ``logical_repr``.
        Assigned values must begin with ``'F'`` or ``'.F'`` (case
        insensitive); anything else raises ``ValueError``.
        """
        # NOTE: indexing by 0 relies on False == 0 as a dict key.
        return self._logical_repr[0]
    @false_repr.setter
    def false_repr(self, value):
        """Validate and set the logical false representation."""
        if isinstance(value, str):
            if not (value.lower().startswith('f') or
                    value.lower().startswith('.f')):
                raise ValueError("Logical false representation must start "
                                 "with 'F' or '.F'.")
            else:
                self._logical_repr[0] = value
        else:
            raise TypeError('Logical false representation must be a string.')
    @property
    def float_format(self):
        """Set the namelist floating point format.

        :type: ``str``
        :default: ``''``

        The property sets the format string for floating point numbers,
        following the format expected by the Python ``format()`` function.
        """
        return self._float_format
    @float_format.setter
    def float_format(self, value):
        """Validate and set the floating point format string."""
        if isinstance(value, str):
            # Duck-test the format string; raise ValueError on fail
            '{0:{1}}'.format(1.23, value)
            self._float_format = value
        else:
            raise TypeError('Floating point format code must be a string.')
    @property
    def indent(self):
        r"""Set the whitespace indentation of namelist entries.

        :type: ``int``, ``str``
        :default: ``'    '`` (four spaces)

        This can be set to an integer, denoting the number of spaces, or to an
        explicit whitespace character, such as a tab (``\t``).
        """
        return self._indent
    @indent.setter
    def indent(self, value):
        """Validate and set the indent width."""
        # Explicit indent setting
        if isinstance(value, str):
            # An empty string is allowed (no indentation at all)
            if value.isspace() or len(value) == 0:
                self._indent = value
            else:
                raise ValueError('String indentation can only contain '
                                 'whitespace.')
        # Set indent width
        elif isinstance(value, int):
            if value >= 0:
                self._indent = value * ' '
            else:
                raise ValueError('Indentation spacing must be nonnegative.')
        else:
            raise TypeError('Indentation must be specified by string or space '
                            'width.')
    @property
    def index_spacing(self):
        """Apply a space between indexes of multidimensional vectors.

        :type: ``bool``
        :default: ``False``

        When enabled, multidimensional indices are rendered as
        ``(i, j)`` rather than ``(i,j)``.
        """
        return self._index_spacing
    @index_spacing.setter
    def index_spacing(self, value):
        """Validate and set the index_spacing flag."""
        if not isinstance(value, bool):
            raise TypeError('index_spacing attribute must be a logical type.')
        self._index_spacing = value
# NOTE: This presumes that bools and ints are identical as dict keys
    @property
    def logical_repr(self):
        """Set the string representation of logical values.

        :type: ``dict``
        :default: ``{False: '.false.', True: '.true.'}``

        There are multiple valid representations of True and False values in
        Fortran. This property sets the preferred representation in the
        namelist output.

        Although the value is stored internally as a dict, assignment
        accepts a two-element list or tuple of the form
        ``(false_repr, true_repr)``, each element validated by the
        corresponding property setter.

        The properties ``true_repr`` and ``false_repr`` are also provided as
        interfaces to the elements of ``logical_repr``.
        """
        return self._logical_repr
    @logical_repr.setter
    def logical_repr(self, value):
        """Set the string representation of logical values."""
        if not any(isinstance(value, t) for t in (list, tuple)):
            raise TypeError("Logical representation must be a tuple with "
                            "a valid true and false value.")
        if not len(value) == 2:
            raise ValueError("List must contain two values.")
        # Delegate validation to the individual property setters
        self.false_repr = value[0]
        self.true_repr = value[1]
    @property
    def repeat_counter(self):
        """Return whether the namelist uses repeat counters for arrays.

        If True, then arrays with repeated values will use repeat tokens. For
        example, the array ``[1, 2, 2, 2]`` will be written as ``1, 3*2``.

        :type: ``bool``
        :default: ``False``
        """
        return self._repeat_counter
    @repeat_counter.setter
    def repeat_counter(self, value):
        """Set whether array output should be done in repeat form."""
        if isinstance(value, bool):
            self._repeat_counter = value
        else:
            raise TypeError(r"repeat must be of type ``bool``")
    @property
    def split_strings(self):
        """Split strings at the ``column_width`` over multiple lines.

        :type: ``bool``
        :default: ``False``

        When disabled, strings (like other tokens) may extend past the
        column limit on a single line.
        """
        return self._split_strings
    @split_strings.setter
    def split_strings(self, value):
        """Validate and set the split_strings flag."""
        if not isinstance(value, bool):
            raise TypeError('split_strings attribute must be a logical type.')
        self._split_strings = value
    @property
    def start_index(self):
        """Set the starting index for each vector in the namelist.

        :type: ``dict``
        :default: ``{}``

        ``start_index`` is stored as a dict which contains the starting index
        for each vector saved in the namelist. For the namelist ``vec.nml``
        shown below,

        .. code-block:: fortran

            &vec_nml
                a = 1, 2, 3
                b(0:2) = 0, 1, 2
                c(3:5) = 3, 4, 5
                d(:,:) = 1, 2, 3, 4
            /

        the ``start_index`` contents are

        .. code:: python

            >>> import f90nml
            >>> nml = f90nml.read('vec.nml')
            >>> nml['vec_nml'].start_index
            {'b': [0], 'c': [3], 'd': [None, None]}

        The starting index of ``a`` is absent from ``start_index``, since its
        starting index is unknown and its values cannot be assigned without
        referring to the corresponding Fortran source.
        """
        return self._start_index
    @start_index.setter
    def start_index(self, value):
        """Validate and set the vector start index."""
        # TODO: Validate contents? (May want to set before adding the data.)
        if not isinstance(value, dict):
            raise TypeError('start_index attribute must be a dict.')
        self._start_index = value
    @property
    def true_repr(self):
        """Set the string representation of logical true values.

        :type: ``str``
        :default: ``.true.``

        This is equivalent to the second element of ``logical_repr``.
        Assigned values must begin with ``'T'`` or ``'.T'`` (case
        insensitive); anything else raises ``ValueError``.
        """
        # NOTE: indexing by 1 relies on True == 1 as a dict key.
        return self._logical_repr[1]
    @true_repr.setter
    def true_repr(self, value):
        """Validate and set the logical true representation."""
        if isinstance(value, str):
            if not (value.lower().startswith('t') or
                    value.lower().startswith('.t')):
                raise ValueError("Logical true representation must start with "
                                 "'T' or '.T'.")
            else:
                self._logical_repr[1] = value
        else:
            raise TypeError('Logical true representation must be a string.')
    @property
    def uppercase(self):
        """Print group and variable names in uppercase.

        :type: ``bool``
        :default: ``False``

        When enabled, all group and variable names are written to the
        output in uppercase.
        """
        return self._uppercase
    @uppercase.setter
    def uppercase(self, value):
        """Validate and set the uppercase flag."""
        if not isinstance(value, bool):
            raise TypeError('uppercase attribute must be a logical type.')
        self._uppercase = value
    def write(self, nml_path, force=False, sort=False):
        """Write Namelist to a Fortran 90 namelist file.

        `nml_path` may be a file path or any file-like object (detected by
        the presence of a ``read`` attribute).  Set `force` to overwrite an
        existing file; otherwise ``IOError`` is raised.  Set `sort` to
        write groups and variables in alphabetical order.

        >>> nml = f90nml.read('input.nml')
        >>> nml.write('out.nml')
        """
        nml_is_file = hasattr(nml_path, 'read')
        if not force and not nml_is_file and os.path.isfile(nml_path):
            raise IOError('File {0} already exists.'.format(nml_path))
        nml_file = nml_path if nml_is_file else open(nml_path, 'w')
        try:
            self._writestream(nml_file, sort)
        finally:
            # Only close handles this method opened itself
            if not nml_is_file:
                nml_file.close()
    def patch(self, nml_patch):
        """Update the namelist from another partial or full namelist.

        This is different from the intrinsic `update()` method, which replaces
        a namelist section. Rather, it updates the values within a section.
        """
        for sec in nml_patch:
            # Create any section missing from this namelist before merging
            if sec not in self:
                self[sec] = Namelist()
            self[sec].update(nml_patch[sec])
    def add_cogroup(self, key, val):
        """Append a duplicate group to the Namelist as a new group.

        On the first duplication of `key`, the key is registered as a
        cogroup (a set of repeated groups sharing one name).  `val` is then
        stored under an internal ``_grp_<key>_<n>`` label so that each
        repetition remains a distinct entry.
        """
        # TODO: What to do if it's a new group? Add normally?
        lkey = key.lower()
        assert lkey in self
        grps = self[lkey]
        # Set up the cogroup if it does not yet exist
        if isinstance(grps, Namelist):
            # NOTE: We re-use the key to preserve the original order.
            self._cogroups.append(lkey)
            grps = [grps]
        # Generate the cogroup label and add to the Namelist
        cogrp_id = str(len(grps))
        cogrp_key = '_'.join(['_grp', lkey, cogrp_id])
        self[cogrp_key] = val
    def groups(self):
        """Return an iterator that spans values with group and variable names.

        Elements of the iterator consist of a tuple containing two values. The
        first is internal tuple containing the current namelist group and its
        variable name. The second element of the returned tuple is the value
        associated with the current group and variable.
        """
        for key, value in self.items():
            # Each top-level value is itself a mapping of variables
            for inner_key, inner_value in value.items():
                yield (key, inner_key), inner_value
    def _writestream(self, nml_file, sort=False):
        """Output Namelist to a streamable file object.

        If `sort` is True, groups are written in alphabetical order (and
        each group's variables are sorted within `_write_nmlgrp`).
        """
        # Reset newline flag
        self._newline = False
        if sort:
            sel = Namelist(sorted(self.items(), key=lambda t: t[0]))
        else:
            sel = self
        for grp_name, grp_vars in sel.items():
            # Check for repeated namelist records (saved as lists)
            if isinstance(grp_vars, list):
                for g_vars in grp_vars:
                    self._write_nmlgrp(grp_name, g_vars, nml_file, sort)
            else:
                self._write_nmlgrp(grp_name, grp_vars, nml_file, sort)
    def _write_nmlgrp(self, grp_name, grp_vars, nml_file, sort=False):
        """Write namelist group to target file."""
        # Separate consecutive groups with a blank line; skipped for the
        # first group of the stream (self._newline is reset in _writestream).
        if self._newline:
            print(file=nml_file)
        self._newline = True
        # Strip metadata label for repeat groups
        if grp_name.startswith('_grp_'):
            grp_name = grp_name[5:].rsplit('_', 1)[0]
        if self.uppercase:
            grp_name = grp_name.upper()
        if sort:
            grp_vars = Namelist(sorted(grp_vars.items(), key=lambda t: t[0]))
        print('&{0}'.format(grp_name), file=nml_file)
        for v_name, v_val in grp_vars.items():
            v_start = grp_vars.start_index.get(v_name, None)
            for v_str in self._var_strings(v_name, v_val, v_start=v_start):
                print(v_str, file=nml_file)
        print('/', file=nml_file)
    def _var_strings(self, v_name, v_values, v_idx=None, v_start=None):
        """Convert namelist variable to list of fixed-width strings.

        Recursively renders the variable `v_name` with value(s) `v_values`
        into one string per output line, honouring the parent Namelist's
        formatting attributes (column width, indentation, repeat counters
        and string splitting).  `v_idx` carries the index path accumulated
        while descending a multidimensional array; `v_start` holds the
        variable's start indices, if known.
        """
        if self.uppercase:
            v_name = v_name.upper()
        var_strs = []
        # Parse a multidimensional array
        if is_nullable_list(v_values, list):
            if not v_idx:
                v_idx = []
            # NOTE(review): start indices appear to be stored innermost-first,
            # hence the reversal before indexing by recursion depth -- confirm.
            i_s = v_start[::-1][len(v_idx)] if v_start else None
            # FIXME: We incorrectly assume 1-based indexing if it is
            # unspecified. This is necessary because our output method always
            # separates the outer axes to one per line. But we cannot do this
            # if we don't know the first index (which we are no longer assuming
            # to be 1-based elsewhere). Unfortunately, the solution needs a
            # rethink of multidimensional output.
            # NOTE: Fixing this would also clean up the output of todict(),
            # which is now incorrectly documenting unspecified indices as 1.
            # For now, we will assume 1-based indexing here, just to keep
            # things working smoothly.
            if i_s is None:
                i_s = 1
            for idx, val in enumerate(v_values, start=i_s):
                v_idx_new = v_idx + [idx]
                v_strs = self._var_strings(v_name, val, v_idx=v_idx_new,
                                           v_start=v_start)
                var_strs.extend(v_strs)
        # Parse derived type contents
        elif isinstance(v_values, Namelist):
            for f_name, f_vals in v_values.items():
                v_title = '%'.join([v_name, f_name])
                v_start_new = v_values.start_index.get(f_name, None)
                v_strs = self._var_strings(v_title, f_vals,
                                           v_start=v_start_new)
                var_strs.extend(v_strs)
        # Parse an array of derived types
        elif is_nullable_list(v_values, Namelist):
            if not v_idx:
                v_idx = []
            i_s = v_start[::-1][len(v_idx)] if v_start else 1
            for idx, val in enumerate(v_values, start=i_s):
                # Skip any empty elements in a list of derived types
                if val is None:
                    continue
                v_title = v_name + '({0})'.format(idx)
                v_strs = self._var_strings(v_title, val)
                var_strs.extend(v_strs)
        else:
            # Scalar (or innermost 1D vector) output
            use_default_start_index = False
            if not isinstance(v_values, list):
                v_values = [v_values]
                use_default_start_index = False
            else:
                use_default_start_index = self.default_start_index is not None
            # Print the index range
            # TODO: Include a check for len(v_values) to determine if vector
            if v_idx or v_start or use_default_start_index:
                v_idx_repr = '('
                if v_start or use_default_start_index:
                    if v_start:
                        i_s = v_start[0]
                    else:
                        i_s = self.default_start_index
                    if i_s is None:
                        v_idx_repr += ':'
                    else:
                        i_e = i_s + len(v_values) - 1
                        if i_s == i_e:
                            v_idx_repr += '{0}'.format(i_s)
                        else:
                            v_idx_repr += '{0}:{1}'.format(i_s, i_e)
                else:
                    v_idx_repr += ':'
                if v_idx:
                    idx_delim = ', ' if self._index_spacing else ','
                    v_idx_repr += idx_delim
                    v_idx_repr += idx_delim.join(str(i) for i in v_idx[::-1])
                v_idx_repr += ')'
            else:
                v_idx_repr = ''
            # Split output across multiple lines (if necessary)
            v_header = self.indent + v_name + v_idx_repr + ' = '
            val_strs = []
            val_line = v_header
            if self._repeat_counter:
                # Collapse runs of equal values into RepeatValue tokens
                # (rendered as 'n*value' by _f90repeat).
                v_values = list(
                    self.RepeatValue(len(list(x)), val)
                    for val, x in itertools.groupby(v_values)
                )
            for i_val, v_val in enumerate(v_values):
                # Increase column width if the header exceeds this value
                if len(v_header) >= self.column_width:
                    column_width = len(v_header) + 1
                else:
                    column_width = self.column_width
                if len(val_line) < column_width:
                    # NOTE: We allow non-strings to extend past the column
                    # limit, but strings will be split as needed.
                    v_str = self._f90repr(v_val)
                    # Set a comma placeholder if needed
                    if i_val < len(v_values) - 1 or self.end_comma:
                        v_comma = ', '
                    else:
                        v_comma = ''
                    if self.split_strings and isinstance(v_val, str):
                        idx = column_width - len(val_line + v_comma.rstrip())
                        # Split the line along idx until we either exceed the
                        # column width, or read the end of the string.
                        v_l, v_r = v_str[:idx], v_str[idx:]
                        if v_r:
                            # Check if string can fit on the next line
                            new_val_line = (
                                ' ' * len(v_header) + v_str + v_comma
                            )
                            if len(new_val_line.rstrip()) <= column_width:
                                val_strs.append(val_line)
                                val_line = ' ' * len(v_header)
                            else:
                                # Split string across multiple lines
                                while v_r:
                                    val_line += v_l
                                    val_strs.append(val_line)
                                    val_line = ''
                                    idx = column_width - len(v_comma.rstrip())
                                    v_l, v_r = v_r[:idx], v_r[idx:]
                                v_str = v_l
                    val_line += v_str + v_comma
                # Line break
                if len(val_line) >= column_width:
                    # Append current line to list of lines
                    val_strs.append(val_line.rstrip())
                    # Start new line with space corresponding to header
                    val_line = ' ' * len(v_header)
            # Append any remaining values
            if val_line and not val_line.isspace():
                val_strs.append(val_line.rstrip())
            # Final null values must always precede a comma
            if val_strs and v_values[-1] is None:
                # NOTE: val_strs has been rstrip-ed so lead with a space
                val_strs[-1] += ' ,'
            # Complete the set of values
            if val_strs:
                var_strs.extend(val_strs)
        return var_strs
    def todict(self, complex_tuple=False):
        """Return a dict equivalent to the namelist.

        Since Fortran variables and names cannot start with the ``_``
        character, any keys starting with this token denote metadata, such as
        starting index.

        The ``complex_tuple`` flag is used to convert complex data into an
        equivalent 2-tuple, with metadata stored to flag the variable as
        complex. This is primarily used to facilitate the storage of the
        namelist into an equivalent format which does not support complex
        numbers, such as JSON or YAML.
        """
        # TODO: Preserve ordering
        nmldict = OrderedDict(self)
        # Search for namelists within the namelist
        # TODO: Move repeated stuff to new functions
        for key, value in self.items():
            if isinstance(value, Namelist):
                # Deep-copy so the returned dict shares no state with self
                nml = copy.deepcopy(value)
                nmldict[key] = nml.todict(complex_tuple)
            elif isinstance(value, complex) and complex_tuple:
                nmldict[key] = [value.real, value.imag]
                # EAFP: create the '_complex' metadata list on first use
                try:
                    nmldict['_complex'].append(key)
                except KeyError:
                    nmldict['_complex'] = [key]
            elif isinstance(value, list):
                complex_list = False
                for idx, entry in enumerate(value):
                    if isinstance(entry, Namelist):
                        nml = copy.deepcopy(entry)
                        nmldict[key][idx] = nml.todict(complex_tuple)
                    elif isinstance(entry, complex) and complex_tuple:
                        nmldict[key][idx] = [entry.real, entry.imag]
                        complex_list = True
                if complex_list:
                    try:
                        nmldict['_complex'].append(key)
                    except KeyError:
                        nmldict['_complex'] = [key]
        # Append the start index if present
        if self.start_index:
            nmldict['_start_index'] = self.start_index
        return nmldict
def _f90repr(self, value):
"""Convert primitive Python types to equivalent Fortran strings."""
if isinstance(value, self.RepeatValue):
return self._f90repeat(value)
elif isinstance(value, bool):
return self._f90bool(value)
elif isinstance(value, numbers.Integral):
return self._f90int(value)
elif isinstance(value, numbers.Real):
return self._f90float(value)
elif isinstance(value, numbers.Complex):
return self._f90complex(value)
elif isinstance(value, basestring):
return self._f90str(value)
elif value is None:
return ''
else:
raise ValueError('Type {0} of {1} cannot be converted to a Fortran'
' type.'.format(type(value), value))
def _f90repeat(self, value):
"""Return a Fortran 90 representation of a repeated value."""
if value.repeats == 1:
return self._f90repr(value.value)
else:
return "{0}*{1}".format(value.repeats,
self._f90repr(value.value))
    def _f90bool(self, value):
        """Return a Fortran 90 representation of a logical value."""
        # Indexing with the bool itself: False -> entry 0, True -> entry 1.
        return self.logical_repr[value]
    def _f90int(self, value):
        """Return a Fortran 90 representation of an integer."""
        return str(value)
def _f90float(self, value):
"""Return a Fortran 90 representation of a floating point number."""
return '{0:{fmt}}'.format(value, fmt=self.float_format)
def _f90complex(self, value):
"""Return a Fortran 90 representation of a complex number."""
return '({0:{fmt}}, {1:{fmt}})'.format(value.real, value.imag,
fmt=self.float_format)
def _f90str(self, value):
"""Return a Fortran 90 representation of a string."""
# Replace Python quote escape sequence with Fortran
result = repr(str(value)).replace("\\'", "''").replace('\\"', '""')
# Un-escape the Python backslash escape sequence
result = result.replace('\\\\', '\\')
return result
# TODO: Move to separate file? What about ref to Namelist?
class Cogroup(list):
    """List of Namelist groups which share a common key.

    Although Namelists are organized as associative arrays, access is
    typically through a serial I/O data stream.  One consequence is that a
    namelist may contain multiple groups using the same key; internally such
    duplicates are stored under mangled ``_grp_<key>...`` keys that preserve
    the original ordering.

    This object presents all groups sharing one key as a list.  Writing to
    or deleting an element updates the backing namelist as well.
    """

    def __init__(self, nml, key, *args, **kwds):
        self.nml = nml
        self.key = key
        # Seed the list with the current group values, bypassing any
        # __getitem__ override on the namelist itself.
        members = [dict.__getitem__(self.nml, k) for k in self.keys]
        super(Cogroup, self).__init__(members, **kwds)

    def __setitem__(self, index, value):
        # Write through to the backing namelist entry for this position.
        dict.__setitem__(self.nml, self.keys[index], value)

    def __delitem__(self, index):
        # Remove both the namelist entry and our local copy.
        dict.__delitem__(self.nml, self.keys[index])
        super(Cogroup, self).__delitem__(index)

    @property
    def keys(self):
        """Return the namelist keys in the cogroup."""
        mangled = '_grp_{}'.format(self.key)
        return [k for k in self.nml
                if k == self.key or k.startswith(mangled)]
def is_nullable_list(val, vtype):
    """Return True if list contains either values of type `vtype` or None.

    At least one element must actually be of `vtype`; a list of only Nones
    (or an empty list) returns False.
    """
    if not isinstance(val, list):
        return False
    has_typed = any(isinstance(v, vtype) for v in val)
    only_valid = all(v is None or isinstance(v, vtype) for v in val)
    return has_typed and only_valid
|
# Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools.cli.cmds.cook_image import paasta_cook_image
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_success(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns None when validation, Makefile and run all succeed."""
    mock_validate_service_name.return_value = True
    mock_makefile_responds_to.return_value = True
    mock_run.return_value = (0, 'Output')

    args = mock.MagicMock(service='fake_service')
    assert paasta_cook_image(args) is None
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_makefile_fail(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns 1 when the Makefile lacks the required target."""
    mock_validate_service_name.return_value = True
    mock_makefile_responds_to.return_value = False
    mock_run.return_value = (0, 'Output')

    args = mock.MagicMock(service='fake_service')
    assert paasta_cook_image(args) == 1
class FakeKeyboardInterrupt(KeyboardInterrupt):
    """Stand-in KeyboardInterrupt subclass raised by the mocked _run."""
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_keyboard_interrupt(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns 2 when the build is interrupted."""
    mock_validate_service_name.return_value = True
    mock_makefile_responds_to.return_value = True
    mock_run.return_value = (0, 'Output')
    # side_effect takes precedence over return_value.
    mock_run.side_effect = FakeKeyboardInterrupt

    args = mock.MagicMock(service='fake_service')
    assert paasta_cook_image(args) == 2
Fix broken cook_image test to assert the correct return code
# Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools.cli.cmds.cook_image import paasta_cook_image
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_success(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns 0 when validation, Makefile and run all succeed."""
    mock_run.return_value = (0, 'Output')
    mock_makefile_responds_to.return_value = True
    mock_validate_service_name.return_value = True

    args = mock.MagicMock()
    args.service = 'fake_service'
    # BUGFIX: `is 0` compared object identity, not value -- it only passed
    # thanks to CPython's small-int caching and raises a SyntaxWarning on
    # Python 3.8+. Use equality.
    assert paasta_cook_image(args) == 0
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_makefile_fail(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns 1 when the Makefile lacks the required target."""
    mock_validate_service_name.return_value = True
    mock_makefile_responds_to.return_value = False
    mock_run.return_value = (0, 'Output')

    args = mock.MagicMock(service='fake_service')
    assert paasta_cook_image(args) == 1
class FakeKeyboardInterrupt(KeyboardInterrupt):
    """Stand-in KeyboardInterrupt subclass raised by the mocked _run."""
@mock.patch('paasta_tools.cli.cmds.cook_image.validate_service_name', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image.makefile_responds_to', autospec=True)
@mock.patch('paasta_tools.cli.cmds.cook_image._run', autospec=True)
def test_run_keyboard_interrupt(
    mock_run,
    mock_makefile_responds_to,
    mock_validate_service_name,
):
    """paasta_cook_image returns 2 when the build is interrupted."""
    mock_validate_service_name.return_value = True
    mock_makefile_responds_to.return_value = True
    mock_run.return_value = (0, 'Output')
    # side_effect takes precedence over return_value.
    mock_run.side_effect = FakeKeyboardInterrupt

    args = mock.MagicMock(service='fake_service')
    assert paasta_cook_image(args) == 2
|
"""
Fabric tools for managing Python packages using pip
"""
from __future__ import with_statement
from contextlib import contextmanager
from distutils.version import StrictVersion as V
import os.path
from fabric.api import *
from fabric.utils import puts
def is_pip_installed(version=None):
    """
    Check if pip is installed on the remote host.

    With no *version*, returns whether ``pip --version`` succeeds.
    With a *version*, additionally requires installed >= version.
    """
    with settings(hide('running', 'warnings', 'stderr', 'stdout'), warn_only=True):
        result = run('pip --version')
        if result.failed:
            return False
        if version is None:
            return result.succeeded
        # Output looks like "pip <version> from ..."; take the 2nd token.
        installed = result.split(' ')[1]
        if V(installed) >= V(version):
            return True
        puts("pip %s found (version >= %s required)" % (installed, version))
        return False
def install_pip():
    """
    Install pip

    Downloads get-pip.py into /tmp on the remote host and runs it with
    sudo. Requires curl and python on the remote machine.
    """
    with cd("/tmp"):
        run("curl --silent -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py")
        sudo("python get-pip.py")
def is_installed(package):
    """
    Check if a Python package is installed on the remote host.

    Parses ``pip freeze`` output and tests *package* against the
    ``name==version`` lines.
    """
    # Kept for the %(options)s placeholder below; currently always empty.
    options = " ".join([])
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        frozen = run("pip freeze %(options)s" % locals())
    names = [line.split('==')[0] for line in frozen.splitlines()]
    return package in names
def install(packages, upgrade=False, use_mirrors=True, use_sudo=False, user=None):
    """
    Install Python packages with pip.

    *packages* may be a string or an iterable of package names.
    """
    # Accept an iterable of names as well as a single space-separated string.
    if not isinstance(packages, basestring):
        packages = " ".join(packages)

    flags = []
    if use_mirrors:
        flags.append('--use-mirrors')
    if upgrade:
        flags.append('--upgrade')
    options = " ".join(flags)

    command = 'pip install %(options)s %(packages)s' % locals()
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
def install_requirements(filename, upgrade=False, use_mirrors=True, use_sudo=False, user=None):
    """
    Install Python packages from a pip requirements file.
    """
    flags = []
    if use_mirrors:
        flags.append('--use-mirrors')
    if upgrade:
        flags.append('--upgrade')
    options = " ".join(flags)

    command = 'pip install %(options)s -r %(filename)s' % locals()
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
@contextmanager
def virtualenv(directory):
    """
    Context manager to activate a Python virtualenv.

    Prefixes every command run inside the block with ``source .../bin/activate``.
    """
    # BUGFIX: previously this also wrapped the block in ``cd(directory)``,
    # silently changing the remote working directory for every command in
    # the context and breaking callers' relative paths. The activate path
    # is absolute, so the prefix alone is sufficient.
    activate = os.path.join(directory, 'bin', 'activate')
    with prefix('source "%s"' % activate):
        yield
Fix Python virtualenv context manager
"""
Fabric tools for managing Python packages using pip
"""
from __future__ import with_statement
from contextlib import contextmanager
from distutils.version import StrictVersion as V
import os.path
from fabric.api import *
from fabric.utils import puts
def is_pip_installed(version=None):
    """
    Check if pip is installed on the remote host.

    With no *version*, returns whether ``pip --version`` succeeds.
    With a *version*, additionally requires installed >= version.
    """
    with settings(hide('running', 'warnings', 'stderr', 'stdout'), warn_only=True):
        result = run('pip --version')
        if result.failed:
            return False
        if version is None:
            return result.succeeded
        # Output looks like "pip <version> from ..."; take the 2nd token.
        installed = result.split(' ')[1]
        if V(installed) >= V(version):
            return True
        puts("pip %s found (version >= %s required)" % (installed, version))
        return False
def install_pip():
    """
    Install pip

    Downloads get-pip.py into /tmp on the remote host and runs it with
    sudo. Requires curl and python on the remote machine.
    """
    with cd("/tmp"):
        run("curl --silent -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py")
        sudo("python get-pip.py")
def is_installed(package):
    """
    Check if a Python package is installed on the remote host.

    Parses ``pip freeze`` output and tests *package* against the
    ``name==version`` lines.
    """
    # Kept for the %(options)s placeholder below; currently always empty.
    options = " ".join([])
    with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
        frozen = run("pip freeze %(options)s" % locals())
    names = [line.split('==')[0] for line in frozen.splitlines()]
    return package in names
def install(packages, upgrade=False, use_mirrors=True, use_sudo=False, user=None):
    """
    Install Python packages with pip.

    *packages* may be a string or an iterable of package names.
    """
    # Accept an iterable of names as well as a single space-separated string.
    if not isinstance(packages, basestring):
        packages = " ".join(packages)

    flags = []
    if use_mirrors:
        flags.append('--use-mirrors')
    if upgrade:
        flags.append('--upgrade')
    options = " ".join(flags)

    command = 'pip install %(options)s %(packages)s' % locals()
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
def install_requirements(filename, upgrade=False, use_mirrors=True, use_sudo=False, user=None):
    """
    Install Python packages from a pip requirements file.
    """
    flags = []
    if use_mirrors:
        flags.append('--use-mirrors')
    if upgrade:
        flags.append('--upgrade')
    options = " ".join(flags)

    command = 'pip install %(options)s -r %(filename)s' % locals()
    if use_sudo:
        sudo(command, user=user)
    else:
        run(command)
@contextmanager
def virtualenv(directory):
    """
    Context manager to activate a Python virtualenv.

    Prefixes every command run inside the block with ``source .../bin/activate``.
    """
    activate_script = os.path.join(directory, 'bin', 'activate')
    with prefix('source "%s"' % activate_script):
        yield
|
# swift_build_support/products/swiftsyntax.py --------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
import os
from build_swift.build_swift.constants import MULTIROOT_DATA_FILE_PATH
from . import cmark
from . import foundation
from . import libcxx
from . import libdispatch
from . import libicu
from . import llbuild
from . import llvm
from . import product
from . import swift
from . import swiftpm
from . import xctest
from .. import shell
class SwiftSyntax(product.Product):
    """Build-script product definition for swift-syntax.

    Built via swift-syntax's own build-script.py as a SwiftPM unified-build
    product rather than through build-script-impl.
    """

    @classmethod
    def product_source_name(cls):
        """product_source_name() -> str

        The name of the source code directory of this product.
        """
        return "swift-syntax"

    @classmethod
    def is_build_script_impl_product(cls):
        return False

    @classmethod
    def is_before_build_script_impl_product(cls):
        return False

    @classmethod
    def is_swiftpm_unified_build_product(cls):
        return True

    def run_swiftsyntax_build_script(self, target, command,
                                     additional_params=None):
        """Invoke swift-syntax's build-script.py with the common flags.

        BUGFIX: additional_params previously defaulted to a mutable ``[]``,
        which is shared between calls; any mutation would leak into later
        invocations. Default to None and create a fresh list instead.
        """
        if additional_params is None:
            additional_params = []
        script_path = os.path.join(self.source_dir, 'build-script.py')

        build_cmd = [
            script_path,
            command,
            '--build-dir', self.build_dir,
            '--multiroot-data-file', MULTIROOT_DATA_FILE_PATH,
            '--toolchain', self.install_toolchain_path(target)
        ]

        if self.is_release():
            build_cmd.append('--release')

        if self.args.verbose_build:
            build_cmd.append('--verbose')

        build_cmd.extend(additional_params)

        shell.call(build_cmd)

    def should_build(self, host_target):
        return True

    def build(self, host_target):
        # Optionally check that the checked-in generated sources are
        # up to date before building.
        if self.args.swiftsyntax_verify_generated_files:
            self.run_swiftsyntax_build_script(target=host_target,
                                              command='verify-source-code')

        self.run_swiftsyntax_build_script(target=host_target,
                                          command='build')

    def should_test(self, host_target):
        return self.args.test_swiftsyntax

    def test(self, host_target):
        # FileCheck lives in the sibling LLVM build directory.
        llvm_build_dir = os.path.join(self.build_dir, '..', 'llvm-' + host_target)
        llvm_build_dir = os.path.realpath(llvm_build_dir)
        self.run_swiftsyntax_build_script(target=host_target,
                                          command='test',
                                          additional_params=[
                                              '--filecheck-exec',
                                              os.path.join(llvm_build_dir,
                                                           'bin',
                                                           'FileCheck')
                                          ])

    def should_install(self, host_target):
        return self.args.install_swiftsyntax

    def install(self, target_name):
        # SwiftSyntax doesn't produce any products that should be installed
        # into the toolchain. All tools using it link against SwiftSyntax
        # statically.
        pass

    @classmethod
    def get_dependencies(cls):
        return [cmark.CMark,
                llvm.LLVM,
                libcxx.LibCXX,
                libicu.LibICU,
                swift.Swift,
                libdispatch.LibDispatch,
                foundation.Foundation,
                xctest.XCTest,
                llbuild.LLBuild,
                swiftpm.SwiftPM]
spelling: toolchain
Signed-off-by: Josh Soref <dc510c92cc1794ea84000fde88becdce67bf7624@users.noreply.github.com>
# swift_build_support/products/swiftsyntax.py --------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
import os
from build_swift.build_swift.constants import MULTIROOT_DATA_FILE_PATH
from . import cmark
from . import foundation
from . import libcxx
from . import libdispatch
from . import libicu
from . import llbuild
from . import llvm
from . import product
from . import swift
from . import swiftpm
from . import xctest
from .. import shell
class SwiftSyntax(product.Product):
    """Build-script product definition for swift-syntax.

    Built via swift-syntax's own build-script.py as a SwiftPM unified-build
    product rather than through build-script-impl.
    """

    @classmethod
    def product_source_name(cls):
        """product_source_name() -> str

        The name of the source code directory of this product.
        """
        return "swift-syntax"

    @classmethod
    def is_build_script_impl_product(cls):
        return False

    @classmethod
    def is_before_build_script_impl_product(cls):
        return False

    @classmethod
    def is_swiftpm_unified_build_product(cls):
        return True

    def run_swiftsyntax_build_script(self, target, command,
                                     additional_params=None):
        """Invoke swift-syntax's build-script.py with the common flags.

        BUGFIX: additional_params previously defaulted to a mutable ``[]``,
        which is shared between calls; any mutation would leak into later
        invocations. Default to None and create a fresh list instead.
        """
        if additional_params is None:
            additional_params = []
        script_path = os.path.join(self.source_dir, 'build-script.py')

        build_cmd = [
            script_path,
            command,
            '--build-dir', self.build_dir,
            '--multiroot-data-file', MULTIROOT_DATA_FILE_PATH,
            '--toolchain', self.install_toolchain_path(target)
        ]

        if self.is_release():
            build_cmd.append('--release')

        if self.args.verbose_build:
            build_cmd.append('--verbose')

        build_cmd.extend(additional_params)

        shell.call(build_cmd)

    def should_build(self, host_target):
        return True

    def build(self, host_target):
        # Optionally check that the checked-in generated sources are
        # up to date before building.
        if self.args.swiftsyntax_verify_generated_files:
            self.run_swiftsyntax_build_script(target=host_target,
                                              command='verify-source-code')

        self.run_swiftsyntax_build_script(target=host_target,
                                          command='build')

    def should_test(self, host_target):
        return self.args.test_swiftsyntax

    def test(self, host_target):
        # FileCheck lives in the sibling LLVM build directory.
        llvm_build_dir = os.path.join(self.build_dir, '..', 'llvm-' + host_target)
        llvm_build_dir = os.path.realpath(llvm_build_dir)
        self.run_swiftsyntax_build_script(target=host_target,
                                          command='test',
                                          additional_params=[
                                              '--filecheck-exec',
                                              os.path.join(llvm_build_dir,
                                                           'bin',
                                                           'FileCheck')
                                          ])

    def should_install(self, host_target):
        return self.args.install_swiftsyntax

    def install(self, target_name):
        # SwiftSyntax doesn't produce any products that should be installed
        # into the toolchain. All tools using it link against SwiftSyntax
        # statically.
        pass

    @classmethod
    def get_dependencies(cls):
        return [cmark.CMark,
                llvm.LLVM,
                libcxx.LibCXX,
                libicu.LibICU,
                swift.Swift,
                libdispatch.LibDispatch,
                foundation.Foundation,
                xctest.XCTest,
                llbuild.LLBuild,
                swiftpm.SwiftPM]
|
# -*- encoding: utf-8 -*-
import logging
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import re
import time
import requests
log = logging.getLogger('facebook')
log.setLevel(logging.WARN)
MESSAGE_URL = 'https://www.facebook.com/ajax/mercury/send_messages.php'
UPLOAD_URL = 'https://upload.facebook.com/ajax/mercury/upload.php?'
TYP_URL = 'https://www.facebook.com/ajax/messaging/typ.php'
READ_URL = 'https://www.facebook.com/ajax/mercury/change_read_status.php'
# define like sticker id
LIKE_STICKER = {
'l': '369239383222810',
'm': '369239343222814',
's': '369239263222822'
}
def send_group(fb, thread, body, pic=None, sticker=None, like=None):
    # Send `body` to the group conversation identified by `thread` (a thread
    # fbid), optionally attaching a picture URL or a sticker/like.
    # NOTE(review): `fb` appears to expose `user_id`, `dtsg` and a
    # requests-style `session` -- confirm against the client class.
    # The payload mirrors the web client's mercury "send_messages" form.
    data = {
        "message_batch[0][action_type]": "ma-type:user-generated-message",
        "message_batch[0][author]": "fbid:{}".format(fb.user_id),
        "message_batch[0][source]": "source:chat:web",
        "message_batch[0][body]": body,
        "message_batch[0][signatureID]": "3c132b09",
        "message_batch[0][ui_push_phase]": "V3",
        "message_batch[0][status]": "0",
        "message_batch[0][thread_fbid]": thread,
        "client": "mercury",
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    if pic:
        # upload the picture and get picture form data
        pic_data = upload_picture(fb, pic)
        # upload_picture returns None on failure; only merge on success
        if pic_data:
            # merge together to send message with picture
            data.update(pic_data)
    # add sticker if sticker is available
    if any([sticker, like]):
        # if like is not None, find the corresponding sticker id
        if like:
            try:
                sticker = LIKE_STICKER[like.lower()]
            except KeyError:
                # if user doesn't enter l or m or s, then use the large one
                sticker = LIKE_STICKER['l']
        data["message_batch[0][sticker_id]"] = sticker
    fb.session.post(MESSAGE_URL, data)
def send_person(fb, person, body, pic=None, sticker=None, like=None):
    # Send `body` as a direct message to the user fbid `person`, optionally
    # attaching a picture URL or a sticker/like.
    # Same mercury payload as send_group, but addressed via a
    # specific_to_list of [recipient, sender] instead of a thread fbid.
    data = {
        "message_batch[0][action_type]": "ma-type:user-generated-message",
        "message_batch[0][author]": "fbid:{}".format(fb.user_id),
        "message_batch[0][source]": "source:chat:web",
        "message_batch[0][body]": body,
        "message_batch[0][signatureID]": "3c132b09",
        "message_batch[0][ui_push_phase]": "V3",
        "message_batch[0][status]": "0",
        "message_batch[0][specific_to_list][0]": "fbid:{}".format(person),
        "message_batch[0][specific_to_list][1]": "fbid:{}".format(fb.user_id),
        "client": "mercury",
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    if pic:
        # upload the picture and get picture form data
        pic_data = upload_picture(fb, pic)
        # upload_picture returns None on failure; only merge on success
        if pic_data:
            # merge together to send message with picture
            data.update(pic_data)
    # add sticker if sticker is available
    if any([sticker, like]):
        # if like is not None, find the corresponding sticker id
        if like:
            try:
                sticker = LIKE_STICKER[like.lower()]
            except KeyError:
                # if user doesn't enter l or m or s, then use the large one
                sticker = LIKE_STICKER['l']
        data["message_batch[0][sticker_id]"] = sticker
    fb.session.post(MESSAGE_URL, data)
def upload_picture(fb, pic):
    # Fetch the image at URL `pic` and upload it to Facebook's mercury
    # upload endpoint. Returns the attachment form fields to merge into a
    # message payload, or None on any failure.
    params = {
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
        'ft[tn]': '+M',
    }
    # upload the image to facebook server, filename should be unique
    # (the current timestamp serves as the filename)
    res = fb.session.post(UPLOAD_URL + urlencode(params), files={
        'images_only': 'true',
        'upload_1024': (str(time.time()), requests.get(pic).content, 'image/jpeg')
    })
    # check status code
    if res.status_code != 200:
        return
    # check image_id is valid -- scraped from the JS-ish response body
    m = re.search(r'"image_id":(\d+),', res.text)
    if not m:
        return
    image_id = m.group(1)
    # Attachment fields referencing the uploaded image.
    # NOTE(review): preview width/height are hard-coded, not measured.
    return {
        "message_batch[0][has_attachment]": "true",
        "message_batch[0][preview_attachments][0][upload_id]": "upload_1024",
        "message_batch[0][preview_attachments][0][attach_type]": "photo",
        "message_batch[0][preview_attachments][0][preview_uploading]": "true",
        "message_batch[0][preview_attachments][0][preview_width]": "540",
        "message_batch[0][preview_attachments][0][preview_height]": "720",
        "message_batch[0][image_ids][0]": image_id,
    }
def group_typing(fb, thread):
    # Broadcast a "typing" indicator to a group conversation.
    # Group threads leave "to" empty and identify the thread instead.
    data = {
        "typ": "1",
        "to": "",
        "source": "web-messenger",
        "thread": thread,
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    fb.session.post(TYP_URL, data)
def person_typing(fb, thread):
    # Broadcast a "typing" indicator to a one-on-one conversation.
    # Unlike group_typing, "to" carries the recipient fbid.
    data = {
        "typ": "1",
        "to": thread,
        "source": "web-messenger",
        "thread": thread,
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    fb.session.post(TYP_URL, data)
def read(fb, thread):
    # Mark the conversation `thread` as read.
    data = {
        "ids[{}]".format(thread): "true",
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    fb.session.post(READ_URL, data)
Quick message-sending hotfix: route direct messages through the mobile send endpoint.
Known regression: breaks uploading files.
# -*- encoding: utf-8 -*-
import logging
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
import re
import time
import requests
log = logging.getLogger('facebook')
log.setLevel(logging.WARN)
#MESSAGE_URL = 'https://www.facebook.com/ajax/mercury/send_messages.php'
MESSAGE_URL = 'https://m.facebook.com/messages/send/?icm=1&refid=12'
UPLOAD_URL = 'https://upload.facebook.com/ajax/mercury/upload.php?'
TYP_URL = 'https://www.facebook.com/ajax/messaging/typ.php'
READ_URL = 'https://www.facebook.com/ajax/mercury/change_read_status.php'
# define like sticker id
LIKE_STICKER = {
'l': '369239383222810',
'm': '369239343222814',
's': '369239263222822'
}
def send_group(fb, thread, body, pic=None, sticker=None, like=None):
    # Send `body` to the group conversation identified by `thread` (a thread
    # fbid), optionally attaching a picture URL or a sticker/like.
    # NOTE(review): `fb` appears to expose `user_id`, `dtsg` and a
    # requests-style `session` -- confirm against the client class.
    # NOTE(review): MESSAGE_URL now points at the m.facebook send endpoint
    # (hotfix) but this payload is still the old mercury batch form --
    # confirm group sending still works against that endpoint.
    data = {
        "message_batch[0][action_type]": "ma-type:user-generated-message",
        "message_batch[0][author]": "fbid:{}".format(fb.user_id),
        "message_batch[0][source]": "source:chat:web",
        "message_batch[0][body]": body,
        "message_batch[0][signatureID]": "3c132b09",
        "message_batch[0][ui_push_phase]": "V3",
        "message_batch[0][status]": "0",
        "message_batch[0][thread_fbid]": thread,
        "client": "mercury",
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    }
    if pic:
        # upload the picture and get picture form data
        pic_data = upload_picture(fb, pic)
        # upload_picture returns None on failure; only merge on success
        if pic_data:
            # merge together to send message with picture
            data.update(pic_data)
    # add sticker if sticker is available
    if any([sticker, like]):
        # if like is not None, find the corresponding sticker id
        if like:
            try:
                sticker = LIKE_STICKER[like.lower()]
            except KeyError:
                # if user doesn't enter l or m or s, then use the large one
                sticker = LIKE_STICKER['l']
        data["message_batch[0][sticker_id]"] = sticker
    fb.session.post(MESSAGE_URL, data)
def send_person(fb, person, body, pic=None, sticker=None, like=None):
    # Send `body` as a direct message to the user fbid `person`.
    # Hotfix path: posts an m.facebook-style form to the mobile send
    # endpoint instead of the old mercury batch payload.
    # CLEANUP: removed the dead triple-quoted copy of the old mercury
    # payload that was left in the function body; it was an inert string
    # expression evaluated on every call and only added noise.
    data = {
        # charset canary used by the mobile site to detect encoding issues
        "charset_test": "€,´,€,´,水,Д,Є",
        "tids": "mid.1431958823342:701030c3d90a7f3499",
        "wwwupp": "V3",
        "ids[{}]".format(person): person,
        "body": body,
        "waterfall_source": "message",
        "m_sess": "",
        "fb_dtsg": fb.dtsg,
        "__dyn": "1Z3p5wnE-4UpwDF3FQ8xO6ES9xG6U4a6EC5UfQ1uzobE6u1Vw821-yo2bw",
        "__req": "h",
        "__ajax__": "true",
        "__user": fb.user_id
    }
    if pic:
        # upload the picture and get picture form data
        pic_data = upload_picture(fb, pic)
        # upload_picture returns None on failure; only merge on success
        if pic_data:
            # merge together to send message with picture
            data.update(pic_data)
    # add sticker if sticker is available
    if any([sticker, like]):
        # if like is not None, find the corresponding sticker id
        if like:
            try:
                sticker = LIKE_STICKER[like.lower()]
            except KeyError:
                # if user doesn't enter l or m or s, then use the large one
                sticker = LIKE_STICKER['l']
        # NOTE(review): this is still the mercury field name; the mobile
        # endpoint may ignore it -- confirm sticker sending works.
        data["message_batch[0][sticker_id]"] = sticker
    fb.session.post(MESSAGE_URL, data)
def upload_picture(fb, pic):
    # Fetch the image at URL `pic` and upload it to Facebook's mercury
    # upload endpoint. Returns the attachment form fields to merge into a
    # message payload, or None on any failure.
    # NOTE(review): the commit message for this hotfix says uploads are
    # currently broken -- the mercury fields returned here may not be
    # accepted by the new mobile send endpoint.
    params = {
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
        'ft[tn]': '+M',
    }
    # upload the image to facebook server, filename should be unique
    # (the current timestamp serves as the filename)
    res = fb.session.post(UPLOAD_URL + urlencode(params), files={
        'images_only': 'true',
        'upload_1024': (str(time.time()), requests.get(pic).content, 'image/jpeg')
    })
    # check status code
    if res.status_code != 200:
        return
    # check image_id is valid -- scraped from the JS-ish response body
    m = re.search(r'"image_id":(\d+),', res.text)
    if not m:
        return
    image_id = m.group(1)
    # Attachment fields referencing the uploaded image.
    # NOTE(review): preview width/height are hard-coded, not measured.
    return {
        "message_batch[0][has_attachment]": "true",
        "message_batch[0][preview_attachments][0][upload_id]": "upload_1024",
        "message_batch[0][preview_attachments][0][attach_type]": "photo",
        "message_batch[0][preview_attachments][0][preview_uploading]": "true",
        "message_batch[0][preview_attachments][0][preview_width]": "540",
        "message_batch[0][preview_attachments][0][preview_height]": "720",
        "message_batch[0][image_ids][0]": image_id,
    }
def group_typing(fb, thread):
data = {
"typ": "1",
"to": "",
"source": "web-messenger",
"thread": thread,
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
fb.session.post(TYP_URL, data)
def person_typing(fb, thread):
data = {
"typ": "1",
"to": thread,
"source": "web-messenger",
"thread": thread,
"__user": fb.user_id,
"__a": "1",
"__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
"__req": "c",
"fb_dtsg": fb.dtsg,
"ttstamp": "26581691011017411284781047297",
"__rev": "1436610",
}
fb.session.post(TYP_URL, data)
def read(fb, thread):
    """Mark the conversation *thread* as read."""
    # The endpoint expects a per-thread flag keyed as ids[<thread>].
    payload = {"ids[{}]".format(thread): "true"}
    # Boilerplate session/auth fields shared by the messenger endpoints.
    payload.update({
        "__user": fb.user_id,
        "__a": "1",
        "__dyn": "7n8anEBQ9FoBUSt2u6aAix97xN6yUgByV9GiyFqzQC-C26m6oDAyoSnx2ubhHAyXBBzEy5E",
        "__req": "c",
        "fb_dtsg": fb.dtsg,
        "ttstamp": "26581691011017411284781047297",
        "__rev": "1436610",
    })
    fb.session.post(READ_URL, payload)
|
{
'target_defaults': {
'configurations': {
'Debug': {
'defines': [
'SK_DEBUG',
'GR_DEBUG=1',
],
},
'Release': {
'defines': [
'SK_RELEASE',
'GR_RELEASE=1',
],
},
},
'conditions': [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'include_dirs' : [
'/usr/include/freetype2',
],
}],
[ 'OS == "mac"', {
'defines': [
'SK_BUILD_FOR_MAC',
],
}],
[ 'OS == "win"', {
'defines': [
'SK_BUILD_FOR_WIN32',
'SK_IGNORE_STDINT_DOT_H',
],
}],
[ 'OS == "linux"', {
'defines': [
'SK_SAMPLES_FOR_X',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "mac"', {
'defines': [
'SK_BUILD_FOR_MAC',
],
}],
[ 'OS == "win"', {
'defines': [
'SK_BUILD_FOR_WIN32',
],
}],
],
},
},
'targets': [
{
'target_name': 'skia',
'type': 'static_library',
'msvs_guid': 'B7760B5E-BFA8-486B-ACFD-49E3A6DE8E76',
'sources': [
'../src/core/ARGB32_Clamp_Bilinear_BitmapShader.h',
'../src/core/Sk64.cpp',
'../src/core/SkAdvancedTypefaceMetrics.cpp',
'../src/core/SkAlphaRuns.cpp',
'../src/core/SkAntiRun.h',
'../src/core/SkBitmap.cpp',
'../src/core/SkBitmapProcShader.cpp',
'../src/core/SkBitmapProcShader.h',
'../src/core/SkBitmapProcState.cpp',
'../src/core/SkBitmapProcState.h',
'../src/core/SkBitmapProcState_matrix.h',
'../src/core/SkBitmapProcState_matrixProcs.cpp',
'../src/core/SkBitmapProcState_sample.h',
'../src/core/SkBitmapSampler.cpp',
'../src/core/SkBitmapSampler.h',
'../src/core/SkBitmapSamplerTemplate.h',
'../src/core/SkBitmapShader16BilerpTemplate.h',
'../src/core/SkBitmapShaderTemplate.h',
'../src/core/SkBitmap_scroll.cpp',
'../src/core/SkBlitBWMaskTemplate.h',
'../src/core/SkBlitRow_D16.cpp',
'../src/core/SkBlitRow_D32.cpp',
'../src/core/SkBlitRow_D4444.cpp',
'../src/core/SkBlitter.cpp',
'../src/core/SkBlitter_4444.cpp',
'../src/core/SkBlitter_A1.cpp',
'../src/core/SkBlitter_A8.cpp',
'../src/core/SkBlitter_ARGB32.cpp',
'../src/core/SkBlitter_RGB16.cpp',
'../src/core/SkBlitter_Sprite.cpp',
'../src/core/SkBuffer.cpp',
'../src/core/SkCanvas.cpp',
'../src/core/SkChunkAlloc.cpp',
'../src/core/SkClampRange.cpp',
'../src/core/SkClipStack.cpp',
'../src/core/SkColor.cpp',
'../src/core/SkColorFilter.cpp',
'../src/core/SkColorTable.cpp',
'../src/core/SkComposeShader.cpp',
'../src/core/SkConcaveToTriangles.cpp',
'../src/core/SkConcaveToTriangles.h',
'../src/core/SkCordic.cpp',
'../src/core/SkCordic.h',
'../src/core/SkCoreBlitters.h',
'../src/core/SkCubicClipper.cpp',
'../src/core/SkCubicClipper.h',
'../src/core/SkDebug.cpp',
'../src/core/SkDeque.cpp',
'../src/core/SkDevice.cpp',
'../src/core/SkDither.cpp',
'../src/core/SkDraw.cpp',
'../src/core/SkDrawProcs.h',
'../src/core/SkEdgeBuilder.cpp',
'../src/core/SkEdgeClipper.cpp',
'../src/core/SkEdge.cpp',
'../src/core/SkEdge.h',
'../src/core/SkFP.h',
'../src/core/SkFilterProc.cpp',
'../src/core/SkFilterProc.h',
'../src/core/SkFlattenable.cpp',
'../src/core/SkFloat.cpp',
'../src/core/SkFloat.h',
'../src/core/SkFloatBits.cpp',
'../src/core/SkFontHost.cpp',
'../src/core/SkGeometry.cpp',
'../src/core/SkGlobals.cpp',
'../src/core/SkGlyphCache.cpp',
'../src/core/SkGlyphCache.h',
'../src/core/SkGraphics.cpp',
'../src/core/SkLineClipper.cpp',
'../src/core/SkMallocPixelRef.cpp',
'../src/core/SkMask.cpp',
'../src/core/SkMaskFilter.cpp',
'../src/core/SkMath.cpp',
'../src/core/SkMatrix.cpp',
'../src/core/SkMetaData.cpp',
'../src/core/SkPackBits.cpp',
'../src/core/SkPaint.cpp',
'../src/core/SkPath.cpp',
'../src/core/SkPathEffect.cpp',
'../src/core/SkPathHeap.cpp',
'../src/core/SkPathHeap.h',
'../src/core/SkPathMeasure.cpp',
'../src/core/SkPicture.cpp',
'../src/core/SkPictureFlat.cpp',
'../src/core/SkPictureFlat.h',
'../src/core/SkPicturePlayback.cpp',
'../src/core/SkPicturePlayback.h',
'../src/core/SkPictureRecord.cpp',
'../src/core/SkPictureRecord.h',
'../src/core/SkPixelRef.cpp',
'../src/core/SkPoint.cpp',
'../src/core/SkProcSpriteBlitter.cpp',
'../src/core/SkPtrRecorder.cpp',
'../src/core/SkQuadClipper.cpp',
'../src/core/SkQuadClipper.h',
'../src/core/SkRasterizer.cpp',
'../src/core/SkRect.cpp',
'../src/core/SkRefDict.cpp',
'../src/core/SkRegion.cpp',
'../src/core/SkRegionPriv.h',
'../src/core/SkRegion_path.cpp',
'../src/core/SkScalar.cpp',
'../src/core/SkScalerContext.cpp',
'../src/core/SkScan.cpp',
'../src/core/SkScanPriv.h',
'../src/core/SkScan_AntiPath.cpp',
'../src/core/SkScan_Antihair.cpp',
'../src/core/SkScan_Hairline.cpp',
'../src/core/SkScan_Path.cpp',
'../src/core/SkShader.cpp',
'../src/core/SkShape.cpp',
'../src/core/SkSpriteBlitter_ARGB32.cpp',
'../src/core/SkSpriteBlitter_RGB16.cpp',
'../src/core/SkSinTable.h',
'../src/core/SkSpriteBlitter.h',
'../src/core/SkSpriteBlitterTemplate.h',
'../src/core/SkStream.cpp',
'../src/core/SkString.cpp',
'../src/core/SkStroke.cpp',
'../src/core/SkStrokerPriv.cpp',
'../src/core/SkStrokerPriv.h',
'../src/core/SkTextFormatParams.h',
'../src/core/SkTSearch.cpp',
'../src/core/SkTSort.h',
'../src/core/SkTemplatesPriv.h',
'../src/core/SkTypeface.cpp',
'../src/core/SkTypefaceCache.cpp',
'../src/core/SkTypefaceCache.h',
'../src/core/SkUnPreMultiply.cpp',
'../src/core/SkUtils.cpp',
'../src/core/SkWriter32.cpp',
'../src/core/SkXfermode.cpp',
'../src/opts/opts_check_SSE2.cpp',
'../src/ports/SkDebug_stdio.cpp',
'../src/ports/SkDebug_win.cpp',
'../src/ports/SkFontHost_tables.cpp',
'../src/ports/SkGlobals_global.cpp',
'../src/ports/SkMemory_malloc.cpp',
'../src/ports/SkOSFile_stdio.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/ports/SkXMLParser_empty.cpp',
'../src/ports/sk_predefined_gamma.h',
'../include/core/Sk64.h',
'../include/core/SkAdvancedTypefaceMetrics.h',
'../include/core/SkAutoKern.h',
'../include/core/SkBitmap.h',
'../include/core/SkBlitRow.h',
'../include/core/SkBlitter.h',
'../include/core/SkBounder.h',
'../include/core/SkBuffer.h',
'../include/core/SkCanvas.h',
'../include/core/SkChunkAlloc.h',
'../include/core/SkClampRange.h',
'../include/core/SkClipStack.h',
'../include/core/SkColor.h',
'../include/core/SkColorFilter.h',
'../include/core/SkColorPriv.h',
'../include/core/SkColorShader.h',
'../include/core/SkComposeShader.h',
'../include/core/SkDeque.h',
'../include/core/SkDescriptor.h',
'../include/core/SkDevice.h',
'../include/core/SkDither.h',
'../include/core/SkDraw.h',
'../include/core/SkDrawFilter.h',
'../include/core/SkDrawLooper.h',
'../include/core/SkEndian.h',
'../include/core/SkFDot6.h',
'../include/core/SkFixed.h',
'../include/core/SkFlattenable.h',
'../include/core/SkFloatBits.h',
'../include/core/SkFloatingPoint.h',
'../include/core/SkFontHost.h',
'../include/core/SkGeometry.h',
'../include/core/SkGlobals.h',
'../include/core/SkGraphics.h',
'../include/core/SkMallocPixelRef.h',
'../include/core/SkMask.h',
'../include/core/SkMaskFilter.h',
'../include/core/SkMath.h',
'../include/core/SkMatrix.h',
'../include/core/SkMetaData.h',
'../include/core/SkOSFile.h',
'../include/core/SkPackBits.h',
'../include/core/SkPaint.h',
'../include/core/SkPath.h',
'../include/core/SkPathEffect.h',
'../include/core/SkPathMeasure.h',
'../include/core/SkPerspIter.h',
'../include/core/SkPicture.h',
'../include/core/SkPixelRef.h',
'../include/core/SkPoint.h',
'../include/core/SkPtrRecorder.h',
'../include/core/SkRandom.h',
'../include/core/SkRasterizer.h',
'../include/core/SkReader32.h',
'../include/core/SkRect.h',
'../include/core/SkRefCnt.h',
'../include/core/SkRefDict.h',
'../include/core/SkRegion.h',
'../include/core/SkScalar.h',
'../include/core/SkScalarCompare.h',
'../include/core/SkScalerContext.h',
'../include/core/SkScan.h',
'../include/core/SkShader.h',
'../include/core/SkStream.h',
'../include/core/SkString.h',
'../include/core/SkStroke.h',
'../include/core/SkTDArray.h',
'../include/core/SkTDStack.h',
'../include/core/SkTDict.h',
'../include/core/SkTRegistry.h',
'../include/core/SkTScopedPtr.h',
'../include/core/SkTSearch.h',
'../include/core/SkTemplates.h',
'../include/core/SkThread.h',
'../include/core/SkThread_platform.h',
'../include/core/SkTime.h',
'../include/core/SkTypeface.h',
'../include/core/SkTypes.h',
'../include/core/SkUnPreMultiply.h',
'../include/core/SkUnitMapper.h',
'../include/core/SkUtils.h',
'../include/core/SkWriter32.h',
'../include/core/SkXfermode.h',
],
'include_dirs': [
'../include/config',
'../include/core',
'../include/ports',
'../include/xml',
'../src/core',
],
      'msvs_disabled_warnings': [4244, 4267, 4345, 4390, 4554, 4800],
'conditions': [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'cflags': [
'-Wno-unused',
'-Wno-unused-function',
],
'sources': [
'../include/core/SkMMapStream.h',
'../src/core/SkMMapStream.cpp',
'../src/core/SkBlitter_ARGB32_Subpixel.cpp',
'../src/ports/SkThread_pthread.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/ports/SkFontHost_FreeType_Subpixel.cpp',
'../src/ports/SkFontHost_FreeType.cpp',
'../src/ports/SkFontHost_gamma_none.cpp',
'../src/ports/SkFontHost_linux.cpp',
],
'link_settings': {
'libraries': [
'-lfreetype',
'-lpthread',
],
},
}],
[ 'OS == "mac"', {
'include_dirs': [
'../include/utils/mac',
],
'sources': [
'../include/core/SkMMapStream.h',
'../include/utils/mac/SkCGUtils.h',
'../src/core/SkMMapStream.cpp',
'../src/ports/SkFontHost_mac_coretext.cpp',
'../src/ports/SkThread_pthread.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/utils/mac/SkCreateCGImageRef.cpp',
],
}],
[ 'OS == "win"', {
'include_dirs': [
'config/win',
],
'sources': [
'../src/ports/SkFontHost_win.cpp',
'../src/ports/SkThread_win.cpp',
],
'sources!': [
'../src/ports/SkDebug_stdio.cpp',
],
}],
[ 'OS != "win"', {
'sources!': [
'../src/ports/SkDebug_win.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'config',
'../include/config',
'../include/core',
'ext',
],
},
'dependencies': [
'skia_opts'
],
},
# Due to an unfortunate intersection of lameness between gcc and gyp,
# we have to build the *_SSE2.cpp files in a separate target. The
# gcc lameness is that, in order to compile SSE2 intrinsics code, it
# must be passed the -msse2 flag. However, with this flag, it may
# emit SSE2 instructions even for scalar code, such as the CPUID
# test used to test for the presence of SSE2. So that, and all other
# code must be compiled *without* -msse2. The gyp lameness is that it
# does not allow file-specific CFLAGS, so we must create this extra
# target for those files to be compiled with -msse2.
#
# This is actually only a problem on 32-bit Linux (all Intel Macs have
# SSE2, Linux x86_64 has SSE2 by definition, and MSC will happily emit
    # SSE2 from intrinsics, while generating plain ol' 386 for everything
# else). However, to keep the .gyp file simple and avoid platform-specific
# build breakage, we do this on all platforms.
# For about the same reason, we need to compile the ARM opts files
# separately as well.
{
'target_name': 'skia_opts',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../src/core',
],
'conditions': [
[ '(OS == "linux" or OS == "freebsd" or OS == "openbsd")', {
'cflags': [
'-msse2',
],
}],
],
'sources': [
'../src/opts/SkBitmapProcState_opts_SSE2.cpp',
'../src/opts/SkBlitRow_opts_SSE2.cpp',
'../src/opts/SkUtils_opts_SSE2.cpp',
],
},
{
'target_name': 'effects',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/effects',
],
'sources': [
'../include/effects/Sk1DPathEffect.h',
'../include/effects/Sk2DPathEffect.h',
'../include/effects/SkAvoidXfermode.h',
'../include/effects/SkBlurDrawLooper.h',
'../include/effects/SkBlurMaskFilter.h',
'../include/effects/SkColorMatrix.h',
'../include/effects/SkColorMatrixFilter.h',
'../include/effects/SkCornerPathEffect.h',
'../include/effects/SkDashPathEffect.h',
'../include/effects/SkDiscretePathEffect.h',
'../include/effects/SkDrawExtraPathEffect.h',
'../include/effects/SkEmbossMaskFilter.h',
'../include/effects/SkGradientShader.h',
'../include/effects/SkGroupShape.h',
'../include/effects/SkKernel33MaskFilter.h',
'../include/effects/SkLayerDrawLooper.h',
'../include/effects/SkLayerRasterizer.h',
'../include/effects/SkPaintFlagsDrawFilter.h',
'../include/effects/SkPixelXorXfermode.h',
'../include/effects/SkPorterDuff.h',
'../include/effects/SkRectShape.h',
'../include/effects/SkTableMaskFilter.h',
'../include/effects/SkTransparentShader.h',
'../src/effects/Sk1DPathEffect.cpp',
'../src/effects/Sk2DPathEffect.cpp',
'../src/effects/SkAvoidXfermode.cpp',
'../src/effects/SkBitmapCache.cpp',
'../src/effects/SkBitmapCache.h',
'../src/effects/SkBlurDrawLooper.cpp',
'../src/effects/SkBlurMask.cpp',
'../src/effects/SkBlurMask.h',
'../src/effects/SkBlurMaskFilter.cpp',
'../src/effects/SkColorFilters.cpp',
'../src/effects/SkColorMatrixFilter.cpp',
'../src/effects/SkCornerPathEffect.cpp',
'../src/effects/SkDashPathEffect.cpp',
'../src/effects/SkDiscretePathEffect.cpp',
'../src/effects/SkEmbossMask.cpp',
'../src/effects/SkEmbossMask.h',
'../src/effects/SkEmbossMask_Table.h',
'../src/effects/SkEmbossMaskFilter.cpp',
'../src/effects/SkGradientShader.cpp',
'../src/effects/SkGroupShape.cpp',
'../src/effects/SkKernel33MaskFilter.cpp',
'../src/effects/SkLayerDrawLooper.cpp',
'../src/effects/SkLayerRasterizer.cpp',
'../src/effects/SkPaintFlagsDrawFilter.cpp',
'../src/effects/SkPixelXorXfermode.cpp',
'../src/effects/SkPorterDuff.cpp',
'../src/effects/SkRadialGradient_Table.h',
'../src/effects/SkRectShape.cpp',
'../src/effects/SkTableMaskFilter.cpp',
'../src/effects/SkTransparentShader.cpp',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/effects',
],
},
},
{
'target_name': 'images',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/images',
],
'sources': [
'../include/images/SkFlipPixelRef.h',
'../include/images/SkImageDecoder.h',
'../include/images/SkImageEncoder.h',
'../include/images/SkImageRef.h',
'../include/images/SkImageRef_GlobalPool.h',
'../include/images/SkJpegUtility.h',
'../include/images/SkMovie.h',
'../include/images/SkPageFlipper.h',
'../src/images/bmpdecoderhelper.cpp',
'../src/images/bmpdecoderhelper.h',
'../src/images/SkBitmap_RLEPixels.h',
'../src/images/SkCreateRLEPixelRef.cpp',
'../src/images/SkFDStream.cpp',
'../src/images/SkFlipPixelRef.cpp',
'../src/images/SkImageDecoder.cpp',
'../src/images/SkImageDecoder_Factory.cpp',
'../src/images/SkImageDecoder_libbmp.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libico.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkImageDecoder_wbmp.cpp',
'../src/images/SkImageEncoder.cpp',
'../src/images/SkImageEncoder_Factory.cpp',
'../src/images/SkImageRef.cpp',
'../src/images/SkImageRefPool.cpp',
'../src/images/SkImageRefPool.h',
'../src/images/SkImageRef_GlobalPool.cpp',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie.cpp',
'../src/images/SkMovie_gif.cpp',
'../src/images/SkPageFlipper.cpp',
'../src/images/SkScaledBitmapSampler.cpp',
'../src/images/SkScaledBitmapSampler.h',
],
'conditions': [
[ 'OS == "win"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkFDStream.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
[ 'OS == "mac"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/images',
],
},
},
{
'target_name': 'xml',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/xml',
'../include/utils',
],
'sources': [
'../include/xml/SkBML_WXMLParser.h',
'../include/xml/SkBML_XMLParser.h',
'../include/xml/SkDOM.h',
'../include/xml/SkJS.h',
'../include/xml/SkXMLParser.h',
'../include/xml/SkXMLWriter.h',
'../src/xml/SkBML_Verbs.h',
'../src/xml/SkBML_XMLParser.cpp',
'../src/xml/SkDOM.cpp',
'../src/xml/SkJS.cpp',
'../src/xml/SkJSDisplayable.cpp',
'../src/xml/SkXMLParser.cpp',
'../src/xml/SkXMLPullParser.cpp',
'../src/xml/SkXMLWriter.cpp',
],
'sources!': [
'../src/xml/SkXMLPullParser.cpp', #if 0 around class decl in header
],
'conditions': [
[ 'OS == "win" or OS == "mac" or OS == "linux" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
# no jsapi.h by default on system
'../include/xml/SkJS.h',
'../src/xml/SkJS.cpp',
'../src/xml/SkJSDisplayable.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/xml',
],
},
},
{
'target_name': 'pdf',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/pdf',
'../src/core', # needed to get SkGlyphCache.h and SkTextFormatParams.h
],
'sources': [
'../include/pdf/SkPDFCatalog.h',
'../include/pdf/SkPDFDevice.h',
'../include/pdf/SkPDFDocument.h',
'../include/pdf/SkPDFFont.h',
'../include/pdf/SkPDFFormXObject.h',
'../include/pdf/SkPDFGraphicState.h',
'../include/pdf/SkPDFImage.h',
'../include/pdf/SkPDFPage.h',
'../include/pdf/SkPDFShader.h',
'../include/pdf/SkPDFStream.h',
'../include/pdf/SkPDFTypes.h',
'../include/pdf/SkPDFUtils.h',
'../src/pdf/SkPDFCatalog.cpp',
'../src/pdf/SkPDFDevice.cpp',
'../src/pdf/SkPDFDocument.cpp',
'../src/pdf/SkPDFFont.cpp',
'../src/pdf/SkPDFFormXObject.cpp',
'../src/pdf/SkPDFGraphicState.cpp',
'../src/pdf/SkPDFImage.cpp',
'../src/pdf/SkPDFPage.cpp',
'../src/pdf/SkPDFShader.cpp',
'../src/pdf/SkPDFStream.cpp',
'../src/pdf/SkPDFTypes.cpp',
'../src/pdf/SkPDFUtils.cpp',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/pdf',
],
},
},
{
'target_name': 'utils',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/utils',
'../include/views',
'../include/effects',
'../include/xml',
],
'sources': [
'../include/utils/SkBoundaryPatch.h',
'../include/utils/SkCamera.h',
'../include/utils/SkCubicInterval.h',
'../include/utils/SkCullPoints.h',
'../include/utils/SkDumpCanvas.h',
'../include/utils/SkEGLContext.h',
'../include/utils/SkGLCanvas.h',
'../include/utils/SkInterpolator.h',
'../include/utils/SkLayer.h',
'../include/utils/SkMeshUtils.h',
'../include/utils/SkNinePatch.h',
'../include/utils/SkNWayCanvas.h',
'../include/utils/SkParse.h',
'../include/utils/SkParsePaint.h',
'../include/utils/SkParsePath.h',
'../include/utils/SkProxyCanvas.h',
'../include/utils/SkSfntUtils.h',
'../include/utils/SkTextBox.h',
'../include/utils/SkUnitMappers.h',
'../src/utils/SkBoundaryPatch.cpp',
'../src/utils/SkCamera.cpp',
'../src/utils/SkColorMatrix.cpp',
'../src/utils/SkCubicInterval.cpp',
'../src/utils/SkCullPoints.cpp',
'../src/utils/SkDumpCanvas.cpp',
'../src/utils/SkEGLContext_none.cpp',
'../src/utils/SkInterpolator.cpp',
'../src/utils/SkLayer.cpp',
'../src/utils/SkMeshUtils.cpp',
'../src/utils/SkNinePatch.cpp',
'../src/utils/SkNWayCanvas.cpp',
'../src/utils/SkOSFile.cpp',
'../src/utils/SkParse.cpp',
'../src/utils/SkParseColor.cpp',
'../src/utils/SkParsePath.cpp',
'../src/utils/SkProxyCanvas.cpp',
'../src/utils/SkSfntUtils.cpp',
'../src/utils/SkUnitMappers.cpp',
],
'conditions': [
[ 'OS == "mac"', {
'sources': [
'../include/utils/SkCGUtils.h',
'../src/utils/mac/SkCreateCGImageRef.cpp',
'../src/utils/mac/SkEGLContext_mac.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/utils',
],
},
},
{
'target_name': 'views',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/views',
'../include/xml',
'../include/utils',
'../include/images',
'../include/animator',
'../include/effects',
],
'sources': [
'../include/views/SkApplication.h',
'../include/views/SkBGViewArtist.h',
'../include/views/SkBorderView.h',
'../include/views/SkEvent.h',
'../include/views/SkEventSink.h',
'../include/views/SkImageView.h',
'../include/views/SkKey.h',
'../include/views/SkOSMenu.h',
'../include/views/SkOSWindow_Mac.h',
'../include/views/SkOSWindow_SDL.h',
'../include/views/SkOSWindow_Unix.h',
'../include/views/SkOSWindow_Win.h',
#'../include/views/SkOSWindow_wxwidgets.h',
'../include/views/SkProgressBarView.h',
'../include/views/SkScrollBarView.h',
'../include/views/SkStackViewLayout.h',
'../include/views/SkSystemEventTypes.h',
'../include/views/SkTouchGesture.h',
'../include/views/SkView.h',
'../include/views/SkViewInflate.h',
'../include/views/SkWidget.h',
'../include/views/SkWidgetViews.h',
'../include/views/SkWindow.h',
'../src/views/SkBGViewArtist.cpp',
'../src/views/SkBorderView.cpp',
'../src/views/SkEvent.cpp',
'../src/views/SkEventSink.cpp',
'../src/views/SkImageView.cpp',
'../src/views/SkListView.cpp',
'../src/views/SkListWidget.cpp',
'../src/views/SkOSMenu.cpp',
'../src/views/SkParsePaint.cpp',
'../src/views/SkProgressBarView.cpp',
'../src/views/SkProgressView.cpp',
'../src/views/SkScrollBarView.cpp',
'../src/views/SkStackViewLayout.cpp',
'../src/views/SkStaticTextView.cpp',
'../src/views/SkTagList.cpp',
'../src/views/SkTagList.h',
'../src/views/SkTextBox.cpp',
'../src/views/SkTouchGesture.cpp',
'../src/views/SkView.cpp',
'../src/views/SkViewInflate.cpp',
'../src/views/SkViewPriv.cpp',
'../src/views/SkViewPriv.h',
'../src/views/SkWidget.cpp',
'../src/views/SkWidgets.cpp',
'../src/views/SkWidgetViews.cpp',
'../src/views/SkWindow.cpp',
],
'sources!' : [
'../src/views/SkListView.cpp', #depends on missing SkListSource implementation
'../src/views/SkListWidget.cpp', #depends on missing SkListSource implementation
],
'conditions': [
[ 'OS == "win"', {
'sources': [
'../src/utils/win/SkOSWindow_Win.cpp',
'../src/utils/win/skia_win.cpp',
],
}],
[ 'OS == "mac"', {
'sources': [
'../include/utils/SkCGUtils.h',
#'../src/utils/mac/SkBitmap_Mac.cpp',
'../src/utils/mac/SkCreateCGImageRef.cpp',
'../src/utils/mac/SkEGLContext_mac.cpp',
'../src/utils/mac/skia_mac.cpp',
'../src/utils/mac/SkOSWindow_Mac.cpp',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
'$(SDKROOT)/System/Library/Frameworks/AGL.framework',
],
},
}],
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'include_dirs' : [
'../include/utils/unix',
],
'sources': [
'../src/utils/unix/keysym2ucs.c',
'../src/utils/unix/SkOSWindow_Unix.cpp',
'../unix_test_app/main.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/views',
],
},
},
{
'target_name': 'skgr',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../src/core',
'../include/gpu',
'../gpu/include',
],
'sources': [
'../include/gpu/SkGpuCanvas.h',
'../include/gpu/SkGpuDevice.h',
'../include/gpu/SkGpuDeviceFactory.h',
'../include/gpu/SkGr.h',
'../include/gpu/SkGrTexturePixelRef.h',
'../src/gpu/GrPrintf_skia.cpp',
'../src/gpu/SkGpuCanvas.cpp',
'../src/gpu/SkGpuDevice.cpp',
'../src/gpu/SkGr.cpp',
'../src/gpu/SkGrFontScaler.cpp',
'../src/gpu/SkGrTexturePixelRef.cpp',
],
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
],
}],
],
'include_dirs': [
'../include/gpu',
],
},
},
{
'target_name': 'gr',
'type': 'static_library',
'include_dirs': [
'../gpu/include',
'../include/core',
'../include/config',
],
'dependencies': [
'libtess',
],
'sources': [
'../gpu/include/GrAllocator.h',
'../gpu/include/GrAllocPool.h',
'../gpu/include/GrAtlas.h',
'../gpu/include/GrClip.h',
'../gpu/include/GrClipIterator.h',
'../gpu/include/GrColor.h',
'../gpu/include/GrConfig.h',
'../gpu/include/GrContext.h',
'../gpu/include/GrContext_impl.h',
'../gpu/include/GrDrawTarget.h',
'../gpu/include/GrFontScaler.h',
'../gpu/include/GrGeometryBuffer.h',
'../gpu/include/GrGLConfig.h',
'../gpu/include/GrGLConfig_chrome.h',
'../gpu/include/GrGLIndexBuffer.h',
'../gpu/include/GrGLInterface.h',
'../gpu/include/GrGLIRect.h',
'../gpu/include/GrGLTexture.h',
'../gpu/include/GrGLVertexBuffer.h',
'../gpu/include/GrGlyph.h',
'../gpu/include/GrGpu.h',
'../gpu/include/GrGpuVertex.h',
'../gpu/include/GrIndexBuffer.h',
'../gpu/include/GrInOrderDrawBuffer.h',
'../gpu/include/GrInstanceCounter.h',
'../gpu/include/GrIPoint.h',
'../gpu/include/GrKey.h',
'../gpu/include/GrMatrix.h',
'../gpu/include/GrMemory.h',
'../gpu/include/GrMesh.h',
'../gpu/include/GrNoncopyable.h',
'../gpu/include/GrPaint.h',
'../gpu/include/GrPath.h',
'../gpu/include/GrPathRenderer.h',
'../gpu/include/GrPathSink.h',
'../gpu/include/GrPlotMgr.h',
'../gpu/include/GrPoint.h',
'../gpu/include/GrRandom.h',
'../gpu/include/GrRect.h',
'../gpu/include/GrRectanizer.h',
'../gpu/include/GrRefCnt.h',
'../gpu/include/GrResource.h',
'../gpu/include/GrSamplerState.h',
'../gpu/include/GrScalar.h',
'../gpu/include/GrStencil.h',
'../gpu/include/GrStopwatch.h',
'../gpu/include/GrStringBuilder.h',
'../gpu/include/GrTArray.h',
'../gpu/include/GrTBSearch.h',
'../gpu/include/GrTDArray.h',
'../gpu/include/GrTesselatedPathRenderer.h',
'../gpu/include/GrTextContext.h',
'../gpu/include/GrTextStrike.h',
'../gpu/include/GrTexture.h',
'../gpu/include/GrTextureCache.h',
'../gpu/include/GrTHashCache.h',
'../gpu/include/GrTLList.h',
'../gpu/include/GrTouchGesture.h',
'../gpu/include/GrTypes.h',
'../gpu/include/GrUserConfig.h',
'../gpu/include/GrVertexBuffer.h',
'../gpu/src/GrAllocPool.cpp',
'../gpu/src/GrAtlas.cpp',
'../gpu/src/GrBinHashKey.h',
'../gpu/src/GrBufferAllocPool.cpp',
'../gpu/src/GrBufferAllocPool.h',
'../gpu/src/GrClip.cpp',
'../gpu/src/GrContext.cpp',
'../gpu/src/GrCreatePathRenderer_none.cpp',
'../gpu/src/GrDrawTarget.cpp',
'../gpu/src/GrGLDefaultInterface_none.cpp',
'../gpu/src/GrGLIndexBuffer.cpp',
'../gpu/src/GrGLInterface.cpp',
'../gpu/src/GrGLProgram.cpp',
'../gpu/src/GrGLProgram.h',
'../gpu/src/GrGLTexture.cpp',
'../gpu/src/GrGLUtil.cpp',
'../gpu/src/GrGLVertexBuffer.cpp',
'../gpu/src/GrGpu.cpp',
'../gpu/src/GrGpuFactory.cpp',
'../gpu/src/GrGpuGL.cpp',
'../gpu/src/GrGpuGL.h',
'../gpu/src/GrGpuGLFixed.cpp',
'../gpu/src/GrGpuGLFixed.h',
'../gpu/src/GrGpuGLShaders.cpp',
'../gpu/src/GrGpuGLShaders.h',
'../gpu/src/GrInOrderDrawBuffer.cpp',
'../gpu/src/GrMatrix.cpp',
'../gpu/src/GrMemory.cpp',
'../gpu/src/GrPathRenderer.cpp',
'../gpu/src/GrPathUtils.cpp',
'../gpu/src/GrPathUtils.h',
'../gpu/src/GrRectanizer.cpp',
'../gpu/src/GrRedBlackTree.h',
'../gpu/src/GrResource.cpp',
'../gpu/src/GrStencil.cpp',
'../gpu/src/GrTesselatedPathRenderer.cpp',
'../gpu/src/GrTextContext.cpp',
'../gpu/src/GrTextStrike.cpp',
'../gpu/src/GrTextStrike_impl.h',
'../gpu/src/GrTexture.cpp',
'../gpu/src/GrTextureCache.cpp',
'../gpu/src/gr_unittests.cpp',
'../gpu/src/mac/GrGLDefaultInterface_mac.cpp',
'../gpu/src/win/GrGLDefaultInterface_win.cpp',
'../gpu/src/unix/GrGLDefaultInterface_unix.cpp',
],
'defines': [
'GR_IMPLEMENTATION=1',
],
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
'link_settings': {
'libraries': [
'-lGL',
'-lX11',
],
},
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
},
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
'GR_GL_FUNCTION_TYPE=__stdcall',
],
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
}],
[ 'OS != "win"', {
'sources!': [
'../gpu/src/win/GrGLDefaultInterface_win.cpp',
],
}],
[ 'OS != "mac"', {
'sources!': [
'../gpu/src/mac/GrGLDefaultInterface_mac.cpp',
],
}],
[ 'OS != "linux"', {
'sources!': [
'../gpu/src/unix/GrGLDefaultInterface_unix.cpp',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
'GR_GL_FUNCTION_TYPE=__stdcall',
],
}],
],
'include_dirs': [
'../gpu/include',
],
},
},
{
'target_name': 'animator',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/effects',
'../include/animator',
'../include/views',
'../include/xml',
'../include/utils',
'../include/images',
],
'sources': [
'../include/animator/SkAnimator.h',
'../include/animator/SkAnimatorView.h',
'../src/animator/SkAnimate.h',
'../src/animator/SkAnimateActive.cpp',
'../src/animator/SkAnimateActive.h',
'../src/animator/SkAnimateBase.cpp',
'../src/animator/SkAnimateBase.h',
'../src/animator/SkAnimateField.cpp',
'../src/animator/SkAnimateMaker.cpp',
'../src/animator/SkAnimateMaker.h',
'../src/animator/SkAnimateProperties.h',
'../src/animator/SkAnimateSet.cpp',
'../src/animator/SkAnimateSet.h',
'../src/animator/SkAnimator.cpp',
'../src/animator/SkAnimatorScript.cpp',
'../src/animator/SkAnimatorScript.h',
#'../src/animator/SkAnimatorScript2.cpp', fails on windows
#'../src/animator/SkAnimatorScript2.h',
'../src/animator/SkBase64.cpp',
'../src/animator/SkBase64.h',
'../src/animator/SkBoundable.cpp',
'../src/animator/SkBoundable.h',
'../src/animator/SkBuildCondensedInfo.cpp',
#'../src/animator/SkCondensedDebug.cpp', fails on windows
#'../src/animator/SkCondensedRelease.cpp',
'../src/animator/SkDisplayable.cpp',
'../src/animator/SkDisplayable.h',
'../src/animator/SkDisplayAdd.cpp',
'../src/animator/SkDisplayAdd.h',
'../src/animator/SkDisplayApply.cpp',
'../src/animator/SkDisplayApply.h',
'../src/animator/SkDisplayBounds.cpp',
'../src/animator/SkDisplayBounds.h',
'../src/animator/SkDisplayEvent.cpp',
'../src/animator/SkDisplayEvent.h',
'../src/animator/SkDisplayEvents.cpp',
'../src/animator/SkDisplayEvents.h',
'../src/animator/SkDisplayInclude.cpp',
'../src/animator/SkDisplayInclude.h',
'../src/animator/SkDisplayInput.cpp',
'../src/animator/SkDisplayInput.h',
'../src/animator/SkDisplayList.cpp',
'../src/animator/SkDisplayList.h',
'../src/animator/SkDisplayMath.cpp',
'../src/animator/SkDisplayMath.h',
'../src/animator/SkDisplayMovie.cpp',
'../src/animator/SkDisplayMovie.h',
'../src/animator/SkDisplayNumber.cpp',
'../src/animator/SkDisplayNumber.h',
'../src/animator/SkDisplayPost.cpp',
'../src/animator/SkDisplayPost.h',
'../src/animator/SkDisplayRandom.cpp',
'../src/animator/SkDisplayRandom.h',
'../src/animator/SkDisplayScreenplay.cpp',
'../src/animator/SkDisplayScreenplay.h',
'../src/animator/SkDisplayType.cpp',
'../src/animator/SkDisplayType.h',
'../src/animator/SkDisplayTypes.cpp',
'../src/animator/SkDisplayTypes.h',
'../src/animator/SkDisplayXMLParser.cpp',
'../src/animator/SkDisplayXMLParser.h',
'../src/animator/SkDraw3D.cpp',
'../src/animator/SkDraw3D.h',
'../src/animator/SkDrawable.cpp',
'../src/animator/SkDrawable.h',
'../src/animator/SkDrawBitmap.cpp',
'../src/animator/SkDrawBitmap.h',
'../src/animator/SkDrawBlur.cpp',
'../src/animator/SkDrawBlur.h',
'../src/animator/SkDrawClip.cpp',
'../src/animator/SkDrawClip.h',
'../src/animator/SkDrawColor.cpp',
'../src/animator/SkDrawColor.h',
'../src/animator/SkDrawDash.cpp',
'../src/animator/SkDrawDash.h',
'../src/animator/SkDrawDiscrete.cpp',
'../src/animator/SkDrawDiscrete.h',
'../src/animator/SkDrawEmboss.cpp',
'../src/animator/SkDrawEmboss.h',
'../src/animator/SkDrawExtraPathEffect.cpp',
'../src/animator/SkDrawFull.cpp',
'../src/animator/SkDrawFull.h',
'../src/animator/SkDrawGradient.cpp',
'../src/animator/SkDrawGradient.h',
'../src/animator/SkDrawGroup.cpp',
'../src/animator/SkDrawGroup.h',
'../src/animator/SkDrawLine.cpp',
'../src/animator/SkDrawLine.h',
'../src/animator/SkDrawMatrix.cpp',
'../src/animator/SkDrawMatrix.h',
'../src/animator/SkDrawOval.cpp',
'../src/animator/SkDrawOval.h',
'../src/animator/SkDrawPaint.cpp',
'../src/animator/SkDrawPaint.h',
'../src/animator/SkDrawPath.cpp',
'../src/animator/SkDrawPath.h',
'../src/animator/SkDrawPoint.cpp',
'../src/animator/SkDrawPoint.h',
'../src/animator/SkDrawRectangle.cpp',
'../src/animator/SkDrawRectangle.h',
'../src/animator/SkDrawSaveLayer.cpp',
'../src/animator/SkDrawSaveLayer.h',
'../src/animator/SkDrawShader.cpp',
'../src/animator/SkDrawShader.h',
'../src/animator/SkDrawText.cpp',
'../src/animator/SkDrawText.h',
'../src/animator/SkDrawTextBox.cpp',
'../src/animator/SkDrawTextBox.h',
'../src/animator/SkDrawTo.cpp',
'../src/animator/SkDrawTo.h',
'../src/animator/SkDrawTransparentShader.cpp',
'../src/animator/SkDrawTransparentShader.h',
'../src/animator/SkDump.cpp',
'../src/animator/SkDump.h',
'../src/animator/SkExtras.h',
'../src/animator/SkGetCondensedInfo.cpp',
'../src/animator/SkHitClear.cpp',
'../src/animator/SkHitClear.h',
'../src/animator/SkHitTest.cpp',
'../src/animator/SkHitTest.h',
'../src/animator/SkIntArray.h',
'../src/animator/SkMatrixParts.cpp',
'../src/animator/SkMatrixParts.h',
'../src/animator/SkMemberInfo.cpp',
'../src/animator/SkMemberInfo.h',
'../src/animator/SkOpArray.cpp',
'../src/animator/SkOpArray.h',
'../src/animator/SkOperand.h',
'../src/animator/SkOperand2.h',
'../src/animator/SkOperandInterpolator.h',
'../src/animator/SkOperandIterpolator.cpp',
'../src/animator/SkPaintParts.cpp',
'../src/animator/SkPaintParts.h',
'../src/animator/SkParseSVGPath.cpp',
'../src/animator/SkPathParts.cpp',
'../src/animator/SkPathParts.h',
'../src/animator/SkPostParts.cpp',
'../src/animator/SkPostParts.h',
'../src/animator/SkScript.cpp',
'../src/animator/SkScript.h',
'../src/animator/SkScript2.h',
'../src/animator/SkScriptCallBack.h',
'../src/animator/SkScriptDecompile.cpp',
'../src/animator/SkScriptRuntime.cpp',
'../src/animator/SkScriptRuntime.h',
'../src/animator/SkScriptTokenizer.cpp',
'../src/animator/SkSnapshot.cpp',
'../src/animator/SkSnapshot.h',
'../src/animator/SkTDArray_Experimental.h',
'../src/animator/SkTextOnPath.cpp',
'../src/animator/SkTextOnPath.h',
'../src/animator/SkTextToPath.cpp',
'../src/animator/SkTextToPath.h',
'../src/animator/SkTime.cpp',
'../src/animator/SkTypedArray.cpp',
'../src/animator/SkTypedArray.h',
'../src/animator/SkXMLAnimatorWriter.cpp',
'../src/animator/SkXMLAnimatorWriter.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/animator',
],
},
},
{
'target_name': 'svg',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/xml',
'../include/utils',
'../include/svg',
],
'sources': [
'../include/svg/SkSVGAttribute.h',
'../include/svg/SkSVGBase.h',
'../include/svg/SkSVGPaintState.h',
'../include/svg/SkSVGParser.h',
'../include/svg/SkSVGTypes.h',
'../src/svg/SkSVGCircle.cpp',
'../src/svg/SkSVGCircle.h',
'../src/svg/SkSVGClipPath.cpp',
'../src/svg/SkSVGClipPath.h',
'../src/svg/SkSVGDefs.cpp',
'../src/svg/SkSVGDefs.h',
'../src/svg/SkSVGElements.cpp',
'../src/svg/SkSVGElements.h',
'../src/svg/SkSVGEllipse.cpp',
'../src/svg/SkSVGEllipse.h',
'../src/svg/SkSVGFeColorMatrix.cpp',
'../src/svg/SkSVGFeColorMatrix.h',
'../src/svg/SkSVGFilter.cpp',
'../src/svg/SkSVGFilter.h',
'../src/svg/SkSVGG.cpp',
'../src/svg/SkSVGG.h',
'../src/svg/SkSVGGradient.cpp',
'../src/svg/SkSVGGradient.h',
'../src/svg/SkSVGGroup.cpp',
'../src/svg/SkSVGGroup.h',
'../src/svg/SkSVGImage.cpp',
'../src/svg/SkSVGImage.h',
'../src/svg/SkSVGLine.cpp',
'../src/svg/SkSVGLine.h',
'../src/svg/SkSVGLinearGradient.cpp',
'../src/svg/SkSVGLinearGradient.h',
'../src/svg/SkSVGMask.cpp',
'../src/svg/SkSVGMask.h',
'../src/svg/SkSVGMetadata.cpp',
'../src/svg/SkSVGMetadata.h',
'../src/svg/SkSVGPaintState.cpp',
'../src/svg/SkSVGParser.cpp',
'../src/svg/SkSVGPath.cpp',
'../src/svg/SkSVGPath.h',
'../src/svg/SkSVGPolygon.cpp',
'../src/svg/SkSVGPolygon.h',
'../src/svg/SkSVGPolyline.cpp',
'../src/svg/SkSVGPolyline.h',
'../src/svg/SkSVGRadialGradient.cpp',
'../src/svg/SkSVGRadialGradient.h',
'../src/svg/SkSVGRect.cpp',
'../src/svg/SkSVGRect.h',
'../src/svg/SkSVGStop.cpp',
'../src/svg/SkSVGStop.h',
'../src/svg/SkSVGSVG.cpp',
'../src/svg/SkSVGSVG.h',
'../src/svg/SkSVGSymbol.cpp',
'../src/svg/SkSVGSymbol.h',
'../src/svg/SkSVGText.cpp',
'../src/svg/SkSVGText.h',
'../src/svg/SkSVGUse.cpp',
],
'sources!' : [
'../src/svg/SkSVG.cpp', # doesn't compile, maybe this is test code?
],
'direct_dependent_settings': {
'include_dirs': [
'../include/svg',
],
},
},
{
'target_name': 'experimental',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
],
'sources': [
'../experimental/SkMatrix44.cpp',
'../experimental/SkMatrix44.h',
'../experimental/SkSetPoly3To3.cpp',
'../experimental/SkSetPoly3To3_A.cpp',
'../experimental/SkSetPoly3To3_D.cpp',
],
'sources!': [
'../experimental/SkMatrix44.cpp', #doesn't compile
'../experimental/SkMatrix44.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../experimental',
],
},
},
{
'target_name': 'SampleApp',
'type': 'executable',
'mac_bundle' : 1,
'include_dirs' : [
'../src/core', # needed to get SkConcaveToTriangle, maybe this should be moved to include dir?
'../gm', # SampleGM.cpp pulls gm.h
],
'sources': [
# gm files needed for SampleGM.cpp
'../gm/bitmapfilters.cpp',
'../gm/blurs.cpp',
'../gm/complexclip.cpp',
'../gm/filltypes.cpp',
'../gm/gm.h',
'../gm/gradients.cpp',
'../gm/nocolorbleed.cpp',
'../gm/points.cpp',
'../gm/poly2poly.cpp',
'../gm/shadertext.cpp',
'../gm/shadows.cpp',
'../gm/shapes.cpp',
'../gm/tilemodes.cpp',
'../gm/xfermodes.cpp',
'../samplecode/ClockFaceView.cpp',
'../samplecode/OverView.cpp',
'../samplecode/SampleAll.cpp',
'../samplecode/SampleAnimator.cpp',
'../samplecode/SampleApp.cpp',
'../samplecode/SampleArc.cpp',
'../samplecode/SampleAvoid.cpp',
'../samplecode/SampleBigGradient.cpp',
'../samplecode/SampleBitmapRect.cpp',
'../samplecode/SampleBlur.cpp',
'../samplecode/SampleCamera.cpp',
'../samplecode/SampleCircle.cpp',
'../samplecode/SampleCode.h',
'../samplecode/SampleColorFilter.cpp',
'../samplecode/SampleComplexClip.cpp',
'../samplecode/SampleCull.cpp',
'../samplecode/SampleDecode.cpp',
'../samplecode/SampleDither.cpp',
'../samplecode/SampleDitherBitmap.cpp',
'../samplecode/SampleDrawLooper.cpp',
'../samplecode/SampleEffects.cpp',
'../samplecode/SampleEmboss.cpp',
'../samplecode/SampleEncode.cpp',
'../samplecode/SampleExtractAlpha.cpp',
'../samplecode/SampleFillType.cpp',
'../samplecode/SampleFilter.cpp',
'../samplecode/SampleFilter2.cpp',
'../samplecode/SampleFontCache.cpp',
'../samplecode/SampleFontScalerTest.cpp',
'../samplecode/SampleFuzz.cpp',
'../samplecode/SampleGM.cpp',
'../samplecode/SampleGradients.cpp',
'../samplecode/SampleHairline.cpp',
'../samplecode/SampleImage.cpp',
'../samplecode/SampleImageDir.cpp',
'../samplecode/SampleLayerMask.cpp',
'../samplecode/SampleLayers.cpp',
'../samplecode/SampleLCD.cpp',
'../samplecode/SampleLineClipper.cpp',
'../samplecode/SampleLines.cpp',
'../samplecode/SampleMeasure.cpp',
'../samplecode/SampleMipMap.cpp',
'../samplecode/SampleMovie.cpp',
'../samplecode/SampleNinePatch.cpp',
'../samplecode/SampleOvalTest.cpp',
'../samplecode/SampleOverflow.cpp',
'../samplecode/SamplePageFlip.cpp',
'../samplecode/SamplePatch.cpp',
'../samplecode/SamplePath.cpp',
'../samplecode/SamplePathClip.cpp',
'../samplecode/SamplePathEffects.cpp',
'../samplecode/SamplePicture.cpp',
'../samplecode/SamplePoints.cpp',
'../samplecode/SamplePolyToPoly.cpp',
'../samplecode/SampleAARects.cpp',
'../samplecode/SampleRegion.cpp',
'../samplecode/SampleRepeatTile.cpp',
'../samplecode/SampleShaders.cpp',
'../samplecode/SampleShaderText.cpp',
'../samplecode/SampleShapes.cpp',
'../samplecode/SampleSkLayer.cpp',
'../samplecode/SampleSlides.cpp',
'../samplecode/SampleStrokePath.cpp',
'../samplecode/SampleStrokeText.cpp',
'../samplecode/SampleSVG.cpp',
'../samplecode/SampleTests.cpp',
'../samplecode/SampleText.cpp',
'../samplecode/SampleTextAlpha.cpp',
'../samplecode/SampleTextBox.cpp',
'../samplecode/SampleTextEffects.cpp',
'../samplecode/SampleTextOnPath.cpp',
'../samplecode/SampleTextureDomain.cpp',
'../samplecode/SampleTiling.cpp',
'../samplecode/SampleTinyBitmap.cpp',
'../samplecode/SampleTriangles.cpp',
'../samplecode/SampleTypeface.cpp',
'../samplecode/SampleUnitMapper.cpp',
'../samplecode/SampleVertices.cpp',
'../samplecode/SampleXfermodes.cpp',
],
'sources!': [
'../samplecode/SampleSkLayer.cpp', #relies on SkMatrix44 which doesn't compile
'../samplecode/SampleTests.cpp', #includes unknown file SkShaderExtras.h
'../samplecode/SampleWarp.cpp',
'../samplecode/SampleFontCache.cpp',
],
'dependencies': [
'skia',
'effects',
'images',
'views',
'utils',
'animator',
'xml',
'svg',
'experimental',
'gr',
'skgr',
],
'conditions' : [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
}],
[ 'OS == "win"', {
'sources!': [
# require UNIX functions
'../samplecode/SampleEncode.cpp',
'../samplecode/SamplePageFlip.cpp',
],
}],
[ 'OS == "mac"', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
}],
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2',
'AdditionalDependencies': [
'OpenGL32.lib',
'usp10.lib',
'd3d9.lib',
],
},
},
},
{
'target_name': 'libtess',
'type': 'static_library',
'include_dirs': [
'../third_party/glu',
],
'sources': [
'../third_party/glu/internal_glu.h',
'../third_party/glu/gluos.h',
'../third_party/glu/libtess/dict-list.h',
'../third_party/glu/libtess/dict.c',
'../third_party/glu/libtess/dict.h',
'../third_party/glu/libtess/geom.c',
'../third_party/glu/libtess/geom.h',
'../third_party/glu/libtess/memalloc.c',
'../third_party/glu/libtess/memalloc.h',
'../third_party/glu/libtess/mesh.c',
'../third_party/glu/libtess/mesh.h',
'../third_party/glu/libtess/normal.c',
'../third_party/glu/libtess/normal.h',
'../third_party/glu/libtess/priorityq-heap.h',
'../third_party/glu/libtess/priorityq-sort.h',
'../third_party/glu/libtess/priorityq.c',
'../third_party/glu/libtess/priorityq.h',
'../third_party/glu/libtess/render.c',
'../third_party/glu/libtess/render.h',
'../third_party/glu/libtess/sweep.c',
'../third_party/glu/libtess/sweep.h',
'../third_party/glu/libtess/tess.c',
'../third_party/glu/libtess/tess.h',
'../third_party/glu/libtess/tessmono.c',
'../third_party/glu/libtess/tessmono.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../third_party/glu',
],
},
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
Add unix and mesa GL files to gyp.
http://codereview.appspot.com/4545055/
{
'target_defaults': {
'configurations': {
'Debug': {
'defines': [
'SK_DEBUG',
'GR_DEBUG=1',
],
},
'Release': {
'defines': [
'SK_RELEASE',
'GR_RELEASE=1',
],
},
},
'conditions': [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'include_dirs' : [
'/usr/include/freetype2',
],
}],
[ 'OS == "mac"', {
'defines': [
'SK_BUILD_FOR_MAC',
],
}],
[ 'OS == "win"', {
'defines': [
'SK_BUILD_FOR_WIN32',
'SK_IGNORE_STDINT_DOT_H',
],
}],
[ 'OS == "linux"', {
'defines': [
'SK_SAMPLES_FOR_X',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "mac"', {
'defines': [
'SK_BUILD_FOR_MAC',
],
}],
[ 'OS == "win"', {
'defines': [
'SK_BUILD_FOR_WIN32',
],
}],
],
},
},
'targets': [
{
'target_name': 'skia',
'type': 'static_library',
'msvs_guid': 'B7760B5E-BFA8-486B-ACFD-49E3A6DE8E76',
'sources': [
'../src/core/ARGB32_Clamp_Bilinear_BitmapShader.h',
'../src/core/Sk64.cpp',
'../src/core/SkAdvancedTypefaceMetrics.cpp',
'../src/core/SkAlphaRuns.cpp',
'../src/core/SkAntiRun.h',
'../src/core/SkBitmap.cpp',
'../src/core/SkBitmapProcShader.cpp',
'../src/core/SkBitmapProcShader.h',
'../src/core/SkBitmapProcState.cpp',
'../src/core/SkBitmapProcState.h',
'../src/core/SkBitmapProcState_matrix.h',
'../src/core/SkBitmapProcState_matrixProcs.cpp',
'../src/core/SkBitmapProcState_sample.h',
'../src/core/SkBitmapSampler.cpp',
'../src/core/SkBitmapSampler.h',
'../src/core/SkBitmapSamplerTemplate.h',
'../src/core/SkBitmapShader16BilerpTemplate.h',
'../src/core/SkBitmapShaderTemplate.h',
'../src/core/SkBitmap_scroll.cpp',
'../src/core/SkBlitBWMaskTemplate.h',
'../src/core/SkBlitRow_D16.cpp',
'../src/core/SkBlitRow_D32.cpp',
'../src/core/SkBlitRow_D4444.cpp',
'../src/core/SkBlitter.cpp',
'../src/core/SkBlitter_4444.cpp',
'../src/core/SkBlitter_A1.cpp',
'../src/core/SkBlitter_A8.cpp',
'../src/core/SkBlitter_ARGB32.cpp',
'../src/core/SkBlitter_RGB16.cpp',
'../src/core/SkBlitter_Sprite.cpp',
'../src/core/SkBuffer.cpp',
'../src/core/SkCanvas.cpp',
'../src/core/SkChunkAlloc.cpp',
'../src/core/SkClampRange.cpp',
'../src/core/SkClipStack.cpp',
'../src/core/SkColor.cpp',
'../src/core/SkColorFilter.cpp',
'../src/core/SkColorTable.cpp',
'../src/core/SkComposeShader.cpp',
'../src/core/SkConcaveToTriangles.cpp',
'../src/core/SkConcaveToTriangles.h',
'../src/core/SkCordic.cpp',
'../src/core/SkCordic.h',
'../src/core/SkCoreBlitters.h',
'../src/core/SkCubicClipper.cpp',
'../src/core/SkCubicClipper.h',
'../src/core/SkDebug.cpp',
'../src/core/SkDeque.cpp',
'../src/core/SkDevice.cpp',
'../src/core/SkDither.cpp',
'../src/core/SkDraw.cpp',
'../src/core/SkDrawProcs.h',
'../src/core/SkEdgeBuilder.cpp',
'../src/core/SkEdgeClipper.cpp',
'../src/core/SkEdge.cpp',
'../src/core/SkEdge.h',
'../src/core/SkFP.h',
'../src/core/SkFilterProc.cpp',
'../src/core/SkFilterProc.h',
'../src/core/SkFlattenable.cpp',
'../src/core/SkFloat.cpp',
'../src/core/SkFloat.h',
'../src/core/SkFloatBits.cpp',
'../src/core/SkFontHost.cpp',
'../src/core/SkGeometry.cpp',
'../src/core/SkGlobals.cpp',
'../src/core/SkGlyphCache.cpp',
'../src/core/SkGlyphCache.h',
'../src/core/SkGraphics.cpp',
'../src/core/SkLineClipper.cpp',
'../src/core/SkMallocPixelRef.cpp',
'../src/core/SkMask.cpp',
'../src/core/SkMaskFilter.cpp',
'../src/core/SkMath.cpp',
'../src/core/SkMatrix.cpp',
'../src/core/SkMetaData.cpp',
'../src/core/SkPackBits.cpp',
'../src/core/SkPaint.cpp',
'../src/core/SkPath.cpp',
'../src/core/SkPathEffect.cpp',
'../src/core/SkPathHeap.cpp',
'../src/core/SkPathHeap.h',
'../src/core/SkPathMeasure.cpp',
'../src/core/SkPicture.cpp',
'../src/core/SkPictureFlat.cpp',
'../src/core/SkPictureFlat.h',
'../src/core/SkPicturePlayback.cpp',
'../src/core/SkPicturePlayback.h',
'../src/core/SkPictureRecord.cpp',
'../src/core/SkPictureRecord.h',
'../src/core/SkPixelRef.cpp',
'../src/core/SkPoint.cpp',
'../src/core/SkProcSpriteBlitter.cpp',
'../src/core/SkPtrRecorder.cpp',
'../src/core/SkQuadClipper.cpp',
'../src/core/SkQuadClipper.h',
'../src/core/SkRasterizer.cpp',
'../src/core/SkRect.cpp',
'../src/core/SkRefDict.cpp',
'../src/core/SkRegion.cpp',
'../src/core/SkRegionPriv.h',
'../src/core/SkRegion_path.cpp',
'../src/core/SkScalar.cpp',
'../src/core/SkScalerContext.cpp',
'../src/core/SkScan.cpp',
'../src/core/SkScanPriv.h',
'../src/core/SkScan_AntiPath.cpp',
'../src/core/SkScan_Antihair.cpp',
'../src/core/SkScan_Hairline.cpp',
'../src/core/SkScan_Path.cpp',
'../src/core/SkShader.cpp',
'../src/core/SkShape.cpp',
'../src/core/SkSpriteBlitter_ARGB32.cpp',
'../src/core/SkSpriteBlitter_RGB16.cpp',
'../src/core/SkSinTable.h',
'../src/core/SkSpriteBlitter.h',
'../src/core/SkSpriteBlitterTemplate.h',
'../src/core/SkStream.cpp',
'../src/core/SkString.cpp',
'../src/core/SkStroke.cpp',
'../src/core/SkStrokerPriv.cpp',
'../src/core/SkStrokerPriv.h',
'../src/core/SkTextFormatParams.h',
'../src/core/SkTSearch.cpp',
'../src/core/SkTSort.h',
'../src/core/SkTemplatesPriv.h',
'../src/core/SkTypeface.cpp',
'../src/core/SkTypefaceCache.cpp',
'../src/core/SkTypefaceCache.h',
'../src/core/SkUnPreMultiply.cpp',
'../src/core/SkUtils.cpp',
'../src/core/SkWriter32.cpp',
'../src/core/SkXfermode.cpp',
'../src/opts/opts_check_SSE2.cpp',
'../src/ports/SkDebug_stdio.cpp',
'../src/ports/SkDebug_win.cpp',
'../src/ports/SkFontHost_tables.cpp',
'../src/ports/SkGlobals_global.cpp',
'../src/ports/SkMemory_malloc.cpp',
'../src/ports/SkOSFile_stdio.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/ports/SkXMLParser_empty.cpp',
'../src/ports/sk_predefined_gamma.h',
'../include/core/Sk64.h',
'../include/core/SkAdvancedTypefaceMetrics.h',
'../include/core/SkAutoKern.h',
'../include/core/SkBitmap.h',
'../include/core/SkBlitRow.h',
'../include/core/SkBlitter.h',
'../include/core/SkBounder.h',
'../include/core/SkBuffer.h',
'../include/core/SkCanvas.h',
'../include/core/SkChunkAlloc.h',
'../include/core/SkClampRange.h',
'../include/core/SkClipStack.h',
'../include/core/SkColor.h',
'../include/core/SkColorFilter.h',
'../include/core/SkColorPriv.h',
'../include/core/SkColorShader.h',
'../include/core/SkComposeShader.h',
'../include/core/SkDeque.h',
'../include/core/SkDescriptor.h',
'../include/core/SkDevice.h',
'../include/core/SkDither.h',
'../include/core/SkDraw.h',
'../include/core/SkDrawFilter.h',
'../include/core/SkDrawLooper.h',
'../include/core/SkEndian.h',
'../include/core/SkFDot6.h',
'../include/core/SkFixed.h',
'../include/core/SkFlattenable.h',
'../include/core/SkFloatBits.h',
'../include/core/SkFloatingPoint.h',
'../include/core/SkFontHost.h',
'../include/core/SkGeometry.h',
'../include/core/SkGlobals.h',
'../include/core/SkGraphics.h',
'../include/core/SkMallocPixelRef.h',
'../include/core/SkMask.h',
'../include/core/SkMaskFilter.h',
'../include/core/SkMath.h',
'../include/core/SkMatrix.h',
'../include/core/SkMetaData.h',
'../include/core/SkOSFile.h',
'../include/core/SkPackBits.h',
'../include/core/SkPaint.h',
'../include/core/SkPath.h',
'../include/core/SkPathEffect.h',
'../include/core/SkPathMeasure.h',
'../include/core/SkPerspIter.h',
'../include/core/SkPicture.h',
'../include/core/SkPixelRef.h',
'../include/core/SkPoint.h',
'../include/core/SkPtrRecorder.h',
'../include/core/SkRandom.h',
'../include/core/SkRasterizer.h',
'../include/core/SkReader32.h',
'../include/core/SkRect.h',
'../include/core/SkRefCnt.h',
'../include/core/SkRefDict.h',
'../include/core/SkRegion.h',
'../include/core/SkScalar.h',
'../include/core/SkScalarCompare.h',
'../include/core/SkScalerContext.h',
'../include/core/SkScan.h',
'../include/core/SkShader.h',
'../include/core/SkStream.h',
'../include/core/SkString.h',
'../include/core/SkStroke.h',
'../include/core/SkTDArray.h',
'../include/core/SkTDStack.h',
'../include/core/SkTDict.h',
'../include/core/SkTRegistry.h',
'../include/core/SkTScopedPtr.h',
'../include/core/SkTSearch.h',
'../include/core/SkTemplates.h',
'../include/core/SkThread.h',
'../include/core/SkThread_platform.h',
'../include/core/SkTime.h',
'../include/core/SkTypeface.h',
'../include/core/SkTypes.h',
'../include/core/SkUnPreMultiply.h',
'../include/core/SkUnitMapper.h',
'../include/core/SkUtils.h',
'../include/core/SkWriter32.h',
'../include/core/SkXfermode.h',
],
'include_dirs': [
'../include/config',
'../include/core',
'../include/ports',
'../include/xml',
'../src/core',
],
'msvs_disabled_warnings': [4244, 4267,4345, 4390, 4554, 4800],
'conditions': [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'cflags': [
'-Wno-unused',
'-Wno-unused-function',
],
'sources': [
'../include/core/SkMMapStream.h',
'../src/core/SkMMapStream.cpp',
'../src/core/SkBlitter_ARGB32_Subpixel.cpp',
'../src/ports/SkThread_pthread.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/ports/SkFontHost_FreeType_Subpixel.cpp',
'../src/ports/SkFontHost_FreeType.cpp',
'../src/ports/SkFontHost_gamma_none.cpp',
'../src/ports/SkFontHost_linux.cpp',
],
'link_settings': {
'libraries': [
'-lfreetype',
'-lpthread',
],
},
}],
[ 'OS == "mac"', {
'include_dirs': [
'../include/utils/mac',
],
'sources': [
'../include/core/SkMMapStream.h',
'../include/utils/mac/SkCGUtils.h',
'../src/core/SkMMapStream.cpp',
'../src/ports/SkFontHost_mac_coretext.cpp',
'../src/ports/SkThread_pthread.cpp',
'../src/ports/SkTime_Unix.cpp',
'../src/utils/mac/SkCreateCGImageRef.cpp',
],
}],
[ 'OS == "win"', {
'include_dirs': [
'config/win',
],
'sources': [
'../src/ports/SkFontHost_win.cpp',
'../src/ports/SkThread_win.cpp',
],
'sources!': [
'../src/ports/SkDebug_stdio.cpp',
],
}],
[ 'OS != "win"', {
'sources!': [
'../src/ports/SkDebug_win.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'config',
'../include/config',
'../include/core',
'ext',
],
},
'dependencies': [
'skia_opts'
],
},
# Due to an unfortunate intersection of lameness between gcc and gyp,
# we have to build the *_SSE2.cpp files in a separate target. The
# gcc lameness is that, in order to compile SSE2 intrinsics code, it
# must be passed the -msse2 flag. However, with this flag, it may
# emit SSE2 instructions even for scalar code, such as the CPUID
# test used to test for the presence of SSE2. So that, and all other
# code must be compiled *without* -msse2. The gyp lameness is that it
# does not allow file-specific CFLAGS, so we must create this extra
# target for those files to be compiled with -msse2.
#
# This is actually only a problem on 32-bit Linux (all Intel Macs have
# SSE2, Linux x86_64 has SSE2 by definition, and MSC will happily emit
# SSE2 from intrinsics, while generating plain ol' 386 for everything
# else). However, to keep the .gyp file simple and avoid platform-specific
# build breakage, we do this on all platforms.
# For about the same reason, we need to compile the ARM opts files
# separately as well.
{
'target_name': 'skia_opts',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../src/core',
],
'conditions': [
[ '(OS == "linux" or OS == "freebsd" or OS == "openbsd")', {
'cflags': [
'-msse2',
],
}],
],
'sources': [
'../src/opts/SkBitmapProcState_opts_SSE2.cpp',
'../src/opts/SkBlitRow_opts_SSE2.cpp',
'../src/opts/SkUtils_opts_SSE2.cpp',
],
},
{
'target_name': 'effects',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/effects',
],
'sources': [
'../include/effects/Sk1DPathEffect.h',
'../include/effects/Sk2DPathEffect.h',
'../include/effects/SkAvoidXfermode.h',
'../include/effects/SkBlurDrawLooper.h',
'../include/effects/SkBlurMaskFilter.h',
'../include/effects/SkColorMatrix.h',
'../include/effects/SkColorMatrixFilter.h',
'../include/effects/SkCornerPathEffect.h',
'../include/effects/SkDashPathEffect.h',
'../include/effects/SkDiscretePathEffect.h',
'../include/effects/SkDrawExtraPathEffect.h',
'../include/effects/SkEmbossMaskFilter.h',
'../include/effects/SkGradientShader.h',
'../include/effects/SkGroupShape.h',
'../include/effects/SkKernel33MaskFilter.h',
'../include/effects/SkLayerDrawLooper.h',
'../include/effects/SkLayerRasterizer.h',
'../include/effects/SkPaintFlagsDrawFilter.h',
'../include/effects/SkPixelXorXfermode.h',
'../include/effects/SkPorterDuff.h',
'../include/effects/SkRectShape.h',
'../include/effects/SkTableMaskFilter.h',
'../include/effects/SkTransparentShader.h',
'../src/effects/Sk1DPathEffect.cpp',
'../src/effects/Sk2DPathEffect.cpp',
'../src/effects/SkAvoidXfermode.cpp',
'../src/effects/SkBitmapCache.cpp',
'../src/effects/SkBitmapCache.h',
'../src/effects/SkBlurDrawLooper.cpp',
'../src/effects/SkBlurMask.cpp',
'../src/effects/SkBlurMask.h',
'../src/effects/SkBlurMaskFilter.cpp',
'../src/effects/SkColorFilters.cpp',
'../src/effects/SkColorMatrixFilter.cpp',
'../src/effects/SkCornerPathEffect.cpp',
'../src/effects/SkDashPathEffect.cpp',
'../src/effects/SkDiscretePathEffect.cpp',
'../src/effects/SkEmbossMask.cpp',
'../src/effects/SkEmbossMask.h',
'../src/effects/SkEmbossMask_Table.h',
'../src/effects/SkEmbossMaskFilter.cpp',
'../src/effects/SkGradientShader.cpp',
'../src/effects/SkGroupShape.cpp',
'../src/effects/SkKernel33MaskFilter.cpp',
'../src/effects/SkLayerDrawLooper.cpp',
'../src/effects/SkLayerRasterizer.cpp',
'../src/effects/SkPaintFlagsDrawFilter.cpp',
'../src/effects/SkPixelXorXfermode.cpp',
'../src/effects/SkPorterDuff.cpp',
'../src/effects/SkRadialGradient_Table.h',
'../src/effects/SkRectShape.cpp',
'../src/effects/SkTableMaskFilter.cpp',
'../src/effects/SkTransparentShader.cpp',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/effects',
],
},
},
{
'target_name': 'images',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/images',
],
'sources': [
'../include/images/SkFlipPixelRef.h',
'../include/images/SkImageDecoder.h',
'../include/images/SkImageEncoder.h',
'../include/images/SkImageRef.h',
'../include/images/SkImageRef_GlobalPool.h',
'../include/images/SkJpegUtility.h',
'../include/images/SkMovie.h',
'../include/images/SkPageFlipper.h',
'../src/images/bmpdecoderhelper.cpp',
'../src/images/bmpdecoderhelper.h',
'../src/images/SkBitmap_RLEPixels.h',
'../src/images/SkCreateRLEPixelRef.cpp',
'../src/images/SkFDStream.cpp',
'../src/images/SkFlipPixelRef.cpp',
'../src/images/SkImageDecoder.cpp',
'../src/images/SkImageDecoder_Factory.cpp',
'../src/images/SkImageDecoder_libbmp.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libico.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkImageDecoder_wbmp.cpp',
'../src/images/SkImageEncoder.cpp',
'../src/images/SkImageEncoder_Factory.cpp',
'../src/images/SkImageRef.cpp',
'../src/images/SkImageRefPool.cpp',
'../src/images/SkImageRefPool.h',
'../src/images/SkImageRef_GlobalPool.cpp',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie.cpp',
'../src/images/SkMovie_gif.cpp',
'../src/images/SkPageFlipper.cpp',
'../src/images/SkScaledBitmapSampler.cpp',
'../src/images/SkScaledBitmapSampler.h',
],
'conditions': [
[ 'OS == "win"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkFDStream.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
[ 'OS == "mac"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libpng.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
'../include/images/SkJpegUtility.h',
'../src/images/SkImageDecoder_libjpeg.cpp',
'../src/images/SkImageDecoder_libgif.cpp',
'../src/images/SkImageDecoder_libpvjpeg.c',
'../src/images/SkJpegUtility.cpp',
'../src/images/SkMovie_gif.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/images',
],
},
},
{
'target_name': 'xml',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/xml',
'../include/utils',
],
'sources': [
'../include/xml/SkBML_WXMLParser.h',
'../include/xml/SkBML_XMLParser.h',
'../include/xml/SkDOM.h',
'../include/xml/SkJS.h',
'../include/xml/SkXMLParser.h',
'../include/xml/SkXMLWriter.h',
'../src/xml/SkBML_Verbs.h',
'../src/xml/SkBML_XMLParser.cpp',
'../src/xml/SkDOM.cpp',
'../src/xml/SkJS.cpp',
'../src/xml/SkJSDisplayable.cpp',
'../src/xml/SkXMLParser.cpp',
'../src/xml/SkXMLPullParser.cpp',
'../src/xml/SkXMLWriter.cpp',
],
'sources!': [
'../src/xml/SkXMLPullParser.cpp', #if 0 around class decl in header
],
'conditions': [
[ 'OS == "win" or OS == "mac" or OS == "linux" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
# no jsapi.h by default on system
'../include/xml/SkJS.h',
'../src/xml/SkJS.cpp',
'../src/xml/SkJSDisplayable.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/xml',
],
},
},
{
'target_name': 'pdf',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/pdf',
'../src/core', # needed to get SkGlyphCache.h and SkTextFormatParams.h
],
'sources': [
'../include/pdf/SkPDFCatalog.h',
'../include/pdf/SkPDFDevice.h',
'../include/pdf/SkPDFDocument.h',
'../include/pdf/SkPDFFont.h',
'../include/pdf/SkPDFFormXObject.h',
'../include/pdf/SkPDFGraphicState.h',
'../include/pdf/SkPDFImage.h',
'../include/pdf/SkPDFPage.h',
'../include/pdf/SkPDFShader.h',
'../include/pdf/SkPDFStream.h',
'../include/pdf/SkPDFTypes.h',
'../include/pdf/SkPDFUtils.h',
'../src/pdf/SkPDFCatalog.cpp',
'../src/pdf/SkPDFDevice.cpp',
'../src/pdf/SkPDFDocument.cpp',
'../src/pdf/SkPDFFont.cpp',
'../src/pdf/SkPDFFormXObject.cpp',
'../src/pdf/SkPDFGraphicState.cpp',
'../src/pdf/SkPDFImage.cpp',
'../src/pdf/SkPDFPage.cpp',
'../src/pdf/SkPDFShader.cpp',
'../src/pdf/SkPDFStream.cpp',
'../src/pdf/SkPDFTypes.cpp',
'../src/pdf/SkPDFUtils.cpp',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/pdf',
],
},
},
{
'target_name': 'utils',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/utils',
'../include/views',
'../include/effects',
'../include/xml',
],
'sources': [
'../include/utils/SkBoundaryPatch.h',
'../include/utils/SkCamera.h',
'../include/utils/SkCubicInterval.h',
'../include/utils/SkCullPoints.h',
'../include/utils/SkDumpCanvas.h',
'../include/utils/SkEGLContext.h',
'../include/utils/SkGLCanvas.h',
'../include/utils/SkInterpolator.h',
'../include/utils/SkLayer.h',
'../include/utils/SkMeshUtils.h',
'../include/utils/SkNinePatch.h',
'../include/utils/SkNWayCanvas.h',
'../include/utils/SkParse.h',
'../include/utils/SkParsePaint.h',
'../include/utils/SkParsePath.h',
'../include/utils/SkProxyCanvas.h',
'../include/utils/SkSfntUtils.h',
'../include/utils/SkTextBox.h',
'../include/utils/SkUnitMappers.h',
'../src/utils/SkBoundaryPatch.cpp',
'../src/utils/SkCamera.cpp',
'../src/utils/SkColorMatrix.cpp',
'../src/utils/SkCubicInterval.cpp',
'../src/utils/SkCullPoints.cpp',
'../src/utils/SkDumpCanvas.cpp',
'../src/utils/SkEGLContext_none.cpp',
'../src/utils/SkInterpolator.cpp',
'../src/utils/SkLayer.cpp',
'../src/utils/SkMeshUtils.cpp',
'../src/utils/SkNinePatch.cpp',
'../src/utils/SkNWayCanvas.cpp',
'../src/utils/SkOSFile.cpp',
'../src/utils/SkParse.cpp',
'../src/utils/SkParseColor.cpp',
'../src/utils/SkParsePath.cpp',
'../src/utils/SkProxyCanvas.cpp',
'../src/utils/SkSfntUtils.cpp',
'../src/utils/SkUnitMappers.cpp',
'../src/utils/mac/SkEGLContext_Mesa.cpp',
],
'sources!': [
'../src/utils/mac/SkEGLContext_Mesa.cpp',
],
'conditions': [
[ 'OS == "mac"', {
'sources': [
'../include/utils/SkCGUtils.h',
'../src/utils/mac/SkCreateCGImageRef.cpp',
'../src/utils/mac/SkEGLContext_mac.cpp',
],
}],
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'sources': [
'../src/utils/unix/SkEGLContext_Unix.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/utils',
],
},
},
{
'target_name': 'views',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/views',
'../include/xml',
'../include/utils',
'../include/images',
'../include/animator',
'../include/effects',
],
'sources': [
'../include/views/SkApplication.h',
'../include/views/SkBGViewArtist.h',
'../include/views/SkBorderView.h',
'../include/views/SkEvent.h',
'../include/views/SkEventSink.h',
'../include/views/SkImageView.h',
'../include/views/SkKey.h',
'../include/views/SkOSMenu.h',
'../include/views/SkOSWindow_Mac.h',
'../include/views/SkOSWindow_SDL.h',
'../include/views/SkOSWindow_Unix.h',
'../include/views/SkOSWindow_Win.h',
#'../include/views/SkOSWindow_wxwidgets.h',
'../include/views/SkProgressBarView.h',
'../include/views/SkScrollBarView.h',
'../include/views/SkStackViewLayout.h',
'../include/views/SkSystemEventTypes.h',
'../include/views/SkTouchGesture.h',
'../include/views/SkView.h',
'../include/views/SkViewInflate.h',
'../include/views/SkWidget.h',
'../include/views/SkWidgetViews.h',
'../include/views/SkWindow.h',
'../src/views/SkBGViewArtist.cpp',
'../src/views/SkBorderView.cpp',
'../src/views/SkEvent.cpp',
'../src/views/SkEventSink.cpp',
'../src/views/SkImageView.cpp',
'../src/views/SkListView.cpp',
'../src/views/SkListWidget.cpp',
'../src/views/SkOSMenu.cpp',
'../src/views/SkParsePaint.cpp',
'../src/views/SkProgressBarView.cpp',
'../src/views/SkProgressView.cpp',
'../src/views/SkScrollBarView.cpp',
'../src/views/SkStackViewLayout.cpp',
'../src/views/SkStaticTextView.cpp',
'../src/views/SkTagList.cpp',
'../src/views/SkTagList.h',
'../src/views/SkTextBox.cpp',
'../src/views/SkTouchGesture.cpp',
'../src/views/SkView.cpp',
'../src/views/SkViewInflate.cpp',
'../src/views/SkViewPriv.cpp',
'../src/views/SkViewPriv.h',
'../src/views/SkWidget.cpp',
'../src/views/SkWidgets.cpp',
'../src/views/SkWidgetViews.cpp',
'../src/views/SkWindow.cpp',
],
'sources!' : [
'../src/views/SkListView.cpp', #depends on missing SkListSource implementation
'../src/views/SkListWidget.cpp', #depends on missing SkListSource implementation
],
'conditions': [
[ 'OS == "win"', {
'sources': [
'../src/utils/win/SkOSWindow_Win.cpp',
'../src/utils/win/skia_win.cpp',
],
}],
[ 'OS == "mac"', {
'sources': [
'../include/utils/SkCGUtils.h',
#'../src/utils/mac/SkBitmap_Mac.cpp',
'../src/utils/mac/SkCreateCGImageRef.cpp',
'../src/utils/mac/SkEGLContext_mac.cpp',
'../src/utils/mac/skia_mac.cpp',
'../src/utils/mac/SkOSWindow_Mac.cpp',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
'$(SDKROOT)/System/Library/Frameworks/AGL.framework',
],
},
}],
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'include_dirs' : [
'../include/utils/unix',
],
'sources': [
'../src/utils/unix/keysym2ucs.c',
'../src/utils/unix/SkOSWindow_Unix.cpp',
'../unix_test_app/main.cpp',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'../include/views',
],
},
},
{
'target_name': 'skgr',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../src/core',
'../include/gpu',
'../gpu/include',
],
'sources': [
'../include/gpu/SkGpuCanvas.h',
'../include/gpu/SkGpuDevice.h',
'../include/gpu/SkGpuDeviceFactory.h',
'../include/gpu/SkGr.h',
'../include/gpu/SkGrTexturePixelRef.h',
'../src/gpu/GrPrintf_skia.cpp',
'../src/gpu/SkGpuCanvas.cpp',
'../src/gpu/SkGpuDevice.cpp',
'../src/gpu/SkGr.cpp',
'../src/gpu/SkGrFontScaler.cpp',
'../src/gpu/SkGrTexturePixelRef.cpp',
],
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
],
}],
],
'include_dirs': [
'../include/gpu',
],
},
},
{
'target_name': 'gr',
'type': 'static_library',
'include_dirs': [
'../gpu/include',
'../include/core',
'../include/config',
],
'dependencies': [
'libtess',
],
'sources': [
'../gpu/include/GrAllocator.h',
'../gpu/include/GrAllocPool.h',
'../gpu/include/GrAtlas.h',
'../gpu/include/GrClip.h',
'../gpu/include/GrClipIterator.h',
'../gpu/include/GrColor.h',
'../gpu/include/GrConfig.h',
'../gpu/include/GrContext.h',
'../gpu/include/GrContext_impl.h',
'../gpu/include/GrDrawTarget.h',
'../gpu/include/GrFontScaler.h',
'../gpu/include/GrGeometryBuffer.h',
'../gpu/include/GrGLConfig.h',
'../gpu/include/GrGLConfig_chrome.h',
'../gpu/include/GrGLIndexBuffer.h',
'../gpu/include/GrGLInterface.h',
'../gpu/include/GrGLIRect.h',
'../gpu/include/GrGLTexture.h',
'../gpu/include/GrGLVertexBuffer.h',
'../gpu/include/GrGlyph.h',
'../gpu/include/GrGpu.h',
'../gpu/include/GrGpuVertex.h',
'../gpu/include/GrIndexBuffer.h',
'../gpu/include/GrInOrderDrawBuffer.h',
'../gpu/include/GrInstanceCounter.h',
'../gpu/include/GrIPoint.h',
'../gpu/include/GrKey.h',
'../gpu/include/GrMatrix.h',
'../gpu/include/GrMemory.h',
'../gpu/include/GrMesh.h',
'../gpu/include/GrNoncopyable.h',
'../gpu/include/GrPaint.h',
'../gpu/include/GrPath.h',
'../gpu/include/GrPathRenderer.h',
'../gpu/include/GrPathSink.h',
'../gpu/include/GrPlotMgr.h',
'../gpu/include/GrPoint.h',
'../gpu/include/GrRandom.h',
'../gpu/include/GrRect.h',
'../gpu/include/GrRectanizer.h',
'../gpu/include/GrRefCnt.h',
'../gpu/include/GrResource.h',
'../gpu/include/GrSamplerState.h',
'../gpu/include/GrScalar.h',
'../gpu/include/GrStencil.h',
'../gpu/include/GrStopwatch.h',
'../gpu/include/GrStringBuilder.h',
'../gpu/include/GrTArray.h',
'../gpu/include/GrTBSearch.h',
'../gpu/include/GrTDArray.h',
'../gpu/include/GrTesselatedPathRenderer.h',
'../gpu/include/GrTextContext.h',
'../gpu/include/GrTextStrike.h',
'../gpu/include/GrTexture.h',
'../gpu/include/GrTextureCache.h',
'../gpu/include/GrTHashCache.h',
'../gpu/include/GrTLList.h',
'../gpu/include/GrTouchGesture.h',
'../gpu/include/GrTypes.h',
'../gpu/include/GrUserConfig.h',
'../gpu/include/GrVertexBuffer.h',
'../gpu/src/GrAllocPool.cpp',
'../gpu/src/GrAtlas.cpp',
'../gpu/src/GrBinHashKey.h',
'../gpu/src/GrBufferAllocPool.cpp',
'../gpu/src/GrBufferAllocPool.h',
'../gpu/src/GrClip.cpp',
'../gpu/src/GrContext.cpp',
'../gpu/src/GrCreatePathRenderer_none.cpp',
'../gpu/src/GrDrawTarget.cpp',
'../gpu/src/GrGLDefaultInterface_none.cpp',
'../gpu/src/GrGLIndexBuffer.cpp',
'../gpu/src/GrGLInterface.cpp',
'../gpu/src/GrGLProgram.cpp',
'../gpu/src/GrGLProgram.h',
'../gpu/src/GrGLTexture.cpp',
'../gpu/src/GrGLUtil.cpp',
'../gpu/src/GrGLVertexBuffer.cpp',
'../gpu/src/GrGpu.cpp',
'../gpu/src/GrGpuFactory.cpp',
'../gpu/src/GrGpuGL.cpp',
'../gpu/src/GrGpuGL.h',
'../gpu/src/GrGpuGLFixed.cpp',
'../gpu/src/GrGpuGLFixed.h',
'../gpu/src/GrGpuGLShaders.cpp',
'../gpu/src/GrGpuGLShaders.h',
'../gpu/src/GrInOrderDrawBuffer.cpp',
'../gpu/src/GrMatrix.cpp',
'../gpu/src/GrMemory.cpp',
'../gpu/src/GrPathRenderer.cpp',
'../gpu/src/GrPathUtils.cpp',
'../gpu/src/GrPathUtils.h',
'../gpu/src/GrRectanizer.cpp',
'../gpu/src/GrRedBlackTree.h',
'../gpu/src/GrResource.cpp',
'../gpu/src/GrStencil.cpp',
'../gpu/src/GrTesselatedPathRenderer.cpp',
'../gpu/src/GrTextContext.cpp',
'../gpu/src/GrTextStrike.cpp',
'../gpu/src/GrTextStrike_impl.h',
'../gpu/src/GrTexture.cpp',
'../gpu/src/GrTextureCache.cpp',
'../gpu/src/gr_unittests.cpp',
'../gpu/src/mac/GrGLDefaultInterface_mac.cpp',
'../gpu/src/win/GrGLDefaultInterface_win.cpp',
'../gpu/src/unix/GrGLDefaultInterface_unix.cpp',
'../gpu/src/mesa/GrGLDefaultInterface_mesa.cpp',
],
'sources!': [
'../gpu/src/mesa/GrGLDefaultInterface_mesa.cpp',
],
'defines': [
'GR_IMPLEMENTATION=1',
],
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
'link_settings': {
'libraries': [
'-lGL',
'-lX11',
],
},
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
},
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
'GR_GL_FUNCTION_TYPE=__stdcall',
],
'sources!': [
'../gpu/src/GrGLDefaultInterface_none.cpp',
],
}],
[ 'OS != "win"', {
'sources!': [
'../gpu/src/win/GrGLDefaultInterface_win.cpp',
],
}],
[ 'OS != "mac"', {
'sources!': [
'../gpu/src/mac/GrGLDefaultInterface_mac.cpp',
],
}],
[ 'OS != "linux"', {
'sources!': [
'../gpu/src/unix/GrGLDefaultInterface_unix.cpp',
],
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'OS == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'OS == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'OS == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
'GR_GL_FUNCTION_TYPE=__stdcall',
],
}],
],
'include_dirs': [
'../gpu/include',
],
},
},
{
'target_name': 'animator',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/effects',
'../include/animator',
'../include/views',
'../include/xml',
'../include/utils',
'../include/images',
],
'sources': [
'../include/animator/SkAnimator.h',
'../include/animator/SkAnimatorView.h',
'../src/animator/SkAnimate.h',
'../src/animator/SkAnimateActive.cpp',
'../src/animator/SkAnimateActive.h',
'../src/animator/SkAnimateBase.cpp',
'../src/animator/SkAnimateBase.h',
'../src/animator/SkAnimateField.cpp',
'../src/animator/SkAnimateMaker.cpp',
'../src/animator/SkAnimateMaker.h',
'../src/animator/SkAnimateProperties.h',
'../src/animator/SkAnimateSet.cpp',
'../src/animator/SkAnimateSet.h',
'../src/animator/SkAnimator.cpp',
'../src/animator/SkAnimatorScript.cpp',
'../src/animator/SkAnimatorScript.h',
#'../src/animator/SkAnimatorScript2.cpp', fails on windows
#'../src/animator/SkAnimatorScript2.h',
'../src/animator/SkBase64.cpp',
'../src/animator/SkBase64.h',
'../src/animator/SkBoundable.cpp',
'../src/animator/SkBoundable.h',
'../src/animator/SkBuildCondensedInfo.cpp',
#'../src/animator/SkCondensedDebug.cpp', fails on windows
#'../src/animator/SkCondensedRelease.cpp',
'../src/animator/SkDisplayable.cpp',
'../src/animator/SkDisplayable.h',
'../src/animator/SkDisplayAdd.cpp',
'../src/animator/SkDisplayAdd.h',
'../src/animator/SkDisplayApply.cpp',
'../src/animator/SkDisplayApply.h',
'../src/animator/SkDisplayBounds.cpp',
'../src/animator/SkDisplayBounds.h',
'../src/animator/SkDisplayEvent.cpp',
'../src/animator/SkDisplayEvent.h',
'../src/animator/SkDisplayEvents.cpp',
'../src/animator/SkDisplayEvents.h',
'../src/animator/SkDisplayInclude.cpp',
'../src/animator/SkDisplayInclude.h',
'../src/animator/SkDisplayInput.cpp',
'../src/animator/SkDisplayInput.h',
'../src/animator/SkDisplayList.cpp',
'../src/animator/SkDisplayList.h',
'../src/animator/SkDisplayMath.cpp',
'../src/animator/SkDisplayMath.h',
'../src/animator/SkDisplayMovie.cpp',
'../src/animator/SkDisplayMovie.h',
'../src/animator/SkDisplayNumber.cpp',
'../src/animator/SkDisplayNumber.h',
'../src/animator/SkDisplayPost.cpp',
'../src/animator/SkDisplayPost.h',
'../src/animator/SkDisplayRandom.cpp',
'../src/animator/SkDisplayRandom.h',
'../src/animator/SkDisplayScreenplay.cpp',
'../src/animator/SkDisplayScreenplay.h',
'../src/animator/SkDisplayType.cpp',
'../src/animator/SkDisplayType.h',
'../src/animator/SkDisplayTypes.cpp',
'../src/animator/SkDisplayTypes.h',
'../src/animator/SkDisplayXMLParser.cpp',
'../src/animator/SkDisplayXMLParser.h',
'../src/animator/SkDraw3D.cpp',
'../src/animator/SkDraw3D.h',
'../src/animator/SkDrawable.cpp',
'../src/animator/SkDrawable.h',
'../src/animator/SkDrawBitmap.cpp',
'../src/animator/SkDrawBitmap.h',
'../src/animator/SkDrawBlur.cpp',
'../src/animator/SkDrawBlur.h',
'../src/animator/SkDrawClip.cpp',
'../src/animator/SkDrawClip.h',
'../src/animator/SkDrawColor.cpp',
'../src/animator/SkDrawColor.h',
'../src/animator/SkDrawDash.cpp',
'../src/animator/SkDrawDash.h',
'../src/animator/SkDrawDiscrete.cpp',
'../src/animator/SkDrawDiscrete.h',
'../src/animator/SkDrawEmboss.cpp',
'../src/animator/SkDrawEmboss.h',
'../src/animator/SkDrawExtraPathEffect.cpp',
'../src/animator/SkDrawFull.cpp',
'../src/animator/SkDrawFull.h',
'../src/animator/SkDrawGradient.cpp',
'../src/animator/SkDrawGradient.h',
'../src/animator/SkDrawGroup.cpp',
'../src/animator/SkDrawGroup.h',
'../src/animator/SkDrawLine.cpp',
'../src/animator/SkDrawLine.h',
'../src/animator/SkDrawMatrix.cpp',
'../src/animator/SkDrawMatrix.h',
'../src/animator/SkDrawOval.cpp',
'../src/animator/SkDrawOval.h',
'../src/animator/SkDrawPaint.cpp',
'../src/animator/SkDrawPaint.h',
'../src/animator/SkDrawPath.cpp',
'../src/animator/SkDrawPath.h',
'../src/animator/SkDrawPoint.cpp',
'../src/animator/SkDrawPoint.h',
'../src/animator/SkDrawRectangle.cpp',
'../src/animator/SkDrawRectangle.h',
'../src/animator/SkDrawSaveLayer.cpp',
'../src/animator/SkDrawSaveLayer.h',
'../src/animator/SkDrawShader.cpp',
'../src/animator/SkDrawShader.h',
'../src/animator/SkDrawText.cpp',
'../src/animator/SkDrawText.h',
'../src/animator/SkDrawTextBox.cpp',
'../src/animator/SkDrawTextBox.h',
'../src/animator/SkDrawTo.cpp',
'../src/animator/SkDrawTo.h',
'../src/animator/SkDrawTransparentShader.cpp',
'../src/animator/SkDrawTransparentShader.h',
'../src/animator/SkDump.cpp',
'../src/animator/SkDump.h',
'../src/animator/SkExtras.h',
'../src/animator/SkGetCondensedInfo.cpp',
'../src/animator/SkHitClear.cpp',
'../src/animator/SkHitClear.h',
'../src/animator/SkHitTest.cpp',
'../src/animator/SkHitTest.h',
'../src/animator/SkIntArray.h',
'../src/animator/SkMatrixParts.cpp',
'../src/animator/SkMatrixParts.h',
'../src/animator/SkMemberInfo.cpp',
'../src/animator/SkMemberInfo.h',
'../src/animator/SkOpArray.cpp',
'../src/animator/SkOpArray.h',
'../src/animator/SkOperand.h',
'../src/animator/SkOperand2.h',
'../src/animator/SkOperandInterpolator.h',
'../src/animator/SkOperandIterpolator.cpp',
'../src/animator/SkPaintParts.cpp',
'../src/animator/SkPaintParts.h',
'../src/animator/SkParseSVGPath.cpp',
'../src/animator/SkPathParts.cpp',
'../src/animator/SkPathParts.h',
'../src/animator/SkPostParts.cpp',
'../src/animator/SkPostParts.h',
'../src/animator/SkScript.cpp',
'../src/animator/SkScript.h',
'../src/animator/SkScript2.h',
'../src/animator/SkScriptCallBack.h',
'../src/animator/SkScriptDecompile.cpp',
'../src/animator/SkScriptRuntime.cpp',
'../src/animator/SkScriptRuntime.h',
'../src/animator/SkScriptTokenizer.cpp',
'../src/animator/SkSnapshot.cpp',
'../src/animator/SkSnapshot.h',
'../src/animator/SkTDArray_Experimental.h',
'../src/animator/SkTextOnPath.cpp',
'../src/animator/SkTextOnPath.h',
'../src/animator/SkTextToPath.cpp',
'../src/animator/SkTextToPath.h',
'../src/animator/SkTime.cpp',
'../src/animator/SkTypedArray.cpp',
'../src/animator/SkTypedArray.h',
'../src/animator/SkXMLAnimatorWriter.cpp',
'../src/animator/SkXMLAnimatorWriter.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../include/animator',
],
},
},
{
'target_name': 'svg',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
'../include/xml',
'../include/utils',
'../include/svg',
],
'sources': [
'../include/svg/SkSVGAttribute.h',
'../include/svg/SkSVGBase.h',
'../include/svg/SkSVGPaintState.h',
'../include/svg/SkSVGParser.h',
'../include/svg/SkSVGTypes.h',
'../src/svg/SkSVGCircle.cpp',
'../src/svg/SkSVGCircle.h',
'../src/svg/SkSVGClipPath.cpp',
'../src/svg/SkSVGClipPath.h',
'../src/svg/SkSVGDefs.cpp',
'../src/svg/SkSVGDefs.h',
'../src/svg/SkSVGElements.cpp',
'../src/svg/SkSVGElements.h',
'../src/svg/SkSVGEllipse.cpp',
'../src/svg/SkSVGEllipse.h',
'../src/svg/SkSVGFeColorMatrix.cpp',
'../src/svg/SkSVGFeColorMatrix.h',
'../src/svg/SkSVGFilter.cpp',
'../src/svg/SkSVGFilter.h',
'../src/svg/SkSVGG.cpp',
'../src/svg/SkSVGG.h',
'../src/svg/SkSVGGradient.cpp',
'../src/svg/SkSVGGradient.h',
'../src/svg/SkSVGGroup.cpp',
'../src/svg/SkSVGGroup.h',
'../src/svg/SkSVGImage.cpp',
'../src/svg/SkSVGImage.h',
'../src/svg/SkSVGLine.cpp',
'../src/svg/SkSVGLine.h',
'../src/svg/SkSVGLinearGradient.cpp',
'../src/svg/SkSVGLinearGradient.h',
'../src/svg/SkSVGMask.cpp',
'../src/svg/SkSVGMask.h',
'../src/svg/SkSVGMetadata.cpp',
'../src/svg/SkSVGMetadata.h',
'../src/svg/SkSVGPaintState.cpp',
'../src/svg/SkSVGParser.cpp',
'../src/svg/SkSVGPath.cpp',
'../src/svg/SkSVGPath.h',
'../src/svg/SkSVGPolygon.cpp',
'../src/svg/SkSVGPolygon.h',
'../src/svg/SkSVGPolyline.cpp',
'../src/svg/SkSVGPolyline.h',
'../src/svg/SkSVGRadialGradient.cpp',
'../src/svg/SkSVGRadialGradient.h',
'../src/svg/SkSVGRect.cpp',
'../src/svg/SkSVGRect.h',
'../src/svg/SkSVGStop.cpp',
'../src/svg/SkSVGStop.h',
'../src/svg/SkSVGSVG.cpp',
'../src/svg/SkSVGSVG.h',
'../src/svg/SkSVGSymbol.cpp',
'../src/svg/SkSVGSymbol.h',
'../src/svg/SkSVGText.cpp',
'../src/svg/SkSVGText.h',
'../src/svg/SkSVGUse.cpp',
],
'sources!' : [
'../src/svg/SkSVG.cpp', # doesn't compile, maybe this is test code?
],
'direct_dependent_settings': {
'include_dirs': [
'../include/svg',
],
},
},
{
'target_name': 'experimental',
'type': 'static_library',
'include_dirs': [
'../include/config',
'../include/core',
],
'sources': [
'../experimental/SkMatrix44.cpp',
'../experimental/SkMatrix44.h',
'../experimental/SkSetPoly3To3.cpp',
'../experimental/SkSetPoly3To3_A.cpp',
'../experimental/SkSetPoly3To3_D.cpp',
],
'sources!': [
'../experimental/SkMatrix44.cpp', #doesn't compile
'../experimental/SkMatrix44.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../experimental',
],
},
},
{
'target_name': 'SampleApp',
'type': 'executable',
'mac_bundle' : 1,
'include_dirs' : [
'../src/core', # needed to get SkConcaveToTriangle, maybe this should be moved to include dir?
'../gm', # SampleGM.cpp pulls gm.h
],
'sources': [
# gm files needed for SampleGM.cpp
'../gm/bitmapfilters.cpp',
'../gm/blurs.cpp',
'../gm/complexclip.cpp',
'../gm/filltypes.cpp',
'../gm/gm.h',
'../gm/gradients.cpp',
'../gm/nocolorbleed.cpp',
'../gm/points.cpp',
'../gm/poly2poly.cpp',
'../gm/shadertext.cpp',
'../gm/shadows.cpp',
'../gm/shapes.cpp',
'../gm/tilemodes.cpp',
'../gm/xfermodes.cpp',
'../samplecode/ClockFaceView.cpp',
'../samplecode/OverView.cpp',
'../samplecode/SampleAll.cpp',
'../samplecode/SampleAnimator.cpp',
'../samplecode/SampleApp.cpp',
'../samplecode/SampleArc.cpp',
'../samplecode/SampleAvoid.cpp',
'../samplecode/SampleBigGradient.cpp',
'../samplecode/SampleBitmapRect.cpp',
'../samplecode/SampleBlur.cpp',
'../samplecode/SampleCamera.cpp',
'../samplecode/SampleCircle.cpp',
'../samplecode/SampleCode.h',
'../samplecode/SampleColorFilter.cpp',
'../samplecode/SampleComplexClip.cpp',
'../samplecode/SampleCull.cpp',
'../samplecode/SampleDecode.cpp',
'../samplecode/SampleDither.cpp',
'../samplecode/SampleDitherBitmap.cpp',
'../samplecode/SampleDrawLooper.cpp',
'../samplecode/SampleEffects.cpp',
'../samplecode/SampleEmboss.cpp',
'../samplecode/SampleEncode.cpp',
'../samplecode/SampleExtractAlpha.cpp',
'../samplecode/SampleFillType.cpp',
'../samplecode/SampleFilter.cpp',
'../samplecode/SampleFilter2.cpp',
'../samplecode/SampleFontCache.cpp',
'../samplecode/SampleFontScalerTest.cpp',
'../samplecode/SampleFuzz.cpp',
'../samplecode/SampleGM.cpp',
'../samplecode/SampleGradients.cpp',
'../samplecode/SampleHairline.cpp',
'../samplecode/SampleImage.cpp',
'../samplecode/SampleImageDir.cpp',
'../samplecode/SampleLayerMask.cpp',
'../samplecode/SampleLayers.cpp',
'../samplecode/SampleLCD.cpp',
'../samplecode/SampleLineClipper.cpp',
'../samplecode/SampleLines.cpp',
'../samplecode/SampleMeasure.cpp',
'../samplecode/SampleMipMap.cpp',
'../samplecode/SampleMovie.cpp',
'../samplecode/SampleNinePatch.cpp',
'../samplecode/SampleOvalTest.cpp',
'../samplecode/SampleOverflow.cpp',
'../samplecode/SamplePageFlip.cpp',
'../samplecode/SamplePatch.cpp',
'../samplecode/SamplePath.cpp',
'../samplecode/SamplePathClip.cpp',
'../samplecode/SamplePathEffects.cpp',
'../samplecode/SamplePicture.cpp',
'../samplecode/SamplePoints.cpp',
'../samplecode/SamplePolyToPoly.cpp',
'../samplecode/SampleAARects.cpp',
'../samplecode/SampleRegion.cpp',
'../samplecode/SampleRepeatTile.cpp',
'../samplecode/SampleShaders.cpp',
'../samplecode/SampleShaderText.cpp',
'../samplecode/SampleShapes.cpp',
'../samplecode/SampleSkLayer.cpp',
'../samplecode/SampleSlides.cpp',
'../samplecode/SampleStrokePath.cpp',
'../samplecode/SampleStrokeText.cpp',
'../samplecode/SampleSVG.cpp',
'../samplecode/SampleTests.cpp',
'../samplecode/SampleText.cpp',
'../samplecode/SampleTextAlpha.cpp',
'../samplecode/SampleTextBox.cpp',
'../samplecode/SampleTextEffects.cpp',
'../samplecode/SampleTextOnPath.cpp',
'../samplecode/SampleTextureDomain.cpp',
'../samplecode/SampleTiling.cpp',
'../samplecode/SampleTinyBitmap.cpp',
'../samplecode/SampleTriangles.cpp',
'../samplecode/SampleTypeface.cpp',
'../samplecode/SampleUnitMapper.cpp',
'../samplecode/SampleVertices.cpp',
'../samplecode/SampleXfermodes.cpp',
],
'sources!': [
'../samplecode/SampleSkLayer.cpp', #relies on SkMatrix44 which doesn't compile
'../samplecode/SampleTests.cpp', #includes unknown file SkShaderExtras.h
'../samplecode/SampleWarp.cpp',
'../samplecode/SampleFontCache.cpp',
],
'dependencies': [
'skia',
'effects',
'images',
'views',
'utils',
'animator',
'xml',
'svg',
'experimental',
'gr',
'skgr',
],
'conditions' : [
[ 'OS == "linux" or OS == "freebsd" or OS == "openbsd" or OS == "solaris"', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
}],
[ 'OS == "win"', {
'sources!': [
# require UNIX functions
'../samplecode/SampleEncode.cpp',
'../samplecode/SamplePageFlip.cpp',
],
}],
[ 'OS == "mac"', {
'sources!': [
'../samplecode/SampleDecode.cpp',
],
}],
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2',
'AdditionalDependencies': [
'OpenGL32.lib',
'usp10.lib',
'd3d9.lib',
],
},
},
},
{
'target_name': 'libtess',
'type': 'static_library',
'include_dirs': [
'../third_party/glu',
],
'sources': [
'../third_party/glu/internal_glu.h',
'../third_party/glu/gluos.h',
'../third_party/glu/libtess/dict-list.h',
'../third_party/glu/libtess/dict.c',
'../third_party/glu/libtess/dict.h',
'../third_party/glu/libtess/geom.c',
'../third_party/glu/libtess/geom.h',
'../third_party/glu/libtess/memalloc.c',
'../third_party/glu/libtess/memalloc.h',
'../third_party/glu/libtess/mesh.c',
'../third_party/glu/libtess/mesh.h',
'../third_party/glu/libtess/normal.c',
'../third_party/glu/libtess/normal.h',
'../third_party/glu/libtess/priorityq-heap.h',
'../third_party/glu/libtess/priorityq-sort.h',
'../third_party/glu/libtess/priorityq.c',
'../third_party/glu/libtess/priorityq.h',
'../third_party/glu/libtess/render.c',
'../third_party/glu/libtess/render.h',
'../third_party/glu/libtess/sweep.c',
'../third_party/glu/libtess/sweep.h',
'../third_party/glu/libtess/tess.c',
'../third_party/glu/libtess/tess.h',
'../third_party/glu/libtess/tessmono.c',
'../third_party/glu/libtess/tessmono.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../third_party/glu',
],
},
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
|
import printer
import os
import sys
import importlib
import re
import caches
import multiprocessing
import time
import dill
HERE = os.path.abspath(os.path.dirname(__file__))  # directory containing this module; root for locating "tests"
TIMEOUT = 20 # timeout of a module set of tests in seconds
def test(testName, module = ""):
    """Locate the companion test module for *testName* and run it.

    The convention is that the tests for ``foo.py`` live in ``fooTest.py``
    somewhere under the bundled tests directory.
    """
    sourceName = _getFileName(testName)
    sourceDir = _getFilePath(testName)
    if sourceDir not in sys.path:
        sys.path.append(sourceDir)
    # foo.py -> fooTest.py
    testModuleFile = sourceName[:-3] + "Test.py"
    testDir = _getTestDirPath(testModuleFile, module = module)
    if testDir is None:
        printer.displayError("No test found for {}".format(sourceName))
        return
    if testDir not in sys.path:
        sys.path.append(testDir)
    loaded = importlib.import_module(testModuleFile[:-3])
    # Remember which source file these tests belong to.
    loaded._fileName = os.path.join(sourceDir, sourceName)
    _runTests(loaded)
def testModule(module):
    """Run every test discovered inside *module*."""
    names = _getTestNames(module)
    if not names:
        printer.displayError("no tests found in module: {}".format(module))
        return
    for name in names:
        test(name, module = module)
def _runTests(testModule):
    """Run all tests in *testModule* in a child process, enforcing TIMEOUT.

    Every module-level callable in the test module, except the reserved
    ``before``/``after`` hooks, is treated as a factory creating one test.
    The child is terminated when TIMEOUT seconds elapse so a hanging test
    cannot block the whole run.
    """
    def _runner(testModule):
        reservedNames = ["before", "after"]
        testCreators = [method for method in testModule.__dict__.values() if callable(method) and method.__name__ not in reservedNames]
        printer.displayTestName(os.path.basename(testModule._fileName))
        if hasattr(testModule, "before"):
            try:
                testModule.before()
            except Exception as e:
                printer.displayError("Something went wrong at setup:\n{}".format(e))
                return
        # Instantiate all tests, run them in sorted order, show results.
        for test in sorted(tc() for tc in testCreators):
            testResult = test.run()
            if testResult is not None:
                printer.display(testResult)
        if hasattr(testModule, "after"):
            try:
                testModule.after()
            except Exception as e:
                printer.displayError("Something went wrong at closing:\n{}".format(e))
    # NOTE: the duplicated reservedNames/testCreators computation that used
    # to sit here (unused in this scope) has been removed.
    p = multiprocessing.Process(target=_runner, name="Run", args=(testModule,))
    p.start()
    start = time.time()
    while p.is_alive():
        if time.time() - start > TIMEOUT:
            printer.displayError("Timeout ({} seconds) reached, stopped testing.".format(TIMEOUT))
            p.terminate()
            p.join()
            return
        time.sleep(0.1)  # poll instead of join() so the timeout stays responsive
def _getTestNames(moduleName):
    """Return the base names (without the "Test.py" suffix) of all test
    files found in the tests directory matching *moduleName*."""
    moduleName = _backslashToForwardslash(moduleName)
    for (dirPath, dirNames, fileNames) in os.walk(os.path.join(HERE, "tests")):
        dirPath = _backslashToForwardslash(dirPath)
        if moduleName in dirPath:
            # Only <name>Test.py files are tests; the original matched any
            # ".py" file, which [:-7] would mangle for non-test modules.
            return [fileName[:-7] for fileName in fileNames if fileName.endswith("Test.py") and not fileName.startswith("_")]
def _getTestDirPath(testFileName, module = ""):
    """Return the directory under tests/ containing *testFileName*, or None.

    *module* restricts the search to directories whose path contains it.
    """
    module = _backslashToForwardslash(module)
    testFileName = _backslashToForwardslash(testFileName)
    root = os.path.join(HERE, "tests")
    for dirPath, _dirNames, fileNames in os.walk(root):
        if module in _backslashToForwardslash(dirPath) and testFileName in fileNames:
            return dirPath
def _getFileName(completeFilePath):
fileName = os.path.basename(completeFilePath)
if not fileName.endswith(".py"):
fileName += ".py"
return fileName
def _getFilePath(completeFilePath):
filePath = os.path.dirname(completeFilePath)
if not filePath:
filePath = os.path.dirname(os.path.abspath(_getFileName(completeFilePath)))
return filePath
def _backslashToForwardslash(text):
return re.sub("\\\\", "/", text)
Removed some redundant code.
import printer
import os
import sys
import importlib
import re
import caches
import multiprocessing
import time
import dill
HERE = os.path.abspath(os.path.dirname(__file__))  # directory containing this module; root for locating "tests"
TIMEOUT = 20 # timeout of a module set of tests in seconds
def test(testName, module = ""):
    """Locate the companion test module for *testName* and run it.

    The convention is that the tests for ``foo.py`` live in ``fooTest.py``
    somewhere under the bundled tests directory.
    """
    sourceName = _getFileName(testName)
    sourceDir = _getFilePath(testName)
    if sourceDir not in sys.path:
        sys.path.append(sourceDir)
    # foo.py -> fooTest.py
    testModuleFile = sourceName[:-3] + "Test.py"
    testDir = _getTestDirPath(testModuleFile, module = module)
    if testDir is None:
        printer.displayError("No test found for {}".format(sourceName))
        return
    if testDir not in sys.path:
        sys.path.append(testDir)
    loaded = importlib.import_module(testModuleFile[:-3])
    # Remember which source file these tests belong to.
    loaded._fileName = os.path.join(sourceDir, sourceName)
    _runTests(loaded)
def testModule(module):
    """Run every test discovered inside *module*."""
    names = _getTestNames(module)
    if not names:
        printer.displayError("no tests found in module: {}".format(module))
        return
    for name in names:
        test(name, module = module)
def _runTests(testModule):
    """Run all tests in *testModule* in a child process, enforcing TIMEOUT.

    The child is terminated once TIMEOUT seconds elapse so a hanging test
    cannot block the whole run.
    """
    def _runner(testModule):
        # Every module-level callable except the before/after hooks is
        # treated as a factory that creates a single test object.
        reservedNames = ["before", "after"]
        testCreators = [method for method in testModule.__dict__.values() if callable(method) and method.__name__ not in reservedNames]
        printer.displayTestName(os.path.basename(testModule._fileName))
        if hasattr(testModule, "before"):
            try:
                testModule.before()
            except Exception as e:
                printer.displayError("Something went wrong at setup:\n{}".format(e))
                return
        # Instantiate all tests, run them in sorted order, show results.
        for test in sorted(tc() for tc in testCreators):
            testResult = test.run()
            if testResult != None:
                printer.display(testResult)
        if hasattr(testModule, "after"):
            try:
                testModule.after()
            except Exception as e:
                printer.displayError("Something went wrong at closing:\n{}".format(e))
    # Run in a separate process so the tests can be killed from here once
    # the timeout is exceeded. NOTE(review): a nested function as Process
    # target only works with fork-style start methods; presumably the
    # `dill` import elsewhere exists to support this — confirm.
    p = multiprocessing.Process(target=_runner, name="Run", args=(testModule,))
    p.start()
    start = time.time()
    while p.is_alive():
        if time.time() - start > TIMEOUT:
            printer.displayError("Timeout ({} seconds) reached, stopped testing.".format(TIMEOUT))
            p.terminate()
            p.join()
            return
        time.sleep(0.1)  # poll instead of join() so the timeout stays responsive
def _getTestNames(moduleName):
    """Return the base names (without the "Test.py" suffix) of all test
    files found in the tests directory matching *moduleName*."""
    moduleName = _backslashToForwardslash(moduleName)
    for (dirPath, dirNames, fileNames) in os.walk(os.path.join(HERE, "tests")):
        dirPath = _backslashToForwardslash(dirPath)
        if moduleName in dirPath:
            # Only <name>Test.py files are tests; the original matched any
            # ".py" file, which [:-7] would mangle for non-test modules.
            return [fileName[:-7] for fileName in fileNames if fileName.endswith("Test.py") and not fileName.startswith("_")]
def _getTestDirPath(testFileName, module = ""):
    """Return the directory under tests/ containing *testFileName*, or None.

    *module* restricts the search to directories whose path contains it.
    """
    module = _backslashToForwardslash(module)
    testFileName = _backslashToForwardslash(testFileName)
    root = os.path.join(HERE, "tests")
    for dirPath, _dirNames, fileNames in os.walk(root):
        if module in _backslashToForwardslash(dirPath) and testFileName in fileNames:
            return dirPath
def _getFileName(completeFilePath):
fileName = os.path.basename(completeFilePath)
if not fileName.endswith(".py"):
fileName += ".py"
return fileName
def _getFilePath(completeFilePath):
filePath = os.path.dirname(completeFilePath)
if not filePath:
filePath = os.path.dirname(os.path.abspath(_getFileName(completeFilePath)))
return filePath
def _backslashToForwardslash(text):
return re.sub("\\\\", "/", text) |
from checks.utils import TailFile
import modules
import os
import sys
import traceback
import re
import time
from datetime import datetime
from itertools import groupby # >= python 2.4
from checks import LaconicFilter
# str.partition appeared in Python 2.5; provide a fallback for older
# interpreters.
if hasattr(str, 'partition'):
    def partition(s, sep):
        """Delegate to the builtin str.partition."""
        return s.partition(sep)
else:
    def partition(s, sep):
        """Pure-Python equivalent of str.partition for Python < 2.5."""
        pos = s.find(sep)
        if pos == -1:
            # Mirror str.partition: when the separator is absent the middle
            # element is empty. (The original fallback returned sep here,
            # diverging from the builtin's behavior.)
            return (s, '', '')
        else:
            return s[0:pos], sep, s[pos + len(sep):]
def point_sorter(p):
    """Sort key for metric points: group by timestamp, metric name,
    host_name and device_name (the latter two taken from the attribute
    dict in position 3, defaulting to None)."""
    attrs = p[3]
    return (p[1], p[0], attrs.get('host_name'), attrs.get('device_name'))
class EventDefaults(object):
    """Default field values applied to events emitted by dogstream parsers
    when the parsed datum does not specify them itself."""
    EVENT_TYPE = 'dogstream_event'
    EVENT_OBJECT = 'dogstream_event:default'
class Dogstreams(object):
    """Registry of Dogstream log parsers built from agent configuration."""

    @classmethod
    def init(cls, logger, config):
        """Build a Dogstreams instance from the 'dogstreams' config value.

        Expecting dogstreams config value to look like:
           <dogstream value>, <dogstream value>, ...
        Where <dogstream value> looks like:
           <log path>
        or
           <log path>:<module>:<parser function>
        """
        dogstreams_config = config.get('dogstreams', None)
        dogstreams = []
        if dogstreams_config:
            # Create a Dogstream object for each <dogstream value>
            for config_item in dogstreams_config.split(','):
                try:
                    config_item = config_item.strip()
                    parts = config_item.split(':')
                    if len(parts) == 1:
                        dogstreams.append(Dogstream.init(logger, log_path=parts[0]))
                    elif len(parts) == 2:
                        logger.warn("Invalid dogstream: %s" % ':'.join(parts))
                    elif len(parts) >= 3:
                        # Anything after <module>:<function> is passed to the
                        # parser as extra arguments.
                        dogstreams.append(Dogstream.init(
                            logger,
                            log_path=parts[0],
                            parser_spec=':'.join(parts[1:3]),
                            parser_args=parts[3:],
                            config=config))
                    # NOTE: an unreachable `elif len(parts) > 3` branch that
                    # followed the `>= 3` case has been removed.
                except Exception:
                    # Narrowed from a bare `except:` so SystemExit and
                    # KeyboardInterrupt are not swallowed.
                    logger.exception("Cannot build dogstream")
        perfdata_parsers = NagiosPerfData.init(logger, config)
        if perfdata_parsers:
            dogstreams.extend(perfdata_parsers)
        logger.info("Dogstream parsers: %s" % repr(dogstreams))
        return cls(logger, dogstreams)

    def __init__(self, logger, dogstreams):
        self.logger = logger
        self.dogstreams = dogstreams

    def check(self, agentConfig, move_end=True):
        """Run every registered dogstream and merge their outputs.

        Values for keys produced by several dogstreams are concatenated.
        """
        if not self.dogstreams:
            return {}
        output = {}
        for dogstream in self.dogstreams:
            try:
                result = dogstream.check(agentConfig, move_end)
                # result may contain {"dogstream": [new]}.
                # If output contains {"dogstream": [old]}, that old value will get concatenated with the new value
                assert type(result) == type(output), "dogstream.check must return a dictionary"
                for k in result:
                    if k in output:
                        output[k].extend(result[k])
                    else:
                        output[k] = result[k]
            except Exception:
                # Narrowed from a bare `except:`; one failing parser must
                # not abort the others.
                self.logger.exception("Error in parsing %s" % (dogstream.log_path))
        return output
class Dogstream(object):
@classmethod
def init(cls, logger, log_path, parser_spec=None, parser_args=None, config=None):
class_based = False
parse_func = None
parse_args = tuple(parser_args or ())
if parser_spec:
try:
parse_func = modules.load(parser_spec, 'parser')
if isinstance(parse_func, type):
logger.info('Instantiating class-based dogstream')
parse_func = parse_func(
user_args=parse_args or (),
logger=logger,
log_path=log_path,
config=config,
)
parse_args = ()
class_based = True
else:
logger.info('Instantiating function-based dogstream')
except:
logger.exception(traceback.format_exc())
logger.error('Could not load Dogstream line parser "%s" PYTHONPATH=%s' % (
parser_spec,
os.environ.get('PYTHONPATH', ''))
)
logger.info("dogstream: parsing %s with %s (requested %s)" % (log_path, parse_func, parser_spec))
else:
logger.info("dogstream: parsing %s with default parser" % log_path)
return cls(logger, log_path, parse_func, parse_args, class_based=class_based)
    def __init__(self, logger, log_path, parse_func=None, parse_args=(), class_based=False):
        """Tail *log_path* and feed its lines to *parse_func* (or the
        default parser when none is given)."""
        self.logger = logger
        self.class_based = class_based
        # Apply LaconicFilter to avoid log flooding
        self.logger.addFilter(LaconicFilter("dogstream"))
        self.log_path = log_path
        self.parse_func = parse_func or self._default_line_parser
        self.parse_args = parse_args
        self._gen = None  # tail generator, created lazily on first check()
        self._values = None  # metric points collected during one check()
        self._freq = 15 # Will get updated on each check()
        self._error_count = 0L  # Python 2 long literals: this module targets py2
        self._line_count = 0L
        self.parser_state = {}  # mutable state shared across parser invocations
def check(self, agentConfig, move_end=True):
if self.log_path:
self._freq = int(agentConfig.get('check_freq', 15))
self._values = []
self._events = []
# Build our tail -f
if self._gen is None:
self._gen = TailFile(self.logger, self.log_path, self._line_parser).tail(line_by_line=False, move_end=move_end)
# read until the end of file
try:
self._gen.next()
self.logger.debug("Done dogstream check for file %s, found %s metric points" % (self.log_path, len(self._values)))
except StopIteration, e:
self.logger.exception(e)
self.logger.warn("Can't tail %s file" % self.log_path)
check_output = self._aggregate(self._values)
if self._events:
check_output.update({"dogstreamEvents": self._events})
return check_output
else:
return {}
def _line_parser(self, line):
try:
# alq - Allow parser state to be kept between invocations
# This means a new argument can be passed the custom parsing function
# to store context that can be shared between parsing of lines.
# One example is a running counter, which is incremented each time
# a line is processed.
parsed = None
if self.class_based:
parsed = self.parse_func.parse_line(line)
else:
try:
parsed = self.parse_func(self.logger, line, self.parser_state, *self.parse_args)
except TypeError, e:
# Arity of parse_func is 3 (old-style), not 4
parsed = self.parse_func(self.logger, line)
self._line_count += 1
if parsed is None:
return
if isinstance(parsed, (tuple, dict)):
parsed = [parsed]
for datum in parsed:
# Check if it's an event
if isinstance(datum, dict):
# An event requires at least a title or a body
if 'msg_title' not in datum and 'msg_text' not in datum:
continue
# Populate the default fields
if 'event_type' not in datum:
datum['event_type'] = EventDefaults.EVENT_TYPE
if 'timestamp' not in datum:
datum['timestamp'] = time.time()
# Make sure event_object and aggregation_key (synonyms) are set
# FIXME when the backend treats those as true synonyms, we can
# deprecate event_object.
if 'event_object' in datum or 'aggregation_key' in datum:
datum['aggregation_key'] = datum.get('event_object', datum.get('aggregation_key'))
else:
datum['aggregation_key'] = EventDefaults.EVENT_OBJECT
datum['event_object'] = datum['aggregation_key']
self._events.append(datum)
continue
# Otherwise, assume it's a metric
try:
metric, ts, value, attrs = datum
except:
continue
# Validation
invalid_reasons = []
try:
# Bucket points into 15 second buckets
ts = (int(float(ts)) / self._freq) * self._freq
date = datetime.fromtimestamp(ts)
assert date.year > 1990
except Exception:
invalid_reasons.append('invalid timestamp')
try:
value = float(value)
except Exception:
invalid_reasons.append('invalid metric value')
if invalid_reasons:
self.logger.debug('Invalid parsed values %s (%s): "%s"',
repr(datum), ', '.join(invalid_reasons), line)
else:
self._values.append((metric, ts, value, attrs))
except Exception, e:
self.logger.debug("Error while parsing line %s" % line, exc_info=True)
self._error_count += 1
self.logger.error("Parser error: %s out of %s" % (self._error_count, self._line_count))
def _default_line_parser(self, logger, line):
original_line = line
sep = ' '
metric, _, line = partition(line.strip(), sep)
timestamp, _, line = partition(line.strip(), sep)
value, _, line = partition(line.strip(), sep)
attributes = {}
try:
while line:
keyval, _, line = partition(line.strip(), sep)
key, val = keyval.split('=', 1)
attributes[key] = val
except Exception, e:
logger.debug(traceback.format_exc())
return metric, timestamp, value, attributes
def _aggregate(self, values):
""" Aggregate values down to the second and store as:
{
"dogstream": [(metric, timestamp, value, {key: val})]
}
If there are many values per second for a metric, take the median
"""
output = []
values.sort(key=point_sorter)
for (timestamp, metric, host_name, device_name), val_attrs in groupby(values, key=point_sorter):
attributes = {}
vals = []
for _metric, _timestamp, v, a in val_attrs:
try:
v = float(v)
vals.append(v)
attributes.update(a)
except:
self.logger.debug("Could not convert %s into a float", v)
if len(vals) == 1:
val = vals[0]
elif len(vals) > 1:
val = vals[-1]
else: # len(vals) == 0
continue
metric_type = str(attributes.get('metric_type', '')).lower()
if metric_type == 'gauge':
val = float(val)
elif metric_type == 'counter':
val = int(val)
output.append((metric, timestamp, val, attributes))
if output:
return {"dogstream": output}
else:
return {}
# Raised when a nagios perfdata file template cannot be compiled into a regex.
class InvalidDataTemplate(Exception): pass
class NagiosPerfData(object):
perfdata_field = '' # Should be overriden by subclasses
metric_prefix = 'nagios'
pair_pattern = re.compile(r"".join([
r"'?(?P<label>[^=']+)'?=",
r"(?P<value>[-0-9.]+)",
r"(?P<unit>s|us|ms|%|B|KB|MB|GB|TB|c)?",
r"(;(?P<warn>@?[-0-9.~]*:?[-0-9.~]*))?",
r"(;(?P<crit>@?[-0-9.~]*:?[-0-9.~]*))?",
r"(;(?P<min>[-0-9.]*))?",
r"(;(?P<max>[-0-9.]*))?",
]))
@classmethod
def init(cls, logger, config):
nagios_perf_config = config.get('nagios_perf_cfg', None)
parsers = []
if nagios_perf_config:
nagios_config = cls.parse_nagios_config(nagios_perf_config)
host_parser = NagiosHostPerfData.init(logger, nagios_config)
if host_parser:
parsers.append(host_parser)
service_parser = NagiosServicePerfData.init(logger, nagios_config)
if service_parser:
parsers.append(service_parser)
return parsers
@staticmethod
def template_regex(file_template):
try:
# Escape characters that will be interpreted as regex bits
# e.g. [ and ] in "[SERVICEPERFDATA]"
regex = re.sub(r'[[\]*]', r'.', file_template)
regex = re.sub(r'\$([^\$]*)\$', r'(?P<\1>[^\$]*)', regex)
return re.compile(regex)
except Exception, e:
raise InvalidDataTemplate("%s (%s)"% (file_template, e))
@staticmethod
def underscorize(s):
return s.replace(' ', '_').lower()
@classmethod
def parse_nagios_config(cls, filename):
output = {}
keys = [
'host_perfdata_file_template',
'service_perfdata_file_template',
'host_perfdata_file',
'service_perfdata_file',
]
try:
f = open(filename)
for line in f:
line = line.strip()
if not line:
continue
for key in keys:
if line.startswith(key + '='):
eq_pos = line.find('=')
if eq_pos:
output[key] = line[eq_pos + 1:]
break
finally:
f.close()
return output
def __init__(self, logger, line_pattern, datafile):
if isinstance(line_pattern, (str, unicode)):
self.line_pattern = re.compile(line_pattern)
else:
self.line_pattern = line_pattern
self._dogstream = Dogstream(logger, datafile, self._parse_line)
def _get_metric_prefix(self, data):
# Should be overridded by subclasses
return [self.metric_prefix]
def _parse_line(self, logger, line):
matched = self.line_pattern.match(line)
output = []
if matched:
data = matched.groupdict()
metric_prefix = self._get_metric_prefix(data)
# Parse the prefdata values, which are a space-delimited list of:
# 'label'=value[UOM];[warn];[crit];[min];[max]
perf_data = data.get(self.perfdata_field, '').split(' ')
for pair in perf_data:
pair_match = self.pair_pattern.match(pair)
if not pair_match:
continue
else:
pair_data = pair_match.groupdict()
label = pair_data['label']
timestamp = data.get('TIMET', '')
value = pair_data['value']
attributes = {'metric_type': 'gauge'}
if '/' in label:
# Special case: if the label begins
# with a /, treat the label as the device
# and use the metric prefix as the metric name
metric = '.'.join(metric_prefix)
attributes['device_name'] = label
else:
# Otherwise, append the label to the metric prefix
# and use that as the metric name
metric = '.'.join(metric_prefix + [label])
host_name = data.get('HOSTNAME', None)
if host_name:
attributes['host_name'] = host_name
optional_keys = ['unit', 'warn', 'crit', 'min', 'max']
for key in optional_keys:
attr_val = pair_data.get(key, None)
if attr_val is not None and attr_val != '':
attributes[key] = attr_val
output.append((
metric,
timestamp,
value,
attributes
))
return output
def check(self, agentConfig, move_end=True):
return self._dogstream.check(agentConfig, move_end)
class NagiosHostPerfData(NagiosPerfData):
    """Parser for nagios *host* perfdata files."""
    perfdata_field = 'HOSTPERFDATA'

    @classmethod
    def init(cls, logger, nagios_config):
        """Return a parser when both template and file are configured, else None."""
        template = nagios_config.get('host_perfdata_file_template', None)
        datafile = nagios_config.get('host_perfdata_file', None)
        if not (template and datafile):
            return None
        return cls(logger, cls.template_regex(template), datafile)

    def _get_metric_prefix(self, line_data):
        # All host metrics live under "nagios.host.*"
        return [self.metric_prefix, 'host']
class NagiosServicePerfData(NagiosPerfData):
    """Parser for nagios *service* perfdata files."""
    perfdata_field = 'SERVICEPERFDATA'

    @classmethod
    def init(cls, logger, nagios_config):
        """Return a parser when both template and file are configured, else None."""
        template = nagios_config.get('service_perfdata_file_template', None)
        datafile = nagios_config.get('service_perfdata_file', None)
        if not (template and datafile):
            return None
        return cls(logger, cls.template_regex(template), datafile)

    def _get_metric_prefix(self, line_data):
        # Metrics are namespaced by the (underscorized) service description.
        parts = [self.metric_prefix]
        desc = line_data.get('SERVICEDESC', None)
        if desc:
            parts.append(desc.replace(' ', '_').lower())
        return parts
# Allow a smooth uninstall of previous version
# NOTE(review): empty old-style placeholder class; presumably kept so that
# references from a previously-installed agent version still resolve — confirm.
class RollupLP: pass
class DdForwarder(object):
QUEUE_SIZE = "queue_size"
QUEUE_COUNT = "queue_count"
RE_QUEUE_STAT = re.compile(r"\[.*\] Queue size: at (.*), (\d+) transaction\(s\), (\d+) KB")
def __init__(self, logger, config):
self.log_path = config.get('ddforwarder_log', '/var/log/ddforwarder.log')
self.logger = logger
self._gen = None
def _init_metrics(self):
self.metrics = {}
def _add_metric(self, name, value, ts):
if self.metrics.has_key(name):
self.metrics[name].append((ts, value))
else:
self.metrics[name] = [(ts, value)]
def _parse_line(self, line):
try:
m = self.RE_QUEUE_STAT.match(line)
if m is not None:
ts, count, size = m.groups()
self._add_metric(self.QUEUE_SIZE, size, round(float(ts)))
self._add_metric(self.QUEUE_COUNT, count, round(float(ts)))
except Exception, e:
self.logger.exception(e)
def check(self, agentConfig, move_end=True):
if self.log_path and os.path.isfile(self.log_path):
#reset metric points
self._init_metrics()
# Build our tail -f
if self._gen is None:
self._gen = TailFile(self.logger, self.log_path, self._parse_line).tail(line_by_line=False,
move_end=move_end)
# read until the end of file
try:
self._gen.next()
self.logger.debug("Done ddforwarder check for file %s" % self.log_path)
except StopIteration, e:
self.logger.exception(e)
self.logger.warn("Can't tail %s file" % self.log_path)
return { 'ddforwarder': self.metrics }
else:
self.logger.debug("Can't tail datadog forwarder log file: %s" % self.log_path)
return {}
def testDogStream():
import logging
logger = logging.getLogger("datadog")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
dogstream = Dogstream(logger)
while True:
events = dogstream.check({'api_key':'my_apikey', 'dogstream_log': sys.argv[1]}, move_end=True)
for e in events:
print "Event:", e
time.sleep(5)
def testddForwarder():
    """Manual harness: tail the forwarder log given as argv[1], print metrics every 5s."""
    import logging

    logger = logging.getLogger("datadog")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())

    config = {'api_key':'my_apikey', 'ddforwarder_log': sys.argv[1]}
    dd = DdForwarder(logger, config)
    # First check reads the whole file; subsequent checks read only new lines.
    m = dd.check(config, move_end=False)
    while True:
        print m
        time.sleep(5)
        m = dd.check(config)

if __name__ == '__main__':
    testddForwarder()
Fix a crasher reported by @sroldan: a latent bug in NagiosPerfData.parse_nagios_config referenced the file handle `f` in the `finally` clause even when `open()` had failed, raising a NameError instead of the original error. Guard the close with `f = None` / `if f is not None`.
from checks.utils import TailFile
import modules
import os
import sys
import traceback
import re
import time
from datetime import datetime
from itertools import groupby # >= python 2.4
from checks import LaconicFilter
# Use the native str.partition when the runtime provides it (Python >= 2.5);
# otherwise emulate it with str.find.
if hasattr('some string', 'partition'):
    def partition(s, sep):
        return s.partition(sep)
else:
    def partition(s, sep):
        idx = s.find(sep)
        if idx < 0:
            # Not found: mirror the original fallback, which returns the
            # separator (not '') in the middle slot.
            return (s, sep, '')
        return s[:idx], sep, s[idx + len(sep):]
def point_sorter(p):
    """Sort/group key for metric points: (timestamp, metric, host_name, device_name).

    ``p`` is a (metric, timestamp, value, attributes) tuple.
    """
    attrs = p[3]
    return (p[1], p[0], attrs.get('host_name'), attrs.get('device_name'))
class EventDefaults(object):
    """Default field values applied to parsed dogstream events that lack them."""
    EVENT_TYPE = 'dogstream_event'
    EVENT_OBJECT = 'dogstream_event:default'
class Dogstreams(object):
    """Collection of Dogstream parsers built from the agent configuration."""

    @classmethod
    def init(cls, logger, config):
        """Build parsers from the 'dogstreams' config value plus nagios perfdata config.

        The 'dogstreams' value is a comma-separated list where each item is
            <log path>
        or
            <log path>:<module>:<parser function>[:<parser arg>...]
        """
        dogstreams_config = config.get('dogstreams', None)
        dogstreams = []
        if dogstreams_config:
            # Create a Dogstream object for each <dogstream value>
            for config_item in dogstreams_config.split(','):
                try:
                    config_item = config_item.strip()
                    parts = config_item.split(':')
                    if len(parts) == 1:
                        dogstreams.append(Dogstream.init(logger, log_path=parts[0]))
                    elif len(parts) == 2:
                        # A bare "<path>:<module>" has no parser function.
                        logger.warn("Invalid dogstream: %s" % ':'.join(parts))
                    elif len(parts) >= 3:
                        # Everything past "<path>:<module>:<function>" is a
                        # positional argument for the parser.
                        dogstreams.append(Dogstream.init(
                            logger,
                            log_path=parts[0],
                            parser_spec=':'.join(parts[1:3]),
                            parser_args=parts[3:],
                            config=config))
                    # BUGFIX: a trailing "elif len(parts) > 3" branch was
                    # unreachable (the >= 3 case above already matched) and
                    # has been removed as dead code.
                except:
                    logger.exception("Cannot build dogstream")

        perfdata_parsers = NagiosPerfData.init(logger, config)
        if perfdata_parsers:
            dogstreams.extend(perfdata_parsers)

        logger.info("Dogstream parsers: %s" % repr(dogstreams))

        return cls(logger, dogstreams)

    def __init__(self, logger, dogstreams):
        self.logger = logger
        self.dogstreams = dogstreams

    def check(self, agentConfig, move_end=True):
        """Run every parser and merge outputs; lists under the same key concatenate."""
        if not self.dogstreams:
            return {}

        output = {}
        for dogstream in self.dogstreams:
            try:
                result = dogstream.check(agentConfig, move_end)
                # result may contain {"dogstream": [new]}.
                # If output contains {"dogstream": [old]}, that old value will get concatenated with the new value
                assert type(result) == type(output), "dogstream.check must return a dictionary"
                for k in result:
                    if k in output:
                        output[k].extend(result[k])
                    else:
                        output[k] = result[k]
            except:
                self.logger.exception("Error in parsing %s" % (dogstream.log_path))
        return output
class Dogstream(object):
    """Tail a single log file and parse metric points and events out of it.

    Parsing is delegated to ``parse_func``, which may be:
      * the built-in whitespace-separated ``_default_line_parser``,
      * a user function loaded via ``modules.load`` (old 3-arg or new
        4-arg form taking a shared ``parser_state`` dict), or
      * an instance of a parser class exposing ``parse_line`` (``class_based``).
    """

    @classmethod
    def init(cls, logger, log_path, parser_spec=None, parser_args=None, config=None):
        """Build a Dogstream, resolving ``parser_spec`` ("module:function") if given.

        Falls back to the default line parser when the spec cannot be loaded.
        """
        class_based = False
        parse_func = None
        parse_args = tuple(parser_args or ())
        if parser_spec:
            try:
                parse_func = modules.load(parser_spec, 'parser')
                # A class object means a stateful, class-based parser:
                # instantiate it once here, call parse_line() per line later.
                if isinstance(parse_func, type):
                    logger.info('Instantiating class-based dogstream')
                    parse_func = parse_func(
                        user_args=parse_args or (),
                        logger=logger,
                        log_path=log_path,
                        config=config,
                    )
                    parse_args = ()
                    class_based = True
                else:
                    logger.info('Instantiating function-based dogstream')
            except:
                logger.exception(traceback.format_exc())
                logger.error('Could not load Dogstream line parser "%s" PYTHONPATH=%s' % (
                    parser_spec,
                    os.environ.get('PYTHONPATH', ''))
                )
            logger.info("dogstream: parsing %s with %s (requested %s)" % (log_path, parse_func, parser_spec))
        else:
            logger.info("dogstream: parsing %s with default parser" % log_path)
        return cls(logger, log_path, parse_func, parse_args, class_based=class_based)

    def __init__(self, logger, log_path, parse_func=None, parse_args=(), class_based=False):
        self.logger = logger
        self.class_based = class_based

        # Apply LaconicFilter to avoid log flooding
        self.logger.addFilter(LaconicFilter("dogstream"))

        self.log_path = log_path
        self.parse_func = parse_func or self._default_line_parser
        self.parse_args = parse_args

        self._gen = None       # lazily-built TailFile generator, reused between checks
        self._values = None    # metric tuples collected during the current check()
        self._freq = 15  # Will get updated on each check()
        self._error_count = 0L
        self._line_count = 0L
        self.parser_state = {}  # shared mutable state handed to 4-arg parsers

    def check(self, agentConfig, move_end=True):
        """Tail the file to its end and return parsed metrics/events.

        Returns an aggregated dict (possibly with a "dogstreamEvents" key),
        or {} when no log path is configured.
        """
        if self.log_path:
            self._freq = int(agentConfig.get('check_freq', 15))
            self._values = []
            self._events = []

            # Build our tail -f
            if self._gen is None:
                self._gen = TailFile(self.logger, self.log_path, self._line_parser).tail(line_by_line=False, move_end=move_end)

            # read until the end of file
            try:
                self._gen.next()
                self.logger.debug("Done dogstream check for file %s, found %s metric points" % (self.log_path, len(self._values)))
            except StopIteration, e:
                self.logger.exception(e)
                self.logger.warn("Can't tail %s file" % self.log_path)

            check_output = self._aggregate(self._values)
            if self._events:
                check_output.update({"dogstreamEvents": self._events})
            return check_output
        else:
            return {}

    def _line_parser(self, line):
        """Parse one line, appending to self._values (metrics) / self._events."""
        try:
            # alq - Allow parser state to be kept between invocations
            # This means a new argument can be passed the custom parsing function
            # to store context that can be shared between parsing of lines.
            # One example is a running counter, which is incremented each time
            # a line is processed.
            parsed = None
            if self.class_based:
                parsed = self.parse_func.parse_line(line)
            else:
                try:
                    parsed = self.parse_func(self.logger, line, self.parser_state, *self.parse_args)
                except TypeError, e:
                    # Arity of parse_func is 3 (old-style), not 4
                    parsed = self.parse_func(self.logger, line)

            self._line_count += 1

            if parsed is None:
                return

            # Normalize to a list: a parser may return one datum or several.
            if isinstance(parsed, (tuple, dict)):
                parsed = [parsed]

            for datum in parsed:
                # Check if it's an event
                if isinstance(datum, dict):
                    # An event requires at least a title or a body
                    if 'msg_title' not in datum and 'msg_text' not in datum:
                        continue

                    # Populate the default fields
                    if 'event_type' not in datum:
                        datum['event_type'] = EventDefaults.EVENT_TYPE
                    if 'timestamp' not in datum:
                        datum['timestamp'] = time.time()
                    # Make sure event_object and aggregation_key (synonyms) are set
                    # FIXME when the backend treats those as true synonyms, we can
                    # deprecate event_object.
                    if 'event_object' in datum or 'aggregation_key' in datum:
                        datum['aggregation_key'] = datum.get('event_object', datum.get('aggregation_key'))
                    else:
                        datum['aggregation_key'] = EventDefaults.EVENT_OBJECT
                    datum['event_object'] = datum['aggregation_key']

                    self._events.append(datum)
                    continue

                # Otherwise, assume it's a metric
                try:
                    metric, ts, value, attrs = datum
                except:
                    continue

                # Validation
                invalid_reasons = []
                try:
                    # Bucket points into 15 second buckets
                    ts = (int(float(ts)) / self._freq) * self._freq
                    date = datetime.fromtimestamp(ts)
                    assert date.year > 1990
                except Exception:
                    invalid_reasons.append('invalid timestamp')

                try:
                    value = float(value)
                except Exception:
                    invalid_reasons.append('invalid metric value')

                if invalid_reasons:
                    self.logger.debug('Invalid parsed values %s (%s): "%s"',
                        repr(datum), ', '.join(invalid_reasons), line)
                else:
                    self._values.append((metric, ts, value, attrs))
        except Exception, e:
            self.logger.debug("Error while parsing line %s" % line, exc_info=True)
            self._error_count += 1
            self.logger.error("Parser error: %s out of %s" % (self._error_count, self._line_count))

    def _default_line_parser(self, logger, line):
        """Parse "metric timestamp value [key=val ...]" whitespace-separated lines."""
        original_line = line
        sep = ' '
        metric, _, line = partition(line.strip(), sep)
        timestamp, _, line = partition(line.strip(), sep)
        value, _, line = partition(line.strip(), sep)

        attributes = {}
        try:
            while line:
                keyval, _, line = partition(line.strip(), sep)
                key, val = keyval.split('=', 1)
                attributes[key] = val
        except Exception, e:
            logger.debug(traceback.format_exc())

        return metric, timestamp, value, attributes

    def _aggregate(self, values):
        """ Aggregate values down to the second and store as:
            {
                "dogstream": [(metric, timestamp, value, {key: val})]
            }
            If there are many values per second for a metric, take the median
        """
        output = []

        values.sort(key=point_sorter)

        for (timestamp, metric, host_name, device_name), val_attrs in groupby(values, key=point_sorter):
            attributes = {}
            vals = []
            for _metric, _timestamp, v, a in val_attrs:
                try:
                    v = float(v)
                    vals.append(v)
                    attributes.update(a)
                except:
                    self.logger.debug("Could not convert %s into a float", v)

            if len(vals) == 1:
                val = vals[0]
            elif len(vals) > 1:
                # NOTE(review): despite the docstring, this takes the *last*
                # value of the group, not the median.
                val = vals[-1]
            else: # len(vals) == 0
                continue

            metric_type = str(attributes.get('metric_type', '')).lower()
            if metric_type == 'gauge':
                val = float(val)
            elif metric_type == 'counter':
                val = int(val)

            output.append((metric, timestamp, val, attributes))

        if output:
            return {"dogstream": output}
        else:
            return {}
# Raised when a nagios perfdata file template cannot be compiled into a regex.
class InvalidDataTemplate(Exception): pass
class NagiosPerfData(object):
    """Base class for parsing nagios perfdata files (host and service).

    Subclasses set ``perfdata_field`` and may override ``_get_metric_prefix``.
    Each parser wraps a Dogstream that tails the configured perfdata file.
    """
    perfdata_field = '' # Should be overriden by subclasses
    metric_prefix = 'nagios'
    # Matches one "'label'=value[UOM];[warn];[crit];[min];[max]" perfdata pair.
    pair_pattern = re.compile(r"".join([
        r"'?(?P<label>[^=']+)'?=",
        r"(?P<value>[-0-9.]+)",
        r"(?P<unit>s|us|ms|%|B|KB|MB|GB|TB|c)?",
        r"(;(?P<warn>@?[-0-9.~]*:?[-0-9.~]*))?",
        r"(;(?P<crit>@?[-0-9.~]*:?[-0-9.~]*))?",
        r"(;(?P<min>[-0-9.]*))?",
        r"(;(?P<max>[-0-9.]*))?",
    ]))

    @classmethod
    def init(cls, logger, config):
        """Return host/service parsers configured via 'nagios_perf_cfg'."""
        nagios_perf_config = config.get('nagios_perf_cfg', None)
        parsers = []
        if nagios_perf_config:
            nagios_config = cls.parse_nagios_config(nagios_perf_config)

            host_parser = NagiosHostPerfData.init(logger, nagios_config)
            if host_parser:
                parsers.append(host_parser)

            service_parser = NagiosServicePerfData.init(logger, nagios_config)
            if service_parser:
                parsers.append(service_parser)
        return parsers

    @staticmethod
    def template_regex(file_template):
        """Compile a nagios $MACRO$ file template into a named-group regex.

        Raises InvalidDataTemplate when the template cannot be compiled.
        """
        try:
            # Escape characters that will be interpreted as regex bits
            # e.g. [ and ] in "[SERVICEPERFDATA]"
            regex = re.sub(r'[[\]*]', r'.', file_template)
            regex = re.sub(r'\$([^\$]*)\$', r'(?P<\1>[^\$]*)', regex)
            return re.compile(regex)
        except Exception, e:
            raise InvalidDataTemplate("%s (%s)"% (file_template, e))

    @staticmethod
    def underscorize(s):
        return s.replace(' ', '_').lower()

    @classmethod
    def parse_nagios_config(cls, filename):
        """Extract the perfdata file/template settings from a nagios config file."""
        output = {}
        keys = [
            'host_perfdata_file_template',
            'service_perfdata_file_template',
            'host_perfdata_file',
            'service_perfdata_file',
        ]

        # f is pre-initialized so the finally clause is safe even if open()
        # raises (the previous version hit a NameError in that case).
        f = None
        try:
            f = open(filename)
            for line in f:
                line = line.strip()
                if not line:
                    continue
                for key in keys:
                    if line.startswith(key + '='):
                        eq_pos = line.find('=')
                        if eq_pos:
                            output[key] = line[eq_pos + 1:]
                            break
        finally:
            if f is not None:
                f.close()

        return output

    def __init__(self, logger, line_pattern, datafile):
        if isinstance(line_pattern, (str, unicode)):
            self.line_pattern = re.compile(line_pattern)
        else:
            self.line_pattern = line_pattern
        self._dogstream = Dogstream(logger, datafile, self._parse_line)

    def _get_metric_prefix(self, data):
        # Should be overridded by subclasses
        return [self.metric_prefix]

    def _parse_line(self, logger, line):
        """Parse one perfdata log line into a list of metric tuples."""
        matched = self.line_pattern.match(line)
        output = []
        if matched:
            data = matched.groupdict()
            metric_prefix = self._get_metric_prefix(data)

            # Parse the prefdata values, which are a space-delimited list of:
            #   'label'=value[UOM];[warn];[crit];[min];[max]
            perf_data = data.get(self.perfdata_field, '').split(' ')
            for pair in perf_data:
                pair_match = self.pair_pattern.match(pair)
                if not pair_match:
                    continue
                else:
                    pair_data = pair_match.groupdict()

                label = pair_data['label']
                timestamp = data.get('TIMET', '')
                value = pair_data['value']
                attributes = {'metric_type': 'gauge'}

                if '/' in label:
                    # Special case: if the label begins
                    # with a /, treat the label as the device
                    # and use the metric prefix as the metric name
                    metric = '.'.join(metric_prefix)
                    attributes['device_name'] = label
                else:
                    # Otherwise, append the label to the metric prefix
                    # and use that as the metric name
                    metric = '.'.join(metric_prefix + [label])

                host_name = data.get('HOSTNAME', None)
                if host_name:
                    attributes['host_name'] = host_name

                optional_keys = ['unit', 'warn', 'crit', 'min', 'max']
                for key in optional_keys:
                    attr_val = pair_data.get(key, None)
                    if attr_val is not None and attr_val != '':
                        attributes[key] = attr_val

                output.append((
                    metric,
                    timestamp,
                    value,
                    attributes
                ))
        return output

    def check(self, agentConfig, move_end=True):
        return self._dogstream.check(agentConfig, move_end)
class NagiosHostPerfData(NagiosPerfData):
    """Parser for nagios *host* perfdata files."""
    perfdata_field = 'HOSTPERFDATA'

    @classmethod
    def init(cls, logger, nagios_config):
        """Return a parser when both template and file are configured, else None."""
        template = nagios_config.get('host_perfdata_file_template', None)
        datafile = nagios_config.get('host_perfdata_file', None)
        if not (template and datafile):
            return None
        return cls(logger, cls.template_regex(template), datafile)

    def _get_metric_prefix(self, line_data):
        # All host metrics live under "nagios.host.*"
        return [self.metric_prefix, 'host']
class NagiosServicePerfData(NagiosPerfData):
    """Parser for nagios *service* perfdata files."""
    perfdata_field = 'SERVICEPERFDATA'

    @classmethod
    def init(cls, logger, nagios_config):
        """Return a parser when both template and file are configured, else None."""
        template = nagios_config.get('service_perfdata_file_template', None)
        datafile = nagios_config.get('service_perfdata_file', None)
        if not (template and datafile):
            return None
        return cls(logger, cls.template_regex(template), datafile)

    def _get_metric_prefix(self, line_data):
        # Metrics are namespaced by the (underscorized) service description.
        parts = [self.metric_prefix]
        desc = line_data.get('SERVICEDESC', None)
        if desc:
            parts.append(desc.replace(' ', '_').lower())
        return parts
# Allow a smooth uninstall of previous version
# NOTE(review): empty old-style placeholder class; presumably kept so that
# references from a previously-installed agent version still resolve — confirm.
class RollupLP: pass
class DdForwarder(object):
QUEUE_SIZE = "queue_size"
QUEUE_COUNT = "queue_count"
RE_QUEUE_STAT = re.compile(r"\[.*\] Queue size: at (.*), (\d+) transaction\(s\), (\d+) KB")
def __init__(self, logger, config):
self.log_path = config.get('ddforwarder_log', '/var/log/ddforwarder.log')
self.logger = logger
self._gen = None
def _init_metrics(self):
self.metrics = {}
def _add_metric(self, name, value, ts):
if self.metrics.has_key(name):
self.metrics[name].append((ts, value))
else:
self.metrics[name] = [(ts, value)]
def _parse_line(self, line):
try:
m = self.RE_QUEUE_STAT.match(line)
if m is not None:
ts, count, size = m.groups()
self._add_metric(self.QUEUE_SIZE, size, round(float(ts)))
self._add_metric(self.QUEUE_COUNT, count, round(float(ts)))
except Exception, e:
self.logger.exception(e)
def check(self, agentConfig, move_end=True):
if self.log_path and os.path.isfile(self.log_path):
#reset metric points
self._init_metrics()
# Build our tail -f
if self._gen is None:
self._gen = TailFile(self.logger, self.log_path, self._parse_line).tail(line_by_line=False,
move_end=move_end)
# read until the end of file
try:
self._gen.next()
self.logger.debug("Done ddforwarder check for file %s" % self.log_path)
except StopIteration, e:
self.logger.exception(e)
self.logger.warn("Can't tail %s file" % self.log_path)
return { 'ddforwarder': self.metrics }
else:
self.logger.debug("Can't tail datadog forwarder log file: %s" % self.log_path)
return {}
def testDogStream():
import logging
logger = logging.getLogger("datadog")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
dogstream = Dogstream(logger)
while True:
events = dogstream.check({'api_key':'my_apikey', 'dogstream_log': sys.argv[1]}, move_end=True)
for e in events:
print "Event:", e
time.sleep(5)
def testddForwarder():
    """Manual harness: tail the forwarder log given as argv[1], print metrics every 5s."""
    import logging

    logger = logging.getLogger("datadog")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())

    config = {'api_key':'my_apikey', 'ddforwarder_log': sys.argv[1]}
    dd = DdForwarder(logger, config)
    # First check reads the whole file; subsequent checks read only new lines.
    m = dd.check(config, move_end=False)
    while True:
        print m
        time.sleep(5)
        m = dd.check(config)

if __name__ == '__main__':
    testddForwarder()
|
from copy import copy
from unittest.mock import Mock, patch
import networkx as nx
from pytest import fixture, raises
from smif.controller.modelrun import (ModelRunBuilder, ModelRunError,
ModelRunner)
@fixture(scope='function')
def config_data():
    """Config for a model run

    Returns a dict of mocked SosModel/scenario/sector-model objects shaped
    as ModelRunBuilder.construct() expects.
    """
    sos_model = Mock()
    sos_model.name = "test_sos_model"
    sos_model.parameters = {}

    climate_scenario = Mock()
    climate_scenario.name = 'climate'
    climate_scenario.deps = {}
    sos_model.scenario_models = {'climate': climate_scenario}

    energy_supply = Mock()
    energy_supply.name = 'energy_supply'
    energy_supply.deps = {}
    energy_supply.parameters = {}
    sos_model.models = {'energy_supply': energy_supply}

    # CLEANUP: a throwaway nx.DiGraph was previously built here (and oddly
    # attached the climate scenario to the 'energy_supply' node) but was never
    # referenced; the dead code has been removed.

    config = {
        'name': 'unique_model_run_name',
        'stamp': '2017-09-20T12:53:23+00:00',
        'description': 'a description of what the model run contains',
        'timesteps': [2010, 2011, 2012],
        'sos_model': sos_model,
        'scenarios': {
            'climate': 'RCP4.5'
        },
        'narratives': [
            Mock(data={'model_name': {'parameter_name': 0}}),
            Mock(data={'model_name': {'parameter_name': 0}})
        ],
        'strategies': [
            {
                'strategy': 'pre-specified-planning',
                'description': 'build_nuclear',
                'model_name': 'energy_supply',
                'interventions': [
                    {'name': 'nuclear_large', 'build_year': 2012},
                    {'name': 'carrington_retire', 'build_year': 2011}
                ]
            }
        ]
    }
    return config
@fixture(scope='function')
def model_run(config_data):
    """ModelRun built from config
    """
    # Build a real ModelRun through the builder so tests exercise the same
    # construction path as production code.
    builder = ModelRunBuilder()
    builder.construct(config_data)
    return builder.finish()
@fixture(scope='function')
def mock_model_run():
    """Minimal mock ModelRun
    """
    # Bare-bones sos_model: no parameters, no contained models.
    sos_model = Mock()
    sos_model.parameters = {}
    sos_model.models = []

    run = Mock()
    run.sos_model = sos_model
    run.strategies = []
    run.narratives = []
    run.model_horizon = [1]
    run.initialised = False
    return run
@fixture(scope='function')
def mock_store():
    """Minimal mock store
    """
    # Configure each read_* method to return a canned empty-ish response.
    canned = {
        'read_model_run': {'narratives': {}},
        'read_strategies': [],
        'read_all_initial_conditions': [],
        'read_sos_model': {'sector_models': ['sector_model_test']},
        'read_interventions': {},
    }
    store = Mock()
    for method_name, value in canned.items():
        setattr(store, method_name, Mock(return_value=value))
    return store
class TestModelRunBuilder:
    """Build from config
    """
    def test_builder(self, config_data):
        """Test basic properties
        """
        builder = ModelRunBuilder()
        builder.construct(config_data)
        modelrun = builder.finish()

        assert modelrun.name == 'unique_model_run_name'
        assert modelrun.timestamp == '2017-09-20T12:53:23+00:00'
        assert modelrun.model_horizon == [2010, 2011, 2012]
        assert modelrun.status == 'Built'
        assert modelrun.scenarios == {'climate': 'RCP4.5'}
        assert modelrun.narratives == config_data['narratives']
        assert modelrun.strategies == config_data['strategies']

    def test_builder_scenario_sosmodelrun_not_in_sosmodel(self, config_data):
        """Error from unused scenarios
        """
        # 'population' is selected here but absent from the SosModel's
        # scenario_models, so finish() must raise ModelRunError.
        config_data['scenarios'] = {
            'climate': 'RCP4.5',
            'population': 'high_population'
        }

        builder = ModelRunBuilder()
        builder.construct(config_data)
        with raises(ModelRunError) as ex:
            builder.finish()
        assert "ScenarioSet 'population' is selected in the ModelRun " \
            "configuration but not found in the SosModel configuration" in str(ex)
class TestModelRun:
    """Core ModelRun
    """
    def test_run_static(self, model_run, mock_store):
        """Call run
        """
        # Smoke test: a fully-built model run should execute without raising.
        model_run.run(mock_store)

    def test_run_timesteps(self, config_data):
        """should error that timesteps are empty
        """
        config_data['timesteps'] = []
        builder = ModelRunBuilder()
        builder.construct(config_data)
        model_run = builder.finish()
        store = Mock()
        with raises(ModelRunError) as ex:
            model_run.run(store)
        assert 'No timesteps specified' in str(ex)

    def test_serialize(self, config_data):
        """Serialise back to config dict
        """
        builder = ModelRunBuilder()
        builder.construct(config_data)
        model_run = builder.finish()

        # as_dict() should round-trip the input config, except the sos_model
        # object is replaced by a reference to its name.
        expected = copy(config_data)
        expected['sos_model'] = config_data['sos_model'].name  # expect a reference by name
        assert expected == model_run.as_dict()
class TestModelRunnerJobGraphs():
    """Cover JobGraph corner cases: the runner should schedule one
    before_model_run job per model plus one simulate job per
    (timestep, model), wired according to model dependencies.
    """
    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_single_timestep(self, mock_add, mock_store, mock_model_run):
        """
        a[before]
           |
           v
         a[sim]
        """
        model_a = Mock()
        model_a.name = 'model_a'
        model_a.deps = {}
        model_a.parameters = {}
        mock_model_run.sos_model.models = {
            model_a.name: model_a
        }
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        # inspect the job graph handed to the (patched) scheduler
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('before_model_run_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.successors('before_model_run_model_a'))
        expected = ['simulate_1_0_model_a']
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = []
        assert actual == expected

    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_multiple_timesteps(self, mock_add, mock_store, mock_model_run):
        """
        a[before]
          |   |
          v   V
        a[sim] a[sim]
        t=1    t=2
        """
        model_a = Mock()
        model_a.name = 'model_a'
        model_a.deps = {}
        model_a.parameters = {}
        mock_model_run.sos_model.models = {
            model_a.name: model_a
        }
        # two timesteps -> two simulate jobs fed by one before job
        mock_model_run.model_horizon = [1, 2]
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('before_model_run_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.successors('before_model_run_model_a'))
        expected = ['simulate_1_0_model_a', 'simulate_2_0_model_a']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_2_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_2_0_model_a'))
        expected = []
        assert actual == expected

    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_multiple_models(self, mock_add, mock_store, mock_model_run):
        """
        a[before]   b[before]   c[before]
          |           |           |
          v           V           V
        a[sim] ---> b[sim] ---> c[sim]
          |------------------>
        """
        model_a = Mock()
        model_b = Mock()
        model_c = Mock()
        model_a.name = 'model_a'
        model_b.name = 'model_b'
        model_c.name = 'model_c'
        model_a.parameters = {}
        model_b.parameters = {}
        model_c.parameters = {}
        # dependencies: a -> b, b -> c, a -> c
        dep_a_b = Mock()
        dep_a_b.source_model.name = 'model_a'
        dep_a_b.sink_model.name = 'model_b'
        dep_b_c = Mock()
        dep_b_c.source_model.name = 'model_b'
        dep_b_c.sink_model.name = 'model_c'
        dep_a_c = Mock()
        dep_a_c.source_model.name = 'model_a'
        dep_a_c.sink_model.name = 'model_c'
        model_a.deps = {
            'model_a': dep_a_b
        }
        model_b.deps = {
            'model_b': dep_b_c
        }
        model_c.deps = {
            'model_c': dep_a_c
        }
        mock_model_run.sos_model.models = {
            model_a.name: model_a,
            model_b.name: model_b,
            model_c.name: model_c
        }
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = ['simulate_1_0_model_b', 'simulate_1_0_model_c']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.predecessors('simulate_1_0_model_b'))
        expected = ['before_model_run_model_b', 'simulate_1_0_model_a']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.successors('simulate_1_0_model_b'))
        expected = ['simulate_1_0_model_c']
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_1_0_model_c'))
        expected = ['before_model_run_model_c', 'simulate_1_0_model_a', 'simulate_1_0_model_b']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.successors('simulate_1_0_model_c'))
        expected = []
        assert actual == expected
Add test checking that a cyclic job graph raises NotImplementedError
from copy import copy
from unittest.mock import Mock, patch
import networkx as nx
import pytest
from pytest import fixture, raises
from smif.controller.modelrun import (ModelRunBuilder, ModelRunError,
ModelRunner)
@fixture(scope='function')
def config_data():
    """Config dict for building a ModelRun, with mocked SosModel parts:
    one 'climate' scenario model and one 'energy_supply' sector model.
    """
    sos_model = Mock()
    sos_model.name = "test_sos_model"
    sos_model.parameters = {}
    climate_scenario = Mock()
    climate_scenario.name = 'climate'
    climate_scenario.deps = {}
    sos_model.scenario_models = {'climate': climate_scenario}
    energy_supply = Mock()
    energy_supply.name = 'energy_supply'
    energy_supply.deps = {}
    energy_supply.parameters = {}
    sos_model.models = {'energy_supply': energy_supply}
    # NOTE(review): this graph is built but never added to the returned
    # config, and the node is tagged with the climate scenario model —
    # looks vestigial; confirm before relying on it
    graph = nx.DiGraph()
    graph.add_node('energy_supply', model=climate_scenario)
    config = {
        'name': 'unique_model_run_name',
        'stamp': '2017-09-20T12:53:23+00:00',
        'description': 'a description of what the model run contains',
        'timesteps': [2010, 2011, 2012],
        'sos_model': sos_model,
        'scenarios': {
            'climate': 'RCP4.5'
        },
        'narratives': [
            Mock(data={'model_name': {'parameter_name': 0}}),
            Mock(data={'model_name': {'parameter_name': 0}})
        ],
        'strategies': [
            {
                'strategy': 'pre-specified-planning',
                'description': 'build_nuclear',
                'model_name': 'energy_supply',
                'interventions': [
                    {'name': 'nuclear_large', 'build_year': 2012},
                    {'name': 'carrington_retire', 'build_year': 2011}
                ]
            }
        ]
    }
    return config
@fixture(scope='function')
def model_run(config_data):
    """A fully-built ModelRun constructed from the config fixture."""
    run_builder = ModelRunBuilder()
    run_builder.construct(config_data)
    return run_builder.finish()
@fixture(scope='function')
def mock_model_run():
    """Minimal mock ModelRun with one timestep and no strategies/narratives."""
    stub_sos_model = Mock(parameters={}, models=[])
    return Mock(
        strategies=[],
        sos_model=stub_sos_model,
        narratives=[],
        model_horizon=[1],
        initialised=False,
    )
@fixture(scope='function')
def mock_store():
    """Minimal mock store stubbing the read methods the runner touches."""
    store = Mock()
    store.read_model_run.return_value = {'narratives': {}}
    store.read_strategies.return_value = []
    store.read_all_initial_conditions.return_value = []
    store.read_sos_model.return_value = {'sector_models': ['sector_model_test']}
    store.read_interventions.return_value = {}
    return store
class TestModelRunBuilder:
    """Build a ModelRun from a config dict.
    """
    def test_builder(self, config_data):
        """Basic properties should be copied over from the config.
        """
        builder = ModelRunBuilder()
        builder.construct(config_data)
        modelrun = builder.finish()
        assert modelrun.name == 'unique_model_run_name'
        assert modelrun.timestamp == '2017-09-20T12:53:23+00:00'
        assert modelrun.model_horizon == [2010, 2011, 2012]
        assert modelrun.status == 'Built'
        assert modelrun.scenarios == {'climate': 'RCP4.5'}
        assert modelrun.narratives == config_data['narratives']
        assert modelrun.strategies == config_data['strategies']

    def test_builder_scenario_sosmodelrun_not_in_sosmodel(self, config_data):
        """A scenario selected in the ModelRun but absent from the SosModel
        should raise ModelRunError on finish().
        """
        config_data['scenarios'] = {
            'climate': 'RCP4.5',
            'population': 'high_population'
        }
        builder = ModelRunBuilder()
        builder.construct(config_data)
        with raises(ModelRunError) as ex:
            builder.finish()
        assert "ScenarioSet 'population' is selected in the ModelRun " \
            "configuration but not found in the SosModel configuration" in str(ex)
class TestModelRun:
    """Core ModelRun behaviour: running and serialisation.
    """
    def test_run_static(self, model_run, mock_store):
        """A fully-built ModelRun should run against a store without error.
        """
        model_run.run(mock_store)

    def test_run_timesteps(self, config_data):
        """Running with an empty timestep list should raise ModelRunError.
        """
        config_data['timesteps'] = []
        builder = ModelRunBuilder()
        builder.construct(config_data)
        model_run = builder.finish()
        store = Mock()
        with raises(ModelRunError) as ex:
            model_run.run(store)
        assert 'No timesteps specified' in str(ex)

    def test_serialize(self, config_data):
        """as_dict() should round-trip the config dict.
        """
        builder = ModelRunBuilder()
        builder.construct(config_data)
        model_run = builder.finish()
        expected = copy(config_data)
        # the SosModel is serialised as a reference by name, not the object
        expected['sos_model'] = config_data['sos_model'].name
        assert expected == model_run.as_dict()
class TestModelRunnerJobGraphs():
    """Cover JobGraph corner cases: the runner should schedule one
    before_model_run job per model plus one simulate job per
    (timestep, model), wired according to model dependencies; cyclic
    dependencies are not supported.
    """
    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_single_timestep(self, mock_add, mock_store, mock_model_run):
        """
        a[before]
           |
           v
         a[sim]
        """
        model_a = Mock()
        model_a.name = 'model_a'
        model_a.deps = {}
        model_a.parameters = {}
        mock_model_run.sos_model.models = {
            model_a.name: model_a
        }
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        # inspect the job graph handed to the (patched) scheduler
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('before_model_run_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.successors('before_model_run_model_a'))
        expected = ['simulate_1_0_model_a']
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = []
        assert actual == expected

    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_multiple_timesteps(self, mock_add, mock_store, mock_model_run):
        """
        a[before]
          |   |
          v   V
        a[sim] a[sim]
        t=1    t=2
        """
        model_a = Mock()
        model_a.name = 'model_a'
        model_a.deps = {}
        model_a.parameters = {}
        mock_model_run.sos_model.models = {
            model_a.name: model_a
        }
        # two timesteps -> two simulate jobs fed by one before job
        mock_model_run.model_horizon = [1, 2]
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('before_model_run_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.successors('before_model_run_model_a'))
        expected = ['simulate_1_0_model_a', 'simulate_2_0_model_a']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = []
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_2_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_2_0_model_a'))
        expected = []
        assert actual == expected

    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_multiple_models(self, mock_add, mock_store, mock_model_run):
        """
        a[before]   b[before]   c[before]
          |           |           |
          v           V           V
        a[sim] ---> b[sim] ---> c[sim]
          |------------------>
        """
        model_a = Mock()
        model_b = Mock()
        model_c = Mock()
        model_a.name = 'model_a'
        model_b.name = 'model_b'
        model_c.name = 'model_c'
        model_a.parameters = {}
        model_b.parameters = {}
        model_c.parameters = {}
        # dependencies: a -> b, b -> c, a -> c
        dep_a_b = Mock()
        dep_a_b.source_model.name = 'model_a'
        dep_a_b.sink_model.name = 'model_b'
        dep_b_c = Mock()
        dep_b_c.source_model.name = 'model_b'
        dep_b_c.sink_model.name = 'model_c'
        dep_a_c = Mock()
        dep_a_c.source_model.name = 'model_a'
        dep_a_c.sink_model.name = 'model_c'
        model_a.deps = {
            'model_a': dep_a_b
        }
        model_b.deps = {
            'model_b': dep_b_c
        }
        model_c.deps = {
            'model_c': dep_a_c
        }
        mock_model_run.sos_model.models = {
            model_a.name: model_a,
            model_b.name: model_b,
            model_c.name: model_c
        }
        runner = ModelRunner()
        runner.solve_model(mock_model_run, mock_store)
        call_args = mock_add.call_args
        job_graph = call_args[0][0]
        actual = list(job_graph.predecessors('simulate_1_0_model_a'))
        expected = ['before_model_run_model_a']
        assert actual == expected
        actual = list(job_graph.successors('simulate_1_0_model_a'))
        expected = ['simulate_1_0_model_b', 'simulate_1_0_model_c']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.predecessors('simulate_1_0_model_b'))
        expected = ['before_model_run_model_b', 'simulate_1_0_model_a']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.successors('simulate_1_0_model_b'))
        expected = ['simulate_1_0_model_c']
        assert actual == expected
        actual = list(job_graph.predecessors('simulate_1_0_model_c'))
        expected = ['before_model_run_model_c', 'simulate_1_0_model_a', 'simulate_1_0_model_b']
        assert sorted(actual) == sorted(expected)
        actual = list(job_graph.successors('simulate_1_0_model_c'))
        expected = []
        assert actual == expected

    @patch('smif.controller.modelrun.JobScheduler.add')
    def test_jobgraph_interdependency(self, mock_add, mock_store, mock_model_run):
        """
        a[before]   b[before]
          |           |
          v           V
        a[sim] ---> b[sim]
             <---
        """
        model_a = Mock()
        model_b = Mock()
        model_a.name = 'model_a'
        model_b.name = 'model_b'
        model_a.parameters = {}
        model_b.parameters = {}
        # circular dependency: a -> b and b -> a
        dep_a_b = Mock()
        dep_a_b.source_model.name = 'model_a'
        dep_a_b.sink_model.name = 'model_b'
        dep_b_a = Mock()
        dep_b_a.source_model.name = 'model_b'
        dep_b_a.sink_model.name = 'model_a'
        model_a.deps = {
            'model_a': dep_a_b
        }
        model_b.deps = {
            'model_b': dep_b_a
        }
        mock_model_run.sos_model.models = {
            model_a.name: model_a,
            model_b.name: model_b,
        }
        runner = ModelRunner()
        # cyclic job graphs are not supported
        with pytest.raises(NotImplementedError):
            runner.solve_model(mock_model_run, mock_store)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import sys
import base64
from openerp.osv import osv, fields
from datetime import datetime
from openerp.tools import (DEFAULT_SERVER_DATETIME_FORMAT,
DEFAULT_SERVER_DATE_FORMAT)
from openerp import tools
from openerp.tools.translate import _
# --------
# Utility:
# --------
# TODO remove these functions, they do not belong here!
def _get_image(self, cr, uid, ids, name, args, context=None):
    ''' Functional-field getter: return the stored image for each record,
        resized via the OpenERP helper (medium size kept as-is).
        Records with no image map to False.
    '''
    result = dict.fromkeys(ids, False)
    for obj in self.browse(cr, uid, ids, context=context):
        result[obj.id] = tools.image_get_resized_images(
            obj.image, avoid_resize_medium=True)
    return result
def _set_image(self, cr, uid, item_id, name, value, args, context=None):
    ''' Functional-field setter: store the image, resized to the
        standard "big" size before writing.
    '''
    return self.write(cr, uid, [item_id], {
        'image': tools.image_resize_image_big(value)}, context=context)
def get_temp_filename(filename):
    ''' Build the path of a temp file used by the copy/paste helpers,
        inside the fashion addon's "temp" folder.
    '''
    import openerp
    temp_dir = os.path.join(openerp.__path__[0], 'addons', 'fashion', 'temp')
    return os.path.join(temp_dir, filename)
class fashion_season(osv.osv):
    '''Table that manages the seasons
    '''
    _name = 'fashion.season'
    _description = 'Season'
    _order = 'sequence,name'

    def set_obsolete(self, cr, uid, ids, context=None):
        ''' Set the obsolete flag on the selected seasons.
            The value written can be forced via context['obsolete']
            (defaults to True).
        '''
        # BUGFIX: calling with the default context=None crashed with
        # AttributeError on context.get
        if context is None:
            context = {}
        self.write(cr, uid, ids, {'obsolete': context.get('obsolete', True)})
        return True

    def set_not_obsolete(self, cr, uid, ids, context=None):
        ''' Clear the obsolete flag (delegates to set_obsolete).
        '''
        self.set_obsolete(cr, uid, ids, {'obsolete': False})
        return True

    _columns = {
        'sequence': fields.integer('Sequence'),
        'code': fields.char('Cod', size=10, required=True,
            help='Code used in fabric for join in the name'),
        'name': fields.char('Name', size=40, required=True),
        'note': fields.text('Note'),
        'obsolete': fields.boolean('Obsolete'),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_article(osv.osv):
    '''Table that manages the articles
    '''
    _name = 'fashion.article'
    _description = 'Article'
    _order = 'name'

    _columns = {
        'name': fields.char('Name', size=40, required=True),
        'note': fields.text('Note'),
        'code': fields.char('Code', size=1),
        #'measure_ids': fields.many2many(
        #    'fashion.form.measure', 'fashion_form_article_rel',
        #    'article_id', 'measure_id', 'Measures', readonly = False),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_characteristic(osv.osv):
    '''Table that manages the characteristics a form can have
    '''
    _name = 'fashion.form.characteristic'
    _description = 'Characteristic'
    _order = 'sequence,name'

    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_cost(osv.osv):
    '''Table that manages the cost items attachable to a form;
       records flagged "default" are auto-added when cost management
       is switched on (see fashion_form.set_cost_model)
    '''
    _name = 'fashion.form.cost'
    _description = 'Cost'
    _order = 'sequence,name'

    _columns = {
        'name': fields.char('Name', size = 40, required=True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        'cost': fields.float('Cost', digits=(12, 4)),
        'default': fields.boolean('Default'),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        'default': False,
    }
class fashion_form_accessory(osv.osv):
    '''Table that manages the accessories (extended below with the
       pricelist one2many once that model exists)
    '''
    _name = 'fashion.form.accessory'
    _description = 'Accessory'
    _order = 'sequence,name'

    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'gerber_char': fields.char('Gerber char', size = 1, required = False),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        'type': fields.selection([
            ('t', 'Cut'),
            ('a', 'Accessory'),
            ], 'Type', select=True),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        # normal accessories get a high number so they sort last
        'sequence': lambda *x: 1000,
    }
class fashion_form_accessory_pricelist(osv.osv):
    '''Table that manages the accessory pricelist (one line per
       supplier quotation, newest first)
    '''
    _name = 'fashion.form.accessory.pricelist'
    _description = 'Accessory pricelist'
    _order = 'supplier_id,create_date desc'

    # ------------------
    # Override function:
    # ------------------
    def name_get(self, cr, uid, ids, context=None):
        ''' Display name is the article name followed by the extra info
        '''
        res = []
        for item in self.browse(cr, uid, ids, context = context):
            res.append((item.id, "%s %s" % (item.name or '', item.extra_info or '')))
        return res

    _columns = {
        'name': fields.char('Article', size=70, required=False),
        'accessory_id':fields.many2one('fashion.form.accessory', 'Accessory',
            required=False, ondelete='cascade'),
        'supplier_id':fields.many2one('res.partner', 'Supplier',
            required=True, domain=[('supplier','=',True)]),
        'create_date': fields.datetime('Date', readonly=True),
        'um': fields.char('U.M.', size=5, required=False),
        'extra_info': fields.char('Extra info', size=40, required=False),
        'note': fields.text('Note'),
        'cost': fields.float('Cost', digits=(12, 4)),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_accessory(osv.osv):
    '''Second pass on fashion.form.accessory: add the one2many to the
       pricelist now that fashion.form.accessory.pricelist is defined
       (the *2many relation could not be declared in the first pass)
    '''
    _name = 'fashion.form.accessory'
    _inherit = 'fashion.form.accessory'

    _columns = {
        'pricelist_ids':fields.one2many('fashion.form.accessory.pricelist',
            'accessory_id', 'Pricelist', required=False),
    }
class fashion_form_fabric_composition(osv.osv):
    '''Table that manages the fabric composition per season
       (percentage composition and wash symbol, keyed by code)
    '''
    _name = 'fashion.form.fabric.composition'
    _description = 'Fabric'
    _rec_name = 'code'
    _order = 'code'

    _columns = {
        'code': fields.char('Code', size = 15, required=True),
        'perc_composition': fields.char('Percentage composition', size=60),
        'note': fields.text('Note'),
        'symbol': fields.char('Wash symbol', size=10),
        'season_id': fields.many2one('fashion.season', 'Season',
            required=True),
    }
class fashion_form_fabric(osv.osv):
    '''Table that manages the fabric
    '''
    _name = 'fashion.form.fabric'
    _description = 'Fabric'
    _rec_name = 'code'
    _order = 'code'

    # Button:
    def load_from_composition(self, cr, uid, ids, context=None):
        ''' Search last part of code in composition and override
            elements on fabric.
            Code >> XXX-CCC (CCC = composition code)
            Lookup is first by (season, code); if nothing matches,
            retried by code alone. Raises when neither matches.
        '''
        # TODO maybe better as onchange?
        fabric_proxy = self.browse(cr, uid, ids, context=context)[0]
        composition_pool = self.pool.get('fashion.form.fabric.composition')
        composition_ids = composition_pool.search(cr, uid, [
            ('season_id', '=', fabric_proxy.season_id.id),
            ('code', '=', fabric_proxy.code.split('-')[-1]),
            ], context=context)
        if not composition_ids: # search without season (last from accounting)
            composition_ids = composition_pool.search(cr, uid, [
                ('code', '=', fabric_proxy.code.split('-')[-1]),
                ], context=context)
        if composition_ids:
            # copy composition data onto this fabric
            composition_proxy = composition_pool.browse(
                cr, uid, composition_ids, context=context)[0]
            self.write(cr, uid, ids, {
                'perc_composition': composition_proxy.perc_composition,
                'symbol': composition_proxy.symbol,
                }, context=context)
        else:
            raise osv.except_osv(_('Error'), _("Season and code not found!"))
        return True

    #Override:
    def name_get(self, cr, uid, ids, context = None):
        ''' Display name is "<code>-[<season code>] <note>"
        '''
        res = []
        for fabric in self.browse(cr, uid, ids, context = context):
            res.append((fabric.id, "%s-[%s] %s" % (
                fabric.code,
                fabric.season_id.code if fabric.season_id else "",
                fabric.note or '')))
        return res

    _columns = {
        'supplier_id': fields.many2one('res.partner', 'Fabric Supplier'),
        'article_code': fields.char('Fabric Article code', size = 50),
        'code': fields.char('Code', size = 15, required=True),
        #'name': fields.char('Name', size = 20),
        #'cost':
        #'composition': fields.char('Composition', size = 60),
        'perc_composition': fields.char('Percentage composition', size=60),
        'note': fields.text('Note'),
        #'desc_cx': fields.char('Description CX', size = 80),
        'symbol': fields.char('Wash symbol', size=10),
        'season_id': fields.many2one('fashion.season', 'Season'),
        'test': fields.boolean('Test fabric',
            help='This fabric is used for a model testing, maybe it won\'t be produced!'),
        'um': fields.char('U.M.', size=5),
        'cost': fields.float('Cost', digits=(10, 4)),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        # unit of measure defaults to metres
        'um': lambda *x: 'MT'
    }
class fashion_form_stitch(osv.osv):
    '''Table that manages the stitch (seam) types
    '''
    _name = 'fashion.form.stitch'
    _description = 'Stitch' # seams ("cuciture")
    _order = 'sequence,name'

    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_measure(osv.osv):
    '''Table that manages the measures (identified by a letter
       referenced on the technical draw)
    '''
    _name = 'fashion.form.measure'
    _description = 'Measure'
    _order = 'name'

    _columns = {
        'letter': fields.char('Letter', size = 1),
        'name': fields.char('Description', size = 40, required = True),
        'note': fields.text('Note'),
        # Legacy import link (MS Access record ID):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form(osv.osv):
''' Table that manages the form
'''
_name = 'fashion.form'
_inherits = {'product.product': 'product_id', }
#_inherit = 'mail.thread' # link to messages
_order = 'model_article,model_number desc,model_customer,model,review desc'
_rec_name = 'model'
_default_extension = 'jpg'
# --------------------
# On change functions:
# --------------------
def on_change_model(self, cr, uid, ids, model, review, context=None):
''' Split model code in all the part
'''
res = {'value': {}}
if not model:
return res
if not review:
review = 0
model = model.upper()
res['value']['model'] = model
res['value']['name'] = "%s.%s" % (model, review)
if model[0:1].isalpha():
if model[1:2].isalpha():
model_customer = model[0:1]
model_article = model[1:2]
else:
model_customer = False
model_article = model[0:1]
else:
res['warning'] = {
'title': _('warning'),
'message': _('Error: Model must start with letter'),
}
res['value']['model_customer'] = model_customer
res['value']['model_article'] = model_article
model_number = ''
i = 2 if model_customer else 1
for c in model[2 if model_customer else 1:]:
if c.isdigit():
i += 1
model_number += c
else:
break
res['value']['model_number'] = int(model_number) if model_number.isdigit() else 0
if res['value']['model_number'] and len(model)>i and model[i] == 'C':
res['value']['conformed'] = True
i += 1
else:
res['value']['conformed'] = False
res['value']['model_revision'] = model[i:] or False
return res
# ------------------
# Utility functions:
# ------------------
# Naming function:
def _get_form_name(self, model, review):
''' Return name of form element
'''
return "%s.%s" % (model, review)
def _get_draw_image_name(self, obj):
''' Return name of image from browese obj passed
'''
return ("%s.%s" % (self._get_form_name(obj.model, obj.review), self._default_extension)).lower()
# Image function:
def _get_draw_image_type(self, field_name):
''' Return type of image depend on draw field name
'''
return field_name[-1].lower()
def _load_image(self, name, type_image):
''' Load image from file:
'''
path = os.path.expanduser(os.path.join("~/etl/fashion/image", type_image)) # TODO parametrize
filename = os.path.join(path, name)
try:
f = open(filename, 'rb')
img = base64.encodestring(f.read())
f.close()
except:
img = False
return img
def _unload_image(self, name, value, type_image):
''' Unload image to file:
'''
path = os.path.expanduser(os.path.join("~/etl/fashion/image", type_image)) # TODO parametrize
filename = os.path.join(path, name)
try:
f = open(filename, 'wb')
f.write(base64.decodestring(value))
f.close()
try: # Set parameter for update
os.chmod(filename, 0777)
os.chown(filename, -1, 1000)
except:
return True
except:
return False
return True
# ------------------
# Override function:
# ------------------
    def create(self, cr, uid, vals, context=None):
        """
        Create a new fashion.form record, exploding the "model" code into
        its components (customer, article, number, conformed, revision)
        via on_change_model so records created outside the UI (imports,
        XML-RPC) stay consistent with the onchange behaviour.
        @param cr: cursor to database
        @param uid: id of current user
        @param vals: provides a data for new record
        @param context: context arguments, like lang, time zone
        @return: returns a id of new record
        """
        # Explode model element ("name" created in onchange "model.review")
        vals.update(self.on_change_model(
            cr, uid, 0,
            vals.get('model', False),
            vals.get('review', 0),
            context=context)['value'])
        return super(fashion_form, self).create(
            cr, uid, vals, context=context)
    def write(self, cr, uid, ids, vals, context=None):
        """
        Update record(s) comes in {ids}, with new value comes as {vals}.
        When "model" or "review" change, the derived fields ("name" and
        the exploded model components) are recomputed via on_change_model,
        reading the missing half from the first record in {ids}.
        @param cr: cursor to database
        @param uid: id of current user
        @param ids: list of record ids to be update
        @param vals: dict of new values to be set
        @param context: context arguments, like lang, time zone
        @return: True on success, False otherwise
        """
        # Test if one is modified, take data from database for other value
        if 'model' in vals or 'review' in vals: # change also name and explode
            form_proxy = self.browse(cr, uid, ids, context = context)[0]
            # Explore model element:
            vals.update(self.on_change_model(
                cr, uid, 0,
                vals.get('model', form_proxy.model),
                vals.get('review', form_proxy.review),
                context=context)['value'])
        return super(fashion_form, self).write(
            cr, uid, ids, vals, context=context)
# ------------
# Button event
# ------------
    def open_form_item(self, cr, uid, ids, context=None):
        ''' Button for opening the record detail from the kanban view:
            returns an act_window on this same record, as a dialog.
        '''
        return {
            'name': _('Form detail'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'fashion.form',
            'res_id': ids[0],
            'view_id': False,
            'views': [(False, 'form')],
            'target': 'new',
            'domain': [('id','=',ids[0])],
            'context': {},
            'type': 'ir.actions.act_window',
            }
def reset_duplicate_characteristic(self, cr, uid, ids, context=None):
''' Remove file used for copy paste operations
'''
fn = get_temp_filename("%s.car.dat" % uid)
try:
os.remove(fn)
except:
return False
return True
def paste_duplicate_characteristic(self, cr, uid, ids, context=None):
''' Paste operation in form
'''
fn = get_temp_filename("%s.car.dat" % uid)
try:
f = open(fn, "r")
except:
# TODO Comunicate error?
return False
item_ids = [int(item) for item in f]
f.close()
characteristic_pool = self.pool.get('fashion.form.characteristic.rel')
for item in characteristic_pool.browse(cr, uid, item_ids, context=context):
characteristic_pool.create(cr, uid, {
'name': item.name,
'sequence': item.sequence,
'form_id': ids[0],
'characteristic_id': item.characteristic_id.id if item.characteristic_id else False,
'lenght': item.lenght,
'old_name': item.old_name,
'stitch_type_id': item.stitch_type_id.id if item.stitch_type_id else False,
'stitch_verse_id': item.stitch_verse_id.id if item.stitch_verse_id else False,
'stitch_cut_id': item.stitch_cut_id.id if item.stitch_cut_id else False,
'stitch_top_id': item.stitch_top_id.id if item.stitch_top_id else False,
'stitch_top_type_id': item.stitch_top_type_id.id if item.stitch_top_type_id else False,
'bindello': item.bindello,
}, context=context)
self.reset_duplicate_characteristic(cr, uid, ids, context=context)
return True
def reset_duplicate_accessory(self, cr, uid, ids, context=None):
''' Remove file used for copy paste operations
'''
fn = get_temp_filename("%s.acc.dat" % uid)
try:
os.remove(fn)
except:
return False
return True
def paste_duplicate_accessory(self, cr, uid, ids, context=None):
''' Paste operation in form
'''
fn = get_temp_filename("%s.acc.dat" % uid)
try:
f = open(fn, "r")
except:
# TODO Comunicate error?
return False
item_ids = [int(item) for item in f]
f.close()
accessory_pool = self.pool.get('fashion.form.accessory.rel')
for item in accessory_pool.browse(cr, uid, item_ids, context=context):
accessory_pool.create(cr, uid, {
'form_id': ids[0],
'sequence': item.sequence,
'accessory_id': item.accessory_id.id if item.accessory_id else False,
'fabric_id': item.fabric_id.id if item.fabric_id else False,
'name': item.name,
'code': item.code,
'um': item.um,
'quantity': item.quantity,
'currency': item.currency,
'note': item.note,
'gerber_name': item.gerber_name,
'gerber_desc': item.gerber_desc,
'gerber_h': item.gerber_h,
'gerber_l': item.gerber_l,
'supplier_id': item.supplier_id.id if item.supplier_id else False,
'pricelist_id': item.pricelist_id.id if item.pricelist_id else False,
'tot_cost': item.tot_cost,
'color': item.color,
'h': item.h,
}, context=context)
self.reset_duplicate_accessory(cr, uid, ids, context=context)
return True
    def set_not_cost_model(self, cr, uid, ids, context=None):
        ''' Disable cost management on this form
        '''
        return self.write(cr, uid, ids, {'model_for_cost': False}, context=context)
    def set_cost_model(self, cr, uid, ids, context=None):
        ''' Enable cost management on this form, pre-loading the cost
            lines flagged as default (docstring fixed: it previously
            claimed the opposite of what the method does)
        '''
        # Set default fixed cost in list:
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        cost_list_ids = [item.cost_id.id for item in form_proxy.cost_rel_ids]
        cost_pool = self.pool.get('fashion.form.cost')
        default_ids = cost_pool.search(cr, uid, [('default','=',True)], context=context)
        for cost in cost_pool.browse(cr, uid, default_ids, context=context):
            if cost.id not in cost_list_ids:
                # Create cost and pricelist:
                self.pool.get('fashion.form.cost.rel').create(cr, uid, {
                    'form_id': ids[0],
                    'cost_id': cost.id,
                    'value': cost.cost or 0.0
                    }, context=context)
        return self.write(cr, uid, ids, {'model_for_cost': True}, context=context)
    def button_refresh(self, cr, uid, ids, context=None):
        ''' Dummy action: returning True makes the client reload the form
        '''
        return True
def create_update_header(self, cr, uid, ids, context=None):
''' Create a particular line for header (tg. values)
> fashion.form.measure.rel
'''
try:
# test if there's yet a header line
found_id = False
form_proxy = self.browse(cr, uid, ids, context=context)[0]
for measure in form_proxy.measure_rel_ids:
if measure.header:
found_id = measure.id
break
start = int(form_proxy.size_base or '42') - 2 * ((form_proxy.col_ref or 3) - 1)
data = {
'header': True,
'sequence': 0,
'form_id': form_proxy.id,
'measure_id': False,
'name': _('Header'),
'size_1': "Tg.%s" % (start),
'size_2': "Tg.%s" % (start + 2),
'size_3': "Tg.%s" % (start + 4),
'size_4': "Tg.%s" % (start + 6),
'size_5': "Tg.%s" % (start + 8),
'size_6': "Tg.%s" % (start + 10),
'size_7': "Tg.%s" % (start + 12),
'size_8': "Tg.%s" % (start + 14),
'size_9': "Tg.%s" % (start + 16),
'size_10': "Tg.%s" % (start + 18),
'size_11': "Tg.%s" % (start + 20),
'size_12': "Tg.%s" % (start + 22),
'size_13': "Tg.%s" % (start + 24),
'visible': False,
'real': False,
}
measure_pool = self.pool.get('fashion.form.measure.rel')
if found_id: # Update
measure_pool.write(cr, uid, found_id, data, context=context)
else: # Create a header elements:
measure_pool.create(cr, uid, data, context=context)
except:
return False # if error no creation
return True
def insert_article(self, cr, uid, ids, context=None):
'''Insert the measure of article
Delete all lines before and recreate
'''
form_proxy = self.browse(cr, uid, ids, context=context)[0]
# test if article is selected:
if not form_proxy.article_id:
return True
# Delete all items:
res_pool = self.pool.get('fashion.form.measure.rel')
res_ids = res_pool.search(
cr, uid, [('form_id', '=', ids[0])], context=context)
res_pool.unlink(cr, uid, res_ids, context=context)
# create header line:
self.create_update_header(cr, uid, ids, context=context)
# after load article item list:
for item_id in [l.measure_id.id for l in form_proxy.article_id.fashion_measure_ids]:
res_pool.create(cr, uid, {
'measure_id': item_id,
'form_id': ids[0],
'visible': True,
}, context=context)
return True
    def empty_article(self, cr, uid, ids, context=None):
        ''' Keep current list but empty all measure

            Blanks size columns of every non-header measure line of the
            form, leaving the lines themselves in place.

            NOTE(review): columns 1-12 are blanked except column 3, which
            is preserved (presumably the reference size column — confirm
            against col_ref usage); size_13 is never cleared here, unlike
            fashion.form.measure.rel.clean_measure() — verify intended.
        '''
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        for item in form_proxy.measure_rel_ids:
            if item.header: # jump header
                continue
            data = {}
            for col in range(1, 13):
                if col == 3:
                    continue
                data["size_%s" % col] = False
            self.pool.get('fashion.form.measure.rel').write(
                cr, uid, item.id, data, context=context)
        return True
    def reload_measure(self, cr, uid, ids, context=None):
        ''' Delete all measure list
            Create empty list depending on the article selected.

            NOTE(review): this iterates article_id.measure_ids and stores
            measure.id into 'measure_id', while insert_article() iterates
            article_id.fashion_measure_ids and stores rel.measure_id.id.
            Confirm that measure_ids holds fashion.form.measure records,
            otherwise 'measure_id' receives a relation-row id here.
        '''
        # Get current record:
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        if not form_proxy.article_id:
            return False # TODO report the error: no article is present
        # delete all elements:
        measure_pool = self.pool.get('fashion.form.measure.rel')
        measure_ids = measure_pool.search(cr, uid, [('form_id','=',ids[0])], context = context)
        measure_pool.unlink(cr, uid, measure_ids, context = context)
        # Loop in all measure of the article selected:
        for measure in form_proxy.article_id.measure_ids:
            measure_pool.create(cr, uid,
                {'form_id': ids[0],
                 'measure_id': measure.id,
                }, context = context)
        return True
def modify_draw_image(self, cr, uid, item_id, context=None):
''' Call url if format:
fashion://name of image (in format model.version.extension)
item_id: ID of the image selected
context: 'side': 'A' or 'side': 'B'
'''
form_proxy = self.browse(cr, uid, item_id, context=context)[0]
final_url = (r"fashion://%s/%s.%s.%s" % (
context.get('side', 'A'),
form_proxy.model,
form_proxy.review,
self._default_extension,
)).lower()
return {
'type': 'ir.actions.act_url',
'url':final_url,
'target': 'new'
}
def reset_image(self, cr, uid, ids, context=None):
''' Reset form image
'''
try:
self.write(cr, uid, ids, {'draw_image_%s' % (context.get('side').lower()):False},context=context)
except:
pass
return True
#============================#
# Workflow Activity Function #
#============================#
    def form_draft(self, cr, uid, ids, context=None):
        '''Workflow activity: move the form(s) to the 'draft' state.'''
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
    def form_sample(self, cr, uid, ids, context=None):
        '''Workflow activity: move the form(s) to the 'sample' state.'''
        return self.write(cr, uid, ids, {'state': 'sample'}, context=context)
    def form_ok(self, cr, uid, ids, context=None):
        '''Workflow activity: move the form(s) to the 'ok' (production) state.'''
        return self.write(cr, uid, ids, {'state': 'ok'}, context=context)
    def form_produced(self, cr, uid, ids, context=None):
        '''Workflow activity: move the form(s) to the 'produced' state.'''
        return self.write(cr, uid, ids, {'state': 'produced'}, context=context)
    def form_discarded(self, cr, uid, ids, context=None):
        '''Workflow activity: move the form(s) to the 'discarded' state.'''
        return self.write(cr, uid, ids, {'state': 'discarded'}, context=context)
# ----------------
# Fields functions
# ----------------
def _get_sum_items(self, cr, uid, ids, name, args, context=None):
''' Calculate total sum for costs (cost list and accessory)
'''
res = {}
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = {}
res[obj.id]['sum_accessory'] = 0.0
res[obj.id]['sum_cost'] = 0.0
if obj.model_for_cost: # calculate only if it's a model (for speed)
for accessory in obj.accessory_rel_ids:
res[obj.id]['sum_accessory'] += accessory.tot_cost
for cost in obj.cost_rel_ids:
res[obj.id]['sum_cost'] += cost.value
res[obj.id]['sum_extra_cost'] = res[obj.id]['sum_cost'] + res[obj.id]['sum_accessory']
else:
res[obj.id]['sum_extra_cost'] = 0.0
return res
    def _get_draw_image(self, cr, uid, ids, name, args, context=None):
        ''' Read image from file according to name and version format:
            MODEL.VERSION.ext

            Field function for the draw_image_* binary fields: the image
            is not stored in the database but loaded from disk through
            self._load_image (defined elsewhere in this model).  Records
            whose image cannot be resolved keep the False default.
        '''
        res = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            res[obj.id] = self._load_image(
                self._get_draw_image_name(obj),
                self._get_draw_image_type(name)) # TODO parametrize
        return res
    def _set_draw_image(self, cr, uid, item_id, name, value, args, context=None):
        ''' Write image passed to file

            Inverse field function for the draw_image_* fields: persists
            ``value`` on disk via self._unload_image instead of in the
            database.  NOTE(review): ``item_id`` is a single id here
            (browse is used without [0]) — confirm callers never pass a
            list.
        '''
        obj_proxy = self.browse(cr, uid, item_id, context=context)
        self._unload_image(
            self._get_draw_image_name(obj_proxy), value,
            self._get_draw_image_type(name)) # TODO test return value
# Resizing function:
def _get_resized_image(self, cr, uid, ids, name, args, context=None):
''' Resize defaulf draw_image_a
'''
type_of_image = name.split("_")[-1] # from field name (last block)
if type_of_image == 'medium':
width = 800
elif type_of_image == 'small':
width = 200
else:
width = 64
res = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = tools.image_resize_image(
obj.draw_image_a, size=(width, None), encoding='base64',
filetype=self._default_extension, avoid_if_small=True) # 'PNG'
return res
    def _set_resized_image(self, cr, uid, item_id, name, value, args, context=None):
        ''' Store image in original field: draw_image_a
            (call field function for file image)

            All resized variants write back to the same source field;
            the resize happens again on the next read.
        '''
        return self.write(cr, uid, [item_id], {'draw_image_a': value, }, context=context)
    def invoice_print(self, cr, uid, ids, context=None):
        ''' This function prints the invoice and mark it as sent, so that we
            can see more easily the next step of the workflow

            NOTE(review): this looks copy-pasted from account.invoice —
            it writes a 'sent' flag and prints the 'account.invoice'
            report, neither of which appears related to fashion.form.
            Confirm this method is actually used on this model.
        '''
        assert len(ids) == 1, 'This option should only be used for a single id at a time.'
        self.write(cr, uid, ids, {'sent': True}, context=context)
        datas = {
             'ids': ids,
             'model': 'account.invoice',
             'form': self.read(cr, uid, ids[0], context=context)
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'account.invoice',
            'datas': datas,
            'nodestroy' : True
        }
def set_obsolete(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'obsolete': context.get('obsolete',True)})
return True
    def set_not_obsolete(self, cr, uid, ids, context=None):
        ''' Clear the obsolete flag on the forms.

            NOTE(review): the dict is passed as the *context* positional
            argument of set_obsolete, which reads the flag from it — this
            works, but the caller's real context is dropped.
        '''
        self.set_obsolete(cr, uid, ids, {'obsolete':False})
        return True
    # Detail information functions:
    def _get_detail_informations(self, cr, uid, ids, fields, args, context=None):
        ''' Get detail information about fabric and customer

            Field function (multi='detail_info'): builds one-name-per-line
            text summaries of the partner and fabric detail lines.  The
            companion many2one fields are always False here — they exist
            only to hook fnct_search (_search_detail_info).

            NOTE: the parameter name ``fields`` shadows the ORM fields
            module inside this function (harmless here, it is unused).
        '''
        res = {}
        for form in self.browse(cr, uid, ids, context=context):
            res[form.id] = {}
            res[form.id]['detail_info_partner'] = ''
            res[form.id]['detail_info_fabric'] = ''
            res[form.id]['detail_partner'] = False # only for search
            res[form.id]['detail_fabric'] = False # only for search
            for detail in form.partner_rel_ids:
                res[form.id]['detail_info_partner'] += "%s\n" % (
                    detail.partner_id.name if detail.partner_id else "?",
                    )
                res[form.id]['detail_info_fabric'] += "%s\n" % (
                    detail.fabric_id.code if detail.fabric_id else "?",
                    )
        return res
def _search_detail_info(self, cr, uid, obj, name, args, context=None):
''' Search in detail information seeking partner
'''
if name == 'detail_partner':
field_name = 'partner_id'
else: # detail_fabric
field_name = 'fabric_id'
try:
search_id = args[0][2]
cr.execute("""
SELECT DISTINCT form_id
FROM fashion_form_partner_rel
WHERE %s = %s;
""" % (field_name, search_id))
return [('id', 'in',
[item[0] for item in cr.fetchall()])]
except:
return [('id', 'in', [])] # if error
_columns = {
'model': fields.char('Model', size=10, required=True),
'customer_code': fields.char('Customer code', size=18),
'size_base': fields.char('Size', size=30,
help='Basic size reference, ex:42', required=True),
'size_measure': fields.char('Column for feedback', size=30,
help='Size basis for the measurement'),
'review': fields.integer('Review', help='Revision of the main model',
required=True),
'date': fields.date('Date', help='Date of revision'),
'create_date': fields.datetime('Create date', readonly=True),
'write_date': fields.datetime('Date Last modify', readonly=True),
'write_uid': fields.many2one('res.users', 'by User', readonly=True),
'original': fields.char('Original', size=80),
'base_id': fields.many2one('fashion.form', 'Base form',
help="Duplicated from"),
'base_name': fields.char('Duplicated form', size=40),
'h_lining': fields.char('Height lining', size=10),
'mt_lining': fields.char('Meters lining', size=10),
'cost_lining': fields.float('Cost lining', digits=(10,2)),
'conformed': fields.boolean('Conformed',
help='Indicates that the model uses standardized sizes'),
'start': fields.integer('Start size',
help='Departure for the standardized sizes'),
'ironing': fields.text('Ironing'),
'area': fields.char('Area', size=30, help='Link the table Gerber'),
'user_id': fields.many2one('res.users', 'User'),
'cut': fields.char('Size n.', size=40),
'size': fields.text('Sizes'),
'colors': fields.char('Colors', size=40),
'article_id': fields.many2one('fashion.article', 'Article'),
'season_id': fields.many2one('fashion.season', 'Season'),
'obsolete': fields.boolean('Obsolete',
help='Indicates that the form is old'),
'reactivate': fields.boolean('Not Obsolete',
help='Indicates that the form is not old'),
'old_model': fields.boolean('Old Model'),
'show_old_model': fields.boolean('Show old model'),
'washing': fields.text('Washing'),
'model_for_cost': fields.boolean('Model for cost',
help='Indicates that this form is use for create a pricelist'
' elements'),
'col_ref': fields.selection([
(1,'col 1'), (2,'col 2'), (3,'col 3'), (4,'col 4'), (5,'col 5'),
(6,'col 6'), (7,'col 7'), (8,'col 8'), (9,'col 9'), (10,'col 10'),
(11,'col 11'), (12,'col 12'),
], 'Column reference', select=True, required=True),
# Function for totals:
'sum_accessory': fields.function(_get_sum_items,
string="Total accessory",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of the accessory list (see page Accessory for details)",
),
'sum_cost': fields.function(_get_sum_items, string="Total cost list",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of costs in the list on the left"),
'sum_extra_cost': fields.function(_get_sum_items,
string="Total extra cost",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of accessory cost and cost list "
"(no fabric in this total)"),
# Image:
'draw_image_a': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Draw Image A", type="binary",
help="Image for draw side A. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
'draw_image_b': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Draw Image B", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
# Photos:
'draw_image_c': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Photo", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
'draw_image_d': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Photo", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
# Resize dinamically images:
'draw_image_a_medium': fields.function(_get_resized_image,
fnct_inv=_set_resized_image,
string="Medium-sized image", type="binary",
help="Medium-sized image of the product. It is automatically "
"resized as a 800px large image, with aspect ratio preserved "
"only when the image exceeds one of those sizes. Use this "
"field in form views or some kanban views."),
'draw_image_a_small': fields.function(_get_resized_image,
fnct_inv=_set_resized_image,
string="Small-sized image", type="binary",
help="Small-sized image of the product. It is automatically"
"resized as a 128px large image, with aspect ratio preserved."
"Use this field anywhere a small image is required."),
# Inherit fields:
'product_id': fields.many2one('product.product', 'Product',
ondelete = "restrict", required=True,
help="Inherits value for link form to a product"),
# Details fields (and search)
'detail_info_partner': fields.function(
_get_detail_informations, method=True, type='text',
string='Detail customer', store=False, multi='detail_info'),
'detail_info_fabric': fields.function(
_get_detail_informations, method=True, type='text',
string='Detail fabric', store=False, multi='detail_info'),
'detail_partner': fields.function(
_get_detail_informations, method=True,
type='many2one', relation='res.partner',
string='Detail partner',
fnct_search=_search_detail_info,
store=False, multi='detail_info'),
'detail_fabric': fields.function(
_get_detail_informations, method=True,
type='many2one', relation='fashion.form.fabric',
string='Detail fabric',
fnct_search=_search_detail_info,
store=False, multi='detail_info'),
# Explosion of model (on change setted)
'model_customer': fields.char('Sigla cliente', size=1),
'model_article': fields.char('Sigla articolo', size=1),
'model_number': fields.integer('Numero modello'),
'model_revision': fields.char('Revisione', size=3),
# Workflow fields:
'state': fields.selection([
('draft', 'Draft'),
('sample', 'Sample'),
('ok', 'Ok production'),
('produced', 'Produced'),
('discarded', 'Discarded'),
], 'State', select=True),
# Link di importazione:
'access_id': fields.integer('Access ID',
help="ID Importazione che tiene il link"),
}
_defaults = {
'date': lambda *a: datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT),
'state': lambda *a: 'draft',
'user_id': lambda s, cr, uid, ctx: uid,
'old_model': lambda *x: False,
'show_old_model': lambda *x: False,
'col_ref': lambda *a: 3,
'model_for_cost': False,
}
# -----------------------------------------------------------------------------
# Object relations
# -----------------------------------------------------------------------------
class fashion_form_measure_rel(osv.osv):
    '''Relation between a form and its measures (one row per measure line,
    header line first).
    '''
    _name = 'fashion.form.measure.rel'
    _description = 'Measure relation'
    _order = 'header desc,sequence,id'

    def clean_measure(self, cr, uid, ids, context=None):
        ''' Blank every size column (size_1 .. size_13) and the 'real'
        flag on the lines passed, keeping the lines themselves.
        '''
        values = dict(
            [('size_%s' % column, False) for column in range(1, 14)],
            real=False)
        return self.write(cr, uid, ids, values, context=context)

    _columns = {
        'header': fields.boolean('Header'),
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'measure_id': fields.many2one('fashion.form.measure', 'Measure'),
        'name': fields.text('Description'),
        'size_1': fields.char('Size 1', size = 10),
        'size_2': fields.char('Size 2', size = 10),
        'size_3': fields.char('[Size 3]', size = 10),
        'size_4': fields.char('Size 4', size = 10),
        'size_5': fields.char('Size 5', size = 10),
        'size_6': fields.char('Size 6', size = 10),
        'size_7': fields.char('Size 7', size = 10),
        'size_8': fields.char('Size 8', size = 10),
        'size_9': fields.char('Size 9', size = 10),
        'size_10': fields.char('Size 10', size = 10),
        'size_11': fields.char('Size 11', size = 10),
        'size_12': fields.char('Size 12', size = 10),
        'size_13': fields.char('Size 13', size = 10),
        'visible': fields.boolean('Visible', size = 10,
            help='Indicates is the size is visible in the form reply'),
        'real': fields.char('Real', size = 10),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'header': lambda *x: False,
        'sequence': lambda *x: 0,
        }
class fashion_form_characteristic_rel(osv.osv):
    '''Table that manage the relation characteristic/form
    '''
    _name = 'fashion.form.characteristic.rel'
    _description = 'Form characteristic relation'
    _order = 'sequence,id'

    # ----------------
    # On change event:
    # ----------------
    def on_change_upper_characteristic(self, cr, uid, ids, name, context=None):
        ''' Force the characteristic description to upper case as typed.
        '''
        res = {'value': {}}
        if name:
            res['value']['name'] = name.upper()
        return res

    def on_change_upper_lenght(self, cr, uid, ids, lenght, context=None):
        ''' Force the length value to upper case as typed
            ("lenght" spelling kept for view/API compatibility).
        '''
        res = {'value': {}}
        if lenght:
            res['value']['lenght'] = lenght.upper()
        return res

    # -------------
    # Button event:
    # -------------
    def duplicate_characteristic(self, cr, uid, ids, context=None):
        ''' Queue this characteristic for duplication by appending its id
            to a per-user temporary file (consumed elsewhere).
        '''
        fn = get_temp_filename("%s.car.dat" % uid)
        # Fix: the context manager guarantees the file is closed even if
        # the write raises (the original leaked the handle on error).
        with open(fn, "a") as f:
            f.write("%s\n" % ids[0])
        return True

    _columns = {
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'characteristic_id': fields.many2one('fashion.form.characteristic',
            'Characteristic'),
        'name': fields.text('Description'),
        'lenght': fields.char('Lenght', size=30),
        'old_name': fields.char('Old name', size=30),
        'stitch_type_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch type',
            domain=[('type','=','1')]),
        'stitch_verse_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch verse',
            domain=[('type','=','2')]),
        'stitch_cut_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch cut',
            domain=[('type','=','3')]),
        'stitch_top_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch top',
            domain=[('type','=','4')]),
        'stitch_top_type_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch top type',
            domain=[('type','=','5')]),
        'bindello': fields.boolean('Bindello'),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'sequence': lambda *x: 1,
        }
class fashion_form_characteristic_rel_specific(osv.osv):
    '''Table that manage the specific of characteristic
    (stitch type/verse/cut/top values, filtered per 'type').
    '''
    _name = 'fashion.form.characteristic.rel.specific'
    _description = 'Specific'
    _order = 'type,name'

    def create(self, cr, uid, vals, context=None):
        """
        Create a new record, forcing 'name' to upper case.
        @param cr: cursor to database
        @param uid: id of current user
        @param vals: provides a data for new record
        @param context: context arguments, like lang, time zone
        @return: returns a id of new record
        """
        if vals.get('name'):
            # .get() guard: an absent or falsy name no longer crashes
            # on .upper()
            vals['name'] = vals['name'].upper()
        return super(fashion_form_characteristic_rel_specific, self).create(
            cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """
        Update record(s) in {ids} with new values {vals}, forcing 'name'
        to upper case.
        @param cr: cursor to database
        @param uid: id of current user
        @param ids: list of record ids to be update
        @param vals: dict of new values to be set
        @param context: context arguments, like lang, time zone
        @return: True on success, False otherwise

        Bug fix: the original delegated to super().create(cr, uid, ids,
        vals, ...), which passed ids where create expects vals — the
        records were never updated.  It now delegates to super().write().
        """
        if vals.get('name'):
            vals['name'] = vals['name'].upper()
        return super(fashion_form_characteristic_rel_specific, self).write(
            cr, uid, ids, vals, context=context)

    _columns = {
        'name': fields.text('Description'),
        'type': fields.selection([
            ('1', 'Col1'),
            ('2', 'Col2'),
            ('3', 'Col3'),
            ('4', 'Col4'),
            ('5', 'Col5'), ], 'Type', select=True),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_cost_rel(osv.osv):
    '''Table that manage the relation cost/form

    One cost-list line per form; a second declaration of this same model
    later in this file extends it with pricelist_ids.
    '''
    _name = 'fashion.form.cost.rel'
    _description = 'Relation'
    _rec_name = 'note'

    _columns = {
        'form_id': fields.many2one('fashion.form', 'Form'),
        'cost_id': fields.many2one('fashion.form.cost', 'Cost', required=True),
        'value': fields.float('Value'),
        'note': fields.text('Note'),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_cost_rel_pricelist(osv.osv):
    '''Table that manage the pricelist elements for single cost

    Per-supplier price quotes attached to a cost-list line; 'current'
    marks the quote in use.
    '''
    _name = 'fashion.form.cost.rel.pricelist'
    _description = 'Pricelist'
    _rec_name = 'value'

    _columns = {
        'current': fields.boolean('Current', required=False),
        'cost_rel_id': fields.many2one('fashion.form.cost.rel', 'Cost'),
        'supplier_id': fields.many2one('res.partner', 'Supplier',
            domain=[('supplier','=',True)]),
        'value': fields.float('Value', required=True),
        'note': fields.text('Note'),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'current': False,
        }
class fashion_form_cost_rel(osv.osv):
    '''Table that manage the relation cost/form

    Second pass on fashion.form.cost.rel (declared above): adds the
    one2many to its pricelist lines, which could not be declared earlier
    because fashion.form.cost.rel.pricelist did not exist yet.
    '''
    _name = 'fashion.form.cost.rel'
    _inherit = 'fashion.form.cost.rel'

    _columns = {
        'pricelist_ids':fields.one2many('fashion.form.cost.rel.pricelist',
            'cost_rel_id', 'Pricelist', required=False),
        }
class fashion_form_accessory_rel(osv.osv):
    '''Table that manage the relation accessory/form
    '''
    _name = 'fashion.form.accessory.rel'
    _description = 'Relation accessory'
    _order = 'sequence,id'

    # -------------
    # Button event:
    # -------------
    def duplicate_accessory(self, cr, uid, ids, context=None):
        ''' Queue this accessory line for duplication by appending its id
            to a per-user temporary file (consumed elsewhere).
        '''
        fn = get_temp_filename("%s.acc.dat" % uid)
        # Fix: the context manager guarantees the file is closed even if
        # the write raises (the original leaked the handle on error).
        with open(fn, "a") as f:
            f.write("%s\n" % ids[0])
        return True

    # ----------
    # On change:
    # ----------
    def on_change_calcolate_cost(self, cr, uid, ids, quantity, currency, context=None):
        '''Recompute the accessory total cost (quantity * unit cost).'''
        res = {'value': {}}
        if quantity and currency:
            res['value']['tot_cost'] = quantity * currency
        return res

    def onchange_accessory(self, cr, uid, ids, accessory_id, context=None):
        ''' On accessory change: copy the Gerber letter into 'code' and,
            when the letter is set, the accessory default sequence too.
        '''
        res = {'value': {}, }
        if accessory_id:
            accessory_pool = self.pool.get('fashion.form.accessory')
            accessory_proxy = accessory_pool.browse(
                cr, uid, accessory_id, context=context)
            if accessory_proxy.gerber_char:
                res['value']['code'] = accessory_proxy.gerber_char
                res['value']['sequence'] = accessory_proxy.sequence
            else:
                # Falsy gerber_char still overwrites any previous code:
                res['value']['code'] = accessory_proxy.gerber_char
        else:
            res['value']['code'] = False
        return res

    def on_change_upper_code(self, cr, uid, ids, code, context=None):
        '''Force the code to upper case as the user types.'''
        res = {'value': {}}
        if code:
            res['value']['code'] = code.upper()
        return res

    def on_change_upper_accessory(self, cr, uid, ids, name, context=None):
        '''Force the description to upper case as the user types.'''
        res = {'value': {}}
        if name:
            res['value']['name'] = name.upper()
        return res

    def on_change_upper_um(self, cr, uid, ids, um, context=None):
        '''Force the unit of measure to upper case as the user types.'''
        res = {'value': {}}
        if um:
            res['value']['um'] = um.upper()
        return res

    def on_change_upper_color(self, cr, uid, ids, note, context=None):
        '''Force the color note to upper case as the user types.'''
        res = {'value': {}}
        if note:
            res['value']['note'] = note.upper()
        return res

    def onchange_fabric(self, cr, uid, ids, fabric_id, context=None):
        ''' On fabric change: load supplier, cost and a description built
            from the fabric code and composition.

            Best effort: a browse failure returns the reset values.
        '''
        res = {'value': {'supplier_id': False, 'name': False}}
        if fabric_id:
            fabric_pool = self.pool.get('fashion.form.fabric')
            try:
                fabric_proxy = fabric_pool.browse(
                    cr, uid, fabric_id, context=context)
                res['value']['supplier_id'] = fabric_proxy.supplier_id and fabric_proxy.supplier_id.id or False
                res['value']['currency'] = fabric_proxy.cost or 0.0
                res['value']['name'] = "%s - %s" % (
                    fabric_proxy.code or "",
                    fabric_proxy.perc_composition or "")
            except Exception:  # narrowed from a bare except
                return res
        return res

    def onchange_pricelist(self, cr, uid, ids, pricelist_id, context=None):
        ''' On pricelist change: copy supplier, cost and descriptive data
            from the selected pricelist line (reset them when cleared).
        '''
        res = {'value': {}}
        if pricelist_id:
            pricelist_proxy = self.pool.get(
                "fashion.form.accessory.pricelist").browse(
                    cr, uid, pricelist_id, context=context)
            res['value']['supplier_id'] = pricelist_proxy.supplier_id.id if pricelist_proxy.supplier_id else False
            res['value']['currency'] = pricelist_proxy.cost or 0.0
            res['value']['h'] = pricelist_proxy.extra_info or False
            res['value']['um'] = pricelist_proxy.um or False
            res['value']['name'] = pricelist_proxy.name or False
        else:
            res['value']['supplier_id'] = False
            res['value']['currency'] = 0.0
            res['value']['h'] = False
            res['value']['um'] = False
            res['value']['name'] = False
        return res

    _columns = {
        'form_id': fields.many2one('fashion.form', 'Form'),
        'sequence': fields.integer('Seq.'),
        'accessory_id': fields.many2one('fashion.form.accessory', 'Accessory'),
        'fabric_id': fields.many2one('fashion.form.fabric', 'Fabric'),
        'name': fields.text('Description'),
        # TODO make it related: it always mirrors the article's gerber
        # letter and is never edited from the view.
        'code': fields.char('Code', size=1),
        'um': fields.char('U.M.', size=5),
        'quantity': fields.float('Quantity', digits=(10, 4)),
        'currency': fields.float('Cost', digits=(10, 4)),
        'note': fields.text('Color'),
        'gerber_name': fields.char('Gerber name', size=10),
        'gerber_desc': fields.char('Gerber description', size=10),
        'gerber_h': fields.char('Gerber height', size=10),
        'gerber_l': fields.char('Gerber length', size=10),
        'supplier_id': fields.many2one('res.partner', 'Supplier',
            domain=[('supplier','=',True)]),
        'pricelist_id': fields.many2one('fashion.form.accessory.pricelist',
            'Pricelist'),
        'tot_cost': fields.float('Total cost', digits=(10, 4)),
        'color': fields.char('Color', size=20),  # TODO remove (unused)
        'h': fields.char('H', size=15),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'sequence': lambda *x: 1000,  # high number so letters sort lower
        }
class fashion_form_stitch_rel(osv.osv):
    '''Table that manage the relation stitch/form

    One row per stitch used on the form, with the optional topstitching
    description.
    '''
    _name = 'fashion.form.stitch.rel'
    _description = 'Stitches'

    _columns = {
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'stitch_id': fields.many2one('fashion.form.stitch', 'Stitch'),
        'name': fields.char('Name', size = 50),
        'topstitch': fields.char('Topstitching', size = 60),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_comment_rel(osv.osv):
    '''Dated change-log comments attached to a form.
    '''
    _name = 'fashion.form.comment.rel'
    _description = 'Comments'
    _order = 'date,id'

    # -------------------
    # On change function:
    # -------------------
    def on_change_upper_comment(self, cr, uid, ids, name, context=None):
        '''Force the comment text to upper case as the user types.'''
        if not name:
            return {'value': {}}
        return {'value': {'name': name.upper()}}

    def on_change_upper_reference(self, cr, uid, ids, reference, context=None):
        '''Force the reference to upper case as the user types.'''
        if not reference:
            return {'value': {}}
        return {'value': {'reference': reference.upper()}}

    _columns = {
        'name': fields.text('Changes'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'date': fields.date('Date'),
        'user_id': fields.many2one('res.users', 'User',
            help="User that insert comment"),
        'reference': fields.char('Reference', size=50,
            help="If it is not the user or is an external reference write "
                "here the name."),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'date': lambda *a: datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT),
        'user_id': lambda s, cr, uid, ctx: uid
        }
class fashion_measure_rel(osv.osv):
    '''Table that manages the measure/article

    Links each fashion.article to the measures used to build its forms
    (consumed by fashion.form.insert_article via fashion_measure_ids).
    '''
    _name = 'fashion.measure.rel'
    _description = 'Relation'
    _rec_name = 'article_id'

    _columns = {
        'article_id': fields.many2one('fashion.article', 'Article'),
        'measure_id': fields.many2one('fashion.form.measure', 'Measure'),
        # Import link (legacy Access migration id):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_article(osv.osv):
    '''Extension of fashion.article: adds the one2many to its measure
    relation lines (fashion.measure.rel).
    '''
    _name = 'fashion.article'
    _inherit = 'fashion.article'

    _columns = {
        'fashion_measure_ids': fields.one2many(
            'fashion.measure.rel', 'article_id', 'Measure'),
        }
class fashion_form_partner_rel(osv.osv):
''' Form relation with partner, this object contain much elements useful
for cost determination and caracteristic of model (ex. fabric)
'''
_name = 'fashion.form.partner.rel'
_description = 'Relation'
_rec_name = 'partner_id'
_order = 'model_article,model_number desc,model_customer,model,review desc'
# --------------------
# Override ORM method:
# --------------------
def name_get(self, cr, uid, ids, context = None):
''' Add customer-fabric ID to name
'''
res = []
for item in self.browse(cr, uid, ids, context = context):
res.append((item.id, "%s [%s]" % (
item.partner_id.name,
item.fabric_id.code if item.fabric_id else "???")))
return res
# TODO: name_search
#--------------
# Button event:
#--------------
def wizard_print_a(self, cr, uid, ids, context=None):
''' Print directyl report C with totals (instead of wizard)
'''
record_proxy = self.browse(cr, uid, ids, context=context)[0]
datas = {}
datas['active_ids'] = [record_proxy.form_id.id]
datas['active_id'] = record_proxy.form_id.id
datas['partner_fabric_id'] = ids[0]
datas['summary'] = True
datas['image'] = True
return {
'model': 'fashion.form',
'type': 'ir.actions.report.xml',
'report_name': "fashion_form_A",
'datas': datas,
}
def wizard_print_b(self, cr, uid, ids, context=None):
''' Print directyl report C with totals (instead of wizard)
'''
record_proxy = self.browse(cr, uid, ids, context=context)[0]
datas = {}
datas['active_ids'] = [record_proxy.form_id.id]
datas['active_id'] = record_proxy.form_id.id
datas['partner_fabric_id'] = ids[0]
datas['total'] = True
datas['image'] = True
return {
'model': 'fashion.form',
'type': 'ir.actions.report.xml',
'report_name': "fashion_form_B",
'datas': datas,
}
def wizard_print_c(self, cr, uid, ids, context=None):
''' Print directyl report C with totals (instead of wizard)
'''
record_proxy = self.browse(cr, uid, ids, context=context)[0]
datas = {}
datas['active_ids'] = [record_proxy.form_id.id]
datas['active_id'] = record_proxy.form_id.id
datas['partner_fabric_id'] = ids[0]
datas['image'] = True
return {
'model': 'fashion.form',
'type': 'ir.actions.report.xml',
'report_name': "fashion_form_C",
'datas': datas,
}
#---------------------
# On change functions:
#---------------------
    def on_change_symbol_fabric(self, cr, uid, ids, fabric_id, symbol_fabric, article_code, supplier_id, perc_fabric, context=None):
        ''' Load default wash symbol, maybe in the form are changed

            On fabric change: copy symbol, article code, supplier,
            composition, cost and a composed note from the fabric record;
            reset everything when the fabric is cleared.

            NOTE(review): supplier_id.id is read unguarded for the field
            value (unlike the note, which checks it) — presumably the
            browse_null returned for an empty many2one yields a False id;
            confirm on this ORM version.
        '''
        res = {'value': {}}
        if not fabric_id: # nothing if no fabric selected or wash symbol jet present
            res['value']['symbol_fabric'] = False
            res['value']['article_code'] = False
            res['value']['supplier_id'] = False
            res['value']['perc_fabric'] = False
            res['value']['cost'] = False
            res['value']['note_fabric'] = False
            return res
        fabric_proxy = self.pool.get('fashion.form.fabric').browse(
            cr, uid, fabric_id, context=context)
        res['value']['symbol_fabric'] = fabric_proxy.symbol
        res['value']['article_code'] = fabric_proxy.article_code
        res['value']['supplier_id'] = fabric_proxy.supplier_id.id
        res['value']['perc_fabric'] = fabric_proxy.perc_composition
        res['value']['cost'] = fabric_proxy.cost
        res['value']['note_fabric'] = "%s%s %s" % (
            fabric_proxy.supplier_id.name if fabric_proxy.supplier_id else "",
            " ART %s" % (
                fabric_proxy.article_code) if fabric_proxy.article_code else "",
            fabric_proxy.note or '')
        return res
def onchange_cost_computation(self, cr, uid, ids, mt_fabric, cost, retail, wholesale, sale, sum_extra_cost, context=None):
''' Master event for calculate sale price with all variables
mt_fabric * cost = cost_tot * (100 + retail) * (100 + wholesale) = sale
'''
res = {'value': {}}
res['value']['total_fabric'] = (mt_fabric or 0.0) * (cost or 0.0)
res['value']['sale'] = (res['value']['total_fabric'] + sum_extra_cost or 0.0) * (100.0 + (retail or 0.0)) * (100.0 + (wholesale or 0.0)) / 10000.0
return res
#def onchange_sale(self, cr, uid, sale, context=None):
# ''' Find % of margin for all
# '''
# res = {}
# return res
#------------------
# Fields functions:
#------------------
def _get_total_fabric(self, cr, uid, ids, name, args, context=None):
''' Calculate total fabric (cost * mt)
Total costs (fabric, list, accessory)
Info for margine and recharge
'''
res = {}
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = {}
res[obj.id]['total_fabric'] = obj.cost * obj.mt_fabric
res[obj.id]['total_cost'] = res[obj.id]['total_fabric'] + obj.form_id.sum_extra_cost # TODO + cost list + accessory totale
profit = obj.sale - res[obj.id]['total_cost']
res[obj.id]['margin_note'] = _("%5.2f%s(Mar.)\n%5.2f%s(Ric.)\n%10.2f€(Ut.)") % (
(profit * 100.0 / res[obj.id]['total_cost']) if res[obj.id]['total_cost'] else 0.0,
"%",
(profit * 100.0 / obj.sale) if obj.sale else 0.0,
"%",
profit,
)
return res
def _store_update_model_customer(self, cr, uid, ids, context=None):
''' Recalculate store season in client when change in partner
'''
res = []
rel_pool = self.pool.get('fashion.form.partner.rel')
# Note: reset all:
#return rel_pool.search(cr, uid, [], context=context)
for item_id in ids:
res.extend(rel_pool.search(cr, uid, [
('form_id', '=', item_id)], context=context))
return res
_columns = {
'form_id': fields.many2one('fashion.form', 'Form'),
'partner_id': fields.many2one('res.partner', 'Partner',
domain=[('customer','=',True)], required=True),
'fabric_id': fields.many2one('fashion.form.fabric', 'Fabric',
#required=True # TODO reimportare quando elimimato righe vuote
),
'desc_fabric': fields.char('Description', size=80),
'perc_fabric': fields.char('Composition', size=40),
'corr_fabric': fields.char('Additional description', size=80),
'symbol_fabric': fields.char('Symbols', size=80),
'note_fabric': fields.text('Fabric note'),
'note_cost': fields.text('Cost note'),
'weight': fields.float('Weight', digits=(10, 2)),
'h_fabric': fields.float('H.', digits=(10, 2)),
'mt_fabric': fields.float('Mt.', digits=(10, 2)),
'cost': fields.float('Cost', digits=(10, 4),
help="Unit price for fabric"),
'retail': fields.float('Retail', digits=(10, 4)),
'wholesale': fields.float('Wholesale', digits=(10, 4)),
'sale': fields.float('Selling price', digits=(10, 4)),
# Calculated fields:
'total_fabric': fields.function(_get_total_fabric,
string="Total fabric",
type="float", digits=(10, 2), store=False, multi='totals'), # m_lining * cost
'total_cost': fields.function(_get_total_fabric, string="Total cost",
type="float", digits=(10, 2), store=False, multi='totals'), # total_fabric + cost list + accessory
'margin_note': fields.function(_get_total_fabric, string="Balance",
type="char", size=100, store=False, multi='totals'), # margin information
'code': fields.char('Customer Code', size=10),
'gerber_name': fields.char('Name', size=10),
'gerber_desc': fields.char('Description', size=10),
'gerber_h': fields.char('Height', size=10),
'gerber_l': fields.char('Length', size=10),
'article_code': fields.char('Article', size=60),
'article_description': fields.char('Description', size=60),
'supplier_id': fields.many2one('res.partner', 'Supplier',
domain=[('supplier','=',True)]),
'perc_reload': fields.float('Reloading percentage', digits=(10, 2)),
'perc_margin': fields.float('Margin percentage', digits=(10, 2)),
# TODO eliminare appena vengono tolte dalle viste (kanban)
#'image': fields.related('form_id', 'image', type='binary', string='Image', readonly=True),
'draw_image_a': fields.related('form_id', 'draw_image_a',
type='binary', string='Image', readonly=True),
#'image_large': fields.related('form_id', 'image_large', type='binary', string='Image', readonly=True),
'cost_id': fields.many2one('fashion.form.cost', 'Cost'),
'value': fields.float('Value', digits=(10, 2)),
'note': fields.text('Note'),
'article_id': fields.related('form_id', 'article_id', type='many2one',
relation='fashion.article', string='Article', readonly=True,
store=True),
'season_id': fields.related('form_id','season_id', type='many2one',
relation='fashion.season', string='Season', store=True),
# Store function on related fields (for search):
'model': fields.related('form_id', 'model', type='char',
string='Modello', size=14,
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'model_customer': fields.related('form_id', 'model_customer',
type='char', string='Sigla cliente', size=1,
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'model_article': fields.related('form_id', 'model_article',
type='char', string='Sigla articolo', size=1,
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'model_number': fields.related('form_id', 'model_number',
type='integer', string='Numero modello',
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'model_revision': fields.related('form_id', 'model_revision',
type='char', string='Revisione', size=3,
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'review': fields.related('form_id', 'review', type='integer',
string='Revisione',
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
'conformed': fields.related('form_id', 'conformed', type='boolean',
string='Conformato',
store={
'fashion.form': (
_store_update_model_customer, ['model'], 10), }),
# Link di importazione:
'access_id': fields.integer('Access ID',
help="ID Importazione che tiene il link"),
'access_2_id': fields.integer('Access 2 ID',
help="ID Importazione che tiene il link con partner costi"),
}
class res_partner(osv.osv):
    ''' Extra fields for partner
    Extends res.partner in place (_name == _inherit) with the starting
    size used by the standardized size grid and the legacy import key.
    '''
    _name = 'res.partner'
    _inherit = 'res.partner'
    _columns = {
        'start': fields.integer('Start size',
            help='Departure for the standardized sizes'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
    }
class fashion_form_extra_relations(osv.osv):
    '''Table that manage the relation forms
    Extends fashion.form in place with the one2many sides of all the
    detail tables (characteristics, costs, accessories, stitches,
    customers, measures, comments).
    '''
    _name = 'fashion.form'
    _inherit = 'fashion.form'
    _columns = {
        'characteristic_rel_ids': fields.one2many(
            'fashion.form.characteristic.rel', 'form_id',
            'Characteristic Relation'),
        'cost_rel_ids': fields.one2many('fashion.form.cost.rel', 'form_id',
            'Cost Relation'),
        'accessory_rel_ids': fields.one2many('fashion.form.accessory.rel',
            'form_id', 'Accessory Relation'),
        # 'fabric_rel_ids': fields.one2many('fashion.form.fabric.rel', 'form_id', 'Relation'), #TODO
        'stitch_rel_ids': fields.one2many('fashion.form.stitch.rel',
            'form_id', 'Stitch Relation'),
        'partner_rel_ids': fields.one2many('fashion.form.partner.rel',
            'form_id', 'Partner Relation'),
        'measure_rel_ids': fields.one2many('fashion.form.measure.rel',
            'form_id', 'Measure Relation'),
        'comment_rel_ids': fields.one2many('fashion.form.comment.rel',
            'form_id', 'Comment Relation'),
    }
class product_template(osv.osv):
    ''' Remove translation from product name
    Redefines product.template 'name' (no translate flag) so the product
    name is a plain, untranslated char field.
    '''
    _name = "product.template"
    _inherit = "product.template"
    _columns = {
        'name': fields.char('Name', size=128, required=True, select=True),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Add extra fields to fabric.
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import sys
import base64
from openerp.osv import osv, fields
from datetime import datetime
from openerp.tools import (DEFAULT_SERVER_DATETIME_FORMAT,
DEFAULT_SERVER_DATE_FORMAT)
from openerp import tools
from openerp.tools.translate import _
# --------
# Utility:
# --------
# TODO eliminare le funzioni, non devono stare qui!
def _get_image(self, cr, uid, ids, name, args, context=None):
    ''' Function field getter: return the resized variants of each
    record's 'image' binary (medium size is not regenerated).
    '''
    result = dict.fromkeys(ids, False)
    for record in self.browse(cr, uid, ids, context=context):
        result[record.id] = tools.image_get_resized_images(
            record.image, avoid_resize_medium=True)
    return result
def _set_image(self, cr, uid, item_id, name, value, args, context=None):
    ''' Function field setter: store the image resized to the standard
    "big" size back on the record.
    '''
    vals = {'image': tools.image_resize_image_big(value)}
    return self.write(cr, uid, [item_id], vals, context=context)
def get_temp_filename(filename):
    ''' Return the absolute path of *filename* inside the fashion addon's
    temp directory (used by the copy/paste helpers).
    '''
    import openerp
    addon_temp = os.path.join(openerp.__path__[0], 'addons', 'fashion', 'temp')
    return os.path.join(addon_temp, filename)
class fashion_season(osv.osv):
    '''Table that manages the seasons
    '''
    _name = 'fashion.season'
    _description = 'Season'
    _order = 'sequence,name'
    def set_obsolete(self, cr, uid, ids, context=None):
        ''' Mark the seasons as obsolete.  The written value can be
        overridden through context['obsolete'] (set_not_obsolete uses
        this to clear the flag).
        '''
        # BUG FIX: the original called context.get() without a guard, so
        # invoking the button with context=None raised AttributeError.
        if context is None:
            context = {}
        self.write(cr, uid, ids, {'obsolete': context.get('obsolete', True)})
        return True
    def set_not_obsolete(self, cr, uid, ids, context=None):
        ''' Clear the obsolete flag (delegates to set_obsolete).
        NOTE: the dict is deliberately passed as the *context* argument --
        set_obsolete reads the target value from the context.
        '''
        self.set_obsolete(cr, uid, ids, {'obsolete': False})
        return True
    _columns = {
        'sequence': fields.integer('Sequence'),
        'code': fields.char('Cod', size=10, required=True,
            help='Code used in fabric for join in the name'),
        'name': fields.char('Name', size=40, required=True),
        'note': fields.text('Note'),
        'obsolete': fields.boolean('Obsolete'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_article(osv.osv):
    '''Table that manages the articles
    '''
    _name = 'fashion.article'
    _description = 'Article'
    _order = 'name'
    _columns = {
        'name': fields.char('Name', size=40, required=True),
        'note': fields.text('Note'),
        # Single-letter code (used as the article letter in the model code):
        'code': fields.char('Code', size=1),
        #'measure_ids': fields.many2many(
        #    'fashion.form.measure', 'fashion_form_article_rel',
        #    'article_id', 'measure_id', 'Measures', readonly = False),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_characteristic(osv.osv):
    '''Table that manages the characteristic
    '''
    _name = 'fashion.form.characteristic'
    _description = 'Characteristic'
    _order = 'sequence,name'
    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_cost(osv.osv):
    '''Table that manages the cost
    '''
    _name = 'fashion.form.cost'
    _description = 'Cost'
    _order = 'sequence,name'
    _columns = {
        'name': fields.char('Name', size = 40, required=True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        'cost': fields.float('Cost', digits=(12, 4)),
        # Costs flagged as default are auto-added by fashion_form.set_cost_model:
        'default': fields.boolean('Default'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        'default': False,
    }
class fashion_form_accessory(osv.osv):
    '''Table that manages the accessory
    '''
    _name = 'fashion.form.accessory'
    _description = 'Accessory'
    _order = 'sequence,name'
    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'gerber_char': fields.char('Gerber char', size = 1, required = False),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        'type': fields.selection([
            ('t', 'Cut'),
            ('a', 'Accessory'),
            ], 'Type', select=True),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        'sequence': lambda *x: 1000, # normal accessory have high number
    }
class fashion_form_accessory_pricelist(osv.osv):
    '''Table that manages the accessory pricelist
    '''
    _name = 'fashion.form.accessory.pricelist'
    _description = 'Accessory pricelist'
    _order = 'supplier_id,create_date desc'
    # ------------------
    # Override function:
    # ------------------
    def name_get(self, cr, uid, ids, context=None):
        ''' Display name: article name followed by the extra-info text
        (both parts optional, joined by a space).
        '''
        res = []
        for item in self.browse(cr, uid, ids, context = context):
            res.append((item.id, "%s %s" % (item.name or '', item.extra_info or '')))
        return res
    _columns = {
        'name': fields.char('Article', size=70, required=False),
        'accessory_id':fields.many2one('fashion.form.accessory', 'Accessory',
            required=False, ondelete='cascade'),
        'supplier_id':fields.many2one('res.partner', 'Supplier',
            required=True, domain=[('supplier','=',True)]),
        # create_date is the implicit ORM timestamp, exposed read-only:
        'create_date': fields.datetime('Date', readonly=True),
        'um': fields.char('U.M.', size=5, required=False),
        'extra_info': fields.char('Extra info', size=40, required=False),
        'note': fields.text('Note'),
        'cost': fields.float('Cost', digits=(12, 4)),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_accessory(osv.osv):
    '''Table that manages the accessory relation *2many
    NOTE(review): this re-uses the Python class name defined above; the ORM
    registers models by _name/_inherit, so this simply adds the pricelist
    one2many to fashion.form.accessory.
    '''
    _name = 'fashion.form.accessory'
    _inherit = 'fashion.form.accessory'
    _columns = {
        'pricelist_ids':fields.one2many('fashion.form.accessory.pricelist',
            'accessory_id', 'Pricelist', required=False),
    }
class fashion_form_fabric_composition(osv.osv):
    '''Table that manages the fabric composition
    Looked up by fashion_form_fabric.load_from_composition via
    (season_id, code), falling back to code alone.
    '''
    _name = 'fashion.form.fabric.composition'
    _description = 'Fabric'
    _rec_name = 'code'
    _order = 'code'
    _columns = {
        'code': fields.char('Code', size = 15, required=True),
        'perc_composition': fields.char('Percentage composition', size=60),
        'note': fields.text('Note'),
        'symbol': fields.char('Wash symbol', size=10),
        'season_id': fields.many2one('fashion.season', 'Season',
            required=True),
    }
class fashion_form_fabric(osv.osv):
    '''Table that manages the fabric
    '''
    _name = 'fashion.form.fabric'
    _description = 'Fabric'
    _rec_name = 'code'
    _order = 'code'
    # Button:
    def load_from_composition(self, cr, uid, ids, context=None):
        ''' Search last part of code in composition and override
        elements on fabric.
        Code format: XXX-CCC (CCC = composition code).
        Looks up (season, code) first, then code alone as a fallback;
        raises a UI error when nothing matches.
        '''
        # TODO maybe better as onchange?
        fabric_proxy = self.browse(cr, uid, ids, context=context)[0]
        code = fabric_proxy.code.split('-')[-1] # 3 final char after "-"
        composition_pool = self.pool.get('fashion.form.fabric.composition')
        composition_ids = composition_pool.search(cr, uid, [
            ('season_id', '=', fabric_proxy.season_id.id),
            ('code', '=', code),
            ], context=context)
        if not composition_ids: # search without season (last from accounting)
            # BUG FIX: the original tuple was missing its closing
            # parenthesis ("('code', '=', code,") -- a SyntaxError.
            composition_ids = composition_pool.search(cr, uid, [
                ('code', '=', code),
                ], context=context)
        if composition_ids:
            composition_proxy = composition_pool.browse(
                cr, uid, composition_ids, context=context)[0]
            self.write(cr, uid, ids, {
                'perc_composition': composition_proxy.perc_composition,
                'symbol': composition_proxy.symbol,
                }, context=context)
        else:
            raise osv.except_osv(_('Error'), _("Season and code not found!"))
        return True
    #Override:
    def name_get(self, cr, uid, ids, context = None):
        ''' Display name: "<code>-[<season code>] <note>".
        '''
        res = []
        for fabric in self.browse(cr, uid, ids, context=context):
            res.append((fabric.id, "%s-[%s] %s" % (
                fabric.code,
                fabric.season_id.code if fabric.season_id else "",
                fabric.note or '')))
        return res
    _columns = {
        'supplier_id': fields.many2one('res.partner', 'Fabric Supplier'),
        'article_code': fields.char('Fabric Article code', size=50),
        'code': fields.char('Code', size=15, required=True),
        #'name': fields.char('Name', size = 20),
        #'composition': fields.char('Composition', size = 60),
        'perc_composition': fields.char('Percentage composition', size=60),
        'note': fields.text('Note'),
        'symbol': fields.char('Wash symbol', size=10),
        'season_id': fields.many2one('fashion.season', 'Season'),
        'test': fields.boolean('Test fabric',
            help='This fabric is used for a model testing, maybe it won\'t be produced!'),
        'um': fields.char('U.M.', size=5),
        'cost': fields.float('Cost', digits=(10, 4)),
        # Managed by the accounting employee:
        'weight': fields.float('Weight', digits=(10, 2)),
        'h_fabric': fields.float('H.', digits=(10, 2)),
        'range_supplier_cost': fields.char('Range cost', size=50),
        'range_final_cost': fields.char('Range cost', size=50),
        'preferred_fabric': fields.char('Preferred fabric', size=50),
        'tag': fields.char('Tag', size=50),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer(
            'Access ID', help="ID Importazione che tiene il link"),
    }
    _defaults = {
        'um': lambda *x: 'MT'
    }
class fashion_form_stitch(osv.osv):
    '''Table that manages the stitch
    '''
    _name = 'fashion.form.stitch'
    _description = 'Stitch' # "cuciture" (Italian) = seams/stitching
    _order = 'sequence,name'
    _columns = {
        'name': fields.char('Name', size = 40, required = True),
        'note': fields.text('Note'),
        'sequence': fields.integer('Sequence'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form_measure(osv.osv):
    '''Table that manages the measure
    '''
    _name = 'fashion.form.measure'
    _description = 'Measure'
    _order = 'name'
    _columns = {
        'letter': fields.char('Letter', size = 1),
        'name': fields.char('Description', size = 40, required = True),
        'note': fields.text('Note'),
        # Import link (legacy MS Access ID kept for data migration):
        'access_id': fields.integer('Access ID', help="ID Importazione che tiene il link"),
    }
class fashion_form(osv.osv):
    ''' Table that manages the form
    '''
    _name = 'fashion.form'
    # Delegation inheritance: every fashion.form owns a product.product
    # record whose fields are exposed transparently.
    _inherits = {'product.product': 'product_id', }
    #_inherit = 'mail.thread' # link to messages
    _order = 'model_article,model_number desc,model_customer,model,review desc'
    _rec_name = 'model'
    # File extension used for the draw-image files stored on disk:
    _default_extension = 'jpg'
# --------------------
# On change functions:
# --------------------
def on_change_model(self, cr, uid, ids, model, review, context=None):
''' Split model code in all the part
'''
res = {'value': {}}
if not model:
return res
if not review:
review = 0
model = model.upper()
res['value']['model'] = model
res['value']['name'] = "%s.%s" % (model, review)
if model[0:1].isalpha():
if model[1:2].isalpha():
model_customer = model[0:1]
model_article = model[1:2]
else:
model_customer = False
model_article = model[0:1]
else:
res['warning'] = {
'title': _('warning'),
'message': _('Error: Model must start with letter'),
}
res['value']['model_customer'] = model_customer
res['value']['model_article'] = model_article
model_number = ''
i = 2 if model_customer else 1
for c in model[2 if model_customer else 1:]:
if c.isdigit():
i += 1
model_number += c
else:
break
res['value']['model_number'] = int(model_number) if model_number.isdigit() else 0
if res['value']['model_number'] and len(model)>i and model[i] == 'C':
res['value']['conformed'] = True
i += 1
else:
res['value']['conformed'] = False
res['value']['model_revision'] = model[i:] or False
return res
# ------------------
# Utility functions:
# ------------------
# Naming function:
def _get_form_name(self, model, review):
''' Return name of form element
'''
return "%s.%s" % (model, review)
def _get_draw_image_name(self, obj):
''' Return name of image from browese obj passed
'''
return ("%s.%s" % (self._get_form_name(obj.model, obj.review), self._default_extension)).lower()
# Image function:
def _get_draw_image_type(self, field_name):
''' Return type of image depend on draw field name
'''
return field_name[-1].lower()
def _load_image(self, name, type_image):
''' Load image from file:
'''
path = os.path.expanduser(os.path.join("~/etl/fashion/image", type_image)) # TODO parametrize
filename = os.path.join(path, name)
try:
f = open(filename, 'rb')
img = base64.encodestring(f.read())
f.close()
except:
img = False
return img
    def _unload_image(self, name, value, type_image):
        ''' Unload image to file:
        Decode the base64 *value* and write it to
        <~/etl/fashion/image>/<type_image>/<name>.
        Returns True on success (also when only chmod/chown fail),
        False when the file itself cannot be written.
        '''
        path = os.path.expanduser(os.path.join("~/etl/fashion/image", type_image)) # TODO parametrize
        filename = os.path.join(path, name)
        try:
            f = open(filename, 'wb')
            f.write(base64.decodestring(value))
            f.close()
            try: # Set parameter for update
                # Python 2 octal literal: make the file writable by the
                # external update process (group 1000).
                os.chmod(filename, 0777)
                os.chown(filename, -1, 1000)
            except:
                # Permission tweaks are best-effort; the write succeeded.
                return True
        except:
            return False
        return True
# ------------------
# Override function:
# ------------------
def create(self, cr, uid, vals, context=None):
"""
Create a new record for a model ModelName
@param cr: cursor to database
@param uid: id of current user
@param vals: provides a data for new record
@param context: context arguments, like lang, time zone
@return: returns a id of new record
"""
# Explode model element ("name" created in onchange "model.review")
vals.update(self.on_change_model(
cr, uid, 0,
vals.get('model', False),
vals.get('review', 0),
context=context)['value'])
return super(fashion_form, self).create(
cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
"""
Update record(s) comes in {ids}, with new value comes as {vals}
return True on success, False otherwise
@param cr: cursor to database
@param uid: id of current user
@param ids: list of record ids to be update
@param vals: dict of new values to be set
@param context: context arguments, like lang, time zone
@return: True on success, False otherwise
"""
# Test if one is modified, take data from database for other value
if 'model' in vals or 'review' in vals: # change also name and explode
form_proxy = self.browse(cr, uid, ids, context = context)[0]
# Explore model element:
vals.update(self.on_change_model(
cr, uid, 0,
vals.get('model', form_proxy.model),
vals.get('review', form_proxy.review),
context=context)['value'])
return super(fashion_form, self).write(
cr, uid, ids, vals, context=context)
# ------------
# Button event
# ------------
def open_form_item(self, cr, uid, ids, context=None):
''' Button for open detail in kanban view
'''
return {
'name': _('Form detail'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'fashion.form',
'res_id': ids[0],
'view_id': False,
'views': [(False, 'form')],
'target': 'new',
'domain': [('id','=',ids[0])],
'context': {},
'type': 'ir.actions.act_window',
}
def reset_duplicate_characteristic(self, cr, uid, ids, context=None):
''' Remove file used for copy paste operations
'''
fn = get_temp_filename("%s.car.dat" % uid)
try:
os.remove(fn)
except:
return False
return True
    def paste_duplicate_characteristic(self, cr, uid, ids, context=None):
        ''' Paste operation in form:
        read the fashion.form.characteristic.rel ids saved (one per line)
        in the per-user temp file by the copy step, duplicate each row onto
        this form, then delete the temp file.
        '''
        fn = get_temp_filename("%s.car.dat" % uid)
        try:
            f = open(fn, "r")
        except:
            # TODO Communicate error? (nothing was copied)
            return False
        # One id per line in the temp file:
        item_ids = [int(item) for item in f]
        f.close()
        characteristic_pool = self.pool.get('fashion.form.characteristic.rel')
        for item in characteristic_pool.browse(cr, uid, item_ids, context=context):
            characteristic_pool.create(cr, uid, {
                'name': item.name,
                'sequence': item.sequence,
                'form_id': ids[0],
                'characteristic_id': item.characteristic_id.id if item.characteristic_id else False,
                'lenght': item.lenght,
                'old_name': item.old_name,
                'stitch_type_id': item.stitch_type_id.id if item.stitch_type_id else False,
                'stitch_verse_id': item.stitch_verse_id.id if item.stitch_verse_id else False,
                'stitch_cut_id': item.stitch_cut_id.id if item.stitch_cut_id else False,
                'stitch_top_id': item.stitch_top_id.id if item.stitch_top_id else False,
                'stitch_top_type_id': item.stitch_top_type_id.id if item.stitch_top_type_id else False,
                'bindello': item.bindello,
                }, context=context)
        self.reset_duplicate_characteristic(cr, uid, ids, context=context)
        return True
def reset_duplicate_accessory(self, cr, uid, ids, context=None):
''' Remove file used for copy paste operations
'''
fn = get_temp_filename("%s.acc.dat" % uid)
try:
os.remove(fn)
except:
return False
return True
    def paste_duplicate_accessory(self, cr, uid, ids, context=None):
        ''' Paste operation in form:
        counterpart of paste_duplicate_characteristic for accessory rows --
        read the saved fashion.form.accessory.rel ids from the per-user
        temp file, copy each row onto this form, then delete the file.
        '''
        fn = get_temp_filename("%s.acc.dat" % uid)
        try:
            f = open(fn, "r")
        except:
            # TODO Communicate error? (nothing was copied)
            return False
        # One id per line in the temp file:
        item_ids = [int(item) for item in f]
        f.close()
        accessory_pool = self.pool.get('fashion.form.accessory.rel')
        for item in accessory_pool.browse(cr, uid, item_ids, context=context):
            accessory_pool.create(cr, uid, {
                'form_id': ids[0],
                'sequence': item.sequence,
                'accessory_id': item.accessory_id.id if item.accessory_id else False,
                'fabric_id': item.fabric_id.id if item.fabric_id else False,
                'name': item.name,
                'code': item.code,
                'um': item.um,
                'quantity': item.quantity,
                'currency': item.currency,
                'note': item.note,
                'gerber_name': item.gerber_name,
                'gerber_desc': item.gerber_desc,
                'gerber_h': item.gerber_h,
                'gerber_l': item.gerber_l,
                'supplier_id': item.supplier_id.id if item.supplier_id else False,
                'pricelist_id': item.pricelist_id.id if item.pricelist_id else False,
                'tot_cost': item.tot_cost,
                'color': item.color,
                'h': item.h,
                }, context=context)
        self.reset_duplicate_accessory(cr, uid, ids, context=context)
        return True
def set_not_cost_model(self, cr, uid, ids, context=None):
''' Set form for no manage costs
'''
return self.write(cr, uid, ids, {'model_for_cost': False}, context=context)
def set_cost_model(self, cr, uid, ids, context=None):
''' Set form for no manage costs
'''
# Set default fixed cost in list:
form_proxy = self.browse(cr, uid, ids, context=context)[0]
cost_list_ids = [item.cost_id.id for item in form_proxy.cost_rel_ids]
cost_pool = self.pool.get('fashion.form.cost')
default_ids = cost_pool.search(cr, uid, [('default','=',True)], context=context)
for cost in cost_pool.browse(cr, uid, default_ids, context=context):
if cost.id not in cost_list_ids:
# Create cost and pricelist:
self.pool.get('fashion.form.cost.rel').create(cr, uid, {
'form_id': ids[0],
'cost_id': cost.id,
'value': cost.cost or 0.0
}, context=context)
return self.write(cr, uid, ids, {'model_for_cost': True}, context=context)
def button_refresh(self, cr, uid, ids, context=None):
''' Dummy action for refresh form
'''
return True
    def create_update_header(self, cr, uid, ids, context=None):
        ''' Create a particular line for header (tg. values)
            > fashion.form.measure.rel
        Create (or update when already present) the pseudo-row flagged
        header=True that holds the "Tg.N" size labels for the 13 size
        columns; sizes step by 2 starting from
        size_base - 2 * (col_ref - 1).
        Returns False on any error (best-effort, deliberately silent).
        '''
        try:
            # test if there's yet a header line
            found_id = False
            form_proxy = self.browse(cr, uid, ids, context=context)[0]
            for measure in form_proxy.measure_rel_ids:
                if measure.header:
                    found_id = measure.id
                    break
            # Defaults: base size 42, reference column 3.
            # NOTE(review): a non-numeric size_base makes int() raise and is
            # swallowed by the blanket except below -- consider narrowing it.
            start = int(form_proxy.size_base or '42') - 2 * ((form_proxy.col_ref or 3) - 1)
            data = {
                'header': True,
                'sequence': 0,
                'form_id': form_proxy.id,
                'measure_id': False,
                'name': _('Header'),
                'size_1': "Tg.%s" % (start),
                'size_2': "Tg.%s" % (start + 2),
                'size_3': "Tg.%s" % (start + 4),
                'size_4': "Tg.%s" % (start + 6),
                'size_5': "Tg.%s" % (start + 8),
                'size_6': "Tg.%s" % (start + 10),
                'size_7': "Tg.%s" % (start + 12),
                'size_8': "Tg.%s" % (start + 14),
                'size_9': "Tg.%s" % (start + 16),
                'size_10': "Tg.%s" % (start + 18),
                'size_11': "Tg.%s" % (start + 20),
                'size_12': "Tg.%s" % (start + 22),
                'size_13': "Tg.%s" % (start + 24),
                'visible': False,
                'real': False,
            }
            measure_pool = self.pool.get('fashion.form.measure.rel')
            if found_id: # Update
                measure_pool.write(cr, uid, found_id, data, context=context)
            else: # Create a header elements:
                measure_pool.create(cr, uid, data, context=context)
        except:
            return False # if error no creation
        return True
    def insert_article(self, cr, uid, ids, context=None):
        '''Insert the measure of article
        Delete all lines before and recreate:
        rebuild the form's measure grid from the template measures of the
        selected article (header row included).
        '''
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        # test if article is selected:
        if not form_proxy.article_id:
            return True
        # Delete all items:
        res_pool = self.pool.get('fashion.form.measure.rel')
        res_ids = res_pool.search(
            cr, uid, [('form_id', '=', ids[0])], context=context)
        res_pool.unlink(cr, uid, res_ids, context=context)
        # create header line:
        self.create_update_header(cr, uid, ids, context=context)
        # after load article item list:
        # NOTE(review): this reads article_id.fashion_measure_ids while
        # reload_measure reads article_id.measure_ids (commented out in
        # fashion_article._columns above) -- confirm which field exists.
        for item_id in [l.measure_id.id for l in form_proxy.article_id.fashion_measure_ids]:
            res_pool.create(cr, uid, {
                'measure_id': item_id,
                'form_id': ids[0],
                'visible': True,
                }, context=context)
        return True
    def empty_article(self, cr, uid, ids, context=None):
        ''' Keep current list but empty all measure:
        clear the size columns of every non-header measure row of the form.
        '''
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        for item in form_proxy.measure_rel_ids:
            if item.header: # jump header
                continue
            data = {}
            # NOTE(review): clears size_1..size_12 except size_3, and never
            # size_13 -- confirm both exclusions are intentional (size_3 may
            # be the base-size column that must be preserved).
            for col in range(1, 13):
                if col == 3:
                    continue
                data["size_%s" % col] = False
            self.pool.get('fashion.form.measure.rel').write(
                cr, uid, item.id, data, context=context)
        return True
    def reload_measure(self, cr, uid, ids, context=None):
        ''' Delete all measure list.
        Create empty list depending on the article selected: one blank
        measure row per measure of the article.  Returns False when no
        article is selected.
        '''
        # Get current record:
        form_proxy = self.browse(cr, uid, ids, context=context)[0]
        if not form_proxy.article_id:
            return False # TODO report the error: no article is selected
        # delete all elements:
        measure_pool = self.pool.get('fashion.form.measure.rel')
        measure_ids = measure_pool.search(cr, uid, [('form_id','=',ids[0])], context = context)
        measure_pool.unlink(cr, uid, measure_ids, context = context)
        # Loop in all measure of the article selected:
        # NOTE(review): reads article_id.measure_ids, which is commented out
        # in fashion_article._columns above -- confirm the field exists.
        for measure in form_proxy.article_id.measure_ids:
            measure_pool.create(cr, uid,
                {'form_id': ids[0],
                 'measure_id': measure.id,
                }, context = context)
        return True
    def modify_draw_image(self, cr, uid, item_id, context=None):
        ''' Call url if format:
            fashion://name of image (in format model.version.extension)
            item_id: ID of the image selected
            context: 'side': 'A' or 'side': 'B'
        Returns an act_url action "fashion://<side>/<model>.<review>.<ext>"
        (lower-cased) for a client-side protocol handler.
        '''
        # NOTE(review): item_id is indexed like a list (browse(...)[0]) and
        # context.get is called unguarded -- both assume the standard button
        # calling convention (ids list + non-None context); confirm callers.
        form_proxy = self.browse(cr, uid, item_id, context=context)[0]
        final_url = (r"fashion://%s/%s.%s.%s" % (
            context.get('side', 'A'),
            form_proxy.model,
            form_proxy.review,
            self._default_extension,
            )).lower()
        return {
            'type': 'ir.actions.act_url',
            'url':final_url,
            'target': 'new'
        }
    def reset_image(self, cr, uid, ids, context=None):
        ''' Reset form image:
        clear draw_image_a or draw_image_b depending on context['side'].
        The blanket except silently ignores a missing context/'side' key or
        a failed write; always returns True.
        '''
        try:
            self.write(cr, uid, ids, {'draw_image_%s' % (context.get('side').lower()):False},context=context)
        except:
            pass
        return True
#============================#
# Workflow Activity Function #
#============================#
    # Workflow activity callbacks: each writes the corresponding state.
    def form_draft(self, cr, uid, ids, context=None):
        '''Workflow activity: set state to 'draft'.'''
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
    def form_sample(self, cr, uid, ids, context=None):
        '''Workflow activity: set state to 'sample'.'''
        return self.write(cr, uid, ids, {'state': 'sample'}, context=context)
    def form_ok(self, cr, uid, ids, context=None):
        '''Workflow activity: set state to 'ok'.'''
        return self.write(cr, uid, ids, {'state': 'ok'}, context=context)
    def form_produced(self, cr, uid, ids, context=None):
        '''Workflow activity: set state to 'produced'.'''
        return self.write(cr, uid, ids, {'state': 'produced'}, context=context)
    def form_discarded(self, cr, uid, ids, context=None):
        '''Workflow activity: set state to 'discarded'.'''
        return self.write(cr, uid, ids, {'state': 'discarded'}, context=context)
# ----------------
# Fields functions
# ----------------
def _get_sum_items(self, cr, uid, ids, name, args, context=None):
''' Calculate total sum for costs (cost list and accessory)
'''
res = {}
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = {}
res[obj.id]['sum_accessory'] = 0.0
res[obj.id]['sum_cost'] = 0.0
if obj.model_for_cost: # calculate only if it's a model (for speed)
for accessory in obj.accessory_rel_ids:
res[obj.id]['sum_accessory'] += accessory.tot_cost
for cost in obj.cost_rel_ids:
res[obj.id]['sum_cost'] += cost.value
res[obj.id]['sum_extra_cost'] = res[obj.id]['sum_cost'] + res[obj.id]['sum_accessory']
else:
res[obj.id]['sum_extra_cost'] = 0.0
return res
def _get_draw_image(self, cr, uid, ids, name, args, context=None):
''' Read image from file according to name and version format:
MODEL.VERSION.ext
'''
res = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = self._load_image(
self._get_draw_image_name(obj),
self._get_draw_image_type(name)) # TODO parametrize
return res
def _set_draw_image(self, cr, uid, item_id, name, value, args, context=None):
''' Write image passed to file
'''
obj_proxy = self.browse(cr, uid, item_id, context=context)
self._unload_image(
self._get_draw_image_name(obj_proxy), value,
self._get_draw_image_type(name)) # TODO test return value
# Resizing function:
def _get_resized_image(self, cr, uid, ids, name, args, context=None):
''' Resize defaulf draw_image_a
'''
type_of_image = name.split("_")[-1] # from field name (last block)
if type_of_image == 'medium':
width = 800
elif type_of_image == 'small':
width = 200
else:
width = 64
res = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
res[obj.id] = tools.image_resize_image(
obj.draw_image_a, size=(width, None), encoding='base64',
filetype=self._default_extension, avoid_if_small=True) # 'PNG'
return res
def _set_resized_image(self, cr, uid, item_id, name, value, args, context=None):
''' Store image in original field: draw_image_a
(call field function for file image)
'''
return self.write(cr, uid, [item_id], {'draw_image_a': value, }, context=context)
def invoice_print(self, cr, uid, ids, context=None):
''' This function prints the invoice and mark it as sent, so that we
can see more easily the next step of the workflow
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
self.write(cr, uid, ids, {'sent': True}, context=context)
datas = {
'ids': ids,
'model': 'account.invoice',
'form': self.read(cr, uid, ids[0], context=context)
}
return {
'type': 'ir.actions.report.xml',
'report_name': 'account.invoice',
'datas': datas,
'nodestroy' : True
}
def set_obsolete(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'obsolete': context.get('obsolete',True)})
return True
def set_not_obsolete(self, cr, uid, ids, context=None):
self.set_obsolete(cr, uid, ids, {'obsolete':False})
return True
# Detail information functions:
def _get_detail_informations(self, cr, uid, ids, fields, args, context=None):
''' Get detail information about fabric and customer
'''
res = {}
for form in self.browse(cr, uid, ids, context=context):
res[form.id] = {}
res[form.id]['detail_info_partner'] = ''
res[form.id]['detail_info_fabric'] = ''
res[form.id]['detail_partner'] = False # only for search
res[form.id]['detail_fabric'] = False # only for search
for detail in form.partner_rel_ids:
res[form.id]['detail_info_partner'] += "%s\n" % (
detail.partner_id.name if detail.partner_id else "?",
)
res[form.id]['detail_info_fabric'] += "%s\n" % (
detail.fabric_id.code if detail.fabric_id else "?",
)
return res
def _search_detail_info(self, cr, uid, obj, name, args, context=None):
''' Search in detail information seeking partner
'''
if name == 'detail_partner':
field_name = 'partner_id'
else: # detail_fabric
field_name = 'fabric_id'
try:
search_id = args[0][2]
cr.execute("""
SELECT DISTINCT form_id
FROM fashion_form_partner_rel
WHERE %s = %s;
""" % (field_name, search_id))
return [('id', 'in',
[item[0] for item in cr.fetchall()])]
except:
return [('id', 'in', [])] # if error
_columns = {
'model': fields.char('Model', size=10, required=True),
'customer_code': fields.char('Customer code', size=18),
'size_base': fields.char('Size', size=30,
help='Basic size reference, ex:42', required=True),
'size_measure': fields.char('Column for feedback', size=30,
help='Size basis for the measurement'),
'review': fields.integer('Review', help='Revision of the main model',
required=True),
'date': fields.date('Date', help='Date of revision'),
'create_date': fields.datetime('Create date', readonly=True),
'write_date': fields.datetime('Date Last modify', readonly=True),
'write_uid': fields.many2one('res.users', 'by User', readonly=True),
'original': fields.char('Original', size=80),
'base_id': fields.many2one('fashion.form', 'Base form',
help="Duplicated from"),
'base_name': fields.char('Duplicated form', size=40),
'h_lining': fields.char('Height lining', size=10),
'mt_lining': fields.char('Meters lining', size=10),
'cost_lining': fields.float('Cost lining', digits=(10,2)),
'conformed': fields.boolean('Conformed',
help='Indicates that the model uses standardized sizes'),
'start': fields.integer('Start size',
help='Departure for the standardized sizes'),
'ironing': fields.text('Ironing'),
'area': fields.char('Area', size=30, help='Link the table Gerber'),
'user_id': fields.many2one('res.users', 'User'),
'cut': fields.char('Size n.', size=40),
'size': fields.text('Sizes'),
'colors': fields.char('Colors', size=40),
'article_id': fields.many2one('fashion.article', 'Article'),
'season_id': fields.many2one('fashion.season', 'Season'),
'obsolete': fields.boolean('Obsolete',
help='Indicates that the form is old'),
'reactivate': fields.boolean('Not Obsolete',
help='Indicates that the form is not old'),
'old_model': fields.boolean('Old Model'),
'show_old_model': fields.boolean('Show old model'),
'washing': fields.text('Washing'),
'model_for_cost': fields.boolean('Model for cost',
help='Indicates that this form is use for create a pricelist'
' elements'),
'col_ref': fields.selection([
(1,'col 1'), (2,'col 2'), (3,'col 3'), (4,'col 4'), (5,'col 5'),
(6,'col 6'), (7,'col 7'), (8,'col 8'), (9,'col 9'), (10,'col 10'),
(11,'col 11'), (12,'col 12'),
], 'Column reference', select=True, required=True),
# Function for totals:
'sum_accessory': fields.function(_get_sum_items,
string="Total accessory",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of the accessory list (see page Accessory for details)",
),
'sum_cost': fields.function(_get_sum_items, string="Total cost list",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of costs in the list on the left"),
'sum_extra_cost': fields.function(_get_sum_items,
string="Total extra cost",
type="float", digits=(10, 2), store=False, multi='totals',
help="Sum of accessory cost and cost list "
"(no fabric in this total)"),
# Image:
'draw_image_a': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Draw Image A", type="binary",
help="Image for draw side A. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
'draw_image_b': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Draw Image B", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
# Photos:
'draw_image_c': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Photo", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
'draw_image_d': fields.function(_get_draw_image,
fnct_inv=_set_draw_image,
string="Photo", type="binary",
help="Image for draw side B. Usual size:"\
"1024 x 768"\
"The image is printed in report form and, in small size"\
"in kanban report views"),
# Resize dinamically images:
'draw_image_a_medium': fields.function(_get_resized_image,
fnct_inv=_set_resized_image,
string="Medium-sized image", type="binary",
help="Medium-sized image of the product. It is automatically "
"resized as a 800px large image, with aspect ratio preserved "
"only when the image exceeds one of those sizes. Use this "
"field in form views or some kanban views."),
'draw_image_a_small': fields.function(_get_resized_image,
fnct_inv=_set_resized_image,
string="Small-sized image", type="binary",
help="Small-sized image of the product. It is automatically"
"resized as a 128px large image, with aspect ratio preserved."
"Use this field anywhere a small image is required."),
# Inherit fields:
'product_id': fields.many2one('product.product', 'Product',
ondelete = "restrict", required=True,
help="Inherits value for link form to a product"),
# Details fields (and search)
'detail_info_partner': fields.function(
_get_detail_informations, method=True, type='text',
string='Detail customer', store=False, multi='detail_info'),
'detail_info_fabric': fields.function(
_get_detail_informations, method=True, type='text',
string='Detail fabric', store=False, multi='detail_info'),
'detail_partner': fields.function(
_get_detail_informations, method=True,
type='many2one', relation='res.partner',
string='Detail partner',
fnct_search=_search_detail_info,
store=False, multi='detail_info'),
'detail_fabric': fields.function(
_get_detail_informations, method=True,
type='many2one', relation='fashion.form.fabric',
string='Detail fabric',
fnct_search=_search_detail_info,
store=False, multi='detail_info'),
# Explosion of model (on change setted)
'model_customer': fields.char('Sigla cliente', size=1),
'model_article': fields.char('Sigla articolo', size=1),
'model_number': fields.integer('Numero modello'),
'model_revision': fields.char('Revisione', size=3),
# Workflow fields:
'state': fields.selection([
('draft', 'Draft'),
('sample', 'Sample'),
('ok', 'Ok production'),
('produced', 'Produced'),
('discarded', 'Discarded'),
], 'State', select=True),
# Link di importazione:
'access_id': fields.integer('Access ID',
help="ID Importazione che tiene il link"),
}
_defaults = {
'date': lambda *a: datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT),
'state': lambda *a: 'draft',
'user_id': lambda s, cr, uid, ctx: uid,
'old_model': lambda *x: False,
'show_old_model': lambda *x: False,
'col_ref': lambda *a: 3,
'model_for_cost': False,
}
# -----------------------------------------------------------------------------
# Object relations
# -----------------------------------------------------------------------------
class fashion_form_measure_rel(osv.osv):
    '''Relation line linking a form to a measure: one row per measure
       with up to 13 size columns plus a "real" feedback value.
    '''
    _name = 'fashion.form.measure.rel'
    _description = 'Measure relation'
    _order = 'header desc,sequence,id'

    def clean_measure(self, cr, uid, ids, context=None):
        ''' Blank all 13 size columns and the "real" value on the passed
            lines, keeping the measure reference itself.
        '''
        blank_values = dict(
            ('size_%d' % column, False) for column in range(1, 14))
        blank_values['real'] = False
        return self.write(cr, uid, ids, blank_values, context=context)

    _columns = {
        'header': fields.boolean('Header'),
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'measure_id': fields.many2one('fashion.form.measure', 'Measure'),
        'name': fields.text('Description'),
        'size_1': fields.char('Size 1', size=10),
        'size_2': fields.char('Size 2', size=10),
        'size_3': fields.char('[Size 3]', size=10),
        'size_4': fields.char('Size 4', size=10),
        'size_5': fields.char('Size 5', size=10),
        'size_6': fields.char('Size 6', size=10),
        'size_7': fields.char('Size 7', size=10),
        'size_8': fields.char('Size 8', size=10),
        'size_9': fields.char('Size 9', size=10),
        'size_10': fields.char('Size 10', size=10),
        'size_11': fields.char('Size 11', size=10),
        'size_12': fields.char('Size 12', size=10),
        'size_13': fields.char('Size 13', size=10),
        'visible': fields.boolean('Visible', size=10,
            help='Indicates is the size is visible in the form reply'),
        'real': fields.char('Real', size=10),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'header': lambda *x: False,
        'sequence': lambda *x: 0,
        }
class fashion_form_characteristic_rel(osv.osv):
    '''Table that manages the relation characteristic/form: each line
       attaches one characteristic to a form, with description, stitch
       specifics and a sequence for ordering.
    '''
    _name = 'fashion.form.characteristic.rel'
    _description = 'Form characteristic relation'
    _order = 'sequence,id'

    # ----------------
    # On change event:
    # ----------------
    def on_change_upper_characteristic(self, cr, uid, ids, name, context=None):
        ''' Force the description field to upper case while typing.
        '''
        res = {'value': {}}
        if name:
            res['value']['name'] = name.upper()
        return res

    def on_change_upper_lenght(self, cr, uid, ids, lenght, context=None):
        ''' Force the length field to upper case while typing.
            NOTE: "lenght" is misspelled but matches the stored field
            name, so it is kept for view/DB compatibility.
        '''
        res = {'value': {}}
        if lenght:
            res['value']['lenght'] = lenght.upper()
        return res

    # -------------
    # Button event:
    # -------------
    def duplicate_characteristic(self, cr, uid, ids, context=None):
        ''' Queue the first selected line for duplication by appending its
            id to a per-user temp file (read back by the duplication
            wizard).

            @return: True
        '''
        filename = get_temp_filename("%s.car.dat" % uid)
        # with-block guarantees the handle is closed even when the write
        # fails (the original leaked the file handle on error):
        with open(filename, "a") as handle:
            handle.write("%s\n" % ids[0])
        return True

    _columns = {
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'characteristic_id': fields.many2one('fashion.form.characteristic',
            'Characteristic'),
        'name': fields.text('Description'),
        'lenght': fields.char('Lenght', size=30),  # sic: legacy field name
        'old_name': fields.char('Old name', size=30),
        'stitch_type_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch type',
            domain=[('type','=','1')]),
        'stitch_verse_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch verse',
            domain=[('type','=','2')]),
        'stitch_cut_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch cut',
            domain=[('type','=','3')]),
        'stitch_top_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch top',
            domain=[('type','=','4')]),
        'stitch_top_type_id': fields.many2one(
            'fashion.form.characteristic.rel.specific', 'Stitch top type',
            domain=[('type','=','5')]),
        'bindello': fields.boolean('Bindello'),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'sequence': lambda *x: 1,
        }
class fashion_form_characteristic_rel_specific(osv.osv):
    '''Dictionary of "specific" stitch values used by the characteristic
       lines; ``type`` selects which column (stitch type / verse / cut /
       top / top type) the value belongs to.  Names are always stored
       upper case.
    '''
    _name = 'fashion.form.characteristic.rel.specific'
    _description = 'Specific'
    _order = 'type,name'

    def create(self, cr, uid, vals, context=None):
        """
        Create a new record, forcing ``name`` to upper case.
        @param cr: cursor to database
        @param uid: id of current user
        @param vals: provides the data for the new record
        @param context: context arguments, like lang, time zone
        @return: id of the new record
        """
        if vals.get('name'):
            # .get() also skips a False/None name, which the original
            # 'name' in vals test let through and then crashed on .upper()
            vals['name'] = vals['name'].upper()
        return super(fashion_form_characteristic_rel_specific, self).create(
            cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """
        Update record(s) in {ids} with the new values in {vals},
        forcing ``name`` to upper case.

        BUGFIX: the original body called ``super().create(cr, uid, ids,
        vals, ...)`` -- wrong method *and* wrong arguments -- so every
        write on this model raised instead of updating.

        @param cr: cursor to database
        @param uid: id of current user
        @param ids: list of record ids to be updated
        @param vals: dict of new values to be set
        @param context: context arguments, like lang, time zone
        @return: True on success
        """
        if vals.get('name'):
            vals['name'] = vals['name'].upper()
        return super(fashion_form_characteristic_rel_specific, self).write(
            cr, uid, ids, vals, context=context)

    _columns = {
        'name': fields.text('Description'),
        'type': fields.selection([
            ('1', 'Col1'),
            ('2', 'Col2'),
            ('3', 'Col3'),
            ('4', 'Col4'),
            ('5', 'Col5'), ], 'Type', select=True),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_cost_rel(osv.osv):
    '''Relation cost/form: one extra-cost line attached to a form,
       displayed through its note (``_rec_name``).
    '''
    _name = 'fashion.form.cost.rel'
    _description = 'Relation'
    _rec_name = 'note'
    _columns = {
        'form_id': fields.many2one('fashion.form', 'Form'),
        'cost_id': fields.many2one('fashion.form.cost', 'Cost', required=True),
        'value': fields.float('Value'),
        'note': fields.text('Note'),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_cost_rel_pricelist(osv.osv):
    '''Pricelist entries for a single cost line: one candidate supplier
       quote per row, with a ``current`` flag marking the one in use.
    '''
    _name = 'fashion.form.cost.rel.pricelist'
    _description = 'Pricelist'
    _rec_name = 'value'
    _columns = {
        'current': fields.boolean('Current', required=False),
        'cost_rel_id': fields.many2one('fashion.form.cost.rel', 'Cost'),
        'supplier_id': fields.many2one('res.partner', 'Supplier',
            domain=[('supplier','=',True)]),
        'value': fields.float('Value', required=True),
        'note': fields.text('Note'),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'current': False,
        }
class fashion_form_cost_rel(osv.osv):
    '''Re-open fashion.form.cost.rel (via _inherit on itself) to add the
       one2many towards the pricelist model, which had to be declared
       first for the relation to resolve.
    '''
    _name = 'fashion.form.cost.rel'
    _inherit = 'fashion.form.cost.rel'
    _columns = {
        'pricelist_ids':fields.one2many('fashion.form.cost.rel.pricelist',
            'cost_rel_id', 'Pricelist', required=False),
        }
class fashion_form_accessory_rel(osv.osv):
    '''Relation accessory/form: one line per accessory (or fabric used as
       an accessory) with quantity, unit cost, supplier and Gerber
       placement data.
    '''
    _name = 'fashion.form.accessory.rel'
    _description = 'Relation accessory'
    _order = 'sequence,id'

    # -------------
    # Button event:
    # -------------
    def duplicate_accessory(self, cr, uid, ids, context=None):
        ''' Queue the first selected line for duplication by appending its
            id to a per-user temp file (read back by the duplication
            wizard).

            @return: True
        '''
        filename = get_temp_filename("%s.acc.dat" % uid)
        # with-block closes the handle even when the write fails (the
        # original leaked the file handle on error):
        with open(filename, "a") as handle:
            handle.write("%s\n" % ids[0])
        return True

    # ----------
    # On change:
    # ----------
    def on_change_calcolate_cost(self, cr, uid, ids, quantity, currency, context=None):
        ''' Recompute the accessory total cost as quantity * unit cost
            (the misspelled method name is kept: views reference it).
        '''
        res = {'value': {}}
        if quantity and currency:
            res['value']['tot_cost'] = quantity * currency
        return res

    def onchange_accessory(self, cr, uid, ids, accessory_id, context=None):
        ''' Copy the Gerber letter (and, when present, the sequence) from
            the selected accessory into the line; clear the code when the
            accessory is deselected.
        '''
        res = {'value': {}, }
        if accessory_id:
            accessory_pool = self.pool.get('fashion.form.accessory')
            accessory_proxy = accessory_pool.browse(
                cr, uid, accessory_id, context=context)
            res['value']['code'] = accessory_proxy.gerber_char
            if accessory_proxy.gerber_char:
                res['value']['sequence'] = accessory_proxy.sequence
        else:
            res['value']['code'] = False
        return res

    def on_change_upper_code(self, cr, uid, ids, code, context=None):
        ''' Force the code field to upper case while typing.
        '''
        res = {'value': {}}
        if code:
            res['value']['code'] = code.upper()
        return res

    def on_change_upper_accessory(self, cr, uid, ids, name, context=None):
        ''' Force the description field to upper case while typing.
        '''
        res = {'value': {}}
        if name:
            res['value']['name'] = name.upper()
        return res

    def on_change_upper_um(self, cr, uid, ids, um, context=None):
        ''' Force the unit of measure to upper case while typing.
        '''
        res = {'value': {}}
        if um:
            res['value']['um'] = um.upper()
        return res

    def on_change_upper_color(self, cr, uid, ids, note, context=None):
        ''' Force the color note to upper case while typing.
        '''
        res = {'value': {}}
        if note:
            res['value']['note'] = note.upper()
        return res

    def onchange_fabric(self, cr, uid, ids, fabric_id, context=None):
        ''' When a fabric is picked, propose its supplier, unit cost and a
            "code - composition" description; clear them otherwise.
        '''
        res = {'value': {'supplier_id': False, 'name': False}}
        if fabric_id:
            fabric_pool = self.pool.get('fashion.form.fabric')
            try:
                fabric_proxy = fabric_pool.browse(
                    cr, uid, fabric_id, context=context)
                res['value']['supplier_id'] = fabric_proxy.supplier_id and fabric_proxy.supplier_id.id or False
                res['value']['currency'] = fabric_proxy.cost or 0.0
                res['value']['name'] = "%s - %s" % (
                    fabric_proxy.code or "",
                    fabric_proxy.perc_composition or "")
            except Exception:  # narrowed from a bare except: keep best-effort
                return res
        return res

    def onchange_pricelist(self, cr, uid, ids, pricelist_id, context=None):
        ''' Copy supplier / cost / height / UM / description from the
            selected pricelist line, or clear them when deselected.
        '''
        res = {'value': {}}
        if pricelist_id:
            pricelist_proxy = self.pool.get(
                "fashion.form.accessory.pricelist").browse(
                    cr, uid, pricelist_id, context=context)
            res['value']['supplier_id'] = pricelist_proxy.supplier_id.id if pricelist_proxy.supplier_id else False
            res['value']['currency'] = pricelist_proxy.cost or 0.0
            res['value']['h'] = pricelist_proxy.extra_info or False
            res['value']['um'] = pricelist_proxy.um or False
            res['value']['name'] = pricelist_proxy.name or False
        else:
            res['value']['supplier_id'] = False
            res['value']['currency'] = 0.0
            res['value']['h'] = False
            res['value']['um'] = False
            res['value']['name'] = False
        return res

    _columns = {
        'form_id': fields.many2one('fashion.form', 'Form'),
        'sequence': fields.integer('Seq.'),
        'accessory_id': fields.many2one('fashion.form.accessory', 'Accessory'),
        'fabric_id': fields.many2one('fashion.form.fabric', 'Fabric'),
        'name': fields.text('Description'),
        # TODO make this a related field (it always mirrors the accessory):
        'code': fields.char('Code', size=1),
        'um': fields.char('U.M.', size=5),
        'quantity': fields.float('Quantity', digits=(10, 4)),
        'currency': fields.float('Cost', digits=(10, 4)),
        'note': fields.text('Color'),
        'gerber_name': fields.char('Gerber name', size=10),
        'gerber_desc': fields.char('Gerber description', size=10),
        'gerber_h': fields.char('Gerber height', size=10),
        'gerber_l': fields.char('Gerber length', size=10),
        'supplier_id': fields.many2one('res.partner', 'Supplier',
            domain=[('supplier','=',True)]),
        'pricelist_id': fields.many2one('fashion.form.accessory.pricelist',
            'Pricelist'),
        'tot_cost': fields.float('Total cost', digits=(10, 4)),
        'color': fields.char('Color', size=20),  # TODO remove (unused)
        'h': fields.char('H', size=15),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'sequence': lambda *x: 1000,  # high number so lettered rows sort first
        }
class fashion_form_stitch_rel(osv.osv):
    '''Relation stitch/form: one stitch line per form with its
       topstitching description.
    '''
    _name = 'fashion.form.stitch.rel'
    _description = 'Stitches'
    _columns = {
        'sequence': fields.integer('Seq.'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'stitch_id': fields.many2one('fashion.form.stitch', 'Stitch'),
        'name': fields.char('Name', size = 50),
        'topstitch': fields.char('Topstitching', size = 60),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_comment_rel(osv.osv):
    '''Change-log comments attached to a form, ordered by date.'''
    _name = 'fashion.form.comment.rel'
    _description = 'Comments'
    _order = 'date,id'

    # -------------------
    # On change function:
    # -------------------
    def on_change_upper_comment(self, cr, uid, ids, name, context=None):
        ''' Force the comment text to upper case while typing.
        '''
        value = {}
        if name:
            value['name'] = name.upper()
        return {'value': value}

    def on_change_upper_reference(self, cr, uid, ids, reference, context=None):
        ''' Force the reference to upper case while typing.
        '''
        value = {}
        if reference:
            value['reference'] = reference.upper()
        return {'value': value}

    _columns = {
        'name': fields.text('Changes'),
        'form_id': fields.many2one('fashion.form', 'Form'),
        'date': fields.date('Date'),
        'user_id': fields.many2one('res.users', 'User',
            help="User that insert comment"),
        'reference': fields.char('Reference', size=50,
            help="If it is not the user or is an external reference write "
                "here the name."),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
    _defaults = {
        'date': lambda *a: datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT),
        'user_id': lambda s, cr, uid, ctx: uid,
        }
class fashion_measure_rel(osv.osv):
    '''Relation measure/article: links an article to its default
       measures.
    '''
    _name = 'fashion.measure.rel'
    _description = 'Relation'
    _rec_name = 'article_id'
    _columns = {
        'article_id': fields.many2one('fashion.article', 'Article'),
        'measure_id': fields.many2one('fashion.form.measure', 'Measure'),
        # Import link (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_article(osv.osv):
    '''Extend fashion.article with the list of its default measures.
    '''
    _name = 'fashion.article'
    _inherit = 'fashion.article'
    _columns = {
        'fashion_measure_ids': fields.one2many(
            'fashion.measure.rel', 'article_id', 'Measure'),
        }
class fashion_form_partner_rel(osv.osv):
    ''' Relation between a form and a customer: carries the fabric choice,
        the cost elements and the sale-price computation for that
        customer's variant of the model.

        NOTE(review): the related fields at the bottom use store triggers
        wired to _store_update_model_customer, so the declaration order
        (methods before _columns) is load-bearing.
    '''
    _name = 'fashion.form.partner.rel'
    _description = 'Relation'
    _rec_name = 'partner_id'
    _order = 'model_article,model_number desc,model_customer,model,review desc'
    # --------------------
    # Override ORM method:
    # --------------------
    def name_get(self, cr, uid, ids, context = None):
        ''' Display name as "<partner> [<fabric code>]", with "???" when
            the fabric is missing.
        '''
        res = []
        for item in self.browse(cr, uid, ids, context = context):
            res.append((item.id, "%s [%s]" % (
                item.partner_id.name,
                item.fabric_id.code if item.fabric_id else "???")))
        return res
    # TODO: name_search
    #--------------
    # Button event:
    #--------------
    def wizard_print_a(self, cr, uid, ids, context=None):
        ''' Print report A directly (summary + image) instead of going
            through the wizard.
        '''
        record_proxy = self.browse(cr, uid, ids, context=context)[0]
        datas = {}
        datas['active_ids'] = [record_proxy.form_id.id]
        datas['active_id'] = record_proxy.form_id.id
        datas['partner_fabric_id'] = ids[0]
        datas['summary'] = True
        datas['image'] = True
        return {
            'model': 'fashion.form',
            'type': 'ir.actions.report.xml',
            'report_name': "fashion_form_A",
            'datas': datas,
            }
    def wizard_print_b(self, cr, uid, ids, context=None):
        ''' Print report B directly (totals + image) instead of going
            through the wizard.
        '''
        record_proxy = self.browse(cr, uid, ids, context=context)[0]
        datas = {}
        datas['active_ids'] = [record_proxy.form_id.id]
        datas['active_id'] = record_proxy.form_id.id
        datas['partner_fabric_id'] = ids[0]
        datas['total'] = True
        datas['image'] = True
        return {
            'model': 'fashion.form',
            'type': 'ir.actions.report.xml',
            'report_name': "fashion_form_B",
            'datas': datas,
            }
    def wizard_print_c(self, cr, uid, ids, context=None):
        ''' Print report C directly (image only) instead of going through
            the wizard.
        '''
        record_proxy = self.browse(cr, uid, ids, context=context)[0]
        datas = {}
        datas['active_ids'] = [record_proxy.form_id.id]
        datas['active_id'] = record_proxy.form_id.id
        datas['partner_fabric_id'] = ids[0]
        datas['image'] = True
        return {
            'model': 'fashion.form',
            'type': 'ir.actions.report.xml',
            'report_name': "fashion_form_C",
            'datas': datas,
            }
    #---------------------
    # On change functions:
    #---------------------
    def on_change_symbol_fabric(self, cr, uid, ids, fabric_id, symbol_fabric, article_code, supplier_id, perc_fabric, context=None):
        ''' When the fabric changes, reload its wash symbols, article
            code, supplier, composition, cost and a combined note (all
            cleared when the fabric is deselected).
        '''
        res = {'value': {}}
        if not fabric_id:  # nothing if no fabric selected or wash symbol jet present
            res['value']['symbol_fabric'] = False
            res['value']['article_code'] = False
            res['value']['supplier_id'] = False
            res['value']['perc_fabric'] = False
            res['value']['cost'] = False
            res['value']['note_fabric'] = False
            return res
        fabric_proxy = self.pool.get('fashion.form.fabric').browse(
            cr, uid, fabric_id, context=context)
        res['value']['symbol_fabric'] = fabric_proxy.symbol
        res['value']['article_code'] = fabric_proxy.article_code
        # NOTE(review): .id on an empty supplier relies on the ORM's
        # browse-null returning False -- confirm the fabric always has one
        res['value']['supplier_id'] = fabric_proxy.supplier_id.id
        res['value']['perc_fabric'] = fabric_proxy.perc_composition
        res['value']['cost'] = fabric_proxy.cost
        res['value']['note_fabric'] = "%s%s %s" % (
            fabric_proxy.supplier_id.name if fabric_proxy.supplier_id else "",
            " ART %s" % (
                fabric_proxy.article_code) if fabric_proxy.article_code else "",
            fabric_proxy.note or '')
        return res
    def onchange_cost_computation(self, cr, uid, ids, mt_fabric, cost, retail, wholesale, sale, sum_extra_cost, context=None):
        ''' Master event for calculating the sale price:
            total_fabric = mt_fabric * cost
            sale = (total_fabric + extra costs)
                   * (100 + retail)% * (100 + wholesale)%
        '''
        res = {'value': {}}
        res['value']['total_fabric'] = (mt_fabric or 0.0) * (cost or 0.0)
        res['value']['sale'] = (res['value']['total_fabric'] + sum_extra_cost or 0.0) * (100.0 + (retail or 0.0)) * (100.0 + (wholesale or 0.0)) / 10000.0
        return res
    #def onchange_sale(self, cr, uid, sale, context=None):
    #    ''' Find % of margin for all
    #    '''
    #    res = {}
    #    return res
    #------------------
    # Fields functions:
    #------------------
    def _get_total_fabric(self, cr, uid, ids, name, args, context=None):
        ''' Compute, per line: total fabric (cost * mt), the grand total
            (fabric + the form's extra costs) and a textual margin /
            markup / profit summary.
            NOTE(review): _() here is the translation helper imported at
            the top of the module (not visible in this chunk).
        '''
        res = {}
        for obj in self.browse(cr, uid, ids, context=context):
            res[obj.id] = {}
            res[obj.id]['total_fabric'] = obj.cost * obj.mt_fabric
            res[obj.id]['total_cost'] = res[obj.id]['total_fabric'] + obj.form_id.sum_extra_cost  # TODO + cost list + accessory total
            profit = obj.sale - res[obj.id]['total_cost']
            res[obj.id]['margin_note'] = _("%5.2f%s(Mar.)\n%5.2f%s(Ric.)\n%10.2f€(Ut.)") % (
                (profit * 100.0 / res[obj.id]['total_cost']) if res[obj.id]['total_cost'] else 0.0,
                "%",
                (profit * 100.0 / obj.sale) if obj.sale else 0.0,
                "%",
                profit,
                )
        return res
    def _store_update_model_customer(self, cr, uid, ids, context=None):
        ''' Store trigger: when a fashion.form's model changes, return the
            ids of the relation lines whose related model fields must be
            recomputed.
        '''
        res = []
        rel_pool = self.pool.get('fashion.form.partner.rel')
        # Note: reset all:
        #return rel_pool.search(cr, uid, [], context=context)
        for item_id in ids:
            res.extend(rel_pool.search(cr, uid, [
                ('form_id', '=', item_id)], context=context))
        return res
    _columns = {
        'form_id': fields.many2one('fashion.form', 'Form'),
        'partner_id': fields.many2one('res.partner', 'Partner',
            domain=[('customer','=',True)], required=True),
        'fabric_id': fields.many2one('fashion.form.fabric', 'Fabric',
            #required=True # TODO restore once the empty rows are removed
            ),
        'desc_fabric': fields.char('Description', size=80),
        'perc_fabric': fields.char('Composition', size=40),
        'corr_fabric': fields.char('Additional description', size=80),
        'symbol_fabric': fields.char('Symbols', size=80),
        'note_fabric': fields.text('Fabric note'),
        'note_cost': fields.text('Cost note'),
        'weight': fields.float('Weight', digits=(10, 2)),
        'h_fabric': fields.float('H.', digits=(10, 2)),
        'mt_fabric': fields.float('Mt.', digits=(10, 2)),
        'cost': fields.float('Cost', digits=(10, 4),
            help="Unit price for fabric"),
        'retail': fields.float('Retail', digits=(10, 4)),
        'wholesale': fields.float('Wholesale', digits=(10, 4)),
        'sale': fields.float('Selling price', digits=(10, 4)),
        # Calculated fields (all computed by _get_total_fabric):
        'total_fabric': fields.function(_get_total_fabric,
            string="Total fabric",
            type="float", digits=(10, 2), store=False, multi='totals'),  # m_lining * cost
        'total_cost': fields.function(_get_total_fabric, string="Total cost",
            type="float", digits=(10, 2), store=False, multi='totals'),  # total_fabric + cost list + accessory
        'margin_note': fields.function(_get_total_fabric, string="Balance",
            type="char", size=100, store=False, multi='totals'),  # margin information
        'code': fields.char('Customer Code', size=10),
        'gerber_name': fields.char('Name', size=10),
        'gerber_desc': fields.char('Description', size=10),
        'gerber_h': fields.char('Height', size=10),
        'gerber_l': fields.char('Length', size=10),
        'article_code': fields.char('Article', size=60),
        'article_description': fields.char('Description', size=60),
        'supplier_id': fields.many2one('res.partner', 'Supplier',
            domain=[('supplier','=',True)]),
        'perc_reload': fields.float('Reloading percentage', digits=(10, 2)),
        'perc_margin': fields.float('Margin percentage', digits=(10, 2)),
        # TODO remove as soon as these are dropped from the (kanban) views:
        #'image': fields.related('form_id', 'image', type='binary', string='Image', readonly=True),
        'draw_image_a': fields.related('form_id', 'draw_image_a',
            type='binary', string='Image', readonly=True),
        #'image_large': fields.related('form_id', 'image_large', type='binary', string='Image', readonly=True),
        'cost_id': fields.many2one('fashion.form.cost', 'Cost'),
        'value': fields.float('Value', digits=(10, 2)),
        'note': fields.text('Note'),
        'article_id': fields.related('form_id', 'article_id', type='many2one',
            relation='fashion.article', string='Article', readonly=True,
            store=True),
        'season_id': fields.related('form_id','season_id', type='many2one',
            relation='fashion.season', string='Season', store=True),
        # Stored related fields (for search), refreshed by the trigger
        # _store_update_model_customer whenever the form's model changes:
        'model': fields.related('form_id', 'model', type='char',
            string='Modello', size=14,
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'model_customer': fields.related('form_id', 'model_customer',
            type='char', string='Sigla cliente', size=1,
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'model_article': fields.related('form_id', 'model_article',
            type='char', string='Sigla articolo', size=1,
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'model_number': fields.related('form_id', 'model_number',
            type='integer', string='Numero modello',
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'model_revision': fields.related('form_id', 'model_revision',
            type='char', string='Revisione', size=3,
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'review': fields.related('form_id', 'review', type='integer',
            string='Revisione',
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        'conformed': fields.related('form_id', 'conformed', type='boolean',
            string='Conformato',
            store={
                'fashion.form': (
                    _store_update_model_customer, ['model'], 10), }),
        # Import links (legacy Access migration):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        'access_2_id': fields.integer('Access 2 ID',
            help="ID Importazione che tiene il link con partner costi"),
        }
class res_partner(osv.osv):
    ''' Extra fields for partner: starting size for standardized size runs
        and the legacy Access import key.
    '''
    _name = 'res.partner'
    _inherit = 'res.partner'
    _columns = {
        # First size in the partner's standardized size sequence.
        'start': fields.integer('Start size',
            help='Departure for the standardized sizes'),
        # Import link (key back to the legacy MS Access database):
        'access_id': fields.integer('Access ID',
            help="ID Importazione che tiene il link"),
        }
class fashion_form_extra_relations(osv.osv):
    '''Table that manage the relation forms.

    Adds the one2many back-references from fashion.form to each of its
    detail tables (characteristics, costs, accessories, stitches, partners,
    measures, comments), all keyed on the detail table's `form_id` column.
    '''
    _name = 'fashion.form'
    _inherit = 'fashion.form'
    _columns = {
        'characteristic_rel_ids': fields.one2many(
            'fashion.form.characteristic.rel', 'form_id',
            'Characteristic Relation'),
        'cost_rel_ids': fields.one2many('fashion.form.cost.rel', 'form_id',
            'Cost Relation'),
        'accessory_rel_ids': fields.one2many('fashion.form.accessory.rel',
            'form_id', 'Accessory Relation'),
        # 'fabric_rel_ids': fields.one2many('fashion.form.fabric.rel', 'form_id', 'Relation'), #TODO
        'stitch_rel_ids': fields.one2many('fashion.form.stitch.rel',
            'form_id', 'Stitch Relation'),
        'partner_rel_ids': fields.one2many('fashion.form.partner.rel',
            'form_id', 'Partner Relation'),
        'measure_rel_ids': fields.one2many('fashion.form.measure.rel',
            'form_id', 'Measure Relation'),
        'comment_rel_ids': fields.one2many('fashion.form.comment.rel',
            'form_id', 'Comment Relation'),
        }
class product_template(osv.osv):
    ''' Remove translation from product name.

    Redefines `name` without `translate=True` so the product name is stored
    as a single untranslated value.
    '''
    _name = "product.template"
    _inherit = "product.template"
    _columns = {
        'name': fields.char('Name', size=128, required=True, select=True),
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
"""
ftfy: fixes text for you
This is a module for making text less broken. See the `fix_text` function
for more information.
"""
import unicodedata
import ftfy.bad_codecs
from ftfy import fixes
from ftfy.formatting import display_ljust
__version__ = '5.0.1'
# See the docstring for ftfy.bad_codecs to see what we're doing here.
ftfy.bad_codecs.ok()
def fix_text(text,
*,
fix_entities='auto',
remove_terminal_escapes=True,
fix_encoding=True,
fix_latin_ligatures=True,
fix_character_width=True,
uncurl_quotes=True,
fix_line_breaks=True,
fix_surrogates=True,
remove_control_chars=True,
remove_bom=True,
normalization='NFC',
max_decode_length=10**6):
r"""
Given Unicode text as input, fix inconsistencies and glitches in it,
such as mojibake.
Let's start with some examples:
>>> print(fix_text('ünicode'))
ünicode
>>> print(fix_text('Broken text… it’s flubberific!',
... normalization='NFKC'))
Broken text... it's flubberific!
>>> print(fix_text('HTML entities <3'))
HTML entities <3
>>> print(fix_text('<em>HTML entities <3</em>'))
<em>HTML entities <3</em>
>>> print(fix_text("¯\\_(ã\x83\x84)_/¯"))
¯\_(ツ)_/¯
>>> # This example string starts with a byte-order mark, even if
>>> # you can't see it on the Web.
>>> print(fix_text('\ufeffParty like\nit’s 1999!'))
Party like
it's 1999!
>>> print(fix_text('LOUD NOISES'))
LOUD NOISES
>>> len(fix_text('fi' * 100000))
200000
>>> len(fix_text(''))
0
Based on the options you provide, ftfy applies these steps in order:
- If `remove_terminal_escapes` is True, remove sequences of bytes that are
instructions for Unix terminals, such as the codes that make text appear
in different colors.
- If `fix_encoding` is True, look for common mistakes that come from
encoding or decoding Unicode text incorrectly, and fix them if they are
reasonably fixable. See `fixes.fix_encoding` for details.
- If `fix_entities` is True, replace HTML entities with their equivalent
characters. If it's "auto" (the default), then consider replacing HTML
entities, but don't do so in text where you have seen a pair of actual
angle brackets (that's probably actually HTML and you shouldn't mess
with the entities).
- If `uncurl_quotes` is True, replace various curly quotation marks with
plain-ASCII straight quotes.
- If `fix_latin_ligatures` is True, then ligatures made of Latin letters,
such as `fi`, will be separated into individual letters. These ligatures
are usually not meaningful outside of font rendering, and often represent
copy-and-paste errors.
- If `fix_character_width` is True, half-width and full-width characters
will be replaced by their standard-width form.
- If `fix_line_breaks` is true, convert all line breaks to Unix style
(CRLF and CR line breaks become LF line breaks).
- If `fix_surrogates` is true, ensure that there are no UTF-16 surrogates
in the resulting string, by converting them to the correct characters
when they're appropriately paired, or replacing them with \ufffd
otherwise.
- If `remove_control_chars` is true, remove control characters that
are not suitable for use in text. This includes most of the ASCII control
characters, plus some Unicode controls such as the byte order mark
(U+FEFF). Useful control characters, such as Tab, Line Feed, and
bidirectional marks, are left as they are.
- If `remove_bom` is True, remove the Byte-Order Mark at the start of the
string if it exists. (This is largely redundant, because it's a special
case of `remove_control_characters`. This option will become deprecated
in a later version.)
- If `normalization` is not None, apply the specified form of Unicode
normalization, which can be one of 'NFC', 'NFKC', 'NFD', and 'NFKD'.
- The default normalization, NFC, combines characters and diacritics that
are written using separate code points, such as converting "e" plus an
acute accent modifier into "é", or converting "ka" (か) plus a dakuten
into the single character "ga" (が). Unicode can be converted to NFC
form without any change in its meaning.
- If you ask for NFKC normalization, it will apply additional
normalizations that can change the meanings of characters. For example,
ellipsis characters will be replaced with three periods, all ligatures
will be replaced with the individual characters that make them up,
and characters that differ in font style will be converted to the same
character.
- If anything was changed, repeat all the steps, so that the function is
idempotent. "&amp;" will become "&", for example, not "&".
`fix_text` will work one line at a time, with the possibility that some
lines are in different encodings, allowing it to fix text that has been
concatenated together from different sources.
When it encounters lines longer than `max_decode_length` (1 million
codepoints by default), it will not run the `fix_encoding` step, to avoid
unbounded slowdowns.
If you're certain that any decoding errors in the text would have affected
the entire text in the same way, and you don't mind operations that scale
with the length of the text, you can use `fix_text_segment` directly to
fix the whole string in one batch.
"""
if isinstance(text, bytes):
raise UnicodeError(fixes.BYTES_ERROR_TEXT)
out = []
pos = 0
while pos < len(text):
textbreak = text.find('\n', pos) + 1
fix_encoding_this_time = fix_encoding
if textbreak == 0:
textbreak = len(text)
if (textbreak - pos) > max_decode_length:
fix_encoding_this_time = False
substring = text[pos:textbreak]
if fix_entities == 'auto' and '<' in substring and '>' in substring:
# we see angle brackets together; this could be HTML
fix_entities = False
out.append(
fix_text_segment(
substring,
fix_entities=fix_entities,
remove_terminal_escapes=remove_terminal_escapes,
fix_encoding=fix_encoding_this_time,
uncurl_quotes=uncurl_quotes,
fix_latin_ligatures=fix_latin_ligatures,
fix_character_width=fix_character_width,
fix_line_breaks=fix_line_breaks,
fix_surrogates=fix_surrogates,
remove_control_chars=remove_control_chars,
remove_bom=remove_bom,
normalization=normalization
)
)
pos = textbreak
return ''.join(out)
# Some alternate names for the main functions
ftfy = fix_text  # convenience alias: ftfy.ftfy(text)
fix_encoding = fixes.fix_encoding  # re-exported as part of the public API
fix_text_encoding = fixes.fix_text_encoding  # deprecated
def fix_file(input_file,
             encoding=None,
             *,
             fix_entities='auto',
             remove_terminal_escapes=True,
             fix_encoding=True,
             fix_latin_ligatures=True,
             fix_character_width=True,
             uncurl_quotes=True,
             fix_line_breaks=True,
             fix_surrogates=True,
             remove_control_chars=True,
             remove_bom=True,
             normalization='NFC'):
    """
    Fix text that is found in a file, yielding one fixed line at a time.

    Lines that are already str are used as-is. Bytes lines are decoded with
    `encoding` if one was supplied; otherwise the encoding is guessed from
    the first bytes line via `guess_bytes` and reused for later lines.
    See `fix_text` for the remaining parameters.
    """
    entity_mode = fix_entities
    for raw_line in input_file:
        if isinstance(raw_line, bytes):
            if encoding is None:
                raw_line, encoding = guess_bytes(raw_line)
            else:
                raw_line = raw_line.decode(encoding)
        # A pair of angle brackets suggests actual HTML: stop unescaping
        # entities for the rest of the file.
        if fix_entities == 'auto' and '<' in raw_line and '>' in raw_line:
            entity_mode = False
        yield fix_text_segment(
            raw_line,
            fix_entities=entity_mode,
            remove_terminal_escapes=remove_terminal_escapes,
            fix_encoding=fix_encoding,
            fix_latin_ligatures=fix_latin_ligatures,
            fix_character_width=fix_character_width,
            uncurl_quotes=uncurl_quotes,
            fix_line_breaks=fix_line_breaks,
            fix_surrogates=fix_surrogates,
            remove_control_chars=remove_control_chars,
            remove_bom=remove_bom,
            normalization=normalization
        )
def fix_text_segment(text,
                     *,
                     fix_entities='auto',
                     remove_terminal_escapes=True,
                     fix_encoding=True,
                     fix_latin_ligatures=True,
                     fix_character_width=True,
                     uncurl_quotes=True,
                     fix_line_breaks=True,
                     fix_surrogates=True,
                     remove_control_chars=True,
                     remove_bom=True,
                     normalization='NFC'):
    """
    Apply the fixing steps to one chunk of text that is assumed to be in a
    consistent encoding — a single line within `fix_text`, or a whole string
    you trust to be uniform.

    The steps run in a fixed order and repeat until the text stops changing,
    so the function is idempotent. See `fix_text` for what each parameter
    controls. Raises UnicodeError when given bytes instead of str.
    """
    if isinstance(text, bytes):
        raise UnicodeError(fixes.BYTES_ERROR_TEXT)
    if fix_entities == 'auto' and '<' in text and '>' in text:
        # Looks like actual HTML; leave entities alone.
        fix_entities = False
    # The fixing pipeline, in its required order. `remove_bom` is skipped
    # when `remove_control_chars` already ran, because it would be redundant.
    pipeline = [
        (remove_terminal_escapes, fixes.remove_terminal_escapes),
        (fix_encoding, fixes.fix_encoding),
        (fix_entities, fixes.unescape_html),
        (fix_latin_ligatures, fixes.fix_latin_ligatures),
        (fix_character_width, fixes.fix_character_width),
        (uncurl_quotes, fixes.uncurl_quotes),
        (fix_line_breaks, fixes.fix_line_breaks),
        (fix_surrogates, fixes.fix_surrogates),
        (remove_control_chars, fixes.remove_control_chars),
        (remove_bom and not remove_control_chars, fixes.remove_bom),
    ]
    while True:
        before = text
        for enabled, step in pipeline:
            if enabled:
                text = step(text)
        if normalization is not None:
            text = unicodedata.normalize(normalization, text)
        if text == before:
            return text
def guess_bytes(bstring):
    """
    Guess a reasonable strategy for decoding bytes of unknown encoding, and
    return ``(decoded_text, encoding_name)``.

    NOTE: this is not the recommended way to use ftfy; ftfy is not an
    encoding detector, and this guess may *create* Unicode problems. East
    Asian encodings are not attempted at all — try 'chardet' for those.

    The candidates, in order: UTF-16 with a byte order mark; UTF-8 (or the
    'utf-8-variants' codec when CESU-8 / Java-style overlong bytes appear);
    MacRoman when the line breaks look like bare CR; and finally
    'sloppy-windows-1252'.

    Raises UnicodeError when given str instead of bytes.
    """
    if isinstance(bstring, str):
        raise UnicodeError(
            "This string was already decoded as Unicode. You should pass "
            "bytes to guess_bytes, not Unicode."
        )
    # A UTF-16 byte order mark looks like nothing else.
    if bstring[:2] in (b'\xfe\xff', b'\xff\xfe'):
        return bstring.decode('utf-16'), 'utf-16'
    present = set(bstring)
    # Byte 0xed suggests CESU-8 (UTF-16 surrogates encoded individually);
    # byte 0xc0 suggests Java's non-standard overlong NUL (0xc0 0x80).
    # The 'utf-8-variants' decoder handles both, plus standard UTF-8,
    # at a small cost in speed.
    codec = 'utf-8-variants' if (0xed in present or 0xc0 in present) else 'utf-8'
    try:
        return bstring.decode(codec), codec
    except UnicodeDecodeError:
        pass
    if 0x0d in present and 0x0a not in present:
        # CR without LF: likely MacRoman line endings.
        return bstring.decode('macroman'), 'macroman'
    return bstring.decode('sloppy-windows-1252'), 'sloppy-windows-1252'
def explain_unicode(text):
    """
    Print a per-codepoint breakdown of *text*, useful for debugging
    mysterious Unicode: each line shows the codepoint in hexadecimal, the
    glyph (backslash-escaped when unprintable), its Unicode category, and
    its Unicode name (or '<unknown>').
    """
    for ch in text:
        rendered = ch if ch.isprintable() else ch.encode('unicode-escape').decode('ascii')
        print('U+{code:04X} {display} [{category}] {name}'.format(
            display=display_ljust(rendered, 7),
            code=ord(ch),
            category=unicodedata.category(ch),
            name=unicodedata.name(ch, '<unknown>')
        ))
Fix `__version__` (bump to 5.0.2).
"""
ftfy: fixes text for you
This is a module for making text less broken. See the `fix_text` function
for more information.
"""
import unicodedata
import ftfy.bad_codecs
from ftfy import fixes
from ftfy.formatting import display_ljust
__version__ = '5.0.2'
# See the docstring for ftfy.bad_codecs to see what we're doing here.
ftfy.bad_codecs.ok()
def fix_text(text,
             *,
             fix_entities='auto',
             remove_terminal_escapes=True,
             fix_encoding=True,
             fix_latin_ligatures=True,
             fix_character_width=True,
             uncurl_quotes=True,
             fix_line_breaks=True,
             fix_surrogates=True,
             remove_control_chars=True,
             remove_bom=True,
             normalization='NFC',
             max_decode_length=10**6):
    r"""
    Fix inconsistencies and glitches in Unicode text: mojibake, stray HTML
    entities, curly quotes, ligatures, width variants, unwanted control
    characters, and more.

    Text is handled one line at a time, so lines that came from different
    sources (and are broken in different ways) are each fixed independently.
    Every keyword argument toggles one fixing step; `fix_text_segment`
    documents the steps and the order they run in. Lines longer than
    `max_decode_length` codepoints skip the potentially slow `fix_encoding`
    step to keep the running time bounded.

    Raises UnicodeError when given bytes instead of str.
    """
    if isinstance(text, bytes):
        raise UnicodeError(fixes.BYTES_ERROR_TEXT)
    pieces = []
    start = 0
    while start < len(text):
        # The current piece runs through the next newline (inclusive), or to
        # the end of the string if no newline remains.
        stop = text.find('\n', start) + 1
        if stop == 0:
            stop = len(text)
        # Only run fix_encoding on lines of manageable length.
        run_fix_encoding = fix_encoding and (stop - start) <= max_decode_length
        piece = text[start:stop]
        if fix_entities == 'auto' and '<' in piece and '>' in piece:
            # Angle brackets seen together: probably real HTML, so leave
            # entities untouched from here on.
            fix_entities = False
        pieces.append(
            fix_text_segment(
                piece,
                fix_entities=fix_entities,
                remove_terminal_escapes=remove_terminal_escapes,
                fix_encoding=run_fix_encoding,
                uncurl_quotes=uncurl_quotes,
                fix_latin_ligatures=fix_latin_ligatures,
                fix_character_width=fix_character_width,
                fix_line_breaks=fix_line_breaks,
                fix_surrogates=fix_surrogates,
                remove_control_chars=remove_control_chars,
                remove_bom=remove_bom,
                normalization=normalization
            )
        )
        start = stop
    return ''.join(pieces)
# Some alternate names for the main functions
ftfy = fix_text  # convenience alias: ftfy.ftfy(text)
fix_encoding = fixes.fix_encoding  # re-exported as part of the public API
fix_text_encoding = fixes.fix_text_encoding  # deprecated
def fix_file(input_file,
             encoding=None,
             *,
             fix_entities='auto',
             remove_terminal_escapes=True,
             fix_encoding=True,
             fix_latin_ligatures=True,
             fix_character_width=True,
             uncurl_quotes=True,
             fix_line_breaks=True,
             fix_surrogates=True,
             remove_control_chars=True,
             remove_bom=True,
             normalization='NFC'):
    """
    Fix text found in a file, as a generator of fixed lines.

    str lines are used directly. bytes lines are decoded with `encoding`
    when given; otherwise `guess_bytes` guesses from the first bytes line
    and the guess is reused for the rest of the file. See `fix_text` for
    the remaining parameters.
    """
    entity_setting = fix_entities
    for line_text in input_file:
        if isinstance(line_text, bytes):
            if encoding is None:
                line_text, encoding = guess_bytes(line_text)
            else:
                line_text = line_text.decode(encoding)
        # Paired angle brackets hint at real HTML; disable entity
        # unescaping for every remaining line.
        if fix_entities == 'auto' and '<' in line_text and '>' in line_text:
            entity_setting = False
        yield fix_text_segment(
            line_text,
            fix_entities=entity_setting,
            remove_terminal_escapes=remove_terminal_escapes,
            fix_encoding=fix_encoding,
            fix_latin_ligatures=fix_latin_ligatures,
            fix_character_width=fix_character_width,
            uncurl_quotes=uncurl_quotes,
            fix_line_breaks=fix_line_breaks,
            fix_surrogates=fix_surrogates,
            remove_control_chars=remove_control_chars,
            remove_bom=remove_bom,
            normalization=normalization
        )
def fix_text_segment(text,
                     *,
                     fix_entities='auto',
                     remove_terminal_escapes=True,
                     fix_encoding=True,
                     fix_latin_ligatures=True,
                     fix_character_width=True,
                     uncurl_quotes=True,
                     fix_line_breaks=True,
                     fix_surrogates=True,
                     remove_control_chars=True,
                     remove_bom=True,
                     normalization='NFC'):
    """
    Apply the fixing steps to a single chunk of text assumed to share one
    consistent encoding — either one line inside `fix_text`, or a larger
    string you trust to be uniform.

    The steps run in a fixed order, looping until the text reaches a fixed
    point, which makes the function idempotent. See `fix_text` for the
    meaning of each parameter. Raises UnicodeError when given bytes.
    """
    if isinstance(text, bytes):
        raise UnicodeError(fixes.BYTES_ERROR_TEXT)
    if fix_entities == 'auto' and '<' in text and '>' in text:
        # Probably genuine HTML; don't unescape entities.
        fix_entities = False
    # (flag, transform) pairs in their mandatory order. The remove_bom step
    # is redundant after remove_control_chars, so it only runs when control
    # characters are kept.
    steps = [
        (remove_terminal_escapes, fixes.remove_terminal_escapes),
        (fix_encoding, fixes.fix_encoding),
        (fix_entities, fixes.unescape_html),
        (fix_latin_ligatures, fixes.fix_latin_ligatures),
        (fix_character_width, fixes.fix_character_width),
        (uncurl_quotes, fixes.uncurl_quotes),
        (fix_line_breaks, fixes.fix_line_breaks),
        (fix_surrogates, fixes.fix_surrogates),
        (remove_control_chars, fixes.remove_control_chars),
        (remove_bom and not remove_control_chars, fixes.remove_bom),
    ]
    while True:
        snapshot = text
        for wanted, transform in steps:
            if wanted:
                text = transform(text)
        if normalization is not None:
            text = unicodedata.normalize(normalization, text)
        if text == snapshot:
            return text
def guess_bytes(bstring):
    """
    Guess how to decode bytes of unknown encoding; return
    ``(decoded_text, encoding_name)``.

    NOTE: this is not the recommended way to use ftfy — ftfy is not an
    encoding detector, and a wrong guess can *create* Unicode problems.
    East Asian encodings are never attempted; consider 'chardet' there.

    Candidates, in order: UTF-16 when a byte order mark is present; UTF-8
    (switching to the 'utf-8-variants' codec when CESU-8 or Java-style
    overlong bytes are seen); MacRoman when line breaks are bare CR; and
    'sloppy-windows-1252' as the fallback.

    Raises UnicodeError when given str instead of bytes.
    """
    if isinstance(bstring, str):
        raise UnicodeError(
            "This string was already decoded as Unicode. You should pass "
            "bytes to guess_bytes, not Unicode."
        )
    # Nothing else starts with a UTF-16 byte order mark.
    if bstring[:2] in (b'\xfe\xff', b'\xff\xfe'):
        return bstring.decode('utf-16'), 'utf-16'
    seen = set(bstring)
    # 0xed usually means CESU-8 (surrogates encoded separately); 0xc0 is
    # impossible in strict UTF-8 but appears in Java's overlong NUL
    # (0xc0 0x80). 'utf-8-variants' decodes both plus normal UTF-8.
    attempt = 'utf-8-variants' if (0xed in seen or 0xc0 in seen) else 'utf-8'
    try:
        return bstring.decode(attempt), attempt
    except UnicodeDecodeError:
        pass
    if 0x0d in seen and 0x0a not in seen:
        # CR with no LF: these line breaks point at MacRoman.
        return bstring.decode('macroman'), 'macroman'
    return bstring.decode('sloppy-windows-1252'), 'sloppy-windows-1252'
def explain_unicode(text):
    """
    Debugging helper: print one line per codepoint of *text*, showing the
    hex codepoint number, the glyph (escaped when unprintable), its Unicode
    category, and its Unicode name (or '<unknown>').
    """
    for codepoint in text:
        if codepoint.isprintable():
            shown = codepoint
        else:
            shown = codepoint.encode('unicode-escape').decode('ascii')
        print('U+{code:04X} {display} [{category}] {name}'.format(
            display=display_ljust(shown, 7),
            code=ord(codepoint),
            category=unicodedata.category(codepoint),
            name=unicodedata.name(codepoint, '<unknown>')
        ))
|
from binascii import hexlify
from hashlib import sha256
from typing import TypeVar
from fastecdsa import _ecdsa
from .curve import Curve, P256
from .point import Point
from .util import RFC6979, msg_bytes
MsgTypes = TypeVar('MsgTypes', str, bytes, bytearray)
class EcdsaError(Exception):
    """Raised for invalid ECDSA inputs: a public key not on the curve, or
    signature components outside the valid range."""
    def __init__(self, msg):
        # Forward the message to Exception so str(err) and tracebacks show
        # it (the original stored it only on self.msg, leaving str() empty).
        super().__init__(msg)
        self.msg = msg
def sign(msg: MsgTypes, d: int, curve: Curve = P256, hashfunc=sha256, prehashed: bool = False):
    """Sign a message using the elliptic curve digital signature algorithm.

    The algorithm is described in full in FIPS 186-4 Section 6; see
    http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf.

    Args:
        |  msg (str|bytes|bytearray): A message to be signed.
        |  d (int): The ECDSA private key of the signer.
        |  curve (fastecdsa.curve.Curve): The curve to be used to sign the message.
        |  hashfunc (_hashlib.HASH): The hash function used to compress the message.
        |  prehashed (bool): The message being passed has already been hashed by :code:`hashfunc`.
    """
    # Deterministic nonce per RFC 6979 — never reuse a nonce across messages.
    nonce = RFC6979(msg, d, curve.q, hashfunc, prehashed=prehashed).gen_nonce()
    if prehashed:
        # The caller passed raw digest bytes; just hex-encode them.
        digest_hex = hexlify(msg).decode()
    else:
        digest_hex = hashfunc(msg_bytes(msg)).hexdigest()
    r, s = _ecdsa.sign(
        digest_hex,
        str(d),
        str(nonce),
        str(curve.p),
        str(curve.a),
        str(curve.b),
        str(curve.q),
        str(curve.gx),
        str(curve.gy)
    )
    return int(r), int(s)
def verify(sig: (int, int), msg: MsgTypes, Q: Point, curve: Curve = P256, hashfunc=sha256) -> bool:
    """Verify a message signature using the elliptic curve digital signature algorithm.

    The algorithm is described in full in FIPS 186-4 Section 6; see
    http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf.

    Args:
        |  sig (int, int): The signature for the message.
        |  msg (str|bytes|bytearray): A message to be signed.
        |  Q (fastecdsa.point.Point): The ECDSA public key of the signer.
        |  curve (fastecdsa.curve.Curve): The curve to be used to sign the message.
        |  hashfunc (_hashlib.HASH): The hash function used to compress the message.

    Returns:
        bool: True if the signature is valid, False otherwise.

    Raises:
        fastecdsa.ecdsa.EcdsaError: If the signature or public key are invalid. Invalid signature
        in this case means that it has values less than 1 or greater than the curve order.
    """
    if isinstance(Q, tuple):
        Q = Point(Q[0], Q[1], curve)
    r, s = sig
    # Validate Q, r, s. Per FIPS 186-4, r and s must each lie in [1, n-1]
    # where n is the curve order; the previous check (r > curve.q) wrongly
    # accepted r == n, contradicting the error message below.
    if not curve.is_point_on_curve((Q.x, Q.y)):
        raise EcdsaError('Invalid public key, point is not on curve {}'.format(curve.name))
    elif r > curve.q - 1 or r < 1:
        raise EcdsaError(
            'Invalid Signature: r is not a positive integer smaller than the curve order')
    elif s > curve.q - 1 or s < 1:
        raise EcdsaError(
            'Invalid Signature: s is not a positive integer smaller than the curve order')
    hashed = hashfunc(msg_bytes(msg)).hexdigest()
    return _ecdsa.verify(
        str(r),
        str(s),
        hashed,
        str(Q.x),
        str(Q.y),
        str(curve.p),
        str(curve.a),
        str(curve.b),
        str(curve.q),
        str(curve.gx),
        str(curve.gy)
    )
Add `prehashed` kwarg to `verify` and let `sign` accept a hash object when `prehashed=True`.
from binascii import hexlify
from hashlib import sha256
from typing import TypeVar
from fastecdsa import _ecdsa
from .curve import Curve, P256
from .point import Point
from .util import RFC6979, msg_bytes
MsgTypes = TypeVar('MsgTypes', str, bytes, bytearray)
class EcdsaError(Exception):
    """Raised for invalid ECDSA inputs: a public key not on the curve, or
    signature components outside the valid range."""
    def __init__(self, msg):
        # Forward the message to Exception so str(err) and tracebacks show
        # it (the original stored it only on self.msg, leaving str() empty).
        super().__init__(msg)
        self.msg = msg
def sign(msg: MsgTypes, d: int, curve: Curve = P256, hashfunc=sha256, prehashed: bool = False):
    """Sign a message using the elliptic curve digital signature algorithm.

    The elliptic curve signature algorithm is described in full in FIPS 186-4 Section 6. Please
    refer to http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf for more information.

    Args:
        |  msg (str|bytes|bytearray): A message to be signed.
        |  d (int): The ECDSA private key of the signer.
        |  curve (fastecdsa.curve.Curve): The curve to be used to sign the message.
        |  hashfunc (_hashlib.HASH): The hash function used to compress the message.
        |  prehashed (bool): The message being passed has already been hashed by :code:`hashfunc`.

    Returns:
        (int, int): The signature (r, s) as a tuple of ints.
    """
    if prehashed:
        try:
            # A hash object was passed in; pull both hex and raw digests from it.
            hex_digest = msg.hexdigest()
            msg = msg.digest()
        except AttributeError:
            # Raw digest bytes were passed in rather than a hash object.
            # (The original bare `except:` also swallowed unrelated errors.)
            hex_digest = hexlify(msg).decode()
    else:
        hex_digest = hashfunc(msg_bytes(msg)).hexdigest()

    # generate a deterministic nonce per RFC6979
    rfc6979 = RFC6979(msg, d, curve.q, hashfunc, prehashed=prehashed)
    k = rfc6979.gen_nonce()

    r, s = _ecdsa.sign(
        hex_digest,
        str(d),
        str(k),
        str(curve.p),
        str(curve.a),
        str(curve.b),
        str(curve.q),
        str(curve.gx),
        str(curve.gy)
    )
    return int(r), int(s)
def verify(sig: (int, int), msg: MsgTypes, Q: Point, curve: Curve = P256, hashfunc=sha256, prehashed: bool = False) -> bool:
    """Verify a message signature using the elliptic curve digital signature algorithm.

    The elliptic curve signature algorithm is described in full in FIPS 186-4 Section 6. Please
    refer to http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf for more information.

    Args:
        |  sig (int, int): The signature for the message.
        |  msg (str|bytes|bytearray): A message to be signed.
        |  Q (fastecdsa.point.Point): The ECDSA public key of the signer.
        |  curve (fastecdsa.curve.Curve): The curve to be used to sign the message.
        |  hashfunc (_hashlib.HASH): The hash function used to compress the message.
        |  prehashed (bool): The message being passed has already been hashed by :code:`hashfunc`.

    Returns:
        bool: True if the signature is valid, False otherwise.

    Raises:
        fastecdsa.ecdsa.EcdsaError: If the signature or public key are invalid. Invalid signature
            in this case means that it has values less than 1 or greater than the curve order.
    """
    # Accept a raw (x, y) coordinate pair in place of a Point instance.
    if isinstance(Q, tuple):
        Q = Point(Q[0], Q[1], curve)
    r, s = sig

    # validate Q, r, s (Q should be validated in constructor of Point already but double check)
    if not curve.is_point_on_curve((Q.x, Q.y)):
        raise EcdsaError('Invalid public key, point is not on curve {}'.format(curve.name))
    elif r > curve.q or r < 1:
        raise EcdsaError(
            'Invalid Signature: r is not a positive integer smaller than the curve order')
    elif s > curve.q or s < 1:
        raise EcdsaError(
            'Invalid Signature: s is not a positive integer smaller than the curve order')

    if prehashed:
        try:
            # A hash object was passed; use its hex digest directly.
            hashed = msg.hexdigest()
        except AttributeError:
            # Raw digest bytes were passed instead of a hash object.
            # (The original bare `except:` also swallowed unrelated errors.)
            hashed = msg.hex()
    else:
        hashed = hashfunc(msg_bytes(msg)).hexdigest()

    return _ecdsa.verify(
        str(r),
        str(s),
        hashed,
        str(Q.x),
        str(Q.y),
        str(curve.p),
        str(curve.a),
        str(curve.b),
        str(curve.q),
        str(curve.gx),
        str(curve.gy)
    )
# pylint disable: W0622,C0103,R0913,R0902
"""
Classes and functions for quality assessment of FASTQ and SAM format NGS reads
"""
from __future__ import division
import sys
import os
import re
from six.moves import range, zip
from six import string_types, PY2, PY3
import string
from itertools import groupby, islice
from collections import defaultdict
try:
from collections import Counter
except ImportError:
from fastqp.backports import Counter
from subprocess import Popen, PIPE
from io import TextIOWrapper
class Gzip(object):
    """ Call system gzip and maintain interface compatibility with python
    gzip module """

    def __init__(self, filename, mode):
        # Launch the gzip subprocess up front; self.stream is the pipe end
        # appropriate for `mode`.
        self.stream, self.p = self.open(filename, mode)
        self.mode = mode
        self.filename = filename

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 iterator protocol; delegates to the py2-style next().
        return self.next()

    def next(self):
        return next(self.stream)

    def open(self, filename, mode):
        """ Start a gzip subprocess for reading ('r'/'rb') or writing ('w').

        Returns (stream, process): text modes wrap the pipe in a
        TextIOWrapper; 'b' modes expose the raw pipe.
        """
        if 'r' in mode:
            self.fh = open(filename, 'rb', 0)
            p = Popen(['gzip', '-dc', filename], stdout=PIPE, stderr=PIPE)
            if 'b' in mode:
                fh = p.stdout
            else:
                try:
                    fh = TextIOWrapper(p.stdout)
                except AttributeError:
                    sys.exit(p.stderr.readlines())
        elif 'w' in mode:
            self.fh = open(filename, 'wb', 0)
            p = Popen(['gzip', '-c'], stdin=PIPE, stdout=self.fh)
            fh = p.stdin
        return (fh, p)

    def write(self, string):
        # Encode text and push it through gzip's stdin.
        self.stream.write(string.encode('utf-8'))

    def read(self, string):
        # BUG FIX: the original discarded the data it read and returned None.
        # NOTE(review): despite the name, `string` is the byte/char count
        # forwarded to stream.read().
        return self.stream.read(string)

    def close(self):
        self.__exit__()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Drain/terminate the subprocess, then close the underlying file.
        self.p.communicate()
        if self.fh:
            self.fh.close()
class Fastq(object):
    """
    A class to hold features from fastq reads.
    """

    def __init__(self, name='', seq='', strand='+', qual='', conv=None):
        # conv optionally carries a per-base conversion string (YM:Z tag data).
        self.name = name
        self.seq = seq
        self.strand = strand
        self.qual = qual
        self.conv = conv
        self.i = int()  # cursor for the py2-style iterator protocol
        assert isinstance(name, string_types)
        assert isinstance(seq, string_types)
        assert isinstance(qual, string_types)

    def __iter__(self):
        return self

    def next(self):
        # Yield successive single-base slices of this read.
        if self.i < len(self):
            value, self.i = self[self.i], self.i + 1
            return value
        else:
            raise StopIteration()

    def __getitem__(self, key):
        # Slicing a read slices seq/qual (and conv when present) in lockstep.
        if self.conv:
            return self.__class__(self.name, self.seq[key], self.strand,
                                  self.qual[key], self.conv[key])
        else:
            return self.__class__(self.name, self.seq[key], self.strand,
                                  self.qual[key])

    def __next__(self):
        return self.next()

    def __repr__(self):
        return str(self)

    def __str__(self):
        # NOTE(review): mutates self.name to guarantee the leading '@'.
        if self.name[0] != '@':
            self.name = ''.join(['@', self.name])
        if self.conv:
            return '\n'.join(['{0}:YM:Z:{1}'.format(self.name, self.conv),
                              self.seq, self.strand, self.qual]) + '\n'
        else:
            return '\n'.join([self.name, self.seq, self.strand, self.qual]) + '\n'

    def __len__(self):
        return len(self.seq)

    def gc(self):
        """ Return the GC content of self as an int

        >>> x = Fastq(name='test', seq='TTTTTATGGAGGTATTGAGAACGTAAGATGTTTGGATAT', qual=' # # ##EC4<?4A<+EFB@GHC<9FAA+DDCAFFC=22')
        >>> x.gc()
        30
        """
        if not self.seq:
            # Empty read: report 0 instead of raising ZeroDivisionError.
            return 0
        g = self.seq.count('G')
        c = self.seq.count('C')
        return int((g + c) / len(self) * 100)
class Sam(object):
    """ Store fields in each line of a SAM file, provided as a tuple. """

    __slots__ = ['qname', 'flag', 'rname', 'pos', 'mapq', 'cigar', 'rnext', 'pnext', 'tlen', 'seq', 'qual', 'tags', '_tags', '_cigars']

    def __init__(self, fields):
        # `fields` is the tab-split SAM line: 11 mandatory columns, then tags.
        self.qname = fields[0]
        self.flag = int(fields[1])
        self.rname = fields[2]
        self.pos = int(fields[3])
        self.mapq = int(fields[4])
        self.cigar = fields[5]
        self.rnext = fields[6]
        self.pnext = int(fields[7])
        self.tlen = int(fields[8])
        self.seq = fields[9]
        self.qual = fields[10]
        self.tags = None        # parsed lazily by parse_sam_tags
        self._tags = fields[11:]
        self._cigars = None     # parsed lazily by cigar_split

    def __gt__(self, other):
        # Order by reference name, then position, then full record text.
        if self.rname != other.rname:
            return self.rname > other.rname
        elif (self.rname == other.rname) and (self.pos != other.pos):
            return self.pos > other.pos
        else:
            return str(self) > str(other)

    def __lt__(self, other):
        if self.rname != other.rname:
            return self.rname < other.rname
        elif (self.rname == other.rname) and (self.pos != other.pos):
            return self.pos < other.pos
        else:
            return str(self) < str(other)

    def __eq__(self, other):
        # NOTE(review): compares full text only when rname/pos match but the
        # records differ textually; otherwise falls back to position equality.
        if (self.rname == other.rname) and (self.pos == other.pos) and (str(self) != str(other)):
            return str(self) == str(other)
        else:
            return self.pos == other.pos

    def __str__(self):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        return '\t'.join((self.qname, str(self.flag), self.rname, str(self.pos),
                          str(self.mapq), str(self.cigar), self.rnext, str(self.pnext),
                          str(self.tlen), ''.join(self.seq), ''.join(self.qual)) + \
                         tuple(':'.join((tag, self.tags[tag][0], str(self.tags[tag][1]))) for tag in sorted(self.tags.keys()))) + '\n'

    def __repr__(self):
        return "Sam({0}:{1}:{2})".format(self.rname, self.pos, self.qname)

    def __len__(self):
        # Reference-consumed length of the alignment.
        return sum(c[0] for c in self.cigars if c[1] in
                   ("M", "D", "N", "EQ", "X", "P"))

    def __getitem__(self, key):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        return self.tags[key][1]

    def __setitem__(self, key, value):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        self.tags[key] = value

    def cigar_split(self):
        """ CIGAR grouping function modified from:
        https://github.com/brentp/bwa-meth
        """
        if self.cigar == "*":
            yield (0, None)
            # BUG FIX: `raise StopIteration` inside a generator becomes
            # RuntimeError under PEP 479 (Python 3.7+); a plain return is the
            # correct way to end the generator.
            return
        cig_iter = groupby(self.cigar, lambda c: c.isdigit())
        for g, n in cig_iter:
            yield int("".join(n)), "".join(next(cig_iter)[1])

    @property
    def conv(self):
        # The YM:Z tag holds the per-base conversion string.
        return self['YM']

    @property
    def cigars(self):
        if not self._cigars:
            self._cigars = tuple(self.cigar_split())
        return self._cigars

    @property
    def mapped(self):
        return not (self.flag & 0x4)

    @property
    def secondary(self):
        return bool(self.flag & 0x100)

    @property
    def reverse(self):
        return bool(self.flag & 0x10)

    @property
    def duplicate(self):
        return bool(self.flag & 0x400)

    def gapped(self, string, gap_char='-'):
        """ Return string with all deletions wrt reference
        represented as gaps '-' and all insertions wrt reference
        removed.

        i: sequence index
        """
        seq = []
        i = 0
        for n, t in self.cigars:
            if t in ("M", "N", "EQ", "X", "P"):
                seq.extend(string[i:i+n])
                i += n
            elif t in ("D",):
                seq.extend(('-',) * n)
            elif t in ("I",):
                i += n
        return ''.join(seq)

    @property
    def coords(self):
        return range(self.pos, self.pos + len(self))
class FastqReader:
    """
    A class to read the name, sequence, strand and qualities from a fastq file
    """

    def __init__(self, f):
        # Wrap gzipped files with the system-gzip reader; otherwise use f as-is.
        name, ext = os.path.splitext(f.name)
        if ext == '.gz':
            self.file = Gzip(''.join([name, ext]), 'r')
        else:
            self.file = f

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        # Read one 4-line FASTQ record.
        try:
            name = next(self.file).strip().split()[0]  # remove whitespace
            seq = next(self.file).strip()
            strand = next(self.file).strip()
            qual = next(self.file).strip()
            if name.count(':YM:Z:') > 0:
                # The read name carries an embedded conversion tag; split it out.
                tag, dtype, data = name.split(':')[-3:]
                name = ':'.join(name.split(':')[:-3])
                return Fastq(name=name, seq=seq, strand=strand, qual=qual, conv=data)
            else:
                return Fastq(name=name, seq=seq, strand=strand, qual=qual)
        except StopIteration:
            raise StopIteration

    def subsample(self, n):
        """ Draws every nth read from self. Returns Fastq. """
        # Work in line units: each FASTQ record is 4 lines.
        n = n * 4
        for i, line in enumerate(self.file):
            if i % n == 0:
                name = line.strip().split()[0]
            elif i % n == 1:
                seq = line.strip()
            elif i % n == 2:
                strand = line.strip()
            elif i % n == 3:
                qual = line.strip()
                if name.count(':YM:Z:') > 0:
                    tag, dtype, data = name.split(':')[-3:]
                    name = ':'.join(name.split(':')[:-3])
                    yield Fastq(name=name, seq=seq, strand=strand, qual=qual, conv=data)
                else:
                    yield Fastq(name=name, seq=seq, strand=strand, qual=qual)

    def fileno(self):
        return self.file.fileno()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.file.close()
class Reader(object):
    """ Read SAM/BAM format file using iterator. """

    def __init__(self, f):
        # Dispatch on the extension by delegating to the appropriate subclass
        # __init__ (unusual, but it is what sets self.header and self.file).
        name, ext = os.path.splitext(f.name)
        if ext == '.bam':
            BamReaderSamtools.__init__(self, f)
        else:
            SamReader.__init__(self, f)

    def next(self):
        # Parse the next alignment line into a Sam record.
        try:
            line = next(self.file).rstrip('\n\r')
            return Sam(tuple(line.split('\t')))
        except StopIteration:
            raise StopIteration

    def __next__(self):
        return self.next()

    def __iter__(self):
        return self

    def subsample(self, n):
        """ Draws every nth read from self. Returns Sam. """
        for i, line in enumerate(self.file):
            if i % n == 0:
                yield Sam(tuple(line.rstrip().split('\t')))

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.file.close()
class SamReader(Reader):
    """ Read SAM format file using iterator. """

    def __init__(self, f):
        # Collect '@'-prefixed header lines; alignment iteration is inherited
        # from Reader.
        self.header = []
        self.file = f
        for line in self.file:
            if line[0] == '@':
                self.header.append(line.rstrip('\n\r'))
            else:
                # NOTE(review): this first non-header line is consumed here and
                # never returned, so the first alignment record appears to be
                # dropped -- confirm whether callers depend on this.
                break
class BamReaderSamtools(Reader):
    """ Read BAM format file using iterator. """

    def __init__(self, f):
        # Grab the header first via `samtools view -H`.
        pline = ['samtools', 'view', '-H', f.name]
        try:
            p = Popen(pline, bufsize=-1, stdout=PIPE,
                      stderr=PIPE)
        except OSError:
            sys.stderr.write('Samtools must be installed for BAM file support!\n')
            sys.exit(1)
        self.header = [line.decode('utf-8').rstrip('\n\r') for line in p.stdout]
        p.wait()
        # Then stream the alignments from a second samtools process.
        pline = ['samtools', 'view', f.name]
        self.p = Popen(pline, bufsize=-1, stdout=PIPE,
                       stderr=PIPE)
        self.file = TextIOWrapper(self.p.stdout)

    def __exit__(self, *args):
        self.p.wait()
class Stats:
    """ Counter for characterization of NGS reads
    """

    def __init__(self):
        # Per-position tallies (positions are 1-based).
        self.depth = defaultdict(int)
        self.nuc = defaultdict(lambda: defaultdict(int))
        self.qual = defaultdict(lambda: defaultdict(int))
        self.gc = defaultdict(int)
        # NOTE(review): Counter(defaultdict(int)) is equivalent to Counter().
        self.kmers = Counter(defaultdict(int))
        self.conv = defaultdict(lambda: defaultdict(int))

    def evaluate(self, seq, qual, conv=None):
        """ Evaluate read object at each position, and fill in nuc and qual dictionaries """
        self.gc[gc(seq)] += 1
        if conv:
            cpgs = cpg_map(seq)
        for i in range(1, len(seq) + 1):
            self.depth[i] += 1
            self.nuc[i][seq[i-1]] += 1
            self.qual[i][qual[i-1]] += 1
            if conv:
                # Only count conversion evidence at CpG positions.
                if cpgs[i-1] != 'N':
                    self.conv[i][conv[i-1]] += 1

    def kmercount(self, seq, k=5):
        """ Count all kmers of k length in seq and update kmer counter.
        """
        for kmer in window(seq, n=k):
            self.kmers[kmer] += 1

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass
def bam_read_count(bamfile):
    """ Return a tuple of the number of mapped and unmapped reads in a bam file """
    # `samtools idxstats` emits one line per reference:
    # name<TAB>length<TAB>mapped<TAB>unmapped
    proc = Popen(['samtools', 'idxstats', bamfile], stdout=PIPE)
    n_mapped = 0
    n_unmapped = 0
    for row in proc.stdout:
        _name, _length, mapped_count, unmapped_count = row.rstrip().split()
        n_mapped += int(mapped_count)
        n_unmapped += int(unmapped_count)
    return (n_mapped, n_unmapped)
def parse_sam_tags(tagfields):
    """ Return a dictionary containing the tags """
    decoded = (decode_tag(field) for field in tagfields)
    return {tag: (dtype, data) for tag, dtype, data in decoded}
def encode_tag(tag, data_type, data):
    """ Write a SAM tag in the format ``TAG:TYPE:data``

    >>> encode_tag('YM', 'Z', '#""9O"1@!J')
    'YM:Z:#""9O"1@!J'
    """
    parts = (tag.upper(), data_type.upper(), data)
    return ':'.join(parts)
def decode_tag(tag):
    """ Parse a SAM format tag to a (TAG, TYPE, data) tuple.

    TYPE in A, i, f, Z, H, B

    >>> decode_tag('YM:Z:#""9O"1@!J')
    ('YM', 'Z', '#""9O"1@!J')
    >>> decode_tag('XS:i:5')
    ('XS', 'i', 5)
    """
    values = tag.split(':')
    if len(values) != 3:
        # The data portion may itself contain ':'; re-join the tail.  Keep it
        # a LIST -- the numeric branches below assign values[2] (the original
        # rebound to a tuple here, making that assignment a TypeError).
        values = [values[0], values[1], ':'.join(values[2:])]
    if values[1] == 'i':
        values[2] = int(values[2])
    elif values[1] == 'f':
        values[2] = float(values[2])
    elif values[1] == 'H':
        # Properly raise (the original raised a (class, msg) tuple).
        raise NotImplementedError("Hex array SAM tags are currently not parsed.")
    elif values[1] == 'B':
        raise NotImplementedError("Byte array SAM tags are currently not parsed.")
    return tuple(values)
def gc(seq):
    """ Return the GC content of seq as an int

    >>> x = tuple('TTTTTATGGAGGTATTGAGAACGTAAGATGTTTGGATAT')
    >>> gc(x)
    30
    """
    if not seq:
        # Empty sequence: report 0 instead of raising ZeroDivisionError.
        return 0
    g = seq.count('G')
    c = seq.count('C')
    return int((g + c) / len(seq) * 100)
def padbases(bases):
    """For each base call in dictionary D, add an entry base:0 if key base does not exist."""
    def inner(D):
        # setdefault leaves existing counts untouched and inserts zeros.
        for base in bases:
            D.setdefault(base, 0)
    return inner
def percentile(D, percent):
    """
    modified from: http://stackoverflow.com/a/2753343/717419

    Find the percentile of a list of values.

    D - a dictionary with key=numeric value and value=frequency.
    percent - a float value from 0.0 to 1.0.

    outlier removal: http://www.purplemath.com/modules/boxwhisk3.htm

    return the percentile of the values (clamped to the whisker bounds)
    """
    N = sorted(D.keys())  # dict keys
    P = [D[n] for n in N]  # dict values
    if not N:
        return None
    total = sum(P)
    k = total * percent
    l = total * 0.25  # lower quartile
    u = total * 0.75  # upper quartile
    # Walk the cumulative frequency to find the value at each cut point.
    z = uz = lz = None
    e = 0
    for n, p in zip(N, P):  # find percentile
        e += p
        if e >= k:
            z = n  # value at percentile
            break
    e = 0
    for n, p in zip(N, P):  # find upper quartile
        e += p
        if e >= u:
            uz = n  # value at quartile
            break
    e = 0
    for n, p in zip(N, P):  # find lower quartile
        e += p
        if e >= l:
            lz = n  # value at quartile
            break
    iqd = 1.5 * (uz - lz)  # 1.5 times the inter-quartile distance
    # Clamp outliers to the whisker bounds.  BUG FIX: the original used
    # bitwise '&' between an int and a bool (e.g. 2 & True == 0), which made
    # the outlier tests evaluate incorrectly; use boolean 'and' and an
    # explicit None check so a percentile value of 0 is handled correctly.
    if z is not None and z < lz - iqd:
        return int(lz - iqd)
    elif z is not None and z > uz + iqd:
        return int(uz + iqd)
    elif z is not None:
        return int(z)
    else:
        return N[-1]
def window(seq, n=2):
    """ Returns a sliding window (of width n) over data from the iterable
        s -> (s0,s1,...s[n-1]), (s1,s2,...,sn), ... """
    stream = iter(seq)
    # Prime the first window; if the input is shorter than n, yield nothing.
    buf = tuple(islice(stream, n))
    if len(buf) == n:
        yield ''.join(buf)
    # Slide one element at a time, joining each window into a string.
    for item in stream:
        buf = buf[1:] + (item,)
        yield ''.join(buf)
def mean(s):
    """ Arithmetic mean of the values in s. """
    total = sum(s)
    count = len(s)
    return total / count
def cpg_map(seq):
    """ Return a string of C/G/N marking CpG dinucleotide positions in seq.

    >>> cpg_map('CGCGTAGCCG')
    'CGCGNNNNCG'
    """
    # Pad with N so matches at either boundary index safely into the list.
    padded = ''.join(['N', seq, 'N'])
    marks = ['N'] * len(padded)
    for match in re.finditer('CG', padded):
        marks[match.start()] = 'C'
        marks[match.start() + 1] = 'G'
    # Strip the padding before returning.
    return ''.join(marks[1:-1])
if __name__ == "__main__":
    # Run the embedded doctests when this module is executed directly.
    import doctest
    doctest.testmod()
Read the gzip subprocess's stderr as a single string (`read()`) rather than a list of lines (`readlines()`).
# pylint disable: W0622,C0103,R0913,R0902
"""
Classes and functions for quality assessment of FASTQ and SAM format NGS reads
"""
from __future__ import division
import sys
import os
import re
from six.moves import range, zip
from six import string_types, PY2, PY3
import string
from itertools import groupby, islice
from collections import defaultdict
try:
from collections import Counter
except ImportError:
from fastqp.backports import Counter
from subprocess import Popen, PIPE
from io import TextIOWrapper
class Gzip(object):
    """ Call system gzip and maintain interface compatibility with python
    gzip module """

    def __init__(self, filename, mode):
        # Launch the gzip subprocess up front; self.stream is the pipe end
        # appropriate for `mode`.
        self.stream, self.p = self.open(filename, mode)
        self.mode = mode
        self.filename = filename

    def __iter__(self):
        return self

    def __next__(self):
        # Python 3 iterator protocol; delegates to the py2-style next().
        return self.next()

    def next(self):
        return next(self.stream)

    def open(self, filename, mode):
        """ Start a gzip subprocess for reading ('r'/'rb') or writing ('w').

        Returns (stream, process): text modes wrap the pipe in a
        TextIOWrapper; 'b' modes expose the raw pipe.
        """
        if 'r' in mode:
            self.fh = open(filename, 'rb', 0)
            p = Popen(['gzip', '-dc', filename], stdout=PIPE, stderr=PIPE)
            if 'b' in mode:
                fh = p.stdout
            else:
                try:
                    fh = TextIOWrapper(p.stdout)
                except AttributeError:
                    sys.exit(p.stderr.read())
        elif 'w' in mode:
            self.fh = open(filename, 'wb', 0)
            p = Popen(['gzip', '-c'], stdin=PIPE, stdout=self.fh)
            fh = p.stdin
        return (fh, p)

    def write(self, string):
        # Encode text and push it through gzip's stdin.
        self.stream.write(string.encode('utf-8'))

    def read(self, string):
        # BUG FIX: the original discarded the data it read and returned None.
        # NOTE(review): despite the name, `string` is the byte/char count
        # forwarded to stream.read().
        return self.stream.read(string)

    def close(self):
        self.__exit__()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Drain/terminate the subprocess, then close the underlying file.
        self.p.communicate()
        if self.fh:
            self.fh.close()
class Fastq(object):
    """
    A class to hold features from fastq reads.
    """

    def __init__(self, name='', seq='', strand='+', qual='', conv=None):
        # conv optionally carries a per-base conversion string (YM:Z tag data).
        self.name = name
        self.seq = seq
        self.strand = strand
        self.qual = qual
        self.conv = conv
        self.i = int()  # cursor for the py2-style iterator protocol
        assert isinstance(name, string_types)
        assert isinstance(seq, string_types)
        assert isinstance(qual, string_types)

    def __iter__(self):
        return self

    def next(self):
        # Yield successive single-base slices of this read.
        if self.i < len(self):
            value, self.i = self[self.i], self.i + 1
            return value
        else:
            raise StopIteration()

    def __getitem__(self, key):
        # Slicing a read slices seq/qual (and conv when present) in lockstep.
        if self.conv:
            return self.__class__(self.name, self.seq[key], self.strand,
                                  self.qual[key], self.conv[key])
        else:
            return self.__class__(self.name, self.seq[key], self.strand,
                                  self.qual[key])

    def __next__(self):
        return self.next()

    def __repr__(self):
        return str(self)

    def __str__(self):
        # NOTE(review): mutates self.name to guarantee the leading '@'.
        if self.name[0] != '@':
            self.name = ''.join(['@', self.name])
        if self.conv:
            return '\n'.join(['{0}:YM:Z:{1}'.format(self.name, self.conv),
                              self.seq, self.strand, self.qual]) + '\n'
        else:
            return '\n'.join([self.name, self.seq, self.strand, self.qual]) + '\n'

    def __len__(self):
        return len(self.seq)

    def gc(self):
        """ Return the GC content of self as an int

        >>> x = Fastq(name='test', seq='TTTTTATGGAGGTATTGAGAACGTAAGATGTTTGGATAT', qual=' # # ##EC4<?4A<+EFB@GHC<9FAA+DDCAFFC=22')
        >>> x.gc()
        30
        """
        if not self.seq:
            # Empty read: report 0 instead of raising ZeroDivisionError.
            return 0
        g = self.seq.count('G')
        c = self.seq.count('C')
        return int((g + c) / len(self) * 100)
class Sam(object):
    """ Store fields in each line of a SAM file, provided as a tuple. """

    __slots__ = ['qname', 'flag', 'rname', 'pos', 'mapq', 'cigar', 'rnext', 'pnext', 'tlen', 'seq', 'qual', 'tags', '_tags', '_cigars']

    def __init__(self, fields):
        # `fields` is the tab-split SAM line: 11 mandatory columns, then tags.
        self.qname = fields[0]
        self.flag = int(fields[1])
        self.rname = fields[2]
        self.pos = int(fields[3])
        self.mapq = int(fields[4])
        self.cigar = fields[5]
        self.rnext = fields[6]
        self.pnext = int(fields[7])
        self.tlen = int(fields[8])
        self.seq = fields[9]
        self.qual = fields[10]
        self.tags = None        # parsed lazily by parse_sam_tags
        self._tags = fields[11:]
        self._cigars = None     # parsed lazily by cigar_split

    def __gt__(self, other):
        # Order by reference name, then position, then full record text.
        if self.rname != other.rname:
            return self.rname > other.rname
        elif (self.rname == other.rname) and (self.pos != other.pos):
            return self.pos > other.pos
        else:
            return str(self) > str(other)

    def __lt__(self, other):
        if self.rname != other.rname:
            return self.rname < other.rname
        elif (self.rname == other.rname) and (self.pos != other.pos):
            return self.pos < other.pos
        else:
            return str(self) < str(other)

    def __eq__(self, other):
        # NOTE(review): compares full text only when rname/pos match but the
        # records differ textually; otherwise falls back to position equality.
        if (self.rname == other.rname) and (self.pos == other.pos) and (str(self) != str(other)):
            return str(self) == str(other)
        else:
            return self.pos == other.pos

    def __str__(self):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        return '\t'.join((self.qname, str(self.flag), self.rname, str(self.pos),
                          str(self.mapq), str(self.cigar), self.rnext, str(self.pnext),
                          str(self.tlen), ''.join(self.seq), ''.join(self.qual)) + \
                         tuple(':'.join((tag, self.tags[tag][0], str(self.tags[tag][1]))) for tag in sorted(self.tags.keys()))) + '\n'

    def __repr__(self):
        return "Sam({0}:{1}:{2})".format(self.rname, self.pos, self.qname)

    def __len__(self):
        # Reference-consumed length of the alignment.
        return sum(c[0] for c in self.cigars if c[1] in
                   ("M", "D", "N", "EQ", "X", "P"))

    def __getitem__(self, key):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        return self.tags[key][1]

    def __setitem__(self, key, value):
        if not self.tags:
            self.tags = parse_sam_tags(self._tags)
        self.tags[key] = value

    def cigar_split(self):
        """ CIGAR grouping function modified from:
        https://github.com/brentp/bwa-meth
        """
        if self.cigar == "*":
            yield (0, None)
            # BUG FIX: `raise StopIteration` inside a generator becomes
            # RuntimeError under PEP 479 (Python 3.7+); a plain return is the
            # correct way to end the generator.
            return
        cig_iter = groupby(self.cigar, lambda c: c.isdigit())
        for g, n in cig_iter:
            yield int("".join(n)), "".join(next(cig_iter)[1])

    @property
    def conv(self):
        # The YM:Z tag holds the per-base conversion string.
        return self['YM']

    @property
    def cigars(self):
        if not self._cigars:
            self._cigars = tuple(self.cigar_split())
        return self._cigars

    @property
    def mapped(self):
        return not (self.flag & 0x4)

    @property
    def secondary(self):
        return bool(self.flag & 0x100)

    @property
    def reverse(self):
        return bool(self.flag & 0x10)

    @property
    def duplicate(self):
        return bool(self.flag & 0x400)

    def gapped(self, string, gap_char='-'):
        """ Return string with all deletions wrt reference
        represented as gaps '-' and all insertions wrt reference
        removed.

        i: sequence index
        """
        seq = []
        i = 0
        for n, t in self.cigars:
            if t in ("M", "N", "EQ", "X", "P"):
                seq.extend(string[i:i+n])
                i += n
            elif t in ("D",):
                seq.extend(('-',) * n)
            elif t in ("I",):
                i += n
        return ''.join(seq)

    @property
    def coords(self):
        return range(self.pos, self.pos + len(self))
class FastqReader:
    """
    A class to read the name, sequence, strand and qualities from a fastq file
    """

    def __init__(self, f):
        # Wrap gzipped files with the system-gzip reader; otherwise use f as-is.
        name, ext = os.path.splitext(f.name)
        if ext == '.gz':
            self.file = Gzip(''.join([name, ext]), 'r')
        else:
            self.file = f

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        # Read one 4-line FASTQ record.
        try:
            name = next(self.file).strip().split()[0]  # remove whitespace
            seq = next(self.file).strip()
            strand = next(self.file).strip()
            qual = next(self.file).strip()
            if name.count(':YM:Z:') > 0:
                # The read name carries an embedded conversion tag; split it out.
                tag, dtype, data = name.split(':')[-3:]
                name = ':'.join(name.split(':')[:-3])
                return Fastq(name=name, seq=seq, strand=strand, qual=qual, conv=data)
            else:
                return Fastq(name=name, seq=seq, strand=strand, qual=qual)
        except StopIteration:
            raise StopIteration

    def subsample(self, n):
        """ Draws every nth read from self. Returns Fastq. """
        # Work in line units: each FASTQ record is 4 lines.
        n = n * 4
        for i, line in enumerate(self.file):
            if i % n == 0:
                name = line.strip().split()[0]
            elif i % n == 1:
                seq = line.strip()
            elif i % n == 2:
                strand = line.strip()
            elif i % n == 3:
                qual = line.strip()
                if name.count(':YM:Z:') > 0:
                    tag, dtype, data = name.split(':')[-3:]
                    name = ':'.join(name.split(':')[:-3])
                    yield Fastq(name=name, seq=seq, strand=strand, qual=qual, conv=data)
                else:
                    yield Fastq(name=name, seq=seq, strand=strand, qual=qual)

    def fileno(self):
        return self.file.fileno()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.file.close()
class Reader(object):
    """ Read SAM/BAM format file using iterator. """

    def __init__(self, f):
        # Dispatch on the extension by delegating to the appropriate subclass
        # __init__ (unusual, but it is what sets self.header and self.file).
        name, ext = os.path.splitext(f.name)
        if ext == '.bam':
            BamReaderSamtools.__init__(self, f)
        else:
            SamReader.__init__(self, f)

    def next(self):
        # Parse the next alignment line into a Sam record.
        try:
            line = next(self.file).rstrip('\n\r')
            return Sam(tuple(line.split('\t')))
        except StopIteration:
            raise StopIteration

    def __next__(self):
        return self.next()

    def __iter__(self):
        return self

    def subsample(self, n):
        """ Draws every nth read from self. Returns Sam. """
        for i, line in enumerate(self.file):
            if i % n == 0:
                yield Sam(tuple(line.rstrip().split('\t')))

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.file.close()
class SamReader(Reader):
    """ Read SAM format file using iterator. """

    def __init__(self, f):
        # Collect '@'-prefixed header lines; alignment iteration is inherited
        # from Reader.
        self.header = []
        self.file = f
        for line in self.file:
            if line[0] == '@':
                self.header.append(line.rstrip('\n\r'))
            else:
                # NOTE(review): this first non-header line is consumed here and
                # never returned, so the first alignment record appears to be
                # dropped -- confirm whether callers depend on this.
                break
class BamReaderSamtools(Reader):
    """ Read BAM format file using iterator. """

    def __init__(self, f):
        # Grab the header first via `samtools view -H`.
        pline = ['samtools', 'view', '-H', f.name]
        try:
            p = Popen(pline, bufsize=-1, stdout=PIPE,
                      stderr=PIPE)
        except OSError:
            sys.stderr.write('Samtools must be installed for BAM file support!\n')
            sys.exit(1)
        self.header = [line.decode('utf-8').rstrip('\n\r') for line in p.stdout]
        p.wait()
        # Then stream the alignments from a second samtools process.
        pline = ['samtools', 'view', f.name]
        self.p = Popen(pline, bufsize=-1, stdout=PIPE,
                       stderr=PIPE)
        self.file = TextIOWrapper(self.p.stdout)

    def __exit__(self, *args):
        self.p.wait()
class Stats:
    """ Counter for characterization of NGS reads
    """

    def __init__(self):
        # Per-position tallies (positions are 1-based).
        self.depth = defaultdict(int)
        self.nuc = defaultdict(lambda: defaultdict(int))
        self.qual = defaultdict(lambda: defaultdict(int))
        self.gc = defaultdict(int)
        # NOTE(review): Counter(defaultdict(int)) is equivalent to Counter().
        self.kmers = Counter(defaultdict(int))
        self.conv = defaultdict(lambda: defaultdict(int))

    def evaluate(self, seq, qual, conv=None):
        """ Evaluate read object at each position, and fill in nuc and qual dictionaries """
        self.gc[gc(seq)] += 1
        if conv:
            cpgs = cpg_map(seq)
        for i in range(1, len(seq) + 1):
            self.depth[i] += 1
            self.nuc[i][seq[i-1]] += 1
            self.qual[i][qual[i-1]] += 1
            if conv:
                # Only count conversion evidence at CpG positions.
                if cpgs[i-1] != 'N':
                    self.conv[i][conv[i-1]] += 1

    def kmercount(self, seq, k=5):
        """ Count all kmers of k length in seq and update kmer counter.
        """
        for kmer in window(seq, n=k):
            self.kmers[kmer] += 1

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass
def bam_read_count(bamfile):
    """ Return a tuple of the number of mapped and unmapped reads in a bam file """
    # `samtools idxstats` emits one line per reference:
    # name<TAB>length<TAB>mapped<TAB>unmapped
    proc = Popen(['samtools', 'idxstats', bamfile], stdout=PIPE)
    n_mapped = 0
    n_unmapped = 0
    for row in proc.stdout:
        _name, _length, mapped_count, unmapped_count = row.rstrip().split()
        n_mapped += int(mapped_count)
        n_unmapped += int(unmapped_count)
    return (n_mapped, n_unmapped)
def parse_sam_tags(tagfields):
    """ Return a dictionary containing the tags """
    decoded = (decode_tag(field) for field in tagfields)
    return {tag: (dtype, data) for tag, dtype, data in decoded}
def encode_tag(tag, data_type, data):
    """ Write a SAM tag in the format ``TAG:TYPE:data``

    >>> encode_tag('YM', 'Z', '#""9O"1@!J')
    'YM:Z:#""9O"1@!J'
    """
    parts = (tag.upper(), data_type.upper(), data)
    return ':'.join(parts)
def decode_tag(tag):
    """ Parse a SAM format tag to a (TAG, TYPE, data) tuple.

    TYPE in A, i, f, Z, H, B

    >>> decode_tag('YM:Z:#""9O"1@!J')
    ('YM', 'Z', '#""9O"1@!J')
    >>> decode_tag('XS:i:5')
    ('XS', 'i', 5)
    """
    values = tag.split(':')
    if len(values) != 3:
        # The data portion may itself contain ':'; re-join the tail.  Keep it
        # a LIST -- the numeric branches below assign values[2] (the original
        # rebound to a tuple here, making that assignment a TypeError).
        values = [values[0], values[1], ':'.join(values[2:])]
    if values[1] == 'i':
        values[2] = int(values[2])
    elif values[1] == 'f':
        values[2] = float(values[2])
    elif values[1] == 'H':
        # Properly raise (the original raised a (class, msg) tuple).
        raise NotImplementedError("Hex array SAM tags are currently not parsed.")
    elif values[1] == 'B':
        raise NotImplementedError("Byte array SAM tags are currently not parsed.")
    return tuple(values)
def gc(seq):
    """ Return the GC content of seq as an int

    >>> x = tuple('TTTTTATGGAGGTATTGAGAACGTAAGATGTTTGGATAT')
    >>> gc(x)
    30
    """
    if not seq:
        # Empty sequence: report 0 instead of raising ZeroDivisionError.
        return 0
    g = seq.count('G')
    c = seq.count('C')
    return int((g + c) / len(seq) * 100)
def padbases(bases):
    """For each base call in dictionary D, add an entry base:0 if key base does not exist."""
    def inner(D):
        # setdefault leaves existing counts untouched and inserts zeros.
        for base in bases:
            D.setdefault(base, 0)
    return inner
def percentile(D, percent):
    """
    modified from: http://stackoverflow.com/a/2753343/717419

    Find the percentile of a list of values.

    D - a dictionary with key=numeric value and value=frequency.
    percent - a float value from 0.0 to 1.0.

    outlier removal: http://www.purplemath.com/modules/boxwhisk3.htm

    return the percentile of the values (clamped to the whisker bounds)
    """
    N = sorted(D.keys())  # dict keys
    P = [D[n] for n in N]  # dict values
    if not N:
        return None
    total = sum(P)
    k = total * percent
    l = total * 0.25  # lower quartile
    u = total * 0.75  # upper quartile
    # Walk the cumulative frequency to find the value at each cut point.
    z = uz = lz = None
    e = 0
    for n, p in zip(N, P):  # find percentile
        e += p
        if e >= k:
            z = n  # value at percentile
            break
    e = 0
    for n, p in zip(N, P):  # find upper quartile
        e += p
        if e >= u:
            uz = n  # value at quartile
            break
    e = 0
    for n, p in zip(N, P):  # find lower quartile
        e += p
        if e >= l:
            lz = n  # value at quartile
            break
    iqd = 1.5 * (uz - lz)  # 1.5 times the inter-quartile distance
    # Clamp outliers to the whisker bounds.  BUG FIX: the original used
    # bitwise '&' between an int and a bool (e.g. 2 & True == 0), which made
    # the outlier tests evaluate incorrectly; use boolean 'and' and an
    # explicit None check so a percentile value of 0 is handled correctly.
    if z is not None and z < lz - iqd:
        return int(lz - iqd)
    elif z is not None and z > uz + iqd:
        return int(uz + iqd)
    elif z is not None:
        return int(z)
    else:
        return N[-1]
def window(seq, n=2):
    """ Returns a sliding window (of width n) over data from the iterable
        s -> (s0,s1,...s[n-1]), (s1,s2,...,sn), ... """
    stream = iter(seq)
    # Prime the first window; if the input is shorter than n, yield nothing.
    buf = tuple(islice(stream, n))
    if len(buf) == n:
        yield ''.join(buf)
    # Slide one element at a time, joining each window into a string.
    for item in stream:
        buf = buf[1:] + (item,)
        yield ''.join(buf)
def mean(s):
    """ Arithmetic mean of the values in s. """
    total = sum(s)
    count = len(s)
    return total / count
def cpg_map(seq):
    """ Return a string of C/G/N marking CpG dinucleotide positions in seq.

    >>> cpg_map('CGCGTAGCCG')
    'CGCGNNNNCG'
    """
    # Pad with N so matches at either boundary index safely into the list.
    padded = ''.join(['N', seq, 'N'])
    marks = ['N'] * len(padded)
    for match in re.finditer('CG', padded):
        marks[match.start()] = 'C'
        marks[match.start() + 1] = 'G'
    # Strip the padding before returning.
    return ''.join(marks[1:-1])
if __name__ == "__main__":
    # Run the embedded doctests when this module is executed directly.
    import doctest
    doctest.testmod()
|
# -*- coding: utf-8 -*-
# Package metadata for the ftps distribution.
__author__ = """Javier Collado"""
__email__ = 'javier.collado@gmail.com'
__version__ = '0.1.0'
Re-export the FTPS implementation at package level so it can be imported easily.
# -*- coding: utf-8 -*-
# Re-export the FTPS class at package level so callers can simply
# `from ftps import FTPS`.
from .ftps import FTPS  # noqa
# Package metadata for the ftps distribution.
__author__ = """Javier Collado"""
__email__ = 'javier.collado@gmail.com'
__version__ = '0.1.0'
|
import os
import re
import collections
import traceback
from PyQt4 import QtCore, QtGui
# Convenience alias for Qt enums/flags used throughout this module.
Qt = QtCore.Qt
from maya import cmds, mel
from sgfs.ui import product_select
import sgfs.ui.scene_name.widget as scene_name
# One parsed reference-edit line: the MEL command name, the set of namespaces
# the line mentions, and the original MEL source text.
RefEdit = collections.namedtuple('RefEdit', ('command', 'namespaces', 'source'))
class RefEditSelector(product_select.Layout):
    # Product selector extended with a column listing saved ref-edit files.

    def _setup_sections(self):
        super(RefEditSelector, self)._setup_sections()
        self.register_section('Ref Edits', self._iter_files)

    def _iter_files(self, step_path):
        """Yield (name, path, priority) for each .mel ref-edit file under the step."""
        if step_path is None:
            return
        refedit_dir = os.path.join(step_path, 'maya', 'data', 'refedits')
        if not os.path.exists(refedit_dir):
            return
        for name in os.listdir(refedit_dir):
            if name.startswith('.'):
                continue
            if not name.endswith('.mel'):
                continue
            # Rank files by (version, revision) parsed from e.g. "v0003_r0002".
            m = re.search(r'v(\d+)(?:_r(\d+))?', name)
            if m:
                priority = tuple(int(x or 0) for x in m.groups())
            else:
                priority = (0, 0)
            refedit_path = os.path.join(refedit_dir, name)
            yield name, refedit_path, priority
class Dialog(QtGui.QDialog):
    """Dialog for picking a saved reference-edit (.mel) file and applying a
    filtered subset of its edits to the current Maya scene."""

    def __init__(self):
        super(Dialog, self).__init__()
        self._setup_ui()

    def _setup_ui(self):
        """Build the selector, filter boxes, and the apply button."""
        self.setWindowTitle("Reference Edit Import")
        self.setLayout(QtGui.QVBoxLayout())

        self._selector = RefEditSelector(parent=self)

        # Select as far as we can.
        path = (
            cmds.file(q=True, sceneName=True) or
            cmds.workspace(q=True, fullName=True) or
            None
        )
        if path is not None:
            self._selector.setPath(path, allow_partial=True)
        self.layout().addLayout(self._selector)

        self._type_box = QtGui.QGroupBox("Edit Types")
        self._type_box.setLayout(QtGui.QVBoxLayout())
        self.layout().addWidget(self._type_box)

        self._namespace_box = QtGui.QGroupBox("Namespace Mappings (not implemented)")
        self._namespace_box.setLayout(QtGui.QVBoxLayout())
        self.layout().addWidget(self._namespace_box)

        button = QtGui.QPushButton("Apply Edits")
        button.clicked.connect(self._on_reference)
        self.layout().addWidget(button)

        self._selector.path_changed = self._path_changed
        self._path_changed(self._selector.path())

    def _parse_file(self, path):
        """Parse a .mel ref-edit file into RefEdit tuples on self._edits."""
        self._edits = []
        # BUG FIX: use a context manager so the file handle is closed
        # deterministically (the original leaked it via `for line in open(path)`).
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('//'):
                    continue
                command = line.split()[0]
                # Any "word:" token on the line is treated as a namespace reference.
                namespaces = re.findall(r'(\w+):', line)
                self._edits.append(RefEdit(
                    command=command,
                    namespaces=set(namespaces),
                    source=line,
                ))

    def _path_changed(self, path):
        """Rebuild the edit-type and namespace widgets for the new selection."""
        # Tear down widgets from the previous selection.
        for child in self._type_box.children():
            if isinstance(child, QtGui.QWidget):
                child.hide()
                child.destroy()
        for child in self._namespace_box.children():
            if isinstance(child, QtGui.QWidget):
                child.hide()
                child.destroy()

        if path is None:
            self._type_box.layout().addWidget(QtGui.QLabel("Nothing"))
            self._namespace_box.layout().addWidget(QtGui.QLabel("Nothing"))
            return

        self._parse_file(path)

        # One checkbox per distinct MEL command; only setAttr is on by default.
        self._command_boxes = []
        for command in sorted(set(e.command for e in self._edits)):
            checkbox = QtGui.QCheckBox(command)
            checkbox.setChecked(command == 'setAttr')
            self._command_boxes.append(checkbox)
            self._type_box.layout().addWidget(checkbox)

        existing = [cmds.file(ref, q=True, namespace=True) for ref in cmds.file(q=True, reference=True) or []]
        self._namespace_menus = []
        namespaces = set()
        for edit in self._edits:
            namespaces.update(edit.namespaces)
        for namespace in sorted(namespaces):
            layout = QtGui.QHBoxLayout()
            layout.addWidget(QtGui.QLabel(namespace))
            combo = QtGui.QComboBox()
            combo.addItem('<None>')
            for name in existing:
                combo.addItem(name)
                if name == namespace:
                    combo.setCurrentIndex(combo.count() - 1)
            layout.addWidget(combo)
            self._namespace_box.layout().addLayout(layout)

    def _on_reference(self, *args):
        """Apply every edit whose command type is checked, reporting failures."""
        do_command = {}
        for checkbox in self._command_boxes:
            do_command[str(checkbox.text())] = checkbox.isChecked()
        failed = 0
        for edit in self._edits:
            if do_command.get(edit.command):
                try:
                    mel.eval(edit.source)
                except Exception as e:
                    # Keep applying the rest; report the aggregate count below.
                    cmds.warning(str(e))
                    failed += 1
        (QtGui.QMessageBox.warning if failed else QtGui.QMessageBox.information)(
            self,
            "Applied Reference Edits",
            "Applied %d edits with %d failures." % (len(self._edits) - failed, failed)
        )
        self.close()
def __before_reload__():
    """Close the open dialog (if any) before this module is reloaded."""
    # BUG FIX: without the ``global`` declaration the assignment below made
    # ``dialog`` a function local, so ``if dialog:`` raised UnboundLocalError.
    global dialog
    if dialog:
        dialog.close()
    dialog = None
def run():
    """Show the Reference Edit Import dialog, replacing any existing one."""
    global dialog
    existing = dialog
    if existing:
        existing.close()
    dialog = Dialog()
    dialog.show()
Add very rough ref-edit node filtering on import.
import os
import re
import collections
import traceback
from PyQt4 import QtCore, QtGui
Qt = QtCore.Qt
from maya import cmds, mel
from sgfs.ui import product_select
import sgfs.ui.scene_name.widget as scene_name
# Lightweight record for one parsed reference-edit MEL line, including the
# DAG nodes it touches.
RefEdit = collections.namedtuple('RefEdit', 'command namespaces nodes source')
class RefEditSelector(product_select.Layout):
    # Product selector layout extended with a "Ref Edits" section that lists
    # exported ref-edit MEL files for the selected pipeline step.

    def _setup_sections(self):
        """Register the extra 'Ref Edits' section on top of the base sections."""
        super(RefEditSelector, self)._setup_sections()
        self.register_section('Ref Edits', self._iter_files)

    def _iter_files(self, step_path):
        """Yield ``(name, path, priority)`` for each ``*.mel`` file under
        ``<step>/maya/data/refedits``.

        ``priority`` is a ``(version, revision)`` tuple parsed from a
        ``v##[_r##]`` token in the file name, ``(0, 0)`` when absent —
        presumably used by the base layout to order candidates; confirm in
        ``product_select.Layout``.
        """
        if step_path is None:
            return
        refedit_dir = os.path.join(step_path, 'maya', 'data', 'refedits')
        if not os.path.exists(refedit_dir):
            return
        for name in os.listdir(refedit_dir):
            # Ignore hidden files.
            if name.startswith('.'):
                continue
            # Only MEL scripts are ref-edit exports.
            if not name.endswith('.mel'):
                continue
            m = re.search(r'v(\d+)(?:_r(\d+))?', name)
            if m:
                # A missing revision group parses as 0.
                priority = tuple(int(x or 0) for x in m.groups())
            else:
                priority = (0, 0)
            refedit_path = os.path.join(refedit_dir, name)
            yield name, refedit_path, priority
class Dialog(QtGui.QDialog):
    """Dialog for re-applying a previously exported set of reference edits,
    filtered both by edit command type and by the nodes the edits touch."""

    def __init__(self):
        super(Dialog, self).__init__()
        self._setup_ui()

    def _setup_ui(self):
        """Build the selector, filter checkboxes, and the Apply button."""
        self.setWindowTitle("Reference Edit Import")
        self.setLayout(QtGui.QVBoxLayout())

        self._selector = RefEditSelector(parent=self)

        # Select as far as we can.
        path = (
            cmds.file(q=True, sceneName=True) or
            cmds.workspace(q=True, fullName=True) or
            None
        )
        if path is not None:
            self._selector.setPath(path, allow_partial=True)

        self.layout().addLayout(self._selector)

        self._type_box = QtGui.QGroupBox("Edit Types")
        self._type_box.setLayout(QtGui.QVBoxLayout())
        self.layout().addWidget(self._type_box)

        self._option_box = QtGui.QGroupBox("Options")
        self._option_box.setLayout(QtGui.QVBoxLayout())
        self.layout().addWidget(self._option_box)

        self._only_selected_checkbox = QtGui.QCheckBox("Only Apply to Selected Nodes", checked=True)
        # Rebuild the node checkbox list whenever the filter option flips.
        self._only_selected_checkbox.stateChanged.connect(lambda state: self._path_changed(self._path))
        self._option_box.layout().addWidget(self._only_selected_checkbox)

        self._node_box = QtGui.QGroupBox("Nodes")
        self._node_box.setLayout(QtGui.QVBoxLayout())
        self.layout().addWidget(self._node_box)

        button = QtGui.QPushButton("Apply Edits")
        button.clicked.connect(self._on_reference)
        self.layout().addWidget(button)

        self._selector.path_changed = self._path_changed
        self._path_changed(self._selector.path())

    def _parse_file(self, path):
        """Parse a MEL ref-edit file into RefEdit records on ``self._edits``."""
        self._edits = []
        # BUG FIX: use a context manager so the file handle is always closed
        # (the original leaked the handle returned by open()).
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                # Skip blanks and MEL '//' comments.
                if not line or line.startswith('//'):
                    continue
                command = line.split()[0]
                namespaces = re.findall(r'(\w+):', line)
                nodes = re.findall(r'(\|[\|:\w]+)', line)
                self._edits.append(RefEdit(
                    command=command,
                    nodes=set(nodes),
                    namespaces=set(namespaces),
                    source=line,
                ))

    def _path_changed(self, path):
        """Rebuild the command and node checkbox lists for the new file."""
        self._path = path

        # Tear down widgets built for the previous path.
        for child in self._type_box.children():
            if isinstance(child, QtGui.QWidget):
                child.hide()
                child.destroy()
        for child in self._node_box.children():
            if isinstance(child, QtGui.QWidget):
                child.hide()
                child.destroy()

        if path is None:
            # ROBUSTNESS: clear stale parse state so Apply becomes a no-op
            # instead of re-applying (or crashing on) the previous file.
            self._edits = []
            self._command_boxes = []
            self._node_boxes = []
            self._type_box.layout().addWidget(QtGui.QLabel("Nothing"))
            # BUG FIX: the placeholder belongs in the node box (which was just
            # cleared above), not the options box, where the original kept
            # accumulating "Nothing" labels that were never removed.
            self._node_box.layout().addWidget(QtGui.QLabel("Nothing"))
            return

        self._parse_file(path)

        self._command_boxes = []
        for command in sorted(set(e.command for e in self._edits)):
            checkbox = QtGui.QCheckBox(command)
            # Attribute edits are the common case, so only they default on.
            checkbox.setChecked(command == 'setAttr')
            self._command_boxes.append(checkbox)
            self._type_box.layout().addWidget(checkbox)

        self._node_boxes = []
        all_nodes = set()
        for e in self._edits:
            all_nodes.update(e.nodes)
        if self._only_selected_checkbox.isChecked():
            # Restrict the list to nodes currently selected in the scene.
            all_nodes.intersection_update(cmds.ls(selection=True, long=True))
        for node in sorted(all_nodes):
            checkbox = QtGui.QCheckBox(node, checked=True)
            self._node_boxes.append(checkbox)
            self._node_box.layout().addWidget(checkbox)

        # (The unfinished namespace-remapping UI that was commented out here
        # has been removed; see version history if it is ever resurrected.)

    def _on_reference(self, *args):
        """Apply every checked edit whose nodes are all checked; report a summary."""
        # ROBUSTNESS: nothing parsed yet (no file selected) -> nothing to do.
        if not getattr(self, '_edits', None):
            return

        do_command = {}
        for checkbox in self._command_boxes:
            do_command[str(checkbox.text())] = checkbox.isChecked()
        do_node = {}
        for checkbox in self._node_boxes:
            do_node[str(checkbox.text())] = checkbox.isChecked()

        applied = 0
        failed = 0
        for edit in self._edits:
            if not do_command.get(edit.command):
                continue
            # Skip the edit unless every node it touches is checked; edits
            # that reference no recognizable nodes always pass this filter.
            if not all(do_node.get(n) for n in edit.nodes):
                continue
            try:
                mel.eval(edit.source)
            except Exception as e:
                cmds.warning(str(e))
                failed += 1
            else:
                applied += 1

        (QtGui.QMessageBox.warning if failed else QtGui.QMessageBox.information)(
            self,
            "Applied Reference Edits",
            "Applied %d edits with %d failures." % (applied, failed)
        )
        self.close()
def __before_reload__():
    """Close the open dialog (if any) before this module is reloaded."""
    # BUG FIX: without the ``global`` declaration the assignment below made
    # ``dialog`` a function local, so ``if dialog:`` raised UnboundLocalError.
    global dialog
    if dialog:
        dialog.close()
    dialog = None
def run():
    """Show the Reference Edit Import dialog, replacing any existing one."""
    global dialog
    existing = dialog
    if existing:
        existing.close()
    dialog = Dialog()
    dialog.show()
|
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: Przemyslaw Wirkus <Przemyslaw.wirkus@arm.com>
"""
from past.builtins import basestring
import re
import os
import sys
import json
import string
from time import time
from subprocess import Popen, PIPE, STDOUT
from mbed_greentea.tests_spec import TestSpec
from mbed_greentea.mbed_yotta_api import get_test_spec_from_yt_module
from mbed_greentea.mbed_greentea_log import gt_logger
from mbed_greentea.mbed_coverage_api import coverage_dump_file
from mbed_greentea.mbed_coverage_api import coverage_pack_hex_payload
from mbed_greentea.cmake_handlers import list_binaries_for_builds
from mbed_greentea.cmake_handlers import list_binaries_for_targets
# Return codes for test script
TEST_RESULT_OK = "OK"
TEST_RESULT_FAIL = "FAIL"
TEST_RESULT_ERROR = "ERROR"
TEST_RESULT_SKIPPED = "SKIPPED"
TEST_RESULT_UNDEF = "UNDEF"
TEST_RESULT_IOERR_COPY = "IOERR_COPY"
TEST_RESULT_IOERR_DISK = "IOERR_DISK"
TEST_RESULT_IOERR_SERIAL = "IOERR_SERIAL"
TEST_RESULT_TIMEOUT = "TIMEOUT"
TEST_RESULT_NO_IMAGE = "NO_IMAGE"
TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
TEST_RESULT_SYNC_FAILED = "SYNC_FAILED"

# Every recognized test result code.
TEST_RESULTS = [
    TEST_RESULT_OK,
    TEST_RESULT_FAIL,
    TEST_RESULT_ERROR,
    TEST_RESULT_SKIPPED,
    TEST_RESULT_UNDEF,
    TEST_RESULT_IOERR_COPY,
    TEST_RESULT_IOERR_DISK,
    TEST_RESULT_IOERR_SERIAL,
    TEST_RESULT_TIMEOUT,
    TEST_RESULT_NO_IMAGE,
    TEST_RESULT_MBED_ASSERT,
    TEST_RESULT_BUILD_FAILED,
    TEST_RESULT_SYNC_FAILED,
]

# Maps the token inside a ``{result;<token>}`` marker to a TEST_RESULT_* code.
TEST_RESULT_MAPPING = {
    "success": TEST_RESULT_OK,
    "failure": TEST_RESULT_FAIL,
    "error": TEST_RESULT_ERROR,
    "skipped": TEST_RESULT_SKIPPED,
    "end": TEST_RESULT_UNDEF,
    "ioerr_copy": TEST_RESULT_IOERR_COPY,
    "ioerr_disk": TEST_RESULT_IOERR_DISK,
    "ioerr_serial": TEST_RESULT_IOERR_SERIAL,
    "timeout": TEST_RESULT_TIMEOUT,
    "no_image": TEST_RESULT_NO_IMAGE,
    "mbed_assert": TEST_RESULT_MBED_ASSERT,
    "build_failed": TEST_RESULT_BUILD_FAILED,
    "sync_failed": TEST_RESULT_SYNC_FAILED,
}

# This value is used to tell caller than run_host_test function failed while invoking mbedhtrun
# Just a value greater than zero
RUN_HOST_TEST_POPEN_ERROR = 1729


def get_test_result(output):
    """! Parse test 'output' data for the first ``{result;<token>}`` marker
    @details If no result marker is found, TEST_RESULT_TIMEOUT is returned
    @return One of the TEST_RESULTS constants
    """
    result_re = re.compile(r"\{result;([\w+_]*)\}")
    for token in output.split():
        match = result_re.search(token)
        if match is None:
            continue
        # An unrecognized token still counts as a (UNDEF) result.
        return TEST_RESULT_MAPPING.get(match.group(1), TEST_RESULT_UNDEF)
    return TEST_RESULT_TIMEOUT
def run_command(cmd):
    """! Starts a subprocess with stderr merged into its piped stdout
    @param cmd List with command line to execute e.g. ['ls', '-l']
    @return Popen object on success, None if the process could not be started
    """
    try:
        p = Popen(cmd,
                  stdout=PIPE,
                  stderr=STDOUT)
    except OSError as e:
        gt_logger.gt_log_err("run_host_test.run_command(%s) failed!" % str(cmd))
        gt_logger.gt_log_tab(str(e))
        return None
    return p


def run_htrun(cmd, verbose):
    """! Runs mbedhtrun and captures its output line by line
    @param cmd mbedhtrun command line (list of strings)
    @param verbose When True, echo mbedhtrun output to stdout as it arrives
    @return Tuple of (return code, captured output). The return code is
            RUN_HOST_TEST_POPEN_ERROR when the process could not be started.
    """
    htrun_output = str()

    # run_command will return None if process can't be opened (Issue #134)
    p = run_command(cmd)
    if not p:
        # BUG FIX: always return a (returncode, output) pair; the original
        # returned a bare int here, which made the caller's
        # ``returncode, htrun_output = run_htrun(...)`` unpacking raise.
        return RUN_HOST_TEST_POPEN_ERROR, htrun_output

    # Raw string so the regex escapes are not interpreted by Python.
    htrun_failure_line = re.compile(r'\[RXD\] (:\d+::FAIL: .*)')

    for line in iter(p.stdout.readline, b''):
        decoded_line = line.decode('utf-8', 'ignore')
        htrun_output += decoded_line
        # When dumping output to file both \r and \n will be a new line
        # To avoid this "extra new-line" we only use \n at the end
        test_error = htrun_failure_line.search(decoded_line)
        if test_error:
            gt_logger.gt_log_err(test_error.group(1))
        if verbose:
            sys.stdout.write(decoded_line.rstrip() + '\n')
            sys.stdout.flush()

    # Check if process was terminated by signal
    returncode = p.wait()
    return returncode, htrun_output
def run_host_test(image_path,
                  disk,
                  port,
                  build_path,
                  target_id,
                  duration=10,
                  micro=None,
                  reset=None,
                  verbose=False,
                  copy_method=None,
                  program_cycle_s=None,
                  forced_reset_timeout=None,
                  digest_source=None,
                  json_test_cfg=None,
                  max_failed_properties=5,
                  enum_host_tests_path=None,
                  global_resource_mgr=None,
                  fast_model_connection=None,
                  num_sync_packtes=None,
                  polling_timeout=None,
                  retry_count=1,
                  tags=None,
                  run_app=None):
    """! This function runs host test supervisor (executes mbedhtrun) and checks output from host test process.
    @param image_path Path to binary file for flashing
    @param disk Currently mounted mbed-enabled devices disk (mount point)
    @param port Currently mounted mbed-enabled devices serial port (console)
    @param build_path Build directory; passed to get_coverage_data() for GCOV dumps
    @param target_id Target ID passed to mbedhtrun as -t
    @param duration Test case timeout (returned unchanged in the result tuple)
    @param micro Mbed-enabled device name
    @param reset Reset type
    @param forced_reset_timeout Reset timeout (sec)
    @param verbose Verbose mode flag
    @param copy_method Copy method type (name)
    @param program_cycle_s Wait after flashing delay (sec)
    @param json_test_cfg Additional test configuration file path passed to host tests in JSON format
    @param max_failed_properties After how many unknown properties we will assume test is not ported
    @param enum_host_tests_path Directory where locally defined host tests may reside
    @param global_resource_mgr Global resource manager connection string (mbedhtrun --grm)
    @param fast_model_connection Fast Model connection string (mbedhtrun --fm)
    @param num_sync_packtes (sic, kept misspelled for API compatibility) sync packets to send for host <---> device communication
    @param polling_timeout Timeout in sec for readiness of mount point and serial port of local or remote device
    @param retry_count How many times mbedhtrun is re-run when it exits non-zero
    @param tags Filter list of available devices under test to only run on devices with the provided list
           of tags [tag-filters tag1,tag]
    @param run_app Run application mode flag (we run application and grab serial port data)
    @param digest_source if None mbedhtrun will be executed. If 'stdin',
           stdin will be used via StdInObserver or file (if
           file name was given as switch option)
    @return Tuple with test results, test output, test duration times, test case results, and memory metrics.
            Return int > 0 if running mbedhtrun process failed.
            Return int < 0 if something went wrong during mbedhtrun execution.
    """

    def get_binary_host_tests_dir(binary_path, level=2):
        """! Checks if in binary test group has host_tests directory
        @param binary_path Path to binary in test specification
        @param level How many directories above test host_tests dir exists
        @return Path to host_tests dir in group binary belongs too, None if not found
        """
        try:
            binary_path_norm = os.path.normpath(binary_path)
            # NOTE(review): current_path_norm is never used below — candidate
            # for removal.
            current_path_norm = os.path.normpath(os.getcwd())
            # Drop the last `level` path components and append 'host_tests'.
            host_tests_path = binary_path_norm.split(os.sep)[:-level] + ['host_tests']
            build_dir_candidates = ['BUILD', '.build']
            idx = None
            for build_dir_candidate in build_dir_candidates:
                if build_dir_candidate in host_tests_path:
                    idx = host_tests_path.index(build_dir_candidate)
                    break
            if idx is None:
                msg = 'The following directories were not in the path: %s' % (', '.join(build_dir_candidates))
                raise Exception(msg)
            # Cut /<build dir>/tests/TOOLCHAIN/TARGET
            host_tests_path = host_tests_path[:idx] + host_tests_path[idx+4:]
            host_tests_path = os.sep.join(host_tests_path)
        except Exception as e:
            # Best-effort search: log and fall through to "not found".
            gt_logger.gt_log_warn("there was a problem while looking for host_tests directory")
            gt_logger.gt_log_tab("level %d, path: %s"% (level, binary_path))
            gt_logger.gt_log_tab(str(e))
            return None
        if os.path.isdir(host_tests_path):
            return host_tests_path
        return None

    if not enum_host_tests_path:
        # If there is -e specified we will try to find a host_tests path ourselves
        #
        # * Path to binary starts from "build" directory, and goes 4 levels
        #   deep: ./build/tests/compiler/toolchain
        # * Binary is inside test group.
        #   For example: <app>/tests/test_group_name/test_dir/*,cpp.
        # * We will search for directory called host_tests on the level of test group (level=2)
        #   or on the level of tests directory (level=3).
        #
        # If host_tests directory is found above test code will will pass it to mbedhtrun using
        # switch -e <path_to_host_tests_dir>
        gt_logger.gt_log("checking for 'host_tests' directory above image directory structure", print_text=verbose)

        # Try test-group level first, then the TESTS directory level.
        test_group_ht_path = get_binary_host_tests_dir(image_path, level=2)
        TESTS_dir_ht_path = get_binary_host_tests_dir(image_path, level=3)

        if test_group_ht_path:
            enum_host_tests_path = test_group_ht_path
        elif TESTS_dir_ht_path:
            enum_host_tests_path = TESTS_dir_ht_path

        if enum_host_tests_path:
            gt_logger.gt_log_tab("found 'host_tests' directory in: '%s'"% enum_host_tests_path, print_text=verbose)
        else:
            gt_logger.gt_log_tab("'host_tests' directory not found: two directory levels above image path checked", print_text=verbose)

    gt_logger.gt_log("selecting test case observer...", print_text=verbose)
    if digest_source:
        gt_logger.gt_log_tab("selected digest source: %s"% digest_source, print_text=verbose)

    # Select who will digest test case serial port data
    if digest_source == 'stdin':
        # When we want to scan stdin for test results
        raise NotImplementedError
    elif digest_source is not None:
        # When we want to open file to scan for test results
        raise NotImplementedError

    # Command executing CLI for host test supervisor (in detect-mode)
    cmd = ["mbedhtrun",
           '-m', micro,
           '-p', port,
           '-f', '"%s"'% image_path,
           ]

    if enum_host_tests_path:
        cmd += ["-e", '"%s"'% enum_host_tests_path]

    if global_resource_mgr:
        # Use global resource manager to execute test
        # Example:
        # $ mbedhtrun -p :9600 -f "tests-mbed_drivers-generic_tests.bin" -m K64F --grm raas_client:10.2.203.31:8000
        cmd += ['--grm', global_resource_mgr]
    else:
        # Use local resources to execute tests
        # Add extra parameters to host_test
        if disk:
            cmd += ["-d", disk]
        if copy_method:
            cmd += ["-c", copy_method]
        if target_id:
            cmd += ["-t", target_id]
        if reset:
            cmd += ["-r", reset]
        if run_app:
            cmd += ["--run"]    # -f stores binary name!

    if fast_model_connection:
        # Use simulator resource manager to execute test
        # Example:
        # $ mbedhtrun -f "tests-mbed_drivers-generic_tests.elf" -m FVP_MPS2_M3 --fm DEFAULT
        cmd += ['--fm', fast_model_connection]
    if program_cycle_s:
        cmd += ["-C", str(program_cycle_s)]
    if forced_reset_timeout:
        cmd += ["-R", str(forced_reset_timeout)]
    if json_test_cfg:
        cmd += ["--test-cfg", '"%s"' % str(json_test_cfg)]
    if num_sync_packtes:
        cmd += ["--sync",str(num_sync_packtes)]
    if tags:
        cmd += ["--tag-filters", tags]
    if polling_timeout:
        cmd += ["-P", str(polling_timeout)]

    gt_logger.gt_log_tab("calling mbedhtrun: %s" % " ".join(cmd), print_text=verbose)
    gt_logger.gt_log("mbed-host-test-runner: started")

    # NOTE(review): run_htrun may return a bare int (RUN_HOST_TEST_POPEN_ERROR)
    # when mbedhtrun cannot be started — the tuple unpacking below would then
    # raise; confirm run_htrun's failure contract.
    for retry in range(1, 1 + retry_count):
        start_time = time()
        returncode, htrun_output = run_htrun(cmd, verbose)
        end_time = time()
        if returncode < 0:
            # Terminated by signal: propagate the negative code to the caller.
            return returncode
        elif returncode == 0:
            break
        gt_logger.gt_log("retry mbedhtrun {}/{}".format(retry, retry_count))
    else:
        # for/else: all retries exhausted without a clean (zero) exit.
        gt_logger.gt_log("{} failed after {} count".format(cmd, retry_count))

    testcase_duration = end_time - start_time   # Test case duration from reset to {end}

    # Strip unprintable bytes before parsing, then extract all the results.
    htrun_output = get_printable_string(htrun_output)
    result = get_test_result(htrun_output)
    result_test_cases = get_testcase_result(htrun_output)
    test_cases_summary = get_testcase_summary(htrun_output)
    max_heap, reserved_heap, thread_stack_info = get_memory_metrics(htrun_output)

    thread_stack_summary = []
    if thread_stack_info:
        thread_stack_summary = get_thread_stack_info_summary(thread_stack_info)

    memory_metrics = {
        "max_heap": max_heap,
        "reserved_heap": reserved_heap,
        "thread_stack_info": thread_stack_info,
        "thread_stack_summary": thread_stack_summary
    }

    # Side effect: writes any GCOV payloads found in the log under build_path.
    get_coverage_data(build_path, htrun_output)

    gt_logger.gt_log("mbed-host-test-runner: stopped and returned '%s'"% result, print_text=verbose)
    return (result, htrun_output, testcase_duration, duration, result_test_cases, test_cases_summary, memory_metrics)
def get_testcase_count_and_names(output):
    """ Fetches from log utest events with test case count (__testcase_count) and test case names (__testcase_name)
    @details
    Example test case count + names prints
    [1467197417.34][HTST][INF] host test detected: default_auto
    [1467197417.36][CONN][RXD] {{__testcase_count;2}}
    [1467197417.39][CONN][RXD] >>> Running 2 test cases...
    [1467197417.43][CONN][RXD] {{__testcase_name;C strings: strtok}}
    [1467197417.47][CONN][RXD] {{__testcase_name;C strings: strpbrk}}
    @return Tuple with (test case count as int, list of test case names in order of appearance)
    """
    testcase_count = 0
    testcase_names = []

    re_tc_count = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\].*\{\{(__testcase_count);(\d+)\}\}")
    re_tc_names = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\].*\{\{(__testcase_name);([^;]+)\}\}")

    for line in output.splitlines():
        m = re_tc_names.search(line)
        if m:
            testcase_names.append(m.group(5))
            continue
        m = re_tc_count.search(line)
        if m:
            # BUG FIX: convert to int so the function consistently returns a
            # number; it previously returned the raw matched string when a
            # count marker was present but int 0 when it was absent.
            testcase_count = int(m.group(5))
    return (testcase_count, testcase_names)
def get_testcase_utest(output, test_case_name):
    """ Extracts the utest log slice for one test case, from its opening
    "Running case" banner to its closing "passed, failed" summary line.
    @param output Full mbedhtrun console log
    @param test_case_name Name of the test case to slice out
    @return List of log lines between (and including) the start and finish
            banners; empty list when the case never appears
    """
    # Escape the name so regex metacharacters inside it match literally.
    quoted_name = re.escape(test_case_name)
    start_re = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> Running case #(\d)+: '(%s)'" % quoted_name)
    finish_re = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> '(%s)': (\d+) passed, (\d+) failed" % quoted_name)

    captured = []
    for line in output.splitlines():
        if start_re.search(line):
            # Opening banner: begin (or continue) capturing.
            captured.append(line)
            continue
        if finish_re.search(line):
            # Closing summary: keep it and stop scanning.
            captured.append(line)
            break
        if captured:
            # Between the banners: keep every line verbatim.
            captured.append(line)
    return captured
def get_coverage_data(build_path, output):
    """! Extracts GCOV coverage payloads from the log and dumps them to disk
    @param build_path Build directory the coverage files are written under
    @param output mbedhtrun console log to scan
    """
    # Example GCOV output
    # [1456840876.73][CONN][RXD] {{__coverage_start;c:\Work\core-util/source/PoolAllocator.cpp.gcda;6164636772393034c2733f32...a33e...b9}}
    gt_logger.gt_log("checking for GCOV data...")
    re_gcov = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__coverage_start);([^;]+);([^}]+)\}\}$")
    for line in output.splitlines():
        m = re_gcov.search(line)
        if m:
            _, _, gcov_path, gcov_payload = m.groups()
            try:
                bin_gcov_payload = coverage_pack_hex_payload(gcov_payload)
                coverage_dump_file(build_path, gcov_path, bin_gcov_payload)
            except Exception as e:
                gt_logger.gt_log_err("error while handling GCOV data: " + str(e))
            else:
                # BUG FIX: log success only when the payload was actually
                # stored; after a failure in coverage_pack_hex_payload the
                # name ``bin_gcov_payload`` may be unbound here.
                gt_logger.gt_log_tab("storing %d bytes in '%s'"% (len(bin_gcov_payload), gcov_path))
def get_printable_string(unprintable_string):
    """! Strips every character that is not ASCII-printable from a string
    @return Copy of the input containing only ``string.printable`` characters
    """
    printable = set(string.printable)
    return "".join(ch for ch in unprintable_string if ch in printable)
def get_testcase_summary(output):
    """! Searches the log for the utest test case summary
    String to find:
    [1459246276.95][CONN][INF] found KV pair in stream: {{__testcase_summary;7;1}}, queued...
    @return Tuple of (passed, failed) or None if no summary found
    """
    summary_re = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_summary);(\d+);(\d+)\}\}")
    for line in output.splitlines():
        found = summary_re.search(line)
        if found:
            # Groups 3 and 4 are the pass/fail counters.
            return int(found.group(3)), int(found.group(4))
    return None
def get_testcase_result(output):
    """! Parses per-test-case results out of the mbedhtrun log
    @param output mbedhtrun console log
    @return Dict mapping test case name to a dict with keys: time_start,
            time_end, duration, passed, failed, result and
            result_text ('OK'/'FAIL'/'ERROR'/'SKIPPED'), plus utest_log
    """
    result_test_cases = {} # Test cases results

    # {{__testcase_start;<name>}} and {{__testcase_finish;<name>;<passed>;<failed>}}
    re_tc_start = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_start);([^;]+)\}\}")
    re_tc_finish = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_finish);([^;]+);(\d+);(\d+)\}\}")

    for line in output.splitlines():
        m = re_tc_start.search(line)
        if m:
            timestamp, _, testcase_id = m.groups()
            if testcase_id not in result_test_cases:
                result_test_cases[testcase_id] = {}
            # Data collected when __testcase_start is fetched
            result_test_cases[testcase_id]['time_start'] = float(timestamp)
            result_test_cases[testcase_id]['utest_log'] = get_testcase_utest(output, testcase_id)
            # Defaults, overwritten when the matching __testcase_finish
            # arrives; result == -4096 marks a case that started but never
            # finished (result_text stays 'ERROR').
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'ERROR'
            result_test_cases[testcase_id]['time_end'] = float(timestamp)
            result_test_cases[testcase_id]['passed'] = 0
            result_test_cases[testcase_id]['failed'] = 0
            result_test_cases[testcase_id]['result'] = -4096
            continue
        m = re_tc_finish.search(line)
        if m:
            timestamp, _, testcase_id, testcase_passed, testcase_failed = m.groups()

            testcase_passed = int(testcase_passed)
            testcase_failed = int(testcase_failed)

            testcase_result = 0 # OK case
            if testcase_failed != 0:
                testcase_result = testcase_failed # testcase_result > 0 is FAILure

            if testcase_id not in result_test_cases:
                result_test_cases[testcase_id] = {}
            # Setting some info about test case itself
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'OK'
            result_test_cases[testcase_id]['time_end'] = float(timestamp)
            result_test_cases[testcase_id]['passed'] = testcase_passed
            result_test_cases[testcase_id]['failed'] = testcase_failed
            result_test_cases[testcase_id]['result'] = testcase_result
            # Assign human readable test case result
            if testcase_result > 0:
                result_test_cases[testcase_id]['result_text'] = 'FAIL'
            elif testcase_result < 0:
                result_test_cases[testcase_id]['result_text'] = 'ERROR'
            if 'time_start' in result_test_cases[testcase_id]:
                result_test_cases[testcase_id]['duration'] = result_test_cases[testcase_id]['time_end'] - result_test_cases[testcase_id]['time_start']
            else:
                # __testcase_finish arrived without a matching __testcase_start.
                result_test_cases[testcase_id]['duration'] = 0.0
            if 'utest_log' not in result_test_cases[testcase_id]:
                result_test_cases[testcase_id]['utest_log'] = "__testcase_start tag not found."

    ### Adding missing test cases which were defined with __testcase_name
    # Get test case names reported by utest + test case names
    # This data will be used to process all tests which were not executed
    # so their status can be set to SKIPPED (e.g. in JUnit)
    tc_count, tc_names = get_testcase_count_and_names(output)
    for testcase_id in tc_names:
        if testcase_id not in result_test_cases:
            result_test_cases[testcase_id] = {}
            # Announced but never started: mark SKIPPED (result == -8192).
            result_test_cases[testcase_id]['time_start'] = 0.0
            result_test_cases[testcase_id]['utest_log'] = []
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'SKIPPED'
            result_test_cases[testcase_id]['time_end'] = 0.0
            result_test_cases[testcase_id]['passed'] = 0
            result_test_cases[testcase_id]['failed'] = 0
            result_test_cases[testcase_id]['result'] = -8192
    return result_test_cases
def get_memory_metrics(output):
    """! Searches the log for memory metrics reported by the device
    String to find:
    [1477505660.40][CONN][INF] found KV pair in stream: {{max_heap_usage;2284}}, queued...
    @param output mbedhtrun console log
    @return Tuple of (max heap usage, reserved heap, thread info list), where
            thread info list is a list of dicts with format
            {entry, arg, max_stack, stack_size}
    """
    max_heap_usage = None
    reserved_heap = None
    thread_info = {}  # keyed by "<entry>[-<arg>]" so repeated reports overwrite

    re_tc_max_heap_usage = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(max_heap_usage);(\d+)\}\}")
    re_tc_reserved_heap = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(reserved_heap);(\d+)\}\}")
    re_tc_thread_info = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__thread_info);\"([A-Fa-f0-9\-xX]+)\",(\d+),(\d+)\}\}")

    for line in output.splitlines():
        m = re_tc_max_heap_usage.search(line)
        if m:
            _, _, max_heap_usage = m.groups()
            max_heap_usage = int(max_heap_usage)

        m = re_tc_reserved_heap.search(line)
        if m:
            _, _, reserved_heap = m.groups()
            reserved_heap = int(reserved_heap)

        m = re_tc_thread_info.search(line)
        if m:
            _, _, thread_entry_arg, thread_max_stack, thread_stack_size = m.groups()
            thread_max_stack = int(thread_max_stack)
            thread_stack_size = int(thread_stack_size)

            # The quoted token is "<entry>" or "<entry>-<arg>" (hex addresses).
            thread_entry_arg_split = thread_entry_arg.split('-')
            thread_entry = thread_entry_arg_split[0]

            thread_info[thread_entry_arg] = {
                'entry': thread_entry,
                'max_stack': thread_max_stack,
                'stack_size': thread_stack_size
            }
            if len(thread_entry_arg_split) > 1:
                thread_info[thread_entry_arg]['arg'] = thread_entry_arg_split[1]

    # BUG FIX: materialize as a list; under Python 3 dict.values() is a lazy
    # view, while the docstring (and indexing callers) promise a real list.
    thread_info_list = list(thread_info.values())

    return max_heap_usage, reserved_heap, thread_info_list
def get_thread_with_max_stack_size(thread_stack_info):
    """! Finds the thread with the largest reserved stack
    @param thread_stack_info List of thread info dicts with at least
           'max_stack' and 'stack_size' keys
    @return The dict of the thread with the biggest 'stack_size', augmented
            with 'max_stack_usage_total' and 'reserved_stack_total' summed
            over all threads; None when the list is empty
    """
    max_thread_stack_size = 0
    max_thread = None
    max_stack_usage_total = 0
    reserved_stack_total = 0
    for cur_thread_stack_info in thread_stack_info:
        if cur_thread_stack_info['stack_size'] > max_thread_stack_size:
            max_thread_stack_size = cur_thread_stack_info['stack_size']
            max_thread = cur_thread_stack_info
        max_stack_usage_total += cur_thread_stack_info['max_stack']
        reserved_stack_total += cur_thread_stack_info['stack_size']
    # BUG FIX: guard the empty-input case; subscripting None raised TypeError.
    if max_thread is None:
        return None
    # NOTE: the totals are written into the winning thread's own dict, so this
    # mutates an element of the caller's list (existing behavior, kept).
    max_thread['max_stack_usage_total'] = max_stack_usage_total
    max_thread['reserved_stack_total'] = reserved_stack_total
    return max_thread


def get_thread_stack_info_summary(thread_stack_info):
    """! Reduces per-thread stack info into a single summary dict
    @param thread_stack_info Non-empty list of thread info dicts
    @return Dict with the max stack size/usage of the biggest thread and the
            usage/reservation totals across all threads
    """
    max_thread_info = get_thread_with_max_stack_size(thread_stack_info)
    summary = {
        'max_stack_size': max_thread_info['stack_size'],
        'max_stack_usage': max_thread_info['max_stack'],
        'max_stack_usage_total': max_thread_info['max_stack_usage_total'],
        'reserved_stack_total': max_thread_info['reserved_stack_total']
    }
    return summary
def log_mbed_devices_in_table(muts, cols=('platform_name', 'platform_name_unique', 'serial_port', 'mount_point', 'target_id')):
    """! Print table of muts using prettytable
    @param muts List of MUTs (dicts) to print in table
    @param cols Columns used to form the table; looked up on each mut,
           'not detected' is shown for missing keys
    @return string with formatted prettytable
    """
    from prettytable import PrettyTable, HEADER
    # IDIOM FIX: a tuple default replaces the original shared mutable list
    # default argument (cols is only read, so behavior is unchanged).
    pt = PrettyTable(cols, junction_char="|", hrules=HEADER)
    for col in cols:
        pt.align[col] = "l"
    pt.padding_width = 1 # One space between column edges and contents (default)

    for mut in muts:
        row = [mut[col] if col in mut else 'not detected' for col in cols]
        pt.add_row(row)
    return pt.get_string()
def get_test_spec(opts):
    """! Closure encapsulating how we get test specification and load it from file or from yotta module
    @param opts Parsed command line options (reads opts.test_spec and opts.list_binaries)
    @return Returns tuple of (test specification, ret code). Test specification == None if test spec load was not successful
    """
    test_spec = None

    # Check if test_spec.json file exist, if so we will pick it up as default file and load it
    test_spec_file_name = opts.test_spec
    test_spec_file_name_list = []

    # Note: test_spec.json will have higher priority than module.json file
    # so if we are inside directory with module.json and test_spec.json we will use test spec file
    # instead of using yotta's module.json file

    def get_all_test_specs_from_build_dir(path_to_scan):
        """! Searches for all test_spec.json files
        @param path_to_scan Directory path used to recursively search for test_spec.json
        @return List of locations of test_spec.json
        """
        return [os.path.join(dp, f) for dp, dn, filenames in os.walk(path_to_scan) for f in filenames if f == 'test_spec.json']

    def merge_multiple_test_specifications_from_file_list(test_spec_file_name_list):
        """! For each file in test_spec_file_name_list merge all test specifications into one
        @param test_spec_file_name_list List of paths to different test specifications
        @return TestSpec object with all test specification data inside
        """

        def copy_builds_between_test_specs(source, destination):
            """! Copies build key-value pairs between two test_spec dicts
            @param source Source dictionary
            @param destination Dictionary which will be updated with 'builds' key-values
            @return Dictionary with merged source
            """
            result = destination.copy()
            if 'builds' in source and 'builds' in destination:
                for k in source['builds']:
                    result['builds'][k] = source['builds'][k]
            return result

        merged_test_spec = {}
        for test_spec_file in test_spec_file_name_list:
            gt_logger.gt_log_tab("using '%s'"% test_spec_file)
            try:
                with open(test_spec_file, 'r') as f:
                    test_spec_data = json.load(f)
                merged_test_spec = copy_builds_between_test_specs(merged_test_spec, test_spec_data)
            except Exception as e:
                gt_logger.gt_log_err("Unexpected error while processing '%s' test specification file"% test_spec_file)
                gt_logger.gt_log_tab(str(e))
                # Any parse failure invalidates the whole merge
                merged_test_spec = {}

        test_spec = TestSpec()
        test_spec.parse(merged_test_spec)
        return test_spec

    # Test specification look-up
    if opts.test_spec:
        # Loading test specification from command line specified file
        gt_logger.gt_log("test specification file '%s' (specified with --test-spec option)"% opts.test_spec)
    elif os.path.exists('test_spec.json'):
        # Test specification file exists in current directory
        gt_logger.gt_log("using 'test_spec.json' from current directory!")
        test_spec_file_name = 'test_spec.json'
    elif 'BUILD' in os.listdir(os.getcwd()):
        # Checking 'BUILD' directory for test specifications
        # Using `os.listdir()` since it preserves case
        test_spec_file_name_list = get_all_test_specs_from_build_dir('BUILD')
    elif os.path.exists('.build'):
        # Checking .build directory for test specifications
        test_spec_file_name_list = get_all_test_specs_from_build_dir('.build')
    elif os.path.exists('mbed-os') and 'BUILD' in os.listdir('mbed-os'):
        # Checking mbed-os/BUILD directory for test specifications
        # Using `os.listdir()` since it preserves case
        # BUGFIX: os.path.join() takes separate path components, not a list
        test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join('mbed-os', 'BUILD'))
    elif os.path.exists(os.path.join('mbed-os', '.build')):
        # Checking mbed-os/.build directory for test specifications
        # BUGFIX: os.path.join() takes separate path components, not a list
        test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join('mbed-os', '.build'))

    # Actual load and processing of test specification from sources
    if test_spec_file_name:
        # Test specification from command line (--test-spec) or default test_spec.json will be used
        gt_logger.gt_log("using '%s' from current directory!"% test_spec_file_name)
        test_spec = TestSpec(test_spec_file_name)
        if opts.list_binaries:
            list_binaries_for_builds(test_spec)
            return None, 0
    elif test_spec_file_name_list:
        # Merge multiple test specs into one and keep calm
        gt_logger.gt_log("using multiple test specifications from current directory!")
        test_spec = merge_multiple_test_specifications_from_file_list(test_spec_file_name_list)
        if opts.list_binaries:
            list_binaries_for_builds(test_spec)
            return None, 0
    elif os.path.exists('module.json'):
        # If inside yotta module load module data and generate test spec
        gt_logger.gt_log("using 'module.json' from current directory!")
        if opts.list_binaries:
            # List available test binaries (names, no extension)
            list_binaries_for_targets()
            return None, 0
        else:
            test_spec = get_test_spec_from_yt_module(opts)
    else:
        gt_logger.gt_log_err("greentea should be run inside a Yotta module or --test-spec switch should be used")
        return None, -1
    return test_spec, 0
def get_test_build_properties(test_spec, test_build_name):
    """! Extracts basic properties of a single test build from a test specification
    @param test_spec TestSpec object to query
    @param test_build_name Name of the test build to look up
    @return Dictionary with 'name', 'toolchain' and 'target' keys, or None when
            no build with that name exists in the specification
    """
    matching_builds = test_spec.get_test_builds(filter_by_names=[test_build_name])
    if not matching_builds:
        return None
    selected_build = matching_builds[0]
    return {
        'name': selected_build.get_name(),
        'toolchain': selected_build.get_toolchain(),
        'target': selected_build.get_platform(),
    }
def parse_global_resource_mgr(global_resource_mgr):
    """! Parses --grm switch with global resource manager info
    @details Switch format: <platform>:<module>:<ip>:<port>, e.g. K64F:module_name:10.2.123.43:3334
    @param global_resource_mgr Raw value of the --grm switch
    @return Tuple with four elements (platform, module, ip, port) from GRM,
            or False if the switch does not contain exactly four fields
    """
    try:
        platform_name, module_name, ip_name, port_name = global_resource_mgr.split(':')
    except ValueError:
        # Wrong number of ':'-separated fields
        return False
    return platform_name, module_name, ip_name, port_name
def parse_fast_model_connection(fast_model_connection):
    """! Parses --fm switch with simulator resource manager info
    @details Switch format: <platform>:<config>, e.g. FVP_MPS2_M3:DEFAULT
    @param fast_model_connection Raw value of the --fm switch
    @return Tuple of (platform_name, config_name), or False if the switch
            does not contain exactly two fields
    """
    try:
        platform_name, config_name = fast_model_connection.split(':')
    except ValueError:
        # Wrong number of ':'-separated fields
        return False
    return platform_name, config_name
Fix encoding error by trying to output bytes
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author: Przemyslaw Wirkus <Przemyslaw.wirkus@arm.com>
"""
from past.builtins import basestring
import re
import os
import sys
import json
import string
from time import time
from subprocess import Popen, PIPE, STDOUT
from mbed_greentea.tests_spec import TestSpec
from mbed_greentea.mbed_yotta_api import get_test_spec_from_yt_module
from mbed_greentea.mbed_greentea_log import gt_logger
from mbed_greentea.mbed_coverage_api import coverage_dump_file
from mbed_greentea.mbed_coverage_api import coverage_pack_hex_payload
from mbed_greentea.cmake_handlers import list_binaries_for_builds
from mbed_greentea.cmake_handlers import list_binaries_for_targets
# Return codes for test script
# Canonical result strings for a single greentea test run
TEST_RESULT_OK = "OK"
TEST_RESULT_FAIL = "FAIL"
TEST_RESULT_ERROR = "ERROR"
TEST_RESULT_SKIPPED = "SKIPPED"
TEST_RESULT_UNDEF = "UNDEF"
TEST_RESULT_IOERR_COPY = "IOERR_COPY"
TEST_RESULT_IOERR_DISK = "IOERR_DISK"
TEST_RESULT_IOERR_SERIAL = "IOERR_SERIAL"
TEST_RESULT_TIMEOUT = "TIMEOUT"
TEST_RESULT_NO_IMAGE = "NO_IMAGE"
TEST_RESULT_MBED_ASSERT = "MBED_ASSERT"
TEST_RESULT_BUILD_FAILED = "BUILD_FAILED"
TEST_RESULT_SYNC_FAILED = "SYNC_FAILED"

# List of all recognized result strings
TEST_RESULTS = [TEST_RESULT_OK,
                TEST_RESULT_FAIL,
                TEST_RESULT_ERROR,
                TEST_RESULT_SKIPPED,
                TEST_RESULT_UNDEF,
                TEST_RESULT_IOERR_COPY,
                TEST_RESULT_IOERR_DISK,
                TEST_RESULT_IOERR_SERIAL,
                TEST_RESULT_TIMEOUT,
                TEST_RESULT_NO_IMAGE,
                TEST_RESULT_MBED_ASSERT,
                TEST_RESULT_BUILD_FAILED,
                TEST_RESULT_SYNC_FAILED
                ]

# Maps raw {result;...} tokens found in test output (see get_test_result) to canonical results
TEST_RESULT_MAPPING = {"success" : TEST_RESULT_OK,
                       "failure" : TEST_RESULT_FAIL,
                       "error" : TEST_RESULT_ERROR,
                       "skipped" : TEST_RESULT_SKIPPED,
                       "end" : TEST_RESULT_UNDEF,
                       "ioerr_copy" : TEST_RESULT_IOERR_COPY,
                       "ioerr_disk" : TEST_RESULT_IOERR_DISK,
                       "ioerr_serial" : TEST_RESULT_IOERR_SERIAL,
                       "timeout" : TEST_RESULT_TIMEOUT,
                       "no_image" : TEST_RESULT_NO_IMAGE,
                       "mbed_assert" : TEST_RESULT_MBED_ASSERT,
                       "build_failed" : TEST_RESULT_BUILD_FAILED,
                       "sync_failed" : TEST_RESULT_SYNC_FAILED
                       }

# This value is used to tell caller than run_host_test function failed while invoking mbedhtrun
# Just a value greater than zero
RUN_HOST_TEST_POPEN_ERROR = 1729
def get_test_result(output):
    """! Parse test 'output' data for the final result token
    @details If test result not found returns by default TEST_RESULT_TIMEOUT value
    @param output Captured test run output
    @return Returns found test result
    """
    result_pattern = re.compile(r"\{result;([\w+_]*)\}")
    for token in output.split():
        match = result_pattern.search(token)
        if match:
            # Map the raw token to a canonical result; unknown tokens become UNDEF
            return TEST_RESULT_MAPPING.get(match.group(1), TEST_RESULT_UNDEF)
    return TEST_RESULT_TIMEOUT
def run_command(cmd):
    """! Spawns a subprocess for the given command line
    @param cmd List with command line to execute e.g. ['ls', '-l']
    @return Process object returned by subprocess.Popen; None if spawning failed
    """
    try:
        # stderr is folded into stdout so callers read a single stream
        p = Popen(cmd,
                  stdout=PIPE,
                  stderr=STDOUT)
    except OSError as e:
        gt_logger.gt_log_err("run_host_test.run_command(%s) failed!" % str(cmd))
        gt_logger.gt_log_tab(str(e))
        return None
    return p
def run_htrun(cmd, verbose):
    """! Executes mbedhtrun command line and captures its output live
    @param cmd mbedhtrun command line as a list of strings
    @param verbose If True mirror decoded mbedhtrun output to stdout
    @return Tuple of (process return code, captured mbedhtrun output).
            Return code RUN_HOST_TEST_POPEN_ERROR (> 0) means the host test
            process could not be started at all.
    """
    # Collected chunks of decoded output; joined once at the end to avoid
    # quadratic string concatenation on long test logs
    htrun_output_chunks = []

    # run_command will return None if process can't be opened (Issue #134)
    p = run_command(cmd)
    if not p:
        # int value > 0 notifies caller that starting of host test process failed.
        # BUGFIX: return a 2-tuple — the caller unpacks (returncode, htrun_output),
        # so a bare int here used to raise TypeError
        return RUN_HOST_TEST_POPEN_ERROR, str()

    # BUGFIX: raw string so '\[' and '\d' are real regex escapes, not
    # (deprecated) string-literal escapes
    htrun_failure_line = re.compile(r'\[RXD\] (:\d+::FAIL: .*)')
    for line in iter(p.stdout.readline, b''):
        decoded_line = line.decode('utf-8', 'ignore')
        htrun_output_chunks.append(decoded_line)

        # When dumping output to file both \r and \n will be a new line
        # To avoid this "extra new-line" we only use \n at the end
        test_error = htrun_failure_line.search(decoded_line)
        if test_error:
            gt_logger.gt_log_err(test_error.group(1))

        if verbose:
            output = decoded_line.rstrip() + '\n'
            try:
                # Python 2 path: write encoded bytes to avoid console encoding errors
                sys.stdout.write(output.encode("utf-8"))
            except TypeError:
                # Python 3 path: text-mode stdout expects str
                sys.stdout.write(output)
            sys.stdout.flush()

    # Check if process was terminated by signal (negative return code)
    returncode = p.wait()
    return returncode, "".join(htrun_output_chunks)
def run_host_test(image_path,
                  disk,
                  port,
                  build_path,
                  target_id,
                  duration=10,
                  micro=None,
                  reset=None,
                  verbose=False,
                  copy_method=None,
                  program_cycle_s=None,
                  forced_reset_timeout=None,
                  digest_source=None,
                  json_test_cfg=None,
                  max_failed_properties=5,
                  enum_host_tests_path=None,
                  global_resource_mgr=None,
                  fast_model_connection=None,
                  num_sync_packtes=None,
                  polling_timeout=None,
                  retry_count=1,
                  tags=None,
                  run_app=None):
    """! This function runs host test supervisor (executes mbedhtrun) and checks output from host test process.
    @param image_path Path to binary file for flashing
    @param disk Currently mounted mbed-enabled devices disk (mount point)
    @param port Currently mounted mbed-enabled devices serial port (console)
    @param build_path Build directory path, passed to get_coverage_data for storing GCOV payloads
    @param target_id Device target id, passed to mbedhtrun via the -t switch
    @param duration Test case timeout
    @param micro Mbed-enabled device name
    @param reset Reset type
    @param forced_reset_timeout Reset timeout (sec)
    @param verbose Verbose mode flag
    @param copy_method Copy method type (name)
    @param program_cycle_s Wait after flashing delay (sec)
    @param json_test_cfg Additional test configuration file path passed to host tests in JSON format
    @param max_failed_properties After how many unknown properties we will assume test is not ported
    @param enum_host_tests_path Directory where locally defined host tests may reside
    @param global_resource_mgr Global resource manager connection string, passed via --grm
    @param fast_model_connection Simulator connection string, passed via --fm
    @param num_sync_packtes sync packets to send for host <---> device communication
    @param polling_timeout Timeout in sec for readiness of mount point and serial port of local or remote device
    @param retry_count How many times to re-run mbedhtrun on a non-zero return code
    @param tags Filter list of available devices under test to only run on devices with the provided list
           of tags [tag-filters tag1,tag]
    @param run_app Run application mode flag (we run application and grab serial port data)
    @param digest_source if None mbedhtrun will be executed. If 'stdin',
           stdin will be used via StdInObserver or file (if
           file name was given as switch option)
    @return Tuple with test results, test output, test duration times, test case results, and memory metrics.
            Return int > 0 if running mbedhtrun process failed.
            Return int < 0 if something went wrong during mbedhtrun execution.
    """

    def get_binary_host_tests_dir(binary_path, level=2):
        """! Checks if in binary test group has host_tests directory
        @param binary_path Path to binary in test specification
        @param level How many directories above test host_tests dir exists
        @return Path to host_tests dir in group binary belongs too, None if not found
        """
        try:
            binary_path_norm = os.path.normpath(binary_path)
            # NOTE(review): current_path_norm is computed but never used
            current_path_norm = os.path.normpath(os.getcwd())
            host_tests_path = binary_path_norm.split(os.sep)[:-level] + ['host_tests']
            build_dir_candidates = ['BUILD', '.build']
            idx = None
            for build_dir_candidate in build_dir_candidates:
                if build_dir_candidate in host_tests_path:
                    idx = host_tests_path.index(build_dir_candidate)
                    break
            if idx is None:
                msg = 'The following directories were not in the path: %s' % (', '.join(build_dir_candidates))
                raise Exception(msg)
            # Cut /<build dir>/tests/TOOLCHAIN/TARGET
            host_tests_path = host_tests_path[:idx] + host_tests_path[idx+4:]
            host_tests_path = os.sep.join(host_tests_path)
        except Exception as e:
            gt_logger.gt_log_warn("there was a problem while looking for host_tests directory")
            gt_logger.gt_log_tab("level %d, path: %s"% (level, binary_path))
            gt_logger.gt_log_tab(str(e))
            return None
        if os.path.isdir(host_tests_path):
            return host_tests_path
        return None

    if not enum_host_tests_path:
        # If there is -e specified we will try to find a host_tests path ourselves
        #
        # * Path to binary starts from "build" directory, and goes 4 levels
        #   deep: ./build/tests/compiler/toolchain
        # * Binary is inside test group.
        #   For example: <app>/tests/test_group_name/test_dir/*,cpp.
        # * We will search for directory called host_tests on the level of test group (level=2)
        #   or on the level of tests directory (level=3).
        #
        # If host_tests directory is found above test code will will pass it to mbedhtrun using
        # switch -e <path_to_host_tests_dir>
        gt_logger.gt_log("checking for 'host_tests' directory above image directory structure", print_text=verbose)

        # Two candidate locations: test group level and TESTS directory level
        test_group_ht_path = get_binary_host_tests_dir(image_path, level=2)
        TESTS_dir_ht_path = get_binary_host_tests_dir(image_path, level=3)
        if test_group_ht_path:
            enum_host_tests_path = test_group_ht_path
        elif TESTS_dir_ht_path:
            enum_host_tests_path = TESTS_dir_ht_path

        if enum_host_tests_path:
            gt_logger.gt_log_tab("found 'host_tests' directory in: '%s'"% enum_host_tests_path, print_text=verbose)
        else:
            gt_logger.gt_log_tab("'host_tests' directory not found: two directory levels above image path checked", print_text=verbose)

    gt_logger.gt_log("selecting test case observer...", print_text=verbose)
    if digest_source:
        gt_logger.gt_log_tab("selected digest source: %s"% digest_source, print_text=verbose)

    # Select who will digest test case serial port data
    if digest_source == 'stdin':
        # When we want to scan stdin for test results
        raise NotImplementedError
    elif digest_source is not None:
        # When we want to open file to scan for test results
        raise NotImplementedError

    # Command executing CLI for host test supervisor (in detect-mode)
    cmd = ["mbedhtrun",
           '-m', micro,
           '-p', port,
           '-f', '"%s"'% image_path,
           ]

    if enum_host_tests_path:
        cmd += ["-e", '"%s"'% enum_host_tests_path]

    if global_resource_mgr:
        # Use global resource manager to execute test
        # Example:
        # $ mbedhtrun -p :9600 -f "tests-mbed_drivers-generic_tests.bin" -m K64F --grm raas_client:10.2.203.31:8000
        cmd += ['--grm', global_resource_mgr]
    else:
        # Use local resources to execute tests
        # Add extra parameters to host_test
        if disk:
            cmd += ["-d", disk]
        if copy_method:
            cmd += ["-c", copy_method]
        if target_id:
            cmd += ["-t", target_id]
        if reset:
            cmd += ["-r", reset]
        if run_app:
            cmd += ["--run"]    # -f stores binary name!

    if fast_model_connection:
        # Use simulator resource manager to execute test
        # Example:
        # $ mbedhtrun -f "tests-mbed_drivers-generic_tests.elf" -m FVP_MPS2_M3 --fm DEFAULT
        cmd += ['--fm', fast_model_connection]
    if program_cycle_s:
        cmd += ["-C", str(program_cycle_s)]
    if forced_reset_timeout:
        cmd += ["-R", str(forced_reset_timeout)]
    if json_test_cfg:
        cmd += ["--test-cfg", '"%s"' % str(json_test_cfg)]
    if num_sync_packtes:
        cmd += ["--sync",str(num_sync_packtes)]
    if tags:
        cmd += ["--tag-filters", tags]
    if polling_timeout:
        cmd += ["-P", str(polling_timeout)]

    gt_logger.gt_log_tab("calling mbedhtrun: %s" % " ".join(cmd), print_text=verbose)
    gt_logger.gt_log("mbed-host-test-runner: started")

    # Run mbedhtrun, retrying on positive (recoverable) return codes
    for retry in range(1, 1 + retry_count):
        start_time = time()
        returncode, htrun_output = run_htrun(cmd, verbose)
        end_time = time()
        if returncode < 0:
            # Negative code: process terminated by signal — do not retry
            return returncode
        elif returncode == 0:
            break
        gt_logger.gt_log("retry mbedhtrun {}/{}".format(retry, retry_count))
    else:
        # for/else: executed only when every retry attempt failed (no break)
        gt_logger.gt_log("{} failed after {} count".format(cmd, retry_count))

    testcase_duration = end_time - start_time    # Test case duration from reset to {end}

    # Post-process the captured output into results, summaries and metrics
    htrun_output = get_printable_string(htrun_output)
    result = get_test_result(htrun_output)
    result_test_cases = get_testcase_result(htrun_output)
    test_cases_summary = get_testcase_summary(htrun_output)
    max_heap, reserved_heap, thread_stack_info = get_memory_metrics(htrun_output)

    thread_stack_summary = []
    if thread_stack_info:
        thread_stack_summary = get_thread_stack_info_summary(thread_stack_info)

    memory_metrics = {
        "max_heap": max_heap,
        "reserved_heap": reserved_heap,
        "thread_stack_info": thread_stack_info,
        "thread_stack_summary": thread_stack_summary
    }
    get_coverage_data(build_path, htrun_output)

    gt_logger.gt_log("mbed-host-test-runner: stopped and returned '%s'"% result, print_text=verbose)
    return (result, htrun_output, testcase_duration, duration, result_test_cases, test_cases_summary, memory_metrics)
def get_testcase_count_and_names(output):
    """ Fetches from log utest events with test case count (__testcase_count) and test case names (__testcase_name)
    @details
    Example prints scanned for:
    [1467197417.36][CONN][RXD] {{__testcase_count;2}}
    [1467197417.43][CONN][RXD] {{__testcase_name;C strings: strtok}}
    @param output Captured test run output
    @return Tuple with (test case count, list of test case names in order of appearance)
    """
    testcase_count = 0
    testcase_names = []

    re_tc_count = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\].*\{\{(__testcase_count);(\d+)\}\}")
    re_tc_names = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\].*\{\{(__testcase_name);([^;]+)\}\}")

    for line in output.splitlines():
        m = re_tc_names.search(line)
        if m:
            testcase_names.append(m.group(5))
            continue

        m = re_tc_count.search(line)
        if m:
            # BUGFIX: normalize to int — the default value is int 0 but the
            # matched value was previously returned as a string
            testcase_count = int(m.group(5))

    return (testcase_count, testcase_names)
def get_testcase_utest(output, test_case_name):
    """ Fetches from log all prints for given utest test case (from its begin print to its end print)
    @param output Captured test run output
    @param test_case_name Name of the test case to extract
    @return List of log lines between start and end test case print
    """
    # Escape the name so regex metacharacters inside it match literally
    safe_name = re.escape(test_case_name)
    start_marker = re.compile(
        r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> Running case #(\d)+: '(%s)'" % safe_name)
    end_marker = re.compile(
        r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> '(%s)': (\d+) passed, (\d+) failed" % safe_name)

    collected = []
    for log_line in output.splitlines():
        # Opening marker: start (or restart) collecting
        if start_marker.search(log_line):
            collected.append(log_line)
            continue
        # Closing marker: capture it and stop
        if end_marker.search(log_line):
            collected.append(log_line)
            break
        # Anything in between is part of the test case log
        if collected:
            collected.append(log_line)
    return collected
def get_coverage_data(build_path, output):
    """! Scans output for GCOV coverage payloads and dumps them to files
    @param build_path Root directory where coverage files are written
    @param output Captured test run output
    """
    # Example GCOV output
    # [1456840876.73][CONN][RXD] {{__coverage_start;c:\Work\core-util/source/PoolAllocator.cpp.gcda;6164636772393034c2733f32...a33e...b9}}
    gt_logger.gt_log("checking for GCOV data...")
    re_gcov = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__coverage_start);([^;]+);([^}]+)\}\}$")
    for line in output.splitlines():
        m = re_gcov.search(line)
        if m:
            _, _, gcov_path, gcov_payload = m.groups()
            try:
                bin_gcov_payload = coverage_pack_hex_payload(gcov_payload)
                coverage_dump_file(build_path, gcov_path, bin_gcov_payload)
            except Exception as e:
                gt_logger.gt_log_err("error while handling GCOV data: " + str(e))
            else:
                # BUGFIX: log success only when the payload was actually stored;
                # previously this ran after failures too, where bin_gcov_payload
                # could be unbound (NameError) or stale from a prior iteration
                gt_logger.gt_log_tab("storing %d bytes in '%s'"% (len(bin_gcov_payload), gcov_path))
def get_printable_string(unprintable_string):
    """! Strips out every character that is not in string.printable
    @param unprintable_string Text possibly containing unprintable characters
    @return Copy of the input keeping only printable characters
    """
    printable_chars = set(string.printable)
    return "".join(ch for ch in unprintable_string if ch in printable_chars)
def get_testcase_summary(output):
    """! Searches the log for the utest test case summary
    String to find:
    [1459246276.95][CONN][INF] found KV pair in stream: {{__testcase_summary;7;1}}, queued...
    @param output Captured test run output
    @return Tuple of (passed, failed) or None if no summary found
    """
    summary_pattern = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_summary);(\d+);(\d+)\}\}")
    for log_line in output.splitlines():
        match = summary_pattern.search(log_line)
        if match:
            # Groups 3 and 4 carry the pass/fail counters
            return int(match.group(3)), int(match.group(4))
    return None
def get_testcase_result(output):
    """! Parses per-test-case results from the captured test run output
    @details Collects {{__testcase_start;...}} and {{__testcase_finish;...;passed;failed}}
             events and pairs them per test case id. Test cases announced with
             {{__testcase_name;...}} but never started are added as SKIPPED.
    @param output Captured test run output
    @return Dictionary mapping test case id to a dict with keys: time_start,
            time_end, duration, result, result_text, passed, failed, utest_log
    """
    result_test_cases = {} # Test cases results
    re_tc_start = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_start);([^;]+)\}\}")
    re_tc_finish = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_finish);([^;]+);(\d+);(\d+)\}\}")
    for line in output.splitlines():
        m = re_tc_start.search(line)
        if m:
            timestamp, _, testcase_id = m.groups()
            if testcase_id not in result_test_cases:
                result_test_cases[testcase_id] = {}

            # Data collected when __testcase_start is fetched
            result_test_cases[testcase_id]['time_start'] = float(timestamp)
            result_test_cases[testcase_id]['utest_log'] = get_testcase_utest(output, testcase_id)

            # Data collected when __testcase_finish is fetched
            # (defaults in case the finish event never arrives: ERROR, result -4096)
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'ERROR'
            result_test_cases[testcase_id]['time_end'] = float(timestamp)
            result_test_cases[testcase_id]['passed'] = 0
            result_test_cases[testcase_id]['failed'] = 0
            result_test_cases[testcase_id]['result'] = -4096
            continue

        m = re_tc_finish.search(line)
        if m:
            timestamp, _, testcase_id, testcase_passed, testcase_failed = m.groups()

            testcase_passed = int(testcase_passed)
            testcase_failed = int(testcase_failed)

            testcase_result = 0 # OK case
            if testcase_failed != 0:
                testcase_result = testcase_failed # testcase_result > 0 is FAILure

            if testcase_id not in result_test_cases:
                result_test_cases[testcase_id] = {}
            # Setting some info about test case itself
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'OK'
            result_test_cases[testcase_id]['time_end'] = float(timestamp)
            result_test_cases[testcase_id]['passed'] = testcase_passed
            result_test_cases[testcase_id]['failed'] = testcase_failed
            result_test_cases[testcase_id]['result'] = testcase_result
            # Assign human readable test case result
            if testcase_result > 0:
                result_test_cases[testcase_id]['result_text'] = 'FAIL'
            elif testcase_result < 0:
                result_test_cases[testcase_id]['result_text'] = 'ERROR'
            # Duration is only meaningful when a matching start event was seen
            if 'time_start' in result_test_cases[testcase_id]:
                result_test_cases[testcase_id]['duration'] = result_test_cases[testcase_id]['time_end'] - result_test_cases[testcase_id]['time_start']
            else:
                result_test_cases[testcase_id]['duration'] = 0.0
            if 'utest_log' not in result_test_cases[testcase_id]:
                result_test_cases[testcase_id]['utest_log'] = "__testcase_start tag not found."

    ### Adding missing test cases which were defined with __testcase_name
    # Get test case names reported by utest + test case names
    # This data will be used to process all tests which were not executed
    # so their status can be set to SKIPPED (e.g. in JUnit)
    tc_count, tc_names = get_testcase_count_and_names(output)
    for testcase_id in tc_names:
        if testcase_id not in result_test_cases:
            result_test_cases[testcase_id] = {}
            # Data collected when __testcase_start is fetched
            result_test_cases[testcase_id]['time_start'] = 0.0
            result_test_cases[testcase_id]['utest_log'] = []
            # Data collected when __testcase_finish is fetched
            result_test_cases[testcase_id]['duration'] = 0.0
            result_test_cases[testcase_id]['result_text'] = 'SKIPPED'
            result_test_cases[testcase_id]['time_end'] = 0.0
            result_test_cases[testcase_id]['passed'] = 0
            result_test_cases[testcase_id]['failed'] = 0
            result_test_cases[testcase_id]['result'] = -8192
    return result_test_cases
def get_memory_metrics(output):
    """! Searches for test case memory metrics
    String to find:
    [1477505660.40][CONN][INF] found KV pair in stream: {{max_heap_usage;2284}}, queued...
    @param output Captured test run output
    @return Tuple of (max heap usage, reserved heap, thread info list), where thread info list
            holds dictionaries with format {entry, arg, max_stack, stack_size}
    """
    max_heap_usage = None
    reserved_heap = None
    thread_info = {}

    re_max_heap = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(max_heap_usage);(\d+)\}\}")
    re_reserved_heap = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(reserved_heap);(\d+)\}\}")
    re_thread = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__thread_info);\"([A-Fa-f0-9\-xX]+)\",(\d+),(\d+)\}\}")

    for log_line in output.splitlines():
        heap_match = re_max_heap.search(log_line)
        if heap_match:
            max_heap_usage = int(heap_match.group(3))

        reserved_match = re_reserved_heap.search(log_line)
        if reserved_match:
            reserved_heap = int(reserved_match.group(3))

        thread_match = re_thread.search(log_line)
        if thread_match:
            entry_arg = thread_match.group(3)
            # "entry-arg" encodes the thread entry point and, optionally, its argument
            entry_parts = entry_arg.split('-')
            record = {
                'entry': entry_parts[0],
                'max_stack': int(thread_match.group(4)),
                'stack_size': int(thread_match.group(5)),
            }
            if len(entry_parts) > 1:
                record['arg'] = entry_parts[1]
            # Keyed by the full "entry-arg" string so duplicate reports overwrite
            thread_info[entry_arg] = record

    thread_info_list = thread_info.values()
    return max_heap_usage, reserved_heap, thread_info_list
def get_thread_with_max_stack_size(thread_stack_info):
    """! Finds the thread with the largest reserved stack and totals usage across all threads
    @param thread_stack_info List of thread info dicts with 'max_stack' and 'stack_size' keys
    @return Dict of the thread with the biggest 'stack_size', augmented in place with
            'max_stack_usage_total' and 'reserved_stack_total' (sums over all threads),
            or None when thread_stack_info is empty
    """
    max_thread = None
    max_thread_stack_size = 0
    max_stack_usage_total = 0
    reserved_stack_total = 0

    for cur_thread_stack_info in thread_stack_info:
        if cur_thread_stack_info['stack_size'] > max_thread_stack_size:
            max_thread_stack_size = cur_thread_stack_info['stack_size']
            max_thread = cur_thread_stack_info
        max_stack_usage_total += cur_thread_stack_info['max_stack']
        reserved_stack_total += cur_thread_stack_info['stack_size']

    if max_thread is None:
        # BUGFIX: an empty list used to raise TypeError when subscripting None below
        return None

    # NOTE: intentionally mutates the winning dict, as the original implementation did
    max_thread['max_stack_usage_total'] = max_stack_usage_total
    max_thread['reserved_stack_total'] = reserved_stack_total
    return max_thread
def get_thread_stack_info_summary(thread_stack_info):
    """! Builds a stack-usage summary from per-thread stack info
    @param thread_stack_info List of thread info dicts (see get_memory_metrics)
    @return Dictionary with stack size/usage of the largest thread plus
            totals across all threads
    """
    biggest_thread = get_thread_with_max_stack_size(thread_stack_info)
    return {
        'max_stack_size': biggest_thread['stack_size'],
        'max_stack_usage': biggest_thread['max_stack'],
        'max_stack_usage_total': biggest_thread['max_stack_usage_total'],
        'reserved_stack_total': biggest_thread['reserved_stack_total'],
    }
def log_mbed_devices_in_table(muts, cols=None):
    """! Print table of muts using prettytable
    @param muts List of MUTs to print in table
    @param cols Columns used to build the table; each column key is looked up in
                every mut. Defaults to the standard platform/serial/mount/target_id set.
    @return string with formatted prettytable
    """
    from prettytable import PrettyTable, HEADER
    if cols is None:
        # BUGFIX: default provided here instead of a mutable default argument
        cols = ['platform_name', 'platform_name_unique', 'serial_port', 'mount_point', 'target_id']

    pt = PrettyTable(cols, junction_char="|", hrules=HEADER)
    for col in cols:
        pt.align[col] = "l"
    pt.padding_width = 1  # One space between column edges and contents (default)

    row = []
    for mut in muts:
        for col in cols:
            # Mark properties missing from this MUT explicitly
            cell_val = mut[col] if col in mut else 'not detected'
            row.append(cell_val)
        pt.add_row(row)
        row = []
    return pt.get_string()
def get_test_spec(opts):
    """! Closure encapsulating how we get test specification and load it from file or from yotta module
    @param opts Parsed command line options (reads opts.test_spec and opts.list_binaries)
    @return Returns tuple of (test specification, ret code). Test specification == None if test spec load was not successful
    """
    test_spec = None

    # Check if test_spec.json file exist, if so we will pick it up as default file and load it
    test_spec_file_name = opts.test_spec
    test_spec_file_name_list = []

    # Note: test_spec.json will have higher priority than module.json file
    # so if we are inside directory with module.json and test_spec.json we will use test spec file
    # instead of using yotta's module.json file

    def get_all_test_specs_from_build_dir(path_to_scan):
        """! Searches for all test_spec.json files
        @param path_to_scan Directory path used to recursively search for test_spec.json
        @return List of locations of test_spec.json
        """
        return [os.path.join(dp, f) for dp, dn, filenames in os.walk(path_to_scan) for f in filenames if f == 'test_spec.json']

    def merge_multiple_test_specifications_from_file_list(test_spec_file_name_list):
        """! For each file in test_spec_file_name_list merge all test specifications into one
        @param test_spec_file_name_list List of paths to different test specifications
        @return TestSpec object with all test specification data inside
        """

        def copy_builds_between_test_specs(source, destination):
            """! Copies build key-value pairs between two test_spec dicts
            @param source Source dictionary
            @param destination Dictionary which will be updated with 'builds' key-values
            @return Dictionary with merged source
            """
            result = destination.copy()
            if 'builds' in source and 'builds' in destination:
                for k in source['builds']:
                    result['builds'][k] = source['builds'][k]
            return result

        merged_test_spec = {}
        for test_spec_file in test_spec_file_name_list:
            gt_logger.gt_log_tab("using '%s'"% test_spec_file)
            try:
                with open(test_spec_file, 'r') as f:
                    test_spec_data = json.load(f)
                merged_test_spec = copy_builds_between_test_specs(merged_test_spec, test_spec_data)
            except Exception as e:
                gt_logger.gt_log_err("Unexpected error while processing '%s' test specification file"% test_spec_file)
                gt_logger.gt_log_tab(str(e))
                # Any parse failure invalidates the whole merge
                merged_test_spec = {}

        test_spec = TestSpec()
        test_spec.parse(merged_test_spec)
        return test_spec

    # Test specification look-up
    if opts.test_spec:
        # Loading test specification from command line specified file
        gt_logger.gt_log("test specification file '%s' (specified with --test-spec option)"% opts.test_spec)
    elif os.path.exists('test_spec.json'):
        # Test specification file exists in current directory
        gt_logger.gt_log("using 'test_spec.json' from current directory!")
        test_spec_file_name = 'test_spec.json'
    elif 'BUILD' in os.listdir(os.getcwd()):
        # Checking 'BUILD' directory for test specifications
        # Using `os.listdir()` since it preserves case
        test_spec_file_name_list = get_all_test_specs_from_build_dir('BUILD')
    elif os.path.exists('.build'):
        # Checking .build directory for test specifications
        test_spec_file_name_list = get_all_test_specs_from_build_dir('.build')
    elif os.path.exists('mbed-os') and 'BUILD' in os.listdir('mbed-os'):
        # Checking mbed-os/BUILD directory for test specifications
        # Using `os.listdir()` since it preserves case
        # BUGFIX: os.path.join() takes separate path components, not a list
        test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join('mbed-os', 'BUILD'))
    elif os.path.exists(os.path.join('mbed-os', '.build')):
        # Checking mbed-os/.build directory for test specifications
        # BUGFIX: os.path.join() takes separate path components, not a list
        test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join('mbed-os', '.build'))

    # Actual load and processing of test specification from sources
    if test_spec_file_name:
        # Test specification from command line (--test-spec) or default test_spec.json will be used
        gt_logger.gt_log("using '%s' from current directory!"% test_spec_file_name)
        test_spec = TestSpec(test_spec_file_name)
        if opts.list_binaries:
            list_binaries_for_builds(test_spec)
            return None, 0
    elif test_spec_file_name_list:
        # Merge multiple test specs into one and keep calm
        gt_logger.gt_log("using multiple test specifications from current directory!")
        test_spec = merge_multiple_test_specifications_from_file_list(test_spec_file_name_list)
        if opts.list_binaries:
            list_binaries_for_builds(test_spec)
            return None, 0
    elif os.path.exists('module.json'):
        # If inside yotta module load module data and generate test spec
        gt_logger.gt_log("using 'module.json' from current directory!")
        if opts.list_binaries:
            # List available test binaries (names, no extension)
            list_binaries_for_targets()
            return None, 0
        else:
            test_spec = get_test_spec_from_yt_module(opts)
    else:
        gt_logger.gt_log_err("greentea should be run inside a Yotta module or --test-spec switch should be used")
        return None, -1
    return test_spec, 0
def get_test_build_properties(test_spec, test_build_name):
    """Look up basic properties of one named test build.

    @param test_spec TestSpec object to query
    @param test_build_name Name of the build to look up
    @return Dict with 'name', 'toolchain' and 'target' keys, or None when
            no build with that name exists in the test specification
    """
    matching_builds = test_spec.get_test_builds(filter_by_names=[test_build_name])
    if not matching_builds:
        return None
    build = matching_builds[0]
    return {
        'name': build.get_name(),
        'toolchain': build.get_toolchain(),
        'target': build.get_platform(),
    }
def parse_global_resource_mgr(global_resource_mgr):
    """! Parses --grm switch with global resource manager info
    @details K64F:module_name:10.2.123.43:3334
    @return Tuple with four elements (platform, module, IP, port) from GRM
            or None if the switch value is malformed
    """
    try:
        platform_name, module_name, ip_name, port_name = global_resource_mgr.split(':')
    except ValueError:
        # Wrong number of ':'-separated fields.  Return None as the
        # docstring promises (previously returned False; callers test
        # truthiness, so this stays backward-compatible).
        return None
    return platform_name, module_name, ip_name, port_name
def parse_fast_model_connection(fast_model_connection):
    """! Parses --fm switch with simulator resource manager info
    @details FVP_MPS2_M3:DEFAULT
    @return Tuple (platform, config) on success, False on a malformed value
    """
    fields = fast_model_connection.split(':')
    if len(fields) != 2:
        # Exactly "<platform>:<config>" is expected.
        return False
    platform_name, config_name = fields
    return platform_name, config_name
|
"""
A high-speed, production ready, thread pooled, generic HTTP server.
For those of you wanting to understand internals of this module, here's the
basic call flow. The server's listening thread runs a very tight loop,
sticking incoming connections onto a Queue::
server = HTTPServer(...)
server.start()
while True:
tick()
# This blocks until a request comes in:
child = socket.accept()
conn = HTTPConnection(child, ...)
server.requests.put(conn)
Worker threads are kept in a pool and poll the Queue, popping off and then
handling each connection in turn. Each connection can consist of an arbitrary
number of requests and their responses, so we run a nested loop::
while True:
conn = server.requests.get()
conn.communicate()
-> while True:
req = HTTPRequest(...)
req.parse_request()
-> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
req.rfile.readline()
read_headers(req.rfile, req.inheaders)
req.respond()
-> response = app(...)
try:
for chunk in response:
if chunk:
req.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
if req.close_connection:
return
And now for a trivial doctest to exercise the test suite
>>> 'HTTPServer' in globals()
True
"""
import os
import io
import re
import email.utils
import socket
import sys
import time
import traceback as traceback_
import logging
import platform
import six
from six.moves import queue
from six.moves import urllib
from . import errors, __version__
from ._compat import bton, ntou
from .workers import threadpool
from .makefile import MakeFile
# Public API of this module.
__all__ = ('HTTPRequest', 'HTTPConnection', 'HTTPServer',
           'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
           'Gateway', 'get_ssl_adapter_class')

# Backfill IPv6 socket constants that some Windows Python builds omit.
# NOTE(review): 'win' in sys.platform also matches 'darwin'; presumably
# harmless there because both constants already exist, so the hasattr
# guards are no-ops — confirm.
if 'win' in sys.platform and hasattr(socket, 'AF_INET6'):
    if not hasattr(socket, 'IPPROTO_IPV6'):
        socket.IPPROTO_IPV6 = 41
    if not hasattr(socket, 'IPV6_V6ONLY'):
        socket.IPV6_V6ONLY = 27

# Byte constants used throughout the low-level parsing code; named so
# that slicing/joining logic reads as intent rather than punctuation.
LF = b'\n'
CRLF = b'\r\n'
TAB = b'\t'
SPACE = b' '
COLON = b':'
SEMICOLON = b';'
EMPTY = b''
ASTERISK = b'*'
FORWARD_SLASH = b'/'
QUOTED_SLASH = b'%2F'
# Case-insensitive match for a percent-encoded slash inside a path.
QUOTED_SLASH_REGEX = re.compile(b'(?i)' + QUOTED_SLASH)

# Header fields whose repeated occurrences may be folded into one
# comma-separated value when parsing a request.
comma_separated_headers = [
    b'Accept', b'Accept-Charset', b'Accept-Encoding',
    b'Accept-Language', b'Accept-Ranges', b'Allow', b'Cache-Control',
    b'Connection', b'Content-Encoding', b'Content-Language', b'Expect',
    b'If-Match', b'If-None-Match', b'Pragma', b'Proxy-Authenticate', b'TE',
    b'Trailer', b'Transfer-Encoding', b'Upgrade', b'Vary', b'Via', b'Warning',
    b'WWW-Authenticate',
]

# Process-wide container for server statistics, created only once.
if not hasattr(logging, 'statistics'):
    logging.statistics = {}
class HeaderReader(object):
    """Object for reading headers from an HTTP request.

    Interface and default implementation.
    """

    def __call__(self, rfile, hdict=None):
        """
        Read headers from the given stream into the given header dict.

        If hdict is None, a new header dict is created. Returns the populated
        header dict.

        Headers which are repeated are folded together using a comma if their
        specification so dictates.

        This function raises ValueError when the read bytes violate the HTTP
        spec.
        You should probably return "400 Bad Request" if this happens.
        """
        if hdict is None:
            hdict = {}

        while True:
            line = rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError('Illegal end of headers.')

            if line == CRLF:
                # Normal end of headers
                break
            if not line.endswith(CRLF):
                raise ValueError('HTTP requires CRLF terminators')

            # BUG FIX: compare a one-byte *slice*, not an index.  On
            # Python 3, indexing bytes yields an int, so the previous
            # `line[0] in (SPACE, TAB)` was always False and folded
            # continuation lines were rejected as illegal headers.
            # `line[:1]` yields bytes on Python 3 and str on Python 2,
            # both of which compare equal to SPACE/TAB correctly.
            if line[:1] in (SPACE, TAB):
                # It's a continuation line; the value extends the header
                # named by the previous iteration's `hname`.
                v = line.strip()
            else:
                try:
                    k, v = line.split(COLON, 1)
                except ValueError:
                    raise ValueError('Illegal header line.')
                v = v.strip()
                k = self._transform_key(k)
                hname = k

            if not self._allow_header(k):
                continue

            if k in comma_separated_headers:
                # Fold repeats of foldable headers into one value.
                existing = hdict.get(hname)
                if existing:
                    v = b', '.join((existing, v))
            hdict[hname] = v

        return hdict

    def _allow_header(self, key_name):
        """Hook for subclasses: return False to drop the named header."""
        return True

    def _transform_key(self, key_name):
        # Normalize header names to Title-Case for dict lookups.
        # TODO: what about TE and WWW-Authenticate?
        return key_name.strip().title()
class DropUnderscoreHeaderReader(HeaderReader):
    """Custom HeaderReader to exclude any headers with underscores in them."""

    def _allow_header(self, key_name):
        """Drop any header whose name contains an underscore."""
        orig = super(DropUnderscoreHeaderReader, self)._allow_header(key_name)
        # BUG FIX: header names are bytes here; on Python 3 the previous
        # `'_' not in key_name` raised TypeError (str membership test on
        # bytes).  A bytes literal works on Python 2 as well, where
        # b'_' == '_'.
        return orig and b'_' not in key_name
class SizeCheckWrapper(object):
    """Wraps a file-like object, raising MaxSizeExceeded if too large."""

    def __init__(self, rfile, maxlen):
        """Initialize SizeCheckWrapper instance.

        Args:
            rfile (file): file of a limited size
            maxlen (int): maximum length of the file being read
        """
        self.rfile = rfile
        self.maxlen = maxlen
        self.bytes_read = 0

    def _check_length(self):
        # A falsy maxlen disables the cap entirely.
        if self.maxlen and self.bytes_read > self.maxlen:
            raise errors.MaxSizeExceeded()

    def _tally(self, data):
        # Account for consumed bytes, enforce the cap, pass data through.
        self.bytes_read += len(data)
        self._check_length()
        return data

    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.

        Args:
            size (int): amount of data to read

        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        return self._tally(self.rfile.read(size))

    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.

        Args:
            size (int): minimum amount of data to read

        Returns:
            bytes: One line from rfile.
        """
        if size is not None:
            return self._tally(self.rfile.readline(size))
        # No size given: pull the line in bounded slices so one enormous
        # line cannot exhaust memory before the cap check fires.
        pieces = []
        while True:
            piece = self._tally(self.rfile.readline(256))
            pieces.append(piece)
            # A short read or a trailing LF means the line is complete.
            # See https://github.com/cherrypy/cherrypy/issues/421
            if len(piece) < 256 or piece[-1:] == LF:
                return EMPTY.join(pieces)

    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.

        Args:
            sizehint (int): hint of minimum amount of data to read

        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        lines = []
        consumed = 0
        while True:
            line = self.readline(sizehint)
            if not line:
                break
            lines.append(line)
            consumed += len(line)
            if 0 < sizehint <= consumed:
                break
        return lines

    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()

    def __iter__(self):
        """Return file iterator."""
        return self

    def __next__(self):
        """Generate next file chunk."""
        return self._tally(next(self.rfile))

    # Python 2 spells the iterator protocol method ``next``; ``next(obj)``
    # dispatches to it there, so aliasing keeps both versions working.
    next = __next__
class KnownLengthRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted."""

    def __init__(self, rfile, content_length):
        """Initialize KnownLengthRFile instance.

        Args:
            rfile (file): file of a known size
            content_length (int): length of the file being read
        """
        self.rfile = rfile
        self.remaining = content_length

    def _clamp(self, size):
        # Cap a requested byte count to what the body still owes us;
        # None means "everything that is left".
        return self.remaining if size is None else min(size, self.remaining)

    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.

        Args:
            size (int): amount of data to read

        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        if self.remaining == 0:
            return b''
        data = self.rfile.read(self._clamp(size))
        self.remaining -= len(data)
        return data

    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.

        Args:
            size (int): minimum amount of data to read

        Returns:
            bytes: One line from rfile.
        """
        if self.remaining == 0:
            return b''
        data = self.rfile.readline(self._clamp(size))
        self.remaining -= len(data)
        return data

    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.

        Args:
            sizehint (int): hint of minimum amount of data to read

        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        lines = []
        consumed = 0
        while True:
            line = self.readline(sizehint)
            if not line:
                break
            lines.append(line)
            consumed += len(line)
            if 0 < sizehint <= consumed:
                break
        return lines

    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()

    def __iter__(self):
        """Return file iterator."""
        return self

    def __next__(self):
        """Generate next file chunk."""
        data = next(self.rfile)
        self.remaining -= len(data)
        return data
class ChunkedRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted.

    This class is intended to provide a conforming wsgi.input value for
    request entities that have been encoded with the 'chunked' transfer
    encoding.
    """

    def __init__(self, rfile, maxlen, bufsize=8192):
        """Initialize ChunkedRFile instance.

        Args:
            rfile (file): file encoded with the 'chunked' transfer encoding
            maxlen (int): maximum length of the file being read
            bufsize (int): size of the buffer used to read the file
        """
        self.rfile = rfile
        self.maxlen = maxlen
        self.bytes_read = 0
        self.buffer = b''
        # NOTE(review): bufsize is stored but never consulted below —
        # confirm whether it is part of the public contract.
        self.bufsize = bufsize
        self.closed = False

    def _fetch(self):
        # Decode the next chunk-size line and append its payload to
        # self.buffer; sets self.closed on the terminating 0-size chunk.
        if self.closed:
            return

        line = self.rfile.readline()
        self.bytes_read += len(line)

        if self.maxlen and self.bytes_read > self.maxlen:
            raise errors.MaxSizeExceeded(
                'Request Entity Too Large', self.maxlen)

        # Chunk extensions (after ';') are parsed but ignored.
        line = line.strip().split(b';', 1)

        try:
            chunk_size = line.pop(0)
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise ValueError('Bad chunked transfer size: ' + repr(chunk_size))

        if chunk_size <= 0:
            # The 0-size chunk terminates the body; trailers may follow.
            self.closed = True
            return

        if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
            raise IOError('Request Entity Too Large')

        chunk = self.rfile.read(chunk_size)
        self.bytes_read += len(chunk)
        self.buffer += chunk

        # Each chunk payload is followed by a mandatory CRLF.
        crlf = self.rfile.read(2)
        if crlf != b'\r\n':
            raise ValueError(
                "Bad chunked transfer coding (expected '\\r\\n', "
                'got ' + repr(crlf) + ')')

    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.

        Args:
            size (int): amount of data to read

        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        data = b''

        if size == 0:
            return data

        while True:
            if size and len(data) >= size:
                return data

            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data

            if size:
                remaining = size - len(data)
                data += self.buffer[:remaining]
                self.buffer = self.buffer[remaining:]
            else:
                data += self.buffer
                self.buffer = b''

    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.

        The returned line includes its trailing LF, matching the
        semantics of file.readline().

        Args:
            size (int): maximum amount of data to read

        Returns:
            bytes: One line from rfile.
        """
        data = b''

        if size == 0:
            return data

        while True:
            if size and len(data) >= size:
                return data

            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data

            newline_pos = self.buffer.find(b'\n')

            if newline_pos == -1:
                # No newline buffered yet: take what we can and loop.
                if size:
                    remaining = size - len(data)
                    data += self.buffer[:remaining]
                    self.buffer = self.buffer[remaining:]
                else:
                    data += self.buffer
                    self.buffer = b''
                continue

            # BUG FIX: the previous implementation copied only
            # buffer[:newline_pos] and left buffer[newline_pos:] in
            # place, so the LF was never consumed nor returned; once the
            # cursor reached a newline, newline_pos became 0 and a
            # size-less readline() looped forever.  Consume up to and
            # including the LF (capped by size) instead.
            take = newline_pos + 1
            if size:
                take = min(take, size - len(data))
            data += self.buffer[:take]
            self.buffer = self.buffer[take:]
            if take == newline_pos + 1:
                # The whole line, LF included, has been consumed.
                return data

    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.

        Args:
            sizehint (int): hint of minimum amount of data to read

        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines

    def read_trailer_lines(self):
        """Read HTTP headers and yield them.

        Returns:
            Generator: yields CRLF separated lines.
        """
        if not self.closed:
            raise ValueError(
                'Cannot read trailers until the request body has been read.')

        while True:
            line = self.rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError('Illegal end of headers.')

            self.bytes_read += len(line)
            if self.maxlen and self.bytes_read > self.maxlen:
                raise IOError('Request Entity Too Large')

            if line == b'\r\n':
                # Normal end of headers
                break
            if not line.endswith(b'\r\n'):
                raise ValueError('HTTP requires CRLF terminators')

            yield line

    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()
class HTTPRequest(object):
    """An HTTP Request (and response).

    A single HTTP connection may consist of multiple request/response pairs.
    """

    server = None
    """The HTTPServer object which is receiving this request."""

    conn = None
    """The HTTPConnection object on which this request connected."""

    inheaders = {}
    """A dict of request headers."""

    outheaders = []
    """A list of header tuples to write in the response."""

    ready = False
    """When True, the request has been parsed and is ready to begin generating
    the response. When False, signals the calling Connection that the response
    should not be generated and the connection should close."""

    close_connection = False
    """Signals the calling Connection that the request should close. This does
    not imply an error! The client and/or server may each request that the
    connection be closed."""

    chunked_write = False
    """If True, output will be encoded with the "chunked" transfer-coding.
    This value is set automatically inside send_headers."""

    header_reader = HeaderReader()
    """
    A HeaderReader instance or compatible reader.
    """

    def __init__(self, server, conn, proxy_mode=False, strict_mode=True):
        """Initialize HTTP request container instance.

        Args:
            server (HTTPServer): web server object receiving this request
            conn (HTTPConnection): HTTP connection object for this request
            proxy_mode (bool): whether this HTTPServer should behave as a PROXY
                server for certain requests
            strict_mode (bool): whether we should return a 400 Bad Request when
                we encounter a request that a HTTP compliant client should not be
                making
        """
        self.server = server
        self.conn = conn
        self.ready = False
        self.started_request = False
        # Scheme is derived from whether the server has TLS configured.
        self.scheme = b'http'
        if self.server.ssl_adapter is not None:
            self.scheme = b'https'
        # Use the lowest-common protocol in case read_request_line errors.
        self.response_protocol = 'HTTP/1.0'
        self.inheaders = {}
        self.status = ''
        self.outheaders = []
        self.sent_headers = False
        self.close_connection = self.__class__.close_connection
        self.chunked_read = False
        self.chunked_write = self.__class__.chunked_write
        self.proxy_mode = proxy_mode
        self.strict_mode = strict_mode

    def parse_request(self):
        """Parse the next HTTP request start-line and message-headers."""
        # Cap header reading so oversized requests fail fast with 414/413.
        self.rfile = SizeCheckWrapper(self.conn.rfile,
                                      self.server.max_request_header_size)
        try:
            success = self.read_request_line()
        except errors.MaxSizeExceeded:
            self.simple_response(
                '414 Request-URI Too Long',
                'The Request-URI sent with the request exceeds the maximum '
                'allowed bytes.')
            return
        else:
            if not success:
                return

        try:
            success = self.read_request_headers()
        except errors.MaxSizeExceeded:
            self.simple_response(
                '413 Request Entity Too Large',
                'The headers sent with the request exceed the maximum '
                'allowed bytes.')
            return
        else:
            if not success:
                return

        self.ready = True

    def read_request_line(self):
        """Read and parse first line of the HTTP request.

        Returns:
            bool: True if the request line is valid or False if it's malformed.
        """
        # HTTP/1.1 connections are persistent by default. If a client
        # requests a page, then idles (leaves the connection open),
        # then rfile.readline() will raise socket.error("timed out").
        # Note that it does this based on the value given to settimeout(),
        # and doesn't need the client to request or acknowledge the close
        # (although your TCP stack might suffer for it: cf Apache's history
        # with FIN_WAIT_2).
        request_line = self.rfile.readline()

        # Set started_request to True so communicate() knows to send 408
        # from here on out.
        self.started_request = True
        if not request_line:
            return False

        if request_line == CRLF:
            # RFC 2616 sec 4.1: "...if the server is reading the protocol
            # stream at the beginning of a message and receives a CRLF
            # first, it should ignore the CRLF."
            # But only ignore one leading line! else we enable a DoS.
            request_line = self.rfile.readline()
            if not request_line:
                return False

        if not request_line.endswith(CRLF):
            self.simple_response(
                '400 Bad Request', 'HTTP requires CRLF terminators')
            return False

        try:
            method, uri, req_protocol = request_line.strip().split(SPACE, 2)
            if not req_protocol.startswith(b'HTTP/'):
                self.simple_response(
                    '400 Bad Request', 'Malformed Request-Line: bad protocol'
                )
                return False
            rp = req_protocol[5:].split(b'.', 1)
            # Major.Minor components must be treated as integers
            # ("HTTP/1.10" > "HTTP/1.1" numerically, not lexically).
            rp = tuple(map(int, rp))
            if rp > (1, 1):
                self.simple_response(
                    '505 HTTP Version Not Supported', 'Cannot fulfill request'
                )
                return False
        except (ValueError, IndexError):
            self.simple_response('400 Bad Request', 'Malformed Request-Line')
            return False

        self.uri = uri
        self.method = method.upper()

        if self.strict_mode and method != self.method:
            resp = (
                'Malformed method name: According to RFC 2616 '
                '(section 5.1.1) and its successors '
                'RFC 7230 (section 3.1.1) and RFC 7231 (section 4.1) '
                'method names are case-sensitive and uppercase.'
            )
            self.simple_response('400 Bad Request', resp)
            return False

        try:
            if six.PY2:  # FIXME: Figure out better way to do this
                # Ref: https://stackoverflow.com/a/196392/595220 (like this?)
                """This is a dummy check for unicode in URI."""
                ntou(bton(uri, 'ascii'), 'ascii')
            scheme, authority, path, qs, fragment = urllib.parse.urlsplit(uri)
        except UnicodeError:
            self.simple_response('400 Bad Request', 'Malformed Request-URI')
            return False

        if self.method == b'OPTIONS':
            # TODO: cover this branch with tests
            path = (uri
                    # https://tools.ietf.org/html/rfc7230#section-5.3.4
                    if self.proxy_mode or uri == ASTERISK
                    else path)
        elif self.method == b'CONNECT':
            # TODO: cover this branch with tests
            if not self.proxy_mode:
                self.simple_response('405 Method Not Allowed')
                return False

            # `urlsplit()` above parses "example.com:3128" as path part of URI.
            # this is a workaround, which makes it detect netloc correctly
            uri_split = urllib.parse.urlsplit(b'//' + uri)
            _scheme, _authority, _path, _qs, _fragment = uri_split
            _port = EMPTY
            try:
                _port = uri_split.port
            except ValueError:
                pass

            # FIXME: use third-party validation to make checks against RFC
            # the validation doesn't take into account, that urllib parses
            # invalid URIs without raising errors
            # https://tools.ietf.org/html/rfc7230#section-5.3.3
            invalid_path = (
                _authority != uri
                or not _port
                or any((_scheme, _path, _qs, _fragment))
            )
            if invalid_path:
                self.simple_response('400 Bad Request',
                                     'Invalid path in Request-URI: request-'
                                     'target must match authority-form.')
                return False

            authority = path = _authority
            scheme = qs = fragment = EMPTY
        else:
            uri_is_absolute_form = (scheme or authority)
            disallowed_absolute = (
                self.strict_mode
                and not self.proxy_mode
                and uri_is_absolute_form
            )
            if disallowed_absolute:
                # https://tools.ietf.org/html/rfc7230#section-5.3.2
                # (absolute form)
                """Absolute URI is only allowed within proxies."""
                self.simple_response(
                    '400 Bad Request',
                    'Absolute URI not allowed if server is not a proxy.',
                )
                return False

            invalid_path = (
                self.strict_mode
                and not uri.startswith(FORWARD_SLASH)
                and not uri_is_absolute_form
            )
            if invalid_path:
                # https://tools.ietf.org/html/rfc7230#section-5.3.1
                # (origin_form) and
                """Path should start with a forward slash."""
                resp = (
                    'Invalid path in Request-URI: request-target must contain '
                    'origin-form which starts with absolute-path (URI '
                    'starting with a slash "/").'
                )
                self.simple_response('400 Bad Request', resp)
                return False

        if fragment:
            self.simple_response('400 Bad Request',
                                 'Illegal #fragment in Request-URI.')
            return False

        if path is None:
            # FIXME: It looks like this case cannot happen
            self.simple_response('400 Bad Request',
                                 'Invalid path in Request-URI.')
            return False

        # Unquote the path+params (e.g. "/this%20path" -> "/this path").
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
        #
        # But note that "...a URI must be separated into its components
        # before the escaped characters within those components can be
        # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
        # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not
        # "/this/path".
        try:
            # TODO: Figure out whether exception can really happen here.
            # It looks like it's caught on urlsplit() call above.
            atoms = [
                urllib.parse.unquote_to_bytes(x)
                for x in QUOTED_SLASH_REGEX.split(path)
            ]
        except ValueError as ex:
            self.simple_response('400 Bad Request', ex.args[0])
            return False
        path = QUOTED_SLASH.join(atoms)

        if not path.startswith(FORWARD_SLASH):
            path = FORWARD_SLASH + path

        if scheme is not EMPTY:
            self.scheme = scheme
        self.authority = authority
        self.path = path

        # Note that, like wsgiref and most other HTTP servers,
        # we "% HEX HEX"-unquote the path but not the query string.
        self.qs = qs

        # Compare request and server HTTP protocol versions, in case our
        # server does not support the requested protocol. Limit our output
        # to min(req, server). We want the following output:
        #     request    server     actual written   supported response
        #     protocol   protocol  response protocol    feature set
        # a     1.0        1.0           1.0                1.0
        # b     1.0        1.1           1.1                1.0
        # c     1.1        1.0           1.0                1.0
        # d     1.1        1.1           1.1                1.1
        # Notice that, in (b), the response will be "HTTP/1.1" even though
        # the client only understands 1.0. RFC 2616 10.5.6 says we should
        # only return 505 if the _major_ version is different.
        sp = int(self.server.protocol[5]), int(self.server.protocol[7])

        if sp[0] != rp[0]:
            self.simple_response('505 HTTP Version Not Supported')
            return False

        self.request_protocol = req_protocol
        self.response_protocol = 'HTTP/%s.%s' % min(rp, sp)
        return True

    def read_request_headers(self):
        """Read self.rfile into self.inheaders. Return success."""
        # then all the http headers
        try:
            self.header_reader(self.rfile, self.inheaders)
        except ValueError as ex:
            self.simple_response('400 Bad Request', ex.args[0])
            return False

        mrbs = self.server.max_request_body_size
        if mrbs and int(self.inheaders.get(b'Content-Length', 0)) > mrbs:
            self.simple_response(
                '413 Request Entity Too Large',
                'The entity sent with the request exceeds the maximum '
                'allowed bytes.')
            return False

        # Persistent connection support
        if self.response_protocol == 'HTTP/1.1':
            # Both server and client are HTTP/1.1
            if self.inheaders.get(b'Connection', b'') == b'close':
                self.close_connection = True
        else:
            # Either the server or client (or both) are HTTP/1.0
            if self.inheaders.get(b'Connection', b'') != b'Keep-Alive':
                self.close_connection = True

        # Transfer-Encoding support
        te = None
        if self.response_protocol == 'HTTP/1.1':
            te = self.inheaders.get(b'Transfer-Encoding')
            if te:
                te = [x.strip().lower() for x in te.split(b',') if x.strip()]

        self.chunked_read = False

        if te:
            for enc in te:
                if enc == b'chunked':
                    self.chunked_read = True
                else:
                    # Note that, even if we see "chunked", we must reject
                    # if there is an extension we don't recognize.
                    self.simple_response('501 Unimplemented')
                    self.close_connection = True
                    return False

        # From PEP 333:
        # "Servers and gateways that implement HTTP 1.1 must provide
        # transparent support for HTTP 1.1's "expect/continue" mechanism.
        # This may be done in any of several ways:
        # 1. Respond to requests containing an Expect: 100-continue request
        #    with an immediate "100 Continue" response, and proceed normally.
        # 2. Proceed with the request normally, but provide the application
        #    with a wsgi.input stream that will send the "100 Continue"
        #    response if/when the application first attempts to read from
        #    the input stream. The read request must then remain blocked
        #    until the client responds.
        # 3. Wait until the client decides that the server does not support
        #    expect/continue, and sends the request body on its own.
        #    (This is suboptimal, and is not recommended.)
        #
        # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
        # but it seems like it would be a big slowdown for such a rare case.
        if self.inheaders.get(b'Expect', b'') == b'100-continue':
            # Don't use simple_response here, because it emits headers
            # we don't want. See
            # https://github.com/cherrypy/cherrypy/issues/951
            msg = self.server.protocol.encode('ascii')
            msg += b' 100 Continue\r\n\r\n'
            try:
                self.conn.wfile.write(msg)
            except socket.error as ex:
                if ex.args[0] not in errors.socket_errors_to_ignore:
                    raise
        return True

    def respond(self):
        """Call the gateway and write its iterable output."""
        mrbs = self.server.max_request_body_size
        # Wrap the body stream according to the request's framing: chunked
        # transfer-coding or a fixed Content-Length.
        if self.chunked_read:
            self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
        else:
            cl = int(self.inheaders.get(b'Content-Length', 0))
            if mrbs and mrbs < cl:
                if not self.sent_headers:
                    self.simple_response(
                        '413 Request Entity Too Large',
                        'The entity sent with the request exceeds the '
                        'maximum allowed bytes.')
                return
            self.rfile = KnownLengthRFile(self.conn.rfile, cl)

        self.server.gateway(self).respond()

        if (self.ready and not self.sent_headers):
            self.sent_headers = True
            self.send_headers()
        if self.chunked_write:
            # Terminating zero-length chunk of the chunked response body.
            self.conn.wfile.write(b'0\r\n\r\n')

    def simple_response(self, status, msg=''):
        """Write a simple response back to the client."""
        status = str(status)
        proto_status = '%s %s\r\n' % (self.server.protocol, status)
        content_length = 'Content-Length: %s\r\n' % len(msg)
        content_type = 'Content-Type: text/plain\r\n'
        buf = [
            proto_status.encode('ISO-8859-1'),
            content_length.encode('ISO-8859-1'),
            content_type.encode('ISO-8859-1'),
        ]

        if status[:3] in ('413', '414'):
            # Request Entity Too Large / Request-URI Too Long
            self.close_connection = True
            if self.response_protocol == 'HTTP/1.1':
                # This will not be true for 414, since read_request_line
                # usually raises 414 before reading the whole line, and we
                # therefore cannot know the proper response_protocol.
                buf.append(b'Connection: close\r\n')
            else:
                # HTTP/1.0 had no 413/414 status nor Connection header.
                # Emit 400 instead and trust the message body is enough.
                # NOTE(review): this reassignment happens after
                # proto_status was already formatted into buf, so the
                # status line actually sent is unchanged — confirm intent.
                status = '400 Bad Request'

        buf.append(CRLF)
        if msg:
            if isinstance(msg, six.text_type):
                msg = msg.encode('ISO-8859-1')
            buf.append(msg)

        try:
            self.conn.wfile.write(EMPTY.join(buf))
        except socket.error as ex:
            if ex.args[0] not in errors.socket_errors_to_ignore:
                raise

    def write(self, chunk):
        """Write unbuffered data to the client."""
        if self.chunked_write and chunk:
            # Frame the payload as "<hex length>\r\n<chunk>\r\n".
            chunk_size_hex = hex(len(chunk))[2:].encode('ascii')
            buf = [chunk_size_hex, CRLF, chunk, CRLF]
            self.conn.wfile.write(EMPTY.join(buf))
        else:
            self.conn.wfile.write(chunk)

    def send_headers(self):
        """Assert, process, and send the HTTP response message-headers.

        You must set self.status, and self.outheaders before calling this.
        """
        hkeys = [key.lower() for key, value in self.outheaders]
        status = int(self.status[:3])

        if status == 413:
            # Request Entity Too Large. Close conn to avoid garbage.
            self.close_connection = True
        elif b'content-length' not in hkeys:
            # "All 1xx (informational), 204 (no content),
            # and 304 (not modified) responses MUST NOT
            # include a message-body." So no point chunking.
            if status < 200 or status in (204, 205, 304):
                pass
            else:
                needs_chunked = (
                    self.response_protocol == 'HTTP/1.1'
                    and self.method != b'HEAD'
                )
                if needs_chunked:
                    # Use the chunked transfer-coding
                    self.chunked_write = True
                    self.outheaders.append((b'Transfer-Encoding', b'chunked'))
                else:
                    # Closing the conn is the only way to determine len.
                    self.close_connection = True

        if b'connection' not in hkeys:
            if self.response_protocol == 'HTTP/1.1':
                # Both server and client are HTTP/1.1 or better
                if self.close_connection:
                    self.outheaders.append((b'Connection', b'close'))
            else:
                # Server and/or client are HTTP/1.0
                if not self.close_connection:
                    self.outheaders.append((b'Connection', b'Keep-Alive'))

        if (not self.close_connection) and (not self.chunked_read):
            # Read any remaining request body data on the socket.
            # "If an origin server receives a request that does not include an
            # Expect request-header field with the "100-continue" expectation,
            # the request includes a request body, and the server responds
            # with a final status code before reading the entire request body
            # from the transport connection, then the server SHOULD NOT close
            # the transport connection until it has read the entire request,
            # or until the client closes the connection. Otherwise, the client
            # might not reliably receive the response message. However, this
            # requirement is not be construed as preventing a server from
            # defending itself against denial-of-service attacks, or from
            # badly broken client implementations."
            remaining = getattr(self.rfile, 'remaining', 0)
            if remaining > 0:
                self.rfile.read(remaining)

        if b'date' not in hkeys:
            self.outheaders.append((
                b'Date',
                email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
            ))

        if b'server' not in hkeys:
            self.outheaders.append((
                b'Server',
                self.server.server_name.encode('ISO-8859-1'),
            ))

        proto = self.server.protocol.encode('ascii')
        buf = [proto + SPACE + self.status + CRLF]
        for k, v in self.outheaders:
            buf.append(k + COLON + SPACE + v + CRLF)
        buf.append(CRLF)
        self.conn.wfile.write(EMPTY.join(buf))
class HTTPConnection(object):
    """An HTTP connection (active socket)."""

    remote_addr = None
    remote_port = None
    ssl_env = None
    rbufsize = io.DEFAULT_BUFFER_SIZE
    wbufsize = io.DEFAULT_BUFFER_SIZE
    RequestHandlerClass = HTTPRequest
    # When True, close() skips the hard kernel-socket shutdown so the
    # client gets a chance to read the full response (set by
    # _handle_no_ssl).
    linger = False

    def __init__(self, server, sock, makefile=MakeFile):
        """Initialize HTTPConnection instance.

        Args:
            server (HTTPServer): web server object receiving this request
            sock (socket._socketobject): the raw socket object (usually
                TCP) for this connection
            makefile (file): a fileobject class for reading from the socket
        """
        self.server = server
        self.socket = sock
        self.rfile = makefile(sock, 'rb', self.rbufsize)
        self.wfile = makefile(sock, 'wb', self.wbufsize)
        self.requests_seen = 0

    def communicate(self):
        """Read each request and respond appropriately."""
        request_seen = False
        try:
            while True:
                # (re)set req to None so that if something goes wrong in
                # the RequestHandlerClass constructor, the error doesn't
                # get written to the previous request.
                req = None
                req = self.RequestHandlerClass(self.server, self)

                # This order of operations should guarantee correct pipelining.
                req.parse_request()
                if self.server.stats['Enabled']:
                    self.requests_seen += 1
                if not req.ready:
                    # Something went wrong in the parsing (and the server has
                    # probably already made a simple_response). Return and
                    # let the conn close.
                    return

                request_seen = True
                req.respond()
                if req.close_connection:
                    return
        except socket.error as ex:
            errnum = ex.args[0]
            # sadly SSL sockets return a different (longer) time out string
            timeout_errs = 'timed out', 'The read operation timed out'
            if errnum in timeout_errs:
                # Don't error if we're between requests; only error
                # if 1) no request has been started at all, or 2) we're
                # in the middle of a request.
                # See https://github.com/cherrypy/cherrypy/issues/853
                if (not request_seen) or (req and req.started_request):
                    self._conditional_error(req, '408 Request Timeout')
            elif errnum not in errors.socket_errors_to_ignore:
                self.server.error_log('socket.error %s' % repr(errnum),
                                      level=logging.WARNING, traceback=True)
                self._conditional_error(req, '500 Internal Server Error')
        except (KeyboardInterrupt, SystemExit):
            raise
        except errors.FatalSSLAlert:
            pass
        except errors.NoSSLError:
            self._handle_no_ssl(req)
        except Exception as ex:
            self.server.error_log(
                repr(ex), level=logging.ERROR, traceback=True)
            self._conditional_error(req, '500 Internal Server Error')

    def _handle_no_ssl(self, req):
        """Tell a plain-HTTP client that this port only speaks HTTPS."""
        if not req or req.sent_headers:
            return
        # Unwrap wfile so the error reaches the client unencrypted.
        # NOTE(review): socket._sock is a Python 2-only attribute —
        # confirm this path is unreachable on Python 3.
        self.wfile = MakeFile(self.socket._sock, 'wb', self.wbufsize)
        msg = (
            'The client sent a plain HTTP request, but '
            'this server only speaks HTTPS on this port.'
        )
        req.simple_response('400 Bad Request', msg)
        self.linger = True

    def _conditional_error(self, req, response):
        """Respond with an error.

        Don't bother writing if a response
        has already started being written.

        Args:
            req (HTTPRequest): current request, possibly None
            response (str): status line to send, e.g.
                '500 Internal Server Error'
        """
        if not req or req.sent_headers:
            return

        try:
            # BUG FIX: the caller's status was previously ignored and a
            # hard-coded '408 Request Timeout' was always emitted, so the
            # '500 Internal Server Error' paths in communicate() were
            # misreported to the client as timeouts.
            req.simple_response(response)
        except errors.FatalSSLAlert:
            pass
        except errors.NoSSLError:
            self._handle_no_ssl(req)

    def close(self):
        """Close the socket underlying this connection."""
        self.rfile.close()

        if not self.linger:
            self._close_kernel_socket()
            self.socket.close()
        else:
            # On the other hand, sometimes we want to hang around for a bit
            # to make sure the client has a chance to read our entire
            # response. Skipping the close() calls here delays the FIN
            # packet until the socket object is garbage-collected later.
            # Someday, perhaps, we'll do the full lingering_close that
            # Apache does, but not today.
            pass

    def _close_kernel_socket(self):
        """Close kernel socket in outdated Python versions.

        On old Python versions,
        Python's socket module does NOT call close on the kernel
        socket when you call socket.close(). We do so manually here
        because we want this server to send a FIN TCP segment
        immediately. Note this must be called *before* calling
        socket.close(), because the latter drops its reference to
        the kernel socket.
        """
        if six.PY2 and hasattr(self.socket, '_sock'):
            self.socket._sock.close()
# Pick a platform-appropriate implementation of prevent_socket_inheritance:
# POSIX (fcntl available), Windows (ctypes/kernel32), or a no-op fallback.
try:
    import fcntl
except ImportError:
    try:
        from ctypes import windll, WinError
        import ctypes.wintypes
        _SetHandleInformation = windll.kernel32.SetHandleInformation
        _SetHandleInformation.argtypes = [
            ctypes.wintypes.HANDLE,
            ctypes.wintypes.DWORD,
            ctypes.wintypes.DWORD,
        ]
        _SetHandleInformation.restype = ctypes.wintypes.BOOL
    except ImportError:
        def prevent_socket_inheritance(sock):
            """Dummy function, since neither fcntl nor ctypes are available."""
            pass
    else:
        def prevent_socket_inheritance(sock):
            """Mark the given socket fd as non-inheritable (Windows)."""
            # HANDLE_FLAG_INHERIT (1) cleared to 0 for this handle.
            if not _SetHandleInformation(sock.fileno(), 1, 0):
                raise WinError()
else:
    def prevent_socket_inheritance(sock):
        """Mark the given socket fd as non-inheritable (POSIX)."""
        fd = sock.fileno()
        old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
class HTTPServer(object):
    """An HTTP server."""
    # Backing field for the ``bind_addr`` property.
    _bind_addr = '127.0.0.1'
    # Backing field for the ``interrupt`` property.
    _interrupt = None
    gateway = None
    """A Gateway instance."""
    minthreads = None
    """The minimum number of worker threads to create (default 10)."""
    maxthreads = None
    """The maximum number of worker threads to create.
    (default -1 = no limit)"""
    server_name = None
    """The name of the server; defaults to ``self.version``."""
    protocol = 'HTTP/1.1'
    """The version string to write in the Status-Line of all HTTP responses.
    For example, "HTTP/1.1" is the default. This also limits the supported
    features used in the response."""
    request_queue_size = 5
    """The 'backlog' arg to socket.listen(); max queued connections.
    (default 5)."""
    shutdown_timeout = 5
    """The total time to wait for worker threads to cleanly exit.
    Specified in seconds."""
    timeout = 10
    """The timeout in seconds for accepted connections (default 10)."""
    version = 'Cheroot/' + __version__
    """A version string for the HTTPServer."""
    software = None
    """The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
    If None, this defaults to ``'%s Server' % self.version``.
    """
    ready = False
    """Internal flag which indicating the socket is accepting connections."""
    max_request_header_size = 0
    """The maximum size, in bytes, for request headers, or 0 for no limit."""
    max_request_body_size = 0
    """The maximum size, in bytes, for request bodies, or 0 for no limit."""
    nodelay = True
    """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
    ConnectionClass = HTTPConnection
    """The class to use for handling HTTP connections."""
    ssl_adapter = None
    """An instance of ssl.Adapter (or a subclass).
    You must have the corresponding SSL driver library installed.
    """
    def __init__(
            self, bind_addr, gateway, minthreads=10, maxthreads=-1,
            server_name=None):
        """Initialize HTTPServer instance.
        Args:
            bind_addr (tuple): network interface to listen to
            gateway (Gateway): gateway for processing HTTP requests
            minthreads (int): minimum number of threads for HTTP thread pool
            maxthreads (int): maximum number of threads for HTTP thread pool
            server_name (str): web server name to be advertised via Server
                HTTP header
        """
        self.bind_addr = bind_addr
        self.gateway = gateway
        # Worker pool; ``min`` is clamped to at least one thread.
        self.requests = threadpool.ThreadPool(
            self, min=minthreads or 1, max=maxthreads)
        if not server_name:
            server_name = self.version
        self.server_name = server_name
        self.clear_stats()
    def clear_stats(self):
        """Reset server stat counters.."""
        self._start_time = None
        self._run_time = 0
        # Most entries are lazy: callables taking the stats dict itself, so
        # they are only computed when the stats are actually rendered.
        self.stats = {
            'Enabled': False,
            'Bind Address': lambda s: repr(self.bind_addr),
            'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
            'Accepts': 0,
            'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
            'Queue': lambda s: getattr(self.requests, 'qsize', None),
            'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
            'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
            'Socket Errors': 0,
            'Requests': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Requests'](w) for w in s['Worker Threads'].values()], 0),
            'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0),
            'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Written'](w) for w in s['Worker Threads'].values()],
                0),
            'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Work Time'](w) for w in s['Worker Threads'].values()], 0),
            'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
                 for w in s['Worker Threads'].values()], 0),
            'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
                 for w in s['Worker Threads'].values()], 0),
            'Worker Threads': {},
        }
        # Register this server's stats in the shared registry.
        logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
    def runtime(self):
        """Return server uptime."""
        if self._start_time is None:
            return self._run_time
        else:
            return self._run_time + (time.time() - self._start_time)
    def __str__(self):
        """Render Server instance representing bind address."""
        return '%s.%s(%r)' % (self.__module__, self.__class__.__name__,
                              self.bind_addr)
    @property
    def bind_addr(self):
        """Return the interface on which to listen for connections.
        For TCP sockets, a (host, port) tuple. Host values may be any IPv4
        or IPv6 address, or any valid hostname. The string 'localhost' is a
        synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
        The string '0.0.0.0' is a special IPv4 entry meaning "any active
        interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
        IPv6. The empty string or None are not allowed.
        For UNIX sockets, supply the filename as a string.
        Systemd socket activation is automatic and doesn't require tempering
        with this variable.
        """
        return self._bind_addr
    @bind_addr.setter
    def bind_addr(self, value):
        """Set the interface on which to listen for connections."""
        if isinstance(value, tuple) and value[0] in ('', None):
            # Despite the socket module docs, using '' does not
            # allow AI_PASSIVE to work. Passing None instead
            # returns '0.0.0.0' like we want. In other words:
            #     host    AI_PASSIVE     result
            #      ''         Y          192.168.x.y
            #      ''         N          192.168.x.y
            #     None        Y          0.0.0.0
            #     None        N          127.0.0.1
            # But since you can get the same effect with an explicit
            # '0.0.0.0', we deny both the empty string and None as values.
            raise ValueError("Host values of '' or None are not allowed. "
                             "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
                             'to listen on all active interfaces.')
        self._bind_addr = value
    def safe_start(self):
        """Run the server forever, and stop it cleanly on exit."""
        try:
            self.start()
        except (KeyboardInterrupt, IOError):
            # The time.sleep call might raise
            # "IOError: [Errno 4] Interrupted function call" on KBInt.
            self.error_log('Keyboard Interrupt: shutting down')
            self.stop()
            raise
        except SystemExit:
            self.error_log('SystemExit raised: shutting down')
            self.stop()
            raise
    def start(self):
        """Run the server forever."""
        # We don't have to trap KeyboardInterrupt or SystemExit here,
        # because cherrypy.server already does so, calling self.stop() for us.
        # If you're using this server with another framework, you should
        # trap those exceptions in whatever code block calls start().
        self._interrupt = None
        if self.software is None:
            self.software = '%s Server' % self.version
        # Select the appropriate socket
        self.socket = None
        if os.getenv('LISTEN_PID', None):
            # systemd socket activation: fd 3 is the pre-opened listener.
            self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
        elif isinstance(self.bind_addr, six.string_types):
            # AF_UNIX socket
            # So we can reuse the socket...
            try:
                os.unlink(self.bind_addr)
            except Exception:
                pass
            # So everyone can access the socket...
            try:
                os.chmod(self.bind_addr, 0o777)
            except Exception:
                pass
            info = [
                (socket.AF_UNIX, socket.SOCK_STREAM, 0, '', self.bind_addr)]
        else:
            # AF_INET or AF_INET6 socket
            # Get the correct address family for our host (allows IPv6
            # addresses)
            host, port = self.bind_addr
            try:
                info = socket.getaddrinfo(
                    host, port, socket.AF_UNSPEC,
                    socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
            except socket.gaierror:
                # Resolution failed; fall back to a literal-address guess.
                sock_type = socket.AF_INET
                bind_addr = self.bind_addr
                if ':' in host:
                    sock_type = socket.AF_INET6
                    bind_addr = bind_addr + (0, 0)
                info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
        if not self.socket:
            msg = 'No socket could be created'
            # Try each candidate address until one binds.
            for res in info:
                af, socktype, proto, canonname, sa = res
                try:
                    self.bind(af, socktype, proto)
                    break
                except socket.error as serr:
                    msg = '%s -- (%s: %s)' % (msg, sa, serr)
                    if self.socket:
                        self.socket.close()
                    self.socket = None
            if not self.socket:
                raise socket.error(msg)
        # Timeout so KeyboardInterrupt can be caught on Win32
        self.socket.settimeout(1)
        self.socket.listen(self.request_queue_size)
        # Create worker threads
        self.requests.start()
        self.ready = True
        self._start_time = time.time()
        while self.ready:
            try:
                self.tick()
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                self.error_log('Error in HTTPServer.tick', level=logging.ERROR,
                               traceback=True)
            if self.interrupt:
                while self.interrupt is True:
                    # Wait for self.stop() to complete. See _set_interrupt.
                    time.sleep(0.1)
                if self.interrupt:
                    raise self.interrupt
    def error_log(self, msg='', level=20, traceback=False):
        """Write error message to log.
        Args:
            msg (str): error message
            level (int): logging level
            traceback (bool): add traceback to output or not
        """
        # Override this in subclasses as desired
        sys.stderr.write(msg + '\n')
        sys.stderr.flush()
        if traceback:
            tblines = traceback_.format_exc()
            sys.stderr.write(tblines)
            sys.stderr.flush()
    def bind(self, family, type, proto=0):
        """Create (or recreate) the actual socket object."""
        self.socket = socket.socket(family, type, proto)
        prevent_socket_inheritance(self.socket)
        if platform.system() != 'Windows':
            # Windows has different semantics for SO_REUSEADDR,
            # so don't set it.
            # https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if self.nodelay and not isinstance(self.bind_addr, str):
            self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        if self.ssl_adapter is not None:
            self.socket = self.ssl_adapter.bind(self.socket)
        host, port = self.bind_addr[:2]
        # If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
        # activate dual-stack. See
        # https://github.com/cherrypy/cherrypy/issues/871.
        listening_ipv6 = (
            hasattr(socket, 'AF_INET6')
            and family == socket.AF_INET6
            and host in ('::', '::0', '::0.0.0.0')
        )
        if listening_ipv6:
            try:
                self.socket.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
            except (AttributeError, socket.error):
                # Apparently, the socket option is not available in
                # this machine's TCP stack
                pass
        self.socket.bind(self.bind_addr)
        self.bind_addr = self.socket.getsockname()[:2]  # TODO: keep separate
    def tick(self):
        """Accept a new connection and put it on the Queue."""
        try:
            s, addr = self.socket.accept()
            if self.stats['Enabled']:
                self.stats['Accepts'] += 1
            if not self.ready:
                return
            prevent_socket_inheritance(s)
            if hasattr(s, 'settimeout'):
                s.settimeout(self.timeout)
            mf = MakeFile
            ssl_env = {}
            # if ssl cert and key are set, we try to be a secure HTTP server
            if self.ssl_adapter is not None:
                try:
                    s, ssl_env = self.ssl_adapter.wrap(s)
                except errors.NoSSLError:
                    # Plain-HTTP request on the HTTPS port: answer 400 over
                    # the raw socket and drop the connection.
                    msg = ('The client sent a plain HTTP request, but '
                           'this server only speaks HTTPS on this port.')
                    buf = ['%s 400 Bad Request\r\n' % self.protocol,
                           'Content-Length: %s\r\n' % len(msg),
                           'Content-Type: text/plain\r\n\r\n',
                           msg]
                    sock_to_make = s if six.PY3 else s._sock
                    wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
                    try:
                        wfile.write(''.join(buf).encode('ISO-8859-1'))
                    except socket.error as ex:
                        if ex.args[0] not in errors.socket_errors_to_ignore:
                            raise
                    return
                if not s:
                    return
                mf = self.ssl_adapter.makefile
                # Re-apply our timeout since we may have a new socket object
                if hasattr(s, 'settimeout'):
                    s.settimeout(self.timeout)
            conn = self.ConnectionClass(self, s, mf)
            if not isinstance(self.bind_addr, six.string_types):
                # optional values
                # Until we do DNS lookups, omit REMOTE_HOST
                if addr is None:  # sometimes this can happen
                    # figure out if AF_INET or AF_INET6.
                    if len(s.getsockname()) == 2:
                        # AF_INET
                        addr = ('0.0.0.0', 0)
                    else:
                        # AF_INET6
                        addr = ('::', 0)
                conn.remote_addr = addr[0]
                conn.remote_port = addr[1]
            conn.ssl_env = ssl_env
            try:
                self.requests.put(conn)
            except queue.Full:
                # Just drop the conn. TODO: write 503 back?
                conn.close()
                return
        except socket.timeout:
            # The only reason for the timeout in start() is so we can
            # notice keyboard interrupts on Win32, which don't interrupt
            # accept() by default
            return
        except socket.error as ex:
            if self.stats['Enabled']:
                self.stats['Socket Errors'] += 1
            if ex.args[0] in errors.socket_error_eintr:
                # I *think* this is right. EINTR should occur when a signal
                # is received during the accept() call; all docs say retry
                # the call, and I *think* I'm reading it right that Python
                # will then go ahead and poll for and handle the signal
                # elsewhere. See
                # https://github.com/cherrypy/cherrypy/issues/707.
                return
            if ex.args[0] in errors.socket_errors_nonblocking:
                # Just try again. See
                # https://github.com/cherrypy/cherrypy/issues/479.
                return
            if ex.args[0] in errors.socket_errors_to_ignore:
                # Our socket was closed.
                # See https://github.com/cherrypy/cherrypy/issues/686.
                return
            raise
    @property
    def interrupt(self):
        """Flag interrupt of the server."""
        return self._interrupt
    @interrupt.setter
    def interrupt(self, interrupt):
        """Perform the shutdown of this server and save the exception."""
        # Set to True first so start() knows a stop is in progress, then
        # store the real exception (if any) once stop() has completed.
        self._interrupt = True
        self.stop()
        self._interrupt = interrupt
    def stop(self):
        """Gracefully shutdown a server that is serving forever."""
        self.ready = False
        if self._start_time is not None:
            self._run_time += (time.time() - self._start_time)
        self._start_time = None
        sock = getattr(self, 'socket', None)
        if sock:
            if not isinstance(self.bind_addr, six.string_types):
                # Touch our own socket to make accept() return immediately.
                try:
                    host, port = sock.getsockname()[:2]
                except socket.error as ex:
                    if ex.args[0] not in errors.socket_errors_to_ignore:
                        # Changed to use error code and not message
                        # See
                        # https://github.com/cherrypy/cherrypy/issues/860.
                        raise
                else:
                    # Note that we're explicitly NOT using AI_PASSIVE,
                    # here, because we want an actual IP to touch.
                    # localhost won't work if we've bound to a public IP,
                    # but it will if we bound to '0.0.0.0' (INADDR_ANY).
                    for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                                  socket.SOCK_STREAM):
                        af, socktype, proto, canonname, sa = res
                        s = None
                        try:
                            s = socket.socket(af, socktype, proto)
                            # See
                            # http://groups.google.com/group/cherrypy-users/
                            #     browse_frm/thread/bbfe5eb39c904fe0
                            s.settimeout(1.0)
                            s.connect((host, port))
                            s.close()
                        except socket.error:
                            if s:
                                s.close()
            if hasattr(sock, 'close'):
                sock.close()
            self.socket = None
        self.requests.stop(self.shutdown_timeout)
class Gateway(object):
    """Base class bridging HTTPServer to other systems, such as WSGI.

    A gateway holds the current request and knows how to turn it into a
    response via :meth:`respond`.
    """

    def __init__(self, req):
        """Remember the request this gateway will answer.

        Args:
            req (HTTPRequest): current HTTP request
        """
        self.req = req

    def respond(self):
        """Process the current request.

        Must be overridden in a subclass; the base class only signals that
        no gateway logic has been provided.
        """
        raise NotImplementedError
# These may either be ssl.Adapter subclasses or the string names
# of such classes (in which case they will be lazily loaded).
# Keys are the lowercase names accepted by get_ssl_adapter_class().
ssl_adapters = {
    'builtin': 'cheroot.ssl.builtin.BuiltinSSLAdapter',
    'pyopenssl': 'cheroot.ssl.pyopenssl.pyOpenSSLAdapter',
}
def get_ssl_adapter_class(name='builtin'):
    """Return an SSL adapter class for the given name.

    Args:
        name (str): key into ``ssl_adapters`` (case-insensitive)

    Returns:
        type: the adapter class. Registry entries that are dotted-path
        strings are resolved (importing their module lazily).

    Raises:
        AttributeError: if the resolved module lacks the named attribute.
    """
    adapter = ssl_adapters[name.lower()]
    if not isinstance(adapter, six.string_types):
        return adapter
    # Resolve a dotted path such as 'pkg.mod.ClassName'.
    last_dot = adapter.rfind('.')
    mod_path = adapter[:last_dot]
    attr_name = adapter[last_dot + 1:]
    mod = sys.modules.get(mod_path)
    if mod is None:
        # Not imported yet (or a dummy None entry); import it now.
        # The last [''] is important.
        mod = __import__(mod_path, globals(), locals(), [''])
    # Let an AttributeError propagate outward.
    try:
        adapter = getattr(mod, attr_name)
    except AttributeError:
        raise AttributeError("'%s' object has no attribute '%s'"
                             % (mod_path, attr_name))
    return adapter
Actually use 'response' in _conditional_error() method
"""
A high-speed, production ready, thread pooled, generic HTTP server.
For those of you wanting to understand internals of this module, here's the
basic call flow. The server's listening thread runs a very tight loop,
sticking incoming connections onto a Queue::
server = HTTPServer(...)
server.start()
while True:
tick()
# This blocks until a request comes in:
child = socket.accept()
conn = HTTPConnection(child, ...)
server.requests.put(conn)
Worker threads are kept in a pool and poll the Queue, popping off and then
handling each connection in turn. Each connection can consist of an arbitrary
number of requests and their responses, so we run a nested loop::
while True:
conn = server.requests.get()
conn.communicate()
-> while True:
req = HTTPRequest(...)
req.parse_request()
-> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
req.rfile.readline()
read_headers(req.rfile, req.inheaders)
req.respond()
-> response = app(...)
try:
for chunk in response:
if chunk:
req.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
if req.close_connection:
return
And now for a trivial doctest to exercise the test suite
>>> 'HTTPServer' in globals()
True
"""
import os
import io
import re
import email.utils
import socket
import sys
import time
import traceback as traceback_
import logging
import platform
import six
from six.moves import queue
from six.moves import urllib
from . import errors, __version__
from ._compat import bton, ntou
from .workers import threadpool
from .makefile import MakeFile
# Public API of this module.
__all__ = ('HTTPRequest', 'HTTPConnection', 'HTTPServer',
           'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
           'Gateway', 'get_ssl_adapter_class')
# Some Windows builds expose AF_INET6 but lack the IPv6 socket-option
# constants; backfill the numeric values so the dual-stack code in
# HTTPServer.bind() can run.
if 'win' in sys.platform and hasattr(socket, 'AF_INET6'):
    if not hasattr(socket, 'IPPROTO_IPV6'):
        socket.IPPROTO_IPV6 = 41
    if not hasattr(socket, 'IPV6_V6ONLY'):
        socket.IPV6_V6ONLY = 27
# Byte constants used throughout the bytes-oriented parsing code.
LF = b'\n'
CRLF = b'\r\n'
TAB = b'\t'
SPACE = b' '
COLON = b':'
SEMICOLON = b';'
EMPTY = b''
ASTERISK = b'*'
FORWARD_SLASH = b'/'
QUOTED_SLASH = b'%2F'
QUOTED_SLASH_REGEX = re.compile(b'(?i)' + QUOTED_SLASH)
# Headers whose repeated values are folded into one comma-separated value.
comma_separated_headers = [
    b'Accept', b'Accept-Charset', b'Accept-Encoding',
    b'Accept-Language', b'Accept-Ranges', b'Allow', b'Cache-Control',
    b'Connection', b'Content-Encoding', b'Content-Language', b'Expect',
    b'If-Match', b'If-None-Match', b'Pragma', b'Proxy-Authenticate', b'TE',
    b'Trailer', b'Transfer-Encoding', b'Upgrade', b'Vary', b'Via', b'Warning',
    b'WWW-Authenticate',
]
# Shared statistics registry hung off the logging module.
if not hasattr(logging, 'statistics'):
    logging.statistics = {}
class HeaderReader(object):
    """Object for reading headers from an HTTP request.

    Interface and default implementation.
    """

    def __call__(self, rfile, hdict=None):
        """Read headers from the given stream into the given header dict.

        If hdict is None, a new header dict is created. Returns the
        populated header dict.

        Headers which are repeated are folded together using a comma if
        their specification so dictates.

        This function raises ValueError when the read bytes violate the
        HTTP spec.
        You should probably return "400 Bad Request" if this happens.
        """
        if hdict is None:
            hdict = {}
        # Name of the most recently parsed header, for obs-fold handling.
        hname = None
        while True:
            line = rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError('Illegal end of headers.')
            if line == CRLF:
                # Normal end of headers
                break
            if not line.endswith(CRLF):
                raise ValueError('HTTP requires CRLF terminators')
            # Bug fix: use a one-byte slice rather than an index. On
            # Python 3, indexing bytes yields an int, which never compares
            # equal to the SPACE/TAB byte strings, so continuation lines
            # were never recognized.
            if line[:1] in (SPACE, TAB):
                # It's a continuation line (obsolete line folding); it
                # extends the value of the most recent header.
                if hname is None:
                    # A continuation before any header field is malformed
                    # (previously this crashed with NameError on 'k').
                    raise ValueError('Illegal continuation line.')
                k = hname
                v = line.strip()
            else:
                try:
                    k, v = line.split(COLON, 1)
                except ValueError:
                    raise ValueError('Illegal header line.')
                v = v.strip()
                k = self._transform_key(k)
                hname = k
            if not self._allow_header(k):
                continue
            if k in comma_separated_headers:
                existing = hdict.get(hname)
                if existing:
                    v = b', '.join((existing, v))
            hdict[hname] = v
        return hdict

    def _allow_header(self, key_name):
        """Hook: return True to keep the header *key_name* (default all)."""
        return True

    def _transform_key(self, key_name):
        # TODO: what about TE and WWW-Authenticate?
        return key_name.strip().title()
class DropUnderscoreHeaderReader(HeaderReader):
    """Custom HeaderReader to exclude any headers with underscores in them."""

    def _allow_header(self, key_name):
        """Reject header names that contain an underscore.

        Bug fix: header names arrive as bytes here, so the membership test
        must use a bytes literal — on Python 3, ``'_' in b'X_Y'`` raises
        TypeError instead of matching. ``b'_'`` also works on Python 2,
        where bytes and str are the same type.
        """
        orig = super(DropUnderscoreHeaderReader, self)._allow_header(key_name)
        return orig and b'_' not in key_name
class SizeCheckWrapper(object):
    """Wraps a file-like object, raising MaxSizeExceeded if too large."""
    def __init__(self, rfile, maxlen):
        """Initialize SizeCheckWrapper instance.
        Args:
            rfile (file): file of a limited size
            maxlen (int): maximum length of the file being read
        """
        self.rfile = rfile
        self.maxlen = maxlen
        # Cumulative count of bytes pulled through this wrapper.
        self.bytes_read = 0
    def _check_length(self):
        # Raise once the running byte count exceeds the cap (0 = unlimited).
        if self.maxlen and self.bytes_read > self.maxlen:
            raise errors.MaxSizeExceeded()
    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.
        Args:
            size (int): amount of data to read
        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        data = self.rfile.read(size)
        self.bytes_read += len(data)
        self._check_length()
        return data
    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.
        Args:
            size (int): minimum amount of data to read
        Returns:
            bytes: One line from rfile.
        """
        if size is not None:
            data = self.rfile.readline(size)
            self.bytes_read += len(data)
            self._check_length()
            return data
        # User didn't specify a size ...
        # We read the line in chunks to make sure it's not a 100MB line !
        res = []
        while True:
            data = self.rfile.readline(256)
            self.bytes_read += len(data)
            self._check_length()
            res.append(data)
            # See https://github.com/cherrypy/cherrypy/issues/421
            if len(data) < 256 or data[-1:] == LF:
                return EMPTY.join(res)
    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.
        Args:
            sizehint (int): hint of minimum amount of data to read
        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines
    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()
    def __iter__(self):
        """Return file iterator."""
        return self
    def __next__(self):
        """Generate next file chunk."""
        data = next(self.rfile)
        self.bytes_read += len(data)
        self._check_length()
        return data
    # Python 2 iterator protocol spells __next__ as next.
    def next(self):
        """Generate next file chunk."""
        data = self.rfile.next()
        self.bytes_read += len(data)
        self._check_length()
        return data
class KnownLengthRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted."""

    def __init__(self, rfile, content_length):
        """Initialize KnownLengthRFile instance.

        Args:
            rfile (file): file of a known size
            content_length (int): length of the file being read
        """
        self.rfile = rfile
        self.remaining = content_length

    def _clamp(self, size):
        """Limit *size* to the bytes still unread (None means 'the rest')."""
        if size is None:
            return self.remaining
        return min(size, self.remaining)

    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.

        Args:
            size (int): amount of data to read

        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        if self.remaining == 0:
            return b''
        chunk = self.rfile.read(self._clamp(size))
        self.remaining -= len(chunk)
        return chunk

    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.

        Args:
            size (int): minimum amount of data to read

        Returns:
            bytes: One line from rfile.
        """
        if self.remaining == 0:
            return b''
        chunk = self.rfile.readline(self._clamp(size))
        self.remaining -= len(chunk)
        return chunk

    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.

        Args:
            sizehint (int): hint of minimum amount of data to read

        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines

    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()

    def __iter__(self):
        """Return file iterator."""
        return self

    def __next__(self):
        """Generate next file chunk."""
        chunk = next(self.rfile)
        self.remaining -= len(chunk)
        return chunk
class ChunkedRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted.

    This class is intended to provide a conforming wsgi.input value for
    request entities that have been encoded with the 'chunked' transfer
    encoding.
    """

    def __init__(self, rfile, maxlen, bufsize=8192):
        """Initialize ChunkedRFile instance.

        Args:
            rfile (file): file encoded with the 'chunked' transfer encoding
            maxlen (int): maximum length of the file being read
            bufsize (int): size of the buffer used to read the file
        """
        self.rfile = rfile
        self.maxlen = maxlen
        self.bytes_read = 0
        self.buffer = EMPTY
        self.bufsize = bufsize
        self.closed = False

    def _fetch(self):
        """Decode the next chunk from rfile into self.buffer."""
        if self.closed:
            return
        line = self.rfile.readline()
        self.bytes_read += len(line)
        if self.maxlen and self.bytes_read > self.maxlen:
            raise errors.MaxSizeExceeded(
                'Request Entity Too Large', self.maxlen)
        line = line.strip().split(SEMICOLON, 1)
        try:
            chunk_size = line.pop(0)
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise ValueError('Bad chunked transfer size: ' + repr(chunk_size))
        if chunk_size <= 0:
            # A zero-size chunk marks the end of the body.
            self.closed = True
            return
        # if line: chunk_extension = line[0]
        if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
            raise IOError('Request Entity Too Large')
        chunk = self.rfile.read(chunk_size)
        self.bytes_read += len(chunk)
        self.buffer += chunk
        crlf = self.rfile.read(2)
        if crlf != CRLF:
            raise ValueError(
                "Bad chunked transfer coding (expected '\\r\\n', "
                'got ' + repr(crlf) + ')')

    def read(self, size=None):
        """Read a chunk from rfile buffer and return it.

        Args:
            size (int): amount of data to read

        Returns:
            bytes: Chunk from rfile, limited by size if specified.
        """
        data = EMPTY
        if size == 0:
            return data
        while True:
            if size and len(data) >= size:
                return data
            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data
            if size:
                remaining = size - len(data)
                data += self.buffer[:remaining]
                self.buffer = self.buffer[remaining:]
            else:
                data += self.buffer
                self.buffer = EMPTY

    def readline(self, size=None):
        """Read a single line from rfile buffer and return it.

        Args:
            size (int): minimum amount of data to read

        Returns:
            bytes: One line from rfile, including the trailing LF when one
            is found within the size limit.
        """
        data = EMPTY
        if size == 0:
            return data
        while True:
            if size and len(data) >= size:
                return data
            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data
            newline_pos = self.buffer.find(LF)
            if newline_pos == -1:
                # No newline buffered yet; take what we can and loop.
                if size:
                    remaining = size - len(data)
                    data += self.buffer[:remaining]
                    self.buffer = self.buffer[remaining:]
                else:
                    data += self.buffer
                    self.buffer = EMPTY
            else:
                # Bug fix: consume (and include) the LF itself, then return.
                # The previous code stopped *before* the newline and never
                # removed it from the buffer, so the next iteration made no
                # progress (an infinite loop) and lines were unterminated.
                line_end = newline_pos + 1
                if size:
                    remaining = min(size - len(data), line_end)
                else:
                    remaining = line_end
                data += self.buffer[:remaining]
                self.buffer = self.buffer[remaining:]
                return data

    def readlines(self, sizehint=0):
        """Read all lines from rfile buffer and return them.

        Args:
            sizehint (int): hint of minimum amount of data to read

        Returns:
            list[bytes]: Lines of bytes read from rfile.
        """
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines

    def read_trailer_lines(self):
        """Read HTTP headers and yield them.

        Returns:
            Generator: yields CRLF separated lines.
        """
        if not self.closed:
            raise ValueError(
                'Cannot read trailers until the request body has been read.')
        while True:
            line = self.rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError('Illegal end of headers.')
            self.bytes_read += len(line)
            if self.maxlen and self.bytes_read > self.maxlen:
                raise IOError('Request Entity Too Large')
            if line == CRLF:
                # Normal end of headers
                break
            if not line.endswith(CRLF):
                raise ValueError('HTTP requires CRLF terminators')
            yield line

    def close(self):
        """Release resources allocated for rfile."""
        self.rfile.close()
class HTTPRequest(object):
    """An HTTP Request (and response).
    A single HTTP connection may consist of multiple request/response pairs.
    """
    server = None
    """The HTTPServer object which is receiving this request."""
    conn = None
    """The HTTPConnection object on which this request connected."""
    inheaders = {}
    """A dict of request headers."""
    outheaders = []
    """A list of header tuples to write in the response."""
    ready = False
    """When True, the request has been parsed and is ready to begin generating
    the response. When False, signals the calling Connection that the response
    should not be generated and the connection should close."""
    close_connection = False
    """Signals the calling Connection that the request should close. This does
    not imply an error! The client and/or server may each request that the
    connection be closed."""
    chunked_write = False
    """If True, output will be encoded with the "chunked" transfer-coding.
    This value is set automatically inside send_headers."""
    # Shared default header parser; __init__-time instances may override it.
    header_reader = HeaderReader()
    """
    A HeaderReader instance or compatible reader.
    """
    def __init__(self, server, conn, proxy_mode=False, strict_mode=True):
        """Initialize HTTP request container instance.
        Args:
            server (HTTPServer): web server object receiving this request
            conn (HTTPConnection): HTTP connection object for this request
            proxy_mode (bool): whether this HTTPServer should behave as a PROXY
            server for certain requests
            strict_mode (bool): whether we should return a 400 Bad Request when
            we encounter a request that a HTTP compliant client should not be
            making
        """
        self.server = server
        self.conn = conn
        self.ready = False
        self.started_request = False
        # Scheme advertised to the app: https when an SSL adapter is active.
        self.scheme = b'http'
        if self.server.ssl_adapter is not None:
            self.scheme = b'https'
        # Use the lowest-common protocol in case read_request_line errors.
        self.response_protocol = 'HTTP/1.0'
        self.inheaders = {}
        self.status = ''
        self.outheaders = []
        self.sent_headers = False
        # Start from the class-level defaults for per-request flags.
        self.close_connection = self.__class__.close_connection
        self.chunked_read = False
        self.chunked_write = self.__class__.chunked_write
        self.proxy_mode = proxy_mode
        self.strict_mode = strict_mode
def parse_request(self):
"""Parse the next HTTP request start-line and message-headers."""
self.rfile = SizeCheckWrapper(self.conn.rfile,
self.server.max_request_header_size)
try:
success = self.read_request_line()
except errors.MaxSizeExceeded:
self.simple_response(
'414 Request-URI Too Long',
'The Request-URI sent with the request exceeds the maximum '
'allowed bytes.')
return
else:
if not success:
return
try:
success = self.read_request_headers()
except errors.MaxSizeExceeded:
self.simple_response(
'413 Request Entity Too Large',
'The headers sent with the request exceed the maximum '
'allowed bytes.')
return
else:
if not success:
return
self.ready = True
    def read_request_line(self):
        """Read and parse first line of the HTTP request.

        On success, populates ``self.uri``, ``self.method``,
        ``self.scheme``, ``self.authority``, ``self.path``, ``self.qs``,
        ``self.request_protocol`` and ``self.response_protocol``. On any
        malformed input, writes a simple error response and returns False.

        Returns:
            bool: True if the request line is valid or False if it's malformed.
        """
        # HTTP/1.1 connections are persistent by default. If a client
        # requests a page, then idles (leaves the connection open),
        # then rfile.readline() will raise socket.error("timed out").
        # Note that it does this based on the value given to settimeout(),
        # and doesn't need the client to request or acknowledge the close
        # (although your TCP stack might suffer for it: cf Apache's history
        # with FIN_WAIT_2).
        request_line = self.rfile.readline()
        # Set started_request to True so communicate() knows to send 408
        # from here on out.
        self.started_request = True
        if not request_line:
            return False
        if request_line == CRLF:
            # RFC 2616 sec 4.1: "...if the server is reading the protocol
            # stream at the beginning of a message and receives a CRLF
            # first, it should ignore the CRLF."
            # But only ignore one leading line! else we enable a DoS.
            request_line = self.rfile.readline()
            if not request_line:
                return False
        if not request_line.endswith(CRLF):
            self.simple_response(
                '400 Bad Request', 'HTTP requires CRLF terminators')
            return False
        try:
            method, uri, req_protocol = request_line.strip().split(SPACE, 2)
            if not req_protocol.startswith(b'HTTP/'):
                self.simple_response(
                    '400 Bad Request', 'Malformed Request-Line: bad protocol'
                )
                return False
            rp = req_protocol[5:].split(b'.', 1)
            rp = tuple(map(int, rp))  # major/minor must be treated as ints
            if rp > (1, 1):
                self.simple_response(
                    '505 HTTP Version Not Supported', 'Cannot fulfill request'
                )
                return False
        except (ValueError, IndexError):
            # Covers a request line with too few fields and a non-numeric
            # protocol version alike.
            self.simple_response('400 Bad Request', 'Malformed Request-Line')
            return False
        self.uri = uri
        self.method = method.upper()
        # RFC 7231 sec 4.1: method names are case-sensitive, so in strict
        # mode a non-uppercase method is rejected rather than normalized.
        if self.strict_mode and method != self.method:
            resp = (
                'Malformed method name: According to RFC 2616 '
                '(section 5.1.1) and its successors '
                'RFC 7230 (section 3.1.1) and RFC 7231 (section 4.1) '
                'method names are case-sensitive and uppercase.'
            )
            self.simple_response('400 Bad Request', resp)
            return False
        try:
            if six.PY2: # FIXME: Figure out better way to do this
                # Ref: https://stackoverflow.com/a/196392/595220 (like this?)
                """This is a dummy check for unicode in URI."""
                ntou(bton(uri, 'ascii'), 'ascii')
            scheme, authority, path, qs, fragment = urllib.parse.urlsplit(uri)
        except UnicodeError:
            self.simple_response('400 Bad Request', 'Malformed Request-URI')
            return False
        if self.method == b'OPTIONS':
            # TODO: cover this branch with tests
            path = (uri
                    # https://tools.ietf.org/html/rfc7230#section-5.3.4
                    if self.proxy_mode or uri == ASTERISK
                    else path)
        elif self.method == b'CONNECT':
            # TODO: cover this branch with tests
            if not self.proxy_mode:
                self.simple_response('405 Method Not Allowed')
                return False
            # `urlsplit()` above parses "example.com:3128" as path part of URI.
            # this is a workaround, which makes it detect netloc correctly
            uri_split = urllib.parse.urlsplit(b'//' + uri)
            _scheme, _authority, _path, _qs, _fragment = uri_split
            _port = EMPTY
            try:
                _port = uri_split.port
            except ValueError:
                pass
            # FIXME: use third-party validation to make checks against RFC
            # the validation doesn't take into account, that urllib parses
            # invalid URIs without raising errors
            # https://tools.ietf.org/html/rfc7230#section-5.3.3
            # CONNECT targets must be pure authority-form ("host:port"):
            # any scheme/path/query/fragment, or a missing port, is invalid.
            invalid_path = (
                _authority != uri
                or not _port
                or any((_scheme, _path, _qs, _fragment))
            )
            if invalid_path:
                self.simple_response('400 Bad Request',
                                     'Invalid path in Request-URI: request-'
                                     'target must match authority-form.')
                return False
            authority = path = _authority
            scheme = qs = fragment = EMPTY
        else:
            uri_is_absolute_form = (scheme or authority)
            disallowed_absolute = (
                self.strict_mode
                and not self.proxy_mode
                and uri_is_absolute_form
            )
            if disallowed_absolute:
                # https://tools.ietf.org/html/rfc7230#section-5.3.2
                # (absolute form)
                """Absolute URI is only allowed within proxies."""
                self.simple_response(
                    '400 Bad Request',
                    'Absolute URI not allowed if server is not a proxy.',
                )
                return False
            invalid_path = (
                self.strict_mode
                and not uri.startswith(FORWARD_SLASH)
                and not uri_is_absolute_form
            )
            if invalid_path:
                # https://tools.ietf.org/html/rfc7230#section-5.3.1
                # (origin_form) and
                """Path should start with a forward slash."""
                resp = (
                    'Invalid path in Request-URI: request-target must contain '
                    'origin-form which starts with absolute-path (URI '
                    'starting with a slash "/").'
                )
                self.simple_response('400 Bad Request', resp)
                return False
        if fragment:
            self.simple_response('400 Bad Request',
                                 'Illegal #fragment in Request-URI.')
            return False
        if path is None:
            # FIXME: It looks like this case cannot happen
            self.simple_response('400 Bad Request',
                                 'Invalid path in Request-URI.')
            return False
        # Unquote the path+params (e.g. "/this%20path" -> "/this path").
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
        #
        # But note that "...a URI must be separated into its components
        # before the escaped characters within those components can be
        # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
        # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not
        # "/this/path".
        try:
            # TODO: Figure out whether exception can really happen here.
            # It looks like it's caught on urlsplit() call above.
            atoms = [
                urllib.parse.unquote_to_bytes(x)
                for x in QUOTED_SLASH_REGEX.split(path)
            ]
        except ValueError as ex:
            self.simple_response('400 Bad Request', ex.args[0])
            return False
        path = QUOTED_SLASH.join(atoms)
        if not path.startswith(FORWARD_SLASH):
            path = FORWARD_SLASH + path
        if scheme is not EMPTY:
            self.scheme = scheme
        self.authority = authority
        self.path = path
        # Note that, like wsgiref and most other HTTP servers,
        # we "% HEX HEX"-unquote the path but not the query string.
        self.qs = qs
        # Compare request and server HTTP protocol versions, in case our
        # server does not support the requested protocol. Limit our output
        # to min(req, server). We want the following output:
        #     request   server    actual written     supported response
        #     protocol  protocol  response protocol  feature set
        # a     1.0       1.0          1.0                1.0
        # b     1.0       1.1          1.1                1.0
        # c     1.1       1.0          1.0                1.0
        # d     1.1       1.1          1.1                1.1
        # Notice that, in (b), the response will be "HTTP/1.1" even though
        # the client only understands 1.0. RFC 2616 10.5.6 says we should
        # only return 505 if the _major_ version is different.
        # self.server.protocol is e.g. 'HTTP/1.1'; characters 5 and 7 are
        # the major and minor version digits.
        sp = int(self.server.protocol[5]), int(self.server.protocol[7])
        if sp[0] != rp[0]:
            self.simple_response('505 HTTP Version Not Supported')
            return False
        self.request_protocol = req_protocol
        self.response_protocol = 'HTTP/%s.%s' % min(rp, sp)
        return True
    def read_request_headers(self):
        """Read self.rfile into self.inheaders. Return success.

        Also derives per-request connection semantics from the headers:
        keep-alive vs. close, chunked transfer decoding, the request body
        size limit, and the "Expect: 100-continue" handshake.

        Returns:
            bool: True if the headers were parsed and accepted; False if an
            error response was written and the request must be aborted.
        """
        # then all the http headers
        try:
            self.header_reader(self.rfile, self.inheaders)
        except ValueError as ex:
            # The header reader signals malformed header lines via
            # ValueError; relay its message to the client.
            self.simple_response('400 Bad Request', ex.args[0])
            return False
        mrbs = self.server.max_request_body_size
        # NOTE(review): a non-numeric Content-Length raises ValueError here
        # (surfacing as a 500 via communicate()); confirm whether a 400
        # response would be preferable.
        if mrbs and int(self.inheaders.get(b'Content-Length', 0)) > mrbs:
            self.simple_response(
                '413 Request Entity Too Large',
                'The entity sent with the request exceeds the maximum '
                'allowed bytes.')
            return False
        # Persistent connection support
        if self.response_protocol == 'HTTP/1.1':
            # Both server and client are HTTP/1.1
            if self.inheaders.get(b'Connection', b'') == b'close':
                self.close_connection = True
        else:
            # Either the server or client (or both) are HTTP/1.0
            if self.inheaders.get(b'Connection', b'') != b'Keep-Alive':
                self.close_connection = True
        # Transfer-Encoding support
        te = None
        if self.response_protocol == 'HTTP/1.1':
            te = self.inheaders.get(b'Transfer-Encoding')
            if te:
                te = [x.strip().lower() for x in te.split(b',') if x.strip()]
        self.chunked_read = False
        if te:
            for enc in te:
                if enc == b'chunked':
                    self.chunked_read = True
                else:
                    # Note that, even if we see "chunked", we must reject
                    # if there is an extension we don't recognize.
                    self.simple_response('501 Unimplemented')
                    self.close_connection = True
                    return False
        # From PEP 333:
        # "Servers and gateways that implement HTTP 1.1 must provide
        # transparent support for HTTP 1.1's "expect/continue" mechanism.
        # This may be done in any of several ways:
        # 1. Respond to requests containing an Expect: 100-continue request
        # with an immediate "100 Continue" response, and proceed normally.
        # 2. Proceed with the request normally, but provide the application
        # with a wsgi.input stream that will send the "100 Continue"
        # response if/when the application first attempts to read from
        # the input stream. The read request must then remain blocked
        # until the client responds.
        # 3. Wait until the client decides that the server does not support
        # expect/continue, and sends the request body on its own.
        # (This is suboptimal, and is not recommended.)
        #
        # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
        # but it seems like it would be a big slowdown for such a rare case.
        if self.inheaders.get(b'Expect', b'') == b'100-continue':
            # Don't use simple_response here, because it emits headers
            # we don't want. See
            # https://github.com/cherrypy/cherrypy/issues/951
            msg = self.server.protocol.encode('ascii')
            msg += b' 100 Continue\r\n\r\n'
            try:
                self.conn.wfile.write(msg)
            except socket.error as ex:
                # Ignore benign errors (client went away early); anything
                # else is a real failure and must propagate.
                if ex.args[0] not in errors.socket_errors_to_ignore:
                    raise
        return True
def respond(self):
"""Call the gateway and write its iterable output."""
mrbs = self.server.max_request_body_size
if self.chunked_read:
self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
else:
cl = int(self.inheaders.get(b'Content-Length', 0))
if mrbs and mrbs < cl:
if not self.sent_headers:
self.simple_response(
'413 Request Entity Too Large',
'The entity sent with the request exceeds the '
'maximum allowed bytes.')
return
self.rfile = KnownLengthRFile(self.conn.rfile, cl)
self.server.gateway(self).respond()
if (self.ready and not self.sent_headers):
self.sent_headers = True
self.send_headers()
if self.chunked_write:
self.conn.wfile.write(b'0\r\n\r\n')
def simple_response(self, status, msg=''):
"""Write a simple response back to the client."""
status = str(status)
proto_status = '%s %s\r\n' % (self.server.protocol, status)
content_length = 'Content-Length: %s\r\n' % len(msg)
content_type = 'Content-Type: text/plain\r\n'
buf = [
proto_status.encode('ISO-8859-1'),
content_length.encode('ISO-8859-1'),
content_type.encode('ISO-8859-1'),
]
if status[:3] in ('413', '414'):
# Request Entity Too Large / Request-URI Too Long
self.close_connection = True
if self.response_protocol == 'HTTP/1.1':
# This will not be true for 414, since read_request_line
# usually raises 414 before reading the whole line, and we
# therefore cannot know the proper response_protocol.
buf.append(b'Connection: close\r\n')
else:
# HTTP/1.0 had no 413/414 status nor Connection header.
# Emit 400 instead and trust the message body is enough.
status = '400 Bad Request'
buf.append(CRLF)
if msg:
if isinstance(msg, six.text_type):
msg = msg.encode('ISO-8859-1')
buf.append(msg)
try:
self.conn.wfile.write(EMPTY.join(buf))
except socket.error as ex:
if ex.args[0] not in errors.socket_errors_to_ignore:
raise
def write(self, chunk):
"""Write unbuffered data to the client."""
if self.chunked_write and chunk:
chunk_size_hex = hex(len(chunk))[2:].encode('ascii')
buf = [chunk_size_hex, CRLF, chunk, CRLF]
self.conn.wfile.write(EMPTY.join(buf))
else:
self.conn.wfile.write(chunk)
    def send_headers(self):
        """Assert, process, and send the HTTP response message-headers.

        You must set self.status, and self.outheaders before calling this.

        Derives Transfer-Encoding/Connection/Date/Server headers as needed
        from the response status and protocol, drains any unread request
        body on keep-alive connections, then writes the status line and
        all headers to the client.
        """
        hkeys = [key.lower() for key, value in self.outheaders]
        status = int(self.status[:3])
        if status == 413:
            # Request Entity Too Large. Close conn to avoid garbage.
            self.close_connection = True
        elif b'content-length' not in hkeys:
            # "All 1xx (informational), 204 (no content),
            # and 304 (not modified) responses MUST NOT
            # include a message-body." So no point chunking.
            if status < 200 or status in (204, 205, 304):
                pass
            else:
                needs_chunked = (
                    self.response_protocol == 'HTTP/1.1'
                    and self.method != b'HEAD'
                )
                if needs_chunked:
                    # Use the chunked transfer-coding
                    self.chunked_write = True
                    self.outheaders.append((b'Transfer-Encoding', b'chunked'))
                else:
                    # Closing the conn is the only way to determine len.
                    self.close_connection = True
        if b'connection' not in hkeys:
            if self.response_protocol == 'HTTP/1.1':
                # Both server and client are HTTP/1.1 or better
                if self.close_connection:
                    self.outheaders.append((b'Connection', b'close'))
            else:
                # Server and/or client are HTTP/1.0
                if not self.close_connection:
                    self.outheaders.append((b'Connection', b'Keep-Alive'))
        if (not self.close_connection) and (not self.chunked_read):
            # Read any remaining request body data on the socket.
            # "If an origin server receives a request that does not include an
            # Expect request-header field with the "100-continue" expectation,
            # the request includes a request body, and the server responds
            # with a final status code before reading the entire request body
            # from the transport connection, then the server SHOULD NOT close
            # the transport connection until it has read the entire request,
            # or until the client closes the connection. Otherwise, the client
            # might not reliably receive the response message. However, this
            # requirement is not be construed as preventing a server from
            # defending itself against denial-of-service attacks, or from
            # badly broken client implementations."
            remaining = getattr(self.rfile, 'remaining', 0)
            if remaining > 0:
                self.rfile.read(remaining)
        if b'date' not in hkeys:
            self.outheaders.append((
                b'Date',
                email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
            ))
        if b'server' not in hkeys:
            self.outheaders.append((
                b'Server',
                self.server.server_name.encode('ISO-8859-1'),
            ))
        proto = self.server.protocol.encode('ascii')
        buf = [proto + SPACE + self.status + CRLF]
        for k, v in self.outheaders:
            buf.append(k + COLON + SPACE + v + CRLF)
        buf.append(CRLF)
        self.conn.wfile.write(EMPTY.join(buf))
class HTTPConnection(object):
    """An HTTP connection (active socket).

    Owns the buffered read/write file objects wrapping the socket and
    drives the request/response loop in :meth:`communicate`.
    """
    remote_addr = None
    remote_port = None
    ssl_env = None
    rbufsize = io.DEFAULT_BUFFER_SIZE
    wbufsize = io.DEFAULT_BUFFER_SIZE
    RequestHandlerClass = HTTPRequest
    def __init__(self, server, sock, makefile=MakeFile):
        """Initialize HTTPConnection instance.
        Args:
            server (HTTPServer): web server object receiving this request
            socket (socket._socketobject): the raw socket object (usually
                TCP) for this connection
            makefile (file): a fileobject class for reading from the socket
        """
        self.server = server
        self.socket = sock
        self.rfile = makefile(sock, 'rb', self.rbufsize)
        self.wfile = makefile(sock, 'wb', self.wbufsize)
        self.requests_seen = 0
    def communicate(self):
        """Read each request and respond appropriately.

        Loops over pipelined requests on this connection until a request
        asks for the connection to close, parsing fails, or a socket/SSL
        error terminates the loop.
        """
        request_seen = False
        try:
            while True:
                # (re)set req to None so that if something goes wrong in
                # the RequestHandlerClass constructor, the error doesn't
                # get written to the previous request.
                req = None
                req = self.RequestHandlerClass(self.server, self)
                # This order of operations should guarantee correct pipelining.
                req.parse_request()
                if self.server.stats['Enabled']:
                    self.requests_seen += 1
                if not req.ready:
                    # Something went wrong in the parsing (and the server has
                    # probably already made a simple_response). Return and
                    # let the conn close.
                    return
                request_seen = True
                req.respond()
                if req.close_connection:
                    return
        except socket.error as ex:
            errnum = ex.args[0]
            # sadly SSL sockets return a different (longer) time out string
            timeout_errs = 'timed out', 'The read operation timed out'
            if errnum in timeout_errs:
                # Don't error if we're between requests; only error
                # if 1) no request has been started at all, or 2) we're
                # in the middle of a request.
                # See https://github.com/cherrypy/cherrypy/issues/853
                if (not request_seen) or (req and req.started_request):
                    self._conditional_error(req, '408 Request Timeout')
            elif errnum not in errors.socket_errors_to_ignore:
                self.server.error_log('socket.error %s' % repr(errnum),
                                      level=logging.WARNING, traceback=True)
                self._conditional_error(req, '500 Internal Server Error')
        except (KeyboardInterrupt, SystemExit):
            raise
        except errors.FatalSSLAlert:
            # The TLS layer is unusable; just let the connection drop.
            pass
        except errors.NoSSLError:
            self._handle_no_ssl(req)
        except Exception as ex:
            self.server.error_log(
                repr(ex), level=logging.ERROR, traceback=True)
            self._conditional_error(req, '500 Internal Server Error')
    # Class attribute despite its placement between methods: when True,
    # close() skips closing the sockets so the client has a chance to read
    # the full response first. Set by _handle_no_ssl().
    linger = False
    def _handle_no_ssl(self, req):
        # Respond in plain HTTP when a non-TLS request hit a TLS port.
        if not req or req.sent_headers:
            return
        # Unwrap wfile
        # NOTE(review): ``._sock`` exists on Python 2 socket objects;
        # confirm this path on Python 3.
        self.wfile = MakeFile(self.socket._sock, 'wb', self.wbufsize)
        msg = (
            'The client sent a plain HTTP request, but '
            'this server only speaks HTTPS on this port.'
        )
        req.simple_response('400 Bad Request', msg)
        self.linger = True
    def _conditional_error(self, req, response):
        """Respond with an error.
        Don't bother writing if a response
        has already started being written.
        """
        if not req or req.sent_headers:
            return
        try:
            req.simple_response(response)
        except errors.FatalSSLAlert:
            pass
        except errors.NoSSLError:
            self._handle_no_ssl(req)
    def close(self):
        """Close the socket underlying this connection."""
        self.rfile.close()
        if not self.linger:
            self._close_kernel_socket()
            self.socket.close()
        else:
            # On the other hand, sometimes we want to hang around for a bit
            # to make sure the client has a chance to read our entire
            # response. Skipping the close() calls here delays the FIN
            # packet until the socket object is garbage-collected later.
            # Someday, perhaps, we'll do the full lingering_close that
            # Apache does, but not today.
            pass
    def _close_kernel_socket(self):
        """Close kernel socket in outdated Python versions.
        On old Python versions,
        Python's socket module does NOT call close on the kernel
        socket when you call socket.close(). We do so manually here
        because we want this server to send a FIN TCP segment
        immediately. Note this must be called *before* calling
        socket.close(), because the latter drops its reference to
        the kernel socket.
        """
        if six.PY2 and hasattr(self.socket, '_sock'):
            self.socket._sock.close()
# Platform dispatch: define prevent_socket_inheritance() once, choosing the
# POSIX (fcntl), Windows (ctypes/kernel32), or no-op implementation based on
# what is importable at module load time.
try:
    import fcntl
except ImportError:
    # No fcntl: assume Windows and try the Win32 API via ctypes.
    try:
        from ctypes import windll, WinError
        import ctypes.wintypes
        _SetHandleInformation = windll.kernel32.SetHandleInformation
        _SetHandleInformation.argtypes = [
            ctypes.wintypes.HANDLE,
            ctypes.wintypes.DWORD,
            ctypes.wintypes.DWORD,
        ]
        _SetHandleInformation.restype = ctypes.wintypes.BOOL
    except ImportError:
        def prevent_socket_inheritance(sock):
            """Dummy function, since neither fcntl nor ctypes are available."""
            pass
    else:
        def prevent_socket_inheritance(sock):
            """Mark the given socket fd as non-inheritable (Windows)."""
            # Clears HANDLE_FLAG_INHERIT (mask 1, flags 0) on the handle.
            if not _SetHandleInformation(sock.fileno(), 1, 0):
                raise WinError()
else:
    def prevent_socket_inheritance(sock):
        """Mark the given socket fd as non-inheritable (POSIX)."""
        # Set FD_CLOEXEC so forked children don't keep the listen socket
        # (and thus the bound port) open across exec.
        fd = sock.fileno()
        old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
class HTTPServer(object):
    """An HTTP server.

    Accepts connections on a TCP or UNIX socket and hands each one to a
    worker thread pool; per-connection work is done by ConnectionClass.
    """
    _bind_addr = '127.0.0.1'
    _interrupt = None
    gateway = None
    """A Gateway instance."""
    minthreads = None
    """The minimum number of worker threads to create (default 10)."""
    maxthreads = None
    """The maximum number of worker threads to create.
    (default -1 = no limit)"""
    server_name = None
    """The name of the server; defaults to ``self.version``."""
    protocol = 'HTTP/1.1'
    """The version string to write in the Status-Line of all HTTP responses.
    For example, "HTTP/1.1" is the default. This also limits the supported
    features used in the response."""
    request_queue_size = 5
    """The 'backlog' arg to socket.listen(); max queued connections.
    (default 5)."""
    shutdown_timeout = 5
    """The total time to wait for worker threads to cleanly exit.
    Specified in seconds."""
    timeout = 10
    """The timeout in seconds for accepted connections (default 10)."""
    version = 'Cheroot/' + __version__
    """A version string for the HTTPServer."""
    software = None
    """The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
    If None, this defaults to ``'%s Server' % self.version``.
    """
    ready = False
    """Internal flag which indicating the socket is accepting connections."""
    max_request_header_size = 0
    """The maximum size, in bytes, for request headers, or 0 for no limit."""
    max_request_body_size = 0
    """The maximum size, in bytes, for request bodies, or 0 for no limit."""
    nodelay = True
    """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
    ConnectionClass = HTTPConnection
    """The class to use for handling HTTP connections."""
    ssl_adapter = None
    """An instance of ssl.Adapter (or a subclass).
    You must have the corresponding SSL driver library installed.
    """
    def __init__(
            self, bind_addr, gateway, minthreads=10, maxthreads=-1,
            server_name=None):
        """Initialize HTTPServer instance.
        Args:
            bind_addr (tuple): network interface to listen to
            gateway (Gateway): gateway for processing HTTP requests
            minthreads (int): minimum number of threads for HTTP thread pool
            maxthreads (int): maximum number of threads for HTTP thread pool
            server_name (str): web server name to be advertised via Server
                HTTP header
        """
        self.bind_addr = bind_addr
        self.gateway = gateway
        # ``minthreads or 1`` guards against a zero/None minimum: the pool
        # needs at least one worker.
        self.requests = threadpool.ThreadPool(
            self, min=minthreads or 1, max=maxthreads)
        if not server_name:
            server_name = self.version
        self.server_name = server_name
        self.clear_stats()
    def clear_stats(self):
        """Reset server stat counters."""
        self._start_time = None
        self._run_time = 0
        # Each lambda receives the stats dict itself as ``s`` when the
        # statistics are rendered, so values are computed lazily on demand.
        self.stats = {
            'Enabled': False,
            'Bind Address': lambda s: repr(self.bind_addr),
            'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
            'Accepts': 0,
            'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
            'Queue': lambda s: getattr(self.requests, 'qsize', None),
            'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
            'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
            'Socket Errors': 0,
            'Requests': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Requests'](w) for w in s['Worker Threads'].values()], 0),
            'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0),
            'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Written'](w) for w in s['Worker Threads'].values()],
                0),
            'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Work Time'](w) for w in s['Worker Threads'].values()], 0),
            'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
                 for w in s['Worker Threads'].values()], 0),
            'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
                 for w in s['Worker Threads'].values()], 0),
            'Worker Threads': {},
        }
        logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
    def runtime(self):
        """Return server uptime."""
        if self._start_time is None:
            return self._run_time
        else:
            return self._run_time + (time.time() - self._start_time)
    def __str__(self):
        """Render Server instance representing bind address."""
        return '%s.%s(%r)' % (self.__module__, self.__class__.__name__,
                              self.bind_addr)
    @property
    def bind_addr(self):
        """Return the interface on which to listen for connections.
        For TCP sockets, a (host, port) tuple. Host values may be any IPv4
        or IPv6 address, or any valid hostname. The string 'localhost' is a
        synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
        The string '0.0.0.0' is a special IPv4 entry meaning "any active
        interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
        IPv6. The empty string or None are not allowed.
        For UNIX sockets, supply the filename as a string.
        Systemd socket activation is automatic and doesn't require tempering
        with this variable.
        """
        return self._bind_addr
    @bind_addr.setter
    def bind_addr(self, value):
        """Set the interface on which to listen for connections."""
        if isinstance(value, tuple) and value[0] in ('', None):
            # Despite the socket module docs, using '' does not
            # allow AI_PASSIVE to work. Passing None instead
            # returns '0.0.0.0' like we want. In other words:
            # host AI_PASSIVE result
            # '' Y 192.168.x.y
            # '' N 192.168.x.y
            # None Y 0.0.0.0
            # None N 127.0.0.1
            # But since you can get the same effect with an explicit
            # '0.0.0.0', we deny both the empty string and None as values.
            raise ValueError("Host values of '' or None are not allowed. "
                             "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
                             'to listen on all active interfaces.')
        self._bind_addr = value
    def safe_start(self):
        """Run the server forever, and stop it cleanly on exit."""
        try:
            self.start()
        except (KeyboardInterrupt, IOError):
            # The time.sleep call might raise
            # "IOError: [Errno 4] Interrupted function call" on KBInt.
            self.error_log('Keyboard Interrupt: shutting down')
            self.stop()
            raise
        except SystemExit:
            self.error_log('SystemExit raised: shutting down')
            self.stop()
            raise
    def start(self):
        """Run the server forever."""
        # We don't have to trap KeyboardInterrupt or SystemExit here,
        # because cherrypy.server already does so, calling self.stop() for us.
        # If you're using this server with another framework, you should
        # trap those exceptions in whatever code block calls start().
        self._interrupt = None
        if self.software is None:
            self.software = '%s Server' % self.version
        # Select the appropriate socket
        self.socket = None
        if os.getenv('LISTEN_PID', None):
            # systemd socket activation
            # NOTE(review): assumes the activated socket is fd 3
            # (SD_LISTEN_FDS_START) and AF_INET — confirm for AF_INET6/UNIX.
            self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
        elif isinstance(self.bind_addr, six.string_types):
            # AF_UNIX socket
            # So we can reuse the socket...
            try:
                os.unlink(self.bind_addr)
            except Exception:
                pass
            # So everyone can access the socket...
            try:
                os.chmod(self.bind_addr, 0o777)
            except Exception:
                pass
            info = [
                (socket.AF_UNIX, socket.SOCK_STREAM, 0, '', self.bind_addr)]
        else:
            # AF_INET or AF_INET6 socket
            # Get the correct address family for our host (allows IPv6
            # addresses)
            host, port = self.bind_addr
            try:
                info = socket.getaddrinfo(
                    host, port, socket.AF_UNSPEC,
                    socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
            except socket.gaierror:
                sock_type = socket.AF_INET
                bind_addr = self.bind_addr
                if ':' in host:
                    sock_type = socket.AF_INET6
                    bind_addr = bind_addr + (0, 0)
                info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
        if not self.socket:
            # Try each resolved address until one binds; collect failures
            # into a single message for the final error.
            msg = 'No socket could be created'
            for res in info:
                af, socktype, proto, canonname, sa = res
                try:
                    self.bind(af, socktype, proto)
                    break
                except socket.error as serr:
                    msg = '%s -- (%s: %s)' % (msg, sa, serr)
                    if self.socket:
                        self.socket.close()
                    self.socket = None
            if not self.socket:
                raise socket.error(msg)
        # Timeout so KeyboardInterrupt can be caught on Win32
        self.socket.settimeout(1)
        self.socket.listen(self.request_queue_size)
        # Create worker threads
        self.requests.start()
        self.ready = True
        self._start_time = time.time()
        while self.ready:
            try:
                self.tick()
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                self.error_log('Error in HTTPServer.tick', level=logging.ERROR,
                               traceback=True)
            if self.interrupt:
                while self.interrupt is True:
                    # Wait for self.stop() to complete. See _set_interrupt.
                    time.sleep(0.1)
                if self.interrupt:
                    raise self.interrupt
    def error_log(self, msg='', level=20, traceback=False):
        """Write error message to log.
        Args:
            msg (str): error message
            level (int): logging level
            traceback (bool): add traceback to output or not
        """
        # Override this in subclasses as desired
        sys.stderr.write(msg + '\n')
        sys.stderr.flush()
        if traceback:
            # traceback_ is the stdlib traceback module, aliased to avoid
            # clashing with the ``traceback`` parameter above.
            tblines = traceback_.format_exc()
            sys.stderr.write(tblines)
            sys.stderr.flush()
    def bind(self, family, type, proto=0):
        """Create (or recreate) the actual socket object."""
        self.socket = socket.socket(family, type, proto)
        prevent_socket_inheritance(self.socket)
        if platform.system() != 'Windows':
            # Windows has different semantics for SO_REUSEADDR,
            # so don't set it.
            # https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if self.nodelay and not isinstance(self.bind_addr, str):
            self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        if self.ssl_adapter is not None:
            self.socket = self.ssl_adapter.bind(self.socket)
        host, port = self.bind_addr[:2]
        # If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
        # activate dual-stack. See
        # https://github.com/cherrypy/cherrypy/issues/871.
        listening_ipv6 = (
            hasattr(socket, 'AF_INET6')
            and family == socket.AF_INET6
            and host in ('::', '::0', '::0.0.0.0')
        )
        if listening_ipv6:
            try:
                self.socket.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
            except (AttributeError, socket.error):
                # Apparently, the socket option is not available in
                # this machine's TCP stack
                pass
        self.socket.bind(self.bind_addr)
        self.bind_addr = self.socket.getsockname()[:2] # TODO: keep separate
    def tick(self):
        """Accept a new connection and put it on the Queue."""
        try:
            s, addr = self.socket.accept()
            if self.stats['Enabled']:
                self.stats['Accepts'] += 1
            if not self.ready:
                return
            prevent_socket_inheritance(s)
            if hasattr(s, 'settimeout'):
                s.settimeout(self.timeout)
            mf = MakeFile
            ssl_env = {}
            # if ssl cert and key are set, we try to be a secure HTTP server
            if self.ssl_adapter is not None:
                try:
                    s, ssl_env = self.ssl_adapter.wrap(s)
                except errors.NoSSLError:
                    # Plain HTTP arrived on the TLS port: answer in plain
                    # HTTP on the raw socket, then drop the connection.
                    msg = ('The client sent a plain HTTP request, but '
                           'this server only speaks HTTPS on this port.')
                    buf = ['%s 400 Bad Request\r\n' % self.protocol,
                           'Content-Length: %s\r\n' % len(msg),
                           'Content-Type: text/plain\r\n\r\n',
                           msg]
                    sock_to_make = s if six.PY3 else s._sock
                    wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
                    try:
                        wfile.write(''.join(buf).encode('ISO-8859-1'))
                    except socket.error as ex:
                        if ex.args[0] not in errors.socket_errors_to_ignore:
                            raise
                    return
                if not s:
                    return
                mf = self.ssl_adapter.makefile
                # Re-apply our timeout since we may have a new socket object
                if hasattr(s, 'settimeout'):
                    s.settimeout(self.timeout)
            conn = self.ConnectionClass(self, s, mf)
            if not isinstance(self.bind_addr, six.string_types):
                # optional values
                # Until we do DNS lookups, omit REMOTE_HOST
                if addr is None: # sometimes this can happen
                    # figure out if AF_INET or AF_INET6.
                    if len(s.getsockname()) == 2:
                        # AF_INET
                        addr = ('0.0.0.0', 0)
                    else:
                        # AF_INET6
                        addr = ('::', 0)
                conn.remote_addr = addr[0]
                conn.remote_port = addr[1]
            conn.ssl_env = ssl_env
            try:
                self.requests.put(conn)
            except queue.Full:
                # Just drop the conn. TODO: write 503 back?
                conn.close()
                return
        except socket.timeout:
            # The only reason for the timeout in start() is so we can
            # notice keyboard interrupts on Win32, which don't interrupt
            # accept() by default
            return
        except socket.error as ex:
            if self.stats['Enabled']:
                self.stats['Socket Errors'] += 1
            if ex.args[0] in errors.socket_error_eintr:
                # I *think* this is right. EINTR should occur when a signal
                # is received during the accept() call; all docs say retry
                # the call, and I *think* I'm reading it right that Python
                # will then go ahead and poll for and handle the signal
                # elsewhere. See
                # https://github.com/cherrypy/cherrypy/issues/707.
                return
            if ex.args[0] in errors.socket_errors_nonblocking:
                # Just try again. See
                # https://github.com/cherrypy/cherrypy/issues/479.
                return
            if ex.args[0] in errors.socket_errors_to_ignore:
                # Our socket was closed.
                # See https://github.com/cherrypy/cherrypy/issues/686.
                return
            raise
    @property
    def interrupt(self):
        """Flag interrupt of the server."""
        return self._interrupt
    @interrupt.setter
    def interrupt(self, interrupt):
        """Perform the shutdown of this server and save the exception."""
        # While stop() runs, _interrupt holds the sentinel True so the
        # start() loop can tell shutdown is still in progress.
        self._interrupt = True
        self.stop()
        self._interrupt = interrupt
    def stop(self):
        """Gracefully shutdown a server that is serving forever."""
        self.ready = False
        if self._start_time is not None:
            self._run_time += (time.time() - self._start_time)
        self._start_time = None
        sock = getattr(self, 'socket', None)
        if sock:
            if not isinstance(self.bind_addr, six.string_types):
                # Touch our own socket to make accept() return immediately.
                try:
                    host, port = sock.getsockname()[:2]
                except socket.error as ex:
                    if ex.args[0] not in errors.socket_errors_to_ignore:
                        # Changed to use error code and not message
                        # See
                        # https://github.com/cherrypy/cherrypy/issues/860.
                        raise
                else:
                    # Note that we're explicitly NOT using AI_PASSIVE,
                    # here, because we want an actual IP to touch.
                    # localhost won't work if we've bound to a public IP,
                    # but it will if we bound to '0.0.0.0' (INADDR_ANY).
                    for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                                  socket.SOCK_STREAM):
                        af, socktype, proto, canonname, sa = res
                        s = None
                        try:
                            s = socket.socket(af, socktype, proto)
                            # See
                            # http://groups.google.com/group/cherrypy-users/
                            # browse_frm/thread/bbfe5eb39c904fe0
                            s.settimeout(1.0)
                            s.connect((host, port))
                            s.close()
                        except socket.error:
                            if s:
                                s.close()
            if hasattr(sock, 'close'):
                sock.close()
            self.socket = None
        self.requests.stop(self.shutdown_timeout)
class Gateway(object):
    """Base class to interface HTTPServer with other systems, such as WSGI.

    Subclasses translate the stored HTTP request into another protocol's
    request/response cycle by overriding :meth:`respond`.
    """

    def __init__(self, req):
        """Remember the request this gateway instance will service.

        Args:
            req (HTTPRequest): current HTTP request
        """
        self.req = req

    def respond(self):
        """Process the current request. Must be overridden in a subclass."""
        raise NotImplementedError
# These may either be ssl.Adapter subclasses or the string names
# of such classes (in which case they will be lazily loaded by
# get_ssl_adapter_class below on first use).
ssl_adapters = {
    'builtin': 'cheroot.ssl.builtin.BuiltinSSLAdapter',
    'pyopenssl': 'cheroot.ssl.pyopenssl.pyOpenSSLAdapter',
}
def get_ssl_adapter_class(name='builtin'):
    """Return an SSL adapter class for the given name.

    Looks the name up in ``ssl_adapters``; string entries are resolved
    lazily by importing the containing module and fetching the attribute.
    Raises KeyError for an unknown name and AttributeError when the module
    does not define the expected class.
    """
    adapter = ssl_adapters[name.lower()]
    if isinstance(adapter, six.string_types):
        # Split "pkg.module.ClassName" into module path and attribute.
        mod_path, _, attr_name = adapter.rpartition('.')
        mod = sys.modules.get(mod_path)
        if mod is None:
            # Not imported yet (or a None placeholder).
            # The last [''] is important.
            mod = __import__(mod_path, globals(), locals(), [''])
        # Let an AttributeError propagate outward, with a clearer message.
        try:
            adapter = getattr(mod, attr_name)
        except AttributeError:
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (mod_path, attr_name))
    return adapter
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.db.models import F, Q, Count
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
from django.core.exceptions import ValidationError
from mptt.models import MPTTModel, TreeForeignKey
from tickle.models import Person, PersonQuerySet, BaseDiscount
class WorkerQuerySet(PersonQuerySet):
    """Queryset helpers for selecting crew members among persons."""

    def workers(self):
        """Persons with at least one shift registration or a functionary record."""
        has_shift = Q(shift_registrations__isnull=False)
        is_functionary = Q(functionary__isnull=False)
        return self.filter(has_shift | is_functionary).distinct()

    def functionaries(self):
        """Persons with a functionary record."""
        return self.filter(functionary__isnull=False)
class Worker(Person):
    """
    Proxy class to tickle.Person so we can add some Fungus specific methods.
    """
    # Proxy model: no new DB table, only the custom queryset below.
    objects = WorkerQuerySet.as_manager()

    class Meta:
        proxy = True
        verbose_name = _('worker')
        verbose_name_plural = _('workers')
@python_2_unicode_compatible
class Functionary(models.Model):
    """Crew-member record attached one-to-one to a tickle.Person."""

    person = models.OneToOneField('tickle.Person', related_name='functionary', verbose_name=_('person'))
    registered = models.DateTimeField(auto_now_add=True, verbose_name=_('registration timestamp'))
    # auto_now_add makes the field non-editable at init time, so editability
    # is re-enabled afterwards (per the original note below).
    registered.editable = True  # Ignored if set during field init
    ice_number = models.CharField(max_length=16, null=True, blank=True, verbose_name=_('ICE number'))
    b_driving_license = models.BooleanField(default=False, verbose_name=_('B driving license'), help_text=_('Mandatory for driving missions.'))
    signed_contract = models.BooleanField(default=False, verbose_name=_('signed contract'))
    attended_info_meeting = models.BooleanField(default=False, verbose_name=_('attended info meeting'))
    # Deposit ("pledge") tracking: when it was paid and when returned.
    pledge_payed = models.DateTimeField(null=True, blank=True, verbose_name=_('pledge payed'))
    pledge_returned = models.DateTimeField(null=True, blank=True, verbose_name=_('pledge returned'))

    class Meta:
        verbose_name = _('functionary')
        verbose_name_plural = _('functionaries')

    def __str__(self):
        # Guard against a missing related person instead of raising
        # RelatedObjectDoesNotExist from the one-to-one access.
        try:
            return self.person.full_name
        except models.ObjectDoesNotExist:
            return '<No person set>'
class WorkerDiscount(BaseDiscount):
    """Discount granted for working a specific number of shifts."""

    shifts = models.PositiveIntegerField(verbose_name=_('shifts'),
                                         help_text=_('Number of shifts to work to be eligible for this discount.'))

    class Meta:
        verbose_name = _('worker discount')
        verbose_name_plural = _('worker discounts')

    def eligible(self, person):
        # NOTE(review): exact equality — a person registered for *more*
        # shifts than required is not eligible. Confirm '>=' was not meant.
        return person.shift_registrations.count() == self.shifts
@python_2_unicode_compatible
class ShiftType(MPTTModel):
    """Hierarchical category of shifts (MPTT tree via the parent link)."""

    name = models.CharField(max_length=256, verbose_name=_('name'))
    parent = TreeForeignKey('self', related_name='children', null=True, blank=True, verbose_name=_('parent'))
    description = models.TextField(blank=True, null=True, verbose_name=_('description'))

    class Meta:
        verbose_name = _('shift type')
        verbose_name_plural = _('shift types')

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Location(MPTTModel):
    """Hierarchical place where shifts happen (MPTT tree via the parent link)."""

    name = models.CharField(max_length=256, verbose_name=_('name'))
    parent = TreeForeignKey('self', related_name='children', null=True, blank=True, verbose_name=_('parent'))

    class Meta:
        verbose_name = _('location')
        verbose_name_plural = _('locations')

    def __str__(self):
        return self.name
class ShiftQuerySet(models.QuerySet):
    """Staffing-level filters for shifts.

    Per-shift thresholds satisfy people_critical <= people_alarming <=
    people_max (enforced in Shift.save()).
    """

    def annotate_registrations_count(self):
        """Annotate each shift with its number of registrations."""
        return self.annotate(registrations_count=Count('registrations'))

    def public(self):
        """Shifts visible to and registerable by public users."""
        return self.filter(public=True)

    def critical(self):
        """Shifts with registrations_count <= people_critical."""
        return self.annotate_registrations_count().exclude(registrations_count__gt=F('people_critical')).distinct()

    def alarming(self):
        # Bug fix: the second condition used the non-existent
        # 'registrations__gt' lookup instead of the annotation name.
        # NOTE(review): exclude(a, b) removes rows matching BOTH conditions;
        # given critical <= alarming the pair can never both hold — verify
        # whether two chained .exclude() calls were intended.
        return self.annotate_registrations_count().exclude(registrations_count__lte=F('people_critical'), registrations_count__gt=F('people_alarming')).distinct()

    def ok(self):
        # Bug fix: same wrong field name ('registrations__gt') as alarming().
        return self.annotate_registrations_count().exclude(registrations_count__lte=F('people_alarming'), registrations_count__gt=F('people_max')).distinct()

    def overstaffed(self):
        """Shifts with more registrations than people_max."""
        return self.annotate_registrations_count().filter(registrations_count__gt=F('people_max')).distinct()

    def registerable(self):
        """Public shifts that still have room for more workers."""
        return self.public().annotate_registrations_count().filter(registrations_count__lt=F('people_max')).distinct()
@python_2_unicode_compatible
class Shift(models.Model):
    """A work shift of a given type with staffing thresholds.

    Invariant (enforced in save()): people_critical <= people_alarming <=
    people_max.
    """

    shift_type = TreeForeignKey('ShiftType', related_name='shifts', verbose_name=_('shift type'))
    start = models.DateTimeField(verbose_name=_('start'))
    end = models.DateTimeField(verbose_name=_('end'))
    location = models.ForeignKey('Location', related_name='shifts', null=True, blank=True, verbose_name=_('location'))
    responsible = models.ForeignKey('tickle.Person', related_name='shift_responsibilities', null=True, blank=True, verbose_name=_('responsible'))
    people_max = models.PositiveIntegerField(default=2, verbose_name=_('maximum number of workers'),
                                             help_text=_("The maximum number of workers on this shift. This shift's status will be reported as overstaffed if the number of workers are over this value."))
    people_alarming = models.PositiveIntegerField(default=1, verbose_name=_('alarming number of workers'),
                                                  help_text=_("The number of workers where the system will report the status as alarming."))
    people_critical = models.PositiveIntegerField(default=0, verbose_name=_('critical number of workers'),
                                                  help_text=_("The number of workers where the system will report the status as critical."))
    public = models.BooleanField(default=True, verbose_name=_('public'), help_text=_(
        "If unchecked, this shift won't be visible or available for registration by public users."))

    objects = ShiftQuerySet.as_manager()

    class Meta:
        verbose_name = _('shift')
        verbose_name_plural = _('shifts')

    def __str__(self):
        time_format = "%H:%M"
        date_format = " %A %d %B"
        # Same-day shifts render one date; multi-day shifts repeat it.
        if self.start.date() == self.end.date():
            return u'%s, %s–%s %s' % (self.shift_type.name, self.start.strftime(time_format), self.end.strftime(time_format), self.start.strftime(date_format))
        return u'%s, %s – %s' % (self.shift_type.name, self.start.strftime(time_format + date_format), self.end.strftime(time_format + date_format))

    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        # Validate the threshold ordering before persisting; raises
        # ValidationError keyed by the offending fields.
        if not self.people_critical <= self.people_alarming:
            raise ValidationError({'people_critical': _('Critical number must be less than or equal to alarming.'),
                                   'people_alarming': _('Critical number must be less than or equal to alarming.')})
        if not self.people_alarming <= self.people_max:
            raise ValidationError({'people_alarming': _('Alarming number must be less than or equal to maximum.'),
                                   'people_max': _('Alarming number must be less than or equal to maximum.')})
        return super(Shift, self).save(force_insert=force_insert, force_update=force_update, using=using, update_fields=update_fields)

    @property
    def status(self):
        """Staffing status string derived from the live registration count.

        Note: issues a COUNT query on every access.
        """
        people_count = self.registrations.count()
        if people_count <= self.people_critical:
            return 'critical'
        elif people_count <= self.people_alarming:
            return 'alarming'
        elif people_count > self.people_max:
            return 'overstaffed'
        else:
            return 'ok'
@python_2_unicode_compatible
class ShiftRegistration(models.Model):
    """A person signed up for a shift, with optional check-in/out times."""

    shift = models.ForeignKey('Shift', related_name='registrations', verbose_name=_('shift'))
    person = models.ForeignKey('tickle.Person', related_name='shift_registrations', verbose_name=_('person'))
    checked_in = models.DateTimeField(null=True, blank=True, verbose_name=_('checked in'))
    checked_out = models.DateTimeField(null=True, blank=True, verbose_name=_('checked out'))

    class Meta:
        verbose_name = _('shift registration')
        verbose_name_plural = _('shift registrations')

    def __str__(self):
        return u'%s: %s' % (self.person, self.shift)

    @property
    def start(self):
        # Convenience alias for the shift's start time.
        return self.shift.start
Fixed broken queryset methods
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.db.models import F, Q, Count
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
from django.core.exceptions import ValidationError
from mptt.models import MPTTModel, TreeForeignKey
from tickle.models import Person, PersonQuerySet, BaseDiscount
class WorkerQuerySet(PersonQuerySet):
    """Queryset helpers for selecting crew members among persons."""

    def workers(self):
        """Persons with at least one shift registration or a functionary record."""
        has_shift = Q(shift_registrations__isnull=False)
        is_functionary = Q(functionary__isnull=False)
        return self.filter(has_shift | is_functionary).distinct()

    def functionaries(self):
        """Persons with a functionary record."""
        return self.filter(functionary__isnull=False)
class Worker(Person):
    """
    Proxy class to tickle.Person so we can add some Fungus specific methods.
    """
    # Proxy model: no new DB table, only the custom queryset below.
    objects = WorkerQuerySet.as_manager()

    class Meta:
        proxy = True
        verbose_name = _('worker')
        verbose_name_plural = _('workers')
@python_2_unicode_compatible
class Functionary(models.Model):
    """Crew-member record attached one-to-one to a tickle.Person."""

    person = models.OneToOneField('tickle.Person', related_name='functionary', verbose_name=_('person'))
    registered = models.DateTimeField(auto_now_add=True, verbose_name=_('registration timestamp'))
    # auto_now_add makes the field non-editable at init time, so editability
    # is re-enabled afterwards (per the original note below).
    registered.editable = True  # Ignored if set during field init
    ice_number = models.CharField(max_length=16, null=True, blank=True, verbose_name=_('ICE number'))
    b_driving_license = models.BooleanField(default=False, verbose_name=_('B driving license'), help_text=_('Mandatory for driving missions.'))
    signed_contract = models.BooleanField(default=False, verbose_name=_('signed contract'))
    attended_info_meeting = models.BooleanField(default=False, verbose_name=_('attended info meeting'))
    # Deposit ("pledge") tracking: when it was paid and when returned.
    pledge_payed = models.DateTimeField(null=True, blank=True, verbose_name=_('pledge payed'))
    pledge_returned = models.DateTimeField(null=True, blank=True, verbose_name=_('pledge returned'))

    class Meta:
        verbose_name = _('functionary')
        verbose_name_plural = _('functionaries')

    def __str__(self):
        # Guard against a missing related person instead of raising
        # RelatedObjectDoesNotExist from the one-to-one access.
        try:
            return self.person.full_name
        except models.ObjectDoesNotExist:
            return '<No person set>'
class WorkerDiscount(BaseDiscount):
    """Discount granted for working a specific number of shifts."""

    shifts = models.PositiveIntegerField(verbose_name=_('shifts'),
                                         help_text=_('Number of shifts to work to be eligible for this discount.'))

    class Meta:
        verbose_name = _('worker discount')
        verbose_name_plural = _('worker discounts')

    def eligible(self, person):
        # NOTE(review): exact equality — a person registered for *more*
        # shifts than required is not eligible. Confirm '>=' was not meant.
        return person.shift_registrations.count() == self.shifts
@python_2_unicode_compatible
class ShiftType(MPTTModel):
    """Hierarchical category of shifts (MPTT tree via the parent link)."""

    name = models.CharField(max_length=256, verbose_name=_('name'))
    parent = TreeForeignKey('self', related_name='children', null=True, blank=True, verbose_name=_('parent'))
    description = models.TextField(blank=True, null=True, verbose_name=_('description'))

    class Meta:
        verbose_name = _('shift type')
        verbose_name_plural = _('shift types')

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Location(MPTTModel):
    """Hierarchical place where shifts happen (MPTT tree via the parent link)."""

    name = models.CharField(max_length=256, verbose_name=_('name'))
    parent = TreeForeignKey('self', related_name='children', null=True, blank=True, verbose_name=_('parent'))

    class Meta:
        verbose_name = _('location')
        verbose_name_plural = _('locations')

    def __str__(self):
        return self.name
class ShiftQuerySet(models.QuerySet):
    """Staffing-level filters for shifts.

    Per-shift thresholds satisfy people_critical <= people_alarming <=
    people_max (enforced in Shift.save()).
    """

    def annotate_registrations_count(self):
        # Adds the registrations_count annotation used by the filters below.
        return self.annotate(registrations_count=Count('registrations'))

    def public(self):
        # Shifts visible to and registerable by public users.
        return self.filter(public=True)

    def critical(self):
        # Shifts with registrations_count <= people_critical.
        return self.annotate_registrations_count().exclude(registrations_count__gt=F('people_critical')).distinct()

    def alarming(self):
        # NOTE(review): exclude(a, b) removes rows matching BOTH conditions;
        # given critical <= alarming the pair below can never both hold, so
        # nothing is excluded — verify whether two chained .exclude() calls
        # were intended.
        return self.annotate_registrations_count().exclude(registrations_count__lte=F('people_critical'), registrations_count__gt=F('people_alarming')).distinct()

    def ok(self):
        # NOTE(review): same AND-exclude concern as in alarming().
        return self.annotate_registrations_count().exclude(registrations_count__lte=F('people_alarming'), registrations_count__gt=F('people_max')).distinct()

    def overstaffed(self):
        # Shifts with more registrations than people_max.
        return self.annotate_registrations_count().filter(registrations_count__gt=F('people_max')).distinct()

    def registerable(self):
        # Public shifts that still have room for more workers.
        return self.public().annotate_registrations_count().filter(registrations_count__lt=F('people_max')).distinct()
@python_2_unicode_compatible
class Shift(models.Model):
    """A work shift of a given type with staffing thresholds.

    Invariant (enforced in save()): people_critical <= people_alarming <=
    people_max.
    """

    shift_type = TreeForeignKey('ShiftType', related_name='shifts', verbose_name=_('shift type'))
    start = models.DateTimeField(verbose_name=_('start'))
    end = models.DateTimeField(verbose_name=_('end'))
    location = models.ForeignKey('Location', related_name='shifts', null=True, blank=True, verbose_name=_('location'))
    responsible = models.ForeignKey('tickle.Person', related_name='shift_responsibilities', null=True, blank=True, verbose_name=_('responsible'))
    people_max = models.PositiveIntegerField(default=2, verbose_name=_('maximum number of workers'),
                                             help_text=_("The maximum number of workers on this shift. This shift's status will be reported as overstaffed if the number of workers are over this value."))
    people_alarming = models.PositiveIntegerField(default=1, verbose_name=_('alarming number of workers'),
                                                  help_text=_("The number of workers where the system will report the status as alarming."))
    people_critical = models.PositiveIntegerField(default=0, verbose_name=_('critical number of workers'),
                                                  help_text=_("The number of workers where the system will report the status as critical."))
    public = models.BooleanField(default=True, verbose_name=_('public'), help_text=_(
        "If unchecked, this shift won't be visible or available for registration by public users."))

    objects = ShiftQuerySet.as_manager()

    class Meta:
        verbose_name = _('shift')
        verbose_name_plural = _('shifts')

    def __str__(self):
        time_format = "%H:%M"
        date_format = " %A %d %B"
        # Same-day shifts render one date; multi-day shifts repeat it.
        if self.start.date() == self.end.date():
            return u'%s, %s–%s %s' % (self.shift_type.name, self.start.strftime(time_format), self.end.strftime(time_format), self.start.strftime(date_format))
        return u'%s, %s – %s' % (self.shift_type.name, self.start.strftime(time_format + date_format), self.end.strftime(time_format + date_format))

    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        # Validate the threshold ordering before persisting; raises
        # ValidationError keyed by the offending fields.
        if not self.people_critical <= self.people_alarming:
            raise ValidationError({'people_critical': _('Critical number must be less than or equal to alarming.'),
                                   'people_alarming': _('Critical number must be less than or equal to alarming.')})
        if not self.people_alarming <= self.people_max:
            raise ValidationError({'people_alarming': _('Alarming number must be less than or equal to maximum.'),
                                   'people_max': _('Alarming number must be less than or equal to maximum.')})
        return super(Shift, self).save(force_insert=force_insert, force_update=force_update, using=using, update_fields=update_fields)

    @property
    def status(self):
        """Staffing status string derived from the live registration count.

        Note: issues a COUNT query on every access.
        """
        people_count = self.registrations.count()
        if people_count <= self.people_critical:
            return 'critical'
        elif people_count <= self.people_alarming:
            return 'alarming'
        elif people_count > self.people_max:
            return 'overstaffed'
        else:
            return 'ok'
@python_2_unicode_compatible
class ShiftRegistration(models.Model):
    """A person signed up for a shift, with optional check-in/out times."""

    shift = models.ForeignKey('Shift', related_name='registrations', verbose_name=_('shift'))
    person = models.ForeignKey('tickle.Person', related_name='shift_registrations', verbose_name=_('person'))
    checked_in = models.DateTimeField(null=True, blank=True, verbose_name=_('checked in'))
    checked_out = models.DateTimeField(null=True, blank=True, verbose_name=_('checked out'))

    class Meta:
        verbose_name = _('shift registration')
        verbose_name_plural = _('shift registrations')

    def __str__(self):
        return u'%s: %s' % (self.person, self.shift)

    @property
    def start(self):
        # Convenience alias for the shift's start time.
        return self.shift.start
|
from functools import wraps
from funk.error import FunkyError
from funk.call import Call
from funk.call import IntegerCallCount
from funk.call import InfiniteCallCount
from funk.sequence import Sequence
from funk.util import function_call_str
__all__ = ['with_context', 'Context', 'expects', 'allows', 'set_attr', 'expects_call', 'allows_call']
class UnexpectedInvocationError(AssertionError):
    """Raised when a mock is invoked with arguments no expectation accepts.

    The message lists the attempted call plus a description of every
    expectation that failed to match (or a note that none were set).
    """

    def __init__(self, mock_name, args, kwargs, expectations):
        call_str = function_call_str(mock_name, args, kwargs)
        message_parts = ["Unexpected invocation: %s" % call_str]
        message_parts.append("\nThe following expectations on %s did not match:\n " % mock_name)
        if expectations:
            message_parts.append("\n ".join(expectations))
        else:
            message_parts.append("No expectations set.")
        super(UnexpectedInvocationError, self).__init__(''.join(message_parts))
class Mock(object):
    """A mock object whose attribute access resolves to configured expectations.

    Uses __getattribute__ (not __getattr__) so *every* attribute access is
    intercepted; internal state is therefore reached via
    object.__getattribute__ to avoid infinite recursion.
    """

    def __init__(self, base, name):
        # base: optional type the mock impersonates; name: display name
        # used in error messages.
        self._mocked_calls = MockedCalls(base, name)
        self._base = base

    def __getattribute__(self, name):
        my = lambda name: object.__getattribute__(self, name)
        mocked_calls = my('_mocked_calls')
        base = my('_base')
        # Route to mocked behaviour when the attribute has expectations, or
        # when the base type defines it (so an unconfigured call raises
        # UnexpectedInvocationError instead of AttributeError).
        if name in mocked_calls or (base is not None and hasattr(base, name)):
            return mocked_calls.for_method(name)
        return my(name)

    def __call__(self, *args, **kwargs):
        # Direct invocation of the mock itself (function-style mocks).
        return object.__getattribute__(self, "_mocked_calls").for_self()(*args, **kwargs)

    def _verify(self):
        # Called by Context.verify(): checks all expectations were met.
        object.__getattribute__(self, "_mocked_calls").verify()
class MockedCalls(object):
    """Registry of the expected/allowed calls configured on one mock."""

    def __init__(self, base, mock_name):
        self._base = base
        self._method_calls = {}
        self._function_calls = []
        self._mock_name = mock_name

    def add_method_call(self, method_name, call_count):
        """Register an expectation on a named method, validated against the base type."""
        base = self._base
        if base is not None:
            if not hasattr(base, method_name):
                raise AssertionError("Method '%s' is not defined on type object '%s'" % (method_name, base.__name__))
            if not callable(getattr(base, method_name)):
                raise AssertionError("Attribute '%s' is not callable on type object '%s'" % (method_name, base.__name__))
        call = Call("%s.%s" % (self._mock_name, method_name), call_count)
        self._method_calls.setdefault(method_name, []).append(call)
        return call

    def add_function_call(self, call_count):
        """Register an expectation on invoking the mock itself."""
        call = Call(self._mock_name, call_count)
        self._function_calls.append(call)
        return call

    def for_method(self, name):
        """Return a dispatcher over the expectations configured for *name*."""
        qualified_name = "%s.%s" % (self._mock_name, name)
        return MockedCallsForFunction(qualified_name, self._method_calls.get(name, []))

    def for_self(self):
        """Return a dispatcher over the direct-invocation expectations."""
        return MockedCallsForFunction(self._mock_name, self._function_calls)

    def __contains__(self, name):
        return name in self._method_calls

    def verify(self):
        """Assert that every configured expectation has been satisfied."""
        for calls in self._method_calls.values():
            for call in calls:
                self._verify_call(call)
        for call in self._function_calls:
            self._verify_call(call)

    def _verify_call(self, call):
        if not call.is_satisfied():
            raise AssertionError("Not all expectations were satisfied. Expected call: %s" % call)
class MockedCallsForFunction(object):
    """Dispatches an invocation to the first expectation that accepts it."""

    def __init__(self, name, calls):
        self._name = name
        self._calls = calls

    def __call__(self, *args, **kwargs):
        # Each rejected expectation appends its mismatch description, which
        # feeds the error message when nothing matches.
        mismatch_descriptions = []
        matching = next(
            (c for c in self._calls if c.accepts(args, kwargs, mismatch_descriptions)),
            None)
        if matching is not None:
            return matching(*args, **kwargs)
        raise UnexpectedInvocationError(self._name, args, kwargs, mismatch_descriptions)
def with_context(test_function, mock_factory=None):
    """Decorator injecting a fresh Context as the 'context' keyword argument.

    The context is verified after the test body returns, so unmet
    expectations fail the test.
    """
    @wraps(test_function)
    def wrapper(*args, **kwargs):
        if 'context' in kwargs:
            raise FunkyError("context has already been set")
        ctx = Context(mock_factory)
        kwargs['context'] = ctx
        test_function(*args, **kwargs)
        ctx.verify()
    return wrapper
class MethodArgumentsSetter(object):
    """Thin proxy over a call expectation.

    Invoking the proxy records the expected arguments via with_args();
    any other attribute access falls through to the wrapped call object.
    """

    def __init__(self, call):
        self._wrapped_call = call

    def __call__(self, *args, **kwargs):
        return self._wrapped_call.with_args(*args, **kwargs)

    def __getattr__(self, name):
        # Only reached when normal lookup fails, so the instance attribute
        # itself never recurses through here.
        return getattr(self._wrapped_call, name)
class ExpectationCreator(object):
    """Turns attribute access into an expectation: expects(mock).method_name."""

    def __init__(self, expectation_setter):
        # expectation_setter: callable taking a method name and returning
        # the newly registered call expectation.
        self._expectation_setter = expectation_setter

    def __getattribute__(self, name):
        # Intercepts *every* attribute access, so our own state must be
        # fetched via object.__getattribute__ to avoid recursion.
        my = lambda name: object.__getattribute__(self, name)
        return MethodArgumentsSetter(my('_expectation_setter')(name))
def expects(mock, method_name=None):
    """Expect exactly one call to *method_name* on *mock*.

    Without a method name, returns a creator so the method can be chosen
    via attribute access: expects(mock).method_name(...).
    """
    if method_name is not None:
        return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, IntegerCallCount(1))
    return ExpectationCreator(lambda method_name: expects(mock, method_name))
def allows(mock, method_name=None):
    """Allow any number of calls to *method_name* on *mock*.

    Without a method name, returns a creator so the method can be chosen
    via attribute access: allows(mock).method_name(...).
    """
    if method_name is not None:
        return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, InfiniteCallCount())
    return ExpectationCreator(lambda method_name: allows(mock, method_name))
def set_attr(mock, **kwargs):
    """Set plain (non-mocked) attributes on *mock*, one per keyword."""
    for attr_name, value in kwargs.items():
        setattr(mock, attr_name, value)
def expects_call(mock):
    """Expect exactly one direct invocation of *mock* itself."""
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(IntegerCallCount(1)))
def allows_call(mock):
    """Allow any number of direct invocations of *mock* itself."""
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(InfiniteCallCount()))
class Context(object):
    """Tracks every mock created during a test so they can all be verified."""

    def __init__(self, mock_factory=None):
        self._mock_factory = Mock if mock_factory is None else mock_factory
        self._mocks = []

    def mock(self, base=None, name=None):
        """Create (and remember) a new mock, optionally typed against *base*."""
        new_mock = self._mock_factory(base, self._generate_name(name, base))
        self._mocks.append(new_mock)
        return new_mock

    def verify(self):
        """Verify every mock created through this context."""
        for mock in self._mocks:
            mock._verify()

    def sequence(self):
        """Create a fresh ordering constraint for expectations."""
        return Sequence()

    def _generate_name(self, name, base):
        # An explicit name wins; otherwise derive snake_case from the base
        # class name (FooBar -> foo_bar), falling back to "unnamed".
        if name is not None:
            return name
        if base is None:
            return "unnamed"
        class_name = base.__name__
        return class_name[0].lower() + ''.join(
            '_' + character.lower() if character.isupper() else character
            for character in class_name[1:])
Fix indentation of unmatched expectations
from functools import wraps
from funk.error import FunkyError
from funk.call import Call
from funk.call import IntegerCallCount
from funk.call import InfiniteCallCount
from funk.sequence import Sequence
from funk.util import function_call_str
__all__ = ['with_context', 'Context', 'expects', 'allows', 'set_attr', 'expects_call', 'allows_call']
class UnexpectedInvocationError(AssertionError):
    """Raised when a mock is invoked with arguments no expectation accepts.

    The message lists the attempted call plus a description of every
    expectation that failed to match (or a note that none were set).
    """

    def __init__(self, mock_name, args, kwargs, expectations):
        call_str = function_call_str(mock_name, args, kwargs)
        message_parts = ["Unexpected invocation: %s" % call_str]
        message_parts.append("\nThe following expectations on %s did not match:\n " % mock_name)
        if expectations:
            message_parts.append("\n ".join(expectations))
        else:
            message_parts.append("No expectations set.")
        super(UnexpectedInvocationError, self).__init__(''.join(message_parts))
class Mock(object):
    """A mock object whose attribute access resolves to configured expectations.

    Uses __getattribute__ (not __getattr__) so *every* attribute access is
    intercepted; internal state is therefore reached via
    object.__getattribute__ to avoid infinite recursion.
    """

    def __init__(self, base, name):
        # base: optional type the mock impersonates; name: display name
        # used in error messages.
        self._mocked_calls = MockedCalls(base, name)
        self._base = base

    def __getattribute__(self, name):
        my = lambda name: object.__getattribute__(self, name)
        mocked_calls = my('_mocked_calls')
        base = my('_base')
        # Route to mocked behaviour when the attribute has expectations, or
        # when the base type defines it (so an unconfigured call raises
        # UnexpectedInvocationError instead of AttributeError).
        if name in mocked_calls or (base is not None and hasattr(base, name)):
            return mocked_calls.for_method(name)
        return my(name)

    def __call__(self, *args, **kwargs):
        # Direct invocation of the mock itself (function-style mocks).
        return object.__getattribute__(self, "_mocked_calls").for_self()(*args, **kwargs)

    def _verify(self):
        # Called by Context.verify(): checks all expectations were met.
        object.__getattribute__(self, "_mocked_calls").verify()
class MockedCalls(object):
    """Registry of the expected/allowed calls configured on one mock."""

    def __init__(self, base, mock_name):
        self._base = base
        self._method_calls = {}
        self._function_calls = []
        self._mock_name = mock_name

    def add_method_call(self, method_name, call_count):
        """Register an expectation on a named method, validated against the base type."""
        base = self._base
        if base is not None:
            if not hasattr(base, method_name):
                raise AssertionError("Method '%s' is not defined on type object '%s'" % (method_name, base.__name__))
            if not callable(getattr(base, method_name)):
                raise AssertionError("Attribute '%s' is not callable on type object '%s'" % (method_name, base.__name__))
        call = Call("%s.%s" % (self._mock_name, method_name), call_count)
        self._method_calls.setdefault(method_name, []).append(call)
        return call

    def add_function_call(self, call_count):
        """Register an expectation on invoking the mock itself."""
        call = Call(self._mock_name, call_count)
        self._function_calls.append(call)
        return call

    def for_method(self, name):
        """Return a dispatcher over the expectations configured for *name*."""
        qualified_name = "%s.%s" % (self._mock_name, name)
        return MockedCallsForFunction(qualified_name, self._method_calls.get(name, []))

    def for_self(self):
        """Return a dispatcher over the direct-invocation expectations."""
        return MockedCallsForFunction(self._mock_name, self._function_calls)

    def __contains__(self, name):
        return name in self._method_calls

    def verify(self):
        """Assert that every configured expectation has been satisfied."""
        for calls in self._method_calls.values():
            for call in calls:
                self._verify_call(call)
        for call in self._function_calls:
            self._verify_call(call)

    def _verify_call(self, call):
        if not call.is_satisfied():
            raise AssertionError("Not all expectations were satisfied. Expected call: %s" % call)
class MockedCallsForFunction(object):
    """Dispatches an invocation to the first expectation that accepts it."""

    def __init__(self, name, calls):
        self._name = name
        self._calls = calls

    def __call__(self, *args, **kwargs):
        # Each rejected expectation appends its mismatch description, which
        # feeds the error message when nothing matches.
        mismatch_descriptions = []
        matching = next(
            (c for c in self._calls if c.accepts(args, kwargs, mismatch_descriptions)),
            None)
        if matching is not None:
            return matching(*args, **kwargs)
        raise UnexpectedInvocationError(self._name, args, kwargs, mismatch_descriptions)
def with_context(test_function, mock_factory=None):
    """Decorator injecting a fresh Context as the 'context' keyword argument.

    The context is verified after the test body returns, so unmet
    expectations fail the test.
    """
    @wraps(test_function)
    def wrapper(*args, **kwargs):
        if 'context' in kwargs:
            raise FunkyError("context has already been set")
        ctx = Context(mock_factory)
        kwargs['context'] = ctx
        test_function(*args, **kwargs)
        ctx.verify()
    return wrapper
class MethodArgumentsSetter(object):
    """Thin proxy over a call expectation.

    Invoking the proxy records the expected arguments via with_args();
    any other attribute access falls through to the wrapped call object.
    """

    def __init__(self, call):
        self._wrapped_call = call

    def __call__(self, *args, **kwargs):
        return self._wrapped_call.with_args(*args, **kwargs)

    def __getattr__(self, name):
        # Only reached when normal lookup fails, so the instance attribute
        # itself never recurses through here.
        return getattr(self._wrapped_call, name)
class ExpectationCreator(object):
    """Turns attribute access into an expectation: expects(mock).method_name."""

    def __init__(self, expectation_setter):
        # expectation_setter: callable taking a method name and returning
        # the newly registered call expectation.
        self._expectation_setter = expectation_setter

    def __getattribute__(self, name):
        # Intercepts *every* attribute access, so our own state must be
        # fetched via object.__getattribute__ to avoid recursion.
        my = lambda name: object.__getattribute__(self, name)
        return MethodArgumentsSetter(my('_expectation_setter')(name))
def expects(mock, method_name=None):
    """Expect exactly one call to *method_name* on *mock*.

    Without a method name, returns a creator so the method can be chosen
    via attribute access: expects(mock).method_name(...).
    """
    if method_name is not None:
        return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, IntegerCallCount(1))
    return ExpectationCreator(lambda method_name: expects(mock, method_name))
def allows(mock, method_name=None):
    """Allow any number of calls to *method_name* on *mock*.

    Without a method name, returns a creator so the method can be chosen
    via attribute access: allows(mock).method_name(...).
    """
    if method_name is not None:
        return object.__getattribute__(mock, "_mocked_calls").add_method_call(method_name, InfiniteCallCount())
    return ExpectationCreator(lambda method_name: allows(mock, method_name))
def set_attr(mock, **kwargs):
    """Set plain (non-mocked) attributes on *mock*, one per keyword."""
    for attr_name, value in kwargs.items():
        setattr(mock, attr_name, value)
def expects_call(mock):
    """Expect exactly one direct invocation of *mock* itself."""
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(IntegerCallCount(1)))
def allows_call(mock):
    """Allow any number of direct invocations of *mock* itself."""
    return MethodArgumentsSetter(object.__getattribute__(mock, "_mocked_calls").add_function_call(InfiniteCallCount()))
class Context(object):
    """Tracks every mock created during a test so they can all be verified."""

    def __init__(self, mock_factory=None):
        self._mock_factory = Mock if mock_factory is None else mock_factory
        self._mocks = []

    def mock(self, base=None, name=None):
        """Create (and remember) a new mock, optionally typed against *base*."""
        new_mock = self._mock_factory(base, self._generate_name(name, base))
        self._mocks.append(new_mock)
        return new_mock

    def verify(self):
        """Verify every mock created through this context."""
        for mock in self._mocks:
            mock._verify()

    def sequence(self):
        """Create a fresh ordering constraint for expectations."""
        return Sequence()

    def _generate_name(self, name, base):
        # An explicit name wins; otherwise derive snake_case from the base
        # class name (FooBar -> foo_bar), falling back to "unnamed".
        if name is not None:
            return name
        if base is None:
            return "unnamed"
        class_name = base.__name__
        return class_name[0].lower() + ''.join(
            '_' + character.lower() if character.isupper() else character
            for character in class_name[1:])
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
# Define the common dependencies that contain all the actual
# Chromium functionality. This list gets pulled in below by
# the link of the actual chrome (or chromium) executable on
# Linux or Mac, and into chrome.dll on Windows.
'chromium_dependencies': [
'common',
'browser',
'debugger',
'renderer',
'utility',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
'browser_tests_sources': [
'browser/child_process_security_policy_browser_test.cc',
'browser/renderer_host/web_cache_manager_browser_test.cc',
'browser/ssl/ssl_browser_tests.cc',
],
'browser_tests_sources_win_specific': [
'browser/extensions/extension_shelf_model_unittest.cc',
'browser/extensions/extension_browsertest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/test_extension_loader.h',
'browser/views/find_bar_win_browsertest.cc',
# TODO(jcampan): once the task manager works on Mac, move this test to the
# non win specific section.
'browser/task_manager_browsertest.cc',
],
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|views|x)(_unittest)?\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)(_unittest)?\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)(_unittest)?\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(views|win)(_unittest)?\\.cc$'],
['include', '/win/'],
['include', '/(views|win)_[^/]*\\.cc$'],
]}],
['OS=="linux" and toolkit_views==1', {'sources/': [
['include', '_views\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'msvs_guid': 'B95AB527-F7DB-41E9-AD91-EB51EE0F56BE',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
'branded_env': 'CHROMIUM_BUILD=google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
'branded_env': 'CHROMIUM_BUILD=chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(grit_out_dir)/grit/<(RULE_INPUT_ROOT).h',
'<(grit_out_dir)/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(grit_out_dir)',
'-D', '<(chrome_build)',
'-E', '<(branded_env)',
],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'msvs_guid': 'D9DDAF60-663F-49CC-90DC-3D08CC3D1B28',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(grit_out_dir)/grit/<(RULE_INPUT_ROOT).h',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_da.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_en-US.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_he.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_zh-TW.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(grit_out_dir)',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'msvs_guid' : 'A158FB0A-25E4-6523-6B5A-4BB294B73D31',
'variables': {
'grit_path': '../tools/grit/grit.py',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': [
'python', '<(grit_path)',
'-i', '<(input_path)', 'build',
'-o', '<(grit_out_dir)',
'-D', '<(chrome_build)'
],
'conditions': [
['linux2==1', {
'action': ['-D', 'linux2'],
}],
],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
'target_name': 'common',
'type': '<(library)',
'msvs_guid': '899F1280-3441-4D1F-BA04-CCD6208D9146',
'dependencies': [
'chrome_resources',
'chrome_strings',
'theme_resources',
'../app/app.gyp:app_base',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/extension.cc',
'common/extensions/extension.h',
'common/extensions/extension_error_reporter.cc',
'common/extensions/extension_error_reporter.h',
'common/extensions/extension_error_utils.cc',
'common/extensions/extension_error_utils.h',
'common/extensions/extension_unpacker.cc',
'common/extensions/extension_unpacker.h',
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/utils.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/automation_constants.cc',
'common/automation_constants.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/histogram_synchronizer.cc',
'common/histogram_synchronizer.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/native_window_notification_source.h',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/renderer_preferences.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/worker_messages.h',
'common/worker_messages_internal.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'common/zip.cc', # Requires zlib directly.
'common/zip.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'../app/app.gyp:app_base',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'msvs_guid': '5BF908A7-68FB-4A4B-99E3-8C749F1FE4EA',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../app/app.gyp:app_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/extension_automation_constants.h',
'browser/automation/extension_automation_constants.cc',
'browser/automation/automation_extension_function.h',
'browser/automation/automation_extension_function.cc',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/extension_port_container.cc',
'browser/automation/extension_port_container.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_views.cc',
'browser/back_forward_menu_model_views.h',
'browser/blocked_popup_container.cc',
'browser/blocked_popup_container.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_views.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_index.cc',
'browser/bookmarks/bookmark_index.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_manager.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_theme_provider_gtk.cc',
'browser/browser_theme_provider.cc',
'browser/browser_theme_provider.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/child_process_security_policy.cc',
'browser/child_process_security_policy.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/about_window_controller.h',
'browser/cocoa/about_window_controller.mm',
'browser/cocoa/background_gradient_view.h',
'browser/cocoa/background_gradient_view.mm',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_bridge.h',
'browser/cocoa/bookmark_bar_bridge.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_bar_view.h',
'browser/cocoa/bookmark_bar_view.mm',
'browser/cocoa/bookmark_button_cell.h',
'browser/cocoa/bookmark_button_cell.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/clear_browsing_data_controller.h',
'browser/cocoa/clear_browsing_data_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/cocoa_utils.h',
'browser/cocoa/cocoa_utils.mm',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/custom_home_pages_model.h',
'browser/cocoa/custom_home_pages_model.mm',
'browser/cocoa/encoding_menu_controller_delegate_mac.h',
'browser/cocoa/encoding_menu_controller_delegate_mac.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/first_run_dialog.h',
'browser/cocoa/first_run_dialog.mm',
'browser/cocoa/gradient_button_cell.h',
'browser/cocoa/gradient_button_cell.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.mm',
'browser/cocoa/location_bar_cell.h',
'browser/cocoa/location_bar_cell.mm',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/menu_localizer.h',
'browser/cocoa/menu_localizer.mm',
'browser/cocoa/page_info_window_controller.h',
'browser/cocoa/page_info_window_controller.mm',
'browser/cocoa/page_info_window_mac.h',
'browser/cocoa/page_info_window_mac.mm',
'browser/cocoa/preferences_localizer.h',
'browser/cocoa/preferences_localizer.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/search_engine_list_model.h',
'browser/cocoa/search_engine_list_model.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/throbber_view.h',
'browser/cocoa/throbber_view.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/dock_info_gtk.cc',
'browser/dock_info_win.cc',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_theme_source.cc',
'browser/dom_ui/dom_ui_theme_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_dom_handler.cc',
'browser/dom_ui/downloads_dom_handler.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_started_animation.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/encoding_menu_controller.cc',
'browser/encoding_menu_controller.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_bookmarks_module_constants.cc',
'browser/extensions/extension_bookmarks_module_constants.h',
'browser/extensions/extension_creator.cc',
'browser/extensions/extension_creator.h',
'browser/extensions/extension_event_names.cc',
'browser/extensions/extension_event_names.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module_constants.cc',
'browser/extensions/extension_page_actions_module_constants.h',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_shelf_model.cc',
'browser/extensions/extension_shelf_model.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_tabs_module_constants.cc',
'browser/extensions/extension_tabs_module_constants.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/external_extension_provider.h',
'browser/extensions/external_registry_extension_provider_win.cc',
'browser/extensions/external_registry_extension_provider_win.h',
'browser/extensions/external_pref_extension_provider.cc',
'browser/extensions/external_pref_extension_provider.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/first_run_mac.mm',
'browser/first_run_win.cc',
'browser/first_run_gtk.cc',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_update_settings_linux.cc',
'browser/google_update_settings_mac.mm',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_button_gtk.cc',
'browser/gtk/back_forward_button_gtk.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/blocked_popup_container_view_gtk.cc',
'browser/gtk/blocked_popup_container_view_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_manager_gtk.cc',
'browser/gtk/bookmark_manager_gtk.h',
'browser/gtk/bookmark_menu_controller_gtk.cc',
'browser/gtk/bookmark_menu_controller_gtk.h',
'browser/gtk/bookmark_utils_gtk.cc',
'browser/gtk/bookmark_utils_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_titlebar.cc',
'browser/gtk/browser_titlebar.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/clear_browsing_data_dialog_gtk.cc',
'browser/gtk/clear_browsing_data_dialog_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/download_started_animation_gtk.cc',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/gtk_chrome_link_button.cc',
'browser/gtk/gtk_chrome_link_button.h',
'browser/gtk/gtk_floating_container.cc',
'browser/gtk/gtk_floating_container.h',
'browser/gtk/hung_renderer_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.h',
'browser/gtk/import_lock_dialog_gtk.cc',
'browser/gtk/import_lock_dialog_gtk.h',
'browser/gtk/import_progress_dialog_gtk.cc',
'browser/gtk/import_progress_dialog_gtk.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/focus_store_gtk.cc',
'browser/gtk/focus_store_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/options/general_page_gtk.cc',
'browser/gtk/options/general_page_gtk.h',
'browser/gtk/options/options_layout_gtk.cc',
'browser/gtk/options/options_layout_gtk.h',
'browser/gtk/options/options_window_gtk.cc',
'browser/gtk/options/url_picker_dialog_gtk.cc',
'browser/gtk/options/url_picker_dialog_gtk.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/dragged_tab_gtk.cc',
'browser/gtk/tabs/dragged_tab_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.h',
'browser/icon_loader.cc',
'browser/icon_loader_linux.cc',
'browser/icon_loader_mac.mm',
'browser/icon_loader_win.cc',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/icon_manager_linux.cc',
'browser/icon_manager_mac.mm',
'browser/icon_manager_win.cc',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/keychain_mac.cc',
'browser/keychain_mac.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/login_prompt_win.cc',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_page_base.cc',
'browser/options_page_base.h',
'browser/options_window.h',
'browser/page_info_window.cc',
'browser/page_info_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/login_database_mac.cc',
'browser/password_manager/login_database_mac.h',
'browser/password_manager/login_database.cc',
'browser/password_manager/login_database.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/password_manager/password_store.cc',
'browser/password_manager/password_store.h',
'browser/password_manager/password_store_default.cc',
'browser/password_manager/password_store_default.h',
# Temporarily disabled while we figure some stuff out.
# http://code.google.com/p/chromium/issues/detail?id=12351
# 'browser/password_manager/password_store_gnome.h',
# 'browser/password_manager/password_store_gnome.cc',
# 'browser/password_manager/password_store_kwallet.h',
# 'browser/password_manager/password_store_kwallet.cc',
'browser/password_manager/password_store_mac_internal.h',
'browser/password_manager/password_store_mac.h',
'browser/password_manager/password_store_mac.cc',
'browser/password_manager/password_store_win.h',
'browser/password_manager/password_store_win.cc',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_manager.cc',
'browser/renderer_host/backing_store_manager.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_sandbox_host_linux.h',
'browser/renderer_host/render_sandbox_host_linux.cc',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/edit_keyword_controller_base.cc',
'browser/search_engines/edit_keyword_controller_base.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_cert_error_handler.cc',
'browser/ssl/ssl_cert_error_handler.h',
'browser/ssl/ssl_error_handler.cc',
'browser/ssl/ssl_error_handler.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_mixed_content_handler.cc',
'browser/ssl/ssl_mixed_content_handler.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/ssl/ssl_policy_backend.cc',
'browser/ssl/ssl_policy_backend.h',
'browser/ssl/ssl_request_info.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/thumbnail_generator.cc',
'browser/tab_contents/thumbnail_generator.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_linux.cc',
'browser/task_manager_win.cc',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/thumbnail_store.cc',
'browser/thumbnail_store.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/utility_process_host.cc',
'browser/utility_process_host.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container_view_win.cc',
'browser/views/blocked_popup_container_view_win.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_menu_controller_views.cc',
'browser/views/bookmark_menu_controller_views.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/browser_bubble.cc',
'browser/views/browser_bubble.h',
'browser/views/browser_bubble_gtk.cc',
'browser/views/browser_bubble_win.cc',
'browser/views/browser_dialogs.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/chrome_views_delegate.cc',
'browser/views/chrome_views_delegate.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dialog_stubs_gtk.cc',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation_win.cc',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_frame_gtk.cc',
'browser/views/frame/browser_frame_gtk.h',
'browser/views/frame/browser_frame_win.cc',
'browser/views/frame/browser_frame_win.h',
'browser/views/frame/browser_non_client_frame_view.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window_win.cc',
'browser/views/page_info_window_win.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tab_contents/tab_contents_container.cc',
'browser/views/tab_contents/tab_contents_container.h',
'browser/views/tab_contents/native_tab_contents_container.h',
'browser/views/tab_contents/native_tab_contents_container_gtk.cc',
'browser/views/tab_contents/native_tab_contents_container_gtk.h',
'browser/views/tab_contents/native_tab_contents_container_win.cc',
'browser/views/tab_contents/native_tab_contents_container_win.h',
'browser/views/tab_contents/render_view_context_menu_win.cc',
'browser/views/tab_contents/render_view_context_menu_win.h',
'browser/views/tab_contents/render_view_context_menu_external_win.cc',
'browser/views/tab_contents/render_view_context_menu_external_win.h',
'browser/views/tab_contents/tab_contents_view_gtk.cc',
'browser/views/tab_contents/tab_contents_view_gtk.h',
'browser/views/tab_contents/tab_contents_view_win.cc',
'browser/views/tab_contents/tab_contents_view_win.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/grid.cc',
'browser/views/tabs/grid.h',
'browser/views/tabs/native_view_photobooth.h',
'browser/views/tabs/native_view_photobooth_gtk.cc',
'browser/views/tabs/native_view_photobooth_gtk.h',
'browser/views/tabs/native_view_photobooth_win.cc',
'browser/views/tabs/native_view_photobooth_win.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_overview_cell.cc',
'browser/views/tabs/tab_overview_cell.h',
'browser/views/tabs/tab_overview_container.cc',
'browser/views/tabs/tab_overview_container.h',
'browser/views/tabs/tab_overview_controller.cc',
'browser/views/tabs/tab_overview_controller.h',
'browser/views/tabs/tab_overview_drag_controller.cc',
'browser/views/tabs/tab_overview_drag_controller.h',
'browser/views/tabs/tab_overview_grid.cc',
'browser/views/tabs/tab_overview_grid.h',
'browser/views/tabs/tab_overview_message_listener.cc',
'browser/views/tabs/tab_overview_message_listener.h',
'browser/views/tabs/tab_overview_types.cc',
'browser/views/tabs/tab_overview_types.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/window_sizer_mac.mm',
'browser/window_sizer_linux.cc',
'browser/window_sizer_win.cc',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'browser/zygote_host_linux.cc',
'browser/zygote_main_linux.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(grit_out_dir)/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
# Temporarily disabled while we figure some stuff out.
# http://code.google.com/p/chromium/issues/detail?id=12351
# '../build/linux/system.gyp:dbus-glib',
# '../build/linux/system.gyp:gnome-keyring',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
# Windows-specific files.
'browser/password_manager/password_store_win.cc',
'browser/password_manager/password_store_win.h',
],
'conditions': [
['linux_breakpad==1', {
'sources': [
'browser/renderer_host/render_crash_handler_host_linux.cc',
'app/breakpad_linux.cc',
'app/breakpad_linux.h',
],
'dependencies': [
'../breakpad/breakpad.gyp:breakpad_client',
],
'include_dirs': [
# breakpad_linux.cc wants file_version_info_linux.h
'<(SHARED_INTERMEDIATE_DIR)',
],
}, {
'sources': [
'browser/renderer_host/render_crash_handler_host_linux_stub.cc',
'app/breakpad_linux_stub.cc',
'app/breakpad_linux.h',
],
}],
],
}],
['OS=="linux" and toolkit_views==0', {
'sources!': [
'browser/bookmarks/bookmark_drop_info.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/dock_info.cc',
'browser/password_manager/password_store_gnome.h',
'browser/password_manager/password_store_gnome.cc',
'browser/password_manager/password_store_kwallet.h',
'browser/password_manager/password_store_kwallet.cc',
'browser/password_manager/password_store_win.cc',
'browser/password_manager/password_store_win.h',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
'../third_party/GTM/AppKit/GTMUILocalizer.h',
'../third_party/GTM/AppKit/GTMUILocalizer.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSScreen+Utils.h',
'../third_party/mozilla/include/NSScreen+Utils.m',
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
'../third_party/mozilla/include/ToolTip.h',
'../third_party/mozilla/include/ToolTip.mm',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/SecurityInterface.framework',
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
],
},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/installer.gyp:installer_util',
'../printing/printing.gyp:printing',
'../third_party/cld/cld.gyp:cld',
'../views/views.gyp:views',
'../gears/gears.gyp:gears',
],
'export_dependent_settings': [
'../views/views.gyp:views',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/history/history_publisher_none.cc',
'browser/password_manager/password_store_gnome.h',
'browser/password_manager/password_store_gnome.cc',
'browser/password_manager/password_store_kwallet.h',
'browser/password_manager/password_store_kwallet.cc',
'browser/views/tabs/grid.cc',
'browser/views/tabs/grid.h',
'browser/views/tabs/tab_overview_cell.cc',
'browser/views/tabs/tab_overview_cell.h',
'browser/views/tabs/tab_overview_container.cc',
'browser/views/tabs/tab_overview_container.h',
'browser/views/tabs/tab_overview_controller.cc',
'browser/views/tabs/tab_overview_controller.h',
'browser/views/tabs/tab_overview_drag_controller.cc',
'browser/views/tabs/tab_overview_drag_controller.h',
'browser/views/tabs/tab_overview_grid.cc',
'browser/views/tabs/tab_overview_grid.h',
'browser/views/tabs/tab_overview_message_listener.cc',
'browser/views/tabs/tab_overview_message_listener.h',
'browser/views/tabs/tab_overview_types.cc',
'browser/views/tabs/tab_overview_types.h',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
        }, { # else: OS != "win"
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'conditions': [
['toolkit_views==1',{
'sources/': [
['include', '^browser/dock_info_gtk.cc'],
['include', '^browser/dock_info.cc'],
['include', '^browser/dock_info.h'],
['include', '^browser/extensions/'],
['include', '^browser/views/bookmark_bar_view.cc'],
['include', '^browser/views/bookmark_bar_view.h'],
['include', '^browser/views/bookmark_context_menu.cc'],
['include', '^browser/views/bookmark_context_menu.h'],
['include', '^browser/views/bookmark_menu_button.cc'],
['include', '^browser/views/bookmark_menu_button.h'],
['include', '^browser/views/bookmark_menu_controller_views.cc'],
['include', '^browser/views/browser_bubble_gtk.cc'],
['include', '^browser/views/browser_bubble.cc'],
['include', '^browser/views/browser_bubble.h'],
['include', '^browser/views/chrome_views_delegate.cc'],
['include', '^browser/views/dialog_stubs_gtk.cc'],
['include', '^browser/views/download_item_view.cc'],
['include', '^browser/views/download_item_view.h'],
['include', '^browser/views/download_shelf_view.cc'],
['include', '^browser/views/download_shelf_view.h'],
['include', '^browser/views/dragged_tab_controller.cc'],
['include', '^browser/views/dragged_tab_controller.h'],
['include', '^browser/views/event_utils.cc'],
['include', '^browser/views/event_utils.h'],
['include', '^browser/views/find_bar_view.cc'],
['include', '^browser/views/find_bar_view.h'],
['include', '^browser/views/find_bar_win.cc'],
['include', '^browser/views/find_bar_win.h'],
['include', '^browser/views/go_button.cc'],
['include', '^browser/views/go_button.h'],
['include', '^browser/views/toolbar_star_toggle.h'],
['include', '^browser/views/toolbar_star_toggle.cc'],
['include', '^browser/views/frame/browser_view.cc'],
['include', '^browser/views/frame/browser_view.h'],
['include', '^browser/views/frame/browser_frame_gtk.cc'],
['include', '^browser/views/frame/browser_frame_gtk.h'],
['include', '^browser/views/frame/browser_root_view.cc'],
['include', '^browser/views/frame/browser_root_view.h'],
['include', '^browser/views/frame/opaque_browser_frame_view.cc'],
['include', '^browser/views/frame/opaque_browser_frame_view.h'],
['include', '^browser/views/infobars/*'],
['include', '^browser/views/info_bubble.cc'],
['include', '^browser/views/info_bubble.h'],
['include', '^browser/views/location_bar_view.cc'],
['include', '^browser/views/location_bar_view.h'],
['include', '^browser/views/status_bubble_views.cc'],
['include', '^browser/views/status_bubble_views.h'],
['include', '^browser/views/tab_contents/native_tab_contents_container_gtk.cc'],
['include', '^browser/views/tab_contents/native_tab_contents_container_gtk.h'],
['include', '^browser/views/tab_contents/render_view_context_menu_win.cc'],
['include', '^browser/views/tab_contents/render_view_context_menu_win.h'],
['include', '^browser/views/tab_contents/tab_contents_container.cc'],
['include', '^browser/views/tab_contents/tab_contents_container.h'],
['include', '^browser/views/tab_contents/tab_contents_view_gtk.cc'],
['include', '^browser/views/tab_contents/tab_contents_view_gtk.h'],
['include', '^browser/views/tab_icon_view.cc'],
['include', '^browser/views/tab_icon_view.h'],
['include', '^browser/views/tabs/dragged_tab_controller.cc'],
['include', '^browser/views/tabs/dragged_tab_controller.h'],
['include', '^browser/views/tabs/dragged_tab_view.cc'],
['include', '^browser/views/tabs/dragged_tab_view.h'],
['include', '^browser/views/tabs/native_view_photobooth.h'],
['include', '^browser/views/tabs/native_view_photobooth_gtk.cc'],
['include', '^browser/views/tabs/native_view_photobooth_gtk.h'],
['include', '^browser/views/tabs/tab.cc'],
['include', '^browser/views/tabs/tab.h'],
['include', '^browser/views/tabs/tab_renderer.cc'],
['include', '^browser/views/tabs/tab_renderer.h'],
['include', '^browser/views/tabs/tab_strip.cc'],
['include', '^browser/views/tabs/tab_strip.h'],
['include', '^browser/views/toolbar_view.cc'],
['include', '^browser/views/toolbar_view.h'],
['include', '^browser/window_sizer.cc'],
['include', '^browser/window_sizer.h'],
# Exclude all of browser/gtk, then include the things we want.
['exclude', '^browser/gtk'],
['include', '^browser/gtk/autocomplete_edit_gtk.cc'],
['include', '^browser/gtk/autocomplete_edit_gtk.h'],
['include', '^browser/gtk/dialogs_gtk.cc'],
['include', '^browser/gtk/dialogs_gtk.h'],
['include', '^browser/gtk/download_started_animation_gtk.cc'],
['include', '^browser/gtk/download_started_animation_gtk.h'],
['include', '^browser/gtk/focus_store_gtk.cc'],
['include', '^browser/gtk/focus_store_gtk.h'],
['include', '^browser/gtk/hung_renderer_dialog_gtk.cc'],
['include', '^browser/gtk/hung_renderer_dialog_gtk.h'],
['include', '^browser/gtk/options'],
['include', '^browser/gtk/menu_gtk.cc'],
['include', '^browser/gtk/menu_gtk.h'],
['include', '^browser/gtk/sad_tab_gtk.cc'],
['include', '^browser/gtk/sad_tab_gtk.h'],
# More GTK stuff to exclude outside of the browser/gtk directory
['exclude', '^browser/bookmarks/bookmark_context_menu_gtk.cc'],
# Other excluded stuff.
['exclude', '^browser/extensions/external_registry_extension_provider_win.cc'],
['exclude', '^browser/tab_contents/tab_contents_view_gtk.cc'],
['exclude', '^browser/tab_contents/tab_contents_view_gtk.h'],
['exclude', '^browser/tab_contents/render_view_context_menu_gtk.cc'],
['exclude', '^browser/tab_contents/render_view_context_menu_gtk.h'],
],
}],
['linux2==1',{
'sources/': [
['include', 'browser/views/tabs/grid.cc'],
['include', 'browser/views/tabs/grid.h'],
['include', 'browser/views/tabs/tab_overview_cell.cc'],
['include', 'browser/views/tabs/tab_overview_cell.h'],
['include', 'browser/views/tabs/tab_overview_container.cc'],
['include', 'browser/views/tabs/tab_overview_container.h'],
['include', 'browser/views/tabs/tab_overview_controller.cc'],
['include', 'browser/views/tabs/tab_overview_controller.h'],
['include', 'browser/views/tabs/tab_overview_drag_controller.cc'],
['include', 'browser/views/tabs/tab_overview_drag_controller.h'],
['include', 'browser/views/tabs/tab_overview_grid.cc'],
['include', 'browser/views/tabs/tab_overview_grid.h'],
['include', 'browser/views/tabs/tab_overview_message_listener.cc'],
['include', 'browser/views/tabs/tab_overview_message_listener.h'],
['include', 'browser/views/tabs/tab_overview_types.cc'],
['include', 'browser/views/tabs/tab_overview_types.h'],
],
}],
],
# Exclude files that should be excluded for all non-Windows platforms.
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_creator.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
],
}],
],
},
    {
      # Library with the in-process debugger / DevTools support code:
      # the debugger shell and its resources, the remote debugging
      # protocol handlers, and the DevTools manager/window glue.
      'target_name': 'debugger',
      'type': '<(library)',
      'msvs_guid': '57823D8C-A317-4713-9125-2C91FDFD12D6',
      'dependencies': [
        'chrome_resources',
        'theme_resources',
        '../skia/skia.gyp:skia',
        '../third_party/icu38/icu38.gyp:icui18n',
        '../third_party/icu38/icu38.gyp:icuuc',
      ],
      'include_dirs': [
        '..',
      ],
      'sources': [
        'browser/debugger/resources/debugger.css',
        'browser/debugger/resources/debugger.html',
        'browser/debugger/resources/debugger.js',
        'browser/debugger/resources/debugger_shell.js',
        'browser/debugger/debugger_host.h',
        'browser/debugger/debugger_host_impl.cpp',
        'browser/debugger/debugger_host_impl.h',
        'browser/debugger/debugger_io.h',
        'browser/debugger/debugger_io_socket.cc',
        'browser/debugger/debugger_io_socket.h',
        'browser/debugger/debugger_node.cc',
        'browser/debugger/debugger_node.h',
        'browser/debugger/debugger_remote_service.cc',
        'browser/debugger/debugger_remote_service.h',
        'browser/debugger/debugger_shell.cc',
        'browser/debugger/debugger_shell.h',
        # Currently unused, resurrect when porting to new platforms.
        #'browser/debugger/debugger_shell_stubs.cc',
        'browser/debugger/debugger_view.cc',
        'browser/debugger/debugger_view.h',
        'browser/debugger/debugger_window.cc',
        'browser/debugger/debugger_window.h',
        'browser/debugger/debugger_wrapper.cc',
        'browser/debugger/debugger_wrapper.h',
        'browser/debugger/devtools_client_host.h',
        'browser/debugger/devtools_manager.cc',
        'browser/debugger/devtools_manager.h',
        'browser/debugger/devtools_protocol_handler.cc',
        'browser/debugger/devtools_protocol_handler.h',
        'browser/debugger/devtools_remote.h',
        'browser/debugger/devtools_remote_listen_socket.cc',
        'browser/debugger/devtools_remote_listen_socket.h',
        'browser/debugger/devtools_remote_message.cc',
        'browser/debugger/devtools_remote_message.h',
        'browser/debugger/devtools_remote_service.cc',
        'browser/debugger/devtools_remote_service.h',
        'browser/debugger/devtools_window.cc',
        'browser/debugger/devtools_window.h',
        'browser/debugger/inspectable_tab_proxy.cc',
        'browser/debugger/inspectable_tab_proxy.h',
      ],
      'conditions': [
        ['OS=="linux"', {
          'dependencies': [
            '../build/linux/system.gyp:gtk',
          ],
        }],
        # debugger_view/debugger_window are built on Windows only.
        ['OS!="win"', {
          'sources!': [
            'browser/debugger/debugger_view.cc',
            'browser/debugger/debugger_window.cc',
          ],
        }],
      ],
    },
    {
      # Plugin-process library: NPAPI plugin hosting code (plugin channel,
      # NPObject proxy/stub marshalling, webplugin delegate stub).
      'target_name': 'plugin',
      'type': '<(library)',
      'msvs_guid': '20A560A0-2CD0-4D9E-A58B-1F24B99C087A',
      'dependencies': [
        'common',
        'chrome_resources',
        'chrome_strings',
        '../media/media.gyp:media',
        '../skia/skia.gyp:skia',
        '../third_party/icu38/icu38.gyp:icui18n',
        '../third_party/icu38/icu38.gyp:icuuc',
        '../third_party/libxml/libxml.gyp:libxml',
        '../third_party/npapi/npapi.gyp:npapi',
        'third_party/hunspell/hunspell.gyp:hunspell',
        '../webkit/webkit.gyp:glue',
      ],
      'include_dirs': [
        '<(INTERMEDIATE_DIR)',
      ],
      'sources': [
        # All .cc, .h, .m, and .mm files under plugin/ except for tests and
        # mocks.
        'plugin/chrome_plugin_host.cc',
        'plugin/chrome_plugin_host.h',
        'plugin/npobject_proxy.cc',
        'plugin/npobject_proxy.h',
        'plugin/npobject_stub.cc',
        'plugin/npobject_stub.h',
        'plugin/npobject_util.cc',
        'plugin/npobject_util.h',
        'plugin/plugin_channel.cc',
        'plugin/plugin_channel.h',
        'plugin/plugin_channel_base.cc',
        'plugin/plugin_channel_base.h',
        'plugin/plugin_main.cc',
        'plugin/plugin_thread.cc',
        'plugin/plugin_thread.h',
        'plugin/webplugin_delegate_stub.cc',
        'plugin/webplugin_delegate_stub.h',
        'plugin/webplugin_proxy.cc',
        'plugin/webplugin_proxy.h',
        'tools/build/win/precompiled.cc',
        'tools/build/win/precompiled.h',
      ],
      'configurations': {
        'Debug': {
          'msvs_precompiled_header': 'tools/build/win/precompiled.h',
          'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
        },
      },
      # These are layered in conditionals in the event other platforms
      # end up using this module as well.
      'conditions': [
        ['OS=="win"', {
          'defines': [
            '__STD_C',
            '_CRT_SECURE_NO_DEPRECATE',
            '_SCL_SECURE_NO_DEPRECATE',
          ],
          'include_dirs': [
            'third_party/wtl/include',
          ],
        },],
      ],
    },
    {
      # Renderer-process library: render view/widget implementations,
      # extension and DOM bindings, media glue, DevTools agent/client, and
      # the plugin channel host.
      'target_name': 'renderer',
      'type': '<(library)',
      'msvs_guid': '9301A569-5D2B-4D11-9332-B1E30AEACB8D',
      'dependencies': [
        'common',
        'plugin',
        'chrome_resources',
        'chrome_strings',
        '../printing/printing.gyp:printing',
        '../skia/skia.gyp:skia',
        '../third_party/icu38/icu38.gyp:icui18n',
        '../third_party/icu38/icu38.gyp:icuuc',
        '../third_party/npapi/npapi.gyp:npapi',
        '../webkit/webkit.gyp:glue',
        '../webkit/webkit.gyp:webkit',
      ],
      'include_dirs': [
        '..',
      ],
      'sources': [
        # TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
        # But app/ is the wrong directory for them.
        # Better is to remove the dep of *_tests on renderer, but in the
        # short term I'd like the build to work.
        'renderer/automation/dom_automation_controller.cc',
        'renderer/automation/dom_automation_controller.h',
        'renderer/extensions/bindings_utils.cc',
        'renderer/extensions/bindings_utils.h',
        'renderer/extensions/event_bindings.cc',
        'renderer/extensions/event_bindings.h',
        'renderer/extensions/extension_process_bindings.cc',
        'renderer/extensions/extension_process_bindings.h',
        'renderer/extensions/renderer_extension_bindings.cc',
        'renderer/extensions/renderer_extension_bindings.h',
        'renderer/loadtimes_extension_bindings.h',
        'renderer/loadtimes_extension_bindings.cc',
        'renderer/media/audio_renderer_impl.cc',
        'renderer/media/audio_renderer_impl.h',
        'renderer/media/buffered_data_source.cc',
        'renderer/media/buffered_data_source.h',
        'renderer/net/render_dns_master.cc',
        'renderer/net/render_dns_master.h',
        'renderer/net/render_dns_queue.cc',
        'renderer/net/render_dns_queue.h',
        'renderer/resources/event_bindings.js',
        'renderer/resources/extension_process_bindings.js',
        'renderer/resources/greasemonkey_api.js',
        'renderer/resources/json_schema.js',
        'renderer/resources/renderer_extension_bindings.js',
        'renderer/about_handler.cc',
        'renderer/about_handler.h',
        'renderer/audio_message_filter.cc',
        'renderer/audio_message_filter.h',
        'renderer/debug_message_handler.cc',
        'renderer/debug_message_handler.h',
        'renderer/devtools_agent.cc',
        'renderer/devtools_agent.h',
        'renderer/devtools_agent_filter.cc',
        'renderer/devtools_agent_filter.h',
        'renderer/devtools_client.cc',
        'renderer/devtools_client.h',
        'renderer/dom_ui_bindings.cc',
        'renderer/dom_ui_bindings.h',
        'renderer/external_host_bindings.cc',
        'renderer/external_host_bindings.h',
        'renderer/external_extension.cc',
        'renderer/external_extension.h',
        'renderer/js_only_v8_extensions.cc',
        'renderer/js_only_v8_extensions.h',
        'renderer/localized_error.cc',
        'renderer/localized_error.h',
        'renderer/navigation_state.h',
        'renderer/plugin_channel_host.cc',
        'renderer/plugin_channel_host.h',
        'renderer/print_web_view_helper.cc',
        'renderer/print_web_view_helper.h',
        'renderer/render_process.cc',
        'renderer/render_process.h',
        'renderer/render_thread.cc',
        'renderer/render_thread.h',
        'renderer/render_view.cc',
        'renderer/render_view.h',
        'renderer/render_widget.cc',
        'renderer/render_widget.h',
        'renderer/renderer_glue.cc',
        'renderer/renderer_histogram_snapshots.cc',
        'renderer/renderer_histogram_snapshots.h',
        'renderer/renderer_logging.h',
        'renderer/renderer_logging_linux.cc',
        'renderer/renderer_logging_mac.mm',
        'renderer/renderer_logging_win.cc',
        'renderer/renderer_main.cc',
        'renderer/renderer_main_platform_delegate.h',
        'renderer/renderer_main_platform_delegate_linux.cc',
        'renderer/renderer_main_platform_delegate_mac.mm',
        'renderer/renderer_main_platform_delegate_win.cc',
        'renderer/renderer_webkitclient_impl.cc',
        'renderer/renderer_webkitclient_impl.h',
        'renderer/user_script_slave.cc',
        'renderer/user_script_slave.h',
        'renderer/visitedlink_slave.cc',
        'renderer/visitedlink_slave.h',
        'renderer/webplugin_delegate_proxy.cc',
        'renderer/webplugin_delegate_proxy.h',
        'renderer/webworker_proxy.cc',
        'renderer/webworker_proxy.h',
        'tools/build/win/precompiled_wtl.cc',
        'tools/build/win/precompiled_wtl.h',
      ],
      'link_settings': {
        # The renderer sandbox profile ships inside the Mac bundle.
        'mac_bundle_resources': [
          'renderer/renderer.sb',
        ],
      },
      'configurations': {
        'Debug': {
          'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
          'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
        },
      },
      'conditions': [
        # Linux-specific rules.
        ['OS=="linux"', {
          'dependencies': [
            '../build/linux/system.gyp:gtk',
          ],
        }],
        # Windows-specific rules.
        ['OS=="win"', {
          'include_dirs': [
            'third_party/wtl/include',
          ],
        },],
        # As of yet unported-from-Windows code.
        ['OS!="win"', {
          'sources!': [
            'renderer/webworker_proxy.cc',
            'renderer/webworker_proxy.h',
          ],
        },],
      ],
    },
    {
      # Utility-process library: entry point and main thread for the
      # utility process (utility_main / utility_thread).
      'target_name': 'utility',
      'type': '<(library)',
      'msvs_guid': '4D2B38E6-65FF-4F97-B88A-E441DF54EBF7',
      'dependencies': [
        '../base/base.gyp:base',
        '../skia/skia.gyp:skia',
      ],
      'sources': [
        'tools/build/win/precompiled.cc',
        'tools/build/win/precompiled.h',
        'utility/utility_main.cc',
        'utility/utility_thread.cc',
        'utility/utility_thread.h',
      ],
      'include_dirs': [
        '..',
      ],
      'configurations': {
        'Debug': {
          'msvs_precompiled_header': 'tools/build/win/precompiled.h',
          'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
        },
      },
      'conditions': [
        ['OS=="linux"', {
          'dependencies': [
            '../build/linux/system.gyp:gtk',
          ],
        }],
      ],
    },
{
'target_name': 'chrome',
'type': 'executable',
'mac_bundle': 1,
'msvs_guid': '7B219FAA-E360-43C8-B341-804A94EEFFAC',
'msvs_existing_vcproj': 'app/chrome_exe.vcproj',
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/hard_error_handler_win.cc',
'app/hard_error_handler_win.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'dependencies': [
'../support/support.gyp:*',
],
'mac_bundle_resources': [
# put any pdfs down in the sources block below so pdfsqueeze runs on
# them.
'app/nibs/en.lproj/About.xib',
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/ClearBrowsingData.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/FirstRunDialog.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/PageInfo.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/otr_icon.pdf',
'app/theme/pageinfo_bad.png',
'app/theme/pageinfo_good.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'../app/resources/throbber.png',
'app/theme/throbber_waiting.png',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'actions': [
{
'action_name': 'dump_symbols',
'inputs': [
'<(DEPTH)/build/linux/dump_app_syms',
'<(DEPTH)/build/linux/dump_signature.py',
'<(PRODUCT_DIR)/dump_syms',
'<(PRODUCT_DIR)/chrome',
],
'outputs': [
'<(PRODUCT_DIR)/chrome.breakpad',
],
'action': ['<(DEPTH)/build/linux/dump_app_syms',
'<(PRODUCT_DIR)/dump_syms',
'<(PRODUCT_DIR)/chrome', '<@(_outputs)'],
},
],
}],
],
'dependencies': [
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/da.pak',
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
'<(INTERMEDIATE_DIR)/repack/he.pak',
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="linux" and (toolkit_views==1 or linux2==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
'variables': {
'bundle_id': 'com.google.Chrome',
},
            }, { # else: branding != "Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {
'bundle_id': 'org.chromium.Chromium',
},
}],
['mac_breakpad==1', {
# Only include breakpad in official builds.
'variables': {
# A real .dSYM is needed for dump_syms to operate on.
'mac_real_dsym': 1,
},
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
'../breakpad/breakpad.gyp:dump_syms',
'../breakpad/breakpad.gyp:symupload',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
],
'postbuilds': [
{
'postbuild_name': 'Dump Symbols',
'action': ['<(DEPTH)/build/mac/dump_app_syms',
'<(branding)'],
},
],
}], # mac_breakpad
['mac_keystone==1', {
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Frameworks/',
'files': ['../third_party/googlemac/Releases/Keystone/KeystoneRegistration.framework'],
},
],
}], # mac_keystone
],
'product_name': '<(mac_product_name)',
'xcode_settings': {
# chrome/app/app-Info.plist has:
# CFBundleIdentifier of CHROMIUM_BUNDLE_ID
# CFBundleName of CHROMIUM_SHORT_NAME
# Xcode then replaces these values with the branded values we set
# as settings on the target.
'CHROMIUM_BUNDLE_ID': '<(bundle_id)',
'CHROMIUM_SHORT_NAME': '<(branding)',
},
# Bring in pdfsqueeze and run it on all pdfs
'dependencies': [
'../build/temp_gyp/pdfsqueeze.gyp:pdfsqueeze',
],
'rules': [
{
'rule_name': 'pdfsqueeze',
'extension': 'pdf',
'inputs': [
'<(PRODUCT_DIR)/pdfsqueeze',
],
'outputs': [
'<(INTERMEDIATE_DIR)/pdfsqueeze/<(RULE_INPUT_ROOT).pdf',
],
'action': ['<(PRODUCT_DIR)/pdfsqueeze', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
'message': 'Running pdfsqueeze on <(RULE_INPUT_PATH)',
},
],
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
}, { # else: Branding!="Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names,
# etc.; should we try to extract from there instead?
'product_name': 'chrome'
}],
],
}],
        ['OS=="mac"', {
          # Mac adds an action to modify the Info.plist to meet our needs
          # (see the script for why this is done).
          'actions': [
            {
              'action_name': 'tweak_app_infoplist',
              # We don't list any inputs or outputs because we always want
              # the script to run. Why? Because it does things like record
              # the svn revision into the info.plist, so there is no file to
              # depend on that will change whenever that changes.
              'inputs': [],
              'outputs': [],
              'action': ['<(DEPTH)/build/mac/tweak_app_infoplist',
                         '-b<(mac_breakpad)',
                         '-k<(mac_keystone)',
                         '<(branding)'],
            },
          ],
        }],
['OS=="mac"', {
# Copy web inspector resources to the Contents/Resources folder.
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Resources',
'files': ['<(PRODUCT_DIR)/resources/inspector/'],
},
],
}],
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'dependencies': [
'installer/installer.gyp:installer_util',
],
}],
],
}],
['OS=="win"', {
'dependencies': [
# On Windows, make sure we've built chrome.dll, which
# contains all of the library code with Chromium
# functionality.
'chrome_dll',
'installer/installer.gyp:installer_util',
'installer/installer.gyp:installer_util_strings',
'../breakpad/breakpad.gyp:breakpad_handler',
'../breakpad/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'../views/views.gyp:views',
'worker',
'app/locales/locales.gyp:*',
],
'sources': [
'app/chrome_exe.rc',
'app/chrome_exe_version.rc.version',
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/app',
],
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'dbghelp.dll',
'dwmapi.dll',
'uxtheme.dll',
'ole32.dll',
'oleaut32.dll',
],
'ImportLibrary': '$(OutDir)\\lib\\chrome_exe.lib',
'ProgramDatabaseFile': '$(OutDir)\\chrome_exe.pdb',
# Set /SUBSYSTEM:WINDOWS for chrome.exe itself.
'SubSystem': '2',
},
'VCManifestTool': {
'AdditionalManifestFiles': '$(SolutionDir)\\app\\chrome.exe.manifest',
},
},
'actions': [
{
'action_name': 'version',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
'version_py': 'tools/build/version.py',
'version_path': 'VERSION',
'template_input_path': 'app/chrome_exe_version.rc.version',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(template_input_path)',
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/app/chrome_exe_version.rc',
],
'action': [
'python',
'<(version_py)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'<(template_input_path)',
'<@(_outputs)',
],
'process_outputs_as_sources': 1,
'message': 'Generating version information in <(_outputs)'
},
{
'action_name': 'first_run',
'inputs': [
'app/FirstRun',
],
'outputs': [
'<(PRODUCT_DIR)/First Run',
],
'action': ['copy', '<@(_inputs)', '<@(_outputs)'],
'message': 'Copy first run complete sentinel file',
},
],
        },{ # else: OS != "win"
'dependencies': [
# On Linux and Mac, link the dependencies (libraries)
# that make up actual Chromium functionality directly
# into the executable.
'<@(chromium_dependencies)',
],
'sources': [
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
],
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/browser_resources.pak',
'<(grit_out_dir)/debugger_resources.pak',
'<(grit_out_dir)/common_resources.pak',
'<(grit_out_dir)/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/app/app_resources.pak',
'<(grit_out_dir)/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_da',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_da.pak',
'<(grit_out_dir)/locale_settings_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_da.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_da.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_da.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# TODO(port): We can't simply emit the strings file without
# the nibs too, or the app fails to launch in this language.
# Currently, this is only for ui_tests, which won't work on
# the Mac anyway, so temporarily disable until we have the
# full strategy figured out. This goes for he and zh below.
# '<(INTERMEDIATE_DIR)/repack/da.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_en_us',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_en-US.pak',
'<(grit_out_dir)/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_en-US.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_en-US.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_he',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_he.pak',
'<(grit_out_dir)/locale_settings_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_he.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_he.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_he.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# '<(INTERMEDIATE_DIR)/repack/he.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_zh_tw',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_zh-TW.pak',
'<(grit_out_dir)/locale_settings_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_zh-TW.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_zh-TW.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_zh-TW.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# '<(INTERMEDIATE_DIR)/repack/zh.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'msvs_guid': '50B079C7-CD01-42D3-B8C4-9F8D9322E822',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
        # TODO: these should live here but are currently used by
        # production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_keyboard.cc',
'renderer/mock_keyboard.h',
'renderer/mock_keyboard_driver_win.cc',
'renderer/mock_keyboard_driver_win.h',
'renderer/mock_printer.cc',
'renderer/mock_printer.h',
'renderer/mock_printer_driver_win.cc',
'renderer/mock_printer_driver_win.h',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/automated_ui_tests/automated_ui_test_base.cc',
'test/automated_ui_tests/automated_ui_test_base.h',
'test/testing_browser_process.h',
'test/ui/javascript_test_util.cc',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'msvs_guid': 'B92AE829-E1CD-4781-824A-DCB1603A1672',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'msvs_guid': '76235B67-1C27-4627-8A33-4B2E1EF93EDE',
'dependencies': [
'chrome',
'browser',
'debugger',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/extensions/extension_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/media_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/automated_ui_tests/automated_ui_test_test.cc',
'test/chrome_process_util_uitest.cc',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
'test/ui/sunspider_uitest.cc',
'test/ui/v8_benchmark_uitest.cc',
'worker/worker_uitest.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
# leaves an extra window on screen after test completes.
'browser/sessions/session_restore_uitest.cc',
# hangs indefinitely but doesn't crash.
'browser/tab_restore_uitest.cc',
# puts up modal dialogs.
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'test_support_common',
'../google_update/google_update.gyp:google_update',
'../views/views.gyp:views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/extensions/extension_uitest.cc',
'browser/media_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
'worker/worker_uitest.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'msvs_guid': 'ECFC2BEC-9FC0-4AD9-9649-5F26793F65FC',
'msvs_existing_vcproj': 'test/unit/unittests.vcproj',
'dependencies': [
'chrome',
'browser',
'chrome_resources',
'chrome_strings',
'common',
'debugger',
'renderer',
'test_support_unit',
'utility',
'../app/app.gyp:app_resources',
'../net/net.gyp:net_resources',
'../net/net.gyp:net_test_support',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../webkit/webkit.gyp:webkit_resources',
'../skia/skia.gyp:skia',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# *NO* files in chrome/app have unit tests (except keystone_glue)!!!
# It seems a waste to have an app_unittests target, so for now
# I add keystone_glue.m explicitly to this target.
'app/keystone_glue.m',
'app/keystone_glue_unittest.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/autocomplete_popup_view_mac_unittest.mm',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/autocomplete/search_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/blocked_popup_container_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_index_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_theme_provider_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/debugger/devtools_sanity_unittest.cc',
'browser/child_process_security_policy_unittest.cc',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/about_window_controller_unittest.mm',
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/background_gradient_view_unittest.mm',
'browser/cocoa/bookmark_bar_bridge_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_bar_view_unittest.mm',
'browser/cocoa/bookmark_button_cell_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/browser_window_controller_unittest.mm',
'browser/cocoa/cocoa_utils_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/custom_home_pages_model_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_cell_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/location_bar_fieldeditor_unittest.mm',
'browser/cocoa/gradient_button_cell_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/search_engine_list_model_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/throbber_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_theme_source_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/encoding_menu_controller_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.cc',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/find_backend_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/google_update_settings_linux_unittest.cc',
'browser/google_update_settings_mac_unittest.mm',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/thumbnail_store_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/net/chrome_url_request_context_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/test_url_fetcher_factory.cc',
'browser/net/test_url_fetcher_factory.h',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/login_database_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/password_manager/password_store_mac_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_blocking_page_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_scraper_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/ssl/ssl_host_state_unittest.cc',
'browser/tab_contents/navigation_controller_unittest.cc',
'browser/tab_contents/navigation_entry_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/site_instance_unittest.cc',
'browser/tab_contents/thumbnail_generator_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/extension_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/gfx/emf_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'common/zip_unittest.cc',
'renderer/audio_message_filter_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/file_test_utils.h',
'test/file_test_utils.cc',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
          # The test fetches resources, which means the Mac build needs the
          # app bundle to exist on disk so it can pull from it.
'dependencies': [
'chrome',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/tab_contents/navigation_controller_unittest.cc',
'browser/task_manager_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'third_party/hunspell/google/hunspell_tests.cc',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}, { # OS != "mac"
'dependencies': [
'convert_dict_lib',
'third_party/hunspell/hunspell.gyp:hunspell',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'dependencies': [
'chrome_dll_version',
'installer/installer.gyp:installer_util_strings',
'../views/views.gyp:views',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources': [
'app/chrome_dll.rc',
'test/data/resource.rc',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'link_settings': {
'libraries': [
'-loleacc.lib',
'-lcomsupp.lib',
],
},
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
# Need to port browser_with_test_window_test.* first
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
# Need to port browser/automation/ui_controls.h
'browser/debugger/devtools_sanity_unittest.cc',
'browser/extensions/extension_process_manager_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/safe_browsing_blocking_page_unittest.cc',
'browser/search_engines/template_url_scraper_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'common/net/url_util_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'msvs_guid': 'D3E6C0FD-54C7-4FF2-9AE1-72F2DAFD820C',
'dependencies': [
'chrome',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'msvs_guid': 'C9E0BD1D-B175-4A91-8380-3FDC81FAB9D7',
'dependencies': [
'chrome',
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win" or (OS=="linux" and toolkit_views==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'tab_switching_test',
'type': 'executable',
'msvs_guid': 'A34770EA-A574-43E8-9327-F79C04770E98',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/tab_switching/tab_switching_test.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'memory_test',
'type': 'executable',
'msvs_guid': 'A5F831FD-9B9C-4FEF-9FBA-554817B734CE',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/memory_test/memory_test.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'url_fetch_test',
'type': 'executable',
'msvs_guid': '7EFD0C91-198E-4043-9E71-4A4C7879B929',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/url_fetch_test/url_fetch_test.cc',
],
'conditions': [
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources': [
'tools/build/win/precompiled_wtl.cc',
'tools/build/win/precompiled_wtl.h',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}], # OS="win"
], # conditions
},
],
'conditions': [
    # We set feature variables so that the different parts that need to check
    # for the mac build's use of breakpad/keystone can check that flag instead
    # of coding it based on branding.
# We need the Mac app name on disk, so we stick this into a variable so
# the different places that need it can use the common variable.
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names, etc.;
  # but extracting from there still means the xcodeproject files are out of
  # date until the next project regeneration.
# the next project regeneration.
['OS=="mac" and branding=="Chrome"', {
'variables': {
'mac_breakpad%': 1,
'mac_keystone%': 1,
'mac_product_name%': 'Google Chrome',
}
}, {
'variables': {
'mac_breakpad%': 0,
'mac_keystone%': 0,
'mac_product_name%': 'Chromium',
}
}],
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'variables': {
'linux_breakpad%': 1,
},
}, {
'variables': {
'linux_breakpad%': 0,
},
}],
],
}],
['OS=="mac"',
# On Mac only, add a project target called "build_app_dmg" that only
# builds a DMG out of the App (eventually will completely replace
# "package_app").
{ 'targets': [
{
'target_name': 'build_app_dmg',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'chrome',
],
'variables': {
'build_app_dmg_script_path': '<(DEPTH)/build/mac/build_app_dmg',
},
'actions': [
{
'inputs': [
'<(build_app_dmg_script_path)',
'<(PRODUCT_DIR)/<(branding).app',
],
'outputs': [
'<(PRODUCT_DIR)/<(branding).dmg',
],
'action_name': 'build_app_dmg',
'action': ['<(build_app_dmg_script_path)', '<@(branding)'],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'msvs_guid': '42ECD5EC-722F-41DE-B6B8-83764C8016DF',
'dependencies': [
'../base/base.gyp:base',
'convert_dict_lib',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/convert_dict.cc',
],
},
{
'target_name': 'convert_dict_lib',
'product_name': 'convert_dict',
'type': 'static_library',
'msvs_guid': '1F669F6B-3F4A-4308-E496-EE480BDF0B89',
'include_dirs': [
'..',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'msvs_guid': '4539AFB3-B8DC-47F3-A491-6DAC8FD26657',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
{
'target_name': 'perf_tests',
'type': 'executable',
'msvs_guid': '9055E088-25C6-47FD-87D5-D9DD9FD75C9F',
'dependencies': [
'browser',
'common',
'debugger',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/safe_browsing/database_perftest.cc',
'browser/safe_browsing/filter_false_positive_perftest.cc',
'browser/visitedlink_perftest.cc',
'common/json_value_serializer_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/safe_browsing/filter_false_positive_perftest.cc',
'browser/visitedlink_perftest.cc',
],
}],
['OS=="win" or (OS=="linux" and toolkit_views==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
],
}], # OS!="mac"
['OS!="win"',
{ 'targets': [
{
# Executable that runs each browser test in a new process.
'target_name': 'browser_tests',
'type': 'executable',
'dependencies': [
'browser',
'chrome',
'chrome_resources',
'debugger',
'test_support_common',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/browser/run_all_unittests.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/browser/browser_test_launcher_out_of_proc.cc',
'test/browser/browser_test_runner.cc',
'test/browser/browser_test_runner.h',
'test/unit/chrome_test_suite.h',
'test/ui_test_utils.cc',
# browser_tests_sources is defined in 'variables' at the top of the
# file.
'<@(browser_tests_sources)',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="mac"', {
              # The test fetches resources, which means the Mac build needs the
              # app bundle to exist on disk so the test can pull resources from it.
'dependencies': [
'chrome',
],
'sources': [
'app/breakpad_mac_stubs.mm',
'app/keystone_glue.h',
'app/keystone_glue.m',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
],
},
]
}],
['OS=="win"',
{ 'targets': [
{
# TODO(sgk): remove this when we change the buildbots to
# use the generated build\all.sln file to build the world.
'target_name': 'pull_in_all',
'type': 'none',
'dependencies': [
'installer/installer.gyp:*',
'../app/app.gyp:*',
'../base/base.gyp:*',
'../media/media.gyp:*',
'../net/net.gyp:*',
'../printing/printing.gyp:*',
'../rlz/rlz.gyp:*',
'../sdch/sdch.gyp:*',
'../skia/skia.gyp:*',
'../testing/gmock.gyp:*',
'../testing/gtest.gyp:*',
'../third_party/bsdiff/bsdiff.gyp:*',
'../third_party/bspatch/bspatch.gyp:*',
'../third_party/bzip2/bzip2.gyp:*',
'../third_party/cld/cld.gyp:cld',
'../third_party/codesighs/codesighs.gyp:*',
'../third_party/ffmpeg/ffmpeg.gyp:*',
'../third_party/icu38/icu38.gyp:*',
'../third_party/libjpeg/libjpeg.gyp:*',
'../third_party/libpng/libpng.gyp:*',
'../third_party/libxml/libxml.gyp:*',
'../third_party/libxslt/libxslt.gyp:*',
'../third_party/lzma_sdk/lzma_sdk.gyp:*',
'../third_party/modp_b64/modp_b64.gyp:*',
'../third_party/npapi/npapi.gyp:*',
'../third_party/sqlite/sqlite.gyp:*',
'../third_party/tcmalloc/tcmalloc.gyp:*',
'../third_party/zlib/zlib.gyp:*',
'../webkit/tools/test_shell/test_shell.gyp:*',
'../webkit/webkit.gyp:*',
'../build/temp_gyp/googleurl.gyp:*',
'../breakpad/breakpad.gyp:*',
'../courgette/courgette.gyp:*',
'../gears/gears.gyp:*',
'../rlz/rlz.gyp:*',
'../sandbox/sandbox.gyp:*',
'../tools/memory_watcher/memory_watcher.gyp:*',
'../webkit/activex_shim/activex_shim.gyp:*',
'../webkit/activex_shim_dll/activex_shim_dll.gyp:*',
'../v8/tools/gyp/v8.gyp:v8_shell',
],
},
{
'target_name': 'chrome_dll',
'type': 'shared_library',
'product_name': 'chrome',
'msvs_guid': 'C0A7EE2C-2A6D-45BE-BA78-6D006FDF52D9',
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
# On Windows, link the dependencies (libraries) that make
# up actual Chromium functionality into this .dll.
'<@(chromium_dependencies)',
'chrome_dll_version',
'chrome_resources',
'installer/installer.gyp:installer_util_strings',
# TODO(sgk): causes problems because theme_dll doesn't
# actually generate default.lib, but now expects it.
#'theme_dll',
'worker',
'../net/net.gyp:net_resources',
'../support/support.gyp:*',
'../third_party/cld/cld.gyp:cld',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/tcmalloc/tcmalloc.gyp:tcmalloc',
'../views/views.gyp:views',
'../webkit/webkit.gyp:webkit_resources',
'../gears/gears.gyp:gears',
],
'defines': [
'CHROME_DLL',
'BROWSER_DLL',
'RENDERER_DLL',
'PLUGIN_DLL',
],
'sources': [
'app/chrome_dll.rc',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
'../webkit/glue/resources/aliasb.cur',
'../webkit/glue/resources/cell.cur',
'../webkit/glue/resources/col_resize.cur',
'../webkit/glue/resources/copy.cur',
'../webkit/glue/resources/row_resize.cur',
'../webkit/glue/resources/vertical_text.cur',
'../webkit/glue/resources/zoom_in.cur',
'../webkit/glue/resources/zoom_out.cur',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
# TODO(sgk): left-over from pre-gyp build, figure out
# if we still need them and/or how to update to gyp.
#'app/check_dependents.bat',
#'app/chrome.dll.deps',
],
'msvs_settings': {
'VCLinkerTool': {
'BaseAddress': '0x01c30000',
'DelayLoadDLLs': [
'crypt32.dll',
'cryptui.dll',
'winhttp.dll',
'wininet.dll',
'wsock32.dll',
'ws2_32.dll',
'winspool.drv',
'comdlg32.dll',
'imagehlp.dll',
'psapi.dll',
'urlmon.dll',
'imm32.dll',
],
'ImportLibrary': '$(OutDir)\\lib\\chrome_dll.lib',
'ProgramDatabaseFile': '$(OutDir)\\chrome_dll.pdb',
# Set /SUBSYSTEM:WINDOWS for chrome.dll (for consistency).
'SubSystem': '2',
},
'VCManifestTool': {
'AdditionalManifestFiles': '$(SolutionDir)\\app\\chrome.dll.manifest',
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'chrome_dll_version',
'type': 'none',
#'msvs_guid': '414D4D24-5D65-498B-A33F-3A29AD3CDEDC',
'dependencies': [
'../build/util/build_util.gyp:lastchange',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version',
],
},
'actions': [
{
'action_name': 'version',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
'version_py': 'tools/build/version.py',
'version_path': 'VERSION',
'template_input_path': 'app/chrome_dll_version.rc.version',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(template_input_path)',
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
],
'action': [
'python',
'<(version_py)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'<(template_input_path)',
'<@(_outputs)',
],
'message': 'Generating version information in <(_outputs)'
},
],
},
{
'target_name': 'activex_test_control',
'type': 'shared_library',
'msvs_guid': '414D4D24-5D65-498B-A33F-3A29AD3CDEDC',
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'link_settings': {
'libraries': [
'-lcomsuppw.lib',
],
},
'sources': [
'test/activex_test_control/activex_test_control.cc',
'test/activex_test_control/activex_test_control.def',
'test/activex_test_control/activex_test_control.idl',
'test/activex_test_control/activex_test_control.rc',
'test/activex_test_control/activex_test_control.rgs',
'test/activex_test_control/chrome_test_control.bmp',
'test/activex_test_control/chrome_test_control.cc',
'test/activex_test_control/chrome_test_control.h',
'test/activex_test_control/chrome_test_control.rgs',
'test/activex_test_control/chrome_test_control_cp.h',
'test/activex_test_control/resource.h',
],
},
{
'target_name': 'automated_ui_tests',
'type': 'executable',
'msvs_guid': 'D2250C20-3A94-4FB9-AF73-11BC5B73884B',
'dependencies': [
'browser',
'renderer',
'test_support_common',
'test_support_ui',
'theme_resources',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../third_party/libxml/libxml.gyp:libxml',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/automated_ui_tests/automated_ui_tests.cc',
'test/automated_ui_tests/automated_ui_tests.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'automation',
'type': '<(library)',
'msvs_guid': '1556EF78-C7E6-43C8-951F-F6B43AC0DD12',
'dependencies': [
'theme_resources',
'../skia/skia.gyp:skia',
],
'include_dirs': [
'..',
],
'sources': [
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_messages.h',
'test/automation/automation_messages_internal.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
],
},
{
# Shared library used by the in-proc browser tests.
'target_name': 'browser_tests_dll',
'type': 'shared_library',
'product_name': 'browser_tests',
'msvs_guid': 'D7589D0D-304E-4589-85A4-153B7D84B07F',
'dependencies': [
'chrome',
'browser',
'chrome_dll_version',
'chrome_resources',
'installer/installer.gyp:installer_util_strings',
'debugger',
'renderer',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
'sources': [
'test/browser/run_all_unittests.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/unit/chrome_test_suite.h',
'test/ui_test_utils.cc',
'app/chrome_dll.rc',
'app/chrome_dll_resource.h',
'app/chrome_dll_version.rc.version',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
# browser_tests_sources and browser_tests_source_win_specific are
# defined in 'variables' at the top of the file.
'<@(browser_tests_sources)',
'<@(browser_tests_sources_win_specific)',
],
},
{
# Executable that runs the browser tests in-process.
'target_name': 'browser_tests',
'type': 'executable',
'msvs_guid': '9B87804D-2502-480B-95AE-5A572CE91809',
'dependencies': [
'browser_tests_dll',
'../base/base.gyp:base',
],
'include_dirs': [
'..',
],
'sources': [
'test/browser/browser_test_launcher_in_proc.cc',
'test/browser/browser_test_runner.cc',
'test/browser/browser_test_runner.h',
],
'msvs_settings': {
'VCLinkerTool': {
# Use a PDB name different than the one for the DLL.
'ProgramDatabaseFile': '$(OutDir)\\browser_tests_exe.pdb',
},
},
},
{
'target_name': 'crash_service',
'type': 'executable',
'msvs_guid': '89C1C190-A5D1-4EC4-BD6A-67FF2195C7CC',
'dependencies': [
'common',
'../base/base.gyp:base',
'../breakpad/breakpad.gyp:breakpad_handler',
'../breakpad/breakpad.gyp:breakpad_sender',
],
'include_dirs': [
'..',
],
'sources': [
'tools/crash_service/crash_service.cc',
'tools/crash_service/crash_service.h',
'tools/crash_service/main.cc',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
},
},
},
{
'target_name': 'generate_profile',
'type': 'executable',
'msvs_guid': '2E969AE9-7B12-4EDB-8E8B-48C7AE7BE357',
'dependencies': [
'browser',
'debugger',
'renderer',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
],
'include_dirs': [
'..',
],
'sources': [
'tools/profiles/generate_profile.cc',
'tools/profiles/thumbnail-inl.h',
],
},
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'msvs_guid': '018D4F38-6272-448F-A864-976DA09F05D0',
'dependencies': [
'chrome_dll_version',
'chrome_resources',
'chrome_strings',
'debugger',
'installer/installer.gyp:installer_util_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
'../views/views.gyp:views',
'../webkit/webkit.gyp:webkit_resources',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/debugger/devtools_sanity_unittest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/blocked_popup_container_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
# Windows-only below here, will need addressing if/when
# this gets ported.
'../webkit/glue/resources/aliasb.cur',
'../webkit/glue/resources/cell.cur',
'../webkit/glue/resources/col_resize.cur',
'../webkit/glue/resources/copy.cur',
'../webkit/glue/resources/row_resize.cur',
'../webkit/glue/resources/vertical_text.cur',
'../webkit/glue/resources/zoom_in.cur',
'../webkit/glue/resources/zoom_out.cur',
'app/chrome_dll.rc',
'test/data/resource.rc',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'msvs_guid': 'A1CAA831-C507-4B2E-87F3-AEC63C9907F9',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'reliability_tests',
'type': 'executable',
'msvs_guid': '8A3E1774-1DE9-445C-982D-3EE37C8A752A',
'dependencies': [
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/reliability/reliability_test_suite.h',
'test/reliability/run_all_unittests.cc',
],
},
{
'target_name': 'security_tests',
'type': 'shared_library',
'msvs_guid': 'E750512D-FC7C-4C98-BF04-0A0DAF882055',
'include_dirs': [
'..',
],
'sources': [
'test/injection_test_dll.h',
'test/security_tests/ipc_security_tests.cc',
'test/security_tests/ipc_security_tests.h',
'test/security_tests/security_tests.cc',
'../sandbox/tests/validation_tests/commands.cc',
'../sandbox/tests/validation_tests/commands.h',
],
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'msvs_guid': 'E3749617-BA3D-4230-B54C-B758E56D9FA5',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'test_chrome_plugin',
'type': 'shared_library',
'msvs_guid': '7F0A70F6-BE3F-4C19-B435-956AB8F30BA4',
'dependencies': [
'../base/base.gyp:base',
'../build/temp_gyp/googleurl.gyp:googleurl',
],
'include_dirs': [
'..',
],
'link_settings': {
'libraries': [
'-lwinmm.lib',
],
},
'sources': [
'test/chrome_plugin/test_chrome_plugin.cc',
'test/chrome_plugin/test_chrome_plugin.def',
'test/chrome_plugin/test_chrome_plugin.h',
],
},
{
'target_name': 'theme_dll',
'type': 'loadable_module',
'msvs_guid': 'FD683DD6-D9BF-4B1B-AB6D-A3AC03EDAA4D',
'product_name': 'default',
'dependencies': [
'theme_resources',
'../app/app.gyp:app_resources',
],
'sources': [
'<(grit_out_dir)/theme_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/app/app_resources.rc',
],
'msvs_settings': {
'VCLinkerTool': {
'BaseAddress': '0x3CE00000',
'OutputFile': '<(PRODUCT_DIR)/themes/default.dll',
'ResourceOnlyDLL': 'true',
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'msvs_guid': 'C78D02D0-A366-4EC6-A248-AA8E64C4BA18',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
# If you add new tests here you may need to update the croc configs.
# E.g. build/{linux|mac}/chrome_linux.croc
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
'unit_tests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
# Use outputs of this action as inputs for the main target build.
            # This seems like a misnomer, but it keeps the build happy on Linux (scons).
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
Convert chrome.exe to being generated by gyp.
BUG=none
TEST=none
git-svn-id: http://src.chromium.org/svn/trunk/src@18692 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 878e3e59df02d8fdf9ccf5f637cca679a303da61
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
# Define the common dependencies that contain all the actual
# Chromium functionality. This list gets pulled in below by
# the link of the actual chrome (or chromium) executable on
# Linux or Mac, and into chrome.dll on Windows.
'chromium_dependencies': [
'common',
'browser',
'debugger',
'renderer',
'utility',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
'browser_tests_sources': [
'browser/child_process_security_policy_browser_test.cc',
'browser/renderer_host/web_cache_manager_browser_test.cc',
'browser/ssl/ssl_browser_tests.cc',
],
'browser_tests_sources_win_specific': [
'browser/extensions/extension_shelf_model_unittest.cc',
'browser/extensions/extension_browsertest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/test_extension_loader.h',
'browser/views/find_bar_win_browsertest.cc',
# TODO(jcampan): once the task manager works on Mac, move this test to the
      # non-Windows-specific section.
'browser/task_manager_browsertest.cc',
],
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|views|x)(_unittest)?\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)(_unittest)?\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)(_unittest)?\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(views|win)(_unittest)?\\.cc$'],
['include', '/win/'],
['include', '/(views|win)_[^/]*\\.cc$'],
]}],
['OS=="linux" and toolkit_views==1', {'sources/': [
['include', '_views\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'msvs_guid': 'B95AB527-F7DB-41E9-AD91-EB51EE0F56BE',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
'branded_env': 'CHROMIUM_BUILD=google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
'branded_env': 'CHROMIUM_BUILD=chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(grit_out_dir)/grit/<(RULE_INPUT_ROOT).h',
'<(grit_out_dir)/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(grit_out_dir)',
'-D', '<(chrome_build)',
'-E', '<(branded_env)',
],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'msvs_guid': 'D9DDAF60-663F-49CC-90DC-3D08CC3D1B28',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(grit_out_dir)/grit/<(RULE_INPUT_ROOT).h',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_da.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_en-US.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_he.pak',
'<(grit_out_dir)/<(RULE_INPUT_ROOT)_zh-TW.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(grit_out_dir)',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'msvs_guid' : 'A158FB0A-25E4-6523-6B5A-4BB294B73D31',
'variables': {
'grit_path': '../tools/grit/grit.py',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': [
'python', '<(grit_path)',
'-i', '<(input_path)', 'build',
'-o', '<(grit_out_dir)',
'-D', '<(chrome_build)'
],
'conditions': [
['linux2==1', {
'action': ['-D', 'linux2'],
}],
],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(grit_out_dir)',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
'target_name': 'common',
'type': '<(library)',
'msvs_guid': '899F1280-3441-4D1F-BA04-CCD6208D9146',
'dependencies': [
'chrome_resources',
'chrome_strings',
'theme_resources',
'../app/app.gyp:app_base',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/extension.cc',
'common/extensions/extension.h',
'common/extensions/extension_error_reporter.cc',
'common/extensions/extension_error_reporter.h',
'common/extensions/extension_error_utils.cc',
'common/extensions/extension_error_utils.h',
'common/extensions/extension_unpacker.cc',
'common/extensions/extension_unpacker.h',
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/utils.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/automation_constants.cc',
'common/automation_constants.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/histogram_synchronizer.cc',
'common/histogram_synchronizer.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/native_window_notification_source.h',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/renderer_preferences.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/worker_messages.h',
'common/worker_messages_internal.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'common/zip.cc', # Requires zlib directly.
'common/zip.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'../app/app.gyp:app_base',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'msvs_guid': '5BF908A7-68FB-4A4B-99E3-8C749F1FE4EA',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../app/app.gyp:app_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/extension_automation_constants.h',
'browser/automation/extension_automation_constants.cc',
'browser/automation/automation_extension_function.h',
'browser/automation/automation_extension_function.cc',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/extension_port_container.cc',
'browser/automation/extension_port_container.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_views.cc',
'browser/back_forward_menu_model_views.h',
'browser/blocked_popup_container.cc',
'browser/blocked_popup_container.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_views.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_index.cc',
'browser/bookmarks/bookmark_index.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_manager.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_theme_provider_gtk.cc',
'browser/browser_theme_provider.cc',
'browser/browser_theme_provider.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/child_process_security_policy.cc',
'browser/child_process_security_policy.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/about_window_controller.h',
'browser/cocoa/about_window_controller.mm',
'browser/cocoa/background_gradient_view.h',
'browser/cocoa/background_gradient_view.mm',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_bridge.h',
'browser/cocoa/bookmark_bar_bridge.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_bar_view.h',
'browser/cocoa/bookmark_bar_view.mm',
'browser/cocoa/bookmark_button_cell.h',
'browser/cocoa/bookmark_button_cell.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/clear_browsing_data_controller.h',
'browser/cocoa/clear_browsing_data_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/cocoa_utils.h',
'browser/cocoa/cocoa_utils.mm',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/custom_home_pages_model.h',
'browser/cocoa/custom_home_pages_model.mm',
'browser/cocoa/encoding_menu_controller_delegate_mac.h',
'browser/cocoa/encoding_menu_controller_delegate_mac.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/first_run_dialog.h',
'browser/cocoa/first_run_dialog.mm',
'browser/cocoa/gradient_button_cell.h',
'browser/cocoa/gradient_button_cell.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.mm',
'browser/cocoa/location_bar_cell.h',
'browser/cocoa/location_bar_cell.mm',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/menu_localizer.h',
'browser/cocoa/menu_localizer.mm',
'browser/cocoa/page_info_window_controller.h',
'browser/cocoa/page_info_window_controller.mm',
'browser/cocoa/page_info_window_mac.h',
'browser/cocoa/page_info_window_mac.mm',
'browser/cocoa/preferences_localizer.h',
'browser/cocoa/preferences_localizer.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/search_engine_list_model.h',
'browser/cocoa/search_engine_list_model.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/throbber_view.h',
'browser/cocoa/throbber_view.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/dock_info_gtk.cc',
'browser/dock_info_win.cc',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_theme_source.cc',
'browser/dom_ui/dom_ui_theme_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_dom_handler.cc',
'browser/dom_ui/downloads_dom_handler.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_started_animation.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/encoding_menu_controller.cc',
'browser/encoding_menu_controller.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_bookmarks_module_constants.cc',
'browser/extensions/extension_bookmarks_module_constants.h',
'browser/extensions/extension_creator.cc',
'browser/extensions/extension_creator.h',
'browser/extensions/extension_event_names.cc',
'browser/extensions/extension_event_names.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module_constants.cc',
'browser/extensions/extension_page_actions_module_constants.h',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_shelf_model.cc',
'browser/extensions/extension_shelf_model.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_tabs_module_constants.cc',
'browser/extensions/extension_tabs_module_constants.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/external_extension_provider.h',
'browser/extensions/external_registry_extension_provider_win.cc',
'browser/extensions/external_registry_extension_provider_win.h',
'browser/extensions/external_pref_extension_provider.cc',
'browser/extensions/external_pref_extension_provider.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/first_run_mac.mm',
'browser/first_run_win.cc',
'browser/first_run_gtk.cc',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_update_settings_linux.cc',
'browser/google_update_settings_mac.mm',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_button_gtk.cc',
'browser/gtk/back_forward_button_gtk.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/blocked_popup_container_view_gtk.cc',
'browser/gtk/blocked_popup_container_view_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_manager_gtk.cc',
'browser/gtk/bookmark_manager_gtk.h',
'browser/gtk/bookmark_menu_controller_gtk.cc',
'browser/gtk/bookmark_menu_controller_gtk.h',
'browser/gtk/bookmark_utils_gtk.cc',
'browser/gtk/bookmark_utils_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_titlebar.cc',
'browser/gtk/browser_titlebar.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/clear_browsing_data_dialog_gtk.cc',
'browser/gtk/clear_browsing_data_dialog_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/download_started_animation_gtk.cc',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/gtk_chrome_link_button.cc',
'browser/gtk/gtk_chrome_link_button.h',
'browser/gtk/gtk_floating_container.cc',
'browser/gtk/gtk_floating_container.h',
'browser/gtk/hung_renderer_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.h',
'browser/gtk/import_lock_dialog_gtk.cc',
'browser/gtk/import_lock_dialog_gtk.h',
'browser/gtk/import_progress_dialog_gtk.cc',
'browser/gtk/import_progress_dialog_gtk.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/focus_store_gtk.cc',
'browser/gtk/focus_store_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/options/general_page_gtk.cc',
'browser/gtk/options/general_page_gtk.h',
'browser/gtk/options/options_layout_gtk.cc',
'browser/gtk/options/options_layout_gtk.h',
'browser/gtk/options/options_window_gtk.cc',
'browser/gtk/options/url_picker_dialog_gtk.cc',
'browser/gtk/options/url_picker_dialog_gtk.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/dragged_tab_gtk.cc',
'browser/gtk/tabs/dragged_tab_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.h',
'browser/icon_loader.cc',
'browser/icon_loader_linux.cc',
'browser/icon_loader_mac.mm',
'browser/icon_loader_win.cc',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/icon_manager_linux.cc',
'browser/icon_manager_mac.mm',
'browser/icon_manager_win.cc',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/keychain_mac.cc',
'browser/keychain_mac.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/login_prompt_win.cc',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_page_base.cc',
'browser/options_page_base.h',
'browser/options_window.h',
'browser/page_info_window.cc',
'browser/page_info_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/login_database_mac.cc',
'browser/password_manager/login_database_mac.h',
'browser/password_manager/login_database.cc',
'browser/password_manager/login_database.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/password_manager/password_store.cc',
'browser/password_manager/password_store.h',
'browser/password_manager/password_store_default.cc',
'browser/password_manager/password_store_default.h',
# Temporarily disabled while we figure some stuff out.
# http://code.google.com/p/chromium/issues/detail?id=12351
# 'browser/password_manager/password_store_gnome.h',
# 'browser/password_manager/password_store_gnome.cc',
# 'browser/password_manager/password_store_kwallet.h',
# 'browser/password_manager/password_store_kwallet.cc',
'browser/password_manager/password_store_mac_internal.h',
'browser/password_manager/password_store_mac.h',
'browser/password_manager/password_store_mac.cc',
'browser/password_manager/password_store_win.h',
'browser/password_manager/password_store_win.cc',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_manager.cc',
'browser/renderer_host/backing_store_manager.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_sandbox_host_linux.h',
'browser/renderer_host/render_sandbox_host_linux.cc',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/edit_keyword_controller_base.cc',
'browser/search_engines/edit_keyword_controller_base.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_cert_error_handler.cc',
'browser/ssl/ssl_cert_error_handler.h',
'browser/ssl/ssl_error_handler.cc',
'browser/ssl/ssl_error_handler.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_mixed_content_handler.cc',
'browser/ssl/ssl_mixed_content_handler.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/ssl/ssl_policy_backend.cc',
'browser/ssl/ssl_policy_backend.h',
'browser/ssl/ssl_request_info.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/thumbnail_generator.cc',
'browser/tab_contents/thumbnail_generator.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_linux.cc',
'browser/task_manager_win.cc',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/thumbnail_store.cc',
'browser/thumbnail_store.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/utility_process_host.cc',
'browser/utility_process_host.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container_view_win.cc',
'browser/views/blocked_popup_container_view_win.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_menu_controller_views.cc',
'browser/views/bookmark_menu_controller_views.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/browser_bubble.cc',
'browser/views/browser_bubble.h',
'browser/views/browser_bubble_gtk.cc',
'browser/views/browser_bubble_win.cc',
'browser/views/browser_dialogs.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/chrome_views_delegate.cc',
'browser/views/chrome_views_delegate.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dialog_stubs_gtk.cc',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation_win.cc',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_frame_gtk.cc',
'browser/views/frame/browser_frame_gtk.h',
'browser/views/frame/browser_frame_win.cc',
'browser/views/frame/browser_frame_win.h',
'browser/views/frame/browser_non_client_frame_view.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window_win.cc',
'browser/views/page_info_window_win.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tab_contents/tab_contents_container.cc',
'browser/views/tab_contents/tab_contents_container.h',
'browser/views/tab_contents/native_tab_contents_container.h',
'browser/views/tab_contents/native_tab_contents_container_gtk.cc',
'browser/views/tab_contents/native_tab_contents_container_gtk.h',
'browser/views/tab_contents/native_tab_contents_container_win.cc',
'browser/views/tab_contents/native_tab_contents_container_win.h',
'browser/views/tab_contents/render_view_context_menu_win.cc',
'browser/views/tab_contents/render_view_context_menu_win.h',
'browser/views/tab_contents/render_view_context_menu_external_win.cc',
'browser/views/tab_contents/render_view_context_menu_external_win.h',
'browser/views/tab_contents/tab_contents_view_gtk.cc',
'browser/views/tab_contents/tab_contents_view_gtk.h',
'browser/views/tab_contents/tab_contents_view_win.cc',
'browser/views/tab_contents/tab_contents_view_win.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/grid.cc',
'browser/views/tabs/grid.h',
'browser/views/tabs/native_view_photobooth.h',
'browser/views/tabs/native_view_photobooth_gtk.cc',
'browser/views/tabs/native_view_photobooth_gtk.h',
'browser/views/tabs/native_view_photobooth_win.cc',
'browser/views/tabs/native_view_photobooth_win.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_overview_cell.cc',
'browser/views/tabs/tab_overview_cell.h',
'browser/views/tabs/tab_overview_container.cc',
'browser/views/tabs/tab_overview_container.h',
'browser/views/tabs/tab_overview_controller.cc',
'browser/views/tabs/tab_overview_controller.h',
'browser/views/tabs/tab_overview_drag_controller.cc',
'browser/views/tabs/tab_overview_drag_controller.h',
'browser/views/tabs/tab_overview_grid.cc',
'browser/views/tabs/tab_overview_grid.h',
'browser/views/tabs/tab_overview_message_listener.cc',
'browser/views/tabs/tab_overview_message_listener.h',
'browser/views/tabs/tab_overview_types.cc',
'browser/views/tabs/tab_overview_types.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/window_sizer_mac.mm',
'browser/window_sizer_linux.cc',
'browser/window_sizer_win.cc',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'browser/zygote_host_linux.cc',
'browser/zygote_main_linux.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(grit_out_dir)/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
# Temporarily disabled while we figure some stuff out.
# http://code.google.com/p/chromium/issues/detail?id=12351
# '../build/linux/system.gyp:dbus-glib',
# '../build/linux/system.gyp:gnome-keyring',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
# Windows-specific files.
'browser/password_manager/password_store_win.cc',
'browser/password_manager/password_store_win.h',
],
'conditions': [
['linux_breakpad==1', {
'sources': [
'browser/renderer_host/render_crash_handler_host_linux.cc',
'app/breakpad_linux.cc',
'app/breakpad_linux.h',
],
'dependencies': [
'../breakpad/breakpad.gyp:breakpad_client',
],
'include_dirs': [
# breakpad_linux.cc wants file_version_info_linux.h
'<(SHARED_INTERMEDIATE_DIR)',
],
}, {
'sources': [
'browser/renderer_host/render_crash_handler_host_linux_stub.cc',
'app/breakpad_linux_stub.cc',
'app/breakpad_linux.h',
],
}],
],
}],
['OS=="linux" and toolkit_views==0', {
'sources!': [
'browser/bookmarks/bookmark_drop_info.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/dock_info.cc',
'browser/password_manager/password_store_gnome.h',
'browser/password_manager/password_store_gnome.cc',
'browser/password_manager/password_store_kwallet.h',
'browser/password_manager/password_store_kwallet.cc',
'browser/password_manager/password_store_win.cc',
'browser/password_manager/password_store_win.h',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_shelf.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
'../third_party/GTM/AppKit/GTMUILocalizer.h',
'../third_party/GTM/AppKit/GTMUILocalizer.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSScreen+Utils.h',
'../third_party/mozilla/include/NSScreen+Utils.m',
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
'../third_party/mozilla/include/ToolTip.h',
'../third_party/mozilla/include/ToolTip.mm',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/SecurityInterface.framework',
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
],
},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/installer.gyp:installer_util',
'../printing/printing.gyp:printing',
'../third_party/cld/cld.gyp:cld',
'../views/views.gyp:views',
'../gears/gears.gyp:gears',
],
'export_dependent_settings': [
'../views/views.gyp:views',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/history/history_publisher_none.cc',
'browser/password_manager/password_store_gnome.h',
'browser/password_manager/password_store_gnome.cc',
'browser/password_manager/password_store_kwallet.h',
'browser/password_manager/password_store_kwallet.cc',
'browser/views/tabs/grid.cc',
'browser/views/tabs/grid.h',
'browser/views/tabs/tab_overview_cell.cc',
'browser/views/tabs/tab_overview_cell.h',
'browser/views/tabs/tab_overview_container.cc',
'browser/views/tabs/tab_overview_container.h',
'browser/views/tabs/tab_overview_controller.cc',
'browser/views/tabs/tab_overview_controller.h',
'browser/views/tabs/tab_overview_drag_controller.cc',
'browser/views/tabs/tab_overview_drag_controller.h',
'browser/views/tabs/tab_overview_grid.cc',
'browser/views/tabs/tab_overview_grid.h',
'browser/views/tabs/tab_overview_message_listener.cc',
'browser/views/tabs/tab_overview_message_listener.h',
'browser/views/tabs/tab_overview_types.cc',
'browser/views/tabs/tab_overview_types.h',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}, { # 'OS!="win"'
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'conditions': [
['toolkit_views==1',{
'sources/': [
['include', '^browser/dock_info_gtk.cc'],
['include', '^browser/dock_info.cc'],
['include', '^browser/dock_info.h'],
['include', '^browser/extensions/'],
['include', '^browser/views/bookmark_bar_view.cc'],
['include', '^browser/views/bookmark_bar_view.h'],
['include', '^browser/views/bookmark_context_menu.cc'],
['include', '^browser/views/bookmark_context_menu.h'],
['include', '^browser/views/bookmark_menu_button.cc'],
['include', '^browser/views/bookmark_menu_button.h'],
['include', '^browser/views/bookmark_menu_controller_views.cc'],
['include', '^browser/views/browser_bubble_gtk.cc'],
['include', '^browser/views/browser_bubble.cc'],
['include', '^browser/views/browser_bubble.h'],
['include', '^browser/views/chrome_views_delegate.cc'],
['include', '^browser/views/dialog_stubs_gtk.cc'],
['include', '^browser/views/download_item_view.cc'],
['include', '^browser/views/download_item_view.h'],
['include', '^browser/views/download_shelf_view.cc'],
['include', '^browser/views/download_shelf_view.h'],
['include', '^browser/views/dragged_tab_controller.cc'],
['include', '^browser/views/dragged_tab_controller.h'],
['include', '^browser/views/event_utils.cc'],
['include', '^browser/views/event_utils.h'],
['include', '^browser/views/find_bar_view.cc'],
['include', '^browser/views/find_bar_view.h'],
['include', '^browser/views/find_bar_win.cc'],
['include', '^browser/views/find_bar_win.h'],
['include', '^browser/views/go_button.cc'],
['include', '^browser/views/go_button.h'],
['include', '^browser/views/toolbar_star_toggle.h'],
['include', '^browser/views/toolbar_star_toggle.cc'],
['include', '^browser/views/frame/browser_view.cc'],
['include', '^browser/views/frame/browser_view.h'],
['include', '^browser/views/frame/browser_frame_gtk.cc'],
['include', '^browser/views/frame/browser_frame_gtk.h'],
['include', '^browser/views/frame/browser_root_view.cc'],
['include', '^browser/views/frame/browser_root_view.h'],
['include', '^browser/views/frame/opaque_browser_frame_view.cc'],
['include', '^browser/views/frame/opaque_browser_frame_view.h'],
['include', '^browser/views/infobars/*'],
['include', '^browser/views/info_bubble.cc'],
['include', '^browser/views/info_bubble.h'],
['include', '^browser/views/location_bar_view.cc'],
['include', '^browser/views/location_bar_view.h'],
['include', '^browser/views/status_bubble_views.cc'],
['include', '^browser/views/status_bubble_views.h'],
['include', '^browser/views/tab_contents/native_tab_contents_container_gtk.cc'],
['include', '^browser/views/tab_contents/native_tab_contents_container_gtk.h'],
['include', '^browser/views/tab_contents/render_view_context_menu_win.cc'],
['include', '^browser/views/tab_contents/render_view_context_menu_win.h'],
['include', '^browser/views/tab_contents/tab_contents_container.cc'],
['include', '^browser/views/tab_contents/tab_contents_container.h'],
['include', '^browser/views/tab_contents/tab_contents_view_gtk.cc'],
['include', '^browser/views/tab_contents/tab_contents_view_gtk.h'],
['include', '^browser/views/tab_icon_view.cc'],
['include', '^browser/views/tab_icon_view.h'],
['include', '^browser/views/tabs/dragged_tab_controller.cc'],
['include', '^browser/views/tabs/dragged_tab_controller.h'],
['include', '^browser/views/tabs/dragged_tab_view.cc'],
['include', '^browser/views/tabs/dragged_tab_view.h'],
['include', '^browser/views/tabs/native_view_photobooth.h'],
['include', '^browser/views/tabs/native_view_photobooth_gtk.cc'],
['include', '^browser/views/tabs/native_view_photobooth_gtk.h'],
['include', '^browser/views/tabs/tab.cc'],
['include', '^browser/views/tabs/tab.h'],
['include', '^browser/views/tabs/tab_renderer.cc'],
['include', '^browser/views/tabs/tab_renderer.h'],
['include', '^browser/views/tabs/tab_strip.cc'],
['include', '^browser/views/tabs/tab_strip.h'],
['include', '^browser/views/toolbar_view.cc'],
['include', '^browser/views/toolbar_view.h'],
['include', '^browser/window_sizer.cc'],
['include', '^browser/window_sizer.h'],
# Exclude all of browser/gtk, then include the things we want.
['exclude', '^browser/gtk'],
['include', '^browser/gtk/autocomplete_edit_gtk.cc'],
['include', '^browser/gtk/autocomplete_edit_gtk.h'],
['include', '^browser/gtk/dialogs_gtk.cc'],
['include', '^browser/gtk/dialogs_gtk.h'],
['include', '^browser/gtk/download_started_animation_gtk.cc'],
['include', '^browser/gtk/download_started_animation_gtk.h'],
['include', '^browser/gtk/focus_store_gtk.cc'],
['include', '^browser/gtk/focus_store_gtk.h'],
['include', '^browser/gtk/hung_renderer_dialog_gtk.cc'],
['include', '^browser/gtk/hung_renderer_dialog_gtk.h'],
['include', '^browser/gtk/options'],
['include', '^browser/gtk/menu_gtk.cc'],
['include', '^browser/gtk/menu_gtk.h'],
['include', '^browser/gtk/sad_tab_gtk.cc'],
['include', '^browser/gtk/sad_tab_gtk.h'],
# More GTK stuff to exclude outside of the browser/gtk directory
['exclude', '^browser/bookmarks/bookmark_context_menu_gtk.cc'],
# Other excluded stuff.
['exclude', '^browser/extensions/external_registry_extension_provider_win.cc'],
['exclude', '^browser/tab_contents/tab_contents_view_gtk.cc'],
['exclude', '^browser/tab_contents/tab_contents_view_gtk.h'],
['exclude', '^browser/tab_contents/render_view_context_menu_gtk.cc'],
['exclude', '^browser/tab_contents/render_view_context_menu_gtk.h'],
],
}],
['linux2==1',{
'sources/': [
['include', 'browser/views/tabs/grid.cc'],
['include', 'browser/views/tabs/grid.h'],
['include', 'browser/views/tabs/tab_overview_cell.cc'],
['include', 'browser/views/tabs/tab_overview_cell.h'],
['include', 'browser/views/tabs/tab_overview_container.cc'],
['include', 'browser/views/tabs/tab_overview_container.h'],
['include', 'browser/views/tabs/tab_overview_controller.cc'],
['include', 'browser/views/tabs/tab_overview_controller.h'],
['include', 'browser/views/tabs/tab_overview_drag_controller.cc'],
['include', 'browser/views/tabs/tab_overview_drag_controller.h'],
['include', 'browser/views/tabs/tab_overview_grid.cc'],
['include', 'browser/views/tabs/tab_overview_grid.h'],
['include', 'browser/views/tabs/tab_overview_message_listener.cc'],
['include', 'browser/views/tabs/tab_overview_message_listener.h'],
['include', 'browser/views/tabs/tab_overview_types.cc'],
['include', 'browser/views/tabs/tab_overview_types.h'],
],
}],
],
# Exclude files that should be excluded for all non-Windows platforms.
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/extensions/extension_shelf.cc',
'browser/extensions/extension_creator.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
],
}],
],
},
{
'target_name': 'debugger',
'type': '<(library)',
'msvs_guid': '57823D8C-A317-4713-9125-2C91FDFD12D6',
'dependencies': [
'chrome_resources',
'theme_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
],
'include_dirs': [
'..',
],
'sources': [
'browser/debugger/resources/debugger.css',
'browser/debugger/resources/debugger.html',
'browser/debugger/resources/debugger.js',
'browser/debugger/resources/debugger_shell.js',
'browser/debugger/debugger_host.h',
'browser/debugger/debugger_host_impl.cpp',
'browser/debugger/debugger_host_impl.h',
'browser/debugger/debugger_io.h',
'browser/debugger/debugger_io_socket.cc',
'browser/debugger/debugger_io_socket.h',
'browser/debugger/debugger_node.cc',
'browser/debugger/debugger_node.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/debugger_shell.cc',
'browser/debugger/debugger_shell.h',
# Currently unused, resurrect when porting to new platforms.
#'browser/debugger/debugger_shell_stubs.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_view.h',
'browser/debugger/debugger_window.cc',
'browser/debugger/debugger_window.h',
'browser/debugger/debugger_wrapper.cc',
'browser/debugger/debugger_wrapper.h',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_window.cc',
'browser/debugger/devtools_window.h',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_window.cc',
],
}],
],
},
{
'target_name': 'plugin',
'type': '<(library)',
'msvs_guid': '20A560A0-2CD0-4D9E-A58B-1F24B99C087A',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under plugins except for tests and
# mocks.
'plugin/chrome_plugin_host.cc',
'plugin/chrome_plugin_host.h',
'plugin/npobject_proxy.cc',
'plugin/npobject_proxy.h',
'plugin/npobject_stub.cc',
'plugin/npobject_stub.h',
'plugin/npobject_util.cc',
'plugin/npobject_util.h',
'plugin/plugin_channel.cc',
'plugin/plugin_channel.h',
'plugin/plugin_channel_base.cc',
'plugin/plugin_channel_base.h',
'plugin/plugin_main.cc',
'plugin/plugin_thread.cc',
'plugin/plugin_thread.h',
'plugin/webplugin_delegate_stub.cc',
'plugin/webplugin_delegate_stub.h',
'plugin/webplugin_proxy.cc',
'plugin/webplugin_proxy.h',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
# These are layered in conditionals in the event other platforms
# end up using this module as well.
'conditions': [
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
],
},
{
'target_name': 'renderer',
'type': '<(library)',
'msvs_guid': '9301A569-5D2B-4D11-9332-B1E30AEACB8D',
'dependencies': [
'common',
'plugin',
'chrome_resources',
'chrome_strings',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
'../webkit/webkit.gyp:webkit',
],
'include_dirs': [
'..',
],
'sources': [
# TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
# But app/ is the wrong directory for them.
# Better is to remove the dep of *_tests on renderer, but in the
# short term I'd like the build to work.
'renderer/automation/dom_automation_controller.cc',
'renderer/automation/dom_automation_controller.h',
'renderer/extensions/bindings_utils.cc',
'renderer/extensions/bindings_utils.h',
'renderer/extensions/event_bindings.cc',
'renderer/extensions/event_bindings.h',
'renderer/extensions/extension_process_bindings.cc',
'renderer/extensions/extension_process_bindings.h',
'renderer/extensions/renderer_extension_bindings.cc',
'renderer/extensions/renderer_extension_bindings.h',
'renderer/loadtimes_extension_bindings.h',
'renderer/loadtimes_extension_bindings.cc',
'renderer/media/audio_renderer_impl.cc',
'renderer/media/audio_renderer_impl.h',
'renderer/media/buffered_data_source.cc',
'renderer/media/buffered_data_source.h',
'renderer/net/render_dns_master.cc',
'renderer/net/render_dns_master.h',
'renderer/net/render_dns_queue.cc',
'renderer/net/render_dns_queue.h',
'renderer/resources/event_bindings.js',
'renderer/resources/extension_process_bindings.js',
'renderer/resources/greasemonkey_api.js',
'renderer/resources/json_schema.js',
'renderer/resources/renderer_extension_bindings.js',
'renderer/about_handler.cc',
'renderer/about_handler.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/debug_message_handler.cc',
'renderer/debug_message_handler.h',
'renderer/devtools_agent.cc',
'renderer/devtools_agent.h',
'renderer/devtools_agent_filter.cc',
'renderer/devtools_agent_filter.h',
'renderer/devtools_client.cc',
'renderer/devtools_client.h',
'renderer/dom_ui_bindings.cc',
'renderer/dom_ui_bindings.h',
'renderer/external_host_bindings.cc',
'renderer/external_host_bindings.h',
'renderer/external_extension.cc',
'renderer/external_extension.h',
'renderer/js_only_v8_extensions.cc',
'renderer/js_only_v8_extensions.h',
'renderer/localized_error.cc',
'renderer/localized_error.h',
'renderer/navigation_state.h',
'renderer/plugin_channel_host.cc',
'renderer/plugin_channel_host.h',
'renderer/print_web_view_helper.cc',
'renderer/print_web_view_helper.h',
'renderer/render_process.cc',
'renderer/render_process.h',
'renderer/render_thread.cc',
'renderer/render_thread.h',
'renderer/render_view.cc',
'renderer/render_view.h',
'renderer/render_widget.cc',
'renderer/render_widget.h',
'renderer/renderer_glue.cc',
'renderer/renderer_histogram_snapshots.cc',
'renderer/renderer_histogram_snapshots.h',
'renderer/renderer_logging.h',
'renderer/renderer_logging_linux.cc',
'renderer/renderer_logging_mac.mm',
'renderer/renderer_logging_win.cc',
'renderer/renderer_main.cc',
'renderer/renderer_main_platform_delegate.h',
'renderer/renderer_main_platform_delegate_linux.cc',
'renderer/renderer_main_platform_delegate_mac.mm',
'renderer/renderer_main_platform_delegate_win.cc',
'renderer/renderer_webkitclient_impl.cc',
'renderer/renderer_webkitclient_impl.h',
'renderer/user_script_slave.cc',
'renderer/user_script_slave.h',
'renderer/visitedlink_slave.cc',
'renderer/visitedlink_slave.h',
'renderer/webplugin_delegate_proxy.cc',
'renderer/webplugin_delegate_proxy.h',
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
'tools/build/win/precompiled_wtl.cc',
'tools/build/win/precompiled_wtl.h',
],
'link_settings': {
'mac_bundle_resources': [
'renderer/renderer.sb',
],
},
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
'conditions': [
# Linux-specific rules.
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
# Windows-specific rules.
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
},],
# As of yet unported-from-Windows code.
['OS!="win"', {
'sources!': [
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
},],
],
},
{
'target_name': 'utility',
'type': '<(library)',
'msvs_guid': '4D2B38E6-65FF-4F97-B88A-E441DF54EBF7',
'dependencies': [
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'utility/utility_main.cc',
'utility/utility_thread.cc',
'utility/utility_thread.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'chrome',
'type': 'executable',
'mac_bundle': 1,
'msvs_guid': '7B219FAA-E360-43C8-B341-804A94EEFFAC',
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/hard_error_handler_win.cc',
'app/hard_error_handler_win.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'dependencies': [
'../support/support.gyp:*',
],
'mac_bundle_resources': [
# put any pdfs down in the sources block below so pdfsqueeze runs on
# them.
'app/nibs/en.lproj/About.xib',
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/ClearBrowsingData.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/FirstRunDialog.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/PageInfo.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/otr_icon.pdf',
'app/theme/pageinfo_bad.png',
'app/theme/pageinfo_good.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'../app/resources/throbber.png',
'app/theme/throbber_waiting.png',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'actions': [
{
'action_name': 'dump_symbols',
'inputs': [
'<(DEPTH)/build/linux/dump_app_syms',
'<(DEPTH)/build/linux/dump_signature.py',
'<(PRODUCT_DIR)/dump_syms',
'<(PRODUCT_DIR)/chrome',
],
'outputs': [
'<(PRODUCT_DIR)/chrome.breakpad',
],
'action': ['<(DEPTH)/build/linux/dump_app_syms',
'<(PRODUCT_DIR)/dump_syms',
'<(PRODUCT_DIR)/chrome', '<@(_outputs)'],
},
],
}],
],
'dependencies': [
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/da.pak',
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
'<(INTERMEDIATE_DIR)/repack/he.pak',
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="linux" and (toolkit_views==1 or linux2==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
'variables': {
'bundle_id': 'com.google.Chrome',
},
}, { # else: branding != "Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {
'bundle_id': 'org.chromium.Chromium',
},
}],
['mac_breakpad==1', {
# Only include breakpad in official builds.
'variables': {
# A real .dSYM is needed for dump_syms to operate on.
'mac_real_dsym': 1,
},
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
'../breakpad/breakpad.gyp:dump_syms',
'../breakpad/breakpad.gyp:symupload',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
],
'postbuilds': [
{
'postbuild_name': 'Dump Symbols',
'action': ['<(DEPTH)/build/mac/dump_app_syms',
'<(branding)'],
},
],
}], # mac_breakpad
['mac_keystone==1', {
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Frameworks/',
'files': ['../third_party/googlemac/Releases/Keystone/KeystoneRegistration.framework'],
},
],
}], # mac_keystone
],
'product_name': '<(mac_product_name)',
'xcode_settings': {
# chrome/app/app-Info.plist has:
# CFBundleIdentifier of CHROMIUM_BUNDLE_ID
# CFBundleName of CHROMIUM_SHORT_NAME
# Xcode then replaces these values with the branded values we set
# as settings on the target.
'CHROMIUM_BUNDLE_ID': '<(bundle_id)',
'CHROMIUM_SHORT_NAME': '<(branding)',
},
# Bring in pdfsqueeze and run it on all pdfs
'dependencies': [
'../build/temp_gyp/pdfsqueeze.gyp:pdfsqueeze',
],
'rules': [
{
'rule_name': 'pdfsqueeze',
'extension': 'pdf',
'inputs': [
'<(PRODUCT_DIR)/pdfsqueeze',
],
'outputs': [
'<(INTERMEDIATE_DIR)/pdfsqueeze/<(RULE_INPUT_ROOT).pdf',
],
'action': ['<(PRODUCT_DIR)/pdfsqueeze', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
'message': 'Running pdfsqueeze on <(RULE_INPUT_PATH)',
},
],
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
}, { # else: branding != "Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names,
# etc.; should we try to extract from there instead?
'product_name': 'chrome'
}],
],
}],
['OS=="mac"', {
# Mac adds an action to modify the Info.plist to meet our needs
# (see the script for why this is done).
'actions': [
{
'action_name': 'tweak_app_infoplist',
# We don't list any inputs or outputs because we always want
# the script to run. Why? Because it does things like record
# the svn revision into the Info.plist, so there is no file to
# depend on that will change whenever that changes.
'inputs': [],
'outputs': [],
'action': ['<(DEPTH)/build/mac/tweak_app_infoplist',
'-b<(mac_breakpad)',
'-k<(mac_keystone)',
'<(branding)'],
},
],
}],
['OS=="mac"', {
# Copy web inspector resources to the Contents/Resources folder.
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(mac_product_name).app/Contents/Resources',
'files': ['<(PRODUCT_DIR)/resources/inspector/'],
},
],
}],
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'dependencies': [
'installer/installer.gyp:installer_util',
],
}],
],
}],
['OS=="win"', {
'dependencies': [
# On Windows, make sure we've built chrome.dll, which
# contains all of the library code with Chromium
# functionality.
'chrome_dll',
'installer/installer.gyp:installer_util',
'installer/installer.gyp:installer_util_strings',
'../breakpad/breakpad.gyp:breakpad_handler',
'../breakpad/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'../views/views.gyp:views',
'worker',
'app/locales/locales.gyp:*',
],
'sources': [
'app/chrome_exe.rc',
'app/chrome_exe_version.rc.version',
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/app',
],
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'dbghelp.dll',
'dwmapi.dll',
'uxtheme.dll',
'ole32.dll',
'oleaut32.dll',
],
'ImportLibrary': '$(OutDir)\\lib\\chrome_exe.lib',
'ProgramDatabaseFile': '$(OutDir)\\chrome_exe.pdb',
# Set /SUBSYSTEM:WINDOWS for chrome.exe itself.
'SubSystem': '2',
},
'VCManifestTool': {
'AdditionalManifestFiles': '$(SolutionDir)\\app\\chrome.exe.manifest',
},
},
'actions': [
{
'action_name': 'version',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
'version_py': 'tools/build/version.py',
'version_path': 'VERSION',
'template_input_path': 'app/chrome_exe_version.rc.version',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(template_input_path)',
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/app/chrome_exe_version.rc',
],
'action': [
'python',
'<(version_py)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'<(template_input_path)',
'<@(_outputs)',
],
'process_outputs_as_sources': 1,
'message': 'Generating version information in <(_outputs)'
},
{
'action_name': 'first_run',
'inputs': [
'app/FirstRun',
],
'outputs': [
'<(PRODUCT_DIR)/First Run',
],
'action': ['copy', '<@(_inputs)', '<@(_outputs)'],
'message': 'Copy first run complete sentinel file',
},
],
},{ # else: OS != "win"
'dependencies': [
# On Linux and Mac, link the dependencies (libraries)
# that make up actual Chromium functionality directly
# into the executable.
'<@(chromium_dependencies)',
],
'sources': [
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
],
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/browser_resources.pak',
'<(grit_out_dir)/debugger_resources.pak',
'<(grit_out_dir)/common_resources.pak',
'<(grit_out_dir)/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/app/app_resources.pak',
'<(grit_out_dir)/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_da',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_da.pak',
'<(grit_out_dir)/locale_settings_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_da.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_da.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_da.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# TODO(port): We can't simply emit the strings file without
# the nibs too, or the app fails to launch in this language.
# Currently, this is only for ui_tests, which won't work on
# the Mac anyway, so temporarily disable until we have the
# full strategy figured out. This goes for he and zh below.
# '<(INTERMEDIATE_DIR)/repack/da.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_en_us',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_en-US.pak',
'<(grit_out_dir)/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_en-US.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_en-US.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_he',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_he.pak',
'<(grit_out_dir)/locale_settings_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_he.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_he.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_he.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# '<(INTERMEDIATE_DIR)/repack/he.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_zh_tw',
'variables': {
'pak_inputs': [
'<(grit_out_dir)/generated_resources_zh-TW.pak',
'<(grit_out_dir)/locale_settings_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_zh-TW.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(grit_out_dir)/google_chrome_strings_zh-TW.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(grit_out_dir)/chromium_strings_zh-TW.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
# '<(INTERMEDIATE_DIR)/repack/zh.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'msvs_guid': '50B079C7-CD01-42D3-B8C4-9F8D9322E822',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
# TODO: these should live here but are currently used by
# production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_keyboard.cc',
'renderer/mock_keyboard.h',
'renderer/mock_keyboard_driver_win.cc',
'renderer/mock_keyboard_driver_win.h',
'renderer/mock_printer.cc',
'renderer/mock_printer.h',
'renderer/mock_printer_driver_win.cc',
'renderer/mock_printer_driver_win.h',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/automated_ui_tests/automated_ui_test_base.cc',
'test/automated_ui_tests/automated_ui_test_base.h',
'test/testing_browser_process.h',
'test/ui/javascript_test_util.cc',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'msvs_guid': 'B92AE829-E1CD-4781-824A-DCB1603A1672',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'msvs_guid': '76235B67-1C27-4627-8A33-4B2E1EF93EDE',
'dependencies': [
'chrome',
'browser',
'debugger',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/extensions/extension_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/media_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/automated_ui_tests/automated_ui_test_test.cc',
'test/chrome_process_util_uitest.cc',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
'test/ui/sunspider_uitest.cc',
'test/ui/v8_benchmark_uitest.cc',
'worker/worker_uitest.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
# leaves an extra window on screen after test completes.
'browser/sessions/session_restore_uitest.cc',
# hangs indefinitely but doesn't crash.
'browser/tab_restore_uitest.cc',
# puts up modal dialogs.
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'test_support_common',
'../google_update/google_update.gyp:google_update',
'../views/views.gyp:views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/extensions/extension_uitest.cc',
'browser/media_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
'worker/worker_uitest.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'msvs_guid': 'ECFC2BEC-9FC0-4AD9-9649-5F26793F65FC',
'msvs_existing_vcproj': 'test/unit/unittests.vcproj',
'dependencies': [
'chrome',
'browser',
'chrome_resources',
'chrome_strings',
'common',
'debugger',
'renderer',
'test_support_unit',
'utility',
'../app/app.gyp:app_resources',
'../net/net.gyp:net_resources',
'../net/net.gyp:net_test_support',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../webkit/webkit.gyp:webkit_resources',
'../skia/skia.gyp:skia',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# *NO* files in chrome/app have unit tests (except keystone_glue)!!!
# It seems a waste to have an app_unittests target, so for now
# I add keystone_glue.m explicitly to this target.
'app/keystone_glue.m',
'app/keystone_glue_unittest.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/autocomplete_popup_view_mac_unittest.mm',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/autocomplete/search_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/blocked_popup_container_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_index_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_theme_provider_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/debugger/devtools_sanity_unittest.cc',
'browser/child_process_security_policy_unittest.cc',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/about_window_controller_unittest.mm',
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/background_gradient_view_unittest.mm',
'browser/cocoa/bookmark_bar_bridge_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_bar_view_unittest.mm',
'browser/cocoa/bookmark_button_cell_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/browser_window_controller_unittest.mm',
'browser/cocoa/cocoa_utils_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/custom_home_pages_model_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_cell_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/location_bar_fieldeditor_unittest.mm',
'browser/cocoa/gradient_button_cell_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/search_engine_list_model_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/throbber_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_theme_source_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/encoding_menu_controller_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.cc',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/find_backend_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/google_update_settings_linux_unittest.cc',
'browser/google_update_settings_mac_unittest.mm',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/thumbnail_store_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/net/chrome_url_request_context_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/test_url_fetcher_factory.cc',
'browser/net/test_url_fetcher_factory.h',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/login_database_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/password_manager/password_store_mac_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_blocking_page_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_scraper_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/ssl/ssl_host_state_unittest.cc',
'browser/tab_contents/navigation_controller_unittest.cc',
'browser/tab_contents/navigation_entry_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/site_instance_unittest.cc',
'browser/tab_contents/thumbnail_generator_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/extension_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/gfx/emf_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'common/zip_unittest.cc',
'renderer/audio_message_filter_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/file_test_utils.h',
'test/file_test_utils.cc',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
['OS=="mac"', {
          # The test fetches resources, which means the Mac build needs the
          # app bundle to exist on disk so it can pull from it.
'dependencies': [
'chrome',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/tab_contents/navigation_controller_unittest.cc',
'browser/task_manager_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'third_party/hunspell/google/hunspell_tests.cc',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}, { # OS != "mac"
'dependencies': [
'convert_dict_lib',
'third_party/hunspell/hunspell.gyp:hunspell',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'dependencies': [
'chrome_dll_version',
'installer/installer.gyp:installer_util_strings',
'../views/views.gyp:views',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources': [
'app/chrome_dll.rc',
'test/data/resource.rc',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'link_settings': {
'libraries': [
'-loleacc.lib',
'-lcomsupp.lib',
],
},
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
# Need to port browser_with_test_window_test.* first
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
# Need to port browser/automation/ui_controls.h
'browser/debugger/devtools_sanity_unittest.cc',
'browser/extensions/extension_process_manager_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/safe_browsing_blocking_page_unittest.cc',
'browser/search_engines/template_url_scraper_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'common/net/url_util_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'msvs_guid': 'D3E6C0FD-54C7-4FF2-9AE1-72F2DAFD820C',
'dependencies': [
'chrome',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="linux" and toolkit_views==1', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'msvs_guid': 'C9E0BD1D-B175-4A91-8380-3FDC81FAB9D7',
'dependencies': [
'chrome',
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win" or (OS=="linux" and toolkit_views==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
},
{
'target_name': 'tab_switching_test',
'type': 'executable',
'msvs_guid': 'A34770EA-A574-43E8-9327-F79C04770E98',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/tab_switching/tab_switching_test.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'memory_test',
'type': 'executable',
'msvs_guid': 'A5F831FD-9B9C-4FEF-9FBA-554817B734CE',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/memory_test/memory_test.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'url_fetch_test',
'type': 'executable',
'msvs_guid': '7EFD0C91-198E-4043-9E71-4A4C7879B929',
'dependencies': [
'chrome',
'test_support_common',
'test_support_ui',
'theme_resources',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/url_fetch_test/url_fetch_test.cc',
],
'conditions': [
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources': [
'tools/build/win/precompiled_wtl.cc',
'tools/build/win/precompiled_wtl.h',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
        }], # OS=="win"
], # conditions
},
],
'conditions': [
# We set feature variables so the different parts that need to check for
# the mac build use of breakpad/keystone, check that flag instead of coding
# it based on branding.
# We need the Mac app name on disk, so we stick this into a variable so
# the different places that need it can use the common variable.
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names, etc.;
    # but extracting from there still means the xcodeproject is out of date
    # until the next project regeneration.
['OS=="mac" and branding=="Chrome"', {
'variables': {
'mac_breakpad%': 1,
'mac_keystone%': 1,
'mac_product_name%': 'Google Chrome',
}
}, {
'variables': {
'mac_breakpad%': 0,
'mac_keystone%': 0,
'mac_product_name%': 'Chromium',
}
}],
['OS=="linux"', {
'conditions': [
['branding=="Chrome"', {
'variables': {
'linux_breakpad%': 1,
},
}, {
'variables': {
'linux_breakpad%': 0,
},
}],
],
}],
['OS=="mac"',
# On Mac only, add a project target called "build_app_dmg" that only
# builds a DMG out of the App (eventually will completely replace
# "package_app").
{ 'targets': [
{
'target_name': 'build_app_dmg',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'chrome',
],
'variables': {
'build_app_dmg_script_path': '<(DEPTH)/build/mac/build_app_dmg',
},
'actions': [
{
'inputs': [
'<(build_app_dmg_script_path)',
'<(PRODUCT_DIR)/<(branding).app',
],
'outputs': [
'<(PRODUCT_DIR)/<(branding).dmg',
],
'action_name': 'build_app_dmg',
'action': ['<(build_app_dmg_script_path)', '<@(branding)'],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'msvs_guid': '42ECD5EC-722F-41DE-B6B8-83764C8016DF',
'dependencies': [
'../base/base.gyp:base',
'convert_dict_lib',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/convert_dict.cc',
],
},
{
'target_name': 'convert_dict_lib',
'product_name': 'convert_dict',
'type': 'static_library',
'msvs_guid': '1F669F6B-3F4A-4308-E496-EE480BDF0B89',
'include_dirs': [
'..',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'msvs_guid': '4539AFB3-B8DC-47F3-A491-6DAC8FD26657',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
{
'target_name': 'perf_tests',
'type': 'executable',
'msvs_guid': '9055E088-25C6-47FD-87D5-D9DD9FD75C9F',
'dependencies': [
'browser',
'common',
'debugger',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/safe_browsing/database_perftest.cc',
'browser/safe_browsing/filter_false_positive_perftest.cc',
'browser/visitedlink_perftest.cc',
'common/json_value_serializer_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/safe_browsing/filter_false_positive_perftest.cc',
'browser/visitedlink_perftest.cc',
],
}],
['OS=="win" or (OS=="linux" and toolkit_views==1)', {
'dependencies': [
'../views/views.gyp:views',
],
}],
],
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
],
}], # OS!="mac"
['OS!="win"',
{ 'targets': [
{
# Executable that runs each browser test in a new process.
'target_name': 'browser_tests',
'type': 'executable',
'dependencies': [
'browser',
'chrome',
'chrome_resources',
'debugger',
'test_support_common',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/browser/run_all_unittests.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/browser/browser_test_launcher_out_of_proc.cc',
'test/browser/browser_test_runner.cc',
'test/browser/browser_test_runner.h',
'test/unit/chrome_test_suite.h',
'test/ui_test_utils.cc',
# browser_tests_sources is defined in 'variables' at the top of the
# file.
'<@(browser_tests_sources)',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="mac"', {
              # The test fetches resources, which means the Mac build needs the
              # app bundle to exist on disk so it can pull from it.
'dependencies': [
'chrome',
],
'sources': [
'app/breakpad_mac_stubs.mm',
'app/keystone_glue.h',
'app/keystone_glue.m',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
],
},
]
}],
['OS=="win"',
{ 'targets': [
{
# TODO(sgk): remove this when we change the buildbots to
# use the generated build\all.sln file to build the world.
'target_name': 'pull_in_all',
'type': 'none',
'dependencies': [
'installer/installer.gyp:*',
'../app/app.gyp:*',
'../base/base.gyp:*',
'../media/media.gyp:*',
'../net/net.gyp:*',
'../printing/printing.gyp:*',
'../rlz/rlz.gyp:*',
'../sdch/sdch.gyp:*',
'../skia/skia.gyp:*',
'../testing/gmock.gyp:*',
'../testing/gtest.gyp:*',
'../third_party/bsdiff/bsdiff.gyp:*',
'../third_party/bspatch/bspatch.gyp:*',
'../third_party/bzip2/bzip2.gyp:*',
'../third_party/cld/cld.gyp:cld',
'../third_party/codesighs/codesighs.gyp:*',
'../third_party/ffmpeg/ffmpeg.gyp:*',
'../third_party/icu38/icu38.gyp:*',
'../third_party/libjpeg/libjpeg.gyp:*',
'../third_party/libpng/libpng.gyp:*',
'../third_party/libxml/libxml.gyp:*',
'../third_party/libxslt/libxslt.gyp:*',
'../third_party/lzma_sdk/lzma_sdk.gyp:*',
'../third_party/modp_b64/modp_b64.gyp:*',
'../third_party/npapi/npapi.gyp:*',
'../third_party/sqlite/sqlite.gyp:*',
'../third_party/tcmalloc/tcmalloc.gyp:*',
'../third_party/zlib/zlib.gyp:*',
'../webkit/tools/test_shell/test_shell.gyp:*',
'../webkit/webkit.gyp:*',
'../build/temp_gyp/googleurl.gyp:*',
'../breakpad/breakpad.gyp:*',
'../courgette/courgette.gyp:*',
'../gears/gears.gyp:*',
'../rlz/rlz.gyp:*',
'../sandbox/sandbox.gyp:*',
'../tools/memory_watcher/memory_watcher.gyp:*',
'../webkit/activex_shim/activex_shim.gyp:*',
'../webkit/activex_shim_dll/activex_shim_dll.gyp:*',
'../v8/tools/gyp/v8.gyp:v8_shell',
],
},
{
'target_name': 'chrome_dll',
'type': 'shared_library',
'product_name': 'chrome',
'msvs_guid': 'C0A7EE2C-2A6D-45BE-BA78-6D006FDF52D9',
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
# On Windows, link the dependencies (libraries) that make
# up actual Chromium functionality into this .dll.
'<@(chromium_dependencies)',
'chrome_dll_version',
'chrome_resources',
'installer/installer.gyp:installer_util_strings',
# TODO(sgk): causes problems because theme_dll doesn't
# actually generate default.lib, but now expects it.
#'theme_dll',
'worker',
'../net/net.gyp:net_resources',
'../support/support.gyp:*',
'../third_party/cld/cld.gyp:cld',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/tcmalloc/tcmalloc.gyp:tcmalloc',
'../views/views.gyp:views',
'../webkit/webkit.gyp:webkit_resources',
'../gears/gears.gyp:gears',
],
'defines': [
'CHROME_DLL',
'BROWSER_DLL',
'RENDERER_DLL',
'PLUGIN_DLL',
],
'sources': [
'app/chrome_dll.rc',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
'../webkit/glue/resources/aliasb.cur',
'../webkit/glue/resources/cell.cur',
'../webkit/glue/resources/col_resize.cur',
'../webkit/glue/resources/copy.cur',
'../webkit/glue/resources/row_resize.cur',
'../webkit/glue/resources/vertical_text.cur',
'../webkit/glue/resources/zoom_in.cur',
'../webkit/glue/resources/zoom_out.cur',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
# TODO(sgk): left-over from pre-gyp build, figure out
# if we still need them and/or how to update to gyp.
#'app/check_dependents.bat',
#'app/chrome.dll.deps',
],
'msvs_settings': {
'VCLinkerTool': {
'BaseAddress': '0x01c30000',
'DelayLoadDLLs': [
'crypt32.dll',
'cryptui.dll',
'winhttp.dll',
'wininet.dll',
'wsock32.dll',
'ws2_32.dll',
'winspool.drv',
'comdlg32.dll',
'imagehlp.dll',
'psapi.dll',
'urlmon.dll',
'imm32.dll',
],
'ImportLibrary': '$(OutDir)\\lib\\chrome_dll.lib',
'ProgramDatabaseFile': '$(OutDir)\\chrome_dll.pdb',
# Set /SUBSYSTEM:WINDOWS for chrome.dll (for consistency).
'SubSystem': '2',
},
'VCManifestTool': {
'AdditionalManifestFiles': '$(SolutionDir)\\app\\chrome.dll.manifest',
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'chrome_dll_version',
'type': 'none',
#'msvs_guid': '414D4D24-5D65-498B-A33F-3A29AD3CDEDC',
'dependencies': [
'../build/util/build_util.gyp:lastchange',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version',
],
},
'actions': [
{
'action_name': 'version',
'variables': {
'lastchange_path':
'<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
'version_py': 'tools/build/version.py',
'version_path': 'VERSION',
'template_input_path': 'app/chrome_dll_version.rc.version',
},
'conditions': [
[ 'branding == "Chrome"', {
'variables': {
'branding_path': 'app/theme/google_chrome/BRANDING',
},
}, { # else branding!="Chrome"
'variables': {
'branding_path': 'app/theme/chromium/BRANDING',
},
}],
],
'inputs': [
'<(template_input_path)',
'<(version_path)',
'<(branding_path)',
'<(lastchange_path)',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
],
'action': [
'python',
'<(version_py)',
'-f', '<(version_path)',
'-f', '<(branding_path)',
'-f', '<(lastchange_path)',
'<(template_input_path)',
'<@(_outputs)',
],
'message': 'Generating version information in <(_outputs)'
},
],
},
{
'target_name': 'activex_test_control',
'type': 'shared_library',
'msvs_guid': '414D4D24-5D65-498B-A33F-3A29AD3CDEDC',
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'link_settings': {
'libraries': [
'-lcomsuppw.lib',
],
},
'sources': [
'test/activex_test_control/activex_test_control.cc',
'test/activex_test_control/activex_test_control.def',
'test/activex_test_control/activex_test_control.idl',
'test/activex_test_control/activex_test_control.rc',
'test/activex_test_control/activex_test_control.rgs',
'test/activex_test_control/chrome_test_control.bmp',
'test/activex_test_control/chrome_test_control.cc',
'test/activex_test_control/chrome_test_control.h',
'test/activex_test_control/chrome_test_control.rgs',
'test/activex_test_control/chrome_test_control_cp.h',
'test/activex_test_control/resource.h',
],
},
{
'target_name': 'automated_ui_tests',
'type': 'executable',
'msvs_guid': 'D2250C20-3A94-4FB9-AF73-11BC5B73884B',
'dependencies': [
'browser',
'renderer',
'test_support_common',
'test_support_ui',
'theme_resources',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../third_party/libxml/libxml.gyp:libxml',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/automated_ui_tests/automated_ui_tests.cc',
'test/automated_ui_tests/automated_ui_tests.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'automation',
'type': '<(library)',
'msvs_guid': '1556EF78-C7E6-43C8-951F-F6B43AC0DD12',
'dependencies': [
'theme_resources',
'../skia/skia.gyp:skia',
],
'include_dirs': [
'..',
],
'sources': [
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_messages.h',
'test/automation/automation_messages_internal.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
],
},
{
# Shared library used by the in-proc browser tests.
'target_name': 'browser_tests_dll',
'type': 'shared_library',
'product_name': 'browser_tests',
'msvs_guid': 'D7589D0D-304E-4589-85A4-153B7D84B07F',
'dependencies': [
'chrome',
'browser',
'chrome_dll_version',
'chrome_resources',
'installer/installer.gyp:installer_util_strings',
'debugger',
'renderer',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
'sources': [
'test/browser/run_all_unittests.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/unit/chrome_test_suite.h',
'test/ui_test_utils.cc',
'app/chrome_dll.rc',
'app/chrome_dll_resource.h',
'app/chrome_dll_version.rc.version',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome_dll_version/chrome_dll_version.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
            # browser_tests_sources and browser_tests_sources_win_specific are
            # defined in 'variables' at the top of the file.
'<@(browser_tests_sources)',
'<@(browser_tests_sources_win_specific)',
],
},
{
# Executable that runs the browser tests in-process.
'target_name': 'browser_tests',
'type': 'executable',
'msvs_guid': '9B87804D-2502-480B-95AE-5A572CE91809',
'dependencies': [
'browser_tests_dll',
'../base/base.gyp:base',
],
'include_dirs': [
'..',
],
'sources': [
'test/browser/browser_test_launcher_in_proc.cc',
'test/browser/browser_test_runner.cc',
'test/browser/browser_test_runner.h',
],
'msvs_settings': {
'VCLinkerTool': {
# Use a PDB name different than the one for the DLL.
'ProgramDatabaseFile': '$(OutDir)\\browser_tests_exe.pdb',
},
},
},
{
'target_name': 'crash_service',
'type': 'executable',
'msvs_guid': '89C1C190-A5D1-4EC4-BD6A-67FF2195C7CC',
'dependencies': [
'common',
'../base/base.gyp:base',
'../breakpad/breakpad.gyp:breakpad_handler',
'../breakpad/breakpad.gyp:breakpad_sender',
],
'include_dirs': [
'..',
],
'sources': [
'tools/crash_service/crash_service.cc',
'tools/crash_service/crash_service.h',
'tools/crash_service/main.cc',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
},
},
},
{
'target_name': 'generate_profile',
'type': 'executable',
'msvs_guid': '2E969AE9-7B12-4EDB-8E8B-48C7AE7BE357',
'dependencies': [
'browser',
'debugger',
'renderer',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
],
'include_dirs': [
'..',
],
'sources': [
'tools/profiles/generate_profile.cc',
'tools/profiles/thumbnail-inl.h',
],
},
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'msvs_guid': '018D4F38-6272-448F-A864-976DA09F05D0',
'dependencies': [
'chrome_dll_version',
'chrome_resources',
'chrome_strings',
'debugger',
'installer/installer.gyp:installer_util_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
'../views/views.gyp:views',
'../webkit/webkit.gyp:webkit_resources',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/debugger/devtools_sanity_unittest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/blocked_popup_container_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
# Windows-only below here, will need addressing if/when
# this gets ported.
'../webkit/glue/resources/aliasb.cur',
'../webkit/glue/resources/cell.cur',
'../webkit/glue/resources/col_resize.cur',
'../webkit/glue/resources/copy.cur',
'../webkit/glue/resources/row_resize.cur',
'../webkit/glue/resources/vertical_text.cur',
'../webkit/glue/resources/zoom_in.cur',
'../webkit/glue/resources/zoom_out.cur',
'app/chrome_dll.rc',
'test/data/resource.rc',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.rc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'msvs_guid': 'A1CAA831-C507-4B2E-87F3-AEC63C9907F9',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'reliability_tests',
'type': 'executable',
'msvs_guid': '8A3E1774-1DE9-445C-982D-3EE37C8A752A',
'dependencies': [
'test_support_common',
'test_support_ui',
'theme_resources',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/reliability/reliability_test_suite.h',
'test/reliability/run_all_unittests.cc',
],
},
{
'target_name': 'security_tests',
'type': 'shared_library',
'msvs_guid': 'E750512D-FC7C-4C98-BF04-0A0DAF882055',
'include_dirs': [
'..',
],
'sources': [
'test/injection_test_dll.h',
'test/security_tests/ipc_security_tests.cc',
'test/security_tests/ipc_security_tests.h',
'test/security_tests/security_tests.cc',
'../sandbox/tests/validation_tests/commands.cc',
'../sandbox/tests/validation_tests/commands.h',
],
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'msvs_guid': 'E3749617-BA3D-4230-B54C-B758E56D9FA5',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'test_chrome_plugin',
'type': 'shared_library',
'msvs_guid': '7F0A70F6-BE3F-4C19-B435-956AB8F30BA4',
'dependencies': [
'../base/base.gyp:base',
'../build/temp_gyp/googleurl.gyp:googleurl',
],
'include_dirs': [
'..',
],
'link_settings': {
'libraries': [
'-lwinmm.lib',
],
},
'sources': [
'test/chrome_plugin/test_chrome_plugin.cc',
'test/chrome_plugin/test_chrome_plugin.def',
'test/chrome_plugin/test_chrome_plugin.h',
],
},
{
'target_name': 'theme_dll',
'type': 'loadable_module',
'msvs_guid': 'FD683DD6-D9BF-4B1B-AB6D-A3AC03EDAA4D',
'product_name': 'default',
'dependencies': [
'theme_resources',
'../app/app.gyp:app_resources',
],
'sources': [
'<(grit_out_dir)/theme_resources.rc',
'<(SHARED_INTERMEDIATE_DIR)/app/app_resources.rc',
],
'msvs_settings': {
'VCLinkerTool': {
'BaseAddress': '0x3CE00000',
'OutputFile': '<(PRODUCT_DIR)/themes/default.dll',
'ResourceOnlyDLL': 'true',
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '1', # /INCREMENTAL:NO
},
},
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'msvs_guid': 'C78D02D0-A366-4EC6-A248-AA8E64C4BA18',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
# If you add new tests here you may need to update the croc configs.
# E.g. build/{linux|mac}/chrome_linux.croc
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
'unit_tests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
              # Use outputs of this action as inputs for the main target build.
              # The name seems like a misnomer, but it keeps the Linux (scons)
              # build happy.
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|x)\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(win)\\.cc$'],
['include', '/win/'],
['include', '/win_[^/]*\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)', 'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_en-US.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)', 'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
# TODO(beng): rename to 'app' when moves to top level.
'target_name': 'app_base',
'type': '<(library)',
'msvs_guid': '4631946D-7D5F-44BD-A5A8-504C0A7033BE',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under app/ except for tests.
'../app/animation.cc',
'../app/animation.h',
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/chrome_canvas.cc',
'../app/gfx/chrome_canvas.h',
'../app/gfx/chrome_canvas_linux.cc',
'../app/gfx/chrome_canvas_win.cc',
'../app/gfx/chrome_font.h',
'../app/gfx/chrome_font_gtk.cc',
'../app/gfx/chrome_font_mac.mm',
'../app/gfx/chrome_font_skia.cc',
'../app/gfx/chrome_font_win.cc',
'../app/gfx/favicon_size.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/gfx/insets.h',
'../app/gfx/path_gtk.cc',
'../app/gfx/path_win.cc',
'../app/gfx/path.h',
'../app/l10n_util.cc',
'../app/l10n_util.h',
'../app/l10n_util_posix.cc',
'../app/l10n_util_win.cc',
'../app/l10n_util_win.h',
'../app/message_box_flags.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
'../app/resource_bundle.cc',
'../app/resource_bundle.h',
'../app/resource_bundle_win.cc',
'../app/resource_bundle_linux.cc',
'../app/resource_bundle_mac.mm',
'../app/slide_animation.cc',
'../app/slide_animation.h',
'../app/throb_animation.cc',
'../app/throb_animation.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['OS=="linux"', {
'dependencies': [
# chrome_font_gtk.cc uses fontconfig.
# TODO(evanm): I think this is wrong; it should just use GTK.
'../build/linux/system.gyp:fontconfig',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'variables': {
'grit_path': '../tools/grit/grit.py',
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': ['python', '<(grit_path)', '-i', '<(input_path)', 'build', '-o', '<(grit_out_dir)'],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
'target_name': 'common',
'type': '<(library)',
'dependencies': [
'app_base',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/color_utils.cc',
'common/gfx/color_utils.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/text_elider.cc',
'common/gfx/text_elider.h',
'common/gfx/utils.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/accessibility_types.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/unzip.cc', # Requires zlib directly.
'common/unzip.h',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/win_util.cc',
'common/win_util.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'app_base',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/automation_constrained_window_tracker.h',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_win.cc',
'browser/back_forward_menu_model_win.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_win.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_menu_controller_gtk.cc',
'browser/bookmarks/bookmark_menu_controller_gtk.h',
'browser/bookmarks/bookmark_menu_controller_win.cc',
'browser/bookmarks/bookmark_menu_controller_win.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.m',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/debugger/debugger_host.h',
'browser/debugger/debugger_host_impl.cpp',
'browser/debugger/debugger_host_impl.h',
'browser/debugger/debugger_io.h',
'browser/debugger/debugger_io_socket.cc',
'browser/debugger/debugger_io_socket.h',
'browser/debugger/debugger_node.cc',
'browser/debugger/debugger_node.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/debugger_shell.cc',
'browser/debugger/debugger_shell.h',
'browser/debugger/debugger_shell_stubs.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_view.h',
'browser/debugger/debugger_window.cc',
'browser/debugger/debugger_window.h',
'browser/debugger/debugger_wrapper.cc',
'browser/debugger/debugger_wrapper.h',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_mac.cc',
'browser/debugger/devtools_window_win.cc',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/drag_utils.cc',
'browser/drag_utils.h',
'browser/encoding_menu_controller_delegate.cc',
'browser/encoding_menu_controller_delegate.h',
'browser/extensions/extension.cc',
'browser/extensions/extension.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_error_reporter.cc',
'browser/extensions/extension_error_reporter.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/link_button_gtk.cc',
'browser/gtk/link_button_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.cc',
'browser/icon_loader.h',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_form_manager_win.cc',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.cc',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/media_resource_handler.cc',
'browser/renderer_host/media_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/renderer_security_policy.cc',
'browser/renderer_host/renderer_security_policy.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_context_menu_win.cc',
'browser/tab_contents/render_view_context_menu_win.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_contents_view_win.cc',
'browser/tab_contents/tab_contents_view_win.h',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container.cc',
'browser/views/blocked_popup_container.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation.cc',
'browser/views/download_started_animation.h',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.cc',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/hwnd_html_view.cc',
'browser/views/hwnd_html_view.h',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window.cc',
'browser/views/page_info_window.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/standard_layout.h',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_contents_container_view.cc',
'browser/views/tab_contents_container_view.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/hwnd_photobooth.cc',
'browser/views/tabs/hwnd_photobooth.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
# Windows-specific files.
'browser/download/download_exe.cc',
'browser/download/download_util.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/debugger/debugger_shell_stubs.cc',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/util/util.gyp:installer_util',
'../printing/printing.gyp:printing',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
'browser/history/history_publisher_none.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}, { # OS!="win"
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_window.cc',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_win.cc',
'browser/dock_info.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/drag_utils.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/first_run.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/icon_loader.cc',
'browser/icon_manager.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
'browser/task_manager.cc',
'browser/window_sizer.cc',
],
}],
],
},
{
'target_name': 'plugin',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'<(INTERMEDIATE_DIR)',
],
'sources': [
        # All .cc, .h, .m, and .mm files under plugin/ except for tests and
        # mocks.
'plugin/chrome_plugin_host.cc',
'plugin/chrome_plugin_host.h',
'plugin/npobject_proxy.cc',
'plugin/npobject_proxy.h',
'plugin/npobject_stub.cc',
'plugin/npobject_stub.h',
'plugin/npobject_util.cc',
'plugin/npobject_util.h',
'plugin/plugin_channel.cc',
'plugin/plugin_channel.h',
'plugin/plugin_channel_base.cc',
'plugin/plugin_channel_base.h',
'plugin/plugin_main.cc',
'plugin/plugin_thread.cc',
'plugin/plugin_thread.h',
'plugin/webplugin_delegate_stub.cc',
'plugin/webplugin_delegate_stub.h',
'plugin/webplugin_proxy.cc',
'plugin/webplugin_proxy.h',
],
# These are layered in conditionals in the event other platforms
# end up using this module as well.
'conditions': [
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
],
},
{
'target_name': 'renderer',
'type': '<(library)',
'dependencies': [
'common',
'plugin',
'chrome_resources',
'chrome_strings',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
'../webkit/webkit.gyp:webkit',
],
'include_dirs': [
'..',
],
'sources': [
# TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
# But app/ is the wrong directory for them.
# Better is to remove the dep of *_tests on renderer, but in the
# short term I'd like the build to work.
'renderer/automation/dom_automation_controller.cc',
'renderer/automation/dom_automation_controller.h',
'renderer/extensions/bindings_utils.cc',
'renderer/extensions/bindings_utils.h',
'renderer/extensions/event_bindings.cc',
'renderer/extensions/event_bindings.h',
'renderer/extensions/extension_process_bindings.cc',
'renderer/extensions/extension_process_bindings.h',
'renderer/extensions/renderer_extension_bindings.cc',
'renderer/extensions/renderer_extension_bindings.h',
'renderer/loadtimes_extension_bindings.h',
'renderer/loadtimes_extension_bindings.cc',
'renderer/media/audio_renderer_impl.cc',
'renderer/media/audio_renderer_impl.h',
'renderer/media/buffered_data_source.cc',
'renderer/media/buffered_data_source.h',
'renderer/media/simple_data_source.cc',
'renderer/media/simple_data_source.h',
'renderer/media/video_renderer_impl.cc',
'renderer/media/video_renderer_impl.h',
'renderer/net/render_dns_master.cc',
'renderer/net/render_dns_master.h',
'renderer/net/render_dns_queue.cc',
'renderer/net/render_dns_queue.h',
'renderer/about_handler.cc',
'renderer/about_handler.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/debug_message_handler.cc',
'renderer/debug_message_handler.h',
'renderer/devtools_agent.cc',
'renderer/devtools_agent.h',
'renderer/devtools_agent_filter.cc',
'renderer/devtools_agent_filter.h',
'renderer/devtools_client.cc',
'renderer/devtools_client.h',
'renderer/dom_ui_bindings.cc',
'renderer/dom_ui_bindings.h',
'renderer/external_host_bindings.cc',
'renderer/external_host_bindings.h',
'renderer/external_extension.cc',
'renderer/external_extension.h',
'renderer/js_only_v8_extensions.cc',
'renderer/js_only_v8_extensions.h',
'renderer/localized_error.cc',
'renderer/localized_error.h',
'renderer/plugin_channel_host.cc',
'renderer/plugin_channel_host.h',
'renderer/render_process.cc',
'renderer/render_process.h',
'renderer/render_thread.cc',
'renderer/render_thread.h',
'renderer/render_view.cc',
'renderer/render_view.h',
'renderer/render_widget.cc',
'renderer/render_widget.h',
'renderer/renderer_glue.cc',
'renderer/renderer_histogram_snapshots.cc',
'renderer/renderer_histogram_snapshots.h',
'renderer/renderer_logging.h',
'renderer/renderer_logging_linux.cc',
'renderer/renderer_logging_mac.mm',
'renderer/renderer_logging_win.cc',
'renderer/renderer_main.cc',
'renderer/renderer_main_platform_delegate.h',
'renderer/renderer_main_platform_delegate_linux.cc',
'renderer/renderer_main_platform_delegate_mac.mm',
'renderer/renderer_main_platform_delegate_win.cc',
'renderer/renderer_webkitclient_impl.cc',
'renderer/renderer_webkitclient_impl.h',
'renderer/user_script_slave.cc',
'renderer/user_script_slave.h',
'renderer/visitedlink_slave.cc',
'renderer/visitedlink_slave.h',
'renderer/webmediaplayer_impl.cc',
'renderer/webmediaplayer_impl.h',
'renderer/webplugin_delegate_proxy.cc',
'renderer/webplugin_delegate_proxy.h',
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
'link_settings': {
'mac_bundle_resources': [
'renderer/renderer.sb',
],
},
'conditions': [
# Linux-specific rules.
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
# Windows-specific rules.
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
},],
# As of yet unported-from-Windows code.
['OS!="win"', {
'sources!': [
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
},],
],
},
{
'target_name': 'app',
'type': 'executable',
'mac_bundle': 1,
'dependencies': [
'common',
'browser',
'renderer',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'mac_bundle_resources': [
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/en-US.pak'],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
'product_name': '<(branding)',
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
# "bundle_id" is the name of the variable used to replace
# BUNDLE_ID in Info.plist.
'variables': {'bundle_id': 'com.google.Chrome'},
# Only include breakpad in official builds.
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(branding).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
]
}, { # else: branding!="Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {'bundle_id': 'org.chromium.Chromium'},
}],
],
'xcode_settings': {
# chrome/app/app-Info.plist has a CFBundleIdentifier of BUNDLE_ID,
# to be replaced by a properly branded bundle ID in Xcode with
# these settings.
'INFOPLIST_PREPROCESS': 'YES',
'INFOPLIST_PREPROCESSOR_DEFINITIONS': ['BUNDLE_ID="<(bundle_id)"'],
},
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
            }, {  # else: branding!="Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
'product_name': 'chrome'
}],
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'views',
'../build/temp_gyp/breakpad.gyp:breakpad_handler',
'../build/temp_gyp/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'worker',
],
        },{ # else: OS != "win"
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded en-US.
'action_name': 'repack_locale',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
        # TODO: these should live here but are currently used by
        # production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/constrained_window_proxy.cc',
'test/automation/constrained_window_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/testing_browser_process.h',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/chrome_process_util_uitest.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/inspector_controller_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/perf/mem_usage.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'test_support_unit',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.h',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extension_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/navigation_controller_unittest.cc',
'browser/navigation_entry_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/renderer_security_policy_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/site_instance_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'../app/animation_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'../app/gfx/chrome_font_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/gfx/text_elider_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'../app/l10n_util_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/unzip_unittest.cc',
'common/win_util_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'views/controls/label_unittest.cc',
'views/controls/table/table_view_unittest.cc',
'views/controls/tree/tree_node_iterator_unittest.cc',
'views/focus/focus_manager_unittest.cc',
'views/grid_layout_unittest.cc',
'views/view_unittest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="mac"', {
# The test fetches resources which means Mac need the app bundle to
# exist on disk so it can pull from it.
'dependencies': [
'app',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/navigation_controller_unittest.cc',
'renderer/render_view_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'dependencies': [
'views',
],
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/net/url_util_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'views/controls/label_unittest.cc',
'views/controls/table/table_view_unittest.cc',
'views/focus/focus_manager_unittest.cc',
'views/grid_layout_unittest.cc',
'views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/startup/feature_startup_test.cc',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
['OS!="win"', {
'sources!': [
'test/perf/mem_usage.cc',
],
}],
],
},
],
'conditions': [
['OS=="linux"', {
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/convert_dict.cc',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
],
}],
['OS=="mac"',
# On Mac only, add a project target called "package_app" that only
# runs a shell script (package_chrome.sh).
{ 'targets': [
{
'target_name': 'package_app',
# do NOT place this in the 'all' list; most won't want it.
        # In gyp, booleans are 0/1, not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
],
'actions': [
{
'inputs': [],
'outputs': [],
'action_name': 'package_chrome',
'action': ['tools/mac/package_chrome.sh' ],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'perf_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/visitedlink_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/visitedlink_perftest.cc',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
],
},
],
}], # OS!="mac"
['OS=="win" or OS=="linux"',
{ 'targets': [
{
'target_name': 'views',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'sources': [
# All .cc, .h under views, except unittests
'views/accelerator.cc',
'views/accelerator.h',
'views/accessibility/view_accessibility.cc',
'views/accessibility/view_accessibility.h',
'views/accessibility/view_accessibility_wrapper.cc',
'views/accessibility/view_accessibility_wrapper.h',
'views/background.cc',
'views/background.h',
'views/border.cc',
'views/border.h',
'views/controls/button/button.cc',
'views/controls/button/button.h',
'views/controls/button/button_dropdown.cc',
'views/controls/button/button_dropdown.h',
'views/controls/button/checkbox.cc',
'views/controls/button/checkbox.h',
'views/controls/button/custom_button.cc',
'views/controls/button/custom_button.h',
'views/controls/button/image_button.cc',
'views/controls/button/image_button.h',
'views/controls/button/menu_button.cc',
'views/controls/button/menu_button.h',
'views/controls/button/native_button.cc',
'views/controls/button/native_button.h',
'views/controls/button/native_button_win.cc',
'views/controls/button/native_button_win.h',
'views/controls/button/native_button_wrapper.h',
'views/controls/button/radio_button.cc',
'views/controls/button/radio_button.h',
'views/controls/button/text_button.cc',
'views/controls/button/text_button.h',
'views/controls/combo_box.cc',
'views/controls/combo_box.h',
'views/controls/hwnd_view.cc',
'views/controls/hwnd_view.h',
'views/controls/image_view.cc',
'views/controls/image_view.h',
'views/controls/label.cc',
'views/controls/label.h',
'views/controls/link.cc',
'views/controls/link.h',
'views/controls/menu/chrome_menu.cc',
'views/controls/menu/chrome_menu.h',
'views/controls/menu/controller.h',
'views/controls/menu/menu.cc',
'views/controls/menu/menu.h',
'views/controls/menu/view_menu_delegate.h',
'views/controls/message_box_view.cc',
'views/controls/message_box_view.h',
'views/controls/native_control.cc',
'views/controls/native_control.h',
'views/controls/native_control_win.cc',
'views/controls/native_control_win.h',
'views/controls/native_view_host.cc',
'views/controls/native_view_host.h',
'views/controls/scroll_view.cc',
'views/controls/scroll_view.h',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/scrollbar/bitmap_scroll_bar.h',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/scrollbar/native_scroll_bar.h',
'views/controls/scrollbar/scroll_bar.cc',
'views/controls/scrollbar/scroll_bar.h',
'views/controls/separator.cc',
'views/controls/separator.h',
'views/controls/single_split_view.cc',
'views/controls/single_split_view.h',
'views/controls/tabbed_pane.cc',
'views/controls/tabbed_pane.h',
'views/controls/table/group_table_view.cc',
'views/controls/table/group_table_view.h',
'views/controls/table/table_view.cc',
'views/controls/table/table_view.h',
'views/controls/text_field.cc',
'views/controls/text_field.h',
'views/controls/throbber.cc',
'views/controls/throbber.h',
'views/controls/tree/tree_model.h',
'views/controls/tree/tree_node_iterator.h',
'views/controls/tree/tree_node_model.h',
'views/controls/tree/tree_view.cc',
'views/controls/tree/tree_view.h',
'views/event.cc',
'views/event.h',
'views/event_gtk.cc',
'views/event_win.cc',
'views/fill_layout.cc',
'views/fill_layout.h',
'views/focus/external_focus_tracker.cc',
'views/focus/external_focus_tracker.h',
'views/focus/focus_manager.cc',
'views/focus/focus_manager.h',
'views/focus/focus_util_win.cc',
'views/focus/focus_util_win.h',
'views/focus/view_storage.cc',
'views/focus/view_storage.h',
'views/grid_layout.cc',
'views/grid_layout.h',
'views/layout_manager.cc',
'views/layout_manager.h',
'views/painter.cc',
'views/painter.h',
'views/repeat_controller.cc',
'views/repeat_controller.h',
'views/view.cc',
'views/view.h',
'views/view_constants.cc',
'views/view_constants.h',
'views/view_gtk.cc',
'views/view_win.cc',
'views/widget/accelerator_handler.cc',
'views/widget/accelerator_handler.h',
'views/widget/aero_tooltip_manager.cc',
'views/widget/aero_tooltip_manager.h',
'views/widget/hwnd_notification_source.h',
'views/widget/root_view.cc',
'views/widget/root_view.h',
'views/widget/root_view_drop_target.cc',
'views/widget/root_view_drop_target.h',
'views/widget/root_view_gtk.cc',
'views/widget/root_view_win.cc',
'views/widget/tooltip_manager.cc',
'views/widget/tooltip_manager.h',
'views/widget/widget.h',
'views/widget/widget_gtk.cc',
'views/widget/widget_gtk.h',
'views/widget/widget_win.cc',
'views/widget/widget_win.h',
'views/window/client_view.cc',
'views/window/client_view.h',
'views/window/custom_frame_view.cc',
'views/window/custom_frame_view.h',
'views/window/dialog_client_view.cc',
'views/window/dialog_client_view.h',
'views/window/dialog_delegate.cc',
'views/window/dialog_delegate.h',
'views/window/native_frame_view.cc',
'views/window/native_frame_view.h',
'views/window/non_client_view.cc',
'views/window/non_client_view.h',
'views/window/window.h',
'views/window/window_delegate.cc',
'views/window/window_delegate.h',
'views/window/window_resources.h',
'views/window/window_win.cc',
'views/window/window_win.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'views/accelerator.cc',
'views/accessibility/accessible_wrapper.cc',
'views/accessibility/view_accessibility.cc',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/button/button_dropdown.cc',
'views/controls/button/checkbox.cc',
'views/controls/button/image_button.cc',
'views/controls/button/menu_button.cc',
'views/controls/combo_box.cc',
'views/controls/hwnd_view.cc',
'views/controls/link.cc',
'views/controls/menu/chrome_menu.cc',
'views/controls/menu/menu.cc',
'views/controls/message_box_view.cc',
'views/controls/scroll_view.cc',
'views/controls/table/group_table_view.cc',
'views/focus/external_focus_tracker.cc',
'views/focus/focus_manager.cc',
'views/controls/button/native_button.cc',
'views/controls/native_control.cc',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/button/radio_button.cc',
'views/resize_corner.cc',
'views/controls/separator.cc',
'views/controls/single_split_view.cc',
'views/controls/tabbed_pane.cc',
'views/controls/table/table_view.cc',
'views/controls/text_field.cc',
'views/controls/tree/tree_view.cc',
'views/event_win.cc',
'views/widget/accelerator_handler.cc',
'views/widget/aero_tooltip_manager.cc',
'views/widget/root_view_drop_target.cc',
'views/widget/tooltip_manager.cc',
'views/window/client_view.cc',
'views/window/custom_frame_view.cc',
'views/window/dialog_delegate.cc',
'views/window/dialog_client_view.cc',
'views/window/native_frame_view.cc',
'views/window/non_client_view.cc',
'views/window/window_delegate.cc',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
['OS=="linux"', {
'sources!': [
'views/accelerator.cc',
'views/accessibility/accessible_wrapper.cc',
'views/accessibility/view_accessibility.cc',
'views/accessibility/view_accessibility_wrapper.cc',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/button/image_button.cc',
'views/controls/button/button_dropdown.cc',
'views/controls/button/checkbox.cc',
'views/controls/menu/chrome_menu.cc',
'views/controls/combo_box.cc',
'views/focus/focus_manager.cc',
'views/controls/table/group_table_view.cc',
'views/controls/hwnd_view.cc',
'views/controls/link.cc',
'views/controls/menu/menu.cc',
'views/controls/button/menu_button.cc',
'views/controls/message_box_view.cc',
'views/controls/button/native_button.cc',
'views/controls/native_control.cc',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/button/radio_button.cc',
'views/resize_corner.cc',
'views/controls/separator.cc',
'views/controls/single_split_view.cc',
'views/controls/tabbed_pane.cc',
'views/controls/table/table_view.cc',
'views/controls/text_field.cc',
'views/controls/tree/tree_view.cc',
'views/widget/accelerator_handler.cc',
'views/widget/aero_tooltip_manager.cc',
'views/widget/root_view_drop_target.cc',
'views/widget/tooltip_manager.cc',
'views/widget/widget_win.cc',
'views/window/client_view.cc',
'views/window/custom_frame_view.cc',
'views/window/dialog_delegate.cc',
'views/window/dialog_client_view.cc',
'views/window/native_frame_view.cc',
'views/window/non_client_view.cc',
'views/window/window_delegate.cc',
'views/window/window_win.cc',
],
}],
],
},
],
}], # OS=="win" or OS=="linux"
['OS=="win"',
{ 'targets': [
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'views',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/views/constrained_window_impl_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
        # In gyp, booleans are 0/1, not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
# Use outputs of this action as inputs for the main target build.
          # This seems like a misnomer, but it keeps the build happy on Linux (scons).
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
Fix forge build by removing obsolete line from gyp.
git-svn-id: http://src.chromium.org/svn/trunk/src@15569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: 263f0edb8929bbed21366609cb574f67e8c55307
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|x)\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(win)\\.cc$'],
['include', '/win/'],
['include', '/win_[^/]*\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)', 'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_en-US.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)', 'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
# TODO(beng): rename to 'app' when moves to top level.
'target_name': 'app_base',
'type': '<(library)',
'msvs_guid': '4631946D-7D5F-44BD-A5A8-504C0A7033BE',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under app/ except for tests.
'../app/animation.cc',
'../app/animation.h',
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/chrome_canvas.cc',
'../app/gfx/chrome_canvas.h',
'../app/gfx/chrome_canvas_linux.cc',
'../app/gfx/chrome_canvas_win.cc',
'../app/gfx/chrome_font.h',
'../app/gfx/chrome_font_gtk.cc',
'../app/gfx/chrome_font_mac.mm',
'../app/gfx/chrome_font_skia.cc',
'../app/gfx/chrome_font_win.cc',
'../app/gfx/favicon_size.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/gfx/insets.h',
'../app/gfx/path_gtk.cc',
'../app/gfx/path_win.cc',
'../app/gfx/path.h',
'../app/l10n_util.cc',
'../app/l10n_util.h',
'../app/l10n_util_posix.cc',
'../app/l10n_util_win.cc',
'../app/l10n_util_win.h',
'../app/message_box_flags.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
'../app/resource_bundle.cc',
'../app/resource_bundle.h',
'../app/resource_bundle_win.cc',
'../app/resource_bundle_linux.cc',
'../app/resource_bundle_mac.mm',
'../app/slide_animation.cc',
'../app/slide_animation.h',
'../app/throb_animation.cc',
'../app/throb_animation.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['OS=="linux"', {
'dependencies': [
# chrome_font_gtk.cc uses fontconfig.
# TODO(evanm): I think this is wrong; it should just use GTK.
'../build/linux/system.gyp:fontconfig',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'variables': {
'grit_path': '../tools/grit/grit.py',
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': ['python', '<(grit_path)', '-i', '<(input_path)', 'build', '-o', '<(grit_out_dir)'],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
},
{
'target_name': 'common',
'type': '<(library)',
'dependencies': [
'app_base',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/color_utils.cc',
'common/gfx/color_utils.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/text_elider.cc',
'common/gfx/text_elider.h',
'common/gfx/utils.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/accessibility_types.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/unzip.cc', # Requires zlib directly.
'common/unzip.h',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/win_util.cc',
'common/win_util.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'app_base',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/automation_constrained_window_tracker.h',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_win.cc',
'browser/back_forward_menu_model_win.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_win.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_menu_controller_gtk.cc',
'browser/bookmarks/bookmark_menu_controller_gtk.h',
'browser/bookmarks/bookmark_menu_controller_win.cc',
'browser/bookmarks/bookmark_menu_controller_win.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.m',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/debugger/debugger_host.h',
'browser/debugger/debugger_host_impl.cpp',
'browser/debugger/debugger_host_impl.h',
'browser/debugger/debugger_io.h',
'browser/debugger/debugger_io_socket.cc',
'browser/debugger/debugger_io_socket.h',
'browser/debugger/debugger_node.cc',
'browser/debugger/debugger_node.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/debugger_shell.cc',
'browser/debugger/debugger_shell.h',
'browser/debugger/debugger_shell_stubs.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_view.h',
'browser/debugger/debugger_window.cc',
'browser/debugger/debugger_window.h',
'browser/debugger/debugger_wrapper.cc',
'browser/debugger/debugger_wrapper.h',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_mac.cc',
'browser/debugger/devtools_window_win.cc',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/drag_utils.cc',
'browser/drag_utils.h',
'browser/encoding_menu_controller_delegate.cc',
'browser/encoding_menu_controller_delegate.h',
'browser/extensions/extension.cc',
'browser/extensions/extension.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_error_reporter.cc',
'browser/extensions/extension_error_reporter.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/link_button_gtk.cc',
'browser/gtk/link_button_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.cc',
'browser/icon_loader.h',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_form_manager_win.cc',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.cc',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/media_resource_handler.cc',
'browser/renderer_host/media_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/renderer_security_policy.cc',
'browser/renderer_host/renderer_security_policy.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_context_menu_win.cc',
'browser/tab_contents/render_view_context_menu_win.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_contents_view_win.cc',
'browser/tab_contents/tab_contents_view_win.h',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container.cc',
'browser/views/blocked_popup_container.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation.cc',
'browser/views/download_started_animation.h',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.cc',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/hwnd_html_view.cc',
'browser/views/hwnd_html_view.h',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window.cc',
'browser/views/page_info_window.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/standard_layout.h',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_contents_container_view.cc',
'browser/views/tab_contents_container_view.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/hwnd_photobooth.cc',
'browser/views/tabs/hwnd_photobooth.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
# Windows-specific files.
'browser/download/download_exe.cc',
'browser/download/download_util.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/debugger/debugger_shell_stubs.cc',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/util/util.gyp:installer_util',
'../printing/printing.gyp:printing',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
'browser/history/history_publisher_none.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
        }, { # else: OS!="win"
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_window.cc',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_win.cc',
'browser/dock_info.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/drag_utils.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/first_run.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/icon_loader.cc',
'browser/icon_manager.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
'browser/task_manager.cc',
'browser/window_sizer.cc',
],
}],
],
},
{
'target_name': 'plugin',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under plugins except for tests and
# mocks.
'plugin/chrome_plugin_host.cc',
'plugin/chrome_plugin_host.h',
'plugin/npobject_proxy.cc',
'plugin/npobject_proxy.h',
'plugin/npobject_stub.cc',
'plugin/npobject_stub.h',
'plugin/npobject_util.cc',
'plugin/npobject_util.h',
'plugin/plugin_channel.cc',
'plugin/plugin_channel.h',
'plugin/plugin_channel_base.cc',
'plugin/plugin_channel_base.h',
'plugin/plugin_main.cc',
'plugin/plugin_thread.cc',
'plugin/plugin_thread.h',
'plugin/webplugin_delegate_stub.cc',
'plugin/webplugin_delegate_stub.h',
'plugin/webplugin_proxy.cc',
'plugin/webplugin_proxy.h',
],
# These are layered in conditionals in the event other platforms
# end up using this module as well.
'conditions': [
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
],
},
{
'target_name': 'renderer',
'type': '<(library)',
'dependencies': [
'common',
'plugin',
'chrome_resources',
'chrome_strings',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
'../webkit/webkit.gyp:webkit',
],
'include_dirs': [
'..',
],
'sources': [
# TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
# But app/ is the wrong directory for them.
# Better is to remove the dep of *_tests on renderer, but in the
# short term I'd like the build to work.
'renderer/automation/dom_automation_controller.cc',
'renderer/automation/dom_automation_controller.h',
'renderer/extensions/bindings_utils.cc',
'renderer/extensions/bindings_utils.h',
'renderer/extensions/event_bindings.cc',
'renderer/extensions/event_bindings.h',
'renderer/extensions/extension_process_bindings.cc',
'renderer/extensions/extension_process_bindings.h',
'renderer/extensions/renderer_extension_bindings.cc',
'renderer/extensions/renderer_extension_bindings.h',
'renderer/loadtimes_extension_bindings.h',
'renderer/loadtimes_extension_bindings.cc',
'renderer/media/audio_renderer_impl.cc',
'renderer/media/audio_renderer_impl.h',
'renderer/media/buffered_data_source.cc',
'renderer/media/buffered_data_source.h',
'renderer/media/simple_data_source.cc',
'renderer/media/simple_data_source.h',
'renderer/media/video_renderer_impl.cc',
'renderer/media/video_renderer_impl.h',
'renderer/net/render_dns_master.cc',
'renderer/net/render_dns_master.h',
'renderer/net/render_dns_queue.cc',
'renderer/net/render_dns_queue.h',
'renderer/about_handler.cc',
'renderer/about_handler.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/debug_message_handler.cc',
'renderer/debug_message_handler.h',
'renderer/devtools_agent.cc',
'renderer/devtools_agent.h',
'renderer/devtools_agent_filter.cc',
'renderer/devtools_agent_filter.h',
'renderer/devtools_client.cc',
'renderer/devtools_client.h',
'renderer/dom_ui_bindings.cc',
'renderer/dom_ui_bindings.h',
'renderer/external_host_bindings.cc',
'renderer/external_host_bindings.h',
'renderer/external_extension.cc',
'renderer/external_extension.h',
'renderer/js_only_v8_extensions.cc',
'renderer/js_only_v8_extensions.h',
'renderer/localized_error.cc',
'renderer/localized_error.h',
'renderer/plugin_channel_host.cc',
'renderer/plugin_channel_host.h',
'renderer/render_process.cc',
'renderer/render_process.h',
'renderer/render_thread.cc',
'renderer/render_thread.h',
'renderer/render_view.cc',
'renderer/render_view.h',
'renderer/render_widget.cc',
'renderer/render_widget.h',
'renderer/renderer_glue.cc',
'renderer/renderer_histogram_snapshots.cc',
'renderer/renderer_histogram_snapshots.h',
'renderer/renderer_logging.h',
'renderer/renderer_logging_linux.cc',
'renderer/renderer_logging_mac.mm',
'renderer/renderer_logging_win.cc',
'renderer/renderer_main.cc',
'renderer/renderer_main_platform_delegate.h',
'renderer/renderer_main_platform_delegate_linux.cc',
'renderer/renderer_main_platform_delegate_mac.mm',
'renderer/renderer_main_platform_delegate_win.cc',
'renderer/renderer_webkitclient_impl.cc',
'renderer/renderer_webkitclient_impl.h',
'renderer/user_script_slave.cc',
'renderer/user_script_slave.h',
'renderer/visitedlink_slave.cc',
'renderer/visitedlink_slave.h',
'renderer/webmediaplayer_impl.cc',
'renderer/webmediaplayer_impl.h',
'renderer/webplugin_delegate_proxy.cc',
'renderer/webplugin_delegate_proxy.h',
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
'link_settings': {
'mac_bundle_resources': [
'renderer/renderer.sb',
],
},
'conditions': [
# Linux-specific rules.
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
# Windows-specific rules.
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
},],
# As of yet unported-from-Windows code.
['OS!="win"', {
'sources!': [
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
},],
],
},
{
'target_name': 'app',
'type': 'executable',
'mac_bundle': 1,
'dependencies': [
'common',
'browser',
'renderer',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'mac_bundle_resources': [
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/en-US.pak'],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
'product_name': '<(branding)',
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
# "bundle_id" is the name of the variable used to replace
# BUNDLE_ID in Info.plist.
'variables': {'bundle_id': 'com.google.Chrome'},
# Only include breakpad in official builds.
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(branding).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
]
}, { # else: branding!="Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {'bundle_id': 'org.chromium.Chromium'},
}],
],
'xcode_settings': {
# chrome/app/app-Info.plist has a CFBundleIdentifier of BUNDLE_ID,
# to be replaced by a properly branded bundle ID in Xcode with
# these settings.
'INFOPLIST_PREPROCESS': 'YES',
'INFOPLIST_PREPROCESSOR_DEFINITIONS': ['BUNDLE_ID="<(bundle_id)"'],
},
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
}, { # else: Branding!="Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
'product_name': 'chrome'
}],
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'views',
'../build/temp_gyp/breakpad.gyp:breakpad_handler',
'../build/temp_gyp/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'worker',
],
        },{ # else: OS!="win"
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded en-US.
'action_name': 'repack_locale',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
# TODO: these should live here but are currently used by
          # production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/constrained_window_proxy.cc',
'test/automation/constrained_window_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/testing_browser_process.h',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/chrome_process_util_uitest.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/inspector_controller_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/perf/mem_usage.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'test_support_unit',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.h',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extension_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/navigation_controller_unittest.cc',
'browser/navigation_entry_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/renderer_security_policy_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/site_instance_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'../app/animation_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'../app/gfx/chrome_font_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/gfx/text_elider_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'../app/l10n_util_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/unzip_unittest.cc',
'common/win_util_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'views/controls/label_unittest.cc',
'views/controls/table/table_view_unittest.cc',
'views/controls/tree/tree_node_iterator_unittest.cc',
'views/focus/focus_manager_unittest.cc',
'views/grid_layout_unittest.cc',
'views/view_unittest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="mac"', {
# The test fetches resources which means Mac need the app bundle to
# exist on disk so it can pull from it.
'dependencies': [
'app',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/navigation_controller_unittest.cc',
'renderer/render_view_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'dependencies': [
'views',
],
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/net/url_util_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'views/controls/label_unittest.cc',
'views/controls/table/table_view_unittest.cc',
'views/focus/focus_manager_unittest.cc',
'views/grid_layout_unittest.cc',
'views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/startup/feature_startup_test.cc',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
['OS!="win"', {
'sources!': [
'test/perf/mem_usage.cc',
],
}],
],
},
],
'conditions': [
['OS=="linux"', {
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/convert_dict.cc',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
],
}],
['OS=="mac"',
# On Mac only, add a project target called "package_app" that only
# runs a shell script (package_chrome.sh).
{ 'targets': [
{
'target_name': 'package_app',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
],
'actions': [
{
'inputs': [],
'outputs': [],
'action_name': 'package_chrome',
'action': ['tools/mac/package_chrome.sh' ],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'perf_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/visitedlink_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/visitedlink_perftest.cc',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
],
},
],
}], # OS!="mac"
['OS=="win" or OS=="linux"',
{ 'targets': [
{
'target_name': 'views',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'sources': [
# All .cc, .h under views, except unittests
'views/accelerator.cc',
'views/accelerator.h',
'views/accessibility/view_accessibility.cc',
'views/accessibility/view_accessibility.h',
'views/accessibility/view_accessibility_wrapper.cc',
'views/accessibility/view_accessibility_wrapper.h',
'views/background.cc',
'views/background.h',
'views/border.cc',
'views/border.h',
'views/controls/button/button.cc',
'views/controls/button/button.h',
'views/controls/button/button_dropdown.cc',
'views/controls/button/button_dropdown.h',
'views/controls/button/checkbox.cc',
'views/controls/button/checkbox.h',
'views/controls/button/custom_button.cc',
'views/controls/button/custom_button.h',
'views/controls/button/image_button.cc',
'views/controls/button/image_button.h',
'views/controls/button/menu_button.cc',
'views/controls/button/menu_button.h',
'views/controls/button/native_button.cc',
'views/controls/button/native_button.h',
'views/controls/button/native_button_win.cc',
'views/controls/button/native_button_win.h',
'views/controls/button/native_button_wrapper.h',
'views/controls/button/radio_button.cc',
'views/controls/button/radio_button.h',
'views/controls/button/text_button.cc',
'views/controls/button/text_button.h',
'views/controls/combo_box.cc',
'views/controls/combo_box.h',
'views/controls/hwnd_view.cc',
'views/controls/hwnd_view.h',
'views/controls/image_view.cc',
'views/controls/image_view.h',
'views/controls/label.cc',
'views/controls/label.h',
'views/controls/link.cc',
'views/controls/link.h',
'views/controls/menu/chrome_menu.cc',
'views/controls/menu/chrome_menu.h',
'views/controls/menu/controller.h',
'views/controls/menu/menu.cc',
'views/controls/menu/menu.h',
'views/controls/menu/view_menu_delegate.h',
'views/controls/message_box_view.cc',
'views/controls/message_box_view.h',
'views/controls/native_control.cc',
'views/controls/native_control.h',
'views/controls/native_control_win.cc',
'views/controls/native_control_win.h',
'views/controls/native_view_host.cc',
'views/controls/native_view_host.h',
'views/controls/scroll_view.cc',
'views/controls/scroll_view.h',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/scrollbar/bitmap_scroll_bar.h',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/scrollbar/native_scroll_bar.h',
'views/controls/scrollbar/scroll_bar.cc',
'views/controls/scrollbar/scroll_bar.h',
'views/controls/separator.cc',
'views/controls/separator.h',
'views/controls/single_split_view.cc',
'views/controls/single_split_view.h',
'views/controls/tabbed_pane.cc',
'views/controls/tabbed_pane.h',
'views/controls/table/group_table_view.cc',
'views/controls/table/group_table_view.h',
'views/controls/table/table_view.cc',
'views/controls/table/table_view.h',
'views/controls/text_field.cc',
'views/controls/text_field.h',
'views/controls/throbber.cc',
'views/controls/throbber.h',
'views/controls/tree/tree_model.h',
'views/controls/tree/tree_node_iterator.h',
'views/controls/tree/tree_node_model.h',
'views/controls/tree/tree_view.cc',
'views/controls/tree/tree_view.h',
'views/event.cc',
'views/event.h',
'views/event_gtk.cc',
'views/event_win.cc',
'views/fill_layout.cc',
'views/fill_layout.h',
'views/focus/external_focus_tracker.cc',
'views/focus/external_focus_tracker.h',
'views/focus/focus_manager.cc',
'views/focus/focus_manager.h',
'views/focus/focus_util_win.cc',
'views/focus/focus_util_win.h',
'views/focus/view_storage.cc',
'views/focus/view_storage.h',
'views/grid_layout.cc',
'views/grid_layout.h',
'views/layout_manager.cc',
'views/layout_manager.h',
'views/painter.cc',
'views/painter.h',
'views/repeat_controller.cc',
'views/repeat_controller.h',
'views/view.cc',
'views/view.h',
'views/view_constants.cc',
'views/view_constants.h',
'views/view_gtk.cc',
'views/view_win.cc',
'views/widget/accelerator_handler.cc',
'views/widget/accelerator_handler.h',
'views/widget/aero_tooltip_manager.cc',
'views/widget/aero_tooltip_manager.h',
'views/widget/root_view.cc',
'views/widget/root_view.h',
'views/widget/root_view_drop_target.cc',
'views/widget/root_view_drop_target.h',
'views/widget/root_view_gtk.cc',
'views/widget/root_view_win.cc',
'views/widget/tooltip_manager.cc',
'views/widget/tooltip_manager.h',
'views/widget/widget.h',
'views/widget/widget_gtk.cc',
'views/widget/widget_gtk.h',
'views/widget/widget_win.cc',
'views/widget/widget_win.h',
'views/window/client_view.cc',
'views/window/client_view.h',
'views/window/custom_frame_view.cc',
'views/window/custom_frame_view.h',
'views/window/dialog_client_view.cc',
'views/window/dialog_client_view.h',
'views/window/dialog_delegate.cc',
'views/window/dialog_delegate.h',
'views/window/native_frame_view.cc',
'views/window/native_frame_view.h',
'views/window/non_client_view.cc',
'views/window/non_client_view.h',
'views/window/window.h',
'views/window/window_delegate.cc',
'views/window/window_delegate.h',
'views/window/window_resources.h',
'views/window/window_win.cc',
'views/window/window_win.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'views/accelerator.cc',
'views/accessibility/accessible_wrapper.cc',
'views/accessibility/view_accessibility.cc',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/button/button_dropdown.cc',
'views/controls/button/checkbox.cc',
'views/controls/button/image_button.cc',
'views/controls/button/menu_button.cc',
'views/controls/combo_box.cc',
'views/controls/hwnd_view.cc',
'views/controls/link.cc',
'views/controls/menu/chrome_menu.cc',
'views/controls/menu/menu.cc',
'views/controls/message_box_view.cc',
'views/controls/scroll_view.cc',
'views/controls/table/group_table_view.cc',
'views/focus/external_focus_tracker.cc',
'views/focus/focus_manager.cc',
'views/controls/button/native_button.cc',
'views/controls/native_control.cc',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/button/radio_button.cc',
'views/resize_corner.cc',
'views/controls/separator.cc',
'views/controls/single_split_view.cc',
'views/controls/tabbed_pane.cc',
'views/controls/table/table_view.cc',
'views/controls/text_field.cc',
'views/controls/tree/tree_view.cc',
'views/event_win.cc',
'views/widget/accelerator_handler.cc',
'views/widget/aero_tooltip_manager.cc',
'views/widget/root_view_drop_target.cc',
'views/widget/tooltip_manager.cc',
'views/window/client_view.cc',
'views/window/custom_frame_view.cc',
'views/window/dialog_delegate.cc',
'views/window/dialog_client_view.cc',
'views/window/native_frame_view.cc',
'views/window/non_client_view.cc',
'views/window/window_delegate.cc',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
['OS=="linux"', {
'sources!': [
'views/accelerator.cc',
'views/accessibility/accessible_wrapper.cc',
'views/accessibility/view_accessibility.cc',
'views/accessibility/view_accessibility_wrapper.cc',
'views/controls/scrollbar/bitmap_scroll_bar.cc',
'views/controls/button/image_button.cc',
'views/controls/button/button_dropdown.cc',
'views/controls/button/checkbox.cc',
'views/controls/menu/chrome_menu.cc',
'views/controls/combo_box.cc',
'views/focus/focus_manager.cc',
'views/controls/table/group_table_view.cc',
'views/controls/hwnd_view.cc',
'views/controls/link.cc',
'views/controls/menu/menu.cc',
'views/controls/button/menu_button.cc',
'views/controls/message_box_view.cc',
'views/controls/button/native_button.cc',
'views/controls/native_control.cc',
'views/controls/scrollbar/native_scroll_bar.cc',
'views/controls/button/radio_button.cc',
'views/resize_corner.cc',
'views/controls/separator.cc',
'views/controls/single_split_view.cc',
'views/controls/tabbed_pane.cc',
'views/controls/table/table_view.cc',
'views/controls/text_field.cc',
'views/controls/tree/tree_view.cc',
'views/widget/accelerator_handler.cc',
'views/widget/aero_tooltip_manager.cc',
'views/widget/root_view_drop_target.cc',
'views/widget/tooltip_manager.cc',
'views/widget/widget_win.cc',
'views/window/client_view.cc',
'views/window/custom_frame_view.cc',
'views/window/dialog_delegate.cc',
'views/window/dialog_client_view.cc',
'views/window/native_frame_view.cc',
'views/window/non_client_view.cc',
'views/window/window_delegate.cc',
'views/window/window_win.cc',
],
}],
],
},
],
}], # OS=="win" or OS=="linux"
['OS=="win"',
{ 'targets': [
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'views',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/views/constrained_window_impl_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
# Use outputs of this action as inputs for the main target build.
# Seems as a misnomer but makes this happy on Linux (scons).
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
|
"""
Models backed by SQL using SQLAlchemy.
.. moduleauthor:: Martijn Vermaat <martijn@vermaat.name>
.. todo:: Perhaps add some delete cascade rules.
.. Licensed under the MIT license, see the LICENSE file.
"""
from datetime import date
import gzip
import os
import uuid
from flask import current_app
from sqlalchemy import Index
import bcrypt
from . import db
from .region_binning import assign_bin
# File types a DataSource may contain; validated in DataSource.is_valid().
DATA_SOURCE_FILETYPES = ('bed', 'vcf')
"""
.. todo:: Use the types for which we have validators.
"""

# Note: Add new roles at the end.
# The position of a role in this tuple defines its bit in
# User.roles_bitstring (index 0 is the least-significant bit).
USER_ROLES = (
    'admin',      # Can do anything.
    'importer',   # Can import samples.
    'annotator',  # Can annotate samples.
    'trader'      # Can annotate samples if they are also imported.
)
class InvalidDataSource(Exception):
    """
    Exception thrown if data source validation failed.

    :param code: Short machine-readable error code.
    :param message: Human-readable error message.
    """
    def __init__(self, code, message):
        self.code = code
        self.message = message
        # Fix: the original called ``super(Exception, self).__init__``,
        # which starts the MRO lookup *after* Exception and thus skips
        # Exception's own initializer. Name the class itself instead.
        super(InvalidDataSource, self).__init__(code, message)
class DataUnavailable(Exception):
    """
    Exception thrown if reading from a data source whose data is not cached
    anymore (in case of local storage) or does not exist anymore (in case of
    a URL resource).

    :param code: Short machine-readable error code.
    :param message: Human-readable error message.
    """
    def __init__(self, code, message):
        self.code = code
        self.message = message
        # Fix: the original called ``super(Exception, self).__init__``,
        # which starts the MRO lookup *after* Exception and thus skips
        # Exception's own initializer. Name the class itself instead.
        super(DataUnavailable, self).__init__(code, message)
class User(db.Model):
    """
    User in the system.

    For the roles column we use a bitstring where the leftmost role in the
    :data:`USER_ROLES` tuple is defined by the least-significant bit.
    Essentially, this creates a set of roles.

    .. todo:: Login should really be validated to only contain alphanums.
    .. todo:: The bitstring encoding/decoding can probably be implemented
        more efficiently.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(200))
    login = db.Column(db.String(200), index=True, unique=True)
    password_hash = db.Column(db.String(200))
    roles_bitstring = db.Column(db.Integer)
    added = db.Column(db.Date)

    def __init__(self, name, login, password, roles=None):
        """
        :param roles: Iterable of role names from :data:`USER_ROLES`;
            defaults to no roles. (Fix: the original used the mutable
            default ``roles=[]``, which is shared between calls.)
        """
        roles = roles or []
        self.name = name
        self.login = login
        self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
        # Encode the given roles as a bitstring: bit i is set iff
        # USER_ROLES[i] was supplied.
        self.roles_bitstring = sum(1 << i for i, role in enumerate(USER_ROLES)
                                   if role in roles)
        self.added = date.today()

    def __repr__(self):
        return '<User %s identified by %s added %s is %s>' % (
            self.name, self.login, str(self.added), ', '.join(self.roles()))

    def check_password(self, password):
        # bcrypt embeds the salt in the stored hash, so hashing the
        # candidate password with the stored hash as salt must reproduce it.
        return bcrypt.hashpw(password, self.password_hash) == self.password_hash

    def roles(self):
        """Decode the roles bitstring into a set of role names."""
        return {role for i, role in enumerate(USER_ROLES)
                if self.roles_bitstring & (1 << i)}
class Variant(db.Model):
    """
    Genomic variant.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    chromosome = db.Column(db.String(2))
    begin = db.Column(db.Integer)
    end = db.Column(db.Integer)
    reference = db.Column(db.String(200))
    variant = db.Column(db.String(200))
    bin = db.Column(db.Integer)

    def __init__(self, chromosome, begin, end, reference, variant):
        self.chromosome = chromosome
        self.begin = begin
        self.end = end
        self.reference = reference
        self.variant = variant
        # Precompute the region bin from the coordinates so positional
        # queries can filter on it.
        self.bin = assign_bin(begin, end)

    def __repr__(self):
        location = 'chr%s:%i-%i' % (self.chromosome, self.begin, self.end)
        return '<Variant %s at %s>' % (self.variant, location)
# Composite index on (chromosome, begin) for positional lookups.
Index('variant_location',
      Variant.chromosome, Variant.begin)
# Uniqueness across all descriptive fields: the same variant record may
# only exist once.
Index('variant_unique',
      Variant.chromosome, Variant.begin, Variant.end,
      Variant.reference, Variant.variant, unique=True)
class Sample(db.Model):
    """
    Sample.

    ``coverage_profile`` is essentially ``not is_population_study``.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    name = db.Column(db.String(200))
    coverage_threshold = db.Column(db.Integer)
    pool_size = db.Column(db.Integer)
    added = db.Column(db.Date)
    active = db.Column(db.Boolean, default=False)
    coverage_profile = db.Column(db.Boolean)

    user = db.relationship(User,
                           backref=db.backref('samples', lazy='dynamic'))

    def __init__(self, user, name, pool_size=1, coverage_threshold=None,
                 coverage_profile=True):
        self.user = user
        self.name = name
        self.coverage_threshold = coverage_threshold
        self.pool_size = pool_size
        self.added = date.today()
        # Bug fix: the original assigned the literal ``True`` here,
        # silently ignoring the ``coverage_profile`` argument.
        self.coverage_profile = coverage_profile

    def __repr__(self):
        return '<Sample %s of %i added %s by %r>' % (
            self.name, self.pool_size, str(self.added), self.user)
class DataSource(db.Model):
    """
    Data source (probably uploaded as a file). E.g. VCF file to be imported,
    or BED track from which Region entries are created.

    .. todo:: We can now provide data as an uploaded file or as a path to a
        local file. We also want to be able to give a link to an internet
        resource.
    .. todo:: Checksums of data sources?
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    name = db.Column(db.String(200))
    filename = db.Column(db.String(50))
    filetype = db.Column(db.Enum(*DATA_SOURCE_FILETYPES, name='filetype'))
    gzipped = db.Column(db.Boolean)
    added = db.Column(db.Date)
    sample_id = db.Column(db.Integer, db.ForeignKey('sample.id'), default=None)

    user = db.relationship(User,
                           backref=db.backref('data_sources', lazy='dynamic'))
    sample = db.relationship(Sample,
                             backref=db.backref('data_sources', lazy='dynamic'))

    def __init__(self, user, name, filetype, upload=None, local_path=None,
                 gzipped=False):
        """
        :param upload: Uploaded file object (takes precedence over
            ``local_path``). If it is not already gzipped, it is compressed
            on the way to disk.
        :param local_path: Path to an existing local file; a symlink to it
            is created in the files directory.
        :raises InvalidDataSource: If the filetype is unknown or the data
            does not validate.
        """
        if filetype not in DATA_SOURCE_FILETYPES:
            raise InvalidDataSource('unknown_filetype',
                                    'Data source filetype is unknown')
        self.user = user
        self.name = name
        self.filename = str(uuid.uuid4())
        self.filetype = filetype
        self.gzipped = gzipped
        self.added = date.today()
        filepath = os.path.join(current_app.config['FILES_DIR'],
                                self.filename)
        if upload is not None:
            if gzipped:
                upload.save(filepath)
            else:
                # Compress plain uploads on the way to disk; use a context
                # manager so the handle is closed even if write fails
                # (original leaked the handle on error).
                with gzip.open(filepath, 'wb') as data:
                    data.write(upload.read())
                self.gzipped = True
        elif local_path is not None:
            os.symlink(local_path, filepath)
        # NOTE(review): if neither upload nor local_path is given, is_valid()
        # will raise DataUnavailable from data() before we get here —
        # presumably intentional; confirm against callers.
        if not self.is_valid():
            os.unlink(filepath)
            raise InvalidDataSource('invalid_data',
                                    'Data source cannot be read')

    def __repr__(self):
        return '<DataSource %s as %s added %s by %r>' % (
            self.name, self.filetype, str(self.added), self.user)

    @property
    def imported(self):
        # A data source is imported once it is attached to a sample.
        return self.sample_id is not None

    @property
    def active(self):
        return self.imported and self.sample.active

    def data(self):
        """
        Get open file-like handle to data contained in this data source for
        reading.

        .. note:: Be sure to close after calling this.

        :raises DataUnavailable: If the backing file cannot be opened.
        """
        filepath = os.path.join(current_app.config['FILES_DIR'],
                                self.filename)
        try:
            if self.gzipped:
                return gzip.open(filepath)
            else:
                return open(filepath)
        except EnvironmentError:
            raise DataUnavailable('data_source_not_cached',
                                  'Data source is not in the cache')

    def local_path(self):
        """
        Get a local filepath for the data.
        """
        return os.path.join(current_app.config['FILES_DIR'], self.filename)

    def is_valid(self):
        """
        Peek into the file and determine if it is of the given filetype.
        """
        data = self.data()

        def is_bed():
            # Todo.
            return True

        def is_vcf():
            # Only looks at the first line for the VCF 4.1 header.
            return 'fileformat=VCFv4.1' in data.readline()

        validators = {'bed': is_bed,
                      'vcf': is_vcf}
        # Run the validator with the handle as context manager so it is
        # closed afterwards (original used the odd ``with data as data:``).
        with data:
            return validators[self.filetype]()
class Annotation(db.Model):
    """
    Annotated data source.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    filename = db.Column(db.String(50))
    added = db.Column(db.Date)

    data_source = db.relationship(DataSource,
                                  backref=db.backref('annotations',
                                                     lazy='dynamic'))

    def __init__(self, data_source):
        self.data_source = data_source
        # Random backing filename under FILES_DIR.
        self.filename = str(uuid.uuid4())
        self.added = date.today()

    def __repr__(self):
        return '<Annotation for %r added %s>' % (self.data_source,
                                                 str(self.added))

    def _filepath(self):
        # Absolute path of the backing file (shared by the accessors below).
        return os.path.join(current_app.config['FILES_DIR'], self.filename)

    def data(self):
        """
        Get open file-like handle to data contained in this annotation for
        reading.

        .. note:: Be sure to close after calling this.
        """
        return gzip.open(self._filepath())

    def data_writer(self):
        """
        Get open file-like handle to data contained in this annotation for
        writing.

        .. note:: Be sure to close after calling this.
        """
        return gzip.open(self._filepath(), 'wb')

    def local_path(self):
        """
        Get a local filepath for the data.
        """
        return self._filepath()
class Observation(db.Model):
    """
    Observation in a sample.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

    id = db.Column(db.Integer, primary_key=True)
    variant_id = db.Column(db.Integer, db.ForeignKey('variant.id'))
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    # Depending on the type of sample, the following 3 fields may or may not
    # have data. If we have no data, we store None.
    total_coverage = db.Column(db.Integer)
    variant_coverage = db.Column(db.Integer)
    support = db.Column(db.Integer)  # Number of individuals.

    variant = db.relationship(Variant,
                              backref=db.backref('observations',
                                                 lazy='dynamic'))
    data_source = db.relationship(DataSource,
                                  backref=db.backref('observations',
                                                     lazy='dynamic'))

    def __init__(self, variant, data_source, support=1, total_coverage=None,
                 variant_coverage=None):
        self.variant = variant
        self.data_source = data_source
        self.total_coverage = total_coverage
        self.variant_coverage = variant_coverage
        self.support = support

    def __repr__(self):
        return '<Observation %r from %r>' % (self.variant, self.data_source)
class Region(db.Model):
    """
    Covered region for a sample.
    One (chromosome, begin, end) interval for a data source, plus its bin
    (computed by :func:`assign_bin`) used by the 'region_location' index
    for fast range queries.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    chromosome = db.Column(db.String(2))
    begin = db.Column(db.Integer)
    end = db.Column(db.Integer)
    # Derived from begin/end (see __init__); indexed for range queries.
    bin = db.Column(db.Integer)
    data_source = db.relationship(DataSource, backref=db.backref('regions', lazy='dynamic'))
    def __init__(self, data_source, chromosome, begin, end):
        self.data_source = data_source
        self.chromosome = chromosome
        self.begin = begin
        self.end = end
        # Note: bin must be recomputed if begin/end are ever changed.
        self.bin = assign_bin(self.begin, self.end)
    def __repr__(self):
        return '<Region from %r at chr%s:%i-%i>' % (self.data_source, self.chromosome, self.begin, self.end)
# Composite index supporting bin-based range queries on covered regions.
Index('region_location',
      Region.bin, Region.chromosome, Region.begin)
Add public field to Sample
"""
Models backed by SQL using SQLAlchemy.
.. moduleauthor:: Martijn Vermaat <martijn@vermaat.name>
.. todo:: Perhaps add some delete cascade rules.
.. Licensed under the MIT license, see the LICENSE file.
"""
from datetime import date
import gzip
import os
import uuid
from flask import current_app
from sqlalchemy import Index
import bcrypt
from . import db
from .region_binning import assign_bin
# Accepted values for DataSource.filetype (see DataSource.is_valid for the
# per-filetype validators).
DATA_SOURCE_FILETYPES = ('bed', 'vcf')
"""
.. todo:: Use the types for which we have validators.
"""
# Note: Add new roles at the end.
# The position of each role defines its bit in User.roles_bitstring
# (first role = least-significant bit), so reordering or inserting roles
# would silently change the meaning of stored bitstrings.
USER_ROLES = (
    'admin',           # Can do anything.
    'importer',        # Can import samples.
    'annotator',       # Can annotate samples.
    'trader'           # Can annotate samples if they are also imported.
)
class InvalidDataSource(Exception):
    """
    Exception thrown if data source validation failed.

    :arg code: Machine-readable error code.
    :arg message: Human-readable error message.
    """
    def __init__(self, code, message):
        self.code = code
        self.message = message
        # Fix: call super() on this class, not on Exception.
        # ``super(Exception, self)`` starts the MRO lookup *after*
        # Exception, skipping Exception's own initializer — fragile and
        # almost certainly not what was intended.
        super(InvalidDataSource, self).__init__(code, message)
class DataUnavailable(Exception):
    """
    Exception thrown if reading from a data source which data is not cached
    anymore (in case of local storage) or does not exist anymore (in case of
    a URL resource).

    :arg code: Machine-readable error code.
    :arg message: Human-readable error message.
    """
    def __init__(self, code, message):
        self.code = code
        self.message = message
        # Fix: call super() on this class, not on Exception.
        # ``super(Exception, self)`` skips Exception in the MRO, bypassing
        # its initializer — fragile and almost certainly unintended.
        super(DataUnavailable, self).__init__(code, message)
class User(db.Model):
    """
    User in the system.
    For the roles column we use a bitstring where the leftmost role in the
    :data:`USER_ROLES` tuple is defined by the least-significant bit.
    Essentially, this creates a set of roles.
    .. todo:: Login should really be validated to only contain alphanums.
    .. todo:: The bitstring encoding/decoding can probably be implemented more
        efficiently.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(200))
    login = db.Column(db.String(200), index=True, unique=True)
    # bcrypt hash of the password (bcrypt embeds the salt in the hash).
    password_hash = db.Column(db.String(200))
    # Set of roles encoded as an integer bitstring (see USER_ROLES).
    roles_bitstring = db.Column(db.Integer)
    added = db.Column(db.Date)
    def __init__(self, name, login, password, roles=None):
        """
        :arg password: Plaintext password; only its bcrypt hash is stored.
        :arg roles: Iterable of role names from :data:`USER_ROLES`
            (default: no roles).
        """
        # Fix: the default used to be the mutable literal ``[]``, which is
        # shared across calls (classic Python pitfall). Use None sentinel.
        roles = roles if roles is not None else ()
        self.name = name
        self.login = login
        self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
        # Encode the role set as a bitstring: role at index i -> bit i.
        self.roles_bitstring = sum(pow(2, i) for i, role in enumerate(USER_ROLES)
                                   if role in roles)
        self.added = date.today()
    def __repr__(self):
        return '<User %s identified by %s added %s is %s>' % (self.name, self.login, str(self.added), ', '.join(self.roles()))
    def check_password(self, password):
        """
        Check a plaintext password against the stored bcrypt hash.
        """
        # Hashing with the stored hash as salt reproduces the stored hash
        # if and only if the password matches.
        return bcrypt.hashpw(password, self.password_hash) == self.password_hash
    def roles(self):
        """
        Return the set of role names encoded in the roles bitstring.
        """
        return {role for i, role in enumerate(USER_ROLES)
                if self.roles_bitstring & pow(2, i)}
class Variant(db.Model):
    """
    Genomic variant.
    Uniquely identified by (chromosome, begin, end, reference, variant);
    see the 'variant_unique' index defined below this class.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    chromosome = db.Column(db.String(2))
    begin = db.Column(db.Integer)
    end = db.Column(db.Integer)
    # Reference and observed sequence of the variant.
    reference = db.Column(db.String(200))
    variant = db.Column(db.String(200))
    # Derived from begin/end (see __init__) for binned range queries.
    bin = db.Column(db.Integer)
    def __init__(self, chromosome, begin, end, reference, variant):
        self.chromosome = chromosome
        self.begin = begin
        self.end = end
        self.reference = reference
        self.variant = variant
        # Note: bin must be recomputed if begin/end are ever changed.
        self.bin = assign_bin(self.begin, self.end)
    def __repr__(self):
        return '<Variant %s at chr%s:%i-%i>' % (
            self.variant, self.chromosome, self.begin, self.end)
# Index supporting lookups of variants by genomic position.
Index('variant_location',
      Variant.chromosome, Variant.begin)
# Unique index: a full variant description may only exist once.
Index('variant_unique',
      Variant.chromosome, Variant.begin, Variant.end,
      Variant.reference, Variant.variant, unique=True)
class Sample(db.Model):
    """
    Sample.
    ``coverage_profile`` is essentially ``not is_population_study``.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    name = db.Column(db.String(200))
    coverage_threshold = db.Column(db.Integer)
    # Number of individuals pooled in this sample (1 for a single individual).
    pool_size = db.Column(db.Integer)
    added = db.Column(db.Date)
    # Samples start out inactive (see also DataSource.active).
    active = db.Column(db.Boolean, default=False)
    coverage_profile = db.Column(db.Boolean)
    # NOTE(review): presumably controls public visibility of the sample —
    # confirm against the views using this field.
    public = db.Column(db.Boolean)
    user = db.relationship(User, backref=db.backref('samples', lazy='dynamic'))
    def __init__(self, user, name, pool_size=1, coverage_threshold=None, coverage_profile=True, public=False):
        self.user = user
        self.name = name
        self.coverage_threshold = coverage_threshold
        self.pool_size = pool_size
        self.added = date.today()
        self.coverage_profile = coverage_profile
        self.public = public
    def __repr__(self):
        return '<Sample %s of %i added %s by %r>' % (self.name, self.pool_size, str(self.added), self.user)
class DataSource(db.Model):
    """
    Data source (probably uploaded as a file). E.g. VCF file to be imported, or
    BED track from which Region entries are created.

    Data is stored under a generated UUID filename in the configured
    ``FILES_DIR`` directory, gzip-compressed unless it was provided as a
    symlinked local file.

    .. todo:: We can now provide data as an uploaded file or as a path to a
        local file. We also want to be able to give a link to an internet
        resource.
    .. todo:: Checksums of data sources?
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    name = db.Column(db.String(200))
    # Generated UUID used as the on-disk filename (see __init__).
    filename = db.Column(db.String(50))
    filetype = db.Column(db.Enum(*DATA_SOURCE_FILETYPES, name='filetype'))
    # Whether the file on disk is gzip-compressed.
    gzipped = db.Column(db.Boolean)
    added = db.Column(db.Date)
    # Set once this data source has been imported into a sample.
    sample_id = db.Column(db.Integer, db.ForeignKey('sample.id'), default=None)
    user = db.relationship(User, backref=db.backref('data_sources', lazy='dynamic'))
    sample = db.relationship(Sample, backref=db.backref('data_sources', lazy='dynamic'))
    def __init__(self, user, name, filetype, upload=None, local_path=None, gzipped=False):
        """
        :arg user: Owner of this data source.
        :arg name: Human-readable name.
        :arg filetype: One of :data:`DATA_SOURCE_FILETYPES`.
        :arg upload: Uploaded file object (takes precedence over local_path).
        :arg local_path: Path to an existing local file to symlink.
        :arg gzipped: Whether the provided data is already gzip-compressed.
        :raises InvalidDataSource: If the filetype is unknown or the data
            fails validation.
        """
        # Style fix: 'x not in y' instead of 'not x in y' (PEP 8).
        if filetype not in DATA_SOURCE_FILETYPES:
            raise InvalidDataSource('unknown_filetype', 'Data source filetype is unknown')
        self.user = user
        self.name = name
        self.filename = str(uuid.uuid4())
        self.filetype = filetype
        self.gzipped = gzipped
        self.added = date.today()
        filepath = os.path.join(current_app.config['FILES_DIR'], self.filename)
        if upload is not None:
            if gzipped:
                # Already compressed; store the upload as-is.
                upload.save(filepath)
            else:
                # Compress on write so stored uploads are always gzipped.
                data = gzip.open(filepath, 'wb')
                data.write(upload.read())
                data.close()
                self.gzipped = True
        elif local_path is not None:
            # Reference the existing local file instead of copying it.
            os.symlink(local_path, filepath)
        if not self.is_valid():
            # Remove the file we just created before bailing out.
            os.unlink(filepath)
            raise InvalidDataSource('invalid_data', 'Data source cannot be read')
    def __repr__(self):
        return '<DataSource %s as %s added %s by %r>' % (self.name, self.filetype, str(self.added), self.user)
    @property
    def imported(self):
        # A data source counts as imported once it is linked to a sample.
        return self.sample_id is not None
    @property
    def active(self):
        return self.imported and self.sample.active
    def data(self):
        """
        Get open file-like handle to data contained in this data source for
        reading.
        .. note:: Be sure to close after calling this.
        :raises DataUnavailable: If the file cannot be opened (e.g. it was
            removed from the cache).
        """
        filepath = os.path.join(current_app.config['FILES_DIR'], self.filename)
        try:
            if self.gzipped:
                return gzip.open(filepath)
            else:
                return open(filepath)
        except EnvironmentError:
            raise DataUnavailable('data_source_not_cached', 'Data source is not in the cache')
    def local_path(self):
        """
        Get a local filepath for the data.
        """
        return os.path.join(current_app.config['FILES_DIR'], self.filename)
    def is_valid(self):
        """
        Peek into the file and determine if it is of the given filetype.
        """
        data = self.data()
        def is_bed():
            # Todo.
            return True
        def is_vcf():
            # Only the first line is checked for the VCF version header.
            return 'fileformat=VCFv4.1' in data.readline()
        validators = {'bed': is_bed,
                      'vcf': is_vcf}
        # Redundant 'with data as data' simplified; still guarantees the
        # handle is closed after validation.
        with data:
            return validators[self.filetype]()
class Annotation(db.Model):
    """
    Annotated data source.
    Annotation data is stored as a gzipped file on disk under a generated
    UUID filename in the configured ``FILES_DIR`` directory.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    # Data source this annotation was derived from.
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    # Generated UUID used as the on-disk filename (see __init__).
    filename = db.Column(db.String(50))
    # Date this annotation was created.
    added = db.Column(db.Date)
    data_source = db.relationship(DataSource, backref=db.backref('annotations', lazy='dynamic'))
    def __init__(self, data_source):
        # Pick a fresh UUID as filename so annotations never collide on disk.
        self.data_source = data_source
        self.filename = str(uuid.uuid4())
        self.added = date.today()
    def __repr__(self):
        return '<Annotation for %r added %s>' % (self.data_source, str(self.added))
    def data(self):
        """
        Get open file-like handle to data contained in this annotation for
        reading.
        .. note:: Be sure to close after calling this.
        """
        filepath = os.path.join(current_app.config['FILES_DIR'], self.filename)
        return gzip.open(filepath)
    def data_writer(self):
        """
        Get open file-like handle to data contained in this annotation for
        writing.
        .. note:: Be sure to close after calling this.
        """
        filepath = os.path.join(current_app.config['FILES_DIR'], self.filename)
        return gzip.open(filepath, 'wb')
    def local_path(self):
        """
        Get a local filepath for the data.
        """
        return os.path.join(current_app.config['FILES_DIR'], self.filename)
class Observation(db.Model):
    """
    Observation in a sample.
    Links a Variant to the DataSource it was observed in, together with
    optional coverage/support numbers.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    variant_id = db.Column(db.Integer, db.ForeignKey('variant.id'))
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    # Depending on the type of sample, the following 3 fields may or may not
    # have data. If we have no data, we store None.
    total_coverage = db.Column(db.Integer)
    variant_coverage = db.Column(db.Integer)
    support = db.Column(db.Integer)  # Number of individuals.
    variant = db.relationship(Variant, backref=db.backref('observations', lazy='dynamic'))
    data_source = db.relationship(DataSource, backref=db.backref('observations', lazy='dynamic'))
    def __init__(self, variant, data_source, support=1, total_coverage=None, variant_coverage=None):
        self.variant = variant
        self.data_source = data_source
        self.total_coverage = total_coverage
        self.variant_coverage = variant_coverage
        self.support = support
    def __repr__(self):
        return '<Observation %r from %r>' % (self.variant, self.data_source)
class Region(db.Model):
    """
    Covered region for a sample.
    One (chromosome, begin, end) interval for a data source, plus its bin
    (computed by :func:`assign_bin`) used by the 'region_location' index
    for fast range queries.
    """
    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    id = db.Column(db.Integer, primary_key=True)
    data_source_id = db.Column(db.Integer, db.ForeignKey('data_source.id'))
    chromosome = db.Column(db.String(2))
    begin = db.Column(db.Integer)
    end = db.Column(db.Integer)
    # Derived from begin/end (see __init__); indexed for range queries.
    bin = db.Column(db.Integer)
    data_source = db.relationship(DataSource, backref=db.backref('regions', lazy='dynamic'))
    def __init__(self, data_source, chromosome, begin, end):
        self.data_source = data_source
        self.chromosome = chromosome
        self.begin = begin
        self.end = end
        # Note: bin must be recomputed if begin/end are ever changed.
        self.bin = assign_bin(self.begin, self.end)
    def __repr__(self):
        return '<Region from %r at chr%s:%i-%i>' % (self.data_source, self.chromosome, self.begin, self.end)
# Composite index supporting bin-based range queries on covered regions.
Index('region_location',
      Region.bin, Region.chromosome, Region.begin)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|x)\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(win)\\.cc$'],
['include', '/win/'],
['include', '/win_[^/]*\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_zh-TW.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(beng): rename to 'app' when moves to top level.
'target_name': 'app_base',
'type': '<(library)',
'msvs_guid': '4631946D-7D5F-44BD-A5A8-504C0A7033BE',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under app/ except for tests.
'../app/animation.cc',
'../app/animation.h',
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/chrome_canvas.cc',
'../app/gfx/chrome_canvas.h',
'../app/gfx/chrome_canvas_linux.cc',
'../app/gfx/chrome_canvas_win.cc',
'../app/gfx/chrome_font.h',
'../app/gfx/chrome_font_gtk.cc',
'../app/gfx/chrome_font_mac.mm',
'../app/gfx/chrome_font_skia.cc',
'../app/gfx/chrome_font_win.cc',
'../app/gfx/color_utils.cc',
'../app/gfx/color_utils.h',
'../app/gfx/favicon_size.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/gfx/insets.h',
'../app/gfx/path_gtk.cc',
'../app/gfx/path_win.cc',
'../app/gfx/path.h',
'../app/gfx/text_elider.cc',
'../app/gfx/text_elider.h',
'../app/l10n_util.cc',
'../app/l10n_util.h',
'../app/l10n_util_posix.cc',
'../app/l10n_util_win.cc',
'../app/l10n_util_win.h',
'../app/message_box_flags.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
'../app/resource_bundle.cc',
'../app/resource_bundle.h',
'../app/resource_bundle_win.cc',
'../app/resource_bundle_linux.cc',
'../app/resource_bundle_mac.mm',
'../app/slide_animation.cc',
'../app/slide_animation.h',
'../app/theme_provider.h',
'../app/throb_animation.cc',
'../app/throb_animation.h',
'../app/win_util.cc',
'../app/win_util.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['OS=="linux"', {
'dependencies': [
# chrome_font_gtk.cc uses fontconfig.
# TODO(evanm): I think this is wrong; it should just use GTK.
'../build/linux/system.gyp:fontconfig',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'variables': {
'grit_path': '../tools/grit/grit.py',
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': ['python', '<(grit_path)', '-i', '<(input_path)', 'build',
'-o', '<(grit_out_dir)', '-D', '<(chrome_build)'],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
'target_name': 'common',
'type': '<(library)',
'dependencies': [
'app_base',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/utils.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/unzip.cc', # Requires zlib directly.
'common/unzip.h',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'app_base',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/automation_constrained_window_tracker.h',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_win.cc',
'browser/back_forward_menu_model_win.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_win.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_menu_controller_gtk.cc',
'browser/bookmarks/bookmark_menu_controller_gtk.h',
'browser/bookmarks/bookmark_menu_controller_win.cc',
'browser/bookmarks/bookmark_menu_controller_win.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_theme_provider.cc',
'browser/browser_theme_provider.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.m',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/debugger/debugger_host.h',
'browser/debugger/debugger_host_impl.cpp',
'browser/debugger/debugger_host_impl.h',
'browser/debugger/debugger_io.h',
'browser/debugger/debugger_io_socket.cc',
'browser/debugger/debugger_io_socket.h',
'browser/debugger/debugger_node.cc',
'browser/debugger/debugger_node.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/debugger_shell.cc',
'browser/debugger/debugger_shell.h',
'browser/debugger/debugger_shell_stubs.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_view.h',
'browser/debugger/debugger_window.cc',
'browser/debugger/debugger_window.h',
'browser/debugger/debugger_wrapper.cc',
'browser/debugger/debugger_wrapper.h',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_mac.cc',
'browser/debugger/devtools_window_win.cc',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_theme_source.cc',
'browser/dom_ui/dom_ui_theme_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/encoding_menu_controller_delegate.cc',
'browser/encoding_menu_controller_delegate.h',
'browser/extensions/extension.cc',
'browser/extensions/extension.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_error_reporter.cc',
'browser/extensions/extension_error_reporter.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/hung_renderer_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/link_button_gtk.cc',
        'browser/gtk/link_button_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.h',
'browser/icon_loader.cc',
'browser/icon_loader_linux.cc',
'browser/icon_loader_mac.mm',
'browser/icon_loader_win.cc',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/icon_manager_linux.cc',
'browser/icon_manager_mac.mm',
'browser/icon_manager_win.cc',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_form_manager_win.cc',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.cc',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/media_resource_handler.cc',
'browser/renderer_host/media_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/renderer_security_policy.cc',
'browser/renderer_host/renderer_security_policy.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_context_menu_win.cc',
'browser/tab_contents/render_view_context_menu_win.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_contents_view_win.cc',
'browser/tab_contents/tab_contents_view_win.h',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container.cc',
'browser/views/blocked_popup_container.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation.cc',
'browser/views/download_started_animation.h',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.cc',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/hwnd_html_view.cc',
'browser/views/hwnd_html_view.h',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window.cc',
'browser/views/page_info_window.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_contents_container_view.cc',
'browser/views/tab_contents_container_view.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/hwnd_photobooth.cc',
'browser/views/tabs/hwnd_photobooth.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
# Windows-specific files.
'browser/download/download_exe.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/debugger/debugger_shell_stubs.cc',
'browser/icon_manager.cc',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/util/util.gyp:installer_util',
'../printing/printing.gyp:printing',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
'browser/history/history_publisher_none.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
}, { # else: OS!="win"
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_window.cc',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_win.cc',
'browser/dock_info.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/first_run.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
'browser/task_manager.cc',
'browser/window_sizer.cc',
],
}],
],
},
{
'target_name': 'plugin',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under plugin/ except for tests and
# mocks.
'plugin/chrome_plugin_host.cc',
'plugin/chrome_plugin_host.h',
'plugin/npobject_proxy.cc',
'plugin/npobject_proxy.h',
'plugin/npobject_stub.cc',
'plugin/npobject_stub.h',
'plugin/npobject_util.cc',
'plugin/npobject_util.h',
'plugin/plugin_channel.cc',
'plugin/plugin_channel.h',
'plugin/plugin_channel_base.cc',
'plugin/plugin_channel_base.h',
'plugin/plugin_main.cc',
'plugin/plugin_thread.cc',
'plugin/plugin_thread.h',
'plugin/webplugin_delegate_stub.cc',
'plugin/webplugin_delegate_stub.h',
'plugin/webplugin_proxy.cc',
'plugin/webplugin_proxy.h',
],
# These are layered in conditionals in the event other platforms
# end up using this module as well.
'conditions': [
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
],
},
{
'target_name': 'renderer',
'type': '<(library)',
'dependencies': [
'common',
'plugin',
'chrome_resources',
'chrome_strings',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
'../webkit/webkit.gyp:webkit',
],
'include_dirs': [
'..',
],
'sources': [
# TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
# But app/ is the wrong directory for them.
# Better is to remove the dep of *_tests on renderer, but in the
# short term I'd like the build to work.
'renderer/automation/dom_automation_controller.cc',
'renderer/automation/dom_automation_controller.h',
'renderer/extensions/bindings_utils.cc',
'renderer/extensions/bindings_utils.h',
'renderer/extensions/event_bindings.cc',
'renderer/extensions/event_bindings.h',
'renderer/extensions/extension_process_bindings.cc',
'renderer/extensions/extension_process_bindings.h',
'renderer/extensions/renderer_extension_bindings.cc',
'renderer/extensions/renderer_extension_bindings.h',
'renderer/loadtimes_extension_bindings.h',
'renderer/loadtimes_extension_bindings.cc',
'renderer/media/audio_renderer_impl.cc',
'renderer/media/audio_renderer_impl.h',
'renderer/media/buffered_data_source.cc',
'renderer/media/buffered_data_source.h',
'renderer/media/simple_data_source.cc',
'renderer/media/simple_data_source.h',
'renderer/media/video_renderer_impl.cc',
'renderer/media/video_renderer_impl.h',
'renderer/net/render_dns_master.cc',
'renderer/net/render_dns_master.h',
'renderer/net/render_dns_queue.cc',
'renderer/net/render_dns_queue.h',
'renderer/about_handler.cc',
'renderer/about_handler.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/debug_message_handler.cc',
'renderer/debug_message_handler.h',
'renderer/devtools_agent.cc',
'renderer/devtools_agent.h',
'renderer/devtools_agent_filter.cc',
'renderer/devtools_agent_filter.h',
'renderer/devtools_client.cc',
'renderer/devtools_client.h',
'renderer/dom_ui_bindings.cc',
'renderer/dom_ui_bindings.h',
'renderer/external_host_bindings.cc',
'renderer/external_host_bindings.h',
'renderer/external_extension.cc',
'renderer/external_extension.h',
'renderer/js_only_v8_extensions.cc',
'renderer/js_only_v8_extensions.h',
'renderer/localized_error.cc',
'renderer/localized_error.h',
'renderer/plugin_channel_host.cc',
'renderer/plugin_channel_host.h',
'renderer/render_process.cc',
'renderer/render_process.h',
'renderer/render_thread.cc',
'renderer/render_thread.h',
'renderer/render_view.cc',
'renderer/render_view.h',
'renderer/render_widget.cc',
'renderer/render_widget.h',
'renderer/renderer_glue.cc',
'renderer/renderer_histogram_snapshots.cc',
'renderer/renderer_histogram_snapshots.h',
'renderer/renderer_logging.h',
'renderer/renderer_logging_linux.cc',
'renderer/renderer_logging_mac.mm',
'renderer/renderer_logging_win.cc',
'renderer/renderer_main.cc',
'renderer/renderer_main_platform_delegate.h',
'renderer/renderer_main_platform_delegate_linux.cc',
'renderer/renderer_main_platform_delegate_mac.mm',
'renderer/renderer_main_platform_delegate_win.cc',
'renderer/renderer_webkitclient_impl.cc',
'renderer/renderer_webkitclient_impl.h',
'renderer/user_script_slave.cc',
'renderer/user_script_slave.h',
'renderer/visitedlink_slave.cc',
'renderer/visitedlink_slave.h',
'renderer/webmediaplayer_impl.cc',
'renderer/webmediaplayer_impl.h',
'renderer/webplugin_delegate_proxy.cc',
'renderer/webplugin_delegate_proxy.h',
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
'link_settings': {
'mac_bundle_resources': [
'renderer/renderer.sb',
],
},
'conditions': [
# Linux-specific rules.
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
# Windows-specific rules.
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
},],
# As of yet unported-from-Windows code.
['OS!="win"', {
'sources!': [
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
},],
],
},
{
'target_name': 'app',
'type': 'executable',
'mac_bundle': 1,
'dependencies': [
'common',
'browser',
'renderer',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'mac_bundle_resources': [
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/da.pak',
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
'<(INTERMEDIATE_DIR)/repack/he.pak',
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
'product_name': '<(branding)',
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
'variables': {
'bundle_id': 'com.google.Chrome',
},
# Only include breakpad in official builds.
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(branding).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
]
}, { # else: branding!="Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {
'bundle_id': 'org.chromium.Chromium',
},
}],
],
'xcode_settings': {
# chrome/app/app-Info.plist has a CFBundleIdentifier of
# CHROMIUM_BUNDLE_ID to be replaced by a branded bundle ID in Xcode
# with this setting.
'CHROMIUM_BUNDLE_ID': '<(bundle_id)',
},
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
}, { # else: Branding!="Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
'product_name': 'chrome'
}],
],
}],
['OS=="mac"', {
# Mac adds an action to modify the Info.plist to meet our needs
# (see the script for why this is done).
'actions': [
{
'action_name': 'tweak_app_infoplist',
# We don't list any inputs or outputs because we always want
# the script to run. Why? Because it does things like record
# the svn revision into the Info.plist, so there is no file to
# depend on that will change whenever that changes.
'inputs': [],
'outputs': [],
'action': ['<(DEPTH)/build/mac/tweak_app_infoplist',
'<(branding)'],
},
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'views',
'../build/temp_gyp/breakpad.gyp:breakpad_handler',
'../build/temp_gyp/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'worker',
],
},{ # else: OS!="win"
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_da',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_da.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_da.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_da.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_en_us',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_en-US.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_en-US.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_he',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_he.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_he.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_he.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_zh_tw',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_zh-TW.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_zh-TW.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_zh-TW.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
# TODO: these should live here but are currently used by
# production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_keyboard.h',
'renderer/mock_keyboard.cc',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/constrained_window_proxy.cc',
'test/automation/constrained_window_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/testing_browser_process.h',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/chrome_process_util_uitest.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/inspector_controller_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/perf/mem_usage.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'test_support_unit',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/autocomplete_popup_view_mac_unittest.mm',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.h',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extension_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/navigation_controller_unittest.cc',
'browser/navigation_entry_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/renderer_security_policy_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/site_instance_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'../app/animation_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'../app/gfx/chrome_font_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'../app/gfx/text_elider_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'../app/l10n_util_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/unzip_unittest.cc',
'../app/win_util_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/controls/tree/tree_node_iterator_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="mac"', {
        # The test fetches resources, which means the Mac build needs the app
        # bundle to exist on disk so it can pull from it.
'dependencies': [
'app',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/navigation_controller_unittest.cc',
'renderer/render_view_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'dependencies': [
'views',
],
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/net/url_util_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'dependencies': [
'app',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
['OS!="win"', {
'sources!': [
'test/perf/mem_usage.cc',
],
}],
],
},
],
'conditions': [
['OS=="linux"', {
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/convert_dict.cc',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
],
}],
['OS=="mac"',
# On Mac only, add a project target called "package_app" that only
# runs a shell script (package_chrome.sh).
# On Mac only, add a project target called "build_app_dmg" that only
# builds a DMG out of the App (eventually will completely replace
# "package_app").
{ 'targets': [
{
'target_name': 'package_app',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
'../breakpad/breakpad.gyp:dump_syms',
'../breakpad/breakpad.gyp:symupload',
],
'actions': [
{
'inputs': [],
'outputs': [],
'action_name': 'package_chrome',
'action': ['tools/mac/package_chrome.sh' ],
},
], # 'actions'
},
{
'target_name': 'build_app_dmg',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
],
'variables': {
'build_app_dmg_script_path': '<(DEPTH)/build/mac/build_app_dmg',
},
'actions': [
{
'inputs': [
'<(build_app_dmg_script_path)',
'<(PRODUCT_DIR)/<(branding).app',
],
'outputs': [
'<(PRODUCT_DIR)/<(branding).dmg',
],
'action_name': 'build_app_dmg',
'action': ['<(build_app_dmg_script_path)', '<@(branding)'],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'perf_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/visitedlink_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/visitedlink_perftest.cc',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
],
},
],
}], # OS!="mac"
['OS=="win" or OS=="linux"',
{ 'targets': [
{
'target_name': 'views',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'sources': [
        # All .cc and .h files under ../views, except unittests.
'../views/accelerator.cc',
'../views/accelerator.h',
'../views/accessibility/accessibility_types.h',
'../views/accessibility/view_accessibility.cc',
'../views/accessibility/view_accessibility.h',
'../views/accessibility/view_accessibility_wrapper.cc',
'../views/accessibility/view_accessibility_wrapper.h',
'../views/background.cc',
'../views/background.h',
'../views/border.cc',
'../views/border.h',
'../views/controls/button/button.cc',
'../views/controls/button/button.h',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/button_dropdown.h',
'../views/controls/button/checkbox.cc',
'../views/controls/button/checkbox.h',
'../views/controls/button/custom_button.cc',
'../views/controls/button/custom_button.h',
'../views/controls/button/image_button.cc',
'../views/controls/button/image_button.h',
'../views/controls/button/menu_button.cc',
'../views/controls/button/menu_button.h',
'../views/controls/button/native_button.cc',
'../views/controls/button/native_button.h',
'../views/controls/button/native_button_gtk.cc',
'../views/controls/button/native_button_gtk.h',
'../views/controls/button/native_button_win.cc',
'../views/controls/button/native_button_win.h',
'../views/controls/button/native_button_wrapper.h',
'../views/controls/button/radio_button.cc',
'../views/controls/button/radio_button.h',
'../views/controls/button/text_button.cc',
'../views/controls/button/text_button.h',
'../views/controls/combo_box.cc',
'../views/controls/combo_box.h',
'../views/controls/hwnd_view.cc',
'../views/controls/hwnd_view.h',
'../views/controls/image_view.cc',
'../views/controls/image_view.h',
'../views/controls/label.cc',
'../views/controls/label.h',
'../views/controls/link.cc',
'../views/controls/link.h',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/menu/chrome_menu.h',
'../views/controls/menu/controller.h',
'../views/controls/menu/menu.cc',
'../views/controls/menu/menu.h',
'../views/controls/menu/view_menu_delegate.h',
'../views/controls/message_box_view.cc',
'../views/controls/message_box_view.h',
'../views/controls/native_control.cc',
'../views/controls/native_control.h',
'../views/controls/native_control_gtk.cc',
'../views/controls/native_control_gtk.h',
'../views/controls/native_control_win.cc',
'../views/controls/native_control_win.h',
'../views/controls/native_view_host.cc',
'../views/controls/native_view_host.h',
'../views/controls/native_view_host_gtk.cc',
'../views/controls/native_view_host_gtk.h',
'../views/controls/scroll_view.cc',
'../views/controls/scroll_view.h',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.h',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/scrollbar/native_scroll_bar.h',
'../views/controls/scrollbar/scroll_bar.cc',
'../views/controls/scrollbar/scroll_bar.h',
'../views/controls/separator.cc',
'../views/controls/separator.h',
'../views/controls/single_split_view.cc',
'../views/controls/single_split_view.h',
'../views/controls/tabbed_pane.cc',
'../views/controls/tabbed_pane.h',
'../views/controls/table/group_table_view.cc',
'../views/controls/table/group_table_view.h',
'../views/controls/table/table_view.cc',
'../views/controls/table/table_view.h',
'../views/controls/text_field.cc',
'../views/controls/text_field.h',
'../views/controls/throbber.cc',
'../views/controls/throbber.h',
'../views/controls/tree/tree_model.h',
'../views/controls/tree/tree_node_iterator.h',
'../views/controls/tree/tree_node_model.h',
'../views/controls/tree/tree_view.cc',
'../views/controls/tree/tree_view.h',
'../views/event.cc',
'../views/event.h',
'../views/event_gtk.cc',
'../views/event_win.cc',
'../views/fill_layout.cc',
'../views/fill_layout.h',
'../views/focus/external_focus_tracker.cc',
'../views/focus/external_focus_tracker.h',
'../views/focus/focus_manager.cc',
'../views/focus/focus_manager.h',
'../views/focus/focus_util_win.cc',
'../views/focus/focus_util_win.h',
'../views/focus/view_storage.cc',
'../views/focus/view_storage.h',
'../views/grid_layout.cc',
'../views/grid_layout.h',
'../views/layout_manager.cc',
'../views/layout_manager.h',
'../views/painter.cc',
'../views/painter.h',
'../views/repeat_controller.cc',
'../views/repeat_controller.h',
'../views/standard_layout.h',
'../views/view.cc',
'../views/view.h',
'../views/view_constants.cc',
'../views/view_constants.h',
'../views/view_gtk.cc',
'../views/view_win.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/accelerator_handler.h',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/aero_tooltip_manager.h',
'../views/widget/root_view.cc',
'../views/widget/root_view.h',
'../views/widget/root_view_drop_target.cc',
'../views/widget/root_view_drop_target.h',
'../views/widget/root_view_gtk.cc',
'../views/widget/root_view_win.cc',
'../views/widget/tooltip_manager.cc',
'../views/widget/tooltip_manager.h',
'../views/widget/widget.h',
'../views/widget/widget_gtk.cc',
'../views/widget/widget_gtk.h',
'../views/widget/widget_win.cc',
'../views/widget/widget_win.h',
'../views/window/client_view.cc',
'../views/window/client_view.h',
'../views/window/custom_frame_view.cc',
'../views/window/custom_frame_view.h',
'../views/window/dialog_client_view.cc',
'../views/window/dialog_client_view.h',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_delegate.h',
'../views/window/native_frame_view.cc',
'../views/window/native_frame_view.h',
'../views/window/non_client_view.cc',
'../views/window/non_client_view.h',
'../views/window/window.h',
'../views/window/window_delegate.h',
'../views/window/window_delegate.cc',
'../views/window/window_resources.h',
'../views/window/window_gtk.cc',
'../views/window/window_gtk.h',
'../views/window/window_win.cc',
'../views/window/window_win.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'../views/accelerator.cc',
'../views/accessibility/accessible_wrapper.cc',
'../views/accessibility/view_accessibility.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/checkbox.cc',
'../views/controls/button/menu_button.cc',
'../views/controls/combo_box.cc',
'../views/controls/hwnd_view.cc',
'../views/controls/link.cc',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/menu/menu.cc',
'../views/controls/message_box_view.cc',
'../views/controls/scroll_view.cc',
'../views/controls/table/group_table_view.cc',
'../views/focus/external_focus_tracker.cc',
'../views/focus/focus_manager.cc',
'../views/controls/native_control.cc',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/button/radio_button.cc',
'../views/resize_corner.cc',
'../views/controls/separator.cc',
'../views/controls/single_split_view.cc',
'../views/controls/tabbed_pane.cc',
'../views/controls/table/table_view.cc',
'../views/controls/text_field.cc',
'../views/controls/tree/tree_view.cc',
'../views/event_win.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/root_view_drop_target.cc',
'../views/widget/tooltip_manager.cc',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_client_view.cc',
'../views/window/hit_test.cc',
'../views/window/native_frame_view.cc',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
['OS=="linux"', {
'sources!': [
'../views/accelerator.cc',
'../views/accessibility/accessible_wrapper.cc',
'../views/accessibility/view_accessibility.cc',
'../views/accessibility/view_accessibility_wrapper.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/checkbox.cc',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/combo_box.cc',
'../views/focus/focus_manager.cc',
'../views/controls/table/group_table_view.cc',
'../views/controls/hwnd_view.cc',
'../views/controls/link.cc',
'../views/controls/menu/menu.cc',
'../views/controls/button/menu_button.cc',
'../views/controls/message_box_view.cc',
'../views/controls/native_control.cc',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/button/radio_button.cc',
'../views/resize_corner.cc',
'../views/controls/separator.cc',
'../views/controls/single_split_view.cc',
'../views/controls/tabbed_pane.cc',
'../views/controls/table/table_view.cc',
'../views/controls/text_field.cc',
'../views/controls/tree/tree_view.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/root_view_drop_target.cc',
'../views/widget/tooltip_manager.cc',
'../views/widget/widget_win.cc',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_client_view.cc',
'../views/window/native_frame_view.cc',
],
}],
],
},
],
}], # OS=="win" or OS=="linux"
['OS=="win"',
{ 'targets': [
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'views',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/views/constrained_window_impl_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
              # Use the outputs of this action as inputs for the main target
              # build. This seems like a misnomer, but it makes the build
              # happy on Linux (scons).
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
# Add note about the branding files.
# Review URL: http://codereview.chromium.org/113264
# git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@15851 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/common.gypi',
],
'target_defaults': {
'sources/': [
['exclude', '/(cocoa|gtk|win)/'],
['exclude', '_(cocoa|gtk|linux|mac|posix|skia|win|x)\\.(cc|mm?)$'],
['exclude', '/(gtk|win|x11)_[^/]*\\.cc$'],
],
'conditions': [
['OS=="linux"', {'sources/': [
['include', '/gtk/'],
['include', '_(gtk|linux|posix|skia|x)\\.cc$'],
['include', '/(gtk|x11)_[^/]*\\.cc$'],
]}],
['OS=="mac"', {'sources/': [
['include', '/cocoa/'],
['include', '_(cocoa|mac|posix)\\.(cc|mm?)$'],
]}, { # else: OS != "mac"
'sources/': [
['exclude', '\\.mm?$'],
],
}],
['OS=="win"', {'sources/': [
['include', '_(win)\\.cc$'],
['include', '/win/'],
['include', '/win_[^/]*\\.cc$'],
]}],
],
},
'targets': [
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_resources',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT).pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Data resources.
'browser/debugger/resources/debugger_resources.grd',
'browser/browser_resources.grd',
'common/common_resources.grd',
'renderer/renderer_resources.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(mark): It would be better if each static library that needed
# to run grit would list its own .grd files, but unfortunately some
# of the static libraries currently have circular dependencies among
# generated headers.
'target_name': 'chrome_strings',
'type': 'none',
'rules': [
{
'rule_name': 'grit',
'extension': 'grd',
'variables': {
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'../tools/grit/grit.py',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/<(RULE_INPUT_ROOT).h',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/<(RULE_INPUT_ROOT)_zh-TW.pak',
],
'action': ['python', '<@(_inputs)', '-i', '<(RULE_INPUT_PATH)',
'build', '-o', '<(SHARED_INTERMEDIATE_DIR)/chrome',
'-D', '<(chrome_build)'],
'message': 'Generating resources from <(RULE_INPUT_PATH)',
},
],
'sources': [
# Localizable resources.
'app/resources/locale_settings.grd',
'app/chromium_strings.grd',
'app/generated_resources.grd',
'app/google_chrome_strings.grd',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
# TODO(beng): rename to 'app' when moves to top level.
'target_name': 'app_base',
'type': '<(library)',
'msvs_guid': '4631946D-7D5F-44BD-A5A8-504C0A7033BE',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under app/ except for tests.
'../app/animation.cc',
'../app/animation.h',
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/chrome_canvas.cc',
'../app/gfx/chrome_canvas.h',
'../app/gfx/chrome_canvas_linux.cc',
'../app/gfx/chrome_canvas_win.cc',
'../app/gfx/chrome_font.h',
'../app/gfx/chrome_font_gtk.cc',
'../app/gfx/chrome_font_mac.mm',
'../app/gfx/chrome_font_skia.cc',
'../app/gfx/chrome_font_win.cc',
'../app/gfx/color_utils.cc',
'../app/gfx/color_utils.h',
'../app/gfx/favicon_size.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/gfx/insets.h',
'../app/gfx/path_gtk.cc',
'../app/gfx/path_win.cc',
'../app/gfx/path.h',
'../app/gfx/text_elider.cc',
'../app/gfx/text_elider.h',
'../app/l10n_util.cc',
'../app/l10n_util.h',
'../app/l10n_util_posix.cc',
'../app/l10n_util_win.cc',
'../app/l10n_util_win.h',
'../app/message_box_flags.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
'../app/resource_bundle.cc',
'../app/resource_bundle.h',
'../app/resource_bundle_win.cc',
'../app/resource_bundle_linux.cc',
'../app/resource_bundle_mac.mm',
'../app/slide_animation.cc',
'../app/slide_animation.h',
'../app/theme_provider.h',
'../app/throb_animation.cc',
'../app/throb_animation.h',
'../app/win_util.cc',
'../app/win_util.h',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['OS=="linux"', {
'dependencies': [
# chrome_font_gtk.cc uses fontconfig.
# TODO(evanm): I think this is wrong; it should just use GTK.
'../build/linux/system.gyp:fontconfig',
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'../app/drag_drop_types.cc',
'../app/drag_drop_types.h',
'../app/gfx/icon_util.cc',
'../app/gfx/icon_util.h',
'../app/os_exchange_data.cc',
'../app/os_exchange_data.h',
],
}],
],
},
{
# theme_resources also generates a .cc file, so it can't use the rules above.
'target_name': 'theme_resources',
'type': 'none',
'variables': {
'grit_path': '../tools/grit/grit.py',
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/chrome',
},
'actions': [
{
'action_name': 'theme_resources',
'variables': {
'input_path': 'app/theme/theme_resources.grd',
'conditions': [
['branding=="Chrome"', {
# TODO(mmoss) The .grd files look for _google_chrome, but for
# consistency they should look for GOOGLE_CHROME_BUILD like C++.
# Clean this up when Windows moves to gyp.
'chrome_build': '_google_chrome',
}, { # else: branding!="Chrome"
'chrome_build': '_chromium',
}],
],
},
'inputs': [
'<(input_path)',
],
'outputs': [
'<(grit_out_dir)/grit/theme_resources.h',
'<(grit_out_dir)/grit/theme_resources_map.cc',
'<(grit_out_dir)/grit/theme_resources_map.h',
'<(grit_out_dir)/theme_resources.pak',
'<(grit_out_dir)/theme_resources.rc',
],
'action': ['python', '<(grit_path)', '-i', '<(input_path)', 'build',
'-o', '<(grit_out_dir)', '-D', '<(chrome_build)'],
'message': 'Generating resources from <(input_path)',
},
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
},
'conditions': [
['OS=="win"', {
'dependencies': ['../build/win/system.gyp:cygwin'],
}],
],
},
{
'target_name': 'common',
'type': '<(library)',
'dependencies': [
'app_base',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../net/net.gyp:net',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
],
'sources': [
# All .cc, .h, and .mm files under chrome/common except for tests.
'common/extensions/url_pattern.cc',
'common/extensions/url_pattern.h',
'common/extensions/user_script.cc',
'common/extensions/user_script.h',
'common/gfx/emf.cc',
'common/gfx/emf.h',
'common/gfx/utils.h',
'common/gtk_util.cc',
'common/gtk_util.h',
'common/net/cookie_monster_sqlite.cc',
'common/net/cookie_monster_sqlite.h',
'common/net/dns.h',
'common/net/url_request_intercept_job.cc',
'common/net/url_request_intercept_job.h',
'common/app_cache/app_cache_context_impl.cc',
'common/app_cache/app_cache_context_impl.h',
'common/app_cache/app_cache_dispatcher.cc',
'common/app_cache/app_cache_dispatcher.h',
'common/app_cache/app_cache_dispatcher_host.cc',
'common/app_cache/app_cache_dispatcher_host.h',
'common/bindings_policy.h',
'common/child_process.cc',
'common/child_process.h',
'common/child_process_host.cc',
'common/child_process_host.h',
'common/child_process_info.cc',
'common/child_process_info.h',
'common/child_thread.cc',
'common/child_thread.h',
'common/chrome_constants.cc',
'common/chrome_constants.h',
'common/chrome_counters.cc',
'common/chrome_counters.h',
'common/chrome_paths.cc',
'common/chrome_paths.h',
'common/chrome_paths_internal.h',
'common/chrome_paths_linux.cc',
'common/chrome_paths_mac.mm',
'common/chrome_paths_win.cc',
'common/chrome_plugin_api.h',
'common/chrome_plugin_lib.cc',
'common/chrome_plugin_lib.h',
'common/chrome_plugin_util.cc',
'common/chrome_plugin_util.h',
'common/chrome_switches.cc',
'common/chrome_switches.h',
'common/classfactory.cc',
'common/classfactory.h',
'common/common_glue.cc',
'common/debug_flags.cc',
'common/debug_flags.h',
'common/devtools_messages.h',
'common/devtools_messages_internal.h',
'common/env_vars.cc',
'common/env_vars.h',
'common/file_descriptor_set_posix.cc',
'common/file_descriptor_set_posix.h',
'common/filter_policy.h',
'common/gears_api.h',
'common/important_file_writer.cc',
'common/important_file_writer.h',
'common/ipc_channel.h',
'common/ipc_channel_posix.cc',
'common/ipc_channel_posix.h',
'common/ipc_channel_proxy.cc',
'common/ipc_channel_proxy.h',
'common/ipc_channel_win.cc',
'common/ipc_channel_win.h',
'common/ipc_logging.cc',
'common/ipc_logging.h',
'common/ipc_message.cc',
'common/ipc_message.h',
'common/ipc_message_macros.h',
'common/ipc_message_utils.cc',
'common/ipc_message_utils.h',
'common/ipc_sync_channel.cc',
'common/ipc_sync_channel.h',
'common/ipc_sync_message.cc',
'common/ipc_sync_message.h',
'common/json_value_serializer.cc',
'common/json_value_serializer.h',
'common/jstemplate_builder.cc',
'common/jstemplate_builder.h',
'common/libxml_utils.cc',
'common/libxml_utils.h',
'common/logging_chrome.cc',
'common/logging_chrome.h',
'common/main_function_params.h',
'common/message_router.cc',
'common/message_router.h',
'common/modal_dialog_event.h',
'common/mru_cache.h',
'common/navigation_types.h',
'common/native_web_keyboard_event.h',
'common/native_web_keyboard_event_linux.cc',
'common/native_web_keyboard_event_mac.mm',
'common/native_web_keyboard_event_win.cc',
'common/notification_details.h',
'common/notification_observer.h',
'common/notification_registrar.cc',
'common/notification_registrar.h',
'common/notification_service.cc',
'common/notification_service.h',
'common/notification_source.h',
'common/notification_type.h',
'common/owned_widget_gtk.cc',
'common/owned_widget_gtk.h',
'common/page_action.h',
'common/page_action.cc',
'common/page_transition_types.h',
'common/page_zoom.h',
'common/platform_util.h',
'common/platform_util_linux.cc',
'common/platform_util_mac.mm',
'common/platform_util_win.cc',
'common/plugin_messages.h',
'common/plugin_messages_internal.h',
'common/pref_member.cc',
'common/pref_member.h',
'common/pref_names.cc',
'common/pref_names.h',
'common/pref_service.cc',
'common/pref_service.h',
'common/process_watcher_posix.cc',
'common/process_watcher_win.cc',
'common/process_watcher.h',
'common/property_bag.cc',
'common/property_bag.h',
'common/quarantine_mac.h',
'common/quarantine_mac.mm',
'common/ref_counted_util.h',
'common/render_messages.h',
'common/render_messages_internal.h',
'common/resource_dispatcher.cc',
'common/resource_dispatcher.h',
'common/result_codes.h',
'common/sandbox_init_wrapper.cc',
'common/sandbox_init_wrapper.h',
'common/security_filter_peer.cc',
'common/security_filter_peer.h',
'common/sqlite_compiled_statement.cc',
'common/sqlite_compiled_statement.h',
'common/sqlite_utils.cc',
'common/sqlite_utils.h',
'common/task_queue.cc',
'common/task_queue.h',
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
'common/thumbnail_score.cc',
'common/thumbnail_score.h',
'common/time_format.cc',
'common/time_format.h',
'common/transport_dib.h',
'common/transport_dib_linux.cc',
'common/transport_dib_mac.cc',
'common/transport_dib_win.cc',
'common/unzip.cc', # Requires zlib directly.
'common/unzip.h',
'common/url_constants.cc',
'common/url_constants.h',
'common/visitedlink_common.cc',
'common/visitedlink_common.h',
'common/webkit_param_traits.h',
'common/win_safe_util.cc',
'common/win_safe_util.h',
'common/worker_thread_ticker.cc',
'common/worker_thread_ticker.h',
'common/x11_util.cc',
'common/x11_util.h',
'common/x11_util_internal.h',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'export_dependent_settings': [
'app_base',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'link_settings': {
'libraries': [
'-lX11',
'-lXrender',
'-lXext',
],
},
}, { # else: 'OS!="linux"'
'sources!': [
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'common/temp_scaffolding_stubs.cc',
'common/temp_scaffolding_stubs.h',
],
}, { # else: OS != "win"
'sources!': [
'common/gfx/emf.cc',
'common/classfactory.cc',
],
}],
],
},
{
'target_name': 'browser',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../net/net.gyp:net_resources',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'..',
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under browser except for tests and
# mocks.
'browser/alternate_nav_url_fetcher.cc',
'browser/alternate_nav_url_fetcher.h',
'browser/app_controller_mac.h',
'browser/app_controller_mac.mm',
'browser/app_modal_dialog.cc',
'browser/app_modal_dialog.h',
'browser/app_modal_dialog_gtk.cc',
'browser/app_modal_dialog_mac.mm',
'browser/app_modal_dialog_win.cc',
'browser/app_modal_dialog_queue.cc',
'browser/app_modal_dialog_queue.h',
'browser/autocomplete/autocomplete.cc',
'browser/autocomplete/autocomplete.h',
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/autocomplete/autocomplete_accessibility.h',
'browser/autocomplete/autocomplete_edit.cc',
'browser/autocomplete/autocomplete_edit.h',
'browser/autocomplete/autocomplete_edit_view.h',
'browser/autocomplete/autocomplete_edit_view_gtk.cc',
'browser/autocomplete/autocomplete_edit_view_gtk.h',
'browser/autocomplete/autocomplete_edit_view_mac.h',
'browser/autocomplete/autocomplete_edit_view_mac.mm',
'browser/autocomplete/autocomplete_edit_view_win.cc',
'browser/autocomplete/autocomplete_edit_view_win.h',
'browser/autocomplete/autocomplete_popup_model.cc',
'browser/autocomplete/autocomplete_popup_model.h',
'browser/autocomplete/autocomplete_popup_view.h',
'browser/autocomplete/autocomplete_popup_view_gtk.cc',
'browser/autocomplete/autocomplete_popup_view_gtk.h',
'browser/autocomplete/autocomplete_popup_view_mac.h',
'browser/autocomplete/autocomplete_popup_view_mac.mm',
'browser/autocomplete/autocomplete_popup_view_win.cc',
'browser/autocomplete/autocomplete_popup_view_win.h',
'browser/autocomplete/history_contents_provider.cc',
'browser/autocomplete/history_contents_provider.h',
'browser/autocomplete/history_url_provider.cc',
'browser/autocomplete/history_url_provider.h',
'browser/autocomplete/keyword_provider.cc',
'browser/autocomplete/keyword_provider.h',
'browser/autocomplete/search_provider.cc',
'browser/autocomplete/search_provider.h',
'browser/autofill_manager.cc',
'browser/autofill_manager.h',
'browser/automation/automation_autocomplete_edit_tracker.h',
'browser/automation/automation_browser_tracker.h',
'browser/automation/automation_constrained_window_tracker.h',
'browser/automation/automation_provider.cc',
'browser/automation/automation_provider.h',
'browser/automation/automation_provider_list.cc',
'browser/automation/automation_provider_list_generic.cc',
'browser/automation/automation_provider_list_mac.mm',
'browser/automation/automation_provider_list.h',
'browser/automation/automation_resource_tracker.cc',
'browser/automation/automation_resource_tracker.h',
'browser/automation/automation_tab_tracker.h',
'browser/automation/automation_window_tracker.h',
'browser/automation/ui_controls.cc',
'browser/automation/ui_controls.h',
'browser/automation/url_request_failed_dns_job.cc',
'browser/automation/url_request_failed_dns_job.h',
# TODO: These should be moved to test_support (see below), but
# are currently used by production code in automation_provider.cc.
'browser/automation/url_request_mock_http_job.cc',
'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_slow_download_job.cc',
'browser/automation/url_request_slow_download_job.h',
'browser/back_forward_menu_model.cc',
'browser/back_forward_menu_model.h',
'browser/back_forward_menu_model_win.cc',
'browser/back_forward_menu_model_win.h',
'browser/bookmarks/bookmark_codec.cc',
'browser/bookmarks/bookmark_codec.h',
'browser/bookmarks/bookmark_context_menu_gtk.cc',
'browser/bookmarks/bookmark_context_menu_win.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_context_menu.h',
'browser/bookmarks/bookmark_drag_data.cc',
'browser/bookmarks/bookmark_drag_data.h',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/bookmarks/bookmark_drop_info.h',
'browser/bookmarks/bookmark_editor.h',
'browser/bookmarks/bookmark_folder_tree_model.cc',
'browser/bookmarks/bookmark_folder_tree_model.h',
'browser/bookmarks/bookmark_html_writer.cc',
'browser/bookmarks/bookmark_html_writer.h',
'browser/bookmarks/bookmark_menu_controller_gtk.cc',
'browser/bookmarks/bookmark_menu_controller_gtk.h',
'browser/bookmarks/bookmark_menu_controller_win.cc',
'browser/bookmarks/bookmark_menu_controller_win.h',
'browser/bookmarks/bookmark_model.cc',
'browser/bookmarks/bookmark_model.h',
'browser/bookmarks/bookmark_service.h',
'browser/bookmarks/bookmark_storage.cc',
'browser/bookmarks/bookmark_storage.h',
'browser/bookmarks/bookmark_table_model.cc',
'browser/bookmarks/bookmark_table_model.h',
'browser/bookmarks/bookmark_utils.cc',
'browser/bookmarks/bookmark_utils.h',
'browser/browser.cc',
'browser/browser.h',
'browser/browser_about_handler.cc',
'browser/browser_about_handler.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility.h',
'browser/browser_accessibility_manager.cc',
'browser/browser_accessibility_manager.h',
'browser/browser_init.cc',
'browser/browser_init.h',
'browser/browser_list.cc',
'browser/browser_list.h',
'browser/browser_main.cc',
'browser/browser_main_gtk.cc',
'browser/browser_main_mac.mm',
'browser/browser_main_win.cc',
'browser/browser_main_win.h',
'browser/browser_prefs.cc',
'browser/browser_prefs.h',
'browser/browser_process.cc',
'browser/browser_process.h',
'browser/browser_process_impl.cc',
'browser/browser_process_impl.h',
'browser/browser_shutdown.cc',
'browser/browser_shutdown.h',
'browser/browser_theme_provider.cc',
'browser/browser_theme_provider.h',
'browser/browser_trial.cc',
'browser/browser_trial.h',
'browser/browser_url_handler.cc',
'browser/browser_url_handler.h',
'browser/browser_window.h',
'browser/browser_window_factory.mm',
'browser/browsing_data_remover.cc',
'browser/browsing_data_remover.h',
'browser/browsing_instance.cc',
'browser/browsing_instance.h',
'browser/cancelable_request.cc',
'browser/cancelable_request.h',
'browser/cert_store.cc',
'browser/cert_store.h',
'browser/character_encoding.cc',
'browser/character_encoding.h',
'browser/chrome_plugin_browsing_context.cc',
'browser/chrome_plugin_browsing_context.h',
'browser/chrome_plugin_host.cc',
'browser/chrome_plugin_host.h',
'browser/chrome_thread.cc',
'browser/chrome_thread.h',
'browser/cocoa/base_view.h',
'browser/cocoa/base_view.mm',
'browser/cocoa/bookmark_bar_controller.h',
'browser/cocoa/bookmark_bar_controller.mm',
'browser/cocoa/bookmark_menu_bridge.h',
'browser/cocoa/bookmark_menu_bridge.mm',
'browser/cocoa/bookmark_menu_cocoa_controller.h',
'browser/cocoa/bookmark_menu_cocoa_controller.mm',
'browser/cocoa/browser_test_helper.h',
'browser/cocoa/browser_window_cocoa.h',
'browser/cocoa/browser_window_cocoa.mm',
'browser/cocoa/browser_window_controller.h',
'browser/cocoa/browser_window_controller.mm',
'browser/cocoa/cocoa_test_helper.h',
'browser/cocoa/command_observer_bridge.h',
'browser/cocoa/command_observer_bridge.mm',
'browser/cocoa/find_bar_bridge.h',
'browser/cocoa/find_bar_bridge.mm',
'browser/cocoa/find_bar_cocoa_controller.h',
'browser/cocoa/find_bar_cocoa_controller.mm',
'browser/cocoa/find_bar_view.h',
'browser/cocoa/find_bar_view.mm',
'browser/cocoa/grow_box_view.h',
'browser/cocoa/grow_box_view.m',
'browser/cocoa/location_bar_view_mac.h',
'browser/cocoa/location_bar_view_mac.mm',
'browser/cocoa/preferences_window_controller.h',
'browser/cocoa/preferences_window_controller.mm',
'browser/cocoa/sad_tab_view.h',
'browser/cocoa/sad_tab_view.mm',
'browser/cocoa/shell_dialogs_mac.mm',
'browser/cocoa/status_bubble_mac.h',
'browser/cocoa/status_bubble_mac.mm',
'browser/cocoa/tab_cell.h',
'browser/cocoa/tab_cell.mm',
'browser/cocoa/tab_contents_controller.h',
'browser/cocoa/tab_contents_controller.mm',
'browser/cocoa/tab_controller.h',
'browser/cocoa/tab_controller.mm',
'browser/cocoa/tab_strip_controller.h',
'browser/cocoa/tab_strip_controller.mm',
'browser/cocoa/tab_strip_model_observer_bridge.h',
'browser/cocoa/tab_strip_model_observer_bridge.mm',
'browser/cocoa/tab_strip_view.h',
'browser/cocoa/tab_strip_view.mm',
'browser/cocoa/tab_view.h',
'browser/cocoa/tab_view.mm',
'browser/cocoa/tab_window_controller.h',
'browser/cocoa/tab_window_controller.mm',
'browser/cocoa/toolbar_button_cell.h',
'browser/cocoa/toolbar_button_cell.mm',
'browser/cocoa/toolbar_controller.h',
'browser/cocoa/toolbar_controller.mm',
'browser/cocoa/toolbar_view.h',
'browser/cocoa/toolbar_view.mm',
'browser/command_updater.cc',
'browser/command_updater.h',
'browser/cross_site_request_manager.cc',
'browser/cross_site_request_manager.h',
'browser/debugger/debugger_host.h',
'browser/debugger/debugger_host_impl.cpp',
'browser/debugger/debugger_host_impl.h',
'browser/debugger/debugger_io.h',
'browser/debugger/debugger_io_socket.cc',
'browser/debugger/debugger_io_socket.h',
'browser/debugger/debugger_node.cc',
'browser/debugger/debugger_node.h',
'browser/debugger/debugger_remote_service.cc',
'browser/debugger/debugger_remote_service.h',
'browser/debugger/debugger_shell.cc',
'browser/debugger/debugger_shell.h',
'browser/debugger/debugger_shell_stubs.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_view.h',
'browser/debugger/debugger_window.cc',
'browser/debugger/debugger_window.h',
'browser/debugger/debugger_wrapper.cc',
'browser/debugger/debugger_wrapper.h',
'browser/debugger/devtools_client_host.h',
'browser/debugger/devtools_manager.cc',
'browser/debugger/devtools_manager.h',
'browser/debugger/devtools_protocol_handler.cc',
'browser/debugger/devtools_protocol_handler.h',
'browser/debugger/devtools_remote.h',
'browser/debugger/devtools_remote_listen_socket.cc',
'browser/debugger/devtools_remote_listen_socket.h',
'browser/debugger/devtools_remote_message.cc',
'browser/debugger/devtools_remote_message.h',
'browser/debugger/devtools_remote_service.cc',
'browser/debugger/devtools_remote_service.h',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_mac.cc',
'browser/debugger/devtools_window_win.cc',
'browser/debugger/inspectable_tab_proxy.cc',
'browser/debugger/inspectable_tab_proxy.h',
'browser/dock_info.cc',
'browser/dock_info.h',
'browser/dom_operation_notification_details.h',
'browser/dom_ui/chrome_url_data_manager.cc',
'browser/dom_ui/chrome_url_data_manager.h',
'browser/dom_ui/debugger_ui.cc',
'browser/dom_ui/debugger_ui.h',
'browser/dom_ui/devtools_ui.cc',
'browser/dom_ui/devtools_ui.h',
'browser/dom_ui/dom_ui.cc',
'browser/dom_ui/dom_ui.h',
'browser/dom_ui/dom_ui_factory.cc',
'browser/dom_ui/dom_ui_factory.h',
'browser/dom_ui/dom_ui_favicon_source.cc',
'browser/dom_ui/dom_ui_favicon_source.h',
'browser/dom_ui/dom_ui_theme_source.cc',
'browser/dom_ui/dom_ui_theme_source.h',
'browser/dom_ui/dom_ui_thumbnail_source.cc',
'browser/dom_ui/dom_ui_thumbnail_source.h',
'browser/dom_ui/downloads_ui.cc',
'browser/dom_ui/downloads_ui.h',
'browser/dom_ui/fileicon_source.cc',
'browser/dom_ui/fileicon_source.h',
'browser/dom_ui/history_ui.cc',
'browser/dom_ui/history_ui.h',
'browser/dom_ui/html_dialog_ui.cc',
'browser/dom_ui/html_dialog_ui.h',
'browser/dom_ui/new_tab_ui.cc',
'browser/dom_ui/new_tab_ui.h',
'browser/download/download_exe.cc',
'browser/download/download_file.cc',
'browser/download/download_file.h',
'browser/download/download_item_model.cc',
'browser/download/download_item_model.h',
'browser/download/download_manager.cc',
'browser/download/download_manager.h',
'browser/download/download_request_dialog_delegate.h',
'browser/download/download_request_dialog_delegate_win.cc',
'browser/download/download_request_dialog_delegate_win.h',
'browser/download/download_request_manager.cc',
'browser/download/download_request_manager.h',
'browser/download/download_shelf.cc',
'browser/download/download_shelf.h',
'browser/download/download_util.cc',
'browser/download/download_util.h',
'browser/download/save_file.cc',
'browser/download/save_file.h',
'browser/download/save_file_manager.cc',
'browser/download/save_file_manager.h',
'browser/download/save_item.cc',
'browser/download/save_item.h',
'browser/download/save_package.cc',
'browser/download/save_package.h',
'browser/download/save_types.h',
'browser/encoding_menu_controller_delegate.cc',
'browser/encoding_menu_controller_delegate.h',
'browser/extensions/extension.cc',
'browser/extensions/extension.h',
'browser/extensions/extension_bookmarks_module.cc',
'browser/extensions/extension_bookmarks_module.h',
'browser/extensions/extension_error_reporter.cc',
'browser/extensions/extension_error_reporter.h',
'browser/extensions/extension_function.cc',
'browser/extensions/extension_function.h',
'browser/extensions/extension_function_dispatcher.cc',
'browser/extensions/extension_function_dispatcher.h',
'browser/extensions/extension_host.cc',
'browser/extensions/extension_host.h',
'browser/extensions/extension_message_service.cc',
'browser/extensions/extension_message_service.h',
'browser/extensions/extension_browser_event_router.cc',
'browser/extensions/extension_browser_event_router.h',
'browser/extensions/extension_page_actions_module.h',
'browser/extensions/extension_page_actions_module.cc',
'browser/extensions/extension_process_manager.cc',
'browser/extensions/extension_process_manager.h',
'browser/extensions/extension_protocols.cc',
'browser/extensions/extension_protocols.h',
'browser/extensions/extension_tabs_module.cc',
'browser/extensions/extension_tabs_module.h',
'browser/extensions/extension_view.cc',
'browser/extensions/extension_view.h',
'browser/extensions/extensions_service.cc',
'browser/extensions/extensions_service.h',
'browser/extensions/extensions_ui.cc',
'browser/extensions/extensions_ui.h',
'browser/extensions/user_script_master.cc',
'browser/extensions/user_script_master.h',
'browser/external_protocol_handler.cc',
'browser/external_protocol_handler.h',
'browser/external_tab_container.cc',
'browser/external_tab_container.h',
'browser/fav_icon_helper.cc',
'browser/fav_icon_helper.h',
'browser/find_bar.h',
'browser/find_bar_controller.cc',
'browser/find_bar_controller.h',
'browser/find_notification_details.h',
'browser/first_run.cc',
'browser/first_run.h',
'browser/gears_integration.cc',
'browser/gears_integration.h',
'browser/google_update.cc',
'browser/google_update.h',
'browser/google_url_tracker.cc',
'browser/google_url_tracker.h',
'browser/google_util.cc',
'browser/google_util.h',
'browser/gtk/about_chrome_dialog.cc',
'browser/gtk/about_chrome_dialog.h',
'browser/gtk/back_forward_menu_model_gtk.cc',
'browser/gtk/back_forward_menu_model_gtk.h',
'browser/gtk/bookmark_bar_gtk.cc',
'browser/gtk/bookmark_bar_gtk.h',
'browser/gtk/bookmark_bubble_gtk.cc',
'browser/gtk/bookmark_bubble_gtk.h',
'browser/gtk/bookmark_editor_gtk.cc',
'browser/gtk/bookmark_editor_gtk.h',
'browser/gtk/bookmark_tree_model.cc',
'browser/gtk/bookmark_tree_model.h',
'browser/gtk/browser_toolbar_gtk.cc',
'browser/gtk/browser_toolbar_gtk.h',
'browser/gtk/browser_window_factory_gtk.cc',
'browser/gtk/browser_window_gtk.cc',
'browser/gtk/browser_window_gtk.h',
'browser/gtk/custom_button.cc',
'browser/gtk/custom_button.h',
'browser/gtk/dialogs_gtk.cc',
'browser/gtk/download_item_gtk.cc',
'browser/gtk/download_item_gtk.h',
'browser/gtk/download_shelf_gtk.cc',
'browser/gtk/download_shelf_gtk.h',
'browser/gtk/go_button_gtk.cc',
'browser/gtk/go_button_gtk.h',
'browser/gtk/gtk_chrome_button.cc',
'browser/gtk/gtk_chrome_button.h',
'browser/gtk/hung_renderer_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.cc',
'browser/gtk/import_dialog_gtk.h',
'browser/gtk/info_bubble_gtk.cc',
'browser/gtk/info_bubble_gtk.h',
'browser/gtk/infobar_container_gtk.cc',
'browser/gtk/infobar_container_gtk.h',
'browser/gtk/infobar_gtk.cc',
'browser/gtk/infobar_gtk.h',
'browser/gtk/find_bar_gtk.cc',
'browser/gtk/find_bar_gtk.h',
'browser/gtk/link_button_gtk.cc',
        'browser/gtk/link_button_gtk.h',
'browser/gtk/location_bar_view_gtk.cc',
'browser/gtk/location_bar_view_gtk.h',
'browser/gtk/menu_gtk.cc',
'browser/gtk/menu_gtk.h',
'browser/gtk/nine_box.cc',
'browser/gtk/nine_box.h',
'browser/gtk/sad_tab_gtk.cc',
'browser/gtk/sad_tab_gtk.h',
'browser/gtk/slide_animator_gtk.cc',
'browser/gtk/slide_animator_gtk.h',
'browser/gtk/standard_menus.cc',
'browser/gtk/standard_menus.h',
'browser/gtk/status_bubble_gtk.cc',
'browser/gtk/status_bubble_gtk.h',
'browser/gtk/tab_contents_container_gtk.cc',
'browser/gtk/tab_contents_container_gtk.h',
'browser/gtk/tabs/dragged_tab_controller_gtk.cc',
'browser/gtk/tabs/dragged_tab_controller_gtk.h',
'browser/gtk/tabs/tab_gtk.cc',
'browser/gtk/tabs/tab_gtk.h',
'browser/gtk/tabs/tab_renderer_gtk.cc',
'browser/gtk/tabs/tab_renderer_gtk.h',
'browser/gtk/tabs/tab_strip_gtk.cc',
'browser/gtk/tabs/tab_strip_gtk.h',
'browser/gtk/toolbar_star_toggle_gtk.cc',
'browser/gtk/toolbar_star_toggle_gtk.h',
'browser/hang_monitor/hung_plugin_action.cc',
'browser/hang_monitor/hung_plugin_action.h',
'browser/hang_monitor/hung_window_detector.cc',
'browser/hang_monitor/hung_window_detector.h',
'browser/history/archived_database.cc',
'browser/history/archived_database.h',
'browser/history/download_database.cc',
'browser/history/download_database.h',
'browser/history/download_types.h',
'browser/history/expire_history_backend.cc',
'browser/history/expire_history_backend.h',
'browser/history/history.cc',
'browser/history/history.h',
'browser/history/history_backend.cc',
'browser/history/history_backend.h',
'browser/history/history_database.cc',
'browser/history/history_database.h',
'browser/history/history_marshaling.h',
'browser/history/history_notifications.h',
'browser/history/history_publisher.cc',
'browser/history/history_publisher.h',
'browser/history/history_publisher_none.cc',
'browser/history/history_publisher_win.cc',
'browser/history/history_types.cc',
'browser/history/history_types.h',
'browser/history/in_memory_database.cc',
'browser/history/in_memory_database.h',
'browser/history/in_memory_history_backend.cc',
'browser/history/in_memory_history_backend.h',
'browser/history/page_usage_data.cc',
'browser/history/page_usage_data.h',
'browser/history/query_parser.cc',
'browser/history/query_parser.h',
'browser/history/snippet.cc',
'browser/history/snippet.h',
'browser/history/starred_url_database.cc',
'browser/history/starred_url_database.h',
'browser/history/text_database.cc',
'browser/history/text_database.h',
'browser/history/text_database_manager.cc',
'browser/history/text_database_manager.h',
'browser/history/thumbnail_database.cc',
'browser/history/thumbnail_database.h',
'browser/history/url_database.cc',
'browser/history/url_database.h',
'browser/history/visit_database.cc',
'browser/history/visit_database.h',
'browser/history/visit_tracker.cc',
'browser/history/visit_tracker.h',
'browser/history/visitsegment_database.cc',
'browser/history/visitsegment_database.h',
'browser/hung_renderer_dialog.h',
'browser/icon_loader.h',
'browser/icon_loader.cc',
'browser/icon_loader_linux.cc',
'browser/icon_loader_mac.mm',
'browser/icon_loader_win.cc',
'browser/icon_manager.cc',
'browser/icon_manager.h',
'browser/icon_manager_linux.cc',
'browser/icon_manager_mac.mm',
'browser/icon_manager_win.cc',
'browser/ime_input.cc',
'browser/ime_input.h',
'browser/importer/firefox2_importer.cc',
'browser/importer/firefox2_importer.h',
'browser/importer/firefox3_importer.cc',
'browser/importer/firefox3_importer.h',
'browser/importer/firefox_importer_utils.cc',
'browser/importer/firefox_importer_utils.h',
'browser/importer/firefox_profile_lock.cc',
'browser/importer/firefox_profile_lock.h',
'browser/importer/firefox_profile_lock_posix.cc',
'browser/importer/firefox_profile_lock_win.cc',
'browser/importer/ie_importer.cc',
'browser/importer/ie_importer.h',
'browser/importer/importer.cc',
'browser/importer/importer.h',
'browser/importer/mork_reader.cc',
'browser/importer/mork_reader.h',
'browser/importer/toolbar_importer.cc',
'browser/importer/toolbar_importer.h',
'browser/input_window_dialog.h',
'browser/input_window_dialog_gtk.cc',
'browser/input_window_dialog_win.cc',
'browser/jankometer.cc',
'browser/jankometer.h',
'browser/jsmessage_box_handler.cc',
'browser/jsmessage_box_handler.h',
'browser/load_from_memory_cache_details.h',
'browser/load_notification_details.h',
'browser/location_bar.h',
'browser/login_prompt.cc',
'browser/login_prompt.h',
'browser/memory_details.cc',
'browser/memory_details.h',
'browser/meta_table_helper.cc',
'browser/meta_table_helper.h',
'browser/metrics/metrics_log.cc',
'browser/metrics/metrics_log.h',
'browser/metrics/metrics_response.cc',
'browser/metrics/metrics_response.h',
'browser/metrics/metrics_service.cc',
'browser/metrics/metrics_service.h',
'browser/metrics/user_metrics.cc',
'browser/metrics/user_metrics.h',
'browser/modal_html_dialog_delegate.cc',
'browser/modal_html_dialog_delegate.h',
'browser/net/chrome_url_request_context.cc',
'browser/net/chrome_url_request_context.h',
'browser/net/dns_global.cc',
'browser/net/dns_global.h',
'browser/net/dns_host_info.cc',
'browser/net/dns_host_info.h',
'browser/net/dns_master.cc',
'browser/net/dns_master.h',
'browser/net/referrer.cc',
'browser/net/referrer.h',
'browser/net/resolve_proxy_msg_helper.cc',
'browser/net/resolve_proxy_msg_helper.h',
'browser/net/sdch_dictionary_fetcher.cc',
'browser/net/sdch_dictionary_fetcher.h',
'browser/net/url_fetcher.cc',
'browser/net/url_fetcher.h',
'browser/net/url_fetcher_protect.cc',
'browser/net/url_fetcher_protect.h',
'browser/net/url_fixer_upper.cc',
'browser/net/url_fixer_upper.h',
'browser/options_window.h',
'browser/page_state.cc',
'browser/page_state.h',
'browser/password_manager/encryptor_linux.cc',
'browser/password_manager/encryptor_mac.mm',
'browser/password_manager/encryptor_win.cc',
'browser/password_manager/encryptor.h',
'browser/password_manager/ie7_password.cc',
'browser/password_manager/ie7_password.h',
'browser/password_manager/password_form_manager.cc',
'browser/password_manager/password_form_manager.h',
'browser/password_manager/password_form_manager_win.cc',
'browser/password_manager/password_manager.cc',
'browser/password_manager/password_manager.h',
'browser/plugin_installer.cc',
'browser/plugin_installer.h',
'browser/plugin_process_host.cc',
'browser/plugin_process_host.h',
'browser/plugin_service.cc',
'browser/plugin_service.h',
'browser/printing/page_number.cc',
'browser/printing/page_number.h',
'browser/printing/page_overlays.cc',
'browser/printing/page_overlays.h',
'browser/printing/page_range.cc',
'browser/printing/page_range.h',
'browser/printing/page_setup.cc',
'browser/printing/page_setup.h',
'browser/printing/print_job.cc',
'browser/printing/print_job.h',
'browser/printing/print_job_manager.cc',
'browser/printing/print_job_manager.h',
'browser/printing/print_job_worker.cc',
'browser/printing/print_job_worker.h',
'browser/printing/print_job_worker_owner.h',
'browser/printing/print_settings.cc',
'browser/printing/print_settings.h',
'browser/printing/print_view_manager.cc',
'browser/printing/print_view_manager.h',
'browser/printing/printed_document.cc',
'browser/printing/printed_document.h',
'browser/printing/printed_page.cc',
'browser/printing/printed_page.h',
'browser/printing/printed_pages_source.h',
'browser/printing/printer_query.cc',
'browser/printing/printer_query.h',
'browser/printing/win_printing_context.cc',
'browser/printing/win_printing_context.h',
'browser/process_singleton.h',
'browser/process_singleton_linux.cc',
'browser/process_singleton_mac.cc',
'browser/process_singleton_win.cc',
'browser/profile.cc',
'browser/profile.h',
'browser/profile_manager.cc',
'browser/profile_manager.h',
'browser/renderer_host/async_resource_handler.cc',
'browser/renderer_host/async_resource_handler.h',
'browser/renderer_host/audio_renderer_host.cc',
'browser/renderer_host/audio_renderer_host.h',
'browser/renderer_host/backing_store.cc',
'browser/renderer_host/backing_store.h',
'browser/renderer_host/backing_store_mac.cc',
'browser/renderer_host/backing_store_win.cc',
'browser/renderer_host/backing_store_x.cc',
'browser/renderer_host/browser_render_process_host.cc',
'browser/renderer_host/browser_render_process_host.h',
'browser/renderer_host/buffered_resource_handler.cc',
'browser/renderer_host/buffered_resource_handler.h',
'browser/renderer_host/cross_site_resource_handler.cc',
'browser/renderer_host/cross_site_resource_handler.h',
'browser/renderer_host/download_resource_handler.cc',
'browser/renderer_host/download_resource_handler.h',
'browser/renderer_host/download_throttling_resource_handler.cc',
'browser/renderer_host/download_throttling_resource_handler.h',
'browser/renderer_host/media_resource_handler.cc',
'browser/renderer_host/media_resource_handler.h',
'browser/renderer_host/render_process_host.cc',
'browser/renderer_host/render_process_host.h',
'browser/renderer_host/render_view_host.cc',
'browser/renderer_host/render_view_host.h',
'browser/renderer_host/render_view_host_delegate.h',
'browser/renderer_host/render_view_host_factory.cc',
'browser/renderer_host/render_view_host_factory.h',
'browser/renderer_host/render_widget_helper.cc',
'browser/renderer_host/render_widget_helper.h',
'browser/renderer_host/render_widget_host.cc',
'browser/renderer_host/render_widget_host.h',
'browser/renderer_host/render_widget_host_view.h',
'browser/renderer_host/render_widget_host_view_gtk.cc',
'browser/renderer_host/render_widget_host_view_gtk.h',
'browser/renderer_host/render_widget_host_view_mac.h',
'browser/renderer_host/render_widget_host_view_mac.mm',
'browser/renderer_host/render_widget_host_view_win.cc',
'browser/renderer_host/render_widget_host_view_win.h',
'browser/renderer_host/renderer_security_policy.cc',
'browser/renderer_host/renderer_security_policy.h',
'browser/renderer_host/resource_dispatcher_host.cc',
'browser/renderer_host/resource_dispatcher_host.h',
'browser/renderer_host/resource_handler.h',
'browser/renderer_host/resource_message_filter.cc',
'browser/renderer_host/resource_message_filter.h',
'browser/renderer_host/resource_message_filter_gtk.cc',
'browser/renderer_host/resource_message_filter_mac.mm',
'browser/renderer_host/resource_message_filter_win.cc',
'browser/renderer_host/resource_request_details.h',
'browser/renderer_host/safe_browsing_resource_handler.cc',
'browser/renderer_host/safe_browsing_resource_handler.h',
'browser/renderer_host/save_file_resource_handler.cc',
'browser/renderer_host/save_file_resource_handler.h',
'browser/renderer_host/sync_resource_handler.cc',
'browser/renderer_host/sync_resource_handler.h',
'browser/renderer_host/web_cache_manager.cc',
'browser/renderer_host/web_cache_manager.h',
'browser/rlz/rlz.cc',
'browser/rlz/rlz.h',
'browser/safe_browsing/bloom_filter.cc',
'browser/safe_browsing/bloom_filter.h',
'browser/safe_browsing/chunk_range.cc',
'browser/safe_browsing/chunk_range.h',
'browser/safe_browsing/protocol_manager.cc',
'browser/safe_browsing/protocol_manager.h',
'browser/safe_browsing/protocol_parser.cc',
'browser/safe_browsing/protocol_parser.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_database.cc',
'browser/safe_browsing/safe_browsing_database.h',
'browser/safe_browsing/safe_browsing_database_bloom.cc',
'browser/safe_browsing/safe_browsing_database_bloom.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/safe_browsing_util.cc',
'browser/safe_browsing/safe_browsing_util.h',
'browser/sandbox_policy.cc',
'browser/sandbox_policy.h',
'browser/search_engines/template_url.cc',
'browser/search_engines/template_url.h',
'browser/search_engines/template_url_fetcher.cc',
'browser/search_engines/template_url_fetcher.h',
'browser/search_engines/template_url_model.cc',
'browser/search_engines/template_url_model.h',
'browser/search_engines/template_url_parser.cc',
'browser/search_engines/template_url_parser.h',
'browser/search_engines/template_url_prepopulate_data.cc',
'browser/search_engines/template_url_prepopulate_data.h',
'browser/session_startup_pref.cc',
'browser/session_startup_pref.h',
'browser/sessions/base_session_service.cc',
'browser/sessions/base_session_service.h',
'browser/sessions/session_backend.cc',
'browser/sessions/session_backend.h',
'browser/sessions/session_command.cc',
'browser/sessions/session_command.h',
'browser/sessions/session_id.cc',
'browser/sessions/session_id.h',
'browser/sessions/session_restore.cc',
'browser/sessions/session_restore.h',
'browser/sessions/session_service.cc',
'browser/sessions/session_service.h',
'browser/sessions/session_types.cc',
'browser/sessions/session_types.h',
'browser/sessions/tab_restore_service.cc',
'browser/sessions/tab_restore_service.h',
'browser/shell_dialogs.h',
'browser/shell_integration.cc',
'browser/shell_integration.h',
'browser/shell_integration_mac.mm',
'browser/spellcheck_worditerator.cc',
'browser/spellcheck_worditerator.h',
'browser/spellchecker.cc',
'browser/spellchecker.h',
'browser/ssl/ssl_blocking_page.cc',
'browser/ssl/ssl_blocking_page.h',
'browser/ssl/ssl_error_info.cc',
'browser/ssl/ssl_error_info.h',
'browser/ssl/ssl_host_state.cc',
'browser/ssl/ssl_host_state.h',
'browser/ssl/ssl_manager.cc',
'browser/ssl/ssl_manager.h',
'browser/ssl/ssl_policy.cc',
'browser/ssl/ssl_policy.h',
'browser/status_bubble.h',
'browser/tab_contents/constrained_window.h',
'browser/tab_contents/infobar_delegate.cc',
'browser/tab_contents/infobar_delegate.h',
'browser/tab_contents/interstitial_page.cc',
'browser/tab_contents/interstitial_page.h',
'browser/tab_contents/navigation_controller.cc',
'browser/tab_contents/navigation_controller.h',
'browser/tab_contents/navigation_entry.cc',
'browser/tab_contents/navigation_entry.h',
'browser/tab_contents/page_navigator.h',
'browser/tab_contents/provisional_load_details.cc',
'browser/tab_contents/provisional_load_details.h',
'browser/tab_contents/render_view_context_menu.cc',
'browser/tab_contents/render_view_context_menu.h',
'browser/tab_contents/render_view_context_menu_gtk.cc',
'browser/tab_contents/render_view_context_menu_gtk.h',
'browser/tab_contents/render_view_context_menu_mac.mm',
'browser/tab_contents/render_view_context_menu_mac.h',
'browser/tab_contents/render_view_context_menu_win.cc',
'browser/tab_contents/render_view_context_menu_win.h',
'browser/tab_contents/render_view_host_delegate_helper.cc',
'browser/tab_contents/render_view_host_delegate_helper.h',
'browser/tab_contents/render_view_host_manager.cc',
'browser/tab_contents/render_view_host_manager.h',
'browser/tab_contents/repost_form_warning.h',
'browser/tab_contents/security_style.h',
'browser/tab_contents/site_instance.cc',
'browser/tab_contents/site_instance.h',
'browser/tab_contents/tab_contents.cc',
'browser/tab_contents/tab_contents.h',
'browser/tab_contents/tab_contents_delegate.h',
'browser/tab_contents/tab_contents_view.cc',
'browser/tab_contents/tab_contents_view.h',
'browser/tab_contents/tab_contents_view_gtk.cc',
'browser/tab_contents/tab_contents_view_gtk.h',
'browser/tab_contents/tab_contents_view_mac.h',
'browser/tab_contents/tab_contents_view_mac.mm',
'browser/tab_contents/tab_contents_view_win.cc',
'browser/tab_contents/tab_contents_view_win.h',
'browser/tab_contents/tab_util.cc',
'browser/tab_contents/tab_util.h',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drag_source.h',
'browser/tab_contents/web_drop_target.cc',
'browser/tab_contents/web_drop_target.h',
'browser/tabs/tab_strip_model.cc',
'browser/tabs/tab_strip_model.h',
'browser/tabs/tab_strip_model_order_controller.cc',
'browser/tabs/tab_strip_model_order_controller.h',
'browser/task_manager.cc',
'browser/task_manager.h',
'browser/task_manager_resource_providers.cc',
'browser/task_manager_resource_providers.h',
'browser/theme_resources_util.cc',
'browser/theme_resources_util.h',
'browser/toolbar_model.cc',
'browser/toolbar_model.h',
'browser/user_data_manager.cc',
'browser/user_data_manager.h',
'browser/view_ids.h',
'browser/views/about_chrome_view.cc',
'browser/views/about_chrome_view.h',
'browser/views/about_ipc_dialog.cc',
'browser/views/about_ipc_dialog.h',
'browser/views/about_network_dialog.cc',
'browser/views/about_network_dialog.h',
'browser/views/autocomplete/autocomplete_popup_contents_view.cc',
'browser/views/autocomplete/autocomplete_popup_contents_view.h',
'browser/views/autocomplete/autocomplete_popup_win.cc',
'browser/views/autocomplete/autocomplete_popup_win.h',
'browser/views/blocked_popup_container.cc',
'browser/views/blocked_popup_container.h',
'browser/views/bookmark_bar_view.cc',
'browser/views/bookmark_bar_view.h',
'browser/views/bookmark_bubble_view.cc',
'browser/views/bookmark_bubble_view.h',
'browser/views/bookmark_editor_view.cc',
'browser/views/bookmark_editor_view.h',
'browser/views/bookmark_folder_tree_view.cc',
'browser/views/bookmark_folder_tree_view.h',
'browser/views/bookmark_manager_view.cc',
'browser/views/bookmark_manager_view.h',
'browser/views/bookmark_menu_button.cc',
'browser/views/bookmark_menu_button.h',
'browser/views/bookmark_table_view.cc',
'browser/views/bookmark_table_view.h',
'browser/views/bug_report_view.cc',
'browser/views/bug_report_view.h',
'browser/views/clear_browsing_data.cc',
'browser/views/clear_browsing_data.h',
'browser/views/constrained_window_impl.cc',
'browser/views/constrained_window_impl.h',
'browser/views/dom_view.cc',
'browser/views/dom_view.h',
'browser/views/download_item_view.cc',
'browser/views/download_item_view.h',
'browser/views/download_shelf_view.cc',
'browser/views/download_shelf_view.h',
'browser/views/download_started_animation.cc',
'browser/views/download_started_animation.h',
'browser/views/edit_keyword_controller.cc',
'browser/views/edit_keyword_controller.h',
'browser/views/event_utils.cc',
'browser/views/event_utils.h',
'browser/views/external_protocol_dialog.cc',
'browser/views/external_protocol_dialog.h',
'browser/views/find_bar_view.cc',
'browser/views/find_bar_view.h',
'browser/views/find_bar_win.cc',
'browser/views/find_bar_win.h',
'browser/views/first_run_bubble.cc',
'browser/views/first_run_bubble.h',
'browser/views/first_run_customize_view.cc',
'browser/views/first_run_customize_view.h',
'browser/views/first_run_view.cc',
'browser/views/first_run_view.h',
'browser/views/first_run_view_base.cc',
'browser/views/first_run_view_base.h',
'browser/views/frame/browser_frame.cc',
'browser/views/frame/browser_frame.h',
'browser/views/frame/browser_root_view.cc',
'browser/views/frame/browser_root_view.h',
'browser/views/frame/browser_view.cc',
'browser/views/frame/browser_view.h',
'browser/views/frame/glass_browser_frame_view.cc',
'browser/views/frame/glass_browser_frame_view.h',
'browser/views/frame/opaque_browser_frame_view.cc',
'browser/views/frame/opaque_browser_frame_view.h',
'browser/views/fullscreen_exit_bubble.cc',
'browser/views/fullscreen_exit_bubble.h',
'browser/views/go_button.cc',
'browser/views/go_button.h',
'browser/views/html_dialog_view.cc',
'browser/views/html_dialog_view.h',
'browser/views/hung_renderer_view.cc',
'browser/views/hwnd_html_view.cc',
'browser/views/hwnd_html_view.h',
'browser/views/importer_lock_view.cc',
'browser/views/importer_lock_view.h',
'browser/views/importer_view.cc',
'browser/views/importer_view.h',
'browser/views/importing_progress_view.cc',
'browser/views/importing_progress_view.h',
'browser/views/info_bubble.cc',
'browser/views/info_bubble.h',
'browser/views/infobars/infobar_container.cc',
'browser/views/infobars/infobar_container.h',
'browser/views/infobars/infobars.cc',
'browser/views/infobars/infobars.h',
'browser/views/jsmessage_box_dialog.cc',
'browser/views/jsmessage_box_dialog.h',
'browser/views/keyword_editor_view.cc',
'browser/views/keyword_editor_view.h',
'browser/views/location_bar_view.cc',
'browser/views/location_bar_view.h',
'browser/views/login_view.cc',
'browser/views/login_view.h',
'browser/views/new_profile_dialog.cc',
'browser/views/new_profile_dialog.h',
'browser/views/options/advanced_contents_view.cc',
'browser/views/options/advanced_contents_view.h',
'browser/views/options/advanced_page_view.cc',
'browser/views/options/advanced_page_view.h',
'browser/views/options/content_page_view.cc',
'browser/views/options/content_page_view.h',
'browser/views/options/cookies_view.cc',
'browser/views/options/cookies_view.h',
'browser/views/options/exceptions_page_view.cc',
'browser/views/options/exceptions_page_view.h',
'browser/views/options/fonts_languages_window_view.cc',
'browser/views/options/fonts_languages_window_view.h',
'browser/views/options/fonts_page_view.cc',
'browser/views/options/fonts_page_view.h',
'browser/views/options/general_page_view.cc',
'browser/views/options/general_page_view.h',
'browser/views/options/language_combobox_model.cc',
'browser/views/options/language_combobox_model.h',
'browser/views/options/languages_page_view.cc',
'browser/views/options/languages_page_view.h',
'browser/views/options/options_group_view.cc',
'browser/views/options/options_group_view.h',
'browser/views/options/options_page_view.cc',
'browser/views/options/options_page_view.h',
'browser/views/options/options_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.cc',
'browser/views/options/passwords_exceptions_window_view.h',
'browser/views/options/passwords_page_view.cc',
'browser/views/options/passwords_page_view.h',
'browser/views/page_info_window.cc',
'browser/views/page_info_window.h',
'browser/views/repost_form_warning_view.cc',
'browser/views/repost_form_warning_view.h',
'browser/views/restart_message_box.cc',
'browser/views/restart_message_box.h',
'browser/views/sad_tab_view.cc',
'browser/views/sad_tab_view.h',
'browser/views/select_profile_dialog.cc',
'browser/views/select_profile_dialog.h',
'browser/views/shelf_item_dialog.cc',
'browser/views/shelf_item_dialog.h',
'browser/views/shell_dialogs_win.cc',
'browser/views/star_toggle.cc',
'browser/views/star_toggle.h',
'browser/views/status_bubble_views.cc',
'browser/views/status_bubble_views.h',
'browser/views/tab_contents_container_view.cc',
'browser/views/tab_contents_container_view.h',
'browser/views/tab_icon_view.cc',
'browser/views/tab_icon_view.h',
'browser/views/tabs/dragged_tab_controller.cc',
'browser/views/tabs/dragged_tab_controller.h',
'browser/views/tabs/dragged_tab_view.cc',
'browser/views/tabs/dragged_tab_view.h',
'browser/views/tabs/hwnd_photobooth.cc',
'browser/views/tabs/hwnd_photobooth.h',
'browser/views/tabs/tab.cc',
'browser/views/tabs/tab.h',
'browser/views/tabs/tab_renderer.cc',
'browser/views/tabs/tab_renderer.h',
'browser/views/tabs/tab_strip.cc',
'browser/views/tabs/tab_strip.h',
'browser/views/theme_helpers.cc',
'browser/views/theme_helpers.h',
'browser/views/toolbar_star_toggle.cc',
'browser/views/toolbar_star_toggle.h',
'browser/views/toolbar_view.cc',
'browser/views/toolbar_view.h',
'browser/views/uninstall_dialog.cc',
'browser/views/uninstall_dialog.h',
'browser/views/user_data_dir_dialog.cc',
'browser/views/user_data_dir_dialog.h',
'browser/visitedlink_master.cc',
'browser/visitedlink_master.h',
'browser/webdata/web_data_service.cc',
'browser/webdata/web_data_service.h',
'browser/webdata/web_data_service_win.cc',
'browser/webdata/web_database.cc',
'browser/webdata/web_database.h',
'browser/webdata/web_database_win.cc',
'browser/window_sizer.cc',
'browser/window_sizer.h',
'browser/worker_host/worker_process_host.cc',
'browser/worker_host/worker_process_host.h',
'browser/worker_host/worker_service.cc',
'browser/worker_host/worker_service.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
# This file is generated by GRIT.
'<(SHARED_INTERMEDIATE_DIR)/chrome/grit/theme_resources_map.cc',
],
'conditions': [
['javascript_engine=="v8"', {
'defines': [
'CHROME_V8',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
# Windows-specific files.
'browser/download/download_exe.cc',
],
}],
['OS=="mac"', {
'sources/': [
# Exclude most of download.
['exclude', '^browser/download/'],
['include', '^browser/download/download_(file|manager|shelf)\\.cc$'],
['include', '^browser/download/download_request_manager\\.cc$'],
['include', '^browser/download/download_item_model\\.cc$'],
['include', '^browser/download/save_(file(_manager)?|item|package)\\.cc$'],
],
'sources!': [
'browser/automation/automation_provider_list_generic.cc',
'browser/bookmarks/bookmark_context_menu.cc',
'browser/bookmarks/bookmark_drop_info.cc',
'browser/debugger/debugger_shell_stubs.cc',
'browser/icon_manager.cc',
],
'sources': [
# Build the necessary GTM sources
'../third_party/GTM/AppKit/GTMNSBezierPath+RoundRect.m',
'../third_party/GTM/AppKit/GTMNSColor+Luminance.m',
'../third_party/GTM/AppKit/GTMTheme.m',
# Build necessary Mozilla sources
'../third_party/mozilla/include/NSWorkspace+Utils.h',
'../third_party/mozilla/include/NSWorkspace+Utils.m',
],
'include_dirs': [
'../third_party/GTM',
'../third_party/GTM/AppKit',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'installer/util/util.gyp:installer_util',
'../printing/printing.gyp:printing',
],
'sources': [
# Using built-in rule in vstudio for midl.
'browser/history/history_indexer.idl',
],
'sources!': [
'browser/debugger/debugger_shell_stubs.cc',
'browser/history/history_publisher_none.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
        }, { # else: OS!="win"
'sources/': [
# Exclude all of hang_monitor.
['exclude', '^browser/hang_monitor/'],
# Exclude parts of password_manager.
['exclude', '^browser/password_manager/ie7_password\\.cc$'],
# Exclude most of printing.
['exclude', '^browser/printing/'],
['include', '^browser/printing/page_(number|range|setup)\\.cc$'],
# Exclude all of rlz.
['exclude', '^browser/rlz/'],
# Exclude all of views.
['exclude', '^browser/views/'],
],
'sources!': [
'browser/autocomplete/autocomplete_accessibility.cc',
'browser/automation/ui_controls.cc',
'browser/bookmarks/bookmark_menu_controller.cc',
'browser/bookmarks/bookmark_menu_controller.h',
'browser/browser_accessibility.cc',
'browser/browser_accessibility_manager.cc',
'browser/debugger/debugger_view.cc',
'browser/debugger/debugger_window.cc',
'browser/debugger/devtools_view.cc',
'browser/debugger/devtools_view.h',
'browser/debugger/devtools_window_gtk.cc',
'browser/debugger/devtools_window_win.cc',
'browser/dock_info.cc',
'browser/dom_ui/html_dialog_contents.cc',
'browser/encoding_menu_controller_delegate.cc',
'browser/external_tab_container.cc',
'browser/first_run.cc',
'browser/google_update.cc',
'browser/history/history_indexer.idl',
'browser/history_tab_ui.cc',
'browser/history_view.cc',
'browser/ime_input.cc',
'browser/importer/ie_importer.cc',
'browser/jankometer.cc',
'browser/login_prompt.cc',
'browser/memory_details.cc',
'browser/modal_html_dialog_delegate.cc',
'browser/sandbox_policy.cc',
'browser/shell_integration.cc',
'browser/tab_contents/web_drag_source.cc',
'browser/tab_contents/web_drop_target.cc',
'browser/task_manager.cc',
'browser/window_sizer.cc',
],
}],
],
},
{
'target_name': 'plugin',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'include_dirs': [
'<(INTERMEDIATE_DIR)',
],
'sources': [
# All .cc, .h, .m, and .mm files under plugins except for tests and
# mocks.
'plugin/chrome_plugin_host.cc',
'plugin/chrome_plugin_host.h',
'plugin/npobject_proxy.cc',
'plugin/npobject_proxy.h',
'plugin/npobject_stub.cc',
'plugin/npobject_stub.h',
'plugin/npobject_util.cc',
'plugin/npobject_util.h',
'plugin/plugin_channel.cc',
'plugin/plugin_channel.h',
'plugin/plugin_channel_base.cc',
'plugin/plugin_channel_base.h',
'plugin/plugin_main.cc',
'plugin/plugin_thread.cc',
'plugin/plugin_thread.h',
'plugin/webplugin_delegate_stub.cc',
'plugin/webplugin_delegate_stub.h',
'plugin/webplugin_proxy.cc',
'plugin/webplugin_proxy.h',
],
# These are layered in conditionals in the event other platforms
# end up using this module as well.
'conditions': [
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
],
},
{
'target_name': 'renderer',
'type': '<(library)',
'dependencies': [
'common',
'plugin',
'chrome_resources',
'chrome_strings',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/npapi/npapi.gyp:npapi',
'../webkit/webkit.gyp:glue',
'../webkit/webkit.gyp:webkit',
],
'include_dirs': [
'..',
],
'sources': [
# TODO(jrg): to link ipc_tests, these files need to be in renderer.a.
# But app/ is the wrong directory for them.
# Better is to remove the dep of *_tests on renderer, but in the
# short term I'd like the build to work.
'renderer/automation/dom_automation_controller.cc',
'renderer/automation/dom_automation_controller.h',
'renderer/extensions/bindings_utils.cc',
'renderer/extensions/bindings_utils.h',
'renderer/extensions/event_bindings.cc',
'renderer/extensions/event_bindings.h',
'renderer/extensions/extension_process_bindings.cc',
'renderer/extensions/extension_process_bindings.h',
'renderer/extensions/renderer_extension_bindings.cc',
'renderer/extensions/renderer_extension_bindings.h',
'renderer/loadtimes_extension_bindings.h',
'renderer/loadtimes_extension_bindings.cc',
'renderer/media/audio_renderer_impl.cc',
'renderer/media/audio_renderer_impl.h',
'renderer/media/buffered_data_source.cc',
'renderer/media/buffered_data_source.h',
'renderer/media/simple_data_source.cc',
'renderer/media/simple_data_source.h',
'renderer/media/video_renderer_impl.cc',
'renderer/media/video_renderer_impl.h',
'renderer/net/render_dns_master.cc',
'renderer/net/render_dns_master.h',
'renderer/net/render_dns_queue.cc',
'renderer/net/render_dns_queue.h',
'renderer/about_handler.cc',
'renderer/about_handler.h',
'renderer/audio_message_filter.cc',
'renderer/audio_message_filter.h',
'renderer/debug_message_handler.cc',
'renderer/debug_message_handler.h',
'renderer/devtools_agent.cc',
'renderer/devtools_agent.h',
'renderer/devtools_agent_filter.cc',
'renderer/devtools_agent_filter.h',
'renderer/devtools_client.cc',
'renderer/devtools_client.h',
'renderer/dom_ui_bindings.cc',
'renderer/dom_ui_bindings.h',
'renderer/external_host_bindings.cc',
'renderer/external_host_bindings.h',
'renderer/external_extension.cc',
'renderer/external_extension.h',
'renderer/js_only_v8_extensions.cc',
'renderer/js_only_v8_extensions.h',
'renderer/localized_error.cc',
'renderer/localized_error.h',
'renderer/plugin_channel_host.cc',
'renderer/plugin_channel_host.h',
'renderer/render_process.cc',
'renderer/render_process.h',
'renderer/render_thread.cc',
'renderer/render_thread.h',
'renderer/render_view.cc',
'renderer/render_view.h',
'renderer/render_widget.cc',
'renderer/render_widget.h',
'renderer/renderer_glue.cc',
'renderer/renderer_histogram_snapshots.cc',
'renderer/renderer_histogram_snapshots.h',
'renderer/renderer_logging.h',
'renderer/renderer_logging_linux.cc',
'renderer/renderer_logging_mac.mm',
'renderer/renderer_logging_win.cc',
'renderer/renderer_main.cc',
'renderer/renderer_main_platform_delegate.h',
'renderer/renderer_main_platform_delegate_linux.cc',
'renderer/renderer_main_platform_delegate_mac.mm',
'renderer/renderer_main_platform_delegate_win.cc',
'renderer/renderer_webkitclient_impl.cc',
'renderer/renderer_webkitclient_impl.h',
'renderer/user_script_slave.cc',
'renderer/user_script_slave.h',
'renderer/visitedlink_slave.cc',
'renderer/visitedlink_slave.h',
'renderer/webmediaplayer_impl.cc',
'renderer/webmediaplayer_impl.h',
'renderer/webplugin_delegate_proxy.cc',
'renderer/webplugin_delegate_proxy.h',
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
'link_settings': {
'mac_bundle_resources': [
'renderer/renderer.sb',
],
},
'conditions': [
# Linux-specific rules.
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
# Windows-specific rules.
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
},],
# As of yet unported-from-Windows code.
['OS!="win"', {
'sources!': [
'renderer/webworker_proxy.cc',
'renderer/webworker_proxy.h',
],
},],
],
},
{
'target_name': 'app',
'type': 'executable',
'mac_bundle': 1,
'dependencies': [
'common',
'browser',
'renderer',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:inspector_resources',
],
'sources': [
# All .cc, .h, .m, and .mm files under app except for tests.
'app/breakpad_win.cc',
'app/breakpad_win.h',
'app/breakpad_mac.mm',
'app/breakpad_mac.h',
'app/chrome_dll_main.cc',
'app/chrome_dll_resource.h',
'app/chrome_exe_main.cc',
'app/chrome_exe_main.mm',
'app/chrome_exe_main_gtk.cc',
'app/chrome_exe_resource.h',
'app/client_util.cc',
'app/client_util.h',
'app/google_update_client.cc',
'app/google_update_client.h',
'app/keystone_glue.h',
'app/keystone_glue.m',
'app/scoped_ole_initializer.h',
],
'mac_bundle_resources': [
'app/nibs/en.lproj/BrowserWindow.xib',
'app/nibs/en.lproj/FindBar.xib',
'app/nibs/en.lproj/MainMenu.xib',
'app/nibs/en.lproj/Preferences.xib',
'app/nibs/en.lproj/SaveAccessoryView.xib',
'app/nibs/en.lproj/TabContents.xib',
'app/nibs/en.lproj/TabView.xib',
'app/nibs/en.lproj/Toolbar.xib',
'app/theme/back.pdf',
'app/theme/close_bar.pdf',
'app/theme/close_bar_h.pdf',
'app/theme/close_bar_p.pdf',
'app/theme/forward.pdf',
'app/theme/go.pdf',
'app/theme/grow_box.png',
'app/theme/nav.pdf',
'app/theme/newtab.pdf',
'app/theme/o2_globe.png',
'app/theme/o2_history.png',
'app/theme/o2_more.png',
'app/theme/o2_search.png',
'app/theme/o2_star.png',
'app/theme/reload.pdf',
'app/theme/sadtab.png',
'app/theme/star.pdf',
'app/theme/starred.pdf',
'app/theme/stop.pdf',
'app/app-Info.plist',
],
# TODO(mark): Come up with a fancier way to do this. It should only
# be necessary to list app-Info.plist once, not the three times it is
# listed here.
'mac_bundle_resources!': [
'app/app-Info.plist',
],
'xcode_settings': {
'INFOPLIST_FILE': 'app/app-Info.plist',
},
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
# Needed for chrome_dll_main.cc #include of gtk/gtk.h
'../build/linux/system.gyp:gtk',
# Needed for chrome_dll_main.cc use of g_thread_init
'../build/linux/system.gyp:gthread',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': ['<(INTERMEDIATE_DIR)/repack/chrome.pak'],
},
{
'destination': '<(PRODUCT_DIR)/locales',
'files': ['<(INTERMEDIATE_DIR)/repack/da.pak',
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
'<(INTERMEDIATE_DIR)/repack/he.pak',
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
},
{
'destination': '<(PRODUCT_DIR)/themes',
'files': ['<(INTERMEDIATE_DIR)/repack/default.pak'],
},
],
}],
['OS=="mac"', {
# 'branding' is a variable defined in common.gypi
# (e.g. "Chromium", "Chrome")
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names, etc.;
# should we try to extract from there instead?
'product_name': '<(branding)',
'conditions': [
['branding=="Chrome"', {
'mac_bundle_resources': ['app/theme/google_chrome/app.icns'],
'variables': {
'bundle_id': 'com.google.Chrome',
},
# Only include breakpad in official builds.
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/<(branding).app/Contents/Resources/',
'files': ['<(PRODUCT_DIR)/crash_inspector', '<(PRODUCT_DIR)/crash_report_sender.app'],
},
]
}, { # else: branding!="Chrome"
'mac_bundle_resources': ['app/theme/chromium/app.icns'],
'variables': {
'bundle_id': 'org.chromium.Chromium',
},
}],
],
'xcode_settings': {
# chrome/app/app-Info.plist has a CFBundleIdentifier of
# CHROMIUM_BUNDLE_ID to be replaced by a branded bundle ID in Xcode
          # with this setting.
'CHROMIUM_BUNDLE_ID': '<(bundle_id)',
},
}, { # else: OS != "mac"
'conditions': [
['branding=="Chrome"', {
'product_name': 'chrome'
}, { # else: Branding!="Chrome"
# TODO: change to:
# 'product_name': 'chromium'
# whenever we convert the rest of the infrastructure
# (buildbots etc.) to use "gyp -Dbranding=Chrome".
# NOTE: chrome/app/theme/chromium/BRANDING and
# chrome/app/theme/google_chrome/BRANDING have the short names,
# etc.; should we try to extract from there instead?
'product_name': 'chrome'
}],
],
}],
['OS=="mac"', {
          # Mac adds an action to modify the Info.plist to meet our needs
# (see the script for why this is done).
'actions': [
{
'action_name': 'tweak_app_infoplist',
              # We don't list any inputs or outputs because we always want
              # the script to run. Why? Because it does things like record
              # the svn revision into the Info.plist, so there is no file to
              # depend on that will change whenever that changes.
'inputs': [],
'outputs': [],
'action': ['<(DEPTH)/build/mac/tweak_app_infoplist',
'<(branding)'],
},
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'views',
'../build/temp_gyp/breakpad.gyp:breakpad_handler',
'../build/temp_gyp/breakpad.gyp:breakpad_sender',
'../sandbox/sandbox.gyp:sandbox',
'worker',
],
        },{ # else: OS!="win"
'variables': {
'repack_path': '../tools/data_pack/repack.py',
},
'actions': [
{
'action_name': 'repack_chrome',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/browser_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/debugger_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/common_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/renderer_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/chrome.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
'action_name': 'repack_theme',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/theme_resources.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/repack/theme.pak',
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
'conditions': [
['OS=="linux"', {
'outputs=': [
'<(INTERMEDIATE_DIR)/repack/default.pak',
]
}],
],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_da',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_da.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_da.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_da.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_da.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/da.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_en_us',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_en-US.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_en-US.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/en-US.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
'process_outputs_as_mac_bundle_resources': 1,
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_he',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_he.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_he.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_he.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_he.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/he.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
{
# TODO(mark): Make this work with more languages than the
# hardcoded da, en-US, he, zh-TW.
'action_name': 'repack_locale_zh_tw',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/generated_resources_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/chrome/locale_settings_zh-TW.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_zh-TW.pak',
],
'conditions': [
['branding=="Chrome"', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/google_chrome_strings_zh-TW.pak',
]
}, { # else: branding!="Chrome"
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome/chromium_strings_zh-TW.pak',
]
}],
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'conditions': [
['OS=="mac"', {
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh.lproj/locale.pak',
],
}, { # else: OS!="mac"
'outputs': [
'<(INTERMEDIATE_DIR)/repack/zh-TW.pak',
],
}],
],
'action': ['python', '<(repack_path)', '<@(_outputs)', '<@(pak_inputs)'],
},
],
'sources!': [
'app/chrome_exe_main.cc',
'app/client_util.cc',
'app/google_update_client.cc',
]
}],
],
},
{
'target_name': 'image_diff',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'../base/base.gyp:base_gfx',
],
'sources': [
'tools/test/image_diff/image_diff.cc',
],
},
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
'target_name': 'test_support_common',
'type': '<(library)',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
# TODO: these should live here but are currently used by
        # production code in libbrowser (above).
#'browser/automation/url_request_mock_http_job.cc',
#'browser/automation/url_request_mock_http_job.h',
'browser/automation/url_request_mock_net_error_job.cc',
'browser/automation/url_request_mock_net_error_job.h',
'browser/renderer_host/mock_render_process_host.cc',
'browser/renderer_host/mock_render_process_host.h',
'browser/renderer_host/test_render_view_host.cc',
'browser/renderer_host/test_render_view_host.h',
'browser/tab_contents/test_web_contents.cc',
'browser/tab_contents/test_web_contents.h',
'common/ipc_test_sink.cc',
'common/ipc_test_sink.h',
'renderer/mock_keyboard.h',
'renderer/mock_keyboard.cc',
'renderer/mock_render_process.h',
'renderer/mock_render_thread.cc',
'renderer/mock_render_thread.h',
'test/automation/autocomplete_edit_proxy.cc',
'test/automation/autocomplete_edit_proxy.h',
'test/automation/automation_constants.h',
'test/automation/automation_handle_tracker.cc',
'test/automation/automation_handle_tracker.h',
'test/automation/automation_proxy.cc',
'test/automation/automation_proxy.h',
'test/automation/browser_proxy.cc',
'test/automation/browser_proxy.h',
'test/automation/constrained_window_proxy.cc',
'test/automation/constrained_window_proxy.h',
'test/automation/tab_proxy.cc',
'test/automation/tab_proxy.h',
'test/automation/window_proxy.cc',
'test/automation/window_proxy.h',
'test/chrome_process_util.cc',
'test/chrome_process_util.h',
'test/chrome_process_util_linux.cc',
'test/chrome_process_util_mac.cc',
'test/chrome_process_util_win.cc',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/testing_profile.cc',
'test/testing_profile.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
}, { # OS != "win"
'sources!': [
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
'test/ui_test_utils.cc',
'test/ui_test_utils.h',
],
}],
],
},
{
'target_name': 'test_support_ui',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/testing_browser_process.h',
'test/ui/npapi_test_helper.cc',
'test/ui/npapi_test_helper.h',
'test/ui/run_all_unittests.cc',
'test/ui/ui_test.cc',
'test/ui/ui_test.h',
'test/ui/ui_test_suite.cc',
'test/ui/ui_test_suite.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="win"', {
'sources!': [
'test/ui/npapi_test_helper.cc',
],
}],
],
},
{
'target_name': 'test_support_unit',
'type': '<(library)',
'dependencies': [
'test_support_common',
'chrome_resources',
'chrome_strings',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
],
'sources': [
'test/unit/run_all_unittests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
# Needed for the following #include chain:
# test/unit/run_all_unittests.cc
# test/unit/chrome_test_suite.h
# gtk/gtk.h
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ipc_tests',
'type': 'executable',
'dependencies': [
'common',
'test_support_unit',
'../base/base.gyp:base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'common/ipc_fuzzing_tests.cc',
'common/ipc_send_fds_test.cc',
'common/ipc_tests.cc',
'common/ipc_tests.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'ui_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../net/net.gyp:net',
'../build/temp_gyp/googleurl.gyp:googleurl',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_uitest.cc',
'browser/browser_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/download/download_uitest.cc',
'browser/download/save_page_uitest.cc',
'browser/errorpage_uitest.cc',
'browser/history/redirect_uitest.cc',
'browser/iframe_uitest.cc',
'browser/images_uitest.cc',
'browser/locale_tests_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/printing/printing_layout_uitest.cc',
'browser/printing/printing_test.h',
'browser/renderer_host/resource_dispatcher_host_uitest.cc',
'browser/sanity_uitest.cc',
'browser/session_history_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/tab_contents/view_source_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'common/net/cache_uitest.cc',
'common/pref_service_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/accessibility_util.h',
'test/accessibility/browser_impl.cc',
'test/accessibility/browser_impl.h',
'test/accessibility/constants.h',
'test/accessibility/keyboard_util.cc',
'test/accessibility/keyboard_util.h',
'test/accessibility/registry_util.cc',
'test/accessibility/registry_util.h',
'test/accessibility/tab_impl.cc',
'test/accessibility/tab_impl.h',
'test/automation/automation_proxy_uitest.cc',
'test/chrome_process_util_uitest.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'test/reliability/page_load_test.cc',
'test/reliability/page_load_test.h',
'test/ui/dom_checker_uitest.cc',
'test/ui/history_uitest.cc',
'test/ui/inspector_controller_uitest.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/npapi_uitest.cc',
'test/ui/omnibox_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
],
}],
['OS=="mac"', {
'sources!': [
# TODO(port)
'app/chrome_main_uitest.cc',
'browser/crash_recovery_uitest.cc',
# blocked on download shelf
'browser/download/save_page_uitest.cc',
'browser/login_prompt_uitest.cc',
'browser/metrics/metrics_service_uitest.cc',
'browser/sessions/session_restore_uitest.cc',
'browser/tab_restore_uitest.cc',
'browser/unload_uitest.cc',
'test/reliability/page_load_test.cc',
'test/ui/layout_plugin_uitest.cc',
'test/ui/omnibox_uitest.cc',
# these pass locally but fail on the bots
'common/net/cache_uitest.cc',
],
}],
['OS=="win"', {
'include_dirs': [
'third_party/wtl/include',
],
'dependencies': [
'../google_update/google_update.gyp:google_update',
'views',
],
'link_settings': {
'libraries': [
'-lOleAcc.lib',
],
},
}, { # else: OS != "win"
'sources!': [
# TODO(port)? (Most of these include windows.h or similar.)
'browser/printing/printing_layout_uitest.cc',
'browser/ssl/ssl_uitest.cc',
'browser/views/find_bar_win_uitest.cc',
'common/logging_chrome_uitest.cc',
'test/accessibility/accessibility_tests.cc',
'test/accessibility/accessibility_util.cc',
'test/accessibility/browser_impl.cc',
'test/accessibility/keyboard_util.cc',
'test/accessibility/registry_util.cc',
'test/accessibility/tab_impl.cc',
'test/perf/mem_usage.cc',
'test/ui/npapi_uitest.cc',
'test/ui/sandbox_uitests.cc',
],
}],
],
},
{
'target_name': 'unit_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'test_support_unit',
'../printing/printing.gyp:printing',
'../webkit/webkit.gyp:webkit',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
],
'sources': [
'app/breakpad_mac_stubs.mm',
# All unittests in browser, common, and renderer.
'browser/autocomplete/autocomplete_unittest.cc',
'browser/autocomplete/autocomplete_popup_view_mac_unittest.mm',
'browser/autocomplete/history_contents_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/keyword_provider_unittest.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/bookmarks/bookmark_model_test_utils.cc',
'browser/bookmarks/bookmark_model_test_utils.h',
'browser/bookmarks/bookmark_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/bookmarks/bookmark_utils_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/debugger/devtools_remote_message_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.cc',
'browser/debugger/devtools_remote_listen_socket_unittest.h',
'browser/chrome_thread_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/cocoa/base_view_unittest.mm',
'browser/cocoa/bookmark_bar_controller_unittest.mm',
'browser/cocoa/bookmark_menu_bridge_unittest.mm',
'browser/cocoa/bookmark_menu_cocoa_controller_unittest.mm',
'browser/cocoa/browser_window_cocoa_unittest.mm',
'browser/cocoa/command_observer_bridge_unittest.mm',
'browser/cocoa/find_bar_bridge_unittest.mm',
'browser/cocoa/find_bar_cocoa_controller_unittest.mm',
'browser/cocoa/find_bar_view_unittest.mm',
'browser/cocoa/location_bar_view_mac_unittest.mm',
'browser/cocoa/grow_box_view_unittest.mm',
'browser/cocoa/preferences_window_controller_unittest.mm',
'browser/cocoa/sad_tab_view_unittest.mm',
'browser/cocoa/status_bubble_mac_unittest.mm',
'browser/cocoa/tab_cell_unittest.mm',
'browser/cocoa/tab_controller_unittest.mm',
'browser/cocoa/tab_strip_controller_unittest.mm',
'browser/cocoa/tab_strip_view_unittest.mm',
'browser/cocoa/tab_view_unittest.mm',
'browser/cocoa/toolbar_button_cell_unittest.mm',
'browser/cocoa/toolbar_controller_unittest.mm',
'browser/cocoa/toolbar_view_unittest.mm',
'browser/command_updater_unittest.cc',
'browser/debugger/devtools_manager_unittest.cc',
'browser/dom_ui/dom_ui_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/download/download_request_manager_unittest.cc',
'browser/download/save_package_unittest.cc',
'browser/extensions/extension_messages_unittest.cc',
'browser/extensions/extension_process_manager_unittest.h',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extension_unittest.cc',
'browser/extensions/extensions_service_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/extensions/user_script_master_unittest.cc',
'browser/google_url_tracker_unittest.cc',
'browser/gtk/bookmark_editor_gtk_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_types_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/query_parser_unittest.cc',
'browser/history/snippet_unittest.cc',
'browser/history/starred_url_database_unittest.cc',
'browser/history/text_database_manager_unittest.cc',
'browser/history/text_database_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/history/url_database_unittest.cc',
'browser/history/visit_database_unittest.cc',
'browser/history/visit_tracker_unittest.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/importer/toolbar_importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/metrics/metrics_log_unittest.cc',
'browser/metrics/metrics_response_unittest.cc',
'browser/navigation_controller_unittest.cc',
'browser/navigation_entry_unittest.cc',
'browser/net/dns_host_info_unittest.cc',
'browser/net/dns_master_unittest.cc',
'browser/net/resolve_proxy_msg_helper_unittest.cc',
'browser/net/url_fetcher_unittest.cc',
'browser/net/url_fixer_upper_unittest.cc',
'browser/password_manager/encryptor_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/page_range_unittest.cc',
'browser/printing/page_setup_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/win_printing_context_unittest.cc',
'browser/profile_manager_unittest.cc',
'browser/renderer_host/audio_renderer_host_unittest.cc',
'browser/renderer_host/render_view_host_unittest.cc',
'browser/renderer_host/render_widget_host_unittest.cc',
'browser/renderer_host/renderer_security_policy_unittest.cc',
'browser/renderer_host/resource_dispatcher_host_unittest.cc',
'browser/renderer_host/web_cache_manager_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/safe_browsing/bloom_filter_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/search_engines/template_url_model_unittest.cc',
'browser/search_engines/template_url_parser_unittest.cc',
'browser/search_engines/template_url_prepopulate_data_unittest.cc',
'browser/search_engines/template_url_unittest.cc',
'browser/sessions/session_backend_unittest.cc',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/sessions/session_service_unittest.cc',
'browser/sessions/tab_restore_service_unittest.cc',
'browser/site_instance_unittest.cc',
'browser/spellcheck_unittest.cc',
'browser/tab_contents/render_view_host_manager_unittest.cc',
'browser/tab_contents/web_contents_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/theme_resources_util_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/visitedlink_unittest.cc',
'browser/webdata/web_database_unittest.cc',
'browser/window_sizer_unittest.cc',
'../app/animation_unittest.cc',
'common/bzip2_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/extensions/url_pattern_unittest.cc',
'common/extensions/user_script_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'../app/gfx/chrome_font_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'../app/gfx/text_elider_unittest.cc',
'common/important_file_writer_unittest.cc',
'common/ipc_message_unittest.cc',
'common/ipc_sync_channel_unittest.cc',
'common/ipc_sync_message_unittest.cc',
'common/ipc_sync_message_unittest.h',
'common/json_value_serializer_unittest.cc',
'../app/l10n_util_unittest.cc',
'common/mru_cache_unittest.cc',
'common/net/url_util_unittest.cc',
'common/notification_service_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'common/pref_member_unittest.cc',
'common/pref_service_unittest.cc',
'common/property_bag_unittest.cc',
'common/resource_dispatcher_unittest.cc',
'common/time_format_unittest.cc',
'common/unzip_unittest.cc',
'../app/win_util_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'renderer/extensions/extension_api_client_unittest.cc',
'renderer/extensions/greasemonkey_api_unittest.cc',
'renderer/extensions/json_schema_unittest.cc',
'renderer/net/render_dns_master_unittest.cc',
'renderer/net/render_dns_queue_unittest.cc',
'renderer/render_process_unittest.cc',
'renderer/render_thread_unittest.cc',
'renderer/render_view_unittest.cc',
'renderer/render_widget_unittest.cc',
'renderer/renderer_logging_mac_unittest.mm',
'renderer/renderer_main_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'test/render_view_test.cc',
'test/render_view_test.h',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
'test/v8_unit_test.cc',
'test/v8_unit_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/controls/tree/tree_node_iterator_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
'../build/linux/system.gyp:nss',
],
'sources!': [
# This test is mostly about renaming downloads to safe file
# names. As such we don't need/want to port it to linux. We
# might want to write our own tests for the download manager
# on linux, though.
'browser/download/download_manager_unittest.cc',
],
}],
['OS=="mac"', {
# The test fetches resources which means Mac need the app bundle to
# exist on disk so it can pull from it.
'dependencies': [
'app',
],
'include_dirs': [
'../third_party/GTM',
],
'sources!': [
'browser/bookmarks/bookmark_context_menu_test.cc',
'browser/back_forward_menu_model_unittest.cc',
'browser/download/download_manager_unittest.cc',
'browser/gtk/go_button_gtk_unittest.cc',
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'browser/navigation_controller_unittest.cc',
'renderer/render_view_unittest.cc',
'test/test_notification_tracker.cc',
'test/test_notification_tracker.h',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui, startup, and page_cycler tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
'sources!': [
'browser/gtk/tabs/tab_renderer_gtk_unittest.cc',
'common/file_descriptor_set_unittest.cc',
'common/net/url_util_unittest.cc',
],
'dependencies': [
'views',
],
}, { # else: OS != "win"
'sources!': [
'browser/bookmarks/bookmark_codec_unittest.cc',
'browser/bookmarks/bookmark_drag_data_unittest.cc',
'browser/bookmarks/bookmark_folder_tree_model_unittest.cc',
'browser/bookmarks/bookmark_table_model_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/browser_unittest.cc',
'browser/extensions/test_extension_loader.cc',
'browser/importer/firefox_importer_unittest.cc',
'browser/importer/importer_unittest.cc',
'browser/login_prompt_unittest.cc',
'browser/password_manager/password_form_manager_unittest.cc',
'browser/printing/page_number_unittest.cc',
'browser/printing/page_overlays_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/tabs/tab_strip_model_unittest.cc',
'browser/task_manager_unittest.cc',
'browser/views/bookmark_editor_view_unittest.cc',
'browser/views/find_bar_win_unittest.cc',
'browser/views/keyword_editor_view_unittest.cc',
'browser/window_sizer_unittest.cc',
'common/chrome_plugin_unittest.cc',
'common/gfx/emf_unittest.cc',
'../app/gfx/icon_util_unittest.cc',
'common/net/url_util_unittest.cc',
'../app/os_exchange_data_unittest.cc',
'test/browser_with_test_window_test.cc',
'test/browser_with_test_window_test.h',
'../views/controls/label_unittest.cc',
'../views/controls/table/table_view_unittest.cc',
'../views/focus/focus_manager_unittest.cc',
'../views/grid_layout_unittest.cc',
'../views/view_unittest.cc',
],
}],
],
},
{
'target_name': 'startup_tests',
'type': 'executable',
'dependencies': [
'app',
'browser',
'common',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/startup/feature_startup_test.cc',
'test/startup/startup_test.cc',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'views',
'../build/linux/system.gyp:gtk',
],
}],
],
},
{
'target_name': 'page_cycler_tests',
'type': 'executable',
'dependencies': [
'app',
'chrome_resources',
'chrome_strings',
'test_support_ui',
'../base/base.gyp:base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/page_cycler/page_cycler_test.cc',
'test/perf/mem_usage.cc',
'test/perf/mem_usage.h',
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
['OS!="win"', {
'sources!': [
'test/perf/mem_usage.cc',
],
}],
],
},
],
'conditions': [
['OS=="linux"', {
'targets': [
{
'target_name': 'convert_dict',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
'third_party/hunspell/hunspell.gyp:hunspell',
],
'sources': [
'tools/convert_dict/aff_reader.cc',
'tools/convert_dict/aff_reader.h',
'tools/convert_dict/convert_dict.cc',
'tools/convert_dict/dic_reader.cc',
'tools/convert_dict/dic_reader.h',
'tools/convert_dict/hunspell_reader.cc',
'tools/convert_dict/hunspell_reader.h',
],
},
{
'target_name': 'flush_cache',
'type': 'executable',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'tools/perf/flush_cache/flush_cache.cc',
],
},
],
}],
['OS=="mac"',
# On Mac only, add a project target called "package_app" that only
# runs a shell script (package_chrome.sh).
# On Mac only, add a project target called "build_app_dmg" that only
# builds a DMG out of the App (eventually will completely replace
# "package_app").
{ 'targets': [
{
'target_name': 'package_app',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
'../breakpad/breakpad.gyp:dump_syms',
'../breakpad/breakpad.gyp:symupload',
],
'actions': [
{
'inputs': [],
'outputs': [],
'action_name': 'package_chrome',
'action': ['tools/mac/package_chrome.sh' ],
},
], # 'actions'
},
{
'target_name': 'build_app_dmg',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'app',
],
'variables': {
'build_app_dmg_script_path': '<(DEPTH)/build/mac/build_app_dmg',
},
'actions': [
{
'inputs': [
'<(build_app_dmg_script_path)',
'<(PRODUCT_DIR)/<(branding).app',
],
'outputs': [
'<(PRODUCT_DIR)/<(branding).dmg',
],
'action_name': 'build_app_dmg',
'action': ['<(build_app_dmg_script_path)', '<@(branding)'],
},
], # 'actions'
},
]
}, { # else: OS != "mac"
'targets': [
{
'target_name': 'perf_tests',
'type': 'executable',
'dependencies': [
'browser',
'common',
'renderer',
'chrome_resources',
'chrome_strings',
'../base/base.gyp:base',
'../base/base.gyp:test_support_base',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../webkit/webkit.gyp:glue',
],
'sources': [
'browser/visitedlink_perftest.cc',
'test/perf/perftests.cc',
'test/perf/url_parse_perftest.cc',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
# TODO(port):
'browser/visitedlink_perftest.cc',
],
}],
['OS!="mac"', {
'dependencies': [
'views',
],
}],
],
},
],
}], # OS!="mac"
['OS=="win" or OS=="linux"',
{ 'targets': [
{
'target_name': 'views',
'type': '<(library)',
'dependencies': [
'common',
'chrome_resources',
'chrome_strings',
'theme_resources',
'../media/media.gyp:media',
'../skia/skia.gyp:skia',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/icu38/icu38.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/npapi/npapi.gyp:npapi',
'third_party/hunspell/hunspell.gyp:hunspell',
'../webkit/webkit.gyp:glue',
],
'sources': [
# All .cc, .h under views, except unittests
'../views/accelerator.cc',
'../views/accelerator.h',
'../views/accessibility/accessibility_types.h',
'../views/accessibility/view_accessibility.cc',
'../views/accessibility/view_accessibility.h',
'../views/accessibility/view_accessibility_wrapper.cc',
'../views/accessibility/view_accessibility_wrapper.h',
'../views/background.cc',
'../views/background.h',
'../views/border.cc',
'../views/border.h',
'../views/controls/button/button.cc',
'../views/controls/button/button.h',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/button_dropdown.h',
'../views/controls/button/checkbox.cc',
'../views/controls/button/checkbox.h',
'../views/controls/button/custom_button.cc',
'../views/controls/button/custom_button.h',
'../views/controls/button/image_button.cc',
'../views/controls/button/image_button.h',
'../views/controls/button/menu_button.cc',
'../views/controls/button/menu_button.h',
'../views/controls/button/native_button.cc',
'../views/controls/button/native_button.h',
'../views/controls/button/native_button_gtk.cc',
'../views/controls/button/native_button_gtk.h',
'../views/controls/button/native_button_win.cc',
'../views/controls/button/native_button_win.h',
'../views/controls/button/native_button_wrapper.h',
'../views/controls/button/radio_button.cc',
'../views/controls/button/radio_button.h',
'../views/controls/button/text_button.cc',
'../views/controls/button/text_button.h',
'../views/controls/combo_box.cc',
'../views/controls/combo_box.h',
'../views/controls/hwnd_view.cc',
'../views/controls/hwnd_view.h',
'../views/controls/image_view.cc',
'../views/controls/image_view.h',
'../views/controls/label.cc',
'../views/controls/label.h',
'../views/controls/link.cc',
'../views/controls/link.h',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/menu/chrome_menu.h',
'../views/controls/menu/controller.h',
'../views/controls/menu/menu.cc',
'../views/controls/menu/menu.h',
'../views/controls/menu/view_menu_delegate.h',
'../views/controls/message_box_view.cc',
'../views/controls/message_box_view.h',
'../views/controls/native_control.cc',
'../views/controls/native_control.h',
'../views/controls/native_control_gtk.cc',
'../views/controls/native_control_gtk.h',
'../views/controls/native_control_win.cc',
'../views/controls/native_control_win.h',
'../views/controls/native_view_host.cc',
'../views/controls/native_view_host.h',
'../views/controls/native_view_host_gtk.cc',
'../views/controls/native_view_host_gtk.h',
'../views/controls/scroll_view.cc',
'../views/controls/scroll_view.h',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.h',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/scrollbar/native_scroll_bar.h',
'../views/controls/scrollbar/scroll_bar.cc',
'../views/controls/scrollbar/scroll_bar.h',
'../views/controls/separator.cc',
'../views/controls/separator.h',
'../views/controls/single_split_view.cc',
'../views/controls/single_split_view.h',
'../views/controls/tabbed_pane.cc',
'../views/controls/tabbed_pane.h',
'../views/controls/table/group_table_view.cc',
'../views/controls/table/group_table_view.h',
'../views/controls/table/table_view.cc',
'../views/controls/table/table_view.h',
'../views/controls/text_field.cc',
'../views/controls/text_field.h',
'../views/controls/throbber.cc',
'../views/controls/throbber.h',
'../views/controls/tree/tree_model.h',
'../views/controls/tree/tree_node_iterator.h',
'../views/controls/tree/tree_node_model.h',
'../views/controls/tree/tree_view.cc',
'../views/controls/tree/tree_view.h',
'../views/event.cc',
'../views/event.h',
'../views/event_gtk.cc',
'../views/event_win.cc',
'../views/fill_layout.cc',
'../views/fill_layout.h',
'../views/focus/external_focus_tracker.cc',
'../views/focus/external_focus_tracker.h',
'../views/focus/focus_manager.cc',
'../views/focus/focus_manager.h',
'../views/focus/focus_util_win.cc',
'../views/focus/focus_util_win.h',
'../views/focus/view_storage.cc',
'../views/focus/view_storage.h',
'../views/grid_layout.cc',
'../views/grid_layout.h',
'../views/layout_manager.cc',
'../views/layout_manager.h',
'../views/painter.cc',
'../views/painter.h',
'../views/repeat_controller.cc',
'../views/repeat_controller.h',
'../views/standard_layout.h',
'../views/view.cc',
'../views/view.h',
'../views/view_constants.cc',
'../views/view_constants.h',
'../views/view_gtk.cc',
'../views/view_win.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/accelerator_handler.h',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/aero_tooltip_manager.h',
'../views/widget/root_view.cc',
'../views/widget/root_view.h',
'../views/widget/root_view_drop_target.cc',
'../views/widget/root_view_drop_target.h',
'../views/widget/root_view_gtk.cc',
'../views/widget/root_view_win.cc',
'../views/widget/tooltip_manager.cc',
'../views/widget/tooltip_manager.h',
'../views/widget/widget.h',
'../views/widget/widget_gtk.cc',
'../views/widget/widget_gtk.h',
'../views/widget/widget_win.cc',
'../views/widget/widget_win.h',
'../views/window/client_view.cc',
'../views/window/client_view.h',
'../views/window/custom_frame_view.cc',
'../views/window/custom_frame_view.h',
'../views/window/dialog_client_view.cc',
'../views/window/dialog_client_view.h',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_delegate.h',
'../views/window/native_frame_view.cc',
'../views/window/native_frame_view.h',
'../views/window/non_client_view.cc',
'../views/window/non_client_view.h',
'../views/window/window.h',
'../views/window/window_delegate.h',
'../views/window/window_delegate.cc',
'../views/window/window_resources.h',
'../views/window/window_gtk.cc',
'../views/window/window_gtk.h',
'../views/window/window_win.cc',
'../views/window/window_win.h',
],
'conditions': [
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:gtk',
],
'sources!': [
'../views/accelerator.cc',
'../views/accessibility/accessible_wrapper.cc',
'../views/accessibility/view_accessibility.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/checkbox.cc',
'../views/controls/button/menu_button.cc',
'../views/controls/combo_box.cc',
'../views/controls/hwnd_view.cc',
'../views/controls/link.cc',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/menu/menu.cc',
'../views/controls/message_box_view.cc',
'../views/controls/scroll_view.cc',
'../views/controls/table/group_table_view.cc',
'../views/focus/external_focus_tracker.cc',
'../views/focus/focus_manager.cc',
'../views/controls/native_control.cc',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/button/radio_button.cc',
'../views/resize_corner.cc',
'../views/controls/separator.cc',
'../views/controls/single_split_view.cc',
'../views/controls/tabbed_pane.cc',
'../views/controls/table/table_view.cc',
'../views/controls/text_field.cc',
'../views/controls/tree/tree_view.cc',
'../views/event_win.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/root_view_drop_target.cc',
'../views/widget/tooltip_manager.cc',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_client_view.cc',
'../views/window/hit_test.cc',
'../views/window/native_frame_view.cc',
],
}],
['OS=="win"', {
'defines': [
'__STD_C',
'_CRT_SECURE_NO_DEPRECATE',
'_SCL_SECURE_NO_DEPRECATE',
],
'include_dirs': [
'third_party/wtl/include',
],
},],
['OS=="linux"', {
'sources!': [
'../views/accelerator.cc',
'../views/accessibility/accessible_wrapper.cc',
'../views/accessibility/view_accessibility.cc',
'../views/accessibility/view_accessibility_wrapper.cc',
'../views/controls/scrollbar/bitmap_scroll_bar.cc',
'../views/controls/button/button_dropdown.cc',
'../views/controls/button/checkbox.cc',
'../views/controls/menu/chrome_menu.cc',
'../views/controls/combo_box.cc',
'../views/focus/focus_manager.cc',
'../views/controls/table/group_table_view.cc',
'../views/controls/hwnd_view.cc',
'../views/controls/link.cc',
'../views/controls/menu/menu.cc',
'../views/controls/button/menu_button.cc',
'../views/controls/message_box_view.cc',
'../views/controls/native_control.cc',
'../views/controls/scrollbar/native_scroll_bar.cc',
'../views/controls/button/radio_button.cc',
'../views/resize_corner.cc',
'../views/controls/separator.cc',
'../views/controls/single_split_view.cc',
'../views/controls/tabbed_pane.cc',
'../views/controls/table/table_view.cc',
'../views/controls/text_field.cc',
'../views/controls/tree/tree_view.cc',
'../views/widget/accelerator_handler.cc',
'../views/widget/aero_tooltip_manager.cc',
'../views/widget/root_view_drop_target.cc',
'../views/widget/tooltip_manager.cc',
'../views/widget/widget_win.cc',
'../views/window/dialog_delegate.cc',
'../views/window/dialog_client_view.cc',
'../views/window/native_frame_view.cc',
],
}],
],
},
],
}], # OS=="win" or OS=="linux"
['OS=="win"',
{ 'targets': [
{
'target_name': 'interactive_ui_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'third_party/hunspell/hunspell.gyp:hunspell',
'views',
'../skia/skia.gyp:skia',
'../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
'../third_party/icu38/icu38.gyp:icui18n',
'../third_party/libpng/libpng.gyp:libpng',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/zlib/zlib.gyp:zlib',
'../testing/gtest.gyp:gtest',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'browser/browser_focus_uitest.cc',
'browser/views/bookmark_bar_view_test.cc',
'browser/views/constrained_window_impl_interactive_uitest.cc',
'browser/views/find_bar_win_interactive_uitest.cc',
'browser/views/tabs/tab_dragging_test.cc',
'test/interactive_ui/npapi_interactive_test.cc',
'test/interactive_ui/view_event_test_base.cc',
'test/interactive_ui/view_event_test_base.h',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'plugin_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
'../third_party/libxml/libxml.gyp:libxml',
'../third_party/libxslt/libxslt.gyp:libxslt',
'../third_party/npapi/npapi.gyp:npapi',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/plugin/plugin_test.cpp',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'selenium_tests',
'type': 'executable',
'dependencies': [
'chrome_resources',
'chrome_strings',
'test_support_common',
'test_support_ui',
'../skia/skia.gyp:skia',
'../testing/gtest.gyp:gtest',
],
'include_dirs': [
'..',
'third_party/wtl/include',
],
'sources': [
'test/selenium/selenium_test.cc',
'tools/build/win/precompiled_wtl.h',
'tools/build/win/precompiled_wtl.cc',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled_wtl.h',
'msvs_precompiled_source': 'tools/build/win/precompiled_wtl.cc',
},
},
},
{
'target_name': 'worker',
'type': '<(library)',
'dependencies': [
'../base/base.gyp:base',
'../webkit/webkit.gyp:webkit',
],
'sources': [
'tools/build/win/precompiled.cc',
'tools/build/win/precompiled.h',
'worker/webworkerclient_proxy.cc',
'worker/webworkerclient_proxy.h',
'worker/worker_main.cc',
'worker/worker_thread.cc',
'worker/worker_thread.h',
'worker/worker_webkitclient_impl.cc',
'worker/worker_webkitclient_impl.h',
],
'include_dirs': [
'..',
],
'configurations': {
'Debug': {
'msvs_precompiled_header': 'tools/build/win/precompiled.h',
'msvs_precompiled_source': 'tools/build/win/precompiled.cc',
},
},
},
]}, # 'targets'
], # OS=="win"
# TODO(jrg): add in Windows code coverage targets.
['coverage!=0 and OS!="win"',
{ 'targets': [
{
'target_name': 'coverage',
# do NOT place this in the 'all' list; most won't want it.
# In gyp, booleans are 0/1 not True/False.
'suppress_wildcard': 1,
'type': 'none',
'dependencies': [
'../base/base.gyp:base_unittests',
'../media/media.gyp:media_unittests',
'../net/net.gyp:net_unittests',
'../printing/printing.gyp:printing_unittests',
],
'actions': [
{
# 'message' for Linux/scons in particular
'message': 'Running coverage_posix.py to generate coverage numbers',
'inputs': [],
'outputs': [],
'action_name': 'coverage',
'action': [ 'python',
'../tools/code_coverage/coverage_posix.py',
'--directory',
'<(PRODUCT_DIR)',
'--',
'<@(_dependencies)'],
# Use outputs of this action as inputs for the main target build.
# Seems as a misnomer but makes this happy on Linux (scons).
'process_outputs_as_sources': 1,
},
], # 'actions'
},
]
}],
], # 'conditions'
}
|
import pyproctor
from shelf.endpoint_decorators import decorators
from mock import Mock
class EndpointDecoratorsTest(pyproctor.TestBase):
    def test_authorization_header_is_not_logged(self):
        """
        Regression test: the Authorization header used to leak into the
        logs. decorators.logheaders is the only function that logs request
        headers, so it is exercised directly here and must redact every
        header on the REDACTED_HEADERS list, regardless of case.
        """
        fake_logger = Mock()
        fake_logger.info = Mock()
        # Minimal stand-ins for the request/container objects; only the
        # attributes read by logheaders are provided.
        header_map = {
            "Authorization": "no",
            "SafeStuff": "This better be logged this way",
            "authorization": "no",
            "auThOrIzAtion": "no",
            "authentication": "no",
            "host": "yes",
            "accept": "yes"
        }
        fake_request = type("FakeRequest", (), {"headers": header_map})
        fake_container = type("FakeContainer", (), {
            "logger": fake_logger,
            "request": fake_request
        })

        @decorators.logheaders
        def test_log_headers(*args, **kwargs):
            pass

        test_log_headers(fake_container)

        expected = ("RESPONSE HEADERS : \n"
                    "authentication: REDACTED\n"
                    "accept: yes\n"
                    "host: yes\n"
                    "auThOrIzAtion: REDACTED\n"
                    "Authorization: REDACTED\n"
                    "authorization: REDACTED\n"
                    "SafeStuff: This better be logged this way")
        fake_logger.info.assert_called_with(expected)
Added unit test that tests JSON parse errors in the logbodies function in endpoint_decorators
import pyproctor
from shelf.endpoint_decorators import decorators
from mock import Mock
class EndpointDecoratorsTest(pyproctor.TestBase):
    def test_authorization_header_is_not_logged(self):
        """
        Regression test: the Authorization header used to leak into the
        logs. decorators.logheaders is the only function that logs request
        headers, so it is exercised directly here and must redact every
        header on the REDACTED_HEADERS list, regardless of case.
        """
        fake_logger = Mock()
        fake_logger.info = Mock()
        # Minimal stand-ins for the request/container objects; only the
        # attributes read by logheaders are provided.
        header_map = {
            "Authorization": "no",
            "SafeStuff": "This better be logged this way",
            "authorization": "no",
            "auThOrIzAtion": "no",
            "authentication": "no",
            "host": "yes",
            "accept": "yes"
        }
        fake_request = type("FakeRequest", (), {"headers": header_map})
        fake_container = type("FakeContainer", (), {
            "logger": fake_logger,
            "request": fake_request
        })

        @decorators.logheaders
        def test_log_headers(*args, **kwargs):
            pass

        test_log_headers(fake_container)

        expected = ("RESPONSE HEADERS : \n"
                    "authentication: REDACTED\n"
                    "accept: yes\n"
                    "host: yes\n"
                    "auThOrIzAtion: REDACTED\n"
                    "Authorization: REDACTED\n"
                    "authorization: REDACTED\n"
                    "SafeStuff: This better be logged this way")
        fake_logger.info.assert_called_with(expected)

    def test_logbodies_errors_on_invalid_json(self):
        """
        decorators.logbodies must survive a request body that is not valid
        JSON: it should log an informational message and hand the parse
        ValueError to logger.exception rather than propagate it.
        """
        bad_body = "{\"invalid\": ...}"
        expected_message = "No JSON object could be decoded"
        fake_logger = Mock()
        fake_logger.info = Mock()
        fake_logger.exception = Mock()
        fake_request = type("FakeRequest", (), {
            "headers": {
                "content-type": "application/json"
            },
            "data": bad_body
        })
        fake_request.get_data = Mock(return_value=bad_body)
        fake_container = type("FakeContainer", (), {
            "logger": fake_logger,
            "request": fake_request
        })

        @decorators.logbodies
        def test_log_bodies(*args, **kwargs):
            return fake_request

        test_log_bodies(fake_container)

        fake_logger.info.assert_any_call("Invalid JSON from request.")
        # logger.exception should have received the ValueError raised by the
        # JSON parser; inspect its first positional argument.
        args, kwargs = fake_logger.exception.call_args
        logged_error = args[0]
        self.assertTrue(isinstance(logged_error, ValueError))
        self.assertTrue(logged_error.message == expected_message)
|
# -*- coding: UTF-8 -*-
"""
fbchat
~~~~~~
Facebook Chat (Messenger) for Python
:copyright: (c) 2015 - 2018 by Taehoon Kim
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
from .client import *
# Package metadata; read by packaging tooling and exposed to users as
# e.g. ``fbchat.__version__``.
__title__ = 'fbchat'
__version__ = '1.3.8'
__description__ = 'Facebook Chat (Messenger) for Python'
__copyright__ = 'Copyright 2015 - 2018 by Taehoon Kim'
__license__ = 'BSD'
__author__ = 'Taehoon Kim; Moreels Pieter-Jan; Mads Marquart'
__email__ = 'carpedm20@gmail.com'
# Only ``Client`` is re-exported by ``from fbchat import *``.
__all__ = [
    'Client',
]
Version up, thanks to @orenyomtov and @ThatAlexanderA
* Added `removeFriend` method, #298
* Removed `lxml` from dependencies, #301
* Moved configuration to setup.cfg instead of setup.py
# -*- coding: UTF-8 -*-
"""
fbchat
~~~~~~
Facebook Chat (Messenger) for Python
:copyright: (c) 2015 - 2018 by Taehoon Kim
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
from .client import *
# Package metadata; read by packaging tooling and exposed to users as
# e.g. ``fbchat.__version__``.
__title__ = 'fbchat'
__version__ = '1.3.9'
__description__ = 'Facebook Chat (Messenger) for Python'
__copyright__ = 'Copyright 2015 - 2018 by Taehoon Kim'
__license__ = 'BSD'
__author__ = 'Taehoon Kim; Moreels Pieter-Jan; Mads Marquart'
__email__ = 'carpedm20@gmail.com'
# Only ``Client`` is re-exported by ``from fbchat import *``.
__all__ = [
    'Client',
]
|
# -*- coding: utf-8 -*-
import argparse
import numpy as np
from sympy.functions.special.delta_functions import Heaviside
import matplotlib.pyplot as plt
import itertools
from sympy import *
def load_default():
    """Fallback used when no score files are supplied on the command line.

    Loading of default score sets is not implemented yet; the function
    only announces that fact.

    Returns:
        tuple: ``(None, None)`` placeholders for (train, test) scores.
    """
    # Parenthesized print is behavior-identical under Python 2 for a single
    # argument and keeps the line valid under Python 3.
    print("TODO: load default scores")
    return None, None
def get_data():
    """Parse command-line arguments and load the train/test score matrices.

    Each input file is expected to hold one row per sample in the form
    ``score_1 score_2 label`` where label 1 marks a client (genuine) sample
    and label 0 an impostor sample.

    Returns:
        tuple: ``((c_train, i_train), (c_test, i_test), plot_flag)`` —
        client/impostor row splits of each matrix plus the --plot flag.
        Falls through (implicitly returning None) when defaults are used.
    """
    parser = argparse.ArgumentParser(description="Solve the ROC curve")
    parser.add_argument("-tr", "--train", type=argparse.FileType('r'),
                        help="Scores for train data", metavar="TR",
                        dest="train_file")
    parser.add_argument("-te", "--test", type=argparse.FileType('r'),
                        help="Scores for test data", metavar="TE",
                        dest="test_file")
    parser.add_argument("-p", "--plot", action='store_true',
                        help="Make plot",
                        dest="plot")
    try:
        args = parser.parse_args()
        if args.train_file is None or args.test_file is None:
            load_default()
        else:
            train = np.loadtxt(args.train_file)
            test = np.loadtxt(args.test_file)
            # Split clients (label 1) from impostors (label 0).
            c_train = train[train[:, 2] == 1]
            i_train = train[train[:, 2] == 0]
            # BUG FIX: the test matrix was previously masked with the
            # *train* labels (test[train[:,2]==1]), mixing the two sets
            # and breaking whenever their row counts differ.
            c_test = test[test[:, 2] == 1]
            i_test = test[test[:, 2] == 0]
            return (c_train, i_train), (c_test, i_test), args.plot
    except SystemExit:
        # argparse exits on bad arguments; fall back to defaults instead.
        # TODO: load default scores filenames
        print("Default")
        load_default()
def score_norm():
    """Sketch of sigmoid-based score normalization (currently unused).

    Defines the normalizing sigmoid and records the constraints the fused
    score must satisfy; nothing is executed and the function returns None.
    """
    # slope: steepness of the sigmoid; shift: its horizontal displacement;
    # score: the raw score value being normalized.
    def sigmoid(slope, shift, score):
        return 1 / (1 + np.exp(slope * (shift - score)))
    # Fused scores restrictions:
    #   sum(w) = 1 for all w
    #   w >= 0 for all w
    #   f(u,v,w)(z) = wT * sigmoid(u,v)(z) in [0,1]
def aprox_AUR(w, clients, impostors):
    """Approximate the area under the ROC curve for weighted score fusion.

    Uses a Heaviside step to count how often a weighted client/impostor
    score difference is positive, accumulated over every client/impostor
    pair and every score column, normalized by the number of pairs.

    Args:
        w: sequence of per-score-column weights.
        clients: 2-D array of client rows; last column is the class tag.
        impostors: 2-D array of impostor rows; last column is the class tag.

    Returns:
        float: approximate AUR. Note it sums over score columns without
        dividing by their count, so it can exceed 1 for multiple scores.
    """
    # Drop the trailing client/impostor tag column.
    c_scores = clients[:, :-1]
    i_scores = impostors[:, :-1]
    num_scores = c_scores.shape[1]
    # Heaviside step with the H(0) = 0.5 convention.
    heaviside = lambda x: 0.5 if x == 0 else 0 if x < 0 else 1
    sum_scores = 0.0
    for c in c_scores:
        for i in i_scores:
            # range() instead of Python-2-only xrange(); iteration is identical.
            for score in range(num_scores):
                sum_scores += heaviside(w[score] * (c[score] - i[score]))
    return sum_scores / float(c_scores.shape[0] * i_scores.shape[0])
def aprox_w(clients, impostors):
    """Grid-search the fusion weights maximizing the approximate AUR.

    Enumerates weight pairs on a 0.1 grid whose components sum to 1,
    scores each pair with aprox_AUR, and prints the best AUR together
    with every weight pair that achieves it.

    Args:
        clients: 2-D array of client rows (last column is the class tag).
        impostors: 2-D array of impostor rows (last column is the class tag).
    """
    aur_by_weights = {}
    pairs = itertools.product(np.arange(0, 1.1, 0.1), repeat=2)
    # NOTE: comparing float sums with == 1.0 is fragile, but this keeps the
    # original selection behavior unchanged.
    weights = [w for w in pairs if sum(w) == 1.0]
    for w in weights:
        aur = aprox_AUR(w, clients, impostors)
        # setdefault replaces the explicit "in aurW.keys()" membership test.
        aur_by_weights.setdefault(aur, []).append(w)
    max_aur = max(aur_by_weights.keys())
    print ("El valor máximo del área bajo la curva ROC= %4f \nCon los pesos:" % max_aur)
    for v in aur_by_weights[max_aur]:
        print(" [ %.2f, %.2f ]" % (v[0], v[1]))
def min_AUR(clients, impostors):
    """Unfinished sketch of a gradient-based AUR optimization.

    NOTE(review): ``diff(sigmoid)`` hands a plain Python lambda to sympy's
    ``diff``, which cannot sympify a lambda — this call most likely raises.
    The trailing commented assignment suggests work in progress; nothing
    here produces a usable result yet.
    """
    # Delete clients/impostors tag
    c_scores = clients[:,:-1]
    i_scores = impostors[:,:-1]
    # Score normalization sigmoid
    norm_sigmoid = lambda u,v,z: 1 / (1 + np.exp(u*(v-z)))
    sigmoid = lambda beta,z: 1 / (1 + np.exp(-(beta-z)))
    # Symbolic variable presumably intended for the derivative below.
    z = Symbol('z')
    diff(sigmoid)
    #derJ_U = w *
# Script entry point: load score data, grid-search the fusion weights on
# the train split, and optionally scatter-plot both splits.
if __name__ == "__main__":
    # NOTE(review): when get_data() falls back to defaults it returns None,
    # which would make this unpacking raise TypeError — confirm intended.
    (c_train,i_train),(c_test,i_test), p= get_data()
    aprox_w(c_train,i_train)
    if p:
        # Two stacked scatter plots with shared axes: train above, test below.
        f, (ax1, ax2) = plt.subplots(2,sharex=True, sharey=True)
        #c_train = train[train[:,2]==1]
        #i_train = train[train[:,2]==0]
        # NOTE(review): both y-axes are labeled "Score 1" — verify whether
        # one of them should differ.
        ax1.set_ylabel("Score 1")
        ax2.set_ylabel("Score 1")
        ax2.set_xlabel("Score 2")
        # Clients plotted in green, impostors in red.
        ax1.plot(c_train[:,0],c_train[:,1],'o', color='green')
        ax1.plot(i_train[:,0],i_train[:,1],'o', color='red')
        ax1.set_title('Train Scores')
        #c_test = test[test[:,2]==1]
        #i_test = test[test[:,2]==0]
        ax2.plot(c_test[:,0],c_test[:,1],'o', color='green')
        ax2.plot(i_test[:,0],i_test[:,1],'o', color='red')
        ax2.set_title('Test Scores')
        plt.show()
normalized fusion score data
# -*- coding: utf-8 -*-
import argparse
import numpy as np
from sympy.functions.special.delta_functions import Heaviside
import matplotlib.pyplot as plt
import itertools
from sympy import *
def load_default():
    """Fallback used when no score files are supplied on the command line.

    Loading of default score sets is not implemented yet; the function
    only announces that fact.

    Returns:
        tuple: ``(None, None)`` placeholders for (train, test) scores.
    """
    # Parenthesized print is behavior-identical under Python 2 for a single
    # argument and keeps the line valid under Python 3.
    print("TODO: load default scores")
    return None, None
def get_data():
    """Parse command-line arguments, load and z-normalize the score data.

    Each input file is expected to hold one row per sample in the form
    ``score_1 score_2 label`` where label 1 marks a client (genuine) sample
    and label 0 an impostor sample. Every score column is normalized to
    zero mean and unit variance before splitting.

    Returns:
        tuple: ``((c_train, i_train), (c_test, i_test), plot_flag)`` —
        client/impostor row splits of each matrix plus the --plot flag.
        Falls through (implicitly returning None) when defaults are used.
    """
    parser = argparse.ArgumentParser(description="Solve the ROC curve")
    parser.add_argument("-tr", "--train", type=argparse.FileType('r'),
                        help="Scores for train data", metavar="TR",
                        dest="train_file")
    parser.add_argument("-te", "--test", type=argparse.FileType('r'),
                        help="Scores for test data", metavar="TE",
                        dest="test_file")
    parser.add_argument("-p", "--plot", action='store_true',
                        help="Make plot",
                        dest="plot")
    try:
        args = parser.parse_args()
        if args.train_file is None or args.test_file is None:
            load_default()
        else:
            train = np.loadtxt(args.train_file)
            test = np.loadtxt(args.test_file)
            # Z-score normalization of each score column (label column
            # excluded). range() replaces Python-2-only xrange().
            for score in range(train.shape[1] - 1):
                train[:, score] = np.divide(train[:, score] - np.mean(train[:, score]), np.std(train[:, score]))
                test[:, score] = np.divide(test[:, score] - np.mean(test[:, score]), np.std(test[:, score]))
            # Split clients (label 1) from impostors (label 0).
            c_train = train[train[:, 2] == 1]
            i_train = train[train[:, 2] == 0]
            # BUG FIX: the test matrix was previously masked with the
            # *train* labels (test[train[:,2]==1]), mixing the two sets
            # and breaking whenever their row counts differ.
            c_test = test[test[:, 2] == 1]
            i_test = test[test[:, 2] == 0]
            return (c_train, i_train), (c_test, i_test), args.plot
    except SystemExit:
        # argparse exits on bad arguments; fall back to defaults instead.
        # TODO: load default scores filenames
        print("Default")
        load_default()
def aprox_AUR(w, clients, impostors):
    """Approximate the area under the ROC curve for weighted score fusion.

    Uses a Heaviside step to count how often the weighted client score
    exceeds the raw impostor score, accumulated over every client/impostor
    pair and every score column, normalized by the number of pairs.

    Args:
        w: sequence of per-score-column weights.
        clients: 2-D array of client rows; last column is the class tag.
        impostors: 2-D array of impostor rows; last column is the class tag.

    Returns:
        float: approximate AUR. Note it sums over score columns without
        dividing by their count, so it can exceed 1 for multiple scores.
    """
    # Drop the trailing client/impostor tag column.
    c_scores = clients[:, :-1]
    i_scores = impostors[:, :-1]
    num_scores = c_scores.shape[1]
    # Heaviside step with the H(0) = 0.5 convention.
    heaviside = lambda x: 0.5 if x == 0 else 0 if x < 0 else 1
    sum_scores = 0.0
    for c in c_scores:
        for i in i_scores:
            # range() instead of Python-2-only xrange(); iteration is identical.
            for score in range(num_scores):
                # NOTE(review): this weights only the client score
                # (w*c - i); an earlier revision used w*(c - i). Confirm
                # which form is intended before relying on the results.
                subs_scores = w[score] * c[score] - i[score]
                sum_scores += heaviside(subs_scores)
    return sum_scores / float(c_scores.shape[0] * i_scores.shape[0])
def aprox_w(clients, impostors):
    """Grid-search the fusion weights maximizing the approximate AUR.

    Enumerates weight pairs on a 0.1 grid whose components sum to 1,
    scores each pair with aprox_AUR, and prints the best AUR together
    with every weight pair that achieves it.

    Args:
        clients: 2-D array of client rows (last column is the class tag).
        impostors: 2-D array of impostor rows (last column is the class tag).
    """
    aur_by_weights = {}
    pairs = itertools.product(np.arange(0, 1.1, 0.1), repeat=2)
    # NOTE: comparing float sums with == 1.0 is fragile, but this keeps the
    # original selection behavior unchanged.
    weights = [w for w in pairs if sum(w) == 1.0]
    for w in weights:
        aur = aprox_AUR(w, clients, impostors)
        # setdefault replaces the explicit "in aurW.keys()" membership test.
        aur_by_weights.setdefault(aur, []).append(w)
    max_aur = max(aur_by_weights.keys())
    print ("El valor máximo del área bajo la curva ROC= %4f \nCon los pesos:" % max_aur)
    for v in aur_by_weights[max_aur]:
        print(" [ %.2f, %.2f ]" % (v[0], v[1]))
def min_AUR(clients, impostors):
    """Unfinished sketch of a gradient-based AUR optimization.

    NOTE(review): ``diff(sigmoid)`` hands a plain Python lambda to sympy's
    ``diff``, which cannot sympify a lambda — this call most likely raises.
    The trailing commented assignment suggests work in progress; nothing
    here produces a usable result yet.
    """
    # Delete clients/impostors tag
    c_scores = clients[:,:-1]
    i_scores = impostors[:,:-1]
    # Score normalization sigmoid
    norm_sigmoid = lambda u,v,z: 1 / (1 + np.exp(u*(v-z)))
    sigmoid = lambda beta,z: 1 / (1 + np.exp(-(beta-z)))
    # Symbolic variable presumably intended for the derivative below.
    z = Symbol('z')
    diff(sigmoid)
    #derJ_U = w *
# Script entry point: load score data, grid-search the fusion weights on
# the train split, and optionally scatter-plot both splits.
if __name__ == "__main__":
    # NOTE(review): when get_data() falls back to defaults it returns None,
    # which would make this unpacking raise TypeError — confirm intended.
    (c_train,i_train),(c_test,i_test), p= get_data()
    aprox_w(c_train,i_train)
    if p:
        # Two stacked scatter plots with shared axes: train above, test below.
        f, (ax1, ax2) = plt.subplots(2,sharex=True, sharey=True)
        #c_train = train[train[:,2]==1]
        #i_train = train[train[:,2]==0]
        # NOTE(review): both y-axes are labeled "Score 1" — verify whether
        # one of them should differ.
        ax1.set_ylabel("Score 1")
        ax2.set_ylabel("Score 1")
        ax2.set_xlabel("Score 2")
        # Clients plotted in green, impostors in red.
        ax1.plot(c_train[:,0],c_train[:,1],'o', color='green')
        ax1.plot(i_train[:,0],i_train[:,1],'o', color='red')
        ax1.set_title('Train Scores')
        #c_test = test[test[:,2]==1]
        #i_test = test[test[:,2]==0]
        ax2.plot(c_test[:,0],c_test[:,1],'o', color='green')
        ax2.plot(i_test[:,0],i_test[:,1],'o', color='red')
        ax2.set_title('Test Scores')
        plt.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.