index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestStressSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import static org.fest.reflect.core.Reflection.field;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.List;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.CounterGroup;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.PollableSource.Status;
import org.apache.flume.channel.ChannelProcessor;
import org.junit.Before;
import org.junit.Test;
public class TestStressSource {
private ChannelProcessor mockProcessor;
@Before
public void setUp() {
mockProcessor = mock(ChannelProcessor.class);
}
private Event getEvent(StressSource source) {
return field("event").ofType(Event.class)
.in(source)
.get();
}
@SuppressWarnings("unchecked")
private List<Event> getLastProcessedEventList(StressSource source) {
return field("eventBatchListToProcess").ofType(List.class).in(source).get();
}
private CounterGroup getCounterGroup(StressSource source) {
return field("counterGroup").ofType(CounterGroup.class).in(source).get();
}
@Test
public void testMaxTotalEvents() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("maxTotalEvents", "35");
source.configure(context);
source.start();
for (int i = 0; i < 50; i++) {
source.process();
}
verify(mockProcessor, times(35)).processEvent(getEvent(source));
}
@Test
public void testRateLimitedEventsNoBatch() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("maxTotalEvents", "20");
context.put("maxEventsPerSecond", "20");
source.configure(context);
long startTime = System.currentTimeMillis();
source.start();
for (int i = 0; i < 20; i++) {
source.process();
}
long finishTime = System.currentTimeMillis();
//Expecting to see within a second +/- 30% for 20 events
Assert.assertTrue(finishTime - startTime < 1300);
Assert.assertTrue(finishTime - startTime > 700);
source.stop();
}
@Test
public void testNonRateLimitedEventsNoBatch() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
//Test with no limit - expect to see very fast performance
context = new Context();
context.put("maxTotalEvents", "20");
context.put("maxEventsPerSecond", "0");
source.configure(context);
long startTime = System.currentTimeMillis();
source.start();
for (int i = 0; i <= 20; i++) {
source.process();
}
long finishTime = System.currentTimeMillis();
Assert.assertTrue(finishTime - startTime < 70);
}
@Test
public void testRateLimitedEventsBatch() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("maxTotalEvents", "20");
context.put("maxEventsPerSecond", "20");
context.put("batchSize", "3");
source.configure(context);
long startTime = System.currentTimeMillis();
source.start();
for (int i = 0; i < 20; i++) {
source.process();
}
long finishTime = System.currentTimeMillis();
//Expecting to see within a second +/- 30% for 20 events
Assert.assertTrue(finishTime - startTime < 1300);
Assert.assertTrue(finishTime - startTime > 700);
source.stop();
}
@Test
public void testNonRateLimitedEventsBatch() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
//Test with no limit - expect to see very fast performance
context.put("maxTotalEvents", "20");
context.put("maxEventsPerSecond", "0");
source.configure(context);
long startTime = System.currentTimeMillis();
source.start();
for (int i = 0; i <= 20; i++) {
source.process();
}
long finishTime = System.currentTimeMillis();
Assert.assertTrue(finishTime - startTime < 70);
}
@Test
public void testBatchEvents() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("maxTotalEvents", "35");
context.put("batchSize", "10");
source.configure(context);
source.start();
for (int i = 0; i < 50; i++) {
if (source.process() == Status.BACKOFF) {
TestCase.assertTrue("Source should have sent all events in 4 batches", i == 4);
break;
}
if (i < 3) {
verify(mockProcessor,
times(i + 1)).processEventBatch(getLastProcessedEventList(source));
} else {
verify(mockProcessor,
times(1)).processEventBatch(getLastProcessedEventList(source));
}
}
long successfulEvents = getCounterGroup(source).get("events.successful");
TestCase.assertTrue("Number of successful events should be 35 but was " +
successfulEvents, successfulEvents == 35);
long failedEvents = getCounterGroup(source).get("events.failed");
TestCase.assertTrue("Number of failure events should be 0 but was " +
failedEvents, failedEvents == 0);
}
@Test
public void testBatchEventsWithoutMatTotalEvents() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("batchSize", "10");
source.configure(context);
source.start();
for (int i = 0; i < 10; i++) {
Assert.assertFalse("StressSource with no maxTotalEvents should not return " +
Status.BACKOFF, source.process() == Status.BACKOFF);
}
verify(mockProcessor,
times(10)).processEventBatch(getLastProcessedEventList(source));
long successfulEvents = getCounterGroup(source).get("events.successful");
TestCase.assertTrue("Number of successful events should be 100 but was " +
successfulEvents, successfulEvents == 100);
long failedEvents = getCounterGroup(source).get("events.failed");
TestCase.assertTrue("Number of failure events should be 0 but was " +
failedEvents, failedEvents == 0);
}
@Test
public void testMaxSuccessfulEvents() throws InterruptedException,
EventDeliveryException {
StressSource source = new StressSource();
source.setChannelProcessor(mockProcessor);
Context context = new Context();
context.put("maxSuccessfulEvents", "35");
source.configure(context);
source.start();
for (int i = 0; i < 10; i++) {
source.process();
}
// 1 failed call, 10 successful
doThrow(new ChannelException("stub")).when(
mockProcessor).processEvent(getEvent(source));
source.process();
doNothing().when(mockProcessor).processEvent(getEvent(source));
for (int i = 0; i < 10; i++) {
source.process();
}
// 1 failed call, 50 successful
doThrow(new ChannelException("stub")).when(
mockProcessor).processEvent(getEvent(source));
source.process();
doNothing().when(mockProcessor).processEvent(getEvent(source));
for (int i = 0; i < 50; i++) {
source.process();
}
// We should have called processEvent(evt) 37 times, twice for failures
// and twice for successful events.
verify(mockProcessor, times(37)).processEvent(getEvent(source));
}
}
| 9,900 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestPollableSourceRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.collect.Lists;
import java.util.concurrent.CountDownLatch;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.PollableSource;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Lifecycle test for {@link PollableSourceRunner}: starts the runner around a
 * stub {@link PollableSource}, lets it poll 50 times (with periodic simulated
 * delivery failures), then stops it.
 */
public class TestPollableSourceRunner {

  private static final Logger logger = LoggerFactory
      .getLogger(TestPollableSourceRunner.class);

  private PollableSourceRunner sourceRunner;

  @Before
  public void setUp() {
    sourceRunner = new PollableSourceRunner();
  }

  @Test
  public void testLifecycle() throws InterruptedException {
    final Channel channel = new MemoryChannel();
    // The test blocks on this latch; the runner has polled 50 times once it
    // reaches zero.
    final CountDownLatch latch = new CountDownLatch(50);

    Configurables.configure(channel, new Context());

    final ChannelSelector cs = new ReplicatingChannelSelector();
    cs.setChannels(Lists.newArrayList(channel));

    // Stub source: each process() call counts the latch down and puts one
    // event on the channel inside a transaction. Every 20th countdown
    // simulates a delivery failure so the runner's BACKOFF/rollback path is
    // exercised too.
    PollableSource source = new PollableSource() {
      private String name;
      private ChannelProcessor cp = new ChannelProcessor(cs);

      @Override
      public Status process() throws EventDeliveryException {
        Transaction transaction = channel.getTransaction();

        try {
          transaction.begin();
          Event event = EventBuilder.withBody(String.valueOf(
              "Event " + latch.getCount()).getBytes());

          latch.countDown();

          // NOTE(review): the latch is counted down BEFORE the failure check,
          // so even failed deliveries advance the latch toward zero and the
          // await() below cannot hang on the simulated failures.
          if (latch.getCount() % 20 == 0) {
            throw new EventDeliveryException("I don't like event:" + event);
          }

          channel.put(event);
          transaction.commit();
          return Status.READY;
        } catch (EventDeliveryException e) {
          logger.error("Unable to deliver event. Exception follows.", e);
          transaction.rollback();
          return Status.BACKOFF;
        } finally {
          // Always close the transaction, whether committed or rolled back.
          transaction.close();
        }
      }

      @Override
      public long getBackOffSleepIncrement() {
        return PollableSourceConstants.DEFAULT_BACKOFF_SLEEP_INCREMENT;
      }

      @Override
      public long getMaxBackOffSleepInterval() {
        return PollableSourceConstants.DEFAULT_MAX_BACKOFF_SLEEP;
      }

      @Override
      public void start() {
        // Unused.
      }

      @Override
      public void stop() {
        // Unused.
      }

      @Override
      public LifecycleState getLifecycleState() {
        // Unused.
        return null;
      }

      @Override
      public void setName(String name) {
        this.name = name;
      }

      @Override
      public String getName() {
        return name;
      }

      @Override
      public void setChannelProcessor(ChannelProcessor channelProcessor) {
        cp = channelProcessor;
      }

      @Override
      public ChannelProcessor getChannelProcessor() {
        return cp;
      }
    };

    sourceRunner.setSource(source);
    // The runner spawns its own polling thread in start().
    sourceRunner.start();
    // Wait until the source has been polled 50 times, then shut down.
    latch.await();
    sourceRunner.stop();
  }
}
| 9,901 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestAvroSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.security.cert.X509Certificate;
import java.nio.channels.ServerSocketChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.function.Consumer;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.ipc.netty.NettyTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.apache.flume.source.avro.AvroFlumeEvent;
import org.apache.flume.source.avro.AvroSourceProtocol;
import org.apache.flume.source.avro.Status;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.compression.JZlibDecoder;
import io.netty.handler.codec.compression.JZlibEncoder;
import io.netty.handler.codec.compression.ZlibEncoder;
import io.netty.handler.ssl.SslHandler;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.doThrow;
/**
 * Tests for {@link AvroSource}: lifecycle, port-conflict handling,
 * compression negotiation, SSL (component and global keystore), IP filtering,
 * and channel-write failure counters.
 */
public class TestAvroSource {

  private static final Logger logger = LoggerFactory
      .getLogger(TestAvroSource.class);

  private int selectedPort;
  private AvroSource source;
  private Channel channel;
  private InetAddress localhost;

  @Before
  public void setUp() throws UnknownHostException {
    localhost = InetAddress.getByName("127.0.0.1");
    source = new AvroSource();
    channel = new MemoryChannel();

    Configurables.configure(channel, new Context());

    List<Channel> channels = new ArrayList<Channel>();
    channels.add(channel);

    ChannelSelector rcs = new ReplicatingChannelSelector();
    rcs.setChannels(channels);

    source.setChannelProcessor(new ChannelProcessor(rcs));
  }

  @Test
  public void testLifecycle() throws InterruptedException, IOException {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    Configurables.configure(source, context);

    source.start();
    Assert
        .assertTrue("Reached start or error", LifecycleController.waitForOneOf(
            source, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());

    source.stop();
    Assert.assertTrue("Reached stop or error",
        LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  @Test
  public void testSourceStoppedOnFlumeExceptionIfPortUsed()
      throws InterruptedException, IOException {
    final String loopbackIPv4 = "127.0.0.1";
    final int port = 10500;

    // Create a dummy socket bound to a known port so the source's bind fails.
    try (ServerSocketChannel dummyServerSocket = ServerSocketChannel.open()) {
      dummyServerSocket.socket().setReuseAddress(true);
      dummyServerSocket.socket().bind(new InetSocketAddress(loopbackIPv4, port));

      Context context = new Context();
      context.put("port", String.valueOf(port));
      context.put("bind", loopbackIPv4);
      Configurables.configure(source, context);

      try {
        source.start();
        Assert.fail("Expected an exception during startup caused by binding on a used port");
      } catch (FlumeException e) {
        logger.info("Received an expected exception.", e);
        Assert.assertTrue("Expected a server socket setup related root cause",
            e.getMessage().contains("server socket"));
      }
    }

    // As the port is already in use, an exception is thrown and the source is
    // stopped, cleaning up the sockets opened during source.start().
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  @Test
  public void testInvalidAddress()
      throws InterruptedException, IOException {
    final String invalidHost = "invalid.host";
    final int port = 10501;

    Context context = new Context();
    context.put("port", String.valueOf(port));
    context.put("bind", invalidHost);
    Configurables.configure(source, context);

    try {
      source.start();
      Assert.fail("Expected an exception during startup caused by binding on a invalid host");
    } catch (FlumeException e) {
      logger.info("Received an expected exception.", e);
      Assert.assertTrue("Expected a server socket setup related root cause",
          e.getMessage().contains("server socket"));
    }

    // Startup failed, so the source must have stopped itself and cleaned up.
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  @Test
  public void testRequestWithNoCompression() throws InterruptedException, IOException {
    doRequest(false, false, 6);
  }

  @Test
  public void testRequestWithCompressionOnClientAndServerOnLevel0()
      throws InterruptedException, IOException {
    doRequest(true, true, 0);
  }

  @Test
  public void testRequestWithCompressionOnClientAndServerOnLevel1()
      throws InterruptedException, IOException {
    doRequest(true, true, 1);
  }

  @Test
  public void testRequestWithCompressionOnClientAndServerOnLevel6()
      throws InterruptedException, IOException {
    doRequest(true, true, 6);
  }

  @Test
  public void testRequestWithCompressionOnClientAndServerOnLevel9()
      throws InterruptedException, IOException {
    doRequest(true, true, 9);
  }

  @Test(expected = org.apache.avro.AvroRuntimeException.class)
  public void testRequestWithCompressionOnServerOnly() throws InterruptedException, IOException {
    // This will fail because both client and server need compression on.
    doRequest(true, false, 6);
  }

  @Test(expected = org.apache.avro.AvroRuntimeException.class)
  public void testRequestWithCompressionOnClientOnly() throws InterruptedException, IOException {
    // This will fail because both client and server need compression on.
    doRequest(false, true, 6);
  }

  /**
   * Starts the source, appends one event through an Avro/Netty client with the
   * requested compression setup, and verifies the event arrived on the channel.
   */
  private void doRequest(boolean serverEnableCompression, boolean clientEnableCompression,
      int compressionLevel) throws InterruptedException, IOException {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    context.put("threads", "50");
    if (serverEnableCompression) {
      context.put("compression-type", "deflate");
    } else {
      context.put("compression-type", "none");
    }
    Configurables.configure(source, context);

    source.start();
    Assert
        .assertTrue("Reached start or error", LifecycleController.waitForOneOf(
            source, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());

    AvroSourceProtocol client;
    NettyTransceiver nettyTransceiver;
    if (clientEnableCompression) {
      // Install zlib codecs at the head of the client pipeline so the wire
      // traffic is deflated/inflated symmetrically with the server.
      nettyTransceiver = new NettyTransceiver(new InetSocketAddress(selectedPort),
          (ch) -> {
            ChannelPipeline pipeline = ch.pipeline();
            ZlibEncoder encoder = new JZlibEncoder(compressionLevel);
            pipeline.addFirst("deflater", encoder);
            pipeline.addFirst("inflater", new JZlibDecoder());
          });
      client = SpecificRequestor.getClient(AvroSourceProtocol.class, nettyTransceiver);
    } else {
      nettyTransceiver = new NettyTransceiver(new InetSocketAddress(selectedPort));
      client = SpecificRequestor.getClient(AvroSourceProtocol.class, nettyTransceiver);
    }

    AvroFlumeEvent avroEvent = new AvroFlumeEvent();
    avroEvent.setHeaders(new HashMap<>());
    avroEvent.setBody(ByteBuffer.wrap("Hello avro".getBytes()));

    Status status = client.append(avroEvent);
    Assert.assertEquals(Status.OK, status);

    Transaction transaction = channel.getTransaction();
    transaction.begin();

    Event event = channel.take();
    Assert.assertNotNull(event);
    Assert.assertEquals("Channel contained our event", "Hello avro",
        new String(event.getBody()));
    transaction.commit();
    transaction.close();

    logger.debug("Round trip event:{}", event);

    nettyTransceiver.close();
    source.stop();
    Assert.assertTrue("Reached stop or error",
        LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  @Test
  public void testSslRequestWithComponentKeystore() throws InterruptedException, IOException {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    context.put("ssl", "true");
    context.put("keystore", "src/test/resources/server.p12");
    context.put("keystore-password", "password");
    context.put("keystore-type", "PKCS12");
    Configurables.configure(source, context);

    doSslRequest();
  }

  @Test
  public void testSslRequestWithGlobalKeystore() throws InterruptedException, IOException {
    System.setProperty("javax.net.ssl.keyStore", "src/test/resources/server.p12");
    System.setProperty("javax.net.ssl.keyStorePassword", "password");
    System.setProperty("javax.net.ssl.keyStoreType", "PKCS12");

    try {
      Context context = new Context();
      context.put("port", String.valueOf(selectedPort = getFreePort()));
      context.put("bind", "0.0.0.0");
      context.put("ssl", "true");
      Configurables.configure(source, context);

      doSslRequest();
    } finally {
      // Clear ALL three global SSL properties in a finally block. The original
      // code never cleared keyStoreType and skipped cleanup entirely when the
      // request failed, leaking global SSL state into later tests in this JVM.
      System.clearProperty("javax.net.ssl.keyStore");
      System.clearProperty("javax.net.ssl.keyStorePassword");
      System.clearProperty("javax.net.ssl.keyStoreType");
    }
  }

  /**
   * Starts the (already configured) SSL source, appends one event over an SSL
   * client connection, and verifies the round trip.
   */
  private void doSslRequest() throws InterruptedException, IOException {
    source.start();
    Assert
        .assertTrue("Reached start or error", LifecycleController.waitForOneOf(
            source, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());

    AvroSourceProtocol client = SpecificRequestor.getClient(
        AvroSourceProtocol.class, new NettyTransceiver(new InetSocketAddress(selectedPort),
            (ch) -> {
              ChannelPipeline pipeline = ch.pipeline();
              // createSSLEngine() already puts the engine in client mode.
              SSLEngine engine = createSSLEngine();
              pipeline.addFirst("ssl", new SslHandler(engine));
            }));

    AvroFlumeEvent avroEvent = new AvroFlumeEvent();
    avroEvent.setHeaders(new HashMap<>());
    avroEvent.setBody(ByteBuffer.wrap("Hello avro ssl".getBytes()));

    Status status = client.append(avroEvent);
    Assert.assertEquals(Status.OK, status);

    Transaction transaction = channel.getTransaction();
    transaction.begin();

    Event event = channel.take();
    Assert.assertNotNull(event);
    Assert.assertEquals("Channel contained our event", "Hello avro ssl",
        new String(event.getBody()));
    transaction.commit();
    transaction.close();

    logger.debug("Round trip event:{}", event);

    source.stop();
    Assert.assertTrue("Reached stop or error",
        LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  /**
   * Builds a client-mode SSLEngine that trusts any server certificate.
   * Never returns null; wraps any setup failure in a RuntimeException.
   */
  private SSLEngine createSSLEngine() {
    try {
      SSLContext sslContext = SSLContext.getInstance("TLS");
      sslContext.init(null, new TrustManager[]{new PermissiveTrustManager()}, null);
      SSLEngine sslEngine = sslContext.createSSLEngine();
      sslEngine.setUseClientMode(true);
      return sslEngine;
    } catch (Exception ex) {
      throw new RuntimeException("Cannot create SSL channel", ex);
    }
  }

  /**
   * Bogus trust manager accepting any certificate. Test-only; never use in
   * production code.
   */
  private static class PermissiveTrustManager implements X509TrustManager {
    @Override
    public void checkClientTrusted(X509Certificate[] certs, String s) {
      // nothing
    }

    @Override
    public void checkServerTrusted(X509Certificate[] certs, String s) {
      // nothing
    }

    @Override
    public X509Certificate[] getAcceptedIssuers() {
      return new X509Certificate[0];
    }
  }

  @Test
  public void testValidIpFilterAllows()
      throws InterruptedException, IOException {
    doIpFilterTest(localhost, "allow:name:localhost,deny:ip:*", true, false);
    doIpFilterTest(localhost, "allow:ip:" + localhost.getHostAddress() +
        ",deny:ip:*", true, false);
    doIpFilterTest(localhost, "allow:ip:*", true, false);
    doIpFilterTest(localhost, "allow:ip:" +
        localhost.getHostAddress().substring(0, 3) +
        "*,deny:ip:*", true, false);
    doIpFilterTest(localhost, "allow:ip:127.0.0.2,allow:ip:" +
        localhost.getHostAddress().substring(0, 3) +
        "*,deny:ip:*", true, false);
    doIpFilterTest(localhost, "allow:name:localhost,deny:ip:*", true, true);
    doIpFilterTest(localhost, "allow:ip:*", true, true);
  }

  @Test
  public void testValidIpFilterDenys()
      throws InterruptedException, IOException {
    doIpFilterTest(localhost, "deny:ip:*", false, false);
    doIpFilterTest(localhost, "deny:name:localhost", false, false);
    doIpFilterTest(localhost, "deny:ip:" + localhost.getHostAddress() +
        ",allow:ip:*", false, false);
    doIpFilterTest(localhost, "deny:ip:*", false, false);
    doIpFilterTest(localhost, "allow:ip:45.2.2.2,deny:ip:*", false, false);
    doIpFilterTest(localhost, "deny:ip:" +
        localhost.getHostAddress().substring(0, 3) +
        "*,allow:ip:*", false, false);
    doIpFilterTest(localhost, "deny:ip:*", false, true);
  }

  @Test
  public void testInvalidIpFilter() throws InterruptedException, IOException {
    doIpFilterTest(localhost, "deny:ip:*", false, false);
    doIpFilterTest(localhost, "allow:name:localhost", true, false);
    doIpFilterTest(localhost, "deny:ip:127.0.0.2,allow:ip:*,deny:ip:" +
        localhost.getHostAddress(), true, false);
    doIpFilterTest(localhost, "deny:ip:" +
        localhost.getHostAddress().substring(0, 3) + "*,allow:ip:*",
        false, false);

    // Private lambda expression to check the received FlumeException within this test.
    Consumer<Exception> exceptionChecker = (Exception ex) -> {
      logger.info("Received an expected exception", ex);
      // Covers all ipFilter related exceptions.
      Assert.assertTrue("Expected an ipFilterRules related exception",
          ex.getMessage().contains("ipFilter"));
    };

    try {
      doIpFilterTest(localhost, null, false, false);
      Assert.fail("The null ipFilterRules config should have thrown an exception.");
    } catch (FlumeException e) {
      exceptionChecker.accept(e);
    }

    try {
      doIpFilterTest(localhost, "", true, false);
      Assert.fail("The empty string ipFilterRules config should have thrown "
          + "an exception");
    } catch (FlumeException e) {
      exceptionChecker.accept(e);
    }

    try {
      doIpFilterTest(localhost, "homer:ip:45.4.23.1", true, false);
      Assert.fail("Bad ipFilterRules config should have thrown an exception.");
    } catch (FlumeException e) {
      exceptionChecker.accept(e);
    }

    try {
      doIpFilterTest(localhost, "allow:sleeps:45.4.23.1", true, false);
      Assert.fail("Bad ipFilterRules config should have thrown an exception.");
    } catch (FlumeException e) {
      exceptionChecker.accept(e);
    }
  }

  /**
   * Runs one ipFilter scenario: configures the source with the given rules,
   * attempts a client append (optionally over SSL), and asserts the event was
   * allowed or denied as expected.
   *
   * @param dest address the client connects to
   * @param ruleDefinition ipFilterRules value, or null to omit the property
   * @param eventShouldBeAllowed whether the append is expected to succeed
   * @param testWithSSL whether to run the scenario over SSL
   */
  public void doIpFilterTest(InetAddress dest, String ruleDefinition,
      boolean eventShouldBeAllowed, boolean testWithSSL)
      throws InterruptedException, IOException {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    context.put("ipFilter", "true");
    if (ruleDefinition != null) {
      context.put("ipFilterRules", ruleDefinition);
    }
    if (testWithSSL) {
      logger.info("Client testWithSSL" + testWithSSL);
      context.put("ssl", "true");
      context.put("keystore", "src/test/resources/server.p12");
      context.put("keystore-password", "password");
      context.put("keystore-type", "PKCS12");
    }

    // Invalid configuration may result in a FlumeException.
    Configurables.configure(source, context);
    source.start();
    Assert
        .assertTrue("Reached start or error", LifecycleController.waitForOneOf(
            source, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());

    AvroSourceProtocol client;
    NettyTransceiver nettyTransceiver = null;
    try {
      if (testWithSSL) {
        nettyTransceiver = new NettyTransceiver(new InetSocketAddress(dest, selectedPort),
            (ch) -> {
              ChannelPipeline pipeline = ch.pipeline();
              // createSSLEngine() never returns null (it throws on failure),
              // so no null check is needed here.
              SSLEngine engine = createSSLEngine();
              pipeline.addFirst("ssl", new SslHandler(engine));
            });
        client = SpecificRequestor.getClient(AvroSourceProtocol.class, nettyTransceiver);
      } else {
        nettyTransceiver = new NettyTransceiver(new InetSocketAddress(dest, selectedPort));
        client = SpecificRequestor.getClient(AvroSourceProtocol.class, nettyTransceiver);
      }

      AvroFlumeEvent avroEvent = new AvroFlumeEvent();
      avroEvent.setHeaders(new HashMap<CharSequence, CharSequence>());
      avroEvent.setBody(ByteBuffer.wrap("Hello avro ipFilter".getBytes()));

      logger.info("Client about to append");
      Status status = client.append(avroEvent);
      logger.info("Client appended");
      Assert.assertEquals(Status.OK, status);
    } catch (IOException | AvroRuntimeException e) {
      // A connect/append failure is the "denied" outcome.
      Assert.assertTrue("Should have been allowed: " + ruleDefinition, !eventShouldBeAllowed);
      return;
    } finally {
      if (nettyTransceiver != null) {
        nettyTransceiver.close();
      }
      source.stop();
    }
    Assert.assertTrue("Should have been denied: " + ruleDefinition,
        eventShouldBeAllowed);

    Transaction transaction = channel.getTransaction();
    transaction.begin();

    Event event = channel.take();
    Assert.assertNotNull(event);
    Assert.assertEquals("Channel contained our event", "Hello avro ipFilter",
        new String(event.getBody()));
    transaction.commit();
    transaction.close();

    logger.debug("Round trip event:{}", event);

    Assert.assertTrue("Reached stop or error",
        LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
    Assert.assertEquals("Server is stopped", LifecycleState.STOP,
        source.getLifecycleState());
  }

  @Test
  public void testErrorCounterChannelWriteFail() throws Exception {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    source.configure(context);

    // Make every channel write fail so both the single-event and batch
    // failure paths increment the counter.
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    doThrow(new ChannelException("dummy")).when(cp).processEvent(any(Event.class));
    doThrow(new ChannelException("dummy")).when(cp).processEventBatch(anyListOf(Event.class));
    source.setChannelProcessor(cp);
    source.start();

    AvroFlumeEvent avroEvent = new AvroFlumeEvent();
    avroEvent.setHeaders(new HashMap<CharSequence, CharSequence>());
    avroEvent.setBody(ByteBuffer.wrap("Hello avro ssl".getBytes()));
    source.append(avroEvent);
    source.appendBatch(Arrays.asList(avroEvent));

    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(source, "sourceCounter");
    Assert.assertEquals(2, sc.getChannelWriteFail());
    source.stop();
  }

  /** Finds a currently free TCP port by binding an ephemeral server socket. */
  private static int getFreePort() throws IOException {
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    }
  }
}
| 9,902 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestDefaultSourceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.flume.Source;
import org.apache.flume.SourceFactory;
import org.apache.flume.source.http.HTTPSource;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestDefaultSourceFactory {

  private SourceFactory sourceFactory;

  @Before
  public void setUp() {
    sourceFactory = new DefaultSourceFactory();
  }

  /**
   * Creating two avro sources under different names, and then re-creating
   * sources under the same names, must always produce fresh, distinct
   * instances — the factory must not cache by name.
   */
  @Test
  public void testDuplicateCreate() {
    Source first = sourceFactory.create("avroSource1", "avro");
    Source second = sourceFactory.create("avroSource2", "avro");
    Assert.assertNotNull(first);
    Assert.assertNotNull(second);
    Assert.assertNotSame(first, second);
    Assert.assertTrue(first instanceof AvroSource);
    Assert.assertTrue(second instanceof AvroSource);

    // Asking for the same names again must not return the earlier instances.
    Assert.assertNotSame(first, sourceFactory.create("avroSource1", "avro"));
    Assert.assertNotSame(second, sourceFactory.create("avroSource2", "avro"));
  }

  /** Asserts that the factory maps {@code type} to an instance of {@code typeClass}. */
  private void verifySourceCreation(String name, String type,
      Class<?> typeClass) throws Exception {
    Source created = sourceFactory.create(name, type);
    Assert.assertNotNull(created);
    Assert.assertTrue(typeClass.isInstance(created));
  }

  /** Checks every built-in type alias, plus a fully-qualified custom class name. */
  @Test
  public void testSourceCreation() throws Exception {
    verifySourceCreation("seq-src", "seq", SequenceGeneratorSource.class);
    verifySourceCreation("netcat-src", "netcat", NetcatSource.class);
    verifySourceCreation("netcat-udp-src", "netcatudp", NetcatUdpSource.class);
    verifySourceCreation("exec-src", "exec", ExecSource.class);
    verifySourceCreation("avro-src", "avro", AvroSource.class);
    verifySourceCreation("syslogtcp-src", "syslogtcp", SyslogTcpSource.class);
    verifySourceCreation("multiport_syslogtcp-src", "multiport_syslogtcp",
        MultiportSyslogTCPSource.class);
    verifySourceCreation("syslogudp-src", "syslogudp", SyslogUDPSource.class);
    verifySourceCreation("spooldir-src", "spooldir",
        SpoolDirectorySource.class);
    verifySourceCreation("http-src", "http", HTTPSource.class);
    verifySourceCreation("thrift-src", "thrift", ThriftSource.class);
    verifySourceCreation("custom-src", MockSource.class.getCanonicalName(),
        MockSource.class);
  }
}
| 9,903 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestSyslogUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.apache.flume.Event;
import org.junit.Assert;
import org.junit.Test;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Clock;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import io.netty.buffer.ByteBuf;
import static io.netty.buffer.Unpooled.*;
public class TestSyslogUtils {

  /**
   * Pattern used to render RFC 3164 style timestamps (month name, day-of-month,
   * 24-hour time). The original tests built stamps with "MMM d hh:MM:ss", which
   * printed a 12-hour clock ("hh") and the month NUMBER in the minutes position
   * ("MM"). The assertions still passed — expected and actual were parsed from
   * the same string — but the generated data was nonsensical. "HH:mm:ss"
   * produces a real wall-clock time and remains compatible with the
   * "yyyyMMM d HH:mm:ss" format used to parse the expected value.
   */
  private static final String RFC3164_TIME_PATTERN = "MMM d HH:mm:ss";

  /** Wraps {@code msg} in a fresh heap buffer of the given initial capacity. */
  private static ByteBuf toBuffer(String msg, int capacity) {
    ByteBuf buff = buffer(capacity);
    buff.writeBytes(msg.getBytes());
    return buff;
  }

  /** Extracts the next event from {@code buff}, failing fast if none is produced. */
  private static Event extract(SyslogUtils util, ByteBuf buff) {
    Event e = util.extractEvent(buff);
    if (e == null) {
      throw new NullPointerException("Event is null");
    }
    return e;
  }

  /** Asserts {@code e} was flagged INVALID (facility/severity default to 0). */
  private static void assertInvalid(Event e, String expectedBody) {
    Map<String, String> headers = e.getHeaders();
    Assert.assertEquals("0", headers.get(SyslogUtils.SYSLOG_FACILITY));
    Assert.assertEquals("0", headers.get(SyslogUtils.SYSLOG_SEVERITY));
    Assert.assertEquals(SyslogUtils.SyslogStatus.INVALID.getSyslogStatus(),
        headers.get(SyslogUtils.EVENT_STATUS));
    Assert.assertEquals(expectedBody.trim(), new String(e.getBody()).trim());
  }

  /** Asserts {@code e} parsed cleanly with the given facility/severity and body. */
  private static void assertValid(Event e, String facility, String severity,
      String expectedBody) {
    Map<String, String> headers = e.getHeaders();
    Assert.assertEquals(facility, headers.get(SyslogUtils.SYSLOG_FACILITY));
    Assert.assertEquals(severity, headers.get(SyslogUtils.SYSLOG_SEVERITY));
    Assert.assertNull(headers.get(SyslogUtils.EVENT_STATUS));
    Assert.assertEquals(expectedBody.trim(), new String(e.getBody()).trim());
  }

  @Test
  public void TestHeader0() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // timestamp with hh:mm format timezone with no version
    String msg1 = "<10>" + stamp1 + "+08:00" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1 + "+0800", format1, host1, data1);
  }

  @Test
  public void TestHeader1() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ss";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    String msg1 = "<10>1 " + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1, format1, host1, data1);
  }

  @Test
  public void TestHeader2() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // timestamp with 'Z' appended, translates to UTC
    String msg1 = "<10>1 " + stamp1 + "Z" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1 + "+0000", format1, host1, data1);
  }

  @Test
  public void TestHeader3() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // timestamp with hh:mm format timezone
    String msg1 = "<10>1 " + stamp1 + "+08:00" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1 + "+0800", format1, host1, data1);
  }

  @Test
  public void TestHeader4() throws ParseException {
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // null format timestamp (-)
    String msg1 = "<10>1 " + "-" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, null, null, host1, data1);
  }

  @Test
  public void TestHeader5() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ss";
    String host1 = "-";
    String data1 = "some msg";
    // null host
    String msg1 = "<10>1 " + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1, format1, null, data1);
  }

  @Test
  public void TestHeader6() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "-";
    String data1 = "some msg";
    // null host
    String msg1 = "<10>1 " + stamp1 + "Z" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1 + "+0000", format1, null, data1);
  }

  @Test
  public void TestHeader7() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "-";
    String data1 = "some msg";
    // null host
    String msg1 = "<10>1 " + stamp1 + "+08:00" + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1 + "+0800", format1, null, data1);
  }

  @Test
  public void TestHeader8() throws ParseException {
    String stamp1 = "2012-04-13T11:11:11.999";
    String format1 = "yyyy-MM-dd'T'HH:mm:ss.S";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    String msg1 = "<10>1 " + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, stamp1, format1, host1, data1);
  }

  @Test
  public void TestHeader9() throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat(RFC3164_TIME_PATTERN, Locale.ENGLISH);
    Calendar cal = Calendar.getInstance();
    String year = String.valueOf(cal.get(Calendar.YEAR));
    String stamp1 = sdf.format(cal.getTime());
    String format1 = "yyyyMMM d HH:mm:ss";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // RFC 3164 style header: no version field, no year in the timestamp.
    String msg1 = "<10>" + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, year + stamp1, format1, host1, data1);
  }

  // NOTE(review): duplicate of TestHeader9; kept so the test suite's public
  // surface is unchanged.
  @Test
  public void TestHeader10() throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat(RFC3164_TIME_PATTERN, Locale.ENGLISH);
    Calendar cal = Calendar.getInstance();
    String year = String.valueOf(cal.get(Calendar.YEAR));
    String stamp1 = sdf.format(cal.getTime());
    String format1 = "yyyyMMM d HH:mm:ss";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // RFC 3164 style header: no version field, no year in the timestamp.
    String msg1 = "<10>" + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, year + stamp1, format1, host1, data1);
  }

  @Test
  public void TestHeader11() throws ParseException {
    // SyslogUtils should truncate microsecond precision to only 3 digits.
    // This is to maintain consistency between the two syslog implementations.
    String inputStamp  = "2014-10-03T17:20:01.123456-07:00";
    String outputStamp = "2014-10-03T17:20:01.123-07:00";
    String format1 = "yyyy-MM-dd'T'HH:mm:ss.S";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    String msg1 = "<10>" + inputStamp + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, outputStamp, format1, host1, data1);
  }

  @Test
  public void TestRfc3164HeaderApacheLogWithNulls() throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat(RFC3164_TIME_PATTERN, Locale.ENGLISH);
    Calendar cal = Calendar.getInstance();
    String year = String.valueOf(cal.get(Calendar.YEAR));
    String stamp1 = sdf.format(cal.getTime());
    String format1 = "yyyyMMM d HH:mm:ss";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "- hyphen_null_breaks_5424_pattern [07/Jun/2012:14:46:44 -0600]";
    String msg1 = "<10>" + stamp1 + " " + host1 + " " + data1 + "\n";
    checkHeader(msg1, year + stamp1, format1, host1, data1);
  }

  /* This test creates a series of dates that range from 10 months in the past to (5 days short
   * of) one month in the future. This tests that the year addition code is clever enough to
   * handle scenarios where the event received was generated in a different year to what flume
   * considers to be "current" (e.g. where there has been some lag somewhere, especially when
   * flicking over on New Year's eve, or when you are about to flick over and the flume's
   * system clock is slightly slower than the Syslog source's clock).
   */
  @Test
  public void TestRfc3164Dates() throws ParseException {
    //We're going to run this test using a mocked clock, once for the next 13 months
    for (int monthOffset = 0; monthOffset <= 13; monthOffset++) {
      Clock mockClock = Clock.fixed(
          LocalDateTime.now().plusMonths(monthOffset).toInstant(ZoneOffset.UTC),
          Clock.systemDefaultZone().getZone()
      );
      //We're then going to try input dates (without the year) for all 12 months, starting
      //10 months ago, and finishing next month (all relative to our mocked clock)
      for (int i = -10; i <= 1; i++) {
        SimpleDateFormat sdf = new SimpleDateFormat(RFC3164_TIME_PATTERN, Locale.ENGLISH);
        Date date = new Date(mockClock.millis());
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        cal.add(Calendar.MONTH, i);
        //Small tweak to avoid the 1 month in the future ticking over by a few seconds between now
        //and when the checkHeader actually runs
        if (i == 1) {
          cal.add(Calendar.DAY_OF_MONTH, -1);
        }
        String stamp1 = sdf.format(cal.getTime());
        String year = String.valueOf(cal.get(Calendar.YEAR));
        String format1 = "yyyyMMM d HH:mm:ss";
        String host1 = "ubuntu-11.cloudera.com";
        String data1 = "some msg";
        String msg1 = "<10>" + stamp1 + " " + host1 + " " + data1 + "\n";
        checkHeader(msg1, year + stamp1, format1, host1, data1, mockClock);
      }
    }
  }

  /**
   * Feeds {@code msg1} through a SyslogUtils configured with {@code keepFields}
   * and the given clock, then asserts the extracted timestamp, host and body.
   * A null {@code stamp1}/{@code host1} asserts the header is absent instead.
   */
  public static void checkHeader(String keepFields, String msg1, String stamp1, String format1,
      String host1, String data1, Clock clock) throws ParseException {
    SyslogUtils util;
    if (keepFields == null || keepFields.isEmpty()) {
      util = new SyslogUtils(SyslogUtils.DEFAULT_SIZE, new HashSet<String>(), false, clock);
    } else {
      util = new SyslogUtils(SyslogUtils.DEFAULT_SIZE,
                             SyslogUtils.chooseFieldsToKeep(keepFields),
                             false, clock);
    }
    Event e = extract(util, toBuffer(msg1, 200));
    Map<String, String> headers2 = e.getHeaders();
    if (stamp1 == null) {
      Assert.assertFalse(headers2.containsKey("timestamp"));
    } else {
      SimpleDateFormat formater = new SimpleDateFormat(format1, Locale.ENGLISH);
      Assert.assertEquals(String.valueOf(formater.parse(stamp1).getTime()),
          headers2.get("timestamp"));
    }
    if (host1 == null) {
      Assert.assertFalse(headers2.containsKey("host"));
    } else {
      // Expected value first (the original had expected/actual reversed).
      Assert.assertEquals(host1, headers2.get("host"));
    }
    Assert.assertEquals(data1, new String(e.getBody()));
  }

  public static void checkHeader(String keepFields, String msg1, String stamp1, String format1,
      String host1, String data1) throws ParseException {
    checkHeader(
        keepFields, msg1, stamp1, format1,
        host1, data1, Clock.system(Clock.systemDefaultZone().getZone())
    );
  }

  public static void checkHeader(String msg1, String stamp1, String format1,
      String host1, String data1, Clock clock) throws ParseException {
    checkHeader("none", msg1, stamp1, format1, host1, data1, clock);
  }

  // Check headers for when keepFields is "none".
  public static void checkHeader(String msg1, String stamp1, String format1,
      String host1, String data1) throws ParseException {
    checkHeader(
        "none", msg1, stamp1, format1,
        host1, data1, Clock.system(Clock.systemDefaultZone().getZone())
    );
  }

  /**
   * Test bad event format 1: Priority is not numeric
   */
  @Test
  public void testExtractBadEvent1() {
    String badData1 = "<10F> bad bad data\n";
    assertInvalid(extract(new SyslogUtils(false), toBuffer(badData1, 100)), badData1);
  }

  /**
   * Test bad event format 2: The first char is not <
   */
  @Test
  public void testExtractBadEvent2() {
    String badData1 = "hi guys! <10> bad bad data\n";
    assertInvalid(extract(new SyslogUtils(false), toBuffer(badData1, 100)), badData1);
  }

  /**
   * Test bad event format 3: Empty priority - <>
   */
  @Test
  public void testExtractBadEvent3() {
    String badData1 = "<> bad bad data\n";
    assertInvalid(extract(new SyslogUtils(false), toBuffer(badData1, 100)), badData1);
  }

  /**
   * Test bad event format 4: Priority too long
   */
  @Test
  public void testExtractBadEvent4() {
    String badData1 = "<123123123123123123123123123123> bad bad data\n";
    assertInvalid(extract(new SyslogUtils(false), toBuffer(badData1, 100)), badData1);
  }

  /**
   * Good event
   */
  @Test
  public void testExtractGoodEvent() {
    String priority = "<10>";
    String goodData1 = "Good good good data\n";
    Event e = extract(new SyslogUtils(false), toBuffer(priority + goodData1, 100));
    assertValid(e, "1", "2", priority + goodData1);
  }

  /**
   * Bad event immediately followed by a good event
   */
  @Test
  public void testBadEventGoodEvent() {
    String badData1 = "hi guys! <10F> bad bad data\n";
    String priority = "<10>";
    String goodData1 = "Good good good data\n";
    SyslogUtils util = new SyslogUtils(false);
    ByteBuf buff = toBuffer(badData1 + priority + goodData1, 100);
    assertInvalid(extract(util, buff), badData1);
    assertValid(extract(util, buff), "1", "2", priority + goodData1);
  }

  /**
   * Good event immediately followed by a bad event
   */
  @Test
  public void testGoodEventBadEvent() {
    String badData1 = "hi guys! <10F> bad bad data\n";
    String priority = "<10>";
    String goodData1 = "Good good good data\n";
    SyslogUtils util = new SyslogUtils(false);
    ByteBuf buff = toBuffer(priority + goodData1 + badData1, 100);
    assertValid(extract(util, buff), "1", "2", priority + goodData1);
    assertInvalid(extract(util, buff), badData1);
  }

  /**
   * Two consecutive bad events
   */
  @Test
  public void testBadEventBadEvent() {
    String badData1 = "hi guys! <10F> bad bad data\n";
    String badData2 = "hi guys! <20> bad bad data\n";
    SyslogUtils util = new SyslogUtils(false);
    ByteBuf buff = toBuffer(badData1 + badData2, 100);
    assertInvalid(extract(util, buff), badData1);
    assertInvalid(extract(util, buff), badData2);
  }

  /**
   * Two consecutive good events. The original test accidentally re-checked the
   * FIRST event's EVENT_STATUS for the second event; the second event's own
   * headers are checked here.
   */
  @Test
  public void testGoodEventGoodEvent() {
    String priority = "<10>";
    String goodData1 = "Good good good data\n";
    String priority2 = "<20>";
    String goodData2 = "Good really good data\n";
    SyslogUtils util = new SyslogUtils(false);
    ByteBuf buff = toBuffer(priority + goodData1 + priority2 + goodData2, 100);
    assertValid(extract(util, buff), "1", "2", priority + goodData1);
    assertValid(extract(util, buff), "2", "4", priority2 + goodData2);
  }

  @Test
  public void testExtractBadEventLarge() {
    String badData1 = "<10> bad bad data bad bad\n";
    // The minimum size (which is 10) overrides the 5 specified here.
    SyslogUtils util = new SyslogUtils(5, null, false);
    ByteBuf buff = toBuffer(badData1, 100);

    // First chunk: truncated at the 10-byte minimum and flagged INCOMPLETE.
    Event e = extract(util, buff);
    Map<String, String> headers = e.getHeaders();
    Assert.assertEquals("1", headers.get(SyslogUtils.SYSLOG_FACILITY));
    Assert.assertEquals("2", headers.get(SyslogUtils.SYSLOG_SEVERITY));
    Assert.assertEquals(SyslogUtils.SyslogStatus.INCOMPLETE.getSyslogStatus(),
        headers.get(SyslogUtils.EVENT_STATUS));
    Assert.assertEquals("<10> bad b".trim(), new String(e.getBody()).trim());

    // Remainder: no leading priority, so it is INVALID.
    assertInvalid(extract(util, buff), "ad data ba");
  }

  @Test
  public void testKeepFields() throws Exception {
    String stamp1 = "2012-04-13T11:11:11";
    String format1 = "yyyy-MM-dd'T'HH:mm:ssZ";
    String host1 = "ubuntu-11.cloudera.com";
    String data1 = "some msg";
    // timestamp with hh:mm format timezone
    String msg1 = "<10>1 " + stamp1 + "+08:00" + " " + host1 + " " + data1 + "\n";
    checkHeader("none", msg1, stamp1 + "+0800", format1, host1, data1);
    checkHeader("false", msg1, stamp1 + "+0800", format1, host1, data1);
    String data2 = "ubuntu-11.cloudera.com some msg";
    checkHeader("hostname", msg1, stamp1 + "+0800", format1, host1, data2);
    String data3 = "2012-04-13T11:11:11+08:00 ubuntu-11.cloudera.com some msg";
    checkHeader("timestamp hostname", msg1, stamp1 + "+0800", format1, host1, data3);
    String data4 = "<10>2012-04-13T11:11:11+08:00 ubuntu-11.cloudera.com some msg";
    checkHeader("priority timestamp hostname", msg1, stamp1 + "+0800", format1, host1, data4);
    String data5 = "<10>1 2012-04-13T11:11:11+08:00 ubuntu-11.cloudera.com some msg";
    checkHeader("priority version timestamp hostname", msg1, stamp1 + "+0800",
        format1, host1, data5);
    checkHeader("all", msg1, stamp1 + "+0800", format1, host1, data5);
    checkHeader("true", msg1, stamp1 + "+0800", format1, host1, data5);
  }

  @Test
  public void testGetIPWhenSuccessful() {
    SocketAddress socketAddress = new InetSocketAddress("localhost", 2000);
    String ip = SyslogUtils.getIP(socketAddress);
    assertEquals("127.0.0.1", ip);
  }

  @Test
  public void testGetIPWhenInputIsNull() {
    SocketAddress socketAddress = null;
    String ip = SyslogUtils.getIP(socketAddress);
    assertEquals("", ip);
  }

  @Test
  public void testGetIPWhenInputIsNotInetSocketAddress() {
    SocketAddress socketAddress = new SocketAddress() {};
    String ip = SyslogUtils.getIP(socketAddress);
    assertEquals("", ip);
  }

  @Test
  public void testGetHostnameWhenSuccessful() {
    SocketAddress socketAddress = new InetSocketAddress("127.0.0.1", 2000);
    String hostname = SyslogUtils.getHostname(socketAddress);
    // Reverse lookup of 127.0.0.1 may or may not resolve to "localhost".
    if (!isLocalHost(hostname)) {
      fail("Expected either 'localhost' or '127.0.0.1' but got '" + hostname + "'");
    }
  }

  @Test
  public void testGetHostnameWhenInputIsNull() {
    SocketAddress socketAddress = null;
    String hostname = SyslogUtils.getHostname(socketAddress);
    assertEquals("", hostname);
  }

  @Test
  public void testGetHostnameWhenInputIsNotInetSocketAddress() {
    SocketAddress socketAddress = new SocketAddress() {};
    String hostname = SyslogUtils.getHostname(socketAddress);
    assertEquals("", hostname);
  }

  /** True if {@code value} names the loopback host, by suffix or literal IP. */
  public static boolean isLocalHost(String value) {
    return value != null && (value.endsWith("localhost") || value.equals("127.0.0.1"));
  }
}
| 9,904 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestNetcatSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.channels.ServerSocketChannel;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
public class TestNetcatSource {
private static final Logger logger =
LoggerFactory.getLogger(TestAvroSource.class);
private static int getFreePort() {
try (ServerSocket socket = new ServerSocket(0)) {
return socket.getLocalPort();
} catch (IOException e) {
throw new AssertionError("Can not find free port.", e);
}
}
/**
* Five first sentences of the Fables "The Crow and the Fox"
* written by Jean de La Fontaine, French poet.
*
* @see <a href="http://en.wikipedia.org/wiki/Jean_de_La_Fontaine">Jean de La Fontaine on
* wikipedia</a>
*/
  private final String french = "Maître Corbeau, sur un arbre perché, " +
      "Tenait en son bec un fromage. " +
      "Maître Renard, par l'odeur alléché, " +
      "Lui tint à peu près ce langage : " +
      "Et bonjour, Monsieur du Corbeau,";
  // English rendering of the same fable; exercises ASCII-only payloads while
  // `french` exercises accented (non-ASCII) characters across encodings.
  private final String english = "At the top of a tree perched Master Crow; " +
      "In his beak he was holding a cheese. " +
      "Drawn by the smell, Master Fox spoke, below. " +
      "The words, more or less, were these: " +
      "\"Hey, now, Sir Crow! Good day, good day!";
  // Port the source listens on for the current test (assigned via getFreePort()).
  private int selectedPort;
  // Source under test and the memory channel it feeds, wired up in setUp().
  private NetcatSource source;
  private Channel channel;
  // Loopback address the test sockets connect to.
  private InetAddress localhost;
  // Charset used for the expected-bytes side of every assertion.
  private Charset defaultCharset = Charset.forName("UTF-8");
/**
* We set up the the Netcat source and Flume Memory Channel on localhost
*
* @throws UnknownHostException
*/
@Before
public void setUp() throws UnknownHostException {
localhost = InetAddress.getByName("127.0.0.1");
source = new NetcatSource();
channel = new MemoryChannel();
Configurables.configure(channel, new Context());
List<Channel> channels = new ArrayList<Channel>();
channels.add(channel);
ChannelSelector rcs = new ReplicatingChannelSelector();
rcs.setChannels(channels);
source.setChannelProcessor(new ChannelProcessor(rcs));
}
/**
* Test with UTF-16BE encoding Text with both french and english sentences
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testUTF16BEencoding() throws InterruptedException, IOException {
String encoding = "UTF-16BE";
startSource(encoding, "false", "1", "512");
Socket netcatSocket = new Socket(localhost, selectedPort);
try {
// Test on english text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, english, encoding);
Assert.assertArrayEquals("Channel contained our event", english.getBytes(defaultCharset),
getFlumeEvent());
}
// Test on french text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, french, encoding);
Assert.assertArrayEquals("Channel contained our event", french.getBytes(defaultCharset),
getFlumeEvent());
}
} finally {
netcatSocket.close();
stopSource();
}
}
/**
* Test with UTF-16LE encoding Text with both french and english sentences
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testUTF16LEencoding() throws InterruptedException, IOException {
String encoding = "UTF-16LE";
startSource(encoding, "false", "1", "512");
Socket netcatSocket = new Socket(localhost, selectedPort);
try {
// Test on english text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, english, encoding);
Assert.assertArrayEquals("Channel contained our event", english.getBytes(defaultCharset),
getFlumeEvent());
}
// Test on french text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, french, encoding);
Assert.assertArrayEquals("Channel contained our event", french.getBytes(defaultCharset),
getFlumeEvent());
}
} finally {
netcatSocket.close();
stopSource();
}
}
/**
* Test with UTF-8 encoding Text with both french and english sentences
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testUTF8encoding() throws InterruptedException, IOException {
String encoding = "UTF-8";
startSource(encoding, "false", "1", "512");
Socket netcatSocket = new Socket(localhost, selectedPort);
try {
// Test on english text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, english, encoding);
Assert.assertArrayEquals("Channel contained our event", english.getBytes(defaultCharset),
getFlumeEvent());
}
// Test on french text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, french, encoding);
Assert.assertArrayEquals("Channel contained our event", french.getBytes(defaultCharset),
getFlumeEvent());
}
} finally {
netcatSocket.close();
stopSource();
}
}
/**
* Test with ISO-8859-1 encoding Text with both french and english sentences
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testIS88591encoding() throws InterruptedException, IOException {
String encoding = "ISO-8859-1";
startSource(encoding, "false", "1", "512");
Socket netcatSocket = new Socket(localhost, selectedPort);
try {
// Test on english text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, english, encoding);
Assert.assertArrayEquals("Channel contained our event", english.getBytes(defaultCharset),
getFlumeEvent());
}
// Test on french text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, french, encoding);
Assert.assertArrayEquals("Channel contained our event", french.getBytes(defaultCharset),
getFlumeEvent());
}
} finally {
netcatSocket.close();
stopSource();
}
}
/**
* Test if an ack is sent for every event in the correct encoding
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testAck() throws InterruptedException, IOException {
String encoding = "UTF-8";
String ackEvent = "OK";
startSource(encoding, "true", "1", "512");
Socket netcatSocket = new Socket(localhost, selectedPort);
LineIterator inputLineIterator = IOUtils.lineIterator(netcatSocket.getInputStream(), encoding);
try {
// Test on english text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, english, encoding);
Assert.assertArrayEquals("Channel contained our event", english.getBytes(defaultCharset),
getFlumeEvent());
Assert.assertEquals("Socket contained the Ack", ackEvent, inputLineIterator.nextLine());
}
// Test on french text snippet
for (int i = 0; i < 20; i++) {
sendEvent(netcatSocket, french, encoding);
Assert.assertArrayEquals("Channel contained our event", french.getBytes(defaultCharset),
getFlumeEvent());
Assert.assertEquals("Socket contained the Ack", ackEvent, inputLineIterator.nextLine());
}
} finally {
netcatSocket.close();
stopSource();
}
}
/**
* Test that line above MaxLineLength are discarded
*
* @throws InterruptedException
* @throws IOException
*/
@Test
public void testMaxLineLength() throws InterruptedException, IOException {
String encoding = "UTF-8";
startSource(encoding, "false", "1", "10");
Socket netcatSocket = new Socket(localhost, selectedPort);
try {
sendEvent(netcatSocket, "123456789", encoding);
Assert.assertArrayEquals("Channel contained our event",
"123456789".getBytes(defaultCharset), getFlumeEvent());
sendEvent(netcatSocket, english, encoding);
Assert.assertEquals("Channel does not contain an event", null, getRawFlumeEvent());
} finally {
netcatSocket.close();
stopSource();
}
}
  /**
   * Test that a line above max-line-length is discarded but still answered
   * with an explicit error ack, while a line within the limit is delivered
   * and acked with "OK".
   *
   * @throws InterruptedException
   * @throws IOException
   */
  @Test
  public void testMaxLineLengthwithAck() throws InterruptedException, IOException {
    String encoding = "UTF-8";
    String ackEvent = "OK";
    String ackErrorEvent = "FAILED: Event exceeds the maximum length (10 chars, including newline)";
    startSource(encoding, "true", "1", "10");
    Socket netcatSocket = new Socket(localhost, selectedPort);
    LineIterator inputLineIterator = IOUtils.lineIterator(netcatSocket.getInputStream(), encoding);
    try {
      // 9 chars + newline == the 10-char limit: delivered and acked "OK".
      sendEvent(netcatSocket, "123456789", encoding);
      Assert.assertArrayEquals("Channel contained our event",
          "123456789".getBytes(defaultCharset), getFlumeEvent());
      Assert.assertEquals("Socket contained the Ack", ackEvent, inputLineIterator.nextLine());
      // The english snippet exceeds the limit: dropped, and the error ack is sent instead.
      sendEvent(netcatSocket, english, encoding);
      Assert.assertEquals("Channel does not contain an event", null, getRawFlumeEvent());
      Assert.assertEquals("Socket contained the Error Ack", ackErrorEvent, inputLineIterator
          .nextLine());
    } finally {
      netcatSocket.close();
      stopSource();
    }
  }
/**
* Tests that the source is stopped when an exception is thrown
* on port bind attempt due to port already being in use.
*
* @throws InterruptedException
*/
@Test
public void testSourceStoppedOnFlumeException() throws InterruptedException, IOException {
boolean isFlumeExceptionThrown = false;
// create a dummy socket bound to a known port.
try (ServerSocketChannel dummyServerSocket = ServerSocketChannel.open()) {
dummyServerSocket.socket().setReuseAddress(true);
dummyServerSocket.socket().bind(new InetSocketAddress("0.0.0.0", 10500));
Context context = new Context();
context.put("port", String.valueOf(10500));
context.put("bind", "0.0.0.0");
context.put("ack-every-event", "false");
Configurables.configure(source, context);
source.start();
} catch (FlumeException fe) {
isFlumeExceptionThrown = true;
}
// As port is already in use, an exception is thrown and the source is stopped
// cleaning up the opened sockets during source.start().
Assert.assertTrue("Flume exception is thrown as port already in use", isFlumeExceptionThrown);
Assert.assertEquals("Server is stopped", LifecycleState.STOP,
source.getLifecycleState());
}
  /**
   * Configures and starts {@code source} on a freshly obtained port from
   * {@code getFreePort()} (stored in {@code selectedPort}), then asserts the
   * source reached the START lifecycle state.
   *
   * @param encoding      character encoding the source should use
   * @param ack           "true"/"false" — whether every event gets an ack reply
   * @param batchSize     number of events per channel transaction
   * @param maxLineLength maximum accepted line length, including the newline
   * @throws InterruptedException if interrupted while waiting for startup
   */
  private void startSource(String encoding, String ack, String batchSize, String maxLineLength)
      throws InterruptedException {
    Context context = new Context();
    context.put("port", String.valueOf(selectedPort = getFreePort()));
    context.put("bind", "0.0.0.0");
    context.put("ack-every-event", ack);
    context.put("encoding", encoding);
    context.put("batch-size", batchSize);
    context.put("max-line-length", maxLineLength);
    Configurables.configure(source, context);
    source.start();
    Assert.assertTrue("Reached start or error",
        LifecycleController.waitForOneOf(source, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START,
        source.getLifecycleState());
  }
private void sendEvent(Socket socket, String content, String encoding) throws IOException {
OutputStream output = socket.getOutputStream();
IOUtils.write(content + IOUtils.LINE_SEPARATOR_UNIX, output, encoding);
output.flush();
}
  /**
   * Takes one event from {@code channel} inside a transaction and returns its
   * body; fails the test if the channel is empty.
   *
   * <p>NOTE(review): a failed commit is rolled back and then silently
   * swallowed — the body is returned regardless. Confirm this best-effort
   * behavior is intended before tightening it.
   *
   * @return the body of the event taken from the channel
   */
  private byte[] getFlumeEvent() {
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    Assert.assertNotNull(event);
    try {
      transaction.commit();
    } catch (Throwable t) {
      transaction.rollback();
    } finally {
      transaction.close();
    }
    logger.debug("Round trip event:{}", event);
    return event.getBody();
  }
  /**
   * Takes one event from {@code channel} inside a transaction and returns it
   * as-is. Unlike {@code getFlumeEvent()} there is no non-null assertion, so
   * this may return {@code null} — callers use that to assert nothing arrived.
   *
   * @return the event taken from the channel, or {@code null} if the channel was empty
   */
  private Event getRawFlumeEvent() {
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    try {
      transaction.commit();
    } catch (Throwable t) {
      transaction.rollback();
    } finally {
      transaction.close();
    }
    logger.debug("Round trip event:{}", event);
    return event;
  }
private void stopSource() throws InterruptedException {
source.stop();
Assert.assertTrue("Reached stop or error",
LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
Assert.assertEquals("Server is stopped", LifecycleState.STOP,
source.getLifecycleState());
logger.info("Source stopped");
}
}
| 9,905 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestThriftSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.FlumeException;
import org.apache.flume.Transaction;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientConfigurationConstants;
import org.apache.flume.api.RpcClientFactory;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.instrumentation.SourceCounter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.Ignore;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.doThrow;
public class TestThriftSource {
private ThriftSource source;
private MemoryChannel channel;
private RpcClient client;
private final Properties props = new Properties();
private int port;
@Before
public void setUp() throws IOException {
try (ServerSocket socket = new ServerSocket(0)) {
port = socket.getLocalPort();
}
props.clear();
props.setProperty("hosts", "h1");
props.setProperty("hosts.h1", "0.0.0.0:" + String.valueOf(port));
props.setProperty(RpcClientConfigurationConstants.CONFIG_BATCH_SIZE, "10");
props.setProperty(RpcClientConfigurationConstants.CONFIG_REQUEST_TIMEOUT, "2000");
channel = new MemoryChannel();
source = new ThriftSource();
}
@After
public void stop() throws Exception {
source.stop();
}
private void configureSource() {
List<Channel> channels = new ArrayList<Channel>();
channels.add(channel);
ChannelSelector rcs = new ReplicatingChannelSelector();
rcs.setChannels(channels);
source.setChannelProcessor(new ChannelProcessor(rcs));
}
@Ignore("This test is flakey and causes tests to fail pretty often.")
@Test
public void testAppendSSLWithComponentKeystore() throws Exception {
Context context = new Context();
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
context.put("ssl", "true");
context.put("keystore", "src/test/resources/keystorefile.jks");
context.put("keystore-password", "password");
context.put("keystore-type", "JKS");
Configurables.configure(source, context);
doAppendSSL();
}
@Ignore("This test is flakey and causes tests to fail pretty often.")
@Test
public void testAppendSSLWithGlobalKeystore() throws Exception {
System.setProperty("javax.net.ssl.keyStore", "src/test/resources/keystorefile.jks");
System.setProperty("javax.net.ssl.keyStorePassword", "password");
System.setProperty("javax.net.ssl.keyStoreType", "JKS");
Context context = new Context();
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
context.put("ssl", "true");
Configurables.configure(source, context);
doAppendSSL();
System.clearProperty("javax.net.ssl.keyStore");
System.clearProperty("javax.net.ssl.keyStorePassword");
System.clearProperty("javax.net.ssl.keyStoreType");
}
private void doAppendSSL() throws EventDeliveryException {
Properties sslprops = (Properties)props.clone();
sslprops.put("ssl", "true");
sslprops.put("truststore", "src/test/resources/truststorefile.jks");
sslprops.put("truststore-password", "password");
client = RpcClientFactory.getThriftInstance(sslprops);
source.start();
for (int i = 0; i < 30; i++) {
client.append(EventBuilder.withBody(String.valueOf(i).getBytes()));
}
Transaction transaction = channel.getTransaction();
transaction.begin();
for (int i = 0; i < 30; i++) {
Event event = channel.take();
Assert.assertNotNull(event);
Assert.assertEquals(String.valueOf(i), new String(event.getBody()));
}
transaction.commit();
transaction.close();
}
@Test
public void testAppend() throws Exception {
client = RpcClientFactory.getThriftInstance(props);
Context context = new Context();
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
Configurables.configure(source, context);
source.start();
for (int i = 0; i < 30; i++) {
client.append(EventBuilder.withBody(String.valueOf(i).getBytes()));
}
Transaction transaction = channel.getTransaction();
transaction.begin();
for (int i = 0; i < 30; i++) {
Event event = channel.take();
Assert.assertNotNull(event);
Assert.assertEquals(String.valueOf(i), new String(event.getBody()));
}
transaction.commit();
transaction.close();
}
@Test
public void testAppendBatch() throws Exception {
client = RpcClientFactory.getThriftInstance(props);
Context context = new Context();
context.put("capacity", "1000");
context.put("transactionCapacity", "1000");
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
Configurables.configure(source, context);
source.start();
for (int i = 0; i < 30; i++) {
List<Event> events = Lists.newArrayList();
for (int j = 0; j < 10; j++) {
Map<String, String> hdrs = Maps.newHashMap();
hdrs.put("time", String.valueOf(System.currentTimeMillis()));
events.add(EventBuilder.withBody(String.valueOf(i).getBytes(), hdrs));
}
client.appendBatch(events);
}
Transaction transaction = channel.getTransaction();
transaction.begin();
long after = System.currentTimeMillis();
List<Integer> events = Lists.newArrayList();
for (int i = 0; i < 300; i++) {
Event event = channel.take();
Assert.assertNotNull(event);
Assert.assertTrue(Long.valueOf(event.getHeaders().get("time")) <= after);
events.add(Integer.parseInt(new String(event.getBody())));
}
transaction.commit();
transaction.close();
Collections.sort(events);
int index = 0;
//30 batches of 10
for (int i = 0; i < 30; i++) {
for (int j = 0; j < 10; j++) {
Assert.assertEquals(i, events.get(index++).intValue());
}
}
}
@Test
public void testAppendBigBatch() throws Exception {
client = RpcClientFactory.getThriftInstance(props);
Context context = new Context();
context.put("capacity", "3000");
context.put("transactionCapacity", "3000");
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
Configurables.configure(source, context);
source.start();
for (int i = 0; i < 5; i++) {
List<Event> events = Lists.newArrayList();
for (int j = 0; j < 500; j++) {
Map<String, String> hdrs = Maps.newHashMap();
hdrs.put("time", String.valueOf(System.currentTimeMillis()));
events.add(EventBuilder.withBody(String.valueOf(i).getBytes(), hdrs));
}
client.appendBatch(events);
}
Transaction transaction = channel.getTransaction();
transaction.begin();
long after = System.currentTimeMillis();
List<Integer> events = Lists.newArrayList();
for (int i = 0; i < 2500; i++) {
Event event = channel.take();
Assert.assertNotNull(event);
Assert.assertTrue(Long.valueOf(event.getHeaders().get("time")) < after);
events.add(Integer.parseInt(new String(event.getBody())));
}
transaction.commit();
transaction.close();
Collections.sort(events);
int index = 0;
//10 batches of 500
for (int i = 0; i < 5; i++) {
for (int j = 0; j < 500; j++) {
Assert.assertEquals(i, events.get(index++).intValue());
}
}
}
@Test
public void testMultipleClients() throws Exception {
ExecutorService submitter = Executors.newCachedThreadPool();
client = RpcClientFactory.getThriftInstance(props);
Context context = new Context();
context.put("capacity", "1000");
context.put("transactionCapacity", "1000");
channel.configure(context);
configureSource();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
Configurables.configure(source, context);
source.start();
ExecutorCompletionService<Void> completionService = new ExecutorCompletionService<>(submitter);
for (int i = 0; i < 30; i++) {
completionService.submit(new SubmitHelper(i), null);
}
//wait for all threads to be done
for (int i = 0; i < 30; i++) {
completionService.take();
}
Transaction transaction = channel.getTransaction();
transaction.begin();
long after = System.currentTimeMillis();
List<Integer> events = Lists.newArrayList();
for (int i = 0; i < 300; i++) {
Event event = channel.take();
Assert.assertNotNull(event);
Assert.assertTrue(Long.valueOf(event.getHeaders().get("time")) < after);
events.add(Integer.parseInt(new String(event.getBody())));
}
transaction.commit();
transaction.close();
Collections.sort(events);
int index = 0;
//30 batches of 10
for (int i = 0; i < 30; i++) {
for (int j = 0; j < 10; j++) {
Assert.assertEquals(i, events.get(index++).intValue());
}
}
}
@Test
public void testErrorCounterChannelWriteFail() throws Exception {
client = RpcClientFactory.getThriftInstance(props);
Context context = new Context();
context.put(ThriftSource.CONFIG_BIND, "0.0.0.0");
context.put(ThriftSource.CONFIG_PORT, String.valueOf(port));
source.configure(context);
ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
doThrow(new ChannelException("dummy")).when(cp).processEvent(any(Event.class));
doThrow(new ChannelException("dummy")).when(cp).processEventBatch(anyListOf(Event.class));
source.setChannelProcessor(cp);
source.start();
Event event = EventBuilder.withBody("hello".getBytes());
try {
client.append(event);
} catch (EventDeliveryException e) {
//
}
try {
client.appendBatch(Arrays.asList(event));
} catch (EventDeliveryException e) {
//
}
SourceCounter sc = (SourceCounter) Whitebox.getInternalState(source, "sourceCounter");
Assert.assertEquals(2, sc.getChannelWriteFail());
source.stop();
}
private class SubmitHelper implements Runnable {
private final int i;
public SubmitHelper(int i) {
this.i = i;
}
@Override
public void run() {
List<Event> events = Lists.newArrayList();
for (int j = 0; j < 10; j++) {
Map<String, String> hdrs = Maps.newHashMap();
hdrs.put("time", String.valueOf(System.currentTimeMillis()));
events.add(EventBuilder.withBody(String.valueOf(i).getBytes(), hdrs));
}
try {
client.appendBatch(events);
} catch (EventDeliveryException e) {
throw new FlumeException(e);
}
}
}
}
| 9,906 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/MockSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.flume.Source;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.lifecycle.LifecycleState;
/**
 * A no-op {@code Source} implementation for unit tests: every lifecycle and
 * channel-processor operation is ignored, and only the component name is kept.
 */
public class MockSource implements Source {

  private String name;

  public MockSource() {
  }

  @Override
  public void setName(String name) {
    this.name = name;
  }

  @Override
  public String getName() {
    return name;
  }

  @Override
  public void start() {
    // no-op
  }

  @Override
  public void stop() {
    // no-op
  }

  @Override
  public LifecycleState getLifecycleState() {
    return null;
  }

  @Override
  public void setChannelProcessor(ChannelProcessor cp) {
    // no-op
  }

  @Override
  public ChannelProcessor getChannelProcessor() {
    return null;
  }
}
| 9,907 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestMultiportSyslogTCPSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.UnknownHostException;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.security.cert.X509Certificate;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.BiConsumer;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.flume.source.MultiportSyslogTCPSource.LineSplitter;
import org.apache.flume.source.MultiportSyslogTCPSource.MultiportSyslogHandler;
import org.apache.flume.source.MultiportSyslogTCPSource.ParsedBuffer;
import org.apache.flume.source.MultiportSyslogTCPSource.ThreadSafeDecoder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.session.DefaultIoSessionDataStructureFactory;
import org.apache.mina.transport.socket.nio.NioSession;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.*;
import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
public class TestMultiportSyslogTCPSource {
private static final Logger LOGGER = LogManager.getLogger();
private static final String TEST_CLIENT_IP_HEADER = "testClientIPHeader";
private static final String TEST_CLIENT_HOSTNAME_HEADER = "testClientHostnameHeader";
private static final int getFreePort() throws IOException {
try (ServerSocket socket = new ServerSocket(0)) {
return socket.getLocalPort();
}
}
private final DateTime time = new DateTime();
private final String stamp1 = time.toString();
private final String host1 = "localhost.localdomain";
private final String data1 = "proc1 - some msg";
  /**
   * Helper function to generate a syslog message.
   *
   * @param counter sequence number appended to the message body so individual
   *                events can be told apart
   * @return the complete, newline-terminated syslog line as bytes in the
   *         platform default charset
   */
  private byte[] getEvent(int counter) {
    // stamp1 is DateTime.toString() — presumably an ISO-8601 timestamp with a
    // zone offset; TODO confirm the original claim that it "translates to UTC".
    String msg1 = "<10>" + stamp1 + " " + host1 + " " + data1 + " "
        + String.valueOf(counter) + "\n";
    return msg1.getBytes();
  }
  /**
   * Core driver shared by the multi-port tests: configures {@code channel},
   * starts {@code source} listening on {@code numPorts} distinct free ports,
   * sends one event to each port via {@code eventSenderFuncton}, then takes
   * the same number of events back off the channel into {@code channelEvents}.
   *
   * @param source            source under test; started here, stopped by the caller
   * @param channel           channel receiving events; capacity forced to 2000
   * @param channelEvents     out-param: events taken from the channel are appended here
   * @param numPorts          number of listening ports (one event sent per port)
   * @param channelProcessor  optional processor; when {@code null} a replicating one is built
   * @param eventSenderFuncton callback that delivers the payload bytes to a port
   * @param additionalContext extra source configuration (e.g. SSL settings)
   * @return the list of ports the source was configured with, in send order
   * @throws IOException on port allocation or socket failure
   */
  private List<Integer> testNPorts(MultiportSyslogTCPSource source, Channel channel,
      List<Event> channelEvents, int numPorts, ChannelProcessor channelProcessor,
      BiConsumer<Integer, byte[]> eventSenderFuncton, Context additionalContext)
      throws IOException {
    LOGGER.info("source: {}, channel: {}, numPorts: {}", source.toString(),
        channel.getName(), numPorts);
    Context channelContext = new Context();
    channelContext.put("capacity", String.valueOf(2000));
    channelContext.put("transactionCapacity", String.valueOf(2000));
    Configurables.configure(channel, channelContext);
    if (channelProcessor == null) {
      List<Channel> channels = Lists.newArrayList();
      channels.add(channel);
      ChannelSelector rcs = new ReplicatingChannelSelector();
      rcs.setChannels(channels);
      source.setChannelProcessor(new ChannelProcessor(rcs));
    } else {
      source.setChannelProcessor(channelProcessor);
    }
    // Collect numPorts distinct free ports (getFreePort may return duplicates).
    List<Integer> portList = new ArrayList<>(numPorts);
    while (portList.size() < numPorts) {
      int port = getFreePort();
      if (!portList.contains(port)) {
        portList.add(port);
      }
    }
    // The source takes its listening ports as a space-separated string.
    StringBuilder ports = new StringBuilder();
    for (int i = 0; i < numPorts; i++) {
      ports.append(String.valueOf(portList.get(i))).append(" ");
    }
    LOGGER.info("ports: {}", ports.toString());
    Context context = new Context();
    context.put(SyslogSourceConfigurationConstants.CONFIG_PORTS,
        ports.toString().trim());
    context.put("portHeader", "port");
    context.putAll(additionalContext.getParameters());
    source.configure(context);
    source.start();
    // One event per port; the payload embeds the port's index for later matching.
    for (int i = 0; i < numPorts; i++) {
      byte[] data = getEvent(i);
      eventSenderFuncton.accept(portList.get(i), data);
      LOGGER.info("Sent {} to port {}", new String(data), portList.get(i));
    }
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int i = 0; i < numPorts; i++) {
      Event e = channel.take();
      if (e == null) {
        LOGGER.error("Got a null event for port number: {}", i);
        throw new NullPointerException("Event is null");
      }
      channelEvents.add(e);
    }
    try {
      txn.commit();
    } catch (Throwable t) {
      txn.rollback();
    } finally {
      txn.close();
    }
    return portList;
  }
/**
* Basic test to exercise multiple-port parsing.
*/
@Test
public void testMultiplePorts() throws IOException, ParseException {
MultiportSyslogTCPSource source = new MultiportSyslogTCPSource();
Channel channel = new MemoryChannel();
channel.setName("MultiplePorts");
List<Event> channelEvents = new ArrayList<>();
int numPorts = 1000;
final List<Socket> socketList = new ArrayList<>();
List<Integer> portList = testNPorts(source, channel, channelEvents,
numPorts, null, getSimpleEventSender(socketList), new Context());
//Since events can arrive out of order, search for each event in the array
processEvents(channelEvents, numPorts, portList);
closeSockets(socketList);
source.stop();
}
  /**
   * Basic test to exercise multiple-port parsing over SSL. The client's trust
   * manager deliberately accepts any server certificate so the test keystore
   * works without a matching truststore.
   */
  @Ignore("This test is flakey and causes tests to fail pretty often.")
  @Test
  public void testMultiplePortsSSL() throws Exception {
    SSLContext sslContext = SSLContext.getInstance("TLS");
    // Trust-all manager: test-only, never use this pattern in production code.
    sslContext.init(null, new TrustManager[]{new X509TrustManager() {
      @Override
      public void checkClientTrusted(X509Certificate[] certs, String s) {
        // nothing
      }
      @Override
      public void checkServerTrusted(X509Certificate[] certs, String s) {
        // nothing
      }
      @Override
      public X509Certificate[] getAcceptedIssuers() {
        return new X509Certificate[0];
      }
    } },
        null);
    SocketFactory socketFactory = sslContext.getSocketFactory();
    // The extra context entries switch the source itself into SSL mode.
    Context context = new Context();
    context.put("ssl", "true");
    context.put("keystore", "src/test/resources/server.flume-keystore.p12");
    context.put("keystore-password", "password");
    context.put("keystore-type", "PKCS12");
    MultiportSyslogTCPSource source = new MultiportSyslogTCPSource();
    Channel channel = new MemoryChannel();
    channel.setName("MultiPortSSL");
    List<Event> channelEvents = new ArrayList<>();
    int numPorts = 10;
    List<Socket> socketList = new ArrayList<>();
    List<Integer> portList = testNPorts(source, channel, channelEvents,
        numPorts, null, getSSLEventSender(socketFactory, socketList), context);
    //Since events can arrive out of order, search for each event in the array
    processEvents(channelEvents, numPorts, portList);
    closeSockets(socketList);
    source.stop();
  }
private void closeSockets(List<Socket> socketList) {
socketList.forEach((socket) -> {
try {
socket.close();
} catch (IOException ioe) {
LOGGER.warn("Error closing socket: {}", ioe.getMessage());
}
});
}
private BiConsumer<Integer, byte[]> getSSLEventSender(SocketFactory socketFactory,
final List<Socket> socketList) {
return (port, event) -> {
try {
Socket syslogSocket = socketFactory.createSocket(InetAddress.getLocalHost(), port);
socketList.add(syslogSocket);
syslogSocket.getOutputStream().write(event);
} catch (Exception e) {
e.printStackTrace();
}
};
}
private BiConsumer<Integer, byte[]> getSimpleEventSender(final List<Socket> socketList) {
return (Integer port, byte[] event) -> {
try {
Socket syslogSocket = new Socket(InetAddress.getLocalHost(), port);
socketList.add(syslogSocket);
syslogSocket.getOutputStream().write(event);
} catch (IOException e) {
e.printStackTrace();
}
};
}
private void processEvents(List<Event> channelEvents, int numPorts, List<Integer> portList) {
for (int i = 0; i < numPorts ; i++) {
Iterator<Event> iter = channelEvents.iterator();
while (iter.hasNext()) {
Event e = iter.next();
Map<String, String> headers = e.getHeaders();
// rely on port to figure out which event it is
Integer port = null;
if (headers.containsKey("port")) {
port = Integer.parseInt(headers.get("port"));
}
iter.remove();
Assert.assertEquals("Timestamps must match",
String.valueOf(time.getMillis()), headers.get("timestamp"));
String host2 = headers.get("host");
Assert.assertEquals(host1, host2);
if (port != null) {
int num = portList.indexOf(port);
Assert.assertEquals(data1 + " " + String.valueOf(num),
new String(e.getBody()));
}
}
}
}
  /**
   * Test the reassembly of a single line across multiple packets.
   */
  @Test
  public void testFragmented() throws CharacterCodingException {
    final int maxLen = 100;
    IoBuffer savedBuf = IoBuffer.allocate(maxLen);
    String origMsg = "<1>- - blah blam foo\n";
    // Split one syslog line into three arbitrary fragments, simulating TCP
    // packet boundaries falling mid-message.
    IoBuffer buf1 = IoBuffer.wrap(
        origMsg.substring(0, 11).getBytes(Charsets.UTF_8));
    IoBuffer buf2 = IoBuffer.wrap(
        origMsg.substring(11, 16).getBytes(Charsets.UTF_8));
    IoBuffer buf3 = IoBuffer.wrap(
        origMsg.substring(16, 21).getBytes(Charsets.UTF_8));
    LineSplitter lineSplitter = new LineSplitter(maxLen);
    ParsedBuffer parsedLine = new ParsedBuffer();
    Assert.assertFalse("Incomplete line should not be parsed",
        lineSplitter.parseLine(buf1, savedBuf, parsedLine));
    Assert.assertFalse("Incomplete line should not be parsed",
        lineSplitter.parseLine(buf2, savedBuf, parsedLine));
    Assert.assertTrue("Completed line should be parsed",
        lineSplitter.parseLine(buf3, savedBuf, parsedLine));
    // the fragmented message should now be reconstructed
    Assert.assertEquals(origMsg.trim(),
        parsedLine.buffer.getString(Charsets.UTF_8.newDecoder()));
    parsedLine.buffer.rewind();
    MultiportSyslogHandler handler = new MultiportSyslogHandler(
        maxLen, 100, null, null, null, null, null,
        new ThreadSafeDecoder(Charsets.UTF_8),
        new ConcurrentHashMap<Integer, ThreadSafeDecoder>(),null);
    Event event = handler.parseEvent(parsedLine, Charsets.UTF_8.newDecoder());
    String body = new String(event.getBody(), Charsets.UTF_8);
    // substring(7) strips the 7-char "<1>- - " syslog header, leaving the body.
    Assert.assertEquals("Event body incorrect",
        origMsg.trim().substring(7), body);
  }
  /**
   * Test parser handling of different character sets.
   *
   * All three sample bodies round-trip under both UTF-8 and ISO-8859-1;
   * a deliberately corrupted UTF-8 byte sequence must still produce an
   * event, but flagged INVALID and counted as a read failure.
   */
  @Test
  public void testCharsetParsing() throws FileNotFoundException, IOException {
    String header = "<10>2012-08-11T01:01:01Z localhost ";
    String enBody = "Yarf yarf yarf";
    String enMsg = header + enBody;
    String frBody = "Comment " + "\u00EA" + "tes-vous?";
    String frMsg = header + frBody;
    String esBody = "¿Cómo estás?";
    String esMsg = header + esBody;
    // defaults to UTF-8
    MultiportSyslogHandler handler = new MultiportSyslogHandler(
        1000, 10, new ChannelProcessor(new ReplicatingChannelSelector()),
        new SourceCounter("test"), null, null, null,
        new ThreadSafeDecoder(Charsets.UTF_8),
        new ConcurrentHashMap<Integer, ThreadSafeDecoder>(),null);
    ParsedBuffer parsedBuf = new ParsedBuffer();
    parsedBuf.incomplete = false;
    // should be able to encode/decode any of these messages in UTF-8 or ISO
    String[] bodies = { enBody, esBody, frBody };
    String[] msgs = { enMsg, esMsg, frMsg };
    Charset[] charsets = { Charsets.UTF_8, Charsets.ISO_8859_1 };
    for (Charset charset : charsets) {
      for (int i = 0; i < msgs.length; i++) {
        String msg = msgs[i];
        String body = bodies[i];
        parsedBuf.buffer = IoBuffer.wrap(msg.getBytes(charset));
        Event evt = handler.parseEvent(parsedBuf, charset.newDecoder());
        String result = new String(evt.getBody(), charset);
        // this doesn't work with non-UTF-8 chars... not sure why...
        Assert.assertEquals(charset + " parse error: " + msg, body, result);
        // a clean parse must not set the syslog status header
        Assert.assertNull(
            evt.getHeaders().get(SyslogUtils.EVENT_STATUS));
      }
    }
    // Construct an invalid UTF-8 sequence.
    // The parser should still generate an Event, but mark it as INVALID.
    byte[] badUtf8Seq = enMsg.getBytes(Charsets.ISO_8859_1);
    int badMsgLen = badUtf8Seq.length;
    badUtf8Seq[badMsgLen - 2] = (byte)0xFE; // valid ISO-8859-1, invalid UTF-8
    badUtf8Seq[badMsgLen - 1] = (byte)0xFF; // valid ISO-8859-1, invalid UTF-8
    parsedBuf.buffer = IoBuffer.wrap(badUtf8Seq);
    Event evt = handler.parseEvent(parsedBuf, Charsets.UTF_8.newDecoder());
    Assert.assertEquals("event body: " +
        new String(evt.getBody(), Charsets.ISO_8859_1) +
        " and my default charset = " + Charset.defaultCharset() +
        " with event = " + evt,
        SyslogUtils.SyslogStatus.INVALID.getSyslogStatus(),
        evt.getHeaders().get(SyslogUtils.EVENT_STATUS));
    Assert.assertArrayEquals("Raw message data should be kept in body of event",
        badUtf8Seq, evt.getBody());
    // the decode failure must also be reflected in the read-failure counter
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(handler, "sourceCounter");
    Assert.assertEquals(1, sc.getEventReadFail());
  }
@Test
public void testHandlerGenericFail() throws Exception {
// defaults to UTF-8
MultiportSyslogHandler handler = new MultiportSyslogHandler(
1000, 10, new ChannelProcessor(new ReplicatingChannelSelector()),
new SourceCounter("test"), null, null, null,
new ThreadSafeDecoder(Charsets.UTF_8),
new ConcurrentHashMap<Integer, ThreadSafeDecoder>(), null);
handler.exceptionCaught(null, new RuntimeException("dummy"));
SourceCounter sc = (SourceCounter) Whitebox.getInternalState(handler, "sourceCounter");
Assert.assertEquals(1, sc.getGenericProcessingFail());
}
// helper function
private static Event takeEvent(Channel channel) {
Transaction txn = channel.getTransaction();
txn.begin();
Event evt = channel.take();
txn.commit();
txn.close();
return evt;
}
  /**
   * Test that different charsets are parsed by different ports correctly.
   *
   * Two mock MINA sessions simulate listeners on ports 10001 (ISO-8859-1)
   * and 10002 (UTF-8); bytes that are legal ISO-8859-1 but illegal UTF-8
   * must parse cleanly on the ISO port and be flagged INVALID on the UTF-8
   * port.
   */
  @Test
  public void testPortCharsetHandling() throws UnknownHostException, Exception {
    ///////////////////////////////////////////////////////
    // port setup
    InetAddress localAddr = InetAddress.getLocalHost();
    DefaultIoSessionDataStructureFactory dsFactory =
        new DefaultIoSessionDataStructureFactory();
    // one faker on port 10001
    int port1 = 10001;
    NioSession session1 = mock(NioSession.class);
    session1.setAttributeMap(dsFactory.getAttributeMap(session1));
    SocketAddress sockAddr1 = new InetSocketAddress(localAddr, port1);
    when(session1.getLocalAddress()).thenReturn(sockAddr1);
    // another faker on port 10002
    int port2 = 10002;
    NioSession session2 = mock(NioSession.class);
    session2.setAttributeMap(dsFactory.getAttributeMap(session2));
    SocketAddress sockAddr2 = new InetSocketAddress(localAddr, port2);
    when(session2.getLocalAddress()).thenReturn(sockAddr2);
    // set up expected charsets per port
    ConcurrentMap<Integer, ThreadSafeDecoder> portCharsets =
        new ConcurrentHashMap<Integer, ThreadSafeDecoder>();
    portCharsets.put(port1, new ThreadSafeDecoder(Charsets.ISO_8859_1));
    portCharsets.put(port2, new ThreadSafeDecoder(Charsets.UTF_8));
    ///////////////////////////////////////////////////////
    // channel / source setup
    // set up channel to receive events
    MemoryChannel chan = new MemoryChannel();
    chan.configure(new Context());
    chan.start();
    ReplicatingChannelSelector sel = new ReplicatingChannelSelector();
    sel.setChannels(Lists.<Channel>newArrayList(chan));
    ChannelProcessor chanProc = new ChannelProcessor(sel);
    // defaults to UTF-8
    MultiportSyslogHandler handler = new MultiportSyslogHandler(
        1000, 10, chanProc, new SourceCounter("test"), null, null, null,
        new ThreadSafeDecoder(Charsets.UTF_8), portCharsets, null);
    // initialize buffers
    handler.sessionCreated(session1);
    handler.sessionCreated(session2);
    ///////////////////////////////////////////////////////
    // event setup
    // Create events of varying charsets.
    String header = "<10>2012-08-17T02:14:00-07:00 192.168.1.110 ";
    // These chars encode under ISO-8859-1 as illegal bytes under UTF-8.
    String dangerousChars = "þÿÀÁ";
    ///////////////////////////////////////////////////////
    // encode and send them through the message handler
    String msg;
    IoBuffer buf;
    Event evt;
    // valid ISO-8859-1 on the right (ISO-8859-1) port
    msg = header + dangerousChars + "\n";
    buf = IoBuffer.wrap(msg.getBytes(Charsets.ISO_8859_1));
    handler.messageReceived(session1, buf);
    evt = takeEvent(chan);
    Assert.assertNotNull("Event vanished!", evt);
    Assert.assertNull(evt.getHeaders().get(SyslogUtils.EVENT_STATUS));
    // valid ISO-8859-1 on the wrong (UTF-8) port
    msg = header + dangerousChars + "\n";
    buf = IoBuffer.wrap(msg.getBytes(Charsets.ISO_8859_1));
    handler.messageReceived(session2, buf);
    evt = takeEvent(chan);
    Assert.assertNotNull("Event vanished!", evt);
    Assert.assertEquals("Expected invalid event due to character encoding",
        SyslogUtils.SyslogStatus.INVALID.getSyslogStatus(),
        evt.getHeaders().get(SyslogUtils.EVENT_STATUS));
    // valid UTF-8 on the right (UTF-8) port
    msg = header + dangerousChars + "\n";
    buf = IoBuffer.wrap(msg.getBytes(Charsets.UTF_8));
    handler.messageReceived(session2, buf);
    evt = takeEvent(chan);
    Assert.assertNotNull("Event vanished!", evt);
    Assert.assertNull(evt.getHeaders().get(SyslogUtils.EVENT_STATUS));
    // exactly one decode failure (the wrong-port message) should be counted
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(handler, "sourceCounter");
    Assert.assertEquals(1, sc.getEventReadFail());
  }
  /**
   * A ChannelException thrown by the first processEventBatch() call must be
   * accounted as exactly one channel-write failure on the source counter.
   */
  @Test
  public void testErrorCounterChannelWriteFail() throws Exception {
    MultiportSyslogTCPSource source = new MultiportSyslogTCPSource();
    Channel channel = new MemoryChannel();
    List<Event> channelEvents = new ArrayList<>();
    // mock processor: first batch throws, subsequent batches succeed
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    doThrow(new ChannelException("dummy")).doNothing().when(cp)
        .processEventBatch(anyListOf(Event.class));
    List<Socket> socketList = new ArrayList<>();
    try {
      testNPorts(source, channel, channelEvents, 1, cp,
          getSimpleEventSender(socketList), new Context());
    } catch (Exception e) {
      // best-effort: the injected failure may propagate here; the counter
      // assertion below is the actual check
    }
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(source, "sourceCounter");
    closeSockets(socketList);
    Assert.assertEquals(1, sc.getChannelWriteFail());
    source.stop();
  }
@Test
public void testClientHeaders() throws IOException {
MultiportSyslogTCPSource source = new MultiportSyslogTCPSource();
Channel channel = new MemoryChannel();
Configurables.configure(channel, new Context());
List<Channel> channels = Lists.newArrayList();
channels.add(channel);
ChannelSelector rcs = new ReplicatingChannelSelector();
rcs.setChannels(channels);
source.setChannelProcessor(new ChannelProcessor(rcs));
int port = getFreePort();
Context context = new Context();
InetAddress loopbackAddress = InetAddress.getLoopbackAddress();
context.put("host", loopbackAddress.getHostAddress());
context.put("ports", String.valueOf(port));
context.put("clientIPHeader", TEST_CLIENT_IP_HEADER);
context.put("clientHostnameHeader", TEST_CLIENT_HOSTNAME_HEADER);
source.configure(context);
source.start();
//create a socket to send a test event
Socket syslogSocket = new Socket(loopbackAddress.getHostAddress(), port);
syslogSocket.getOutputStream().write(getEvent(0));
Event e = takeEvent(channel);
source.stop();
Map<String, String> headers = e.getHeaders();
checkHeader(headers, TEST_CLIENT_IP_HEADER, loopbackAddress.getHostAddress());
checkHeader(headers, TEST_CLIENT_HOSTNAME_HEADER, loopbackAddress.getHostName());
}
private static void checkHeader(Map<String, String> headers, String headerName,
String expectedValue) {
assertTrue("Missing event header: " + headerName, headers.containsKey(headerName));
String headerValue = headers.get(headerName);
if (TEST_CLIENT_HOSTNAME_HEADER.equals(headerName)) {
if (!TestSyslogUtils.isLocalHost(headerValue)) {
fail("Expected either 'localhost' or '127.0.0.1' but got " + headerValue);
}
} else {
assertEquals("Event header value does not match: " + headerName,
expectedValue, headerValue);
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.SystemUtils;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.lifecycle.LifecycleException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.nio.charset.Charset;
import java.util.List;
import java.util.regex.Pattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Tests for {@code ExecSource}: plain command execution, execution through a
 * shell, batching/timeout behaviour, restart throttling, monitoring counters
 * and clean process shutdown.
 */
public class TestExecSource {
  private AbstractSource source;
  private Channel channel = new MemoryChannel();
  private Context context = new Context();
  private ChannelSelector rcs = new ReplicatingChannelSelector();

  @Before
  public void setUp() {
    context.put("keep-alive", "1");
    context.put("capacity", "1000");
    context.put("transactionCapacity", "1000");
    Configurables.configure(channel, context);
    rcs.setChannels(Lists.newArrayList(channel));
    source = new ExecSource();
    source.setChannelProcessor(new ChannelProcessor(rcs));
  }

  @After
  public void tearDown() {
    source.stop();
    // Remove the MBean registered for Monitoring
    ObjectName objName = null;
    try {
      objName = new ObjectName("org.apache.flume.source"
          + ":type=" + source.getName());
      ManagementFactory.getPlatformMBeanServer().unregisterMBean(objName);
    } catch (Exception ex) {
      System.out.println("Failed to unregister the monitored counter: "
          + objName + ex.getMessage());
    }
  }

  /**
   * Pipes a random 10-line file through cat/type and verifies the channel
   * output is byte-identical (CRC32) to the input.
   */
  @Test
  public void testProcess() throws InterruptedException, LifecycleException,
      EventDeliveryException, IOException {
    // Generates a random files for input\output
    File inputFile = File.createTempFile("input", null);
    File outputFile = File.createTempFile("ouput", null);
    FileUtils.forceDeleteOnExit(inputFile);
    FileUtils.forceDeleteOnExit(outputFile);
    // Generates input file with a random data set (10 lines, 200 characters each)
    FileOutputStream outputStream1 = new FileOutputStream(inputFile);
    for (int i = 0; i < 10; i++) {
      outputStream1.write(RandomStringUtils.randomAlphanumeric(200).getBytes());
      outputStream1.write('\n');
    }
    outputStream1.close();
    String command = SystemUtils.IS_OS_WINDOWS ?
        String.format("cmd /c type %s", inputFile.getAbsolutePath()) :
        String.format("cat %s", inputFile.getAbsolutePath());
    context.put("command", command);
    context.put("keep-alive", "1");
    context.put("capacity", "1000");
    context.put("transactionCapacity", "1000");
    Configurables.configure(source, context);
    source.start();
    Thread.sleep(2000);
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event;
    FileOutputStream outputStream = new FileOutputStream(outputFile);
    while ((event = channel.take()) != null) {
      outputStream.write(event.getBody());
      outputStream.write('\n');
    }
    outputStream.close();
    transaction.commit();
    transaction.close();
    Assert.assertEquals(FileUtils.checksumCRC32(inputFile),
        FileUtils.checksumCRC32(outputFile));
  }

  @Test
  public void testShellCommandSimple() throws InterruptedException, LifecycleException,
      EventDeliveryException, IOException {
    if (SystemUtils.IS_OS_WINDOWS) {
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command",
          "1..5", new String[] { "1", "2", "3", "4", "5" });
    } else {
      runTestShellCmdHelper("/bin/bash -c", "seq 5",
          new String[] { "1", "2", "3", "4", "5" });
    }
  }

  @Test
  public void testShellCommandBackTicks() throws InterruptedException, LifecycleException,
      EventDeliveryException, IOException {
    // command with backticks
    if (SystemUtils.IS_OS_WINDOWS) {
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command", "$(1..5)",
          new String[] { "1", "2", "3", "4", "5" });
    } else {
      runTestShellCmdHelper("/bin/bash -c", "echo `seq 5`",
          new String[] { "1 2 3 4 5" });
      runTestShellCmdHelper("/bin/bash -c", "echo $(seq 5)",
          new String[] { "1 2 3 4 5" });
    }
  }

  @Test
  public void testShellCommandComplex()
      throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    // command with wildcards & pipes
    String[] expected = {"1234", "abcd", "ijk", "xyz", "zzz"};
    // pipes
    if (SystemUtils.IS_OS_WINDOWS) {
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command",
          "'zzz','1234','xyz','abcd','ijk' | sort", expected);
    } else {
      runTestShellCmdHelper("/bin/bash -c",
          "echo zzz 1234 xyz abcd ijk | xargs -n1 echo | sort -f", expected);
    }
  }

  @Test
  public void testShellCommandScript() throws InterruptedException, LifecycleException,
      EventDeliveryException, IOException {
    // mini script
    if (SystemUtils.IS_OS_WINDOWS) {
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command",
          "foreach ($i in 1..5) { $i }",
          new String[] { "1", "2", "3", "4", "5" });
      // shell arithmetic
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command",
          "if(2+2 -gt 3) { 'good' } else { 'not good' } ",
          new String[] { "good" });
    } else {
      runTestShellCmdHelper("/bin/bash -c", "for i in {1..5}; do echo $i;done",
          new String[] { "1", "2", "3", "4", "5" });
      // shell arithmetic
      runTestShellCmdHelper("/bin/bash -c",
          "if ((2+2>3)); " + "then echo good; else echo not good; fi",
          new String[] { "good" });
    }
  }

  /**
   * Runs three commands read from a resource file to verify that quoting
   * and escape sequences survive the shell invocation path.
   */
  @Test
  public void testShellCommandEmbeddingAndEscaping()
      throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    // mini script
    String fileName = SystemUtils.IS_OS_WINDOWS ?
        "src\\test\\resources\\test_command.ps1" :
        "src/test/resources/test_command.txt";
    BufferedReader reader = new BufferedReader(new FileReader(fileName));
    try {
      String shell = SystemUtils.IS_OS_WINDOWS ?
          "powershell -ExecutionPolicy Unrestricted -command" :
          "/bin/bash -c";
      String command1 = reader.readLine();
      Assert.assertNotNull(command1);
      String[] output1 = new String[] { "'1'", "\"2\"", "\\3", "\\4" };
      runTestShellCmdHelper(shell, command1, output1);
      String command2 = reader.readLine();
      Assert.assertNotNull(command2);
      String[] output2 = new String[] { "1", "2", "3", "4", "5" };
      runTestShellCmdHelper(shell, command2, output2);
      String command3 = reader.readLine();
      Assert.assertNotNull(command3);
      String[] output3 = new String[] { "2", "3", "4", "5", "6" };
      runTestShellCmdHelper(shell, command3, output3);
    } finally {
      reader.close();
    }
  }

  /**
   * After emitting five events, the source's JMX counters must report
   * Type=SOURCE and received/accepted counts of 5.
   */
  @Test
  public void testMonitoredCounterGroup()
      throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    // mini script
    if (SystemUtils.IS_OS_WINDOWS) {
      runTestShellCmdHelper("powershell -ExecutionPolicy Unrestricted -command",
          "foreach ($i in 1..5) { $i }",
          new String[] { "1", "2", "3", "4", "5" });
    } else {
      runTestShellCmdHelper("/bin/bash -c", "for i in {1..5}; do echo $i;done",
          new String[] { "1", "2", "3", "4", "5" });
    }
    ObjectName objName = null;
    try {
      objName = new ObjectName("org.apache.flume.source" + ":type=" + source.getName());
      MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
      String[] strAtts = { "Type", "EventReceivedCount", "EventAcceptedCount" };
      AttributeList attrList = mbeanServer.getAttributes(objName, strAtts);
      Assert.assertNotNull(attrList.get(0));
      Assert.assertEquals("Expected Value: Type", "Type",
          ((Attribute) attrList.get(0)).getName());
      Assert.assertEquals("Expected Value: SOURCE", "SOURCE",
          ((Attribute) attrList.get(0)).getValue());
      Assert.assertNotNull(attrList.get(1));
      Assert.assertEquals("Expected Value: EventReceivedCount", "EventReceivedCount",
          ((Attribute) attrList.get(1)).getName());
      Assert.assertEquals("Expected Value: 5", "5",
          ((Attribute) attrList.get(1)).getValue().toString());
      Assert.assertNotNull(attrList.get(2));
      Assert.assertEquals("Expected Value: EventAcceptedCount", "EventAcceptedCount",
          ((Attribute) attrList.get(2)).getName());
      Assert.assertEquals("Expected Value: 5", "5",
          ((Attribute) attrList.get(2)).getValue().toString());
    } catch (Exception ex) {
      System.out.println("Unable to retreive the monitored counter: " + objName + ex.getMessage());
    }
  }

  /**
   * With a large batch size and a 750ms batch timeout, events written by a
   * tailed process must be flushed to the channel by the timeout alone.
   */
  @Test
  public void testBatchTimeout()
      throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    String filePath = "/tmp/flume-execsource." + Thread.currentThread().getId();
    String eventBody = "TestMessage";
    FileOutputStream outputStream = new FileOutputStream(filePath);
    context.put(ExecSourceConfigurationConstants.CONFIG_BATCH_SIZE, "50000");
    context.put(ExecSourceConfigurationConstants.CONFIG_BATCH_TIME_OUT, "750");
    context.put("shell", SystemUtils.IS_OS_WINDOWS ?
        "powershell -ExecutionPolicy Unrestricted -command" :
        "/bin/bash -c");
    context.put("command", SystemUtils.IS_OS_WINDOWS ?
        "Get-Content " + filePath +
        " | Select-Object -Last 10" :
        ("tail -f " + filePath));
    Configurables.configure(source, context);
    source.start();
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (int lineNumber = 0; lineNumber < 3; lineNumber++) {
      outputStream.write((eventBody).getBytes());
      outputStream.write(String.valueOf(lineNumber).getBytes());
      outputStream.write('\n');
      outputStream.flush();
    }
    outputStream.close();
    // wait two batch-timeout periods so the partial batch is flushed
    Thread.sleep(1500);
    for (int i = 0; i < 3; i++) {
      Event event = channel.take();
      assertNotNull(event);
      assertNotNull(event.getBody());
      assertEquals(eventBody + String.valueOf(i), new String(event.getBody()));
    }
    transaction.commit();
    transaction.close();
    source.stop();
    File file = new File(filePath);
    FileUtils.forceDelete(file);
  }

  /**
   * Shared driver: configures the source with the given shell and command,
   * waits for the command to finish, then asserts the channel contents
   * equal {@code expectedOutput} in order.
   */
  private void runTestShellCmdHelper(String shell, String command, String[] expectedOutput)
      throws InterruptedException, LifecycleException, EventDeliveryException, IOException {
    context.put("shell", shell);
    context.put("command", command);
    Configurables.configure(source, context);
    source.start();
    // Some commands might take longer to complete, specially on Windows
    // or on slow environments (e.g. Travis CI).
    Thread.sleep(2500);
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    try {
      List<String> output = Lists.newArrayList();
      Event event;
      while ((event = channel.take()) != null) {
        output.add(new String(event.getBody(), Charset.defaultCharset()));
      }
      transaction.commit();
      Assert.assertArrayEquals(expectedOutput, output.toArray(new String[] {}));
    } finally {
      transaction.close();
      source.stop();
    }
  }

  /**
   * With restart enabled and a 10ms throttle, a short-lived command must be
   * re-run repeatedly; five events should arrive well inside 10 seconds.
   */
  @Test
  public void testRestart() throws InterruptedException, LifecycleException,
      EventDeliveryException, IOException {
    context.put(ExecSourceConfigurationConstants.CONFIG_RESTART_THROTTLE, "10");
    context.put(ExecSourceConfigurationConstants.CONFIG_RESTART, "true");
    context.put("command", SystemUtils.IS_OS_WINDOWS ? "cmd /c echo flume" : "echo flume");
    Configurables.configure(source, context);
    source.start();
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    long start = System.currentTimeMillis();
    for (int i = 0; i < 5; i++) {
      Event event = channel.take();
      assertNotNull(event);
      assertNotNull(event.getBody());
      assertEquals("flume", new String(event.getBody(), Charsets.UTF_8));
    }
    // ensure restartThrottle was turned down as expected
    assertTrue(System.currentTimeMillis() - start < 10000L);
    transaction.commit();
    transaction.close();
    source.stop();
  }

  /**
   * Tests to make sure that the shutdown mechanism works. There are races
   * in this test if the system has another sleep command running with the
   * same sleep interval but we pick rarely used sleep times and make an
   * effort to detect if our sleep time is already in use. Note the
   * ps -ef command should work on both macs and linux.
   */
  @Test
  public void testShutdown() throws Exception {
    int seconds = 272; // pick a rare sleep time
    // now find one that is not in use
    boolean searchForCommand = true;
    while (searchForCommand) {
      searchForCommand = false;
      String command = SystemUtils.IS_OS_WINDOWS ? "cmd /c sleep " + seconds : "sleep " + seconds;
      // FIX: "\\b" is the regex word boundary; the previous "\b" was a
      // backspace character, so the pattern could never match ps output.
      String searchTxt = SystemUtils.IS_OS_WINDOWS ? "sleep.exe" : "\\b" + command + "\\b";
      Pattern pattern = Pattern.compile(searchTxt);
      for (String line : exec(SystemUtils.IS_OS_WINDOWS ?
          "cmd /c tasklist /FI \"SESSIONNAME eq Console\"" :
          "ps -ef")) {
        if (pattern.matcher(line).find()) {
          seconds++;
          searchForCommand = true;
          break;
        }
      }
    }
    // yes in the mean time someone could use our sleep time
    // but this should be a fairly rare scenario
    String command = "sleep " + seconds;
    // FIX: word-boundary anchors, not backspace characters (see above)
    Pattern pattern = Pattern.compile("\\b" + command + "\\b");
    context.put(ExecSourceConfigurationConstants.CONFIG_RESTART, "false");
    context.put("command", command);
    Configurables.configure(source, context);
    source.start();
    Thread.sleep(1000L);
    source.stop();
    Thread.sleep(1000L);
    // after stop(), the sleep process must no longer be running
    for (String line : exec(SystemUtils.IS_OS_WINDOWS ?
        "cmd /c tasklist /FI \"SESSIONNAME eq Console\"" :
        "ps -ef")) {
      if (pattern.matcher(line).find()) {
        Assert.fail("Found [" + line + "]");
      }
    }
  }

  /**
   * Runs an external command, returns its stdout as a list of lines, and
   * throws IllegalStateException on a non-zero exit code.
   */
  private static List<String> exec(String command) throws Exception {
    String[] commandArgs = command.split("\\s+");
    Process process = new ProcessBuilder(commandArgs).start();
    BufferedReader reader = null;
    try {
      reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
      List<String> result = Lists.newArrayList();
      String line;
      while ((line = reader.readLine()) != null) {
        result.add(line);
      }
      return result;
    } finally {
      int exit = process.waitFor();
      process.destroy();
      if (reader != null) {
        reader.close();
      }
      if (exit != 0) {
        throw new IllegalStateException("Command [" + command + "] exited with " + exit);
      }
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.flume.source;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelFullException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.client.avro.ReliableSpoolingFileEventReader;
import org.apache.flume.conf.Configurables;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mockito;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class TestSpoolDirectorySource {
static SpoolDirectorySource source;
static MemoryChannel channel;
private File tmpDir;
@Before
public void setUp() {
source = new SpoolDirectorySource();
channel = new MemoryChannel();
Configurables.configure(channel, new Context());
List<Channel> channels = new ArrayList<Channel>();
channels.add(channel);
ChannelSelector rcs = new ReplicatingChannelSelector();
rcs.setChannels(channels);
source.setChannelProcessor(new ChannelProcessor(rcs));
tmpDir = Files.createTempDir();
}
  @After
  public void tearDown() {
    // Recursively remove the spool directory contents, then the directory
    // itself, so every test starts from a clean temp dir.
    deleteFiles(tmpDir);
    tmpDir.delete();
  }
/**
* Helper method to recursively clean up testing directory
*
* @param directory the directory to clean up
*/
private void deleteFiles(File directory) {
for (File f : directory.listFiles()) {
if (f.isDirectory()) {
deleteFiles(f);
f.delete();
} else {
f.delete();
}
}
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidSortOrder() {
Context context = new Context();
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
"undefined");
Configurables.configure(source, context);
}
@Test
public void testValidSortOrder() {
Context context = new Context();
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
"oLdESt");
Configurables.configure(source, context);
context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
"yoUnGest");
Configurables.configure(source, context);
context.put(SpoolDirectorySourceConfigurationConstants.CONSUME_ORDER,
"rAnDom");
Configurables.configure(source, context);
}
  /**
   * Verifies that FILENAME_HEADER / FILENAME_HEADER_KEY cause the absolute
   * path of the spooled file to be stamped onto each event under the
   * configured header key.
   */
  @Test
  public void testPutFilenameHeader() throws IOException, InterruptedException {
    Context context = new Context();
    File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
    Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
        f1, Charsets.UTF_8);
    context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
        tmpDir.getAbsolutePath());
    context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER,
        "true");
    context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER_KEY,
        "fileHeaderKeyTest");
    Configurables.configure(source, context);
    source.start();
    // Poll until all eight lines are accepted.
    // NOTE(review): no upper bound — a broken source would hang the test here.
    while (source.getSourceCounter().getEventAcceptedCount() < 8) {
      Thread.sleep(10);
    }
    Transaction txn = channel.getTransaction();
    txn.begin();
    Event e = channel.take();
    Assert.assertNotNull("Event must not be null", e);
    Assert.assertNotNull("Event headers must not be null", e.getHeaders());
    Assert.assertNotNull(e.getHeaders().get("fileHeaderKeyTest"));
    Assert.assertEquals(f1.getAbsolutePath(),
        e.getHeaders().get("fileHeaderKeyTest"));
    txn.commit();
    txn.close();
  }
/**
* Tests if SpoolDirectorySource sets basename headers on events correctly
*/
@Test
public void testPutBasenameHeader() throws IOException, InterruptedException {
Context context = new Context();
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n",
f1, Charsets.UTF_8);
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
context.put(SpoolDirectorySourceConfigurationConstants.BASENAME_HEADER,
"true");
context.put(SpoolDirectorySourceConfigurationConstants.BASENAME_HEADER_KEY,
"basenameHeaderKeyTest");
Configurables.configure(source, context);
source.start();
while (source.getSourceCounter().getEventAcceptedCount() < 8) {
Thread.sleep(10);
}
Transaction txn = channel.getTransaction();
txn.begin();
Event e = channel.take();
Assert.assertNotNull("Event must not be null", e);
Assert.assertNotNull("Event headers must not be null", e.getHeaders());
Assert.assertNotNull(e.getHeaders().get("basenameHeaderKeyTest"));
Assert.assertEquals(f1.getName(),
e.getHeaders().get("basenameHeaderKeyTest"));
txn.commit();
txn.close();
}
  /**
   * Tests SpoolDirectorySource with parameter recursion set to true:
   * a file three directories deep under the spool root must be discovered,
   * its filename header must end with the expected name, and the
   * concatenated event bodies must reproduce the original file content.
   */
  @Test
  public void testRecursion_SetToTrue() throws IOException, InterruptedException {
    File subDir = new File(tmpDir, "directorya/directoryb/directoryc");
    boolean directoriesCreated = subDir.mkdirs();
    Assert.assertTrue("source directories must be created", directoriesCreated);
    final String FILE_NAME = "recursion_file.txt";
    File f1 = new File(subDir, FILE_NAME);
    String origBody = "file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n";
    Files.write(origBody, f1, Charsets.UTF_8);
    Context context = new Context();
    context.put(SpoolDirectorySourceConfigurationConstants.RECURSIVE_DIRECTORY_SEARCH,
        "true"); // enable recursion, so we should find the file we created above
    context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
        tmpDir.getAbsolutePath()); // spool set to root dir
    context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER,
        "true"); // put the file name in the "file" header
    Configurables.configure(source, context);
    source.start();
    Assert.assertTrue("Recursion setting in source is correct",
        source.getRecursiveDirectorySearch());
    Transaction txn = channel.getTransaction();
    txn.begin();
    // poll for up to ~300ms for the first event to arrive
    long startTime = System.currentTimeMillis();
    Event e = null;
    while (System.currentTimeMillis() - startTime < 300 && e == null) {
      e = channel.take();
      Thread.sleep(10);
    }
    Assert.assertNotNull("Event must not be null", e);
    Assert.assertNotNull("Event headers must not be null", e.getHeaders());
    Assert.assertTrue("File header value did not end with expected filename",
        e.getHeaders().get("file").endsWith(FILE_NAME));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    do { // collecting the whole body
      baos.write(e.getBody());
      baos.write('\n'); // newline characters are consumed in the process
      e = channel.take();
    } while (e != null);
    Assert.assertEquals("Event body is correct",
        Arrays.toString(origBody.getBytes()),
        Arrays.toString(baos.toByteArray()));
    txn.commit();
    txn.close();
  }
/**
* This test will place a file into a sub-directory of the spool directory
* since the recursion setting is false there should not be any transactions
* to take from the channel. The 500 ms is arbitrary and simply follows
* what the other tests use to "assume" that since there is no data then this worked.
*/
@Test
public void testRecursion_SetToFalse() throws IOException, InterruptedException {
Context context = new Context();
File subDir = new File(tmpDir, "directory");
boolean directoriesCreated = subDir.mkdirs();
Assert.assertTrue("source directories must be created", directoriesCreated);
File f1 = new File(subDir.getAbsolutePath() + "/file1.txt");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n", f1, Charsets.UTF_8);
context.put(SpoolDirectorySourceConfigurationConstants.RECURSIVE_DIRECTORY_SEARCH,
"false");
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER,
"true");
context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER_KEY,
"fileHeaderKeyTest");
Configurables.configure(source, context);
source.start();
// check the source to ensure the setting has been set via the context object
Assert.assertFalse("Recursion setting in source is not set to false (this" +
"test does not want recursion enabled)", source.getRecursiveDirectorySearch());
Transaction txn = channel.getTransaction();
txn.begin();
long startTime = System.currentTimeMillis();
Event e = null;
while (System.currentTimeMillis() - startTime < 300 && e == null) {
e = channel.take();
Thread.sleep(10);
}
Assert.assertNull("Event must be null", e);
txn.commit();
txn.close();
}
@Test
public void testLifecycle() throws IOException, InterruptedException {
Context context = new Context();
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n", f1, Charsets.UTF_8);
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
Configurables.configure(source, context);
for (int i = 0; i < 10; i++) {
source.start();
Assert
.assertTrue("Reached start or error", LifecycleController.waitForOneOf(
source, LifecycleState.START_OR_ERROR));
Assert.assertEquals("Server is started", LifecycleState.START,
source.getLifecycleState());
source.stop();
Assert.assertTrue("Reached stop or error",
LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
Assert.assertEquals("Server is stopped", LifecycleState.STOP,
source.getLifecycleState());
}
}
@Test
public void testReconfigure() throws InterruptedException, IOException {
final int NUM_RECONFIGS = 20;
for (int i = 0; i < NUM_RECONFIGS; i++) {
Context context = new Context();
File file = new File(tmpDir.getAbsolutePath() + "/file-" + i);
Files.write("File " + i, file, Charsets.UTF_8);
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
Configurables.configure(source, context);
source.start();
Thread.sleep(TimeUnit.SECONDS.toMillis(1));
Transaction txn = channel.getTransaction();
txn.begin();
try {
Event event = channel.take();
String content = new String(event.getBody(), Charsets.UTF_8);
Assert.assertEquals("File " + i, content);
txn.commit();
} catch (Throwable t) {
txn.rollback();
} finally {
txn.close();
}
source.stop();
Assert.assertFalse("Fatal error on iteration " + i, source.hasFatalError());
}
}
  /**
   * Fills a tiny (capacity 2) channel and verifies the source survives the
   * resulting ChannelFullException: all eight input lines must eventually be
   * delivered as the test drains the channel two events at a time.
   */
  @Test
  public void testSourceDoesNotDieOnFullChannel() throws Exception {
    // Rebuild the channel with a capacity smaller than the input file so it
    // is guaranteed to fill up; keep-alive 0 makes puts fail immediately.
    Context chContext = new Context();
    chContext.put("capacity", "2");
    chContext.put("transactionCapacity", "2");
    chContext.put("keep-alive", "0");
    channel.stop();
    Configurables.configure(channel, chContext);
    channel.start();
    Context context = new Context();
    File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
    Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
        f1, Charsets.UTF_8);
    context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
        tmpDir.getAbsolutePath());
    context.put(SpoolDirectorySourceConfigurationConstants.BATCH_SIZE, "2");
    Configurables.configure(source, context);
    // Disable back-off so the source keeps retrying against the full channel.
    source.setBackOff(false);
    source.start();
    // Wait for the source to read enough events to fill up the channel.
    long startTime = System.currentTimeMillis();
    while (System.currentTimeMillis() - startTime < 5000 && !source.didHitChannelFullException()) {
      Thread.sleep(10);
    }
    Assert.assertTrue("Expected to hit ChannelFullException, but did not!",
        source.didHitChannelFullException());
    // Drain the channel in transactions of up to two takes until all eight
    // lines are recovered; i advances only on a successful (non-null) take.
    List<String> dataOut = Lists.newArrayList();
    for (int i = 0; i < 8; ) {
      Transaction tx = channel.getTransaction();
      tx.begin();
      Event e = channel.take();
      if (e != null) {
        dataOut.add(new String(e.getBody(), "UTF-8"));
        i++;
      }
      e = channel.take();
      if (e != null) {
        dataOut.add(new String(e.getBody(), "UTF-8"));
        i++;
      }
      tx.commit();
      tx.close();
    }
    Assert.assertEquals(8, dataOut.size());
    source.stop();
  }
@Test
public void testEndWithZeroByteFiles() throws IOException, InterruptedException {
Context context = new Context();
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\n", f1, Charsets.UTF_8);
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
File f3 = new File(tmpDir.getAbsolutePath() + "/file3");
File f4 = new File(tmpDir.getAbsolutePath() + "/file4");
Files.touch(f2);
Files.touch(f3);
Files.touch(f4);
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
Configurables.configure(source, context);
source.start();
// Need better way to ensure all files were processed.
Thread.sleep(5000);
Assert.assertFalse("Server did not error", source.hasFatalError());
Assert.assertEquals("Four messages were read",
4, source.getSourceCounter().getEventAcceptedCount());
source.stop();
}
@Test
public void testWithAllEmptyFiles()
throws InterruptedException, IOException {
Context context = new Context();
File[] f = new File[10];
for (int i = 0; i < 10; i++) {
f[i] = new File(tmpDir.getAbsolutePath() + "/file" + i);
Files.write(new byte[0], f[i]);
}
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER,
"true");
context.put(SpoolDirectorySourceConfigurationConstants.FILENAME_HEADER_KEY,
"fileHeaderKeyTest");
Configurables.configure(source, context);
source.start();
Thread.sleep(10);
for (int i = 0; i < 10; i++) {
Transaction txn = channel.getTransaction();
txn.begin();
Event e = channel.take();
Assert.assertNotNull("Event must not be null", e);
Assert.assertNotNull("Event headers must not be null", e.getHeaders());
Assert.assertNotNull(e.getHeaders().get("fileHeaderKeyTest"));
Assert.assertEquals(f[i].getAbsolutePath(),
e.getHeaders().get("fileHeaderKeyTest"));
Assert.assertArrayEquals(new byte[0], e.getBody());
txn.commit();
txn.close();
}
source.stop();
}
@Test
public void testWithEmptyAndDataFiles()
throws InterruptedException, IOException {
Context context = new Context();
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("some data".getBytes(), f1);
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
Files.write(new byte[0], f2);
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
Configurables.configure(source, context);
source.start();
Thread.sleep(10);
for (int i = 0; i < 2; i++) {
Transaction txn = channel.getTransaction();
txn.begin();
Event e = channel.take();
txn.commit();
txn.close();
}
Transaction txn = channel.getTransaction();
txn.begin();
Assert.assertNull(channel.take());
txn.commit();
txn.close();
source.stop();
}
private SourceCounter errorCounterCommonInit() {
SourceCounter sc = new SourceCounter("dummy");
sc.start();
Context context = new Context();
context.put(SpoolDirectorySourceConfigurationConstants.SPOOL_DIRECTORY,
tmpDir.getAbsolutePath());
Configurables.configure(source, context);
return sc;
}
  /**
   * Verifies that delivery failures are classified correctly on the counter:
   * the mocked channel processor throws, in order, ChannelException and
   * ChannelFullException (both must count as channel-write failures), then
   * RuntimeException (must count as a generic processing failure).
   */
  @Test
  public void testErrorCounters() throws Exception {
    SourceCounter sc = errorCounterCommonInit();
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    // Consecutive-call stubbing: each processEventBatch invocation consumes
    // the next throw in this chain, so the order below is significant.
    Mockito.doThrow(new ChannelException("dummy"))
        .doThrow(new ChannelFullException("dummy"))
        .doThrow(new RuntimeException("runtime"))
        .when(cp).processEventBatch(Matchers.anyListOf(Event.class));
    source.setChannelProcessor(cp);
    // The reader hands out three non-empty batches (driving the three
    // failures above), then fails with IOException to end the runnable.
    ReliableSpoolingFileEventReader reader = Mockito.mock(ReliableSpoolingFileEventReader.class);
    List<Event> events = new ArrayList<>();
    events.add(Mockito.mock(Event.class));
    Mockito.doReturn(events)
        .doReturn(events)
        .doReturn(events)
        .doThrow(new IOException("dummy"))
        .when(reader).readEvents(Mockito.anyInt());
    Runnable runner = source. new SpoolDirectoryRunnable(reader, sc);
    try {
      runner.run();
    } catch (Exception ex) {
      //Expected
    }
    Assert.assertEquals(2, sc.getChannelWriteFail());
    Assert.assertEquals(1, sc.getGenericProcessingFail());
  }
@Test
public void testErrorCounterEventReadFail() throws Exception {
SourceCounter sc = errorCounterCommonInit();
ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
source.setChannelProcessor(cp);
ReliableSpoolingFileEventReader reader = Mockito.mock(ReliableSpoolingFileEventReader.class);
List<Event> events = new ArrayList<>();
events.add(Mockito.mock(Event.class));
Mockito.doReturn(events)
.doThrow(new IOException("dummy"))
.when(reader).readEvents(Mockito.anyInt());
Runnable runner = source. new SpoolDirectoryRunnable(reader, sc);
try {
runner.run();
} catch (Exception ex) {
//Expected
}
Assert.assertEquals(1, sc.getEventReadFail());
}
}
| 9,910 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestSequenceGeneratorSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.PollableSource;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.conf.Configurables;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
public class TestSequenceGeneratorSource {

  // Source under test; re-created before every test method.
  private PollableSource source;

  @Before
  public void setUp() {
    source = new SequenceGeneratorSource();
    source.setName(TestSequenceGeneratorSource.class.getCanonicalName());
  }

  /**
   * Drives the full start/process/stop lifecycle.
   * No exception is expected during any of the lifecycle calls.
   */
  @Test
  public void testLifecycle() throws org.apache.flume.EventDeliveryException {
    final int DOPROCESS_LOOPS = 5;
    Context context = new Context();
    Configurables.configure(source, context);
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    source.setChannelProcessor(cp);
    source.start();
    for (int i = 0; i < DOPROCESS_LOOPS; i++) {
      source.process();
    }
    source.stop();
  }

  /**
   * With batchSize=1 every sequence number is delivered via processEvent.
   * A ChannelException is injected on the second call; the source must
   * redeliver that event so the complete 0..TOTAL_EVENTS-1 sequence is
   * still observed (5 successes + 1 failed attempt = 6 calls).
   */
  @Test
  public void testSingleEvents() throws EventDeliveryException {
    final int BATCH_SIZE = 1;
    final int TOTAL_EVENTS = 5;
    final int DOPROCESS_LOOPS = 10;
    Context context = new Context();
    context.put("batchSize", Integer.toString(BATCH_SIZE));
    context.put("totalEvents", Integer.toString(TOTAL_EVENTS));
    Configurables.configure(source, context);
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    Mockito
        .doNothing()
        .doThrow(ChannelException.class) // failure injection
        .doNothing()
        .when(cp).processEvent(Mockito.any(Event.class));
    source.setChannelProcessor(cp);
    source.start();
    for (int i = 0; i < DOPROCESS_LOOPS; i++) {
      source.process();
    }
    ArgumentCaptor<Event> argumentCaptor = ArgumentCaptor.forClass(Event.class);
    Mockito.verify(cp, Mockito.times(6)).processEvent(argumentCaptor.capture());
    Mockito.verify(cp, Mockito.never()).processEventBatch(Mockito.anyListOf(Event.class));
    verifyEventSequence(TOTAL_EVENTS, argumentCaptor.getAllValues());
  }

  /**
   * With batchSize=3 delivery happens via processEventBatch. A failure is
   * injected on the second batch; the source must retry it, so three batch
   * calls cover the complete 5-event sequence.
   */
  @Test
  public void testBatch() throws EventDeliveryException {
    final int BATCH_SIZE = 3;
    final int TOTAL_EVENTS = 5;
    final int DOPROCESS_LOOPS = 10;
    Context context = new Context();
    context.put("batchSize", Integer.toString(BATCH_SIZE));
    context.put("totalEvents", Integer.toString(TOTAL_EVENTS));
    Configurables.configure(source, context);
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    Mockito
        .doNothing()
        .doThrow(ChannelException.class) //failure injection on the second batch
        .doNothing()
        .when(cp).processEventBatch(Mockito.anyListOf(Event.class));
    source.setChannelProcessor(cp);
    source.start();
    for (int i = 0; i < DOPROCESS_LOOPS; i++) {
      source.process();
    }
    // Mockito cannot express a Class<List<Event>> token, hence the raw cast;
    // suppression is scoped to this single declaration.
    @SuppressWarnings("unchecked")
    ArgumentCaptor<List<Event>> argumentCaptor = ArgumentCaptor.forClass((Class) List.class);
    Mockito.verify(cp, Mockito.never()).processEvent(Mockito.any(Event.class));
    Mockito.verify(cp, Mockito.times(3)).processEventBatch(argumentCaptor.capture());
    List<List<Event>> eventBatches = argumentCaptor.getAllValues();
    verifyEventSequence(TOTAL_EVENTS, flatOutBatches(eventBatches));
  }

  /**
   * SequenceGeneratorSource produces a complete 0,1,2,...,totalEvents-1
   * sequence. This utility verifies that, after deduplication (retries may
   * redeliver an event) and sorting, exactly that sequence was received.
   */
  private static void verifyEventSequence(int expectedTotalEvents, List<Event> actualEvents) {
    Set<Integer> uniqueEvents = new LinkedHashSet<>();
    for (Event e : actualEvents) {
      uniqueEvents.add(Integer.parseInt(new String(e.getBody())));
    }
    List<Integer> sortedFilteredEvents = new ArrayList<>(uniqueEvents);
    Collections.sort(sortedFilteredEvents);
    Assert.assertEquals("mismatching number of events",
        expectedTotalEvents, sortedFilteredEvents.size());
    for (int i = 0; i < sortedFilteredEvents.size(); ++i) {
      Assert.assertEquals("missing or unexpected event body",
          i, (int) sortedFilteredEvents.get(i));
    }
  }

  /**
   * Converts a list of batches of events to a flattened single list of events.
   */
  private static List<Event> flatOutBatches(List<List<Event>> eventBatches) {
    List<Event> events = new ArrayList<>();
    for (List<Event> batch : eventBatches) {
      events.addAll(batch); // addAll over the previous manual inner loop
    }
    return events;
  }
}
| 9,911 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestNetcatUdpSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import java.util.ArrayList;
import java.util.List;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.DatagramSocket;
import com.google.common.base.Charsets;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.LoggerFactory;
public class TestNetcatUdpSource {
  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(TestNetcatUdpSource.class);
  // Source and channel under test; (re)built by init() for every test.
  private NetcatUdpSource source;
  private Channel channel;
  // Port 0 lets the OS choose a free port; the actual bound port is read
  // back via source.getSourcePort().
  private static final int TEST_NETCAT_PORT = 0;
  private final String shortString = "Lorem ipsum dolor sit amet.";
  private final String mediumString = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " +
      "Nunc maximus rhoncus viverra. Nunc a metus.";

  /**
   * Builds a NetcatUdpSource wired to a single MemoryChannel through a
   * replicating selector, configured to listen on an ephemeral port.
   */
  private void init() {
    source = new NetcatUdpSource();
    channel = new MemoryChannel();
    Configurables.configure(channel, new Context());
    List<Channel> channels = new ArrayList<Channel>();
    channels.add(channel);
    ChannelSelector rcs = new ReplicatingChannelSelector();
    rcs.setChannels(channels);
    source.setChannelProcessor(new ChannelProcessor(rcs));
    Context context = new Context();
    context.put("port", String.valueOf(TEST_NETCAT_PORT));
    source.configure(context);
  }

  /**
   * Sends the given payload ten times over UDP and verifies each resulting
   * event body matches the payload exactly.
   * (Previous comment mentioned keepFields/SyslogUDPSource — stale
   * copy-paste; NetcatUdpSource has no keepFields handling here.)
   */
  private void runUdpTest(String data1) throws IOException {
    init();
    source.start();
    // Write some message to the port
    DatagramSocket socket;
    DatagramPacket datagramPacket;
    datagramPacket = new DatagramPacket(data1.getBytes(),
        data1.getBytes().length,
        InetAddress.getLocalHost(), source.getSourcePort());
    // A fresh socket per send; each datagram becomes one event.
    for (int i = 0; i < 10 ; i++) {
      socket = new DatagramSocket();
      socket.send(datagramPacket);
      socket.close();
    }
    List<Event> channelEvents = new ArrayList<Event>();
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int i = 0; i < 10; i++) {
      Event e = channel.take();
      Assert.assertNotNull(e);
      channelEvents.add(e);
    }
    try {
      txn.commit();
    } catch (Throwable t) {
      txn.rollback();
    } finally {
      txn.close();
    }
    source.stop();
    for (Event e : channelEvents) {
      Assert.assertNotNull(e);
      String str = new String(e.getBody(), Charsets.UTF_8);
      logger.info(str);
      Assert.assertArrayEquals(data1.getBytes(),
          e.getBody());
    }
  }

  /**
   * Sends a 1000-byte datagram ten times and verifies each event body is the
   * full payload (i.e. large datagrams are not truncated by the source).
   */
  @Test
  public void testLargePayload() throws Exception {
    init();
    source.start();
    // Write some message to the netcat port
    byte[] largePayload = getPayload(1000).getBytes();
    DatagramSocket socket;
    DatagramPacket datagramPacket;
    datagramPacket = new DatagramPacket(largePayload,
        1000,
        InetAddress.getLocalHost(), source.getSourcePort());
    for (int i = 0; i < 10 ; i++) {
      socket = new DatagramSocket();
      socket.send(datagramPacket);
      socket.close();
    }
    List<Event> channelEvents = new ArrayList<Event>();
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int i = 0; i < 10; i++) {
      Event e = channel.take();
      Assert.assertNotNull(e);
      channelEvents.add(e);
    }
    try {
      txn.commit();
    } catch (Throwable t) {
      txn.rollback();
    } finally {
      txn.close();
    }
    source.stop();
    for (Event e : channelEvents) {
      Assert.assertNotNull(e);
      Assert.assertArrayEquals(largePayload, e.getBody());
    }
  }

  /** Round-trips a short single-sentence payload. */
  @Test
  public void testShortString() throws IOException {
    runUdpTest(shortString);
  }

  /** Round-trips a longer multi-sentence payload. */
  @Test
  public void testMediumString() throws IOException {
    runUdpTest(mediumString);
  }

  /** Builds a string of {@code length} repeated 'x' characters. */
  private String getPayload(int length) {
    StringBuilder payload = new StringBuilder(length);
    for (int n = 0; n < length; ++n) {
      payload.append("x");
    }
    return payload.toString();
  }
}
| 9,912 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestSyslogParser.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.source;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.flume.Event;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Assert;
import org.junit.Test;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class TestSyslogParser {
@Test
public void testRfc5424DateParsing() {
final String[] examples = {
"1985-04-12T23:20:50.52Z", "1985-04-12T19:20:50.52-04:00",
"2003-10-11T22:14:15.003Z", "2003-08-24T05:14:15.000003-07:00",
"2012-04-13T11:11:11-08:00", "2012-04-13T08:08:08.0001+00:00",
"2012-04-13T08:08:08.251+00:00"
};
SyslogParser parser = new SyslogParser();
DateTimeFormatter jodaParser = ISODateTimeFormat.dateTimeParser();
for (String ex : examples) {
Assert.assertEquals(
"Problem parsing date string: " + ex,
jodaParser.parseMillis(ex),
parser.parseRfc5424Date(ex));
}
}
@Test
public void testMessageParsing() {
SyslogParser parser = new SyslogParser();
Charset charset = Charsets.UTF_8;
List<String> messages = Lists.newArrayList();
// supported examples from RFC 3164
messages.add("<34>Oct 11 22:14:15 mymachine su: 'su root' failed for " +
"lonvick on /dev/pts/8");
messages.add("<13>Feb 5 17:32:18 10.0.0.99 Use the BFG!");
messages.add("<165>Aug 24 05:34:00 CST 1987 mymachine myproc[10]: %% " +
"It's time to make the do-nuts. %% Ingredients: Mix=OK, Jelly=OK # " +
"Devices: Mixer=OK, Jelly_Injector=OK, Frier=OK # Transport: " +
"Conveyer1=OK, Conveyer2=OK # %%");
messages.add("<0>Oct 22 10:52:12 scapegoat 1990 Oct 22 10:52:01 TZ-6 " +
"scapegoat.dmz.example.org 10.1.2.3 sched[0]: That's All Folks!");
// supported examples from RFC 5424
messages.add("<34>1 2003-10-11T22:14:15.003Z mymachine.example.com su - " +
"ID47 - BOM'su root' failed for lonvick on /dev/pts/8");
messages.add("<165>1 2003-08-24T05:14:15.000003-07:00 192.0.2.1 myproc " +
"8710 - - %% It's time to make the do-nuts.");
// non-standard (but common) messages (RFC3339 dates, no version digit)
messages.add("<13>2003-08-24T05:14:15Z localhost snarf?");
messages.add("<13>2012-08-16T14:34:03-08:00 127.0.0.1 test shnap!");
// test with default keepFields = false
for (String msg : messages) {
Set<String> keepFields = new HashSet<String>();
Event event = parser.parseMessage(msg, charset, keepFields);
Assert.assertNull("Failure to parse known-good syslog message",
event.getHeaders().get(SyslogUtils.EVENT_STATUS));
}
// test that priority, timestamp and hostname are preserved in event body
for (String msg : messages) {
Set<String> keepFields = new HashSet<String>();
keepFields.add(SyslogUtils.KEEP_FIELDS_ALL);
Event event = parser.parseMessage(msg, charset, keepFields);
Assert.assertArrayEquals(event.getBody(), msg.getBytes());
Assert.assertNull("Failure to parse known-good syslog message",
event.getHeaders().get(SyslogUtils.EVENT_STATUS));
}
// test that hostname is preserved in event body
for (String msg : messages) {
Set<String> keepFields = new HashSet<String>();
keepFields.add(SyslogSourceConfigurationConstants.CONFIG_KEEP_FIELDS_HOSTNAME);
Event event = parser.parseMessage(msg, charset, keepFields);
Assert.assertTrue("Failure to persist hostname",
new String(event.getBody()).contains(event.getHeaders().get("host")));
Assert.assertNull("Failure to parse known-good syslog message",
event.getHeaders().get(SyslogUtils.EVENT_STATUS));
}
}
} | 9,913 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/TestSyslogTcpSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import com.google.common.base.Charsets;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doThrow;
import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
public class TestSyslogTcpSource {
private static final org.slf4j.Logger logger =
LoggerFactory.getLogger(TestSyslogTcpSource.class);
private static final String TEST_CLIENT_IP_HEADER = "testClientIPHeader";
private static final String TEST_CLIENT_HOSTNAME_HEADER = "testClientHostnameHeader";
private SyslogTcpSource source;
private Channel channel;
private static final int TEST_SYSLOG_PORT = 0;
private final DateTime time = new DateTime();
private final String stamp1 = time.toString();
private final String host1 = "localhost.localdomain";
private final String data1 = "test syslog data";
private final String bodyWithHostname = host1 + " " + data1;
private final String bodyWithTimestamp = stamp1 + " " + data1;
private final String bodyWithTandH = "<10>" + stamp1 + " " + host1 + " " + data1 + "\n";
private void init(String keepFields) {
init(keepFields, new Context());
}
private void init(String keepFields, Context context) {
source = new SyslogTcpSource();
channel = new MemoryChannel();
Configurables.configure(channel, new Context());
List<Channel> channels = new ArrayList<>();
channels.add(channel);
ChannelSelector rcs = new ReplicatingChannelSelector();
rcs.setChannels(channels);
source.setChannelProcessor(new ChannelProcessor(rcs));
context.put("host", InetAddress.getLoopbackAddress().getHostAddress());
context.put("port", String.valueOf(TEST_SYSLOG_PORT));
context.put("keepFields", keepFields);
source.configure(context);
}
private void initSsl() {
Context context = new Context();
context.put("ssl", "true");
context.put("keystore", "src/test/resources/server.p12");
context.put("keystore-password", "password");
context.put("keystore-type", "PKCS12");
init("none", context);
}
  /**
   * Tests the keepFields configuration parameter (all/none/hostname/timestamp
   * plus the legacy true/false spellings) using SyslogTcpSource: sends the
   * same syslog line ten times over TCP and checks which parts of the message
   * survive in the resulting event bodies.
   */
  private void runKeepFieldsTest(String keepFields) throws IOException {
    init(keepFields);
    source.start();
    // Write some message to the syslog port
    InetSocketAddress addr = source.getBoundAddress();
    for (int i = 0; i < 10 ; i++) {
      try (Socket syslogSocket = new Socket(addr.getAddress(), addr.getPort())) {
        syslogSocket.getOutputStream().write(bodyWithTandH.getBytes());
      }
    }
    List<Event> channelEvents = new ArrayList<>();
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int i = 0; i < 10; i++) {
      Event e = channel.take();
      if (e == null) {
        throw new NullPointerException("Event is null");
      }
      channelEvents.add(e);
    }
    try {
      txn.commit();
    } catch (Throwable t) {
      // NOTE(review): a commit failure is only rolled back here, not
      // rethrown; the body assertions below still run on the taken events.
      txn.rollback();
    } finally {
      txn.close();
    }
    source.stop();
    for (Event e : channelEvents) {
      Assert.assertNotNull(e);
      String str = new String(e.getBody(), Charsets.UTF_8);
      logger.info(str);
      // The expected body depends on which syslog fields were kept.
      if (keepFields.equals("true") || keepFields.equals("all")) {
        Assert.assertArrayEquals(bodyWithTandH.trim().getBytes(), e.getBody());
      } else if (keepFields.equals("false") || keepFields.equals("none")) {
        Assert.assertArrayEquals(data1.getBytes(), e.getBody());
      } else if (keepFields.equals("hostname")) {
        Assert.assertArrayEquals(bodyWithHostname.getBytes(), e.getBody());
      } else if (keepFields.equals("timestamp")) {
        Assert.assertArrayEquals(bodyWithTimestamp.getBytes(), e.getBody());
      }
    }
  }
  /** keepFields=all (and legacy "true") must preserve the full syslog line in the body. */
  @Test
  public void testKeepFields() throws IOException {
    runKeepFieldsTest("all");
    // Backwards compatibility
    runKeepFieldsTest("true");
  }

  /** keepFields=none (and legacy "false") must strip the syslog fields, leaving the message. */
  @Test
  public void testRemoveFields() throws IOException {
    runKeepFieldsTest("none");
    // Backwards compatibility
    runKeepFieldsTest("false");
  }

  /** keepFields=hostname must retain only the hostname prefix. */
  @Test
  public void testKeepHostname() throws IOException {
    runKeepFieldsTest("hostname");
  }

  /** keepFields=timestamp must retain only the timestamp prefix. */
  @Test
  public void testKeepTimestamp() throws IOException {
    runKeepFieldsTest("timestamp");
  }
  /** Checks the source counters after a normal run. */
  @Test
  public void testSourceCounter() throws IOException {
    // runKeepFieldsTest sends exactly 10 messages, so both counters must be 10.
    runKeepFieldsTest("all");
    assertEquals(10, source.getSourceCounter().getEventAcceptedCount());
    assertEquals(10, source.getSourceCounter().getEventReceivedCount());
  }
@Test
public void testSourceCounterChannelFail() throws Exception {
init("true");
errorCounterCommon(new ChannelException("dummy"));
for (int i = 0; i < 10 && source.getSourceCounter().getChannelWriteFail() == 0; i++) {
Thread.sleep(100);
}
assertEquals(1, source.getSourceCounter().getChannelWriteFail());
}
@Test
public void testSourceCounterEventFail() throws Exception {
init("true");
errorCounterCommon(new RuntimeException("dummy"));
for (int i = 0; i < 10 && source.getSourceCounter().getEventReadFail() == 0; i++) {
Thread.sleep(100);
}
assertEquals(1, source.getSourceCounter().getEventReadFail());
}
  /**
   * Starts the source with a mocked ChannelProcessor that throws {@code e} on
   * every processEvent() call, then sends one syslog message so the error
   * counting path is exercised.
   */
  private void errorCounterCommon(Exception e) throws IOException {
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    doThrow(e).when(cp).processEvent(any(Event.class));
    source.setChannelProcessor(cp);
    source.start();
    // Write some message to the syslog port
    InetSocketAddress addr = source.getBoundAddress();
    try (Socket syslogSocket = new Socket(addr.getAddress(), addr.getPort())) {
      syslogSocket.getOutputStream().write(bodyWithTandH.getBytes());
    }
  }
@Test
public void testSSLMessages() throws Exception {
initSsl();
source.start();
InetSocketAddress address = source.getBoundAddress();
SSLContext sslContext = SSLContext.getInstance("TLS");
sslContext.init(null, new TrustManager[]{new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] certs, String s) {
// nothing
}
@Override
public void checkServerTrusted(X509Certificate[] certs, String s) {
// nothing
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
} },
null);
SocketFactory socketFactory = sslContext.getSocketFactory();
Socket socket = socketFactory.createSocket();
socket.connect(address);
OutputStream outputStream = socket.getOutputStream();
outputStream.write(bodyWithTandH.getBytes());
socket.close();
// Thread.sleep(100);
Transaction transaction = channel.getTransaction();
transaction.begin();
Event event = channel.take();
assertEquals(new String(event.getBody()), data1);
transaction.commit();
transaction.close();
}
@Test
public void testClientHeaders() throws IOException {
String testClientIPHeader = "testClientIPHeader";
String testClientHostnameHeader = "testClientHostnameHeader";
Context context = new Context();
context.put("clientIPHeader", testClientIPHeader);
context.put("clientHostnameHeader", testClientHostnameHeader);
init("none", context);
source.start();
// Write some message to the syslog port
InetSocketAddress addr = source.getBoundAddress();
Socket syslogSocket = new Socket(addr.getAddress(), addr.getPort());
syslogSocket.getOutputStream().write(bodyWithTandH.getBytes());
Transaction txn = channel.getTransaction();
txn.begin();
Event e = channel.take();
try {
txn.commit();
} catch (Throwable t) {
txn.rollback();
} finally {
txn.close();
}
source.stop();
Map<String, String> headers = e.getHeaders();
InetAddress loopbackAddress = InetAddress.getLoopbackAddress();
checkHeader(headers, TEST_CLIENT_IP_HEADER, loopbackAddress.getHostAddress());
checkHeader(headers, TEST_CLIENT_HOSTNAME_HEADER, loopbackAddress.getHostName());
}
private static void checkHeader(Map<String, String> headers, String headerName,
String expectedValue) {
assertTrue("Missing event header: " + headerName, headers.containsKey(headerName));
String headerValue = headers.get(headerName);
if (TEST_CLIENT_HOSTNAME_HEADER.equals(headerName)) {
if (!TestSyslogUtils.isLocalHost(headerValue)) {
fail("Expected either 'localhost' or '127.0.0.1', got: " + headerValue);
}
} else {
assertEquals("Event header value does not match: " + headerName,
expectedValue, headerValue);
}
}
}
| 9,914 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/shaded | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/shaded/guava/TestRateLimiter.java | package org.apache.flume.source.shaded.guava;
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//import static java.lang.reflect.Modifier.isStatic;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
//import com.google.common.collect.ImmutableClassToInstanceMap;
//import com.google.common.collect.ImmutableSet;
//import com.google.common.collect.Lists;
//import com.google.common.testing.NullPointerTester;
//import com.google.common.testing.NullPointerTester.Visibility;
import org.apache.flume.source.shaded.guava.RateLimiter.SleepingStopwatch;
import com.google.common.collect.Lists;
import junit.framework.TestCase;
//import org.easymock.EasyMock;
//import org.mockito.Mockito;
//import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
//import java.util.concurrent.TimeUnit;
/**
* Tests for RateLimiter.
*
* @author Dimitris Andreou
*/
public class TestRateLimiter extends TestCase {
  // Tolerance for comparing the double "seconds slept" values from acquire().
  private static final double EPSILON = 1e-8;
  // Deterministic fake clock: every test advances/reads this instead of real
  // time, and it records an event string for each sleep (see FakeStopwatch).
  private final FakeStopwatch stopwatch = new FakeStopwatch();
  public void testSimple() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    limiter.acquire(); // R0.00, since it's the first request
    limiter.acquire(); // R0.20
    limiter.acquire(); // R0.20
    assertEvents("R0.00", "R0.20", "R0.20");
  }
  public void testImmediateTryAcquire() {
    RateLimiter r = RateLimiter.create(1);
    assertTrue("Unable to acquire initial permit", r.tryAcquire());
    assertFalse("Capable of acquiring secondary permit", r.tryAcquire());
  }
  public void testSimpleRateUpdate() {
    RateLimiter limiter = RateLimiter.create(5.0, 5, SECONDS);
    assertEquals(5.0, limiter.getRate());
    limiter.setRate(10.0);
    assertEquals(10.0, limiter.getRate());
    // Non-positive rates must be rejected.
    try {
      limiter.setRate(0.0);
      fail();
    } catch (IllegalArgumentException expected) { }
    try {
      limiter.setRate(-10.0);
      fail();
    } catch (IllegalArgumentException expected) { }
  }
  public void testAcquireParameterValidation() {
    // Permit counts must be strictly positive on every acquire/tryAcquire form.
    RateLimiter limiter = RateLimiter.create(999);
    try {
      limiter.acquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      limiter.acquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      limiter.tryAcquire(0);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      limiter.tryAcquire(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      limiter.tryAcquire(0, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      limiter.tryAcquire(-1, 1, SECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  public void testSimpleWithWait() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    limiter.acquire(); // R0.00
    stopwatch.sleepMillis(200); // U0.20, we are ready for the next request...
    limiter.acquire(); // R0.00, ...which is granted immediately
    limiter.acquire(); // R0.20
    assertEvents("R0.00", "U0.20", "R0.00", "R0.20");
  }
  public void testSimpleAcquireReturnValues() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertEquals(0.0, limiter.acquire(), EPSILON); // R0.00
    stopwatch.sleepMillis(200); // U0.20, we are ready for the next request...
    assertEquals(0.0, limiter.acquire(), EPSILON); // R0.00, ...which is granted immediately
    assertEquals(0.2, limiter.acquire(), EPSILON); // R0.20
    assertEvents("R0.00", "U0.20", "R0.00", "R0.20");
  }
  public void testSimpleAcquireEarliestAvailableIsInPast() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertEquals(0.0, limiter.acquire(), EPSILON);
    stopwatch.sleepMillis(400);
    assertEquals(0.0, limiter.acquire(), EPSILON);
    assertEquals(0.0, limiter.acquire(), EPSILON);
    assertEquals(0.2, limiter.acquire(), EPSILON);
  }
  public void testOneSecondBurst() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    stopwatch.sleepMillis(1000); // max capacity reached
    stopwatch.sleepMillis(1000); // this makes no difference
    limiter.acquire(1); // R0.00, since it's the first request
    limiter.acquire(1); // R0.00, from capacity
    limiter.acquire(3); // R0.00, from capacity
    limiter.acquire(1); // R0.00, concluding a burst of 5 permits
    limiter.acquire(); // R0.20, capacity exhausted
    assertEvents("U1.00", "U1.00",
        "R0.00", "R0.00", "R0.00", "R0.00", // first request and burst
        "R0.20");
  }
  public void testCreateWarmupParameterValidation() {
    // Zero warmup is legal; zero/negative rate and negative warmup are not.
    RateLimiter.create(1.0, 1, NANOSECONDS);
    RateLimiter.create(1.0, 0, NANOSECONDS);
    try {
      RateLimiter.create(0.0, 1, NANOSECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      RateLimiter.create(1.0, -1, NANOSECONDS);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  public void testWarmUp() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 2.0, 4000, MILLISECONDS);
    for (int i = 0; i < 8; i++) {
      limiter.acquire(); // #1
    }
    stopwatch.sleepMillis(500); // #2: to repay for the last acquire
    stopwatch.sleepMillis(4000); // #3: becomes cold again
    for (int i = 0; i < 8; i++) {
      limiter.acquire(); // #4
    }
    stopwatch.sleepMillis(500); // #5: to repay for the last acquire
    stopwatch.sleepMillis(2000); // #6: didn't get cold! It would take another 2 seconds to go cold
    for (int i = 0; i < 8; i++) {
      limiter.acquire(); // #7
    }
    assertEvents(
        "R0.00, R1.38, R1.13, R0.88, R0.63, R0.50, R0.50, R0.50", // #1
        "U0.50", // #2
        "U4.00", // #3
        "R0.00, R1.38, R1.13, R0.88, R0.63, R0.50, R0.50, R0.50", // #4
        "U0.50", // #5
        "U2.00", // #6
        "R0.00, R0.50, R0.50, R0.50, R0.50, R0.50, R0.50, R0.50"); // #7
  }
  public void testWarmUpAndUpdate() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 2.0, 4000, MILLISECONDS);
    for (int i = 0; i < 8; i++) {
      limiter.acquire(); // #1
    }
    stopwatch.sleepMillis(4500); // #2: back to cold state (warmup period + repay last acquire)
    for (int i = 0; i < 3; i++) { // only three steps, we're somewhere in the warmup period
      limiter.acquire(); // #3
    }
    limiter.setRate(4.0); // double the rate!
    limiter.acquire(); // #4, we repay the debt of the last acquire (imposed by the old rate)
    for (int i = 0; i < 4; i++) {
      limiter.acquire(); // #5
    }
    stopwatch.sleepMillis(4250); // #6, back to cold state (warmup period + repay last acquire)
    for (int i = 0; i < 11; i++) {
      limiter.acquire(); // #7, showing off the warmup starting from totally cold
    }
    // make sure the areas (times) remain the same, while permits are different
    assertEvents(
        "R0.00, R1.38, R1.13, R0.88, R0.63, R0.50, R0.50, R0.50", // #1
        "U4.50", // #2
        "R0.00, R1.38, R1.13", // #3, after that the rate changes
        "R0.88", // #4, this is what the throttling would be with the old rate
        "R0.34, R0.28, R0.25, R0.25", // #5
        "U4.25", // #6
        "R0.00, R0.72, R0.66, R0.59, R0.53, R0.47, R0.41", // #7
        "R0.34, R0.28, R0.25, R0.25"); // #7 (cont.), note, this matches #5
  }
  public void testBurstyAndUpdate() {
    RateLimiter rateLimiter = RateLimiter.create(stopwatch, 1.0);
    rateLimiter.acquire(1); // no wait
    rateLimiter.acquire(1); // R1.00, to repay previous
    rateLimiter.setRate(2.0); // update the rate!
    rateLimiter.acquire(1); // R1.00, to repay previous (the previous was under the old rate!)
    rateLimiter.acquire(2); // R0.50, to repay previous (now the rate takes effect)
    rateLimiter.acquire(4); // R1.00, to repay previous
    rateLimiter.acquire(1); // R2.00, to repay previous
    assertEvents("R0.00", "R1.00", "R1.00", "R0.50", "R1.00", "R2.00");
  }
  public void testTryAcquire_noWaitAllowed() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertTrue(limiter.tryAcquire(0, SECONDS));
    assertFalse(limiter.tryAcquire(0, SECONDS));
    assertFalse(limiter.tryAcquire(0, SECONDS));
    stopwatch.sleepMillis(100);
    assertFalse(limiter.tryAcquire(0, SECONDS));
  }
  public void testTryAcquire_someWaitAllowed() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertTrue(limiter.tryAcquire(0, SECONDS));
    assertTrue(limiter.tryAcquire(200, MILLISECONDS));
    assertFalse(limiter.tryAcquire(100, MILLISECONDS));
    stopwatch.sleepMillis(100);
    assertTrue(limiter.tryAcquire(100, MILLISECONDS));
  }
  public void testTryAcquire_overflow() {
    // A huge timeout must not overflow the internal nanosecond arithmetic.
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertTrue(limiter.tryAcquire(0, MICROSECONDS));
    stopwatch.sleepMillis(100);
    assertTrue(limiter.tryAcquire(Long.MAX_VALUE, MICROSECONDS));
  }
  public void testTryAcquire_negative() {
    // Negative timeouts behave like zero timeout.
    RateLimiter limiter = RateLimiter.create(stopwatch, 5.0);
    assertTrue(limiter.tryAcquire(5, 0, SECONDS));
    stopwatch.sleepMillis(900);
    assertFalse(limiter.tryAcquire(1, Long.MIN_VALUE, SECONDS));
    stopwatch.sleepMillis(100);
    assertTrue(limiter.tryAcquire(1, -1, SECONDS));
  }
  public void testSimpleWeights() {
    RateLimiter rateLimiter = RateLimiter.create(stopwatch, 1.0);
    rateLimiter.acquire(1); // no wait
    rateLimiter.acquire(1); // R1.00, to repay previous
    rateLimiter.acquire(2); // R1.00, to repay previous
    rateLimiter.acquire(4); // R2.00, to repay previous
    rateLimiter.acquire(8); // R4.00, to repay previous
    rateLimiter.acquire(1); // R8.00, to repay previous
    assertEvents("R0.00", "R1.00", "R1.00", "R2.00", "R4.00", "R8.00");
  }
  public void testInfinity_Bursty() {
    RateLimiter limiter = RateLimiter.create(stopwatch, Double.POSITIVE_INFINITY);
    limiter.acquire(Integer.MAX_VALUE / 4);
    limiter.acquire(Integer.MAX_VALUE / 2);
    limiter.acquire(Integer.MAX_VALUE);
    assertEvents("R0.00", "R0.00", "R0.00"); // no wait, infinite rate!
    limiter.setRate(2.0);
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    assertEvents(
        "R0.00", // First comes the saved-up burst, which defaults to a 1-second burst (2 requests).
        "R0.00",
        "R0.00", // Now comes the free request.
        "R0.50", // Now it's 0.5 seconds per request.
        "R0.50");
    limiter.setRate(Double.POSITIVE_INFINITY);
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    assertEvents("R0.50", "R0.00", "R0.00"); // we repay the last request (.5sec), then back to +oo
  }
  /** https://code.google.com/p/guava-libraries/issues/detail?id=1791 */
  public void testInfinity_BustyTimeElapsed() {
    RateLimiter limiter = RateLimiter.create(stopwatch, Double.POSITIVE_INFINITY);
    stopwatch.instant += 1000000;
    limiter.setRate(2.0);
    for (int i = 0; i < 5; i++) {
      limiter.acquire();
    }
    assertEvents(
        "R0.00", // First comes the saved-up burst, which defaults to a 1-second burst (2 requests).
        "R0.00",
        "R0.00", // Now comes the free request.
        "R0.50", // Now it's 0.5 seconds per request.
        "R0.50");
  }
  public void testInfinity_WarmUp() {
    RateLimiter limiter = RateLimiter.create(
        stopwatch, Double.POSITIVE_INFINITY, 10, SECONDS);
    limiter.acquire(Integer.MAX_VALUE / 4);
    limiter.acquire(Integer.MAX_VALUE / 2);
    limiter.acquire(Integer.MAX_VALUE);
    assertEvents("R0.00", "R0.00", "R0.00");
    limiter.setRate(1.0);
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    assertEvents("R0.00", "R1.00", "R1.00");
    limiter.setRate(Double.POSITIVE_INFINITY);
    limiter.acquire();
    limiter.acquire();
    limiter.acquire();
    assertEvents("R1.00", "R0.00", "R0.00");
  }
  public void testInfinity_WarmUpTimeElapsed() {
    RateLimiter limiter = RateLimiter.create(stopwatch, Double.POSITIVE_INFINITY, 10, SECONDS);
    stopwatch.instant += 1000000;
    limiter.setRate(1.0);
    for (int i = 0; i < 5; i++) {
      limiter.acquire();
    }
    assertEvents("R0.00", "R1.00", "R1.00", "R1.00", "R1.00");
  }
  /**
   * Make sure that bursts can never go above 1-second-worth-of-work for the current
   * rate, even when we change the rate.
   */
  public void testWeNeverGetABurstMoreThanOneSec() {
    RateLimiter limiter = RateLimiter.create(stopwatch, 1.0);
    int[] rates = { 1000, 1, 10, 1000000, 10, 1};
    for (int rate : rates) {
      int oneSecWorthOfWork = rate;
      // Sleep long enough to fully saturate stored permits at the new rate.
      stopwatch.sleepMillis(rate * 1000);
      limiter.setRate(rate);
      long burst = measureTotalTimeMillis(limiter, oneSecWorthOfWork, new Random());
      // we allow one second worth of work to go in a burst (i.e. take less than a second)
      assertTrue(burst <= 1000);
      long afterBurst = measureTotalTimeMillis(limiter, oneSecWorthOfWork, new Random());
      // but work beyond that must take at least one second
      assertTrue(afterBurst >= 1000);
    }
  }
  /**
   * This neat test shows that no matter what weights we use in our requests, if we push X
   * amount of permits in a cool state, where X = rate * timeToCoolDown, and we have
   * specified a timeToWarmUp() period, it will cost as the prescribed amount of time. E.g.,
   * calling [acquire(5), acquire(1)] takes exactly the same time as
   * [acquire(2), acquire(3), acquire(1)].
   */
  public void testTimeToWarmUpIsHonouredEvenWithWeights() {
    Random random = new Random();
    int maxPermits = 10;
    double[] qpsToTest = { 4.0, 2.0, 1.0, 0.5, 0.1 };
    for (int trial = 0; trial < 100; trial++) {
      for (double qps : qpsToTest) {
        // Since we know that: maxPermits = 0.5 * warmup / stableInterval;
        // then if maxPermits == 10, we have:
        // warmupSeconds = 20 / qps
        long warmupMillis = (long) ((2 * maxPermits / qps) * 1000.0);
        RateLimiter rateLimiter = RateLimiter.create(
            stopwatch, qps, warmupMillis, MILLISECONDS);
        assertEquals(warmupMillis, measureTotalTimeMillis(rateLimiter, maxPermits, random));
      }
    }
  }
  /**
   * Acquires {@code permits} permits in randomly-sized chunks and returns how
   * long (in fake-clock millis) the whole sequence took, including repaying
   * the debt of the final acquire.
   */
  private long measureTotalTimeMillis(RateLimiter rateLimiter, int permits, Random random) {
    long startTime = stopwatch.instant;
    while (permits > 0) {
      int nextPermitsToAcquire = Math.max(1, random.nextInt(permits));
      permits -= nextPermitsToAcquire;
      rateLimiter.acquire(nextPermitsToAcquire);
    }
    rateLimiter.acquire(1); // to repay for any pending debt
    return NANOSECONDS.toMillis(stopwatch.instant - startTime);
  }
  // Compares the expected event strings against everything the fake stopwatch
  // recorded since the last call, then clears the recording.
  private void assertEvents(String... events) {
    assertEquals(Arrays.toString(events), stopwatch.readEventsAndClear());
  }
  /**
   * The stopwatch gathers events and presents them as strings.
   * R0.6 means a delay of 0.6 seconds caused by the (R)ateLimiter
   * U1.0 means the (U)ser caused the stopwatch to sleep for a second.
   */
  static class FakeStopwatch extends SleepingStopwatch {
    // Fake "now" in nanoseconds; tests mutate this directly or via sleep*().
    long instant = 0L;
    // Recorded sleep events, drained by readEventsAndClear().
    final List<String> events = Lists.newArrayList();
    @Override
    public long readMicros() {
      return NANOSECONDS.toMicros(instant);
    }
    void sleepMillis(int millis) {
      sleepMicros("U", MILLISECONDS.toMicros(millis));
    }
    void sleepMicros(String caption, long micros) {
      instant += MICROSECONDS.toNanos(micros);
      events.add(caption + String.format("%3.2f", (micros / 1000000.0)));
    }
    @Override
    void sleepMicrosUninterruptibly(long micros) {
      sleepMicros("R", micros);
    }
    String readEventsAndClear() {
      try {
        return events.toString();
      } finally {
        events.clear();
      }
    }
    @Override
    public String toString() {
      return events.toString();
    }
  }
}
| 9,915 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/http/TestBLOBHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.source.http;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ReadListener;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.junit.Before;
import org.junit.Test;
/**
*
*/
public class TestBLOBHandler {
HTTPSourceHandler handler;
@Before
public void setUp() {
handler = new BLOBHandler();
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testCSVData() throws Exception {
Map requestParameterMap = new HashMap();
requestParameterMap.put("param1", new String[] { "value1" });
requestParameterMap.put("param2", new String[] { "value2" });
HttpServletRequest req = mock(HttpServletRequest.class);
final String csvData = "a,b,c";
ServletInputStream servletInputStream = new DelegatingServletInputStream(
new ByteArrayInputStream(csvData.getBytes()));
when(req.getInputStream()).thenReturn(servletInputStream);
when(req.getParameterMap()).thenReturn(requestParameterMap);
Context context = mock(Context.class);
when(
context.getString(BLOBHandler.MANDATORY_PARAMETERS,
BLOBHandler.DEFAULT_MANDATORY_PARAMETERS)).thenReturn(
"param1,param2");
handler.configure(context);
List<Event> deserialized = handler.getEvents(req);
assertEquals(1, deserialized.size());
Event e = deserialized.get(0);
assertEquals(new String(e.getBody()), csvData);
assertEquals(e.getHeaders().get("param1"), "value1");
assertEquals(e.getHeaders().get("param2"), "value2");
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testTabData() throws Exception {
Map requestParameterMap = new HashMap();
requestParameterMap.put("param1", new String[] { "value1" });
HttpServletRequest req = mock(HttpServletRequest.class);
final String tabData = "a\tb\tc";
ServletInputStream servletInputStream = new DelegatingServletInputStream(
new ByteArrayInputStream(tabData.getBytes()));
when(req.getInputStream()).thenReturn(servletInputStream);
when(req.getParameterMap()).thenReturn(requestParameterMap);
Context context = mock(Context.class);
when(
context.getString(BLOBHandler.MANDATORY_PARAMETERS,
BLOBHandler.DEFAULT_MANDATORY_PARAMETERS)).thenReturn("param1");
handler.configure(context);
List<Event> deserialized = handler.getEvents(req);
assertEquals(1, deserialized.size());
Event e = deserialized.get(0);
assertEquals(new String(e.getBody()), tabData);
assertEquals(e.getHeaders().get("param1"), "value1");
}
@SuppressWarnings({ "rawtypes" })
@Test(expected = IllegalArgumentException.class)
public void testMissingParameters() throws Exception {
Map requestParameterMap = new HashMap();
HttpServletRequest req = mock(HttpServletRequest.class);
final String tabData = "a\tb\tc";
ServletInputStream servletInputStream = new DelegatingServletInputStream(
new ByteArrayInputStream(tabData.getBytes()));
when(req.getInputStream()).thenReturn(servletInputStream);
when(req.getParameterMap()).thenReturn(requestParameterMap);
Context context = mock(Context.class);
when(
context.getString(BLOBHandler.MANDATORY_PARAMETERS,
BLOBHandler.DEFAULT_MANDATORY_PARAMETERS)).thenReturn("param1");
handler.configure(context);
handler.getEvents(req);
}
class DelegatingServletInputStream extends ServletInputStream {
private final InputStream sourceStream;
/**
* Create a DelegatingServletInputStream for the given source stream.
*
* @param sourceStream
* the source stream (never <code>null</code>)
*/
public DelegatingServletInputStream(InputStream sourceStream) {
this.sourceStream = sourceStream;
}
/**
* Return the underlying source stream (never <code>null</code>).
*/
public final InputStream getSourceStream() {
return this.sourceStream;
}
public int read() throws IOException {
return this.sourceStream.read();
}
public void close() throws IOException {
super.close();
this.sourceStream.close();
}
public boolean isFinished() {
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean isReady() {
throw new UnsupportedOperationException("Not supported yet.");
}
public void setReadListener(ReadListener arg0) {
throw new UnsupportedOperationException("Not supported yet.");
}
}
}
| 9,916 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/source/http/TestHTTPSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.source.http;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.JSONEvent;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpOptions;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpTrace;
import org.apache.http.HttpResponse;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.HttpClientBuilder;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import javax.management.Query;
import javax.management.QueryExp;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
import java.net.UnknownHostException;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import static org.fest.reflect.core.Reflection.field;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Mockito.doThrow;
/**
*
*/
public class TestHTTPSource {
private static HTTPSource httpSource;
private static HTTPSource httpsSource;
private static HTTPSource httpsGlobalKeystoreSource;
private static Channel httpChannel;
private static Channel httpsChannel;
private static Channel httpsGlobalKeystoreChannel;
private static int httpPort;
private static int httpsPort;
private static int httpsGlobalKeystorePort;
private HttpClient httpClient;
private HttpPost postRequest;
private static int findFreePort() throws IOException {
ServerSocket socket = new ServerSocket(0);
int port = socket.getLocalPort();
socket.close();
return port;
}
  /** Builds the minimal plain-HTTP source configuration, bound to all interfaces. */
  private static Context getDefaultNonSecureContext(int port) throws IOException {
    Context ctx = new Context();
    ctx.put(HTTPSourceConfigurationConstants.CONFIG_BIND, "0.0.0.0");
    ctx.put(HTTPSourceConfigurationConstants.CONFIG_PORT, String.valueOf(port));
    // Extra key exercising configuration pass-through — presumably forwarded
    // to the embedded server's QueuedThreadPool; confirm against HTTPSource.
    ctx.put("QueuedThreadPool.MaxThreads", "100");
    return ctx;
  }
  /**
   * Builds an SSL-enabled source configuration that points at the test
   * keystore checked into src/test/resources.
   */
  private static Context getDefaultSecureContext(int port) throws IOException {
    Context sslContext = new Context();
    sslContext.put(HTTPSourceConfigurationConstants.CONFIG_PORT, String.valueOf(port));
    sslContext.put(HTTPSourceConfigurationConstants.SSL_ENABLED, "true");
    sslContext.put(HTTPSourceConfigurationConstants.SSL_KEYSTORE_PASSWORD, "password");
    sslContext.put(HTTPSourceConfigurationConstants.SSL_KEYSTORE,
                   "src/test/resources/jettykeystore");
    return sslContext;
  }
  /**
   * Builds an SSL-enabled configuration that deliberately specifies no
   * keystore of its own, so the source must fall back to the JVM-global
   * javax.net.ssl.keyStore / keyStorePassword system properties set here.
   * The properties are cleared again by the caller (setUpClass) once the
   * source has started.
   */
  private static Context getDefaultSecureContextGlobalKeystore(int port) throws IOException {
    System.setProperty("javax.net.ssl.keyStore", "src/test/resources/jettykeystore");
    System.setProperty("javax.net.ssl.keyStorePassword", "password");
    Context sslContext = new Context();
    sslContext.put(HTTPSourceConfigurationConstants.CONFIG_PORT, String.valueOf(port));
    sslContext.put(HTTPSourceConfigurationConstants.SSL_ENABLED, "true");
    return sslContext;
  }
  /**
   * Starts three independent source/channel pairs shared by all tests:
   * plain HTTP, HTTPS with an explicit keystore, and HTTPS relying on the
   * JVM-global keystore system properties.
   */
  @BeforeClass
  public static void setUpClass() throws Exception {
    // Plain HTTP source.
    httpSource = new HTTPSource();
    httpChannel = new MemoryChannel();
    httpPort = findFreePort();
    configureSourceAndChannel(httpSource, httpChannel, getDefaultNonSecureContext(httpPort));
    httpChannel.start();
    httpSource.start();
    // HTTPS source with an explicitly configured keystore.
    httpsSource = new HTTPSource();
    httpsChannel = new MemoryChannel();
    httpsPort = findFreePort();
    configureSourceAndChannel(httpsSource, httpsChannel, getDefaultSecureContext(httpsPort));
    httpsChannel.start();
    httpsSource.start();
    // HTTPS source that picks up the global javax.net.ssl.* properties
    // (set inside getDefaultSecureContextGlobalKeystore, cleared below).
    httpsGlobalKeystoreSource = new HTTPSource();
    httpsGlobalKeystoreChannel = new MemoryChannel();
    httpsGlobalKeystorePort = findFreePort();
    configureSourceAndChannel(httpsGlobalKeystoreSource, httpsGlobalKeystoreChannel,
        getDefaultSecureContextGlobalKeystore(httpsGlobalKeystorePort));
    httpsGlobalKeystoreChannel.start();
    httpsGlobalKeystoreSource.start();
    // Clear the global properties so they cannot leak into other tests.
    System.clearProperty("javax.net.ssl.keyStore");
    System.clearProperty("javax.net.ssl.keyStorePassword");
  }
private static void configureSourceAndChannel(
HTTPSource source, Channel channel, Context context
) {
Context channelContext = new Context();
channelContext.put("capacity", "100");
Configurables.configure(channel, channelContext);
Configurables.configure(source, context);
ChannelSelector rcs1 = new ReplicatingChannelSelector();
rcs1.setChannels(Collections.singletonList(channel));
source.setChannelProcessor(new ChannelProcessor(rcs1));
}
  /** Stops every source/channel pair started in {@link #setUpClass()}. */
  @AfterClass
  public static void tearDownClass() throws Exception {
    httpSource.stop();
    httpChannel.stop();
    httpsSource.stop();
    httpsChannel.stop();
    httpsGlobalKeystoreSource.stop();
    httpsGlobalKeystoreChannel.stop();
  }
  /** Creates a fresh HTTP client / POST request and starts the source counter. */
  @Before
  public void setUp() {
    HttpClientBuilder builder = HttpClientBuilder.create();
    httpClient = builder.build();
    postRequest = new HttpPost("http://0.0.0.0:" + httpPort);
    // Reach into the (shared) source to restart its counter for each test.
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    sc.start();
  }
  /** Stops the source counter started in {@link #setUp()}. */
  @After
  public void tearDown() {
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    sc.stop();
  }
  /**
   * Posts a JSON array of two events over plain HTTP and verifies both arrive
   * on the channel with their headers and UTF-8 bodies intact.
   */
  @Test
  public void testSimple() throws IOException, InterruptedException {
    StringEntity input = new StringEntity("[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]");
    //if we do not set the content type to JSON, the client will use
    //ISO-8859-1 as the charset. JSON standard does not support this.
    input.setContentType("application/json");
    postRequest.setEntity(input);
    HttpResponse response = httpClient.execute(postRequest);
    Assert.assertEquals(HttpServletResponse.SC_OK,
            response.getStatusLine().getStatusCode());
    // Both events must have been committed to the channel, in order.
    Transaction tx = httpChannel.getTransaction();
    tx.begin();
    Event e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-8"));
    e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-8"));
    tx.commit();
    tx.close();
  }
  // TRACE and OPTIONS requests must be rejected by the source.
  @Test
  public void testTrace() throws Exception {
    doTestForbidden(new HttpTrace("http://0.0.0.0:" + httpPort));
  }
  @Test
  public void testOptions() throws Exception {
    doTestForbidden(new HttpOptions("http://0.0.0.0:" + httpPort));
  }
  /** Executes {@code request} and asserts the source answers 403 Forbidden. */
  private void doTestForbidden(HttpRequestBase request) throws Exception {
    HttpResponse response = httpClient.execute(request);
    Assert.assertEquals(HttpServletResponse.SC_FORBIDDEN,
        response.getStatusLine().getStatusCode());
  }
  @Test
  public void testSimpleUTF16() throws IOException, InterruptedException {
    // Same round-trip as testSimple, but with the body encoded as UTF-16 and
    // the charset declared in the Content-Type header.
    StringEntity input = new StringEntity("[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]", "UTF-16");
    input.setContentType("application/json; charset=utf-16");
    postRequest.setEntity(input);
    HttpResponse response = httpClient.execute(postRequest);
    Assert.assertEquals(HttpServletResponse.SC_OK,
        response.getStatusLine().getStatusCode());
    Transaction tx = httpChannel.getTransaction();
    tx.begin();
    Event e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-16"));
    e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-16"));
    tx.commit();
    tx.close();
  }
  @Test
  public void testInvalid() throws Exception {
    // Malformed JSON must be answered with 400 Bad Request and must bump the
    // source's eventReadFail counter.
    StringEntity input = new StringEntity("[{\"a\": \"b\",[\"d\":\"e\"],\"body\": \"random_body\"},"
            + "{\"e\": \"f\",\"body\": \"random_body2\"}]");
    input.setContentType("application/json");
    postRequest.setEntity(input);
    HttpResponse response = httpClient.execute(postRequest);
    Assert.assertEquals(HttpServletResponse.SC_BAD_REQUEST,
        response.getStatusLine().getStatusCode());
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    Assert.assertEquals(1, sc.getEventReadFail());
  }
  // Round-trip a 50-event batch through the source in each Unicode encoding.
  @Test
  public void testBigBatchDeserializationUTF8() throws Exception {
    testBatchWithVariousEncoding("UTF-8");
  }
  @Test
  public void testBigBatchDeserializationUTF16() throws Exception {
    testBatchWithVariousEncoding("UTF-16");
  }
  @Test
  public void testBigBatchDeserializationUTF32() throws Exception {
    testBatchWithVariousEncoding("UTF-32");
  }
  @Test
  public void testCounterGenericFail() throws Exception {
    // Swap in a channel processor that always throws a RuntimeException and
    // verify the source records it as a generic processing failure.
    ChannelProcessor cp = Mockito.mock(ChannelProcessor.class);
    doThrow(new RuntimeException("dummy")).when(cp).processEventBatch(anyListOf(Event.class));
    ChannelProcessor oldCp = httpSource.getChannelProcessor();
    httpSource.setChannelProcessor(cp);
    testBatchWithVariousEncoding("UTF-8");
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    Assert.assertEquals(1, sc.getGenericProcessingFail());
    // Restore the real processor so later tests are unaffected.
    httpSource.setChannelProcessor(oldCp);
  }
  @Test
  public void testSingleEvent() throws Exception {
    // A single-element JSON array must yield exactly one event on the channel.
    StringEntity input = new StringEntity("[{\"headers\" : {\"a\": \"b\"},\"body\":"
            + " \"random_body\"}]");
    input.setContentType("application/json");
    postRequest.setEntity(input);
    httpClient.execute(postRequest);
    Transaction tx = httpChannel.getTransaction();
    tx.begin();
    Event e = httpChannel.take();
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(),"UTF-8"));
    tx.commit();
    tx.close();
  }
/**
* First test that the unconfigured behaviour is as-expected, then add configurations
* to a new channel and observe the difference.
* For some of the properties, the most convenient way to test is using the MBean interface
* We test all of HttpConfiguration, ServerConnector, QueuedThreadPool and SslContextFactory
* sub-configurations (but not all properties)
*/
@Test
public void testConfigurables() throws Exception {
StringEntity input = new StringEntity("[{\"headers\" : {\"a\": \"b\"},\"body\":"
+ " \"random_body\"}]");
input.setContentType("application/json");
postRequest.setEntity(input);
HttpResponse resp = httpClient.execute(postRequest);
// Testing default behaviour (to not provided X-Powered-By, but to provide Server headers)
Assert.assertTrue(resp.getHeaders("X-Powered-By").length == 0);
Assert.assertTrue(resp.getHeaders("Server").length == 1);
Transaction tx = httpChannel.getTransaction();
tx.begin();
Event e = httpChannel.take();
Assert.assertNotNull(e);
tx.commit();
tx.close();
Assert.assertTrue(findMBeans("org.eclipse.jetty.util.thread:type=queuedthreadpool,*",
"maxThreads", 123).size() == 0);
Assert.assertTrue(findMBeans("org.eclipse.jetty.server:type=serverconnector,*",
"acceptQueueSize", 22).size() == 0);
int newPort = findFreePort();
Context configuredSourceContext = getDefaultNonSecureContext(newPort);
configuredSourceContext.put("HttpConfiguration.sendServerVersion", "false");
configuredSourceContext.put("HttpConfiguration.sendXPoweredBy", "true");
configuredSourceContext.put("ServerConnector.acceptQueueSize", "22");
configuredSourceContext.put("QueuedThreadPool.maxThreads", "123");
HTTPSource newSource = new HTTPSource();
Channel newChannel = new MemoryChannel();
configureSourceAndChannel(newSource, newChannel, configuredSourceContext);
newChannel.start();
newSource.start();
HttpPost newPostRequest = new HttpPost("http://0.0.0.0:" + newPort);
resp = httpClient.execute(newPostRequest);
Assert.assertTrue(resp.getHeaders("X-Powered-By").length > 0);
Assert.assertTrue(resp.getHeaders("Server").length == 0);
Assert.assertTrue(findMBeans("org.eclipse.jetty.util.thread:type=queuedthreadpool,*",
"maxThreads", 123).size() == 1);
Assert.assertTrue(findMBeans("org.eclipse.jetty.server:type=serverconnector,*",
"acceptQueueSize", 22).size() == 1);
newSource.stop();
newChannel.stop();
//Configure SslContextFactory with junk protocols (expect failure)
newPort = findFreePort();
configuredSourceContext = getDefaultSecureContext(newPort);
configuredSourceContext.put("SslContextFactory.IncludeProtocols", "abc def");
newSource = new HTTPSource();
newChannel = new MemoryChannel();
configureSourceAndChannel(newSource, newChannel, configuredSourceContext);
newChannel.start();
newSource.start();
newPostRequest = new HttpPost("http://0.0.0.0:" + newPort);
try {
doTestHttps(null, newPort, httpsChannel);
//We are testing that this fails because we've deliberately configured the wrong protocols
Assert.assertTrue(false);
} catch (AssertionError ex) {
//no-op
}
newSource.stop();
newChannel.stop();
}
  @Test
  public void testFullChannel() throws Exception {
    // 150 events overflow the capacity-100 channel: the source must answer
    // 503 Service Unavailable and bump the channelWriteFail counter.
    HttpResponse response = putWithEncoding("UTF-8", 150).response;
    Assert.assertEquals(HttpServletResponse.SC_SERVICE_UNAVAILABLE,
        response.getStatusLine().getStatusCode());
    SourceCounter sc = (SourceCounter) Whitebox.getInternalState(httpSource, "sourceCounter");
    Assert.assertEquals(1, sc.getChannelWriteFail());
  }
  @Test
  public void testFail() throws Exception {
    // Null out the source's handler to force an internal error and expect 500.
    HTTPSourceHandler handler = field("handler").ofType(HTTPSourceHandler.class)
            .in(httpSource).get();
    //Cause an exception in the source - this is equivalent to any exception
    //thrown by the handler since the handler is called inside a try-catch
    field("handler").ofType(HTTPSourceHandler.class).in(httpSource).set(null);
    HttpResponse response = putWithEncoding("UTF-8", 1).response;
    Assert.assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
        response.getStatusLine().getStatusCode());
    //Set the original handler back so tests don't fail after this runs.
    field("handler").ofType(HTTPSourceHandler.class).in(httpSource).set(handler);
  }
  @Test
  public void testMBeans() throws Exception {
    // The embedded Jetty server should register at least one MBean.
    MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
    ObjectName objectName = new ObjectName("org.eclipse.jetty.*:*");
    Set<ObjectInstance> queryMBeans = mbeanServer.queryMBeans(objectName, null);
    Assert.assertTrue(queryMBeans.size() > 0);
  }
  @Test
  public void testHandlerThrowingException() throws Exception {
    //This will cause the handler to throw an
    //UnsupportedCharsetException.
    HttpResponse response = putWithEncoding("ISO-8859-1", 150).response;
    Assert.assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
        response.getStatusLine().getStatusCode());
  }
private Set<ObjectInstance> findMBeans(String name, String attribute, int value)
throws MalformedObjectNameException {
MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
ObjectName objectName = new ObjectName(name);
QueryExp q = Query.eq(Query.attr(attribute), Query.value(value));
return mbeanServer.queryMBeans(objectName, q);
}
  /**
   * Posts {@code n} generated JSON events to the HTTP source, declaring
   * {@code encoding} in the Content-Type header, and returns both the HTTP
   * response and the events that were sent.
   */
  private ResultWrapper putWithEncoding(String encoding, int n) throws Exception {
    Type listType = new TypeToken<List<JSONEvent>>() {}.getType();
    List<JSONEvent> events = new ArrayList<JSONEvent>();
    Random rand = new Random();
    // Each event gets 10 synthetic headers and a random-number body.
    for (int i = 0; i < n; i++) {
      Map<String, String> input = Maps.newHashMap();
      for (int j = 0; j < 10; j++) {
        input.put(String.valueOf(i) + String.valueOf(j), String.valueOf(i));
      }
      JSONEvent e = new JSONEvent();
      e.setHeaders(input);
      e.setBody(String.valueOf(rand.nextGaussian()).getBytes(encoding));
      events.add(e);
    }
    Gson gson = new Gson();
    String json = gson.toJson(events, listType);
    // NOTE(review): the entity is created without an explicit charset, so the
    // body bytes use HttpClient's default while the header declares
    // "charset=<encoding>". The generated JSON here is pure ASCII, which is
    // presumably why the mismatch is harmless — confirm before reusing this
    // helper with non-ASCII bodies.
    StringEntity input = new StringEntity(json);
    input.setContentType("application/json; charset=" + encoding);
    postRequest.setEntity(input);
    HttpResponse resp = httpClient.execute(postRequest);
    return new ResultWrapper(resp, events);
  }
  // HTTPS round-trips: default protocols succeed, SSLv3-only must fail the
  // handshake, and the global-keystore source behaves like the local one.
  @Test
  public void testHttps() throws Exception {
    doTestHttps(null, httpsPort, httpsChannel);
  }
  @Test (expected = javax.net.ssl.SSLHandshakeException.class)
  public void testHttpsSSLv3() throws Exception {
    doTestHttps("SSLv3", httpsPort, httpsChannel);
  }
  @Test
  public void testHttpsGlobalKeystore() throws Exception {
    doTestHttps(null, httpsGlobalKeystorePort, httpsGlobalKeystoreChannel);
  }
private void doTestHttps(String protocol, int port, Channel channel) throws Exception {
Type listType = new TypeToken<List<JSONEvent>>() {
}.getType();
List<JSONEvent> events = new ArrayList<JSONEvent>();
Random rand = new Random();
for (int i = 0; i < 10; i++) {
Map<String, String> input = Maps.newHashMap();
for (int j = 0; j < 10; j++) {
input.put(String.valueOf(i) + String.valueOf(j), String.valueOf(i));
}
input.put("MsgNum", String.valueOf(i));
JSONEvent e = new JSONEvent();
e.setHeaders(input);
e.setBody(String.valueOf(rand.nextGaussian()).getBytes("UTF-8"));
events.add(e);
}
Gson gson = new Gson();
String json = gson.toJson(events, listType);
HttpsURLConnection httpsURLConnection = null;
Transaction transaction = null;
try {
TrustManager[] trustAllCerts = {
new X509TrustManager() {
@Override
public void checkClientTrusted(java.security.cert.X509Certificate[] x509Certificates,
String s) throws CertificateException {
// noop
}
@Override
public void checkServerTrusted(java.security.cert.X509Certificate[] x509Certificates,
String s) throws CertificateException {
// noop
}
public java.security.cert.X509Certificate[] getAcceptedIssuers() {
return null;
}
}
};
SSLContext sc = null;
javax.net.ssl.SSLSocketFactory factory = null;
if (System.getProperty("java.vendor").contains("IBM")) {
sc = SSLContext.getInstance("SSL_TLS");
} else {
sc = SSLContext.getInstance("SSL");
}
HostnameVerifier hv = new HostnameVerifier() {
public boolean verify(String arg0, SSLSession arg1) {
return true;
}
};
sc.init(null, trustAllCerts, new SecureRandom());
if (protocol != null) {
factory = new DisabledProtocolsSocketFactory(sc.getSocketFactory(), protocol);
} else {
factory = sc.getSocketFactory();
}
HttpsURLConnection.setDefaultSSLSocketFactory(factory);
HttpsURLConnection.setDefaultHostnameVerifier(NoopHostnameVerifier.INSTANCE);
URL sslUrl = new URL("https://0.0.0.0:" + port);
httpsURLConnection = (HttpsURLConnection) sslUrl.openConnection();
httpsURLConnection.setDoInput(true);
httpsURLConnection.setDoOutput(true);
httpsURLConnection.setRequestMethod("POST");
httpsURLConnection.getOutputStream().write(json.getBytes());
int statusCode = httpsURLConnection.getResponseCode();
Assert.assertEquals(200, statusCode);
transaction = channel.getTransaction();
transaction.begin();
for (int i = 0; i < 10; i++) {
Event e = channel.take();
Assert.assertNotNull(e);
Assert.assertEquals(String.valueOf(i), e.getHeaders().get("MsgNum"));
}
} finally {
if (transaction != null) {
transaction.commit();
transaction.close();
}
httpsURLConnection.disconnect();
}
}
@Test
public void testHttpsSourceNonHttpsClient() throws Exception {
Type listType = new TypeToken<List<JSONEvent>>() {
}.getType();
List<JSONEvent> events = new ArrayList<JSONEvent>();
Random rand = new Random();
for (int i = 0; i < 10; i++) {
Map<String, String> input = Maps.newHashMap();
for (int j = 0; j < 10; j++) {
input.put(String.valueOf(i) + String.valueOf(j), String.valueOf(i));
}
input.put("MsgNum", String.valueOf(i));
JSONEvent e = new JSONEvent();
e.setHeaders(input);
e.setBody(String.valueOf(rand.nextGaussian()).getBytes("UTF-8"));
events.add(e);
}
Gson gson = new Gson();
String json = gson.toJson(events, listType);
HttpURLConnection httpURLConnection = null;
try {
URL url = new URL("http://0.0.0.0:" + httpsPort);
httpURLConnection = (HttpURLConnection) url.openConnection();
httpURLConnection.setDoInput(true);
httpURLConnection.setDoOutput(true);
httpURLConnection.setRequestMethod("POST");
httpURLConnection.getOutputStream().write(json.getBytes());
httpURLConnection.getResponseCode();
Assert.fail("HTTP Client cannot connect to HTTPS source");
} catch (Exception exception) {
Assert.assertTrue("Exception expected", true);
} finally {
httpURLConnection.disconnect();
}
}
  /**
   * Drains httpChannel and compares each taken event, in order, against the
   * corresponding entry of {@code events}.
   * NOTE(review): the final assertion checks the size of the supplied list
   * (always n as built by putWithEncoding), not the number of events actually
   * taken — an empty channel therefore still passes. testCounterGenericFail
   * appears to rely on this behaviour, so do not "fix" it casually.
   */
  private void takeWithEncoding(String encoding, int n, List<JSONEvent> events) throws Exception {
    Transaction tx = httpChannel.getTransaction();
    tx.begin();
    Event e = null;
    int i = 0;
    while (true) {
      e = httpChannel.take();
      if (e == null) {
        break;
      }
      Event current = events.get(i++);
      Assert.assertEquals(new String(current.getBody(), encoding),
          new String(e.getBody(), encoding));
      Assert.assertEquals(current.getHeaders(), e.getHeaders());
    }
    Assert.assertEquals(n, events.size());
    tx.commit();
    tx.close();
  }
  /** Round-trips a default-sized (50 event) batch in the given encoding. */
  private void testBatchWithVariousEncoding(String encoding) throws Exception {
    testBatchWithVariousEncoding(encoding, 50);
  }
  /** Posts {@code n} events in {@code encoding}, then drains and verifies them. */
  private void testBatchWithVariousEncoding(String encoding, int n)
      throws Exception {
    List<JSONEvent> events = putWithEncoding(encoding, n).events;
    takeWithEncoding(encoding, n, events);
  }
  /** Immutable pair of the HTTP response and the events that were posted. */
  private class ResultWrapper {
    public final HttpResponse response;
    public final List<JSONEvent> events;
    public ResultWrapper(HttpResponse resp, List<JSONEvent> events) {
      this.response = resp;
      this.events = events;
    }
  }
private class DisabledProtocolsSocketFactory extends javax.net.ssl.SSLSocketFactory {
private final javax.net.ssl.SSLSocketFactory socketFactory;
private final String[] protocols;
DisabledProtocolsSocketFactory(javax.net.ssl.SSLSocketFactory factory, String protocol) {
this.socketFactory = factory;
protocols = new String[1];
protocols[0] = protocol;
}
@Override
public String[] getDefaultCipherSuites() {
return socketFactory.getDefaultCipherSuites();
}
@Override
public String[] getSupportedCipherSuites() {
return socketFactory.getSupportedCipherSuites();
}
@Override
public Socket createSocket(Socket socket, String s, int i, boolean b) throws IOException {
SSLSocket sc = (SSLSocket) socketFactory.createSocket(socket, s, i, b);
sc.setEnabledProtocols(protocols);
return sc;
}
@Override
public Socket createSocket(String s, int i) throws IOException, UnknownHostException {
SSLSocket sc = (SSLSocket) socketFactory.createSocket(s, i);
sc.setEnabledProtocols(protocols);
return sc;
}
@Override
public Socket createSocket(String s, int i, InetAddress inetAddress, int i2)
throws IOException, UnknownHostException {
SSLSocket sc = (SSLSocket) socketFactory.createSocket(s, i, inetAddress, i2);
sc.setEnabledProtocols(protocols);
return sc;
}
@Override
public Socket createSocket(InetAddress inetAddress, int i) throws IOException {
SSLSocket sc = (SSLSocket) socketFactory.createSocket(inetAddress, i);
sc.setEnabledProtocols(protocols);
return sc;
}
@Override
public Socket createSocket(InetAddress inetAddress, int i,
InetAddress inetAddress2, int i2) throws IOException {
SSLSocket sc = (SSLSocket) socketFactory.createSocket(inetAddress, i,
inetAddress2, i2);
sc.setEnabledProtocols(protocols);
return sc;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.source.http;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Locale;
import java.util.Map;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpUpgradeHandler;
import javax.servlet.http.Part;
/**
 * Minimal {@link HttpServletRequest} stub for handler unit tests. Only the
 * pieces a handler reads are implemented: the request body (exposed through
 * {@link #getReader()}) and the character encoding. Every other method throws
 * {@link UnsupportedOperationException}.
 */
class FlumeHttpServletRequestWrapper implements HttpServletRequest {
  // Reader over the request body supplied at construction time.
  private BufferedReader reader;
  // Charset reported by getCharacterEncoding(); mutable via setCharacterEncoding().
  String charset;
  /** Wraps {@code data} as the request body using the given charset. */
  public FlumeHttpServletRequestWrapper(String data, String charset)
      throws UnsupportedEncodingException {
    reader = new BufferedReader(new InputStreamReader(
        new ByteArrayInputStream(data.getBytes(charset)), charset));
    this.charset = charset;
  }
  /** Wraps {@code data} as a UTF-8 encoded request body. */
  public FlumeHttpServletRequestWrapper(String data) throws UnsupportedEncodingException {
    this(data, "UTF-8");
  }
  @Override
  public String getAuthType() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Cookie[] getCookies() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public long getDateHeader(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getHeader(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Enumeration<String> getHeaders(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Enumeration<String> getHeaderNames() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public int getIntHeader(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getMethod() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getPathInfo() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getPathTranslated() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getContextPath() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getQueryString() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getRemoteUser() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isUserInRole(String role) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Principal getUserPrincipal() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getRequestedSessionId() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getRequestURI() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public StringBuffer getRequestURL() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getServletPath() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public HttpSession getSession(boolean create) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public HttpSession getSession() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isRequestedSessionIdValid() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isRequestedSessionIdFromCookie() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isRequestedSessionIdFromURL() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isRequestedSessionIdFromUrl() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Object getAttribute(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Enumeration<String> getAttributeNames() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  // Supported: reports the charset supplied at construction (or overridden).
  @Override
  public String getCharacterEncoding() {
    return charset;
  }
  // Supported: overrides the reported charset (does not re-encode the body).
  @Override
  public void setCharacterEncoding(String env) throws UnsupportedEncodingException {
    this.charset = env;
  }
  @Override
  public int getContentLength() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getContentType() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public ServletInputStream getInputStream() throws IOException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getParameter(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Enumeration<String> getParameterNames() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String[] getParameterValues(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Map<String,String[]> getParameterMap() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getProtocol() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getScheme() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getServerName() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public int getServerPort() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  // Supported: returns the reader over the body supplied at construction.
  @Override
  public BufferedReader getReader() throws IOException {
    return reader;
  }
  @Override
  public String getRemoteAddr() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getRemoteHost() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public void setAttribute(String name, Object o) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public void removeAttribute(String name) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Locale getLocale() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Enumeration<Locale> getLocales() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isSecure() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public RequestDispatcher getRequestDispatcher(String path) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getRealPath(String path) {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public int getRemotePort() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getLocalName() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String getLocalAddr() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public int getLocalPort() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public AsyncContext getAsyncContext() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public long getContentLengthLong() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public DispatcherType getDispatcherType() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public ServletContext getServletContext() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isAsyncStarted() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean isAsyncSupported() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public AsyncContext startAsync() throws IllegalStateException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public AsyncContext startAsync(ServletRequest arg0, ServletResponse arg1)
      throws IllegalStateException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public boolean authenticate(HttpServletResponse arg0)
      throws IOException, ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public String changeSessionId() {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Part getPart(String arg0) throws IOException, ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public Collection<Part> getParts() throws IOException, ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public void login(String arg0, String arg1) throws ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public void logout() throws ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
  @Override
  public <T extends HttpUpgradeHandler> T upgrade(Class<T> arg0)
      throws IOException, ServletException {
    throw new UnsupportedOperationException("Not supported yet.");
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.source.http;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.nio.charset.UnsupportedCharsetException;
import java.util.List;
import java.util.Map;
import java.util.Random;
import javax.servlet.http.HttpServletRequest;
import junit.framework.Assert;
import org.apache.flume.Event;
import org.apache.flume.event.JSONEvent;
import org.junit.Before;
import org.junit.Test;
/**
*
*/
public class TestJSONHandler {
HTTPSourceHandler handler;
  @Before
  public void setUp() {
    // A fresh JSON handler for every test.
    handler = new JSONHandler();
  }
  // A two-event JSON array with the wrapper's default (UTF-8) encoding.
  @Test
  public void testMultipleEvents() throws Exception {
    String json = "[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(),"UTF-8"));
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(),"UTF-8"));
  }
  // Same two-event batch with an explicit UTF-16 request encoding.
  @Test
  public void testMultipleEventsUTF16() throws Exception {
    String json = "[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json, "UTF-16");
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-16"));
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-16"));
  }
  // Same two-event batch with an explicit UTF-32 request encoding.
  @Test
  public void testMultipleEventsUTF32() throws Exception {
    String json = "[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json, "UTF-32");
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-32"));
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-32"));
  }
  // Same two-event batch with an explicitly declared UTF-8 request encoding.
  @Test
  public void testMultipleEventsUTF8() throws Exception {
    String json = "[{\"headers\":{\"a\": \"b\"},\"body\": \"random_body\"},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"random_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json, "UTF-8");
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(), "UTF-8"));
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(), "UTF-8"));
  }
  @Test
  public void testEscapedJSON() throws Exception {
    //JSON allows escaping double quotes to add it in the data.
    String json = "[{\"headers\":{\"a\": \"b\"}},"
            + "{\"headers\":{\"e\": \"f\"},\"body\": \"rand\\\"om_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    // First event has no body at all: handler must produce an empty body.
    Assert.assertTrue(e.getBody().length == 0);
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    // The escaped quote must be unescaped in the deserialized body.
    Assert.assertEquals("rand\"om_body2", new String(e.getBody(),"UTF-8"));
  }
  @Test
  public void testNoBody() throws Exception {
    // An event may omit "body" entirely; the handler must substitute an empty
    // byte array rather than null or an error.
    String json = "[{\"headers\" : {\"a\": \"b\"}},"
        + "{\"headers\" : {\"e\": \"f\"},\"body\": \"random_body2\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertTrue(e.getBody().length == 0);
    e = deserialized.get(1);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("random_body2", new String(e.getBody(),"UTF-8"));
  }
  @Test
  public void testSingleHTMLEvent() throws Exception {
    // HTML markup inside a JSON body must pass through verbatim — no escaping
    // or stripping of angle brackets by the handler.
    String json = "[{\"headers\": {\"a\": \"b\"},"
        + "\"body\": \"<html><body>test</body></html>\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("<html><body>test</body></html>",
        new String(e.getBody(),"UTF-8"));
  }
  @Test
  public void testSingleEvent() throws Exception {
    // Minimal happy path: one event with one header and a body.
    String json = "[{\"headers\" : {\"a\": \"b\"},\"body\": \"random_body\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("random_body", new String(e.getBody(),"UTF-8"));
  }
@Test(expected = HTTPBadRequestException.class)
public void testBadEvent() throws Exception {
String json = "{[\"a\": \"b\"],\"body\": \"random_body\"}";
HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
handler.getEvents(req);
Assert.fail();
}
@Test(expected = UnsupportedCharsetException.class)
public void testError() throws Exception {
String json = "[{\"headers\" : {\"a\": \"b\"},\"body\": \"random_body\"}]";
HttpServletRequest req = new FlumeHttpServletRequestWrapper(json, "ISO-8859-1");
handler.getEvents(req);
Assert.fail();
}
@Test
public void testSingleEventInArray() throws Exception {
String json = "[{\"headers\": {\"a\": \"b\"},\"body\": \"random_body\"}]";
HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
List<Event> deserialized = handler.getEvents(req);
Event e = deserialized.get(0);
Assert.assertEquals("b", e.getHeaders().get("a"));
Assert.assertEquals("random_body", new String(e.getBody(),"UTF-8"));
}
  @Test
  public void testMultipleLargeEvents() throws Exception {
    // Three events with several headers each: verifies the handler preserves
    // every header key/value pair per event, not just the first.
    String json = "[{\"headers\" : {\"a\": \"b\", \"a2\": \"b2\","
        + "\"a3\": \"b3\",\"a4\": \"b4\"},\"body\": \"random_body\"},"
        + "{\"headers\" :{\"e\": \"f\",\"e2\": \"f2\","
        + "\"e3\": \"f3\",\"e4\": \"f4\",\"e5\": \"f5\"},"
        + "\"body\": \"random_body2\"},"
        + "{\"headers\" :{\"q1\": \"b\",\"q2\": \"b2\",\"q3\": \"b3\",\"q4\": \"b4\"},"
        + "\"body\": \"random_bodyq\"}]";
    HttpServletRequest req = new FlumeHttpServletRequestWrapper(json);
    List<Event> deserialized = handler.getEvents(req);
    Event e = deserialized.get(0);
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("a"));
    Assert.assertEquals("b2", e.getHeaders().get("a2"));
    Assert.assertEquals("b3", e.getHeaders().get("a3"));
    Assert.assertEquals("b4", e.getHeaders().get("a4"));
    Assert.assertEquals("random_body", new String(e.getBody(),"UTF-8"));
    e = deserialized.get(1);
    Assert.assertNotNull(e);
    Assert.assertEquals("f", e.getHeaders().get("e"));
    Assert.assertEquals("f2", e.getHeaders().get("e2"));
    Assert.assertEquals("f3", e.getHeaders().get("e3"));
    Assert.assertEquals("f4", e.getHeaders().get("e4"));
    Assert.assertEquals("f5", e.getHeaders().get("e5"));
    Assert.assertEquals("random_body2", new String(e.getBody(),"UTF-8"));
    e = deserialized.get(2);
    Assert.assertNotNull(e);
    Assert.assertEquals("b", e.getHeaders().get("q1"));
    Assert.assertEquals("b2", e.getHeaders().get("q2"));
    Assert.assertEquals("b3", e.getHeaders().get("q3"));
    Assert.assertEquals("b4", e.getHeaders().get("q4"));
    Assert.assertEquals("random_bodyq", new String(e.getBody(),"UTF-8"));
  }
  // NOTE(review): method name contains a typo ("Deserializarion"); kept as-is
  // to avoid churning test history/reports.
  @Test
  public void testDeserializarion() throws Exception {
    // Round trip: build randomized JSONEvents, serialize them with Gson, feed
    // the JSON back through the handler, and verify bodies and header maps
    // come back identical and in the same order.
    Type listType = new TypeToken<List<JSONEvent>>() {
    }.getType();
    List<JSONEvent> events = Lists.newArrayList();
    Random rand = new Random();
    for (int i = 1; i < 10; i++) {
      Map<String, String> input = Maps.newHashMap();
      for (int j = 1; j < 10; j++) {
        input.put(String.valueOf(i) + String.valueOf(j), String.valueOf(i));
      }
      JSONEvent e = new JSONEvent();
      e.setBody(String.valueOf(rand.nextGaussian()).getBytes("UTF-8"));
      e.setHeaders(input);
      events.add(e);
    }
    Gson gson = new Gson();
    List<Event> deserialized = handler.getEvents(
        new FlumeHttpServletRequestWrapper(gson.toJson(events, listType)));
    int i = 0;
    for (Event e : deserialized) {
      Event current = events.get(i++);
      Assert.assertEquals(new String(current.getBody(),"UTF-8"),
          new String(e.getBody(),"UTF-8"));
      Assert.assertEquals(current.getHeaders(), e.getHeaders());
    }
  }
}
| 9,919 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/lifecycle/TestLifecycleSupervisor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.lifecycle;
import org.apache.flume.CounterGroup;
import org.apache.flume.lifecycle.LifecycleSupervisor.SupervisorPolicy;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link LifecycleSupervisor}. These tests are timing-sensitive:
 * the Thread.sleep() intervals appear tuned to the supervisor's internal
 * monitoring period (presumably ~3 seconds, given the 3200ms waits below —
 * TODO confirm against LifecycleSupervisor), so the code is documented here
 * without restructuring.
 */
public class TestLifecycleSupervisor {
  // Fresh supervisor instance per test, created in setUp().
  private LifecycleSupervisor supervisor;
  @Before
  public void setUp() {
    supervisor = new LifecycleSupervisor();
  }
  // Bare start/stop cycle must not throw.
  @Test
  public void testLifecycle() throws LifecycleException, InterruptedException {
    supervisor.start();
    supervisor.stop();
  }
  // Supervising two nodes in sequence; sleeps give the supervisor time to
  // drive each node to its desired START state before shutdown.
  @Test
  public void testSupervise() throws LifecycleException, InterruptedException {
    supervisor.start();
    /* Attempt to supervise a known-to-fail config. */
    /*
     * LogicalNode node = new LogicalNode(); SupervisorPolicy policy = new
     * SupervisorPolicy.OnceOnlyPolicy(); supervisor.supervise(node, policy,
     * LifecycleState.START);
     */
    CountingLifecycleAware node = new CountingLifecycleAware();
    SupervisorPolicy policy = new SupervisorPolicy.OnceOnlyPolicy();
    supervisor.supervise(node, policy, LifecycleState.START);
    Thread.sleep(10000);
    node = new CountingLifecycleAware();
    policy = new SupervisorPolicy.OnceOnlyPolicy();
    supervisor.supervise(node, policy, LifecycleState.START);
    Thread.sleep(5000);
    supervisor.stop();
  }
  // A node whose start() always throws must not take the supervisor down;
  // the test passes if supervise/stop complete without propagating the NPE.
  @Test
  public void testSuperviseBroken() throws LifecycleException,
      InterruptedException {
    supervisor.start();
    /* Attempt to supervise a known-to-fail config. */
    LifecycleAware node = new LifecycleAware() {
      @Override
      public void stop() {
      }
      @Override
      public void start() {
        throw new NullPointerException("Boom!");
      }
      @Override
      public LifecycleState getLifecycleState() {
        return LifecycleState.IDLE;
      }
    };
    SupervisorPolicy policy = new SupervisorPolicy.OnceOnlyPolicy();
    supervisor.supervise(node, policy, LifecycleState.START);
    Thread.sleep(5000);
    supervisor.stop();
  }
  // A supervisor can itself be supervised: the outer supervisor manages the
  // inner one, which in turn manages a counting node.
  @Test
  public void testSuperviseSupervisor() throws LifecycleException,
      InterruptedException {
    supervisor.start();
    LifecycleSupervisor supervisor2 = new LifecycleSupervisor();
    CountingLifecycleAware node = new CountingLifecycleAware();
    SupervisorPolicy policy = new SupervisorPolicy.OnceOnlyPolicy();
    supervisor2.supervise(node, policy, LifecycleState.START);
    supervisor.supervise(supervisor2,
        new SupervisorPolicy.AlwaysRestartPolicy(), LifecycleState.START);
    Thread.sleep(10000);
    supervisor.stop();
  }
  // After unsupervise(), the service can be stopped directly without the
  // supervisor interfering or restarting it.
  @Test
  public void testUnsuperviseServce() throws LifecycleException,
      InterruptedException {
    supervisor.start();
    LifecycleAware service = new CountingLifecycleAware();
    SupervisorPolicy policy = new SupervisorPolicy.OnceOnlyPolicy();
    supervisor.supervise(service, policy, LifecycleState.START);
    supervisor.unsupervise(service);
    service.stop();
    supervisor.stop();
  }
  // setDesiredState(STOP) must cause exactly one stop() call after exactly
  // one start() call; the counters make the lifecycle transitions observable.
  @Test
  public void testStopServce() throws LifecycleException, InterruptedException {
    supervisor.start();
    CountingLifecycleAware service = new CountingLifecycleAware();
    SupervisorPolicy policy = new SupervisorPolicy.OnceOnlyPolicy();
    Assert.assertEquals(Long.valueOf(0), service.counterGroup.get("start"));
    Assert.assertEquals(Long.valueOf(0), service.counterGroup.get("stop"));
    supervisor.supervise(service, policy, LifecycleState.START);
    Thread.sleep(3200);
    Assert.assertEquals(Long.valueOf(1), service.counterGroup.get("start"));
    Assert.assertEquals(Long.valueOf(0), service.counterGroup.get("stop"));
    supervisor.setDesiredState(service, LifecycleState.STOP);
    Thread.sleep(3200);
    Assert.assertEquals(Long.valueOf(1), service.counterGroup.get("start"));
    Assert.assertEquals(Long.valueOf(1), service.counterGroup.get("stop"));
    supervisor.stop();
  }
  /** Test double that counts start()/stop() invocations via a CounterGroup. */
  public static class CountingLifecycleAware implements LifecycleAware {
    // Public so tests can read the "start"/"stop" counters directly.
    public CounterGroup counterGroup;
    private LifecycleState lifecycleState;
    public CountingLifecycleAware() {
      lifecycleState = LifecycleState.IDLE;
      counterGroup = new CounterGroup();
    }
    @Override
    public void start() {
      counterGroup.incrementAndGet("start");
      lifecycleState = LifecycleState.START;
    }
    @Override
    public void stop() {
      counterGroup.incrementAndGet("stop");
      lifecycleState = LifecycleState.STOP;
    }
    @Override
    public LifecycleState getLifecycleState() {
      return lifecycleState;
    }
  }
}
| 9,920 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/lifecycle/TestLifecycleController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.lifecycle;
import junit.framework.Assert;
import org.junit.Test;
/**
 * Tests for {@link LifecycleController}'s state-waiting helpers, using a
 * delegate that transitions state after an optional nap.
 */
public class TestLifecycleController {

  /**
   * waitForState() must return true once the delegate reaches the requested
   * state within the timeout, and false when the timeout elapses first.
   */
  @Test
  public void testWaitForState() throws LifecycleException,
      InterruptedException {
    LifecycleAware sleeper = new SleeperLifecycleDelegate();
    Assert.assertTrue(sleeper.getLifecycleState().equals(LifecycleState.IDLE));

    sleeper.start();
    boolean observed =
        LifecycleController.waitForState(sleeper, LifecycleState.START, 2000);
    Assert.assertEquals(true, observed);
    Assert.assertEquals(LifecycleState.START, sleeper.getLifecycleState());

    sleeper.stop();
    observed =
        LifecycleController.waitForState(sleeper, LifecycleState.STOP, 2000);
    Assert.assertEquals(true, observed);
    Assert.assertEquals(LifecycleState.STOP, sleeper.getLifecycleState());

    // IDLE is never re-entered after start(), so this wait must time out.
    sleeper.start();
    observed =
        LifecycleController.waitForState(sleeper, LifecycleState.IDLE, 500);
    Assert.assertEquals(false, observed);
    Assert.assertEquals(LifecycleState.START, sleeper.getLifecycleState());
  }

  /**
   * waitForOneOf() must succeed when the delegate lands on any one of the
   * candidate states.
   */
  @Test
  public void testWaitForOneOf() throws LifecycleException,
      InterruptedException {
    LifecycleAware sleeper = new SleeperLifecycleDelegate();
    Assert.assertEquals(LifecycleState.IDLE, sleeper.getLifecycleState());
    sleeper.start();
    boolean observed = LifecycleController.waitForOneOf(sleeper,
        new LifecycleState[] { LifecycleState.STOP, LifecycleState.START },
        2000);
    Assert.assertTrue("Matched a state change", observed);
    Assert.assertEquals(LifecycleState.START, sleeper.getLifecycleState());
  }

  /**
   * Delegate that sleeps for a configurable interval before committing each
   * state transition, letting tests exercise timeout behavior.
   */
  public static class SleeperLifecycleDelegate implements LifecycleAware {

    private long napMillis;
    private LifecycleState lifecycleState;

    public SleeperLifecycleDelegate() {
      napMillis = 0;
      lifecycleState = LifecycleState.IDLE;
    }

    @Override
    public void start() {
      napQuietly();
      lifecycleState = LifecycleState.START;
    }

    @Override
    public void stop() {
      napQuietly();
      lifecycleState = LifecycleState.STOP;
    }

    @Override
    public LifecycleState getLifecycleState() {
      return lifecycleState;
    }

    public long getSleepTime() {
      return napMillis;
    }

    public void setSleepTime(long sleepTime) {
      this.napMillis = sleepTime;
    }

    // Sleeps for the configured interval; interrupts are deliberately ignored
    // so the delegate simply proceeds to its next state.
    private void napQuietly() {
      try {
        Thread.sleep(napMillis);
      } catch (InterruptedException ignored) {
        // Do nothing, matching the original inline handlers.
      }
    }
  }
}
| 9,921 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/TestThriftSink.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import com.google.common.base.Charsets;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Transaction;
import org.apache.flume.api.ThriftRpcClient;
import org.apache.flume.api.ThriftTestingSource;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.Ignore;
import javax.net.ssl.KeyManagerFactory;
import java.net.ServerSocket;
import java.nio.charset.Charset;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Tests for {@link ThriftSink} against an in-process {@link ThriftTestingSource}.
 *
 * <p>Fix: {@code tearDown()} previously called {@code src.stop()}
 * unconditionally, but {@code src} is only assigned inside individual tests;
 * any failure before that assignment was masked by an NPE from tearDown.
 * It is now null-guarded. Also removed an unused local in
 * {@link #doRequestWhenFailureExpected()}.
 */
public class TestThriftSink {
  private ThriftTestingSource src;  // created per-test; may be null in tearDown
  private ThriftSink sink;
  private MemoryChannel channel;
  private String hostname;
  private int port;

  @Before
  public void setUp() throws Exception {
    sink = new ThriftSink();
    channel = new MemoryChannel();
    hostname = "0.0.0.0";
    // Grab a free ephemeral port by binding and immediately closing a socket.
    try (ServerSocket socket = new ServerSocket(0)) {
      port = socket.getLocalPort();
    }
    Context context = createBaseContext();
    context.put(ThriftRpcClient.CONFIG_PROTOCOL, ThriftRpcClient.COMPACT_PROTOCOL);
    sink.setChannel(channel);
    Configurables.configure(sink, context);
    Configurables.configure(channel, context);
  }

  /** Common sink/channel configuration shared by all tests. */
  private Context createBaseContext() {
    Context context = new Context();
    context.put("hostname", hostname);
    context.put("port", String.valueOf(port));
    context.put("batch-size", String.valueOf(2));
    context.put("connect-timeout", String.valueOf(2000L));
    context.put("request-timeout", String.valueOf(2000L));
    return context;
  }

  @After
  public void tearDown() throws Exception {
    channel.stop();
    sink.stop();
    // src is only created inside individual tests; guard against NPE so a
    // test failure before its creation is not masked by a tearDown exception.
    if (src != null) {
      src.stop();
    }
  }

  /**
   * 11 events with batch-size 2 should drain in 6 READY process() calls
   * (5 full batches + 1 partial), then report BACKOFF on an empty channel.
   */
  @Test
  public void testProcess() throws Exception {
    Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(),
        port, ThriftRpcClient.COMPACT_PROTOCOL);
    channel.start();
    sink.start();
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (int i = 0; i < 11; i++) {
      channel.put(event);
    }
    transaction.commit();
    transaction.close();
    for (int i = 0; i < 6; i++) {
      Sink.Status status = sink.process();
      Assert.assertEquals(Sink.Status.READY, status);
    }
    Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
    sink.stop();
    Assert.assertEquals(11, src.flumeEvents.size());
    Assert.assertEquals(6, src.batchCount);
    Assert.assertEquals(0, src.individualCount);
  }

  /**
   * Drives the source's artificial delay above and below the sink's 2000ms
   * connect/request timeouts and verifies EventDeliveryException in both
   * timeout cases.
   */
  @Test
  public void testTimeout() throws Exception {
    AtomicLong delay = new AtomicLong();
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.ALTERNATE
        .name(), port, ThriftRpcClient.COMPACT_PROTOCOL);
    src.setDelay(delay);
    delay.set(2500);
    Event event = EventBuilder.withBody("foo", Charsets.UTF_8);
    sink.start();
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int i = 0; i < 4; i++) {
      channel.put(event);
    }
    txn.commit();
    txn.close();
    // should throw EventDeliveryException due to connect timeout
    boolean threw = false;
    try {
      sink.process();
    } catch (EventDeliveryException ex) {
      threw = true;
    }
    Assert.assertTrue("Must throw due to connect timeout", threw);
    // now, allow the connect handshake to occur
    delay.set(0);
    sink.process();
    // should throw another EventDeliveryException due to request timeout
    delay.set(2500L); // because request-timeout = 2000
    threw = false;
    try {
      sink.process();
    } catch (EventDeliveryException ex) {
      threw = true;
    }
    Assert.assertTrue("Must throw due to request timeout", threw);
    sink.stop();
  }

  /**
   * process() must fail while no server is listening, then succeed once the
   * testing source comes up on the same port.
   */
  @Test
  public void testFailedConnect() throws Exception {
    Event event = EventBuilder.withBody("test event 1", Charset.forName("UTF8"));
    sink.start();
    Thread.sleep(500L); // let socket startup
    Thread.sleep(500L); // sleep a little to allow close occur
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      channel.put(event);
    }
    transaction.commit();
    transaction.close();
    for (int i = 0; i < 5; i++) {
      boolean threwException = false;
      try {
        sink.process();
      } catch (EventDeliveryException e) {
        threwException = true;
      }
      Assert.assertTrue("Must throw EventDeliveryException if disconnected",
          threwException);
    }
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(),
        port, ThriftRpcClient.COMPACT_PROTOCOL);
    for (int i = 0; i < 5; i++) {
      Sink.Status status = sink.process();
      Assert.assertEquals(Sink.Status.READY, status);
    }
    Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
    sink.stop();
  }

  @Ignore("This test is flakey and causes tests to fail pretty often.")
  @Test
  public void testSslProcessWithComponentTruststore() throws Exception {
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    context.put("truststore", "src/test/resources/truststorefile.jks");
    context.put("truststore-password", "password");
    Configurables.configure(sink, context);

    doTestSslProcess();
  }

  @Ignore("This test is flakey and causes tests to fail pretty often.")
  @Test
  public void testSslProcessWithComponentTruststoreNoPassword() throws Exception {
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    context.put("truststore", "src/test/resources/truststorefile.jks");
    Configurables.configure(sink, context);

    doTestSslProcess();
  }

  @Ignore("This test is flakey and causes tests to fail pretty often.")
  @Test
  public void testSslProcessWithGlobalTruststore() throws Exception {
    System.setProperty("javax.net.ssl.trustStore", "src/test/resources/truststorefile.jks");
    System.setProperty("javax.net.ssl.trustStorePassword", "password");

    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    Configurables.configure(sink, context);

    doTestSslProcess();

    System.clearProperty("javax.net.ssl.trustStore");
    System.clearProperty("javax.net.ssl.trustStorePassword");
  }

  @Ignore("This test is flakey and causes tests to fail pretty often.")
  @Test
  public void testSslProcessWithGlobalTruststoreNoPassword() throws Exception {
    System.setProperty("javax.net.ssl.trustStore", "src/test/resources/truststorefile.jks");

    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    Configurables.configure(sink, context);

    doTestSslProcess();

    System.clearProperty("javax.net.ssl.trustStore");
  }

  /** Shared body of the SSL happy-path tests: same expectations as testProcess(). */
  private void doTestSslProcess() throws Exception {
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(), port,
        ThriftRpcClient.COMPACT_PROTOCOL, "src/test/resources/keystorefile.jks",
        "password", KeyManagerFactory.getDefaultAlgorithm(), "JKS");
    channel.start();
    sink.start();
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
    for (int i = 0; i < 11; i++) {
      channel.put(event);
    }
    transaction.commit();
    transaction.close();
    for (int i = 0; i < 6; i++) {
      Sink.Status status = sink.process();
      Assert.assertEquals(Sink.Status.READY, status);
    }
    Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
    sink.stop();
    Assert.assertEquals(11, src.flumeEvents.size());
    Assert.assertEquals(6, src.batchCount);
    Assert.assertEquals(0, src.individualCount);
  }

  /** An SSL-enabled sink must refuse to talk to a plain (non-SSL) server. */
  @Test
  public void testSslSinkWithNonSslServer() throws Exception {
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(),
        port, ThriftRpcClient.COMPACT_PROTOCOL);
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    context.put("truststore", "src/test/resources/truststorefile.jks");
    context.put("truststore-password", "password");
    Configurables.configure(sink, context);

    boolean failed = doRequestWhenFailureExpected();
    if (!failed) {
      Assert.fail("SSL-enabled sink successfully connected to a non-SSL-enabled server, " +
          "that's wrong.");
    }
  }

  /** An SSL-enabled sink must reject a server presenting an untrusted cert. */
  @Test
  public void testSslSinkWithNonTrustedCert() throws Exception {
    src = new ThriftTestingSource(ThriftTestingSource.HandlerType.OK.name(), port,
        ThriftRpcClient.COMPACT_PROTOCOL, "src/test/resources/keystorefile.jks",
        "password", KeyManagerFactory.getDefaultAlgorithm(), "JKS");
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    Configurables.configure(sink, context);

    boolean failed = doRequestWhenFailureExpected();
    if (!failed) {
      Assert.fail("SSL-enabled sink successfully connected to a server with an " +
          "untrusted certificate when it should have failed");
    }
  }

  /**
   * Runs one event through the sink and reports whether delivery failed.
   *
   * @return true if process() threw EventDeliveryException (expected by the
   *         negative SSL tests), false if delivery unexpectedly succeeded
   */
  private boolean doRequestWhenFailureExpected() throws Exception {
    channel.start();
    sink.start();
    Assert.assertTrue(LifecycleController.waitForOneOf(sink,
        LifecycleState.START_OR_ERROR, 5000));

    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
    channel.put(event);
    transaction.commit();
    transaction.close();

    boolean failed;
    try {
      sink.process();
      failed = false;
    } catch (EventDeliveryException ex) {
      // This is correct
      failed = true;
    }

    sink.stop();
    Assert.assertTrue(LifecycleController.waitForOneOf(sink,
        LifecycleState.STOP_OR_ERROR, 5000));
    return failed;
  }
}
| 9,922 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/TestLoggerSink.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import com.google.common.base.Strings;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.channel.PseudoTxnMemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.lifecycle.LifecycleException;
import org.junit.Before;
import org.junit.Test;
/**
 * Smoke tests for {@link LoggerSink}: the sink only logs, so the tests simply
 * verify that processing events completes without throwing.
 */
public class TestLoggerSink {

  private LoggerSink sink;

  @Before
  public void setUp() {
    sink = new LoggerSink();
  }

  /**
   * Lack of exception test: ten small events pushed through a
   * pseudo-transactional channel must process cleanly.
   */
  @Test
  public void testAppend() throws InterruptedException, LifecycleException,
      EventDeliveryException {
    Channel channel = new PseudoTxnMemoryChannel();
    Context context = new Context();
    Configurables.configure(channel, context);
    Configurables.configure(sink, context);
    sink.setChannel(channel);
    sink.start();
    int i = 0;
    while (i < 10) {
      channel.put(EventBuilder.withBody(("Test " + i).getBytes()));
      sink.process();
      i++;
    }
    sink.stop();
  }

  /**
   * Same smoke test with a custom dump-size limit; bodies are padded to 30
   * bytes so the maxBytesToLog setting is actually exercised.
   */
  @Test
  public void testAppendWithCustomSize() throws InterruptedException, LifecycleException,
      EventDeliveryException {
    Channel channel = new PseudoTxnMemoryChannel();
    Context context = new Context();
    context.put(LoggerSink.MAX_BYTES_DUMP_KEY, String.valueOf(30));
    Configurables.configure(channel, context);
    Configurables.configure(sink, context);
    sink.setChannel(channel);
    sink.start();
    int i = 0;
    while (i < 10) {
      byte[] padded = Strings.padStart("Test " + i, 30, 'P').getBytes();
      channel.put(EventBuilder.withBody(padded));
      sink.process();
      i++;
    }
    sink.stop();
  }
}
| 9,923 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/TestRollingFileSink.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.channel.BasicTransactionSemantics;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.PseudoTxnMemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.SimpleEvent;
import org.apache.flume.instrumentation.SinkCounter;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
/**
 * Tests for {@link RollingFileSink}.
 *
 * <p>Fixes: readers in {@link #doTest} are now closed via try-with-resources
 * (previously leaked if readLine() threw); {@link #cleanDirectory} guards
 * against {@code File.listFiles()} returning null; {@code tearDown} empties
 * the temp directory before deleting it ({@code File.delete()} silently fails
 * on non-empty directories, littering /tmp).
 */
public class TestRollingFileSink {

  private static final Logger logger = LoggerFactory
      .getLogger(TestRollingFileSink.class);

  private File tmpDir;
  private RollingFileSink sink;

  @Before
  public void setUp() {
    // Unique per-run temp directory so parallel/repeated runs don't collide.
    tmpDir = new File("/tmp/flume-rfs-" + System.currentTimeMillis() + "-"
        + Thread.currentThread().getId());
    sink = new RollingFileSink();
    sink.setChannel(new MemoryChannel());
    tmpDir.mkdirs();
  }

  @After
  public void tearDown() {
    // Remove produced files first; delete() on a non-empty dir is a no-op.
    cleanDirectory(tmpDir);
    tmpDir.delete();
  }

  /** Configure/start/stop must not throw. */
  @Test
  public void testLifecycle() {
    Context context = new Context();
    context.put("sink.directory", tmpDir.getPath());
    Configurables.configure(sink, context);
    sink.start();
    sink.stop();
  }

  /** Appends with a 1-second roll interval. */
  @Test
  public void testAppend() throws InterruptedException,
      EventDeliveryException, IOException {
    Context context = new Context();
    context.put("sink.directory", tmpDir.getPath());
    context.put("sink.rollInterval", "1");
    context.put("sink.batchSize", "1");
    doTest(context);
  }

  /** Appends with rolling disabled (rollInterval = 0). */
  @Test
  public void testAppend2() throws InterruptedException,
      EventDeliveryException, IOException {
    Context context = new Context();
    context.put("sink.directory", tmpDir.getPath());
    context.put("sink.rollInterval", "0");
    context.put("sink.batchSize", "1");
    doTest(context);
  }

  /** Appends with a custom file prefix/extension via the path manager. */
  @Test
  public void testAppend3()
      throws InterruptedException, EventDeliveryException, IOException {
    File tmpDir = new File("target/tmpLog");
    tmpDir.mkdirs();
    cleanDirectory(tmpDir);
    Context context = new Context();
    context.put("sink.directory", "target/tmpLog");
    context.put("sink.rollInterval", "0");
    context.put("sink.batchSize", "1");
    context.put("sink.pathManager.prefix", "test3-");
    context.put("sink.pathManager.extension", "txt");
    doTest(context);
  }

  /** Appends using the "rolltime" path manager with a 1-second roll. */
  @Test
  public void testRollTime()
      throws InterruptedException, EventDeliveryException, IOException {
    File tmpDir = new File("target/tempLog");
    tmpDir.mkdirs();
    cleanDirectory(tmpDir);
    Context context = new Context();
    context.put("sink.directory", "target/tempLog/");
    context.put("sink.rollInterval", "1");
    context.put("sink.batchSize", "1");
    context.put("sink.pathManager", "rolltime");
    context.put("sink.pathManager.prefix", "test4-");
    context.put("sink.pathManager.extension", "txt");
    doTest(context);
  }

  /** A ChannelException from take() must be counted as a channel read failure. */
  @Test
  public void testChannelException() throws InterruptedException, IOException {
    Context context = new Context();
    context.put("sink.directory", tmpDir.getPath());
    context.put("sink.rollInterval", "0");
    context.put("sink.batchSize", "1");
    Channel channel = Mockito.mock(Channel.class);
    Mockito.when(channel.take()).thenThrow(new ChannelException("dummy"));
    Transaction transaction = Mockito.mock(BasicTransactionSemantics.class);
    Mockito.when(channel.getTransaction()).thenReturn(transaction);
    try {
      doTest(context, channel);
    } catch (EventDeliveryException e) {
      // Expected: the mocked channel always throws on take().
    }
    SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
    Assert.assertEquals(1, sinkCounter.getChannelReadFail());
  }

  private void doTest(Context context) throws EventDeliveryException, InterruptedException,
      IOException {
    doTest(context, null);
  }

  /**
   * Pushes ten events through the sink (using the supplied channel, or a
   * fresh PseudoTxnMemoryChannel when null) and reads back every produced
   * file for debug logging.
   */
  private void doTest(Context context, Channel channel) throws EventDeliveryException,
      InterruptedException, IOException {
    Configurables.configure(sink, context);
    if (channel == null) {
      channel = new PseudoTxnMemoryChannel();
      Configurables.configure(channel, context);
    }
    sink.setChannel(channel);
    sink.start();
    for (int i = 0; i < 10; i++) {
      Event event = new SimpleEvent();
      event.setBody(("Test event " + i).getBytes());
      channel.put(event);
      sink.process();
      // Give the roll interval a chance to fire between events.
      Thread.sleep(500);
    }
    sink.stop();
    String[] produced = sink.getDirectory().list();
    if (produced == null) {
      return; // directory vanished or is unreadable; nothing to inspect
    }
    for (String file : produced) {
      // try-with-resources: the reader is closed even if readLine() throws.
      try (BufferedReader reader = new BufferedReader(
          new FileReader(new File(sink.getDirectory(), file)))) {
        String lastLine = null;
        String currentLine;
        while ((currentLine = reader.readLine()) != null) {
          lastLine = currentLine;
          logger.debug("Produced file:{} lastLine:{}", file, lastLine);
        }
      }
    }
  }

  /** Deletes every file directly inside dir; safe if dir does not exist. */
  private void cleanDirectory(File dir) {
    File[] files = dir.listFiles();
    if (files == null) {
      // listFiles() returns null when dir doesn't exist or isn't a directory.
      return;
    }
    for (File file : files) {
      file.delete();
    }
  }

  /**
   * This test is to reproduce batch size and
   * transaction capacity related configuration
   * problems: a sink batchSize (1000) larger than the channel's
   * transactionCapacity (5) must surface as an EventDeliveryException.
   */
  @Test(expected = EventDeliveryException.class)
  public void testTransCapBatchSizeCompatibility() throws EventDeliveryException {
    Context context = new Context();
    context.put("sink.directory", tmpDir.getPath());
    context.put("sink.rollInterval", "0");
    context.put("sink.batchSize", "1000");
    Configurables.configure(sink, context);
    context.put("capacity", "50");
    context.put("transactionCapacity", "5");
    Channel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    sink.setChannel(channel);
    sink.start();
    try {
      for (int j = 0; j < 10; j++) {
        Transaction tx = channel.getTransaction();
        tx.begin();
        for (int i = 0; i < 5; i++) {
          Event event = new SimpleEvent();
          event.setBody(("Test event " + i).getBytes());
          channel.put(event);
        }
        tx.commit();
        tx.close();
      }
      sink.process();
    } finally {
      sink.stop();
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import org.apache.flume.Sink;
import org.apache.flume.SinkFactory;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestDefaultSinkFactory {

  private SinkFactory sinkFactory;

  @Before
  public void setUp() {
    sinkFactory = new DefaultSinkFactory();
  }

  /** The factory must hand out a brand-new sink instance on every create() call. */
  @Test
  public void testDuplicateCreate() {
    Sink first = sinkFactory.create("avroSink1", "avro");
    Sink second = sinkFactory.create("avroSink2", "avro");
    Assert.assertNotNull(first);
    Assert.assertNotNull(second);
    Assert.assertNotSame(first, second);
    Assert.assertTrue(first instanceof AvroSink);
    Assert.assertTrue(second instanceof AvroSink);
    // Re-creating under the same names must still yield distinct objects.
    Assert.assertNotSame(first, sinkFactory.create("avroSink1", "avro"));
    Assert.assertNotSame(second, sinkFactory.create("avroSink2", "avro"));
  }

  /** Creates a sink of the given type and checks it is of the expected class. */
  private void verifySinkCreation(String name, String type, Class<?> typeClass)
      throws Exception {
    Sink created = sinkFactory.create(name, type);
    Assert.assertNotNull(created);
    Assert.assertTrue(typeClass.isInstance(created));
  }

  /** Every built-in type alias must map to its implementation class. */
  @Test
  public void testSinkCreation() throws Exception {
    verifySinkCreation("null-sink", "null", NullSink.class);
    verifySinkCreation("logger-sink", "logger", LoggerSink.class);
    verifySinkCreation("file-roll-sink", "file_roll", RollingFileSink.class);
    verifySinkCreation("avro-sink", "avro", AvroSink.class);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import com.google.common.base.Charsets;
import org.apache.avro.ipc.netty.NettyServer;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Transaction;
import org.apache.flume.api.RpcClient;
import org.apache.flume.channel.BasicTransactionSemantics;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.instrumentation.SinkCounter;
import org.apache.flume.lifecycle.LifecycleController;
import org.apache.flume.lifecycle.LifecycleState;
import org.apache.flume.source.AvroSource;
import org.apache.flume.source.avro.AvroFlumeEvent;
import org.apache.flume.source.avro.AvroSourceProtocol;
import org.apache.flume.source.avro.Status;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.charset.Charset;
import java.security.KeyStore;
import java.security.Security;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.ssl.SslHandler;
public class TestAvroSink {
private static final Logger logger = LoggerFactory
.getLogger(TestAvroSink.class);
private static final String hostname = "127.0.0.1";
private static final Integer port;
static {
try (ServerSocket socket = new ServerSocket(0)) {
port = socket.getLocalPort();
} catch (IOException e) {
throw new AssertionError("Cannot find free port", e);
}
}
private AvroSink sink;
private Channel channel;
// Default fixture: sink and channel with no compression configured.
public void setUp() {
  setUp("none", 0);
}
/**
 * Builds the sink and memory channel under test.
 *
 * @param compressionType "deflate" to enable compression, anything else for none
 * @param compressionLevel deflate level; only applied when compression is enabled
 * @throws IllegalStateException if called twice in one test (fixture already built);
 *         IllegalStateException is the conventional type for "called at the wrong
 *         time" and remains catchable as RuntimeException
 */
public void setUp(String compressionType, int compressionLevel) {
  if (sink != null) {
    throw new IllegalStateException("double setup");
  }
  sink = new AvroSink();
  channel = new MemoryChannel();
  Context context = createBaseContext();
  if (compressionType.equals("deflate")) {
    context.put("compression-type", compressionType);
    context.put("compression-level", Integer.toString(compressionLevel));
  }
  sink.setChannel(channel);
  Configurables.configure(sink, context);
  Configurables.configure(channel, context);
}
/** Common sink configuration shared by all tests: target address plus timeouts. */
private Context createBaseContext() {
  Context ctx = new Context();
  ctx.put("hostname", hostname);
  ctx.put("port", String.valueOf(port));
  ctx.put("batch-size", String.valueOf(2));
  ctx.put("connect-timeout", String.valueOf(2000L));
  ctx.put("request-timeout", String.valueOf(3000L));
  return ctx;
}
/** Builds (but does not start) a Netty Avro RPC server on the shared test host/port. */
private Server createServer(AvroSourceProtocol protocol) throws InterruptedException {
  SpecificResponder responder = new SpecificResponder(AvroSourceProtocol.class, protocol);
  return new NettyServer(responder, new InetSocketAddress(hostname, port));
}
/**
 * The sink must transition cleanly through start and stop against a live
 * server. The server is closed in a finally block so a failed assertion
 * cannot leak the listening socket into later tests (the original leaked it).
 */
@Test
public void testLifecycle() throws InterruptedException,
    InstantiationException, IllegalAccessException {
  setUp();
  Server server = createServer(new MockAvroServer());
  server.start();
  try {
    sink.start();
    Assert.assertTrue(LifecycleController.waitForOneOf(sink,
        LifecycleState.START_OR_ERROR, 5000));
    sink.stop();
    Assert.assertTrue(LifecycleController.waitForOneOf(sink,
        LifecycleState.STOP_OR_ERROR, 5000));
  } finally {
    server.close();
  }
}
/**
 * Ten events with batch-size 2 should drain in exactly five READY process()
 * calls, after which the empty channel yields BACKOFF.
 */
@Test
public void testProcess() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
  Server server = createServer(new MockAvroServer());
  server.start();
  sink.start();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.START_OR_ERROR, 5000));
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  for (int i = 0; i < 10; i++) {
    channel.put(event);
  }
  transaction.commit();
  transaction.close();
  // 10 events / batch-size 2 = 5 READY batches
  for (int i = 0; i < 5; i++) {
    Sink.Status status = sink.process();
    Assert.assertEquals(Sink.Status.READY, status);
  }
  Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
  sink.stop();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.STOP_OR_ERROR, 5000));
  server.close();
}
/**
 * A channel whose take() always throws must be counted as a channel read
 * failure by the sink's counter, and must not crash the caller.
 */
@Test
public void testChannelException() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Server server = createServer(new MockAvroServer());
  server.start();
  sink.start();
  Channel channel = Mockito.mock(Channel.class);
  Mockito.when(channel.take()).thenThrow(new ChannelException("dummy"));
  Transaction transaction = Mockito.mock(BasicTransactionSemantics.class);
  Mockito.when(channel.getTransaction()).thenReturn(transaction);
  sink.setChannel(channel);
  // Return value intentionally ignored (the original bound it to an unused
  // local); only the failure counter below matters.
  sink.process();
  sink.stop();
  server.close();
  SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
  Assert.assertEquals(1, sinkCounter.getChannelReadFail());
}
/**
 * Verifies both connect-timeout and request-timeout enforcement: a server-side
 * delay longer than each configured timeout must surface as an
 * EventDeliveryException, and the sink's eventWriteFail counter must record
 * both failures.
 */
@Test
public void testTimeout() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Event event = EventBuilder.withBody("foo", Charsets.UTF_8);
  AtomicLong delay = new AtomicLong();
  Server server = createServer(new DelayMockAvroServer(delay));
  server.start();
  sink.start();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.START_OR_ERROR, 5000));
  Transaction txn = channel.getTransaction();
  txn.begin();
  for (int i = 0; i < 4; i++) {
    channel.put(event);
  }
  txn.commit();
  txn.close();
  // should throw EventDeliveryException due to connect timeout
  delay.set(3000L); // because connect-timeout = 2000
  boolean threw = false;
  try {
    sink.process();
  } catch (EventDeliveryException ex) {
    logger.info("Correctly threw due to connect timeout. Exception follows.",
        ex);
    threw = true;
  }
  Assert.assertTrue("Must throw due to connect timeout", threw);
  // now, allow the connect handshake to occur
  delay.set(0);
  sink.process();
  // should throw another EventDeliveryException due to request timeout
  delay.set(4000L); // because request-timeout = 3000
  threw = false;
  try {
    sink.process();
  } catch (EventDeliveryException ex) {
    logger.info("Correctly threw due to request timeout. Exception follows.",
        ex);
    threw = true;
  }
  Assert.assertTrue("Must throw due to request timeout", threw);
  sink.stop();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.STOP_OR_ERROR, 5000));
  server.close();
  // One write failure per timeout scenario.
  SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
  Assert.assertEquals(2, sinkCounter.getEventWriteFail());
}
/**
 * The sink must throw EventDeliveryException while the server is down and
 * recover transparently once a server is available again; the failure
 * counters must reflect the five failed process() calls.
 */
@Test
public void testFailedConnect() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Event event = EventBuilder.withBody("test event 1",
      Charset.forName("UTF8"));
  Server server = createServer(new MockAvroServer());
  server.start();
  sink.start();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.START_OR_ERROR, 5000));
  Thread.sleep(500L); // let socket startup
  server.close();
  Thread.sleep(500L); // sleep a little to allow close occur
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  for (int i = 0; i < 10; i++) {
    channel.put(event);
  }
  transaction.commit();
  transaction.close();
  // With the server gone, every attempt must fail loudly.
  for (int i = 0; i < 5; i++) {
    boolean threwException = false;
    try {
      sink.process();
    } catch (EventDeliveryException e) {
      threwException = true;
    }
    Assert.assertTrue("Must throw EventDeliveryException if disconnected",
        threwException);
  }
  // Bring a server back: the sink should reconnect and drain the channel.
  server = createServer(new MockAvroServer());
  server.start();
  for (int i = 0; i < 5; i++) {
    Sink.Status status = sink.process();
    Assert.assertEquals(Sink.Status.READY, status);
  }
  Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
  sink.stop();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.STOP_OR_ERROR, 5000));
  server.close();
  SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
  Assert.assertEquals(5, sinkCounter.getEventWriteFail());
  Assert.assertEquals(4, sinkCounter.getConnectionFailedCount());
}
/**
 * Exercises reset-connection-interval: a 5s interval must force a new
 * underlying RpcClient once the interval elapses, while "0" or an absent
 * setting must keep the same client indefinitely.
 */
@Test
public void testReset() throws Exception {
  setUp();
  Server server = createServer(new MockAvroServer());
  server.start();
  Context context = new Context();
  context.put("hostname", hostname);
  context.put("port", String.valueOf(port));
  context.put("batch-size", String.valueOf(2));
  context.put("connect-timeout", String.valueOf(2000L));
  context.put("request-timeout", String.valueOf(3000L));
  context.put("reset-connection-interval", String.valueOf("5"));
  sink.setChannel(channel);
  Configurables.configure(sink, context);
  sink.start();
  RpcClient firstClient = sink.getUnderlyingClient();
  Thread.sleep(6000); // longer than the 5s reset interval
  Transaction t = channel.getTransaction();
  t.begin();
  channel.put(EventBuilder.withBody("This is a test", Charset.defaultCharset()));
  t.commit();
  t.close();
  sink.process();
  // Make sure they are not the same object, connection should be reset
  Assert.assertFalse(firstClient == sink.getUnderlyingClient());
  sink.stop();
  // reset-connection-interval = 0 disables resetting entirely.
  context.put("hostname", hostname);
  context.put("port", String.valueOf(port));
  context.put("batch-size", String.valueOf(2));
  context.put("connect-timeout", String.valueOf(2000L));
  context.put("request-timeout", String.valueOf(3000L));
  context.put("reset-connection-interval", String.valueOf("0"));
  sink.setChannel(channel);
  Configurables.configure(sink, context);
  sink.start();
  firstClient = sink.getUnderlyingClient();
  Thread.sleep(6000);
  // Make sure they are the same object, since connection should not be reset
  Assert.assertTrue(firstClient == sink.getUnderlyingClient());
  sink.stop();
  // Leaving reset-connection-interval unset behaves like "disabled" too.
  context.clear();
  context.put("hostname", hostname);
  context.put("port", String.valueOf(port));
  context.put("batch-size", String.valueOf(2));
  context.put("connect-timeout", String.valueOf(2000L));
  context.put("request-timeout", String.valueOf(3000L));
  sink.setChannel(channel);
  Configurables.configure(sink, context);
  sink.start();
  firstClient = sink.getUnderlyingClient();
  Thread.sleep(6000);
  // Make sure they are the same object, since connection should not be reset
  Assert.assertTrue(firstClient == sink.getUnderlyingClient());
  sink.stop();
  server.close();
}
/** SSL delivery with server certificate checking disabled via trust-all-certs. */
@Test
public void testSslProcessTrustAllCerts() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Context sslContext = createBaseContext();
  sslContext.put("ssl", String.valueOf(true));
  sslContext.put("trust-all-certs", String.valueOf(true));
  Configurables.configure(sink, sslContext);
  doTestSslProcess();
}
/** SSL delivery using a component-level truststore with an explicit password. */
@Test
public void testSslProcessWithComponentTruststore() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Context sslContext = createBaseContext();
  sslContext.put("ssl", String.valueOf(true));
  sslContext.put("truststore", "src/test/resources/truststore.jks");
  sslContext.put("truststore-password", "password");
  Configurables.configure(sink, sslContext);
  doTestSslProcess();
}
/** SSL delivery using a component-level truststore with no password configured. */
@Test
public void testSslProcessWithComponentTruststoreNoPassword() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  Context sslContext = createBaseContext();
  sslContext.put("ssl", String.valueOf(true));
  sslContext.put("truststore", "src/test/resources/truststore.jks");
  Configurables.configure(sink, sslContext);
  doTestSslProcess();
}
/**
 * SSL delivery must succeed when the truststore comes from the global
 * javax.net.ssl system properties rather than component configuration.
 * Cleanup runs in a finally block so a failure cannot leave the global
 * properties set and poison later tests (the original skipped cleanup on
 * failure).
 */
@Test
public void testSslProcessWithGlobalTruststore() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  System.setProperty("javax.net.ssl.trustStore", "src/test/resources/truststore.jks");
  System.setProperty("javax.net.ssl.trustStorePassword", "password");
  try {
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    Configurables.configure(sink, context);
    doTestSslProcess();
  } finally {
    System.clearProperty("javax.net.ssl.trustStore");
    System.clearProperty("javax.net.ssl.trustStorePassword");
  }
}
/**
 * Same as {@code testSslProcessWithGlobalTruststore} but without a global
 * truststore password. The property is cleared in a finally block so a
 * failure cannot leak global JVM state into later tests.
 */
@Test
public void testSslProcessWithGlobalTruststoreNoPassword() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp();
  System.setProperty("javax.net.ssl.trustStore", "src/test/resources/truststore.jks");
  try {
    Context context = createBaseContext();
    context.put("ssl", String.valueOf(true));
    Configurables.configure(sink, context);
    doTestSslProcess();
  } finally {
    System.clearProperty("javax.net.ssl.trustStore");
  }
}
/**
 * Shared SSL driver: assumes the sink has already been configured for SSL by
 * the caller, starts an SSL server, pushes ten events, and expects five READY
 * batches (batch-size 2) followed by BACKOFF on the empty channel.
 */
private void doTestSslProcess() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  Server server = createSslServer(new MockAvroServer());
  server.start();
  sink.start();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.START_OR_ERROR, 5000));
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
  for (int i = 0; i < 10; i++) {
    channel.put(event);
  }
  transaction.commit();
  transaction.close();
  for (int i = 0; i < 5; i++) {
    Sink.Status status = sink.process();
    Assert.assertEquals(Sink.Status.READY, status);
  }
  Assert.assertEquals(Sink.Status.BACKOFF, sink.process());
  sink.stop();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.STOP_OR_ERROR, 5000));
  server.close();
}
/**
 * Round-trips a compressed event over SSL: an SSL+deflate AvroSource on one
 * side and the sink (trust-all-certs, deflate level 6) on the other. The
 * event body must arrive intact. Removes the original's unused locals
 * ({@code bound}, {@code selectedPort}).
 */
@Test
public void testSslWithCompression() throws InterruptedException,
    EventDeliveryException, InstantiationException, IllegalAccessException {
  setUp("deflate", 6);
  AvroSource source = new AvroSource();
  Channel sourceChannel = new MemoryChannel();
  Configurables.configure(sourceChannel, new Context());
  List<Channel> channels = new ArrayList<Channel>();
  channels.add(sourceChannel);
  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);
  source.setChannelProcessor(new ChannelProcessor(rcs));
  // Server side: SSL with the test keystore, deflate compression.
  Context context = new Context();
  context.put("port", port.toString());
  context.put("bind", hostname);
  context.put("threads", "50");
  context.put("compression-type", "deflate");
  context.put("ssl", String.valueOf(true));
  context.put("keystore", "src/test/resources/server.p12");
  context.put("keystore-password", "password");
  context.put("keystore-type", "PKCS12");
  Configurables.configure(source, context);
  source.start();
  Assert.assertTrue("Reached start or error", LifecycleController.waitForOneOf(
      source, LifecycleState.START_OR_ERROR));
  Assert.assertEquals("Server is started", LifecycleState.START,
      source.getLifecycleState());
  Event event = EventBuilder.withBody("Hello avro",
      Charset.forName("UTF8"));
  // Client (sink) side: trust-all-certs SSL with matching compression.
  context = createBaseContext();
  context.put("ssl", String.valueOf(true));
  context.put("trust-all-certs", String.valueOf(true));
  context.put("compression-type", "deflate");
  context.put("compression-level", Integer.toString(6));
  Configurables.configure(sink, context);
  sink.start();
  Transaction sinkTransaction = channel.getTransaction();
  sinkTransaction.begin();
  for (int i = 0; i < 10; i++) {
    channel.put(event);
  }
  sinkTransaction.commit();
  sinkTransaction.close();
  for (int i = 0; i < 5; i++) {
    Sink.Status status = sink.process();
    logger.debug("Calling Process " + i + " times:" + status);
    Assert.assertEquals(Sink.Status.READY, status);
  }
  sink.stop();
  Transaction sourceTransaction = sourceChannel.getTransaction();
  sourceTransaction.begin();
  Event sourceEvent = sourceChannel.take();
  Assert.assertNotNull(sourceEvent);
  Assert.assertEquals("Channel contained our event", "Hello avro",
      new String(sourceEvent.getBody()));
  sourceTransaction.commit();
  sourceTransaction.close();
  logger.debug("Round trip event:{}", sourceEvent);
  source.stop();
  Assert.assertTrue("Reached stop or error",
      LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
  Assert.assertEquals("Server is stopped", LifecycleState.STOP,
      source.getLifecycleState());
}
/** An SSL-enabled sink pointed at a plain-text server must fail to deliver. */
@Test
public void testSslSinkWithNonSslServer() throws InterruptedException,
    InstantiationException, IllegalAccessException {
  setUp();
  Server plainServer = createServer(new MockAvroServer());
  plainServer.start();
  Context sslContext = createBaseContext();
  sslContext.put("ssl", String.valueOf(true));
  sslContext.put("trust-all-certs", String.valueOf(true));
  Configurables.configure(sink, sslContext);
  boolean failed = doRequestWhenFailureExpected();
  plainServer.close();
  if (!failed) {
    Assert.fail("SSL-enabled sink successfully connected to a non-SSL-enabled server, " +
        "that's wrong.");
  }
  SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
  Assert.assertEquals(1, sinkCounter.getEventWriteFail());
}
/** Without trust-all-certs, the server's self-signed certificate must be rejected. */
@Test
public void testSslSinkWithNonTrustedCert() throws InterruptedException,
    InstantiationException, IllegalAccessException {
  setUp();
  Server sslServer = createSslServer(new MockAvroServer());
  sslServer.start();
  Context sslContext = createBaseContext();
  sslContext.put("ssl", String.valueOf(true));
  Configurables.configure(sink, sslContext);
  boolean failed = doRequestWhenFailureExpected();
  sslServer.close();
  if (!failed) {
    Assert.fail("SSL-enabled sink successfully connected to a server with an " +
        "untrusted certificate when it should have failed");
  }
  SinkCounter sinkCounter = (SinkCounter) Whitebox.getInternalState(sink, "sinkCounter");
  Assert.assertEquals(1, sinkCounter.getEventWriteFail());
}
/**
 * Sends a single event through the already-configured sink.
 *
 * @return true if sink.process() threw EventDeliveryException, false if the
 *         event was (unexpectedly) delivered
 */
private boolean doRequestWhenFailureExpected()
    throws InterruptedException {
  sink.start();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.START_OR_ERROR, 5000));
  Transaction transaction = channel.getTransaction();
  transaction.begin();
  Event event = EventBuilder.withBody("test event 1", Charsets.UTF_8);
  channel.put(event);
  transaction.commit();
  transaction.close();
  boolean failed;
  try {
    sink.process();
    failed = false;
  } catch (EventDeliveryException ex) {
    logger.info("Correctly failed to send event", ex);
    failed = true;
  }
  sink.stop();
  Assert.assertTrue(LifecycleController.waitForOneOf(sink,
      LifecycleState.STOP_OR_ERROR, 5000));
  return failed;
}
// No compression on either side: plain Avro round trip.
@Test
public void testRequestWithNoCompression()
    throws InterruptedException, IOException, EventDeliveryException {
  doRequest(false, false, 6);
}
// Deflate on both sides at level 0 (no actual compression, but deflate framing).
@Test
public void testRequestWithCompressionOnClientAndServerOnLevel0()
    throws InterruptedException, IOException, EventDeliveryException {
  doRequest(true, true, 0);
}
// Deflate on both sides at level 1 (fastest compression).
@Test
public void testRequestWithCompressionOnClientAndServerOnLevel1()
    throws InterruptedException, IOException, EventDeliveryException {
  doRequest(true, true, 1);
}
// Deflate on both sides at level 6 (the default deflate level).
@Test
public void testRequestWithCompressionOnClientAndServerOnLevel6()
    throws InterruptedException, IOException, EventDeliveryException {
  doRequest(true, true, 6);
}
// Deflate on both sides at level 9 (maximum compression).
@Test
public void testRequestWithCompressionOnClientAndServerOnLevel9()
    throws InterruptedException, IOException, EventDeliveryException {
  doRequest(true, true, 9);
}
/**
 * Shared driver for the compression matrix tests: round-trips ten copies of
 * one event from the sink to an AvroSource and verifies the body survives.
 * Removes the original's unused locals ({@code bound}, {@code selectedPort}).
 *
 * @param serverEnableCompression whether the receiving source uses deflate
 * @param clientEnableCompression whether the sink compresses with deflate
 * @param compressionLevel deflate level used when client compression is on
 */
private void doRequest(boolean serverEnableCompression, boolean clientEnableCompression,
    int compressionLevel)
    throws InterruptedException, IOException, EventDeliveryException {
  if (clientEnableCompression) {
    setUp("deflate", compressionLevel);
  } else {
    setUp("none", compressionLevel);
  }
  AvroSource source = new AvroSource();
  Channel sourceChannel = new MemoryChannel();
  Configurables.configure(sourceChannel, new Context());
  List<Channel> channels = new ArrayList<Channel>();
  channels.add(sourceChannel);
  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(channels);
  source.setChannelProcessor(new ChannelProcessor(rcs));
  Context context = new Context();
  context.put("port", port.toString());
  context.put("bind", hostname);
  context.put("threads", "50");
  context.put("compression-type", serverEnableCompression ? "deflate" : "none");
  Configurables.configure(source, context);
  source.start();
  Assert.assertTrue("Reached start or error",
      LifecycleController.waitForOneOf(source, LifecycleState.START_OR_ERROR));
  Assert.assertEquals("Server is started",
      LifecycleState.START, source.getLifecycleState());
  Event event = EventBuilder.withBody("Hello avro", Charset.forName("UTF8"));
  sink.start();
  Transaction sinkTransaction = channel.getTransaction();
  sinkTransaction.begin();
  for (int i = 0; i < 10; i++) {
    channel.put(event);
  }
  sinkTransaction.commit();
  sinkTransaction.close();
  for (int i = 0; i < 5; i++) {
    Sink.Status status = sink.process();
    logger.debug("Calling Process " + i + " times:" + status);
    Assert.assertEquals(Sink.Status.READY, status);
  }
  sink.stop();
  Transaction sourceTransaction = sourceChannel.getTransaction();
  sourceTransaction.begin();
  Event sourceEvent = sourceChannel.take();
  Assert.assertNotNull(sourceEvent);
  Assert.assertEquals("Channel contained our event", "Hello avro",
      new String(sourceEvent.getBody()));
  sourceTransaction.commit();
  sourceTransaction.close();
  logger.debug("Round trip event:{}", sourceEvent);
  source.stop();
  Assert.assertTrue("Reached stop or error",
      LifecycleController.waitForOneOf(source, LifecycleState.STOP_OR_ERROR));
  Assert.assertEquals("Server is stopped", LifecycleState.STOP,
      source.getLifecycleState());
}
/** Minimal Avro server stub that acknowledges every append with Status.OK. */
private static class MockAvroServer implements AvroSourceProtocol {
  @Override
  public Status append(AvroFlumeEvent event) {
    logger.debug("Received event:{}", event);
    return Status.OK;
  }
  @Override
  public Status appendBatch(List<AvroFlumeEvent> events) {
    logger.debug("Received event batch:{}", events);
    return Status.OK;
  }
}
/**
 * Avro server stub that sleeps for a configurable delay before acknowledging,
 * used to trigger connect/request timeouts in the sink. The delay is read
 * from a shared AtomicLong so tests can change it between calls.
 */
private static class DelayMockAvroServer implements AvroSourceProtocol.Callback {
  // Shared, mutable delay in milliseconds applied before every response.
  private final AtomicLong delay;
  public DelayMockAvroServer(AtomicLong delay) {
    this.delay = delay;
  }
  // Sleeps for the current delay; interruption is rethrown as IOException.
  private void sleep() throws IOException {
    try {
      Thread.sleep(delay.get());
    } catch (InterruptedException e) {
      throw new IOException("Interrupted while sleeping", e);
    }
  }
  @Override
  public Status append(AvroFlumeEvent event) {
    logger.debug("Received event:{}; delaying for {}ms", event, delay);
    try {
      sleep();
    } catch (IOException e) {
      logger.debug("IOException detected");
    }
    return Status.OK;
  }
  @Override
  public void append(AvroFlumeEvent event,
      org.apache.avro.ipc.Callback<Status> status)
      throws IOException {
    logger.debug("Received event:{}; delaying for {}ms", event, delay);
    sleep();
  }
  @Override
  public Status appendBatch(List<AvroFlumeEvent> events) {
    logger.debug("Received event batch:{}; delaying for {}ms", events, delay);
    try {
      sleep();
    } catch (IOException e) {
      logger.debug("IOException detected");
    }
    return Status.OK;
  }
  @Override
  public void appendBatch(List<AvroFlumeEvent> events,
      org.apache.avro.ipc.Callback<Status> status)
      throws IOException {
    logger.debug("Received event batch:{}; delaying for {}ms", events, delay);
    sleep();
  }
}
/**
 * Builds a Netty Avro server whose channel pipeline terminates TLS with the
 * test keystore; the SSLEngine is put in server mode before the handler is
 * installed.
 */
private Server createSslServer(AvroSourceProtocol protocol) throws InterruptedException {
  return new NettyServer(new SpecificResponder(AvroSourceProtocol.class, protocol),
      new InetSocketAddress(hostname, port),
      (ch) -> {
        ChannelPipeline pipeline = ch.pipeline();
        SSLEngine engine = createSSLEngine();
        engine.setUseClientMode(false);
        pipeline.addLast("ssl", new SslHandler(engine));
      });
}
/**
 * Builds a server-side SSLEngine backed by the test PKCS12 keystore.
 * Trust managers are left null, so the engine uses JDK defaults.
 *
 * @throws RuntimeException wrapping any keystore/SSL initialization failure
 */
private SSLEngine createSSLEngine() {
  String keystore = "src/test/resources/server.p12";
  String keystorePassword = "password";
  String keystoreType = "PKCS12";
  try {
    KeyStore ks = KeyStore.getInstance(keystoreType);
    // try-with-resources: the original never closed this stream.
    try (FileInputStream keystoreStream = new FileInputStream(keystore)) {
      ks.load(keystoreStream, keystorePassword.toCharArray());
    }
    // Set up key manager factory to use our key store
    KeyManagerFactory kmf = KeyManagerFactory.getInstance(getAlgorithm());
    kmf.init(ks, keystorePassword.toCharArray());
    SSLContext sslContext = SSLContext.getInstance("TLS");
    sslContext.init(kmf.getKeyManagers(), null, null);
    return sslContext.createSSLEngine();
  } catch (Exception ex) {
    throw new RuntimeException("Cannot create SSL Engine", ex);
  }
}
/** Key manager algorithm from the JVM security properties, defaulting to SunX509. */
private String getAlgorithm() {
  String configured = Security.getProperty("ssl.KeyManagerFactory.algorithm");
  return configured != null ? configured : "SunX509";
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.sink;
import junit.framework.Assert;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Sink.Status;
import org.apache.flume.Transaction;
import org.apache.flume.channel.AbstractChannel;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class TestLoadBalancingSinkProcessor {
/** Builds a processor Context carrying both the selector type and backoff flag. */
private Context getContext(String selectorType, boolean backoff) {
  Map<String, String> params = new HashMap<String, String>();
  params.put("selector", selectorType);
  params.put("backoff", String.valueOf(backoff));
  return new Context(params);
}
/** Builds a processor Context carrying only the selector type. */
private Context getContext(String selectorType) {
  Map<String, String> params = new HashMap<String, String>();
  params.put("selector", selectorType);
  return new Context(params);
}
// Convenience overload: builds the processor from a selector type and backoff flag.
private LoadBalancingSinkProcessor getProcessor(
    String selectorType, List<Sink> sinks, boolean backoff) {
  return getProcessor(sinks, getContext(selectorType, backoff));
}
// Builds, configures and starts a LoadBalancingSinkProcessor over the given sinks.
private LoadBalancingSinkProcessor getProcessor(List<Sink> sinks, Context ctx) {
  LoadBalancingSinkProcessor lbsp = new LoadBalancingSinkProcessor();
  lbsp.setSinks(sinks);
  lbsp.configure(ctx);
  lbsp.start();
  return lbsp;
}
/**
 * With no selector configured, the processor must default to round-robin and
 * spread the events evenly across all three sinks.
 */
@Test
public void testDefaultConfiguration() throws Exception {
  // If no selector is specified, the round-robin selector should be used
  Channel ch = new MockChannel();
  int n = 100;
  int numEvents = 3 * n;
  for (int i = 0; i < numEvents; i++) {
    ch.put(new MockEvent("test" + i));
  }
  MockSink s1 = new MockSink(1);
  s1.setChannel(ch);
  MockSink s2 = new MockSink(2);
  s2.setChannel(ch);
  MockSink s3 = new MockSink(3);
  s3.setChannel(ch);
  List<Sink> sinks = new ArrayList<Sink>();
  sinks.add(s1);
  sinks.add(s2);
  sinks.add(s3);
  LoadBalancingSinkProcessor lbsp = getProcessor(sinks, new Context());
  Status s = Status.READY;
  while (s != Status.BACKOFF) {
    s = lbsp.process();
  }
  // assertEquals reports the actual count on failure, unlike assertTrue(x == n).
  Assert.assertEquals(n, s1.getEvents().size());
  Assert.assertEquals(n, s2.getEvents().size());
  Assert.assertEquals(n, s3.getEvents().size());
}
/**
 * With the random selector and two permanently failing sinks, every event
 * must end up on the single healthy sink.
 */
@Test
public void testRandomOneActiveSink() throws Exception {
  Channel ch = new MockChannel();
  int n = 10;
  int numEvents = n;
  for (int i = 0; i < numEvents; i++) {
    ch.put(new MockEvent("test" + i));
  }
  MockSink s1 = new MockSink(1);
  s1.setChannel(ch);
  // s1 always fails
  s1.setFail(true);
  MockSink s2 = new MockSink(2);
  s2.setChannel(ch);
  MockSink s3 = new MockSink(3);
  s3.setChannel(ch);
  // s3 always fails
  s3.setFail(true);
  List<Sink> sinks = new ArrayList<Sink>();
  sinks.add(s1);
  sinks.add(s2);
  sinks.add(s3);
  LoadBalancingSinkProcessor lbsp = getProcessor("random", sinks, false);
  Sink.Status s = Sink.Status.READY;
  while (s != Sink.Status.BACKOFF) {
    s = lbsp.process();
  }
  // assertEquals reports the actual count on failure, unlike assertTrue(x == 0).
  Assert.assertEquals(0, s1.getEvents().size());
  Assert.assertEquals(n, s2.getEvents().size());
  Assert.assertEquals(0, s3.getEvents().size());
}
  // Verifies that with backoff enabled the random selector blacklists failing
  // sinks, and that a recovered sink stays backed off until its backoff
  // window (observed here as ~2s) elapses. Timing-sensitive: exact event
  // counts depend on the statement ordering and sleeps below.
  @Test
  public void testRandomBackoff() throws Exception {
    Channel ch = new MockChannel();
    int n = 100;
    int numEvents = n;
    for (int i = 0; i < numEvents; i++) {
      ch.put(new MockEvent("test" + i));
    }
    MockSink s1 = new MockSink(1);
    s1.setChannel(ch);
    // s1 always fails
    s1.setFail(true);
    MockSink s2 = new MockSink(2);
    s2.setChannel(ch);
    MockSink s3 = new MockSink(3);
    s3.setChannel(ch);
    // s3 always fails
    s3.setFail(true);
    List<Sink> sinks = new ArrayList<Sink>();
    sinks.add(s1);
    sinks.add(s2);
    sinks.add(s3);
    LoadBalancingSinkProcessor lbsp = getProcessor("random", sinks, true);
    // TODO: there is a remote possibility that s0 or s2
    // never get hit by the random assignment
    // and thus not backoffed, causing the test to fail
    for (int i = 0; i < 50; i++) {
      // a well behaved runner would always check the return.
      lbsp.process();
    }
    // Only the healthy sink should have consumed anything so far.
    Assert.assertEquals(50, s2.getEvents().size());
    s2.setFail(true);
    s1.setFail(false); // s1 should still be backed off
    try {
      lbsp.process();
      // nothing should be able to process right now
      Assert.fail("Expected EventDeliveryException");
    } catch (EventDeliveryException e) {
      // this is expected
    }
    Thread.sleep(2100); // wait for s1 to no longer be backed off
    Sink.Status s = Sink.Status.READY;
    while (s != Sink.Status.BACKOFF) {
      s = lbsp.process();
    }
    // s1 drains the remaining 50 events once it leaves backoff.
    Assert.assertEquals(50, s1.getEvents().size());
    Assert.assertEquals(50, s2.getEvents().size());
    Assert.assertEquals(0, s3.getEvents().size());
  }
@Test
public void testRandomPersistentFailure() throws Exception {
Channel ch = new MockChannel();
int n = 100;
int numEvents = 3 * n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
// s2 always fails
s2.setFail(true);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
LoadBalancingSinkProcessor lbsp = getProcessor("random",sinks, false);
Status s = Status.READY;
while (s != Status.BACKOFF) {
s = lbsp.process();
}
Assert.assertTrue(s2.getEvents().size() == 0);
Assert.assertTrue(s1.getEvents().size() + s3.getEvents().size() == 3 * n);
}
@Test
public void testRandomNoFailure() throws Exception {
Channel ch = new MockChannel();
int n = 10000;
int numEvents = n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
MockSink s4 = new MockSink(4);
s4.setChannel(ch);
MockSink s5 = new MockSink(5);
s5.setChannel(ch);
MockSink s6 = new MockSink(6);
s6.setChannel(ch);
MockSink s7 = new MockSink(7);
s7.setChannel(ch);
MockSink s8 = new MockSink(8);
s8.setChannel(ch);
MockSink s9 = new MockSink(9);
s9.setChannel(ch);
MockSink s0 = new MockSink(0);
s0.setChannel(ch);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
sinks.add(s4);
sinks.add(s5);
sinks.add(s6);
sinks.add(s7);
sinks.add(s8);
sinks.add(s9);
sinks.add(s0);
LoadBalancingSinkProcessor lbsp = getProcessor("random",sinks, false);
Status s = Status.READY;
while (s != Status.BACKOFF) {
s = lbsp.process();
}
Set<Integer> sizeSet = new HashSet<Integer>();
int sum = 0;
for (Sink ms : sinks) {
int count = ((MockSink) ms).getEvents().size();
sum += count;
sizeSet.add(count);
}
// Assert that all the events were accounted for
Assert.assertEquals(n, sum);
// Assert that at least two sinks came with different event sizes.
// This makes sense if the total number of events is evenly divisible by
// the total number of sinks. In which case the round-robin policy will
// end up causing all sinks to get the same number of events where as
// the random policy will have very low probability of doing that.
Assert.assertTrue("Miraculous distribution", sizeSet.size() > 1);
}
@Test
public void testRoundRobinOneActiveSink() throws Exception {
Channel ch = new MockChannel();
int n = 10;
int numEvents = n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
// s1 always fails
s1.setFail(true);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
// s3 always fails
s3.setFail(true);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
LoadBalancingSinkProcessor lbsp = getProcessor("round_robin", sinks, false);
Sink.Status s = Sink.Status.READY;
while (s != Sink.Status.BACKOFF) {
s = lbsp.process();
}
Assert.assertTrue(s1.getEvents().size() == 0);
Assert.assertTrue(s2.getEvents().size() == n);
Assert.assertTrue(s3.getEvents().size() == 0);
}
@Test
public void testRoundRobinPersistentFailure() throws Exception {
Channel ch = new MockChannel();
int n = 100;
int numEvents = 3 * n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
// s2 always fails
s2.setFail(true);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
LoadBalancingSinkProcessor lbsp = getProcessor("round_robin",sinks, false);
Status s = Status.READY;
while (s != Status.BACKOFF) {
s = lbsp.process();
}
Assert.assertTrue(s1.getEvents().size() == n);
Assert.assertTrue(s2.getEvents().size() == 0);
Assert.assertTrue(s3.getEvents().size() == 2 * n);
}
  // test that even if the sink recovers immediately that it is kept out of commission briefly
  // test also verifies that when a sink fails, events are balanced over remaining sinks
  // NOTE(review): the exact counts asserted at the end depend on the precise
  // number of process() calls made while s2 is failing — do not reorder.
  @Test
  public void testRoundRobinBackoffInitialFailure() throws EventDeliveryException {
    Channel ch = new MockChannel();
    int n = 100;
    int numEvents = 3 * n;
    for (int i = 0; i < numEvents; i++) {
      ch.put(new MockEvent("test" + i));
    }
    MockSink s1 = new MockSink(1);
    s1.setChannel(ch);
    MockSink s2 = new MockSink(2);
    s2.setChannel(ch);
    MockSink s3 = new MockSink(3);
    s3.setChannel(ch);
    List<Sink> sinks = new ArrayList<Sink>();
    sinks.add(s1);
    sinks.add(s2);
    sinks.add(s3);
    LoadBalancingSinkProcessor lbsp = getProcessor("round_robin",sinks, true);
    Status s = Status.READY;
    // one full healthy round: each sink takes one event
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    s2.setFail(true);
    // s2 fails during this round and gets backed off
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    s2.setFail(false);
    // s2 recovered immediately, but backoff keeps it out; s1 and s3 split
    // the remaining events between them
    while (s != Status.BACKOFF) {
      s = lbsp.process();
    }
    Assert.assertEquals((3 * n) / 2, s1.getEvents().size());
    Assert.assertEquals(1, s2.getEvents().size());
    Assert.assertEquals((3 * n) / 2 - 1, s3.getEvents().size());
  }
  // Verifies that consecutive failures lengthen the backoff window: after the
  // second failure s2 stays backed off even though ~2.1s have passed, and only
  // becomes eligible again after a further wait. Timing-sensitive test.
  @Test
  public void testRoundRobinBackoffIncreasingBackoffs()
      throws EventDeliveryException, InterruptedException {
    Channel ch = new MockChannel();
    int n = 100;
    int numEvents = 3 * n;
    for (int i = 0; i < numEvents; i++) {
      ch.put(new MockEvent("test" + i));
    }
    MockSink s1 = new MockSink(1);
    s1.setChannel(ch);
    MockSink s2 = new MockSink(2);
    s2.setChannel(ch);
    s2.setFail(true);
    MockSink s3 = new MockSink(3);
    s3.setChannel(ch);
    List<Sink> sinks = new ArrayList<Sink>();
    sinks.add(s1);
    sinks.add(s2);
    sinks.add(s3);
    LoadBalancingSinkProcessor lbsp = getProcessor("round_robin",sinks, true);
    Status s = Status.READY;
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    Assert.assertEquals(0, s2.getEvents().size());
    Thread.sleep(2100);
    // this should let the sink come out of backoff and get backed off for a longer time
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    Assert.assertEquals(0, s2.getEvents().size());
    s2.setFail(false);
    Thread.sleep(2100);
    // this time it shouldn't come out of backoff yet as the timeout isn't over
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    Assert.assertEquals(0, s2.getEvents().size());
    // after this s2 should be receiving events again
    Thread.sleep(2100);
    while (s != Status.BACKOFF) {
      s = lbsp.process();
    }
    Assert.assertEquals( n + 2, s1.getEvents().size());
    Assert.assertEquals( n - 3, s2.getEvents().size());
    Assert.assertEquals( n + 1, s3.getEvents().size());
  }
  // Verifies that a sink which fails once and then recovers rejoins the
  // rotation after its backoff window (~2s) elapses, ending one event behind
  // the sink that picked up its missed turn. Timing-sensitive test.
  @Test
  public void testRoundRobinBackoffFailureRecovery()
      throws EventDeliveryException, InterruptedException {
    Channel ch = new MockChannel();
    int n = 100;
    int numEvents = 3 * n;
    for (int i = 0; i < numEvents; i++) {
      ch.put(new MockEvent("test" + i));
    }
    MockSink s1 = new MockSink(1);
    s1.setChannel(ch);
    MockSink s2 = new MockSink(2);
    s2.setChannel(ch);
    s2.setFail(true);
    MockSink s3 = new MockSink(3);
    s3.setChannel(ch);
    List<Sink> sinks = new ArrayList<Sink>();
    sinks.add(s1);
    sinks.add(s2);
    sinks.add(s3);
    LoadBalancingSinkProcessor lbsp = getProcessor("round_robin",sinks, true);
    Status s = Status.READY;
    // first round: s2 fails its turn and is backed off
    for (int i = 0; i < 3 && s != Status.BACKOFF; i++) {
      s = lbsp.process();
    }
    s2.setFail(false);
    Thread.sleep(2001);
    while (s != Status.BACKOFF) {
      s = lbsp.process();
    }
    Assert.assertEquals(n + 1, s1.getEvents().size());
    Assert.assertEquals(n - 1, s2.getEvents().size());
    Assert.assertEquals(n, s3.getEvents().size());
  }
@Test
public void testRoundRobinNoFailure() throws Exception {
Channel ch = new MockChannel();
int n = 100;
int numEvents = 3 * n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
LoadBalancingSinkProcessor lbsp = getProcessor("round_robin",sinks, false);
Status s = Status.READY;
while (s != Status.BACKOFF) {
s = lbsp.process();
}
Assert.assertTrue(s1.getEvents().size() == n);
Assert.assertTrue(s2.getEvents().size() == n);
Assert.assertTrue(s3.getEvents().size() == n);
}
@Test
public void testCustomSelector() throws Exception {
Channel ch = new MockChannel();
int n = 10;
int numEvents = n;
for (int i = 0; i < numEvents; i++) {
ch.put(new MockEvent("test" + i));
}
MockSink s1 = new MockSink(1);
s1.setChannel(ch);
// s1 always fails
s1.setFail(true);
MockSink s2 = new MockSink(2);
s2.setChannel(ch);
MockSink s3 = new MockSink(3);
s3.setChannel(ch);
List<Sink> sinks = new ArrayList<Sink>();
sinks.add(s1);
sinks.add(s2);
sinks.add(s3);
// This selector will result in all events going to s2
Context ctx = getContext(FixedOrderSelector.class.getCanonicalName());
ctx.put("selector." + FixedOrderSelector.SET_ME, "foo");
LoadBalancingSinkProcessor lbsp = getProcessor(sinks, ctx);
Sink.Status s = Sink.Status.READY;
while (s != Sink.Status.BACKOFF) {
s = lbsp.process();
}
Assert.assertTrue(s1.getEvents().size() == 0);
Assert.assertTrue(s2.getEvents().size() == n);
Assert.assertTrue(s3.getEvents().size() == 0);
}
private static class MockSink extends AbstractSink {
private final int id;
private List<Event> events = new ArrayList();
private boolean fail = false;
private MockSink(int id) {
this.id = id;
}
List<Event> getEvents() {
return events;
}
int getId() {
return id;
}
void setFail(boolean bFail) {
fail = bFail;
}
@Override
public Status process() throws EventDeliveryException {
if (fail) {
throw new EventDeliveryException("failed");
}
Event e = this.getChannel().take();
if (e == null) {
return Status.BACKOFF;
}
events.add(e);
return Status.READY;
}
}
private static class MockChannel extends AbstractChannel {
private List<Event> events = new ArrayList<Event>();
@Override
public void put(Event event) throws ChannelException {
events.add(event);
}
@Override
public Event take() throws ChannelException {
if (events.size() > 0) {
return events.remove(0);
}
return null;
}
@Override
public Transaction getTransaction() {
return null;
}
}
  /** Minimal Event: a fixed string body and an immutable, empty header map. */
  private static class MockEvent implements Event {
    // Shared read-only header map; setHeaders is intentionally unsupported.
    private static final Map<String, String> EMPTY_HEADERS =
        Collections.unmodifiableMap(new HashMap<String, String>());
    private byte[] body;
    MockEvent(String str) {
      // NOTE(review): encodes with the platform default charset; fine for the
      // ASCII payloads used in these tests.
      this.body = str.getBytes();
    }
    @Override
    public Map<String, String> getHeaders() {
      return EMPTY_HEADERS;
    }
    @Override
    public void setHeaders(Map<String, String> headers) {
      throw new UnsupportedOperationException();
    }
    @Override
    public byte[] getBody() {
      return body;
    }
    @Override
    public void setBody(byte[] body) {
      this.body = body;
    }
  }
}
| 9,927 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/FixedOrderSelector.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.sink;
import java.util.Iterator;
import org.apache.flume.Context;
import org.apache.flume.Sink;
/**
* A test selector that always returns the iteration order of specified
* sinks for testing purposes. This selector expects that the configuration
* key {@value #SET_ME} is specified with a non-null value.
*/
public class FixedOrderSelector extends AbstractSinkSelector {
  public static final String SET_ME = "setme";
  /** Always hands back the sinks in their configured iteration order. */
  @Override
  public Iterator<Sink> createSinkIterator() {
    return getSinks().iterator();
  }
  /** Requires the {@value #SET_ME} key to be present in the configuration. */
  @Override
  public void configure(Context context) {
    super.configure(context);
    String value = context.getString(SET_ME);
    if (value == null) {
      throw new RuntimeException("config key " + SET_ME + " not specified");
    }
  }
}
| 9,928 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/SinkProcessorFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink;
import java.util.ArrayList;
import java.util.List;
import org.apache.flume.Context;
import org.apache.flume.Sink;
import org.apache.flume.SinkFactory;
import org.apache.flume.SinkProcessor;
import org.junit.Assert;
import org.junit.Test;
public class SinkProcessorFactoryTest {
  /**
   * Builds two avro sinks and verifies that creating a processor via the
   * fully-qualified class name and via its short alias yields the same
   * processor class. Extracted to remove the duplication between the two
   * test methods below.
   */
  private void assertTypeAndAliasEquivalent(Context context, String fqcn, String alias) {
    SinkFactory sf = new DefaultSinkFactory();
    List<Sink> sinks = new ArrayList<Sink>();
    sinks.add(sf.create("sink1", "avro"));
    sinks.add(sf.create("sink2", "avro"));
    context.put("type", fqcn);
    SinkProcessor sp = SinkProcessorFactory.getProcessor(context, sinks);
    context.put("type", alias);
    SinkProcessor sp2 = SinkProcessorFactory.getProcessor(context, sinks);
    Assert.assertEquals(sp.getClass(), sp2.getClass());
  }
  @Test
  public void test() {
    Context context = new Context();
    context.put("priority.sink1", "1");
    context.put("priority.sink2", "2");
    assertTypeAndAliasEquivalent(context,
        FailoverSinkProcessor.class.getName(), "failover");
  }
  @Test
  public void testInstantiatingLoadBalancingSinkProcessor() {
    Context context = new Context();
    context.put("selector", "random");
    assertTypeAndAliasEquivalent(context,
        LoadBalancingSinkProcessor.class.getName(), "load_balance");
  }
}
| 9,929 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/sink/TestFailoverSinkProcessor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.sink;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.SinkRunner;
import org.apache.flume.Transaction;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.lifecycle.LifecycleState;
import org.junit.Test;
public class TestFailoverSinkProcessor {
  // a simple sink for predictable testing purposes that fails after
  // a given number of events have been consumed
  // NOTE(review): non-static inner class — carries a hidden reference to the
  // enclosing test instance; harmless here but worth knowing.
  class ConsumeXSink implements Sink {
    volatile int remaining; // events this sink may still consume before failing
    private LifecycleState state;
    private String name;
    private Channel channel;
    private Integer written; // total events successfully taken from the channel
    public ConsumeXSink(int consumeCount) {
      remaining = consumeCount;
      written = 0;
    }
    @Override
    public void start() {
      state = LifecycleState.START;
    }
    @Override
    public void stop() {
      state = LifecycleState.STOP;
    }
    @Override
    public LifecycleState getLifecycleState() {
      return state;
    }
    @Override
    public void setName(String name) {
      this.name = name;
    }
    @Override
    public String getName() {
      return name;
    }
    @Override
    public void setChannel(Channel channel) {
      this.channel = channel;
    }
    @Override
    public Channel getChannel() {
      return channel;
    }
    // lets a test "recover" a failed sink by granting it a new budget
    public synchronized void setRemaining(int remaining) {
      this.remaining = remaining;
    }
    @Override
    public Status process() throws EventDeliveryException {
      // fail once the budget is exhausted, before touching the channel
      synchronized (this) {
        if (remaining <= 0) {
          throw new EventDeliveryException("can't consume more");
        }
      }
      Transaction tx = channel.getTransaction();
      tx.begin();
      Event e = channel.take();
      tx.commit();
      tx.close();
      if (e != null) {
        synchronized (this) {
          remaining--;
        }
        written++;
      }
      return Status.READY;
    }
    public Integer getWritten() {
      return written;
    }
  }
  /**
   * Test failover by feeding events to the channel and verifying at various
   * stages that the number of events consumed by each sink matches expected
   * failover patterns
   *
   * @throws InterruptedException
   */
  @Test
  public void testFailover() throws InterruptedException {
    Channel ch = new MemoryChannel();
    ConsumeXSink s1 = new ConsumeXSink(10);
    s1.setChannel(ch);
    s1.setName("s1");
    ConsumeXSink s2 = new ConsumeXSink(50);
    s2.setChannel(ch);
    s2.setName("s2");
    ConsumeXSink s3 = new ConsumeXSink(100);
    s3.setChannel(ch);
    s3.setName("s3");
    Context context = new Context();
    Configurables.configure(s1, context);
    Configurables.configure(s2, context);
    Configurables.configure(s3, context);
    Configurables.configure(ch, context);
    ch.start();
    List<Sink> sinks = new LinkedList<Sink>();
    sinks.add(s1);
    sinks.add(s2);
    sinks.add(s3);
    SinkGroup group = new SinkGroup(sinks);
    Map<String, String> params = new HashMap<String, String>();
    // s1 is the highest-priority sink, then s2, then s3
    params.put("sinks", "s1 s2 s3");
    params.put("processor.type", "failover");
    params.put("processor.priority.s1", "3");
    params.put("processor.priority.s2", "2");
    params.put("processor.priority.s3", "1");
    params.put("processor.maxpenalty", "10000");
    context.putAll(params);
    Configurables.configure(group, context);
    SinkRunner runner = new SinkRunner(group.getProcessor());
    runner.start();
    Assert.assertEquals(LifecycleState.START, s1.getLifecycleState());
    Assert.assertEquals(LifecycleState.START, s2.getLifecycleState());
    Assert.assertEquals(LifecycleState.START, s3.getLifecycleState());
    // first batch: s1 takes its 10-event budget, then fails over to s2
    for (int i = 0; i < 15; i++) {
      Transaction tx = ch.getTransaction();
      tx.begin();
      ch.put(EventBuilder.withBody("test".getBytes()));
      tx.commit();
      tx.close();
    }
    Thread.sleep(100);
    Assert.assertEquals(new Integer(10), s1.getWritten());
    Assert.assertEquals(new Integer(5), s2.getWritten());
    // second batch: s2 exhausts its budget, then fails over to s3
    for (int i = 0; i < 50; i++) {
      Transaction tx = ch.getTransaction();
      tx.begin();
      ch.put(EventBuilder.withBody("test".getBytes()));
      tx.commit();
      tx.close();
    }
    Thread.sleep(100);
    Assert.assertEquals(new Integer(50), s2.getWritten());
    Assert.assertEquals(new Integer(5), s3.getWritten());
    // test rollover to recovered servers
    s2.setRemaining(20);
    // get us past the retry time for the failed sink
    Thread.sleep(5000);
    for (int i = 0; i < 100; i++) {
      Transaction tx = ch.getTransaction();
      tx.begin();
      ch.put(EventBuilder.withBody("test".getBytes()));
      tx.commit();
      tx.close();
    }
    Thread.sleep(1000);
    Assert.assertEquals(new Integer(10), s1.getWritten());
    Assert.assertEquals(new Integer(70), s2.getWritten());
    Assert.assertEquals(new Integer(85), s3.getWritten());
    runner.stop();
    ch.stop();
  }
}
| 9,930 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/TestMonitoredCounterGroup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import java.lang.management.ManagementFactory;
import java.util.Random;
public class TestMonitoredCounterGroup {
private static final int MAX_BOUNDS = 1000;
private static final String ROOT_OBJ_NAME_PREFIX = "org.apache.flume.";
private static final String SOURCE_OBJ_NAME_PREFIX = ROOT_OBJ_NAME_PREFIX
+ "source:type=";
private static final String CHANNEL_OBJ_NAME_PREFIX = ROOT_OBJ_NAME_PREFIX
+ "channel:type=";
private static final String SINK_OBJ_NAME_PREFIX = ROOT_OBJ_NAME_PREFIX
+ "sink:type=";
private static final String ATTR_START_TIME = "StartTime";
private static final String ATTR_STOP_TIME = "StopTime";
private static final String SRC_ATTR_EVENT_RECEVIED_COUNT =
"EventReceivedCount";
private static final String SRC_ATTR_EVENT_ACCEPTED_COUNT =
"EventAcceptedCount";
private static final String SRC_ATTR_APPEND_RECEVIED_COUNT =
"AppendReceivedCount";
private static final String SRC_ATTR_APPEND_ACCEPTED_COUNT =
"AppendAcceptedCount";
private static final String SRC_ATTR_APPEND_BATCH_RECEVIED_COUNT =
"AppendBatchReceivedCount";
private static final String SRC_ATTR_APPEND_BATCH_ACCEPTED_COUNT =
"AppendBatchAcceptedCount";
private static final String CH_ATTR_CHANNEL_SIZE = "ChannelSize";
private static final String CH_ATTR_EVENT_PUT_ATTEMPT =
"EventPutAttemptCount";
private static final String CH_ATTR_EVENT_TAKE_ATTEMPT =
"EventTakeAttemptCount";
private static final String CH_ATTR_EVENT_PUT_SUCCESS =
"EventPutSuccessCount";
private static final String CH_ATTR_EVENT_TAKE_SUCCESS =
"EventTakeSuccessCount";
private static final String SK_ATTR_CONN_CREATED =
"ConnectionCreatedCount";
private static final String SK_ATTR_CONN_CLOSED =
"ConnectionClosedCount";
private static final String SK_ATTR_CONN_FAILED =
"ConnectionFailedCount";
private static final String SK_ATTR_BATCH_EMPTY =
"BatchEmptyCount";
private static final String SK_ATTR_BATCH_UNDERFLOW =
"BatchUnderflowCount";
private static final String SK_ATTR_BATCH_COMPLETE =
"BatchCompleteCount";
private static final String SK_ATTR_EVENT_DRAIN_ATTEMPT =
"EventDrainAttemptCount";
private static final String SK_ATTR_EVENT_DRAIN_SUCCESS =
"EventDrainSuccessCount";
private MBeanServer mbServer;
private Random random;
  @Before
  public void setUp() {
    // Counters register against the platform MBean server; seed the RNG so
    // each run exercises different counter values.
    mbServer = ManagementFactory.getPlatformMBeanServer();
    random = new Random(System.nanoTime());
  }
  // Exercises SinkCounter end to end via JMX: zeroed on registration,
  // incremented values visible, values retained across stop(), and reset
  // (with a fresh StartTime) on restart.
  @Test
  public void testSinkCounter() throws Exception {
    String name = getRandomName();
    SinkCounter skc = new SinkCounter(name);
    skc.register();
    ObjectName on = new ObjectName(SINK_OBJ_NAME_PREFIX + name);
    assertSkCounterState(on, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L);
    skc.start();
    long start1 = getStartTime(on);
    Assert.assertTrue("StartTime", start1 != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    // random per-attribute increments so each run differs
    int connCreated = random.nextInt(MAX_BOUNDS);
    int connClosed = random.nextInt(MAX_BOUNDS);
    int connFailed = random.nextInt(MAX_BOUNDS);
    int batchEmpty = random.nextInt(MAX_BOUNDS);
    int batchUnderflow = random.nextInt(MAX_BOUNDS);
    int batchComplete = random.nextInt(MAX_BOUNDS);
    int eventDrainAttempt = random.nextInt(MAX_BOUNDS);
    int eventDrainSuccess = random.nextInt(MAX_BOUNDS);
    for (int i = 0; i < connCreated; i++) {
      skc.incrementConnectionCreatedCount();
    }
    for (int i = 0; i < connClosed; i++) {
      skc.incrementConnectionClosedCount();
    }
    for (int i = 0; i < connFailed; i++) {
      skc.incrementConnectionFailedCount();
    }
    for (int i = 0; i < batchEmpty; i++) {
      skc.incrementBatchEmptyCount();
    }
    for (int i = 0; i < batchUnderflow; i++) {
      skc.incrementBatchUnderflowCount();
    }
    for (int i = 0; i < batchComplete; i++) {
      skc.incrementBatchCompleteCount();
    }
    for (int i = 0; i < eventDrainAttempt; i++) {
      skc.incrementEventDrainAttemptCount();
    }
    for (int i = 0; i < eventDrainSuccess; i++) {
      skc.incrementEventDrainSuccessCount();
    }
    assertSkCounterState(on, connCreated, connClosed, connFailed, batchEmpty,
        batchUnderflow, batchComplete, eventDrainAttempt, eventDrainSuccess);
    skc.stop();
    // stop() sets StopTime but preserves the counter values
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) != 0L);
    assertSkCounterState(on, connCreated, connClosed, connFailed, batchEmpty,
        batchUnderflow, batchComplete, eventDrainAttempt, eventDrainSuccess);
    // give start time a chance to increment
    Thread.sleep(5L);
    skc.start();
    // restart resets all counters and records a newer StartTime
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StartTime", getStartTime(on) > start1);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    assertSkCounterState(on, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L);
    int eventDrainAttempt2 = random.nextInt(MAX_BOUNDS);
    int eventDrainSuccess2 = random.nextInt(MAX_BOUNDS);
    skc.addToEventDrainAttemptCount(eventDrainAttempt2);
    skc.addToEventDrainSuccessCount(eventDrainSuccess2);
    assertSkCounterState(on, 0L, 0L, 0L, 0L, 0L, 0L,
        eventDrainAttempt2, eventDrainSuccess2);
  }
  // Exercises ChannelCounter via JMX: zeroed on registration, incremented
  // values visible, retained across stop(), and reset on restart.
  @Test
  public void testChannelCounter() throws Exception {
    String name = getRandomName();
    ChannelCounter chc = new ChannelCounter(name);
    chc.register();
    ObjectName on = new ObjectName(CHANNEL_OBJ_NAME_PREFIX + name);
    assertChCounterState(on, 0L, 0L, 0L, 0L, 0L);
    Assert.assertTrue("StartTime", getStartTime(on) == 0L);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    chc.start();
    long start1 = getStartTime(on);
    Assert.assertTrue("StartTime", start1 != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    // random per-attribute values so each run differs
    int numChannelSize = random.nextInt(MAX_BOUNDS);
    int numEventPutAttempt = random.nextInt(MAX_BOUNDS);
    int numEventTakeAttempt = random.nextInt(MAX_BOUNDS);
    int numEventPutSuccess = random.nextInt(MAX_BOUNDS);
    int numEventTakeSuccess = random.nextInt(MAX_BOUNDS);
    chc.setChannelSize(numChannelSize);
    for (int i = 0; i < numEventPutAttempt; i++) {
      chc.incrementEventPutAttemptCount();
    }
    for (int i = 0; i < numEventTakeAttempt; i++) {
      chc.incrementEventTakeAttemptCount();
    }
    chc.addToEventPutSuccessCount(numEventPutSuccess);
    chc.addToEventTakeSuccessCount(numEventTakeSuccess);
    assertChCounterState(on, numChannelSize, numEventPutAttempt,
        numEventTakeAttempt, numEventPutSuccess, numEventTakeSuccess);
    chc.stop();
    // stop() sets StopTime but preserves the counter values
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) != 0L);
    assertChCounterState(on, numChannelSize, numEventPutAttempt,
        numEventTakeAttempt, numEventPutSuccess, numEventTakeSuccess);
    // give start time a chance to increment
    Thread.sleep(5L);
    chc.start();
    // restart resets all counters and records a newer StartTime
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StartTime", getStartTime(on) > start1);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    assertChCounterState(on, 0L, 0L, 0L, 0L, 0L);
  }
  // Exercises SourceCounter via JMX: zeroed on registration, incremented
  // values visible, retained across stop(), reset on restart, then verifies
  // the increment-style setters after the reset.
  @Test
  public void testSourceCounter() throws Exception {
    String name = getRandomName();
    SourceCounter srcc = new SourceCounter(name);
    srcc.register();
    ObjectName on = new ObjectName(SOURCE_OBJ_NAME_PREFIX + name);
    assertSrcCounterState(on, 0L, 0L, 0L, 0L, 0L, 0L);
    Assert.assertTrue("StartTime", getStartTime(on) == 0L);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    srcc.start();
    long start1 = getStartTime(on);
    Assert.assertTrue("StartTime", start1 != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    // random per-attribute values so each run differs
    int numEventReceived = random.nextInt(MAX_BOUNDS);
    int numEventAccepted = random.nextInt(MAX_BOUNDS);
    int numAppendReceived = random.nextInt(MAX_BOUNDS);
    int numAppendAccepted = random.nextInt(MAX_BOUNDS);
    int numAppendBatchReceived = random.nextInt(MAX_BOUNDS);
    int numAppendBatchAccepted = random.nextInt(MAX_BOUNDS);
    srcc.addToEventReceivedCount(numEventReceived);
    srcc.addToEventAcceptedCount(numEventAccepted);
    for (int i = 0; i < numAppendReceived; i++) {
      srcc.incrementAppendReceivedCount();
    }
    for (int i = 0; i < numAppendAccepted; i++) {
      srcc.incrementAppendAcceptedCount();
    }
    for (int i = 0; i < numAppendBatchReceived; i++) {
      srcc.incrementAppendBatchReceivedCount();
    }
    for (int i = 0; i < numAppendBatchAccepted; i++) {
      srcc.incrementAppendBatchAcceptedCount();
    }
    assertSrcCounterState(on, numEventReceived, numEventAccepted,
        numAppendReceived, numAppendAccepted, numAppendBatchReceived,
        numAppendBatchAccepted);
    srcc.stop();
    // stop() sets StopTime but preserves the counter values
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StopTime", getStopTime(on) != 0L);
    assertSrcCounterState(on, numEventReceived, numEventAccepted,
        numAppendReceived, numAppendAccepted, numAppendBatchReceived,
        numAppendBatchAccepted);
    // give start time a chance to increment
    Thread.sleep(5L);
    srcc.start();
    // restart resets all counters and records a newer StartTime
    Assert.assertTrue("StartTime", getStartTime(on) != 0L);
    Assert.assertTrue("StartTime", getStartTime(on) > start1);
    Assert.assertTrue("StopTime", getStopTime(on) == 0L);
    assertSrcCounterState(on, 0L, 0L, 0L, 0L, 0L, 0L);
    int numEventReceived2 = random.nextInt(MAX_BOUNDS);
    int numEventAccepted2 = random.nextInt(MAX_BOUNDS);
    for (int i = 0; i < numEventReceived2; i++) {
      srcc.incrementEventReceivedCount();
    }
    for (int i = 0; i < numEventAccepted2; i++) {
      srcc.incrementEventAcceptedCount();
    }
    assertSrcCounterState(on, numEventReceived2, numEventAccepted2,
        0L, 0L, 0L, 0L);
  }
@Test
public void testRegisterTwice() throws Exception {
  String counterName = "re-register-" + getRandomName();
  ObjectName on = new ObjectName(SOURCE_OBJ_NAME_PREFIX + counterName);

  // First counter: register, then run a full start/stop cycle so both
  // lifecycle timestamps become non-zero.
  SourceCounter first = new SourceCounter(counterName);
  first.register();
  Assert.assertEquals("StartTime", 0L, getStartTime(on));
  Assert.assertEquals("StopTime", 0L, getStopTime(on));
  first.start();
  first.stop();
  Assert.assertTrue("StartTime", getStartTime(on) > 0L);
  Assert.assertTrue("StopTime", getStopTime(on) > 0L);

  // Re-registering a counter under the same name must expose a fresh,
  // zeroed MBean rather than the stale one.
  SourceCounter second = new SourceCounter(counterName);
  second.register();
  Assert.assertEquals("StartTime", 0L, getStartTime(on));
  Assert.assertEquals("StopTime", 0L, getStopTime(on));
}
/**
 * Asserts that the source counter MBean registered under {@code on} exposes
 * the expected event/append/append-batch counts.
 *
 * <p>Fix: JUnit's {@code assertEquals(message, expected, actual)} was being
 * called with the actual JMX value in the "expected" slot, so failure
 * messages read backwards; arguments are now in the conventional order.
 *
 * @param on ObjectName of the source counter MBean
 * @throws Exception if an attribute cannot be read from the MBean server
 */
private void assertSrcCounterState(ObjectName on, long eventReceivedCount,
    long eventAcceptedCount, long appendReceivedCount,
    long appendAcceptedCount, long appendBatchReceivedCount,
    long appendBatchAcceptedCount) throws Exception {
  Assert.assertEquals("SrcEventReceived",
      eventReceivedCount,
      getSrcEventReceivedCount(on));
  Assert.assertEquals("SrcEventAccepted",
      eventAcceptedCount,
      getSrcEventAcceptedCount(on));
  Assert.assertEquals("SrcAppendReceived",
      appendReceivedCount,
      getSrcAppendReceivedCount(on));
  Assert.assertEquals("SrcAppendAccepted",
      appendAcceptedCount,
      getSrcAppendAcceptedCount(on));
  Assert.assertEquals("SrcAppendBatchReceived",
      appendBatchReceivedCount,
      getSrcAppendBatchReceivedCount(on));
  Assert.assertEquals("SrcAppendBatchAccepted",
      appendBatchAcceptedCount,
      getSrcAppendBatchAcceptedCount(on));
}
/**
 * Asserts that the channel counter MBean registered under {@code on} exposes
 * the expected size and put/take attempt/success counts.
 *
 * <p>Fix: expected and actual were swapped in every {@code assertEquals}
 * call, producing backwards failure messages; now in conventional order.
 *
 * @param on ObjectName of the channel counter MBean
 * @throws Exception if an attribute cannot be read from the MBean server
 */
private void assertChCounterState(ObjectName on, long channelSize,
    long eventPutAttempt, long eventTakeAttempt, long eventPutSuccess,
    long eventTakeSuccess) throws Exception {
  Assert.assertEquals("ChChannelSize",
      channelSize,
      getChChannelSize(on));
  Assert.assertEquals("ChEventPutAttempt",
      eventPutAttempt,
      getChEventPutAttempt(on));
  Assert.assertEquals("ChEventTakeAttempt",
      eventTakeAttempt,
      getChEventTakeAttempt(on));
  Assert.assertEquals("ChEventPutSuccess",
      eventPutSuccess,
      getChEventPutSuccess(on));
  Assert.assertEquals("ChEventTakeSuccess",
      eventTakeSuccess,
      getChEventTakeSuccess(on));
}
/**
 * Asserts that the sink counter MBean registered under {@code on} exposes
 * the expected connection, batch, and event-drain counts.
 *
 * <p>Fix: expected and actual were swapped in every {@code assertEquals}
 * call, producing backwards failure messages; now in conventional order.
 *
 * @param on ObjectName of the sink counter MBean
 * @throws Exception if an attribute cannot be read from the MBean server
 */
private void assertSkCounterState(ObjectName on, long connCreated,
    long connClosed, long connFailed, long batchEmpty, long batchUnderflow,
    long batchComplete, long eventDrainAttempt, long eventDrainSuccess)
    throws Exception {
  Assert.assertEquals("SkConnCreated",
      connCreated,
      getSkConnectionCreated(on));
  Assert.assertEquals("SkConnClosed",
      connClosed,
      getSkConnectionClosed(on));
  Assert.assertEquals("SkConnFailed",
      connFailed,
      getSkConnectionFailed(on));
  Assert.assertEquals("SkBatchEmpty",
      batchEmpty,
      getSkBatchEmpty(on));
  Assert.assertEquals("SkBatchUnderflow",
      batchUnderflow,
      getSkBatchUnderflow(on));
  Assert.assertEquals("SkBatchComplete",
      batchComplete,
      getSkBatchComplete(on));
  Assert.assertEquals("SkEventDrainAttempt",
      eventDrainAttempt,
      getSkEventDrainAttempt(on));
  Assert.assertEquals("SkEventDrainSuccess",
      eventDrainSuccess,
      getSkEventDrainSuccess(on));
}
// --- JMX attribute accessors --------------------------------------------
// Each helper below reads one long-valued attribute from the counter MBean
// registered under the given ObjectName, via the MBean server held in the
// mbServer field (declared elsewhere in this class). The ATTR_*/SK_*/CH_*/
// SRC_* constants are also declared elsewhere in this class; several of the
// SRC_* names carry a pre-existing "RECEVIED" spelling, preserved here.

// Lifecycle timestamps; 0 means "never started" / "never stopped".
private long getStartTime(ObjectName on) throws Exception {
  return getLongAttribute(on, ATTR_START_TIME);
}
private long getStopTime(ObjectName on) throws Exception {
  return getLongAttribute(on, ATTR_STOP_TIME);
}

// Sink counter attributes.
private long getSkConnectionCreated(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_CONN_CREATED);
}
private long getSkConnectionClosed(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_CONN_CLOSED);
}
private long getSkConnectionFailed(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_CONN_FAILED);
}
private long getSkBatchEmpty(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_BATCH_EMPTY);
}
private long getSkBatchUnderflow(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_BATCH_UNDERFLOW);
}
private long getSkBatchComplete(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_BATCH_COMPLETE);
}
private long getSkEventDrainAttempt(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_EVENT_DRAIN_ATTEMPT);
}
private long getSkEventDrainSuccess(ObjectName on) throws Exception {
  return getLongAttribute(on, SK_ATTR_EVENT_DRAIN_SUCCESS);
}

// Channel counter attributes.
private long getChChannelSize(ObjectName on) throws Exception {
  return getLongAttribute(on, CH_ATTR_CHANNEL_SIZE);
}
private long getChEventPutAttempt(ObjectName on) throws Exception {
  return getLongAttribute(on, CH_ATTR_EVENT_PUT_ATTEMPT);
}
private long getChEventTakeAttempt(ObjectName on) throws Exception {
  return getLongAttribute(on, CH_ATTR_EVENT_TAKE_ATTEMPT);
}
private long getChEventPutSuccess(ObjectName on) throws Exception {
  return getLongAttribute(on, CH_ATTR_EVENT_PUT_SUCCESS);
}
private long getChEventTakeSuccess(ObjectName on) throws Exception {
  return getLongAttribute(on, CH_ATTR_EVENT_TAKE_SUCCESS);
}

// Source counter attributes.
private long getSrcAppendBatchAcceptedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_APPEND_BATCH_ACCEPTED_COUNT);
}
private long getSrcAppendBatchReceivedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_APPEND_BATCH_RECEVIED_COUNT);
}
private long getSrcAppendAcceptedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_APPEND_ACCEPTED_COUNT);
}
private long getSrcAppendReceivedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_APPEND_RECEVIED_COUNT);
}
private long getSrcEventAcceptedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_EVENT_ACCEPTED_COUNT);
}
private long getSrcEventReceivedCount(ObjectName on) throws Exception {
  return getLongAttribute(on, SRC_ATTR_EVENT_RECEVIED_COUNT);
}

// Reads the named attribute and unboxes it as a long; throws
// ClassCastException if the attribute is not a Long.
private long getLongAttribute(ObjectName on, String attr) throws Exception {
  Object result = getAttribute(on, attr);
  return ((Long) result).longValue();
}

// Raw MBean attribute lookup; propagates JMX exceptions to the caller.
private Object getAttribute(ObjectName objName, String attrName)
    throws Exception {
  return mbServer.getAttribute(objName, attrName);
}

// Produces a name unique within the test run so counters never collide.
private String getRandomName() {
  return "random-" + System.nanoTime();
}
}
| 9,931 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/util/TestJMXPollUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation.util;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Transaction;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.PseudoTxnMemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.junit.Assert;
import org.junit.Test;
/**
 * Verifies that {@link JMXPollUtil#getAllMBeans()} surfaces counter MBeans
 * for both a transactional and a pseudo-transactional channel after a
 * standard load of two puts and one take each.
 */
public class TestJMXPollUtil {

  Channel memChannel = new MemoryChannel();
  Channel pmemChannel = new PseudoTxnMemoryChannel();

  @Test
  public void testJMXPoll() {
    memChannel.setName("memChannel");
    pmemChannel.setName("pmemChannel");
    Context context = new Context();
    Configurables.configure(memChannel, context);
    Configurables.configure(pmemChannel, context);
    memChannel.start();
    pmemChannel.start();

    // Drive identical traffic through both channels so their counters are
    // populated with known, non-trivial values.
    exercise(memChannel);
    exercise(pmemChannel);

    Map<String, Map<String, String>> mbeans = JMXPollUtil.getAllMBeans();
    Assert.assertNotNull(mbeans);
    verifyChannelBean(mbeans, "CHANNEL.memChannel");
    verifyChannelBean(mbeans, "CHANNEL.pmemChannel");

    memChannel.stop();
    pmemChannel.stop();
  }

  // Commits one put-put transaction, then one take transaction.
  private static void exercise(Channel channel) {
    Transaction txn = channel.getTransaction();
    txn.begin();
    channel.put(EventBuilder.withBody("blah".getBytes()));
    channel.put(EventBuilder.withBody("blah".getBytes()));
    txn.commit();
    txn.close();

    txn = channel.getTransaction();
    txn.begin();
    channel.take();
    txn.commit();
    txn.close();
  }

  // Fetches the named channel bean and checks its counter attributes.
  private static void verifyChannelBean(
      Map<String, Map<String, String>> mbeans, String beanName) {
    Map<String, String> bean = mbeans.get(beanName);
    Assert.assertNotNull(bean);
    JMXTestUtils.checkChannelCounterParams(bean);
  }
}
| 9,932 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/util/JMXTestUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation.util;
import java.util.Map;
import org.junit.Assert;
/**
 * Shared assertions for JMX channel counter attributes used by the
 * monitoring tests. The expected values correspond to the standard test
 * load applied elsewhere in this package: two puts and one take per
 * channel.
 */
public class JMXTestUtils {

  // Utility class: static methods only, never instantiated.
  private JMXTestUtils() {
  }

  /**
   * Checks the channel counter attribute map produced by a JMX poll.
   *
   * @param attrs attribute name to string-value map for one channel MBean
   */
  public static void checkChannelCounterParams(Map<String, String> attrs) {
    Assert.assertNotNull(attrs.get("StartTime"));
    Assert.assertNotNull(attrs.get("StopTime"));
    // Two puts minus one take leaves at least one event in the channel.
    Assert.assertTrue(Long.parseLong(attrs.get("ChannelSize")) != 0);
    // assertEquals (expected, actual) instead of assertTrue(x == n) so a
    // failure reports the observed value instead of just "false".
    Assert.assertEquals(2, Long.parseLong(attrs.get("EventPutAttemptCount")));
    Assert.assertEquals(1, Long.parseLong(attrs.get("EventTakeAttemptCount")));
    Assert.assertEquals(2, Long.parseLong(attrs.get("EventPutSuccessCount")));
    Assert.assertEquals(1, Long.parseLong(attrs.get("EventTakeSuccessCount")));
  }
}
| 9,933 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/http/BaseHTTPMetricsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation.http;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Transaction;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.PseudoTxnMemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import java.net.ServerSocket;
/**
 * Common fixture for HTTP metrics server tests: owns two channels and
 * provides helpers to apply a known load and tear the channels down.
 */
public class BaseHTTPMetricsTest {

  private Channel memChannel = new MemoryChannel();
  private Channel pmemChannel = new PseudoTxnMemoryChannel();

  // Returns a TCP port that was free at the moment of the call.
  static int getFreePort() throws Exception {
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    }
  }

  // Configures and starts both channels, then applies the standard load
  // (two puts, one take) to each so the counters have known values.
  void runLoad() {
    memChannel.setName("memChannel");
    pmemChannel.setName("pmemChannel");
    Context context = new Context();
    Configurables.configure(memChannel, context);
    Configurables.configure(pmemChannel, context);
    memChannel.start();
    pmemChannel.start();
    applyLoad(memChannel);
    applyLoad(pmemChannel);
  }

  // Commits a put-put transaction followed by a take transaction.
  private static void applyLoad(Channel channel) {
    Transaction txn = channel.getTransaction();
    txn.begin();
    channel.put(EventBuilder.withBody("blah".getBytes()));
    channel.put(EventBuilder.withBody("blah".getBytes()));
    txn.commit();
    txn.close();

    txn = channel.getTransaction();
    txn.begin();
    channel.take();
    txn.commit();
    txn.close();
  }

  // Stops both channels; call after assertions complete.
  void shutdown() {
    memChannel.stop();
    pmemChannel.stop();
  }
}
| 9,934 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/http/TestHTTPMetricsServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation.http;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.apache.flume.Context;
import org.apache.flume.instrumentation.MonitorService;
import org.apache.flume.instrumentation.util.JMXTestUtils;
import org.junit.Assert;
import org.junit.Test;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;
/**
 * Verifies the JSON metrics endpoint of {@link HTTPMetricsServer}, and that
 * TRACE/OPTIONS requests are rejected with 403.
 *
 * <p>Fixes: response body was accumulated with {@code result += line}
 * (quadratic string concatenation) and the reader leaked on exception;
 * {@code srv.stop()} was skipped when an assertion failed, leaking the port.
 */
public class TestHTTPMetricsServer extends BaseHTTPMetricsTest {

  private Type mapType = new TypeToken<Map<String, Map<String, String>>>() {}.getType();
  private Gson gson = new Gson();

  @Test
  public void testJSON() throws Exception {
    runLoad();
    testWithPort(getFreePort());
    shutdown();
  }

  // Starts the metrics server on the given port, fetches /metrics, and
  // checks the channel counter attributes for both test channels.
  private void testWithPort(int port) throws Exception {
    MonitorService srv = new HTTPMetricsServer();
    Context context = new Context();
    context.put(HTTPMetricsServer.CONFIG_PORT, String.valueOf(port));
    srv.configure(context);
    srv.start();
    try {
      Thread.sleep(1000); // give the embedded server a moment to bind
      String result = readMetrics(port);
      Map<String, Map<String, String>> mbeans = gson.fromJson(result, mapType);
      Assert.assertNotNull(mbeans);
      Map<String, String> memBean = mbeans.get("CHANNEL.memChannel");
      Assert.assertNotNull(memBean);
      JMXTestUtils.checkChannelCounterParams(memBean);
      Map<String, String> pmemBean = mbeans.get("CHANNEL.pmemChannel");
      Assert.assertNotNull(pmemBean);
      JMXTestUtils.checkChannelCounterParams(pmemBean);
    } finally {
      // Always release the port, even if an assertion above failed.
      srv.stop();
    }
  }

  // Issues GET /metrics and returns the whole response body as one string.
  private static String readMetrics(int port) throws Exception {
    URL url = new URL("http://0.0.0.0:" + port + "/metrics");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    StringBuilder result = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(conn.getInputStream()))) {
      String line;
      while ((line = reader.readLine()) != null) {
        result.append(line);
      }
    }
    return result.toString();
  }

  @Test
  public void testTrace() throws Exception {
    doTestForbiddenMethods(getFreePort(), "TRACE");
  }

  @Test
  public void testOptions() throws Exception {
    doTestForbiddenMethods(getFreePort(), "OPTIONS");
  }

  // The metrics server must answer the given HTTP method with 403 Forbidden.
  private void doTestForbiddenMethods(int port, String method) throws Exception {
    MonitorService srv = new HTTPMetricsServer();
    Context context = new Context();
    context.put(HTTPMetricsServer.CONFIG_PORT, String.valueOf(port));
    srv.configure(context);
    srv.start();
    try {
      Thread.sleep(1000);
      URL url = new URL("http://0.0.0.0:" + port + "/metrics");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod(method);
      Assert.assertEquals(HttpServletResponse.SC_FORBIDDEN, conn.getResponseCode());
    } finally {
      srv.stop();
    }
  }
}
| 9,935 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/http/TestPrometheusMetricsServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.instrumentation.http;
import org.apache.flume.Context;
import org.apache.flume.instrumentation.MonitorService;
import org.junit.Assert;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * Verifies that {@link PrometheusHTTPMetricsServer} renders the channel
 * counters in the Prometheus text exposition format.
 *
 * <p>Fixes: response body was accumulated with {@code result += line + "\n"}
 * (quadratic string concatenation) and the reader leaked on exception;
 * {@code srv.stop()} was skipped when an assertion failed, leaking the port.
 */
public class TestPrometheusMetricsServer extends BaseHTTPMetricsTest {

  @Test
  public void testMetics() throws Exception {
    runLoad();
    testWithPort(getFreePort());
    shutdown();
  }

  // Starts the Prometheus endpoint, fetches /metrics, and checks that every
  // expected metric line (from the standard load of two puts / one take per
  // channel) appears in the output.
  private void testWithPort(int port) throws Exception {
    MonitorService srv = new PrometheusHTTPMetricsServer();
    Context context = new Context();
    context.put(PrometheusHTTPMetricsServer.CONFIG_PORT, String.valueOf(port));
    srv.configure(context);
    srv.start();
    try {
      Thread.sleep(1000); // give the embedded server a moment to bind
      URL url = new URL("http://0.0.0.0:" + port + "/metrics");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("GET");
      StringBuilder sb = new StringBuilder();
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(conn.getInputStream()))) {
        String line;
        while ((line = reader.readLine()) != null) {
          sb.append(line).append('\n');
        }
      }
      String result = sb.toString();
      String[] targetOutputs = {"ChannelSize{component=\"pmemChannel\",} 1.0\n",
          "Flume_ChannelSize{component=\"memChannel\",} 1.0\n",
          "Flume_ChannelCapacity{component=\"pmemChannel\",} 0.0\n",
          "Flume_ChannelCapacity{component=\"memChannel\",} 100.0\n",
          "Flume_EventPutAttemptCount_total{component=\"pmemChannel\",} 2.0\n",
          "Flume_EventPutAttemptCount_total{component=\"memChannel\",} 2.0\n",
          "Flume_EventTakeAttemptCount_total{component=\"pmemChannel\",} 1.0\n",
          "Flume_EventTakeAttemptCount_total{component=\"memChannel\",} 1.0\n",
          "Flume_EventPutSuccessCount_total{component=\"pmemChannel\",} 2.0\n",
          "Flume_EventPutSuccessCount_total{component=\"memChannel\",} 2.0\n",
          "Flume_EventTakeSuccessCount_total{component=\"pmemChannel\",} 1.0\n",
          "Flume_EventTakeSuccessCount_total{component=\"memChannel\",} 1.0\n"};
      for (String target : targetOutputs) {
        // Include the missing line in the failure message for diagnosis.
        Assert.assertTrue("missing metric line: " + target, result.contains(target));
      }
    } finally {
      // Always release the port, even if an assertion above failed.
      srv.stop();
    }
  }
}
| 9,936 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/instrumentation/kafka/KafkaSourceCounterTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.instrumentation.kafka;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class KafkaSourceCounterTest {
KafkaSourceCounter counter;
@Before
public void setUp() throws Exception {
counter = new KafkaSourceCounter("test");
}
@Test
public void testAddToKafkaEventGetTimer() throws Exception {
Assert.assertEquals(1L, counter.addToKafkaEventGetTimer(1L));
}
@Test
public void testAddToKafkaCommitTimer() throws Exception {
Assert.assertEquals(1L, counter.addToKafkaCommitTimer(1L));
}
@Test
public void testIncrementKafkaEmptyCount() throws Exception {
Assert.assertEquals(1L, counter.incrementKafkaEmptyCount());
}
@Test
public void testGetKafkaCommitTimer() throws Exception {
Assert.assertEquals(0, counter.getKafkaCommitTimer());
}
@Test
public void testGetKafkaEventGetTimer() throws Exception {
Assert.assertEquals(0, counter.getKafkaEventGetTimer());
}
@Test
public void testGetKafkaEmptyCount() throws Exception {
Assert.assertEquals(0, counter.getKafkaEmptyCount());
}
} | 9,937 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestChannelUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.List;
import org.apache.flume.ChannelException;
import org.apache.flume.Event;
import org.junit.Assert;
import org.junit.Test;
/**
 * Exercises the ChannelUtils transaction helpers (put/take/transact).
 * The fixture fields used here (channel, events) and helpers
 * (testException, testMode, interruptTest, the Test* exception types, and
 * TestChannel.Mode) are inherited from AbstractBasicChannelSemanticsTest.
 */
public class TestChannelUtils
    extends AbstractBasicChannelSemanticsTest {

  // A successful put must leave the transaction committed and closed,
  // never rolled back.
  @Test
  public void testHappyPath1() {
    ChannelUtils.put(channel, events.get(0));
    Assert.assertTrue(channel.wasLastTransactionCommitted());
    Assert.assertFalse(channel.wasLastTransactionRolledBack());
    Assert.assertTrue(channel.wasLastTransactionClosed());
  }

  // Same transaction outcome for a take (even from an empty channel).
  @Test
  public void testHappyPath2() {
    ChannelUtils.take(channel);
    Assert.assertTrue(channel.wasLastTransactionCommitted());
    Assert.assertFalse(channel.wasLastTransactionRolledBack());
    Assert.assertTrue(channel.wasLastTransactionClosed());
  }

  // take() must return the same event instance that was put.
  @Test
  public void testHappyPath3() {
    ChannelUtils.put(channel, events.get(0));
    Assert.assertSame(events.get(0), ChannelUtils.take(channel));
  }

  // Events come out one-by-one in FIFO order.
  @Test
  public void testHappyPath4() {
    for (int i = 0; i < events.size(); ++i) {
      ChannelUtils.put(channel, events.get(i));
    }
    for (int i = 0; i < events.size(); ++i) {
      Assert.assertSame(events.get(i), ChannelUtils.take(channel));
    }
  }

  // Batch put/take of the whole event list, repeated, preserves order and
  // identity across rounds.
  @Test
  public void testHappyPath5() {
    int rounds = 10;
    for (int i = 0; i < rounds; ++i) {
      ChannelUtils.put(channel, events);
    }
    for (int i = 0; i < rounds; ++i) {
      List<Event> takenEvents = ChannelUtils.take(channel, events.size());
      Assert.assertTrue(takenEvents.size() == events.size());
      for (int j = 0; j < events.size(); ++j) {
        Assert.assertSame(events.get(j), takenEvents.get(j));
      }
    }
  }

  // Runs `test` inside ChannelUtils.transact with the channel switched into
  // the given failure mode, expects `exceptionClass` to propagate, and then
  // verifies the transaction was rolled back and closed — never committed.
  private void testTransact(final TestChannel.Mode mode,
      Class<? extends Throwable> exceptionClass, final Runnable test) {
    testException(exceptionClass, new Runnable() {
      @Override
      public void run() {
        ChannelUtils.transact(channel, new Runnable() {
          @Override
          public void run() {
            testMode(mode, test);
          }
        });
      }
    });
    Assert.assertFalse(channel.wasLastTransactionCommitted());
    Assert.assertTrue(channel.wasLastTransactionRolledBack());
    Assert.assertTrue(channel.wasLastTransactionClosed());
  }

  // Convenience overload: the failing operation is a single put.
  private void testTransact(TestChannel.Mode mode,
      Class<? extends Throwable> exceptionClass) {
    testTransact(mode, exceptionClass, new Runnable() {
      @Override
      public void run() {
        channel.put(events.get(0));
      }
    });
  }

  // Errors thrown inside the transaction must roll it back and propagate.
  @Test
  public void testError() {
    testTransact(TestChannel.Mode.THROW_ERROR, TestError.class);
  }

  // Same for runtime exceptions...
  @Test
  public void testRuntimeException() {
    testTransact(TestChannel.Mode.THROW_RUNTIME, TestRuntimeException.class);
  }

  // ...and for ChannelException raised by the channel itself.
  @Test
  public void testChannelException() {
    testTransact(TestChannel.Mode.THROW_CHANNEL, ChannelException.class);
  }

  // Interrupting a blocked (sleeping) put must surface as
  // InterruptedException and still roll the transaction back.
  @Test
  public void testInterrupt() throws Exception {
    testTransact(TestChannel.Mode.SLEEP, InterruptedException.class,
        new Runnable() {
          @Override
          public void run() {
            interruptTest(new Runnable() {
              @Override
              public void run() {
                channel.put(events.get(0));
              }
            });
          }
        });
  }
}
| 9,938 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestReplicatingChannelSelector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import junit.framework.Assert;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.conf.Configurables;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests the replicating channel selector: every channel is required unless
 * listed under the "optional" configuration key.
 */
public class TestReplicatingChannelSelector {

  private List<Channel> channels = new ArrayList<Channel>();
  private ChannelSelector selector;

  @Before
  public void setUp() throws Exception {
    channels.clear();
    for (String name : new String[] {"ch1", "ch2", "ch3", "ch4"}) {
      channels.add(MockChannel.createMockChannel(name));
    }
    selector = ChannelSelectorFactory.create(
        channels, new HashMap<String, String>());
  }

  // Asserts that the list contains exactly the expected channel names,
  // in order.
  private static void assertChannelNames(List<Channel> actual,
      String... expectedNames) {
    Assert.assertNotNull(actual);
    Assert.assertEquals(expectedNames.length, actual.size());
    for (int i = 0; i < expectedNames.length; i++) {
      Assert.assertEquals(expectedNames[i], actual.get(i).getName());
    }
  }

  // With no optional channels configured, all four are required.
  @Test
  public void testReplicatingSelector() throws Exception {
    selector.configure(new Context());
    assertChannelNames(selector.getRequiredChannels(new MockEvent()),
        "ch1", "ch2", "ch3", "ch4");
    assertChannelNames(selector.getOptionalChannels(new MockEvent()));
  }

  // A channel named optional moves out of the required set.
  @Test
  public void testOptionalChannels() throws Exception {
    Context context = new Context();
    context.put(ReplicatingChannelSelector.CONFIG_OPTIONAL, "ch1");
    Configurables.configure(selector, context);
    assertChannelNames(selector.getRequiredChannels(new MockEvent()),
        "ch2", "ch3", "ch4");
    assertChannelNames(selector.getOptionalChannels(new MockEvent()), "ch1");
  }

  // Multiple optional channels are supported, space-separated.
  @Test
  public void testMultipleOptionalChannels() throws Exception {
    Context context = new Context();
    context.put(ReplicatingChannelSelector.CONFIG_OPTIONAL, "ch1 ch4");
    Configurables.configure(selector, context);
    assertChannelNames(selector.getRequiredChannels(new MockEvent()),
        "ch2", "ch3");
    assertChannelNames(selector.getOptionalChannels(new MockEvent()),
        "ch1", "ch4");
  }

  // Listing the same optional channel twice must not duplicate it.
  @Test
  public void testMultipleOptionalChannelsSameChannelTwice() throws Exception {
    Context context = new Context();
    context.put(ReplicatingChannelSelector.CONFIG_OPTIONAL, "ch1 ch4 ch1");
    Configurables.configure(selector, context);
    assertChannelNames(selector.getRequiredChannels(new MockEvent()),
        "ch2", "ch3");
    assertChannelNames(selector.getOptionalChannels(new MockEvent()),
        "ch1", "ch4");
  }
}
| 9,939 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestMultiplexingChannelSelector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Event;
import org.junit.Before;
import org.junit.Test;
public class TestMultiplexingChannelSelector {
private List<Channel> channels = new ArrayList<Channel>();
private ChannelSelector selector;
private Map<String, String> config = new HashMap<String, String>();
@Before
public void setUp() throws Exception {
  channels.clear();
  for (String name : new String[] {"ch1", "ch2", "ch3"}) {
    channels.add(MockChannel.createMockChannel(name));
  }
  // Base multiplexing config keyed on "myheader"; the optional mappings are
  // shared by every test, while required mappings are added per test case.
  config.put("type", "multiplexing");
  config.put("header", "myheader");
  config.put("optional.foo", "ch2 ch3");
  config.put("optional.xyz", "ch1 ch3");
  config.put("optional.zebra", "ch1 ch2");
}
@Test
public void testSelection() throws Exception {
config.put("mapping.foo", "ch1 ch2");
config.put("mapping.bar", "ch2 ch3");
config.put("mapping.xyz", "ch1 ch2 ch3");
config.put("default", "ch1 ch3");
selector = ChannelSelectorFactory.create(channels, config);
Assert.assertTrue(selector instanceof MultiplexingChannelSelector);
Event event1 = new MockEvent();
Map<String, String> header1 = new HashMap<String, String>();
header1.put("myheader", "foo");// should match ch1 ch2
event1.setHeaders(header1);
List<Channel> reqCh1 = selector.getRequiredChannels(event1);
Assert.assertEquals(2, reqCh1.size());
Assert.assertTrue(reqCh1.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh1.get(1).getName().equals("ch2"));
List<Channel> optCh1 = selector.getOptionalChannels(event1);
Assert.assertTrue(optCh1.size() == 1);
//ch2 should not be there -- since it is a required channel
Assert.assertTrue(optCh1.get(0).getName().equals("ch3"));
Event event2 = new MockEvent();
Map<String, String> header2 = new HashMap<String, String>();
header2.put("myheader", "bar"); // should match ch2 ch3
event2.setHeaders(header2);
List<Channel> reqCh2 = selector.getRequiredChannels(event2);
Assert.assertEquals(2, reqCh2.size());
Assert.assertTrue(reqCh2.get(0).getName().equals("ch2"));
Assert.assertTrue(reqCh2.get(1).getName().equals("ch3"));
List<Channel> optCh2 = selector.getOptionalChannels(event2);
Assert.assertTrue(optCh2.isEmpty());
Event event3 = new MockEvent();
Map<String, String> header3 = new HashMap<String, String>();
header3.put("myheader", "xyz"); // should match ch1 ch2 ch3
event3.setHeaders(header3);
List<Channel> reqCh3 = selector.getRequiredChannels(event3);
Assert.assertEquals(3, reqCh3.size());
Assert.assertTrue(reqCh3.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh3.get(1).getName().equals("ch2"));
Assert.assertTrue(reqCh3.get(2).getName().equals("ch3"));
List<Channel> optCh3 = selector.getOptionalChannels(event3);
//All of the optional channels should go away.
Assert.assertTrue(optCh3.size() == 0);
}
//If the header information cannot map the event to any of the channels
//it should always be mapped to the default channel(s).
@Test
public void testNoSelection() throws Exception {
config.put("mapping.foo", "ch1 ch2");
config.put("mapping.bar", "ch2 ch3");
config.put("mapping.xyz", "ch1 ch2 ch3");
config.put("default", "ch1 ch3");
selector = ChannelSelectorFactory.create(channels, config);
Assert.assertTrue(selector instanceof MultiplexingChannelSelector);
Event noHeaderEvent = new MockEvent();
List<Channel> reqCh1 = selector.getRequiredChannels(noHeaderEvent);
List<Channel> optCh1 = selector.getOptionalChannels(noHeaderEvent);
Assert.assertEquals(2, reqCh1.size());
Assert.assertTrue(reqCh1.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh1.get(1).getName().equals("ch3"));
Assert.assertTrue(optCh1.isEmpty());
Map<String, String> header2 = new HashMap<String, String>();
header2.put("someheader", "foo");
Event invalidHeaderEvent = new MockEvent();
invalidHeaderEvent.setHeaders(header2);
List<Channel> reqCh2 = selector.getRequiredChannels(invalidHeaderEvent);
List<Channel> optCh2 = selector.getOptionalChannels(invalidHeaderEvent);
Assert.assertEquals(2, reqCh2.size());
Assert.assertTrue(reqCh2.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh2.get(1).getName().equals("ch3"));
Assert.assertTrue(optCh2.isEmpty());
Map<String, String> header3 = new HashMap<String, String>();
header3.put("myheader", "bar1");
Event unmatchedHeaderEvent = new MockEvent();
unmatchedHeaderEvent.setHeaders(header3);
List<Channel> reqCh3 = selector.getRequiredChannels(unmatchedHeaderEvent);
List<Channel> optCh3 = selector.getOptionalChannels(unmatchedHeaderEvent);
Assert.assertEquals(2, reqCh3.size());
Assert.assertTrue(reqCh3.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh3.get(1).getName().equals("ch3"));
Assert.assertTrue(optCh3.isEmpty());
Map<String, String> header4 = new HashMap<String, String>();
header4.put("myheader", "zebra");
Event zebraEvent = new MockEvent();
zebraEvent.setHeaders(header4);
List<Channel> reqCh4 = selector.getRequiredChannels(zebraEvent);
List<Channel> optCh4 = selector.getOptionalChannels(zebraEvent);
Assert.assertEquals(2, reqCh4.size());
Assert.assertTrue(reqCh4.get(0).getName().equals("ch1"));
Assert.assertTrue(reqCh4.get(1).getName().equals("ch3"));
//Since ch1 is also in default list, it is removed.
Assert.assertTrue(optCh4.size() == 1);
Assert.assertTrue(optCh4.get(0).getName().equals("ch2"));
List<Channel> allChannels = selector.getAllChannels();
Assert.assertTrue(allChannels.size() == 3);
Assert.assertTrue(allChannels.get(0).getName().equals("ch1"));
Assert.assertTrue(allChannels.get(1).getName().equals("ch2"));
Assert.assertTrue(allChannels.get(2).getName().equals("ch3"));
}
@Test
public void testNoDefault() {
config.put("mapping.foo", "ch1 ch2");
config.put("mapping.bar", "ch2 ch3");
config.put("mapping.xyz", "ch1 ch2 ch3");
config.put("mapping.zebra", "ch2");
config.put("optional.zebra", "ch1 ch3");
selector = ChannelSelectorFactory.create(channels, config);
Assert.assertTrue(selector instanceof MultiplexingChannelSelector);
Event event1 = new MockEvent();
Map<String, String> header1 = new HashMap<String, String>();
header1.put("myheader", "foo");// should match ch1 ch2
event1.setHeaders(header1);
List<Channel> reqCh1 = selector.getRequiredChannels(event1);
Assert.assertEquals(2, reqCh1.size());
Assert.assertEquals("ch1", reqCh1.get(0).getName());
Assert.assertEquals("ch2", reqCh1.get(1).getName());
List<Channel> optCh1 = selector.getOptionalChannels(event1);
Assert.assertTrue(optCh1.size() == 1);
//ch2 should not be there -- since it is a required channel
Assert.assertEquals("ch3", optCh1.get(0).getName());
Event event2 = new MockEvent();
Map<String, String> header2 = new HashMap<String, String>();
header2.put("myheader", "bar"); // should match ch2 ch3
event2.setHeaders(header2);
List<Channel> reqCh2 = selector.getRequiredChannels(event2);
Assert.assertEquals(2, reqCh2.size());
Assert.assertEquals("ch2", reqCh2.get(0).getName());
Assert.assertEquals("ch3", reqCh2.get(1).getName());
List<Channel> optCh2 = selector.getOptionalChannels(event2);
Assert.assertTrue(optCh2.isEmpty());
Event event3 = new MockEvent();
Map<String, String> header3 = new HashMap<String, String>();
header3.put("myheader", "xyz"); // should match ch1 ch2 ch3
event3.setHeaders(header3);
List<Channel> reqCh3 = selector.getRequiredChannels(event3);
Assert.assertEquals(3, reqCh3.size());
Assert.assertEquals("ch1", reqCh3.get(0).getName());
Assert.assertEquals("ch2", reqCh3.get(1).getName());
Assert.assertEquals("ch3", reqCh3.get(2).getName());
List<Channel> optCh3 = selector.getOptionalChannels(event3);
//All of the optional channels should go away.
Assert.assertTrue(optCh3.isEmpty());
Event event4 = new MockEvent();
Map<String, String> header4 = new HashMap<String, String>();
header4.put("myheader", "zebra");
event4.setHeaders(header4);
List<Channel> reqCh4 = selector.getRequiredChannels(event4);
Assert.assertEquals(1, reqCh4.size());
Assert.assertEquals("ch2", reqCh4.get(0).getName());
List<Channel> optCh4 = selector.getOptionalChannels(event4);
Assert.assertEquals(2, optCh4.size());
Assert.assertEquals("ch1", optCh4.get(0).getName());
Assert.assertEquals("ch3", optCh4.get(1).getName());
}
@Test
public void testNoMandatory() {
config.put("default", "ch3");
config.put("optional.foo", "ch1 ch2");
config.put("optional.zebra", "ch2 ch3");
selector = ChannelSelectorFactory.create(channels, config);
Assert.assertTrue(selector instanceof MultiplexingChannelSelector);
Event event1 = new MockEvent();
Map<String, String> header1 = new HashMap<String, String>();
header1.put("myheader", "foo");// should match ch1 ch2
event1.setHeaders(header1);
List<Channel> reqCh1 = selector.getRequiredChannels(event1);
Assert.assertEquals(1, reqCh1.size());
Assert.assertEquals("ch3", reqCh1.get(0).getName());
List<Channel> optCh1 = selector.getOptionalChannels(event1);
Assert.assertEquals(2, optCh1.size());
//ch2 should not be there -- since it is a required channel
Assert.assertEquals("ch1", optCh1.get(0).getName());
Assert.assertEquals("ch2", optCh1.get(1).getName());
Event event4 = new MockEvent();
Map<String, String> header4 = new HashMap<String, String>();
header4.put("myheader", "zebra");
event4.setHeaders(header4);
List<Channel> reqCh4 = selector.getRequiredChannels(event4);
Assert.assertEquals(1, reqCh4.size());
Assert.assertTrue(reqCh4.get(0).getName().equals("ch3"));
List<Channel> optCh4 = selector.getOptionalChannels(event4);
//ch3 was returned as a required channel, because it is default.
//So it is not returned in optional
Assert.assertEquals(1, optCh4.size());
Assert.assertEquals("ch2", optCh4.get(0).getName());
}
@Test
public void testOnlyOptional() {
config.put("optional.foo", "ch1 ch2");
config.put("optional.zebra", "ch2 ch3");
selector = ChannelSelectorFactory.create(channels, config);
Assert.assertTrue(selector instanceof MultiplexingChannelSelector);
Event event1 = new MockEvent();
Map<String, String> header1 = new HashMap<String, String>();
header1.put("myheader", "foo");// should match ch1 ch2
event1.setHeaders(header1);
List<Channel> reqCh1 = selector.getRequiredChannels(event1);
Assert.assertTrue(reqCh1.isEmpty());
List<Channel> optCh1 = selector.getOptionalChannels(event1);
Assert.assertEquals(2,optCh1.size());
//ch2 should not be there -- since it is a required channel
Event event4 = new MockEvent();
Map<String, String> header4 = new HashMap<String, String>();
header4.put("myheader", "zebra");
event4.setHeaders(header4);
List<Channel> reqCh4 = selector.getRequiredChannels(event4);
Assert.assertTrue(reqCh4.isEmpty());
List<Channel> optCh4 = selector.getOptionalChannels(event4);
Assert.assertEquals(2, optCh4.size());
Assert.assertEquals("ch2", optCh4.get(0).getName());
Assert.assertEquals("ch3", optCh4.get(1).getName());
}
}
| 9,940 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestMemoryChannelConcurrency.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.channel;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Concurrency tests for the memory channel: verifies transaction isolation
 * between two threads, and stress-tests the channel with many concurrent
 * producers (and consumers) that commit or roll back at random. Correctness
 * is checked by tallying committed puts/takes per thread id and reconciling
 * the counts at the end.
 */
public class TestMemoryChannelConcurrency {
  private CyclicBarrier barrier;

  @Before
  public void setUp() {
  }

  /**
   * Two threads interleave put/rollback and put/commit on separate
   * transactions; only the committed event must remain in the channel.
   */
  @Test
  public void testTransactionConcurrency() throws InterruptedException {
    final Channel channel = new MemoryChannel();
    barrier = new CyclicBarrier(2);
    Configurables.configure(channel, new Context());
    // Thread 1 puts an event and rolls it back.
    Thread t1 = new Thread(new Runnable() {
      @Override
      public void run() {
        Transaction tx = channel.getTransaction();
        tx.begin();
        channel.put(EventBuilder.withBody("first event".getBytes()));
        try {
          barrier.await();
          barrier.await();
          tx.rollback();
          barrier.await();
          tx.close();
          // final barrier to make sure both threads manage to finish
          barrier.await();
        } catch (InterruptedException e) {
          Assert.fail();
        } catch (BrokenBarrierException e) {
          Assert.fail();
        }
      }
    });
    // Thread 2 puts an event concurrently and commits it.
    Thread t2 = new Thread(new Runnable() {
      @Override
      public void run() {
        Transaction tx = channel.getTransaction();
        try {
          barrier.await();
          tx.begin();
          channel.put(EventBuilder.withBody("second event".getBytes()));
          barrier.await();
          barrier.await();
          tx.commit();
          tx.close();
          // final barrier to make sure both threads manage to finish
          barrier.await();
        } catch (InterruptedException e) {
          Assert.fail();
        } catch (BrokenBarrierException e) {
          Assert.fail();
        }
      }
    });
    t1.start();
    t2.start();
    t1.join(1000);
    if (t1.isAlive()) {
      Assert.fail("Thread1 failed to finish");
      t1.interrupt();
    }
    t2.join(1000);
    if (t2.isAlive()) {
      Assert.fail("Thread2 failed to finish");
      t2.interrupt();
    }
    // Only thread 2's committed event should be visible.
    Transaction tx = channel.getTransaction();
    tx.begin();
    Event e = channel.take();
    Assert.assertEquals("second event", new String(e.getBody()));
    Assert.assertNull(channel.take());
    tx.commit();
    tx.close();
  }

  /**
   * Works with a startgate/endgate latches to make sure all threads run at the same time.
   * Threads randomly choose to commit or rollback random numbers of actions, tagging them with the
   * thread no. The correctness check is made by recording committed entries into a map, and
   * verifying the count after the endgate.
   * Since nothing is taking the puts out, allow for a big capacity
   *
   * @throws InterruptedException
   */
  @Test
  public void testManyThreads() throws InterruptedException {
    final Channel channel = new MemoryChannel();
    Context context = new Context();
    context.put("keep-alive", "1");
    context.put("capacity", "5000"); // theoretical maximum of 100 threads * 10 * 5
    // because we're just grabbing the whole lot in one commit
    // normally a transactionCapacity significantly lower than the channel capacity would be
    // recommended
    context.put("transactionCapacity", "5000");
    Configurables.configure(channel, context);
    // Count of committed puts, keyed by the producing thread's id.
    final ConcurrentHashMap<String, AtomicInteger> committedPuts =
        new ConcurrentHashMap<String, AtomicInteger>();
    final int threadCount = 100;
    final CountDownLatch startGate = new CountDownLatch(1);
    final CountDownLatch endGate = new CountDownLatch(threadCount);
    for (int i = 0; i < threadCount; i++) {
      Thread t = new Thread() {
        @Override
        public void run() {
          Long tid = Thread.currentThread().getId();
          String strtid = tid.toString();
          Random rng = new Random(tid);
          try {
            startGate.await();
          } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
          }
          for (int j = 0; j < 10; j++) {
            int events = rng.nextInt(5) + 1;
            Transaction tx = channel.getTransaction();
            tx.begin();
            for (int k = 0; k < events; k++) {
              channel.put(EventBuilder.withBody(strtid.getBytes()));
            }
            // Randomly commit (and record the count) or roll back.
            if (rng.nextBoolean()) {
              tx.commit();
              AtomicInteger tcount = committedPuts.get(strtid);
              if (tcount == null) {
                committedPuts.put(strtid, new AtomicInteger(events));
              } else {
                tcount.addAndGet(events);
              }
            } else {
              tx.rollback();
            }
            tx.close();
          }
          endGate.countDown();
        }
      };
      t.start();
    }
    startGate.countDown();
    endGate.await();
    if (committedPuts.isEmpty()) {
      Assert.fail();
    }
    // verify the counts
    Transaction tx = channel.getTransaction();
    tx.begin();
    Event e;
    while ((e = channel.take()) != null) {
      String index = new String(e.getBody());
      AtomicInteger remain = committedPuts.get(index);
      int post = remain.decrementAndGet();
      if (post == 0) {
        committedPuts.remove(index);
      }
    }
    tx.commit();
    tx.close();
    if (!committedPuts.isEmpty()) {
      Assert.fail();
    }
  }

  /**
   * Runs 50 source threads and 50 sink threads concurrently against the same
   * channel, each committing or rolling back at random, then reconciles the
   * committed put counts against the committed take counts plus whatever is
   * left in the channel.
   *
   * @throws InterruptedException
   */
  @Test
  public void testConcurrentSinksAndSources() throws InterruptedException {
    final Channel channel = new MemoryChannel();
    Context context = new Context();
    context.put("keep-alive", "1");
    context.put("capacity", "100"); // theoretical maximum of 100 threads * 10 * 5
    // because we're just grabbing the whole lot in one commit
    // normally a transactionCapacity significantly lower than the channel capacity would be
    // recommended
    context.put("transactionCapacity", "100");
    Configurables.configure(channel, context);
    final ConcurrentHashMap<String, AtomicInteger> committedPuts =
        new ConcurrentHashMap<String, AtomicInteger>();
    final ConcurrentHashMap<String, AtomicInteger> committedTakes =
        new ConcurrentHashMap<String, AtomicInteger>();
    final int threadCount = 100;
    final CountDownLatch startGate = new CountDownLatch(1);
    final CountDownLatch endGate = new CountDownLatch(threadCount);
    // Single lock shared by all sink threads so the check-then-update of
    // committedTakes is atomic across threads. (Previously this synchronized
    // on a boxed "Integer 0" created inside the loop, which only provided a
    // global lock by accident of the Integer cache; a dedicated shared Object
    // is the correct idiom.)
    final Object takeMapLock = new Object();
    // start a sink and source for each
    for (int i = 0; i < threadCount / 2; i++) {
      Thread t = new Thread() {
        @Override
        public void run() {
          Long tid = Thread.currentThread().getId();
          String strtid = tid.toString();
          Random rng = new Random(tid);
          try {
            startGate.await();
          } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
          }
          for (int j = 0; j < 10; j++) {
            int events = rng.nextInt(5) + 1;
            Transaction tx = channel.getTransaction();
            tx.begin();
            for (int k = 0; k < events; k++) {
              channel.put(EventBuilder.withBody(strtid.getBytes()));
            }
            if (rng.nextBoolean()) {
              try {
                tx.commit();
                AtomicInteger tcount = committedPuts.get(strtid);
                if (tcount == null) {
                  committedPuts.put(strtid, new AtomicInteger(events));
                } else {
                  tcount.addAndGet(events);
                }
              } catch (ChannelException e) {
                // Channel may be at capacity; count only successful commits.
                System.out.print("puts commit failed");
                tx.rollback();
              }
            } else {
              tx.rollback();
            }
            tx.close();
          }
          endGate.countDown();
        }
      };
      // start source
      t.start();
      t = new Thread() {
        @Override
        public void run() {
          Random rng = new Random(Thread.currentThread().getId());
          try {
            startGate.await();
          } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
          }
          for (int j = 0; j < 10; j++) {
            int events = rng.nextInt(5) + 1;
            Transaction tx = channel.getTransaction();
            tx.begin();
            Event[] taken = new Event[events];
            int k;
            for (k = 0; k < events; k++) {
              taken[k] = channel.take();
              if (taken[k] == null) break;
            }
            if (rng.nextBoolean()) {
              try {
                tx.commit();
                for (Event e : taken) {
                  if (e == null) break;
                  String index = new String(e.getBody());
                  synchronized (takeMapLock) {
                    AtomicInteger remain = committedTakes.get(index);
                    if (remain == null) {
                      committedTakes.put(index, new AtomicInteger(1));
                    } else {
                      remain.incrementAndGet();
                    }
                  }
                }
              } catch (ChannelException e) {
                System.out.print("takes commit failed");
                tx.rollback();
              }
            } else {
              tx.rollback();
            }
            tx.close();
          }
          endGate.countDown();
        }
      };
      // start sink
      t.start();
    }
    startGate.countDown();
    if (!endGate.await(20, TimeUnit.SECONDS)) {
      Assert.fail("Not all threads ended succesfully");
    }
    // verify the counts
    Transaction tx = channel.getTransaction();
    tx.begin();
    Event e;
    // first pull out what's left in the channel and remove it from the
    // committed map
    while ((e = channel.take()) != null) {
      String index = new String(e.getBody());
      AtomicInteger remain = committedPuts.get(index);
      int post = remain.decrementAndGet();
      if (post == 0) {
        committedPuts.remove(index);
      }
    }
    tx.commit();
    tx.close();
    // now just check the committed puts match the committed takes
    for (Entry<String, AtomicInteger> takes : committedTakes.entrySet()) {
      AtomicInteger count = committedPuts.get(takes.getKey());
      if (count == null) {
        Assert.fail("Putted data doesn't exist");
      }
      if (count.get() != takes.getValue().get()) {
        Assert.fail(String.format("Mismatched put and take counts expected %d had %d",
            count.get(), takes.getValue().get()));
      }
      committedPuts.remove(takes.getKey());
    }
    if (!committedPuts.isEmpty()) {
      Assert.fail("Puts still has entries remaining");
    }
  }
}
| 9,941 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestMemoryChannelTransaction.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Transactional semantics tests for the memory channel: committed puts are
 * visible to subsequent takes, rolled-back puts are discarded, and
 * rolled-back takes leave the events in the channel.
 */
public class TestMemoryChannelTransaction {

  private Channel channel;

  @Before
  public void setUp() {
    channel = new MemoryChannel();
  }

  /** Committed puts must be taken back in order, with no extras. */
  @Test
  public void testCommit() throws InterruptedException, EventDeliveryException {
    Event event;
    Event event2;
    Context context = new Context();
    int putCounter = 0;
    context.put("keep-alive", "1");
    context.put("capacity", "100");
    context.put("transactionCapacity", "50");
    Configurables.configure(channel, context);

    // Put 10 events in one committed transaction.
    Transaction transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);
    transaction.begin();
    for (putCounter = 0; putCounter < 10; putCounter++) {
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
    }
    transaction.commit();
    transaction.close();

    // Take them back in a second transaction and verify order and count.
    transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.commit();
    transaction.close();
  }

  /** Rolled-back puts vanish; rolled-back takes restore the events. */
  @Test
  public void testRollBack() throws InterruptedException,
      EventDeliveryException {
    Event event;
    Event event2;
    Context context = new Context();
    int putCounter = 0;
    context.put("keep-alive", "1");
    Configurables.configure(channel, context);
    Transaction transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);

    // add events and rollback txn
    transaction.begin();
    for (putCounter = 0; putCounter < 10; putCounter++) {
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
    }
    transaction.rollback();
    transaction.close();

    // verify that no events are stored due to rollback
    transaction = channel.getTransaction();
    transaction.begin();
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.commit();
    transaction.close();

    // add events and commit
    transaction = channel.getTransaction();
    transaction.begin();
    for (putCounter = 0; putCounter < 10; putCounter++) {
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
    }
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);

    // verify events are there, then rollback the take
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.rollback();
    transaction.close();

    // verify that the events were left in there due to rollback
    transaction = channel.getTransaction();
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.rollback();
    transaction.close();
  }

  @Ignore("BasicChannelSemantics doesn't support re-entrant transactions")
  @Test
  public void testReEntTxn() throws InterruptedException,
      EventDeliveryException {
    Event event;
    Event event2;
    Context context = new Context();
    int putCounter = 0;
    context.put("keep-alive", "1");
    Configurables.configure(channel, context);
    Transaction transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);

    transaction.begin(); // first begin
    for (putCounter = 0; putCounter < 10; putCounter++) {
      transaction.begin(); // inner begin
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
      transaction.commit(); // inner commit
    }
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.commit();
    transaction.close();
  }

  @Ignore("BasicChannelSemantics doesn't support re-entrant transactions")
  @Test
  public void testReEntTxnRollBack() throws InterruptedException,
      EventDeliveryException {
    Event event;
    Event event2;
    Context context = new Context();
    int putCounter = 0;
    context.put("keep-alive", "1");
    Configurables.configure(channel, context);
    Transaction transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);

    // add events and rollback txn
    transaction.begin();
    for (putCounter = 0; putCounter < 10; putCounter++) {
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
    }
    transaction.rollback();
    transaction.close();

    // verify that no events are stored due to rollback
    transaction = channel.getTransaction();
    transaction.begin();
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.commit();
    transaction.close();

    // add events and commit
    transaction = channel.getTransaction();
    transaction.begin();
    for (putCounter = 0; putCounter < 10; putCounter++) {
      event = EventBuilder.withBody(("test event" + putCounter).getBytes());
      channel.put(event);
    }
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);

    // verify events are there, then rollback the take
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      transaction.begin(); // inner begin
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
      transaction.commit(); // inner commit
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.rollback();
    transaction.close();

    // verify that the events were left in there due to rollback
    transaction = channel.getTransaction();
    transaction.begin();
    for (int i = 0; i < 10; i++) {
      event2 = channel.take();
      Assert.assertNotNull("lost an event", event2);
      Assert.assertArrayEquals(("test event" + i).getBytes(), event2.getBody());
    }
    event2 = channel.take();
    Assert.assertNull("extra event found", event2);
    transaction.rollback();
    transaction.close();
  }
}
| 9,942 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/MockEvent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.HashMap;
import java.util.Map;
import org.apache.flume.Event;
/**
 * A minimal {@link Event} implementation for unit tests. Defensive copies
 * of both the headers map and the body array are taken on construction and
 * on every setter call, so a MockEvent never aliases caller-owned state.
 */
public class MockEvent implements Event {

  private Map<String, String> headers;
  private byte[] body;

  /** Creates an event with no headers and an empty body. */
  public MockEvent() {
    this(new HashMap<String, String>(), new byte[0]);
  }

  /** Creates an event holding copies of the given headers and body. */
  public MockEvent(Map<String, String> headers, byte[] body) {
    this.headers = new HashMap<String, String>(headers);
    this.body = body.clone();
  }

  @Override
  public Map<String, String> getHeaders() {
    return headers;
  }

  @Override
  public void setHeaders(Map<String, String> headers) {
    this.headers = new HashMap<String, String>(headers);
  }

  @Override
  public byte[] getBody() {
    return body;
  }

  @Override
  public void setBody(byte[] body) {
    this.body = body.clone();
  }
}
| 9,943 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestLoadBalancingChannelSelector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.conf.BasicConfigurationConstants;
import org.apache.flume.conf.channel.ChannelSelectorType;
import org.junit.Before;
import org.junit.Test;
import junit.framework.Assert;
/**
 * Exercises the load-balancing channel selector: successive events must be
 * dispatched to each configured channel in round-robin order, and the
 * selector never reports optional channels.
 */
public class TestLoadBalancingChannelSelector {

  private List<Channel> channels = new ArrayList<Channel>();
  private ChannelSelector selector;

  @Before
  public void setUp() throws Exception {
    channels.clear();
    // Four mock channels, registered in the order they should be selected.
    for (String name : new String[] { "ch1", "ch2", "ch3", "ch4" }) {
      channels.add(MockChannel.createMockChannel(name));
    }
    Map<String, String> config = new HashMap<>();
    config.put(BasicConfigurationConstants.CONFIG_TYPE, ChannelSelectorType.LOAD_BALANCING.name());
    selector = ChannelSelectorFactory.create(channels, config);
  }

  @Test
  public void testLoadBalancingSelector() throws Exception {
    selector.configure(new Context());
    // One complete round must visit each channel exactly once, in order.
    for (String expected : new String[] { "ch1", "ch2", "ch3", "ch4" }) {
      validateChannel(selector, expected);
    }
    List<Channel> optCh = selector.getOptionalChannels(new MockEvent());
    Assert.assertEquals(0, optCh.size());
  }

  /** Asserts that the next selection is exactly the named channel. */
  private void validateChannel(ChannelSelector selector, String channelName) {
    List<Channel> selected = selector.getRequiredChannels(new MockEvent());
    Assert.assertNotNull(selected);
    Assert.assertEquals(1, selected.size());
    Assert.assertEquals(channelName, selected.get(0).getName());
  }
}
| 9,944 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestChannelProcessor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.channel;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@code ChannelProcessor}: exception propagation out of
 * {@code getTransaction()} and event delivery to required vs. optional
 * channels.
 */
public class TestChannelProcessor {

  /**
   * Ensure that we bubble up any specific exception thrown from getTransaction
   * instead of another exception masking it such as an NPE
   */
  @Test(expected = ChannelException.class)
  public void testExceptionFromGetTransaction() {
    // create a channel which unexpectedly throws a ChEx on getTransaction()
    Channel ch = mock(Channel.class);
    when(ch.getTransaction()).thenThrow(new ChannelException("doh!"));

    ChannelSelector sel = new ReplicatingChannelSelector();
    sel.setChannels(Lists.newArrayList(ch));
    ChannelProcessor proc = new ChannelProcessor(sel);

    List<Event> events = Lists.newArrayList();
    events.add(EventBuilder.withBody("event 1", Charsets.UTF_8));

    proc.processEventBatch(events);
  }

  /**
   * Ensure that we see the original NPE from the PreConditions check instead
   * of an auto-generated NPE, which could be masking something else.
   */
  @Test
  public void testNullFromGetTransaction() {
    // channel which returns null from getTransaction()
    Channel ch = mock(Channel.class);
    when(ch.getTransaction()).thenReturn(null);

    ChannelSelector sel = new ReplicatingChannelSelector();
    sel.setChannels(Lists.newArrayList(ch));
    ChannelProcessor proc = new ChannelProcessor(sel);

    List<Event> events = Lists.newArrayList();
    events.add(EventBuilder.withBody("event 1", Charsets.UTF_8));

    boolean threw = false;
    try {
      proc.processEventBatch(events);
    } catch (NullPointerException ex) {
      threw = true;
      Assert.assertNotNull("NPE must be manually thrown", ex.getMessage());
    }
    Assert.assertTrue("Must throw NPE", threw);
  }

  /*
   * Test delivery to optional and required channels
   * Test both processEvent and processEventBatch
   *
   * NOTE(review): the fixed sleeps below presumably give background delivery
   * to the optional channels (ch2, ch3) time to complete before the channels
   * are drained and verified.
   */
  @Test
  public void testRequiredAndOptionalChannels() {
    Context context = new Context();
    ArrayList<Channel> channels = new ArrayList<Channel>();
    for (int i = 0; i < 4; i++) {
      Channel ch = new MemoryChannel();
      ch.setName("ch" + i);
      Configurables.configure(ch, context);
      channels.add(ch);
    }

    ChannelSelector selector = new ReplicatingChannelSelector();
    selector.setChannels(channels);
    context = new Context();
    // ch2 and ch3 are optional; delivery failures there must not fail the call.
    context.put(ReplicatingChannelSelector.CONFIG_OPTIONAL, "ch2 ch3");
    Configurables.configure(selector, context);

    ChannelProcessor processor = new ChannelProcessor(selector);
    context = new Context();
    Configurables.configure(processor, context);

    Event event1 = EventBuilder.withBody("event 1", Charsets.UTF_8);
    processor.processEvent(event1);
    try {
      Thread.sleep(3000);
    } catch (InterruptedException e) {
      // Don't swallow the interrupt silently: restore the flag for the caller.
      Thread.currentThread().interrupt();
    }

    // Every channel, required and optional, must have received the event.
    for (Channel channel : channels) {
      Transaction transaction = channel.getTransaction();
      transaction.begin();
      Event event_ch = channel.take();
      Assert.assertEquals(event1, event_ch);
      transaction.commit();
      transaction.close();
    }

    List<Event> events = Lists.newArrayList();
    for (int i = 0; i < 100; i++) {
      events.add(EventBuilder.withBody("event " + i, Charsets.UTF_8));
    }
    processor.processEventBatch(events);
    try {
      Thread.sleep(3000);
    } catch (InterruptedException e) {
      // Restore the interrupt flag rather than discarding it.
      Thread.currentThread().interrupt();
    }

    // Every channel must have received the full batch.
    for (Channel channel : channels) {
      Transaction transaction = channel.getTransaction();
      transaction.begin();
      for (int i = 0; i < 100; i++) {
        Event event_ch = channel.take();
        Assert.assertNotNull(event_ch);
      }
      transaction.commit();
      transaction.close();
    }
  }
}
| 9,945 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestMemoryChannel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.event.SimpleEvent;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
import static org.fest.reflect.core.Reflection.field;
/**
 * Unit tests for {@code MemoryChannel}: basic put/take semantics, capacity
 * and transactionCapacity enforcement, byteCapacity accounting, and runtime
 * reconfiguration.
 */
public class TestMemoryChannel {

  private MemoryChannel channel;

  @Before
  public void setUp() {
    channel = new MemoryChannel();
  }

  /** A committed put must be visible to a subsequent take. */
  @Test
  public void testPutTake() throws InterruptedException, EventDeliveryException {
    Event event = EventBuilder.withBody("test event".getBytes());
    Context context = new Context();
    Configurables.configure(channel, context);

    Transaction transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);
    transaction.begin();
    channel.put(event);
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    Assert.assertNotNull(transaction);
    transaction.begin();
    Event event2 = channel.take();
    Assert.assertEquals(event, event2);
    transaction.commit();
    // Release the take transaction as well (was left open before).
    transaction.close();
  }

  /** A header with a null value must round-trip through the channel intact. */
  @Test
  public void testPutAcceptsNullValueInHeader() {
    Configurables.configure(channel, new Context());
    Event event = EventBuilder.withBody("test body".getBytes(Charsets.UTF_8),
        Collections.<String, String>singletonMap("test_key", null));

    Transaction txPut = channel.getTransaction();
    txPut.begin();
    channel.put(event);
    txPut.commit();
    txPut.close();

    Transaction txTake = channel.getTransaction();
    txTake.begin();
    Event eventTaken = channel.take();
    Assert.assertEquals(event, eventTaken);
    txTake.commit();
    // Release the take transaction as well (was left open before).
    txTake.close();
  }

  /** Capacity can be raised at runtime, but never lowered below the current fill level. */
  @Test
  public void testChannelResize() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "5");
    parms.put("transactionCapacity", "5");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction transaction = channel.getTransaction();
    transaction.begin();
    for (int i = 0; i < 5; i++) {
      channel.put(EventBuilder.withBody(String.format("test event %d", i).getBytes()));
    }
    transaction.commit();
    transaction.close();

    /*
     * Verify overflow semantics
     */
    transaction = channel.getTransaction();
    boolean overflowed = false;
    try {
      transaction.begin();
      channel.put(EventBuilder.withBody("overflow event".getBytes()));
      transaction.commit();
    } catch (ChannelException e) {
      overflowed = true;
      transaction.rollback();
    } finally {
      transaction.close();
    }
    Assert.assertTrue(overflowed);

    /*
     * Reconfigure capacity up (5 -> 6) and add another event; shouldn't
     * result in an exception
     */
    parms.put("capacity", "6");
    context.putAll(parms);
    Configurables.configure(channel, context);
    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("extended capacity event".getBytes()));
    transaction.commit();
    transaction.close();

    /*
     * Attempt to reconfigure capacity to below current entry count and verify
     * it wasn't carried out: all 6 queued events must still be takeable.
     */
    parms.put("capacity", "2");
    parms.put("transactionCapacity", "2");
    context.putAll(parms);
    Configurables.configure(channel, context);
    for (int i = 0; i < 6; i++) {
      transaction = channel.getTransaction();
      transaction.begin();
      Assert.assertNotNull(channel.take());
      transaction.commit();
      transaction.close();
    }
  }

  /** Putting more events than transactionCapacity in a single transaction must fail. */
  @Test(expected = ChannelException.class)
  public void testTransactionPutCapacityOverload() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "5");
    parms.put("transactionCapacity", "2");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    // shouldn't be able to fit a third in the buffer
    channel.put(EventBuilder.withBody("test".getBytes()));
    Assert.fail();
  }

  /** Committing beyond the channel's total capacity must fail. */
  @Test(expected = ChannelException.class)
  public void testCapacityOverload() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "5");
    parms.put("transactionCapacity", "3");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    // this should kill it
    transaction.commit();
    Assert.fail();
  }

  /** Committed takes must free capacity for subsequent puts. */
  @Test
  public void testCapacityBufferEmptyingAfterTakeCommit() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "3");
    parms.put("transactionCapacity", "3");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    tx.commit();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.take();
    channel.take();
    tx.commit();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    tx.commit();
    tx.close();
  }

  /** A rolled-back put transaction must not consume capacity. */
  @Test
  public void testCapacityBufferEmptyingAfterRollback() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "3");
    parms.put("transactionCapacity", "3");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    tx.rollback();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    channel.put(EventBuilder.withBody("test".getBytes()));
    tx.commit();
    tx.close();
  }

  /** Puts beyond the configured byteCapacity must fail. */
  @Test(expected = ChannelException.class)
  public void testByteCapacityOverload() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("byteCapacity", "2000");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);

    byte[] eventBody = new byte[405];
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    transaction.commit();
    transaction.close();

    transaction = channel.getTransaction();
    transaction.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    // this should kill it
    transaction.commit();
    Assert.fail();
  }

  /** Rolling back a put must restore the byte-capacity accounting. */
  @Test
  public void testByteCapacityAfterRollback() {
    Context ctx = new Context(ImmutableMap.of("byteCapacity", "1000"));
    Configurables.configure(channel, ctx);
    Assert.assertEquals(8, channel.getBytesRemainingValue());

    Event e = new SimpleEvent();
    Transaction t = channel.getTransaction();
    t.begin();
    channel.put(e);
    t.rollback();
    Assert.assertEquals(8, channel.getBytesRemainingValue());
    // Release the transaction (was left open before).
    t.close();
  }

  /** Committed takes must free byteCapacity for subsequent puts. */
  // FIX: this method was missing @Test, so it silently never ran.
  @Test
  public void testByteCapacityBufferEmptyingAfterTakeCommit() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("byteCapacity", "2000");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);

    byte[] eventBody = new byte[405];
    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    try {
      channel.put(EventBuilder.withBody(eventBody));
      throw new RuntimeException("Put was able to overflow byte capacity.");
    } catch (ChannelException ce) {
      //Do nothing
    }
    tx.commit();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.take();
    channel.take();
    tx.commit();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    try {
      channel.put(EventBuilder.withBody(eventBody));
      throw new RuntimeException("Put was able to overflow byte capacity.");
    } catch (ChannelException ce) {
      //Do nothing
    }
    tx.commit();
    tx.close();
  }

  /** A rolled-back put transaction must not consume byteCapacity. */
  @Test
  public void testByteCapacityBufferEmptyingAfterRollback() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("byteCapacity", "2000");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);

    byte[] eventBody = new byte[405];
    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    tx.rollback();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    channel.put(EventBuilder.withBody(eventBody));
    tx.commit();
    tx.close();
  }

  /** byteCapacity can be reconfigured across stop/start, with limits re-enforced. */
  @Test
  public void testByteCapacityBufferChangeConfig() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("byteCapacity", "2000");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);

    byte[] eventBody = new byte[405];
    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    tx.commit();
    tx.close();

    channel.stop();
    parms.put("byteCapacity", "1500");
    context.putAll(parms);
    Configurables.configure(channel, context);
    channel.start();
    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    try {
      channel.put(EventBuilder.withBody(eventBody));
      tx.commit();
      Assert.fail();
    } catch (ChannelException e) {
      //success
      tx.rollback();
    } finally {
      tx.close();
    }

    channel.stop();
    parms.put("byteCapacity", "250");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);
    channel.start();
    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(eventBody));
    tx.commit();
    tx.close();

    channel.stop();
    parms.put("byteCapacity", "300");
    context.putAll(parms);
    Configurables.configure(channel, context);
    channel.start();
    tx = channel.getTransaction();
    tx.begin();
    try {
      for (int i = 0; i < 2; i++) {
        channel.put(EventBuilder.withBody(eventBody));
      }
      tx.commit();
      Assert.fail();
    } catch (ChannelException e) {
      //success
      tx.rollback();
    } finally {
      tx.close();
    }

    channel.stop();
    parms.put("byteCapacity", "3300");
    context.putAll(parms);
    Configurables.configure(channel, context);
    channel.start();
    tx = channel.getTransaction();
    tx.begin();
    try {
      for (int i = 0; i < 15; i++) {
        channel.put(EventBuilder.withBody(eventBody));
      }
      tx.commit();
      Assert.fail();
    } catch (ChannelException e) {
      //success
      tx.rollback();
    } finally {
      tx.close();
    }

    channel.stop();
    parms.put("byteCapacity", "4000");
    context.putAll(parms);
    Configurables.configure(channel, context);
    channel.start();
    tx = channel.getTransaction();
    tx.begin();
    try {
      for (int i = 0; i < 25; i++) {
        channel.put(EventBuilder.withBody(eventBody));
      }
      tx.commit();
      Assert.fail();
    } catch (ChannelException e) {
      //success
      tx.rollback();
    } finally {
      tx.close();
    }
    channel.stop();
  }

  /*
   * This would cause a NPE without FLUME-1622.
   */
  @Test
  public void testNullEmptyEvent() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("byteCapacity", "2000");
    parms.put("byteCapacityBufferPercentage", "20");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Transaction tx = channel.getTransaction();
    tx.begin();
    //This line would cause a NPE without FLUME-1622.
    channel.put(EventBuilder.withBody(null));
    tx.commit();
    tx.close();

    tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody(new byte[0]));
    tx.commit();
    tx.close();
  }

  /** Negative configured capacities must be replaced by positive defaults. */
  @Test
  public void testNegativeCapacities() {
    Context context = new Context();
    Map<String, String> parms = new HashMap<String, String>();
    parms.put("capacity", "-3");
    parms.put("transactionCapacity", "-1");
    context.putAll(parms);
    Configurables.configure(channel, context);

    Assert.assertTrue(field("queue")
        .ofType(LinkedBlockingDeque.class)
        .in(channel).get()
        .remainingCapacity() > 0);

    Assert.assertTrue(field("transCapacity")
        .ofType(Integer.class)
        .in(channel).get() > 0);
  }
}
| 9,946 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/MockChannel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.ArrayList;
import java.util.List;
import org.apache.flume.Channel;
import org.apache.flume.ChannelException;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
/**
 * A trivial in-memory {@code Channel} stub for selector tests: puts append to
 * an internal list, takes peek at (but do not remove) the oldest event, and
 * transactions are complete no-ops.
 */
public class MockChannel extends AbstractChannel {

  private final List<Event> events = new ArrayList<Event>();

  /** Creates a mock channel and assigns it the given name. */
  public static Channel createMockChannel(String name) {
    Channel channel = new MockChannel();
    channel.setName(name);
    return channel;
  }

  @Override
  public void put(Event event) throws ChannelException {
    events.add(event);
  }

  @Override
  public Event take() throws ChannelException {
    // Peek only: the event is intentionally left in the list.
    if (events.isEmpty()) {
      return null;
    }
    return events.get(0);
  }

  @Override
  public Transaction getTransaction() {
    return new MockTransaction();
  }

  /** Transaction whose lifecycle methods all do nothing. */
  private static class MockTransaction implements Transaction {
    @Override
    public void begin() {
    }

    @Override
    public void commit() {
    }

    @Override
    public void rollback() {
    }

    @Override
    public void close() {
    }
  }
}
| 9,947 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/TestBasicChannelSemantics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.concurrent.Future;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.junit.Assert;
import org.junit.Test;
public class TestBasicChannelSemantics
extends AbstractBasicChannelSemanticsTest {
@Test
public void testHappyPath() {
for (int i = 0; i < events.size(); ++i) {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.put(events.get(i));
transaction.commit();
transaction.close();
}
for (int i = 0; i < events.size(); ++i) {
Transaction transaction = channel.getTransaction();
transaction.begin();
Assert.assertSame(events.get(i), channel.take());
transaction.commit();
transaction.close();
}
}
@Test
public void testMultiThreadedHappyPath() throws Exception {
final int testLength = 1000;
Future<?> producer = executor.submit(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(500);
for (int i = 0; i < testLength; ++i) {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.put(events.get(i % events.size()));
transaction.commit();
transaction.close();
Thread.sleep(1);
}
Thread.sleep(500);
} catch (InterruptedException e) {
Assert.fail();
}
}
});
int i = 0;
while (!producer.isDone()) {
Transaction transaction = channel.getTransaction();
transaction.begin();
Event event = channel.take();
if (event != null) {
Assert.assertSame(events.get(i % events.size()), event);
++i;
}
transaction.commit();
transaction.close();
}
Assert.assertEquals(testLength, i);
producer.get();
}
@Test
public void testGetTransaction() throws Exception {
final Transaction transaction = channel.getTransaction();
executor.submit(new Runnable() {
@Override
public void run() {
Assert.assertNotSame(transaction, channel.getTransaction());
}
}).get();
Assert.assertSame(transaction, channel.getTransaction());
transaction.begin();
executor.submit(new Runnable() {
@Override
public void run() {
Assert.assertNotSame(transaction, channel.getTransaction());
}
}).get();
Assert.assertSame(transaction, channel.getTransaction());
transaction.commit();
executor.submit(new Runnable() {
@Override
public void run() {
Assert.assertNotSame(transaction, channel.getTransaction());
}
}).get();
Assert.assertSame(transaction, channel.getTransaction());
transaction.close();
executor.submit(new Runnable() {
@Override
public void run() {
Assert.assertNotSame(transaction, channel.getTransaction());
}
}).get();
Assert.assertNotSame(transaction, channel.getTransaction());
}
@Test
public void testBegin() throws Exception {
final Transaction transaction = channel.getTransaction();
testExceptions(new Runnable() {
@Override
public void run() {
transaction.begin();
}
});
transaction.begin();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.begin();
}
});
transaction.commit();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.begin();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.begin();
}
});
}
@Test
public void testPut1() throws Exception {
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
transaction.begin();
channel.put(events.get(0));
testIllegalArgument(new Runnable() {
@Override
public void run() {
channel.put(null);
}
});
testExceptions(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
transaction.commit();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
}
@Test
public void testPut2() throws Exception {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.put(events.get(0));
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
}
@Test
public void testPut3() throws Exception {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.put(events.get(0));
final Transaction finalTransaction = transaction;
testChannelException(new Runnable() {
@Override
public void run() {
finalTransaction.commit();
}
});
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.put(events.get(0));
}
});
}
@Test
public void testTake1() throws Exception {
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
transaction.begin();
Assert.assertNull(channel.take());
for (int i = 0; i < 1000; ++i) {
channel.put(events.get(i % events.size()));
}
Assert.assertNotNull(channel.take());
testWrongThread(new Runnable() {
@Override
public void run() {
channel.take();
}
});
testBasicExceptions(new Runnable() {
@Override
public void run() {
channel.take();
}
});
testMode(TestChannel.Mode.SLEEP, new Runnable() {
@Override
public void run() {
interruptTest(new Runnable() {
@Override
public void run() {
Assert.assertNull(channel.take());
Assert.assertTrue(Thread.interrupted());
}
});
}
});
Assert.assertNotNull(channel.take());
transaction.commit();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
}
@Test
public void testTake2() throws Exception {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.take();
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
}
@Test
public void testTake3() throws Exception {
Transaction transaction = channel.getTransaction();
transaction.begin();
channel.take();
final Transaction finalTransaction = transaction;
testChannelException(new Runnable() {
@Override
public void run() {
finalTransaction.commit();
}
});
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
channel.take();
}
});
}
@Test
public void testCommit1() throws Exception {
final Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
transaction.begin();
testExceptions(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
transaction.commit();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
}
@Test
public void testCommit2() throws Exception {
final Transaction transaction = channel.getTransaction();
transaction.begin();
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.commit();
}
});
}
@Test
public void testRollback1() throws Exception {
final Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.begin();
testWrongThread(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.rollback();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
}
@Test
public void testRollback2() throws Exception {
final Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.begin();
testError(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
}
@Test
public void testRollback3() throws Exception {
final Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.begin();
testRuntimeException(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
}
@Test
public void testRollback4() throws Exception {
final Transaction transaction = channel.getTransaction();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.begin();
testChannelException(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
transaction.close();
testIllegalState(new Runnable() {
@Override
public void run() {
transaction.rollback();
}
});
}
@Test
public void testRollback5() throws Exception {
  final Transaction transaction = channel.getTransaction();
  // One stateless Runnable reused for every rollback attempt in this test.
  final Runnable rollbackOp = new Runnable() {
    @Override
    public void run() {
      transaction.rollback();
    }
  };
  testIllegalState(rollbackOp);  // rollback before begin() is illegal
  transaction.begin();
  testInterrupt(rollbackOp);     // rollback blocked in SLEEP mode, then interrupted
  testIllegalState(rollbackOp);  // transaction is now terminated
  transaction.close();
  testIllegalState(rollbackOp);  // and closed
}
@Test
public void testRollback6() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable rollbackOp = new Runnable() {
    @Override
    public void run() {
      transaction.rollback();
    }
  };
  transaction.begin();
  transaction.commit();
  testIllegalState(rollbackOp);  // rollback after a successful commit is illegal
  transaction.close();
  testIllegalState(rollbackOp);  // and still illegal once closed
}
@Test
public void testRollback7() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable commitOp = new Runnable() {
    @Override
    public void run() {
      transaction.commit();
    }
  };
  final Runnable rollbackOp = new Runnable() {
    @Override
    public void run() {
      transaction.rollback();
    }
  };
  transaction.begin();
  testExceptions(commitOp);      // commit fails in every exceptional mode
  transaction.rollback();        // a genuine rollback still succeeds afterwards
  testIllegalState(rollbackOp);  // but a second rollback is illegal
  transaction.close();
  testIllegalState(rollbackOp);  // and remains illegal once closed
}
@Test
public void testClose1() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable closeOp = new Runnable() {
    @Override
    public void run() {
      transaction.close();
    }
  };
  testError(closeOp);         // close fails with the injected Error
  testIllegalState(closeOp);  // a second close attempt is illegal
}
@Test
public void testClose2() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable closeOp = new Runnable() {
    @Override
    public void run() {
      transaction.close();
    }
  };
  testRuntimeException(closeOp);  // close fails with the injected RuntimeException
  testIllegalState(closeOp);      // a second close attempt is illegal
}
@Test
public void testClose3() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable closeOp = new Runnable() {
    @Override
    public void run() {
      transaction.close();
    }
  };
  testChannelException(closeOp);  // close fails with the injected ChannelException
  testIllegalState(closeOp);      // a second close attempt is illegal
}
@Test
public void testClose4() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable closeOp = new Runnable() {
    @Override
    public void run() {
      transaction.close();
    }
  };
  transaction.begin();
  // Closing an open (begun but neither committed nor rolled back) transaction is illegal.
  testIllegalState(closeOp);
}
@Test
public void testClose5() throws Exception {
  final Transaction transaction = channel.getTransaction();
  final Runnable commitOp = new Runnable() {
    @Override
    public void run() {
      transaction.commit();
    }
  };
  final Runnable closeOp = new Runnable() {
    @Override
    public void run() {
      transaction.close();
    }
  };
  transaction.begin();
  testChannelException(commitOp);  // commit fails with the injected ChannelException
  testIllegalState(closeOp);       // the transaction cannot be closed in this state
}
}
| 9,948 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/channel/AbstractBasicChannelSemanticsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.flume.ChannelException;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import com.google.common.base.Preconditions;
/**
 * Shared fixture for tests of BasicChannelSemantics / BasicTransactionSemantics.
 * Provides a {@link TestChannel} whose transaction callbacks can be switched, via
 * {@link TestChannel.Mode}, to throw a particular throwable or to block so the
 * caller can be interrupted, plus helper methods asserting which exception type
 * a given channel operation produces.
 */
public abstract class AbstractBasicChannelSemanticsTest {

  // Fixed, immutable set of sample events shared by all subclasses.
  protected static List<Event> events;

  static {
    Event[] array = new Event[7];
    for (int i = 0; i < array.length; ++i) {
      array[i] = EventBuilder.withBody(("test event " + i).getBytes());
    }
    events = Collections.unmodifiableList(Arrays.asList(array));
  }

  // Created fresh in before() and discarded in after().
  protected ExecutorService executor = null;
  protected TestChannel channel = null;

  /**
   * Channel whose transaction callbacks consult the current {@link Mode}:
   * behave normally, throw an Error / RuntimeException / ChannelException,
   * or sleep so the test can interrupt the operation in flight.
   */
  protected static class TestChannel extends BasicChannelSemantics {

    private Queue<Event> queue = new ArrayDeque<Event>();

    public enum Mode {
      NORMAL,
      THROW_ERROR,
      THROW_RUNTIME,
      THROW_CHANNEL,
      SLEEP
    }

    private Mode mode = Mode.NORMAL;

    // Record how the most recent transaction terminated.
    private boolean lastTransactionCommitted = false;
    private boolean lastTransactionRolledBack = false;
    private boolean lastTransactionClosed = false;

    public Mode getMode() {
      return mode;
    }

    public void setMode(Mode mode) {
      this.mode = mode;
    }

    public boolean wasLastTransactionCommitted() {
      return lastTransactionCommitted;
    }

    public boolean wasLastTransactionRolledBack() {
      return lastTransactionRolledBack;
    }

    public boolean wasLastTransactionClosed() {
      return lastTransactionClosed;
    }

    @Override
    protected BasicTransactionSemantics createTransaction() {
      return new TestTransaction();
    }

    protected class TestTransaction extends BasicTransactionSemantics {

      /**
       * Applies the channel's current mode: throws the configured throwable,
       * sleeps for a long time (so the test can interrupt us), or does nothing
       * in NORMAL mode.
       */
      protected void doMode() throws InterruptedException {
        switch (mode) {
          case THROW_ERROR:
            throw new TestError();
          case THROW_RUNTIME:
            throw new TestRuntimeException();
          case THROW_CHANNEL:
            throw new ChannelException("test");
          case SLEEP:
            Thread.sleep(300000);
            break;
        }
      }

      @Override
      protected void doBegin() throws InterruptedException {
        doMode();
      }

      @Override
      protected void doPut(Event event) throws InterruptedException {
        doMode();  // may throw before the event is stored
        synchronized (queue) {
          queue.add(event);
        }
      }

      @Override
      protected Event doTake() throws InterruptedException {
        doMode();  // may throw before anything is removed
        synchronized (queue) {
          return queue.poll();
        }
      }

      @Override
      protected void doCommit() throws InterruptedException {
        doMode();  // committed flag is only set when doMode() succeeds
        lastTransactionCommitted = true;
      }

      @Override
      protected void doRollback() throws InterruptedException {
        // Flag is set BEFORE doMode() so a failing rollback still records the attempt.
        lastTransactionRolledBack = true;
        doMode();
      }

      @Override
      protected void doClose() {
        lastTransactionClosed = true;
        // doClose() cannot propagate InterruptedException, so SLEEP mode makes no sense here.
        Preconditions.checkState(mode != TestChannel.Mode.SLEEP,
            "doClose() can't throw InterruptedException, so why SLEEP?");
        try {
          doMode();
        } catch (InterruptedException e) {
          Assert.fail();
        }
      }
    }
  }

  /** Marker Error thrown in THROW_ERROR mode. */
  protected static class TestError extends Error {
    static final long serialVersionUID = -1;
  }

  /** Marker RuntimeException thrown in THROW_RUNTIME mode. */
  protected static class TestRuntimeException extends RuntimeException {
    static final long serialVersionUID = -1;
  }

  /**
   * Runs {@code test} and asserts that it throws {@code exceptionClass}.
   * Special case: when expecting InterruptedException, a ChannelException whose
   * cause is an InterruptedException also counts; the thread's interrupt flag
   * must then be set, and Thread.interrupted() clears it for subsequent tests.
   */
  protected void testException(Class<? extends Throwable> exceptionClass,
      Runnable test) {
    try {
      test.run();
      Assert.fail();
    } catch (Throwable e) {
      if (exceptionClass == InterruptedException.class
          && e instanceof ChannelException
          && e.getCause() instanceof InterruptedException) {
        Assert.assertTrue(Thread.interrupted());
      } else if (!exceptionClass.isInstance(e)) {
        throw new AssertionError(e);
      }
    }
  }

  /** Asserts that {@code test} throws IllegalArgumentException. */
  protected void testIllegalArgument(Runnable test) {
    testException(IllegalArgumentException.class, test);
  }

  /** Asserts that {@code test} throws IllegalStateException. */
  protected void testIllegalState(Runnable test) {
    testException(IllegalStateException.class, test);
  }

  /**
   * Runs {@code test} on a different (executor) thread and asserts it throws
   * IllegalStateException there — transactions are thread-bound.
   */
  protected void testWrongThread(final Runnable test) throws Exception {
    executor.submit(new Runnable() {
      @Override
      public void run() {
        testIllegalState(test);
      }
    }).get();
  }

  /** Runs {@code test} with the channel temporarily switched to {@code mode}. */
  protected void testMode(TestChannel.Mode mode, Runnable test) {
    TestChannel.Mode oldMode = channel.getMode();
    try {
      channel.setMode(mode);
      test.run();
    } finally {
      channel.setMode(oldMode);
    }
  }

  /** Asserts that, under {@code mode}, {@code test} throws {@code exceptionClass}. */
  protected void testException(TestChannel.Mode mode,
      final Class<? extends Throwable> exceptionClass, final Runnable test) {
    testMode(mode, new Runnable() {
      @Override
      public void run() {
        testException(exceptionClass, test);
      }
    });
  }

  /** Asserts that {@code test} throws TestError under THROW_ERROR mode. */
  protected void testError(Runnable test) {
    testException(TestChannel.Mode.THROW_ERROR, TestError.class, test);
  }

  /** Asserts that {@code test} throws TestRuntimeException under THROW_RUNTIME mode. */
  protected void testRuntimeException(Runnable test) {
    testException(TestChannel.Mode.THROW_RUNTIME, TestRuntimeException.class,
        test);
  }

  /** Asserts that {@code test} throws ChannelException under THROW_CHANNEL mode. */
  protected void testChannelException(Runnable test) {
    testException(TestChannel.Mode.THROW_CHANNEL, ChannelException.class, test);
  }

  /**
   * Runs {@code test} in SLEEP mode so it blocks, then asserts that interrupting
   * it surfaces as an InterruptedException (possibly wrapped in ChannelException).
   */
  protected void testInterrupt(final Runnable test) {
    testMode(TestChannel.Mode.SLEEP, new Runnable() {
      @Override
      public void run() {
        testException(InterruptedException.class, new Runnable() {
          @Override
          public void run() {
            interruptTest(test);
          }
        });
      }
    });
  }

  /**
   * Runs {@code test} on the current thread while a helper thread interrupts
   * us after roughly 500ms; used with SLEEP mode so the blocked operation gets
   * interrupted mid-flight.
   */
  protected void interruptTest(final Runnable test) {
    final Thread mainThread = Thread.currentThread();
    Future<?> future = executor.submit(new Runnable() {
      @Override
      public void run() {
        try {
          Thread.sleep(500);
        } catch (InterruptedException e) {
          // Ignored: waking early just means we interrupt the main thread sooner.
        }
        mainThread.interrupt();
      }
    });
    test.run();
    try {
      future.get();
    } catch (Exception e) {
      throw new AssertionError(e);
    }
  }

  /** Runs wrong-thread, error, runtime, channel-exception and interrupt checks. */
  protected void testExceptions(Runnable test) throws Exception {
    testWrongThread(test);
    testBasicExceptions(test);
    testInterrupt(test);
  }

  /** Runs the three throwing-mode checks (Error, RuntimeException, ChannelException). */
  protected void testBasicExceptions(Runnable test) throws Exception {
    testError(test);
    testRuntimeException(test);
    testChannelException(test);
  }

  @Before
  public void before() {
    // Guard against a previous test leaking fixture state.
    Preconditions.checkState(channel == null, "test cleanup failed!");
    Preconditions.checkState(executor == null, "test cleanup failed!");
    channel = new TestChannel();
    executor = Executors.newCachedThreadPool();
  }

  @After
  public void after() {
    channel = null;
    executor.shutdown();
    executor = null;
  }
}
| 9,949 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/formatter | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/formatter/output/TestBucketPath.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.formatter.output;
import org.apache.flume.Clock;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Nullable;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@code BucketPath.escapeString}: date/time escape sequences with and
 * without rounding, explicit time zones, non-padded date escapes, the timestamp
 * race guard, and {@code %[key]} static escapes.
 */
public class TestBucketPath {

  // Time zone with a raw UTC offset of 1 ms — just enough to differ from UTC and
  // exercise the explicit-timezone code paths.
  private static final TimeZone CUSTOM_TIMEZONE = new SimpleTimeZone(1, "custom-timezone");

  private Calendar cal;
  private Map<String, String> headers;
  private Map<String, String> headersWithTimeZone;

  @Before
  public void setUp() {
    // Same wall-clock instant expressed in the default zone and in CUSTOM_TIMEZONE.
    cal = createCalendar(2012, 5, 23, 13, 46, 33, 234, null);
    headers = new HashMap<>();
    headers.put("timestamp", String.valueOf(cal.getTimeInMillis()));
    Calendar calWithTimeZone = createCalendar(2012, 5, 23, 13, 46, 33, 234, CUSTOM_TIMEZONE);
    headersWithTimeZone = new HashMap<>();
    headersWithTimeZone.put("timestamp", String.valueOf(calWithTimeZone.getTimeInMillis()));
  }

  /**
   * Tests if the internally cached SimpleDateFormat instances can be reused with different
   * TimeZone without interference.
   */
  @Test
  public void testDateFormatCache() {
    TimeZone utcTimeZone = TimeZone.getTimeZone("UTC");
    String test = "%c";
    // First call primes the cache with UTC; the second must still format in the default zone.
    BucketPath.escapeString(
        test, headers, utcTimeZone, false, Calendar.HOUR_OF_DAY, 12, false);
    String escapedString = BucketPath.escapeString(
        test, headers, false, Calendar.HOUR_OF_DAY, 12);
    System.out.println("Escaped String: " + escapedString);
    SimpleDateFormat format = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
    Date d = new Date(cal.getTimeInMillis());
    String expectedString = format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the default timezone is properly rounded down
   * to 12 hours using "%c" ("EEE MMM d HH:mm:ss yyyy") formatting.
   */
  @Test
  public void testDateFormatHours() {
    String test = "%c";
    String escapedString = BucketPath.escapeString(
        test, headers, true, Calendar.HOUR_OF_DAY, 12);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 12, 0, 0, 0, null);
    SimpleDateFormat format = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
    Date d = new Date(cal2.getTimeInMillis());
    String expectedString = format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the custom timezone is properly rounded down
   * to 12 hours using "%c" ("EEE MMM d HH:mm:ss yyyy") formatting.
   */
  @Test
  public void testDateFormatHoursTimeZone() {
    String test = "%c";
    String escapedString = BucketPath.escapeString(
        test, headersWithTimeZone, CUSTOM_TIMEZONE, true, Calendar.HOUR_OF_DAY, 12, false);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 12, 0, 0, 0, CUSTOM_TIMEZONE);
    SimpleDateFormat format = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
    format.setTimeZone(CUSTOM_TIMEZONE);
    Date d = new Date(cal2.getTimeInMillis());
    String expectedString = format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the default timezone is properly rounded down
   * to 5 minutes using "%s" (seconds) formatting
   */
  @Test
  public void testDateFormatMinutes() {
    String test = "%s";
    String escapedString = BucketPath.escapeString(
        test, headers, true, Calendar.MINUTE, 5);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 13, 45, 0, 0, null);
    String expectedString = String.valueOf(cal2.getTimeInMillis() / 1000);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the custom timezone is properly rounded down
   * to 5 minutes using "%s" (seconds) formatting
   */
  @Test
  public void testDateFormatMinutesTimeZone() {
    String test = "%s";
    String escapedString = BucketPath.escapeString(
        test, headersWithTimeZone, CUSTOM_TIMEZONE, true, Calendar.MINUTE, 5, false);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 13, 45, 0, 0, CUSTOM_TIMEZONE);
    String expectedString = String.valueOf(cal2.getTimeInMillis() / 1000);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the default timezone is properly rounded down
   * to 5 seconds using "%s" (seconds) formatting
   */
  @Test
  public void testDateFormatSeconds() {
    String test = "%s";
    String escapedString = BucketPath.escapeString(
        test, headers, true, Calendar.SECOND, 5);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 13, 46, 30, 0, null);
    String expectedString = String.valueOf(cal2.getTimeInMillis() / 1000);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp with the custom timezone is properly rounded down
   * to 5 seconds using "%s" (seconds) formatting
   */
  @Test
  public void testDateFormatSecondsTimeZone() {
    String test = "%s";
    String escapedString = BucketPath.escapeString(
        test, headersWithTimeZone, CUSTOM_TIMEZONE, true, Calendar.SECOND, 5, false);
    System.out.println("Escaped String: " + escapedString);
    Calendar cal2 = createCalendar(2012, 5, 23, 13, 46, 30, 0, CUSTOM_TIMEZONE);
    String expectedString = String.valueOf(cal2.getTimeInMillis() / 1000);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests if the timestamp is properly formatted without rounding it down.
   */
  @Test
  public void testNoRounding() {
    String test = "%c";
    String escapedString = BucketPath.escapeString(
        test, headers, false, Calendar.HOUR_OF_DAY, 12);
    System.out.println("Escaped String: " + escapedString);
    SimpleDateFormat format = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
    Date d = new Date(cal.getTimeInMillis());
    String expectedString = format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests that %n (month) and %e (day) render without zero padding, for both
   * single- and two-digit values.
   */
  @Test
  public void testNoPadding() {
    Calendar calendar = Calendar.getInstance();
    // Check single digit dates
    calendar.set(2014, (5 - 1), 3, 13, 46, 33);
    Map<String, String> timestampHeaders = new HashMap<String, String>();
    timestampHeaders.put("timestamp", String.valueOf(calendar.getTimeInMillis()));
    SimpleDateFormat format = new SimpleDateFormat("M-d");
    String test = "%n-%e"; // eg 5-3
    String escapedString = BucketPath.escapeString(
        test, timestampHeaders, false, Calendar.HOUR_OF_DAY, 12);
    Date d = new Date(calendar.getTimeInMillis());
    String expectedString = format.format(d);
    // Check two digit dates
    calendar.set(2014, (11 - 1), 13, 13, 46, 33);
    timestampHeaders.put("timestamp", String.valueOf(calendar.getTimeInMillis()));
    escapedString += " " + BucketPath.escapeString(
        test, timestampHeaders, false, Calendar.HOUR_OF_DAY, 12);
    System.out.println("Escaped String: " + escapedString);
    d = new Date(calendar.getTimeInMillis());
    expectedString += " " + format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /** Tests that an explicitly supplied time zone is honored when formatting. */
  @Test
  public void testDateFormatTimeZone() {
    TimeZone utcTimeZone = TimeZone.getTimeZone("UTC");
    String test = "%c";
    String escapedString = BucketPath.escapeString(
        test, headers, utcTimeZone, false, Calendar.HOUR_OF_DAY, 12, false);
    System.out.println("Escaped String: " + escapedString);
    SimpleDateFormat format = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy");
    format.setTimeZone(utcTimeZone);
    Date d = new Date(cal.getTimeInMillis());
    String expectedString = format.format(d);
    System.out.println("Expected String: " + expectedString);
    Assert.assertEquals(expectedString, escapedString);
  }

  /**
   * Tests that when useLocalTimeStamp is on, the timestamp is read from the
   * clock exactly once, so %H and %M cannot straddle a boundary mid-format.
   */
  @Test
  public void testDateRace() {
    Clock mockClock = mock(Clock.class);
    DateTimeFormatter parser = ISODateTimeFormat.dateTimeParser();
    long two = parser.parseMillis("2013-04-21T02:59:59-00:00");
    long three = parser.parseMillis("2013-04-21T03:00:00-00:00");
    // Successive clock reads straddle the 03:00 boundary on purpose.
    when(mockClock.currentTimeMillis()).thenReturn(two, three);

    // save & modify static state (yuck)
    Clock origClock = BucketPath.getClock();
    BucketPath.setClock(mockClock);

    String pat = "%H:%M";
    String escaped = BucketPath.escapeString(pat,
        new HashMap<String, String>(),
        TimeZone.getTimeZone("UTC"), true, Calendar.MINUTE, 10, true);

    // restore static state
    BucketPath.setClock(origClock);

    Assert.assertEquals("Race condition detected", "02:50", escaped);
  }

  /**
   * Builds a Calendar for the given field values with millisecond precision,
   * in the default zone when {@code timeZone} is null.
   */
  private static Calendar createCalendar(int year, int month, int day,
                                         int hour, int minute, int second, int ms,
                                         @Nullable TimeZone timeZone) {
    Calendar cal = (timeZone == null) ? Calendar.getInstance() : Calendar.getInstance(timeZone);
    cal.set(year, month, day, hour, minute, second);
    cal.set(Calendar.MILLISECOND, ms);
    return cal;
  }

  /** Tests that %[localhost], %[IP] and %[FQDN] expand to the local host's names. */
  @Test
  public void testStaticEscapeStrings() {
    Map<String, String> staticStrings = new HashMap<>();
    try {
      InetAddress addr = InetAddress.getLocalHost();
      staticStrings.put("localhost", addr.getHostName());
      staticStrings.put("IP", addr.getHostAddress());
      staticStrings.put("FQDN", addr.getCanonicalHostName());
    } catch (UnknownHostException e) {
      Assert.fail("Test failed due to UnknownHostException");
    }
    TimeZone utcTimeZone = TimeZone.getTimeZone("UTC");
    String filePath = "%[localhost]/%[IP]/%[FQDN]";
    String realPath = BucketPath.escapeString(filePath, headers,
        utcTimeZone, false, Calendar.HOUR_OF_DAY, 12, false);
    String[] args = realPath.split("\\/");
    Assert.assertEquals(args[0], staticStrings.get("localhost"));
    Assert.assertEquals(args[1], staticStrings.get("IP"));
    Assert.assertEquals(args[2], staticStrings.get("FQDN"));
    StringBuilder s = new StringBuilder();
    s.append("Expected String: ").append(staticStrings.get("localhost"));
    s.append("/").append(staticStrings.get("IP")).append("/");
    s.append(staticStrings.get("FQDN"));
    System.out.println(s);
    System.out.println("Escaped String: " + realPath);
  }

  /** An unknown %[key] must raise a RuntimeException. */
  @Test(expected = RuntimeException.class)
  public void testStaticEscapeStringsNoKey() {
    TimeZone utcTimeZone = TimeZone.getTimeZone("UTC");
    String filePath = "%[abcdefg]/%[IP]/%[FQDN]";
    // Expected to throw: "abcdefg" is not a known static escape key.
    BucketPath.escapeString(filePath, headers,
        utcTimeZone, false, Calendar.HOUR_OF_DAY, 12, false);
  }
}
| 9,950 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/event/TestEventHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.event;
import org.junit.Assert;
import org.junit.Test;
/** Tests for {@code EventHelper.dumpEvent} body rendering. */
public class TestEventHelper {

  /** A body made of printable characters must appear verbatim in the dump. */
  @Test
  public void testPrintable() {
    SimpleEvent event = new SimpleEvent();
    event.setBody("Some text".getBytes());
    String dump = EventHelper.dumpEvent(event);
    System.out.println(dump);
    Assert.assertTrue(dump, dump.contains("Some text"));
  }

  /** Non-printable bytes (five zero bytes here) must be rendered as '.' placeholders. */
  @Test
  public void testNonPrintable() {
    SimpleEvent event = new SimpleEvent();
    event.setBody(new byte[5]);
    String dump = EventHelper.dumpEvent(event);
    Assert.assertTrue(dump, dump.contains("....."));
  }
}
| 9,951 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/client | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/client/avro/TestReliableSpoolingFileEventReader.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.client.avro;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang.mutable.MutableLong;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.client.avro.ReliableSpoolingFileEventReader.DeletePolicy;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.flume.client.avro.ReliableSpoolingFileEventReader.TrackingPolicy;
import org.apache.flume.source.SpoolDirectorySourceConfigurationConstants;
import org.apache.flume.source.SpoolDirectorySourceConfigurationConstants.ConsumeOrder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import junit.framework.Assert;
public class TestReliableSpoolingFileEventReader {
private static final Logger logger = LoggerFactory.getLogger(TestReliableSpoolingFileEventReader.class);
private static final File WORK_DIR = new File(
"target/test/work/" + TestReliableSpoolingFileEventReader.class.getSimpleName());
private static final File TRACKER_DIR = new File(WORK_DIR,
SpoolDirectorySourceConfigurationConstants.DEFAULT_TRACKER_DIR);
@Before
public void setup() throws IOException, InterruptedException {
  if (!WORK_DIR.isDirectory()) {
    Files.createParentDirs(new File(WORK_DIR, "dummy"));
  }
  // Create file0..file3 where fileN holds N lines; all four share one mtime.
  long commonModTime = 0;
  for (int fileIdx = 0; fileIdx < 4; fileIdx++) {
    File spoolFile = new File(WORK_DIR, "file" + fileIdx);
    StringBuilder content = new StringBuilder();
    for (int lineIdx = 0; lineIdx < fileIdx; lineIdx++) {
      content.append("file").append(fileIdx).append("line").append(lineIdx).append("\n");
    }
    Files.write(content.toString(), spoolFile, Charsets.UTF_8);
    if (commonModTime == 0) {
      commonModTime = spoolFile.lastModified();
    } else {
      spoolFile.setLastModified(commonModTime);
    }
  }
  // Guarantee the last file gets a strictly later modification timestamp.
  Thread.sleep(1500L);
  Files.write("\n", new File(WORK_DIR, "emptylineFile"), Charsets.UTF_8);
}
@After
public void tearDown() {
  // Some tests flip WORK_DIR read-only; restore write access first so deletion succeeds.
  setDirWritable(WORK_DIR);
  deleteDir(WORK_DIR);
}
/** Restores write permission on {@code dir}; the boolean result is intentionally ignored. */
private void setDirWritable(File dir){
  // make dir writable
  dir.setWritable(true);
}
/** Recursively deletes {@code dir}; failures are logged rather than rethrown (best-effort cleanup). */
private void deleteDir(File dir) {
  // delete all the files & dirs we created
  try {
    FileUtils.deleteDirectory(dir);
  } catch (IOException e) {
    logger.warn("Cannot delete work directory {}", dir.getAbsolutePath(), e);
  }
}
/**
 * Drains the reader: reads batches of up to {@code nEvents} events, committing
 * after each read, until an empty batch signals there is nothing left.
 */
private void processEventsWithReader(ReliableEventReader reader, int nEvents) throws IOException {
  while (true) {
    List<Event> batch = reader.readEvents(nEvents);
    reader.commit();
    if (batch.isEmpty()) {
      break;
    }
  }
}
/**
 * Verifies that {@code dir} contains exactly the given file names — no more,
 * no less.
 *
 * @param dir
 *          the directory to check
 * @param files
 *          the file names that should be present in dir
 * @return true only if the dir contains exactly the same files given, false
 *         otherwise
 */
private boolean checkLeftFilesInDir(File dir, String[] files) {
  Set<String> expectedNames = new HashSet<String>(Arrays.asList(files));
  Set<String> actualNames = new HashSet<String>();
  for (File found : listFiles(dir)) {
    actualNames.add(found.getName());
  }
  // Directory entries have unique names, so set equality is an exact comparison.
  return actualNames.equals(expectedNames);
}
@Test
public void testIncludePattern() throws IOException {
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .includePattern("^file2$")
      .deletePolicy(DeletePolicy.IMMEDIATE.toString())
      .sourceCounter(new SourceCounter("test"))
      .build();
  // Only "file2" matches the include pattern, so only it should be consumed and deleted.
  String[] initialFiles = { "file0", "file1", "file2", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + initialFiles.length + " files in working dir",
      checkLeftFilesInDir(WORK_DIR, initialFiles));
  processEventsWithReader(reader, 10);
  String[] remainingFiles = { "file0", "file1", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + remainingFiles.length + " files left in working dir",
      checkLeftFilesInDir(WORK_DIR, remainingFiles));
  Assert.assertTrue("Expected no files left in tracker dir",
      checkLeftFilesInDir(TRACKER_DIR, new String[0]));
}
@Test
public void testIgnorePattern() throws IOException {
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .ignorePattern("^file2$")
      .deletePolicy(DeletePolicy.IMMEDIATE.toString())
      .sourceCounter(new SourceCounter("test"))
      .build();
  // Everything except "file2" should be consumed and deleted.
  String[] initialFiles = { "file0", "file1", "file2", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + initialFiles.length + " files in working dir",
      checkLeftFilesInDir(WORK_DIR, initialFiles));
  processEventsWithReader(reader, 10);
  String[] remainingFiles = { "file2" };
  Assert.assertTrue("Expected " + remainingFiles.length + " files left in working dir",
      checkLeftFilesInDir(WORK_DIR, remainingFiles));
  Assert.assertTrue("Expected no files left in tracker dir",
      checkLeftFilesInDir(TRACKER_DIR, new String[0]));
}
@Test
public void testIncludeExcludePatternNoConflict() throws IOException {
  // Expected behavior mixing include/exclude conditions:
  // - file0, file1, file3: not deleted as matching ignore pattern and not
  //   matching include pattern
  // - file2: deleted as not matching ignore pattern and matching include pattern
  // - emptylineFile: not deleted as not matching ignore pattern but not
  //   matching include pattern as well
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .ignorePattern("^file[013]$")
      .includePattern("^file2$")
      .deletePolicy(DeletePolicy.IMMEDIATE.toString())
      .sourceCounter(new SourceCounter("test"))
      .build();
  String[] initialFiles = { "file0", "file1", "file2", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + initialFiles.length + " files in working dir",
      checkLeftFilesInDir(WORK_DIR, initialFiles));
  processEventsWithReader(reader, 10);
  String[] remainingFiles = { "file0", "file1", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + remainingFiles.length + " files left in working dir",
      checkLeftFilesInDir(WORK_DIR, remainingFiles));
  Assert.assertTrue("Expected no files left in tracker dir",
      checkLeftFilesInDir(TRACKER_DIR, new String[0]));
}
@Test
public void testIncludeExcludePatternConflict() throws IOException {
  // When both ignore and include patterns match the same file, the ignore
  // pattern wins as a safety measure, so nothing should be deleted.
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .ignorePattern("^file2$")
      .includePattern("^file2$")
      .deletePolicy(DeletePolicy.IMMEDIATE.toString())
      .sourceCounter(new SourceCounter("test"))
      .build();
  String[] initialFiles = { "file0", "file1", "file2", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + initialFiles.length + " files in working dir",
      checkLeftFilesInDir(WORK_DIR, initialFiles));
  processEventsWithReader(reader, 10);
  String[] remainingFiles = { "file0", "file1", "file2", "file3", "emptylineFile" };
  Assert.assertTrue("Expected " + remainingFiles.length + " files left in working dir",
      checkLeftFilesInDir(WORK_DIR, remainingFiles));
  Assert.assertTrue("Expected no files left in tracker dir",
      checkLeftFilesInDir(TRACKER_DIR, new String[0]));
}
@Test
public void testRepeatedCallsWithCommitAlways() throws IOException {
  ReliableEventReader reader =
      new ReliableSpoolingFileEventReader.Builder()
          .spoolDirectory(WORK_DIR)
          .sourceCounter(new SourceCounter("test"))
          .build();
  // Total events expected across all of the spooled fixture files.
  final int expectedLines = 1 + 1 + 2 + 3 + 1;
  int seenLines = 0;
  for (int attempt = 0; attempt < 10; attempt++) {
    seenLines += reader.readEvents(10).size();
    reader.commit();
  }
  Assert.assertEquals(expectedLines, seenLines);
}
@Test
public void testRepeatedCallsWithCommitOnSuccess() throws IOException {
  // Commit only after non-empty reads and verify that every successful
  // commit leaves tracker metadata behind in the tracker directory.
  String trackerDirPath =
      SpoolDirectorySourceConfigurationConstants.DEFAULT_TRACKER_DIR;
  File trackerDir = new File(WORK_DIR, trackerDirPath);
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .trackerDirPath(trackerDirPath)
      .sourceCounter(new SourceCounter("test"))
      .build();
  final int expectedLines = 1 + 1 + 2 + 3 + 1;
  int observedLines = 0;
  for (int attempt = 0; attempt < 10; attempt++) {
    int batchSize = reader.readEvents(10).size();
    if (batchSize > 0) {
      observedLines += batchSize;
      reader.commit();
      // ensure that there are files in the trackerDir
      File[] trackerFiles = trackerDir.listFiles();
      Assert.assertNotNull(trackerFiles);
      Assert.assertTrue("Expected tracker files in tracker dir " + trackerDir
          .getAbsolutePath(), trackerFiles.length > 0);
    }
  }
  Assert.assertEquals(expectedLines, observedLines);
}
@Test
public void testFileDeletion() throws IOException {
  // With the IMMEDIATE delete policy, every fully consumed file must be
  // removed from the spool dir and no tracker files may remain either.
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .deletePolicy(DeletePolicy.IMMEDIATE.name())
      .sourceCounter(new SourceCounter("test"))
      .build();
  List<File> before = listFiles(WORK_DIR);
  Assert.assertEquals("Expected 5, not: " + before, 5, before.size());
  // Drain the spool directory completely, committing each batch.
  while (true) {
    List<Event> batch = reader.readEvents(10);
    reader.commit();
    if (batch.isEmpty()) {
      break;
    }
  }
  List<File> after = listFiles(WORK_DIR);
  Assert.assertEquals("Expected 0, not: " + after, 0, after.size());
  List<File> trackerFiles = listFiles(new File(WORK_DIR,
      SpoolDirectorySourceConfigurationConstants.DEFAULT_TRACKER_DIR));
  Assert.assertEquals("Expected 0, not: " + trackerFiles, 0,
      trackerFiles.size());
}
@Test(expected = FlumeException.class)
public void testRenameTrackingPolicyOnReadonlySpoolDirectory() throws IOException {
  // The RENAME tracking policy needs write access to the spool directory,
  // so building the reader against a read-only directory must fail fast
  // with a FlumeException.
  File workDir = WORK_DIR;
  if (workDir.setReadOnly()) {
    try {
      new ReliableSpoolingFileEventReader.Builder().spoolDirectory(workDir)
          .trackingPolicy(TrackingPolicy.RENAME.name())
          .sourceCounter(new SourceCounter("test"))
          .build();
    } finally {
      // Restore write access: WORK_DIR is shared fixture state, and leaving
      // it read-only would break every subsequent test in this class.
      workDir.setWritable(true);
    }
  } else {
    // Operation on directory permission is not supported in current operating system.
    throw new FlumeException("Operation on directory permission is not supported in current operating system.");
  }
}
@Test()
public void testTrackerDirTrackingPolicyOnReadonlySpoolDirectory() throws IOException {
  // The TRACKER_DIR policy keeps its metadata in a separate directory, so a
  // read-only spool directory must NOT prevent the reader from being built.
  File workDir = WORK_DIR;
  String trackerDirPath =
      SpoolDirectorySourceConfigurationConstants.DEFAULT_TRACKER_DIR;
  File trackerDir = new File(WORK_DIR, trackerDirPath);
  if (!trackerDir.exists()) {
    trackerDir.mkdir();
  }
  if (workDir.setReadOnly()) {
    try {
      new ReliableSpoolingFileEventReader.Builder().spoolDirectory(workDir)
          .trackingPolicy(TrackingPolicy.TRACKER_DIR.name())
          .trackerDirPath(trackerDirPath)
          .sourceCounter(new SourceCounter("test"))
          .build();
    } finally {
      // Restore write access: WORK_DIR is shared fixture state, and leaving
      // it read-only would break every subsequent test in this class.
      workDir.setWritable(true);
    }
  }
  // If changing directory permissions is not supported on this OS there is
  // nothing to verify, so simply return.
}
@Test(expected = NullPointerException.class)
public void testNullConsumeOrder() throws IOException {
  // A null consume order is a programming error and must be rejected
  // eagerly at build time.
  ReliableSpoolingFileEventReader.Builder builder =
      new ReliableSpoolingFileEventReader.Builder();
  builder.spoolDirectory(WORK_DIR)
      .consumeOrder(null)
      .sourceCounter(new SourceCounter("test"))
      .build();
}
@Test
public void testConsumeFileRandomly() throws IOException {
  // RANDOM consume order gives no ordering guarantee, so the results are
  // compared as sets rather than ordered lists.
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .consumeOrder(ConsumeOrder.RANDOM)
      .sourceCounter(new SourceCounter("test"))
      .build();
  File extraFile = new File(WORK_DIR, "new-file");
  FileUtils.write(extraFile, "New file created in the end. Shoud be read randomly.\n");
  Set<String> observed = Sets.newHashSet();
  readEventsForFilesInDir(WORK_DIR, reader, observed);
  Set<String> expected = Sets.newHashSet();
  createExpectedFromFilesInSetup(expected);
  expected.add("");
  expected.add("New file created in the end. Shoud be read randomly.");
  Assert.assertEquals(expected, observed);
}
@Test
public void testConsumeFileRandomlyNewFile() throws Exception {
// Verifies that, in RANDOM consume order, a file created while the reader
// is already draining the directory is still picked up, and that the
// reader does not rescan the directory more often than necessary.
// Atomic moves are not supported in Windows.
if (SystemUtils.IS_OS_WINDOWS) {
return;
}
final ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(WORK_DIR)
.consumeOrder(ConsumeOrder.RANDOM)
.sourceCounter(new SourceCounter("test"))
.build();
File fileName = new File(WORK_DIR, "new-file");
FileUtils.write(fileName, "New file created in the end. Shoud be read randomly.\n");
Set<String> expected = Sets.newHashSet();
int totalFiles = WORK_DIR.listFiles().length;
final Set<String> actual = Sets.newHashSet();
ExecutorService executor = Executors.newSingleThreadExecutor();
// Handshake: semaphore1 signals the background reader has processed its
// first batch; semaphore2 lets it continue after "t-file" is created here.
final Semaphore semaphore1 = new Semaphore(0);
final Semaphore semaphore2 = new Semaphore(0);
Future<Void> wait = executor.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
readEventsForFilesInDir(WORK_DIR, reader, actual, semaphore1, semaphore2);
return null;
}
});
// Wait until the reader is mid-scan, then drop a brand-new file in.
semaphore1.acquire();
File finalFile = new File(WORK_DIR, "t-file");
FileUtils.write(finalFile, "Last file");
semaphore2.release();
wait.get();
int listFilesCount = ((ReliableSpoolingFileEventReader)reader).getListFilesCount();
finalFile.delete();
createExpectedFromFilesInSetup(expected);
expected.add("");
expected.add("New file created in the end. Shoud be read randomly.");
expected.add("Last file");
// The reader must not have listed the directory once per file; see
// getListFilesCount() for the rescan counter being bounded here.
Assert.assertTrue(listFilesCount < (totalFiles + 2));
Assert.assertEquals(expected, actual);
}
@Test
public void testConsumeFileOldest() throws IOException, InterruptedException {
// OLDEST order: files must be consumed in ascending last-modified order.
ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(WORK_DIR)
.consumeOrder(ConsumeOrder.OLDEST)
.sourceCounter(new SourceCounter("test"))
.build();
File file1 = new File(WORK_DIR, "new-file1");
File file2 = new File(WORK_DIR, "new-file2");
File file3 = new File(WORK_DIR, "new-file3");
// Sleep between writes so each file gets a distinct modification time
// (filesystem mtime resolution can be as coarse as one second).
Thread.sleep(1000L);
FileUtils.write(file2, "New file2 created.\n");
Thread.sleep(1000L);
FileUtils.write(file1, "New file1 created.\n");
Thread.sleep(1000L);
FileUtils.write(file3, "New file3 created.\n");
// order of age oldest to youngest (file2, file1, file3)
List<String> actual = Lists.newLinkedList();
readEventsForFilesInDir(WORK_DIR, reader, actual);
List<String> expected = Lists.newLinkedList();
createExpectedFromFilesInSetup(expected);
expected.add(""); // Empty file was added in the last in setup.
expected.add("New file2 created.");
expected.add("New file1 created.");
expected.add("New file3 created.");
Assert.assertEquals(expected, actual);
}
@Test
public void testConsumeFileYoungest() throws IOException, InterruptedException {
// YOUNGEST order: files must be consumed in descending last-modified order.
ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(WORK_DIR)
.consumeOrder(ConsumeOrder.YOUNGEST)
.sourceCounter(new SourceCounter("test"))
.build();
File file1 = new File(WORK_DIR, "new-file1");
File file2 = new File(WORK_DIR, "new-file2");
File file3 = new File(WORK_DIR, "new-file3");
// Sleep between writes so each file gets a distinct modification time.
Thread.sleep(1000L);
FileUtils.write(file2, "New file2 created.\n");
Thread.sleep(1000L);
FileUtils.write(file3, "New file3 created.\n");
Thread.sleep(1000L);
FileUtils.write(file1, "New file1 created.\n");
// order of age youngest to oldest (file2, file3, file1)
List<String> actual = Lists.newLinkedList();
readEventsForFilesInDir(WORK_DIR, reader, actual);
List<String> expected = Lists.newLinkedList();
createExpectedFromFilesInSetup(expected);
// The setup files are same-aged, so among themselves they come back in
// lexicographical order; prepend the newer files in reverse age order.
Collections.sort(expected);
// Empty Line file was added in the last in Setup.
expected.add(0, "");
expected.add(0, "New file2 created.");
expected.add(0, "New file3 created.");
expected.add(0, "New file1 created.");
Assert.assertEquals(expected, actual);
}
@Test
public void testConsumeFileOldestWithLexicographicalComparision()
throws IOException, InterruptedException {
// When OLDEST-ordered files share the same mtime, the tie must be broken
// lexicographically by file name.
ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(WORK_DIR)
.consumeOrder(ConsumeOrder.OLDEST)
.sourceCounter(new SourceCounter("test"))
.build();
File file1 = new File(WORK_DIR, "new-file1");
File file2 = new File(WORK_DIR, "new-file2");
File file3 = new File(WORK_DIR, "new-file3");
Thread.sleep(1000L);
FileUtils.write(file3, "New file3 created.\n");
FileUtils.write(file2, "New file2 created.\n");
FileUtils.write(file1, "New file1 created.\n");
// Force identical modification times to trigger the tie-breaker.
file2.setLastModified(file3.lastModified());
file1.setLastModified(file2.lastModified());
// file ages are same now they need to be ordered
// lexicographically (file1, file2, file3).
List<String> actual = Lists.newLinkedList();
readEventsForFilesInDir(WORK_DIR, reader, actual);
List<String> expected = Lists.newLinkedList();
createExpectedFromFilesInSetup(expected);
expected.add(""); // Empty file was added in the last in setup.
expected.add("New file1 created.");
expected.add("New file2 created.");
expected.add("New file3 created.");
Assert.assertEquals(expected, actual);
}
@Test
public void testConsumeFileYoungestWithLexicographicalComparision()
throws IOException, InterruptedException {
// When YOUNGEST-ordered files share the same mtime, the tie must still be
// broken lexicographically (ascending) by file name.
ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(WORK_DIR)
.consumeOrder(ConsumeOrder.YOUNGEST)
.sourceCounter(new SourceCounter("test"))
.build();
File file1 = new File(WORK_DIR, "new-file1");
File file2 = new File(WORK_DIR, "new-file2");
File file3 = new File(WORK_DIR, "new-file3");
Thread.sleep(1000L);
FileUtils.write(file1, "New file1 created.\n");
FileUtils.write(file2, "New file2 created.\n");
FileUtils.write(file3, "New file3 created.\n");
// Force identical modification times to trigger the tie-breaker.
file2.setLastModified(file3.lastModified());
file1.setLastModified(file2.lastModified());
// file ages are same now they need to be ordered
// lexicographically (file1, file2, file3).
List<String> actual = Lists.newLinkedList();
readEventsForFilesInDir(WORK_DIR, reader, actual);
List<String> expected = Lists.newLinkedList();
createExpectedFromFilesInSetup(expected);
expected.add(0, ""); // Empty file was added in the last in setup.
expected.add(0, "New file3 created.");
expected.add(0, "New file2 created.");
expected.add(0, "New file1 created.");
Assert.assertEquals(expected, actual);
}
// Stress tests: spool a depth-3 tree of 3 subdirectories per level with many
// files per leaf, once for each consume order and each tracking policy.
@Test public void testLargeNumberOfFilesOLDEST() throws IOException {
templateTestForRecursiveDirs(ConsumeOrder.OLDEST, null, 3, 3, 37, TrackingPolicy.RENAME);
}
@Test public void testLargeNumberOfFilesYOUNGEST() throws IOException {
// YOUNGEST needs a reversed comparator so the expected map iterates newest-first.
templateTestForRecursiveDirs(ConsumeOrder.YOUNGEST, Comparator.reverseOrder(),
3, 3, 37, TrackingPolicy.RENAME);
}
@Test public void testLargeNumberOfFilesRANDOM() throws IOException {
templateTestForRecursiveDirs(ConsumeOrder.RANDOM, null, 3, 3, 37, TrackingPolicy.RENAME);
}
@Test public void testLargeNumberOfFilesOLDESTTrackerDir() throws IOException {
templateTestForRecursiveDirs(ConsumeOrder.OLDEST, null, 3, 3, 10, TrackingPolicy.TRACKER_DIR);
}
@Test public void testLargeNumberOfFilesYOUNGESTTrackerDir() throws IOException {
templateTestForRecursiveDirs(ConsumeOrder.YOUNGEST, Comparator.reverseOrder(),
3, 3, 10, TrackingPolicy.TRACKER_DIR);
}
@Test public void testLargeNumberOfFilesRANDOMTrackerDir() throws IOException {
templateTestForRecursiveDirs(ConsumeOrder.RANDOM, null, 3, 3, 10, TrackingPolicy.TRACKER_DIR);
}
@Test
public void testZeroByteTrackerFile() throws IOException {
  // A zero-byte (truncated/corrupt) tracker metadata file must be
  // tolerated: the reader should start over rather than fail or skip input.
  String trackerDirPath =
      SpoolDirectorySourceConfigurationConstants.DEFAULT_TRACKER_DIR;
  File trackerDir = new File(WORK_DIR, trackerDirPath);
  if (!trackerDir.exists()) {
    trackerDir.mkdir();
  }
  // Replace any existing metadata file with an empty one.
  File trackerFile = new File(trackerDir, ReliableSpoolingFileEventReader.metaFileName);
  if (trackerFile.exists()) {
    trackerFile.delete();
  }
  trackerFile.createNewFile();
  ReliableEventReader reader = new ReliableSpoolingFileEventReader.Builder()
      .spoolDirectory(WORK_DIR)
      .trackerDirPath(trackerDirPath)
      .sourceCounter(new SourceCounter("test"))
      .build();
  final int expectedLines = 1;
  int seenLines = 0;
  List<Event> batch = reader.readEvents(10);
  if (!batch.isEmpty()) {
    seenLines += batch.size();
    reader.commit();
  }
  // This line will fail, if the zero-byte tracker file has not been handled
  Assert.assertEquals(expectedLines, seenLines);
}
/**
 * Template for the recursive-directory stress tests: builds a directory tree
 * of the given depth/branching with {@code fileNum} files per leaf, then
 * drains it with a reader configured for {@code order} and
 * {@code trackingPolicy}, checking that every expected event body is seen
 * (in order for OLDEST/YOUNGEST, as a set for RANDOM).
 *
 * @param comparator orders the mtime keys of the expected map; null means
 *        natural (ascending) order, i.e. oldest first.
 */
private void templateTestForRecursiveDirs(ConsumeOrder order, Comparator<Long> comparator, int depth, int dirNum,
int fileNum, TrackingPolicy trackingPolicy) throws IOException {
File dir = null;
try {
dir = new File("target/test/work/" + this.getClass().getSimpleName() + "_large");
Files.createParentDirs(new File(dir, "dummy"));
ReliableEventReader reader =
new ReliableSpoolingFileEventReader.Builder().spoolDirectory(dir)
.consumeOrder(order)
.trackingPolicy(trackingPolicy.toString())
.recursiveDirectorySearch(true)
.sourceCounter(new SourceCounter("test"))
.build();
// Keyed by mtime; iteration order of the TreeMap mirrors the consume order.
Map<Long, List<String>> expected;
if (comparator == null) {
expected = new TreeMap<Long, List<String>>();
} else {
expected = new TreeMap<Long, List<String>>(comparator);
}
createMultilevelFiles(dir, 0, depth, dirNum, fileNum, expected, new MutableLong(0L));
Collection<String> expectedColl;
int index = 0;
// RANDOM gives no ordering guarantee, so membership is checked via a set.
if (order == ConsumeOrder.RANDOM) {
expectedColl = Sets.newHashSet();
} else {
expectedColl = new ArrayList<>();
}
// Files sharing an mtime are consumed in lexicographical name order.
for (Entry<Long, List<String>> entry : expected.entrySet()) {
Collections.sort(entry.getValue());
expectedColl.addAll(entry.getValue());
}
int expNum = expectedColl.size();
int actualNum = 0;
for (int i = 0; i < expNum; i++) {
List<Event> events;
events = reader.readEvents(10);
for (Event e : events) {
actualNum++;
if (order == ConsumeOrder.RANDOM) {
// Each body was written exactly once, so remove() doubles as a
// duplicate check.
Assert.assertTrue(expectedColl.remove(new String(e.getBody())));
} else {
String exp = ((ArrayList<String>) expectedColl).get(index);
String actual = new String(e.getBody());
Assert.assertEquals(exp, actual);
index++;
}
}
reader.commit();
}
Assert.assertEquals(expNum, actualNum);
} finally {
// Always remove the generated tree so reruns start from a clean slate.
deleteDir(dir);
}
}
/**
 * Recursively builds a directory tree {@code maxDepth} levels deep with
 * {@code dirNum} subdirectories per level; each leaf directory receives
 * {@code fileNum} data files whose bodies and mtimes are recorded in
 * {@code expected}.
 */
private void createMultilevelFiles(File dir, int currDepth, int maxDepth, int dirNum, int fileNum,
    Map<Long, List<String>> expected, MutableLong id) throws IOException {
  if (currDepth == maxDepth) {
    // Reached a leaf: populate it with data files.
    createFiles(dir, fileNum, expected, id);
    return;
  }
  for (int child = 0; child < dirNum; child++) {
    File subDir = new File(dir, "dir-" + child);
    subDir.mkdirs();
    createMultilevelFiles(subDir, currDepth + 1, maxDepth, dirNum, fileNum, expected, id);
  }
}
/**
 * Writes {@code fileNum} files into {@code dir}, each containing its own
 * path, and records (mtime -> bodies) in {@code expected}. The shared
 * {@code id} counter makes names and mtimes unique across the whole tree.
 */
private void createFiles(File dir, int fileNum, Map<Long, List<String>> expected, MutableLong id) throws IOException {
  for (int n = 0; n < fileNum; n++) {
    File f = new File(dir, "file-" + id);
    String data = f.getPath();
    Files.write(data, f, Charsets.UTF_8);
    // Space mtimes 10s apart so consume-order comparisons are unambiguous;
    // key on f.lastModified() since the filesystem may round the value.
    f.setLastModified(id.longValue() * 10000L);
    List<String> bucket = expected.get(f.lastModified());
    if (bucket == null) {
      bucket = Lists.newArrayList();
      expected.put(f.lastModified(), bucket);
    }
    bucket.add(data);
    id.increment();
  }
}
/* Convenience overload: read events for every file in dir with no
 * semaphore handshake (see the 5-arg variant below). */
private void readEventsForFilesInDir(File dir, ReliableEventReader reader,
Collection<String> actual) throws IOException {
readEventsForFilesInDir(dir, reader, actual, null, null);
}
/* Read events, one for each file in the given directory. */
// Drains the directory, collecting event bodies into `actual`. If the
// semaphores are non-null they implement a one-shot handshake after the
// first batch: release semaphore1 to signal the caller, then block on
// semaphore2 until the caller has made its concurrent change.
private void readEventsForFilesInDir(File dir, ReliableEventReader reader,
Collection<String> actual, Semaphore semaphore1,
Semaphore semaphore2) throws IOException {
List<Event> events;
boolean executed = false;
// Note: listFiles(dir) is re-evaluated each iteration on purpose, so the
// loop bound tracks files appearing/disappearing during the drain.
for (int i = 0; i < listFiles(dir).size(); i++) {
events = reader.readEvents(10);
for (Event e : events) {
if (reader instanceof ReliableSpoolingFileEventReader) {
logger.debug("Adding event for file {} with body \"{}\"",
((ReliableSpoolingFileEventReader) reader).getLastFileReadInfo(), new String(e.getBody()));
}
actual.add(new String(e.getBody()));
}
reader.commit();
try {
// Perform the handshake exactly once, after the first committed batch.
if (!executed) {
executed = true;
if (semaphore1 != null) {
semaphore1.release();
}
if (semaphore2 != null) {
semaphore2.acquire();
}
}
} catch (Exception ex) {
// Wrap InterruptedException (and anything else) to fit the IOException
// signature shared with ReliableEventReader callers.
throw new IOException(ex);
}
}
}
/* Create expected results out of the files created in the setup method. */
// The setup writes an empty-line file plus file0..file3, where fileN
// contains N lines of the form "fileNlineM".
private void createExpectedFromFilesInSetup(Collection<String> expected) {
  expected.add("");
  for (int fileIdx = 0; fileIdx < 4; fileIdx++) {
    for (int lineIdx = 0; lineIdx < fileIdx; lineIdx++) {
      expected.add("file" + fileIdx + "line" + lineIdx);
    }
  }
}
/** Lists the plain files (non-directories) directly inside {@code dir}. */
private static List<File> listFiles(File dir) {
  // Single-parameter lambda resolves to FileFilter (not FilenameFilter).
  return Lists.newArrayList(dir.listFiles(pathname -> !pathname.isDirectory()));
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.client.avro;
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.instrumentation.SourceCounter;
import org.apache.flume.serialization.LineDeserializer;
import org.apache.flume.source.SpoolDirectorySourceConfigurationConstants;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.*;
public class TestSpoolingFileLineReader {
Logger logger = LoggerFactory.getLogger(TestSpoolingFileLineReader.class);
private static String completedSuffix =
SpoolDirectorySourceConfigurationConstants.DEFAULT_SPOOLED_FILE_SUFFIX;
private static int bufferMaxLineLength = 500;
private File tmpDir;
/** Decodes the event body into a String (platform default charset). */
static String bodyAsString(Event event) {
  byte[] body = event.getBody();
  return new String(body);
}
/** Decodes every event body in order, returning one String per event. */
static List<String> bodiesAsStrings(List<Event> events) {
  List<String> bodies = Lists.newArrayListWithCapacity(events.size());
  for (Event e : events) {
    bodies.add(new String(e.getBody()));
  }
  return bodies;
}
/**
 * Builds a spooling-file reader over {@code tmpDir} whose line deserializer
 * caps lines at {@code maxLineLength} characters.
 */
private ReliableSpoolingFileEventReader getParser(int maxLineLength) {
  Context ctx = new Context();
  ctx.put(LineDeserializer.MAXLINE_KEY, Integer.toString(maxLineLength));
  try {
    return new ReliableSpoolingFileEventReader.Builder()
        .spoolDirectory(tmpDir)
        .completedSuffix(completedSuffix)
        .deserializerContext(ctx)
        .sourceCounter(new SourceCounter("dummy"))
        .build();
  } catch (IOException ioe) {
    // Re-throw as unchecked; tests have no meaningful recovery here.
    throw Throwables.propagate(ioe);
  }
}
/** Builds a reader with the default maximum line length. */
private ReliableSpoolingFileEventReader getParser() {
return getParser(bufferMaxLineLength);
}
/**
 * Returns a filter accepting plain files and rejecting directories, used to
 * list spool-directory contents without subdirectories.
 */
private FileFilter directoryFilter() {
  return new FileFilter() {
    public boolean accept(File candidate) {
      // Collapsed the redundant if/return-false/return-true into one expression.
      return !candidate.isDirectory();
    }
  };
}
@Before
public void setUp() {
// Each test gets a fresh, empty temporary spool directory.
tmpDir = Files.createTempDir();
}
@After
public void tearDown() {
// Remove everything the test left in tmpDir, then the directory itself.
// NOTE(review): only one level of subdirectory nesting is cleaned; deeper
// trees (none are created by these tests) would leave residue — confirm
// if tests are added that nest further.
for (File f : tmpDir.listFiles()) {
if (f.isDirectory()) {
for (File sdf : f.listFiles()) {
sdf.delete();
}
}
f.delete();
}
tmpDir.delete();
}
@Test
// Create three multi-line files then read them back out. Ensures that
// files are accessed in correct order and that lines are read correctly
// from files.
public void testBasicSpooling() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
File f3 = new File(tmpDir.getAbsolutePath() + "/file3");
Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
Files.write("file2line1\nfile2line2\n", f2, Charsets.UTF_8);
Files.write("file3line1\nfile3line2\n", f3, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser = getParser();
List<String> out = Lists.newArrayList();
// Six lines total across the three files; commit after each event.
for (int i = 0; i < 6; i++) {
logger.info("At line {}", i);
String body = bodyAsString(parser.readEvent());
logger.debug("Seen event with body: {}", body);
out.add(body);
parser.commit();
}
// Make sure we got every line
assertTrue(out.contains("file1line1"));
assertTrue(out.contains("file1line2"));
assertTrue(out.contains("file2line1"));
assertTrue(out.contains("file2line2"));
assertTrue(out.contains("file3line1"));
assertTrue(out.contains("file3line2"));
List<File> outFiles = Lists.newArrayList(tmpDir.listFiles(directoryFilter()));
assertEquals(3, outFiles.size());
// Make sure files 1 and 2 have been processed and file 3 is still open
// (a file is only renamed with the completed suffix after being rolled).
assertTrue(outFiles.contains(new File(tmpDir + "/file1" + completedSuffix)));
assertTrue(outFiles.contains(new File(tmpDir + "/file2" + completedSuffix)));
assertTrue(outFiles.contains(new File(tmpDir + "/file3")));
}
@Test
// Make sure this works when there are initially no files
public void testInitiallyEmptyDirectory() throws IOException {
ReliableSpoolingFileEventReader parser = getParser();
// An empty spool dir yields a null event / empty batch, not an error.
assertNull(parser.readEvent());
assertEquals(0, parser.readEvents(10).size());
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
List<String> out = bodiesAsStrings(parser.readEvents(2));
parser.commit();
// Make sure we got all of the first file
assertTrue(out.contains("file1line1"));
assertTrue(out.contains("file1line2"));
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
Files.write("file2line1\nfile2line2\n", f2, Charsets.UTF_8);
parser.readEvent(); // force roll
parser.commit();
List<File> outFiles = Lists.newArrayList(tmpDir.listFiles(directoryFilter()));
assertEquals(2, outFiles.size());
assertTrue(
outFiles.contains(new File(tmpDir + "/file1" + completedSuffix)));
assertTrue(outFiles.contains(new File(tmpDir + "/file2")));
}
@Test(expected = IllegalStateException.class)
// Ensures that file immutability is enforced.
public void testFileChangesDuringRead() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser1 = getParser();
List<String> out = Lists.newArrayList();
out.addAll(bodiesAsStrings(parser1.readEvents(2)));
parser1.commit();
assertEquals(2, out.size());
assertTrue(out.contains("file1line1"));
assertTrue(out.contains("file1line2"));
// Appending to a file the reader has already started consuming must make
// a subsequent read fail with IllegalStateException.
Files.append("file1line3\n", f1, Charsets.UTF_8);
out.add(bodyAsString(parser1.readEvent()));
parser1.commit();
out.add(bodyAsString(parser1.readEvent()));
parser1.commit();
}
// Test when a competing destination file is found, but it matches,
// and we are on a Windows machine.
@Test
public void testDestinationExistsAndSameFileWindows() throws IOException {
  // Force the Windows code path, but restore the real os.name afterwards so
  // the mutated system property does not leak into every later test in
  // this JVM (other tests in this class also branch on it).
  String realOsName = System.getProperty("os.name");
  System.setProperty("os.name", "Some version of Windows");
  try {
    File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
    File f1Completed = new File(tmpDir.getAbsolutePath() + "/file1" +
        completedSuffix);
    // The completed-suffix copy has identical content, so on Windows the
    // reader may delete the original instead of failing.
    Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
    Files.write("file1line1\nfile1line2\n", f1Completed, Charsets.UTF_8);
    ReliableSpoolingFileEventReader parser = getParser();
    List<String> out = Lists.newArrayList();
    for (int i = 0; i < 2; i++) {
      out.add(bodyAsString(parser.readEvent()));
      parser.commit();
    }
    File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
    Files.write("file2line1\nfile2line2\n", f2, Charsets.UTF_8);
    for (int i = 0; i < 2; i++) {
      out.add(bodyAsString(parser.readEvent()));
      parser.commit();
    }
    // Make sure we got every line
    assertEquals(4, out.size());
    assertTrue(out.contains("file1line1"));
    assertTrue(out.contains("file1line2"));
    assertTrue(out.contains("file2line1"));
    assertTrue(out.contains("file2line2"));
    // Make sure original is deleted
    List<File> outFiles = Lists.newArrayList(tmpDir.listFiles(directoryFilter()));
    assertEquals(2, outFiles.size());
    assertTrue(outFiles.contains(new File(tmpDir + "/file2")));
    assertTrue(outFiles.contains(
        new File(tmpDir + "/file1" + completedSuffix)));
  } finally {
    System.setProperty("os.name", realOsName);
  }
}
// Test when a competing destination file is found, but it matches,
// and we are not on a Windows machine.
@Test(expected = IllegalStateException.class)
public void testDestinationExistsAndSameFileNotOnWindows() throws IOException {
  // Force the non-Windows code path, restoring the real os.name in a
  // finally block (the expected IllegalStateException would otherwise skip
  // any cleanup and leak the fake value into later tests).
  String realOsName = System.getProperty("os.name");
  System.setProperty("os.name", "Some version of Linux");
  try {
    File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
    File f1Completed = new File(tmpDir.getAbsolutePath() + "/file1" +
        completedSuffix);
    Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
    Files.write("file1line1\nfile1line2\n", f1Completed, Charsets.UTF_8);
    ReliableSpoolingFileEventReader parser = getParser();
    List<String> out = Lists.newArrayList();
    for (int i = 0; i < 2; i++) {
      out.add(bodyAsString(parser.readEvent()));
      parser.commit();
    }
    File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
    Files.write("file2line1\nfile2line2\n", f2, Charsets.UTF_8);
    // Rolling file1 onto the pre-existing completed copy must fail here.
    for (int i = 0; i < 2; i++) {
      out.add(bodyAsString(parser.readEvent()));
      parser.commit();
    }
    // Not reached
  } finally {
    System.setProperty("os.name", realOsName);
  }
}
@Test
// Test a basic case where a commit is missed.
public void testBasicCommitFailure() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n" +
"file1line9\nfile1line10\nfile1line11\nfile1line12\n",
f1, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser = getParser();
List<String> out1 = bodiesAsStrings(parser.readEvents(4));
assertTrue(out1.contains("file1line1"));
assertTrue(out1.contains("file1line2"));
assertTrue(out1.contains("file1line3"));
assertTrue(out1.contains("file1line4"));
// No commit above: the next read must replay the same four lines.
List<String> out2 = bodiesAsStrings(parser.readEvents(4));
assertTrue(out2.contains("file1line1"));
assertTrue(out2.contains("file1line2"));
assertTrue(out2.contains("file1line3"));
assertTrue(out2.contains("file1line4"));
parser.commit();
// After committing, reading resumes past the replayed batch.
List<String> out3 = bodiesAsStrings(parser.readEvents(4));
assertTrue(out3.contains("file1line5"));
assertTrue(out3.contains("file1line6"));
assertTrue(out3.contains("file1line7"));
assertTrue(out3.contains("file1line8"));
parser.commit();
List<String> out4 = bodiesAsStrings(parser.readEvents(4));
assertEquals(4, out4.size());
assertTrue(out4.contains("file1line9"));
assertTrue(out4.contains("file1line10"));
assertTrue(out4.contains("file1line11"));
assertTrue(out4.contains("file1line12"));
}
@Test
// Test a case where a commit is missed and the buffer size shrinks.
public void testBasicCommitFailureAndBufferSizeChanges() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n" +
"file1line9\nfile1line10\nfile1line11\nfile1line12\n",
f1, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser = getParser();
// Read 5 lines without committing...
List<String> out1 = bodiesAsStrings(parser.readEvents(5));
assertTrue(out1.contains("file1line1"));
assertTrue(out1.contains("file1line2"));
assertTrue(out1.contains("file1line3"));
assertTrue(out1.contains("file1line4"));
assertTrue(out1.contains("file1line5"));
// ...then re-read with a smaller batch: the replay starts over from the
// uncommitted position, two lines at a time.
List<String> out2 = bodiesAsStrings(parser.readEvents(2));
assertTrue(out2.contains("file1line1"));
assertTrue(out2.contains("file1line2"));
parser.commit();
List<String> out3 = bodiesAsStrings(parser.readEvents(2));
assertTrue(out3.contains("file1line3"));
assertTrue(out3.contains("file1line4"));
parser.commit();
List<String> out4 = bodiesAsStrings(parser.readEvents(2));
assertTrue(out4.contains("file1line5"));
assertTrue(out4.contains("file1line6"));
parser.commit();
List<String> out5 = bodiesAsStrings(parser.readEvents(2));
assertTrue(out5.contains("file1line7"));
assertTrue(out5.contains("file1line8"));
parser.commit();
// A final oversized request drains the remainder of the file.
List<String> out6 = bodiesAsStrings(parser.readEvents(15));
assertTrue(out6.contains("file1line9"));
assertTrue(out6.contains("file1line10"));
assertTrue(out6.contains("file1line11"));
assertTrue(out6.contains("file1line12"));
}
// Test when a competing destination file is found and it does not match.
@Test(expected = IllegalStateException.class)
public void testDestinationExistsAndDifferentFile() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
File f1Completed =
new File(tmpDir.getAbsolutePath() + "/file1" + completedSuffix);
// The pre-existing completed copy differs in content ("XXX"), so rolling
// file1 onto it must fail with IllegalStateException.
Files.write("file1line1\nfile1line2\n", f1, Charsets.UTF_8);
Files.write("file1line1\nfile1XXXe2\n", f1Completed, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser = getParser();
List<String> out = Lists.newArrayList();
for (int i = 0; i < 2; i++) {
out.add(bodyAsString(parser.readEvent()));
parser.commit();
}
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
Files.write("file2line1\nfile2line2\n", f2, Charsets.UTF_8);
// Reading past the end of file1 triggers the roll, and the failure.
for (int i = 0; i < 2; i++) {
out.add(bodyAsString(parser.readEvent()));
parser.commit();
}
// Not reached
}
// Empty files should be treated the same as other files and rolled.
@Test
public void testBehaviorWithEmptyFile() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file0");
Files.touch(f1);
ReliableSpoolingFileEventReader parser = getParser();
File f2 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n",
f2, Charsets.UTF_8);
// Skip over first file, which is empty, and will return an empty event.
Event event = parser.readEvent();
assertEquals(0, event.getBody().length);
List<String> out = bodiesAsStrings(parser.readEvents(8));
parser.commit();
assertEquals(8, out.size());
assertTrue(out.contains("file1line1"));
assertTrue(out.contains("file1line2"));
assertTrue(out.contains("file1line3"));
assertTrue(out.contains("file1line4"));
assertTrue(out.contains("file1line5"));
assertTrue(out.contains("file1line6"));
assertTrue(out.contains("file1line7"));
assertTrue(out.contains("file1line8"));
// The extra read past EOF forces file1 to be rolled as well.
assertNull(parser.readEvent());
// Make sure original is deleted
List<File> outFiles = Lists.newArrayList(tmpDir.listFiles(directoryFilter()));
assertEquals(2, outFiles.size());
assertTrue("Outfiles should have file0 & file1: " + outFiles,
outFiles.contains(new File(tmpDir + "/file0" + completedSuffix)));
assertTrue("Outfiles should have file0 & file1: " + outFiles,
outFiles.contains(new File(tmpDir + "/file1" + completedSuffix)));
}
@Test
public void testBatchedReadsWithinAFile() throws IOException {
  // A batched read smaller than the file must return exactly the first
  // batch-size lines of that file.
  File source = new File(tmpDir.getAbsolutePath() + "/file1");
  Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
      "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
      source, Charsets.UTF_8);
  ReliableSpoolingFileEventReader parser = getParser();
  List<String> batch = bodiesAsStrings(parser.readEvents(5));
  parser.commit();
  // Make sure we got every line
  assertEquals(5, batch.size());
  for (int line = 1; line <= 5; line++) {
    assertTrue(batch.contains("file1line" + line));
  }
}
@Test
// Batched reads never span two files: a batch ends at the file boundary
// even when fewer events than requested have been collected.
public void testBatchedReadsAcrossFileBoundary() throws IOException {
File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
"file1line5\nfile1line6\nfile1line7\nfile1line8\n",
f1, Charsets.UTF_8);
ReliableSpoolingFileEventReader parser = getParser();
List<String> out1 = bodiesAsStrings(parser.readEvents(5));
parser.commit();
File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
Files.write("file2line1\nfile2line2\nfile2line3\nfile2line4\n" +
"file2line5\nfile2line6\nfile2line7\nfile2line8\n",
f2, Charsets.UTF_8);
List<String> out2 = bodiesAsStrings(parser.readEvents(5));
parser.commit();
List<String> out3 = bodiesAsStrings(parser.readEvents(5));
parser.commit();
// Should have first 5 lines of file1
assertEquals(5, out1.size());
assertTrue(out1.contains("file1line1"));
assertTrue(out1.contains("file1line2"));
assertTrue(out1.contains("file1line3"));
assertTrue(out1.contains("file1line4"));
assertTrue(out1.contains("file1line5"));
// Should have 3 remaining lines of file1
assertEquals(3, out2.size());
assertTrue(out2.contains("file1line6"));
assertTrue(out2.contains("file1line7"));
assertTrue(out2.contains("file1line8"));
// Should have first 5 lines of file2
assertEquals(5, out3.size());
assertTrue(out3.contains("file2line1"));
assertTrue(out3.contains("file2line2"));
assertTrue(out3.contains("file2line3"));
assertTrue(out3.contains("file2line4"));
assertTrue(out3.contains("file2line5"));
// file1 should be moved now
List<File> outFiles = Lists.newArrayList(tmpDir.listFiles(directoryFilter()));
assertEquals(2, outFiles.size());
assertTrue(outFiles.contains(
new File(tmpDir + "/file1" + completedSuffix)));
assertTrue(outFiles.contains(new File(tmpDir + "/file2")));
}
@Test
// After a file has been fully read and committed, further reads from the
// now-empty directory must return no events (rather than blocking or
// throwing).
public void testEmptyDirectoryAfterCommittingFile() throws IOException {
  File input = new File(tmpDir.getAbsolutePath() + "/file1");
  Files.write("file1line1\nfile1line2\n", input, Charsets.UTF_8);
  ReliableSpoolingFileEventReader reader = getParser();
  List<String> bodies = bodiesAsStrings(reader.readEvents(2));
  assertEquals(2, bodies.size());
  reader.commit();
  assertEquals(0, bodiesAsStrings(reader.readEvents(10)).size());
}
// When a line violates the character limit we should truncate it.
@Test
public void testLineExceedsMaxLineLength() throws IOException {
  final int maxLineLength = 12;
  File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
  // Line 9 below is 46 characters; at a 12-character limit the test expects
  // it back as the chunks "reallyreally", "reallyreally", "LongLineHere"
  // and "file1line9".
  Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
      "file1line5\nfile1line6\nfile1line7\nfile1line8\n" +
      "reallyreallyreallyreallyLongLineHerefile1line9\n" +
      "file1line10\nfile1line11\nfile1line12\nfile1line13\n",
      f1, Charsets.UTF_8);
  ReliableSpoolingFileEventReader parser = getParser(maxLineLength);
  List<String> out1 = bodiesAsStrings(parser.readEvents(5));
  // First batch: lines 1-5 are within the limit and pass through intact.
  assertTrue(out1.contains("file1line1"));
  assertTrue(out1.contains("file1line2"));
  assertTrue(out1.contains("file1line3"));
  assertTrue(out1.contains("file1line4"));
  assertTrue(out1.contains("file1line5"));
  parser.commit();
  // Second batch: lines 6-8 plus the first 12-char chunk of the long line.
  List<String> out2 = bodiesAsStrings(parser.readEvents(4));
  assertTrue(out2.contains("file1line6"));
  assertTrue(out2.contains("file1line7"));
  assertTrue(out2.contains("file1line8"));
  assertTrue(out2.contains("reallyreally"));
  parser.commit();
  // Third batch: remaining chunks of the long line, then lines 10-11.
  List<String> out3 = bodiesAsStrings(parser.readEvents(5));
  assertTrue(out3.contains("reallyreally"));
  assertTrue(out3.contains("LongLineHere"));
  assertTrue(out3.contains("file1line9"));
  assertTrue(out3.contains("file1line10"));
  assertTrue(out3.contains("file1line11"));
  parser.commit();
  // Final batch: only two lines remain, and subsequent reads return nothing.
  List<String> out4 = bodiesAsStrings(parser.readEvents(5));
  assertTrue(out4.contains("file1line12"));
  assertTrue(out4.contains("file1line13"));
  assertEquals(2, out4.size());
  parser.commit();
  assertEquals(0, parser.readEvents(5).size());
}
@Test
public void testNameCorrespondsToLatestRead() throws IOException {
  // getLastFileRead() must report the file the most recent batch of events
  // was read from, tracking progress across file boundaries.
  File f1 = new File(tmpDir.getAbsolutePath() + "/file1");
  Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
      "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
      f1, Charsets.UTF_8);
  ReliableSpoolingFileEventReader parser = getParser();
  parser.readEvents(5);
  parser.commit();
  assertNotNull(parser.getLastFileRead());
  assertTrue(parser.getLastFileRead().endsWith("file1"));
  File f2 = new File(tmpDir.getAbsolutePath() + "/file2");
  Files.write("file2line1\nfile2line2\nfile2line3\nfile2line4\n" +
      "file2line5\nfile2line6\nfile2line7\nfile2line8\n",
      f2, Charsets.UTF_8);
  // This read drains the remaining three lines of file1, so the last file
  // read is still file1 even though file2 already exists.
  parser.readEvents(5);
  parser.commit();
  assertNotNull(parser.getLastFileRead());
  assertTrue(parser.getLastFileRead().endsWith("file1"));
  // Only now does reading move on to file2.
  parser.readEvents(5);
  parser.commit();
  assertNotNull(parser.getLastFileRead());
  assertTrue(parser.getLastFileRead().endsWith("file2"));
  parser.readEvents(5);
  assertTrue(parser.getLastFileRead().endsWith("file2"));
  // Even once file2 is exhausted, it remains the last file read.
  parser.readEvents(5);
  assertTrue(parser.getLastFileRead().endsWith("file2"));
}
}
| 9,953 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/client | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/client/avro/TestBufferedLineReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.client.avro;
import static org.apache.flume.client.avro.TestSpoolingFileLineReader.bodyAsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.apache.flume.client.avro.TestSpoolingFileLineReader.bodiesAsStrings;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
/**
 * Tests for {@link SimpleTextLineEventReader}: one event per text line,
 * read singly or in batches.
 */
public class TestBufferedLineReader {

  private File tmpDir;

  @Before
  public void before() {
    tmpDir = Files.createTempDir();
  }

  @After
  public void after() {
    for (File child : tmpDir.listFiles()) {
      child.delete();
    }
    tmpDir.delete();
  }

  @Test
  public void testSimpleRead() throws IOException {
    File input = new File(tmpDir.getAbsolutePath() + "/file1");
    Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
        input, Charsets.UTF_8);
    SimpleTextLineEventReader reader = new SimpleTextLineEventReader(new FileReader(input));
    // Events come back one per line, in file order, then null at EOF.
    for (int i = 1; i <= 8; i++) {
      assertEquals("file1line" + i, bodyAsString(reader.readEvent()));
    }
    assertEquals(null, reader.readEvent());
  }

  @Test
  public void testBatchedReadsWithinAFile() throws IOException {
    File input = new File(tmpDir.getAbsolutePath() + "/file1");
    Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
        input, Charsets.UTF_8);
    SimpleTextLineEventReader reader = new SimpleTextLineEventReader(new FileReader(input));
    List<String> bodies = bodiesAsStrings(reader.readEvents(5));
    // Exactly the requested five lines are returned.
    assertEquals(5, bodies.size());
    for (int i = 1; i <= 5; i++) {
      assertTrue(bodies.contains("file1line" + i));
    }
  }

  @Test
  public void testBatchedReadsAtFileBoundary() throws IOException {
    File input = new File(tmpDir.getAbsolutePath() + "/file1");
    Files.write("file1line1\nfile1line2\nfile1line3\nfile1line4\n" +
        "file1line5\nfile1line6\nfile1line7\nfile1line8\n",
        input, Charsets.UTF_8);
    SimpleTextLineEventReader reader = new SimpleTextLineEventReader(new FileReader(input));
    // Asking for more events than the file holds yields only what exists.
    List<String> bodies = bodiesAsStrings(reader.readEvents(10));
    assertEquals(8, bodies.size());
    for (int i = 1; i <= 8; i++) {
      assertTrue(bodies.contains("file1line" + i));
    }
  }
}
| 9,954 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/CensoringInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.interceptor;
import java.util.List;
import java.util.Map;
import org.apache.flume.Context;
import org.apache.flume.Event;
/**
 * Simple interceptor that removes a header called "Bad-Words" from all events.
 */
public class CensoringInterceptor implements Interceptor {

  /** Name of the header stripped from every intercepted event. */
  private static final String BAD_WORDS_HEADER = "Bad-Words";

  private CensoringInterceptor() {
    // Instances are created only through the Builder below.
  }

  @Override
  public void initialize() {
    // no-op
  }

  /**
   * Removes the "Bad-Words" header, if present, and returns the same
   * (mutated) event instance.
   */
  @Override
  public Event intercept(Event event) {
    // Map.remove is a no-op when the key is absent, so no containsKey
    // pre-check is needed.
    event.getHeaders().remove(BAD_WORDS_HEADER);
    return event;
  }

  /**
   * Applies {@link #intercept(Event)} to each event; the list is modified
   * in place and returned.
   */
  @Override
  public List<Event> intercept(List<Event> events) {
    for (Event e : events) {
      intercept(e);
    }
    return events;
  }

  @Override
  public void close() {
    // no-op
  }

  /** Builder required by the interceptor framework; takes no configuration. */
  public static class Builder implements Interceptor.Builder {
    @Override
    public Interceptor build() {
      return new CensoringInterceptor();
    }

    @Override
    public void configure(Context context) {
      // no configuration options
    }
  }
}
| 9,955 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestHostInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import java.net.InetAddress;
import com.google.common.base.Charsets;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.interceptor.HostInterceptor.Constants;
import org.junit.Assert;
import org.junit.Test;
public class TestHostInterceptor {

  /**
   * Ensure that the "host" header gets set (to something)
   */
  @Test
  public void testBasic() throws Exception {
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.HOST.toString());
    Interceptor interceptor = builder.build();
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    Assert.assertNull(eventBeforeIntercept.getHeaders().get(Constants.HOST));
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);
    // Only presence is checked here; the value itself is covered by
    // testUseIP / testUseHostname below.
    Assert.assertNotNull(actualHost);
  }

  /**
   * Ensure the header name can be overridden via the "hostHeader" property
   * and that the value written is the local host's IP address.
   */
  @Test
  public void testCustomHeader() throws Exception {
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.HOST.toString());
    Context ctx = new Context();
    ctx.put("preserveExisting", "false");
    ctx.put("hostHeader", "hostname");
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    Assert.assertNull(eventBeforeIntercept.getHeaders().get("hostname"));
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get("hostname");
    Assert.assertNotNull(actualHost);
    Assert.assertEquals(InetAddress.getLocalHost().getHostAddress(),
        actualHost);
  }

  /**
   * Ensure host is NOT overwritten when preserveExisting=true.
   */
  @Test
  public void testPreserve() throws Exception {
    Context ctx = new Context();
    ctx.put("preserveExisting", "true");
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.HOST.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    final String ORIGINAL_HOST = "originalhost";
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    eventBeforeIntercept.getHeaders().put(Constants.HOST, ORIGINAL_HOST);
    Assert.assertEquals(ORIGINAL_HOST,
        eventBeforeIntercept.getHeaders().get(Constants.HOST));
    // The pre-set header value must survive interception untouched.
    String expectedHost = ORIGINAL_HOST;
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);
    Assert.assertNotNull(actualHost);
    Assert.assertEquals(expectedHost, actualHost);
  }

  /**
   * Ensure host IS overwritten when preserveExisting=false.
   */
  @Test
  public void testClobber() throws Exception {
    Context ctx = new Context();
    ctx.put("preserveExisting", "false"); // default behavior
    Interceptor.Builder builder = InterceptorBuilderFactory
        .newInstance(InterceptorType.HOST.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    final String ORIGINAL_HOST = "originalhost";
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    eventBeforeIntercept.getHeaders().put(Constants.HOST, ORIGINAL_HOST);
    Assert.assertEquals(ORIGINAL_HOST, eventBeforeIntercept.getHeaders()
        .get(Constants.HOST));
    // The pre-set value must be replaced with the local IP address.
    String expectedHost = InetAddress.getLocalHost().getHostAddress();
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);
    Assert.assertNotNull(actualHost);
    Assert.assertEquals(expectedHost, actualHost);
  }

  /**
   * Ensure host IP is used by default instead of host name.
   */
  @Test
  public void testUseIP() throws Exception {
    Context ctx = new Context();
    ctx.put("useIP", "true"); // default behavior
    Interceptor.Builder builder = InterceptorBuilderFactory
        .newInstance(InterceptorType.HOST.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    final String ORIGINAL_HOST = "originalhost";
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    eventBeforeIntercept.getHeaders().put(Constants.HOST, ORIGINAL_HOST);
    Assert.assertEquals(ORIGINAL_HOST, eventBeforeIntercept.getHeaders()
        .get(Constants.HOST));
    String expectedHost = InetAddress.getLocalHost().getHostAddress();
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);
    Assert.assertNotNull(actualHost);
    Assert.assertEquals(expectedHost, actualHost);
  }

  /**
   * Ensure host name can be used instead of host IP.
   */
  @Test
  public void testUseHostname() throws Exception {
    Context ctx = new Context();
    ctx.put("useIP", "false");
    Interceptor.Builder builder = InterceptorBuilderFactory
        .newInstance(InterceptorType.HOST.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    final String ORIGINAL_HOST = "originalhost";
    Event eventBeforeIntercept = EventBuilder.withBody("test event",
        Charsets.UTF_8);
    eventBeforeIntercept.getHeaders().put(Constants.HOST, ORIGINAL_HOST);
    Assert.assertEquals(ORIGINAL_HOST, eventBeforeIntercept.getHeaders()
        .get(Constants.HOST));
    // With useIP=false the canonical host name is expected instead of the IP.
    String expectedHost = InetAddress.getLocalHost().getCanonicalHostName();
    Event eventAfterIntercept = interceptor.intercept(eventBeforeIntercept);
    String actualHost = eventAfterIntercept.getHeaders().get(Constants.HOST);
    Assert.assertNotNull(actualHost);
    Assert.assertEquals(expectedHost, actualHost);
  }
}
| 9,956 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestRegexExtractorInterceptorMillisSerializer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import junit.framework.Assert;
import org.apache.flume.Context;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Test;
/**
 * Tests that RegexExtractorInterceptorMillisSerializer validates its
 * "pattern" property and converts matching date strings to epoch millis.
 */
public class TestRegexExtractorInterceptorMillisSerializer {

  @Test
  public void shouldRequirePatternInConfiguration() {
    try {
      RegexExtractorInterceptorMillisSerializer serializer =
          new RegexExtractorInterceptorMillisSerializer();
      serializer.configure(new Context());
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      // configure() must reject a context with no "pattern" property.
    }
  }

  @Test
  public void shouldRequireValidPatternInConfiguration() {
    try {
      RegexExtractorInterceptorMillisSerializer serializer =
          new RegexExtractorInterceptorMillisSerializer();
      Context context = new Context();
      context.put("pattern", "ABCDEFG");
      serializer.configure(context);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      // configure() must reject a pattern that cannot be compiled.
    }
  }

  @Test
  public void shouldReturnMillisFromPattern() {
    RegexExtractorInterceptorMillisSerializer serializer =
        new RegexExtractorInterceptorMillisSerializer();
    String pattern = "yyyy-MM-dd HH:mm:ss";
    Context context = new Context();
    context.put("pattern", pattern);
    serializer.configure(context);
    DateTimeFormatter formatter = DateTimeFormat.forPattern(pattern);
    // Truncate "now" to whole seconds since the pattern carries no
    // millisecond field; the round-trip must then be exact.
    long wholeSeconds = (System.currentTimeMillis() / 1000L) * 1000L;
    Assert.assertEquals(String.valueOf(wholeSeconds),
        serializer.serialize(formatter.print(wholeSeconds)));
  }
}
| 9,957 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestTimestampInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import com.google.common.base.Charsets;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.interceptor.TimestampInterceptor.Constants;
import org.junit.Assert;
import org.junit.Test;
public class TestTimestampInterceptor {

  /**
   * Ensure that the "timestamp" header gets set (to something)
   */
  @Test
  public void testBasic() throws ClassNotFoundException, InstantiationException,
      IllegalAccessException {
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.TIMESTAMP.toString());
    Interceptor interceptor = builder.build();
    Event event = EventBuilder.withBody("test event", Charsets.UTF_8);
    Assert.assertNull(event.getHeaders().get(Constants.DEFAULT_HEADER_NAME));
    // Capture "now" first so the interceptor's timestamp must be >= it.
    Long now = System.currentTimeMillis();
    event = interceptor.intercept(event);
    String timestampStr = event.getHeaders().get(Constants.DEFAULT_HEADER_NAME);
    Assert.assertNotNull(timestampStr);
    Assert.assertTrue(Long.parseLong(timestampStr) >= now);
  }

  /**
   * Ensure timestamp is NOT overwritten when preserveExistingTimestamp == true
   */
  @Test
  public void testPreserve() throws ClassNotFoundException,
      InstantiationException, IllegalAccessException {
    Context ctx = new Context();
    ctx.put("preserveExisting", "true");
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.TIMESTAMP.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    // A deliberately ancient timestamp that must survive interception.
    long originalTs = 1L;
    Event event = EventBuilder.withBody("test event", Charsets.UTF_8);
    event.getHeaders().put(Constants.DEFAULT_HEADER_NAME, Long.toString(originalTs));
    Assert.assertEquals(Long.toString(originalTs),
        event.getHeaders().get(Constants.DEFAULT_HEADER_NAME));
    event = interceptor.intercept(event);
    String timestampStr = event.getHeaders().get(Constants.DEFAULT_HEADER_NAME);
    Assert.assertNotNull(timestampStr);
    Assert.assertTrue(Long.parseLong(timestampStr) == originalTs);
  }

  /**
   * Ensure timestamp IS overwritten when preserveExistingTimestamp == false
   */
  @Test
  public void testClobber() throws ClassNotFoundException,
      InstantiationException, IllegalAccessException {
    Context ctx = new Context();
    ctx.put("preserveExisting", "false"); // DEFAULT BEHAVIOR
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.TIMESTAMP.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    long originalTs = 1L;
    Event event = EventBuilder.withBody("test event", Charsets.UTF_8);
    event.getHeaders().put(Constants.DEFAULT_HEADER_NAME, Long.toString(originalTs));
    Assert.assertEquals(Long.toString(originalTs),
        event.getHeaders().get(Constants.DEFAULT_HEADER_NAME));
    // The stale value must be replaced with a fresh (>= now) timestamp.
    Long now = System.currentTimeMillis();
    event = interceptor.intercept(event);
    String timestampStr = event.getHeaders().get(Constants.DEFAULT_HEADER_NAME);
    Assert.assertNotNull(timestampStr);
    Assert.assertTrue(Long.parseLong(timestampStr) >= now);
  }

  /**
   * Ensure the timestamp is written to a custom header when "headerName" is
   * configured, leaving any value under the default header name untouched.
   */
  @Test
  public void testCustomHeader() throws Exception {
    Context ctx = new Context();
    ctx.put(TimestampInterceptor.Constants.CONFIG_HEADER_NAME, "timestampHeader");
    Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
        InterceptorType.TIMESTAMP.toString());
    builder.configure(ctx);
    Interceptor interceptor = builder.build();
    long originalTs = 1L;
    Event event = EventBuilder.withBody("test event", Charsets.UTF_8);
    event.getHeaders().put(Constants.DEFAULT_HEADER_NAME, Long.toString(originalTs));
    Long now = System.currentTimeMillis();
    event = interceptor.intercept(event);
    // The default header keeps its original value...
    Assert.assertEquals(Long.toString(originalTs),
        event.getHeaders().get(Constants.DEFAULT_HEADER_NAME));
    // ...while the configured custom header receives the new timestamp.
    String timestampStr = event.getHeaders().get("timestampHeader");
    Assert.assertNotNull(timestampStr);
    Assert.assertTrue(Long.parseLong(timestampStr) >= now);
  }
}
| 9,958 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestCensoringInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.interceptor;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.Transaction;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.event.EventBuilder;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestCensoringInterceptor {

  // NOTE(review): this logger is declared but never used in the test below.
  Logger logger =
      LoggerFactory.getLogger(TestCensoringInterceptor.class);

  /**
   * End-to-end check: an event pushed through a ChannelProcessor configured
   * with CensoringInterceptor arrives in the memory channel without its
   * "Bad-Words" header.
   */
  @Test
  public void testCensor() {
    // Stand up a memory channel fed through a replicating selector.
    MemoryChannel memCh = new MemoryChannel();
    memCh.configure(new Context());
    memCh.start();
    ChannelSelector cs = new ReplicatingChannelSelector();
    cs.setChannels(Lists.<Channel>newArrayList(memCh));
    ChannelProcessor cp = new ChannelProcessor(cs);
    // source config: register CensoringInterceptor under alias "a".
    Map<String, String> cfgMap = Maps.newHashMap();
    cfgMap.put("interceptors", "a");
    String builderClass = CensoringInterceptor.Builder.class.getName();
    cfgMap.put("interceptors.a.type", builderClass);
    Context ctx = new Context(cfgMap);
    // setup
    cp.configure(ctx);
    cp.initialize();
    // Build an event that carries the header the interceptor should strip.
    Map<String, String> headers = Maps.newHashMap();
    String badWord = "scribe";
    headers.put("Bad-Words", badWord);
    Event event1 = EventBuilder.withBody("test", Charsets.UTF_8, headers);
    Assert.assertEquals(badWord, event1.getHeaders().get("Bad-Words"));
    cp.processEvent(event1);
    // Take the event back out inside a transaction and verify the header
    // was removed before it reached the channel.
    Transaction tx = memCh.getTransaction();
    tx.begin();
    Event event1a = memCh.take();
    Assert.assertNull(event1a.getHeaders().get("Bad-Words"));
    tx.commit();
    tx.close();
    // cleanup / shutdown
    cp.close();
    memCh.stop();
  }
}
| 9,959 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestRegexExtractorInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.interceptor.Interceptor.Builder;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
public class TestRegexExtractorInterceptor {

  // Fresh builder per test; each test supplies its own Context.
  private Builder fixtureBuilder;

  @Before
  public void init() throws Exception {
    fixtureBuilder = InterceptorBuilderFactory
        .newInstance(InterceptorType.REGEX_EXTRACTOR.toString());
  }

  @Test
  public void shouldNotAllowConfigurationWithoutRegex() throws Exception {
    // build() before any configure(regex) must be rejected.
    try {
      fixtureBuilder.build();
      Assert.fail();
    } catch (IllegalArgumentException ex) {
      // Pass...
    }
  }

  @Test
  public void shouldNotAllowConfigurationWithIllegalRegex() throws Exception {
    // A syntactically invalid regex must be rejected at configure/build time.
    try {
      Context context = new Context();
      context.put(RegexExtractorInterceptor.REGEX, "?&?&&&?&?&?&&&??");
      fixtureBuilder.configure(context);
      fixtureBuilder.build();
      Assert.fail();
    } catch (IllegalArgumentException ex) {
      // Pass...
    }
  }

  @Test
  public void shouldNotAllowConfigurationWithoutMatchIds() throws Exception {
    // An empty serializers list leaves no way to name match groups.
    try {
      Context context = new Context();
      context.put(RegexExtractorInterceptor.REGEX, ".*");
      context.put(RegexExtractorInterceptor.SERIALIZERS, "");
      fixtureBuilder.configure(context);
      fixtureBuilder.build();
      Assert.fail();
    } catch (IllegalArgumentException ex) {
      // Pass...
    }
  }

  @Test
  public void shouldNotAllowMisconfiguredSerializers() throws Exception {
    // A serializers list made only of separators is invalid.
    try {
      Context context = new Context();
      context.put(RegexExtractorInterceptor.REGEX, "(\\d):(\\d):(\\d)");
      context.put(RegexExtractorInterceptor.SERIALIZERS, ",,,");
      fixtureBuilder.configure(context);
      fixtureBuilder.build();
      Assert.fail();
    } catch (IllegalArgumentException ex) {
      // Pass...
    }
  }

  @Test
  public void shouldNotAllowEmptyNames() throws Exception {
    // Whitespace-only serializer names must be rejected.
    try {
      String space = " ";
      Context context = new Context();
      context.put(RegexExtractorInterceptor.REGEX, "(\\d):(\\d):(\\d)");
      context.put(RegexExtractorInterceptor.SERIALIZERS,
          Joiner.on(',').join(space, space, space));
      fixtureBuilder.configure(context);
      fixtureBuilder.build();
      Assert.fail();
    } catch (IllegalArgumentException ex) {
      // Pass...
    }
  }

  @Test
  public void shouldExtractAddHeadersForAllMatchGroups() throws Exception {
    // Three match groups -> three headers named via the serializers config;
    // the event body itself must be left unchanged.
    Context context = new Context();
    context.put(RegexExtractorInterceptor.REGEX, "(\\d):(\\d):(\\d)");
    context.put(RegexExtractorInterceptor.SERIALIZERS, "s1 s2 s3");
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s1.name", "Num1");
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s2.name", "Num2");
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s3.name", "Num3");
    fixtureBuilder.configure(context);
    Interceptor fixture = fixtureBuilder.build();
    Event event = EventBuilder.withBody("1:2:3.4foobar5", Charsets.UTF_8);
    Event expected = EventBuilder.withBody("1:2:3.4foobar5", Charsets.UTF_8);
    expected.getHeaders().put("Num1", "1");
    expected.getHeaders().put("Num2", "2");
    expected.getHeaders().put("Num3", "3");
    Event actual = fixture.intercept(event);
    Assert.assertArrayEquals(expected.getBody(), actual.getBody());
    Assert.assertEquals(expected.getHeaders(), actual.getHeaders());
  }

  @Test
  public void shouldExtractAddHeadersForAllMatchGroupsIgnoringMissingIds()
      throws Exception {
    String body = "2012-10-17 14:34:44,338";
    Context context = new Context();
    // Skip the second group: only one serializer is declared, so the second
    // match group produces no header.
    context.put(RegexExtractorInterceptor.REGEX,
        "^(\\d\\d\\d\\d-\\d\\d-\\d\\d\\s\\d\\d:\\d\\d)(:\\d\\d,\\d\\d\\d)");
    context.put(RegexExtractorInterceptor.SERIALIZERS, "s1");
    context
        .put(RegexExtractorInterceptor.SERIALIZERS + ".s1.name", "timestamp");
    fixtureBuilder.configure(context);
    Interceptor fixture = fixtureBuilder.build();
    Event event = EventBuilder.withBody(body, Charsets.UTF_8);
    Event expected = EventBuilder.withBody(body, Charsets.UTF_8);
    expected.getHeaders().put("timestamp", "2012-10-17 14:34");
    Event actual = fixture.intercept(event);
    Assert.assertArrayEquals(expected.getBody(), actual.getBody());
    Assert.assertEquals(expected.getHeaders(), actual.getHeaders());
  }

  @Test
  public void shouldExtractAddHeadersUsingSpecifiedSerializer()
      throws Exception {
    // Truncate "now" to whole minutes so the minute-resolution pattern used
    // by the millis serializer round-trips exactly.
    long now = (System.currentTimeMillis() / 60000L) * 60000L;
    String pattern = "yyyy-MM-dd HH:mm:ss,SSS";
    DateTimeFormatter formatter = DateTimeFormat.forPattern(pattern);
    String body = formatter.print(now);
    // NOTE(review): leftover debug print.
    System.out.println(body);
    Context context = new Context();
    // Skip the second group
    context.put(RegexExtractorInterceptor.REGEX,
        "^(\\d\\d\\d\\d-\\d\\d-\\d\\d\\s\\d\\d:\\d\\d)(:\\d\\d,\\d\\d\\d)");
    context.put(RegexExtractorInterceptor.SERIALIZERS, "s1 s2");
    // s1 uses the millis serializer; s2 falls back to the default (pass-through).
    String millisSerializers = RegexExtractorInterceptorMillisSerializer.class.getName();
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s1.type", millisSerializers);
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s1.name", "timestamp");
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s1.pattern", "yyyy-MM-dd HH:mm");
    // Default type
    context.put(RegexExtractorInterceptor.SERIALIZERS + ".s2.name", "data");
    fixtureBuilder.configure(context);
    Interceptor fixture = fixtureBuilder.build();
    Event event = EventBuilder.withBody(body, Charsets.UTF_8);
    Event expected = EventBuilder.withBody(body, Charsets.UTF_8);
    expected.getHeaders().put("timestamp", String.valueOf(now));
    expected.getHeaders().put("data", ":00,000");
    Event actual = fixture.intercept(event);
    Assert.assertArrayEquals(expected.getBody(), actual.getBody());
    Assert.assertEquals(expected.getHeaders(), actual.getHeaders());
  }
}
| 9,960 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestStaticInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.interceptor.StaticInterceptor.Constants;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Charsets;
public class TestStaticInterceptor {
@Test
public void testDefaultKeyValue() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.STATIC.toString());
builder.configure(new Context());
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody("test", Charsets.UTF_8);
Assert.assertNull(event.getHeaders().get(Constants.KEY));
event = interceptor.intercept(event);
String val = event.getHeaders().get(Constants.KEY);
Assert.assertNotNull(val);
Assert.assertEquals(Constants.VALUE, val);
}
@Test
public void testCustomKeyValue() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.STATIC.toString());
Context ctx = new Context();
ctx.put(Constants.KEY, "myKey");
ctx.put(Constants.VALUE, "myVal");
builder.configure(ctx);
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody("test", Charsets.UTF_8);
Assert.assertNull(event.getHeaders().get("myKey"));
event = interceptor.intercept(event);
String val = event.getHeaders().get("myKey");
Assert.assertNotNull(val);
Assert.assertEquals("myVal", val);
}
@Test
public void testReplace() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.STATIC.toString());
Context ctx = new Context();
ctx.put(Constants.PRESERVE, "false");
ctx.put(Constants.VALUE, "replacement value");
builder.configure(ctx);
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody("test", Charsets.UTF_8);
event.getHeaders().put(Constants.KEY, "incumbent value");
Assert.assertNotNull(event.getHeaders().get(Constants.KEY));
event = interceptor.intercept(event);
String val = event.getHeaders().get(Constants.KEY);
Assert.assertNotNull(val);
Assert.assertEquals("replacement value", val);
}
@Test
public void testPreserve() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.STATIC.toString());
Context ctx = new Context();
ctx.put(Constants.PRESERVE, "true");
ctx.put(Constants.VALUE, "replacement value");
builder.configure(ctx);
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody("test", Charsets.UTF_8);
event.getHeaders().put(Constants.KEY, "incumbent value");
Assert.assertNotNull(event.getHeaders().get(Constants.KEY));
event = interceptor.intercept(event);
String val = event.getHeaders().get(Constants.KEY);
Assert.assertNotNull(val);
Assert.assertEquals("incumbent value", val);
}
} | 9,961 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestSearchAndReplaceInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import com.google.common.base.Charsets;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
public class TestSearchAndReplaceInterceptor {
private static final Logger logger =
LoggerFactory.getLogger(TestSearchAndReplaceInterceptor.class);
private void testSearchReplace(Context context, String input, String output)
throws Exception {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.SEARCH_REPLACE.toString());
builder.configure(context);
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody(input, Charsets.UTF_8);
event = interceptor.intercept(event);
String val = new String(event.getBody(), Charsets.UTF_8);
assertEquals(output, val);
logger.info(val);
}
@Test
public void testRemovePrefix() throws Exception {
Context context = new Context();
context.put("searchPattern", "^prefix");
context.put("replaceString", "");
testSearchReplace(context, "prefix non-prefix suffix", " non-prefix suffix");
}
@Test
public void testSyslogStripPriority() throws Exception {
final String input = "<13>Feb 5 17:32:18 10.0.0.99 Use the BFG!";
final String output = "Feb 5 17:32:18 10.0.0.99 Use the BFG!";
Context context = new Context();
context.put("searchPattern", "^<[0-9]+>");
context.put("replaceString", "");
testSearchReplace(context, input, output);
}
@Test
public void testCapturedGroups() throws Exception {
final String input = "The quick brown fox jumped over the lazy dog.";
final String output = "The hungry dog ate the careless fox.";
Context context = new Context();
context.put("searchPattern", "The quick brown ([a-z]+) jumped over the lazy ([a-z]+).");
context.put("replaceString", "The hungry $2 ate the careless $1.");
testSearchReplace(context, input, output);
}
@Test
public void testRepeatedRemoval() throws Exception {
final String input = "Email addresses: test@test.com and foo@test.com";
final String output = "Email addresses: REDACTED and REDACTED";
Context context = new Context();
context.put("searchPattern", "[A-Za-z0-9_.]+@[A-Za-z0-9_-]+\\.com");
context.put("replaceString", "REDACTED");
testSearchReplace(context, input, output);
}
@Test
public void testReplaceEmpty() throws Exception {
final String input = "Abc123@test.com";
final String output = "@test.com";
Context context = new Context();
context.put("searchPattern", "^[A-Za-z0-9_]+");
testSearchReplace(context, input, output);
context.put("replaceString", "");
testSearchReplace(context, input, output);
}
} | 9,962 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestRegexExtractorInterceptorPassThroughSerializer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import junit.framework.Assert;
import org.apache.flume.Context;
import org.junit.Test;
public class TestRegexExtractorInterceptorPassThroughSerializer {

  /** The pass-through serializer must echo its input unchanged. */
  @Test
  public void shouldReturnSameValue() {
    RegexExtractorInterceptorPassThroughSerializer serializer =
        new RegexExtractorInterceptorPassThroughSerializer();
    serializer.configure(new Context());
    final String input = "testing (1,2,3,4)";
    Assert.assertEquals(input, serializer.serialize(input));
  }
}
| 9,963 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/RemoveHeaderInterceptorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.interceptor;
import com.google.common.collect.ImmutableMap;
import junit.framework.Assert;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.PatternSyntaxException;
public class RemoveHeaderInterceptorTest {
private static final String HEADER1 = "my-header10";
private static final String HEADER2 = "my-header11";
private static final String HEADER3 = "my-header12";
private static final String HEADER4 = "my-header20";
private static final String HEADER5 = "my-header21";
private static final String DEFAULT_SEPARATOR = ", ";
private static final String MY_SEPARATOR = ";";
private Event buildEventWithHeader() {
return EventBuilder.withBody("My test event".getBytes(), ImmutableMap.of(
HEADER1, HEADER1, HEADER2, HEADER2, HEADER3, HEADER3, HEADER4, HEADER4,
HEADER5, HEADER5));
}
private Event buildEventWithoutHeader() {
return EventBuilder.withBody("My test event".getBytes());
}
@Test(expected = PatternSyntaxException.class)
public void testBadConfig() throws Exception {
new RemoveHeaderIntBuilder().fromList(HEADER1, "(").build();
}
@Test
public void testWithName() throws IllegalAccessException, ClassNotFoundException,
InstantiationException {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.withName(HEADER4).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertNull(event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testFromListWithDefaultSeparator1() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.fromList(HEADER4 + MY_SEPARATOR + HEADER2).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testFromListWithDefaultSeparator2() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.fromList(HEADER4 + DEFAULT_SEPARATOR + HEADER2).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertNull(event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertNull(event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testFromListWithCustomSeparator1() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.fromList(HEADER4 + MY_SEPARATOR + HEADER2, MY_SEPARATOR).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertNull(event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertNull(event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testFromListWithCustomSeparator2() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.fromList(HEADER4 + DEFAULT_SEPARATOR + HEADER2, MY_SEPARATOR).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testMatchRegex() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.matchRegex("my-header1.*").build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertNull(event1.getHeaders().get(HEADER1));
Assert.assertNull(event1.getHeaders().get(HEADER2));
Assert.assertNull(event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
@Test
public void testAll() throws Exception {
final Interceptor removeHeaderInterceptor = new RemoveHeaderIntBuilder()
.matchRegex("my-header2.*")
.fromList(HEADER1 + MY_SEPARATOR + HEADER3, MY_SEPARATOR)
.withName(HEADER2).build();
final Event event1 = buildEventWithHeader();
Assert.assertEquals(HEADER1, event1.getHeaders().get(HEADER1));
Assert.assertEquals(HEADER2, event1.getHeaders().get(HEADER2));
Assert.assertEquals(HEADER3, event1.getHeaders().get(HEADER3));
Assert.assertEquals(HEADER4, event1.getHeaders().get(HEADER4));
Assert.assertEquals(HEADER5, event1.getHeaders().get(HEADER5));
removeHeaderInterceptor.intercept(event1);
Assert.assertTrue(event1.getHeaders().isEmpty());
final Event event2 = buildEventWithoutHeader();
Assert.assertTrue(event2.getHeaders().isEmpty());
removeHeaderInterceptor.intercept(event2);
Assert.assertTrue(event2.getHeaders().isEmpty());
}
private static class RemoveHeaderIntBuilder {
final Map<String, String> contextMap = new HashMap<>();
RemoveHeaderIntBuilder withName(final String str) {
contextMap.put(RemoveHeaderInterceptor.WITH_NAME, str);
return this;
}
RemoveHeaderIntBuilder fromList(final String str) {
contextMap.put(RemoveHeaderInterceptor.FROM_LIST, str);
return this;
}
RemoveHeaderIntBuilder fromList(final String str,
final String separator) {
fromList(str);
contextMap.put(RemoveHeaderInterceptor.LIST_SEPARATOR, separator);
return this;
}
RemoveHeaderIntBuilder matchRegex(final String str) {
contextMap.put(RemoveHeaderInterceptor.MATCH_REGEX, str);
return this;
}
public Interceptor build() throws InstantiationException, IllegalAccessException,
ClassNotFoundException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.REMOVE_HEADER.toString());
builder.configure(new Context(contextMap));
return builder.build();
}
}
}
| 9,964 |
0 | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/test/java/org/apache/flume/interceptor/TestRegexFilteringInterceptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.interceptor;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.interceptor.RegexFilteringInterceptor.Constants;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Charsets;
public class TestRegexFilteringInterceptor {
@Test
/** By default, we should pass through any event. */
public void testDefaultBehavior() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.REGEX_FILTER.toString());
builder.configure(new Context());
Interceptor interceptor = builder.build();
Event event = EventBuilder.withBody("test", Charsets.UTF_8);
Event filteredEvent = interceptor.intercept(event);
Assert.assertNotNull(filteredEvent);
Assert.assertEquals(event, filteredEvent);
}
@Test
public void testInclusion() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.REGEX_FILTER.toString());
Context ctx = new Context();
ctx.put(Constants.REGEX, "(INFO.*)|(WARNING.*)");
ctx.put(Constants.EXCLUDE_EVENTS, "false");
builder.configure(ctx);
Interceptor interceptor = builder.build();
Event shouldPass1 = EventBuilder.withBody("INFO: some message",
Charsets.UTF_8);
Assert.assertNotNull(interceptor.intercept(shouldPass1));
Event shouldPass2 = EventBuilder.withBody("WARNING: some message",
Charsets.UTF_8);
Assert.assertNotNull(interceptor.intercept(shouldPass2));
Event shouldNotPass = EventBuilder.withBody("DEBUG: some message",
Charsets.UTF_8);
Assert.assertNull(interceptor.intercept(shouldNotPass));
builder.configure(ctx);
}
@Test
public void testExclusion() throws ClassNotFoundException,
InstantiationException, IllegalAccessException {
Interceptor.Builder builder = InterceptorBuilderFactory.newInstance(
InterceptorType.REGEX_FILTER.toString());
Context ctx = new Context();
ctx.put(Constants.REGEX, ".*DEBUG.*");
ctx.put(Constants.EXCLUDE_EVENTS, "true");
builder.configure(ctx);
Interceptor interceptor = builder.build();
Event shouldPass1 = EventBuilder.withBody("INFO: some message",
Charsets.UTF_8);
Assert.assertNotNull(interceptor.intercept(shouldPass1));
Event shouldPass2 = EventBuilder.withBody("WARNING: some message",
Charsets.UTF_8);
Assert.assertNotNull(interceptor.intercept(shouldPass2));
Event shouldNotPass = EventBuilder.withBody("this message has DEBUG in it",
Charsets.UTF_8);
Assert.assertNull(interceptor.intercept(shouldNotPass));
builder.configure(ctx);
}
} | 9,965 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/EventDrivenSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
 * A {@link Source} that does not need an external driver to poll for
 * {@linkplain Event events} to ingest; it provides its own event-driven
 * mechanism to invoke event processing.
 * <p>
 * This is a marker interface: it declares no methods of its own and exists
 * so that runners can select an event-driven execution strategy for the
 * source (e.g. via an {@code instanceof} check in a source-runner factory).
 */
public interface EventDrivenSource extends Source {
}
| 9,966 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/ChannelSelector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import java.util.List;
import org.apache.flume.conf.Configurable;
/**
 * <p>
 * Allows the selection of a subset of channels from the given set based on
 * its implementation policy. Different implementations of this interface
 * embody different policies that affect the choice of channels that a source
 * will push the incoming events to.
 * </p>
 */
public interface ChannelSelector extends NamedComponent, Configurable {
  /**
   * Supplies the complete set of channels this selector may choose from.
   *
   * @param channels all channels the selector could select from.
   */
  public void setChannels(List<Channel> channels);
  /**
   * Returns a list of required channels. A failure in writing the event to
   * these channels must be communicated back to the source that received this
   * event.
   * @param event the event for which channels are being selected
   * @return the list of required channels that this selector has selected for
   * the given event.
   */
  public List<Channel> getRequiredChannels(Event event);
  /**
   * Returns a list of optional channels. A failure in writing the event to
   * these channels must be ignored.
   * @param event the event for which channels are being selected
   * @return the list of optional channels that this selector has selected for
   * the given event.
   */
  public List<Channel> getOptionalChannels(Event event);
  /**
   * @return the list of all channels that this selector is configured to work
   * with.
   */
  public List<Channel> getAllChannels();
}
| 9,967 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/SourceRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.source.EventDrivenSourceRunner;
import org.apache.flume.source.PollableSourceRunner;
/**
 * A source runner controls how a source is driven.
 *
 * This is an abstract class used for instantiating derived classes.
 */
public abstract class SourceRunner implements LifecycleAware {

  private Source source;

  /**
   * Static factory method that picks the runner implementation matching the
   * concrete kind of {@link Source} supplied.
   *
   * @param source The source to run
   * @return A runner that can run the specified source
   * @throws IllegalArgumentException if the specified source does not implement
   * a supported derived interface of {@link SourceRunner}.
   */
  public static SourceRunner forSource(Source source) {
    if (source instanceof PollableSource) {
      PollableSourceRunner pollableRunner = new PollableSourceRunner();
      pollableRunner.setSource((PollableSource) source);
      return pollableRunner;
    }
    if (source instanceof EventDrivenSource) {
      EventDrivenSourceRunner eventDrivenRunner = new EventDrivenSourceRunner();
      eventDrivenRunner.setSource((EventDrivenSource) source);
      return eventDrivenRunner;
    }
    throw new IllegalArgumentException("No known runner type for source "
        + source);
  }

  public Source getSource() {
    return source;
  }

  public void setSource(Source source) {
    this.source = source;
  }
}
| 9,968 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/CounterGroup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Used for counting events, collecting metrics, etc.
 *
 * <p>Every method is synchronized on this instance, so a single group may be
 * shared between threads. Counters are created lazily on first access and
 * backed by {@link AtomicLong}s.</p>
 */
public class CounterGroup {

  private String name;
  private HashMap<String, AtomicLong> counters;

  public CounterGroup() {
    counters = new HashMap<String, AtomicLong>();
  }

  /** Returns the current value of the named counter (0 if it did not exist). */
  public synchronized Long get(String name) {
    return getCounter(name).get();
  }

  /** Increments the named counter by one and returns the new value. */
  public synchronized Long incrementAndGet(String name) {
    return getCounter(name).incrementAndGet();
  }

  /** Adds {@code delta} to the named counter and returns the new value. */
  public synchronized Long addAndGet(String name, Long delta) {
    return getCounter(name).addAndGet(delta);
  }

  /**
   * Merges every counter of {@code counterGroup} into this group by addition.
   * NOTE(review): this locks both groups (this, then the argument); two
   * threads merging two groups into each other in opposite order could
   * deadlock — confirm callers never do that.
   */
  public synchronized void add(CounterGroup counterGroup) {
    synchronized (counterGroup) {
      for (Entry<String, AtomicLong> entry : counterGroup.getCounters()
          .entrySet()) {
        addAndGet(entry.getKey(), entry.getValue().get());
      }
    }
  }

  /** Sets the named counter to {@code value}, creating it if needed. */
  public synchronized void set(String name, Long value) {
    getCounter(name).set(value);
  }

  /**
   * Returns the {@link AtomicLong} backing the named counter, creating it
   * lazily on first access.
   */
  public synchronized AtomicLong getCounter(String name) {
    // Single map lookup instead of containsKey + put + get.
    AtomicLong counter = counters.get(name);
    if (counter == null) {
      counter = new AtomicLong();
      counters.put(name, counter);
    }
    return counter;
  }

  @Override
  public synchronized String toString() {
    return "{ name:" + name + " counters:" + counters + " }";
  }

  public synchronized String getName() {
    return name;
  }

  public synchronized void setName(String name) {
    this.name = name;
  }

  public synchronized HashMap<String, AtomicLong> getCounters() {
    return counters;
  }

  public synchronized void setCounters(HashMap<String, AtomicLong> counters) {
    this.counters = counters;
  }
}
| 9,969 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
 * Factory for creating {@link Sink} instances from a type name.
 */
public interface SinkFactory {
/**
 * Instantiates a sink of the given type and assigns it the given name.
 *
 * @param name the component name for the new sink
 * @param type the sink type to instantiate
 * @return the newly created sink
 * @throws FlumeException if the sink cannot be created
 */
Sink create(String name, String type)
throws FlumeException;
/**
 * Resolves a sink type name to its implementing class.
 *
 * @throws FlumeException if no class is known for the given type
 */
Class<? extends Sink> getClass(String type)
throws FlumeException;
}
| 9,970 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.flume.lifecycle.LifecycleAware;
import org.apache.flume.lifecycle.LifecycleState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* A driver for {@linkplain Sink sinks} that polls them, attempting to
* {@linkplain Sink#process() process} events if any are available in the
* {@link Channel}.
* </p>
*
* <p>
* Note that, unlike {@linkplain Source sources}, all sinks are polled.
* </p>
*
* @see org.apache.flume.Sink
* @see org.apache.flume.SourceRunner
*/
public class SinkRunner implements LifecycleAware {
private static final Logger logger = LoggerFactory
.getLogger(SinkRunner.class);
// Each consecutive BACKOFF result lengthens the polling thread's sleep by
// this many milliseconds...
private static final long backoffSleepIncrement = 1000;
// ...capped at this ceiling. Also used as the pause after a delivery error.
private static final long maxBackoffSleep = 5000;
// Metrics (backoffs, errors, interruptions) updated by the polling thread.
private CounterGroup counterGroup;
private PollingRunner runner;
private Thread runnerThread;
private LifecycleState lifecycleState;
// The processor this runner drives by repeatedly calling process().
private SinkProcessor policy;
public SinkRunner() {
counterGroup = new CounterGroup();
lifecycleState = LifecycleState.IDLE;
}
public SinkRunner(SinkProcessor policy) {
this();
setSink(policy);
}
public SinkProcessor getPolicy() {
return policy;
}
public void setSink(SinkProcessor policy) {
this.policy = policy;
}
/**
 * Starts the configured processor, then spawns the named polling thread
 * that repeatedly invokes it.
 */
@Override
public void start() {
SinkProcessor policy = getPolicy();
policy.start();
runner = new PollingRunner();
runner.policy = policy;
runner.counterGroup = counterGroup;
runner.shouldStop = new AtomicBoolean();
runnerThread = new Thread(runner);
runnerThread.setName("SinkRunner-PollingRunner-" +
policy.getClass().getSimpleName());
runnerThread.start();
lifecycleState = LifecycleState.START;
}
/**
 * Signals the polling thread to stop, interrupts it (to break out of a
 * backoff sleep), waits for it to die, then stops the processor.
 */
@Override
public void stop() {
if (runnerThread != null) {
// The flag is set BEFORE interrupting so that an interrupt delivered
// mid-sleep still causes the polling loop to observe shouldStop and exit.
runner.shouldStop.set(true);
runnerThread.interrupt();
while (runnerThread.isAlive()) {
try {
logger.debug("Waiting for runner thread to exit");
runnerThread.join(500);
} catch (InterruptedException e) {
logger.debug("Interrupted while waiting for runner thread to exit. Exception follows.",
e);
}
}
}
getPolicy().stop();
lifecycleState = LifecycleState.STOP;
}
@Override
public String toString() {
return "SinkRunner: { policy:" + getPolicy() + " counterGroup:"
+ counterGroup + " }";
}
@Override
public LifecycleState getLifecycleState() {
return lifecycleState;
}
/**
 * {@link Runnable} that {@linkplain SinkProcessor#process() polls} a
 * {@link SinkProcessor} and manages event delivery notification,
 * {@link Sink.Status BACKOFF} delay handling, etc.
 */
public static class PollingRunner implements Runnable {
// Fields are assigned directly by SinkRunner.start() rather than passed
// through a constructor.
private SinkProcessor policy;
private AtomicBoolean shouldStop;
private CounterGroup counterGroup;
@Override
public void run() {
logger.debug("Polling sink runner starting");
while (!shouldStop.get()) {
try {
if (policy.process().equals(Sink.Status.BACKOFF)) {
// Linear backoff: sleep grows with each consecutive BACKOFF,
// capped at maxBackoffSleep.
counterGroup.incrementAndGet("runner.backoffs");
Thread.sleep(Math.min(
counterGroup.incrementAndGet("runner.backoffs.consecutive")
* backoffSleepIncrement, maxBackoffSleep));
} else {
counterGroup.set("runner.backoffs.consecutive", 0L);
}
} catch (InterruptedException e) {
// NOTE(review): despite the "Exiting" message, the loop only exits
// once shouldStop is observed; stop() sets the flag before
// interrupting, so shutdown interrupts do terminate the loop.
logger.debug("Interrupted while processing an event. Exiting.");
counterGroup.incrementAndGet("runner.interruptions");
} catch (Exception e) {
logger.error("Unable to deliver event. Exception follows.", e);
if (e instanceof EventDeliveryException) {
counterGroup.incrementAndGet("runner.deliveryErrors");
} else {
counterGroup.incrementAndGet("runner.errors");
}
try {
// Pause before retrying so a persistently failing sink does not
// spin the CPU.
Thread.sleep(maxBackoffSleep);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
}
}
logger.debug("Polling runner exiting. Metrics:{}", counterGroup);
}
}
}
| 9,971 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.apache.flume.lifecycle.LifecycleAware;
/**
* <p>
* A sink is connected to a <tt>Channel</tt> and consumes its contents,
* sending them to a configured destination that may vary according to
* the sink type.
* </p>
* <p>
* Sinks can be grouped together for various behaviors using <tt>SinkGroup</tt>
* and <tt>SinkProcessor</tt>. They are polled periodically by a
* <tt>SinkRunner</tt> via the processor</p>
*<p>
* Sinks are associated with unique names that can be used for separating
* configuration and working namespaces.
* </p>
* <p>
* While the {@link Sink#process()} call is guaranteed to only be accessed
* by a single thread, other calls may be concurrently accessed and should
* thus be protected.
* </p>
*
* @see org.apache.flume.Channel
* @see org.apache.flume.SinkProcessor
* @see org.apache.flume.SinkRunner
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Sink extends LifecycleAware, NamedComponent {
/**
 * <p>Sets the channel the sink will consume from</p>
 * @param channel The channel to be polled
 */
public void setChannel(Channel channel);
/**
 * @return the channel associated with this sink
 */
public Channel getChannel();
/**
 * <p>Requests the sink to attempt to consume data from attached channel</p>
 * <p><strong>Note</strong>: This method should be consuming from the channel
 * within the bounds of a Transaction. On successful delivery, the transaction
 * should be committed, and on failure it should be rolled back.</p>
 * @return READY if 1 or more Events were successfully delivered, BACKOFF if
 * no data could be retrieved from the channel feeding this sink
 * @throws EventDeliveryException In case of any kind of failure to
 * deliver data to the next hop destination.
 */
public Status process() throws EventDeliveryException;
/**
 * Result of a {@link #process()} call: {@code READY} when one or more
 * events were delivered, {@code BACKOFF} when no data was available and the
 * caller should pause before polling again.
 */
public static enum Status {
READY, BACKOFF
}
}
| 9,972 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* This class is about package attribute that captures
* version info of Flume that was compiled.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.PACKAGE)
public @interface VersionAnnotation {
/**
 * Get the Flume version
 * @return the version string "1.1"
 */
String version();
/**
 * Get the subversion revision.
 * @return the revision number as a string (eg. "100755")
 */
String revision();
/**
 * Get the branch from which this was compiled.
 * @return The branch name, e.g. "trunk"
 */
String branch();
/**
 * Get the username that compiled Flume.
 * @return the name of the user who performed the build
 */
String user();
/**
 * Get the date when Flume was compiled.
 * @return the date in unix 'date' format
 */
String date();
/**
 * Get the url for the subversion repository.
 * @return the repository URL the build was made from
 */
String url();
/**
 * Get a checksum of the source files from which
 * Flume was compiled.
 * @return a string that uniquely identifies the source
 **/
String srcChecksum();
}
| 9,973 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
/**
 * <p>A channel exception is raised whenever a channel operation fails.</p>
 * <p>This is an unchecked exception (it extends {@link RuntimeException}),
 * so callers are not required to declare or catch it.</p>
 */
public class ChannelException extends RuntimeException {
private static final long serialVersionUID = 1L;
/**
 * @param message the exception message
 */
public ChannelException(String message) {
super(message);
}
/**
 * @param ex the causal exception
 */
public ChannelException(Throwable ex) {
super(ex);
}
/**
 * @param message the exception message
 * @param ex the causal exception
 */
public ChannelException(String message, Throwable ex) {
super(message, ex);
}
}
| 9,974 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.apache.flume.lifecycle.LifecycleAware;
/**
* <p>
* A channel connects a {@link Source} to a {@link Sink}. The source
* acts as producer while the sink acts as a consumer of events. The channel
* itself is the buffer between the two.
* </p>
* <p>
* A channel exposes a {@link Transaction} interface that can be used by
* its clients to ensure atomic {@linkplain #put(Event) put} and
* {@linkplain #take() take} semantics.
* This is necessary to guarantee single hop reliability between agents.
* For instance, a source will successfully produce an {@linkplain Event event}
* if and only if that event can be committed to the source's associated
* channel. Similarly, a sink will consume an event if and
* only if its respective endpoint can accept the event. The
* extent of transaction support varies for different channel implementations
* ranging from strong to best-effort semantics.
* </p>
* <p>
* Channels are associated with unique {@linkplain NamedComponent names} that
* can be used for separating configuration and working namespaces.
* </p>
* <p>
* Channels must be thread safe, protecting any internal invariants as no
* guarantees are given as to when and by how many sources/sinks they may
* be simultaneously accessed by.
* </p>
*
* @see org.apache.flume.Source
* @see org.apache.flume.Sink
* @see org.apache.flume.Transaction
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Channel extends LifecycleAware, NamedComponent {
/**
 * <p>Puts the given event into the channel.</p>
 * <p><strong>Note</strong>: This method must be invoked within an active
 * {@link Transaction} boundary. Failure to do so can lead to unpredictable
 * results.</p>
 * @param event the event to transport.
 * @throws ChannelException in case this operation fails.
 * @see org.apache.flume.Transaction#begin()
 */
public void put(Event event) throws ChannelException;
/**
 * <p>Returns the next event from the channel if available. If the channel
 * does not have any events available, this method must return {@code null}.
 * </p>
 * <p><strong>Note</strong>: This method must be invoked within an active
 * {@link Transaction} boundary. Failure to do so can lead to unpredictable
 * results.</p>
 * @return the next available event or {@code null} if no events are
 * available.
 * @throws ChannelException in case this operation fails.
 * @see org.apache.flume.Transaction#begin()
 */
public Event take() throws ChannelException;
/**
 * @return the transaction instance associated with this channel. Callers
 * use it to demarcate {@link #put(Event)} and {@link #take()} operations;
 * see {@link Transaction} for the usage idiom.
 */
public Transaction getTransaction();
}
| 9,975 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
/**
 * Factory for creating {@link Channel} instances from a type name.
 */
public interface ChannelFactory {
/**
 * Instantiates a channel of the given type and assigns it the given name.
 *
 * @throws FlumeException if the channel cannot be created
 */
Channel create(String name, String type) throws FlumeException;
/**
 * Resolves a channel type name to its implementing class.
 *
 * @throws FlumeException if no class is known for the given type
 */
Class<? extends Channel> getClass(String type) throws FlumeException;
}
| 9,976 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
* Default implementation of Clock which uses System
*/
public class SystemClock implements Clock {
public long currentTimeMillis() {
return System.currentTimeMillis();
}
}
| 9,977 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
/**
* Enables a component to be tagged with a name so that it can be referred
* to uniquely within the configuration system.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface NamedComponent {
/**
 * Assigns this component its unique name.
 *
 * @param name the name to tag this component with
 */
public void setName(String name);
/**
 * @return the name previously assigned via {@link #setName(String)}
 */
public String getName();
}
| 9,978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import org.apache.flume.source.EventDrivenSourceRunner;
/**
* A {@link Source} that requires an external driver to poll to determine
* whether there are {@linkplain Event events} that are available to ingest
* from the source.
*
* @see org.apache.flume.source.EventDrivenSourceRunner
*/
public interface PollableSource extends Source {
/**
 * <p>
 * Attempt to pull an item from the source, sending it to the channel.
 * </p>
 * <p>
 * When driven by an {@link EventDrivenSourceRunner} process is guaranteed
 * to be called only by a single thread at a time, with no concurrency.
 * Any other mechanism driving a pollable source must follow the same
 * semantics.
 * </p>
 * @return {@code READY} if one or more events were created from the source.
 * {@code BACKOFF} if no events could be created from the source.
 * @throws EventDeliveryException If there was a failure in delivering to
 * the attached channel, or if a failure occurred in acquiring data from
 * the source.
 */
public Status process() throws EventDeliveryException;
/**
 * @return the amount by which the driving runner should lengthen its
 * backoff sleep after each consecutive {@code BACKOFF} result
 * (presumably milliseconds -- confirm against the runner implementation)
 */
public long getBackOffSleepIncrement();
/**
 * @return the upper bound on the driving runner's backoff sleep
 * (presumably milliseconds -- confirm against the runner implementation)
 */
public long getMaxBackOffSleepInterval();
/**
 * Result of a {@link #process()} call: {@code READY} when events were
 * produced, {@code BACKOFF} when nothing was available.
 */
public static enum Status {
READY, BACKOFF
}
}
| 9,979 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.lifecycle.LifecycleAware;
/**
* <p>
 * A source generates {@linkplain Event events} and calls methods on the
* configured {@link ChannelProcessor} to persist those events into the
* configured {@linkplain Channel channels}.
* </p>
*
* <p>
* Sources are associated with unique {@linkplain NamedComponent names} that can
* be used for separating configuration and working namespaces.
* </p>
*
* <p>
* No guarantees are given regarding thread safe access.
* </p>
*
* @see org.apache.flume.Channel
* @see org.apache.flume.Sink
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Source extends LifecycleAware, NamedComponent {
/**
 * Specifies which channel processor will handle this source's events.
 *
 * @param channelProcessor the processor to which generated events are
 * handed for delivery into the configured channels
 */
public void setChannelProcessor(ChannelProcessor channelProcessor);
/**
 * Returns the channel processor that will handle this source's events.
 *
 * @return the processor previously supplied via
 * {@link #setChannelProcessor(ChannelProcessor)}
 */
public ChannelProcessor getChannelProcessor();
}
| 9,980 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
/**
 * A {@link ChannelException} raised when an event cannot be accepted
 * because the channel is full.
 */
public class ChannelFullException extends ChannelException {
private static final long serialVersionUID = -8098141359417449525L;
/**
 * @param message the exception message
 */
public ChannelFullException(String message) {
super(message);
}
/**
 * @param ex the causal exception
 */
public ChannelFullException(Throwable ex) {
super(ex);
}
/**
 * @param message the exception message
 * @param ex the causal exception
 */
public ChannelFullException(String message, Throwable ex) {
super(message, ex);
}
}
| 9,981 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
import java.util.List;
import org.apache.flume.Sink.Status;
import org.apache.flume.conf.Configurable;
import org.apache.flume.lifecycle.LifecycleAware;
/**
 * <p>
 * Interface for a device that allows abstraction of the behavior of multiple
 * sinks, always assigned to a SinkRunner
 * </p>
 * <p>
 * A sink processor's {@link SinkProcessor#process()} method will only be
 * accessed by a single runner thread. However configuration methods
 * such as {@link Configurable#configure} may be concurrently accessed.
 * </p>
 *
 * @see org.apache.flume.Sink
 * @see org.apache.flume.SinkRunner
 * @see org.apache.flume.sink.SinkGroup
 */
public interface SinkProcessor extends LifecycleAware, Configurable {
/**
 * <p>Handle a request to poll the owned sinks.</p>
 *
 * <p>The processor is expected to call {@linkplain Sink#process()} on
 * whatever sink(s) appropriate, handling failures as appropriate and
 * throwing {@link EventDeliveryException} when there is a failure to
 * deliver any events according to the delivery policy defined by the
 * sink processor implementation. See specific implementations of this
 * interface for delivery behavior and policies.</p>
 *
 * @return Returns {@code READY} if events were successfully consumed,
 * or {@code BACKOFF} if no events were available in the channel to consume.
 * @throws EventDeliveryException if the behavior guaranteed by the processor
 * couldn't be carried out.
 */
Status process() throws EventDeliveryException;
/**
 * <p>Set all sinks to work with.</p>
 *
 * <p>Sink specific parameters are passed to the processor via configure</p>
 *
 * @param sinks A non-null, non-empty list of sinks to be chosen from by the
 * processor
 */
void setSinks(List<Sink> sinks);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
* Facade for System.currentTimeMillis for Testing
*/
public interface Clock {
/** @return the current time, in milliseconds since the Unix epoch. */
long currentTimeMillis();
}
| 9,983 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
 * Factory for creating {@link Source} instances from a type name.
 */
public interface SourceFactory {
/**
 * Instantiates a source of the given type and assigns it the given name.
 *
 * @param sourceName the component name for the new source
 * @param type the source type to instantiate
 * @return the newly created source
 * @throws FlumeException if the source cannot be created
 */
Source create(String sourceName, String type)
throws FlumeException;
/**
 * Resolves a source type name to its implementing class.
 *
 * @throws FlumeException if no class is known for the given type
 */
Class<? extends Source> getClass(String type)
throws FlumeException;
}
| 9,984 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume;
/**
 * Agent-wide constant definitions.
 */
public final class Constants {
/**
 * Disables the fail-fast startup behavior. This would be used in the
 * scenario where the agent is expected to start, but the config
 * file is expected to be populated at a later point in time.
 */
public static final String SYSPROP_CALLED_FROM_SERVICE
= "flume.called.from.service";
private Constants() {
// disable explicit object creation
}
}
| 9,985 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/Transaction.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume;
import org.apache.flume.channel.BasicChannelSemantics;
import org.apache.flume.channel.BasicTransactionSemantics;
/**
* <p>Provides the transaction boundary while accessing a channel.</p>
* <p>A <tt>Transaction</tt> instance is used to encompass channel access
* via the following idiom:</p>
* <pre><code>
* Channel ch = ...
* Transaction tx = ch.getTransaction();
* try {
* tx.begin();
* ...
* // ch.put(event) or ch.take()
* ...
* tx.commit();
* } catch (ChannelException ex) {
* tx.rollback();
* ...
* } finally {
* tx.close();
* }
* </code></pre>
* <p>Depending upon the implementation of the channel, the transaction
* semantics may be strong, or best-effort only.</p>
*
* <p>
* Transactions must be thread safe. To provide a guarantee of thread safe
* access to Transactions, see {@link BasicChannelSemantics} and
* {@link BasicTransactionSemantics}.
*
* @see org.apache.flume.Channel
*/
public interface Transaction {

  /** Lifecycle states a transaction moves through. */
  enum TransactionState { Started, Committed, RolledBack, Closed }

  /**
   * <p>Starts a transaction boundary for the current channel operation. If a
   * transaction is already in progress, this method will join that transaction
   * using reference counting.</p>
   * <p><strong>Note</strong>: For every invocation of this method there must
   * be a corresponding invocation of {@linkplain #close()} method. Failure
   * to ensure this can lead to dangling transactions and unpredictable results.
   * </p>
   */
  void begin();

  /**
   * Indicates that the transaction can be successfully committed. It is
   * required that a transaction be in progress when this method is invoked.
   */
  void commit();

  /**
   * Indicates that the transaction must be aborted. It is
   * required that a transaction be in progress when this method is invoked.
   */
  void rollback();

  /**
   * <p>Ends a transaction boundary for the current channel operation. If a
   * transaction is already in progress, this method will join that transaction
   * using reference counting. The transaction is completed only if there
   * are no more references left for this transaction.</p>
   * <p><strong>Note</strong>: For every invocation of this method there must
   * be a corresponding invocation of {@linkplain #begin()} method. Failure
   * to ensure this can lead to dangling transactions and unpredictable results.
   * </p>
   */
  void close();
}
| 9,986 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/DirectMemoryUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.tools;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
 * Helpers for allocating and releasing direct (off-heap) ByteBuffers while
 * tracking the cumulative amount of direct memory handed out.
 */
public class DirectMemoryUtils {

  private static final Logger LOG = LoggerFactory
      .getLogger(DirectMemoryUtils.class);

  private static final String MAX_DIRECT_MEMORY_PARAM =
      "-XX:MaxDirectMemorySize=";

  private static final long DEFAULT_SIZE = getDefaultDirectMemorySize();

  // Running total of direct bytes handed out by allocate() and not yet
  // released via clean(). A long so the counter cannot overflow once the
  // cumulative allocation exceeds Integer.MAX_VALUE bytes.
  private static final AtomicLong allocated = new AtomicLong(0);

  /**
   * Allocates a direct ByteBuffer of the requested size and adds it to the
   * running allocation total.
   *
   * @param size number of bytes to allocate; must be greater than zero
   * @return the newly allocated direct buffer
   * @throws OutOfMemoryError if direct memory is exhausted; logged with a
   *     hint to raise -XX:MaxDirectMemorySize and then rethrown
   */
  public static ByteBuffer allocate(int size) {
    Preconditions.checkArgument(size > 0, "Size must be greater than zero");
    long maxDirectMemory = getDirectMemorySize();
    long allocatedCurrently = allocated.get();
    LOG.info("Direct Memory Allocation: " +
        " Allocation = " + size +
        ", Allocated = " + allocatedCurrently +
        ", MaxDirectMemorySize = " + maxDirectMemory +
        ", Remaining = " + Math.max(0, (maxDirectMemory - allocatedCurrently)));
    try {
      ByteBuffer result = ByteBuffer.allocateDirect(size);
      allocated.addAndGet(size);
      return result;
    } catch (OutOfMemoryError error) {
      LOG.error("Error allocating " + size + ", you likely want" +
          " to increase " + MAX_DIRECT_MEMORY_PARAM, error);
      throw error;
    }
  }

  /**
   * Forces the native memory behind a direct buffer to be released without
   * waiting for GC, and subtracts its capacity from the running total.
   *
   * <p>NOTE(review): this goes through the non-public sun.nio "cleaner" API
   * via reflection; on JDK 9+ the setAccessible call may fail unless the
   * relevant module is opened — confirm on the target JVM.
   *
   * @param buffer a direct buffer, typically returned by {@link #allocate(int)}
   * @throws Exception if the reflective cleaner invocation fails
   */
  public static void clean(ByteBuffer buffer) throws Exception {
    Preconditions.checkArgument(buffer.isDirect(),
        "buffer isn't direct!");
    Method cleanerMethod = buffer.getClass().getMethod("cleaner");
    cleanerMethod.setAccessible(true);
    Object cleaner = cleanerMethod.invoke(buffer);
    Method cleanMethod = cleaner.getClass().getMethod("clean");
    cleanMethod.setAccessible(true);
    cleanMethod.invoke(cleaner);
    allocated.addAndGet(-buffer.capacity());
    long maxDirectMemory = getDirectMemorySize();
    LOG.info("Direct Memory Deallocation: " +
        ", Allocated = " + allocated.get() +
        ", MaxDirectMemorySize = " + maxDirectMemory +
        ", Remaining = " + Math.max(0, (maxDirectMemory - allocated.get())));
  }

  /**
   * Returns the JVM's direct-memory limit in bytes. If the JVM was started
   * with -XX:MaxDirectMemorySize, the last occurrence of that flag wins
   * (the argument list is scanned in reverse); otherwise the JVM default
   * from {@link #getDefaultDirectMemorySize()} is returned.
   */
  public static long getDirectMemorySize() {
    RuntimeMXBean runtimeMxBean = ManagementFactory.getRuntimeMXBean();
    List<String> arguments = Lists.reverse(runtimeMxBean.getInputArguments());
    for (String argument : arguments) {
      if (argument.contains(MAX_DIRECT_MEMORY_PARAM)) {
        String memSize = argument.toLowerCase(Locale.ENGLISH)
            .replace(MAX_DIRECT_MEMORY_PARAM.toLowerCase(Locale.ENGLISH), "").trim();
        // Single-letter suffix selects the unit; plain bytes when absent.
        long multiplier = 1L;
        if (memSize.contains("k")) {
          multiplier = 1024L;
        } else if (memSize.contains("m")) {
          multiplier = 1024L * 1024L;
        } else if (memSize.contains("g")) {
          multiplier = 1024L * 1024L * 1024L;
        }
        memSize = memSize.replaceAll("[^\\d]", "");
        return Long.parseLong(memSize) * multiplier;
      }
    }
    return DEFAULT_SIZE;
  }

  /**
   * Asks the JVM (sun.misc.VM.maxDirectMemory()) for the default direct
   * memory limit, falling back to the maximum heap size when unavailable.
   */
  private static long getDefaultDirectMemorySize() {
    try {
      Class<?> vmClass = Class.forName("sun.misc.VM");
      // No-arg lookup: the previous version passed (Class<?>) null, which asks
      // for a method with a single null parameter type — that can never match,
      // so the reflective path always fell through to the catch block.
      Method maxDirectMemory = vmClass.getDeclaredMethod("maxDirectMemory");
      Object result = maxDirectMemory.invoke(null);
      if (result instanceof Long) {
        return (Long) result;
      }
    } catch (Exception e) {
      LOG.info("Unable to get maxDirectMemory from VM: " +
          e.getClass().getSimpleName() + ": " + e.getMessage());
    }
    // default according to VM.maxDirectMemory()
    return Runtime.getRuntime().maxMemory();
  }
}
| 9,987 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/PlatformDetect.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.tools;
import java.util.Locale;
/**
* Utilities for platform & operating system detection
*/
public class PlatformDetect {

  /**
   * Detects whether the current JVM is running under Microsoft Windows.
   *
   * @return true if and only if the "os.name" system property identifies a
   *     Windows system.
   */
  public static boolean isWindows() {
    final String osName = System.getProperty("os.name").toLowerCase(Locale.ENGLISH);
    return osName.contains("win");
  }
}
| 9,988 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/TimestampRoundDownUtil.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.tools;
import java.util.Calendar;
import java.util.TimeZone;
import com.google.common.base.Preconditions;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class TimestampRoundDownUtil {

  /**
   * Rounds a timestamp down to a multiple of the given number of seconds.
   *
   * @param timestamp - The time stamp to be rounded down.
   * For parsing the <tt>timestamp</tt> the system default timezone will be used.
   * @param roundDownSec - The <tt>timestamp</tt> is rounded down to the largest
   * multiple of <tt>roundDownSec</tt> seconds
   * less than or equal to <tt>timestamp.</tt> Should be between 0 and 60.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownSec</tt> is out of range
   * or <tt>timestamp</tt> is not positive. (NOTE(review): the signature
   * declares IllegalStateException, but the Preconditions checks actually
   * throw IllegalArgumentException.)
   * @see TimestampRoundDownUtil#roundDownTimeStampSeconds(long, int, TimeZone)
   */
  public static long roundDownTimeStampSeconds(long timestamp,
      int roundDownSec) throws IllegalStateException {
    return roundDownTimeStampSeconds(timestamp, roundDownSec, null);
  }

  /**
   * Rounds a timestamp down to a multiple of the given number of seconds.
   *
   * @param timestamp - The time stamp to be rounded down.
   * @param roundDownSec - The <tt>timestamp</tt> is rounded down to the largest
   * multiple of <tt>roundDownSec</tt> seconds
   * less than or equal to <tt>timestamp.</tt> Should be between 0 and 60.
   * @param timeZone - The timezone to use for parsing the <tt>timestamp</tt>.
   * If <tt>null</tt> the system default will be used.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownSec</tt> is out of range
   * or <tt>timestamp</tt> is not positive (thrown by Preconditions despite
   * the declared IllegalStateException).
   */
  public static long roundDownTimeStampSeconds(long timestamp, int roundDownSec, TimeZone timeZone)
      throws IllegalStateException {
    Preconditions.checkArgument(roundDownSec > 0 && roundDownSec <= 60,
        "RoundDownSec must be > 0 and <=60");
    Calendar cal = roundDownField(timestamp, Calendar.SECOND, roundDownSec, timeZone);
    // Zero out everything below the rounded field.
    cal.set(Calendar.MILLISECOND, 0);
    return cal.getTimeInMillis();
  }

  /**
   * Rounds a timestamp down to a multiple of the given number of minutes.
   *
   * @param timestamp - The time stamp to be rounded down.
   * For parsing the <tt>timestamp</tt> the system default timezone will be used.
   * @param roundDownMins - The <tt>timestamp</tt> is rounded down to the
   * largest multiple of <tt>roundDownMins</tt> minutes less than
   * or equal to <tt>timestamp.</tt> Should be between 0 and 60.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownMins</tt> is out of range
   * or <tt>timestamp</tt> is not positive (thrown by Preconditions despite
   * the declared IllegalStateException).
   * @see TimestampRoundDownUtil#roundDownTimeStampMinutes(long, int, TimeZone)
   */
  public static long roundDownTimeStampMinutes(long timestamp,
      int roundDownMins) throws IllegalStateException {
    return roundDownTimeStampMinutes(timestamp, roundDownMins, null);
  }

  /**
   * Rounds a timestamp down to a multiple of the given number of minutes.
   *
   * @param timestamp - The time stamp to be rounded down.
   * @param roundDownMins - The <tt>timestamp</tt> is rounded down to the
   * largest multiple of <tt>roundDownMins</tt> minutes less than
   * or equal to <tt>timestamp.</tt> Should be between 0 and 60.
   * @param timeZone - The timezone to use for parsing the <tt>timestamp</tt>.
   * If <tt>null</tt> the system default will be used.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownMins</tt> is out of range
   * or <tt>timestamp</tt> is not positive (thrown by Preconditions despite
   * the declared IllegalStateException).
   */
  public static long roundDownTimeStampMinutes(long timestamp, int roundDownMins, TimeZone timeZone)
      throws IllegalStateException {
    Preconditions.checkArgument(roundDownMins > 0 && roundDownMins <= 60,
        "RoundDown must be > 0 and <=60");
    Calendar cal = roundDownField(timestamp, Calendar.MINUTE, roundDownMins, timeZone);
    // Zero out everything below the rounded field.
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    return cal.getTimeInMillis();
  }

  /**
   * Rounds a timestamp down to a multiple of the given number of hours.
   *
   * @param timestamp - The time stamp to be rounded down.
   * For parsing the <tt>timestamp</tt> the system default timezone will be used.
   * @param roundDownHours - The <tt>timestamp</tt> is rounded down to the
   * largest multiple of <tt>roundDownHours</tt> hours less than
   * or equal to <tt>timestamp.</tt> Should be between 0 and 24.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownHours</tt> is out of range
   * or <tt>timestamp</tt> is not positive (thrown by Preconditions despite
   * the declared IllegalStateException).
   * @see TimestampRoundDownUtil#roundDownTimeStampHours(long, int, TimeZone)
   */
  public static long roundDownTimeStampHours(long timestamp,
      int roundDownHours) throws IllegalStateException {
    return roundDownTimeStampHours(timestamp, roundDownHours, null);
  }

  /**
   * Rounds a timestamp down to a multiple of the given number of hours.
   *
   * @param timestamp - The time stamp to be rounded down.
   * @param roundDownHours - The <tt>timestamp</tt> is rounded down to the
   * largest multiple of <tt>roundDownHours</tt> hours less than
   * or equal to <tt>timestamp.</tt> Should be between 0 and 24.
   * @param timeZone - The timezone to use for parsing the <tt>timestamp</tt>.
   * If <tt>null</tt> the system default will be used.
   * @return - Rounded down timestamp
   * @throws IllegalArgumentException if <tt>roundDownHours</tt> is out of range
   * or <tt>timestamp</tt> is not positive (thrown by Preconditions despite
   * the declared IllegalStateException).
   */
  public static long roundDownTimeStampHours(long timestamp, int roundDownHours, TimeZone timeZone)
      throws IllegalStateException {
    Preconditions.checkArgument(roundDownHours > 0 && roundDownHours <= 24,
        "RoundDown must be > 0 and <=24");
    Calendar cal = roundDownField(timestamp, Calendar.HOUR_OF_DAY, roundDownHours, timeZone);
    // Zero out everything below the rounded field.
    cal.set(Calendar.MINUTE, 0);
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    return cal.getTimeInMillis();
  }

  /**
   * Sets the given Calendar field to the largest multiple of
   * <tt>roundDown</tt> not exceeding its current value, interpreting
   * <tt>timestamp</tt> in the given (or default) time zone. Lower-order
   * fields are left for the callers to clear.
   */
  private static Calendar roundDownField(long timestamp, int field, int roundDown,
      TimeZone timeZone) {
    Preconditions.checkArgument(timestamp > 0, "Timestamp must be positive");
    Calendar cal = (timeZone == null) ? Calendar.getInstance() : Calendar.getInstance(timeZone);
    cal.setTimeInMillis(timestamp);
    int fieldVal = cal.get(field);
    int remainder = (fieldVal % roundDown);
    cal.set(field, fieldVal - remainder);
    return cal;
  }
}
| 9,989 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/HTTPServerConstraintUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.tools;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
// Most of the code in this class is copied from HBASE-10473
/**
* Utility class to define constraints on Jetty HTTP servers
*/
public class HTTPServerConstraintUtil {

  private HTTPServerConstraintUtil() {
    // static utility class; never instantiated
  }

  /**
   * Builds the security constraints applied to Flume's embedded Jetty HTTP
   * servers: the TRACE and OPTIONS methods are rejected on every path by an
   * authenticate-only constraint with no roles.
   *
   * @return a ConstraintSecurityHandler ready to be installed on a Jetty
   *     servlet context
   */
  public static ConstraintSecurityHandler enforceConstraints() {
    Constraint denyConstraint = new Constraint();
    denyConstraint.setAuthenticate(true);

    String[] blockedMethods = {"TRACE", "OPTIONS"};
    ConstraintMapping[] mappings = new ConstraintMapping[blockedMethods.length];
    for (int i = 0; i < blockedMethods.length; i++) {
      ConstraintMapping mapping = new ConstraintMapping();
      mapping.setConstraint(denyConstraint);
      mapping.setMethod(blockedMethods[i]);
      mapping.setPathSpec("/*");
      mappings[i] = mapping;
    }

    ConstraintSecurityHandler handler = new ConstraintSecurityHandler();
    handler.setConstraintMappings(mappings);
    return handler;
  }
}
| 9,990 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/FlumeBeanConfigurator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.tools;
import java.lang.reflect.Method;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.flume.Context;
import org.apache.flume.conf.ConfigurationException;
/**
* Utility class to enable runtime configuration of Java objects using provided
* Flume context objects (or equivalent). The methods use reflection to identify
* Fields on the configurable object and then looks for matching properties in
* the provided properties bundle.
*
*/
/**
 * Utility class to enable runtime configuration of Java objects using provided
 * Flume context objects (or equivalent). The methods use reflection to find
 * single-argument setter methods on the configurable object and then look for
 * matching properties in the provided properties bundle.
 */
public class FlumeBeanConfigurator {

  /**
   * Utility method that will set properties on a Java bean (<code>Object configurable</code>)
   * based on the provided <code>properties</code> bundle.
   * Only single-argument <code>setXxx</code> methods are considered; the
   * property key is the setter name without the "set" prefix, first letter
   * lower-cased.
   * If there is a type issue, or an access problem
   * then a <code>ConfigurationException</code> will be thrown.
   *
   * @param configurable Any properties must be modifiable via setter methods.
   * @param properties map of property name to string value
   * @throws ConfigurationException on an unsupported setter parameter type or
   *     any parse/reflection failure
   */
  public static void setConfigurationFields(Object configurable, Map<String, String> properties)
      throws ConfigurationException {
    Class<?> clazz = configurable.getClass();
    for (Method method : clazz.getMethods()) {
      String methodName = method.getName();
      if (methodName.startsWith("set") && method.getParameterTypes().length == 1) {
        String fieldName = methodName.substring(3);
        String value = properties.get(StringUtils.uncapitalize(fieldName));
        if (value != null) {
          Class<?> fieldType = method.getParameterTypes()[0];
          try {
            if (fieldType.equals(String.class)) {
              method.invoke(configurable, value);
            } else if (fieldType.equals(boolean.class)) {
              method.invoke(configurable, Boolean.parseBoolean(value));
            } else if (fieldType.equals(short.class)) {
              method.invoke(configurable, Short.parseShort(value));
            } else if (fieldType.equals(long.class)) {
              method.invoke(configurable, Long.parseLong(value));
            } else if (fieldType.equals(float.class)) {
              method.invoke(configurable, Float.parseFloat(value));
            } else if (fieldType.equals(int.class)) {
              method.invoke(configurable, Integer.parseInt(value));
            } else if (fieldType.equals(double.class)) {
              method.invoke(configurable, Double.parseDouble(value));
            } else if (fieldType.equals(char.class)) {
              method.invoke(configurable, value.charAt(0));
            } else if (fieldType.equals(byte.class)) {
              method.invoke(configurable, Byte.parseByte(value));
            } else if (fieldType.equals(String[].class)) {
              // Whitespace-separated list; cast to Object so the array is
              // passed as a single varargs argument.
              method.invoke(configurable, (Object) value.split("\\s+"));
            } else {
              throw new ConfigurationException(
                  "Unable to configure component due to an unsupported type on field: "
                      + fieldName);
            }
          } catch (ConfigurationException ex) {
            // Already carries a precise message; rethrow untouched rather
            // than wrapping (replaces the old catch-all + instanceof check).
            throw ex;
          } catch (Exception ex) {
            // Parse errors and reflective access failures end up here.
            throw new ConfigurationException("Unable to configure component: ", ex);
          }
        }
      }
    }
  }

  /**
   * Utility method that will set properties on a Java bean (<code>Object configurable</code>)
   * based on the provided <code>Context</code>.
   * N.B. This method will take the Flume Context and look for sub-properties named after the
   * class name of the <code>configurable</code> object.
   * If there is a type issue, or an access problem
   * then a <code>ConfigurationException</code> will be thrown.
   *
   * @param configurable Any properties must be modifiable via setter methods.
   * @param context the Flume context holding the configuration
   * @throws ConfigurationException on an unsupported type or reflection failure
   */
  public static void setConfigurationFields(Object configurable, Context context)
      throws ConfigurationException {
    Class<?> clazz = configurable.getClass();
    Map<String, String> properties = context.getSubProperties(clazz.getSimpleName() + ".");
    setConfigurationFields(configurable, properties);
  }

  /**
   * Utility method that will set properties on a Java bean (<code>Object configurable</code>)
   * based on the provided <code>Context</code>.
   * N.B. This method will take the Flume Context and look for sub-properties named after the
   * <code>subPropertiesPrefix</code> String.
   * If there is a type issue, or an access problem
   * then a <code>ConfigurationException</code> will be thrown.
   *
   * @param configurable Object: Any properties must be modifiable via setter methods.
   * @param context org.apache.flume.Context
   * @param subPropertiesPrefix prefix selecting the relevant sub-properties
   * @throws ConfigurationException on an unsupported type or reflection failure
   */
  public static void setConfigurationFields(Object configurable, Context context,
      String subPropertiesPrefix) throws ConfigurationException {
    Map<String, String> properties = context.getSubProperties(subPropertiesPrefix);
    setConfigurationFields(configurable, properties);
  }
}
| 9,991 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/GetJavaProperty.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.tools;
/**
* A generic way for querying Java properties.
*/
public class GetJavaProperty {

  /**
   * Prints one "name=value" line per requested system property, or one line
   * for every system property when no arguments are given. Properties with
   * no value print an empty string after the '='.
   */
  public static void main(String[] args) {
    if (args.length == 0) {
      for (Object prop : System.getProperties().keySet()) {
        printProperty((String) prop);
      }
    } else {
      for (String prop : args) {
        printProperty(prop);
      }
    }
  }

  // Emits a single name=value line; a missing property prints as empty.
  private static void printProperty(String name) {
    System.out.println(name + "=" + System.getProperty(name, ""));
  }
}
| 9,992 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/tools/VersionInfo.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.tools;
import org.apache.flume.VersionAnnotation;
/*
* This class provides version info of Flume NG
*/
/*
 * Exposes the Flume NG build metadata (version, SCM revision, build
 * user/date, source checksum) recorded in the VersionAnnotation that is
 * stamped onto the package at build time.
 */
public class VersionInfo {

  private static Package myPackage;
  private static VersionAnnotation version;

  static {
    // The annotation may be absent (e.g. in builds that skip the stamping
    // step), so every accessor below falls back to "Unknown".
    myPackage = VersionAnnotation.class.getPackage();
    version = myPackage.getAnnotation(VersionAnnotation.class);
  }

  /**
   * Get the meta-data for the Flume package.
   * @return the Package object carrying the version annotation
   */
  static Package getPackage() {
    return myPackage;
  }

  /**
   * Get the Flume version.
   * @return the Flume version string, eg. "1.1"
   */
  public static String getVersion() {
    if (version == null) {
      return "Unknown";
    }
    return version.version();
  }

  /**
   * Get the subversion revision number for the root directory.
   * @return the revision number, eg. "100755", or "Unknown" when absent/empty
   */
  public static String getRevision() {
    if (version == null || version.revision() == null
        || version.revision().isEmpty()) {
      return "Unknown";
    }
    return version.revision();
  }

  /**
   * Get the branch on which this originated.
   * @return The branch name, e.g. "trunk" or "branches/branch-1.1"
   */
  public static String getBranch() {
    if (version == null) {
      return "Unknown";
    }
    return version.branch();
  }

  /**
   * The date that Flume was compiled.
   * @return the compilation date in unix date format
   */
  public static String getDate() {
    if (version == null) {
      return "Unknown";
    }
    return version.date();
  }

  /**
   * The user that compiled Flume.
   * @return the username of the user
   */
  public static String getUser() {
    if (version == null) {
      return "Unknown";
    }
    return version.user();
  }

  /**
   * Get the subversion URL for the root Flume directory.
   * @return the repository URL, or "Unknown" when absent
   */
  public static String getUrl() {
    if (version == null) {
      return "Unknown";
    }
    return version.url();
  }

  /**
   * Get the checksum of the source files from which Flume was built.
   * @return the source checksum, or "Unknown" when absent
   */
  public static String getSrcChecksum() {
    if (version == null) {
      return "Unknown";
    }
    return version.srcChecksum();
  }

  /**
   * Returns the build version info which includes version,
   * revision, user, date and source checksum.
   */
  public static String getBuildVersion() {
    return new StringBuilder()
        .append(getVersion())
        .append(" from ").append(getRevision())
        .append(" by ").append(getUser())
        .append(" on ").append(getDate())
        .append(" source checksum ").append(getSrcChecksum())
        .toString();
  }

  public static void main(String[] args) {
    System.out.println("Flume " + getVersion());
    System.out.println("Source code repository: "
        + "https://git.apache.org/repos/asf/flume.git");
    System.out.println("Revision: " + getRevision());
    System.out.println("Compiled by " + getUser() + " on " + getDate());
    System.out.println("From source with checksum " + getSrcChecksum());
  }
}
| 9,993 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/DurablePositionTracker.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.serialization;
import java.io.File;
import java.io.IOException;
import com.google.common.base.Preconditions;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.apache.flume.tools.PlatformDetect;
/**
* <p/>Class that stores object state in an avro container file.
* The file is only ever appended to.
* At construction time, the object reads data from the end of the file and
* caches that data for use by a client application. After construction, reads
* never go to disk.
* Writes always flush to disk.
*
* <p/>Note: This class is not thread-safe.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class DurablePositionTracker implements PositionTracker {
private final File trackerFile;
private final DataFileWriter<TransferStateFileMeta> writer;
private final DataFileReader<TransferStateFileMeta> reader;
private final TransferStateFileMeta metaCache;
private String target;
private boolean isOpen;
/**
* If the file exists at startup, then read it, roll it, and open a new one.
* We go through this to avoid issues with partial reads at the end of the
* file from a previous crash. If we append to a bad record,
* our writes may never be visible.
* @param trackerFile
* @param target
* @return DurablePositionTracker
* @throws IOException
*/
public static DurablePositionTracker getInstance(File trackerFile,
String target) throws IOException {
if (!trackerFile.exists()) {
return new DurablePositionTracker(trackerFile, target);
}
// exists
DurablePositionTracker oldTracker =
new DurablePositionTracker(trackerFile, target);
String existingTarget = oldTracker.getTarget();
long targetPosition = oldTracker.getPosition();
oldTracker.close();
File tmpMeta = File.createTempFile(trackerFile.getName(), ".tmp",
trackerFile.getParentFile());
tmpMeta.delete();
DurablePositionTracker tmpTracker =
new DurablePositionTracker(tmpMeta, existingTarget);
tmpTracker.storePosition(targetPosition);
tmpTracker.close();
// On windows, things get messy with renames...
// FIXME: This is not atomic. Consider implementing a recovery procedure
// so that if it does not exist at startup, check for a rolled version
// before creating a new file from scratch.
if (PlatformDetect.isWindows()) {
if (!trackerFile.delete()) {
throw new IOException("Unable to delete existing meta file " +
trackerFile);
}
}
// rename tmp file to meta
if (!tmpMeta.renameTo(trackerFile)) {
throw new IOException("Unable to rename " + tmpMeta + " to " +
trackerFile);
}
// return a new known-good version that is open for append
DurablePositionTracker newTracker =
new DurablePositionTracker(trackerFile, existingTarget);
return newTracker;
}
/**
 * If the tracker file exists, open it for append and adopt the target
 * recorded in the file's own metadata; otherwise create a new tracker file
 * bound to the supplied target.
 * @param trackerFile Avro container file used to persist positions;
 *        created if absent
 * @param target identifier of the tracked file; ignored in favor of the
 *        stored "file" metadata when trackerFile already exists
 * @throws IOException if the Avro container cannot be opened or created
 */
DurablePositionTracker(File trackerFile, String target)
    throws IOException {
  Preconditions.checkNotNull(trackerFile, "trackerFile must not be null");
  Preconditions.checkNotNull(target, "target must not be null");
  this.trackerFile = trackerFile;
  this.target = target;
  DatumWriter<TransferStateFileMeta> dout =
      new SpecificDatumWriter<TransferStateFileMeta>(
          TransferStateFileMeta.SCHEMA$);
  DatumReader<TransferStateFileMeta> din =
      new SpecificDatumReader<TransferStateFileMeta>(
          TransferStateFileMeta.SCHEMA$);
  writer = new DataFileWriter<TransferStateFileMeta>(dout);
  if (trackerFile.exists()) {
    // Open for append. The authoritative target is the one stamped into the
    // existing file's metadata, not the caller-supplied value.
    writer.appendTo(trackerFile);
    reader = new DataFileReader<TransferStateFileMeta>(trackerFile, din);
    this.target = reader.getMetaString("file");
  } else {
    // Create the file, recording the target in the container metadata.
    // (this.target was already set to the supplied target above.)
    writer.setMeta("file", target);
    writer.create(TransferStateFileMeta.SCHEMA$, trackerFile);
    reader = new DataFileReader<TransferStateFileMeta>(trackerFile, din);
  }
  // Initialize the cached position to offset 0, then replay any records
  // already stored in the file to recover the last persisted position.
  metaCache = TransferStateFileMeta.newBuilder().setOffset(0L).build();
  initReader();
  isOpen = true;
}
/**
 * Position the reader near the end of the tracker file and replay forward,
 * leaving metaCache holding the last record that was written.
 */
private void initReader() throws IOException {
  // Seek to 256 bytes before the end (clamped to the start of the file)
  // and let Avro locate the next sync marker from there.
  reader.sync(Math.max(0L, trackerFile.length() - 256L));
  while (reader.hasNext()) {
    reader.next(metaCache);
  }
}
@Override
public synchronized void storePosition(long position) throws IOException {
  // Update the in-memory cache, append the record, then sync + flush so the
  // new position is durable on disk before this call returns.
  metaCache.setOffset(position);
  writer.append(metaCache);
  writer.sync();
  writer.flush();
}
@Override
public synchronized long getPosition() {
  // Served from the in-memory cache, which always reflects the most
  // recently stored (or recovered) offset.
  return metaCache.getOffset();
}
@Override
public String getTarget() {
  // Identifier of the file whose position this tracker records.
  return target;
}
@Override
public void close() throws IOException {
  // Idempotent: calls after the first close are no-ops.
  if (!isOpen) {
    return;
  }
  writer.close();
  reader.close();
  isOpen = false;
}
} | 9,994 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/Seekable.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.serialization;
import java.io.IOException;
public interface Seekable {

  /**
   * Moves to the given absolute byte position in the underlying data.
   * @param position absolute offset from the start of the data
   * @throws IOException if the seek fails
   */
  void seek(long position) throws IOException;

  /**
   * Returns the current absolute byte position.
   * @throws IOException if the position cannot be determined
   */
  long tell() throws IOException;
}
| 9,995 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/Resettable.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.serialization;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import java.io.IOException;
/**
* Defines an API for objects that can be mark()ed and reset() on arbitrary
* boundaries. Any implementation that has a limited buffer for the mark(),
* like {@link java.io.InputStream}, must not implement Resettable.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface Resettable {

  /**
   * Indicate that the current position should be returned to in the case of
   * {@link #reset()} being called.
   * Per the interface contract above, marks must remain valid regardless of
   * how much data is subsequently read (no read-ahead limit, unlike
   * {@link java.io.InputStream#mark(int)}).
   * @throws IOException
   */
  void mark() throws IOException;

  /**
   * Return to the last marked position, or the beginning of the stream if
   * {@link #mark()} has never been called.
   * @throws IOException
   */
  void reset() throws IOException;
}
| 9,996 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/AvroEventDeserializer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flume.serialization;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.apache.avro.Schema;
import org.apache.avro.SchemaNormalization;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.commons.codec.binary.Hex;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.flume.event.EventBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Locale;
/**
 * A deserializer that parses Avro container files, generating one Flume event
 * per record in the Avro file, and storing binary avro-encoded records in
 * the Flume event body.
 */
public class AvroEventDeserializer implements EventDeserializer {

  private static final Logger logger = LoggerFactory.getLogger(AvroEventDeserializer.class);

  private final AvroSchemaType schemaType;
  private final ResettableInputStream ris;

  private Schema schema;
  private byte[] schemaHash;
  private String schemaHashString;

  private DataFileReader<GenericRecord> fileReader;
  // Parameterized with GenericRecord (was a raw GenericDatumWriter): removes
  // unchecked warnings; this writer only ever handles GenericRecord.
  private GenericDatumWriter<GenericRecord> datumWriter;
  private GenericRecord record;
  private ByteArrayOutputStream out;
  private BinaryEncoder encoder;

  /** How the schema is advertised in each generated event's headers. */
  @VisibleForTesting
  public static enum AvroSchemaType {
    HASH,    // hex-encoded 64-bit CRC fingerprint of the schema
    LITERAL; // full schema JSON (potentially large; see constructor warning)
  }

  public static final String CONFIG_SCHEMA_TYPE_KEY = "schemaType";
  public static final String AVRO_SCHEMA_HEADER_HASH
      = "flume.avro.schema.hash";
  public static final String AVRO_SCHEMA_HEADER_LITERAL
      = "flume.avro.schema.literal";

  private AvroEventDeserializer(Context context, ResettableInputStream ris) {
    this.ris = ris;

    schemaType = AvroSchemaType.valueOf(
        context.getString(CONFIG_SCHEMA_TYPE_KEY,
        AvroSchemaType.HASH.toString()).toUpperCase(Locale.ENGLISH));
    if (schemaType == AvroSchemaType.LITERAL) {
      logger.warn(CONFIG_SCHEMA_TYPE_KEY + " set to " +
          AvroSchemaType.LITERAL.toString() + ", so storing full Avro " +
          "schema in the header of each event, which may be inefficient. " +
          "Consider using the hash of the schema " +
          "instead of the literal schema.");
    }
  }

  /**
   * Opens the Avro container (header is always at offset 0), resumes reading
   * at the stream's prior position, and caches the schema plus its
   * CRC-64-AVRO fingerprint.
   */
  private void initialize() throws IOException, NoSuchAlgorithmException {
    SeekableResettableInputBridge in = new SeekableResettableInputBridge(ris);
    long pos = in.tell();
    in.seek(0L); // DataFileReader must read the container header at offset 0
    fileReader = new DataFileReader<GenericRecord>(in,
        new GenericDatumReader<GenericRecord>());
    fileReader.sync(pos); // resume from where the stream was positioned

    schema = fileReader.getSchema();
    datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    out = new ByteArrayOutputStream();
    encoder = EncoderFactory.get().binaryEncoder(out, encoder);

    schemaHash = SchemaNormalization.parsingFingerprint("CRC-64-AVRO", schema);
    schemaHashString = Hex.encodeHexString(schemaHash);
  }

  /**
   * Returns the next record as a Flume event whose body is the
   * binary-encoded record, or null when the container is exhausted. The
   * schema hash (or literal, per configuration) is placed in the headers.
   */
  @Override
  public Event readEvent() throws IOException {
    if (fileReader.hasNext()) {
      record = fileReader.next(record); // reuse the record instance
      out.reset();
      datumWriter.write(record, encoder);
      encoder.flush();

      // annotate header with 64-bit schema CRC hash in hex
      Event event = EventBuilder.withBody(out.toByteArray());
      if (schemaType == AvroSchemaType.HASH) {
        event.getHeaders().put(AVRO_SCHEMA_HEADER_HASH, schemaHashString);
      } else {
        event.getHeaders().put(AVRO_SCHEMA_HEADER_LITERAL, schema.toString());
      }
      return event;
    }
    return null;
  }

  /**
   * Reads up to numEvents events; returns fewer (possibly an empty list)
   * if the container runs out of records first.
   */
  @Override
  public List<Event> readEvents(int numEvents) throws IOException {
    List<Event> events = Lists.newArrayList();
    for (int i = 0; i < numEvents && fileReader.hasNext(); i++) {
      Event event = readEvent();
      if (event != null) {
        events.add(event);
      }
    }
    return events;
  }

  /**
   * Remembers the most recent sync point so {@link #reset()} can return to
   * it. Backs up by SYNC_SIZE (clamped to 0) so the sync marker itself is
   * re-read on reset.
   */
  @Override
  public void mark() throws IOException {
    long pos = fileReader.previousSync() - DataFileConstants.SYNC_SIZE;
    if (pos < 0) pos = 0;
    ((RemoteMarkable) ris).markPosition(pos);
  }

  @Override
  public void reset() throws IOException {
    long pos = ((RemoteMarkable) ris).getMarkPosition();
    fileReader.sync(pos);
  }

  @Override
  public void close() throws IOException {
    ris.close();
  }

  public static class Builder implements EventDeserializer.Builder {

    @Override
    public EventDeserializer build(Context context, ResettableInputStream in) {
      // mark()/reset() above cast to RemoteMarkable, so fail fast here.
      if (!(in instanceof RemoteMarkable)) {
        throw new IllegalArgumentException("Cannot use this deserializer " +
            "without a RemoteMarkable input stream");
      }
      AvroEventDeserializer deserializer
          = new AvroEventDeserializer(context, in);
      try {
        deserializer.initialize();
      } catch (Exception e) {
        throw new FlumeException("Cannot instantiate deserializer", e);
      }
      return deserializer;
    }
  }

  /** Adapts a Flume ResettableInputStream to Avro's SeekableInput. */
  private static class SeekableResettableInputBridge implements SeekableInput {

    ResettableInputStream ris;

    public SeekableResettableInputBridge(ResettableInputStream ris) {
      this.ris = ris;
    }

    @Override
    public void seek(long p) throws IOException {
      ris.seek(p);
    }

    @Override
    public long tell() throws IOException {
      return ris.tell();
    }

    @Override
    public long length() throws IOException {
      if (ris instanceof LengthMeasurable) {
        return ((LengthMeasurable) ris).length();
      } else {
        // FIXME: Avro doesn't seem to complain about this,
        // but probably not a great idea...
        return Long.MAX_VALUE;
      }
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      return ris.read(b, off, len);
    }

    @Override
    public void close() throws IOException {
      ris.close();
    }
  }
}
| 9,997 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/EventSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.serialization;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
/**
* <p>
* This interface provides callbacks for important serialization-related events.
* This allows generic implementations of serializers to be plugged in, allowing
* implementations of this interface to do arbitrary header and message
* formatting, as well as file and message framing.
* </p>
*
* <p>
* The following general semantics should be used by drivers that call this
* interface:
*
* <pre>
* // open file (for example... or otherwise create some new stream)
* OutputStream out = new FileOutputStream(file); // open for create
*
* // build serializer using builder interface
* EventSerializer serializer = builder.build(ctx, out);
*
* // hook to write header (since in this case we opened the file for create)
* serializer.afterCreate();
*
* // write one or more events
* serializer.write(event1);
* serializer.write(event2);
* serializer.write(event3);
*
* // periodically flush any internal buffers from EventSerializer.write()
* serializer.flush();
*
* // The driver responsible for specifying and implementing its durability
* // semantics (if any) for flushing or syncing the underlying stream.
* out.flush();
*
* // when closing the file...
*
* // make sure we got all buffered events flushed from the serializer
* serializer.flush();
*
* // write trailer before closing file
* serializer.beforeClose();
*
* // Driver is responsible for flushing the underlying stream, if needed,
* // before closing it.
* out.flush();
* out.close();
* </pre>
*
* </p>
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface EventSerializer {

  /**
   * {@link Context} prefix
   */
  public static String CTX_PREFIX = "serializer.";

  /**
   * Hook to write a header after file is opened for the first time.
   * @throws IOException if the header cannot be written to the stream
   */
  public void afterCreate() throws IOException;

  /**
   * Hook to handle any framing needed when file is re-opened (for write).<br/>
   * Could have been named {@code afterOpenForAppend()}.
   * @throws IOException if the framing cannot be written to the stream
   */
  public void afterReopen() throws IOException;

  /**
   * Serialize and write the given event.
   * @param event Event to write to the underlying stream.
   * @throws IOException
   */
  public void write(Event event) throws IOException;

  /**
   * Hook to flush any internal write buffers to the underlying stream.
   * It is NOT necessary for an implementation to then call flush() / sync()
   * on the underlying stream itself, since those semantics would be provided
   * by the driver that calls this API.
   * @throws IOException if the buffered data cannot be written
   */
  public void flush() throws IOException;

  /**
   * Hook to write a trailer before the stream is closed.
   * Implementations must not buffer data in this call since
   * EventSerializer.flush() is not guaranteed to be called after beforeClose().
   * @throws IOException if the trailer cannot be written to the stream
   */
  public void beforeClose() throws IOException;

  /**
   * Specify whether this output format supports reopening files for append.
   * For example, this method should return {@code false} if
   * {@link #beforeClose()} writes a trailer that "finalizes" the file
   * (this type of behavior is file format-specific).<br/>
   * Could have been named {@code supportsAppend()}.
   */
  public boolean supportsReopen();

  /**
   * Knows how to construct this event serializer.<br/>
   * <b>Note: Implementations MUST provide a public a no-arg constructor.</b>
   */
  public interface Builder {
    public EventSerializer build(Context context, OutputStream out);
  }

}
| 9,998 |
0 | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume | Create_ds/flume/flume-ng-core/src/main/java/org/apache/flume/serialization/EventDeserializerFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.serialization;
import com.google.common.base.Preconditions;
import org.apache.flume.Context;
import org.apache.flume.FlumeException;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Locale;
@InterfaceAudience.Private
@InterfaceStability.Stable
public class EventDeserializerFactory {

  private static final Logger logger =
      LoggerFactory.getLogger(EventDeserializerFactory.class);

  /**
   * Builds an {@link EventDeserializer} for the given type.
   *
   * @param deserializerType either a well-known alias resolvable via
   *        {@link EventDeserializerType} (case-insensitive) or the
   *        fully-qualified class name of a custom
   *        {@link EventDeserializer.Builder} implementation
   * @param context configuration passed through to the builder
   * @param in input stream the deserializer will read from
   * @return the deserializer produced by the resolved builder
   * @throws FlumeException if the builder class cannot be found, does not
   *         implement EventDeserializer.Builder, or cannot be instantiated
   */
  public static EventDeserializer getInstance(
      String deserializerType, Context context, ResettableInputStream in) {

    Preconditions.checkNotNull(deserializerType,
        "serializer type must not be null");

    // try to find builder class in enum of known output serializers
    EventDeserializerType type;
    try {
      type = EventDeserializerType.valueOf(deserializerType.toUpperCase(Locale.ENGLISH));
    } catch (IllegalArgumentException e) {
      logger.debug("Not in enum, loading builder class: {}", deserializerType);
      type = EventDeserializerType.OTHER;
    }
    Class<? extends EventDeserializer.Builder> builderClass =
        type.getBuilderClass();

    // handle the case where they have specified their own builder in the config
    if (builderClass == null) {
      try {
        // Class.forName throws ClassNotFoundException rather than returning
        // null, so no null check is needed on the result.
        Class<?> c = Class.forName(deserializerType);
        if (EventDeserializer.Builder.class.isAssignableFrom(c)) {
          // asSubclass() yields the typed Class without an unchecked cast
          builderClass = c.asSubclass(EventDeserializer.Builder.class);
        } else {
          String errMessage = "Unable to instantiate Builder from " +
              deserializerType + ": does not appear to implement " +
              EventDeserializer.Builder.class.getName();
          throw new FlumeException(errMessage);
        }
      } catch (ClassNotFoundException ex) {
        logger.error("Class not found: " + deserializerType, ex);
        throw new FlumeException(ex);
      }
    }

    // build the builder
    EventDeserializer.Builder builder;
    try {
      builder = builderClass.newInstance();
    } catch (InstantiationException ex) {
      String errMessage = "Cannot instantiate builder: " + deserializerType;
      logger.error(errMessage, ex);
      throw new FlumeException(errMessage, ex);
    } catch (IllegalAccessException ex) {
      String errMessage = "Cannot instantiate builder: " + deserializerType;
      logger.error(errMessage, ex);
      throw new FlumeException(errMessage, ex);
    }

    return builder.build(context, in);
  }
}
| 9,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.