index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack/source/CamelSourceSlackITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.slack.source;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/*
This test is disabled by default because requires manual steps.
You need to set 3 system properties to run this test:
-Dit.test.slack.enable=true to enable the test
-Dit.test.slack.channel=#channel to inform the channel to send the message to
-Dit.test.slack.token=<token> The token is a string that starts with xoxb
Preparing for the test. You need create a bot and give it the following scopes:
channel:history, channels:read and incoming-webhook. The settings for these,
along with the token can be found on the page OAuth & Permissions.
*/
@EnabledIfSystemProperty(named = "it.test.slack.enable", matches = "true")
public class CamelSourceSlackITCase extends AbstractKafkaTest {

    private static final Logger LOG = LoggerFactory.getLogger(CamelSourceSlackITCase.class);

    // Target channel and bot token are supplied via system properties (see the
    // header notes above the class for the required -D flags and bot scopes).
    private String slackChannel = System.getProperty("it.test.slack.channel");
    private String token = System.getProperty("it.test.slack.token");

    // Flipped by the consumer callback once any record has been observed.
    private boolean received;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-slack-kafka-connector"};
    }

    /**
     * Consumer callback: records that a message arrived and fails the test when
     * the payload is not a String.
     */
    private <T> boolean checkRecord(ConsumerRecord<String, T> consumerRecord) {
        Object value = consumerRecord.value();
        LOG.debug("Received: {}", value);
        if (value instanceof String) {
            LOG.debug("Received text: {}", value);
        } else {
            fail(String.format("Unexpected message type: %s", value.getClass()));
        }
        received = true;
        // NOTE(review): presumably returning false tells KafkaClient.consume to
        // stop polling after this record — confirm against the client implementation.
        return false;
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() throws ExecutionException, InterruptedException {
        String kafkaTopic = getTopicForTest(this);

        // Configure the Slack source connector plus the transformer that turns
        // Slack payloads into plain text.
        ConnectorPropertyFactory factory = CamelSlackPropertyFactory
                .basic()
                .withKafkaTopic(kafkaTopic)
                .withChannel(slackChannel)
                .withMaxResults(1)
                .withToken(token)
                .withTransformsConfig("SlackTransforms")
                .withEntry("type", "org.apache.camel.kafkaconnector.slack.transformers.SlackTransforms")
                .end();
        factory.log();
        getKafkaConnectService().initializeConnectorBlocking(factory, 1);

        LOG.debug("Creating the consumer ...");
        KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
        kafkaClient.consume(kafkaTopic, this::checkRecord);
        LOG.debug("Created the consumer ...");

        assertTrue(received, "Didn't receive any messages");
    }
}
| 9,100 |
0 | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack/sink/CamelSinkSlackITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.slack.sink;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Integration tests for the Slack sink
 */
/* This test is disabled by default because requires manual verification on Slack end.
You need to set 3 system properties to run this test:
-Dit.test.slack.enable=true to enable the test
-Dit.test.slack.channel=#channel to inform the channel to send the message to
-Dit.test.slack.webhookUrl=https://host.slack.com/id/of/the/hook to pass the incoming hook URL to the test
*/
@EnabledIfSystemProperty(named = "it.test.slack.enable", matches = "true")
public class CamelSinkSlackITCase extends AbstractKafkaTest {

    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkSlackITCase.class);

    // Destination channel and incoming-webhook URL come from system properties
    // (see the header notes above the class for the required -D flags).
    private String slackChannel = System.getProperty("it.test.slack.channel");
    private String webhookUrl = System.getProperty("it.test.slack.webhookUrl");

    private String topicName;

    @BeforeEach
    void setUp() {
        topicName = getTopicForTest(this);
    }

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-slack-kafka-connector"};
    }

    /**
     * Starts the sink connector and publishes a single message to the test topic.
     * Actual delivery must be verified manually on the Slack side.
     */
    private void runTest(ConnectorPropertyFactory connectorPropertyFactory, String message) throws ExecutionException, InterruptedException {
        connectorPropertyFactory.log();
        getKafkaConnectService().initializeConnector(connectorPropertyFactory);

        KafkaClient<String, String> producer = new KafkaClient<>(getKafkaService().getBootstrapServers());
        producer.produce(topicName, message);
        LOG.debug("Created the consumer ... About to receive messages");
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() {
        try {
            // Channel configured via the dedicated path property.
            ConnectorPropertyFactory propertyFactory = CamelSlackPropertyFactory
                    .basic()
                    .withTopics(topicName)
                    .withChannel(slackChannel)
                    .withWebhookUrl(webhookUrl);
            runTest(propertyFactory, "Sink test message sent to Slack from testBasicSendReceive");
        } catch (Exception ex) {
            LOG.error("Slack test failed: {}", ex.getMessage(), ex);
            fail(ex.getMessage());
        }
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceiveWithUrl() {
        try {
            // Same scenario, but the endpoint is built as a raw URL string.
            ConnectorPropertyFactory propertyFactory = CamelSlackPropertyFactory
                    .basic()
                    .withTopics(topicName)
                    .withUrl(slackChannel)
                    .append("webhookUrl", webhookUrl)
                    .buildUrl();
            runTest(propertyFactory, "Sink test message sent to Slack from testBasicSendReceiveWithUrl");
        } catch (Exception ex) {
            LOG.error("Slack test failed: {}", ex.getMessage(), ex);
            fail(ex.getMessage());
        }
    }
}
| 9,101 |
0 | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack | Create_ds/camel-kafka-connector/tests/itests-slack/src/test/java/org/apache/camel/kafkaconnector/slack/sink/CamelSlackPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.slack.sink;
import org.apache.camel.kafkaconnector.common.EndpointUrlBuilder;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
* Creates the set of properties used by a Camel Slack Sink Connector
*/
/**
 * Builds the property set consumed by a Camel Slack sink connector instance.
 */
final class CamelSlackPropertyFactory extends SinkConnectorPropertyFactory<CamelSlackPropertyFactory> {

    private CamelSlackPropertyFactory() {
        // Instances are obtained via basic().
    }

    /** Baseline configuration: name, task count, connector class and String converters. */
    public static CamelSlackPropertyFactory basic() {
        return new CamelSlackPropertyFactory()
                .withName("CamelSlackSinkConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.slack.CamelSlackSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter");
    }

    public CamelSlackPropertyFactory withChannel(String channel) {
        return setProperty("camel.sink.path.channel", channel);
    }

    public CamelSlackPropertyFactory withWebhookUrl(String webhookUrl) {
        return setProperty("camel.sink.endpoint.webhookUrl", webhookUrl);
    }

    /** Alternative configuration path: build the whole endpoint as a raw URL. */
    public EndpointUrlBuilder<CamelSlackPropertyFactory> withUrl(String channel) {
        String endpointUrl = String.format("slack:%s", channel);
        return new EndpointUrlBuilder<>(this::withSinkUrl, endpointUrl);
    }
}
| 9,102 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue/source/CamelSourceAzureStorageQueuePropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.queue.source;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
/**
 * Builds the property set consumed by the Azure Storage Queue source
 * kamelet-based connector.
 */
public class CamelSourceAzureStorageQueuePropertyFactory extends SourceConnectorPropertyFactory<CamelSourceAzureStorageQueuePropertyFactory> {

    /** Baseline configuration: name, task count, connector class, converters and kamelet location. */
    public static CamelSourceAzureStorageQueuePropertyFactory basic() {
        return new CamelSourceAzureStorageQueuePropertyFactory()
                .withTasksMax(1)
                .withName("CamelAzurequeueSourceConnector")
                .withConnectorClass("org.apache.camel.kafkaconnector.azurestoragequeuesource.CamelAzurestoragequeuesourceSourceConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets");
    }

    public CamelSourceAzureStorageQueuePropertyFactory withAccountName(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-source.accountName", value);
    }

    public CamelSourceAzureStorageQueuePropertyFactory withQueueName(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-source.queueName", value);
    }

    public CamelSourceAzureStorageQueuePropertyFactory withAccessKey(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-source.accessKey", value);
    }

    /** Points the component at a custom QueueConfiguration class (bean reference). */
    public CamelSourceAzureStorageQueuePropertyFactory withConfiguration(String configurationClass) {
        return setProperty("camel.component.azure-storage-queue.configuration", classRef(configurationClass));
    }
}
| 9,103 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue/source/CamelSourceAzureStorageQueueITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.queue.source;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import com.azure.storage.queue.QueueClient;
import com.azure.storage.queue.QueueServiceClient;
import org.apache.camel.kafkaconnector.azure.storage.queue.common.TestQueueConfiguration;
import org.apache.camel.kafkaconnector.azure.storage.services.AzureStorageClientUtils;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSourceTestSupport;
import org.apache.camel.kafkaconnector.common.test.TestMessageConsumer;
import org.apache.camel.test.infra.azure.common.AzureCredentialsHolder;
import org.apache.camel.test.infra.azure.common.services.AzureService;
import org.apache.camel.test.infra.azure.storage.queue.services.AzureStorageQueueServiceFactory;
import org.apache.camel.test.infra.common.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Integration test for the Azure Storage Queue source connector: sends messages
 * to a freshly-created queue and verifies they arrive on the Kafka topic.
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSourceAzureStorageQueueITCase extends CamelSourceTestSupport {

    @RegisterExtension
    public static AzureService service = AzureStorageQueueServiceFactory.createService();

    private QueueServiceClient client;
    private QueueClient queueClient;
    private String queueName;
    private String topicName;

    // Number of messages sent to the queue and expected on the topic.
    private int expect = 10;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-azure-storage-queue-source-kafka-connector"};
    }

    @BeforeEach
    public void setUp() {
        topicName = getTopicForTest(this);
        client = AzureStorageClientUtils.getClient();
        // Random suffix avoids collisions between consecutive runs.
        queueName = "test-queue" + TestUtils.randomWithRange(0, 100);
        queueClient = client.createQueue(queueName);
    }

    @AfterEach
    public void tearDown() {
        if (client != null) {
            client.deleteQueue(queueName);
        }
    }

    @Override
    protected void produceTestData() {
        sendMessages();
    }

    @Override
    protected void verifyMessages(TestMessageConsumer<?> consumer) {
        int received = consumer.consumedMessages().size();
        // Fix: JUnit's assertEquals takes (expected, actual); the arguments were
        // previously swapped, which made failure messages report the values backwards.
        assertEquals(expect, received, "Didn't process the expected amount of messages");
    }

    private void sendMessages() {
        for (int i = 0; i < expect; i++) {
            queueClient.sendMessage("Test message " + i);
        }
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() throws InterruptedException, ExecutionException, IOException {
        AzureCredentialsHolder azureCredentialsHolder = service.azureCredentials();
        ConnectorPropertyFactory connectorPropertyFactory = CamelSourceAzureStorageQueuePropertyFactory
                .basic()
                .withConfiguration(TestQueueConfiguration.class.getName())
                .withKafkaTopic(topicName)
                .withAccessKey(azureCredentialsHolder.accountKey())
                .withAccountName(azureCredentialsHolder.accountName())
                .withQueueName(queueName);
        runTest(connectorPropertyFactory, topicName, expect);
    }
}
| 9,104 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue/sink/CamelSinkAzureStorageQueueITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.queue.sink;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import com.azure.storage.queue.QueueClient;
import com.azure.storage.queue.QueueServiceClient;
import com.azure.storage.queue.models.PeekedMessageItem;
import org.apache.camel.kafkaconnector.azure.storage.queue.common.TestQueueConfiguration;
import org.apache.camel.kafkaconnector.azure.storage.services.AzureStorageClientUtils;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.test.infra.azure.common.AzureCredentialsHolder;
import org.apache.camel.test.infra.azure.common.services.AzureService;
import org.apache.camel.test.infra.azure.storage.queue.services.AzureStorageQueueServiceFactory;
import org.apache.camel.test.infra.common.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkAzureStorageQueueITCase extends CamelSinkTestSupport {

    @RegisterExtension
    public static AzureService service = AzureStorageQueueServiceFactory.createService();

    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkAzureStorageQueueITCase.class);

    private QueueServiceClient client;
    private QueueClient queueClient;
    private String queueName;
    private String topicName;

    // Number of messages the sink is expected to deliver to the queue.
    private int expect = 10;
    // Count of messages observed on the queue; reset before each test.
    private int received;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-azure-storage-queue-sink-kafka-connector"};
    }

    @BeforeEach
    public void setUp() {
        topicName = getTopicForTest(this);
        client = AzureStorageClientUtils.getClient();
        // Random suffix avoids collisions between consecutive runs.
        queueName = "test-queue" + TestUtils.randomWithRange(0, 100);
        queueClient = client.createQueue(queueName);
        received = 0;
    }

    @AfterEach
    public void tearDown() {
        if (client != null) {
            client.deleteQueue(queueName);
        }
    }

    @Override
    protected void consumeMessages(CountDownLatch latch) {
        try {
            consume();
        } finally {
            // Always release the latch so verifyMessages cannot hang forever.
            latch.countDown();
        }
    }

    @Override
    protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
        // NOTE(review): this waits up to 120s while the test method itself is
        // capped at @Timeout(90), so the timeout branch below may never fire.
        boolean completed = latch.await(120, TimeUnit.SECONDS);
        if (!completed) {
            fail("Failed to receive the messages within the specified time");
        }
        assertEquals(expect, received,
                "Didn't process the expected amount of messages: " + received + " != " + expect);
    }

    private void acknowledgeReceived(PeekedMessageItem peekedMessageItem) {
        received++;
        LOG.info("Received: {}", peekedMessageItem.getMessageText());
    }

    /** True once the queue reports at least the expected number of messages. */
    private boolean canConsume() {
        return queueClient.getProperties().getApproximateMessagesCount() >= expect;
    }

    private void consume() {
        LOG.debug("Created the consumer ...");
        TestUtils.waitFor(this::canConsume);
        LOG.debug("About to receive messages");
        // Peek (rather than dequeue) everything currently on the queue.
        int available = queueClient.getProperties().getApproximateMessagesCount();
        queueClient.peekMessages(available, null, null).forEach(this::acknowledgeReceived);
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() throws Exception {
        AzureCredentialsHolder credentials = service.azureCredentials();
        ConnectorPropertyFactory propertyFactory = CamelSinkAzureStorageQueuePropertyFactory
                .basic()
                .withConfiguration(TestQueueConfiguration.class.getName())
                .withTopics(topicName)
                .withAccessKey(credentials.accountKey())
                .withAccountName(credentials.accountName())
                .withQueueName(queueName);
        runTest(propertyFactory, topicName, expect);
    }
}
| 9,105 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue/sink/CamelSinkAzureStorageQueuePropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.queue.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the property set consumed by the Azure Storage Queue sink
 * kamelet-based connector.
 */
public class CamelSinkAzureStorageQueuePropertyFactory extends SinkConnectorPropertyFactory<CamelSinkAzureStorageQueuePropertyFactory> {

    /** Baseline configuration: name, task count, connector class, converters and kamelet location. */
    public static CamelSinkAzureStorageQueuePropertyFactory basic() {
        return new CamelSinkAzureStorageQueuePropertyFactory()
                .withTasksMax(1)
                .withName("CamelAzurestoragequeueSinkConnector")
                .withConnectorClass("org.apache.camel.kafkaconnector.azurestoragequeuesink.CamelAzurestoragequeuesinkSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets");
    }

    public CamelSinkAzureStorageQueuePropertyFactory withAccountName(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-sink.accountName", value);
    }

    public CamelSinkAzureStorageQueuePropertyFactory withQueueName(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-sink.queueName", value);
    }

    public CamelSinkAzureStorageQueuePropertyFactory withAccessKey(String value) {
        return setProperty("camel.kamelet.azure-storage-queue-sink.accessKey", value);
    }

    /** Points the component at a custom QueueConfiguration class (bean reference). */
    public CamelSinkAzureStorageQueuePropertyFactory withConfiguration(String configurationClass) {
        return setProperty("camel.component.azure-storage-queue.configuration", classRef(configurationClass));
    }
}
| 9,106 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/queue/common/TestQueueConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.queue.common;
import com.azure.storage.queue.QueueServiceClient;
import org.apache.camel.component.azure.storage.queue.QueueConfiguration;
import org.apache.camel.kafkaconnector.azure.storage.services.AzureStorageClientUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Queue configuration override that supplies a client built from the test
 * environment's system properties instead of the component defaults.
 */
public class TestQueueConfiguration extends QueueConfiguration {

    private static final Logger LOG = LoggerFactory.getLogger(TestQueueConfiguration.class);

    // Lazily-created client, reused across calls.
    private QueueServiceClient serviceClient;

    @Override
    public QueueServiceClient getServiceClient() {
        LOG.info("Creating a custom QueueServiceClient");
        if (serviceClient != null) {
            return serviceClient;
        }
        serviceClient = AzureStorageClientUtils.getClient();
        return serviceClient;
    }
}
| 9,107 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage | Create_ds/camel-kafka-connector/tests/itests-azure-storage-queue/src/test/java/org/apache/camel/kafkaconnector/azure/storage/services/AzureStorageClientUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.services;
import com.azure.core.http.policy.HttpLogDetailLevel;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.storage.common.StorageSharedKeyCredential;
import com.azure.storage.queue.QueueServiceClient;
import com.azure.storage.queue.QueueServiceClientBuilder;
import com.azure.storage.queue.QueueServiceVersion;
import org.apache.camel.test.infra.azure.common.AzureConfigs;
@Deprecated
public final class AzureStorageClientUtils {

    private AzureStorageClientUtils() {
        // utility class — no instances
    }

    /**
     * Builds a {@link QueueServiceClient} from the azure.* system properties.
     *
     * <p>When {@code azure.instance.type} is unset or {@code local-azure-container},
     * or when a host is explicitly configured, the endpoint targets a local
     * emulator at {@code http://host:port/account}; otherwise it targets the real
     * Azure queue service.
     */
    public static QueueServiceClient getClient() {
        String instanceType = System.getProperty("azure.instance.type");
        String accountName = System.getProperty(AzureConfigs.ACCOUNT_NAME);
        String accountKey = System.getProperty(AzureConfigs.ACCOUNT_KEY);
        StorageSharedKeyCredential credential = new StorageSharedKeyCredential(accountName, accountKey);
        String host = System.getProperty(AzureConfigs.HOST);
        String port = System.getProperty(AzureConfigs.PORT);

        String endpoint;
        if (instanceType == null || instanceType.equals("local-azure-container")) {
            endpoint = String.format("http://%s:%s/%s", host, port, accountName);
        } else {
            if (host == null || host.isEmpty()) {
                // Fix: the Azure queue service endpoint is https://<account>.queue.core.windows.net.
                // The previous code appended the account *key* as a path segment, which both
                // produced an invalid endpoint and leaked the secret into URLs and logs.
                endpoint = String.format("https://%s.queue.core.windows.net", accountName);
            } else {
                endpoint = String.format("http://%s:%s/%s", host, port, accountName);
            }
        }

        return new QueueServiceClientBuilder()
                .endpoint(endpoint)
                .credential(credential)
                .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS).setPrettyPrintBody(true))
                .serviceVersion(QueueServiceVersion.V2019_12_12)
                .buildClient();
    }
}
| 9,108 |
0 | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs/sink/CamelHDFSPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.hdfs.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the property set consumed by a Camel HDFS sink connector instance.
 */
final class CamelHDFSPropertyFactory extends SinkConnectorPropertyFactory<CamelHDFSPropertyFactory> {

    private CamelHDFSPropertyFactory() {
        // Instances are obtained via basic().
    }

    /** Baseline configuration with single-task, String converters and replication 1. */
    public static CamelHDFSPropertyFactory basic() {
        return new CamelHDFSPropertyFactory()
                .withName("CamelHDFSSinkConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.hdfs.CamelHdfsSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withReplication(1);
    }

    // --- endpoint path options ---------------------------------------------

    public CamelHDFSPropertyFactory withHostname(String value) {
        return setProperty("camel.sink.path.hostName", value);
    }

    public CamelHDFSPropertyFactory withPort(int value) {
        return setProperty("camel.sink.path.port", value);
    }

    public CamelHDFSPropertyFactory withPath(String value) {
        return setProperty("camel.sink.path.path", value);
    }

    // --- endpoint query options --------------------------------------------

    public CamelHDFSPropertyFactory withSplitStrategy(String value) {
        return setProperty("camel.sink.endpoint.splitStrategy", value);
    }

    public CamelHDFSPropertyFactory withReplication(int value) {
        return setProperty("camel.sink.endpoint.replication", value);
    }

    public CamelHDFSPropertyFactory withOwner(String value) {
        return setProperty("camel.sink.endpoint.owner", value);
    }

    public CamelHDFSPropertyFactory withAppend(boolean value) {
        return setProperty("camel.sink.endpoint.append", value);
    }

    public CamelHDFSPropertyFactory withBufferSize(int value) {
        return setProperty("camel.sink.endpoint.bufferSize", value);
    }
}
| 9,109 |
0 | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs/sink/CamelSinkHDFSITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.hdfs.sink;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.common.test.StringMessageProducer;
import org.apache.camel.kafkaconnector.hdfs.utils.HDFSEasy;
import org.apache.camel.test.AvailablePortFinder;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.hdfs.v2.services.HDFSService;
import org.apache.camel.test.infra.hdfs.v2.services.HDFSServiceFactory;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.runners.model.InitializationError;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkHDFSITCase extends CamelSinkTestSupport {
    @RegisterExtension
    public static HDFSService hdfsService = HDFSServiceFactory.createSingletonService(AvailablePortFinder.getNextAvailable());
    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkHDFSITCase.class);
    private HDFSEasy hdfsEasy;
    // Randomized per-test base directory on HDFS so consecutive runs do not interfere.
    private Path currentBasePath;
    private String topicName;
    // Number of messages produced and therefore the number of files expected on HDFS.
    private final int expect = 10;

    /** Producer emitting deterministic payloads that the verification step can match. */
    private static class CustomProducer extends StringMessageProducer {
        public CustomProducer(String bootstrapServer, String topicName, int count) {
            super(bootstrapServer, topicName, count);
        }

        @Override
        public String testMessageContent(int current) {
            return "Sink test message: " + current;
        }
    }

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-hdfs-kafka-connector"};
    }

    /**
     * Waits for the HDFS service to become usable (by round-tripping a probe
     * file) and removes any leftover test directory from a previous run.
     *
     * @throws InitializationError if HDFS never becomes writable
     */
    @BeforeEach
    public void setUp() throws IOException, URISyntaxException, InitializationError {
        topicName = getTopicForTest(this);
        hdfsEasy = new HDFSEasy(hdfsService.getHDFSHost(), hdfsService.getPort());
        String currentPath = "/test" + TestUtils.randomWithRange(0, 256) + "/";
        currentBasePath = new Path(currentPath);
        boolean hdfsServiceCorrectlyStarted = TestUtils.waitFor(() -> hdfsEasy.createFile(new Path(currentBasePath, "initTest"), "test")
                && hdfsEasy.delete(new Path(currentBasePath, "initTest")));
        if (!hdfsServiceCorrectlyStarted) {
            // NOTE(review): InitializationError is JUnit 4's runner-model type;
            // a plain IllegalStateException would be more natural in a JUnit 5 suite.
            throw new InitializationError("HDFS Service didn't start properly.");
        }
        if (!hdfsEasy.delete(currentBasePath)) {
            // This is OK: the directory may simply not exist yet
            LOG.debug("The directory at {} was not removed", currentBasePath.getName());
        }
    }

    @AfterEach
    public void tearDown() {
        if (!hdfsEasy.delete(currentBasePath)) {
            LOG.warn("The directory at {} was not removed", currentBasePath.getName());
        }
    }

    /** Blocks until the expected files appear on HDFS, then releases the latch. */
    @Override
    protected void consumeMessages(CountDownLatch latch) {
        try {
            TestUtils.waitFor(this::filesCreated);
        } finally {
            latch.countDown();
        }
    }

    /**
     * Verifies that the sink connector wrote the expected number of files and
     * that each (closed) file contains the expected payload prefix.
     */
    @Override
    protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
        if (latch.await(30, TimeUnit.SECONDS)) {
            boolean filesCreated = filesCreated();
            assertTrue(filesCreated, "The files were not created on the remote host");
            try {
                // JUnit 5 argument order is (expected, actual, message)
                assertEquals(expect, hdfsEasy.countFiles(currentBasePath), "The number of files created vs expected do not match");
                final String baseMessage = "Sink test message: ";
                hdfsEasy.listFiles(currentBasePath)
                        .stream()
                        // ".opened" files are still being written by the connector
                        .filter(f -> !f.getPath().getName().contains(".opened"))
                        .forEach(f -> printFile(f, baseMessage));
            } catch (IOException e) {
                fail(e.getMessage());
            }
        } else {
            fail("Failed to receive the messages within the specified time");
        }
    }

    private boolean filesCreated() {
        return hdfsEasy.filesCreated(currentBasePath, expect);
    }

    /** Reads the remote file and asserts it contains the given match string. */
    private void printFile(LocatedFileStatus f, String matchString) {
        try {
            String contents = hdfsEasy.readFile(f.getPath());
            LOG.debug("Retrieved file {} with contents: {}", f.getPath(), contents);
            boolean contains = contents.contains(matchString);
            assertTrue(contains, "Unexpected content for the remote file " + f.getPath().getName() + " content: [" + contents + "] should contain [" + matchString + "]");
        } catch (IOException e) {
            LOG.debug("Reading returned file {} failed: {}", f.getPath(), e.getMessage());
            fail("I/O error: " + e.getMessage());
        }
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() throws Exception {
        ConnectorPropertyFactory connectorPropertyFactory = CamelHDFSPropertyFactory
                .basic()
                .withTopics(topicName)
                .withHostname(hdfsService.getHDFSHost())
                .withPort(hdfsService.getPort())
                .withPath(currentBasePath.getName())
                .withSplitStrategy("MESSAGES:1,IDLE:1000");
        runTest(connectorPropertyFactory, new CustomProducer(getKafkaService().getBootstrapServers(), topicName, expect));
    }
}
| 9,110 |
0 | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs | Create_ds/camel-kafka-connector/tests/itests-hdfs/src/test/java/org/apache/camel/kafkaconnector/hdfs/utils/HDFSEasy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.hdfs.utils;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Small convenience wrapper around the HDFS {@code DistributedFileSystem}
 * client used by the integration tests: list/count/read/create/delete files
 * with boolean results instead of checked exceptions where convenient.
 */
public class HDFSEasy {
    private static final Logger LOG = LoggerFactory.getLogger(HDFSEasy.class);
    private final DistributedFileSystem dfs = new DistributedFileSystem();

    /**
     * Creates a client connected to {@code hdfs://host:port}.
     */
    public HDFSEasy(String host, int port) throws URISyntaxException, IOException {
        dfs.initialize(new URI("hdfs://" + host + ":" + port), new Configuration());
    }

    /** Lists the files (non-recursively) under the given path. */
    public List<LocatedFileStatus> listFiles(Path path) throws IOException {
        RemoteIterator<LocatedFileStatus> i = dfs.listFiles(path, false);
        List<LocatedFileStatus> retList = new ArrayList<>();
        while (i.hasNext()) {
            LocatedFileStatus locatedFileStatus = i.next();
            retList.add(locatedFileStatus);
        }
        return retList;
    }

    /**
     * Recursively deletes the given path if it exists.
     *
     * @return true only if the path existed and was removed
     */
    public boolean delete(Path path) {
        try {
            if (dfs.exists(path)) {
                LOG.debug("Removing HDFS directory {}", path.getName());
                if (!dfs.delete(path, true)) {
                    LOG.debug("Failed to remove directory {}", path.getName());
                    return false;
                }
                return true;
            }
        } catch (IOException e) {
            LOG.warn("Unable to remove HDFS directory {}: {}", path.getName(), e.getMessage(), e);
        }
        return false;
    }

    /**
     * Reads the whole remote file into a string (line separators are dropped,
     * matching the previous behavior of concatenating lines).
     */
    public String readFile(Path filePath) throws IOException {
        // try-with-resources: the original leaked both the HDFS input stream
        // and the Scanner
        try (FSDataInputStream streamReader = dfs.open(filePath);
             Scanner scanner = new Scanner(streamReader)) {
            StringBuilder sb = new StringBuilder();
            while (scanner.hasNextLine()) {
                sb.append(scanner.nextLine());
            }
            return sb.toString();
        }
    }

    public String readFile(String filePath) throws IOException {
        return readFile(new Path(filePath));
    }

    /** Counts the files (non-recursively) under the given path. */
    public int countFiles(Path path) throws IOException {
        RemoteIterator<LocatedFileStatus> i = dfs.listFiles(path, false);
        int files = 0;
        while (i.hasNext()) {
            files++;
            i.next();
        }
        return files;
    }

    /**
     * Checks if a set of (minimum number of) files was created on the given path representing a directory
     * @param path the path to check for the files
     * @param minFiles the number of files created (using 0 just checks if the directory is there)
     * @return true if the path contains at least minFiles and false otherwise
     */
    public boolean filesCreated(Path path, int minFiles) {
        try {
            return countFiles(path) >= minFiles;
        } catch (Exception e) {
            LOG.warn("I/O exception: {} due to {} while checking if file {} exists", e.getMessage(), e.getCause(), path.getName());
            return false;
        }
    }

    public boolean filesCreated(Path path) {
        return filesCreated(path, 0);
    }

    public boolean filesCreated(String path) {
        return filesCreated(new Path(path));
    }

    /** Exception-free existence check. */
    public boolean exists(Path path) {
        try {
            return dfs.exists(path);
        } catch (Exception e) {
            LOG.warn("I/O exception while checking if file {} exists", path.getName());
            return false;
        }
    }

    /**
     * Creates a file with the given content.
     *
     * @return true on success, false on any I/O error (including close failure)
     */
    public boolean createFile(Path filePath, String content) {
        // try-with-resources replaces the manual finally/close dance; a failed
        // close surfaces as an IOException and still yields false
        try (FSDataOutputStream streamWriter = dfs.create(filePath)) {
            streamWriter.writeBytes(content);
            streamWriter.flush();
            return true;
        } catch (IOException e) {
            LOG.debug("Error in file creation: {}", e.getMessage());
            return false;
        }
    }
}
| 9,111 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/clients/SfdxCommand.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.clients;
import java.util.ArrayList;
import java.util.List;
/**
 * Fluent builder for Salesforce CLI ({@code sfdx}) command lines.
 *
 * The resulting argument vector always starts with "sfdx" followed by the
 * sub-command and any arguments added via {@link #withArgument}.
 */
public final class SfdxCommand {
    private final List<String> commands = new ArrayList<>();

    // Instances are only created through the force* factory methods.
    private SfdxCommand() {
        commands.add("sfdx");
    }

    /** Appends a single flag or argument. */
    public SfdxCommand withArgument(String argument) {
        commands.add(argument);
        return this;
    }

    /** Appends a flag and its value as two consecutive arguments. */
    public SfdxCommand withArgument(String argument, String value) {
        commands.add(argument);
        commands.add(value);
        return this;
    }

    // Shared helper for the force:data:record:* factories below
    private static SfdxCommand forceDataRecordCommand(String subCommand) {
        return new SfdxCommand().withArgument(subCommand);
    }

    public static SfdxCommand forceDataRecordCreate() {
        return forceDataRecordCommand("force:data:record:create");
    }

    public static SfdxCommand forceDataRecordDelete() {
        return forceDataRecordCommand("force:data:record:delete");
    }

    public static SfdxCommand forceDataRecordUpdate() {
        return forceDataRecordCommand("force:data:record:update");
    }

    public static SfdxCommand forceDataRecordGet() {
        return forceDataRecordCommand("force:data:record:get");
    }

    /** Returns the full command line as an argument vector. */
    public String[] commands() {
        return commands.toArray(new String[0]);
    }
}
| 9,112 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/clients/SalesforceCliContainer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.clients;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
/**
 * Testcontainers wrapper for the Salesforce CLI ({@code sfdx}) image.
 *
 * It bind-mounts a pre-authenticated sfdx configuration directory from the
 * host (see the setup notes in CamelSourceSalesforceITCase) and keeps the
 * container alive so commands can be executed against it.
 */
public class SalesforceCliContainer extends GenericContainer<SalesforceCliContainer> {
    private static final Logger LOG = LoggerFactory.getLogger(SalesforceCliContainer.class);
    // Host directory holding the sfdx credentials; must be set for this container to start
    private static final String HOST_PATH = System.getProperty("it.test.salesforce.sfdx.path");

    public SalesforceCliContainer() {
        super("salesforce/salesforcedx:latest-full");
        withFileSystemBind(HOST_PATH, "/root/.sfdx");
        withPrivilegedMode(true);
        // Fix the key permissions and keep the container alive; the periodic
        // "running" echo doubles as the startup wait condition below
        withCommand("/bin/bash", "-c", "chmod 600 /root/.sfdx/key.json ; echo running ; while true ; do sleep 1 ; echo running ; done");
        waitingFor(Wait.forLogMessage(".*running.*", 1));
    }

    /** Executes the given sfdx command inside the container. */
    public ExecResult execCommand(SfdxCommand sfdxCommand) throws IOException, InterruptedException {
        return execInContainer(sfdxCommand.commands());
    }

    /**
     * Logs diagnostics and returns false when the command exited with a
     * non-zero status; true otherwise.
     */
    public static boolean verifyCommand(SfdxCommand sfdxCommand, ExecResult result) {
        if (result.getExitCode() != 0) {
            // Join the array explicitly: passing a String[] to SLF4J's varargs
            // parameter expands it, so only the first element would fill the
            // single {} placeholder
            LOG.error("Unable to execute command: {}", String.join(" ", sfdxCommand.commands()));
            LOG.error("Command stdout: {}", result.getStdout());
            LOG.error("Command stderr: {}", result.getStderr());
            return false;
        }
        return true;
    }
}
| 9,113 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/source/CamelSalesforcePropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.source;
import org.apache.camel.kafkaconnector.common.EndpointUrlBuilder;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
/**
 * Builds the configuration properties for the Camel Salesforce source
 * connector used by the integration tests.
 *
 * Property keys follow the camel-kafka-connector naming convention:
 * "camel.component.salesforce.*" for component options,
 * "camel.source.endpoint.*" for endpoint query options and
 * "camel.source.path.*" for URI path options.
 */
final class CamelSalesforcePropertyFactory extends SourceConnectorPropertyFactory<CamelSalesforcePropertyFactory> {
    // Instances are only created through the basic() factory method.
    private CamelSalesforcePropertyFactory() {
    }
    public CamelSalesforcePropertyFactory withClientId(String value) {
        return setProperty("camel.component.salesforce.clientId", value);
    }
    public CamelSalesforcePropertyFactory withClientSecret(String value) {
        return setProperty("camel.component.salesforce.clientSecret", value);
    }
    public CamelSalesforcePropertyFactory withPassword(String value) {
        return setProperty("camel.component.salesforce.password", value);
    }
    public CamelSalesforcePropertyFactory withUserName(String value) {
        return setProperty("camel.component.salesforce.userName", value);
    }
    public CamelSalesforcePropertyFactory withNotifyForFields(String fields) {
        return setProperty("camel.source.endpoint.notifyForFields", fields);
    }
    public CamelSalesforcePropertyFactory withNotifyForOperations(String value) {
        return setProperty("camel.source.endpoint.notifyForOperations", value);
    }
    public CamelSalesforcePropertyFactory withNotifyForOperationCreate(String value) {
        return setProperty("camel.component.salesforce.notifyForOperationCreate", value);
    }
    public CamelSalesforcePropertyFactory withNotifyForOperationDelete(String value) {
        return setProperty("camel.component.salesforce.notifyForOperationDelete", value);
    }
    public CamelSalesforcePropertyFactory withNotifyForOperationUpdate(String value) {
        return setProperty("camel.component.salesforce.notifyForOperationUpdate", value);
    }
    // classRef() presumably wraps the value as a bean reference (#class:...) —
    // TODO confirm against SourceConnectorPropertyFactory
    public CamelSalesforcePropertyFactory withHttpClient(String value) {
        return setProperty("camel.source.endpoint.httpClient", classRef(value));
    }
    public CamelSalesforcePropertyFactory withSObjectName(String value) {
        return setProperty("camel.source.endpoint.sObjectName", value);
    }
    public CamelSalesforcePropertyFactory withSObjectQuery(String value) {
        return setProperty("camel.source.endpoint.sObjectQuery", value);
    }
    public CamelSalesforcePropertyFactory withSObjectClass(String value) {
        return setProperty("camel.component.salesforce.sObjectClass", value);
    }
    public CamelSalesforcePropertyFactory withUpdateTopic(boolean value) {
        return setProperty("camel.source.endpoint.updateTopic", value);
    }
    public CamelSalesforcePropertyFactory withLoginUrl(String value) {
        return setProperty("camel.component.salesforce.loginUrl", value);
    }
    // Salesforce streaming topic/channel name (the path part of the endpoint URI)
    public CamelSalesforcePropertyFactory withTopicName(String value) {
        return setProperty("camel.source.path.topicName", value);
    }
    public CamelSalesforcePropertyFactory withRawPayload(boolean value) {
        return setProperty("camel.source.endpoint.rawPayload", value);
    }
    public CamelSalesforcePropertyFactory withPackages(String value) {
        return setProperty("camel.component.salesforce.packages", value);
    }
    // -1 replays new events only, -2 replays all retained events
    public CamelSalesforcePropertyFactory withReplayId(int value) {
        return setProperty("camel.source.endpoint.replayId", value);
    }
    public CamelSalesforcePropertyFactory withApiVersion(String value) {
        return setProperty("camel.component.salesforce.apiVersion", value);
    }
    /**
     * Starts building a raw "salesforce:topic?..." endpoint URL instead of
     * setting the individual properties.
     */
    public EndpointUrlBuilder<CamelSalesforcePropertyFactory> withUrl(String topic) {
        String queueUrl = String.format("salesforce:%s", topic);
        return new EndpointUrlBuilder<>(this::withSourceUrl, queueUrl);
    }
    /**
     * Returns a factory pre-configured with the connector class, string
     * converters, a single task and the production Salesforce login URL.
     */
    public static CamelSalesforcePropertyFactory basic() {
        return new CamelSalesforcePropertyFactory()
                .withName("CamelSalesforceConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.salesforce.CamelSalesforceSourceConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withLoginUrl("https://login.salesforce.com");
    }
}
| 9,114 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/source/CamelSourceSalesforceITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.source;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.camel.kafkaconnector.salesforce.clients.SalesforceCliContainer;
import org.apache.camel.kafkaconnector.salesforce.clients.SfdxCommand;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import static org.apache.camel.kafkaconnector.salesforce.clients.SalesforceCliContainer.verifyCommand;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/* This test is disabled by default because requires setup on Salesforce end.
Creating API keys:
https://help.salesforce.com/articleView?id=connected_app_create_api_integration.htm
You need to set the following system properties to run this test:
-Dit.test.salesforce.enable=true to enable the test
-Dit.test.salesforce.client.id=<client ID>
-Dit.test.salesforce.client.secret=<client secret>
-Dit.test.salesforce.password=<user password>
-Dit.test.salesforce.username=<user name>
-Dit.test.salesforce.sfdx.path=/path/to/sfdx
The it.test.salesforce.sfdx.path property should point to the directory containing the sfdx
CLI client configuration. This can be generated using the following steps:
1. Run the Salesforce CLI container:
docker run --rm --privileged --name salesforce-cli -it -v /path/to/sfdx:/root/.sfdx salesforce/salesforcedx
2. Within the container, use the following command to login:
sfdx force:auth:device:login -s -d -i <client ID>
3. Provide the client secret when request and execute the steps requested by the CLI.
4. Verify that you are logged in correctly using the following command
sfdx force:auth:list
It should present an output like:
#### authenticated orgs
ALIAS USERNAME ORG ID INSTANCE URL OAUTH METHOD
───── ──────────────────── ────────────────── ─────────────────────────── ────────────
angusyoung@gmail.com SOME NUMERIC ID https://eu31.salesforce.com web
Note: after leaving the container you might need to adjust the permissions of the directory
containing the sfdx configuration files (/path/to/sfdx).
*/
@Testcontainers
@EnabledIfSystemProperty(named = "it.test.salesforce.enable", matches = "true")
public class CamelSourceSalesforceITCase extends AbstractKafkaTest {
    private static final Logger LOG = LoggerFactory.getLogger(CamelSourceSalesforceITCase.class);

    @Container
    public final SalesforceCliContainer container = new SalesforceCliContainer();

    private final String clientId = System.getProperty("it.test.salesforce.client.id");
    private final String clientSecret = System.getProperty("it.test.salesforce.client.secret");
    private final String password = System.getProperty("it.test.salesforce.password");
    private final String userName = System.getProperty("it.test.salesforce.username");

    // Written by the Kafka consumer callback and read by the updater thread, hence volatile
    private volatile boolean received;
    // Name of the Salesforce Account record created for each test
    private String account;
    private String topicName;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-salesforce-kafka-connector"};
    }

    /** Creates a randomly-named test account on Salesforce via the sfdx CLI. */
    @BeforeEach
    public void setUp() throws IOException, InterruptedException {
        received = false;
        account = "TestAccount" + TestUtils.randomWithRange(1, 100);
        topicName = getTopicForTest(this);
        SfdxCommand sfdxCommand = SfdxCommand.forceDataRecordCreate()
                .withArgument("-u", userName)
                .withArgument("--sobjecttype", "Account")
                .withArgument("--values", String.format("Name=%s", account));
        LOG.debug("Creating the test account");
        ExecResult result = container.execCommand(sfdxCommand);
        if (!verifyCommand(sfdxCommand, result)) {
            fail("Unable to create test account on Salesforce");
        }
    }

    /** Deletes the test account created in {@link #setUp()}. */
    @AfterEach
    public void tearDown() throws IOException, InterruptedException {
        SfdxCommand sfdxCommand = SfdxCommand.forceDataRecordDelete()
                .withArgument("-u", userName)
                .withArgument("--sobjecttype", "Account")
                .withArgument("--where", String.format("Name=%s", account));
        LOG.debug("Deleting the test account");
        ExecResult result = container.execCommand(sfdxCommand);
        if (!verifyCommand(sfdxCommand, result)) {
            fail("Unable to delete the test account on Salesforce");
        }
        account = null;
    }

    // Consumer callback: flag the first record and stop consuming (returns false)
    private <T> boolean checkRecord(ConsumerRecord<String, T> record) {
        LOG.debug("Received: {}", record.value());
        received = true;
        return false;
    }

    /**
     * Starts the connector, consumes from the Kafka topic and asserts that at
     * least one message arrived.
     */
    public void runBasicTest(ConnectorPropertyFactory connectorPropertyFactory) throws ExecutionException, InterruptedException {
        connectorPropertyFactory.log();
        getKafkaConnectService().initializeConnectorBlocking(connectorPropertyFactory, 1);
        LOG.debug("Creating the consumer ...");
        KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
        kafkaClient.consume(topicName, this::checkRecord);
        LOG.debug("Created the consumer ...");
        assertTrue(received, "Didn't receive any messages");
    }

    /**
     * Repeatedly updates the test account's description (once per second, up
     * to 50 times) so that Salesforce emits change notifications, stopping as
     * soon as a message has been received.
     *
     * @return false on I/O error, interruption or when the update limit is hit
     */
    private boolean updateTestAccount() {
        final int limit = 50;
        int count = 0;
        while (!received && count < limit) {
            LOG.debug("Updating the account to desc {}", count);
            try {
                SfdxCommand sfdxCommand = SfdxCommand.forceDataRecordUpdate()
                        .withArgument("-u", userName)
                        .withArgument("--sobjecttype", "Account")
                        .withArgument("--where", String.format("Name=%s", account))
                        .withArgument("--values", String.format("Description=desc%d", count));
                LOG.debug("Updating the test account");
                ExecResult result = container.execCommand(sfdxCommand);
                if (!verifyCommand(sfdxCommand, result)) {
                    // fixed copy-paste: the failure here is an update, not a delete
                    fail("Unable to update the test account on Salesforce");
                }
                Thread.sleep(TimeUnit.SECONDS.toMillis(1));
            } catch (IOException e) {
                LOG.error("I/O exception while updating the account: {}", e.getMessage(), e);
                return false;
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                LOG.error("Interrupted while updating the account: {}", e.getMessage(), e);
                return false;
            }
            count++;
        }
        return count < limit;
    }

    // Runs the basic test while an updater thread触 triggers change events; the
    // executor is shut down afterwards (the original leaked it).
    private void runTestWithAccountUpdates(ConnectorPropertyFactory factory) throws ExecutionException, InterruptedException {
        ExecutorService executorService = Executors.newCachedThreadPool();
        try {
            executorService.submit(this::updateTestAccount);
            runBasicTest(factory);
        } finally {
            executorService.shutdownNow();
        }
    }

    @Test
    @Timeout(180)
    public void testBasicConsume() throws ExecutionException, InterruptedException {
        ConnectorPropertyFactory factory = CamelSalesforcePropertyFactory.basic()
                .withKafkaTopic(topicName)
                .withUserName(userName)
                .withPassword(password)
                .withClientId(clientId)
                .withClientSecret(clientSecret)
                .withNotifyForFields("ALL")
                .withUpdateTopic(true)
                .withRawPayload(true)
                .withPackages("org.apache.camel.salesforce.dto")
                .withSObjectClass("org.apache.camel.salesforce.dto.Account")
                .withSObjectQuery("SELECT Id, Name FROM Account")
                .withTopicName("CamelKafkaConnectorTopic");
        runTestWithAccountUpdates(factory);
    }

    @Test
    @Timeout(180)
    public void testBasicConsumeUsingUrl() throws ExecutionException, InterruptedException {
        ConnectorPropertyFactory factory = CamelSalesforcePropertyFactory.basic()
                .withKafkaTopic(topicName)
                .withUserName(userName)
                .withPassword(password)
                .withClientId(clientId)
                .withClientSecret(clientSecret)
                .withUrl("CamelKafkaConnectorTopic")
                    .append("notifyForFields", "ALL")
                    .append("updateTopic", "true")
                    .append("rawPayload", "true")
                    .append("sObjectClass", "org.apache.camel.salesforce.dto.Account")
                    .append("sObjectQuery", "SELECT Id, Name FROM Account")
                    .buildUrl();
        runTestWithAccountUpdates(factory);
    }

    /*
     For this test to work, Change Data Capture need to be enabled on the setup. For lightnining, as of now, this is
     Setup -> Integrations -> Change Data Capture
     */
    @Test
    @Timeout(180)
    public void testBasicCDC() throws ExecutionException, InterruptedException {
        /*
         * NOTE: this test requires SalesForce API >= than 37.0. Camel defaults to
         * API version 34.0.
         *
         * The reason is that on older versions of this API did not return the list
         * of supported extensions during the hand-shake (ie.:
         * ext={replay=true, payload.format=true}). This behavior causes the rcvMeta
         * handler on the CometDReplayExtension class in the salesforce component to
         * consider the replay extension as "not supported".
         *
         * Subsequently, when using the /meta/subscribe channel to subscribe to
         * account change events on /data/AccountChangeEvent, the replay ID is not
         * provided on the request message - and it is a required parameter. This
         * leads to a situation where the Salesforce API server returns a plain
         * HTTP error 500 without much details.
         */
        ConnectorPropertyFactory factory = CamelSalesforcePropertyFactory.basic()
                .withKafkaTopic(topicName)
                .withUserName(userName)
                .withPassword(password)
                .withClientId(clientId)
                .withClientSecret(clientSecret)
                .withRawPayload(true)
                .withReplayId(-2)
                .withApiVersion("37.0")
                .withTopicName("/data/AccountChangeEvent");
        runBasicTest(factory);
    }

    @Test
    @Timeout(180)
    public void testBasicCDCUsingUrl() throws ExecutionException, InterruptedException {
        ConnectorPropertyFactory factory = CamelSalesforcePropertyFactory.basic()
                .withKafkaTopic(topicName)
                .withUserName(userName)
                .withPassword(password)
                .withClientId(clientId)
                .withClientSecret(clientSecret)
                .withApiVersion("37.0")
                .withUrl("data/AccountChangeEvent")
                    .append("replayId", "-2")
                    .append("rawPayload", "true")
                    .buildUrl();
        runBasicTest(factory);
    }
}
| 9,115 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/sink/CamelSalesforcePropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.sink;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
/**
 * Builds the Kafka Connect configuration used by the Salesforce sink tests.
 *
 * NOTE(review): this class lives in the sink test package and wires up the
 * CamelSalesforceSinkConnector, yet it extends SourceConnectorPropertyFactory
 * — confirm this mix is intentional.
 */
public class CamelSalesforcePropertyFactory extends SourceConnectorPropertyFactory<CamelSalesforcePropertyFactory> {

    // Common prefix shared by all Salesforce component options.
    private static final String COMPONENT_PREFIX = "camel.component.salesforce.";

    public CamelSalesforcePropertyFactory withClientId(String value) {
        return setProperty(COMPONENT_PREFIX + "clientId", value);
    }

    public CamelSalesforcePropertyFactory withClientSecret(String value) {
        return setProperty(COMPONENT_PREFIX + "clientSecret", value);
    }

    public CamelSalesforcePropertyFactory withPassword(String value) {
        return setProperty(COMPONENT_PREFIX + "password", value);
    }

    public CamelSalesforcePropertyFactory withUserName(String value) {
        return setProperty(COMPONENT_PREFIX + "userName", value);
    }

    public CamelSalesforcePropertyFactory withLoginUrl(String value) {
        return setProperty(COMPONENT_PREFIX + "loginUrl", value);
    }

    public CamelSalesforcePropertyFactory withRawPayload(boolean value) {
        return setProperty(COMPONENT_PREFIX + "rawPayload", value);
    }

    public CamelSalesforcePropertyFactory withPackages(String value) {
        return setProperty(COMPONENT_PREFIX + "packages", value);
    }

    public CamelSalesforcePropertyFactory withApiVersion(String value) {
        return setProperty(COMPONENT_PREFIX + "apiVersion", value);
    }

    /** Sets the operation name in the sink endpoint path. */
    public CamelSalesforcePropertyFactory withOperationName(String value) {
        return setProperty("camel.sink.path.operationName", value);
    }

    /** Sets the Salesforce object type on the sink endpoint. */
    public CamelSalesforcePropertyFactory withSObjectName(String value) {
        return setProperty("camel.sink.endpoint.sObjectName", value);
    }

    /** Creates a factory pre-populated with the common connector settings. */
    public static CamelSalesforcePropertyFactory basic() {
        return new CamelSalesforcePropertyFactory()
                .withName("CamelSalesforceConnector")
                .withLoginUrl("https://login.salesforce.com")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.salesforce.CamelSalesforceSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter");
    }
}
| 9,116 |
0 | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce | Create_ds/camel-kafka-connector/tests/itests-salesforce/src/test/java/org/apache/camel/kafkaconnector/salesforce/sink/CamelSinkSalesforceITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.salesforce.sink;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.camel.kafkaconnector.salesforce.clients.SalesforceCliContainer;
import org.apache.camel.kafkaconnector.salesforce.clients.SfdxCommand;
import org.apache.camel.test.infra.common.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import static org.apache.camel.kafkaconnector.salesforce.clients.SalesforceCliContainer.verifyCommand;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/* This test is disabled by default because requires setup on Salesforce end.
Creating API keys:
https://help.salesforce.com/articleView?id=connected_app_create_api_integration.htm
You need to set the following system properties to run this test:
-Dit.test.salesforce.enable=true to enable the test
-Dit.test.salesforce.client.id=<client ID>
-Dit.test.salesforce.client.secret=<client secret>
-Dit.test.salesforce.password=<user password>
-Dit.test.salesforce.username=<user name>
-Dit.test.salesforce.sfdx.path=/path/to/sfdx
The it.test.salesforce.sfdx.path property should point to the directory containing the sfdx
CLI client configuration. This can be generated using the following steps:
1. Run the Salesforce CLI container:
docker run --rm --name salesforce-cli -it -v /path/to/sfdx:/root/.sfdx salesforce/salesforcedx
2. Within the container, use the following command to login:
sfdx force:auth:device:login -s -d -i <client ID>
3. Provide the client secret when requested and execute the steps requested by the CLI.
4. Verify that you are logged in correctly using the following command
sfdx force:auth:list
It should present an output like:
#### authenticated orgs
ALIAS USERNAME ORG ID INSTANCE URL OAUTH METHOD
───── ──────────────────── ────────────────── ─────────────────────────── ────────────
angusyoung@gmail.com SOME NUMERIC ID https://eu31.salesforce.com web
Note: after leaving the container you might need to adjust the permissions of the directory
containing the sfdx configuration files (/path/to/sfdx).
*/
@Testcontainers
@EnabledIfSystemProperty(named = "it.test.salesforce.enable", matches = "true")
public class CamelSinkSalesforceITCase extends AbstractKafkaTest {
private static final Logger LOG = LoggerFactory.getLogger(CamelSinkSalesforceITCase.class);
// Salesforce CLI (sfdx) container used to verify, through a separate channel,
// that records created via the connector actually reached Salesforce.
@Container
public final SalesforceCliContainer container = new SalesforceCliContainer();
// Credentials are supplied through system properties; see the setup notes above.
private final String clientId = System.getProperty("it.test.salesforce.client.id");
private final String clientSecret = System.getProperty("it.test.salesforce.client.secret");
private final String password = System.getProperty("it.test.salesforce.password");
private final String userName = System.getProperty("it.test.salesforce.username");
// Name of the Account record created by the test; randomized per run.
private String accountName;
// Set by waitForRecordCreation once the record is confirmed on Salesforce.
private boolean recordCreated;
private String topicName;
@Override
protected String[] getConnectorsInTest() {
return new String[] {"camel-salesforce-kafka-connector"};
}
@BeforeEach
public void setUp() {
accountName = "TestSinkAccount" + TestUtils.randomWithRange(1, 100);
topicName = getTopicForTest(this);
}
// Deletes the Account created by the test so runs do not accumulate records.
@AfterEach
public void tearDown() throws IOException, InterruptedException {
SfdxCommand sfdxCommand = SfdxCommand.forceDataRecordDelete()
.withArgument("-u", userName)
.withArgument("--sobjecttype", "Account")
.withArgument("--where", String.format("Name=%s", accountName));
LOG.debug("Deleting the test account {}", accountName);
ExecResult result = container.execCommand(sfdxCommand);
if (!verifyCommand(sfdxCommand, result)) {
fail("Unable to delete the test account on Salesforce");
}
accountName = null;
}
// Polled via TestUtils.waitFor: returns true once the Account is visible
// through the sfdx CLI. Sets the recordCreated flag as a side effect.
private boolean waitForRecordCreation() {
SfdxCommand sfdxCommand = SfdxCommand.forceDataRecordGet()
.withArgument("-u", userName)
.withArgument("--sobjecttype", "Account")
.withArgument("--where", String.format("Name=%s", accountName));
LOG.debug("Check if the test account {} was created on Salesforce", accountName);
try {
ExecResult result = container.execCommand(sfdxCommand);
if (verifyCommand(sfdxCommand, result)) {
recordCreated = true;
return true;
}
} catch (IOException e) {
LOG.warn("I/O exception while checking if the record was created: {}", e.getMessage(), e);
return false;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOG.warn("The thread was interrupted while waiting for the record creation");
return false;
}
return false;
}
// Starts the connector, then produces a single JSON Account payload to Kafka
// for the sink to push to Salesforce.
private void runTest(ConnectorPropertyFactory connectorPropertyFactory) throws ExecutionException, InterruptedException {
connectorPropertyFactory.log();
getKafkaConnectService().initializeConnectorBlocking(connectorPropertyFactory, 1);
KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
// Ideally we should use the DTOs, but they cause the source check to fail
String data = String.format("{\"attributes\":{\"referenceId\":null,\"type\":\"Account\",\"url\":null},"
+ "\"Description\":\"%s\",\"Name\":\"%s\"}", "Created during sink test", accountName);
LOG.info("Sending new account {}", data);
kafkaClient.produce(topicName, data);
}
// End-to-end check: send one Account through the sink connector, then poll
// Salesforce via the CLI until the record shows up.
@Test
@Timeout(180)
public void testBasicProduce() throws ExecutionException, InterruptedException {
ConnectorPropertyFactory factory = CamelSalesforcePropertyFactory.basic()
.withKafkaTopic(topicName)
.withUserName(userName)
.withPassword(password)
.withClientId(clientId)
.withClientSecret(clientSecret)
.withRawPayload(true)
.withPackages("org.apache.camel.salesforce.dto")
.withSObjectName("Account")
.withOperationName("createSObject");
runTest(factory);
TestUtils.waitFor(this::waitForRecordCreation);
assertTrue(recordCreated, "The record was not created");
}
}
| 9,117 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob/sink/CamelSinkAzureStorageBlobPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.blob.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the Kafka Connect configuration used by the Azure Storage Blob sink
 * tests, based on the azure-storage-blob-sink kamelet.
 */
public class CamelSinkAzureStorageBlobPropertyFactory extends SinkConnectorPropertyFactory<CamelSinkAzureStorageBlobPropertyFactory> {

    // All kamelet options share this property prefix.
    private static final String KAMELET_PREFIX = "camel.kamelet.azure-storage-blob-sink.";

    public CamelSinkAzureStorageBlobPropertyFactory withAccountName(String value) {
        return setProperty(KAMELET_PREFIX + "accountName", value);
    }

    public CamelSinkAzureStorageBlobPropertyFactory withContainerName(String value) {
        return setProperty(KAMELET_PREFIX + "containerName", value);
    }

    public CamelSinkAzureStorageBlobPropertyFactory withAccessKey(String value) {
        return setProperty(KAMELET_PREFIX + "accessKey", value);
    }

    public CamelSinkAzureStorageBlobPropertyFactory withBlobName(String value) {
        return setProperty(KAMELET_PREFIX + "blobName", value);
    }

    /** Points the azure-storage-blob component at a custom configuration class. */
    public CamelSinkAzureStorageBlobPropertyFactory withConfiguration(String configurationClass) {
        return setProperty("camel.component.azure-storage-blob.configuration", classRef(configurationClass));
    }

    public CamelSinkAzureStorageBlobPropertyFactory withOperation(String value) {
        return setProperty(KAMELET_PREFIX + "operation", value);
    }

    /** Creates a factory pre-populated with the common connector settings. */
    public static CamelSinkAzureStorageBlobPropertyFactory basic() {
        return new CamelSinkAzureStorageBlobPropertyFactory()
                .withTasksMax(1)
                .withName("CamelAzurestorageblobSinkConnector")
                .withConnectorClass("org.apache.camel.kafkaconnector.azurestorageblobsink.CamelAzurestorageblobsinkSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets");
    }
}
| 9,118 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob/sink/CamelSinkAzureStorageBlobITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.blob.sink;
import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import com.azure.storage.blob.BlobClient;
import com.azure.storage.blob.BlobContainerClient;
import com.azure.storage.blob.BlobServiceClient;
import com.azure.storage.blob.models.BlobItem;
import org.apache.camel.kafkaconnector.CamelSinkTask;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.common.test.StringMessageProducer;
import org.apache.camel.test.infra.azure.common.AzureCredentialsHolder;
import org.apache.camel.test.infra.azure.common.services.AzureService;
import org.apache.camel.test.infra.azure.storage.blob.clients.AzureStorageBlobClientUtils;
import org.apache.camel.test.infra.azure.storage.blob.services.AzureStorageBlobServiceFactory;
import org.apache.camel.test.infra.common.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkAzureStorageBlobITCase extends CamelSinkTestSupport {
    @RegisterExtension
    public static AzureService service = AzureStorageBlobServiceFactory.createService();

    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkAzureStorageBlobITCase.class);

    private BlobServiceClient client;
    private BlobContainerClient blobContainerClient;
    private String blobContainerName;
    // Maps each blob name sent in a Kafka message header to the payload sent
    // with it, so the consumer side can verify content per blob.
    private Map<String, String> sentData = new HashMap<>();
    private String topicName;
    // Number of messages produced and, consequently, blobs expected.
    private int expect = 10;
    private int received;

    /**
     * Producer that sets the "file" Camel header on every message so the sink
     * creates one blob per Kafka record, and records what was sent in
     * {@link #sentData} for later verification.
     */
    private class CustomProducer extends StringMessageProducer {
        public CustomProducer(String bootstrapServer, String topicName, int count) {
            super(bootstrapServer, topicName, count);
        }

        @Override
        public String testMessageContent(int current) {
            return "test " + current + " data";
        }

        @Override
        public Map<String, String> messageHeaders(String text, int current) {
            Map<String, String> messageParameters = new HashMap<>();
            String sentFile = "test " + current;
            sentData.put(sentFile, testMessageContent(current));
            // The CamelHeader prefix is stripped by the sink task, leaving "file".
            messageParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "file", sentFile);
            return messageParameters;
        }
    }

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-azure-storage-blob-sink-kafka-connector"};
    }

    @BeforeEach
    public void setUpBlob() {
        topicName = getTopicForTest(this);
        client = AzureStorageBlobClientUtils.getClient();
        blobContainerName = "test-" + TestUtils.randomWithRange(1, 100);
        blobContainerClient = client.createBlobContainer(blobContainerName);
    }

    @AfterEach
    public void tearDown() {
        if (client != null) {
            client.deleteBlobContainer(blobContainerName);
        }
    }

    @Override
    protected void consumeMessages(CountDownLatch latch) {
        try {
            consume();
        } finally {
            latch.countDown();
        }
    }

    @Override
    protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
        if (latch.await(240, TimeUnit.SECONDS)) {
            assertEquals(expect, received,
                    "Didn't process the expected amount of messages: " + received + " != " + expect);
        } else {
            fail("Failed to receive the messages within the specified time");
        }
    }

    // True once the container exists and holds at least the expected number of blobs.
    private boolean canConsume() {
        return blobContainerClient.exists() && blobContainerClient.listBlobs().stream().count() >= expect;
    }

    /**
     * Waits until all blobs have been uploaded, then downloads each one and
     * checks its content against what was recorded at produce time.
     */
    private void consume() {
        LOG.debug("Created the consumer ...");
        TestUtils.waitFor(this::canConsume);
        int retries = 10;
        do {
            received = 0;
            for (BlobItem blobContainerItem : blobContainerClient.listBlobs()) {
                String receivedFile = blobContainerItem.getName();
                BlobClient blobClient = blobContainerClient.getBlobClient(receivedFile);
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                blobClient.download(outputStream);
                String contentFile = outputStream.toString();
                LOG.info("Received: '{}' with content: '{}' expected content: '{}'", receivedFile, contentFile, sentData.get(receivedFile));
                assertEquals(sentData.get(receivedFile), contentFile, "Did not receive the same message that was sent");
                received++;
            }
            retries--;
            // Fix: compare against the configured 'expect' field instead of the
            // magic number 10, so the loop stays consistent if 'expect' changes.
        } while (received != expect && retries > 0);
    }

    @Test
    @Timeout(90)
    public void testBasicSendReceive() throws Exception {
        AzureCredentialsHolder azureCredentialsHolder = service.azureCredentials();
        ConnectorPropertyFactory factory = CamelSinkAzureStorageBlobPropertyFactory
                .basic()
                .withTopics(topicName)
                .withConfiguration(TestBlobConfiguration.class.getName())
                .withAccessKey(azureCredentialsHolder.accountKey())
                .withAccountName(azureCredentialsHolder.accountName())
                .withContainerName(blobContainerName)
                .withOperation("uploadBlockBlob");
        runTest(factory, new CustomProducer(getKafkaService().getBootstrapServers(), topicName, expect));
    }
}
| 9,119 |
0 | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob | Create_ds/camel-kafka-connector/tests/itests-azure-storage-blob/src/test/java/org/apache/camel/kafkaconnector/azure/storage/blob/sink/TestBlobConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.azure.storage.blob.sink;
import com.azure.storage.blob.BlobServiceClient;
import org.apache.camel.component.azure.storage.blob.BlobConfiguration;
import org.apache.camel.test.infra.azure.storage.blob.clients.AzureStorageBlobClientUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Blob configuration that lazily supplies the test-infra service client
 * instead of one built from connection parameters.
 *
 * NOTE(review): the lazy initialization is not synchronized; no locking is
 * visible here, so this assumes single-threaded access from the tests.
 */
public class TestBlobConfiguration extends BlobConfiguration {
    private static final Logger LOG = LoggerFactory.getLogger(TestBlobConfiguration.class);
    private BlobServiceClient serviceClient;

    @Override
    public BlobServiceClient getServiceClient() {
        if (serviceClient != null) {
            return serviceClient;
        }
        LOG.info("Creating a custom BlobServiceClient");
        serviceClient = AzureStorageBlobClientUtils.getClient();
        return serviceClient;
    }
}
| 9,120 |
0 | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc/sink/CamelSinkJDBCITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.jdbc.sink;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.CamelSinkTask;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.common.test.StringMessageProducer;
import org.apache.camel.kafkaconnector.jdbc.client.DatabaseClient;
import org.apache.camel.kafkaconnector.jdbc.services.TestDataSource;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.jdbc.services.JDBCService;
import org.apache.camel.test.infra.jdbc.services.JDBCServiceBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.containers.PostgreSQLContainer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkJDBCITCase extends CamelSinkTestSupport {
@RegisterExtension
static JDBCService jdbcService;
private static final Logger LOG = LoggerFactory.getLogger(CamelSinkJDBCITCase.class);
private DatabaseClient client;
private String topicName;
// Number of records produced to Kafka and expected in the database.
private final int expect = 10;
// Incremented by verifyData for every row read back from the test table.
private int received;
// Builds the PostgreSQL container and hands it to the JDBC service, which
// manages its lifecycle; schema.sql creates the 'test' table.
static {
final String postgresImage = "postgres:13.0";
JdbcDatabaseContainer<?> container = new PostgreSQLContainer<>(postgresImage)
.withDatabaseName("camel")
.withUsername("ckc")
.withPassword("ckcDevel123")
.withInitScript("schema.sql")
.withStartupTimeoutSeconds(60);
// Let the JDBC Service handle container lifecycle
jdbcService = JDBCServiceBuilder.newBuilder()
.withContainer(container)
.build();
}
// Producer that sends the insert statement as the record body and the named
// query parameters (TestName, TestData) as Camel headers.
private static class CustomProducer extends StringMessageProducer {
public CustomProducer(String bootstrapServer, String topicName, int count) {
super(bootstrapServer, topicName, count);
}
@Override
public String testMessageContent(int current) {
return "insert into test(test_name, test_data) values(:?TestName, :?TestData)";
}
@Override
public Map<String, String> messageHeaders(String text, int current) {
Map<String, String> jdbcParameters = new HashMap<>();
// The prefix 'CamelHeader' is removed by the SinkTask
jdbcParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestName", "SomeName" + TestUtils.randomWithRange(0, 100));
jdbcParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestData", "test data " + current);
return jdbcParameters;
}
}
@BeforeEach
public void setUp() throws SQLException {
topicName = getTopicForTest(this);
client = new DatabaseClient(jdbcService.jdbcUrl());
received = 0;
}
@Override
protected String[] getConnectorsInTest() {
return new String[] {"camel-jdbc-kafka-connector"};
}
// Blocks until the database shows at least 'expect' rows, then releases the
// latch so verifyMessages can run.
@Override
protected void consumeMessages(CountDownLatch latch) {
try {
LOG.debug("Waiting for indices");
TestUtils.waitFor(() -> {
try {
return client.hasAtLeastRecords("test", expect);
} catch (SQLException e) {
LOG.warn("Failed to read the test table: {}", e.getMessage(), e);
return false;
}
});
} finally {
latch.countDown();
}
}
// Reads every row from the test table and checks both the row count and the
// per-row content (via verifyData).
@Override
protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
if (latch.await(20, TimeUnit.SECONDS)) {
try {
client.runQuery("select * from test", this::verifyData);
assertEquals(expect, received, "Did not receive the same amount of messages sent");
} catch (SQLException e) {
fail(e.getMessage());
}
} else {
fail(String.format("Failed to receive the messages within the specified time: received %d of %d",
received, expect));
}
}
// Row callback: counts the record and checks both columns carry the values
// produced by CustomProducer.
private void verifyData(ResultSet rs) {
try {
received++;
String testName = rs.getString("test_name");
String testData = rs.getString("test_data");
assertTrue(testName.startsWith("SomeName"), String.format("Unexpected test name %s", testName));
assertTrue(testData.startsWith("test data"), String.format("Unexpected test data %s", testData));
} catch (SQLException e) {
LOG.error("Unable to fetch record from result set: {}", e.getMessage(), e);
fail(String.format("Unable to fetch record from result set: %s", e.getMessage()));
}
}
@Timeout(30)
@Test
public void testDBFetch() throws Exception {
CamelJDBCPropertyFactory factory = CamelJDBCPropertyFactory.basic()
.withDataSource(CamelJDBCPropertyFactory.classRef(TestDataSource.class.getName()))
.withDataSourceName("someName")
.withUseHeaderAsParameters(true)
.withTopics(topicName);
runTest(factory, new CustomProducer(getKafkaService().getBootstrapServers(), topicName, expect));
}
}
| 9,121 |
0 | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc/sink/CamelJDBCPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.jdbc.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the Kafka Connect configuration used by the JDBC sink tests.
 */
public final class CamelJDBCPropertyFactory extends SinkConnectorPropertyFactory<CamelJDBCPropertyFactory> {

    private CamelJDBCPropertyFactory() {
        // instances are created through basic()
    }

    /** Creates a factory pre-populated with the common connector settings. */
    public static CamelJDBCPropertyFactory basic() {
        return new CamelJDBCPropertyFactory()
                .withName("CamelJDBCSinkConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.jdbc.CamelJdbcSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter");
    }

    /** Sets the DataSource reference used by the JDBC component. */
    public CamelJDBCPropertyFactory withDataSource(String value) {
        return setProperty("camel.component.jdbc.dataSource", value);
    }

    /** Sets the data source name that appears in the sink endpoint path. */
    public CamelJDBCPropertyFactory withDataSourceName(String value) {
        return setProperty("camel.sink.path.dataSourceName", value);
    }

    /** Enables binding of Camel message headers as named query parameters. */
    public CamelJDBCPropertyFactory withUseHeaderAsParameters(boolean value) {
        return setProperty("camel.sink.endpoint.useHeadersAsParameters", value);
    }
}
| 9,122 |
0 | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc/sink/CamelSinkJDBCNoDataSourceITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.jdbc.sink;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.CamelSinkTask;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.camel.kafkaconnector.jdbc.client.DatabaseClient;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.jdbc.services.JDBCService;
import org.apache.camel.test.infra.jdbc.services.JDBCServiceBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.containers.PostgreSQLContainer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkJDBCNoDataSourceITCase extends AbstractKafkaTest {
@RegisterExtension
static JDBCService jdbcService;
private static final Logger LOG = LoggerFactory.getLogger(CamelSinkJDBCNoDataSourceITCase.class);
// Number of records produced to Kafka and expected in the database.
private final int expect = 10;
// Incremented by verifyData for every row read back from the test table.
private int received;
private String topicName;
// Builds the PostgreSQL container and hands it to the JDBC service, which
// manages its lifecycle; schema.sql creates the 'test' table.
static {
final String postgresImage = "postgres:13.0";
JdbcDatabaseContainer<?> container = new PostgreSQLContainer<>(postgresImage)
.withDatabaseName("camel")
.withUsername("ckc")
.withPassword("ckcDevel123")
.withInitScript("schema.sql")
.withStartupTimeoutSeconds(60);
// Let the JDBC Service handle container lifecycle
jdbcService = JDBCServiceBuilder.newBuilder()
.withContainer(container)
.build();
}
@BeforeEach
void setUp() {
// Use a per-test topic name so runs do not interfere with each other.
topicName = getTopicForTest(this);
}
// Connector artifact exercised by this test; resolved by the test harness.
@Override
protected String[] getConnectorsInTest() {
return new String[] {"camel-jdbc-kafka-connector"};
}
/**
 * Produces {@code expect} Kafka records carrying the insert statement as the
 * body and the named query parameters as Camel headers, then releases the
 * latch so the caller can continue.
 */
private void putRecords(CountDownLatch latch) {
    KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
    String body = "insert into test(test_name, test_data) values(:?TestName, :?TestData)";
    try {
        for (int i = 0; i < expect; i++) {
            Map<String, String> jdbcParameters = new HashMap<>();
            // The prefix 'CamelHeader' is removed by the SinkTask
            jdbcParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestName", "SomeName" + TestUtils.randomWithRange(0, 100));
            jdbcParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestData", "test data " + i);
            try {
                kafkaClient.produce(topicName, body, jdbcParameters);
            } catch (ExecutionException e) {
                LOG.error("Unable to produce messages: {}", e.getMessage(), e);
            } catch (InterruptedException e) {
                // Fix: restore the interrupt flag before bailing out, matching
                // the InterruptedException handling used elsewhere in these tests.
                Thread.currentThread().interrupt();
                break;
            }
        }
    } finally {
        latch.countDown();
    }
}
private void verifyData(ResultSet rs) {
try {
received++;
String testName = rs.getString("test_name");
String testData = rs.getString("test_data");
assertTrue(testName.startsWith("SomeName"), String.format("Unexpected test name %s", testName));
assertTrue(testData.startsWith("test data"), String.format("Unexpected test data %s", testData));
} catch (SQLException e) {
LOG.error("Unable to fetch record from result set: {}", e.getMessage(), e);
fail(String.format("Unable to fetch record from result set: %s", e.getMessage()));
}
}
public void runTest(ConnectorPropertyFactory propertyFactory) throws ExecutionException, InterruptedException {
propertyFactory.log();
getKafkaConnectService().initializeConnectorBlocking(propertyFactory, 1);
CountDownLatch latch = new CountDownLatch(1);
ExecutorService service = Executors.newCachedThreadPool();
service.submit(() -> putRecords(latch));
if (!latch.await(30, TimeUnit.SECONDS)) {
fail("Timed out wait for data to be added to the Kafka cluster");
}
LOG.debug("Waiting for indices");
try {
DatabaseClient client = new DatabaseClient(jdbcService.jdbcUrl());
TestUtils.waitFor(() -> {
try {
return client.hasAtLeastRecords("test", expect);
} catch (SQLException e) {
LOG.warn("Failed to read the test table: {}", e.getMessage(), e);
return false;
}
});
client.runQuery("select * from test", this::verifyData);
} catch (SQLException e) {
LOG.error("Unable to execute the SQL query: {}", e.getMessage(), e);
fail(e.getMessage());
}
assertEquals(expect, received, "Did not receive the same amount of messages sent");
LOG.debug("Created the consumer ... About to receive messages");
}
@Test
public void testDBFetch() throws ExecutionException, InterruptedException {
CamelJDBCPropertyFactory factory = CamelJDBCPropertyFactory.basic()
.withComponentConfig("camel.component.jdbc.dataSource", "#class:org.postgresql.ds.PGSimpleDataSource")
.withEntry("user", "ckc")
.withEntry("password", "ckcDevel123")
.withEntry("url", jdbcService.jdbcUrl())
.end()
.withDataSourceName("anotherName")
.withUseHeaderAsParameters(true)
.withTopics(topicName);
runTest(factory);
}
}
| 9,123 |
0 | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc/client/DatabaseClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.jdbc.client;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.function.Consumer;

import org.postgresql.ds.PGSimpleDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DatabaseClient implements AutoCloseable {
    private static final Logger LOG = LoggerFactory.getLogger(DatabaseClient.class);

    private final PGSimpleDataSource datasource;
    private final Connection connection;

    /**
     * Opens a new database connection to the given JDBC URL using the shared
     * test credentials.
     *
     * @param url the PostgreSQL JDBC URL
     * @throws SQLException if the connection cannot be established
     */
    public DatabaseClient(String url) throws SQLException {
        LOG.info("Opening a new database connection using the URL {}", url);

        datasource = new PGSimpleDataSource();
        datasource.setURL(url);
        datasource.setUser("ckc");
        datasource.setPassword("ckcDevel123");

        connection = datasource.getConnection();
    }

    /**
     * Runs the given query and hands every row of the result set to the consumer.
     */
    public void runQuery(String query, Consumer<ResultSet> consumer) throws SQLException {
        // Fixed: close the statement as well as the result set — previously only
        // the ResultSet was closed, leaking one PreparedStatement per call.
        try (PreparedStatement stmt = connection.prepareStatement(query);
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                consumer.accept(rs);
            }
        }
    }

    /**
     * Returns the number of rows in the given table (0 if the count query
     * yields no row).
     */
    public int count(String table) throws SQLException {
        String query = String.format("select count(*) as count from %s", table);

        try (PreparedStatement stmt = connection.prepareStatement(query);
             ResultSet rs = stmt.executeQuery()) {
            if (rs.next()) {
                return rs.getInt("count");
            }
        }
        return 0;
    }

    /**
     * Checks whether the table holds at least the expected number of records.
     */
    public boolean hasAtLeastRecords(String table, int expected) throws SQLException {
        return count(table) >= expected;
    }

    /**
     * Closes the underlying connection. New, backward-compatible addition:
     * enables try-with-resources; existing callers are unaffected.
     */
    @Override
    public void close() throws SQLException {
        connection.close();
    }
}
| 9,124 |
0 | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc | Create_ds/camel-kafka-connector/tests/itests-jdbc/src/test/java/org/apache/camel/kafkaconnector/jdbc/services/TestDataSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.jdbc.services;
import org.apache.camel.test.infra.jdbc.common.JDBCProperties;
import org.postgresql.ds.PGSimpleDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestDataSource extends PGSimpleDataSource {

    private static final Logger LOG = LoggerFactory.getLogger(TestDataSource.class);

    /** JDBC URL published by the test infrastructure, captured at class-load time. */
    private static final String URL = System.getProperty(JDBCProperties.JDBC_CONNECTION_URL);

    /**
     * Creates a data source pre-configured with the connection URL from the
     * {@code JDBCProperties.JDBC_CONNECTION_URL} system property and the shared
     * test credentials.
     */
    public TestDataSource() {
        setUrl(URL);
        setUser("ckc");
        setPassword("ckcDevel123");
    }
}
| 9,125 |
0 | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql/source/CamelSqlPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.sql.source;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the configuration for the kamelet-based PostgreSQL source connector
 * under test. Each {@code with*} method maps directly onto a
 * {@code camel.kamelet.postgresql-source.*} property.
 */
public final class CamelSqlPropertyFactory extends SinkConnectorPropertyFactory<CamelSqlPropertyFactory> {
// NOTE(review): this source-side factory extends SinkConnectorPropertyFactory,
// whereas the CXF-RS source factory extends SourceConnectorPropertyFactory.
// Presumably intentional for kamelet connectors (e.g. to inherit withTopics) —
// confirm before changing.
private CamelSqlPropertyFactory() {
}
// Database server host name.
public CamelSqlPropertyFactory withServerName(String value) {
return setProperty("camel.kamelet.postgresql-source.serverName", value);
}
// Database user name.
public CamelSqlPropertyFactory withUsername(String value) {
return setProperty("camel.kamelet.postgresql-source.username", value);
}
// Database password.
public CamelSqlPropertyFactory withPassword(String value) {
return setProperty("camel.kamelet.postgresql-source.password", value);
}
// SQL query periodically executed by the source kamelet.
public CamelSqlPropertyFactory withQuery(String value) {
return setProperty("camel.kamelet.postgresql-source.query", value);
}
// Database (catalog) name.
public CamelSqlPropertyFactory withDatabaseName(String value) {
return setProperty("camel.kamelet.postgresql-source.databaseName", value);
}
// Database server port.
public CamelSqlPropertyFactory withPort(String port) {
return setProperty("camel.kamelet.postgresql-source.serverPort", port);
}
// Pre-configures connector class, converters and kamelet lookup location.
public static CamelSqlPropertyFactory basic() {
return new CamelSqlPropertyFactory()
.withName("CamelSQLSourceConnector")
.withTasksMax(1)
.withConnectorClass("org.apache.camel.kafkaconnector.postgresqlsource.CamelPostgresqlsourceSourceConnector")
.withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
.withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
.setProperty("camel.component.kamelet.location", "kamelets")
.setProperty("camel.component.properties.environment-variable-mode", "1");
}
}
| 9,126 |
0 | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql/source/CamelSourceSQLITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.sql.source;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.test.CamelSourceTestSupport;
import org.apache.camel.kafkaconnector.common.test.TestMessageConsumer;
import org.apache.camel.test.infra.jdbc.services.JDBCService;
import org.apache.camel.test.infra.jdbc.services.JDBCServiceBuilder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.containers.PostgreSQLContainer;
import static org.junit.jupiter.api.Assertions.assertEquals;
//@Disabled("Database connection fails with connection refused.")
public class CamelSourceSQLITCase extends CamelSourceTestSupport {
    private static final String DATABASE_NAME = "camel";
    private static final String USERNAME = "ckc";
    private static final String PASSWORD = "ckcDevel123";

    /** PostgreSQL test service backed by a Testcontainers instance. */
    @RegisterExtension
    public JDBCService sqlService;

    /** Number of rows the init script inserts and the connector should emit. */
    private final int expect = 1;

    private String hostname;
    private String port;

    public CamelSourceSQLITCase() {
        JdbcDatabaseContainer<?> container = new PostgreSQLContainer<>("postgres:13.0")
                .withDatabaseName(DATABASE_NAME)
                .withUsername(USERNAME)
                .withPassword(PASSWORD)
                .withInitScript("schema.sql")
                .withStartupTimeoutSeconds(60);

        sqlService = JDBCServiceBuilder.newBuilder()
                .withContainer(container)
                .build();

        // Start eagerly so the mapped host/port are known before the test runs.
        sqlService.initialize();

        // NOTE(review): getContainerIpAddress() is deprecated in newer
        // Testcontainers releases in favour of getHost() — confirm the version
        // in use before switching.
        hostname = container.getContainerIpAddress();
        port = String.valueOf(container.getMappedPort(PostgreSQLContainer.POSTGRESQL_PORT));
    }

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-postgresql-source-kafka-connector"};
    }

    @Override
    protected void produceTestData() {
        // NO-OP, already done via init script in the service initialization
    }

    @Override
    protected void verifyMessages(TestMessageConsumer<?> consumer) {
        int received = consumer.consumedMessages().size();
        // Fixed: JUnit 5's assertEquals takes (expected, actual); the original
        // call had the arguments swapped, yielding a misleading failure message.
        assertEquals(expect, received, "Didn't process the expected amount of messages");
    }

    @Timeout(30)
    @Test
    public void testDBFetch() throws ExecutionException, InterruptedException {
        String topicName = getTopicForTest(this);

        CamelSqlPropertyFactory factory = CamelSqlPropertyFactory
                .basic()
                .withDatabaseName(DATABASE_NAME)
                .withServerName(hostname)
                .withPort(port)
                .withUsername(USERNAME)
                .withPassword(PASSWORD)
                .withQuery("select * from test")
                .withTopics(topicName);

        runTest(factory, topicName, expect);
    }
}
| 9,127 |
0 | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql/sink/CamelSqlPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.sql.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Builds the configuration for the kamelet-based PostgreSQL sink connector
 * under test. Each {@code with*} method maps directly onto a
 * {@code camel.kamelet.postgresql-sink.*} property.
 */
public final class CamelSqlPropertyFactory extends SinkConnectorPropertyFactory<CamelSqlPropertyFactory> {

    private CamelSqlPropertyFactory() {
    }

    /** Database server host name. */
    public CamelSqlPropertyFactory withServerName(String value) {
        return setProperty("camel.kamelet.postgresql-sink.serverName", value);
    }

    /** Database user name. */
    public CamelSqlPropertyFactory withUsername(String value) {
        return setProperty("camel.kamelet.postgresql-sink.username", value);
    }

    /** Database password. */
    public CamelSqlPropertyFactory withPassword(String value) {
        return setProperty("camel.kamelet.postgresql-sink.password", value);
    }

    /** SQL statement executed for each incoming record. */
    public CamelSqlPropertyFactory withQuery(String value) {
        return setProperty("camel.kamelet.postgresql-sink.query", value);
    }

    /** Database (catalog) name. */
    public CamelSqlPropertyFactory withDatabaseName(String value) {
        return setProperty("camel.kamelet.postgresql-sink.databaseName", value);
    }

    /**
     * Database server port.
     * Fixed: the postgresql-sink kamelet defines this property as
     * {@code serverPort} (matching the source-side factory); the previous key
     * {@code port} is not a kamelet property and would be ignored, leaving the
     * kamelet on its default port.
     */
    public CamelSqlPropertyFactory withPort(String port) {
        return setProperty("camel.kamelet.postgresql-sink.serverPort", port);
    }

    /** Pre-configures connector class, converters and kamelet lookup location. */
    public static CamelSqlPropertyFactory basic() {
        return new CamelSqlPropertyFactory()
                .withName("CamelSQLSinkConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.postgresqlsink.CamelPostgresqlsinkSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets")
                .setProperty("camel.component.properties.environment-variable-mode", "1");
    }
}
| 9,128 |
0 | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql/sink/CamelSinkSQLITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.sql.sink;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.CamelSinkTask;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.common.test.StringMessageProducer;
import org.apache.camel.kafkaconnector.sql.client.DatabaseClient;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.jdbc.services.JDBCService;
import org.apache.camel.test.infra.jdbc.services.JDBCServiceBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.JdbcDatabaseContainer;
import org.testcontainers.containers.PostgreSQLContainer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Verifies that records produced to Kafka are written by the PostgreSQL sink
 * connector into the {@code test} table.
 */
public class CamelSinkSQLITCase extends CamelSinkTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(CamelSinkSQLITCase.class);
private static final String DATABASE_NAME = "camel";
private static final String USERNAME = "ckc";
private static final String PASSWORD = "ckcDevel123";
// PostgreSQL test service, registered as a JUnit 5 extension.
@RegisterExtension
public JDBCService sqlService;
// Client used to inspect the database contents during verification.
private DatabaseClient client;
private String topicName;
// Number of Kafka records produced and database rows expected.
private final int expect = 1;
// Rows seen so far while walking the verification result set.
private int received;
private String hostname;
private String port;
// Produces fixed-content messages whose SQL parameters travel as Kafka headers.
private static class CustomProducer extends StringMessageProducer {
public CustomProducer(String bootstrapServer, String topicName, int count) {
super(bootstrapServer, topicName, count);
}
@Override
public String testMessageContent(int current) {
return "test";
}
@Override
public Map<String, String> messageHeaders(String text, int current) {
Map<String, String> sqlParameters = new HashMap<>();
// The prefix 'CamelHeader' is removed by the SinkTask
sqlParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestName", "SomeName" + TestUtils.randomWithRange(0, 100));
sqlParameters.put(CamelSinkTask.HEADER_CAMEL_PREFIX + "TestData", "test data " + current);
return sqlParameters;
}
}
// Starts the PostgreSQL container eagerly so host/port are known up front.
public CamelSinkSQLITCase() {
JdbcDatabaseContainer<?> container = new PostgreSQLContainer<>("postgres:13.0")
.withDatabaseName(DATABASE_NAME)
.withUsername(USERNAME)
.withPassword(PASSWORD)
.withInitScript("schema.sql")
.withStartupTimeoutSeconds(60);
sqlService = JDBCServiceBuilder.newBuilder()
.withContainer(container)
.build();
sqlService.initialize();
// NOTE(review): getContainerIpAddress() is deprecated in newer Testcontainers
// releases in favour of getHost() — confirm the version in use before changing.
hostname = container.getContainerIpAddress();
port = String.valueOf(container.getMappedPort(PostgreSQLContainer.POSTGRESQL_PORT));
}
@Override
protected String[] getConnectorsInTest() {
return new String[] {"camel-postgresql-sink-kafka-connector"};
}
@BeforeEach
public void setUp() throws SQLException {
topicName = getTopicForTest(this);
client = new DatabaseClient(sqlService.jdbcUrl());
}
// Blocks until the sink has written at least 'expect' rows, then releases the latch.
@Override
protected void consumeMessages(CountDownLatch latch) {
try {
TestUtils.waitFor(() -> {
try {
return client.hasAtLeastRecords("test", expect);
} catch (SQLException e) {
LOG.warn("Failed to read the test table: {}", e.getMessage(), e);
return false;
}
});
} finally {
// Always release the latch so verifyMessages() cannot wait forever.
latch.countDown();
}
}
// Once the latch is released, re-reads the table and checks every row.
@Override
protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
if (latch.await(25, TimeUnit.SECONDS)) {
try {
client.runQuery("select * from test", this::verifyData);
assertEquals(expect, received, "Did not receive as much data as expected");
} catch (SQLException e) {
fail(e.getMessage());
}
} else {
fail("Failed to receive the messages within the specified time");
}
}
// Row-level check: counts the row and validates the generated column values.
private void verifyData(ResultSet rs) {
try {
received++;
String testName = rs.getString("test_name");
String testData = rs.getString("test_data");
assertTrue(testName.startsWith("SomeName"), String.format("Unexpected test name %s", testName));
assertTrue(testData.startsWith("test data"), String.format("Unexpected test data %s", testData));
} catch (SQLException e) {
LOG.error("Unable to fetch record from result set: {}", e.getMessage(), e);
fail(String.format("Unable to fetch record from result set: %s", e.getMessage()));
}
}
@Timeout(60)
@Test
public void testDBFetch() throws Exception {
CamelSqlPropertyFactory factory = CamelSqlPropertyFactory
.basic()
.withDatabaseName(DATABASE_NAME)
.withServerName(hostname)
.withPort(port)
.withUsername(USERNAME)
.withPassword(PASSWORD)
.withQuery("insert into test(test_name, test_data) values(:#TestName,:#TestData)")
.withTopics(topicName);
runTest(factory, new CustomProducer(getKafkaService().getBootstrapServers(), topicName, expect));
}
}
| 9,129 |
0 | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql | Create_ds/camel-kafka-connector/tests/itests-sql/src/test/java/org/apache/camel/kafkaconnector/sql/client/DatabaseClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.sql.client;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.function.Consumer;

import org.postgresql.ds.PGSimpleDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DatabaseClient implements AutoCloseable {
    private static final Logger LOG = LoggerFactory.getLogger(DatabaseClient.class);

    private final PGSimpleDataSource datasource;
    private final Connection connection;

    /**
     * Opens a new database connection to the given JDBC URL using the shared
     * test credentials.
     *
     * @param url the PostgreSQL JDBC URL
     * @throws SQLException if the connection cannot be established
     */
    public DatabaseClient(String url) throws SQLException {
        LOG.info("Opening a new database connection using the URL {}", url);

        datasource = new PGSimpleDataSource();
        datasource.setURL(url);
        datasource.setUser("ckc");
        datasource.setPassword("ckcDevel123");

        connection = datasource.getConnection();
    }

    /**
     * Runs the given query and hands every row of the result set to the consumer.
     */
    public void runQuery(String query, Consumer<ResultSet> consumer) throws SQLException {
        // Fixed: close the statement as well as the result set — previously only
        // the ResultSet was closed, leaking one PreparedStatement per call.
        try (PreparedStatement stmt = connection.prepareStatement(query);
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                consumer.accept(rs);
            }
        }
    }

    /**
     * Returns the number of rows in the given table (0 if the count query
     * yields no row).
     */
    public int count(String table) throws SQLException {
        String query = String.format("select count(*) as count from %s", table);

        try (PreparedStatement stmt = connection.prepareStatement(query);
             ResultSet rs = stmt.executeQuery()) {
            if (rs.next()) {
                return rs.getInt("count");
            }
        }
        return 0;
    }

    /**
     * Checks whether the table holds at least the expected number of records.
     */
    public boolean hasAtLeastRecords(String table, int expected) throws SQLException {
        return count(table) >= expected;
    }

    /**
     * Closes the underlying connection. New, backward-compatible addition:
     * enables try-with-resources; existing callers are unaffected.
     */
    @Override
    public void close() throws SQLException {
        connection.close();
    }
}
| 9,130 |
0 | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs/Customer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.cxfrs;
import jakarta.xml.bind.annotation.XmlRootElement;
import org.apache.camel.util.ObjectHelper;
/**
*
* @version
*/
@XmlRootElement(name = "Customer")
public class Customer {

    private long id;
    private String name;

    /** No-arg constructor required by JAXB. */
    public Customer() {
    }

    /** Convenience constructor initialising both properties via the setters. */
    public Customer(long id, String name) {
        setId(id);
        setName(name);
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public int hashCode() {
        // Same arithmetic as the classic 31-based hash, so values are unchanged.
        int hash = 1;
        hash = 31 * hash + (int) (id ^ (id >>> 32));
        hash = 31 * hash + (name == null ? 0 : name.hashCode());
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Customer)) {
            return false;
        }
        Customer that = (Customer) obj;
        return id == that.id && ObjectHelper.equal(name, that.name);
    }
}
| 9,131 |
0 | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs/CustomerServiceResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.cxfrs;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.PUT;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Response;
// START SNIPPET: example
// JAX-RS resource interface exposed by the CXF-RS test endpoint; the concrete
// behavior is supplied by the Camel route bound to this interface.
@Path("/customerservice/")
public interface CustomerServiceResource {
// GET /customerservice/customers/{id}/ — returns the JAXB-bound Customer.
@GET
@Path("/customers/{id}/")
Customer getCustomer(@PathParam("id") String id);
// PUT /customerservice/customers/ — updates a customer; only the status matters.
@PUT
@Path("/customers/")
Response updateCustomer(Customer customer);
// PUT /customerservice/{id} — generic invocation accepting and producing
// XML, plain text or JSON; the raw request body is passed as 'payload'.
@Path("/{id}")
@PUT()
@Consumes({ "application/xml", "text/plain",
"application/json" })
@Produces({ "application/xml", "text/plain",
"application/json" })
Object invoke(@PathParam("id") String id,
String payload);
}
// END SNIPPET: example
| 9,132 |
0 | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs/source/CamelSourceCXFRSPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.cxfrs.source;
import org.apache.camel.kafkaconnector.common.EndpointUrlBuilder;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
/**
* Creates the set of properties used by a Camel CXF Source Connector
*/
final class CamelSourceCXFRSPropertyFactory extends SourceConnectorPropertyFactory<CamelSourceCXFRSPropertyFactory> {
private CamelSourceCXFRSPropertyFactory() {
}
// Address (host:port/context) the CXF-RS consumer endpoint binds to.
public CamelSourceCXFRSPropertyFactory withAddress(String address) {
return setProperty("camel.source.path.address", address);
}
// Fully-qualified JAX-RS resource class(es) served by the endpoint.
public CamelSourceCXFRSPropertyFactory withResourceClass(String resourceClasses) {
return setProperty("camel.source.endpoint.resourceClasses", resourceClasses);
}
// URL advertised to clients (may differ from the bind address).
public CamelSourceCXFRSPropertyFactory withPublishedEndpointUrl(String publishedEndpointUrl) {
return setProperty("camel.source.endpoint.publishedEndpointUrl", publishedEndpointUrl);
}
// Pre-configures connector class and string converters for the CXF-RS source.
public static CamelSourceCXFRSPropertyFactory basic() {
return new CamelSourceCXFRSPropertyFactory()
.withName("CamelCXFRSSourceConnector")
.withTasksMax(1)
.withConnectorClass("org.apache.camel.kafkaconnector.cxfrs.CamelCxfrsSourceConnector")
.withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
.withValueConverterClass("org.apache.kafka.connect.storage.StringConverter");
}
// Wraps the given address in a cxfrs:// source URL builder.
public EndpointUrlBuilder<CamelSourceCXFRSPropertyFactory> withUrl(String cxfUrl) {
String url = String.format("cxfrs://%s", cxfUrl);
return new EndpointUrlBuilder<>(this::withSourceUrl, url);
}
}
| 9,133 |
0 | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs/source/CamelSourceCXFRSITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.cxfrs.source;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSourceTestSupport;
import org.apache.camel.kafkaconnector.common.test.TestMessageConsumer;
import org.apache.camel.kafkaconnector.common.utils.NetworkUtils;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.ext.logging.LoggingInInterceptor;
import org.apache.cxf.ext.logging.LoggingOutInterceptor;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
/**
* A simple test case that checks whether the CXF RS Consumer Endpoint produces the expected number of
* messages
*/
public class CamelSourceCXFRSITCase extends CamelSourceTestSupport {
    protected static final String LOCALHOST = NetworkUtils.getHostname();
    protected static final int PORT = NetworkUtils.getFreePort(LOCALHOST, 20000, 30000);
    protected static final String CXT = PORT + "/CxfRsConsumerTest";
    protected static final String CXF_RS_ENDPOINT_ADDRESS = "http://" + LOCALHOST + ":" + CXT + "/rest";

    private static final Logger LOG = LoggerFactory.getLogger(CamelSourceCXFRSITCase.class);

    // Expected Kafka record values, in the order the three customer GET requests
    // in doTestGetCustomer are issued.
    private static final String[] receivedValue = {"[126]", "[123]", "[400]"};

    // Index of the next expected value; reset before each test in setUp().
    private int received;
    private final int expect = 3;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-cxfrs-kafka-connector"};
    }

    @BeforeEach
    public void setUp() {
        received = 0;
    }

    /**
     * Issues the REST calls that the CXF RS consumer endpoint should turn into
     * Kafka records. Individual HTTP failures are tolerated: the real
     * verification happens on the consumed Kafka records.
     */
    @Override
    protected void produceTestData() {
        // Wait until the CXF RS endpoint created by the connector is listening.
        TestUtils.waitFor(() -> NetworkUtils.portIsOpen(LOCALHOST, PORT));
        try {
            Bus bus = BusFactory.newInstance().createBus();
            bus.getInInterceptors().add(new LoggingInInterceptor());
            bus.getOutInterceptors().add(new LoggingOutInterceptor());
            try {
                doTestGetCustomer("rest");
            } catch (Exception e) {
                // Non-2xx responses or transport hiccups are acceptable here;
                // log and carry on so verifyMessages can do the real check.
                LOG.info("Test Invocation Failure", e);
            }
        } catch (Exception e) {
            LOG.info("Unable to invoke service: {}", e.getMessage(), e);
            fail("Unable to invoke service");
        }
    }

    /**
     * Asserts that every consumed record is a String and matches the expected
     * value for its position.
     */
    @Override
    protected void verifyMessages(TestMessageConsumer<?> consumer) {
        LOG.info("Consumed messages: {}", consumer.consumedMessages());
        for (ConsumerRecord<String, ?> record : consumer.consumedMessages()) {
            // Use the record from the iteration directly instead of re-fetching
            // the same element by index as the previous version did.
            Object receivedObject = record.value();
            if (!(receivedObject instanceof String)) {
                fail("Unexpected message type");
            }
            assertEquals(receivedValue[received++], (String) receivedObject);
        }
    }

    @Test
    @Timeout(30)
    public void testBasicSendReceive() {
        try {
            String topicName = getTopicForTest(this);
            ConnectorPropertyFactory connectorPropertyFactory = CamelSourceCXFRSPropertyFactory
                    .basic()
                    .withKafkaTopic(topicName)
                    .withAddress(CXF_RS_ENDPOINT_ADDRESS)
                    .withResourceClass("org.apache.camel.kafkaconnector.cxfrs.CustomerServiceResource");
            runTestBlocking(connectorPropertyFactory, topicName, expect);
        } catch (Exception e) {
            LOG.error("CXF test failed: {}", e.getMessage(), e);
            fail(e.getMessage());
        }
    }

    /**
     * Performs a GET on the given URI. The HTTP response body is intentionally
     * not inspected: the interesting output is the record the connector
     * publishes to Kafka.
     *
     * @param uri the customer service URI to invoke
     * @param expect unused here; kept to document the response body historically
     *               associated with each call
     */
    private void invokeGetCustomer(String uri, String expect) throws Exception {
        HttpGet get = new HttpGet(uri);
        get.addHeader("Accept", "application/json");
        // try-with-resources guarantees the client is closed even if execute() throws
        try (CloseableHttpClient httpclient = HttpClientBuilder.create().build()) {
            httpclient.execute(get);
        }
    }

    /** Invokes the three customer endpoints that drive the source connector. */
    private void doTestGetCustomer(String contextUri) throws Exception {
        invokeGetCustomer("http://" + LOCALHOST + ":" + CXT + "/" + contextUri + "/customerservice/customers/126",
                "{\"Customer\":{\"id\":126,\"name\":\"CKC\"}}");
        invokeGetCustomer("http://" + LOCALHOST + ":" + CXT + "/" + contextUri + "/customerservice/customers/123",
                "customer response back!");
        invokeGetCustomer("http://" + LOCALHOST + ":" + CXT + "/" + contextUri + "/customerservice/customers/400",
                "The remoteAddress is 127.0.0.1");
    }
}
| 9,134 |
0 | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs | Create_ds/camel-kafka-connector/tests/itests-cxfrs/src/test/java/org/apache/camel/kafkaconnector/cxfrs/source/HelloService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.cxfrs.source;
import java.util.List;
/**
 * A plain service contract used by the CXF RS source tests.
 *
 * NOTE(review): no implementation is visible in this file; the per-method
 * semantics below are inferred from the names only — confirm against the
 * implementing class.
 */
public interface HelloService {
    /** Returns a greeting string. */
    String sayHello();

    /** Liveness-style call with no result. */
    void ping();

    /** Returns how many times the service has been invoked. */
    int getInvocationCount();

    /** Echoes the given text back; may throw on invalid input. */
    String echo(String text) throws Exception;

    /** Echoes the given Boolean back (nullable wrapper, not primitive). */
    Boolean echoBoolean(Boolean bool);

    /** Exercises a call with two list parameters. */
    String complexParameters(List<String> par1, List<String> par2);
}
| 9,135 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/SourceConnectorPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import org.apache.camel.LoggingLevel;
import static org.apache.camel.kafkaconnector.CamelConnectorConfig.CAMEL_CONNECTOR_AGGREGATE_NAME;
import static org.apache.camel.kafkaconnector.CamelConnectorConfig.CAMEL_CONNECTOR_AGGREGATE_SIZE_CONF;
import static org.apache.camel.kafkaconnector.CamelConnectorConfig.CAMEL_CONNECTOR_AGGREGATE_TIMEOUT_CONF;
import static org.apache.camel.kafkaconnector.CamelSourceConnectorConfig.CAMEL_SOURCE_CONTENT_LOG_LEVEL_CONF;
import static org.apache.camel.kafkaconnector.CamelSourceConnectorConfig.CAMEL_SOURCE_URL_CONF;
import static org.apache.camel.kafkaconnector.CamelSourceConnectorConfig.TOPIC_CONF;
/**
 * Base property factory for source connectors: layers the source-specific
 * configuration options on top of {@link BasicConnectorPropertyFactory}.
 *
 * @param <T> the concrete factory type (self-type) returned by the fluent setters
 */
public abstract class SourceConnectorPropertyFactory<T extends SourceConnectorPropertyFactory<T>> extends BasicConnectorPropertyFactory<T> {

    /** Sets the Kafka topic the source connector publishes to. */
    public T withKafkaTopic(String topic) {
        return setProperty(TOPIC_CONF, topic);
    }

    /** Sets the Camel endpoint URL the connector consumes from. */
    public T withSourceUrl(String sourceUrl) {
        return setProperty(CAMEL_SOURCE_URL_CONF, sourceUrl);
    }

    /** Sets the logging level used when logging the exchange content. */
    public T withSourceContentLogginglevel(LoggingLevel level) {
        return setProperty(CAMEL_SOURCE_CONTENT_LOG_LEVEL_CONF, level.toString());
    }

    /**
     * Registers the aggregation bean and configures its batch size and timeout.
     *
     * @param aggregate fully-qualified class name of the aggregation strategy
     * @param size number of exchanges to aggregate per batch
     * @param timeout aggregation timeout
     */
    public T withAggregate(String aggregate, int size, int timeout) {
        T factory = withBeans(CAMEL_CONNECTOR_AGGREGATE_NAME, classRef(aggregate));
        factory = factory.setProperty(CAMEL_CONNECTOR_AGGREGATE_SIZE_CONF, size);
        return factory.setProperty(CAMEL_CONNECTOR_AGGREGATE_TIMEOUT_CONF, timeout);
    }
}
| 9,136 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/EndpointUrlBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.function.Function;
/**
 * Builds a Camel endpoint URL by appending query parameters to a base URL and
 * then hands the finished URL to the owning property factory.
 *
 * @param <T> the property factory type that receives the final URL
 */
public class EndpointUrlBuilder<T extends ConnectorPropertyFactory> {
    private final Function<String, T> setter;
    private String baseUrl;
    private boolean started;

    /**
     * @param setter callback that stores the finished URL on the property factory
     * @param baseUrl the endpoint URL without any query parameters
     */
    public EndpointUrlBuilder(Function<String, T> setter, String baseUrl) {
        this.setter = setter;
        this.baseUrl = baseUrl;
    }

    /**
     * Appends one query parameter, using '?' before the first parameter and
     * '&amp;' for every subsequent one. Consolidates the formatting logic that
     * was previously duplicated between the String and int overloads, and
     * removes the unreachable "already started" exception from the old
     * start() helper (it was only ever called when not started).
     */
    private void appendParameter(String name, String value) {
        String separator = started ? "&" : "?";
        baseUrl = String.format("%s%s%s=%s", baseUrl, separator, name, value);
        started = true;
    }

    /** Appends a String-valued query parameter. */
    public EndpointUrlBuilder<T> append(String name, String value) {
        appendParameter(name, value);
        return this;
    }

    /** Appends the parameter only when the value is non-null and non-empty. */
    public EndpointUrlBuilder<T> appendIfAvailable(String name, String value) {
        if (value != null && !value.isEmpty()) {
            return append(name, value);
        }
        return this;
    }

    /** Appends an int-valued query parameter. */
    public EndpointUrlBuilder<T> append(String name, int value) {
        appendParameter(name, String.valueOf(value));
        return this;
    }

    /** Stores the accumulated URL on the property factory and returns it. */
    public T buildUrl() {
        return setter.apply(baseUrl);
    }
}
| 9,137 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/SinkConnectorPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import org.apache.camel.LoggingLevel;
import static org.apache.camel.kafkaconnector.CamelSinkConnectorConfig.CAMEL_SINK_URL_CONF;
import static org.apache.camel.kafkaconnector.CamelSourceConnectorConfig.CAMEL_SOURCE_CONTENT_LOG_LEVEL_CONF;
/**
 * Base property factory for sink connectors: layers the sink-specific
 * configuration options on top of {@link BasicConnectorPropertyFactory}.
 *
 * @param <T> the concrete factory type (self-type) returned by the fluent setters
 */
public abstract class SinkConnectorPropertyFactory<T extends SinkConnectorPropertyFactory<T>> extends BasicConnectorPropertyFactory<T> {
    /** Sets the Kafka topics the sink connector consumes from. */
    public T withTopics(String topics) {
        return setProperty("topics", topics);
    }
    /** Sets the Camel endpoint URL the connector produces to. */
    public T withSinkUrl(String sinkUrl) {
        return setProperty(CAMEL_SINK_URL_CONF, sinkUrl);
    }
    // NOTE(review): this writes the *source* content log-level key
    // (CAMEL_SOURCE_CONTENT_LOG_LEVEL_CONF) from a sink factory. If a
    // CAMEL_SINK_CONTENT_LOG_LEVEL_CONF constant exists in
    // CamelSinkConnectorConfig it is probably the intended one — confirm.
    public T withSinkContentLogginglevel(LoggingLevel level) {
        return setProperty(CAMEL_SOURCE_CONTENT_LOG_LEVEL_CONF, level.toString());
    }
}
| 9,138 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/ConnectorPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An interface for producing different types of connector properties that match
* a specific type of connector in test.
*/
public interface ConnectorPropertyFactory {
    /**
     * Gets the properties used to configure the connector
     * @return a Properties object containing the set of properties for the connector
     */
    Properties getProperties();

    /**
     * Logs every connector property at INFO level; handy when diagnosing a
     * failing test run. Uses a logger named after this interface rather than
     * the implementing class.
     */
    default void log() {
        Properties properties = getProperties();
        Logger log = LoggerFactory.getLogger(ConnectorPropertyFactory.class);
        log.info("Using the following properties for the test: ");
        properties.forEach((key, value) -> log.info("{}={}", key, value));
    }
}
| 9,139 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/BasicConnectorPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.kafka.connect.runtime.ConnectorConfig;
/**
 * Base class for all connector property factories: a fluent wrapper around a
 * {@link Properties} instance holding Kafka Connect connector configuration.
 *
 * @param <T> the concrete factory type (self-type), so the fluent setters
 *            return the subclass for chaining
 */
public abstract class BasicConnectorPropertyFactory<T extends BasicConnectorPropertyFactory<T>> implements ConnectorPropertyFactory {
    private final Properties connectorProps = new Properties();

    /** Sets the connector name. */
    public T withName(String name) {
        connectorProps.put(ConnectorConfig.NAME_CONFIG, name);
        return (T) this;
    }

    /** Sets the maximum number of tasks the connector may spawn. */
    public T withTasksMax(int tasksMax) {
        connectorProps.put("tasks.max", String.valueOf(tasksMax));
        return (T) this;
    }

    /** Sets the connector implementation class. */
    public T withConnectorClass(String connectorClass) {
        connectorProps.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, connectorClass);
        return (T) this;
    }

    /** Sets the key converter class. */
    public T withKeyConverterClass(String converterClass) {
        connectorProps.put(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, converterClass);
        return (T) this;
    }

    /** Registers a Camel bean binding ("camel.beans.&lt;name&gt;"). */
    public T withBeans(String name, String value) {
        connectorProps.put("camel.beans." + name, value);
        return (T) this;
    }

    /** Sets the value converter class. */
    public T withValueConverterClass(String converterClass) {
        connectorProps.put(ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG, converterClass);
        return (T) this;
    }

    /** Opens the fluent idempotency configuration (enables it implicitly). */
    public IdempotencyConfigBuilder<T> withIdempotency() {
        return new IdempotencyConfigBuilder<>((T) this, connectorProps);
    }

    /**
     * This enables sending failed records to the DLQ. Note: it automatically configure other required/recommended
     * options!
     * @param topicName the DLQ topic name
     * @return this object instance
     */
    public T withDeadLetterQueueTopicName(String topicName) {
        // There's no constant for the DLQ settings.
        // NOTE(review): the replication factor and log-enable flag are stored as
        // Integer/Boolean in a Properties object — confirm the runtime
        // stringifies them as expected.
        connectorProps.put("errors.deadletterqueue.topic.name", topicName);
        connectorProps.put("errors.deadletterqueue.topic.replication.factor", 1);
        connectorProps.put(ConnectorConfig.ERRORS_LOG_ENABLE_CONFIG, true);
        return (T) this;
    }

    /** Opens the fluent per-transform configuration for the given alias. */
    public TransformsConfigBuilder<T> withTransformsConfig(String name) {
        return new TransformsConfigBuilder<>((T) this, getProperties(), name);
    }

    /** Opens the fluent component configuration for the given option. */
    public ComponentConfigBuilder<T> withComponentConfig(String name, String value) {
        return new ComponentConfigBuilder<>((T) this, getProperties(), name, value);
    }

    /** Sets an arbitrary connector property. */
    public T setProperty(String name, Object value) {
        connectorProps.put(name, value);
        return (T) this;
    }

    /** Returns a Camel "#class:" bean reference for the given class name. */
    public static String classRef(String className) {
        return "#class:" + className;
    }

    /** Returns a Camel "#class:" bean reference for the given class. */
    public static String classRef(Class<?> clazz) {
        return "#class:" + clazz.getName();
    }

    /**
     * Copies every entry of the given properties into this factory,
     * overwriting duplicates with the incoming value.
     */
    public T merge(Properties properties) {
        // Hashtable.putAll already copies all entries and overwrites duplicate
        // keys with the incoming value — exactly what the previous
        // stream/collect round-trip did, with less code.
        connectorProps.putAll(properties);
        return (T) this;
    }

    @Override
    public Properties getProperties() {
        return connectorProps;
    }
}
| 9,140 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/TransformsConfigBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.Properties;
import org.apache.kafka.connect.runtime.ConnectorConfig;
/**
 * Fluent helper that registers a single named transform and its per-transform
 * configuration entries on the connector properties.
 *
 * @param <T> the owning property factory type, returned by {@link #end()}
 */
public class TransformsConfigBuilder<T extends ConnectorPropertyFactory> {
    private final T handle;
    private final Properties properties;
    private final String name;

    public TransformsConfigBuilder(T handle, Properties properties, String name) {
        this.handle = handle;
        this.properties = properties;
        this.name = name;
        // Declare the transform alias so the per-transform keys below take effect.
        properties.put(ConnectorConfig.TRANSFORMS_CONFIG, name);
    }

    /** Adds one "transforms.&lt;name&gt;.&lt;key&gt;" entry. */
    public TransformsConfigBuilder<T> withEntry(String key, String value) {
        String qualifiedKey = ConnectorConfig.TRANSFORMS_CONFIG + "." + name + "." + key;
        properties.put(qualifiedKey, value);
        return this;
    }

    /** Finishes this transform's configuration and returns the owning factory. */
    public T end() {
        return handle;
    }
}
| 9,141 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/ComponentConfigBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.Properties;
/**
 * Fluent helper for configuring a Camel component option and its nested
 * sub-entries on the connector properties.
 *
 * @param <T> the owning property factory type, returned by {@link #end()}
 */
public class ComponentConfigBuilder<T extends ConnectorPropertyFactory> {
    private final T handle;
    private final Properties properties;
    private final String name;

    public ComponentConfigBuilder(T handle, Properties properties, String name, String value) {
        this.handle = handle;
        this.properties = properties;
        this.name = name;
        // Store the root option itself before any nested entries are added.
        properties.put(name, value);
    }

    /** Adds one "&lt;name&gt;.&lt;key&gt;" entry nested under the root option. */
    public ComponentConfigBuilder<T> withEntry(String key, String value) {
        String qualifiedKey = name + "." + key;
        properties.put(qualifiedKey, value);
        return this;
    }

    /** Finishes this component's configuration and returns the owning factory. */
    public T end() {
        return handle;
    }
}
| 9,142 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/AbstractKafkaTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import org.apache.camel.kafkaconnector.common.services.kafka.EmbeddedKafkaService;
import org.apache.camel.kafkaconnector.common.services.kafkaconnect.KafkaConnectRunnerFactory;
import org.apache.camel.kafkaconnector.common.services.kafkaconnect.KafkaConnectService;
import org.apache.camel.kafkaconnector.common.utils.PropertyUtils;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.kafka.services.ContainerLocalKafkaService;
import org.apache.camel.test.infra.kafka.services.KafkaService;
import org.apache.camel.test.infra.kafka.services.KafkaServiceFactory;
import org.apache.camel.test.infra.kafka.services.RemoteKafkaService;
import org.apache.camel.test.infra.kafka.services.StrimziService;
import org.junit.jupiter.api.extension.RegisterExtension;
/**
 * Base class for Kafka integration tests: provisions a Kafka service and a
 * Kafka Connect runtime, both registered as JUnit 5 extensions.
 */
public abstract class AbstractKafkaTest {
    @RegisterExtension
    public final KafkaService kafkaService;
    @RegisterExtension
    public final KafkaConnectService kafkaConnectService;

    static {
        // Load test configuration properties once, before any service is created.
        PropertyUtils.load();
    }

    public AbstractKafkaTest() {
        // Order matters: the connectors under test must be registered first, so
        // the plugin path derived from them is available when the Kafka Connect
        // runtime is created below.
        PluginPathHelper.getInstance().registerConnector(getConnectorsInTest());
        kafkaService = KafkaServiceFactory
                .builder()
                .addLocalMapping(EmbeddedKafkaService::new)
                .addRemoteMapping(RemoteKafkaService::new)
                .addMapping("embedded", EmbeddedKafkaService::new)
                .addMapping("local-strimzi-container", StrimziService::new)
                .addMapping("local-cp-kafka-container", ContainerLocalKafkaService::new)
                .build();
        kafkaService.initialize();
        kafkaConnectService = KafkaConnectRunnerFactory.createService(kafkaService);
    }

    /**
     * @return the names of the connector modules exercised by the concrete test class
     */
    protected abstract String[] getConnectorsInTest();

    public KafkaService getKafkaService() {
        return kafkaService;
    }

    public KafkaConnectService getKafkaConnectService() {
        return kafkaConnectService;
    }

    /**
     * Gets a topic name for the test class
     * @param clazz the test class
     * @return the fully-qualified class name, used as the base topic name
     */
    protected String getDefaultTestTopic(Class<?> clazz) {
        return clazz.getName();
    }

    /**
     * Builds a per-test topic name: the class-based base name plus a random
     * suffix so repeated runs do not consume each other's records.
     */
    protected String getTopicForTest(Object testObject) {
        return getDefaultTestTopic(testObject.getClass()) + "." + TestUtils.randomWithRange(0, 1000);
    }
}
| 9,143 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/PluginPathHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.shaded.org.apache.commons.io.DirectoryWalker;
/**
 * Computes the Kafka Connect plugin path for the connectors under test by
 * scanning each connector module's build output ("target") directory for
 * connector jars.
 */
public final class PluginPathHelper {
    private static final Logger LOG = LoggerFactory.getLogger(PluginPathHelper.class);
    private static PluginPathHelper instance;
    private volatile List<String> connectors;

    /** Walks a directory tree (max depth 2) collecting candidate plugin directories. */
    private static class PluginWalker extends DirectoryWalker<String> {
        public PluginWalker() {
            super(null, 2);
        }

        @Override
        protected void handleFile(File file, int depth, Collection<String> results) throws IOException {
            String fileName = file.getName();
            // Only connector jars matter: camel-*-kafka-connector*.jar under a
            // Maven "target" directory.
            if (fileName.endsWith(".jar")) {
                if (fileName.contains("kafka-connector") && fileName.contains("camel")) {
                    String parentDir = file.getParentFile().getCanonicalPath();
                    if (parentDir.endsWith("target")) {
                        LOG.debug("Adding directory (1): {}: {}", depth, parentDir);
                        results.add(parentDir);
                    }
                }
            }
        }

        @Override
        protected boolean handleDirectory(File directory, int depth, Collection<String> results) throws IOException {
            String directoryName = directory.getName();
            if (directoryName.equals("target")) {
                String pluginDir = directory.getCanonicalPath();
                LOG.debug("Adding directory (2): {}: {}", depth, pluginDir);
                results.add(pluginDir);
            }
            return true;
        }

        /** Runs the walk, returning whatever was collected even on I/O errors. */
        public List<String> findPlugins(File startDir) {
            List<String> results = new ArrayList<>();
            try {
                walk(startDir, results);
            } catch (IOException e) {
                LOG.error("I/O error while traversing candidate plugin dirs: {}", e.getMessage(), e);
            }
            return results;
        }
    }

    private PluginPathHelper() {
    }

    /** Resolves the registered connector modules to their on-disk plugin directories. */
    private List<String> findPluginPaths() {
        List<String> pluginPaths = new ArrayList<>();
        // NOTE(review): if -Dproject.basedir is not set, this builds "null/..."
        // paths and simply finds nothing — confirm the property is always
        // provided by the build.
        for (String module : connectors) {
            String path = System.getProperty("project.basedir") + File.separator + module;
            File pathFile = new File(path);
            try {
                LOG.debug("Base dir used for search: {}, with path: {}, coming from components: {}, {}, {}", pathFile.getCanonicalPath(), path, System.getProperty("project.basedir"), File.separator, module);
            } catch (IOException e) {
                LOG.error("I/O exception: {}", e.getMessage(), e);
            }
            PluginWalker pluginWalker = new PluginWalker();
            pluginPaths.addAll(pluginWalker.findPlugins(pathFile));
        }
        return pluginPaths;
    }

    /*
     * We need to construct a list of directories containing *only* the connector classes (ie.: those that
     * specialize Kafka's Connector abstract class.
     *
     * Our build generates jar files with for every connector and puts that in the target directory which the
     * build dir (ie.: ${project.root}/core/target/camel-kafka-connector-0.0.1-SNAPSHOT.jar,
     * ${project.root}/connectors/camel-sjms2-kafka-connector/target/camel-sjms2-kafka-connector-0.0.1-SNAPSHOT.jar,
     * etc).
     *
     * The code within the pluginPaths traverses the directories for the core and connectors module, filtering any
     * file that matches all of the following conditions:
     * 1) ends with jar
     * 2) is located in the target directory
     * 3) contains the strings 'camel' and 'kafka-connector' as part of their name.
     *
     * Then for every connector jar file that it finds, it configures the embedded runtime to includes the parent dir
     * into the configuration.
     *
     * Why it does this?
     *
     * 1) Because having the connector classes in the classpath could cause library conflicts causing the connectors
     * and the Kafka connect runtime to fail.
     * 2) Having the connectors on the classpath causes the following error to appear multiple times in the logs:
     * 'Plugin class loader for connector: [name] was not found'
     *
     * ref: https://docs.confluent.io/current/connect/userguide.html
     */
    public String pluginPaths() {
        String ret = String.join(",", findPluginPaths());
        LOG.info("Returning the following directories for the plugin path: {}", ret);
        return ret;
    }

    /** Registers the connectors under test; each name is resolved under "connectors/". */
    public void registerConnector(String[] connectorNames) {
        // prepend "connectors/" to all connectors name
        connectors = Stream.of(connectorNames)
                .map(connectorName -> "connectors" + File.separator + connectorName)
                .collect(Collectors.toList());
    }

    /**
     * Returns the singleton instance. Synchronized so concurrent test-class
     * initialization cannot create two instances (the previous unsynchronized
     * lazy check was racy).
     */
    public static synchronized PluginPathHelper getInstance() {
        if (instance == null) {
            instance = new PluginPathHelper();
        }
        return instance;
    }
}
| 9,144 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/IdempotencyConfigBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common;
import java.util.Properties;
/**
 * Fluent helper for the "camel.idempotency.*" connector options. Creating the
 * builder implicitly enables idempotency (enabled=true).
 *
 * @param <T> the owning property factory type, returned by {@link #end()}
 */
public class IdempotencyConfigBuilder<T extends ConnectorPropertyFactory> {
    private static final String PREFIX = "camel.idempotency.";

    private final T handle;
    private final Properties properties;

    public IdempotencyConfigBuilder(T handle, Properties properties) {
        this.handle = handle;
        this.properties = properties;
        withEnabled(true);
    }

    /** Stores one "camel.idempotency.&lt;key&gt;" entry. */
    private IdempotencyConfigBuilder<T> withEntry(String key, Object value) {
        properties.put(PREFIX + key, value);
        return this;
    }

    public IdempotencyConfigBuilder<T> withEnabled(boolean value) {
        return withEntry("enabled", value);
    }

    public IdempotencyConfigBuilder<T> withRepositoryType(String value) {
        return withEntry("repository.type", value);
    }

    public IdempotencyConfigBuilder<T> withExpressionType(String value) {
        return withEntry("expression.type", value);
    }

    public IdempotencyConfigBuilder<T> withExpressionHeader(String value) {
        return withEntry("expression.header", value);
    }

    public IdempotencyConfigBuilder<T> withMemoryDimension(String value) {
        return withEntry("memory.dimension", value);
    }

    public IdempotencyConfigBuilder<T> withKafkaTopic(String value) {
        return withEntry("kafka.topic", value);
    }

    public IdempotencyConfigBuilder<T> withKafkaBootstrapServers(String value) {
        return withEntry("kafka.bootstrap.servers", value);
    }

    public IdempotencyConfigBuilder<T> withKafkaMaxCacheSize(String value) {
        return withEntry("kafka.max.cache.size", value);
    }

    public IdempotencyConfigBuilder<T> withKafkaPollDurationMs(String value) {
        return withEntry("kafka.poll.duration.ms", value);
    }

    /** Finishes the idempotency configuration and returns the owning factory. */
    public T end() {
        return handle;
    }
}
| 9,145 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/DefaultProducerPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
/**
 * Supplies the minimal set of properties required to build a Kafka producer
 * that serializes both keys and values as strings.
 */
public class DefaultProducerPropertyFactory implements ProducerPropertyFactory {
    private final String bootstrapServer;

    /**
     * Constructs the factory using the given bootstrap server
     *
     * @param bootstrapServer the address of the server in the format
     *            PLAINTEXT://${address}:${port}
     */
    public DefaultProducerPropertyFactory(String bootstrapServer) {
        this.bootstrapServer = bootstrapServer;
    }

    @Override
    public Properties getProperties() {
        // Both keys and values are serialized as plain strings
        String stringSerializer = StringSerializer.class.getName();

        Properties producerProperties = new Properties();
        producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        // Each instance gets its own random client id
        producerProperties.put(ProducerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString());
        producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, stringSerializer);
        producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, stringSerializer);
        return producerProperties;
    }
}
| 9,146 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/ConsumerPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.util.Properties;
/**
 * An interface for producing the properties used to configure a Kafka consumer.
 * The CLI runtime equivalent of an implementation of this interface is the
 * consumer.properties file shipped with the Kafka distribution.
 */
public interface ConsumerPropertyFactory {

    /**
     * Gets the properties used to configure the consumer
     *
     * @return a Properties object containing the set of properties for the consumer
     */
    Properties getProperties();
}
| 9,147 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/ProducerPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.util.Properties;
/**
 * An interface for producing the properties used to configure a Kafka producer.
 * The CLI runtime equivalent of an implementation of this interface is the
 * producer.properties file shipped with the Kafka distribution.
 * (Note: the previous javadoc said "consumer", copied from ConsumerPropertyFactory.)
 */
public interface ProducerPropertyFactory {

    /**
     * Gets the properties used to configure the producer
     *
     * @return a Properties object containing the set of properties for the producer
     */
    Properties getProperties();
}
| 9,148 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/KafkaClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.function.Consumer;
import java.util.function.Predicate;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Header;
/**
 * A very simple test message client that can produce to and consume from Kafka
 * topics, with configurable key and value types.
 *
 * @param <K> Key type
 * @param <V> Value type
 */
public class KafkaClient<K, V> {
    private final ConsumerPropertyFactory consumerPropertyFactory;
    private final ProducerPropertyFactory producerPropertyFactory;

    private KafkaProducer<K, V> producer;
    private KafkaConsumer<K, V> consumer;

    /**
     * A minimal {@link Header} implementation backed by a string key/value pair.
     */
    private static class TestHeader implements Header {
        private final String key;
        private final String value;

        public TestHeader(String key, String value) {
            this.key = key;
            this.value = value;
        }

        @Override
        public String key() {
            return this.key;
        }

        @Override
        public byte[] value() {
            // Encode explicitly as UTF-8 instead of relying on the platform
            // default charset (the previous code used the no-arg getBytes())
            return value.getBytes(StandardCharsets.UTF_8);
        }
    }

    /**
     * Constructs the client using the default property factories for the given
     * bootstrap server
     *
     * @param bootstrapServer the address of the server in the format
     *            PLAINTEXT://${address}:${port}
     */
    public KafkaClient(String bootstrapServer) {
        this(new DefaultConsumerPropertyFactory(bootstrapServer), new DefaultProducerPropertyFactory(bootstrapServer));
    }

    /**
     * Constructs the client using the given property factories
     *
     * @param consumerPropertyFactory a property factory for Kafka client consumers
     * @param producerPropertyFactory a property factory for Kafka client producers
     */
    public KafkaClient(ConsumerPropertyFactory consumerPropertyFactory, ProducerPropertyFactory producerPropertyFactory) {
        this.consumerPropertyFactory = consumerPropertyFactory;
        this.producerPropertyFactory = producerPropertyFactory;

        producer = new KafkaProducer<>(producerPropertyFactory.getProperties());
        consumer = new KafkaConsumer<>(consumerPropertyFactory.getProperties());
    }

    /**
     * Consumes whatever messages are returned by a single short poll of the
     * given topic
     *
     * @param topic the topic to consume the messages from
     * @param recordConsumer a function to consume the received messages
     */
    public void consumeAvailable(String topic, Consumer<ConsumerRecord<K, V>> recordConsumer) {
        consumer.subscribe(Collections.singletonList(topic));

        ConsumerRecords<K, V> records = consumer.poll(Duration.ofMillis(100));
        for (ConsumerRecord<K, V> record : records) {
            recordConsumer.accept(record);
        }
    }

    /**
     * Consumes messages from the given topic until the predicate returns false.
     * Beware: this method blocks indefinitely if the predicate never returns
     * false.
     *
     * @param topic the topic to consume the messages from
     * @param predicate the predicate to test when the messages arrive; returning
     *            false stops consumption
     */
    public void consume(String topic, Predicate<ConsumerRecord<K, V>> predicate) {
        consumer.subscribe(Collections.singletonList(topic));

        // TODO: handle failures, timeouts, etc
        while (true) {
            ConsumerRecords<K, V> records = consumer.poll(Duration.ofMillis(10));

            for (ConsumerRecord<K, V> record : records) {
                if (!predicate.test(record)) {
                    return;
                }
            }
        }
    }

    /**
     * Sends data to a topic and blocks until the send is acknowledged
     *
     * @param topic the topic to send data to
     * @param message the message to send
     * @throws ExecutionException if sending the record fails
     * @throws InterruptedException if interrupted while waiting for the send
     */
    public void produce(String topic, V message) throws ExecutionException, InterruptedException {
        ProducerRecord<K, V> record = new ProducerRecord<>(topic, message);

        Future<RecordMetadata> future = producer.send(record);

        future.get();
    }

    /**
     * Sends data with headers to a topic and blocks until the send is
     * acknowledged
     *
     * @param topic the topic to send data to
     * @param message the message to send
     * @param headers the headers to attach to the record
     * @throws ExecutionException if sending the record fails
     * @throws InterruptedException if interrupted while waiting for the send
     */
    public void produce(String topic, V message, Map<String, String> headers) throws ExecutionException, InterruptedException {
        ProducerRecord<K, V> record = new ProducerRecord<>(topic, message);

        for (Map.Entry<String, String> entry : headers.entrySet()) {
            record.headers().add(new TestHeader(entry.getKey(), entry.getValue()));
        }

        Future<RecordMetadata> future = producer.send(record);

        future.get();
    }

    /**
     * Creates a new admin client configured from the producer properties.
     * Callers are responsible for closing the returned client.
     *
     * @return a new AdminClient instance
     */
    public AdminClient getAdminClient() {
        return AdminClient.create(producerPropertyFactory.getProperties());
    }

    /**
     * Delete a topic
     *
     * @param topic the topic to be deleted
     */
    public void deleteTopic(String topic) {
        // try-with-resources: the previous code leaked the AdminClient (its
        // threads and connections were never released); close() also waits for
        // the pending delete request to complete
        try (AdminClient adminClient = getAdminClient()) {
            adminClient.deleteTopics(Collections.singleton(topic));
        }
    }
}
| 9,149 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/ByteProducerPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.BytesSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
/**
 * Supplies the minimal set of properties required to build a Kafka producer
 * that serializes keys as strings and values as raw bytes.
 */
public class ByteProducerPropertyFactory implements ProducerPropertyFactory {
    private final String bootstrapServer;

    /**
     * Constructs the factory using the given bootstrap server
     *
     * @param bootstrapServer the address of the server in the format
     *            PLAINTEXT://${address}:${port}
     */
    public ByteProducerPropertyFactory(String bootstrapServer) {
        this.bootstrapServer = bootstrapServer;
    }

    @Override
    public Properties getProperties() {
        Properties producerProperties = new Properties();
        producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        // Each instance gets its own random client id
        producerProperties.put(ProducerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString());
        // Keys are strings, values are raw bytes
        producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, BytesSerializer.class.getName());
        return producerProperties;
    }
}
| 9,150 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/clients/kafka/DefaultConsumerPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.clients.kafka;
import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
/**
 * A property producer that yields the minimum configuration needed for a Kafka
 * consumer that reads string keys and string values from a topic.
 *
 * Because the offset reset policy is "earliest" and the group id is random for
 * every instance, a consumer built from these properties consumes all published
 * messages "from-beginning".
 */
public class DefaultConsumerPropertyFactory implements ConsumerPropertyFactory {
    private final String bootstrapServer;

    /**
     * Constructs the factory using the given bootstrap server
     *
     * @param bootstrapServer the address of the server in the format
     *            PLAINTEXT://${address}:${port}
     */
    public DefaultConsumerPropertyFactory(String bootstrapServer) {
        this.bootstrapServer = bootstrapServer;
    }

    @Override
    public Properties getProperties() {
        // Both keys and values are deserialized as plain strings
        String stringDeserializer = StringDeserializer.class.getName();

        Properties consumerProperties = new Properties();
        consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        // A random group id gives every test consumer its own offset tracking
        consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
        consumerProperties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, stringDeserializer);
        consumerProperties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, stringDeserializer);
        consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return consumerProperties;
    }
}
| 9,151 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/AbstractTestMessageProducer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for test message producers: sends {@code count} messages, whose
 * bodies and headers are supplied by the concrete subclass, to a single topic.
 */
public abstract class AbstractTestMessageProducer<T> implements TestMessageProducer<T> {
    private static final Logger LOG = LoggerFactory.getLogger(AbstractTestMessageProducer.class);

    private final KafkaClient<String, T> kafkaClient;
    private final String topicName;
    private final int count;

    /**
     * Builds a producer that sends through the given Kafka client.
     *
     * @param kafkaClient the client used to produce the records
     * @param topicName the topic to send the messages to
     * @param count the number of messages to send
     */
    public AbstractTestMessageProducer(KafkaClient<String, T> kafkaClient, String topicName, int count) {
        this.kafkaClient = kafkaClient;
        this.topicName = topicName;
        this.count = count;
    }

    /**
     * Builds a producer with a freshly created Kafka client.
     *
     * @param bootstrapServer the bootstrap server address
     * @param topicName the topic to send the messages to
     * @param count the number of messages to send
     */
    public AbstractTestMessageProducer(String bootstrapServer, String topicName, int count) {
        this.kafkaClient = createKafkaClient(bootstrapServer);
        this.topicName = topicName;
        this.count = count;
    }

    /**
     * Creates the Kafka client used by this producer; subclasses may override
     * to customize the client.
     */
    protected KafkaClient<String, T> createKafkaClient(String bootstrapServer) {
        return new KafkaClient<>(bootstrapServer);
    }

    public void produceMessages() throws ExecutionException, InterruptedException {
        LOG.trace("Producing messages ...");
        for (int messageNumber = 0; messageNumber < count; messageNumber++) {
            T body = testMessageContent(messageNumber);
            Map<String, String> headers = messageHeaders(body, messageNumber);

            // A null header map means the message is sent without headers
            if (headers != null) {
                kafkaClient.produce(topicName, body, headers);
            } else {
                kafkaClient.produce(topicName, body);
            }
        }
    }
}
| 9,152 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/AbstractTestMessageConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.ArrayList;
import java.util.List;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for test message consumers: reads records from a topic until the
 * expected number of messages has been received, retaining every record for
 * later inspection.
 */
public abstract class AbstractTestMessageConsumer<T> implements TestMessageConsumer<T> {
    private static final Logger LOG = LoggerFactory.getLogger(AbstractTestMessageConsumer.class);

    protected final KafkaClient<String, T> kafkaClient;
    protected final String topicName;

    private final int count;
    private final List<ConsumerRecord<String, T>> receivedMessages;
    // NOTE(review): incremented from the consume callback; volatile gives
    // visibility only, so this assumes a single consuming thread — confirm
    private volatile int received;

    /**
     * Builds a consumer for the given topic.
     *
     * @param kafkaClient the client used to consume the records
     * @param topicName the topic to consume records from
     * @param count the number of records expected
     */
    public AbstractTestMessageConsumer(KafkaClient<String, T> kafkaClient, String topicName, int count) {
        this.kafkaClient = kafkaClient;
        this.topicName = topicName;
        this.count = count;

        receivedMessages = new ArrayList<>(count);
    }

    /**
     * Records the received message and reports whether consumption should
     * continue.
     *
     * @return true to keep consuming, false once {@code count} records arrived
     */
    public boolean checkRecord(ConsumerRecord<String, T> record) {
        LOG.debug("Received: {}", record.value());
        receivedMessages.add(record);
        received++;

        return received != count;
    }

    @Override
    public void consumeMessages() {
        kafkaClient.consume(topicName, this::checkRecord);
    }

    @Override
    public List<ConsumerRecord<String, T>> consumedMessages() {
        return receivedMessages;
    }
}
| 9,153 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/StringMessageConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
/**
 * A consumer that receives the 'count' amount of text messages from the Kafka broker
 */
public class StringMessageConsumer extends AbstractTestMessageConsumer<String> {

    /**
     * Builds a consumer of string-valued records.
     *
     * @param kafkaClient the client used to consume the records
     * @param topicName the topic to consume records from
     * @param count the number of records expected
     */
    public StringMessageConsumer(KafkaClient<String, String> kafkaClient, String topicName, int count) {
        super(kafkaClient, topicName, count);
    }
}
| 9,154 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/FunctionalTestMessageProducer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
/**
 * A functional flavor of a test message producer: unlike
 * TestMessageProducer#produceMessages, this variant declares no checked
 * exceptions, so message production can be supplied as a simple lambda.
 */
@FunctionalInterface
public interface FunctionalTestMessageProducer {
    /** Produces the test messages. */
    void produceMessages();
}
| 9,155 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/StringMessageProducer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.Map;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
/**
 * A producer that sends the 'count' amount of text messages to the Kafka broker
 */
public class StringMessageProducer extends AbstractTestMessageProducer<String> {

    /**
     * Builds a producer with a freshly created Kafka client.
     */
    public StringMessageProducer(String bootStrapServer, String topicName, int count) {
        super(bootStrapServer, topicName, count);
    }

    /**
     * Builds a producer that sends through the given Kafka client.
     */
    public StringMessageProducer(KafkaClient<String, String> kafkaClient, String topicName, int count) {
        super(kafkaClient, topicName, count);
    }

    /**
     * Plain string test messages carry no headers.
     *
     * @return always null
     */
    @Override
    public Map<String, String> messageHeaders(String text, int current) {
        return null;
    }

    /**
     * Builds the message body, embedding the message's sequence number.
     */
    @Override
    public String testMessageContent(int current) {
        return String.format("Sink test message %d", current);
    }
}
| 9,156 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/CamelSinkTestSupport.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.time.Duration;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Camel sink connector integration tests. Subclasses implement
 * {@link #consumeMessages(CountDownLatch)} to receive the data the connector
 * writes, and {@link #verifyMessages(CountDownLatch)} to assert on it.
 */
public abstract class CamelSinkTestSupport extends AbstractKafkaTest {
    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkTestSupport.class);

    /**
     * Internal producer contract broad enough to adapt both
     * {@link TestMessageProducer} and {@link FunctionalTestMessageProducer}
     * via a method reference.
     */
    @FunctionalInterface
    private interface TestProducerInvoker {
        void produceMessages() throws ExecutionException, InterruptedException;
    }

    /**
     * A simple test runner that follows the steps: initialize, start consumer, produce messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param topic the topic to send the messages to
     * @param count the number of messages to send
     * @throws Exception For test-specific exceptions
     */
    protected void runTest(ConnectorPropertyFactory connectorPropertyFactory, String topic, int count) throws Exception {
        StringMessageProducer stringMessageProducer = new StringMessageProducer(getKafkaService().getBootstrapServers(),
                topic, count);

        runTest(connectorPropertyFactory, stringMessageProducer);
    }

    /**
     * A more flexible test runner that can use a custom producer of test messages.
     * Blocks until the connector is initialized before producing.
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param producer the test message producer
     * @throws Exception For test-specific exceptions
     */
    protected void runTest(ConnectorPropertyFactory connectorPropertyFactory, TestMessageProducer<?> producer) throws Exception {
        connectorPropertyFactory.log();
        getKafkaConnectService().initializeConnectorBlocking(connectorPropertyFactory, 1);

        produceAndVerify(producer::produceMessages);
    }

    /**
     * A test runner variant accepting a functional (lambda-friendly) producer.
     * Blocks until the connector is initialized before producing.
     *
     * @param connectorPropertyFactory a factory for connector properties
     * @param producer the test message producer
     * @throws ExecutionException if producing or connector setup fails
     * @throws InterruptedException if interrupted while waiting
     */
    protected void runTest(ConnectorPropertyFactory connectorPropertyFactory, FunctionalTestMessageProducer producer) throws ExecutionException, InterruptedException {
        connectorPropertyFactory.log();
        getKafkaConnectService().initializeConnectorBlocking(connectorPropertyFactory, 1);

        produceAndVerify(producer::produceMessages);
    }

    /**
     * Like {@link #runTest(ConnectorPropertyFactory, TestMessageProducer)}, but
     * does not wait for the connector initialization to complete before
     * producing messages.
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param producer the test message producer
     * @throws Exception For test-specific exceptions
     */
    protected void runTestNonBlocking(ConnectorPropertyFactory connectorPropertyFactory, TestMessageProducer<?> producer) throws Exception {
        connectorPropertyFactory.log();
        getKafkaConnectService().initializeConnector(connectorPropertyFactory);

        produceAndVerify(producer::produceMessages);
    }

    /**
     * Common body shared by all runners (previously duplicated three times):
     * starts the consumer on a background thread, produces the messages, then
     * verifies the results.
     */
    private void produceAndVerify(TestProducerInvoker producer) throws ExecutionException, InterruptedException {
        LOG.debug("Creating the consumer ...");
        ExecutorService service = Executors.newCachedThreadPool();
        CountDownLatch latch = new CountDownLatch(1);
        service.submit(() -> consumeMessages(latch));

        producer.produceMessages();

        LOG.debug("Waiting for the messages to be processed");
        service.shutdown();

        LOG.debug("Waiting for the test to complete");
        verifyMessages(latch);
    }

    /**
     * Sleeps for one second to give data time to arrive.
     *
     * @return true if the wait completed, false if the thread was interrupted
     */
    protected boolean waitForData() {
        try {
            Thread.sleep(Duration.ofSeconds(1).toMillis());

            return true;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /** Consumes the data produced by the connector, counting down the latch when done. */
    protected abstract void consumeMessages(CountDownLatch latch);

    /** Verifies the consumed data once the latch is released. */
    protected abstract void verifyMessages(CountDownLatch latch) throws InterruptedException;
}
| 9,157 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/TestMessageConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
/**
 * A consumer of test messages.
 *
 * @param <T> the record value type
 */
public interface TestMessageConsumer<T> {
    /** Consumes messages, blocking until the implementation decides to stop. */
    void consumeMessages();

    /**
     * Gets the records consumed so far.
     *
     * @return the list of consumed records
     */
    List<ConsumerRecord<String, T>> consumedMessages();
}
| 9,158 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/TestMessageProducer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
 * A producer of test messages
 *
 * @param <T> the message body type
 */
public interface TestMessageProducer<T> {
    /**
     * Builds the headers for the given message.
     *
     * @param text the message body the headers belong to
     * @param current the zero-based sequence number of the message
     * @return the headers to attach, or null if the message carries no headers
     */
    Map<String, String> messageHeaders(T text, int current);

    /**
     * Builds the body of the message with the given sequence number.
     *
     * @param current the zero-based sequence number of the message
     * @return the message body
     */
    T testMessageContent(int current);

    /**
     * Produces the test messages.
     *
     * @throws ExecutionException if sending a record fails
     * @throws InterruptedException if interrupted while waiting for a send
     */
    void produceMessages() throws ExecutionException, InterruptedException;
}
| 9,159 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/IntegerMessageConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
/**
* A consumer that receives the 'count' amount of text messages from the Kafka broker
*/
/**
 * A consumer that receives the 'count' amount of integer messages from the Kafka broker
 */
public class IntegerMessageConsumer extends AbstractTestMessageConsumer<Integer> {
    /**
     * @param kafkaClient the Kafka client used to consume the records
     * @param topicName the topic to consume from
     * @param count the number of messages to receive
     */
    public IntegerMessageConsumer(KafkaClient<String, Integer> kafkaClient, String topicName, int count) {
        super(kafkaClient, topicName, count);
    }
}
| 9,160 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/test/CamelSourceTestSupport.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.test;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.AbstractKafkaTest;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Camel source connector tests. Provides test runners that initialize a
 * connector, produce test data, consume the resulting Kafka records and verify them.
 */
public abstract class CamelSourceTestSupport extends AbstractKafkaTest {
    private static final Logger LOG = LoggerFactory.getLogger(CamelSourceTestSupport.class);

    /**
     * Produces the test data to be collected by the connector under test.
     */
    protected abstract void produceTestData();

    /**
     * Verifies the messages received by the given consumer.
     *
     * @param consumer the consumer holding the received records
     */
    protected abstract void verifyMessages(TestMessageConsumer<?> consumer);

    /**
     * A simple test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param topic the topic to send the messages to
     * @param count the number of messages to send
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTest(ConnectorPropertyFactory connectorPropertyFactory, String topic, int count) throws ExecutionException, InterruptedException {
        KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
        StringMessageConsumer consumer = new StringMessageConsumer(kafkaClient, topic, count);

        runTest(connectorPropertyFactory, consumer);
    }

    /**
     * A simple test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param consumer A Kafka consumer for the test messages
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTest(ConnectorPropertyFactory connectorPropertyFactory, TestMessageConsumer<?> consumer) throws ExecutionException, InterruptedException {
        runTest(connectorPropertyFactory, consumer, this::produceTestData);
    }

    /**
     * A simple test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param consumer A Kafka consumer for the test messages
     * @param producer A producer for the test messages
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTest(ConnectorPropertyFactory connectorPropertyFactory, TestMessageConsumer<?> consumer,
                        FunctionalTestMessageProducer producer) throws ExecutionException, InterruptedException {
        connectorPropertyFactory.log();

        LOG.debug("Initialized the connector and put the data for the test execution");
        getKafkaConnectService().initializeConnector(connectorPropertyFactory);

        produceConsumeAndVerify(consumer, producer);
    }

    /**
     * A simple blocking test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param topic the topic to send the messages to
     * @param count the number of messages to send
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTestBlocking(ConnectorPropertyFactory connectorPropertyFactory, String topic, int count) throws ExecutionException, InterruptedException {
        KafkaClient<String, String> kafkaClient = new KafkaClient<>(getKafkaService().getBootstrapServers());
        StringMessageConsumer consumer = new StringMessageConsumer(kafkaClient, topic, count);

        runTestBlocking(connectorPropertyFactory, consumer);
    }

    /**
     * A simple blocking test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param consumer A Kafka consumer for the test messages
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTestBlocking(ConnectorPropertyFactory connectorPropertyFactory, TestMessageConsumer<?> consumer) throws ExecutionException, InterruptedException {
        runTestBlocking(connectorPropertyFactory, consumer, this::produceTestData);
    }

    /**
     * A simple blocking test runner that follows the steps: initialize, start producer, consume messages, verify results
     *
     * @param connectorPropertyFactory A factory for connector properties
     * @param consumer A Kafka consumer for the test messages
     * @param producer A producer for the test messages
     * @throws ExecutionException for test-specific execution failures
     * @throws InterruptedException if interrupted while running the test
     */
    public void runTestBlocking(ConnectorPropertyFactory connectorPropertyFactory, TestMessageConsumer<?> consumer,
                                FunctionalTestMessageProducer producer) throws ExecutionException, InterruptedException {
        connectorPropertyFactory.log();

        LOG.debug("Initialized the connector and put the data for the test execution");
        // the blocking variant waits until the connector reports at least 1 running task
        getKafkaConnectService().initializeConnectorBlocking(connectorPropertyFactory, 1);

        produceConsumeAndVerify(consumer, producer);
    }

    /**
     * Common tail of every runner: produce the test data, consume the resulting Kafka
     * records and verify them. Extracted to remove the duplication between
     * {@code runTest} and {@code runTestBlocking}.
     */
    private void produceConsumeAndVerify(TestMessageConsumer<?> consumer, FunctionalTestMessageProducer producer)
            throws ExecutionException, InterruptedException {
        LOG.debug("Producing test data to be collected by the connector and sent to Kafka");
        producer.produceMessages();

        LOG.debug("Creating the Kafka consumer ...");
        consumer.consumeMessages();
        LOG.debug("Ran the Kafka consumer ...");

        LOG.debug("Verifying messages");
        verifyMessages(consumer);
        LOG.debug("Verified messages");
    }
}
| 9,161 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/utils/NetworkUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.utils;
import java.io.IOException;
import java.net.ConnectException;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Networking helpers for the integration tests: free-port discovery, port reachability
 * checks and listener address formatting.
 *
 * NOTE(review): {@code freeStartingPort} is shared mutable state. {@code getFreePort} is
 * synchronized, but {@code checkPort} is public and mutates the field without
 * synchronization when called directly — confirm whether concurrent direct calls occur.
 */
public final class NetworkUtils {
    /** Upper bound of the dynamic/private port range. */
    public static final int DEFAULT_ENDING_PORT = 65535;
    /** Lower bound of the dynamic/private port range. */
    public static final int DEFAULT_STARTING_PORT = 49152;

    // Next candidate port; advanced past ports already handed out so that successive
    // searches do not restart from the bottom of the range
    public static int freeStartingPort = DEFAULT_STARTING_PORT;

    private static final Logger LOG = LoggerFactory.getLogger(NetworkUtils.class);

    private NetworkUtils() {
        // utils class
    }

    /** @return a free TCP port on the default host, or 0 when none could be found */
    public static int getFreePort() {
        return getFreePort(getHostname());
    }

    /** @return a free TCP port on the given host, or 0 when none could be found */
    public static int getFreePort(String host) {
        return getFreePort(host, freeStartingPort, DEFAULT_ENDING_PORT);
    }

    /** @return a free port for the given protocol on the given host, or 0 when none could be found */
    public static int getFreePort(String host, Protocol protocol) {
        return getFreePort(host, freeStartingPort, DEFAULT_ENDING_PORT, protocol);
    }

    /** @return a free TCP port within the given range on the given host, or 0 when none could be found */
    public static int getFreePort(String host, int startingPort, int endingPort) {
        return getFreePort(host, startingPort, endingPort, Protocol.TCP);
    }

    /**
     * Scans the given port range and returns the first port that can be bound.
     *
     * @return the first free port found, or 0 when every port in the range is taken
     */
    public static synchronized int getFreePort(String host, int startingPort, int endingPort, Protocol protocol) {
        int freePort = 0;

        for (int i = startingPort; i <= endingPort; i++) {
            boolean found = checkPort(host, i, protocol);
            if (found) {
                freePort = i;
                break;
            }
        }

        return freePort;
    }

    /**
     * Checks whether the given port is free by trying to bind it. As a side effect,
     * advances {@link #freeStartingPort} when the probed port equals it.
     *
     * @return true when the port could be bound (i.e.: it is free), false otherwise
     */
    public static boolean checkPort(String host, int port, Protocol protocol) {
        try {
            switch (protocol) {
            case TCP:
                try (ServerSocket ss = new ServerSocket()) {
                    ss.setReuseAddress(true);
                    ss.bind(new InetSocketAddress(host, port), 1);
                    if (port == freeStartingPort) {
                        freeStartingPort++;
                    }
                    return true;
                } catch (IOException e) {
                    // bind failed: the port is in use (or the address is unusable)
                    return false;
                }
            case UDP:
                (new DatagramSocket(new InetSocketAddress(host, port))).close();
                if (port == freeStartingPort) {
                    freeStartingPort++;
                }
                return true;
            default:
                return false;
            }
        } catch (IOException e) {
            // covers the UDP bind failure
            return false;
        }
    }

    /**
     * Checks whether something is listening on the given host/port by attempting to
     * connect to it.
     *
     * @return true when the connection succeeded, false otherwise
     */
    public static boolean portIsOpen(String host, int port) {
        try (Socket socket = new Socket(host, port)) {
            return true;
        } catch (UnknownHostException e) {
            LOG.warn("Unknown host: {}", host);
            return false;
        } catch (ConnectException e) {
            // multi-branch catch instead of the previous instanceof check inside catch (IOException)
            LOG.info("Port {} is likely closed: {}", port, e.getMessage());
            return false;
        } catch (IOException e) {
            LOG.warn("Unhandled I/O exception: {}", e.getMessage(), e);
            return false;
        }
    }

    /** The transport protocols supported by the port checks. */
    public enum Protocol {
        UDP,
        TCP
    }

    /** @return the host name used by the tests (currently hard-coded to localhost) */
    public static String getHostname() {
        return "localhost";
    }

    /** @return an address of the form {@code protocol://localhost:freePort} */
    public static String getAddress(String protocol) {
        return String.format("%s://%s:%d", protocol, NetworkUtils.getHostname(), NetworkUtils.getFreePort());
    }

    /** @return an address of the form {@code protocol://localhost:port} */
    public static String getAddress(String protocol, int port) {
        return String.format("%s://%s:%d", protocol, NetworkUtils.getHostname(), port);
    }
}
| 9,162 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/utils/PropertyUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.utils;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Loads an optional, externally provided test properties file (system property
 * {@code test.properties}) and merges its entries into the JVM system properties.
 */
public final class PropertyUtils {
    private static final Logger LOG = LoggerFactory.getLogger(PropertyUtils.class);

    // Accumulates the loaded test properties. NOTE(review): the reference is exposed
    // mutable via getProperties() — confirm callers do not depend on modifying it.
    private static final Properties properties = new Properties();

    private PropertyUtils() {
    }

    /** @return the test properties loaded by {@link #load()} (empty until it runs) */
    public static Properties getProperties() {
        return properties;
    }

    /**
     * Reads the file named by the {@code test.properties} system property (if set) and
     * copies its entries into the system properties. Fails the test run when the file
     * is missing or unreadable.
     */
    public static void load() {
        String fileName = System.getProperty("test.properties");

        if (fileName == null) {
            LOG.info("Test properties was not provided, therefore not loading any test properties");
            return;
        }

        try (InputStream stream = new FileInputStream(fileName)) {
            properties.load(stream);
            System.getProperties().putAll(properties);
        } catch (FileNotFoundException e) {
            LOG.error("Test properties provided at {} does not exist, therefore aborting the test execution",
                    fileName);
            fail("The given test properties file does not exist");
        } catch (IOException e) {
            LOG.error("I/O error reading the test properties at {}: {}",
                    fileName, e.getMessage(), e);
            fail("Unable to read the test properties file");
        }
    }
}
| 9,163 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectEmbedded.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.services.kafka.EmbeddedKafkaService;
import org.apache.camel.test.infra.kafka.services.KafkaService;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.connect.runtime.AbstractStatus;
import org.apache.kafka.connect.runtime.ConnectorConfig;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.util.clusters.EmbeddedConnectCluster;
import org.apache.kafka.test.TestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A Kafka Connect service backed by Kafka's {@code EmbeddedConnectCluster}, used when the
 * tests run against an {@link EmbeddedKafkaService}.
 */
public class KafkaConnectEmbedded implements KafkaConnectService {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectEmbedded.class);
    private final EmbeddedConnectCluster cluster;
    // name of the currently registered connector; null when none is registered
    private String connectorName;

    /**
     * @param kafkaService must be an {@link EmbeddedKafkaService}; its embedded connect
     *                     cluster is reused by this service
     */
    public KafkaConnectEmbedded(KafkaService kafkaService) {
        if (!(kafkaService instanceof EmbeddedKafkaService)) {
            throw new RuntimeException("Invalid Kafka service type: "
                    + (kafkaService == null ? "null" : kafkaService.getClass()));
        }

        this.cluster = ((EmbeddedKafkaService) kafkaService).getCluster();
    }

    // The property factory may yield non-String keys/values; the connect cluster API wants Strings
    private void convertProperty(Map<String, String> map, Object key, Object value) {
        map.put(String.valueOf(key), String.valueOf(value));
    }

    @Override
    public void initializeConnector(ConnectorPropertyFactory propertyFactory) {
        LOG.trace("Adding the new connector");
        Map<String, String> configuredProperties = new HashMap<>();

        propertyFactory.getProperties().forEach((k, v) -> convertProperty(configuredProperties, k, v));

        connectorName = configuredProperties.get(ConnectorConfig.NAME_CONFIG);
        LOG.info("Initializing connector {}", connectorName);
        cluster.configureConnector(connectorName, configuredProperties);
        LOG.trace("Added the new connector");
    }

    // A connector counts as started when the connector itself and at least the expected
    // number of tasks all report the RUNNING state
    private boolean doCheckState(ConnectorStateInfo connectorStateInfo, Integer expectedTaskNumber) {
        return connectorStateInfo.tasks().size() >= expectedTaskNumber
                && connectorStateInfo.connector().state().equals(AbstractStatus.State.RUNNING.toString())
                && connectorStateInfo.tasks().stream().allMatch(s -> s.state().equals(AbstractStatus.State.RUNNING.toString()));
    }

    @Override
    public void initializeConnectorBlocking(ConnectorPropertyFactory propertyFactory, Integer expectedTaskNumber) throws InterruptedException {
        initializeConnector(propertyFactory);

        TestUtils.waitForCondition(() -> {
            ConnectorStateInfo connectorStateInfo = null;

            // the status endpoint may return null until the connector is registered
            do {
                connectorStateInfo = cluster.connectorStatus(connectorName);
                Thread.sleep(20L);
            } while (connectorStateInfo == null);

            return doCheckState(connectorStateInfo, expectedTaskNumber);
        }, 30000L, "The connector " + connectorName + " did not start within a reasonable time");
    }

    @Override
    public void stop() {
        if (connectorName != null) {
            try {
                LOG.info("Removing connector {}", connectorName);
                cluster.deleteConnector(connectorName);

                LOG.info("Removing topics used during the test");
                // fix: the admin client is AutoCloseable and was previously leaked
                try (Admin client = cluster.kafka().createAdminClient()) {
                    client.deleteTopics(cluster.connectorTopics(connectorName).topics());
                }
            } finally {
                connectorName = null;
            }
        }
    }

    @Override
    public void start() {
        // NO-OP: the embedded cluster lifecycle is managed by EmbeddedKafkaService
    }

    private ConnectorStateInfo getConnectorStatus(String connectorName) {
        return cluster.connectorStatus(connectorName);
    }

    /**
     * Applies the given consumer to the state of every connector registered on the cluster.
     */
    public void connectorStateCheck(Consumer<ConnectorStateInfo> taskStateConsumer) {
        cluster.connectors().forEach(c -> taskStateConsumer.accept(getConnectorStatus(c)));
    }
}
| 9,164 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectRunnerService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.test.infra.kafka.services.KafkaService;
import org.apache.kafka.connect.runtime.ConnectorConfig;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * A Kafka Connect service backed by the embeddable {@link KafkaConnectRunner}, executed on
 * a background thread pool.
 */
public class KafkaConnectRunnerService implements KafkaConnectService {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectRunnerService.class);
    private final KafkaConnectRunner kafkaConnectRunner;
    private final ExecutorService service = Executors.newCachedThreadPool();

    /**
     * @param kafkaService the (non-null) Kafka service whose bootstrap servers the runner connects to
     */
    public KafkaConnectRunnerService(KafkaService kafkaService) {
        Objects.requireNonNull(kafkaService);

        LOG.debug("Connecting the Kafka Connect Runner to {}", kafkaService.getBootstrapServers());
        this.kafkaConnectRunner = new KafkaConnectRunner(kafkaService.getBootstrapServers());
    }

    /**
     * Validates the outcome of a connector initialization, throwing when it failed.
     */
    private void checkInitializationState(KafkaConnectRunner.ConnectorInitState initState) {
        // fix: was Objects.nonNull(initState), which merely returns a boolean that was discarded
        Objects.requireNonNull(initState, "initState");

        Throwable error = initState.getError();
        Map<String, String> configs = initState.getConfigs();
        String name = configs.get(ConnectorConfig.NAME_CONFIG);

        if (error != null) {
            LOG.error("Failed to create the connector {}: {}", name, error.getMessage(), error);

            throw new RuntimeException(String.format("Failed to create the connector %s: %s", name,
                    error.getMessage()), error);
        } else {
            if (initState.isCreated()) {
                LOG.debug("Created and initialized the connector {}", name);
            } else {
                LOG.debug("Failed to create connector {}", name);

                throw new RuntimeException(String.format("Failed to create connector %s", name));
            }
        }
    }

    /**
     * As {@link #checkInitializationState(KafkaConnectRunner.ConnectorInitState)}, but always
     * releases the given latch so blocking callers are not left waiting on failure.
     */
    private void checkInitializationState(KafkaConnectRunner.ConnectorInitState initState, CountDownLatch latch) {
        try {
            checkInitializationState(initState);
        } finally {
            latch.countDown();
        }
    }

    @Override
    public void initializeConnector(ConnectorPropertyFactory propertyFactory) throws ExecutionException, InterruptedException {
        kafkaConnectRunner.initializeConnector(propertyFactory, this::checkInitializationState);
    }

    @Override
    public void initializeConnectorBlocking(ConnectorPropertyFactory propertyFactory, Integer expectedTaskNumber) throws ExecutionException, InterruptedException {
        // NOTE(review): expectedTaskNumber is not used here (unlike the embedded
        // implementation) — confirm whether task-count checking is intended
        CountDownLatch latch = new CountDownLatch(1);

        kafkaConnectRunner.initializeConnector(propertyFactory, this::checkInitializationState, latch);

        if (!latch.await(30, TimeUnit.SECONDS)) {
            fail("The connector did not start within a reasonable time");
        }
    }

    public void stop() {
        kafkaConnectRunner.stop();

        // fix: awaitTermination never completes unless a shutdown was requested first,
        // so the previous code always burned the full 5 seconds and logged a warning
        service.shutdown();
        try {
            if (!service.awaitTermination(5, TimeUnit.SECONDS)) {
                LOG.warn("Timed out while waiting for the embedded runner to stop");
            }
        } catch (InterruptedException e) {
            LOG.warn("The test was interrupted while executing");
            // restore the interrupt flag for callers further up the stack
            Thread.currentThread().interrupt();
        }
    }

    public void start() {
        CountDownLatch latch = new CountDownLatch(1);
        service.submit(() -> kafkaConnectRunner.run(latch));

        try {
            if (!latch.await(30, TimeUnit.SECONDS)) {
                LOG.warn("The Kafka Connect Runner timed out while initializing");

                throw new RuntimeException("The Kafka Connect Runner timed out while initializing");
            }
        } catch (InterruptedException e) {
            LOG.error("The test was interrupted while executing");
            // restore the interrupt flag for callers further up the stack
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Applies the given consumer to the state of every connector managed by the runner.
     */
    public void connectorStateCheck(Consumer<ConnectorStateInfo> taskStateConsumer) {
        kafkaConnectRunner.connectorStateCheck(taskStateConsumer);
    }
}
| 9,165 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.Properties;
/**
* An interface for producing different types of Kafka connect properties. The CLI runtime
* equivalent for this file is the connect-standalone.properties
*/
/**
 * An interface for producing different types of Kafka connect properties. The CLI runtime
 * equivalent for this file is the connect-standalone.properties
 */
public interface KafkaConnectPropertyFactory {
    /**
     * Gets the properties used to configure the Kafka connect runtime
     *
     * @return a Properties object containing the set of properties for the Kafka connect
     *         runtime
     */
    Properties getProperties();
}
| 9,166 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.junit.jupiter.api.extension.AfterTestExecutionCallback;
import org.junit.jupiter.api.extension.BeforeTestExecutionCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
 * The lifecycle contract for a Kafka Connect runtime used in tests. Doubles as a JUnit 5
 * extension: {@link #start()} runs before each test execution and {@link #stop()} after it.
 */
public interface KafkaConnectService extends BeforeTestExecutionCallback, AfterTestExecutionCallback {
    /**
     * Registers a connector built from the given property factory without waiting for it
     * to become ready.
     *
     * @throws ExecutionException if initializing the connector fails
     * @throws InterruptedException if interrupted while initializing
     */
    void initializeConnector(ConnectorPropertyFactory propertyFactory) throws ExecutionException, InterruptedException;
    /**
     * Registers a connector and blocks until it is ready.
     *
     * @param expectedTasksnumber the number of tasks expected to be running
     *                            (NOTE(review): parameter name typo — consider expectedTaskNumber;
     *                            also unused by some implementations — confirm)
     * @throws ExecutionException if initializing the connector fails
     * @throws InterruptedException if interrupted while waiting
     */
    void initializeConnectorBlocking(ConnectorPropertyFactory propertyFactory, Integer expectedTasksnumber) throws ExecutionException, InterruptedException;
    /** Stops the service, releasing any connectors and resources it holds. */
    void stop();
    /** Starts the service. */
    void start();
    /** Applies the given consumer to the state of every managed connector. */
    void connectorStateCheck(Consumer<ConnectorStateInfo> taskStateConsumer);
    @Override
    default void afterTestExecution(ExtensionContext extensionContext) {
        stop();
    }
    @Override
    default void beforeTestExecution(ExtensionContext extensionContext) {
        start();
    }
}
| 9,167 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/DefaultKafkaConnectPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.Properties;
import org.apache.camel.kafkaconnector.common.PluginPathHelper;
import org.apache.camel.kafkaconnector.common.utils.NetworkUtils;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A set of properties for the Kafka connect runtime that match the standard configuration
 * used for the standalone CLI connect runtime.
 */
class DefaultKafkaConnectPropertyFactory implements KafkaConnectPropertyFactory {
    private static final Logger LOG = LoggerFactory.getLogger(DefaultKafkaConnectPropertyFactory.class);
    private final String bootstrapServer;

    /**
     * Constructs the properties using the given bootstrap server
     * @param bootstrapServer the address of the server in the format
     *                        PLAINTEXT://${address}:${port}
     */
    public DefaultKafkaConnectPropertyFactory(String bootstrapServer) {
        this.bootstrapServer = bootstrapServer;
    }

    @Override
    public Properties getProperties() {
        Properties connectProps = new Properties();

        // pick a free local port for the REST listener
        String listenerAddress = NetworkUtils.getAddress("http");
        LOG.info("Using the following address for the listener configuration: {}", listenerAddress);

        // broker connection and REST listener
        connectProps.put(StandaloneConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        connectProps.put(StandaloneConfig.LISTENERS_CONFIG, listenerAddress);

        // record converters (JSON on both sides)
        connectProps.put(StandaloneConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        connectProps.put(StandaloneConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");

        // offset storage backed by a file on the test classpath output directory
        connectProps.put(StandaloneConfig.OFFSET_STORAGE_FILE_FILENAME_CONFIG,
                this.getClass().getResource("/").getPath() + "connect.offsets");
        connectProps.put(StandaloneConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG, "10000");

        // where the connector plugins are located
        connectProps.put(StandaloneConfig.PLUGIN_PATH_CONFIG, PluginPathHelper.getInstance().pluginPaths());

        return connectProps;
    }
}
| 9,168 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.clients.kafka.KafkaClient;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy;
import org.apache.kafka.connect.runtime.Connect;
import org.apache.kafka.connect.runtime.ConnectorConfig;
import org.apache.kafka.connect.runtime.Herder;
import org.apache.kafka.connect.runtime.Worker;
import org.apache.kafka.connect.runtime.WorkerInfo;
import org.apache.kafka.connect.runtime.isolation.Plugins;
import org.apache.kafka.connect.runtime.rest.RestClient;
import org.apache.kafka.connect.runtime.rest.RestServer;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.runtime.standalone.StandaloneHerder;
import org.apache.kafka.connect.storage.FileOffsetBackingStore;
import org.apache.kafka.connect.util.FutureCallback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An embeddable Kafka Connect runtime for usage during the tests. It is equivalent
* to the Kafka connect standalone CLI
*/
class KafkaConnectRunner {
private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectRunner.class);
private final String bootstrapServer;
private final KafkaConnectPropertyFactory kafkaConnectPropertyFactory;
private final List<ConnectorPropertyFactory> connectorPropertyFactories = new ArrayList<>();
private Connect connect;
private Herder herder;
/**
* Contains the initialization state. This just abstracts internal details from the
* runner, so those details don't leak in other parts of the test code
*/
public class ConnectorInitState {
private Map<String, String> configs;
private boolean created;
private Throwable error;
public ConnectorInitState(Map<String, String> configs, boolean created, Throwable error) {
this.configs = configs;
this.created = created;
this.error = error;
}
public Map<String, String> getConfigs() {
return configs;
}
public boolean isCreated() {
return created;
}
public Throwable getError() {
return error;
}
}
/**
* Constructs the properties using the given bootstrap server
* @param bootstrapServer the address of the server in the format
* PLAINTEXT://${address}:${port}
*/
public KafkaConnectRunner(String bootstrapServer) {
this.bootstrapServer = bootstrapServer;
this.kafkaConnectPropertyFactory = new DefaultKafkaConnectPropertyFactory(bootstrapServer);
}
/**
* here does not seem to be a public interface for embedding a Kafka connect runtime,
* therefore, this code is modeled from the behavior taken from
* https://github.com/apache/kafka/blob/2.1/connect/runtime/src/main/java/org/apache/kafka/connect/cli/ConnectStandalone.java
* and performs the initialization in a roughly similar manner.
*
*/
private void init() {
LOG.info("Started worker initialization");
Time time = Time.SYSTEM;
// Initializes the system runtime information and logs some of the information
WorkerInfo initInfo = new WorkerInfo();
initInfo.logAll();
Properties props = kafkaConnectPropertyFactory.getProperties();
Map<String, String> standAloneProperties = Utils.propsToStringMap(props);
// Not needed, but we need this one to initialize the worker
Plugins plugins = new Plugins(standAloneProperties);
StandaloneConfig config = new StandaloneConfig(standAloneProperties);
String kafkaClusterId = config.kafkaClusterId();
AllConnectorClientConfigOverridePolicy allConnectorClientConfigOverridePolicy = new AllConnectorClientConfigOverridePolicy();
RestClient restClient = new RestClient(config);
RestServer rest = new RestServer(config, restClient);
rest.initializeServer();
/*
According to the Kafka source code "... Worker runs a (dynamic) set of tasks
in a set of threads, doing the work of actually moving data to/from Kafka ..."
*/
Worker worker = new Worker(bootstrapServer, time, plugins, config, new FileOffsetBackingStore(), allConnectorClientConfigOverridePolicy);
/*
From Kafka source code: " ... The herder interface tracks and manages workers
and connectors ..."
*/
herder = new StandaloneHerder(worker, kafkaClusterId, allConnectorClientConfigOverridePolicy);
connect = new Connect(herder, rest);
LOG.info("Finished initializing the worker");
}
/**
* Offers the list of connector properties producers to be configured prior to running
* the embeddable connect runtime
* @return A list object that can be modified to include or remove connector property
* producers
*/
public List<ConnectorPropertyFactory> getConnectorPropertyProducers() {
return connectorPropertyFactories;
}
public void initializeConnector(ConnectorPropertyFactory connectorPropertyFactory,
Consumer<ConnectorInitState> callback) throws ExecutionException, InterruptedException {
Properties connectorProps = connectorPropertyFactory.getProperties();
FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>((error, info) ->
callback.accept(new ConnectorInitState(info.result().config(), info.created(), error)));
herder.putConnectorConfig(
connectorProps.getProperty(ConnectorConfig.NAME_CONFIG),
Utils.propsToStringMap(connectorProps), false, cb);
cb.get();
}
public <T> void initializeConnector(ConnectorPropertyFactory connectorPropertyFactory,
BiConsumer<ConnectorInitState, T> callback, T payload) throws ExecutionException, InterruptedException {
Properties connectorProps = connectorPropertyFactory.getProperties();
FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>((error, info) ->
callback.accept(new ConnectorInitState(info.result().config(), info.created(), error), payload));
herder.putConnectorConfig(
connectorProps.getProperty(ConnectorConfig.NAME_CONFIG),
Utils.propsToStringMap(connectorProps), false, cb);
cb.get();
}
/**
* Run the embeddable Kafka connect runtime
* @return true if successfully started the runtime or false otherwise
*/
public boolean run(CountDownLatch latch) {
try {
init();
LOG.info("Starting the connect interface");
connect.start();
LOG.info("Started the connect interface");
} catch (Throwable t) {
LOG.error("Container init or start has failed due to: ", t);
} finally {
latch.countDown();
}
connect.awaitStop();
return true;
}
/**
* Stops the embeddable Kafka connect runtime
*/
public void stop() {
if (connect != null) {
LOG.info("Removing topics used during the test");
KafkaClient<?, ?> kafkaClient = new KafkaClient<>(bootstrapServer);
for (String connector : herder.connectors()) {
herder.connectorActiveTopics(connector).topics().forEach(t -> kafkaClient.deleteTopic(t));
}
connect.stop();
} else {
LOG.warn("Trying to stop an uninitialized Kafka Connect Runner");
}
}
private ConnectorStateInfo getConnectorStatus(String connectorName) {
return herder.connectorStatus(connectorName);
}
public void connectorStateCheck(Consumer<ConnectorStateInfo> taskStateConsumer) {
herder.connectors().forEach(c -> taskStateConsumer.accept(getConnectorStatus(c)));
}
}
| 9,169 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafkaconnect/KafkaConnectRunnerFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafkaconnect;
import org.apache.camel.kafkaconnector.common.services.kafka.EmbeddedKafkaService;
import org.apache.camel.test.infra.kafka.services.KafkaService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class KafkaConnectRunnerFactory {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaConnectRunnerFactory.class);

    private KafkaConnectRunnerFactory() {
        // utility class: instances are never created
    }

    /**
     * Creates the Kafka Connect service matching the type of the given Kafka service.
     * @param kafkaService the Kafka service in use by the test
     * @return an embedded Kafka Connect service when the Kafka service is embedded,
     * otherwise a standalone-runner based service
     */
    public static KafkaConnectService createService(KafkaService kafkaService) {
        final boolean embedded = kafkaService instanceof EmbeddedKafkaService;

        if (embedded) {
            LOG.info("Using the Kafka embedded Kafka service");
            return new KafkaConnectEmbedded(kafkaService);
        }

        LOG.info("Using the custom Kafka service");
        return new KafkaConnectRunnerService(kafkaService);
    }
}
| 9,170 |
0 | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services | Create_ds/camel-kafka-connector/tests/itests-common/src/test/java/org/apache/camel/kafkaconnector/common/services/kafka/EmbeddedKafkaService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.common.services.kafka;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.common.PluginPathHelper;
import org.apache.camel.kafkaconnector.common.utils.NetworkUtils;
import org.apache.camel.test.infra.kafka.services.KafkaService;
import org.apache.kafka.connect.runtime.WorkerConfig;
import org.apache.kafka.connect.util.clusters.EmbeddedConnectCluster;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link KafkaService} backed by Kafka's own embedded Connect cluster,
 * suitable for running the whole broker + Connect stack in-process during tests.
 */
public class EmbeddedKafkaService implements KafkaService {
    private static final Logger LOG = LoggerFactory.getLogger(EmbeddedKafkaService.class);
    private static final long OFFSET_COMMIT_INTERVAL_MS = TimeUnit.SECONDS.toMillis(30);

    private EmbeddedConnectCluster cluster;
    private boolean started;

    public EmbeddedKafkaService() {
        buildCluster();
    }

    /** Builds (but does not start) the embedded cluster with one broker and one worker. */
    private void buildCluster() {
        LOG.info("Creating the embedded Kafka connect instance");
        EmbeddedConnectCluster.Builder clusterBuilder = new EmbeddedConnectCluster.Builder();

        Properties brokerConfiguration = new Properties();
        brokerConfiguration.put("auto.create.topics.enable", String.valueOf(true));

        Map<String, String> workerConfiguration = new HashMap<>();
        workerConfiguration.put(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG, String.valueOf(OFFSET_COMMIT_INTERVAL_MS));

        // Bind the REST listener to a free port so parallel runs do not clash
        String listenerAddress = "http://localhost:" + NetworkUtils.getFreePort();
        LOG.info("Using the following address for the listener configuration: {}", listenerAddress);
        workerConfiguration.put(WorkerConfig.LISTENERS_CONFIG, listenerAddress);

        String pluginPath = PluginPathHelper.getInstance().pluginPaths();

        LOG.info("Adding the returned directories to the plugin path. This may take A VERY long time to complete");
        workerConfiguration.put(WorkerConfig.PLUGIN_PATH_CONFIG, pluginPath);

        LOG.info("Building the embedded Kafka connect instance");
        this.cluster = clusterBuilder
                .name("connect-cluster")
                .numWorkers(1)
                .numBrokers(1)
                .brokerProps(brokerConfiguration)
                .workerProps(workerConfiguration)
                .maskExitProcedures(true)
                .build();

        LOG.info("Built the embedded Kafka connect instance");
    }

    @Override
    public String getBootstrapServers() {
        // Only meaningful once the cluster is running
        return started ? cluster.kafka().bootstrapServers() : null;
    }

    @Override
    public void registerProperties() {
        // NO-OP
    }

    @Override
    public void initialize() {
        if (started) {
            return;
        }

        cluster.start();
        started = true;

        registerProperties();
        LOG.info("Kafka bootstrap server running at address {}", getBootstrapServers());
    }

    @Override
    public void shutdown() {
        LOG.info("Stopping the embedded kafka cluster service");
        if (!started) {
            return;
        }

        cluster.stop();
        started = false;
    }

    @Override
    public void beforeTestExecution(ExtensionContext extensionContext) {
        initialize();
    }

    @Override
    public void afterTestExecution(ExtensionContext context) {
        shutdown();
    }

    // WARNING: this may come uninitialized
    public EmbeddedConnectCluster getCluster() {
        if (!started) {
            LOG.warn("Returning a non-initialized cluster");
        }
        return cluster;
    }
}
| 9,171 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/source/CamelSourceSshITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.source;
import java.util.concurrent.ExecutionException;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSourceTestSupport;
import org.apache.camel.kafkaconnector.common.test.TestMessageConsumer;
import org.apache.camel.kafkaconnector.ssh.services.SshService;
import org.apache.camel.kafkaconnector.ssh.services.SshServiceFactory;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertEquals;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSourceSshITCase extends CamelSourceTestSupport {
    @RegisterExtension
    public static SshService sshService = SshServiceFactory.createService();

    private final int expect = 1;
    // Saved so the property can be restored once the test class finishes
    private String oldUserHome = System.getProperty("user.home");

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-ssh-source-kafka-connector"};
    }

    @BeforeAll
    public void setupKeyHome() {
        // Redirect SSH key lookup away from the real user home directory
        System.setProperty("user.home", "target/user-home");
    }

    @AfterAll
    public void tearDownKeyHome() {
        System.setProperty("user.home", oldUserHome);
    }

    @Override
    protected void produceTestData() {
        // NO-OP: the source connector itself produces the data by polling the SSH host
    }

    @Override
    protected void verifyMessages(TestMessageConsumer<?> consumer) {
        int received = consumer.consumedMessages().size();

        // JUnit 5's assertEquals takes (expected, actual, message); the original call
        // had the arguments swapped, which yields misleading failure messages
        assertEquals(expect, received, "Didn't process the expected amount of messages");
    }

    @Timeout(90)
    @Test
    public void testRetrieveFromSsh() throws ExecutionException, InterruptedException {
        String topic = getTopicForTest(this);

        ConnectorPropertyFactory connectorPropertyFactory = CamelSshPropertyFactory
                .basic()
                .withKafkaTopic(topic)
                .withHost(sshService.getSshHost())
                .withPort(Integer.toString(sshService.getSshPort()))
                .withUsername("root")
                .withPassword("root")
                .withPollcommand("date")
                .withTransformsConfig("SshTransforms")
                .withEntry("type", "org.apache.camel.kafkaconnector.transforms.CamelTypeConverterTransform$Value")
                .withEntry("target.type", "java.lang.String")
                .end();

        runTest(connectorPropertyFactory, topic, expect);
    }
}
| 9,172 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/source/CamelSshPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.source;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
/**
 * Fluent property factory for the SSH source connector used by the tests.
 */
final class CamelSshPropertyFactory extends SourceConnectorPropertyFactory<CamelSshPropertyFactory> {
    // Shared prefix of every ssh-source kamelet option (compile-time constant,
    // so the concatenated keys are identical to the original literals)
    private static final String KAMELET_PREFIX = "camel.kamelet.ssh-source.";

    private CamelSshPropertyFactory() {
    }

    /** Sets the host the SSH source connects to. */
    public CamelSshPropertyFactory withHost(String host) {
        return setProperty(KAMELET_PREFIX + "connectionHost", host);
    }

    /** Sets the port the SSH source connects to. */
    public CamelSshPropertyFactory withPort(String port) {
        return setProperty(KAMELET_PREFIX + "connectionPort", port);
    }

    /** Sets the user name used for the SSH login. */
    public CamelSshPropertyFactory withUsername(String username) {
        return setProperty(KAMELET_PREFIX + "username", username);
    }

    /** Sets the password used for the SSH login. */
    public CamelSshPropertyFactory withPassword(String password) {
        return setProperty(KAMELET_PREFIX + "password", password);
    }

    /** Sets the shell command executed on every poll. */
    public CamelSshPropertyFactory withPollcommand(String pollCommand) {
        return setProperty(KAMELET_PREFIX + "pollCommand", pollCommand);
    }

    /** @return a factory pre-configured with the basic connector settings */
    public static CamelSshPropertyFactory basic() {
        return new CamelSshPropertyFactory().withName("CamelSshSourceConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.sshsource.CamelSshsourceSourceConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets")
                .setProperty("camel.component.properties.environment-variable-mode", "1");
    }
}
| 9,173 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/sink/CamelSshPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Fluent property factory for the SSH sink connector used by the tests.
 */
final class CamelSshPropertyFactory extends SinkConnectorPropertyFactory<CamelSshPropertyFactory> {
    private CamelSshPropertyFactory() {
    }

    /** Sets the host the SSH sink connects to. */
    public CamelSshPropertyFactory withHost(String host) {
        return setProperty("camel.sink.path.host", host);
    }

    /** Sets the port the SSH sink connects to. */
    public CamelSshPropertyFactory withPort(String port) {
        return setProperty("camel.sink.path.port", port);
    }

    /** Sets the user name used for the SSH login. */
    public CamelSshPropertyFactory withUsername(String username) {
        return setProperty("camel.sink.endpoint.username", username);
    }

    /** Sets the password used for the SSH login. */
    public CamelSshPropertyFactory withPassword(String password) {
        return setProperty("camel.sink.endpoint.password", password);
    }

    /** @return a factory pre-configured with the basic connector settings */
    public static CamelSshPropertyFactory basic() {
        // Connector name fixed from the copy-pasted "CamelSshSourceConnector": this
        // factory configures the *sink* connector, and a distinct name avoids a clash
        // if source and sink connectors are ever registered with the same herder
        return new CamelSshPropertyFactory().withName("CamelSshSinkConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.sshsink.CamelSshsinkSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets")
                .setProperty("camel.component.properties.environment-variable-mode", "1");
    }
}
| 9,174 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/sink/CamelSinkSshITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.sink;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.common.test.StringMessageProducer;
import org.apache.camel.kafkaconnector.ssh.services.SshService;
import org.apache.camel.kafkaconnector.ssh.services.SshServiceFactory;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.fail;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class CamelSinkSshITCase extends CamelSinkTestSupport {
    @RegisterExtension
    public static SshService sshService = SshServiceFactory.createService();

    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkSshITCase.class);

    private final int expect = 3;
    private String topic;

    /** Producer whose every message body is the shell command "date". */
    private static class DateCommandProducer extends StringMessageProducer {
        public DateCommandProducer(String bootstrapServer, String topicName, int count) {
            super(bootstrapServer, topicName, count);
        }

        @Override
        public String testMessageContent(int current) {
            return "date";
        }
    }

    @Override
    protected String[] getConnectorsInTest() {
        return new String[] {"camel-ssh-sink-kafka-connector"};
    }

    @BeforeEach
    public void setUp() {
        topic = getTopicForTest(this);
    }

    @Override
    protected void consumeMessages(CountDownLatch latch) {
        // Nothing to consume: just release the latch immediately
        latch.countDown();
    }

    @Override
    protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
        boolean completed = latch.await(30, TimeUnit.SECONDS);
        if (!completed) {
            fail("Timed out wait for data to be added to the Kafka cluster");
        }
    }

    @Timeout(90)
    @Test
    public void testSshCommand() throws Exception {
        ConnectorPropertyFactory connectorPropertyFactory = CamelSshPropertyFactory
                .basic()
                .withTopics(topic)
                .withHost(sshService.getSshHost())
                .withPort(Integer.toString(sshService.getSshPort()))
                .withUsername("root")
                .withPassword("root");

        runTestNonBlocking(connectorPropertyFactory, new DateCommandProducer(getKafkaService().getBootstrapServers(), topic, expect));
    }
}
| 9,175 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/common/SshProperties.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.common;
/**
 * Names of the system properties through which the SSH test service publishes
 * its coordinates.
 */
public final class SshProperties {
    /** System property holding the SSH server port. */
    public static final String SSH_PORT = "ssh.port";

    /** System property holding the SSH server host. */
    public static final String SSH_HOST = "ssh.host";

    private SshProperties() {
        // constants holder: never instantiated
    }
}
| 9,176 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/services/SshService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.services;
import org.apache.camel.test.infra.common.services.TestService;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
 * Test-infra contract for an SSH server used by the integration tests; hooks the
 * service lifecycle into JUnit's before-all/after-all callbacks.
 */
public interface SshService extends TestService, BeforeAllCallback, AfterAllCallback {

    /** @return the port on which the SSH daemon listens */
    int getSshPort();

    /** @return the host name or address of the SSH daemon */
    String getSshHost();

    /** @return the host and port combined as {@code host:port} */
    default String getSshEndpoint() {
        return String.format("%s:%d", getSshHost(), getSshPort());
    }

    @Override
    default void beforeAll(ExtensionContext extensionContext) {
        initialize();
    }

    @Override
    default void afterAll(ExtensionContext extensionContext) {
        shutdown();
    }
}
| 9,177 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/services/SshServiceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.services;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class SshServiceFactory {
    private static final Logger LOG = LoggerFactory.getLogger(SshServiceFactory.class);

    private SshServiceFactory() {
    }

    /**
     * Creates the SSH test service selected by the {@code ssh.instance.type} system
     * property: a local container (the default) or a pre-existing remote instance.
     * @return the configured {@link SshService}
     * @throws UnsupportedOperationException if the property holds an unknown value
     */
    public static SshService createService() {
        String instanceType = System.getProperty("ssh.instance.type");

        if (instanceType == null || instanceType.equals("local-ssh-container")) {
            return new SshLocalContainerService();
        }

        if (instanceType.equals("remote")) {
            return new SshRemoteService();
        }

        // Fixed copy-paste defects: the log line was missing its closing quote and
        // the exception message referred to rabbitmq instead of ssh
        LOG.error("ssh instance must be one of 'local-ssh-container' or 'remote'");
        throw new UnsupportedOperationException(String.format("Invalid ssh instance type: %s", instanceType));
    }
}
| 9,178 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/services/SshLocalContainerService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.services;
import org.apache.camel.kafkaconnector.ssh.common.SshProperties;
import org.apache.camel.test.infra.common.services.ContainerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link SshService} implementation that runs the SSH daemon inside a local
 * test container.
 */
public class SshLocalContainerService implements SshService, ContainerService<SshContainer> {
    private static final Logger LOG = LoggerFactory.getLogger(SshLocalContainerService.class);

    // Created eagerly; the container is only started when initialize() runs
    private final SshContainer container = new SshContainer();

    @Override
    public int getSshPort() {
        return container.getSSHPort();
    }

    @Override
    public String getSshHost() {
        return container.getSSHHost();
    }

    @Override
    public void registerProperties() {
        // Publish the container coordinates so other test code can look them up
        System.setProperty(SshProperties.SSH_HOST, getSshHost());
        System.setProperty(SshProperties.SSH_PORT, String.valueOf(getSshPort()));
    }

    @Override
    public void initialize() {
        container.start();
        LOG.info("SSH server running at address {}", getSshEndpoint());
    }

    @Override
    public void shutdown() {
        LOG.info("Stopping the Ssh container");
        container.stop();
    }

    @Override
    public SshContainer getContainer() {
        return container;
    }
}
| 9,179 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/services/SshContainer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.services;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
/**
 * A local instance of an Ubuntu server with SSH enabled, based on the
 * rastasheep/ubuntu-sshd image. Exposes the mapped SSH port and the host
 * address for test clients.
 */
public class SshContainer extends GenericContainer<SshContainer> {
    // Image tag is pinned so test runs are reproducible.
    private static final String SSH_IMAGE = "rastasheep/ubuntu-sshd:14.04";
    // Port inside the container; the host-visible port is the mapped one.
    private static final int SSH_PORT = 22;

    public SshContainer() {
        super(SSH_IMAGE);
        withExposedPorts(SSH_PORT);
        // Container counts as ready as soon as something listens on the exposed port.
        waitingFor(Wait.forListeningPort());
    }

    /** @return the host-side port mapped to the container's port 22 */
    public int getSSHPort() {
        return getMappedPort(SSH_PORT);
    }

    /** @return the address SSH clients should connect to */
    public String getSSHHost() {
        // NOTE(review): getContainerIpAddress() is deprecated in newer
        // Testcontainers releases in favour of getHost() — consider migrating.
        return getContainerIpAddress();
    }
}
| 9,180 |
0 | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh | Create_ds/camel-kafka-connector/tests/itests-ssh/src/test/java/org/apache/camel/kafkaconnector/ssh/services/SshRemoteService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.ssh.services;
import org.apache.camel.kafkaconnector.ssh.common.SshProperties;
/**
 * SSH service backed by an externally managed server. Connection details are
 * read from system properties rather than provisioned locally, and the
 * server's lifecycle is not owned by the tests.
 */
public class SshRemoteService implements SshService {
    private static final int DEFAULT_SSH_PORT = 22;

    @Override
    public void registerProperties() {
        // NO-OP
    }

    @Override
    public void initialize() {
        registerProperties();
    }

    @Override
    public void shutdown() {
        // Nothing to stop: the remote server outlives the test run.
    }

    @Override
    public int getSshPort() {
        String configured = System.getProperty(SshProperties.SSH_PORT);
        // Fall back to the standard SSH port when no override is configured.
        return configured == null ? DEFAULT_SSH_PORT : Integer.parseInt(configured);
    }

    @Override
    public String getSshHost() {
        return System.getProperty(SshProperties.SSH_HOST);
    }
}
| 9,181 |
0 | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub/clients/GooglePubEasy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.google.pubsub.clients;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.rpc.FixedTransportChannelProvider;
import com.google.cloud.pubsub.v1.MessageReceiver;
import com.google.cloud.pubsub.v1.Subscriber;
import com.google.cloud.pubsub.v1.SubscriptionAdminClient;
import com.google.cloud.pubsub.v1.SubscriptionAdminSettings;
import com.google.cloud.pubsub.v1.TopicAdminClient;
import com.google.cloud.pubsub.v1.TopicAdminSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.pubsub.v1.ProjectSubscriptionName;
import com.google.pubsub.v1.PushConfig;
import com.google.pubsub.v1.Subscription;
import com.google.pubsub.v1.TopicName;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class GooglePubEasy {
private static final Logger LOG = LoggerFactory.getLogger(GooglePubEasy.class);
private final List<String> receivedMessages = new ArrayList<>();
private final String serviceAddress;
private final String project;
private final ManagedChannel channel;
private final FixedTransportChannelProvider channelProvider;
private ProjectSubscriptionName projectSubscriptionName;
private Subscriber subscriber;
public GooglePubEasy(String serviceAddress, String project) {
this.serviceAddress = serviceAddress;
this.project = project;
channel = ManagedChannelBuilder
.forTarget(String.format(serviceAddress))
.usePlaintext()
.build();
channelProvider =
FixedTransportChannelProvider.create(GrpcTransportChannel.create(channel));
}
public void createTopic(String topicName) throws IOException, InterruptedException {
doCreateTopic(topicName);
}
public void deleteTopic(String topicName) throws IOException, InterruptedException {
doDeleteTopic(topicName);
}
public void createSubscription(String subscriptionName, String topicName) throws IOException {
TopicName googleTopic = TopicName.of(project, topicName);
projectSubscriptionName = ProjectSubscriptionName.of(project, subscriptionName);
SubscriptionAdminSettings adminSettings = SubscriptionAdminSettings
.newBuilder()
.setCredentialsProvider(NoCredentialsProvider.create())
.setTransportChannelProvider(channelProvider)
.build();
try (SubscriptionAdminClient subscriptionAdminClient = SubscriptionAdminClient.create(adminSettings)) {
Subscription subscription = subscriptionAdminClient.createSubscription(
projectSubscriptionName, googleTopic, PushConfig.getDefaultInstance(), 10);
}
}
public void deleteSubscription(String subscriptionName) throws IOException {
projectSubscriptionName = ProjectSubscriptionName.of(project, subscriptionName);
SubscriptionAdminSettings adminSettings = SubscriptionAdminSettings
.newBuilder()
.setCredentialsProvider(NoCredentialsProvider.create())
.setTransportChannelProvider(channelProvider)
.build();
try (SubscriptionAdminClient subscriptionAdminClient = SubscriptionAdminClient.create(adminSettings)) {
subscriptionAdminClient.deleteSubscription(projectSubscriptionName);
}
}
private void doCreateTopic(String topicName) throws IOException, InterruptedException {
TopicName googleTopic = TopicName.of(project, topicName);
TopicAdminSettings topicAdminSettings = TopicAdminSettings
.newBuilder()
.setCredentialsProvider(NoCredentialsProvider.create())
.setTransportChannelProvider(channelProvider)
.build();
try (TopicAdminClient client = TopicAdminClient.create(topicAdminSettings)) {
LOG.info("Creating topic {} (original {})", googleTopic.toString(), googleTopic.getTopic());
client.createTopic(googleTopic);
if (client.awaitTermination(10, TimeUnit.SECONDS)) {
client.shutdownNow();
}
}
}
private void doDeleteTopic(String topicName) throws IOException, InterruptedException {
TopicName googleTopic = TopicName.of(project, topicName);
TopicAdminSettings topicAdminSettings = TopicAdminSettings
.newBuilder()
.setCredentialsProvider(NoCredentialsProvider.create())
.setTransportChannelProvider(channelProvider)
.build();
try (TopicAdminClient client = TopicAdminClient.create(topicAdminSettings)) {
LOG.info("Deleting topic {} (original {})", googleTopic.toString(), googleTopic.getTopic());
client.deleteTopic(googleTopic);
if (client.awaitTermination(10, TimeUnit.SECONDS)) {
client.shutdownNow();
}
}
}
public void receive() {
try {
MessageReceiver receiver = (pubsubMessage, ackReplyConsumer) -> {
String data = pubsubMessage.getData().toString();
LOG.info("Received: {}", data);
receivedMessages.add(data);
if (receivedMessages.size() >= 10) {
subscriber.stopAsync();
}
ackReplyConsumer.ack();
};
subscriber = Subscriber
.newBuilder(projectSubscriptionName, receiver)
.setCredentialsProvider(NoCredentialsProvider.create())
.setChannelProvider(channelProvider)
.build();
LOG.info("Adding listener ...");
subscriber.addListener(
new Subscriber.Listener() {
@Override
public void failed(Subscriber.State from, Throwable failure) {
LOG.error(failure.getMessage(), failure);
}
},
MoreExecutors.directExecutor());
LOG.info("Starting async ...");
subscriber.startAsync().awaitRunning();
LOG.info("Waiting for messages ...");
subscriber.awaitTerminated(25, TimeUnit.SECONDS);
} catch (TimeoutException e) {
subscriber.stopAsync();
} finally {
if (subscriber != null) {
subscriber.stopAsync();
}
}
}
public void shutdown() {
if (channel != null) {
channel.shutdown();
}
}
public List<String> getReceivedMessages() {
return Collections.unmodifiableList(receivedMessages);
}
}
| 9,182 |
0 | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub/sink/CamelSinkGooglePubSubITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.google.pubsub.sink;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.camel.kafkaconnector.common.ConnectorPropertyFactory;
import org.apache.camel.kafkaconnector.common.test.CamelSinkTestSupport;
import org.apache.camel.kafkaconnector.google.pubsub.clients.GooglePubEasy;
import org.apache.camel.test.infra.common.TestUtils;
import org.apache.camel.test.infra.google.pubsub.services.GooglePubSubService;
import org.apache.camel.test.infra.google.pubsub.services.GooglePubSubServiceFactory;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Integration test for the camel-google-pubsub sink connector: records written
 * to a Kafka topic by the test harness must arrive on a Google Pub/Sub topic
 * hosted by the test-infra Pub/Sub service.
 */
public class CamelSinkGooglePubSubITCase extends CamelSinkTestSupport {
    @RegisterExtension
    public static GooglePubSubService service = GooglePubSubServiceFactory.createService();

    private static final Logger LOG = LoggerFactory.getLogger(CamelSinkGooglePubSubITCase.class);

    // Pub/Sub project id shared by all topics/subscriptions created in this test.
    private String project = "ckc";
    private GooglePubEasy easyClient;
    private String googlePubSubTopic;
    private String testSubscription;
    // Number of messages the harness sends and the sink is expected to deliver.
    private final int expected = 10;

    @Override
    protected String[] getConnectorsInTest() {
        return new String[]{"camel-google-pubsub-sink-kafka-connector"};
    }

    @BeforeEach
    public void setUp() {
        // Random suffixes keep topic/subscription names unique so repeated runs
        // against the same Pub/Sub instance do not collide.
        googlePubSubTopic = "ckctopic" + TestUtils.randomWithRange(0, 100);
        testSubscription = "test-subscription" + TestUtils.randomWithRange(0, 100);

        LOG.info("Requesting topic {} for the pub/sub client", googlePubSubTopic);
        easyClient = new GooglePubEasy(service.getServiceAddress(), project);

        try {
            easyClient.createTopic(googlePubSubTopic);
            easyClient.createSubscription(testSubscription, googlePubSubTopic);
        } catch (InterruptedException | IOException e) {
            fail(e.getMessage());
        }
    }

    @AfterEach
    public void tearDown() {
        try {
            easyClient.deleteSubscription(testSubscription);
            easyClient.deleteTopic(googlePubSubTopic);
        } catch (InterruptedException | IOException e) {
            fail(e.getMessage());
        } finally {
            // Always release the client's gRPC channel, even if cleanup failed.
            easyClient.shutdown();
        }
    }

    @Override
    protected void consumeMessages(CountDownLatch latch) {
        try {
            // Blocks until the expected messages arrive or the client's internal
            // receive timeout elapses; the latch signals completion either way.
            easyClient.receive();
        } finally {
            latch.countDown();
        }
    }

    @Override
    protected void verifyMessages(CountDownLatch latch) throws InterruptedException {
        List<String> receivedMessages = easyClient.getReceivedMessages();
        if (latch.await(120, TimeUnit.SECONDS)) {
            assertEquals(expected, receivedMessages.size(), "Did not receive as many messages as was sent");
        } else {
            fail("Failed to receive the messages within the specified time");
        }
    }

    @Test
    public void testBasicSendReceive() throws Exception {
        String topicName = getTopicForTest(this);
        // Authentication is disabled in basic(); the endpoint points at the
        // test-infra service address.
        ConnectorPropertyFactory connectorPropertyFactory = CamelGooglePubSubPropertyFactory
                .basic()
                .withTopics(topicName)
                .withProjectId(project)
                .withDestinationName(googlePubSubTopic)
                .withEndpoint(service.getServiceAddress());
        runTest(connectorPropertyFactory, topicName, expected);
    }
}
| 9,183 |
0 | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub | Create_ds/camel-kafka-connector/tests/itests-google-pubsub/src/test/java/org/apache/camel/kafkaconnector/google/pubsub/sink/CamelGooglePubSubPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.google.pubsub.sink;
import org.apache.camel.kafkaconnector.common.SinkConnectorPropertyFactory;
/**
 * Assembles the Kafka Connect configuration for the Google Pub/Sub sink
 * kamelet connector used in the integration tests.
 */
public class CamelGooglePubSubPropertyFactory extends SinkConnectorPropertyFactory<CamelGooglePubSubPropertyFactory> {
    private static final String KAMELET_PREFIX = "camel.kamelet.google-pubsub-sink.";
    private static final String COMPONENT_PREFIX = "camel.component.google-pubsub.";

    public CamelGooglePubSubPropertyFactory withProjectId(String value) {
        return setProperty(KAMELET_PREFIX + "projectId", value);
    }

    public CamelGooglePubSubPropertyFactory withDestinationName(String value) {
        return setProperty(KAMELET_PREFIX + "destinationName", value);
    }

    public CamelGooglePubSubPropertyFactory withServiceAccountKey(String value) {
        return setProperty(KAMELET_PREFIX + "serviceAccountKey", value);
    }

    public CamelGooglePubSubPropertyFactory withEndpoint(String value) {
        return setProperty(COMPONENT_PREFIX + "endpoint", value);
    }

    public CamelGooglePubSubPropertyFactory withAuthenticate(boolean authenticationEnabled) {
        return setProperty(COMPONENT_PREFIX + "authenticate", authenticationEnabled);
    }

    /**
     * Baseline configuration: single task, authentication disabled (suitable
     * for the emulator), plain String converters, and kamelets resolved from
     * the local "kamelets" directory.
     */
    public static CamelGooglePubSubPropertyFactory basic() {
        CamelGooglePubSubPropertyFactory factory = new CamelGooglePubSubPropertyFactory();
        return factory
                .withTasksMax(1)
                .withAuthenticate(false)
                .withServiceAccountKey("dummy")
                .withName("CamelGooglePubSub")
                .withConnectorClass("org.apache.camel.kafkaconnector.googlepubsubsink.CamelGooglepubsubsinkSinkConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets");
    }
}
| 9,184 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSIAMLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local IAM service backed by a LocalStack container with a configurable
 * startup timeout.
 */
public class AWSIAMLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSIAMLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only IAM is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.IAM));
    }
}
| 9,185 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSDynamodbLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local DynamoDB service backed by a LocalStack container with a configurable
 * startup timeout.
 */
public class AWSDynamodbLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSDynamodbLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only DynamoDB is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.DYNAMODB));
    }
}
| 9,186 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSContainerWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
import java.time.Duration;
import org.testcontainers.containers.wait.strategy.Wait;
public class AWSContainerWithTimeout extends AWSContainer {
private static final String DEFAULT_STARTUP_TIMEOUT = "60";
public AWSContainerWithTimeout() {
super();
}
public AWSContainerWithTimeout(String imageName, Service... services) {
super(imageName, services);
}
@Override
protected void setupContainer() {
int startupTimeout = Integer.parseInt(System.getProperty("aws.container.startup.timeout", DEFAULT_STARTUP_TIMEOUT));
this.withExposedPorts(new Integer[]{4566});
this.waitingFor(Wait.forLogMessage(".*Ready\\.\n", 1)
.withStartupTimeout(Duration.ofSeconds(startupTimeout)));
}
}
| 9,187 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSServiceFactoryWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
import java.util.function.Supplier;
import org.apache.camel.test.infra.aws.common.services.AWSService;
import org.apache.camel.test.infra.common.services.SimpleTestServiceBuilder;
/**
 * Creates AWS test services whose local containers use a configurable startup
 * timeout. Each service is resolved through a builder that picks either a
 * remote AWS instance or a local container, driven by the
 * {@code %s-service.instance.type} system property.
 */
public final class AWSServiceFactoryWithTimeout {

    private AWSServiceFactoryWithTimeout() {
        // Utility class: no instances.
    }

    public static <T extends AWSService> SimpleTestServiceBuilder<T> builder() {
        // Diamond instead of the raw type so callers get a properly typed builder.
        return new SimpleTestServiceBuilder<>("aws");
    }

    /** Common wiring for services keyed by the generic instance-type property. */
    private static AWSService createService(Supplier<AWSService> supplier) {
        return AWSServiceFactoryWithTimeout.<AWSService>builder()
                .addRemoteMapping(AWSRemoteService::new)
                .addLocalMapping(supplier)
                .withPropertyNameFormat("%s-service.instance.type")
                .build();
    }

    public static AWSService createKinesisService() {
        // Kinesis uses its own property name so it can be switched independently
        // of the other services.
        return AWSServiceFactoryWithTimeout.<AWSService>builder()
                .addRemoteMapping(AWSRemoteService::new)
                .addLocalMapping(AWSKinesisLocalContainerServiceWithTimeout::new)
                .withPropertyNameFormat("%s-service.kinesis.instance.type")
                .build();
    }

    public static AWSService createSQSService() {
        return createService(AWSSQSLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createS3Service() {
        return createService(AWSS3LocalContainerServiceWithTimeout::new);
    }

    public static AWSService createSNSService() {
        return createService(AWSSNSLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createCloudWatchService() {
        return createService(AWSCloudWatchLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createEC2Service() {
        return createService(AWSEC2LocalContainerServiceWithTimeout::new);
    }

    public static AWSService createEventBridgeService() {
        return createService(AWSEventBridgeLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createIAMService() {
        return createService(AWSIAMLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createKMSService() {
        return createService(AWSKMSLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createLambdaService() {
        return createService(AWSLambdaLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createSTSService() {
        return createService(AWSSTSLocalContainerServiceWithTimeout::new);
    }

    public static AWSService createDynamodbService() {
        return createService(AWSDynamodbLocalContainerServiceWithTimeout::new);
    }
}
| 9,188 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSCloudWatchLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local CloudWatch service backed by a LocalStack container with a
 * configurable startup timeout.
 */
public class AWSCloudWatchLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSCloudWatchLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only CloudWatch is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.CLOUD_WATCH));
    }
}
| 9,189 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSKinesisLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.core.SdkSystemSetting;
/**
 * Local Kinesis service backed by a LocalStack container with a configurable
 * startup timeout.
 *
 * The previous version declared an unused private logger that was also
 * constructed for the wrong class (AWSKinesisLocalContainerService); it has
 * been removed.
 */
public class AWSKinesisLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    static {
        // Disable CBOR for the AWS SDK — presumably because the LocalStack
        // Kinesis emulator only speaks plain JSON. Runs once on class load,
        // before any container is started.
        System.setProperty(SdkSystemSetting.CBOR_ENABLED.property(), "false");
    }

    public AWSKinesisLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only Kinesis is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.KINESIS));
    }
}
| 9,190 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSLambdaLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local Lambda service backed by a LocalStack container with a configurable
 * startup timeout.
 */
public class AWSLambdaLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSLambdaLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only Lambda is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.LAMBDA));
    }
}
| 9,191 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSEventBridgeLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local EventBridge service backed by a LocalStack container with a
 * configurable startup timeout.
 */
public class AWSEventBridgeLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSEventBridgeLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only EventBridge is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.EVENT_BRIDGE));
    }
}
| 9,192 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSSNSLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
/**
 * Local SNS service backed by a LocalStack container with a configurable
 * startup timeout.
 */
public class AWSSNSLocalContainerServiceWithTimeout extends AWSLocalContainerService {

    public AWSSNSLocalContainerServiceWithTimeout() {
        // Image can be overridden with -Daws.container; only SNS is enabled.
        super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), Service.SNS));
    }
}
| 9,193 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSSQSLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
public class AWSSQSLocalContainerServiceWithTimeout extends AWSLocalContainerService {
public AWSSQSLocalContainerServiceWithTimeout() {
super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), new Service[]{Service.SQS}));
}
} | 9,194 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSSTSLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
public class AWSSTSLocalContainerServiceWithTimeout extends AWSLocalContainerService {
public AWSSTSLocalContainerServiceWithTimeout() {
super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), new Service[]{Service.STS}));
}
}
| 9,195 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSEC2LocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
public class AWSEC2LocalContainerServiceWithTimeout extends AWSLocalContainerService {
public AWSEC2LocalContainerServiceWithTimeout() {
super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), new Service[]{Service.EC2}));
}
}
| 9,196 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSKMSLocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
public class AWSKMSLocalContainerServiceWithTimeout extends AWSLocalContainerService {
public AWSKMSLocalContainerServiceWithTimeout() {
super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), new Service[]{Service.KMS}));
}
}
| 9,197 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/test/infra/aws2/services/AWSS3LocalContainerServiceWithTimeout.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.infra.aws2.services;
public class AWSS3LocalContainerServiceWithTimeout extends AWSLocalContainerService {
public AWSS3LocalContainerServiceWithTimeout() {
super(new AWSContainerWithTimeout(System.getProperty("aws.container", "localstack/localstack:1.3.0"), new Service[]{Service.S3}));
}
}
| 9,198 |
0 | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/kafkaconnector/aws/v2/kinesis | Create_ds/camel-kafka-connector/tests/itests-aws-v2/src/test/java/org/apache/camel/kafkaconnector/aws/v2/kinesis/source/CamelAWSKinesisPropertyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.aws.v2.kinesis.source;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.camel.kafkaconnector.aws.v2.common.AWSPropertiesUtils;
import org.apache.camel.kafkaconnector.common.SourceConnectorPropertyFactory;
import org.apache.camel.test.infra.aws.common.AWSConfigs;
/**
 * Creates the set of properties used by a Camel Kinesis Source Connector.
 */
final class CamelAWSKinesisPropertyFactory extends SourceConnectorPropertyFactory<CamelAWSKinesisPropertyFactory> {

    /**
     * Maps the generic AWS configuration keys to the Spring-style property
     * names understood by the aws-kinesis-source Kamelet.
     *
     * <p>Declared as an unmodifiable map: it is a pure constant lookup table,
     * so accidental mutation of shared static state is prevented.
     */
    public static final Map<String, String> SPRING_STYLE = Map.of(
            AWSConfigs.ACCESS_KEY, "camel.kamelet.aws-kinesis-source.accessKey",
            AWSConfigs.SECRET_KEY, "camel.kamelet.aws-kinesis-source.secretKey",
            AWSConfigs.REGION, "camel.kamelet.aws-kinesis-source.region");

    // Instances are created only through the basic() factory method.
    private CamelAWSKinesisPropertyFactory() {
    }

    /**
     * Applies the given AWS credentials/region configuration using the default
     * Spring-style property mapping.
     *
     * @param amazonConfigs the AWS configuration (access key, secret key, region)
     * @return this factory, for fluent chaining
     */
    public CamelAWSKinesisPropertyFactory withAmazonConfig(Properties amazonConfigs) {
        // Fixed: SPRING_STYLE is static — do not access it through "this".
        return withAmazonConfig(amazonConfigs, SPRING_STYLE);
    }

    /**
     * Applies the given AWS configuration using a caller-supplied mapping of
     * AWS config keys to connector property names.
     *
     * @param amazonConfigs the AWS configuration (access key, secret key, region)
     * @param style mapping from AWS config keys to connector property names
     * @return this factory, for fluent chaining
     */
    public CamelAWSKinesisPropertyFactory withAmazonConfig(Properties amazonConfigs, Map<String, String> style) {
        AWSPropertiesUtils.setCommonProperties(amazonConfigs, style, this);
        return this;
    }

    /**
     * Sets the Kinesis stream the source connector consumes from.
     *
     * @param streamName the Kinesis stream name
     * @return this factory, for fluent chaining
     */
    public CamelAWSKinesisPropertyFactory withStream(String streamName) {
        return setProperty("camel.kamelet.aws-kinesis-source.stream", streamName);
    }

    /**
     * Points the aws2-kinesis component at a custom configuration bean.
     *
     * @param configurationClass fully-qualified name of the configuration class
     * @return this factory, for fluent chaining
     */
    public CamelAWSKinesisPropertyFactory withConfiguration(String configurationClass) {
        return setProperty("camel.component.aws2-kinesis.configuration",
                classRef(configurationClass));
    }

    /**
     * Creates a factory pre-populated with the baseline connector settings
     * (connector class, converters, single task, Kamelet location).
     *
     * @return a new factory instance with the default settings applied
     */
    public static CamelAWSKinesisPropertyFactory basic() {
        return new CamelAWSKinesisPropertyFactory()
                .withName("CamelAwskinesisSourceConnector")
                .withTasksMax(1)
                .withConnectorClass("org.apache.camel.kafkaconnector.awskinesissource.CamelAwskinesissourceSourceConnector")
                .withKeyConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .withValueConverterClass("org.apache.kafka.connect.storage.StringConverter")
                .setProperty("camel.component.kamelet.location", "kamelets");
    }
}
| 9,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.