repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
FederatedAI/FATE-Serving | https://github.com/FederatedAI/FATE-Serving/blob/a25807586a960051b9acd4e0114f94a13ddc90ef/fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/BatchTestFileAdapter.java | fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/BatchTestFileAdapter.java | /*
* Copyright 2019 The FATE Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webank.ai.fate.serving.adaptor.dataaccess;
import com.webank.ai.fate.serving.core.bean.BatchHostFeatureAdaptorResult;
import com.webank.ai.fate.serving.core.bean.BatchHostFederatedParams;
import com.webank.ai.fate.serving.core.bean.Context;
import com.webank.ai.fate.serving.core.bean.Dict;
import com.webank.ai.fate.serving.core.constant.StatusCode;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Test adaptor that serves host-side features from a local CSV file instead of a
 * real feature store. Intended for batch-inference testing only.
 */
public class BatchTestFileAdapter extends AbstractBatchFeatureDataAdaptor {

    private static final Logger logger = LoggerFactory.getLogger(BatchTestFileAdapter.class);

    @Override
    public void init() {
        // No initialization needed for this file-based test adaptor.
    }

    /**
     * Reads "host_data.csv" from the user directory and returns the same parsed
     * feature map for every requested inference index.
     *
     * <p>File format: each line is a comma-separated list of {@code name:value}
     * pairs; values are parsed as doubles. Later lines overwrite earlier keys.
     *
     * @param context       request context (unused here)
     * @param featureIdList one entry per inference index to populate
     * @return a result whose index map holds a SUCCESS entry per index, or a
     *         result with retcode HOST_FEATURE_ERROR if the file cannot be
     *         read or parsed
     */
    @Override
    public BatchHostFeatureAdaptorResult getFeatures(Context context, List<BatchHostFederatedParams.SingleInferenceData> featureIdList) {
        BatchHostFeatureAdaptorResult batchHostFeatureAdaptorResult = new BatchHostFeatureAdaptorResult();
        try {
            Map<String, Object> data = new HashMap<>();
            List<String> lines = Files.readAllLines(Paths.get(System.getProperty(Dict.PROPERTY_USER_DIR), "host_data.csv"));
            lines.forEach(line -> {
                for (String kv : StringUtils.split(line, ",")) {
                    String[] a = StringUtils.split(kv, ":");
                    data.put(a[0], Double.valueOf(a[1]));
                }
            });
            featureIdList.forEach(singleInferenceData -> {
                Map<Integer, BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult> indexMap = batchHostFeatureAdaptorResult.getIndexResultMap();
                BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult singleBatchHostFeatureAdaptorResult = new BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult();
                // Each index gets its own copy so downstream mutation of one
                // result cannot leak into the others. Replaces the previous
                // raw-typed clone()/cast idiom with the copy constructor.
                singleBatchHostFeatureAdaptorResult.setFeatures(new HashMap<>(data));
                singleBatchHostFeatureAdaptorResult.setRetcode(StatusCode.SUCCESS);
                indexMap.put(singleInferenceData.getIndex(), singleBatchHostFeatureAdaptorResult);
            });
            batchHostFeatureAdaptorResult.setRetcode(StatusCode.SUCCESS);
        } catch (Exception ex) {
            // Log the full stack trace: ex.getMessage() alone may be null
            // (e.g. for NullPointerException) and hides the failure location.
            logger.error("failed to load host features from host_data.csv", ex);
            batchHostFeatureAdaptorResult.setRetcode(StatusCode.HOST_FEATURE_ERROR);
        }
        return batchHostFeatureAdaptorResult;
    }

    @Override
    public List<ParamDescriptor> desc() {
        return null;
    }
}
| java | Apache-2.0 | a25807586a960051b9acd4e0114f94a13ddc90ef | 2026-01-05T02:38:33.335296Z | false |
FederatedAI/FATE-Serving | https://github.com/FederatedAI/FATE-Serving/blob/a25807586a960051b9acd4e0114f94a13ddc90ef/fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/HttpAdapter.java | fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/HttpAdapter.java | /*
* Copyright 2019 The FATE Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webank.ai.fate.serving.adaptor.dataaccess;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.webank.ai.fate.serving.common.utils.HttpAdapterClientPool;
import com.webank.ai.fate.serving.core.bean.*;
import com.webank.ai.fate.serving.core.constant.StatusCode;
import com.webank.ai.fate.serving.core.utils.JsonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
 * Single-feature adaptor that fetches feature data from an external HTTP
 * feature service and translates its HTTP/business response codes into a
 * {@link ReturnResult}.
 */
public class HttpAdapter extends AbstractSingleFeatureDataAdaptor {

    private static final Logger logger = LoggerFactory.getLogger(HttpAdapter.class);
    // Endpoint of the external feature service, resolved once from configuration.
    private final static String HTTP_ADAPTER_URL = MetaInfo.PROPERTY_HTTP_ADAPTER_URL;
    private static final ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public void init() {
        // NOTE(review): return value is discarded; presumably this only checks
        // that the environment is wired up -- confirm the intent.
        environment.getProperty("port");
    }

    /**
     * Posts the feature ids to the configured HTTP endpoint and maps the
     * response onto a ReturnResult.
     *
     * @param context    request context (unused here)
     * @param featureIds feature lookup keys sent as the POST body
     * @return SUCCESS with the response payload, FEATURE_DATA_ADAPTOR_ERROR for
     *         empty/unknown/failed responses, or SYSTEM_ERROR on exceptions
     */
    @Override
    public ReturnResult getData(Context context, Map<String, Object> featureIds) {
        ReturnResult returnResult = new ReturnResult();
        HttpAdapterResponse responseResult;
        try {
            // get data by http
            responseResult = HttpAdapterClientPool.doPost(HTTP_ADAPTER_URL, featureIds);
            int responseCode = responseResult.getCode();
            switch (responseCode) {
                case HttpAdapterResponseCodeEnum.COMMON_HTTP_SUCCESS_CODE:
                    Map<String, Object> responseResultData = responseResult.getData();
                    if (responseResultData == null || responseResultData.size() == 0) {
                        returnResult.setRetcode(StatusCode.FEATURE_DATA_ADAPTOR_ERROR);
                        returnResult.setRetmsg("responseData is null ");
                        break;
                    }
                    // Null-safe business-code check: a payload without a "code"
                    // entry previously caused a NullPointerException here, which
                    // surfaced as an unrelated SYSTEM_ERROR.
                    Object bizCode = responseResultData.get("code");
                    if (bizCode == null || !bizCode.equals(HttpAdapterResponseCodeEnum.SUCCESS_CODE)) {
                        returnResult.setRetcode(StatusCode.FEATURE_DATA_ADAPTOR_ERROR);
                        returnResult.setRetmsg("responseData is : " + objectMapper.writeValueAsString(responseResultData.get("data")));
                    } else {
                        // Strip the embedded business code before handing the
                        // payload to the caller.
                        ((Map<String, Object>) responseResultData.get("data")).remove("code");
                        returnResult.setRetcode(StatusCode.SUCCESS);
                        returnResult.setData(responseResultData);
                    }
                    break;
                case HttpAdapterResponseCodeEnum.ERROR_CODE:
                    returnResult.setRetcode(StatusCode.FEATURE_DATA_ADAPTOR_ERROR);
                    returnResult.setRetmsg(" data not found ");
                    break;
                default:
                    returnResult.setRetcode(StatusCode.FEATURE_DATA_ADAPTOR_ERROR);
                    returnResult.setRetmsg("responseCode unknown ");
            }
            if (logger.isDebugEnabled()) {
                logger.debug("HttpAdapter result, {}", JsonUtil.object2Json(returnResult));
            }
        } catch (Exception ex) {
            // Include the throwable so the stack trace is not lost.
            logger.error("HttpAdapter getData error", ex);
            returnResult.setRetcode(StatusCode.SYSTEM_ERROR);
        }
        return returnResult;
    }

    public static void main(String[] args) {
        // Intentionally empty.
    }
}
| java | Apache-2.0 | a25807586a960051b9acd4e0114f94a13ddc90ef | 2026-01-05T02:38:33.335296Z | false |
FederatedAI/FATE-Serving | https://github.com/FederatedAI/FATE-Serving/blob/a25807586a960051b9acd4e0114f94a13ddc90ef/fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/MockBatchAdapter.java | fate-serving-extension/src/main/java/com/webank/ai/fate/serving/adaptor/dataaccess/MockBatchAdapter.java | /*
* Copyright 2019 The FATE Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webank.ai.fate.serving.adaptor.dataaccess;
import com.webank.ai.fate.serving.core.bean.BatchHostFeatureAdaptorResult;
import com.webank.ai.fate.serving.core.bean.BatchHostFederatedParams;
import com.webank.ai.fate.serving.core.bean.Context;
import com.webank.ai.fate.serving.core.constant.StatusCode;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Batch feature adaptor that returns a fixed, hard-coded feature vector for
 * every inference index. Intended for testing without a real feature store.
 */
public class MockBatchAdapter extends AbstractBatchFeatureDataAdaptor {

    private static final Logger logger = LoggerFactory.getLogger(MockBatchAdapter.class);

    // Fixed fake payload: comma-separated name:value pairs, parsed as doubles.
    private static final String MOCK_DATA = "x0:1,x1:5,x2:13,x3:58,x4:95,x5:352,x6:418,x7:833,x8:888,x9:937,x10:32776";

    @Override
    public void init() {
        // No initialization needed for the mock adaptor.
    }

    /**
     * Returns the parsed mock feature map for every requested inference index.
     *
     * @param context       request context (unused here)
     * @param featureIdList one entry per inference index to populate
     * @return a result whose index map holds a SUCCESS entry per index
     */
    @Override
    public BatchHostFeatureAdaptorResult getFeatures(Context context, List<BatchHostFederatedParams.SingleInferenceData> featureIdList) {
        BatchHostFeatureAdaptorResult batchHostFeatureAdaptorResult = new BatchHostFeatureAdaptorResult();
        // Parse the constant payload once instead of once per inference index.
        Map<String, Object> template = new HashMap<>();
        for (String kv : StringUtils.split(MOCK_DATA, ",")) {
            String[] a = StringUtils.split(kv, ":");
            template.put(a[0], Double.valueOf(a[1]));
        }
        featureIdList.forEach(singleInferenceData -> {
            Map<Integer, BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult> indexMap = batchHostFeatureAdaptorResult.getIndexResultMap();
            BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult singleBatchHostFeatureAdaptorResult = new BatchHostFeatureAdaptorResult.SingleBatchHostFeatureAdaptorResult();
            // Each index receives its own copy so results stay independent.
            singleBatchHostFeatureAdaptorResult.setFeatures(new HashMap<>(template));
            singleBatchHostFeatureAdaptorResult.setRetcode(StatusCode.SUCCESS);
            indexMap.put(singleInferenceData.getIndex(), singleBatchHostFeatureAdaptorResult);
        });
        batchHostFeatureAdaptorResult.setRetcode(StatusCode.SUCCESS);
        return batchHostFeatureAdaptorResult;
    }

    @Override
    public List<ParamDescriptor> desc() {
        return null;
    }
}
| java | Apache-2.0 | a25807586a960051b9acd4e0114f94a13ddc90ef | 2026-01-05T02:38:33.335296Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/ContactsLoadedEvent.java | eventbinder-sample/src/main/java/sample/client/ContactsLoadedEvent.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import java.util.List;
/**
 * Fired on the event bus once the server has returned the contact list, making
 * the loaded names available to any interested presenter.
 */
public class ContactsLoadedEvent extends GenericEvent {

  // Contact names as returned by the server, in server order.
  private final List<String> contactNames;

  public ContactsLoadedEvent(List<String> contactNames) {
    this.contactNames = contactNames;
  }

  /** Returns the contact names carried by this event. */
  public List<String> getContactNames() {
    return contactNames;
  }
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/ContactScreenOpenedEvent.java | eventbinder-sample/src/main/java/sample/client/ContactScreenOpenedEvent.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.web.bindery.event.shared.binder.GenericEvent;
/**
 * Marker event fired on the event bus when the user opens the contact screen.
 * Carries no payload; listeners react to the navigation itself.
 */
public class ContactScreenOpenedEvent extends GenericEvent {}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/ServerProxy.java | eventbinder-sample/src/main/java/sample/client/ServerProxy.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.RepeatingCommand;
import com.google.gwt.event.shared.EventBus;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.EventHandler;
import java.util.LinkedList;
/**
* Proxy for a contacts server. This proxy stands in on the event bus on behalf
* of the server, converting events to (fake) server calls, and firing other
* events after the server returns information.
*/
class ServerProxy {

  interface MyEventBinder extends EventBinder<ServerProxy> {}

  private static final MyEventBinder eventBinder = GWT.create(MyEventBinder.class);

  private final EventBus eventBus;

  ServerProxy(EventBus eventBus) {
    this.eventBus = eventBus;
    // Bound for the lifetime of the application; the proxy is never torn down.
    eventBinder.bindEventHandlers(this, eventBus);
  }

  /**
   * Simulates a server round-trip: waits a second, then publishes a canned
   * contact list back onto the event bus.
   */
  @EventHandler
  void onContactsScreenOpened(ContactScreenOpenedEvent event) {
    // A RepeatingCommand returning false acts as a one-shot delayed task.
    Scheduler.get().scheduleFixedDelay(new RepeatingCommand() {
      @Override
      public boolean execute() {
        LinkedList<String> contactNames = new LinkedList<String>();
        contactNames.add("John Doe");
        contactNames.add("Jane Doe");
        eventBus.fireEvent(new ContactsLoadedEvent(contactNames));
        return false; // run exactly once
      }
    }, 1000);
  }
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/ContactsPresenter.java | eventbinder-sample/src/main/java/sample/client/ContactsPresenter.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.shared.EventBus;
import com.google.gwt.user.client.ui.HasWidgets;
import com.google.gwt.user.client.ui.Label;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.EventHandler;
/**
* Presenter for the main contacts screen, which shows a list of contacts once
* they've been loaded from the server.
*/
class ContactsPresenter {

  interface MyEventBinder extends EventBinder<ContactsPresenter> {}

  private static final MyEventBinder eventBinder = GWT.create(MyEventBinder.class);

  private final EventBus eventBus;
  private HasWidgets view;
  private HandlerRegistration eventRegistration;

  ContactsPresenter(EventBus eventBus) {
    this.eventBus = eventBus;
  }

  /**
   * Subscribes this presenter to the event bus. Call when the presenter
   * becomes active. (Binding in the constructor is also possible, for
   * presenters that should always be listening.)
   */
  void start() {
    eventRegistration = eventBinder.bindEventHandlers(this, eventBus);
  }

  /**
   * Unsubscribes this presenter from the event bus. Call when the presenter
   * is no longer active so stale handlers do not fire.
   */
  void stop() {
    eventRegistration.removeHandler();
  }

  void setView(HasWidgets view) {
    this.view = view;
  }

  /** Shows a placeholder while the contact list is being fetched. */
  @EventHandler
  void onContactsScreenOpened(ContactScreenOpenedEvent event) {
    view.clear();
    view.add(new Label("Please wait..."));
  }

  /** Replaces the placeholder with one label per loaded contact. */
  @EventHandler
  void onContactsLoaded(ContactsLoadedEvent event) {
    view.clear();
    for (String name : event.getContactNames()) {
      view.add(new Label(name));
    }
  }
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/SidebarPresenter.java | eventbinder-sample/src/main/java/sample/client/SidebarPresenter.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.google.gwt.event.shared.EventBus;
import com.google.web.bindery.event.shared.binder.EventBinder;
/**
* Presenter for the application's sidebar, which just shows a button that the
* user can click on to load contacts.
*/
class SidebarPresenter {

  interface MyEventBinder extends EventBinder<SidebarPresenter> {}

  private static final MyEventBinder eventBinder = GWT.create(MyEventBinder.class);

  private final EventBus eventBus;

  SidebarPresenter(EventBus eventBus) {
    this.eventBus = eventBus;
    // This presenter listens for its whole lifetime, so bind in the constructor.
    eventBinder.bindEventHandlers(this, eventBus);
  }

  /** Wires the view so a click announces a contact-screen navigation on the bus. */
  public void setView(HasClickHandlers view) {
    view.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(ClickEvent clickEvent) {
        eventBus.fireEvent(new ContactScreenOpenedEvent());
      }
    });
  }
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder-sample/src/main/java/sample/client/SampleEntryPoint.java | eventbinder-sample/src/main/java/sample/client/SampleEntryPoint.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package sample.client;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.shared.SimpleEventBus;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.VerticalPanel;
/**
* Initializes the application. Nothing to see here: everything interesting
* happens in the presenters.
*/
/**
 * Bootstraps the sample: builds the presenters, attaches their views, and
 * starts the fake server. All interesting behavior lives in the presenters.
 */
public class SampleEntryPoint implements EntryPoint {

  @Override
  public void onModuleLoad() {
    // Hand-wired object graph; a real application would use Gin for this.
    SimpleEventBus eventBus = new SimpleEventBus();

    // Sidebar: a single button floated left of the contact list.
    SidebarPresenter sidebarPresenter = new SidebarPresenter(eventBus);
    Button sidebarView = new Button("Contacts");
    Style sidebarStyle = sidebarView.getElement().getStyle();
    sidebarStyle.setFloat(Style.Float.LEFT);
    sidebarStyle.setMarginRight(20, Unit.PX);
    sidebarPresenter.setView(sidebarView);
    RootPanel.get().add(sidebarView);

    // Contact list: populated by ContactsPresenter once contacts load.
    ContactsPresenter contactsPresenter = new ContactsPresenter(eventBus);
    VerticalPanel contactsView = new VerticalPanel();
    contactsPresenter.setView(contactsView);
    RootPanel.get().add(contactsView);

    // Activate the presenter so it reacts to events from now on.
    contactsPresenter.start();

    // Constructing the proxy is enough: it binds itself to the bus eagerly.
    ServerProxy server = new ServerProxy(eventBus);
  }
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/test/java/com/google/web/bindery/event/TestSuite.java | eventbinder/src/test/java/com/google/web/bindery/event/TestSuite.java | package com.google.web.bindery.event;
import com.google.web.bindery.event.shared.binder.EventBinderTest;
import com.google.web.bindery.event.gwt.rebind.binder.EventBinderWriterTest;
import com.google.gwt.junit.tools.GWTTestSuite;
import junit.framework.Test;
import org.junit.runners.Suite;
import org.junit.runner.RunWith;
/**
 * JUnit suite aggregating the EventBinder unit tests: the generated-binder
 * behavior tests and the rebind-time writer tests.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({EventBinderTest.class, EventBinderWriterTest.class})
public class TestSuite {}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/test/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderWriterTest.java | eventbinder/src/test/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderWriterTest.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.gwt.rebind.binder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.contains;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.gwt.core.ext.TreeLogger;
import com.google.gwt.core.ext.TreeLogger.HelpInfo;
import com.google.gwt.core.ext.TreeLogger.Type;
import com.google.gwt.core.ext.UnableToCompleteException;
import com.google.gwt.core.ext.typeinfo.JClassType;
import com.google.gwt.core.ext.typeinfo.JMethod;
import com.google.gwt.core.ext.typeinfo.JType;
import com.google.gwt.core.ext.typeinfo.TypeOracle;
import com.google.gwt.user.rebind.SourceWriter;
import com.google.gwt.user.rebind.StringSourceWriter;
import com.google.web.bindery.event.shared.binder.EventHandler;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import java.util.HashMap;
import java.util.Map;
/**
* Tests for {@link EventBinderWriter}. Most functionality should be covered by
* EventBinderTest; this should only check the basic output format and error
* cases.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
@RunWith(MockitoJUnitRunner.class)
public class EventBinderWriterTest {
  // Mocked generator inputs: the class being scanned for @EventHandler methods
  // and the GWT build logger used to report errors.
  @Mock private JClassType target;
  @Mock private TreeLogger logger;
  // Mock JClassType standing in for GenericEvent itself.
  private JClassType genericEventType;
  // Cache of mock JClassTypes, one per event class referenced by a test.
  private Map<Class<? extends GenericEvent>, JClassType> eventTypes;
  private TypeOracle typeOracle;
  private EventBinderWriter writer;
  private SourceWriter output;
  @Before
  public void setUp() {
    eventTypes = new HashMap<Class<? extends GenericEvent>, JClassType>();
    typeOracle = createTypeOracle();
    genericEventType = getEventType(GenericEvent.class);
    writer = new EventBinderWriter(logger, genericEventType);
    output = new StringSourceWriter();
  }
  // Happy path: four handler shapes (typed parameter x2, handles= list with a
  // generic parameter, handles= with no parameter) produce the expected
  // generated doBindEventHandlers source, compared as an exact string.
  @Test
  public void shouldWriteDoBindEventHandler() throws Exception {
    JClassType eventType1 = getEventType(MyEvent1.class);
    JClassType eventType2 = getEventType(MyEvent2.class);
    JMethod method1 = newMethod("method1", eventType1);
    JMethod method2 = newMethod("method2", eventType2);
    JMethod method3 = newMethod("method3", new JType[] {genericEventType},
        new Class[] {MyEvent1.class, MyEvent2.class});
    JMethod method4 = newMethod("method4", new JType[] {},
        new Class[] {MyEvent1.class});
    when(target.getQualifiedSourceName()).thenReturn("MyTarget");
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method1, method2, method3, method4});
    writer.writeDoBindEventHandlers(target, output, typeOracle);
    assertEquals(join(
        "protected List<HandlerRegistration> doBindEventHandlers("
            + "final MyTarget target, EventBus eventBus) {",
        " List<HandlerRegistration> registrations = new LinkedList<HandlerRegistration>();",
        " bind(eventBus, registrations, " + className(MyEvent1.class) + ".class, new GenericEventHandler() {",
        " public void handleEvent(GenericEvent event) { target.method1((" + className(MyEvent1.class) + ") event); }",
        " });",
        " bind(eventBus, registrations, " + className(MyEvent2.class) +".class, new GenericEventHandler() {",
        " public void handleEvent(GenericEvent event) { target.method2((" + className(MyEvent2.class) + ") event); }",
        " });",
        " bind(eventBus, registrations, " + className(MyEvent1.class) + ".class, new GenericEventHandler() {",
        " public void handleEvent(GenericEvent event) { target.method3((" + className(MyEvent1.class) +") event); }",
        " });",
        " bind(eventBus, registrations, " + className(MyEvent2.class) + ".class, new GenericEventHandler() {",
        " public void handleEvent(GenericEvent event) { target.method3((" + className(MyEvent2.class) + ") event); }",
        " });",
        " bind(eventBus, registrations, " + className(MyEvent1.class) + ".class, new GenericEventHandler() {",
        " public void handleEvent(GenericEvent event) { target.method4(); }",
        " });",
        " return registrations;",
        "}"), output.toString());
  }
  // A handler with no parameters and no handles= list has no event to bind to.
  @Test
  public void shouldFailOnZeroParametersWithoutEvents() throws Exception {
    JMethod method = newMethod("myMethod");
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // Handlers must take at most one parameter.
  @Test
  public void shouldFailOnTwoParameters() throws Exception {
    JMethod method = newMethod("myMethod", mock(JType.class), mock(JType.class));
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // The single parameter must be assignable to GenericEvent.
  @Test
  public void shouldFailOnInvalidParameter() throws Exception {
    JClassType paramType = mock(JClassType.class);
    when(paramType.isAssignableTo(genericEventType)).thenReturn(false);
    when(paramType.isClassOrInterface()).thenReturn(paramType);
    JMethod method = newMethod("myMethod", paramType);
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // An event listed in handles= must be assignable to the declared parameter.
  @Test
  public void shouldFailForEventWhichIsNotAssignableToParameter() throws Exception {
    JClassType eventType1 = getEventType(MyEvent1.class);
    JMethod method = newMethod("myMethod", new JType[] {eventType1}, new Class[] {MyEvent2.class});
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // isClassOrInterface() returning null marks the parameter as a primitive.
  @Test
  public void shouldFailOnPrimitiveParameter() throws Exception {
    JClassType paramType = mock(JClassType.class);
    when(paramType.isClassOrInterface()).thenReturn(null);
    JMethod method = newMethod("myMethod", paramType);
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // Abstract event types cannot be dispatched to and must be rejected.
  @Test
  public void shouldFailOnAbstractParameter() throws Exception {
    JClassType paramType = getEventType(AbstractEvent.class);
    when(paramType.isAbstract()).thenReturn(true);
    JMethod method = newMethod("myMethod", paramType);
    when(target.getInheritableMethods()).thenReturn(new JMethod[] {method});
    try {
      writer.writeDoBindEventHandlers(target, output, typeOracle);
      fail("Exception not thrown");
    } catch (UnableToCompleteException expected) {}
    verify(logger).log(
        eq(Type.ERROR), contains("myMethod"), isNull(Throwable.class), isNull(HelpInfo.class));
  }
  // Convenience overload: a mock handler method with no handles= events.
  @SuppressWarnings("unchecked")
  private JMethod newMethod(String name, JType... params) {
    return newMethod(name, params, new Class[0]);
  }
  // Builds a mock JMethod annotated with @EventHandler(handles = events),
  // taking the given parameter types.
  @SuppressWarnings("unchecked")
  private JMethod newMethod(String name, JType[] params, Class[] events) {
    EventHandler eventHandler = mock(EventHandler.class);
    when(eventHandler.handles()).thenReturn(events);
    JMethod method = mock(JMethod.class);
    when(method.getAnnotation(EventHandler.class)).thenReturn(eventHandler);
    when(method.getName()).thenReturn(name);
    when(method.getParameterTypes()).thenReturn(params);
    return method;
  }
  // Returns (creating and caching on first use) a mock JClassType for the
  // given event class; every event type is assignable to GenericEvent.
  private JClassType getEventType(Class<? extends GenericEvent> event) {
    if (eventTypes.containsKey(event)) {
      return eventTypes.get(event);
    }
    JClassType type = mock(JClassType.class);
    eventTypes.put(event, type);
    when(type.isClassOrInterface()).thenReturn(type);
    when(type.isAssignableTo(getEventType(GenericEvent.class))).thenReturn(true);
    when(type.getOracle()).thenReturn(typeOracle);
    when(type.getQualifiedSourceName()).thenReturn(className(event));
    return type;
  }
  // Mock TypeOracle whose findType() resolves a qualified source name via
  // reflection; if the dotted name fails, the last '.' is swapped for '$' to
  // retry it as a nested-class binary name.
  private TypeOracle createTypeOracle() {
    TypeOracle typeOracle = mock(TypeOracle.class);
    when(typeOracle.findType(anyString())).then(new Answer<JClassType>() {
      @Override
      public JClassType answer(InvocationOnMock invocationOnMock) throws Throwable {
        String parameter = (String) invocationOnMock.getArguments()[0];
        Class<? extends GenericEvent> klass;
        try {
          klass = (Class<? extends GenericEvent>) Class.forName(parameter);
        } catch (ClassNotFoundException ex) {
          char[] klassName = parameter.toCharArray();
          klassName[parameter.lastIndexOf('.')] = '$';
          klass = (Class<? extends GenericEvent>) Class.forName(String.valueOf(klassName));
        }
        return getEventType(klass);
      }
    });
    return typeOracle;
  }
  // Joins the given lines with trailing newlines, matching SourceWriter output.
  private String join(String... strings) {
    StringBuilder builder = new StringBuilder();
    for (String string : strings) {
      builder.append(string).append('\n');
    }
    return builder.toString();
  }
  // Source-style qualified name for a nested event class ('$' -> '.').
  private String className(Class<? extends GenericEvent> event) {
    return event.getName().replace('$', '.');
  }
  // Minimal concrete and abstract event types used as test fixtures.
  public static class MyEvent1 extends GenericEvent {}
  public static class MyEvent2 extends GenericEvent {}
  public static abstract class AbstractEvent extends GenericEvent {}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/test/java/com/google/web/bindery/event/shared/binder/EventBinderTest.java | eventbinder/src/test/java/com/google/web/bindery/event/shared/binder/EventBinderTest.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.junit.client.GWTTestCase;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.google.web.bindery.event.shared.SimpleEventBus;
import com.google.web.bindery.event.shared.binder.subpackage.SomeActivity;
/**
* End-to-end test of {@link EventBinder} and associated classes.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
public class EventBinderTest extends GWTTestCase {
@Override
public String getModuleName() {
return "com.google.web.bindery.event.EventBinder";
}
public void testEventBinder() {
EventBus eventBus = new SimpleEventBus();
TestPresenter presenter = new TestPresenter();
TestPresenter.MyEventBinder binder = GWT.create(TestPresenter.MyEventBinder.class);
binder.bindEventHandlers(presenter, eventBus);
// Test one event
assertEquals(0, presenter.firstEventsHandled);
eventBus.fireEvent(new FirstEvent());
assertEquals(1, presenter.firstEventsHandled);
assertEquals(1, presenter.firstEventsWithoutParameterHandled);
assertEquals(1, presenter.firstAndSecondEventsHandled);
// Test another event twice
assertEquals(0, presenter.secondEventsHandled);
eventBus.fireEvent(new SecondEvent());
eventBus.fireEvent(new SecondEvent());
assertEquals(2, presenter.secondEventsHandled);
assertEquals(3, presenter.firstAndSecondEventsHandled);
}
public void testEventBinder_unbindEventHandlers() {
EventBus eventBus = new SimpleEventBus();
TestPresenter presenter = new TestPresenter();
TestPresenter.MyEventBinder binder = GWT.create(TestPresenter.MyEventBinder.class);
HandlerRegistration registration = binder.bindEventHandlers(presenter, eventBus);
assertEquals(0, presenter.firstEventsHandled);
assertEquals(0, presenter.firstEventsWithoutParameterHandled);
assertEquals(0, presenter.secondEventsHandled);
// Before unregistering
eventBus.fireEvent(new FirstEvent());
eventBus.fireEvent(new SecondEvent());
assertEquals(1, presenter.firstEventsHandled);
assertEquals(1, presenter.firstEventsWithoutParameterHandled);
assertEquals(1, presenter.secondEventsHandled);
assertEquals(2, presenter.firstAndSecondEventsHandled);
// After unregistering
registration.removeHandler();
eventBus.fireEvent(new FirstEvent());
eventBus.fireEvent(new SecondEvent());
assertEquals(1, presenter.firstEventsHandled);
assertEquals(1, presenter.firstEventsWithoutParameterHandled);
assertEquals(1, presenter.secondEventsHandled);
assertEquals(2, presenter.firstAndSecondEventsHandled);
// After re-registering
binder.bindEventHandlers(presenter, eventBus);
eventBus.fireEvent(new FirstEvent());
eventBus.fireEvent(new SecondEvent());
assertEquals(2, presenter.firstEventsHandled);
assertEquals(2, presenter.firstEventsWithoutParameterHandled);
assertEquals(2, presenter.secondEventsHandled);
assertEquals(4, presenter.firstAndSecondEventsHandled);
}
public void testEventBinder_withLegacyEventBus() {
com.google.gwt.event.shared.EventBus eventBus =
new com.google.gwt.event.shared.SimpleEventBus();
TestPresenter presenter = new TestPresenter();
TestPresenter.MyEventBinder binder = GWT.create(TestPresenter.MyEventBinder.class);
binder.bindEventHandlers(presenter, eventBus);
assertEquals(0, presenter.firstEventsHandled);
eventBus.fireEvent(new FirstEvent());
assertEquals(1, presenter.firstEventsHandled);
}
public void testEventBinder_withHandlersInSuperclass() {
EventBus eventBus = new SimpleEventBus();
SubPresenter presenter = new SubPresenter();
SubPresenter.MyEventBinder binder = GWT.create(SubPresenter.MyEventBinder.class);
binder.bindEventHandlers(presenter, eventBus);
eventBus.fireEvent(new FirstEvent());
eventBus.fireEvent(new SecondEvent());
eventBus.fireEvent(new ThirdEvent());
// FirstEvent has a handler in both classes, so it should be handled twice
assertEquals(1, presenter.firstEventsHandled);
assertEquals(1, presenter.firstEventsWithoutParameterHandled);
assertEquals(1, presenter.subclassFirstEventsHandled);
// SecondEvent's handler is overridden in the subclass, so it should only be handled there
assertEquals(0, presenter.secondEventsHandled);
assertEquals(1, presenter.subclassSecondEventsHandled);
// ThirdEvent is only handled in the superclass
assertEquals(1, presenter.thirdEventsHandled);
// First+Second events are handled in superclass
assertEquals(2, presenter.firstAndSecondEventsHandled);
}
// https://github.com/google/gwteventbinder/issues/28
public void testEventBinder_inDifferentPackage() {
EventBus eventBus = new SimpleEventBus();
TestPresenter presenter = new TestPresenter();
SomeActivity.SomeEventBinder binder = GWT.create(SomeActivity.SomeEventBinder.class);
binder.bindEventHandlers(presenter, eventBus);
// Test one event
assertEquals(0, presenter.firstEventsHandled);
eventBus.fireEvent(new FirstEvent());
assertEquals(1, presenter.firstEventsHandled);
}
public static class TestPresenter {
interface MyEventBinder extends EventBinder<TestPresenter> {}
int firstEventsHandled;
int secondEventsHandled;
int thirdEventsHandled;
int firstAndSecondEventsHandled;
int firstEventsWithoutParameterHandled;
@EventHandler
public void onFirstEvent(FirstEvent e) {
firstEventsHandled++;
}
@EventHandler
public void onSecondEvent(SecondEvent e) {
secondEventsHandled++;
}
@EventHandler
public void onThirdEvent(ThirdEvent e) {
thirdEventsHandled++;
}
@EventHandler(handles = {FirstEvent.class, SecondEvent.class})
public void onFirstAndSecondEvent(GenericEvent event) {
firstAndSecondEventsHandled++;
}
@EventHandler(handles = {FirstEvent.class})
public void onFirstEventWithoutParameter() {
firstEventsWithoutParameterHandled++;
}
}
static class SubPresenter extends TestPresenter {
interface MyEventBinder extends EventBinder<SubPresenter> {}
int subclassFirstEventsHandled;
int subclassSecondEventsHandled;
@EventHandler
void onFirstEventAgain(FirstEvent e) {
subclassFirstEventsHandled++;
}
@Override
@EventHandler
public void onSecondEvent(SecondEvent e) {
subclassSecondEventsHandled++;
}
}
public static class FirstEvent extends GenericEvent {}
public static class SecondEvent extends GenericEvent {}
public static class ThirdEvent extends GenericEvent {}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/test/java/com/google/web/bindery/event/shared/binder/subpackage/SomeActivity.java | eventbinder/src/test/java/com/google/web/bindery/event/shared/binder/subpackage/SomeActivity.java | package com.google.web.bindery.event.shared.binder.subpackage;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.EventBinderTest;
/** This class exists to test that EventBinder works when referencing a class in a different package. */
public class SomeActivity {
public interface SomeEventBinder extends EventBinder<EventBinderTest.TestPresenter> {}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderGenerator.java | eventbinder/src/main/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderGenerator.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.gwt.rebind.binder;
import com.google.gwt.core.ext.Generator;
import com.google.gwt.core.ext.GeneratorContext;
import com.google.gwt.core.ext.TreeLogger;
import com.google.gwt.core.ext.TreeLogger.Type;
import com.google.gwt.core.ext.UnableToCompleteException;
import com.google.gwt.core.ext.typeinfo.JClassType;
import com.google.gwt.core.ext.typeinfo.NotFoundException;
import com.google.gwt.core.ext.typeinfo.TypeOracle;
import com.google.gwt.user.rebind.ClassSourceFileComposerFactory;
import com.google.gwt.user.rebind.SourceWriter;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import com.google.web.bindery.event.shared.binder.impl.AbstractEventBinder;
import com.google.web.bindery.event.shared.binder.impl.GenericEventHandler;
import java.io.PrintWriter;
import java.util.LinkedList;
import java.util.List;
/**
* Generator for {@link EventBinder}. Takes care of the ugly parts of creating
* the source writer and then delegates to {@link EventBinderWriter}. This class
* is used by the GWT compiler and should not be referenced directly by users.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
public class EventBinderGenerator extends Generator {
@Override
public String generate(TreeLogger logger, GeneratorContext context,
String typeName) throws UnableToCompleteException {
try {
JClassType eventBinderType = context.getTypeOracle().getType(typeName);
JClassType targetType = getTargetType(eventBinderType, context.getTypeOracle());
SourceWriter writer = createSourceWriter(logger, context, eventBinderType, targetType);
if (writer != null) { // Otherwise the class was already created
new EventBinderWriter(
logger,
context.getTypeOracle().getType(GenericEvent.class.getCanonicalName()))
.writeDoBindEventHandlers(targetType, writer, context.getTypeOracle());
writer.commit(logger);
}
return getFullyQualifiedGeneratedClassName(eventBinderType);
} catch (NotFoundException e) {
logger.log(Type.ERROR, "Error generating " + typeName, e);
throw new UnableToCompleteException();
}
}
private JClassType getTargetType(JClassType interfaceType, TypeOracle typeOracle) {
JClassType[] superTypes = interfaceType.getImplementedInterfaces();
JClassType eventBinderType = typeOracle.findType(EventBinder.class.getCanonicalName());
if (superTypes.length != 1
|| !superTypes[0].isAssignableFrom(eventBinderType)
|| superTypes[0].isParameterized() == null) {
throw new IllegalArgumentException(
interfaceType + " must extend EventBinder with a type parameter");
}
return superTypes[0].isParameterized().getTypeArgs()[0];
}
private SourceWriter createSourceWriter(
TreeLogger logger,
GeneratorContext context,
JClassType eventBinderType,
JClassType targetType) {
String simpleName = getSimpleGeneratedClassName(eventBinderType);
String packageName = eventBinderType.getPackage().getName();
ClassSourceFileComposerFactory composer =
new ClassSourceFileComposerFactory(packageName, simpleName);
composer.setSuperclass(AbstractEventBinder.class.getCanonicalName()
+ "<" + targetType.getQualifiedSourceName() + ">");
composer.addImplementedInterface(eventBinderType.getName());
composer.addImport(EventBinder.class.getCanonicalName());
composer.addImport(EventBus.class.getCanonicalName());
composer.addImport(GenericEvent.class.getCanonicalName());
composer.addImport(GenericEventHandler.class.getCanonicalName());
composer.addImport(HandlerRegistration.class.getCanonicalName());
composer.addImport(LinkedList.class.getCanonicalName());
composer.addImport(List.class.getCanonicalName());
PrintWriter printWriter = context.tryCreate(logger, packageName, simpleName);
return (printWriter != null) ? composer.createSourceWriter(context, printWriter) : null;
}
private String getSimpleGeneratedClassName(JClassType eventBinderType) {
return eventBinderType.getName().replace('.', '_') + "Impl";
}
private String getFullyQualifiedGeneratedClassName(JClassType eventBinderType) {
return new StringBuilder()
.append(eventBinderType.getPackage().getName())
.append('.')
.append(getSimpleGeneratedClassName(eventBinderType))
.toString();
}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderWriter.java | eventbinder/src/main/java/com/google/web/bindery/event/gwt/rebind/binder/EventBinderWriter.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.gwt.rebind.binder;
import com.google.gwt.core.ext.TreeLogger;
import com.google.gwt.core.ext.TreeLogger.Type;
import com.google.gwt.core.ext.UnableToCompleteException;
import com.google.gwt.core.ext.typeinfo.JClassType;
import com.google.gwt.core.ext.typeinfo.JMethod;
import com.google.gwt.core.ext.typeinfo.TypeOracle;
import com.google.gwt.user.rebind.SourceWriter;
import com.google.web.bindery.event.shared.binder.EventHandler;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import java.util.ArrayList;
import java.util.List;
/**
* Writes implementations of
* {@link com.google.web.bindery.event.shared.binder.impl.AbstractEventBinder}. The
* generated class implements the single abstract doBindEventHandlers method by
* calling bind() for each method in the target annotated with
* {@link EventHandler}.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
class EventBinderWriter {
private final TreeLogger logger;
private final JClassType genericEventType;
EventBinderWriter(TreeLogger logger, JClassType genericEventType) {
this.logger = logger;
this.genericEventType = genericEventType;
}
void writeDoBindEventHandlers(JClassType target, SourceWriter writer, TypeOracle typeOracle)
throws UnableToCompleteException {
writeBindMethodHeader(writer, target.getQualifiedSourceName());
for (JMethod method : target.getInheritableMethods()) {
EventHandler annotation = method.getAnnotation(EventHandler.class);
if (annotation != null) {
writeHandlerForBindMethod(annotation, writer, method, typeOracle);
}
}
writeBindMethodFooter(writer);
}
private void writeBindMethodHeader(SourceWriter writer, String targetName) {
writer.println("protected List<HandlerRegistration> doBindEventHandlers("
+ "final %s target, EventBus eventBus) {",
targetName);
writer.indent();
writer.println(
"List<HandlerRegistration> registrations = new LinkedList<HandlerRegistration>();");
}
private void writeHandlerForBindMethod(EventHandler annotation, SourceWriter writer,
JMethod method, TypeOracle typeOracle) throws UnableToCompleteException {
JClassType eventParameter = null;
if (method.getParameterTypes().length == 1) {
eventParameter = method.getParameterTypes()[0].isClassOrInterface();
}
if (annotation.handles().length == 0 && !isAConcreteGenericEvent(eventParameter)) {
logger.log(Type.ERROR, "Method " + method.getName()
+ " annotated with @EventHandler without event classes must have exactly "
+ "one argument of a concrete type assignable to GenericEvent");
throw new UnableToCompleteException();
}
List<String> eventTypes = new ArrayList<String>();
if (annotation.handles().length != 0) {
for (Class<? extends GenericEvent> event : annotation.handles()) {
String eventTypeName = event.getCanonicalName();
JClassType eventClassType = typeOracle.findType(eventTypeName);
if (eventClassType == null) {
logger.log(Type.ERROR, "Can't resolve " + eventTypeName);
throw new UnableToCompleteException();
}
if (eventParameter != null && !eventClassType.isAssignableTo(eventParameter)) {
logger.log(Type.ERROR, "Event " + eventTypeName + " isn't assignable to "
+ eventParameter.getName() + " in method: " + method.getName());
throw new UnableToCompleteException();
}
eventTypes.add(eventClassType.getQualifiedSourceName());
}
} else {
eventTypes.add(eventParameter.getQualifiedSourceName());
}
for (String eventType : eventTypes) {
writer.println("bind(eventBus, registrations, %s.class, new GenericEventHandler() {",
eventType);
if (eventParameter != null) {
writer.indentln("public void handleEvent(GenericEvent event) { target.%s((%s) event); }",
method.getName(), eventType);
} else {
writer.indentln("public void handleEvent(GenericEvent event) { target.%s(); }",
method.getName());
}
writer.println("});");
}
}
private boolean isAConcreteGenericEvent(JClassType param) {
return param != null && !param.isAbstract() && param.isAssignableTo(genericEventType);
}
private void writeBindMethodFooter(SourceWriter writer) {
writer.println("return registrations;");
writer.outdent();
writer.println("}");
}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/EventBinder.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/EventBinder.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
/**
* API for generated code that connects an EventBus to some event handlers. Use
* GWT.create() to get instances of this interface. The generated class will
* search the target's methods for any annotated with {@link EventHandler} and
* register a handler with the event bus that calls that method. The first and
* only parameter of the annotated methods must specify the type of event to
* handle; the method's name is ignored.
* <p>
* Users of this interface should define an interface which extends
* EventBinder, and invoke bindEventHandlers on an instance of that interface in
* the class's constructor:
*
* <pre>
* interface MyEventBinder extends EventBinder<MyClass> {}
* private static MyEventBinder eventBinder = GWT.create(MyEventBinder.class);
*
* public MyClass(EventBus eventBus) {
* eventBinder.bindEventHandlers(this, eventBus);
* }
*
* {@literal @}EventHandler
* void onContactsLoaded(ContactsLoadedEvent event) {
* // Interesting stuff goes here...
* }
* </pre>
* See {@link EventHandler} for more usage examples.
*
* @param <T> type of object being bound, which should be the same as the type
* enclosing this interface
* @author ekuefler@google.com (Erik Kuefler)
*/
public interface EventBinder<T> {
/**
* Connects an event bus to each event handler method on a target object.
* After this method returns, whenever the event bus delivers an event, it
* will call the handler with the same event type on the given target.
*
* @param target class to search for {@link EventHandler}-annotated methods
* @param eventBus event bus on which handlers for the annotated methods
* should be registered
* @return a registration that can be used to unbind all handlers registered
* via this call
*/
public HandlerRegistration bindEventHandlers(T target, EventBus eventBus);
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/EventHandler.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/EventHandler.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder;
import com.google.web.bindery.event.shared.EventBus;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation for a method that handles events sent through an {@link EventBus}.
* The method must have one parameter, which is usually a subclass of
* {@link GenericEvent}. The method will start receiving events after it is
* connected to the EventBus using an {@link EventBinder}. Only the parameter of
* the method is relevant; the name is ignored. For example, the given method
* will be invoked whenever a ProfileLoadedEvent is fired on the event bus to
* which the {@link EventBinder} was bound:
*
* <pre>
* {@literal @}EventHandler
* void onProfileLoaded(ProfileLoadedEvent event) {
* getView().setEmail(event.getProfile().getEmailAddress());
* getView().showSignoutMenu();
* }
* </pre>
*
* Note that an {@link EventBinder} MUST be used to register these annotations,
* otherwise they will have no effect.
*
* @see EventBinder
* @author ekuefler@google.com (Erik Kuefler)
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface EventHandler {
/**
* <p>Events that should be handled by the annotated method.</p>
*
* <p>If handles array is empty (default) then the method handles only event
* of type of the method's parameter and in this case the method's parameter
* is required. If handles array is not empty the method's parameter may be
* omitted.</p>
*
* <p>Every event type from this array should be assignable to the parameter
* of the method.</p>
*
* <p>Examples:</p>
* <ul>
* <li>Method onEvent1 handles only EventOne</li>
* <li>Method onEvent2 handles EventOne and EventTwo without event parameter</li>
* <li>Method onEvent3 handles EventOne and EventTwo and has event parameter</li>
* </ul>
*
* <pre>
* {@literal @}EventHandler
* void onEvent1(EventOne event) {
* doSomething1();
* }
* {@literal @}EventHandler(handles = {EventOne.class, EventTwo.class})
* void onEvent2() {
* doSomething2();
* }
*
* {@literal @}EventHandler(handles = {EventOne.class, EventTwo.class})
* void onEvent3(ParentOfEventOneAndTwo event) {
* doSomething3();
* }
* </pre>
*
*/
Class<? extends GenericEvent>[] handles() default {};
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/GenericEvent.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/GenericEvent.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder;
import com.google.gwt.event.shared.GwtEvent;
import com.google.web.bindery.event.shared.binder.impl.GenericEventHandler;
import com.google.web.bindery.event.shared.binder.impl.GenericEventType;
/**
* Base class for all events fired on the event bus. Subclasses of this can be
* listened for in presenters using the {@link EventHandler} annotation.
* <p>
* Events (subclasses of this class) should be very simple and immutable value
* types. In the simplest case for an event that takes no arguments, the body of
* the class can be entirely blank. In this case, the event is effectively just
* a tag that is never referenced directly after it is fired.
* <p>
* In the slightly more complex case, events can take any number of arguments in
* their constructor. These arguments should be assigned to final fields for
* which public accessors should be exposed. Handlers can then access the
* arguments via the public methods. Events should rarely contains more logic
* than this and MUST be immutable. Since the same event is passed to each
* handler and the order in which the handlers will see the events is undefined,
* mutable events are very dangerous.
* <p>
* A complete example of a single-argument event is shown below:
* <pre>
* public class ContactsLoadedEvent extends GenericEvent {
*
* private final List<Contacts> contacts;
*
* public PurchaseActionLoadedEvent(List<Contacts> contacts) {
* this.contacts = contacts;
* }
*
* public List<Contacts> getContacts() {
* return contacts;
* }
* }
* </pre>
*
* @author ekuefler@google.com (Erik Kuefler)
*/
public abstract class GenericEvent extends GwtEvent<GenericEventHandler> {
@Override
public GenericEventType getAssociatedType() {
return GenericEventType.getTypeOf(getClass());
}
@Override
protected void dispatch(GenericEventHandler handler) {
handler.handleEvent(this);
}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/AbstractEventBinder.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/AbstractEventBinder.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder.impl;
import com.google.web.bindery.event.shared.EventBus;
import com.google.web.bindery.event.shared.HandlerRegistration;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import java.util.List;
/**
* Base class containing code shared by all generated {@link EventBinder}s.
* Users shouldn't need to reference this class directly.
*
* @param <T> type of object being bound
* @author ekuefler@google.com (Erik Kuefler)
*/
public abstract class AbstractEventBinder<T> implements EventBinder<T> {
@Override
public final HandlerRegistration bindEventHandlers(T target, EventBus eventBus) {
final List<HandlerRegistration> registrations = doBindEventHandlers(target, eventBus);
return new HandlerRegistration() {
@Override
public void removeHandler() {
for (HandlerRegistration registration : registrations) {
registration.removeHandler();
}
registrations.clear();
}
};
}
/**
* Implemented by EventBinderGenerator to do the actual work of binding event handlers on the
* target.
*/
protected abstract List<HandlerRegistration> doBindEventHandlers(T target, EventBus eventBus);
/**
* Registers the given handler for the given event class on the given event bus. Factored out
* into a method here instead of generated directly in order to simplify the generated code and
* save a little space.
*/
protected final <U extends GenericEvent> void bind(
EventBus eventBus,
List<HandlerRegistration> registrations,
Class<U> type,
GenericEventHandler handler) {
registrations.add(eventBus.addHandler(GenericEventType.getTypeOf(type), handler));
}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/GenericEventType.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/GenericEventType.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder.impl;
import com.google.gwt.event.shared.GwtEvent.Type;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.GenericEvent;
import java.util.HashMap;
import java.util.Map;
/**
* Utility to maintain a mapping from subtypes of {@link GenericEvent} to
* {@link Type}s for those events. Users shouldn't need to reference this class
* directly.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
public class GenericEventType extends Type<GenericEventHandler> {

  // Cache of one shared Type instance per event class. Plain HashMap is used here;
  // NOTE(review): this assumes single-threaded (GWT client) access — confirm before
  // reusing this class server-side.
  private static final Map<Class<?>, GenericEventType> TYPE_MAP =
      new HashMap<Class<?>, GenericEventType>();

  /**
   * Returns the event type for the given event class, creating and caching it on first
   * use so repeated invocations for the same class yield the same object. Called by
   * generated {@link EventBinder}s; users shouldn't normally call this directly.
   */
  public static <T extends GenericEvent> GenericEventType getTypeOf(Class<T> clazz) {
    GenericEventType cached = TYPE_MAP.get(clazz);
    if (cached != null) {
      return cached;
    }
    GenericEventType created = new GenericEventType();
    TYPE_MAP.put(clazz, created);
    return created;
  }

  private GenericEventType() {}
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
google/gwteventbinder | https://github.com/google/gwteventbinder/blob/46d3e72e9006a4c7801c077b1b83ffb11bf687f7/eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/GenericEventHandler.java | eventbinder/src/main/java/com/google/web/bindery/event/shared/binder/impl/GenericEventHandler.java | /*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.web.bindery.event.shared.binder.impl;
import com.google.web.bindery.event.shared.binder.EventBinder;
import com.google.web.bindery.event.shared.binder.GenericEvent;
/**
* The API that a {@link GenericEvent} uses to send events to generated code.
* Instances of this class will be generated automatically when constructing an
* EventBinder. Having a single generic handler means that we don't have to
* define a separate handler interface for each type of event.
* <p>
* Instances of this class are instantiated automatically via
* {@link EventBinder} , so users should not have to reference this class unless
* they choose to bind events manually.
*
* @author ekuefler@google.com (Erik Kuefler)
*/
public interface GenericEventHandler extends com.google.gwt.event.shared.EventHandler {
  /** Invoked by generated binder code when a bound {@link GenericEvent} fires on the bus. */
  void handleEvent(GenericEvent event);
}
| java | Apache-2.0 | 46d3e72e9006a4c7801c077b1b83ffb11bf687f7 | 2026-01-05T02:40:44.700710Z | false |
MichaelEvans/ChromaHashView | https://github.com/MichaelEvans/ChromaHashView/blob/46099c8b8dd92ae3c2810766f7f4ef916417d720/library/src/main/java/org/michaelevans/chromahashview/ChromaHashView.java | library/src/main/java/org/michaelevans/chromahashview/ChromaHashView.java | package org.michaelevans.chromahashview;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.util.Log;
import android.widget.EditText;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* @author Michael Evans <michaelcevans10@gmail.com>
*/
/**
 * A password {@link EditText} that draws a "chroma hash": colored bars derived from the MD5
 * digest of the current text, giving visual feedback that a password was typed consistently
 * without revealing its characters.
 *
 * @author Michael Evans <michaelcevans10@gmail.com>
 */
public class ChromaHashView extends EditText {
    private static final String TAG = "ChromaHashView";
    // Number of color bars drawn at the right edge of the field.
    private static final int DEFAULT_NUM_OF_VALUES = 3;
    // Below this many characters the bars are desaturated to grey, leaking less color info
    // while the user is still typing a short prefix.
    private static final int MINIMUM_CHARACTER_THRESHOLD = 6;
    // Hash a fixed encoding so the same password yields the same colors on every device,
    // instead of depending on the platform default charset.
    private static final Charset UTF_8 = Charset.forName("UTF-8");

    Paint paint = new Paint();
    // Six-digit hex RGB strings derived from the digest, or null when nothing should be drawn.
    private String[] colors;
    MessageDigest md5 = null;

    public ChromaHashView(Context context) {
        super(context);
        init();
    }

    public ChromaHashView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public ChromaHashView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init() {
        try {
            md5 = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException nsaex) {
            // MD5 is a mandatory algorithm on Java/Android runtimes; if it is ever missing,
            // log it instead of silently leaving md5 null (which previously caused an NPE
            // on the first keystroke).
            Log.e(TAG, "MD5 MessageDigest unavailable; chroma hash disabled", nsaex);
        }
        setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_PASSWORD);
        addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                if (md5 == null || s.length() == 0) {
                    // Nothing typed (or no digest available): draw no bars.
                    colors = null;
                    return;
                }
                String text = s.toString();
                md5.reset();
                md5.update(text.getBytes(UTF_8));
                byte[] result = md5.digest();
                // Render the 16-byte digest as a 32-character lowercase hex string.
                StringBuilder hexString = new StringBuilder();
                for (byte aResult : result) {
                    String newByteFull = "00" + Integer.toHexString(0xFF & aResult);
                    hexString.append(newByteFull.substring(newByteFull.length() - 2));
                }
                String md5hash = hexString.toString();
                // Slice the hex digest into five 6-digit RGB color values.
                colors = new String[]{md5hash.substring(0, 6), md5hash.substring(6, 12), md5hash.substring(12, 18), md5hash.substring(18, 24), md5hash.substring(24, 30)};
                if (s.length() < MINIMUM_CHARACTER_THRESHOLD) {
                    for (int i = 0; i < colors.length; i++) {
                        colors[i] = colorToGreyScale(colors[i]);
                    }
                }
            }
        });
    }

    /** Collapses an RRGGBB color to a grey by repeating its red channel (RR -> RRRRRR). */
    private String colorToGreyScale(String color) {
        String r = color.substring(0, 2);
        return r + r + r;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        if (colors != null) {
            // Reserve room on the right for the bars, then draw them offset by the current
            // horizontal scroll so they stay pinned to the visible right edge.
            setPadding(getPaddingLeft(), getPaddingTop(), (20 * DEFAULT_NUM_OF_VALUES + 30), getPaddingBottom());
            for (int i = 0; i < DEFAULT_NUM_OF_VALUES; i++) {
                paint.setColor(Color.parseColor("#" + colors[i]));
                canvas.drawRect(getWidth() + getScrollX() - 20 * i - 35, 15, getWidth() + getScrollX() - 20 * i - 15, getHeight() - 15, paint);
            }
        }
        super.onDraw(canvas);
    }
}
| java | Apache-2.0 | 46099c8b8dd92ae3c2810766f7f4ef916417d720 | 2026-01-05T02:40:28.486586Z | false |
MichaelEvans/ChromaHashView | https://github.com/MichaelEvans/ChromaHashView/blob/46099c8b8dd92ae3c2810766f7f4ef916417d720/demo/src/main/java/org/michaelevans/chromahashview/demo/MainActivity.java | demo/src/main/java/org/michaelevans/chromahashview/demo/MainActivity.java | package org.michaelevans.chromahashview.demo;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Populate the action bar from its XML menu definition.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // The settings item is consumed here; everything else (including the Home/Up
        // button, which the framework handles when a parent activity is declared in
        // AndroidManifest.xml) is delegated to the superclass.
        return item.getItemId() == R.id.action_settings || super.onOptionsItemSelected(item);
    }
}
| java | Apache-2.0 | 46099c8b8dd92ae3c2810766f7f4ef916417d720 | 2026-01-05T02:40:28.486586Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/TestApplication.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/TestApplication.java | package dev.snowdrop.vertx.http.test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;
@RestController
@SpringBootApplication
public class TestApplication {
    // Boots the embedded reactive web server used by the integration tests in this module.
    public static void main(String[] args) {
        SpringApplication.run(TestApplication.class, args);
    }

    /** Root GET endpoint returning a fixed body; the ITs assert this exact value. */
    @GetMapping
    public Mono<String> test() {
        return Mono.just("test");
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientHttpsIT.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientHttpsIT.java | package dev.snowdrop.vertx.http.test;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.reactive.server.WebTestClient;
@SpringBootTest(
webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
properties = {
"vertx.http.client.ssl=true",
"vertx.http.client.trust-all=true",
"vertx.http.server.ssl=true",
"vertx.http.server.client-auth=NONE",
"server.ssl.key-store-type=JKS",
"server.ssl.key-store=target/test-classes/tls/server-keystore.jks",
"server.ssl.key-store-password=wibble"
}
)
public class DefaultWebTestClientHttpsIT {
@Autowired
private WebTestClient client;
@Test
public void testAccessToHttpsResource() {
client
.get()
.exchange()
.expectBody(String.class)
.isEqualTo("test");
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientIT.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientIT.java | package dev.snowdrop.vertx.http.test;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.web.reactive.server.WebTestClient;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class DefaultWebTestClientIT {
@Autowired
private WebTestClient client;
@Test
public void testAccessToHttpResource() {
client.get()
.exchange()
.expectBody(String.class)
.isEqualTo("test");
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientCodecIT.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/DefaultWebTestClientCodecIT.java | package dev.snowdrop.vertx.http.test;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Publisher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.codec.CodecCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.ResolvableType;
import org.springframework.core.codec.Decoder;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.util.MimeType;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@SpringBootTest(
    webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
    classes = { TestApplication.class, DefaultWebTestClientCodecIT.CustomWebTestClientConfiguration.class }
)
public class DefaultWebTestClientCodecIT {

    @Autowired
    private WebTestClient client;

    /**
     * The auto-configured WebTestClient must pick up user-registered codec customizers:
     * the stub decoder below turns any response body into {@code CustomType("test")}.
     */
    @Test
    public void testDefaultWebTestClient() {
        client.get()
                .exchange()
                .expectBody(CustomType.class)
                .isEqualTo(new CustomType("test"));
    }

    /** Minimal immutable value object used to exercise the custom decoder. */
    static class CustomType {

        private final String value;

        CustomType(String value) {
            Objects.requireNonNull(value);
            this.value = value;
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            // value is never null (enforced in the constructor).
            return value.equals(((CustomType) o).value);
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(value);
        }

        @Override
        public String toString() {
            return value;
        }
    }

    /** Decoder stub that ignores its input and always produces {@code CustomType("test")}. */
    static class CustomTypeDecoder implements Decoder<CustomType> {

        @Override
        public boolean canDecode(ResolvableType elementType, MimeType mimeType) {
            return true;
        }

        @Override
        public Flux<CustomType> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType, MimeType mimeType,
            Map<String, Object> hints) {
            return Flux.just(new CustomType("test"));
        }

        @Override
        public Mono<CustomType> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
            MimeType mimeType, Map<String, Object> hints) {
            return Mono.just(new CustomType("test"));
        }

        @Override
        public List<MimeType> getDecodableMimeTypes() {
            return Collections.singletonList(MimeType.valueOf("text/plain"));
        }
    }

    @Configuration
    static class CustomWebTestClientConfiguration {

        /** Registers the stub decoder with the application's codec configuration. */
        @Bean
        public CodecCustomizer codecCustomizer() {
            return configurer -> configurer.customCodecs().register(new CustomTypeDecoder());
        }
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/CustomWebTestClientIT.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/CustomWebTestClientIT.java | package dev.snowdrop.vertx.http.test;
import dev.snowdrop.vertx.http.client.VertxClientHttpConnector;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpMethod;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.test.web.reactive.server.WebTestClientConfigurer;
import static org.assertj.core.api.Assertions.assertThat;
@SpringBootTest(
    webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
    classes = { TestApplication.class, CustomWebTestClientIT.CustomWebTestClientConfiguration.class }
)
public class CustomWebTestClientIT {
    @Autowired
    private WebTestClient client;

    /** A user-defined WebTestClient bean must win over the auto-registered one. */
    @Test
    public void shouldInjectCustomClient() {
        assertThat(client).isInstanceOf(CustomWebTestClient.class);
    }

    @Configuration
    static class CustomWebTestClientConfiguration {
        // The connector parameter is unused by the stub; declaring it verifies that the
        // VertxClientHttpConnector bean is available for injection into user factories.
        @Bean
        public WebTestClient customWebTestClient(VertxClientHttpConnector connector) {
            return new CustomWebTestClient();
        }
    }

    /**
     * Stub client: every method returns null because the test above only checks which
     * bean implementation gets injected, never invokes it.
     */
    private static class CustomWebTestClient implements WebTestClient {
        @Override
        public RequestHeadersUriSpec<?> get() {
            return null;
        }
        @Override
        public RequestHeadersUriSpec<?> head() {
            return null;
        }
        @Override
        public RequestBodyUriSpec post() {
            return null;
        }
        @Override
        public RequestBodyUriSpec put() {
            return null;
        }
        @Override
        public RequestBodyUriSpec patch() {
            return null;
        }
        @Override
        public RequestHeadersUriSpec<?> delete() {
            return null;
        }
        @Override
        public RequestHeadersUriSpec<?> options() {
            return null;
        }
        @Override
        public RequestBodyUriSpec method(HttpMethod method) {
            return null;
        }
        @Override
        public Builder mutate() {
            return null;
        }
        @Override
        public WebTestClient mutateWith(WebTestClientConfigurer configurer) {
            return null;
        }
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerFactoryTest.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerFactoryTest.java | package dev.snowdrop.vertx.http.test;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ContextCustomizer;
import static org.assertj.core.api.Assertions.assertThat;
public class VertxWebTestClientContextCustomizerFactoryTest {

    private final VertxWebTestClientContextCustomizerFactory factory = new VertxWebTestClientContextCustomizerFactory();

    /** An @SpringBootTest class with an embedded web environment gets a customizer. */
    @Test
    public void shouldCreateCustomizer() {
        ContextCustomizer customizer = factory.createContextCustomizer(EmbeddedServerTestClass.class, null);
        assertThat(customizer).isNotNull();
        assertThat(customizer).isInstanceOf(VertxWebTestClientContextCustomizer.class);
    }

    /** @SpringBootTest with the default (mock, non-embedded) environment gets none. */
    // Fix: the two "ignore" test names were swapped relative to the fixture class each
    // one actually exercises; renamed so the name matches the scenario.
    @Test
    public void shouldIgnoreNonEmbeddedServerTestClass() {
        ContextCustomizer customizer = factory.createContextCustomizer(NonEmbeddedServerTestClass.class, null);
        assertThat(customizer).isNull();
    }

    /** A class without @SpringBootTest at all gets no customizer either. */
    @Test
    public void shouldIgnoreNonSpringBootTestClass() {
        ContextCustomizer customizer = factory.createContextCustomizer(NonTestClass.class, null);
        assertThat(customizer).isNull();
    }

    @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
    private static class EmbeddedServerTestClass {}

    @SpringBootTest
    private static class NonEmbeddedServerTestClass {}

    private static class NonTestClass {}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientRegistrarTest.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientRegistrarTest.java | package dev.snowdrop.vertx.http.test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.core.Ordered;
import org.springframework.test.web.reactive.server.WebTestClient;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
@ExtendWith(MockitoExtension.class)
public class VertxWebTestClientRegistrarTest {

    @Mock
    private ListableBeanFactory mockFactory;

    @Mock
    private ApplicationContext mockContext;

    @Mock
    private BeanDefinitionRegistry mockRegistry;

    @Mock
    private WebTestClient mockClient;

    private final VertxWebTestClientRegistrar registrar = new VertxWebTestClientRegistrar();

    @BeforeEach
    public void setUp() {
        registrar.setBeanFactory(mockFactory);
        registrar.setApplicationContext(mockContext);
    }

    /** Running last lets any user-defined WebTestClient bean take precedence. */
    @Test
    public void shouldHaveLowestPrecedence() {
        assertThat(registrar.getOrder()).isEqualTo(Ordered.LOWEST_PRECEDENCE);
    }

    /** With no existing WebTestClient bean, a lazy supplier-backed definition is added. */
    @Test
    public void shouldRegisterBean() {
        given(mockFactory.getBeanNamesForType(WebTestClient.class, false, false))
                .willReturn(new String[]{});

        registrar.postProcessBeanDefinitionRegistry(mockRegistry);

        ArgumentCaptor<RootBeanDefinition> captor = ArgumentCaptor.forClass(RootBeanDefinition.class);
        verify(mockRegistry).registerBeanDefinition(eq(WebTestClient.class.getName()), captor.capture());

        RootBeanDefinition registered = captor.getValue();
        assertThat(registered.getBeanClass()).isEqualTo(WebTestClient.class);
        assertThat(registered.getInstanceSupplier()).isInstanceOf(VertxWebTestClientSupplier.class);
        assertThat(registered.isLazyInit()).isTrue();
    }

    /** An existing WebTestClient bean suppresses registration entirely. */
    @Test
    public void shouldSkipIfBeanExists() {
        given(mockFactory.getBeanNamesForType(WebTestClient.class, false, false))
                .willReturn(new String[]{ WebTestClient.class.getName() });

        registrar.postProcessBeanDefinitionRegistry(mockRegistry);

        verifyNoInteractions(mockRegistry);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerTest.java | vertx-spring-boot-starter-http-test/src/test/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerTest.java | package dev.snowdrop.vertx.http.test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ConfigurableApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
@ExtendWith(MockitoExtension.class)
public class VertxWebTestClientContextCustomizerTest {

    @Mock
    private ConfigurableApplicationContext mockContext;

    @Mock
    private ConfigurableListableBeanFactory mockFactory;

    @Mock
    private DefaultListableBeanFactory mockRegistry;

    private final VertxWebTestClientContextCustomizer customizer = new VertxWebTestClientContextCustomizer();

    /** A registry-capable bean factory receives an infrastructure registrar definition. */
    @Test
    public void shouldRegisterBean() {
        given(mockContext.getBeanFactory()).willReturn(mockRegistry);

        customizer.customizeContext(mockContext, null);

        ArgumentCaptor<RootBeanDefinition> captor = ArgumentCaptor.forClass(RootBeanDefinition.class);
        verify(mockRegistry)
                .registerBeanDefinition(eq(VertxWebTestClientRegistrar.class.getName()), captor.capture());

        RootBeanDefinition registered = captor.getValue();
        assertThat(registered.getBeanClass()).isEqualTo(VertxWebTestClientRegistrar.class);
        assertThat(registered.getRole()).isEqualTo(BeanDefinition.ROLE_INFRASTRUCTURE);
    }

    /** A bean factory that is not a BeanDefinitionRegistry is left untouched. */
    @Test
    public void shouldIgnoreNonRegistryBeanFactory() {
        given(mockContext.getBeanFactory()).willReturn(mockFactory);

        customizer.customizeContext(mockContext, null);

        verifyNoInteractions(mockRegistry);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientSupplier.java | vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientSupplier.java | package dev.snowdrop.vertx.http.test;
import java.util.Collection;
import java.util.function.Supplier;
import dev.snowdrop.vertx.http.client.VertxClientHttpConnector;
import dev.snowdrop.vertx.http.server.VertxReactiveWebServerFactory;
import dev.snowdrop.vertx.http.server.properties.HttpServerProperties;
import org.springframework.boot.web.codec.CodecCustomizer;
import org.springframework.context.ApplicationContext;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.web.reactive.function.client.ExchangeStrategies;
/**
 * Lazily builds a {@link WebTestClient} bound to the running embedded server, using the
 * Vert.x connector and any user-registered codec customizers from the application context.
 */
public class VertxWebTestClientSupplier implements Supplier<WebTestClient> {

    private final ApplicationContext applicationContext;

    // Cached instance, created on first request. NOTE(review): no synchronization —
    // assumes single-threaded access during test setup; confirm before concurrent use.
    private WebTestClient webTestClient;

    public VertxWebTestClientSupplier(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    /** Returns the client, building it on first call and reusing it afterwards. */
    @Override
    public WebTestClient get() {
        if (webTestClient != null) {
            return webTestClient;
        }
        webTestClient = createWebTestClient();
        return webTestClient;
    }

    private WebTestClient createWebTestClient() {
        VertxClientHttpConnector connector = applicationContext.getBean(VertxClientHttpConnector.class);
        WebTestClient.Builder builder = WebTestClient.bindToServer(connector)
                .baseUrl(getProtocol() + "://localhost:" + getPort());
        customizeWebTestClientCodecs(builder);
        return builder.build();
    }

    // Scheme selection: explicit Ssl config on the server factory wins; otherwise fall
    // back to the Vert.x HTTP server properties.
    private String getProtocol() {
        VertxReactiveWebServerFactory factory = applicationContext.getBean(VertxReactiveWebServerFactory.class);
        boolean sslEnabled;
        if (factory.getSsl() == null) {
            sslEnabled = applicationContext.getBean(HttpServerProperties.class).isSsl();
        } else {
            sslEnabled = factory.getSsl().isEnabled();
        }
        return sslEnabled ? "https" : "http";
    }

    private String getPort() {
        return applicationContext.getEnvironment().getProperty("local.server.port", "8080");
    }

    // Applies every CodecCustomizer bean so the test client decodes like the application.
    private void customizeWebTestClientCodecs(WebTestClient.Builder builder) {
        Collection<CodecCustomizer> customizers = applicationContext.getBeansOfType(CodecCustomizer.class).values();
        ExchangeStrategies strategies = ExchangeStrategies.builder()
                .codecs(codecs -> customizers.forEach(customizer -> customizer.customize(codecs)))
                .build();
        builder.exchangeStrategies(strategies);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientRegistrar.java | vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientRegistrar.java | package dev.snowdrop.vertx.http.test;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.Ordered;
import org.springframework.test.web.reactive.server.WebTestClient;
/**
 * Registers a lazy {@link WebTestClient} bean definition backed by
 * {@link VertxWebTestClientSupplier}, unless the application already declares one.
 */
public class VertxWebTestClientRegistrar implements BeanDefinitionRegistryPostProcessor, Ordered, BeanFactoryAware,
        ApplicationContextAware {

    private BeanFactory beanFactory;
    private ApplicationContext applicationContext;

    @Override
    public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
        this.beanFactory = beanFactory;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    @Override
    public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
        // A user-defined WebTestClient always wins; do nothing if one already exists.
        if (isWebTestClientRegistered()) {
            return;
        }
        RootBeanDefinition definition = new RootBeanDefinition(
                WebTestClient.class, new VertxWebTestClientSupplier(applicationContext));
        definition.setLazyInit(true);
        registry.registerBeanDefinition(WebTestClient.class.getName(), definition);
    }

    @Override
    public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) {
        // Intentionally empty: all work happens at bean-definition-registry time.
    }

    /** Lowest precedence so every other registrar runs first. */
    @Override
    public int getOrder() {
        return Ordered.LOWEST_PRECEDENCE;
    }

    private boolean isWebTestClientRegistered() {
        String[] names = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(
                (ListableBeanFactory) this.beanFactory, WebTestClient.class, false, false);
        return names.length > 0;
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizer.java | vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizer.java | package dev.snowdrop.vertx.http.test;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.test.context.ContextCustomizer;
import org.springframework.test.context.MergedContextConfiguration;
/**
 * Test context customizer that installs {@link VertxWebTestClientRegistrar} as an
 * infrastructure bean, when the context's bean factory can accept bean definitions.
 */
public class VertxWebTestClientContextCustomizer implements ContextCustomizer {

    @Override
    public void customizeContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) {
        ConfigurableListableBeanFactory beanFactory = context.getBeanFactory();
        if (!(beanFactory instanceof BeanDefinitionRegistry)) {
            return;
        }
        registerWebTestClient((BeanDefinitionRegistry) beanFactory);
    }

    // All instances are interchangeable, so equality/hashing are by class only — this lets
    // the test framework cache contexts customized by any instance of this class.
    @Override
    public boolean equals(Object obj) {
        return obj != null && getClass() == obj.getClass();
    }

    @Override
    public int hashCode() {
        return getClass().hashCode();
    }

    private void registerWebTestClient(BeanDefinitionRegistry registry) {
        RootBeanDefinition definition = new RootBeanDefinition(VertxWebTestClientRegistrar.class);
        definition.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
        registry.registerBeanDefinition(VertxWebTestClientRegistrar.class.getName(), definition);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerFactory.java | vertx-spring-boot-starter-http-test/src/main/java/dev/snowdrop/vertx/http/test/VertxWebTestClientContextCustomizerFactory.java | package dev.snowdrop.vertx.http.test;
import java.util.List;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.test.context.ContextConfigurationAttributes;
import org.springframework.test.context.ContextCustomizer;
import org.springframework.test.context.ContextCustomizerFactory;
/**
 * Contributes a {@link VertxWebTestClientContextCustomizer} only for test classes that are
 * annotated with {@code @SpringBootTest} and use an embedded web environment.
 */
public class VertxWebTestClientContextCustomizerFactory implements ContextCustomizerFactory {

    @Override
    public ContextCustomizer createContextCustomizer(Class<?> testClass,
            List<ContextConfigurationAttributes> configAttributes) {
        return isEmbeddedSpringBootTest(testClass) ? new VertxWebTestClientContextCustomizer() : null;
    }

    private boolean isEmbeddedSpringBootTest(Class<?> testClass) {
        // Merged lookup also honors meta-annotations composed from @SpringBootTest.
        SpringBootTest annotation = AnnotatedElementUtils.getMergedAnnotation(testClass, SpringBootTest.class);
        if (annotation == null) {
            return false;
        }
        return annotation.webEnvironment().isEmbedded();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/test/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplicationTest.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/test/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplicationTest.java | package dev.snowdrop.vertx.sample.amqp;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.test.web.reactive.server.WebTestClient;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class AmqpSampleApplicationTest {

    @Autowired
    ApplicationContext context;

    private WebTestClient client;

    @BeforeEach
    public void setup() {
        client = WebTestClient.bindToApplicationContext(context).build();
    }

    @Test
    public void testProcessorController() {
        // Nothing has been processed yet.
        assertThat(getProcessedMessages()).isEmpty();

        submitMessageForProcessing("first");
        submitMessageForProcessing("second");

        // Processing is asynchronous; poll until the uppercased results arrive.
        await()
            .atMost(2, SECONDS)
            .untilAsserted(() -> assertThat(getProcessedMessages()).containsOnly("FIRST", "SECOND"));
    }

    // Reads the full event stream of processed messages exposed by the GET resource.
    private List<String> getProcessedMessages() {
        return client.get()
            .accept(TEXT_EVENT_STREAM)
            .exchange()
            .expectStatus().isOk()
            .returnResult(String.class)
            .getResponseBody()
            .collectList()
            .block();
    }

    // Submits a message via the POST resource and verifies it was accepted.
    private void submitMessageForProcessing(String message) {
        client.post()
            .bodyValue(message)
            .exchange()
            .expectStatus().isOk();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/Controller.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/Controller.java | package dev.snowdrop.vertx.sample.amqp;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM_VALUE;
/**
 * Rest controller exposing GET and POST resources to receive processed messages and submit messages for processing.
 */
@RestController
public class Controller {

    private final MessagesManager messagesManager;

    public Controller(MessagesManager messagesManager) {
        this.messagesManager = messagesManager;
    }

    /** Stream every message processed so far. */
    @GetMapping(produces = TEXT_EVENT_STREAM_VALUE)
    public Flux<String> getProcessedMessages() {
        Iterable<String> processed = messagesManager.getProcessedMessages();
        return Flux.fromIterable(processed);
    }

    /** Queue the request body (trimmed) for processing. */
    @PostMapping
    public Mono<Void> submitMessageForProcessing(@RequestBody String body) {
        String trimmed = body.trim();
        return messagesManager.processMessage(trimmed);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/MessagesManager.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/MessagesManager.java | package dev.snowdrop.vertx.sample.amqp;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import dev.snowdrop.vertx.amqp.AmqpClient;
import dev.snowdrop.vertx.amqp.AmqpMessage;
import org.apache.activemq.artemis.core.server.embedded.EmbeddedActiveMQ;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
import reactor.core.Disposable;
import reactor.core.publisher.Mono;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.PROCESSING_REQUESTS_QUEUE;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.PROCESSING_RESULTS_QUEUE;
/**
 * Processor client submits messages to the requests queue and subscribes to the results queue for processed messages.
 *
 * <p>Lifecycle: the results-queue subscription is opened in {@link #afterPropertiesSet()} and
 * disposed in {@link #destroy()}.
 */
@Component
public class MessagesManager implements InitializingBean, DisposableBean {
private final Logger logger = LoggerFactory.getLogger(MessagesManager.class);
// Thread-safe: written by the AMQP receiver callback, read by HTTP request handlers.
private final List<String> processedMessages = new CopyOnWriteArrayList<>();
private final AmqpClient client;
// Handle used to cancel the results-queue subscription on shutdown.
private Disposable receiverDisposer;
// Injecting EmbeddedActiveMQ to make sure it has started before creating this component.
public MessagesManager(AmqpClient client, EmbeddedActiveMQ server) {
this.client = client;
}
/**
 * Create a processed messages receiver and subscribe to its messages publisher.
 */
@Override
public void afterPropertiesSet() {
receiverDisposer = client.createReceiver(PROCESSING_RESULTS_QUEUE)
.flatMapMany(receiver -> receiver.flux()
.doOnCancel(() -> receiver.close().block())) // Close the receiver once subscription is disposed
.subscribe(this::handleMessage);
}
/**
 * Cancel processed messages publisher subscription.
 */
@Override
public void destroy() {
if (receiverDisposer != null) {
receiverDisposer.dispose();
}
}
/**
 * Get messages which were processed up to this moment.
 *
 * @return List of processed messages. Note: this is the live internal list, not a copy.
 */
public List<String> getProcessedMessages() {
return processedMessages;
}
/**
 * Submit a message for processing by publishing it to a processing requests queue.
 *
 * @param body Message body to be processed.
 * @return Mono which is completed once message is sent.
 */
public Mono<Void> processMessage(String body) {
logger.info("Sending message '{}' for processing", body);
AmqpMessage message = AmqpMessage.create()
.withBody(body)
.build();
// A new sender is opened per message and closed after the broker acknowledges delivery.
return client.createSender(PROCESSING_REQUESTS_QUEUE)
.flatMap(sender -> sender.sendWithAck(message).then(sender.close()));
}
// Record a processed message received from the results queue.
private void handleMessage(AmqpMessage message) {
String body = message.bodyAsString();
logger.info("Received processed message '{}'", body);
processedMessages.add(body);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplication.java | package dev.snowdrop.vertx.sample.amqp;
import org.apache.activemq.artemis.core.server.embedded.EmbeddedActiveMQ;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
// Sample application wiring an embedded ActiveMQ Artemis broker for the AMQP processing demo.
@SpringBootApplication
public class AmqpSampleApplication {
// Queue names shared by the uppercase processor and the messages manager.
final static String PROCESSING_REQUESTS_QUEUE = "processing-requests";
final static String PROCESSING_RESULTS_QUEUE = "processing-results";
public static void main(String[] args) {
SpringApplication.run(AmqpSampleApplication.class, args);
}
// Embedded broker; Spring invokes stop() on context shutdown (destroyMethod = "stop").
@Bean(destroyMethod = "stop")
public EmbeddedActiveMQ embeddedBroker() throws Exception {
return new EmbeddedActiveMQ().start();
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/UppercaseProcessor.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp/src/main/java/dev/snowdrop/vertx/sample/amqp/UppercaseProcessor.java | package dev.snowdrop.vertx.sample.amqp;
import dev.snowdrop.vertx.amqp.AmqpClient;
import dev.snowdrop.vertx.amqp.AmqpMessage;
import dev.snowdrop.vertx.amqp.AmqpSender;
import org.apache.activemq.artemis.core.server.embedded.EmbeddedActiveMQ;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
import reactor.core.Disposable;
import reactor.core.publisher.Mono;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.PROCESSING_REQUESTS_QUEUE;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.PROCESSING_RESULTS_QUEUE;
/**
 * Uppercase processor subscribes to the requests queue, converts each received message to uppercase and send it to the
 * results queue.
 *
 * <p>Lifecycle: the requests-queue subscription is opened in {@link #afterPropertiesSet()} and
 * disposed in {@link #destroy()}.
 */
@Component
public class UppercaseProcessor implements InitializingBean, DisposableBean {
private final Logger logger = LoggerFactory.getLogger(UppercaseProcessor.class);
private final AmqpClient client;
// Handle used to cancel the requests-queue subscription on shutdown.
private Disposable receiverDisposer;
// Injecting EmbeddedActiveMQ to make sure it has started before creating this component.
public UppercaseProcessor(AmqpClient client, EmbeddedActiveMQ server) {
this.client = client;
}
/**
 * Create a processing requests receiver and subscribe to its messages publisher.
 */
@Override
public void afterPropertiesSet() {
receiverDisposer = client.createReceiver(PROCESSING_REQUESTS_QUEUE)
.flatMapMany(receiver -> receiver.flux()
.doOnCancel(() -> receiver.close().block())) // Close the receiver once subscription is disposed
.flatMap(this::handleMessage)
.subscribe();
}
/**
 * Cancel processing requests publisher subscription.
 */
@Override
public void destroy() {
if (receiverDisposer != null) {
receiverDisposer.dispose();
}
}
/**
 * Convert message body to an uppercase and send it to a results queue.
 *
 * @param originalMessage message taken from the requests queue
 * @return Mono completing once the result is acknowledged by the broker
 */
private Mono<Void> handleMessage(AmqpMessage originalMessage) {
logger.info("Processing '{}'", originalMessage.bodyAsString());
// NOTE(review): toUpperCase() uses the default locale here — confirm that is intended.
AmqpMessage processedMessage = AmqpMessage.create()
.withBody(originalMessage.bodyAsString().toUpperCase())
.build();
// A new sender is opened per message and closed after delivery is acknowledged.
return client.createSender(PROCESSING_RESULTS_QUEUE)
.flatMap(sender -> sender.sendWithAck(processedMessage).then(sender.close()));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/test/java/dev/snowdrop/vertx/sample/kafka/KafkaSampleApplicationTest.java | vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/test/java/dev/snowdrop/vertx/sample/kafka/KafkaSampleApplicationTest.java | package dev.snowdrop.vertx.sample.kafka;
import java.time.Duration;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.test.web.reactive.server.WebTestClient;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@EmbeddedKafka(partitions = 1, ports = { 9092 })
public class KafkaSampleApplicationTest {

    @Autowired
    ApplicationContext context;

    private WebTestClient client;

    @BeforeEach
    public void setup() {
        client = WebTestClient.bindToApplicationContext(context).build();
    }

    @Test
    public void shouldLogAndReceiveMessages() {
        logMessage("first");
        logMessage("second");

        // The log is fed asynchronously from Kafka; poll until both messages show up.
        await()
            .atMost(Duration.ofSeconds(2))
            .untilAsserted(() -> assertThat(getLoggedMessages()).containsOnly("first", "second"));
    }

    // Fetches the complete event stream of logged messages.
    private List<String> getLoggedMessages() {
        return client.get()
            .accept(TEXT_EVENT_STREAM)
            .exchange()
            .expectStatus().isOk()
            .returnResult(String.class)
            .getResponseBody()
            .collectList()
            .block(Duration.ofSeconds(2));
    }

    // Posts a message to the logging endpoint and verifies it was accepted.
    private void logMessage(String message) {
        client.post()
            .bodyValue(message)
            .exchange()
            .expectStatus().isOk();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/Controller.java | vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/Controller.java | package dev.snowdrop.vertx.sample.kafka;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM_VALUE;
/**
 * HTTP controller exposing GET and POST resources to log messages and to receive the previously logged ones.
 */
@RestController
public class Controller {

    private final KafkaLogger logger;
    private final KafkaLog log;

    public Controller(KafkaLogger logger, KafkaLog log) {
        this.logger = logger;
        this.log = log;
    }

    /** Stream the messages logged so far. */
    @GetMapping(produces = TEXT_EVENT_STREAM_VALUE)
    public Flux<String> getMessages() {
        Iterable<String> logged = log.getMessages();
        return Flux.fromIterable(logged);
    }

    /** Log the request body, trimmed of surrounding whitespace. */
    @PostMapping
    public Mono<Void> logMessage(@RequestBody String body) {
        String message = body.trim();
        return logger.logMessage(message);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaLog.java | vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaLog.java | package dev.snowdrop.vertx.sample.kafka;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import dev.snowdrop.vertx.kafka.ConsumerRecord;
import dev.snowdrop.vertx.kafka.KafkaConsumer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import reactor.core.Disposable;
import static dev.snowdrop.vertx.sample.kafka.KafkaSampleApplication.LOG_TOPIC;
/**
 * Collects messages from the Kafka log topic into an in-memory list.
 *
 * <p>Subscribes to the topic on startup and tears the subscription down on shutdown.
 */
final class KafkaLog implements InitializingBean, DisposableBean {
// Thread-safe: written by the Kafka consumer callback, read by HTTP handlers.
private final List<String> messages = new CopyOnWriteArrayList<>();
private final KafkaConsumer<String, String> consumer;
// Handle used to cancel the record subscription on shutdown.
private Disposable consumerDisposer;
KafkaLog(KafkaConsumer<String, String> consumer) {
this.consumer = consumer;
}
// Subscribe to the log topic and append each record's value to the in-memory list.
@Override
public void afterPropertiesSet() {
consumerDisposer = consumer.subscribe(LOG_TOPIC)
.thenMany(consumer.flux())
.log("Kafka log consumer")
.map(ConsumerRecord::value)
.subscribe(messages::add);
}
// Dispose the record subscription, then unsubscribe from the topic with a bounded wait.
@Override
public void destroy() {
if (consumerDisposer != null) {
consumerDisposer.dispose();
}
consumer.unsubscribe()
.block(Duration.ofSeconds(2));
}
// Live view of the messages received so far (not a copy).
public List<String> getMessages() {
return messages;
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaSampleApplication.java | package dev.snowdrop.vertx.sample.kafka;
import dev.snowdrop.vertx.kafka.KafkaConsumerFactory;
import dev.snowdrop.vertx.kafka.KafkaProducerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
// Sample application wiring a Kafka-backed message log.
@SpringBootApplication
public class KafkaSampleApplication {
// Topic shared by the logger (producer) and the log (consumer).
static final String LOG_TOPIC = "log";
public static void main(String[] args) {
SpringApplication.run(KafkaSampleApplication.class, args);
}
// Producer side of the log: publishes messages to the log topic.
@Bean
public KafkaLogger kafkaLogger(KafkaProducerFactory producerFactory) {
return new KafkaLogger(producerFactory.create());
}
// Consumer side of the log: accumulates messages received from the log topic.
@Bean
public KafkaLog kafkaLog(KafkaConsumerFactory consumerFactory) {
return new KafkaLog(consumerFactory.create());
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaLogger.java | vertx-spring-boot-samples/vertx-spring-boot-sample-kafka/src/main/java/dev/snowdrop/vertx/sample/kafka/KafkaLogger.java | package dev.snowdrop.vertx.sample.kafka;
import dev.snowdrop.vertx.kafka.KafkaProducer;
import dev.snowdrop.vertx.kafka.ProducerRecord;
import reactor.core.publisher.Mono;
import static dev.snowdrop.vertx.sample.kafka.KafkaSampleApplication.LOG_TOPIC;
/**
 * Publishes log messages to the Kafka log topic.
 */
final class KafkaLogger {

    private final KafkaProducer<String, String> producer;

    KafkaLogger(KafkaProducer<String, String> producer) {
        this.producer = producer;
    }

    /**
     * Send the given body to the log topic.
     *
     * @param body message payload to publish
     * @return Mono completing once the record has been sent
     */
    public Mono<Void> logMessage(String body) {
        // Generic key and value types can be inferred if both key and value are used to create a builder
        ProducerRecord<String, String> record = ProducerRecord.<String, String>builder(LOG_TOPIC, body).build();
        Mono<?> send = producer.send(record).log("Kafka logger producer");
        return send.then();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-chunked/src/main/java/dev/snowdrop/vertx/sample/chunked/ChunkedResponseSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-chunked/src/main/java/dev/snowdrop/vertx/sample/chunked/ChunkedResponseSampleApplication.java | package dev.snowdrop.vertx.sample.chunked;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.ServerResponse;
import static org.springframework.web.reactive.function.server.RouterFunctions.resources;
import static org.springframework.web.reactive.function.server.RouterFunctions.route;
@SpringBootApplication
public class ChunkedResponseSampleApplication {

    public static void main(String[] args) {
        SpringApplication.run(ChunkedResponseSampleApplication.class, args);
    }

    /** Routes GET /data to the chunked data handler. */
    @Bean
    public RouterFunction<ServerResponse> dataRouter(DataHandler dataHandler) {
        return route().GET("/data", dataHandler::get).build();
    }

    /** Serves the static UI from classpath:/static. */
    @Bean
    public RouterFunction<ServerResponse> staticResourceRouter() {
        ClassPathResource root = new ClassPathResource("static/");
        return resources("/**", root);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-chunked/src/main/java/dev/snowdrop/vertx/sample/chunked/DataHandler.java | vertx-spring-boot-samples/vertx-spring-boot-sample-chunked/src/main/java/dev/snowdrop/vertx/sample/chunked/DataHandler.java | package dev.snowdrop.vertx.sample.chunked;
import java.time.Duration;
import java.util.List;
import dev.snowdrop.vertx.mail.MailClient;
import dev.snowdrop.vertx.mail.MailMessage;
import dev.snowdrop.vertx.mail.SimpleMailMessage;
import dev.snowdrop.vertx.mail.MailResult;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import org.springframework.web.server.ServerWebInputException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import static org.springframework.http.MediaType.APPLICATION_JSON;
import static org.springframework.web.reactive.function.server.ServerResponse.ok;
// Streams JSON entries from httpbin.org to the caller while emailing the same entries in batches.
@Component
public class DataHandler {
private static final String FROM_ADDRESS = "examples@snowdrop.dev";
private final MailClient mailClient;
private final WebClient client;
public DataHandler(MailClient mailClient, WebClient.Builder clientBuilder) {
this.mailClient = mailClient;
// All data requests go to the public httpbin.org service.
this.client = clientBuilder
.baseUrl("https://httpbin.org")
.build();
}
/**
 * Stream {@code count} entries from httpbin to the caller and email them in batches of 10.
 *
 * @param request must carry "count" and "email" query parameters; a missing one yields a
 *                400 via {@link ServerWebInputException}
 * @return chunked JSON response backed by the httpbin stream
 */
public Mono<ServerResponse> get(ServerRequest request) {
String count = request.queryParam("count")
.orElseThrow(() -> new ServerWebInputException("Count is required"))
String email = request.queryParam("email")
.orElseThrow(() -> new ServerWebInputException("Email is required"));
System.out.println(String.format("Request for %s entries", count));
// Get data from httpbin
Flux<String> chunks = client.get()
.uri("/stream/{count}", count)
.retrieve()
.bodyToFlux(String.class)
.log()
// Delay to make a stream of data easily visible in the UI
.delayElements(Duration.ofMillis(200))
// Share the upstream between the email pipeline and the HTTP response;
// refCount(2) starts the httpbin request only once both subscribers have attached.
.publish()
.refCount(2);
// Send batches of 10 entries by email
chunks.buffer(10)
.flatMap(entries -> this.sendEmail(email, entries))
.subscribe();
// Return a stream of entries to the requester
return ok()
.contentType(APPLICATION_JSON)
.body(chunks, String.class);
}
// Email one batch of entries to the given address.
private Mono<MailResult> sendEmail(String address, List<String> entries) {
System.out.println("Sending an email with " + entries.size() + " entries to " + address);
MailMessage message = new SimpleMailMessage()
.setFrom(FROM_ADDRESS)
.addTo(address)
.setSubject(String.format("%d entries from httpbin", entries.size()))
.setText(String.join(", ", entries));
return mailClient.send(message);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-mail/src/main/java/dev/snowdrop/vertx/sample/mail/MailHandler.java | vertx-spring-boot-samples/vertx-spring-boot-sample-mail/src/main/java/dev/snowdrop/vertx/sample/mail/MailHandler.java | package dev.snowdrop.vertx.sample.mail;
import dev.snowdrop.vertx.mail.MailClient;
import dev.snowdrop.vertx.mail.MailMessage;
import dev.snowdrop.vertx.mail.SimpleMailMessage;
import org.springframework.stereotype.Component;
import org.springframework.util.MultiValueMap;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import reactor.core.publisher.Mono;
import static org.springframework.web.reactive.function.server.ServerResponse.noContent;
/**
 * Handles the mail form: converts posted form data into a mail message and sends it.
 */
@Component
public class MailHandler {

    private final MailClient mailClient;

    public MailHandler(MailClient mailClient) {
        this.mailClient = mailClient;
    }

    /** Read the form, send the resulting message, and reply 204 No Content on success. */
    public Mono<ServerResponse> send(ServerRequest request) {
        return request.formData()
            .log()
            .map(form -> formToMessage(form))
            .flatMap(mailClient::send)
            .flatMap(ignored -> noContent().build());
    }

    // Maps the HTML form fields onto a mail message ("to" may hold multiple values).
    private MailMessage formToMessage(MultiValueMap<String, String> form) {
        SimpleMailMessage message = new SimpleMailMessage();
        return message
            .setFrom(form.getFirst("from"))
            .setTo(form.get("to"))
            .setSubject(form.getFirst("subject"))
            .setText(form.getFirst("text"));
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-mail/src/main/java/dev/snowdrop/vertx/sample/mail/MailSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-mail/src/main/java/dev/snowdrop/vertx/sample/mail/MailSampleApplication.java | package dev.snowdrop.vertx.sample.mail;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.ServerResponse;
import static org.springframework.http.MediaType.APPLICATION_FORM_URLENCODED;
import static org.springframework.web.reactive.function.server.RequestPredicates.accept;
import static org.springframework.web.reactive.function.server.RouterFunctions.resources;
import static org.springframework.web.reactive.function.server.RouterFunctions.route;
// Sample application serving the mail form UI and the POST /mail endpoint.
@SpringBootApplication
public class MailSampleApplication {
public static void main(String[] args) {
SpringApplication.run(MailSampleApplication.class, args);
}
// Routes form-encoded POST /mail requests to the mail handler.
@Bean
public RouterFunction<ServerResponse> mailRouter(MailHandler mailHandler) {
return route()
.POST("/mail", accept(APPLICATION_FORM_URLENCODED), mailHandler::send)
.build();
}
// Serves the static UI from classpath:/static.
@Bean
public RouterFunction<ServerResponse> staticResourceRouter() {
return resources("/**", new ClassPathResource("static/"));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http-oauth/src/main/java/dev/snowdrop/vertx/sample/http/oauth/OAuthSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http-oauth/src/main/java/dev/snowdrop/vertx/sample/http/oauth/OAuthSampleApplication.java | package dev.snowdrop.vertx.sample.http.oauth;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
// Entry point for the OAuth2 login sample; security configuration lives in application properties.
@SpringBootApplication
public class OAuthSampleApplication {
public static void main(String[] args) {
SpringApplication.run(OAuthSampleApplication.class, args);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http-oauth/src/main/java/dev/snowdrop/vertx/sample/http/oauth/HelloController.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http-oauth/src/main/java/dev/snowdrop/vertx/sample/http/oauth/HelloController.java | package dev.snowdrop.vertx.sample.http.oauth;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.security.oauth2.core.user.OAuth2User;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;
@RestController
public class HelloController {

    /** Greets the authenticated user by the "name" attribute from the OAuth2 provider. */
    @GetMapping
    public Mono<String> hello(@AuthenticationPrincipal OAuth2User oauth2User) {
        Object name = oauth2User.getAttributes().get("name");
        return Mono.just("Hello, " + name + "!");
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/test/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplicationTest.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/test/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplicationTest.java | package dev.snowdrop.vertx.sample.amqp;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.test.web.reactive.server.WebTestClient;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class AmqpSampleApplicationTest {

    @Autowired
    ApplicationContext context;

    private WebTestClient client;

    @BeforeEach
    public void setup() {
        client = WebTestClient.bindToApplicationContext(context).build();
    }

    @Test
    public void shouldLogAndReceiveMessages() {
        logMessage("first");
        logMessage("second");

        // Delivery through the broker is asynchronous; poll until both messages are visible.
        await()
            .atMost(2, SECONDS)
            .untilAsserted(() -> assertThat(getLoggedMessages()).containsOnly("first", "second"));
    }

    // Reads the full event stream of logged messages from the GET resource.
    private List<String> getLoggedMessages() {
        return client.get()
            .accept(TEXT_EVENT_STREAM)
            .exchange()
            .expectStatus().isOk()
            .returnResult(String.class)
            .getResponseBody()
            .collectList()
            .block();
    }

    // Posts a message to the logging endpoint and verifies it was accepted.
    private void logMessage(String message) {
        client.post()
            .bodyValue(message)
            .exchange()
            .expectStatus().isOk();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/Controller.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/Controller.java | package dev.snowdrop.vertx.sample.amqp;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM_VALUE;
/**
 * HTTP controller exposing GET and POST resources to log messages and to receive the previously logged ones.
 */
@RestController
public class Controller {

    private final AmqpLogger logger;
    private final AmqpLog log;

    public Controller(AmqpLogger logger, AmqpLog log) {
        this.logger = logger;
        this.log = log;
    }

    /** Stream the messages logged so far. */
    @GetMapping(produces = TEXT_EVENT_STREAM_VALUE)
    public Flux<String> getMessages() {
        Iterable<String> logged = log.getMessages();
        return Flux.fromIterable(logged);
    }

    /** Log the request body, trimmed of surrounding whitespace. */
    @PostMapping
    public Mono<Void> logMessage(@RequestBody String body) {
        String message = body.trim();
        return logger.logMessage(message);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpLogger.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpLogger.java | package dev.snowdrop.vertx.sample.amqp;
import dev.snowdrop.vertx.amqp.AmqpClient;
import dev.snowdrop.vertx.amqp.AmqpMessage;
import reactor.core.publisher.Mono;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.QUEUE;
/**
 * Writes log messages to the AMQP log queue.
 */
final class AmqpLogger {

    private final AmqpClient client;

    AmqpLogger(AmqpClient client) {
        this.client = client;
    }

    /**
     * Publish the given body to the log queue.
     *
     * @param body message payload
     * @return Mono completing once the broker has acknowledged the message
     */
    public Mono<Void> logMessage(String body) {
        System.out.println("Sending message '" + body + "' to AMQP log");
        AmqpMessage message = AmqpMessage.create().withBody(body).build();
        // Open a sender per message; close it after delivery is acknowledged.
        return client.createSender(QUEUE)
            .flatMap(sender -> sender.sendWithAck(message).then(sender.close()));
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpLog.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpLog.java | package dev.snowdrop.vertx.sample.amqp;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import dev.snowdrop.vertx.amqp.AmqpClient;
import dev.snowdrop.vertx.amqp.AmqpMessage;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import reactor.core.Disposable;
import static dev.snowdrop.vertx.sample.amqp.AmqpSampleApplication.QUEUE;
final class AmqpLog implements InitializingBean, DisposableBean {
private final List<String> messages = new CopyOnWriteArrayList<>();
private final AmqpClient client;
private Disposable receiverDisposer;
AmqpLog(AmqpClient client) {
this.client = client;
}
@Override
public void afterPropertiesSet() {
receiverDisposer = client.createReceiver(QUEUE)
.flatMapMany(receiver -> receiver.flux()
.doOnCancel(() -> receiver.close().block())) // Close the receiver once subscription is disposed
.subscribe(this::handleMessage);
}
@Override
public void destroy() {
if (receiverDisposer != null) {
receiverDisposer.dispose();
}
}
public List<String> getMessages() {
return messages;
}
private void handleMessage(AmqpMessage message) {
System.out.println("Received log message '" + message.bodyAsString() + "'");
messages.add(message.bodyAsString());
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-amqp-tls/src/main/java/dev/snowdrop/vertx/sample/amqp/AmqpSampleApplication.java | package dev.snowdrop.vertx.sample.amqp;
import dev.snowdrop.vertx.amqp.AmqpClient;
import org.apache.activemq.artemis.core.server.embedded.EmbeddedActiveMQ;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@SpringBootApplication
public class AmqpSampleApplication {
final static String QUEUE = "messages";
public static void main(String[] args) {
SpringApplication.run(AmqpSampleApplication.class, args);
}
// Injecting EmbeddedActiveMQ to make sure it has started before creating this bean.
@Bean
public AmqpLogger amqpLogger(AmqpClient client, EmbeddedActiveMQ server) {
return new AmqpLogger(client);
}
// Injecting EmbeddedActiveMQ to make sure it has started before creating this bean.
@Bean
public AmqpLog amqpLog(AmqpClient client, EmbeddedActiveMQ server) {
return new AmqpLog(client);
}
@Bean(destroyMethod = "stop")
public EmbeddedActiveMQ embeddedBroker() throws Exception {
return new EmbeddedActiveMQ().start();
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http/src/main/java/dev/snowdrop/vertx/sample/http/HttpSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http/src/main/java/dev/snowdrop/vertx/sample/http/HttpSampleApplication.java | package dev.snowdrop.vertx.sample.http;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.ServerRequest;
import org.springframework.web.reactive.function.server.ServerResponse;
import reactor.core.publisher.Mono;
import static org.springframework.web.reactive.function.BodyInserters.fromValue;
import static org.springframework.web.reactive.function.server.RouterFunctions.route;
import static org.springframework.web.reactive.function.server.ServerResponse.ok;
@SpringBootApplication
public class HttpSampleApplication {
public static void main(String[] args) {
SpringApplication.run(HttpSampleApplication.class, args);
}
@Bean
public RouterFunction<ServerResponse> helloRouter() {
return route()
.GET("/hello", this::helloHandler)
.build();
}
private Mono<ServerResponse> helloHandler(ServerRequest request) {
String name = request
.queryParam("name")
.orElse("World");
String message = String.format("Hello, %s!", name);
return ok()
.body(fromValue(message));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-sse/src/main/java/dev/snowdrop/vertx/sample/sse/SseController.java | vertx-spring-boot-samples/vertx-spring-boot-sample-sse/src/main/java/dev/snowdrop/vertx/sample/sse/SseController.java | package dev.snowdrop.vertx.sample.sse;
import java.time.Duration;
import java.util.Random;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
@RestController
public class SseController {
@GetMapping(produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<Integer> getRandomNumberStream() {
Random random = new Random();
return Flux.interval(Duration.ofSeconds(1))
.map(i -> random.nextInt())
.log();
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-sse/src/main/java/dev/snowdrop/vertx/sample/sse/SseSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-sse/src/main/java/dev/snowdrop/vertx/sample/sse/SseSampleApplication.java | package dev.snowdrop.vertx.sample.sse;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SseSampleApplication {
public static void main(String[] args) {
SpringApplication.run(SseSampleApplication.class, args);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/SecurityConfiguration.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/SecurityConfiguration.java | package dev.snowdrop.vertx.sample.http.security;
import org.springframework.context.annotation.Bean;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.core.userdetails.MapReactiveUserDetailsService;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
@EnableWebFluxSecurity
public class SecurityConfiguration {
@Bean
public MapReactiveUserDetailsService userDetailsService() {
UserDetails user = User.withDefaultPasswordEncoder()
.username("user")
.password("user")
.roles("USER")
.build();
return new MapReactiveUserDetailsService(user);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/HttpSecuritySampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/HttpSecuritySampleApplication.java | package dev.snowdrop.vertx.sample.http.security;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class HttpSecuritySampleApplication {
public static void main(String[] args) {
SpringApplication.run(HttpSecuritySampleApplication.class, args);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/HelloController.java | vertx-spring-boot-samples/vertx-spring-boot-sample-http-security/src/main/java/dev/snowdrop/vertx/sample/http/security/HelloController.java | package dev.snowdrop.vertx.sample.http.security;
import java.security.Principal;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Mono;
@RestController
public class HelloController {
@GetMapping("/")
public Mono<String> hello(Mono<Principal> principal) {
return principal
.map(Principal::getName)
.map(this::helloMessage);
}
private String helloMessage(String username) {
return "Hello, " + username + "!";
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-websocket/src/test/java/dev/snowdrop/vertx/sample/websocket/WebSocketSampleApplicationTest.java | vertx-spring-boot-samples/vertx-spring-boot-sample-websocket/src/test/java/dev/snowdrop/vertx/sample/websocket/WebSocketSampleApplicationTest.java | package dev.snowdrop.vertx.sample.websocket;
import java.net.URI;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.web.reactive.socket.WebSocketMessage;
import org.springframework.web.reactive.socket.client.WebSocketClient;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.contains;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class WebSocketSampleApplicationTest {
@LocalServerPort
private int port;
@Autowired
private WebSocketClient client;
private Disposable disposable;
private URI serviceUri;
@BeforeEach
public void setUp() {
serviceUri = URI.create("ws://localhost:" + port + "/echo-upper");
}
@AfterEach
public void tearDown() {
if (disposable != null) {
disposable.dispose(); // Terminate the socket subscription
}
}
@Test
public void testWebSocket() {
Flux<String> originalMessages = Flux.just("first", "second");
List<String> responseMessages = new CopyOnWriteArrayList<>();
disposable = client.execute(serviceUri, session -> {
// Convert strings to WebSocket messages and send them
Mono<Void> outputMono = session.send(originalMessages.map(session::textMessage));
Mono<Void> inputMono = session.receive() // Receive a messages stream
.map(WebSocketMessage::getPayloadAsText) // Extract a payload from each message
.doOnNext(responseMessages::add) // Store the payload to a collection
.then();
return outputMono.then(inputMono); // Start receiving messages after sending.
}).subscribe(); // Subscribe to the socket. Original messages will be sent and then we'll start receiving responses.
await()
.atMost(2, SECONDS)
.until(() -> responseMessages, contains("FIRST", "SECOND"));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-samples/vertx-spring-boot-sample-websocket/src/main/java/dev/snowdrop/vertx/sample/websocket/WebSocketSampleApplication.java | vertx-spring-boot-samples/vertx-spring-boot-sample-websocket/src/main/java/dev/snowdrop/vertx/sample/websocket/WebSocketSampleApplication.java | package dev.snowdrop.vertx.sample.websocket;
import java.util.Collections;
import java.util.Map;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.web.reactive.HandlerMapping;
import org.springframework.web.reactive.handler.SimpleUrlHandlerMapping;
import org.springframework.web.reactive.socket.WebSocketHandler;
import org.springframework.web.reactive.socket.WebSocketMessage;
import org.springframework.web.reactive.socket.WebSocketSession;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@SpringBootApplication
public class WebSocketSampleApplication {
public static void main(String[] args) {
SpringApplication.run(WebSocketSampleApplication.class, args);
}
@Bean
public HandlerMapping handlerMapping() {
// Define URL mapping for the socket handlers
Map<String, WebSocketHandler> handlers = Collections.singletonMap("/echo-upper", this::toUppercaseHandler);
SimpleUrlHandlerMapping handlerMapping = new SimpleUrlHandlerMapping();
handlerMapping.setUrlMap(handlers);
// Set a higher precedence than annotated controllers (smaller value means higher precedence)
handlerMapping.setOrder(-1);
return handlerMapping;
}
private Mono<Void> toUppercaseHandler(WebSocketSession session) {
Flux<WebSocketMessage> messages = session.receive() // Get incoming messages stream
.filter(message -> message.getType() == WebSocketMessage.Type.TEXT) // Filter out non-text messages
.map(message -> message.getPayloadAsText().toUpperCase()) // Execute service logic
.map(session::textMessage); // Create a response message
return session.send(messages); // Send response messages
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/test/java/dev/snowdrop/vertx/VertxPropertiesTest.java | vertx-spring-boot-starter/src/test/java/dev/snowdrop/vertx/VertxPropertiesTest.java | package dev.snowdrop.vertx;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import static org.assertj.core.api.Assertions.assertThat;
@SpringBootTest(
classes = VertxAutoConfiguration.class,
properties = {
"vertx.eventLoopPoolSize=1",
"vertx.maxWorkerExecuteTimeUnit=SECONDS",
"vertx.fileSystem.fileCachingEnabled=false"
})
public class VertxPropertiesTest {
@Autowired
private VertxProperties properties;
@Test
public void shouldGetProperties() {
// Verify default primitive value
assertThat(properties.getWorkerPoolSize()).isEqualTo(20);
// Verify overwritten primitive value
assertThat(properties.getEventLoopPoolSize()).isEqualTo(1);
// Verify default enum value
assertThat(properties.getMaxEventLoopExecuteTimeUnit()).isEqualTo(TimeUnit.NANOSECONDS);
// Verify overwritten enum value
assertThat(properties.getMaxWorkerExecuteTimeUnit()).isEqualTo(TimeUnit.SECONDS);
// Verify default file system value
assertThat(properties.getFileSystem().isClassPathResolvingEnabled()).isTrue();
// Verify overwritten file system value
assertThat(properties.getFileSystem().isFileCachingEnabled()).isFalse();
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/test/java/dev/snowdrop/vertx/VertxAutoConfigurationTest.java | vertx-spring-boot-starter/src/test/java/dev/snowdrop/vertx/VertxAutoConfigurationTest.java | package dev.snowdrop.vertx;
import io.vertx.core.Vertx;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import static org.assertj.core.api.Assertions.assertThat;
@SpringBootTest(classes = VertxAutoConfiguration.class)
public class VertxAutoConfigurationTest {
@Autowired
private Vertx vertx;
@Test
public void shouldInjectVertxInstance() {
assertThat(vertx).isNotNull();
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/VertxAutoConfiguration.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/VertxAutoConfiguration.java | package dev.snowdrop.vertx;
import io.vertx.core.Vertx;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@ConditionalOnClass(Vertx.class)
@EnableConfigurationProperties(VertxProperties.class)
public class VertxAutoConfiguration {
// Let the Vertx user to handle instance closing.
// This is done in particular for HTTP server which is closed by Spring Context after beans are destroyed.
// Allowing Vertx bean to be destroyed by the context would block HTTP server from calling its close method.
@Bean(destroyMethod = "")
public Vertx vertx(VertxProperties properties) {
return Vertx.vertx(properties.toVertxOptions());
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/VertxProperties.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/VertxProperties.java | package dev.snowdrop.vertx;
import java.util.List;
import java.util.concurrent.TimeUnit;
import io.vertx.core.VertxOptions;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.dns.AddressResolverOptions;
import io.vertx.core.file.FileSystemOptions;
import io.vertx.core.metrics.MetricsOptions;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties(prefix = VertxProperties.PROPERTIES_PREFIX)
public class VertxProperties {
static final String PROPERTIES_PREFIX = "vertx";
/**
* A number of event loop threads to be used by the Vert.x instance.
* <p>
* Default: 2 * available processors.
*
* @see VertxOptions#getEventLoopPoolSize()
*/
private int eventLoopPoolSize = VertxOptions.DEFAULT_EVENT_LOOP_POOL_SIZE;
/**
* A maximum number of worker threads to be used by the Vert.x instance.
* Worker threads are used for running blocking code and worker verticles.
* <p>
* Default: 20
*
* @see VertxOptions#getWorkerPoolSize()
*/
private int workerPoolSize = VertxOptions.DEFAULT_WORKER_POOL_SIZE;
/**
* An internal blocking pool size.
* Vert.x maintains a pool for internal blocking operations
* <p>
* Default: 20
*
* @see VertxOptions#getWorkerPoolSize()
*/
private int internalBlockingPoolSize = VertxOptions.DEFAULT_INTERNAL_BLOCKING_POOL_SIZE;
/**
* A blocked thread check period, in {@link VertxProperties#blockedThreadCheckIntervalUnit}.
* This setting determines how often Vert.x will check whether event loop threads are executing for too long.
* <p>
* Default: 1000ms
*
* @see VertxOptions#getBlockedThreadCheckInterval()
*/
private long blockedThreadCheckInterval = VertxOptions.DEFAULT_BLOCKED_THREAD_CHECK_INTERVAL;
/**
* Get the value of max event loop execute time, in {@link VertxProperties#maxEventLoopExecuteTimeUnit}.
* Vert.x will automatically log a warning if it detects that event loop threads haven't returned within this time.
* This can be used to detect where the user is blocking an event loop thread, contrary to the Golden Rule of the
* holy Event Loop.
* <p>
* Default: 2000000000ns (2s)
*
* @see VertxOptions#getMaxEventLoopExecuteTime()
*/
private long maxEventLoopExecuteTime = VertxOptions.DEFAULT_MAX_EVENT_LOOP_EXECUTE_TIME;
/**
* Get the value of max worker execute time, in {@link VertxProperties#maxWorkerExecuteTimeUnit}.
* Vert.x will automatically log a warning if it detects that worker threads haven't returned within this time.
* This can be used to detect where the user is blocking a worker thread for too long. Although worker threads
* can be blocked longer than event loop threads, they shouldn't be blocked for long periods of time.
* <p>
* Default: 60000000000ns (60s)
*
* @see VertxOptions#getMaxWorkerExecuteTime()
*/
private long maxWorkerExecuteTime = VertxOptions.DEFAULT_MAX_WORKER_EXECUTE_TIME;
/**
* Whether HA is enabled on the Vert.x instance.
* <p>
* Default: false
*
* @see VertxOptions#isHAEnabled()
*/
private boolean haEnabled = VertxOptions.DEFAULT_HA_ENABLED;
/**
* A quorum size to be used when HA is enabled.
* <p>
* Default: 1
*
* @see VertxOptions#getQuorumSize()
*/
private int quorumSize = VertxOptions.DEFAULT_QUORUM_SIZE;
/**
* An HA group to be used when HA is enabled.
* <p>
* Default: __DEFAULT__
*
* @see VertxOptions#getHAGroup()
*/
private String haGroup = VertxOptions.DEFAULT_HA_GROUP;
/**
* A threshold value in {@link VertxProperties#warningExceptionTimeUnit} above which a blocked warning contains a stack trace.
* <p>
* Default: 5000000000ns (5s)
*
* @see VertxOptions#getWarningExceptionTime()
*/
private long warningExceptionTime = TimeUnit.SECONDS.toNanos(5);
/**
* Whether to prefer the native transport to the JDK transport.
* <p>
* Default: false
*
* @see VertxOptions#getPreferNativeTransport()
*/
private boolean preferNativeTransport = VertxOptions.DEFAULT_PREFER_NATIVE_TRANSPORT;
/**
* A time unit of {@link VertxProperties#maxEventLoopExecuteTime}.
* <p>
* Default: ns
*
* @see VertxOptions#getMaxEventLoopExecuteTimeUnit()
*/
private TimeUnit maxEventLoopExecuteTimeUnit = VertxOptions.DEFAULT_MAX_EVENT_LOOP_EXECUTE_TIME_UNIT;
/**
* A time unit of {@link VertxProperties#maxWorkerExecuteTime}.
* <p>
* Default: ns
*
* @see VertxOptions#getMaxWorkerExecuteTimeUnit()
*/
private TimeUnit maxWorkerExecuteTimeUnit = VertxOptions.DEFAULT_MAX_WORKER_EXECUTE_TIME_UNIT;
/**
* A time unit of {@link VertxProperties#warningExceptionTime}.
* <p>
* Default: ns
*
* @see VertxOptions#getWarningExceptionTimeUnit()
*/
private TimeUnit warningExceptionTimeUnit = VertxOptions.DEFAULT_WARNING_EXCEPTION_TIME_UNIT;
/**
* A time unit of {@link VertxProperties#blockedThreadCheckInterval}.
* <p>
* Default: ms
*
* @see VertxOptions#getBlockedThreadCheckIntervalUnit()
*/
private TimeUnit blockedThreadCheckIntervalUnit = VertxOptions.DEFAULT_BLOCKED_THREAD_CHECK_INTERVAL_UNIT;
/**
* Whether metrics are enabled on the Vert.x instance.
* <p>
* Default: false
*
* @see MetricsOptions#isEnabled()
*/
private boolean metricsEnabled = MetricsOptions.DEFAULT_METRICS_ENABLED;
private FileSystem fileSystem = new FileSystem();
private AddressResolver addressResolver = new AddressResolver();
public VertxOptions toVertxOptions() {
VertxOptions vertxOptions = new VertxOptions();
vertxOptions.setEventLoopPoolSize(eventLoopPoolSize);
vertxOptions.setWorkerPoolSize(workerPoolSize);
vertxOptions.setInternalBlockingPoolSize(internalBlockingPoolSize);
vertxOptions.setBlockedThreadCheckInterval(blockedThreadCheckInterval);
vertxOptions.setMaxEventLoopExecuteTime(maxEventLoopExecuteTime);
vertxOptions.setMaxWorkerExecuteTime(maxWorkerExecuteTime);
vertxOptions.setHAEnabled(haEnabled);
vertxOptions.setQuorumSize(quorumSize);
vertxOptions.setHAGroup(haGroup);
vertxOptions.setWarningExceptionTime(warningExceptionTime);
vertxOptions.setPreferNativeTransport(preferNativeTransport);
vertxOptions.setMaxEventLoopExecuteTimeUnit(maxEventLoopExecuteTimeUnit);
vertxOptions.setMaxWorkerExecuteTimeUnit(maxWorkerExecuteTimeUnit);
vertxOptions.setWarningExceptionTimeUnit(warningExceptionTimeUnit);
vertxOptions.setBlockedThreadCheckIntervalUnit(blockedThreadCheckIntervalUnit);
MetricsOptions metricsOptions = new MetricsOptions();
metricsOptions.setEnabled(metricsEnabled);
vertxOptions.setMetricsOptions(metricsOptions);
FileSystemOptions fileSystemOptions = new FileSystemOptions();
fileSystemOptions.setClassPathResolvingEnabled(fileSystem.isClassPathResolvingEnabled());
fileSystemOptions.setFileCachingEnabled(fileSystem.isFileCachingEnabled());
vertxOptions.setFileSystemOptions(fileSystemOptions);
AddressResolverOptions addressResolverOptions = new AddressResolverOptions();
addressResolverOptions.setHostsPath(addressResolver.getHostsPath());
addressResolverOptions.setHostsValue(addressResolver.getHostsValue());
addressResolverOptions.setServers(addressResolver.getServers());
addressResolverOptions.setOptResourceEnabled(addressResolver.isOptResourceEnabled());
addressResolverOptions.setCacheMinTimeToLive(addressResolver.getCacheMinTimeToLive());
addressResolverOptions.setCacheMaxTimeToLive(addressResolver.getCacheMaxTimeToLive());
addressResolverOptions.setCacheNegativeTimeToLive(addressResolver.getCacheNegativeTimeToLive());
addressResolverOptions.setQueryTimeout(addressResolver.getQueryTimeout());
addressResolverOptions.setMaxQueries(addressResolver.getMaxQueries());
addressResolverOptions.setRdFlag(addressResolver.isRdFlag());
addressResolverOptions.setSearchDomains(addressResolver.getSearchDomains());
addressResolverOptions.setNdots(addressResolver.getNdots());
addressResolverOptions.setRotateServers(addressResolver.isRotateServers());
vertxOptions.setAddressResolverOptions(addressResolverOptions);
return vertxOptions;
}
public int getEventLoopPoolSize() {
return eventLoopPoolSize;
}
public void setEventLoopPoolSize(int eventLoopPoolSize) {
this.eventLoopPoolSize = eventLoopPoolSize;
}
public int getWorkerPoolSize() {
return workerPoolSize;
}
public void setWorkerPoolSize(int workerPoolSize) {
this.workerPoolSize = workerPoolSize;
}
public int getInternalBlockingPoolSize() {
return internalBlockingPoolSize;
}
public void setInternalBlockingPoolSize(int internalBlockingPoolSize) {
this.internalBlockingPoolSize = internalBlockingPoolSize;
}
public long getBlockedThreadCheckInterval() {
return blockedThreadCheckInterval;
}
public void setBlockedThreadCheckInterval(long blockedThreadCheckInterval) {
this.blockedThreadCheckInterval = blockedThreadCheckInterval;
}
public long getMaxEventLoopExecuteTime() {
return maxEventLoopExecuteTime;
}
public void setMaxEventLoopExecuteTime(long maxEventLoopExecuteTime) {
this.maxEventLoopExecuteTime = maxEventLoopExecuteTime;
}
public long getMaxWorkerExecuteTime() {
return maxWorkerExecuteTime;
}
public void setMaxWorkerExecuteTime(long maxWorkerExecuteTime) {
this.maxWorkerExecuteTime = maxWorkerExecuteTime;
}
public boolean isHaEnabled() {
return haEnabled;
}
public void setHaEnabled(boolean haEnabled) {
this.haEnabled = haEnabled;
}
public int getQuorumSize() {
return quorumSize;
}
public void setQuorumSize(int quorumSize) {
this.quorumSize = quorumSize;
}
public String getHaGroup() {
return haGroup;
}
public void setHaGroup(String haGroup) {
this.haGroup = haGroup;
}
public long getWarningExceptionTime() {
return warningExceptionTime;
}
public void setWarningExceptionTime(long warningExceptionTime) {
this.warningExceptionTime = warningExceptionTime;
}
public boolean isPreferNativeTransport() {
return preferNativeTransport;
}
public void setPreferNativeTransport(boolean preferNativeTransport) {
this.preferNativeTransport = preferNativeTransport;
}
public TimeUnit getMaxEventLoopExecuteTimeUnit() {
return maxEventLoopExecuteTimeUnit;
}
public void setMaxEventLoopExecuteTimeUnit(TimeUnit maxEventLoopExecuteTimeUnit) {
this.maxEventLoopExecuteTimeUnit = maxEventLoopExecuteTimeUnit;
}
public TimeUnit getMaxWorkerExecuteTimeUnit() {
return maxWorkerExecuteTimeUnit;
}
public void setMaxWorkerExecuteTimeUnit(TimeUnit maxWorkerExecuteTimeUnit) {
this.maxWorkerExecuteTimeUnit = maxWorkerExecuteTimeUnit;
}
public TimeUnit getWarningExceptionTimeUnit() {
return warningExceptionTimeUnit;
}
public void setWarningExceptionTimeUnit(TimeUnit warningExceptionTimeUnit) {
this.warningExceptionTimeUnit = warningExceptionTimeUnit;
}
public TimeUnit getBlockedThreadCheckIntervalUnit() {
return blockedThreadCheckIntervalUnit;
}
public void setBlockedThreadCheckIntervalUnit(TimeUnit blockedThreadCheckIntervalUnit) {
this.blockedThreadCheckIntervalUnit = blockedThreadCheckIntervalUnit;
}
public boolean isMetricsEnabled() {
return metricsEnabled;
}
public void setMetricsEnabled(boolean metricsEnabled) {
this.metricsEnabled = metricsEnabled;
}
public FileSystem getFileSystem() {
return fileSystem;
}
public AddressResolver getAddressResolver() {
return addressResolver;
}
public static class FileSystem {
/**
* Whether classpath resolving is enabled.
* <p>
* Default: {@link FileSystemOptions#DEFAULT_CLASS_PATH_RESOLVING_ENABLED}
*
* @see FileSystemOptions#isClassPathResolvingEnabled()
*/
private boolean classPathResolvingEnabled = FileSystemOptions.DEFAULT_CLASS_PATH_RESOLVING_ENABLED;
/**
* Whether file caching is enabled for class path resolving.
* <p>
* Default: {@link FileSystemOptions#DEFAULT_FILE_CACHING_ENABLED}
*
* @see FileSystemOptions#isFileCachingEnabled()
*/
private boolean fileCachingEnabled = FileSystemOptions.DEFAULT_FILE_CACHING_ENABLED;
public boolean isClassPathResolvingEnabled() {
return classPathResolvingEnabled;
}
public void setClassPathResolvingEnabled(boolean classPathResolvingEnabled) {
this.classPathResolvingEnabled = classPathResolvingEnabled;
}
public boolean isFileCachingEnabled() {
return fileCachingEnabled;
}
public void setFileCachingEnabled(boolean fileCachingEnabled) {
this.fileCachingEnabled = fileCachingEnabled;
}
}
    /**
     * Nested configuration properties mirroring Vert.x {@code AddressResolverOptions}.
     * Each field defaults to the corresponding Vert.x default constant.
     */
    public static class AddressResolver {

        /**
         * A path to the alternate hosts configuration file.
         *
         * @see AddressResolverOptions#getHostsPath()
         */
        private String hostsPath;

        /**
         * A hosts configuration file value.
         *
         * @see AddressResolverOptions#getHostsValue()
         */
        private Buffer hostsValue;

        /**
         * A list of dns servers.
         * <p>
         * Default: null
         *
         * @see AddressResolverOptions#getServers()
         */
        private List<String> servers = AddressResolverOptions.DEFAULT_SERVERS;

        /**
         * Whether an optional record is automatically included in DNS queries.
         * <p>
         * Default: true
         *
         * @see AddressResolverOptions#isOptResourceEnabled()
         */
        private boolean optResourceEnabled = AddressResolverOptions.DEFAULT_OPT_RESOURCE_ENABLED;

        /**
         * A cache min TTL in seconds.
         * <p>
         * Default: 0
         *
         * @see AddressResolverOptions#getCacheMinTimeToLive()
         */
        private int cacheMinTimeToLive = AddressResolverOptions.DEFAULT_CACHE_MIN_TIME_TO_LIVE;

        /**
         * A cache max TTL in seconds.
         * <p>
         * Default: Integer.MAX_VALUE
         *
         * @see AddressResolverOptions#getCacheMaxTimeToLive()
         */
        private int cacheMaxTimeToLive = AddressResolverOptions.DEFAULT_CACHE_MAX_TIME_TO_LIVE;

        /**
         * A cache negative TTL in seconds.
         * <p>
         * Default: 0
         *
         * @see AddressResolverOptions#getCacheNegativeTimeToLive()
         */
        private int cacheNegativeTimeToLive = AddressResolverOptions.DEFAULT_CACHE_NEGATIVE_TIME_TO_LIVE;

        /**
         * A query timeout in milliseconds.
         * <p>
         * Default: 5000
         *
         * @see AddressResolverOptions#getQueryTimeout()
         */
        private long queryTimeout = AddressResolverOptions.DEFAULT_QUERY_TIMEOUT;

        /**
         * A maximum number of queries to be sent during a resolution.
         * <p>
         * Default: 4
         *
         * @see AddressResolverOptions#getMaxQueries()
         */
        private int maxQueries = AddressResolverOptions.DEFAULT_MAX_QUERIES;

        /**
         * A DNS queries <i>Recursion Desired</i> flag value.
         * <p>
         * Default: true
         *
         * @see AddressResolverOptions#getRdFlag()
         */
        private boolean rdFlag = AddressResolverOptions.DEFAULT_RD_FLAG;

        /**
         * A list of search domains.
         * <p>
         * Default: null
         *
         * @see AddressResolverOptions#getSearchDomains()
         */
        // NOTE: "SEACH" below mirrors the misspelled constant name in Vert.x's own
        // AddressResolverOptions — do not "correct" it locally or it will not compile.
        private List<String> searchDomains = AddressResolverOptions.DEFAULT_SEACH_DOMAINS;

        /**
         * An ndots value
         * <p>
         * Default: {@link AddressResolverOptions#DEFAULT_NDOTS}
         *
         * @see AddressResolverOptions#getNdots()
         */
        private int ndots = AddressResolverOptions.DEFAULT_NDOTS;

        /**
         * Whether a dns server selection uses round robin.
         * <p>
         * Default: {@link AddressResolverOptions#DEFAULT_ROTATE_SERVERS}
         *
         * @see AddressResolverOptions#isRotateServers()
         */
        private boolean rotateServers = AddressResolverOptions.DEFAULT_ROTATE_SERVERS;

        public String getHostsPath() {
            return hostsPath;
        }

        public void setHostsPath(String hostsPath) {
            this.hostsPath = hostsPath;
        }

        public Buffer getHostsValue() {
            return hostsValue;
        }

        public void setHostsValue(Buffer hostsValue) {
            this.hostsValue = hostsValue;
        }

        public List<String> getServers() {
            return servers;
        }

        public void setServers(List<String> servers) {
            this.servers = servers;
        }

        public boolean isOptResourceEnabled() {
            return optResourceEnabled;
        }

        public void setOptResourceEnabled(boolean optResourceEnabled) {
            this.optResourceEnabled = optResourceEnabled;
        }

        public int getCacheMinTimeToLive() {
            return cacheMinTimeToLive;
        }

        public void setCacheMinTimeToLive(int cacheMinTimeToLive) {
            this.cacheMinTimeToLive = cacheMinTimeToLive;
        }

        public int getCacheMaxTimeToLive() {
            return cacheMaxTimeToLive;
        }

        public void setCacheMaxTimeToLive(int cacheMaxTimeToLive) {
            this.cacheMaxTimeToLive = cacheMaxTimeToLive;
        }

        public int getCacheNegativeTimeToLive() {
            return cacheNegativeTimeToLive;
        }

        public void setCacheNegativeTimeToLive(int cacheNegativeTimeToLive) {
            this.cacheNegativeTimeToLive = cacheNegativeTimeToLive;
        }

        public long getQueryTimeout() {
            return queryTimeout;
        }

        public void setQueryTimeout(long queryTimeout) {
            this.queryTimeout = queryTimeout;
        }

        public int getMaxQueries() {
            return maxQueries;
        }

        public void setMaxQueries(int maxQueries) {
            this.maxQueries = maxQueries;
        }

        public boolean isRdFlag() {
            return rdFlag;
        }

        public void setRdFlag(boolean rdFlag) {
            this.rdFlag = rdFlag;
        }

        public List<String> getSearchDomains() {
            return searchDomains;
        }

        public void setSearchDomains(List<String> searchDomains) {
            this.searchDomains = searchDomains;
        }

        public int getNdots() {
            return ndots;
        }

        public void setNdots(int ndots) {
            this.ndots = ndots;
        }

        public boolean isRotateServers() {
            return rotateServers;
        }

        public void setRotateServers(boolean rotateServers) {
            this.rotateServers = rotateServers;
        }
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/Pump.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/Pump.java | package dev.snowdrop.vertx.streams;
public interface Pump {

    /**
     * Creates a pump that transfers elements from {@code readStream} to {@code writeStream}.
     * The pump must be started with {@link #start()} before any element is transferred.
     *
     * @param readStream  the stream to read elements from
     * @param writeStream the stream to write elements to
     * @param <T>         the element type
     * @return a new, not-yet-started pump
     */
    static <T> Pump pump(ReadStream<T> readStream, WriteStream<T> writeStream) {
        return new SnowdropPump<>(readStream, writeStream);
    }

    /**
     * Same as {@link #pump(ReadStream, WriteStream)}, but also sets the maximum size
     * of the destination's write queue.
     *
     * @param writeQueueMaxSize the maximum write queue size to apply to {@code writeStream}
     */
    static <T> Pump pump(ReadStream<T> readStream, WriteStream<T> writeStream, int writeQueueMaxSize) {
        return new SnowdropPump<>(readStream, writeStream, writeQueueMaxSize);
    }

    /**
     * Sets the maximum size of the destination's write queue.
     *
     * @return this pump, for chaining
     */
    Pump setWriteQueueMaxSize(int maxSize);

    /**
     * Starts the pump.
     *
     * @return this pump, for chaining
     */
    Pump start();

    /**
     * Stops the pump.
     *
     * @return this pump, for chaining
     */
    Pump stop();

    /**
     * Returns the number of elements pumped by this pump so far.
     */
    int numberPumped();
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/ReadStream.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/ReadStream.java | package dev.snowdrop.vertx.streams;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
public interface ReadStream<T> {

    /**
     * Adapts this stream to a {@link Mono} emitting its first element.
     */
    Mono<T> mono();

    /**
     * Adapts this stream to a {@link Flux} emitting all its elements.
     */
    Flux<T> flux();

    /**
     * Pauses the underlying Vert.x stream and wraps this stream in a {@link Pipe},
     * ready to be connected to a {@link WriteStream}.
     */
    default Pipe<T> pipe() {
        vertxReadStream().pause();
        return new SnowdropPipe<>(this);
    }

    /**
     * Pipes all elements of this stream to the given destination, completing when the
     * transfer finishes. Note: unlike {@link #pipe()}, the stream is not paused first —
     * this mirrors the Vert.x core {@code ReadStream.pipeTo} behavior.
     */
    default Mono<Void> pipeTo(WriteStream<T> destination) {
        return new SnowdropPipe<>(this).to(destination);
    }

    /**
     * Returns the underlying Vert.x read stream.
     * <p>
     * NOTE(review): the raw type appears deliberate — the Vert.x stream's element type
    * may differ from {@code T} in adapters (e.g. Vert.x vs Snowdrop buffer types);
     * confirm before parameterizing.
     */
    io.vertx.core.streams.ReadStream vertxReadStream();
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/SnowdropPipe.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/SnowdropPipe.java | package dev.snowdrop.vertx.streams;
import io.smallrye.mutiny.converters.uni.UniReactorConverters;
import reactor.core.publisher.Mono;
/**
 * {@link Pipe} implementation delegating to a Mutiny pipe that wraps the
 * Vert.x core {@code PipeImpl} built around the source read stream.
 */
class SnowdropPipe<T> implements Pipe<T> {

    private final io.vertx.mutiny.core.streams.Pipe<T> delegate;

    @SuppressWarnings({ "unchecked", "rawtypes" })
    SnowdropPipe(ReadStream<T> readStream) {
        // Bridge: Snowdrop read stream -> Vert.x core pipe -> Mutiny pipe.
        io.vertx.core.streams.impl.PipeImpl corePipe =
            new io.vertx.core.streams.impl.PipeImpl<>(readStream.vertxReadStream());
        this.delegate = new io.vertx.mutiny.core.streams.Pipe<>(corePipe);
    }

    @Override
    public Pipe<T> endOnFailure(boolean end) {
        delegate.endOnFailure(end);
        return this;
    }

    @Override
    public Pipe<T> endOnSuccess(boolean end) {
        delegate.endOnSuccess(end);
        return this;
    }

    @Override
    public Pipe<T> endOnComplete(boolean end) {
        delegate.endOnComplete(end);
        return this;
    }

    @Override
    public Mono<Void> to(WriteStream<T> destination) {
        // Wrap the destination's Vert.x write stream in its Mutiny counterpart,
        // then convert the resulting Uni to a Reactor Mono.
        io.vertx.mutiny.core.streams.WriteStream<T> mutinyDestination =
            io.vertx.mutiny.core.streams.WriteStream.newInstance(destination.vertxWriteStream());
        return delegate.to(mutinyDestination)
            .convert()
            .with(UniReactorConverters.toMono());
    }

    @Override
    public void close() {
        delegate.close();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/Pipe.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/Pipe.java | package dev.snowdrop.vertx.streams;
import reactor.core.publisher.Mono;
public interface Pipe<T> {

    /**
     * Sets whether the destination stream is ended when the source stream fails.
     *
     * @return this pipe, for chaining
     */
    Pipe<T> endOnFailure(boolean end);

    /**
     * Sets whether the destination stream is ended when the transfer succeeds.
     *
     * @return this pipe, for chaining
     */
    Pipe<T> endOnSuccess(boolean end);

    /**
     * Sets whether the destination stream is ended when the transfer completes
     * for any reason (success or failure).
     *
     * @return this pipe, for chaining
     */
    Pipe<T> endOnComplete(boolean end);

    /**
     * Starts transferring elements to the given destination.
     *
     * @param destination the stream to write elements to
     * @return a {@link Mono} completing when the transfer finishes
     */
    Mono<Void> to(WriteStream<T> destination);

    /**
     * Closes the pipe, releasing the source stream.
     */
    void close();
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/WriteStream.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/WriteStream.java | package dev.snowdrop.vertx.streams;
import java.util.function.Consumer;
import reactor.core.publisher.Mono;
public interface WriteStream<T> {

    /**
     * Sets a handler invoked when the stream fails.
     *
     * @return this stream, for chaining
     */
    WriteStream<T> exceptionHandler(Consumer<Throwable> handler);

    /**
     * Sets a handler invoked when the write queue drains below its maximum size.
     *
     * @return this stream, for chaining
     */
    WriteStream<T> drainHandler(Consumer<Void> handler);

    /**
     * Sets the maximum size of the write queue.
     *
     * @return this stream, for chaining
     */
    WriteStream<T> setWriteQueueMaxSize(int maxSize);

    /**
     * Returns {@code true} when the write queue is full; writers should then wait
     * for the drain handler before writing more.
     */
    boolean writeQueueFull();

    /**
     * Writes an element to the stream.
     *
     * @return a {@link Mono} completing when the write is accepted
     */
    Mono<Void> write(T data);

    /**
     * Ends the stream.
     *
     * @return a {@link Mono} completing when the stream has ended
     */
    Mono<Void> end();

    /**
     * Writes a final element, then ends the stream.
     */
    default Mono<Void> end(T data) {
        return write(data).then(end());
    }

    /**
     * Returns the underlying Vert.x write stream.
     * <p>
     * NOTE(review): raw type appears deliberate, matching {@code ReadStream.vertxReadStream()};
     * confirm before parameterizing.
     */
    io.vertx.core.streams.WriteStream vertxWriteStream();
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/SnowdropPump.java | vertx-spring-boot-starter/src/main/java/dev/snowdrop/vertx/streams/SnowdropPump.java | package dev.snowdrop.vertx.streams;
/**
 * {@link Pump} implementation that delegates to a Vert.x core pump built from
 * the underlying Vert.x read and write streams.
 */
class SnowdropPump<T> implements Pump {

    private final io.vertx.core.streams.Pump vertxPump;

    @SuppressWarnings("unchecked")
    SnowdropPump(ReadStream<T> readStream, WriteStream<T> writeStream) {
        this.vertxPump =
            io.vertx.core.streams.Pump.pump(readStream.vertxReadStream(), writeStream.vertxWriteStream());
    }

    @SuppressWarnings("unchecked")
    SnowdropPump(ReadStream<T> readStream, WriteStream<T> writeStream, int writeQueueMaxSize) {
        this.vertxPump = io.vertx.core.streams.Pump.pump(
            readStream.vertxReadStream(), writeStream.vertxWriteStream(), writeQueueMaxSize);
    }

    @Override
    public Pump setWriteQueueMaxSize(int maxSize) {
        vertxPump.setWriteQueueMaxSize(maxSize);
        return this;
    }

    @Override
    public Pump start() {
        vertxPump.start();
        return this;
    }

    @Override
    public Pump stop() {
        vertxPump.stop();
        return this;
    }

    @Override
    public int numberPumped() {
        return vertxPump.numberPumped();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropKafkaProducerTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropKafkaProducerTest.java | package dev.snowdrop.vertx.kafka;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import io.smallrye.mutiny.Uni;
import io.vertx.kafka.client.common.PartitionInfo;
import io.vertx.kafka.client.producer.RecordMetadata;
import reactor.test.StepVerifier;
/**
 * Unit tests for {@code SnowdropKafkaProducer}: verifies that calls are delegated to the
 * mocked Mutiny producer, that producer records are converted correctly, and that Mutiny
 * {@code Uni} results are adapted to Reactor types (checked with {@link StepVerifier}).
 */
@ExtendWith(MockitoExtension.class)
public class SnowdropKafkaProducerTest {

    @Mock
    private io.vertx.mutiny.kafka.client.producer.KafkaProducer<Integer, String> mockMutinyProducer;

    @Mock
    private io.vertx.kafka.client.producer.KafkaProducer<Integer, String> mockVertxProducer;

    private KafkaProducer<Integer, String> producer;

    @BeforeEach
    public void setUp() {
        producer = new SnowdropKafkaProducer<>(mockMutinyProducer);
    }

    @Test
    @SuppressWarnings("unchecked")
    public void shouldSend() {
        // Setup a response to be returned by Mutiny producer
        RecordMetadata vertxRecordMetadata = new RecordMetadata(2, 3, 4, "t");
        given(mockMutinyProducer.send(any()))
            .willReturn(Uni.createFrom().item(vertxRecordMetadata));

        // Create snowdrop producer record and send it
        ProducerRecord<Integer, String> record = ProducerRecord
            .builder("topic", "value", 1)
            .withTimestamp(2)
            .withPartition(3)
            .withHeader(Header.create("h1", "v1"))
            .build();
        StepVerifier.create(producer.send(record))
            .expectNext(new SnowdropRecordMetadata(vertxRecordMetadata))
            .verifyComplete();

        // Capture Mutiny producer record submitted by snowdrop producer
        ArgumentCaptor<io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord<Integer, String>> mutinyRecordCaptor =
            ArgumentCaptor.forClass(io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord.class);
        verify(mockMutinyProducer).send(mutinyRecordCaptor.capture());
        io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord<Integer, String> mutinyRecord =
            mutinyRecordCaptor.getValue();

        // Verify that snowdrop producer converted records correctly
        assertThat(mutinyRecord.topic()).isEqualTo("topic");
        assertThat(mutinyRecord.value()).isEqualTo("value");
        assertThat(mutinyRecord.key()).isEqualTo(1);
        assertThat(mutinyRecord.timestamp()).isEqualTo(2);
        assertThat(mutinyRecord.partition()).isEqualTo(3);
        assertThat(mutinyRecord.headers()).hasSize(1);
        assertThat(mutinyRecord.headers().get(0).key()).isEqualTo("h1");
        assertThat(mutinyRecord.headers().get(0).value().toString()).isEqualTo("v1");
    }

    @Test
    public void shouldGetPartition() {
        PartitionInfo firstPartitionInfo = mock(PartitionInfo.class);
        PartitionInfo secondPartitionInfo = mock(PartitionInfo.class);
        given(mockMutinyProducer.partitionsFor("test-topic"))
            .willReturn(Uni.createFrom().item(Arrays.asList(firstPartitionInfo, secondPartitionInfo)));

        StepVerifier.create(producer.partitionsFor("test-topic"))
            .expectNext(new SnowdropPartitionInfo(firstPartitionInfo))
            .expectNext(new SnowdropPartitionInfo(secondPartitionInfo))
            .verifyComplete();
    }

    @Test
    @SuppressWarnings("unchecked")
    public void shouldFlush() {
        given(mockMutinyProducer.flush())
            .will(args -> Uni.createFrom().voidItem());

        StepVerifier.create(producer.flush())
            .verifyComplete();
    }

    @Test
    public void shouldHandleFlushFailure() {
        given(mockMutinyProducer.flush())
            .will(args -> Uni.createFrom().failure(new RuntimeException("test")));

        StepVerifier.create(producer.flush())
            .verifyErrorMessage("test");
    }

    @Test
    public void shouldClose() {
        given(mockMutinyProducer.close())
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(producer.close())
            .verifyComplete();
    }

    @Test
    public void shouldCloseWithTimeout() {
        given(mockMutinyProducer.close(1L))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(producer.close(1L))
            .verifyComplete();
    }

    @Test
    public void shouldDoOnVertxProducer() {
        given(mockMutinyProducer.getDelegate())
            .willReturn(mockVertxProducer);

        AtomicReference<io.vertx.kafka.client.producer.KafkaProducer<Integer, String>> vertxConsumer =
            new AtomicReference<>();
        Function<io.vertx.kafka.client.producer.KafkaProducer<Integer, String>, Boolean> function = vp -> {
            vertxConsumer.set(vp);
            return true;
        };

        StepVerifier.create(producer.doOnVertxProducer(function))
            .expectNext(true)
            .verifyComplete();
        assertThat(vertxConsumer.get()).isEqualTo(mockVertxProducer);
    }

    @Test
    public void shouldAddExceptionHandler() {
        Consumer<Throwable> handler = System.out::println;
        producer.exceptionHandler(handler);

        verify(mockMutinyProducer).exceptionHandler(handler);
    }

    @Test
    public void shouldAddDrainHandler() {
        Consumer<Void> handler = System.out::println;
        producer.drainHandler(handler);

        // Handler is adapted before delegation, so only its presence can be verified
        verify(mockMutinyProducer).drainHandler(any());
    }

    @Test
    public void shouldWriteQueueMaxSize() {
        producer.setWriteQueueMaxSize(1);

        verify(mockMutinyProducer).setWriteQueueMaxSize(1);
    }

    @Test
    public void shouldCheckIfWriteQueueIsFull() {
        given(mockMutinyProducer.writeQueueFull()).willReturn(true);

        assertThat(producer.writeQueueFull()).isTrue();
    }

    @Test
    @SuppressWarnings("unchecked")
    public void shouldWrite() {
        given(mockMutinyProducer.write(any()))
            .willReturn(Uni.createFrom().voidItem());

        // Create snowdrop producer record and write it
        ProducerRecord<Integer, String> record = ProducerRecord
            .builder("topic", "value", 1)
            .withTimestamp(2)
            .withPartition(3)
            .withHeader(Header.create("h1", "v1"))
            .build();
        StepVerifier.create(producer.write(record))
            .verifyComplete();

        // Capture mutiny producer record submitted by snowdrop producer
        ArgumentCaptor<io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord<Integer, String>> mutinyRecordCaptor =
            ArgumentCaptor.forClass(io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord.class);
        verify(mockMutinyProducer).write(mutinyRecordCaptor.capture());
        io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord<Integer, String> mutinyRecord =
            mutinyRecordCaptor.getValue();

        // Verify that snowdrop producer converted records correctly
        assertThat(mutinyRecord.topic()).isEqualTo("topic");
        assertThat(mutinyRecord.value()).isEqualTo("value");
        assertThat(mutinyRecord.key()).isEqualTo(1);
        assertThat(mutinyRecord.timestamp()).isEqualTo(2);
        assertThat(mutinyRecord.partition()).isEqualTo(3);
        assertThat(mutinyRecord.headers()).hasSize(1);
        assertThat(mutinyRecord.headers().get(0).key()).isEqualTo("h1");
        assertThat(mutinyRecord.headers().get(0).value().toString()).isEqualTo("v1");
    }

    @Test
    public void shouldEnd() {
        given(mockMutinyProducer.end())
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(producer.end())
            .verifyComplete();
    }

    @Test
    public void shouldGetVertxWriteStream() {
        given(mockMutinyProducer.getDelegate())
            .willReturn(mockVertxProducer);

        assertThat(producer.vertxWriteStream()).isEqualTo(mockVertxProducer);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropPartitionTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropPartitionTest.java | package dev.snowdrop.vertx.kafka;
import io.vertx.kafka.client.common.TopicPartition;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/**
 * Unit tests for {@link SnowdropPartition} construction and argument validation.
 */
public class SnowdropPartitionTest {

    @Test
    public void shouldCreateFromValues() {
        SnowdropPartition partition = new SnowdropPartition("test-topic", 1);

        assertThat(partition.topic()).isEqualTo("test-topic");
        assertThat(partition.partition()).isEqualTo(1);
    }

    @Test
    public void shouldCreateFromVertxTopicPartition() {
        TopicPartition vertxPartition = new TopicPartition("test-topic", 1);
        SnowdropPartition partition = new SnowdropPartition(vertxPartition);

        assertThat(partition.topic()).isEqualTo("test-topic");
        assertThat(partition.partition()).isEqualTo(1);
    }

    @Test
    public void shouldNotAllowEmptyTopic() {
        // Fixed: the previous try/catch silently passed when no exception was thrown.
        assertThatThrownBy(() -> new SnowdropPartition(null, 1))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Topic cannot be empty");
    }

    @Test
    public void shouldNotAllowNegativePartition() {
        // Fixed: the previous try/catch silently passed when no exception was thrown.
        assertThatThrownBy(() -> new SnowdropPartition("test-topic", -1))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Partition cannot be negative");
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropKafkaConsumerTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropKafkaConsumerTest.java | package dev.snowdrop.vertx.kafka;
import java.time.Duration;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.Uni;
import io.vertx.kafka.client.common.PartitionInfo;
import io.vertx.kafka.client.common.TopicPartition;
import io.vertx.kafka.client.consumer.OffsetAndMetadata;
import io.vertx.kafka.client.consumer.OffsetAndTimestamp;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import reactor.test.StepVerifier;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@code SnowdropKafkaConsumer}: verifies delegation to the mocked Mutiny
 * consumer, conversion between Snowdrop and Vert.x partition/record types, and adaptation
 * of Mutiny {@code Uni}/{@code Multi} results to Reactor types via {@link StepVerifier}.
 */
@ExtendWith(MockitoExtension.class)
public class SnowdropKafkaConsumerTest {

    @Mock
    private io.vertx.kafka.client.consumer.KafkaConsumer<Integer, String> mockVertxConsumer;

    @Mock
    private io.vertx.mutiny.kafka.client.consumer.KafkaConsumer<Integer, String> mockMutinyConsumer;

    @Mock
    private io.vertx.mutiny.kafka.client.consumer.KafkaConsumerRecord<Integer, String> mockMutinyConsumerRecord;

    private SnowdropKafkaConsumer<Integer, String> consumer;

    @BeforeEach
    public void setUp() {
        consumer = new SnowdropKafkaConsumer<>(mockMutinyConsumer);
    }

    @Test
    public void shouldSubscribeToSingleTopic() {
        given(mockMutinyConsumer.subscribe("test-topic"))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.subscribe("test-topic"))
            .verifyComplete();
    }

    @Test
    public void shouldSubscribeToMultipleTopics() {
        given(mockMutinyConsumer.subscribe(toSet("test-topic-1", "test-topic-2")))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.subscribe(asList("test-topic-1", "test-topic-2")))
            .verifyComplete();
    }

    @Test
    public void shouldAssignSinglePartition() {
        given(mockMutinyConsumer.assign(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.assign(Partition.create("test-topic", 1)))
            .verifyComplete();
    }

    @Test
    public void shouldAssignMultiplePartitions() {
        Set<TopicPartition> vertxPartitions = toSet(
            new TopicPartition("test-topic-1", 0),
            new TopicPartition("test-topic-2", 1)
        );
        given(mockMutinyConsumer.assign(vertxPartitions))
            .willReturn(Uni.createFrom().voidItem());

        Collection<Partition> partitions = asList(
            Partition.create("test-topic-1", 0),
            Partition.create("test-topic-2", 1)
        );
        StepVerifier.create(consumer.assign(partitions))
            .verifyComplete();
    }

    @Test
    public void shouldUnsubscribe() {
        given(mockMutinyConsumer.unsubscribe())
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.unsubscribe())
            .verifyComplete();
    }

    @Test
    public void shouldGetSubscriptions() {
        given(mockMutinyConsumer.subscription())
            .willReturn(Uni.createFrom().item(toSet("test-topic-1", "test-topic-2")));

        StepVerifier.create(consumer.subscriptions())
            .expectNext("test-topic-1")
            .expectNext("test-topic-2")
            .verifyComplete();
    }

    @Test
    public void shouldGetAssignments() {
        Set<TopicPartition> vertxPartitions = toSet(
            new TopicPartition("test-topic-1", 0),
            new TopicPartition("test-topic-2", 1)
        );
        given(mockMutinyConsumer.assignment())
            .willReturn(Uni.createFrom().item(vertxPartitions));

        StepVerifier.create(consumer.assignments())
            .expectNext(Partition.create("test-topic-1", 0))
            .expectNext(Partition.create("test-topic-2", 1))
            .verifyComplete();
    }

    @Test
    public void shouldGetPartitionsFor() {
        PartitionInfo firstPartition = mock(PartitionInfo.class);
        PartitionInfo secondPartition = mock(PartitionInfo.class);
        given(mockMutinyConsumer.partitionsFor("test-topic"))
            .willReturn(Uni.createFrom().item(asList(firstPartition, secondPartition)));

        StepVerifier.create(consumer.partitionsFor("test-topic"))
            .expectNext(new SnowdropPartitionInfo(firstPartition))
            .expectNext(new SnowdropPartitionInfo(secondPartition))
            .verifyComplete();
    }

    @Test
    public void shouldAddPartitionsRevokedHandler() {
        // Invoke the registered handler with Vert.x partitions as soon as it is installed
        given(mockMutinyConsumer.partitionsRevokedHandler(any()))
            .will(a -> {
                Consumer<Set<TopicPartition>> handler = a.getArgument(0);
                handler.accept(toSet(
                    new TopicPartition("test-topic", 1),
                    new TopicPartition("test-topic", 2)
                ));
                return mockMutinyConsumer;
            });

        AtomicReference<Set<Partition>> partitions = new AtomicReference<>();
        consumer.partitionsRevokedHandler(partitions::set);

        await()
            .atMost(Duration.ofSeconds(2))
            .untilAtomic(partitions, is(notNullValue()));
        assertThat(partitions.get()).containsOnly(
            Partition.create("test-topic", 1),
            Partition.create("test-topic", 2)
        );
    }

    @Test
    public void shouldAddPartitionsAssignedHandler() {
        // Invoke the registered handler with Vert.x partitions as soon as it is installed
        given(mockMutinyConsumer.partitionsAssignedHandler(any()))
            .will(a -> {
                Consumer<Set<TopicPartition>> handler = a.getArgument(0);
                handler.accept(toSet(
                    new TopicPartition("test-topic", 1),
                    new TopicPartition("test-topic", 2)
                ));
                return mockMutinyConsumer;
            });

        AtomicReference<Set<Partition>> partitions = new AtomicReference<>();
        consumer.partitionsAssignedHandler(partitions::set);

        await()
            .atMost(Duration.ofSeconds(2))
            .untilAtomic(partitions, is(notNullValue()));
        assertThat(partitions.get()).containsOnly(
            Partition.create("test-topic", 1),
            Partition.create("test-topic", 2)
        );
    }

    @Test
    public void shouldSeek() {
        given(mockMutinyConsumer.seek(new TopicPartition("test-topic", 1), 2))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.seek(Partition.create("test-topic", 1), 2))
            .verifyComplete();
    }

    @Test
    public void shouldSeekToBeginningOfSinglePartition() {
        given(mockMutinyConsumer.seekToBeginning(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.seekToBeginning(Partition.create("test-topic", 1)))
            .verifyComplete();
    }

    @Test
    public void shouldSeekToBeginningOfMultiplePartitions() {
        given(mockMutinyConsumer.seekToBeginning(toSet(
            new TopicPartition("test-topic", 1),
            new TopicPartition("test-topic", 2)))
        ).willReturn(Uni.createFrom().voidItem());

        StepVerifier
            .create(consumer.seekToBeginning(asList(
                Partition.create("test-topic", 1),
                Partition.create("test-topic", 2)))
            ).verifyComplete();
    }

    @Test
    public void shouldSeekToEndOfSinglePartition() {
        given(mockMutinyConsumer.seekToEnd(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.seekToEnd(Partition.create("test-topic", 1)))
            .verifyComplete();
    }

    @Test
    public void shouldSeekToEndOfMultiplePartitions() {
        given(mockMutinyConsumer.seekToEnd(toSet(
            new TopicPartition("test-topic", 1),
            new TopicPartition("test-topic", 2)))
        ).willReturn(Uni.createFrom().voidItem());

        StepVerifier
            .create(consumer.seekToEnd(asList(
                Partition.create("test-topic", 1),
                Partition.create("test-topic", 2)))
            ).verifyComplete();
    }

    @Test
    public void shouldGetPosition() {
        given(mockMutinyConsumer.position(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().item(1L));

        StepVerifier.create(consumer.position(Partition.create("test-topic", 1)))
            .expectNext(1L)
            .verifyComplete();
    }

    @Test
    public void shouldGetCommitted() {
        // Only the offset of the OffsetAndMetadata is surfaced by the Snowdrop API
        given(mockMutinyConsumer.committed(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().item(new OffsetAndMetadata(2, "test-metadata")));

        StepVerifier.create(consumer.committed(Partition.create("test-topic", 1)))
            .expectNext(2L)
            .verifyComplete();
    }

    @Test
    public void shouldGetBeginningOffset() {
        given(mockMutinyConsumer.beginningOffsets(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().item(2L));

        StepVerifier.create(consumer.beginningOffset(Partition.create("test-topic", 1)))
            .expectNext(2L)
            .verifyComplete();
    }

    @Test
    public void shouldGetEndOffset() {
        given(mockMutinyConsumer.endOffsets(new TopicPartition("test-topic", 1)))
            .willReturn(Uni.createFrom().item(2L));

        StepVerifier.create(consumer.endOffset(Partition.create("test-topic", 1)))
            .expectNext(2L)
            .verifyComplete();
    }

    @Test
    public void shouldGetTimeOffset() {
        // Only the offset of the OffsetAndTimestamp is surfaced by the Snowdrop API
        given(mockMutinyConsumer.offsetsForTimes(new TopicPartition("test-topic", 1), 2L))
            .willReturn(Uni.createFrom().item(new OffsetAndTimestamp(2L, 3L)));

        StepVerifier.create(consumer.timeOffset(Partition.create("test-topic", 1), 2L))
            .expectNext(2L)
            .verifyComplete();
    }

    @Test
    public void shouldCommit() {
        given(mockMutinyConsumer.commit())
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.commit())
            .verifyComplete();
    }

    @Test
    public void shouldClose() {
        given(mockMutinyConsumer.close())
            .willReturn(Uni.createFrom().voidItem());

        StepVerifier.create(consumer.close())
            .verifyComplete();
    }

    @Test
    public void shouldDoOnVertxConsumer() {
        given(mockMutinyConsumer.getDelegate())
            .willReturn(mockVertxConsumer);

        AtomicReference<io.vertx.kafka.client.consumer.KafkaConsumer<Integer, String>> vertxConsumer =
            new AtomicReference<>();
        Function<io.vertx.kafka.client.consumer.KafkaConsumer<Integer, String>, Boolean> function = vc -> {
            vertxConsumer.set(vc);
            return true;
        };

        StepVerifier.create(consumer.doOnVertxConsumer(function))
            .expectNext(true)
            .verifyComplete();
        assertThat(vertxConsumer.get()).isEqualTo(mockVertxConsumer);
    }

    @Test
    public void shouldGetMono() {
        given(mockMutinyConsumer.toMulti()).willReturn(Multi.createFrom().item(mockMutinyConsumerRecord));

        StepVerifier.create(consumer.mono())
            .expectNext(new SnowdropConsumerRecord<>(mockMutinyConsumerRecord))
            .verifyComplete();
    }

    @Test
    public void shouldGetFlux() {
        given(mockMutinyConsumer.toMulti()).willReturn(Multi.createFrom().item(mockMutinyConsumerRecord));

        StepVerifier.create(consumer.flux())
            .expectNext(new SnowdropConsumerRecord<>(mockMutinyConsumerRecord))
            .verifyComplete();
    }

    @Test
    public void shouldGetVertxReadStream() {
        given(mockMutinyConsumer.getDelegate()).willReturn(mockVertxConsumer);

        assertThat(consumer.vertxReadStream()).isEqualTo(mockVertxConsumer);
    }

    // Convenience helper: varargs to Set, used for stubbing Vert.x set-based APIs
    @SafeVarargs
    private final <T> Set<T> toSet(T... elements) {
        return new HashSet<>(asList(elements));
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropNodeTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropNodeTest.java | package dev.snowdrop.vertx.kafka;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit test for {@link SnowdropNode}: verifies all accessors delegate to the
 * wrapped Vert.x node.
 */
public class SnowdropNodeTest {

    @Test
    public void shouldCreate() {
        io.vertx.kafka.client.common.Node delegate =
                new io.vertx.kafka.client.common.Node(true, "test-host", 1, "1", true, 8080, "test-rack");

        Node node = new SnowdropNode(delegate);

        assertThat(node.getId()).isEqualTo(1);
        assertThat(node.getIdString()).isEqualTo("1");
        assertThat(node.getHost()).isEqualTo("test-host");
        assertThat(node.getPort()).isEqualTo(8080);
        assertThat(node.hasRack()).isTrue();
        assertThat(node.getRack()).isEqualTo("test-rack");
        assertThat(node.isEmpty()).isTrue();
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropRecordMetadataTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropRecordMetadataTest.java | package dev.snowdrop.vertx.kafka;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit test for {@link SnowdropRecordMetadata}: verifies all accessors delegate to the
 * wrapped Vert.x record metadata.
 */
public class SnowdropRecordMetadataTest {

    @Test
    public void shouldCreate() {
        // Constructor args presumably map to (offset, partition, timestamp, topic),
        // matching the assertions below — confirm against the Vert.x API if changed.
        io.vertx.kafka.client.producer.RecordMetadata
            vertxRecordMetadata = new io.vertx.kafka.client.producer.RecordMetadata(2, 3, 4, "test-topic");

        RecordMetadata snowdropRecordMetadata = new SnowdropRecordMetadata(vertxRecordMetadata);

        assertThat(snowdropRecordMetadata.topic()).isEqualTo("test-topic");
        assertThat(snowdropRecordMetadata.partition()).isEqualTo(3);
        assertThat(snowdropRecordMetadata.offset()).isEqualTo(2);
        assertThat(snowdropRecordMetadata.timestamp()).isEqualTo(4);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropPartitionInfoTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropPartitionInfoTest.java | package dev.snowdrop.vertx.kafka;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class SnowdropPartitionInfoTest {
@Test
public void shouldCreate() {
List<io.vertx.kafka.client.common.Node> vertxNodes = Arrays.asList(
new io.vertx.kafka.client.common.Node(true, "test-host", 1, "1", true, 8080, "test-rack"),
new io.vertx.kafka.client.common.Node(true, "test-host", 2, "2", true, 8080, "test-rack")
);
io.vertx.kafka.client.common.PartitionInfo vertxPartitionInfo =
new io.vertx.kafka.client.common.PartitionInfo(vertxNodes, vertxNodes.get(0), 1, vertxNodes, "test-topic");
List<Node> snowdropNodes =
Arrays.asList(new SnowdropNode(vertxNodes.get(0)), new SnowdropNode(vertxNodes.get(1)));
PartitionInfo snowdropPartitionInfo = new SnowdropPartitionInfo(vertxPartitionInfo);
assertThat(snowdropPartitionInfo.getTopic()).isEqualTo("test-topic");
assertThat(snowdropPartitionInfo.getPartition()).isEqualTo(1);
assertThat(snowdropPartitionInfo.getReplicas()).containsAll(snowdropNodes);
assertThat(snowdropPartitionInfo.getInSyncReplicas()).containsAll(snowdropNodes);
assertThat(snowdropPartitionInfo.getLeader()).isEqualTo(snowdropNodes.get(0));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropHeaderTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropHeaderTest.java | package dev.snowdrop.vertx.kafka;
import java.nio.charset.StandardCharsets;
import io.vertx.mutiny.kafka.client.producer.KafkaHeader;
import org.junit.jupiter.api.Test;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import static org.assertj.core.api.Assertions.assertThat;
public class SnowdropHeaderTest {
@Test
public void shouldCreateHeaderFromDataBuffer() {
DataBuffer value = new DefaultDataBufferFactory().wrap("value".getBytes(StandardCharsets.UTF_8));
SnowdropHeader header = new SnowdropHeader("key", value);
assertThat(header.key()).isEqualTo("key");
assertThat(header.value()).isEqualTo(value);
}
@Test
public void shouldCreateHeaderFromString() {
DataBuffer value = new DefaultDataBufferFactory().wrap("value".getBytes(StandardCharsets.UTF_8));
SnowdropHeader header = new SnowdropHeader("key", "value");
assertThat(header.key()).isEqualTo("key");
assertThat(header.value()).isEqualTo(value);
}
@Test
public void shouldCreateHeaderFromMutinyHeader() {
DataBuffer value = new DefaultDataBufferFactory().wrap("value".getBytes(StandardCharsets.UTF_8));
SnowdropHeader header = new SnowdropHeader(KafkaHeader.header("key", "value"));
assertThat(header.key()).isEqualTo("key");
assertThat(header.value()).isEqualTo(value);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropConsumerRecordTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/SnowdropConsumerRecordTest.java | package dev.snowdrop.vertx.kafka;
import java.util.Arrays;
import io.vertx.mutiny.kafka.client.producer.KafkaHeader;
import org.apache.kafka.common.record.TimestampType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
@ExtendWith(MockitoExtension.class)
public class SnowdropConsumerRecordTest {
@Mock
private org.apache.kafka.clients.consumer.ConsumerRecord mockKafkaConsumerRecord;
@Mock
private io.vertx.mutiny.kafka.client.consumer.KafkaConsumerRecord<Integer, String> mockMutinyConsumerRecord;
private ConsumerRecord<Integer, String> record;
@BeforeEach
public void setUp() {
given(mockMutinyConsumerRecord.topic()).willReturn("test-topic");
given(mockMutinyConsumerRecord.partition()).willReturn(1);
given(mockMutinyConsumerRecord.offset()).willReturn(2L);
given(mockMutinyConsumerRecord.timestamp()).willReturn(3L);
given(mockMutinyConsumerRecord.timestampType()).willReturn(TimestampType.CREATE_TIME);
given(mockMutinyConsumerRecord.key()).willReturn(4);
given(mockMutinyConsumerRecord.value()).willReturn("test-value");
given(mockMutinyConsumerRecord.headers()).willReturn(Arrays.asList(
KafkaHeader.header("h1", "v1"),
KafkaHeader.header("h2", "v2")
));
record = new SnowdropConsumerRecord<>(mockMutinyConsumerRecord);
}
@Test
public void shouldGetTopic() {
assertThat(record.topic()).isEqualTo("test-topic");
}
@Test
public void shouldGetPartition() {
assertThat(record.partition()).isEqualTo(1);
}
@Test
public void shouldGetOffset() {
assertThat(record.offset()).isEqualTo(2L);
}
@Test
public void shouldGetTimestamp() {
assertThat(record.timestamp()).isEqualTo(3L);
}
@Test
public void shouldGetTimestampType() {
assertThat(record.timestampType())
.isEqualTo(new SnowdropTimestampType(org.apache.kafka.common.record.TimestampType.CREATE_TIME));
}
@Test
public void shouldGetKey() {
assertThat(record.key()).isEqualTo(4);
}
@Test
public void shouldGetValue() {
assertThat(record.value()).isEqualTo("test-value");
}
@Test
public void shouldGetHeaders() {
assertThat(record.headers()).containsOnly(
Header.create("h1", "v1"),
Header.create("h2", "v2")
);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/ProducerRecordTest.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/ProducerRecordTest.java | package dev.snowdrop.vertx.kafka;
import java.util.Collections;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class ProducerRecordTest {
@Test
public void shouldCreateRecordWithDefaultValues() {
ProducerRecord<Integer, String> record = ProducerRecord
.<Integer, String>builder("test-topic", "test-value")
.build();
assertThat(record.topic()).isEqualTo("test-topic");
assertThat(record.value()).isEqualTo("test-value");
assertThat(record.key()).isNull();
assertThat(record.timestamp()).isNull();
assertThat(record.partition()).isNull();
assertThat(record.headers()).isEmpty();
}
@Test
public void shouldCreateRecordWithProvidedValues() {
ProducerRecord<Integer, String> record = ProducerRecord
.builder("test-topic", "test-value", 1)
.withTimestamp(2)
.withPartition(3)
.withHeader(Header.create("key1", "value1"))
.withHeaders(Collections.singletonList(Header.create("key2", "value2")))
.build();
assertThat(record.topic()).isEqualTo("test-topic");
assertThat(record.value()).isEqualTo("test-value");
assertThat(record.key()).isEqualTo(1);
assertThat(record.timestamp()).isEqualTo(2);
assertThat(record.partition()).isEqualTo(3);
assertThat(record.headers())
.containsOnly(Header.create("key1", "value1"), Header.create("key2", "value2"));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/VertxSerializationIT.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/VertxSerializationIT.java | package dev.snowdrop.vertx.kafka.it;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicReference;
import dev.snowdrop.vertx.kafka.KafkaConsumer;
import dev.snowdrop.vertx.kafka.KafkaConsumerFactory;
import dev.snowdrop.vertx.kafka.KafkaProducer;
import dev.snowdrop.vertx.kafka.KafkaProducerFactory;
import dev.snowdrop.vertx.kafka.KafkaProperties;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.kafka.client.serialization.BufferDeserializer;
import io.vertx.kafka.client.serialization.BufferSerializer;
import io.vertx.kafka.client.serialization.JsonArrayDeserializer;
import io.vertx.kafka.client.serialization.JsonArraySerializer;
import io.vertx.kafka.client.serialization.JsonObjectDeserializer;
import io.vertx.kafka.client.serialization.JsonObjectSerializer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@SpringBootTest(properties = {
"vertx.kafka.producer.key.serializer=org.apache.kafka.common.serialization.StringSerializer",
"vertx.kafka.consumer.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer",
"vertx.kafka.consumer.group.id=test"
})
@EmbeddedKafka(partitions = 1)
public class VertxSerializationIT extends AbstractIT {
private static final String VALUE_SERIALIZER = "value.serializer";
private static final String VALUE_DESERIALIZER = "value.deserializer";
private static final String JSON_OBJECT_SERIALIZER = JsonObjectSerializer.class.getName();
private static final String JSON_OBJECT_DESERIALIZER = JsonObjectDeserializer.class.getName();
private static final String JSON_ARRAY_SERIALIZER = JsonArraySerializer.class.getName();
private static final String JSON_ARRAY_DESERIALIZER = JsonArrayDeserializer.class.getName();
private static final String BUFFER_SERIALIZER = BufferSerializer.class.getName();
private static final String BUFFER_DESERIALIZER = BufferDeserializer.class.getName();
@Autowired
private EmbeddedKafkaBroker broker;
@Autowired
private KafkaProperties properties;
@Autowired
private KafkaProducerFactory producerFactory;
@Autowired
private KafkaConsumerFactory consumerFactory;
@BeforeEach
public void setUp() {
super.setUp(producerFactory, consumerFactory, properties, broker);
}
@AfterEach
public void tearDown() {
super.tearDown();
}
@Test
public void shouldSendAndReceiveJsonObject() throws InterruptedException {
String topic = "json-object-topic";
KafkaConsumer<String, JsonObject> consumer =
createConsumer(singletonMap(VALUE_DESERIALIZER, JSON_OBJECT_DESERIALIZER));
AtomicReference<JsonObject> result = new AtomicReference<>();
subscribe(consumer, topic, r -> result.set(r.value()));
waitForAssignmentPropagation();
KafkaProducer<String, JsonObject> producer =
createProducer(singletonMap(VALUE_SERIALIZER, JSON_OBJECT_SERIALIZER));
sendToTopic(producer, topic, JsonObject.mapFrom(singletonMap("k1", "v1")));
await()
.atMost(Duration.ofSeconds(5))
.untilAtomic(result, is(notNullValue()));
assertThat(result.get())
.isEqualTo(JsonObject.mapFrom(singletonMap("k1", "v1")));
}
@Test
public void shouldSendAndReceiveJsonArray() throws InterruptedException {
String topic = "json-array-topic";
KafkaConsumer<String, JsonArray> consumer =
createConsumer(singletonMap(VALUE_DESERIALIZER, JSON_ARRAY_DESERIALIZER));
AtomicReference<JsonArray> result = new AtomicReference<>();
subscribe(consumer, topic, r -> result.set(r.value()));
waitForAssignmentPropagation();
KafkaProducer<String, JsonArray> producer =
createProducer(singletonMap(VALUE_SERIALIZER, JSON_ARRAY_SERIALIZER));
sendToTopic(producer, topic, new JsonArray(singletonList("v1")));
await()
.atMost(Duration.ofSeconds(5))
.untilAtomic(result, is(notNullValue()));
assertThat(result.get())
.isEqualTo(new JsonArray(singletonList("v1")));
}
@Test
public void shouldSendAndReceiveBuffer() throws InterruptedException {
String topic = "json-buffer-topic";
KafkaConsumer<String, Buffer> consumer = createConsumer(singletonMap(VALUE_DESERIALIZER, BUFFER_DESERIALIZER));
AtomicReference<Buffer> result = new AtomicReference<>();
subscribe(consumer, topic, r -> result.set(r.value()));
waitForAssignmentPropagation();
KafkaProducer<String, Buffer> producer = createProducer(singletonMap(VALUE_SERIALIZER, BUFFER_SERIALIZER));
sendToTopic(producer, topic, Buffer.buffer("v1"));
await()
.atMost(Duration.ofSeconds(5))
.untilAtomic(result, is(notNullValue()));
assertThat(result.get())
.isEqualTo(Buffer.buffer("v1"));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/TestApplication.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/TestApplication.java | package dev.snowdrop.vertx.kafka.it;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class TestApplication {
public static void main(String[] args) {
SpringApplication.run(TestApplication.class, args);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/ProducerIT.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/ProducerIT.java | package dev.snowdrop.vertx.kafka.it;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicBoolean;
import dev.snowdrop.vertx.kafka.KafkaProducer;
import dev.snowdrop.vertx.kafka.KafkaProducerFactory;
import dev.snowdrop.vertx.kafka.KafkaProperties;
import dev.snowdrop.vertx.kafka.ProducerRecord;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import reactor.test.StepVerifier;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
@SpringBootTest(properties = {
"vertx.kafka.producer.key.serializer=org.apache.kafka.common.serialization.StringSerializer",
"vertx.kafka.producer.value.serializer=org.apache.kafka.common.serialization.StringSerializer"
})
@EmbeddedKafka(topics = "test", partitions = 1)
public class ProducerIT extends AbstractIT {
@Autowired
private EmbeddedKafkaBroker broker;
@Autowired
private KafkaProperties properties;
@Autowired
private KafkaProducerFactory producerFactory;
@BeforeEach
public void setUp() {
super.setUp(producerFactory, null, properties, broker);
}
@AfterEach
public void tearDown() {
super.tearDown();
}
@Test
public void shouldSend() {
ProducerRecord<String, String> record = ProducerRecord
.<String, String>builder("test", "test-value")
.build();
KafkaProducer<String, String> producer = createProducer();
StepVerifier.create(producer.send(record))
.assertNext(metadata -> {
assertThat(metadata.topic()).isEqualTo("test");
assertThat(metadata.partition()).isEqualTo(0);
})
.verifyComplete();
}
@Test
public void shouldWrite() {
ProducerRecord<String, String> record = ProducerRecord
.<String, String>builder("test", "test-value")
.build();
KafkaProducer<String, String> producer = createProducer();
StepVerifier.create(producer.write(record))
.verifyComplete();
}
@Test
public void shouldGetPartitionInfo() {
KafkaProducer<String, String> producer = createProducer();
StepVerifier.create(producer.partitionsFor("test"))
.assertNext(partitionInfo -> {
assertThat(partitionInfo.getTopic()).isEqualTo("test");
assertThat(partitionInfo.getPartition()).isEqualTo(0);
assertThat(partitionInfo.getLeader()).isNotNull();
assertThat(partitionInfo.getReplicas()).isNotEmpty();
assertThat(partitionInfo.getInSyncReplicas()).isNotEmpty();
})
.verifyComplete();
}
@Test
public void shouldCloseAndHandleException() {
ProducerRecord<String, String> record = ProducerRecord
.<String, String>builder("test", "test-value")
.build();
AtomicBoolean wasExceptionHandled = new AtomicBoolean(false);
KafkaProducer<String, String> producer = createProducer();
producer.exceptionHandler(t -> wasExceptionHandled.set(true));
StepVerifier.create(producer.close())
.verifyComplete();
StepVerifier.create(producer.write(record))
.verifyErrorMessage("Cannot perform operation after producer has been closed");
await()
.atMost(Duration.ofSeconds(2))
.untilTrue(wasExceptionHandled);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/SinglePartitionE2EIT.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/SinglePartitionE2EIT.java | package dev.snowdrop.vertx.kafka.it;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import dev.snowdrop.vertx.kafka.ConsumerRecord;
import dev.snowdrop.vertx.kafka.KafkaConsumer;
import dev.snowdrop.vertx.kafka.KafkaConsumerFactory;
import dev.snowdrop.vertx.kafka.KafkaProducer;
import dev.snowdrop.vertx.kafka.KafkaProducerFactory;
import dev.snowdrop.vertx.kafka.KafkaProperties;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
@SpringBootTest(properties = {
"vertx.kafka.producer.key.serializer=org.apache.kafka.common.serialization.StringSerializer",
"vertx.kafka.producer.value.serializer=org.apache.kafka.common.serialization.StringSerializer",
"vertx.kafka.consumer.key.deserializer=org.apache.kafka.common.serialization.StringDeserializer",
"vertx.kafka.consumer.value.deserializer=org.apache.kafka.common.serialization.StringDeserializer"
})
@EmbeddedKafka(partitions = 1)
public class SinglePartitionE2EIT extends AbstractIT {
private static final String GROUP_ID = "group.id";
@Autowired
private EmbeddedKafkaBroker broker;
@Autowired
private KafkaProperties properties;
@Autowired
private KafkaProducerFactory producerFactory;
@Autowired
private KafkaConsumerFactory consumerFactory;
@BeforeEach
public void setUp() {
super.setUp(producerFactory, consumerFactory, properties, broker);
}
@AfterEach
public void tearDown() {
super.tearDown();
}
@Test
public void shouldSendAndReceiveWithSingleConsumer() throws InterruptedException {
String topic = "single-consumer";
KafkaConsumer<String, String> consumer = createConsumer(singletonMap(GROUP_ID, "single-consumer-main"));
List<ConsumerRecord<String, String>> records = new CopyOnWriteArrayList<>();
subscribe(consumer, topic, records::add);
waitForAssignmentPropagation();
KafkaProducer<String, String> producer = createProducer();
sendToTopic(producer, topic, "k1", "v1");
sendToTopic(producer, topic, "k2", "v2");
await()
.atMost(Duration.ofSeconds(5))
.untilAsserted(() -> assertThat(records).hasSize(2));
assertConsumerRecord(records.get(0), topic, "k1", "v1", 0);
assertConsumerRecord(records.get(1), topic, "k2", "v2", 1);
}
@Test
public void shouldSendAndReceiveWithTwoConsumerGroups() throws InterruptedException {
String topic = "two-groups";
KafkaConsumer<String, String> firstConsumer = createConsumer(singletonMap(GROUP_ID, "two-groups-main"));
KafkaConsumer<String, String> secondConsumer = createConsumer(singletonMap(GROUP_ID, "two-groups-alternative"));
List<ConsumerRecord<String, String>> firstConsumerRecords = new CopyOnWriteArrayList<>();
List<ConsumerRecord<String, String>> secondConsumerRecords = new CopyOnWriteArrayList<>();
subscribe(firstConsumer, topic, firstConsumerRecords::add);
subscribe(secondConsumer, topic, secondConsumerRecords::add);
waitForAssignmentPropagation();
KafkaProducer<String, String> producer = createProducer();
sendToTopic(producer, topic, "k1", "v1");
sendToTopic(producer, topic, "k2", "v2");
await()
.atMost(Duration.ofSeconds(5))
.untilAsserted(() -> {
assertThat(firstConsumerRecords).hasSize(2);
assertThat(secondConsumerRecords).hasSize(2);
});
assertThat(firstConsumerRecords).containsAll(secondConsumerRecords);
assertConsumerRecord(firstConsumerRecords.get(0), topic, "k1", "v1", 0);
assertConsumerRecord(firstConsumerRecords.get(1), topic, "k2", "v2", 1);
}
private void assertConsumerRecord(ConsumerRecord<String, String> record, String topic, String key,
String value, int offset) {
assertThat(record.topic()).isEqualTo(topic);
assertThat(record.partition()).isEqualTo(0);
assertThat(record.key()).isEqualTo(key);
assertThat(record.value()).isEqualTo(value);
assertThat(record.offset()).isEqualTo(offset);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/AbstractIT.java | vertx-spring-boot-starter-kafka/src/test/java/dev/snowdrop/vertx/kafka/it/AbstractIT.java | package dev.snowdrop.vertx.kafka.it;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import dev.snowdrop.vertx.kafka.ConsumerRecord;
import dev.snowdrop.vertx.kafka.KafkaConsumer;
import dev.snowdrop.vertx.kafka.KafkaConsumerFactory;
import dev.snowdrop.vertx.kafka.KafkaProducer;
import dev.snowdrop.vertx.kafka.KafkaProducerFactory;
import dev.snowdrop.vertx.kafka.KafkaProperties;
import dev.snowdrop.vertx.kafka.ProducerRecord;
import io.vertx.core.Vertx;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import reactor.core.publisher.Mono;
import static java.util.Collections.emptyMap;
public abstract class AbstractIT {
private KafkaProducerFactory producerFactory;
private KafkaConsumerFactory consumerFactory;
private final List<KafkaProducer<?, ?>> producersToCleanup = new LinkedList<>();
private final List<KafkaConsumer<?, ?>> consumersToCleanup = new LinkedList<>();
protected void setUp(KafkaProducerFactory producerFactory, KafkaConsumerFactory consumerFactory,
KafkaProperties properties, EmbeddedKafkaBroker broker) {
this.producerFactory = producerFactory;
this.consumerFactory = consumerFactory;
properties.setConsumer(addBootstrapServersToConfig(properties.getConsumer(), broker.getBrokersAsString()));
properties.setProducer(addBootstrapServersToConfig(properties.getProducer(), broker.getBrokersAsString()));
}
protected void tearDown() {
Vertx.vertx().executeBlocking(future -> {
producersToCleanup.stream()
.map(KafkaProducer::close)
.forEach(Mono::block);
consumersToCleanup.stream()
.map(KafkaConsumer::close)
.forEach(Mono::block);
future.complete();
});
}
protected <K, V> KafkaProducer<K, V> createProducer() {
return createProducer(emptyMap());
}
protected <K, V> KafkaProducer<K, V> createProducer(Map<String, String> config) {
KafkaProducer<K, V> producer = producerFactory.create(config);
// Preserve the producer for cleanup after a test
producersToCleanup.add(producer);
return producer;
}
protected <K, V> KafkaConsumer<K, V> createConsumer(Map<String, String> config) {
KafkaConsumer<K, V> consumer = consumerFactory.create(config);
// Preserve the consumer for cleanup after a test
consumersToCleanup.add(consumer);
return consumer;
}
protected <K, V> void subscribe(KafkaConsumer<K, V> consumer, String topic, Consumer<ConsumerRecord<K, V>> handler) {
consumer.flux()
.log(consumer + " receiving")
.subscribe(handler);
consumer
.subscribe(topic)
.block();
}
protected void waitForAssignmentPropagation() throws InterruptedException {
// Give Kafka some time to execute partition assignment
Thread.sleep(2000);
}
protected <K, V> void sendToTopic(KafkaProducer<K, V> producer, String topic, V value) {
producer
.send(ProducerRecord.<K, V>builder(topic, value).build())
.block();
}
protected <K, V> void sendToTopic(KafkaProducer<K, V> producer, String topic, K key, V value) {
producer
.send(ProducerRecord.builder(topic, value, key).build())
.block();
}
private Map<String, String> addBootstrapServersToConfig(Map<String, String> config, String bootstrapServers) {
// Workaround for Spring Kafka 2.2.11. In 2.3.x property can be injected automatically
config.put("bootstrap.servers", bootstrapServers);
return config;
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropKafkaConsumerFactory.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropKafkaConsumerFactory.java | package dev.snowdrop.vertx.kafka;
import java.util.Map;
import io.vertx.mutiny.core.Vertx;
final class SnowdropKafkaConsumerFactory implements KafkaConsumerFactory {
private final Vertx vertx;
private final KafkaProperties properties;
SnowdropKafkaConsumerFactory(Vertx vertx, KafkaProperties properties) {
this.vertx = vertx;
this.properties = properties;
}
@Override
public <K, V> KafkaConsumer<K, V> create(Map<String, String> config) {
Map<String, String> consumerConfig = properties.getConsumer();
consumerConfig.putAll(config);
return new SnowdropKafkaConsumer<>(
io.vertx.mutiny.kafka.client.consumer.KafkaConsumer.create(vertx, consumerConfig));
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaConsumer.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaConsumer.java | package dev.snowdrop.vertx.kafka;
import java.util.Collection;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import dev.snowdrop.vertx.streams.ReadStream;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
public interface KafkaConsumer<K, V> extends ReadStream<ConsumerRecord<K, V>> {
Mono<Void> subscribe(String topic);
Mono<Void> subscribe(Collection<String> topics);
Mono<Void> assign(Partition partition);
Mono<Void> assign(Collection<Partition> partitions);
Mono<Void> unsubscribe();
Flux<String> subscriptions();
Flux<Partition> assignments();
Flux<PartitionInfo> partitionsFor(String topic);
void partitionsRevokedHandler(Consumer<Set<Partition>> handler);
void partitionsAssignedHandler(Consumer<Set<Partition>> handler);
Mono<Void> seek(Partition partition, long offset);
Mono<Void> seekToBeginning(Partition partition);
Mono<Void> seekToBeginning(Collection<Partition> partitions);
Mono<Void> seekToEnd(Partition partition);
Mono<Void> seekToEnd(Collection<Partition> partitions);
Mono<Long> position(Partition partition);
Mono<Long> committed(Partition partition);
Mono<Long> beginningOffset(Partition partition);
Mono<Long> endOffset(Partition partition);
Mono<Long> timeOffset(Partition partition, long timestamp);
Mono<Void> commit();
Mono<Void> close();
<T> Mono<T> doOnVertxConsumer(Function<io.vertx.kafka.client.consumer.KafkaConsumer<K, V>, T> function);
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Node.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Node.java | package dev.snowdrop.vertx.kafka;
public interface Node {
int getId();
String getIdString();
String getHost();
int getPort();
boolean hasRack();
String getRack();
boolean isEmpty();
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaAutoConfiguration.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaAutoConfiguration.java | package dev.snowdrop.vertx.kafka;
import io.vertx.core.Vertx;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableConfigurationProperties(KafkaProperties.class)
@ConditionalOnBean(Vertx.class)
@ConditionalOnProperty(prefix = KafkaProperties.PROPERTIES_PREFIX, value = "enabled", matchIfMissing = true)
public class KafkaAutoConfiguration {
@Bean
public KafkaProducerFactory kafkaProducerFactory(KafkaProperties properties, Vertx vertx) {
return new SnowdropKafkaProducerFactory(io.vertx.mutiny.core.Vertx.newInstance(vertx), properties);
}
@Bean
public KafkaConsumerFactory kafkaConsumerFactory(KafkaProperties properties, Vertx vertx) {
return new SnowdropKafkaConsumerFactory(io.vertx.mutiny.core.Vertx.newInstance(vertx), properties);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropPartition.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropPartition.java | package dev.snowdrop.vertx.kafka;
import java.util.Objects;
import io.vertx.kafka.client.common.TopicPartition;
import org.springframework.util.StringUtils;
/**
 * Immutable {@link Partition} implementation holding a topic name and a
 * partition number, validated on construction.
 */
final class SnowdropPartition implements Partition {

    private final String topic;

    private final int partition;

    /**
     * @param topic non-empty topic name
     * @param partition zero-based partition number, must not be negative
     * @throws IllegalArgumentException if the topic is null/empty or the partition is negative
     */
    SnowdropPartition(String topic, int partition) {
        // Plain null/empty check instead of StringUtils.isEmpty(Object),
        // which is deprecated since Spring Framework 5.3.
        if (topic == null || topic.isEmpty()) {
            throw new IllegalArgumentException("Topic cannot be empty");
        }
        if (partition < 0) {
            throw new IllegalArgumentException("Partition cannot be negative");
        }
        this.topic = topic;
        this.partition = partition;
    }

    /**
     * Adapts a Vert.x {@link TopicPartition}, applying the same validation.
     */
    SnowdropPartition(TopicPartition topicPartition) {
        this(topicPartition.getTopic(), topicPartition.getPartition());
    }

    @Override
    public String topic() {
        return topic;
    }

    @Override
    public int partition() {
        return partition;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SnowdropPartition that = (SnowdropPartition) o;
        return partition == that.partition &&
            Objects.equals(topic, that.topic);
    }

    @Override
    public int hashCode() {
        return Objects.hash(topic, partition);
    }

    @Override
    public String toString() {
        return String.format("%s{topic='%s', partition=%d}", SnowdropPartition.class.getSimpleName(), topic,
            partition);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaProperties.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/KafkaProperties.java | package dev.snowdrop.vertx.kafka;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Configuration properties for the Vert.x Kafka starter, bound from the
 * {@code vertx.kafka} prefix.
 * <p>
 * Every map entering or leaving this class is defensively copied, so callers
 * can never mutate internal state through a shared reference.
 */
@ConfigurationProperties(prefix = KafkaProperties.PROPERTIES_PREFIX)
public final class KafkaProperties {

    static final String PROPERTIES_PREFIX = "vertx.kafka";

    /**
     * Enable Kafka starter.
     */
    private boolean enabled = true;

    /**
     * Consumer specific properties.
     *
     * @see org.apache.kafka.clients.consumer.ConsumerConfig
     */
    private Map<String, String> consumer = new HashMap<>();

    /**
     * Producer specific properties.
     *
     * @see org.apache.kafka.clients.producer.ProducerConfig
     */
    private Map<String, String> producer = new HashMap<>();

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public Map<String, String> getConsumer() {
        return copy(consumer);
    }

    public void setConsumer(Map<String, String> consumer) {
        this.consumer = copy(consumer);
    }

    public Map<String, String> getProducer() {
        return copy(producer);
    }

    public void setProducer(Map<String, String> producer) {
        this.producer = copy(producer);
    }

    // Defensive mutable copy; keeps getters/setters symmetric.
    private static Map<String, String> copy(Map<String, String> source) {
        return new HashMap<>(source);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropProducerRecord.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropProducerRecord.java | package dev.snowdrop.vertx.kafka;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * Immutable {@link ProducerRecord} value object.
 * <p>
 * The header list is wrapped in an unmodifiable view at construction time.
 */
final class SnowdropProducerRecord<K, V> implements ProducerRecord<K, V> {

    private final K key;
    private final V value;
    private final String topic;
    private final Integer partition;
    private final Long timestamp;
    private final List<Header> headers;

    SnowdropProducerRecord(K key, V value, String topic, Integer partition, Long timestamp, List<Header> headers) {
        this.key = key;
        this.value = value;
        this.topic = topic;
        this.partition = partition;
        this.timestamp = timestamp;
        this.headers = Collections.unmodifiableList(headers);
    }

    @Override
    public K key() {
        return key;
    }

    @Override
    public V value() {
        return value;
    }

    @Override
    public String topic() {
        return topic;
    }

    @Override
    public Integer partition() {
        return partition;
    }

    @Override
    public Long timestamp() {
        return timestamp;
    }

    @Override
    public List<Header> headers() {
        return headers;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        // The class is final, so an instanceof check is equivalent to the
        // getClass() comparison and also covers the null case.
        if (!(o instanceof SnowdropProducerRecord)) {
            return false;
        }
        SnowdropProducerRecord<?, ?> other = (SnowdropProducerRecord<?, ?>) o;
        return Objects.equals(topic, other.topic)
            && Objects.equals(partition, other.partition)
            && Objects.equals(timestamp, other.timestamp)
            && Objects.equals(key, other.key)
            && Objects.equals(value, other.value)
            && Objects.equals(headers, other.headers);
    }

    @Override
    public int hashCode() {
        return Objects.hash(key, value, topic, partition, timestamp, headers);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Header.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Header.java | package dev.snowdrop.vertx.kafka;
import org.springframework.core.io.buffer.DataBuffer;
/**
 * A single Kafka record header: a string key with a binary value exposed as a
 * Spring {@link DataBuffer}.
 */
public interface Header {

    /** Header key. */
    String key();

    /** Header value as a data buffer. */
    DataBuffer value();

    /** Creates a header whose value is the given string. */
    static Header create(String key, String value) {
        return new SnowdropHeader(key, value);
    }

    /** Creates a header whose value is the given binary buffer. */
    static Header create(String key, DataBuffer value) {
        return new SnowdropHeader(key, value);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropKafkaProducer.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropKafkaProducer.java | package dev.snowdrop.vertx.kafka;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import io.smallrye.mutiny.converters.uni.UniReactorConverters;
import io.vertx.core.streams.WriteStream;
import io.vertx.mutiny.core.buffer.Buffer;
import io.vertx.mutiny.kafka.client.producer.KafkaHeader;
import io.vertx.mutiny.kafka.client.producer.KafkaProducerRecord;
import org.springframework.util.StringUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
 * Reactor-facing {@link KafkaProducer} backed by the Mutiny Vert.x Kafka
 * producer.
 * <p>
 * Asynchronous results are converted from Mutiny {@code Uni} to Reactor
 * {@code Mono}/{@code Flux}; the underlying Vert.x producer stays reachable
 * through {@link #vertxWriteStream()} and {@link #doOnVertxProducer(Function)}.
 */
final class SnowdropKafkaProducer<K, V> implements KafkaProducer<K, V> {

    private final io.vertx.mutiny.kafka.client.producer.KafkaProducer<K, V> delegate;

    SnowdropKafkaProducer(io.vertx.mutiny.kafka.client.producer.KafkaProducer<K, V> delegate) {
        this.delegate = delegate;
    }

    /**
     * Sends a record, completing with the broker-assigned metadata.
     *
     * @throws NullPointerException if the record is null
     */
    @Override
    public Mono<RecordMetadata> send(ProducerRecord<K, V> record) {
        Objects.requireNonNull(record, "Record cannot be null");
        return delegate.send(toMutinyProducerRecord(record))
            .convert()
            .with(UniReactorConverters.toMono())
            .map(SnowdropRecordMetadata::new);
    }

    /**
     * Emits partition metadata for the given topic.
     *
     * @throws IllegalArgumentException if the topic is null or empty
     */
    @Override
    public Flux<PartitionInfo> partitionsFor(String topic) {
        // Plain null/empty check instead of StringUtils.isEmpty(Object),
        // which is deprecated since Spring Framework 5.3.
        if (topic == null || topic.isEmpty()) {
            throw new IllegalArgumentException("Topic cannot be empty");
        }
        return delegate.partitionsFor(topic)
            .convert()
            .with(UniReactorConverters.toMono())
            .flatMapMany(Flux::fromIterable)
            .map(SnowdropPartitionInfo::new);
    }

    /** Flushes any buffered records to the broker. */
    @Override
    public Mono<Void> flush() {
        // Converted via UniReactorConverters like every other method here for
        // consistency (previously went through Uni.toMulti() and Mono.from()).
        return delegate.flush().convert().with(UniReactorConverters.toMono());
    }

    /** Closes the producer. */
    @Override
    public Mono<Void> close() {
        return delegate.close().convert().with(UniReactorConverters.toMono());
    }

    /** Closes the producer, waiting at most {@code timeout} for completion. */
    @Override
    public Mono<Void> close(long timeout) {
        return delegate.close(timeout).convert().with(UniReactorConverters.toMono());
    }

    /**
     * Runs the given function against the raw Vert.x producer, surfacing its
     * result (or any thrown error) as a {@code Mono}.
     */
    @Override
    @SuppressWarnings("unchecked") // Mutiny API returns KafkaProducer without generics
    public <T> Mono<T> doOnVertxProducer(Function<io.vertx.kafka.client.producer.KafkaProducer<K, V>, T> function) {
        Objects.requireNonNull(function, "Function cannot be null");
        return Mono.create(sink -> {
            try {
                T result = function.apply((io.vertx.kafka.client.producer.KafkaProducer<K, V>) delegate.getDelegate());
                sink.success(result);
            } catch (Throwable t) {
                sink.error(t);
            }
        });
    }

    @Override
    public KafkaProducer<K, V> exceptionHandler(Consumer<Throwable> handler) {
        delegate.exceptionHandler(handler);
        return this;
    }

    @Override
    public KafkaProducer<K, V> drainHandler(Consumer<Void> handler) {
        // Vert.x drain handler is a Runnable; adapt to the Consumer<Void> API.
        delegate.drainHandler(() -> handler.accept(null));
        return this;
    }

    @Override
    public KafkaProducer<K, V> setWriteQueueMaxSize(int maxSize) {
        delegate.setWriteQueueMaxSize(maxSize);
        return this;
    }

    @Override
    public boolean writeQueueFull() {
        return delegate.writeQueueFull();
    }

    /**
     * Writes a record without waiting for metadata.
     *
     * @throws NullPointerException if the record is null
     */
    @Override
    public Mono<Void> write(ProducerRecord<K, V> record) {
        Objects.requireNonNull(record, "Record cannot be null");
        return delegate.write(toMutinyProducerRecord(record)).convert().with(UniReactorConverters.toMono());
    }

    /** Ends the stream. */
    @Override
    public Mono<Void> end() {
        return delegate.end().convert().with(UniReactorConverters.toMono());
    }

    /** Exposes the underlying Vert.x write stream. */
    @Override
    public WriteStream vertxWriteStream() {
        return delegate.getDelegate();
    }

    // Converts our ProducerRecord abstraction to the Mutiny record type,
    // carrying headers across.
    private KafkaProducerRecord<K, V> toMutinyProducerRecord(ProducerRecord<K, V> record) {
        List<KafkaHeader> mutinyHeaders = record
            .headers()
            .stream()
            .map(this::toMutinyHeader)
            .collect(Collectors.toList());
        return KafkaProducerRecord
            .create(record.topic(), record.key(), record.value(), record.timestamp(), record.partition())
            .addHeaders(mutinyHeaders);
    }

    // Copies a header value's bytes into a Vert.x buffer.
    private KafkaHeader toMutinyHeader(Header header) {
        return KafkaHeader.header(header.key(), Buffer.buffer(header.value().asByteBuffer().array()));
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropRecordMetadata.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropRecordMetadata.java | package dev.snowdrop.vertx.kafka;
import java.util.Objects;
final class SnowdropRecordMetadata implements RecordMetadata {
private final String topic;
private final long partition;
private final long offset;
private final long timestamp;
SnowdropRecordMetadata(io.vertx.kafka.client.producer.RecordMetadata vertxRecordMetadata) {
this.topic = vertxRecordMetadata.getTopic();
this.partition = vertxRecordMetadata.getPartition();
this.offset = vertxRecordMetadata.getOffset();
this.timestamp = vertxRecordMetadata.getTimestamp();
}
public String topic() {
return topic;
}
public long partition() {
return partition;
}
public long offset() {
return offset;
}
public long timestamp() {
return timestamp;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SnowdropRecordMetadata that = (SnowdropRecordMetadata) o;
return partition == that.partition &&
offset == that.offset &&
timestamp == that.timestamp &&
Objects.equals(topic, that.topic);
}
@Override
public int hashCode() {
return Objects.hash(topic, partition, offset, timestamp);
}
@Override
public String toString() {
return String.format("%s{topic='%s', partition=%d, offset=%d, timestamp=%d}",
getClass().getSimpleName(), topic, partition, offset, timestamp);
}
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Partition.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/Partition.java | package dev.snowdrop.vertx.kafka;
/**
 * Identifies a single Kafka partition by topic name and partition number.
 */
public interface Partition {

    /** Topic name. */
    String topic();

    /** Zero-based partition number. */
    int partition();

    /** Creates a partition descriptor; argument validation is done by the implementation. */
    static Partition create(String topic, int partition) {
        return new SnowdropPartition(topic, partition);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
snowdrop/vertx-spring-boot | https://github.com/snowdrop/vertx-spring-boot/blob/f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff/vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropConsumerRecord.java | vertx-spring-boot-starter-kafka/src/main/java/dev/snowdrop/vertx/kafka/SnowdropConsumerRecord.java | package dev.snowdrop.vertx.kafka;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import io.vertx.mutiny.kafka.client.consumer.KafkaConsumerRecord;
/**
 * Immutable {@link ConsumerRecord} snapshot of a Vert.x Mutiny
 * {@link KafkaConsumerRecord}.
 * <p>
 * All values are copied out of the delegate at construction time, so the
 * record stays usable independently of the underlying Vert.x object.
 */
final class SnowdropConsumerRecord<K, V> implements ConsumerRecord<K, V> {

    private final String topic;
    private final int partition;
    private final long offset;
    private final long timestamp;
    private final TimestampType timestampType;
    private final K key;
    private final V value;
    private final List<Header> headers;

    SnowdropConsumerRecord(KafkaConsumerRecord<K, V> delegate) {
        this.topic = delegate.topic();
        this.partition = delegate.partition();
        this.offset = delegate.offset();
        this.timestamp = delegate.timestamp();
        // Delegate may report no timestamp type; keep null in that case.
        this.timestampType =
            delegate.timestampType() == null ? null : new SnowdropTimestampType(delegate.timestampType());
        this.key = delegate.key();
        this.value = delegate.value();
        // ArrayList instead of LinkedList: identical List semantics (including
        // equals), lower per-element overhead and random access.
        this.headers =
            delegate.headers() == null ? new ArrayList<>() : delegate
                .headers()
                .stream()
                .map(SnowdropHeader::new)
                .collect(Collectors.toList());
    }

    @Override
    public String topic() {
        return topic;
    }

    @Override
    public int partition() {
        return partition;
    }

    @Override
    public long offset() {
        return offset;
    }

    @Override
    public long timestamp() {
        return timestamp;
    }

    @Override
    public TimestampType timestampType() {
        return timestampType;
    }

    @Override
    public K key() {
        return key;
    }

    @Override
    public V value() {
        return value;
    }

    @Override
    public List<Header> headers() {
        // Defensive copy so callers cannot mutate this record's state.
        return new ArrayList<>(headers);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        SnowdropConsumerRecord<?, ?> that = (SnowdropConsumerRecord<?, ?>) o;
        return partition == that.partition &&
            offset == that.offset &&
            timestamp == that.timestamp &&
            Objects.equals(topic, that.topic) &&
            Objects.equals(timestampType, that.timestampType) &&
            Objects.equals(key, that.key) &&
            Objects.equals(value, that.value) &&
            Objects.equals(headers, that.headers);
    }

    @Override
    public int hashCode() {
        return Objects.hash(topic, partition, offset, timestamp, timestampType, key, value, headers);
    }

    @Override
    public String toString() {
        return String.format(
            "%s{topic='%s', partition=%d, offset=%d, timestamp=%d, timestampType='%s', key='%s', value='%s', headers='%s'}",
            getClass().getSimpleName(), topic, partition, offset, timestamp, timestampType, key, value, headers);
    }
}
| java | Apache-2.0 | f6c0be4d6e09d03044f8ab7e9a07d00148fa96ff | 2026-01-05T02:40:52.865792Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.