language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/SingleColumnRowMapper.java | {
"start": 1820,
"end": 9830
} | class ____<T> implements RowMapper<@Nullable T> {
private @Nullable Class<?> requiredType;
private @Nullable ConversionService conversionService = DefaultConversionService.getSharedInstance();
/**
* Create a new {@code SingleColumnRowMapper} for bean-style configuration.
* @see #setRequiredType
*/
public SingleColumnRowMapper() {
}
/**
* Create a new {@code SingleColumnRowMapper}.
* @param requiredType the type that each result object is expected to match
*/
public SingleColumnRowMapper(Class<T> requiredType) {
if (requiredType != Object.class) {
setRequiredType(requiredType);
}
}
/**
* Create a new {@code SingleColumnRowMapper}.
* @param requiredType the type that each result object is expected to match
* @param conversionService a {@link ConversionService} for converting a fetched value
* @since 7.0
*/
public SingleColumnRowMapper(Class<T> requiredType, @Nullable ConversionService conversionService) {
if (requiredType != Object.class) {
setRequiredType(requiredType);
}
setConversionService(conversionService);
}
/**
* Set the type that each result object is expected to match.
* <p>If not specified, the column value will be exposed as
* returned by the JDBC driver.
*/
public void setRequiredType(Class<T> requiredType) {
this.requiredType = ClassUtils.resolvePrimitiveIfNecessary(requiredType);
}
/**
* Set a {@link ConversionService} for converting a fetched value.
* <p>Default is the {@link DefaultConversionService}.
* @since 5.0.4
* @see DefaultConversionService#getSharedInstance()
*/
public void setConversionService(@Nullable ConversionService conversionService) {
this.conversionService = conversionService;
}
/**
* Extract a value for the single column in the current row.
* <p>Validates that there is only one column selected,
* then delegates to {@code getColumnValue()} and also
* {@code convertValueToRequiredType}, if necessary.
* @see java.sql.ResultSetMetaData#getColumnCount()
* @see #getColumnValue(java.sql.ResultSet, int, Class)
* @see #convertValueToRequiredType(Object, Class)
*/
@Override
@SuppressWarnings("unchecked")
public @Nullable T mapRow(ResultSet rs, int rowNum) throws SQLException {
// Validate column count.
ResultSetMetaData rsmd = rs.getMetaData();
int nrOfColumns = rsmd.getColumnCount();
if (nrOfColumns != 1) {
throw new IncorrectResultSetColumnCountException(1, nrOfColumns);
}
// Extract column value from JDBC ResultSet.
Object result = getColumnValue(rs, 1, this.requiredType);
if (result != null && this.requiredType != null && !this.requiredType.isInstance(result)) {
// Extracted value does not match already: try to convert it.
try {
return (T) convertValueToRequiredType(result, this.requiredType);
}
catch (IllegalArgumentException ex) {
throw new TypeMismatchDataAccessException(
"Type mismatch affecting row number " + rowNum + " and column type '" +
rsmd.getColumnTypeName(1) + "': " + ex.getMessage());
}
}
return (T) result;
}
/**
* Retrieve a JDBC object value for the specified column.
* <p>The default implementation calls
* {@link JdbcUtils#getResultSetValue(java.sql.ResultSet, int, Class)}.
* If no required type has been specified, this method delegates to
* {@code getColumnValue(rs, index)}, which basically calls
* {@code ResultSet.getObject(index)} but applies some additional
* default conversion to appropriate value types.
* @param rs is the ResultSet holding the data
* @param index is the column index
* @param requiredType the type that each result object is expected to match
* (or {@code null} if none specified)
* @return the Object value
* @throws SQLException in case of extraction failure
* @see org.springframework.jdbc.support.JdbcUtils#getResultSetValue(java.sql.ResultSet, int, Class)
* @see #getColumnValue(java.sql.ResultSet, int)
*/
protected @Nullable Object getColumnValue(ResultSet rs, int index, @Nullable Class<?> requiredType) throws SQLException {
if (requiredType != null) {
return JdbcUtils.getResultSetValue(rs, index, requiredType);
}
else {
// No required type specified -> perform default extraction.
return getColumnValue(rs, index);
}
}
/**
* Retrieve a JDBC object value for the specified column, using the most
* appropriate value type. Called if no required type has been specified.
* <p>The default implementation delegates to {@code JdbcUtils.getResultSetValue()},
* which uses the {@code ResultSet.getObject(index)} method. Additionally,
* it includes a "hack" to get around Oracle returning a non-standard object for
* their TIMESTAMP datatype. See the {@code JdbcUtils#getResultSetValue()}
* javadoc for details.
* @param rs is the ResultSet holding the data
* @param index is the column index
* @return the Object value
* @throws SQLException in case of extraction failure
* @see org.springframework.jdbc.support.JdbcUtils#getResultSetValue(java.sql.ResultSet, int)
*/
protected @Nullable Object getColumnValue(ResultSet rs, int index) throws SQLException {
return JdbcUtils.getResultSetValue(rs, index);
}
/**
* Convert the given column value to the specified required type.
* Only called if the extracted column value does not match already.
* <p>If the required type is String, the value will simply get stringified
* via {@code toString()}. In case of a Number, the value will be
* converted into a Number, either through number conversion or through
* String parsing (depending on the value type). Otherwise, the value will
* be converted to a required type using the {@link ConversionService}.
* @param value the column value as extracted from {@code getColumnValue()}
* (never {@code null})
* @param requiredType the type that each result object is expected to match
* (never {@code null})
* @return the converted value
* @see #getColumnValue(java.sql.ResultSet, int, Class)
*/
@SuppressWarnings("unchecked")
protected @Nullable Object convertValueToRequiredType(Object value, Class<?> requiredType) {
if (String.class == requiredType) {
return value.toString();
}
else if (Number.class.isAssignableFrom(requiredType)) {
if (value instanceof Number number) {
// Convert original Number to target Number class.
return NumberUtils.convertNumberToTargetClass(number, (Class<Number>) requiredType);
}
else {
// Convert stringified value to target Number class.
return NumberUtils.parseNumber(value.toString(),(Class<Number>) requiredType);
}
}
else if (this.conversionService != null && this.conversionService.canConvert(value.getClass(), requiredType)) {
return this.conversionService.convert(value, requiredType);
}
else {
throw new IllegalArgumentException(
"Value [" + value + "] is of type [" + value.getClass().getName() +
"] and cannot be converted to required type [" + requiredType.getName() + "]");
}
}
/**
* Static factory method to create a new {@code SingleColumnRowMapper}.
* @param requiredType the type that each result object is expected to match
* @since 4.1
* @see #newInstance(Class, ConversionService)
*/
public static <T> SingleColumnRowMapper<T> newInstance(Class<T> requiredType) {
return new SingleColumnRowMapper<>(requiredType);
}
/**
* Static factory method to create a new {@code SingleColumnRowMapper}.
* @param requiredType the type that each result object is expected to match
* @param conversionService the {@link ConversionService} for converting a
* fetched value, or {@code null} for none
* @since 5.0.4
* @see #newInstance(Class)
* @see #setConversionService
*/
public static <T> SingleColumnRowMapper<T> newInstance(
Class<T> requiredType, @Nullable ConversionService conversionService) {
SingleColumnRowMapper<T> rowMapper = newInstance(requiredType);
rowMapper.setConversionService(conversionService);
return rowMapper;
}
}
| SingleColumnRowMapper |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/AbstractTransactionLifecycleTest.java | {
"start": 9945,
"end": 10062
} | enum ____ {
STATEMENT,
FLUSH,
TRANSACTION_COMPLETION;
}
public static | LifecycleOperation |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NarrowingCompoundAssignmentTest.java | {
"start": 883,
"end": 1209
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(NarrowingCompoundAssignment.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| NarrowingCompoundAssignmentTest |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/rest/RestBindingDefinition.java | {
"start": 1435,
"end": 7549
} | class ____ extends OptionalIdentifiedDefinition<RestBindingDefinition> {
@XmlTransient
private Map<String, String> defaultValues;
@XmlTransient
private Map<String, String> allowedValues;
@XmlTransient
private Boolean requiredBody;
@XmlTransient
private Set<String> requiredHeaders;
@XmlTransient
private Set<String> requiredQueryParameters;
@XmlTransient
private Map<String, String> responseCodes;
@XmlTransient
private Set<String> responseHeaders;
@XmlAttribute
private String consumes;
@XmlAttribute
private String produces;
@XmlAttribute
@Metadata(defaultValue = "off", enums = "off,auto,json,xml,json_xml")
private String bindingMode;
@XmlAttribute
@Metadata(label = "advanced")
private String type;
@XmlTransient
private Class<?> typeClass;
@XmlAttribute
@Metadata(label = "advanced")
private String outType;
@XmlTransient
private Class<?> outTypeClass;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "false")
private String skipBindingOnErrorCode;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "false")
private String clientRequestValidation;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "false")
private String clientResponseValidation;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "false")
private String enableCORS;
@XmlAttribute
@Metadata(label = "advanced", javaType = "java.lang.Boolean", defaultValue = "false")
private String enableNoContentResponse;
@XmlAttribute
@Metadata(label = "advanced")
private String component;
public RestBindingDefinition() {
}
@Override
public String toString() {
return "RestBinding";
}
public String getConsumes() {
return consumes;
}
/**
* Adds a default value for the query parameter
*
* @param paramName query parameter name
* @param defaultValue the default value
*/
public void addDefaultValue(String paramName, String defaultValue) {
if (defaultValues == null) {
defaultValues = new HashMap<>();
}
defaultValues.put(paramName, defaultValue);
}
/**
* Adds allowed value(s) for the query parameter
*
* @param paramName query parameter name
* @param allowedValue the allowed value (separate by comma)
*/
public void addAllowedValue(String paramName, String allowedValue) {
if (allowedValues == null) {
allowedValues = new HashMap<>();
}
allowedValues.put(paramName, allowedValue);
}
/**
* Adds a required query parameter
*
* @param paramName query parameter name
*/
public void addRequiredQueryParameter(String paramName) {
if (requiredQueryParameters == null) {
requiredQueryParameters = new HashSet<>();
}
requiredQueryParameters.add(paramName);
}
public Set<String> getRequiredQueryParameters() {
return requiredQueryParameters;
}
/**
* Adds a response code
*/
public void addResponseCode(String code, String contentType) {
if (responseCodes == null) {
responseCodes = new HashMap<>();
}
responseCodes.put(code, contentType);
}
public Map<String, String> getResponseCodes() {
return responseCodes;
}
/**
* Adds a response code
*/
public void addResponseHeader(String headerName) {
// content-type header should be skipped
if ("content-type".equalsIgnoreCase(headerName)) {
return;
}
if (responseHeaders == null) {
responseHeaders = new HashSet<>();
}
responseHeaders.add(headerName);
}
public Set<String> getResponseHeaders() {
return responseHeaders;
}
/**
* Adds a required HTTP header
*
* @param headerName HTTP header name
*/
public void addRequiredHeader(String headerName) {
if (requiredHeaders == null) {
requiredHeaders = new HashSet<>();
}
requiredHeaders.add(headerName);
}
public Set<String> getRequiredHeaders() {
return requiredHeaders;
}
public Boolean getRequiredBody() {
return requiredBody;
}
public void setRequiredBody(Boolean requiredBody) {
this.requiredBody = requiredBody;
}
/**
* Gets the registered default values for query parameters
*/
public Map<String, String> getDefaultValues() {
return defaultValues;
}
/**
* Gets the registered allowed values for query parameters
*/
public Map<String, String> getAllowedValues() {
return allowedValues;
}
/**
* Sets the component name that this definition will apply to
*/
public void setComponent(String component) {
this.component = component;
}
public String getComponent() {
return component;
}
/**
* To define the content type what the REST service consumes (accept as input), such as application/xml or
* application/json
*/
public void setConsumes(String consumes) {
this.consumes = consumes;
}
public String getProduces() {
return produces;
}
/**
* To define the content type what the REST service produces (uses for output), such as application/xml or
* application/json
*/
public void setProduces(String produces) {
this.produces = produces;
}
public String getBindingMode() {
return bindingMode;
}
/**
* Sets the binding mode to use.
* <p/>
* The default value is off
*/
public void setBindingMode(String bindingMode) {
this.bindingMode = bindingMode;
}
public String getType() {
return type;
}
/**
* Sets the | RestBindingDefinition |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-report-zookeeper/src/test/java/org/apache/dubbo/metadata/store/zookeeper/ZookeeperMetadataReportTest.java | {
"start": 2406,
"end": 17441
} | class ____ {
private ZookeeperMetadataReport zookeeperMetadataReport;
private URL registryUrl;
private ZookeeperMetadataReportFactory zookeeperMetadataReportFactory;
private static String zookeeperConnectionAddress1;
@BeforeAll
public static void beforeAll() {
zookeeperConnectionAddress1 = System.getProperty("zookeeper.connection.address.1");
}
@BeforeEach
public void setUp() throws Exception {
this.registryUrl = URL.valueOf(zookeeperConnectionAddress1);
zookeeperMetadataReportFactory = new ZookeeperMetadataReportFactory(ApplicationModel.defaultModel());
this.zookeeperMetadataReport =
(ZookeeperMetadataReport) zookeeperMetadataReportFactory.getMetadataReport(registryUrl);
}
private void deletePath(MetadataIdentifier metadataIdentifier, ZookeeperMetadataReport zookeeperMetadataReport) {
String category = zookeeperMetadataReport.toRootDir() + metadataIdentifier.getUniqueKey(KeyTypeEnum.PATH);
zookeeperMetadataReport.zkClient.delete(category);
}
@Test
void testStoreProvider() throws ClassNotFoundException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0.zk.md";
String group = null;
String application = "vic.zk.md";
MetadataIdentifier providerMetadataIdentifier =
storePrivider(zookeeperMetadataReport, interfaceName, version, group, application);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 3500, zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
Assertions.assertNotNull(fileContent);
deletePath(providerMetadataIdentifier, zookeeperMetadataReport);
fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 1000, zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
Assertions.assertNull(fileContent);
providerMetadataIdentifier = storePrivider(zookeeperMetadataReport, interfaceName, version, group, application);
fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 3500, zookeeperMetadataReport.getNodePath(providerMetadataIdentifier));
Assertions.assertNotNull(fileContent);
FullServiceDefinition fullServiceDefinition = JsonUtils.toJavaObject(fileContent, FullServiceDefinition.class);
Assertions.assertEquals(fullServiceDefinition.getParameters().get("paramTest"), "zkTest");
}
@Test
void testConsumer() throws ClassNotFoundException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0.zk.md";
String group = null;
String application = "vic.zk.md";
MetadataIdentifier consumerMetadataIdentifier =
storeConsumer(zookeeperMetadataReport, interfaceName, version, group, application);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 3500, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
Assertions.assertNotNull(fileContent);
deletePath(consumerMetadataIdentifier, zookeeperMetadataReport);
fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 1000, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
Assertions.assertNull(fileContent);
consumerMetadataIdentifier = storeConsumer(zookeeperMetadataReport, interfaceName, version, group, application);
fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
fileContent = waitSeconds(fileContent, 3000, zookeeperMetadataReport.getNodePath(consumerMetadataIdentifier));
Assertions.assertNotNull(fileContent);
Assertions.assertEquals(fileContent, "{\"paramConsumerTest\":\"zkCm\"}");
}
@Test
void testDoSaveMetadata() throws ExecutionException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0";
String group = null;
String application = "etc-metadata-report-consumer-test";
String revision = "90980";
String protocol = "xxx";
URL url = generateURL(interfaceName, version, group, application);
ServiceMetadataIdentifier serviceMetadataIdentifier =
new ServiceMetadataIdentifier(interfaceName, version, group, "provider", revision, protocol);
zookeeperMetadataReport.doSaveMetadata(serviceMetadataIdentifier, url);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(serviceMetadataIdentifier));
Assertions.assertNotNull(fileContent);
Assertions.assertEquals(fileContent, URL.encode(url.toFullString()));
}
@Test
void testDoRemoveMetadata() throws ExecutionException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0";
String group = null;
String application = "etc-metadata-report-consumer-test";
String revision = "90980";
String protocol = "xxx";
URL url = generateURL(interfaceName, version, group, application);
ServiceMetadataIdentifier serviceMetadataIdentifier =
new ServiceMetadataIdentifier(interfaceName, version, group, "provider", revision, protocol);
zookeeperMetadataReport.doSaveMetadata(serviceMetadataIdentifier, url);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(serviceMetadataIdentifier));
Assertions.assertNotNull(fileContent);
zookeeperMetadataReport.doRemoveMetadata(serviceMetadataIdentifier);
fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(serviceMetadataIdentifier));
Assertions.assertNull(fileContent);
}
@Test
void testDoGetExportedURLs() throws ExecutionException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0";
String group = null;
String application = "etc-metadata-report-consumer-test";
String revision = "90980";
String protocol = "xxx";
URL url = generateURL(interfaceName, version, group, application);
ServiceMetadataIdentifier serviceMetadataIdentifier =
new ServiceMetadataIdentifier(interfaceName, version, group, "provider", revision, protocol);
zookeeperMetadataReport.doSaveMetadata(serviceMetadataIdentifier, url);
List<String> r = zookeeperMetadataReport.doGetExportedURLs(serviceMetadataIdentifier);
Assertions.assertTrue(r.size() == 1);
String fileContent = r.get(0);
Assertions.assertNotNull(fileContent);
Assertions.assertEquals(fileContent, url.toFullString());
}
@Test
void testDoSaveSubscriberData() throws ExecutionException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0";
String group = null;
String application = "etc-metadata-report-consumer-test";
String revision = "90980";
String protocol = "xxx";
URL url = generateURL(interfaceName, version, group, application);
SubscriberMetadataIdentifier subscriberMetadataIdentifier =
new SubscriberMetadataIdentifier(application, revision);
String r = JsonUtils.toJson(Arrays.asList(url.toString()));
zookeeperMetadataReport.doSaveSubscriberData(subscriberMetadataIdentifier, r);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(subscriberMetadataIdentifier));
Assertions.assertNotNull(fileContent);
Assertions.assertEquals(fileContent, r);
}
@Test
void testDoGetSubscribedURLs() throws ExecutionException, InterruptedException {
String interfaceName = "org.apache.dubbo.metadata.store.zookeeper.ZookeeperMetadataReport4TstService";
String version = "1.0.0";
String group = null;
String application = "etc-metadata-report-consumer-test";
String revision = "90980";
String protocol = "xxx";
URL url = generateURL(interfaceName, version, group, application);
SubscriberMetadataIdentifier subscriberMetadataIdentifier =
new SubscriberMetadataIdentifier(application, revision);
String r = JsonUtils.toJson(Arrays.asList(url.toString()));
zookeeperMetadataReport.doSaveSubscriberData(subscriberMetadataIdentifier, r);
String fileContent = zookeeperMetadataReport.zkClient.getContent(
zookeeperMetadataReport.getNodePath(subscriberMetadataIdentifier));
Assertions.assertNotNull(fileContent);
Assertions.assertEquals(fileContent, r);
}
private MetadataIdentifier storePrivider(
MetadataReport zookeeperMetadataReport,
String interfaceName,
String version,
String group,
String application)
throws ClassNotFoundException, InterruptedException {
URL url = URL.valueOf("xxx://" + NetUtils.getLocalAddress().getHostName() + ":4444/" + interfaceName
+ "?paramTest=zkTest&version=" + version + "&application=" + application
+ (group == null ? "" : "&group=" + group));
MetadataIdentifier providerMetadataIdentifier =
new MetadataIdentifier(interfaceName, version, group, PROVIDER_SIDE, application);
Class interfaceClass = Class.forName(interfaceName);
FullServiceDefinition fullServiceDefinition =
ServiceDefinitionBuilder.buildFullDefinition(interfaceClass, url.getParameters());
zookeeperMetadataReport.storeProviderMetadata(providerMetadataIdentifier, fullServiceDefinition);
Thread.sleep(2000);
return providerMetadataIdentifier;
}
private MetadataIdentifier storeConsumer(
MetadataReport zookeeperMetadataReport,
String interfaceName,
String version,
String group,
String application)
throws ClassNotFoundException, InterruptedException {
URL url = URL.valueOf("xxx://" + NetUtils.getLocalAddress().getHostName() + ":4444/" + interfaceName
+ "?version=" + version + "&application=" + application + (group == null ? "" : "&group=" + group));
MetadataIdentifier consumerMetadataIdentifier =
new MetadataIdentifier(interfaceName, version, group, CONSUMER_SIDE, application);
Class interfaceClass = Class.forName(interfaceName);
Map<String, String> tmp = new HashMap<>();
tmp.put("paramConsumerTest", "zkCm");
zookeeperMetadataReport.storeConsumerMetadata(consumerMetadataIdentifier, tmp);
Thread.sleep(2000);
return consumerMetadataIdentifier;
}
private String waitSeconds(String value, long moreTime, String path) throws InterruptedException {
if (value == null) {
Thread.sleep(moreTime);
return zookeeperMetadataReport.zkClient.getContent(path);
}
return value;
}
private URL generateURL(String interfaceName, String version, String group, String application) {
URL url = URL.valueOf("xxx://" + NetUtils.getLocalAddress().getHostName() + ":8989/" + interfaceName
+ "?paramTest=etcdTest&version="
+ version + "&application="
+ application + (group == null ? "" : "&group=" + group));
return url;
}
@Test
void testMapping() throws InterruptedException {
String serviceKey = ZookeeperMetadataReportTest.class.getName();
URL url = URL.valueOf("test://127.0.0.1:8888/" + serviceKey);
String appNames = "demo1,demo2";
CountDownLatch latch = new CountDownLatch(1);
Set<String> serviceAppMapping = zookeeperMetadataReport.getServiceAppMapping(
serviceKey,
new MappingListener() {
@Override
public void onEvent(MappingChangedEvent event) {
Set<String> apps = event.getApps();
Assertions.assertEquals(apps.size(), 2);
Assertions.assertTrue(apps.contains("demo1"));
Assertions.assertTrue(apps.contains("demo2"));
latch.countDown();
}
@Override
public void stop() {}
},
url);
Assertions.assertTrue(serviceAppMapping.isEmpty());
ConfigItem configItem = zookeeperMetadataReport.getConfigItem(serviceKey, DEFAULT_MAPPING_GROUP);
zookeeperMetadataReport.registerServiceAppMapping(
serviceKey, DEFAULT_MAPPING_GROUP, appNames, configItem.getTicket());
latch.await();
}
@Test
void testAppMetadata() {
String serviceKey = ZookeeperMetadataReportTest.class.getName();
String appName = "demo";
URL url = URL.valueOf("test://127.0.0.1:8888/" + serviceKey);
MetadataInfo metadataInfo = new MetadataInfo(appName);
metadataInfo.addService(url);
SubscriberMetadataIdentifier identifier =
new SubscriberMetadataIdentifier(appName, metadataInfo.calAndGetRevision());
MetadataInfo appMetadata = zookeeperMetadataReport.getAppMetadata(identifier, Collections.emptyMap());
Assertions.assertNull(appMetadata);
zookeeperMetadataReport.publishAppMetadata(identifier, metadataInfo);
appMetadata = zookeeperMetadataReport.getAppMetadata(identifier, Collections.emptyMap());
Assertions.assertNotNull(appMetadata);
Assertions.assertEquals(appMetadata.calAndGetRevision(), metadataInfo.calAndGetRevision());
}
}
| ZookeeperMetadataReportTest |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/metrics/DefaultCommandLatencyCollectorUnitTests.java | {
"start": 1422,
"end": 5338
} | class ____ {
private DefaultCommandLatencyCollector sut;
@Test
void shutdown() {
sut = new DefaultCommandLatencyCollector(DefaultCommandLatencyCollectorOptions.create());
sut.shutdown();
assertThat(sut.isEnabled()).isFalse();
}
@Test
void simpleCreateShouldNotInitializePauseDetector() {
sut = new DefaultCommandLatencyCollector(DefaultCommandLatencyCollectorOptions.create());
PauseDetectorWrapper wrapper = (PauseDetectorWrapper) ReflectionTestUtils.getField(sut, "pauseDetectorWrapper");
assertThat(wrapper).isNull();
}
@Test
void latencyRecordShouldInitializePauseDetectorWrapper() {
sut = new DefaultCommandLatencyCollector(DefaultCommandLatencyCollectorOptions.create());
setupData();
PauseDetectorWrapper wrapper = (PauseDetectorWrapper) ReflectionTestUtils.getField(sut, "pauseDetectorWrapper");
assertThat(wrapper).isNotNull();
sut.shutdown();
wrapper = (PauseDetectorWrapper) ReflectionTestUtils.getField(sut, "pauseDetectorWrapper");
assertThat(wrapper).isNull();
}
@Test
void shutdownShouldReleasePauseDetector() {
sut = new DefaultCommandLatencyCollector(DefaultCommandLatencyCollectorOptions.create());
PauseDetectorWrapper wrapper = (PauseDetectorWrapper) ReflectionTestUtils.getField(sut, "pauseDetectorWrapper");
assertThat(wrapper).isNull();
setupData();
wrapper = (PauseDetectorWrapper) ReflectionTestUtils.getField(sut, "pauseDetectorWrapper");
assertThat(wrapper).isNotNull();
sut.shutdown();
}
@Test
void verifyMetrics() {
sut = new DefaultCommandLatencyCollector(DefaultCommandLatencyCollectorOptions.builder().usePauseDetector().build());
setupData();
Map<CommandLatencyId, CommandMetrics> latencies = sut.retrieveMetrics();
assertThat(latencies).hasSize(1);
Map.Entry<CommandLatencyId, CommandMetrics> entry = latencies.entrySet().iterator().next();
assertThat(entry.getKey().commandType()).isSameAs(CommandType.BGSAVE);
CommandMetrics metrics = entry.getValue();
assertThat(metrics.getCount()).isEqualTo(3);
assertThat(metrics.getCompletion().getMin()).isBetween(990000L, 1100000L);
assertThat(metrics.getCompletion().getPercentiles()).hasSize(5);
assertThat(metrics.getFirstResponse().getMin()).isBetween(90000L, 110000L);
assertThat(metrics.getFirstResponse().getMax()).isBetween(290000L, 310000L);
assertThat(metrics.getCompletion().getPercentiles()).containsKey(50.0d);
assertThat(metrics.getFirstResponse().getPercentiles().get(50d))
.isLessThanOrEqualTo(metrics.getCompletion().getPercentiles().get(50d));
assertThat(metrics.getTimeUnit()).isEqualTo(MICROSECONDS);
assertThat(sut.retrieveMetrics()).isEmpty();
sut.shutdown();
}
@Test
void verifyCummulativeMetrics() {
sut = new DefaultCommandLatencyCollector(
DefaultCommandLatencyCollectorOptions.builder().usePauseDetector().resetLatenciesAfterEvent(false).build());
setupData();
assertThat(sut.retrieveMetrics()).hasSize(1);
assertThat(sut.retrieveMetrics()).hasSize(1);
sut.shutdown();
}
private void setupData() {
sut.recordCommandLatency(LocalAddress.ANY, LocalAddress.ANY, CommandType.BGSAVE, MILLISECONDS.toNanos(100),
MILLISECONDS.toNanos(1000));
sut.recordCommandLatency(LocalAddress.ANY, LocalAddress.ANY, CommandType.BGSAVE, MILLISECONDS.toNanos(200),
MILLISECONDS.toNanos(1000));
sut.recordCommandLatency(LocalAddress.ANY, LocalAddress.ANY, CommandType.BGSAVE, MILLISECONDS.toNanos(300),
MILLISECONDS.toNanos(1000));
}
}
| DefaultCommandLatencyCollectorUnitTests |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GoogleSheetsComponentBuilderFactory.java | {
"start": 1378,
"end": 1879
} | interface ____ {
/**
* Google Sheets (camel-google-sheets)
* Manage spreadsheets in Google Sheets.
*
* Category: cloud,document
* Since: 2.23
* Maven coordinates: org.apache.camel:camel-google-sheets
*
* @return the dsl builder
*/
static GoogleSheetsComponentBuilder googleSheets() {
return new GoogleSheetsComponentBuilderImpl();
}
/**
* Builder for the Google Sheets component.
*/
| GoogleSheetsComponentBuilderFactory |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/spr/EncodedUriTests.java | {
"start": 3453,
"end": 3695
} | class ____ {
@RequestMapping(value = "/circuit/{id}", method = RequestMethod.GET)
String getCircuit(@PathVariable String id, Model model) {
model.addAttribute("receivedId", id);
return "result";
}
}
@Component
static | MyController |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_491.java | {
"start": 830,
"end": 1269
} | class ____ {
public String name;
}
public static <V> Map<String, V> getJsonToMap1(String json, Class<V> valueType) {
return JSON.parseObject(json, new TypeReference<Map<String, V>>(valueType) {});
}
public static <K, V> Map<K, V> getJsonToMap(String json, Class<K> keyType, Class<V> valueType) {
return JSON.parseObject(json, new TypeReference<Map<K, V>>(keyType, valueType) {});
}
}
| Model |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/logging/NacosLoggingPropertiesTest.java | {
"start": 888,
"end": 2630
} | class ____ {
NacosLoggingProperties loggingProperties;
Properties properties;
@BeforeEach
void setUp() throws Exception {
properties = new Properties();
loggingProperties = new NacosLoggingProperties("classpath:test.xml", properties);
}
@Test
void testGetLocationWithDefault() {
assertEquals("classpath:test.xml", loggingProperties.getLocation());
}
@Test
void testGetLocationWithoutDefault() {
properties.setProperty("nacos.logging.default.config.enabled", "false");
assertNull(loggingProperties.getLocation());
}
@Test
void testGetLocationForSpecified() {
properties.setProperty("nacos.logging.config", "classpath:specified-test.xml");
properties.setProperty("nacos.logging.default.config.enabled", "false");
assertEquals("classpath:specified-test.xml", loggingProperties.getLocation());
}
@Test
void testGetLocationForSpecifiedWithDefault() {
properties.setProperty("nacos.logging.config", "classpath:specified-test.xml");
assertEquals("classpath:specified-test.xml", loggingProperties.getLocation());
}
@Test
void testGetReloadInternal() {
properties.setProperty("nacos.logging.reload.interval.seconds", "50000");
assertEquals(50000L, loggingProperties.getReloadInternal());
}
@Test
void testGetValue() {
properties.setProperty("test.key", "test.value");
assertEquals("test.value", loggingProperties.getValue("test.key", "default.value"));
properties.clear();
assertEquals("default.value", loggingProperties.getValue("test.key", "default.value"));
}
} | NacosLoggingPropertiesTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/source/datagen/DataGeneratorSourceTest.java | {
"start": 1680,
"end": 9598
} | class ____ {
@Test
void testRandomGenerator() throws Exception {
long min = 10;
long max = 20;
DataGeneratorSource<Long> source =
new DataGeneratorSource<>(RandomGenerator.longGenerator(min, max));
StreamSource<Long, DataGeneratorSource<Long>> src = new StreamSource<>(source);
AbstractStreamOperatorTestHarness<Long> testHarness =
new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0);
testHarness.open();
int totalNumber = 1000;
List<Long> results = new ArrayList<>();
source.run(
new SourceFunction.SourceContext<Long>() {
private Object lock = new Object();
private int emitNumber = 0;
@Override
public void collect(Long element) {
if (++emitNumber > totalNumber) {
source.isRunning = false;
}
results.add(element);
}
@Override
public void collectWithTimestamp(Long element, long timestamp) {}
@Override
public void emitWatermark(Watermark mark) {}
@Override
public void markAsTemporarilyIdle() {}
@Override
public Object getCheckpointLock() {
return lock;
}
@Override
public void close() {}
});
for (Long l : results) {
assertThat(l).isBetween(min, max);
}
}
@Test
void testSequenceCheckpointRestore() throws Exception {
final int initElement = 0;
final int maxElement = 100;
final Set<Long> expectedOutput = new HashSet<>();
for (long i = initElement; i <= maxElement; i++) {
expectedOutput.add(i);
}
DataGeneratorSourceTest.innerTestDataGenCheckpointRestore(
() ->
new DataGeneratorSource<>(
SequenceGenerator.longGenerator(initElement, maxElement)),
expectedOutput);
}
public static <T> void innerTestDataGenCheckpointRestore(
Supplier<DataGeneratorSource<T>> supplier, Set<T> expectedOutput) throws Exception {
final int maxParallelsim = 2;
final ConcurrentHashMap<String, List<T>> outputCollector = new ConcurrentHashMap<>();
final OneShotLatch latchToTrigger1 = new OneShotLatch();
final OneShotLatch latchToWait1 = new OneShotLatch();
final OneShotLatch latchToTrigger2 = new OneShotLatch();
final OneShotLatch latchToWait2 = new OneShotLatch();
final DataGeneratorSource<T> source1 = supplier.get();
StreamSource<T, DataGeneratorSource<T>> src1 = new StreamSource<>(source1);
final AbstractStreamOperatorTestHarness<T> testHarness1 =
new AbstractStreamOperatorTestHarness<>(src1, maxParallelsim, 2, 0);
testHarness1.open();
final DataGeneratorSource<T> source2 = supplier.get();
StreamSource<T, DataGeneratorSource<T>> src2 = new StreamSource<>(source2);
final AbstractStreamOperatorTestHarness<T> testHarness2 =
new AbstractStreamOperatorTestHarness<>(src2, maxParallelsim, 2, 1);
testHarness2.open();
// run the source asynchronously
Thread runner1 =
new Thread(
() -> {
try {
source1.run(
new BlockingSourceContext<>(
"1",
latchToTrigger1,
latchToWait1,
outputCollector,
21));
} catch (Throwable t) {
t.printStackTrace();
}
});
// run the source asynchronously
Thread runner2 =
new Thread(
() -> {
try {
source2.run(
new BlockingSourceContext<>(
"2",
latchToTrigger2,
latchToWait2,
outputCollector,
32));
} catch (Throwable t) {
t.printStackTrace();
}
});
runner1.start();
runner2.start();
if (!latchToTrigger1.isTriggered()) {
latchToTrigger1.await();
}
if (!latchToTrigger2.isTriggered()) {
latchToTrigger2.await();
}
OperatorSubtaskState snapshot =
AbstractStreamOperatorTestHarness.repackageState(
testHarness1.snapshot(0L, 0L), testHarness2.snapshot(0L, 0L));
final DataGeneratorSource<T> source3 = supplier.get();
StreamSource<T, DataGeneratorSource<T>> src3 = new StreamSource<>(source3);
final OperatorSubtaskState initState =
AbstractStreamOperatorTestHarness.repartitionOperatorState(
snapshot, maxParallelsim, 2, 1, 0);
final AbstractStreamOperatorTestHarness<T> testHarness3 =
new AbstractStreamOperatorTestHarness<>(src3, maxParallelsim, 1, 0);
testHarness3.setup();
testHarness3.initializeState(initState);
testHarness3.open();
final OneShotLatch latchToTrigger3 = new OneShotLatch();
final OneShotLatch latchToWait3 = new OneShotLatch();
latchToWait3.trigger();
// run the source asynchronously
Thread runner3 =
new Thread(
() -> {
try {
source3.run(
new BlockingSourceContext<>(
"3",
latchToTrigger3,
latchToWait3,
outputCollector,
3));
} catch (Throwable t) {
t.printStackTrace();
}
});
runner3.start();
runner3.join();
assertThat(outputCollector).hasSize(3); // we have 3 tasks.
// test for at-most-once
Set<T> dedupRes = new HashSet<>(expectedOutput.size());
for (Map.Entry<String, List<T>> elementsPerTask : outputCollector.entrySet()) {
String key = elementsPerTask.getKey();
List<T> elements = outputCollector.get(key);
// this tests the correctness of the latches in the test
assertThat(elements).isNotEmpty();
for (T elem : elements) {
assertThat(dedupRes.add(elem)).as("Duplicate entry: " + elem).isTrue();
assertThat(expectedOutput).as("Unexpected element: " + elem).contains(elem);
}
}
// test for exactly-once
assertThat(dedupRes).hasSameSizeAs(expectedOutput);
latchToWait1.trigger();
latchToWait2.trigger();
// wait for everybody ot finish.
runner1.join();
runner2.join();
}
}
| DataGeneratorSourceTest |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/config/ProtocolConfig.java | {
"start": 1794,
"end": 16499
} | class ____ extends AbstractConfig {
private static final long serialVersionUID = 6913423882496634749L;
/**
* The name of the protocol.
*/
private String name;
/**
* The service's IP address (useful when there are multiple network cards available).
*/
private String host;
/**
* The service's port number.
*/
private Integer port;
/**
* The context path for the service.
*/
private String contextpath;
/**
* The name of the thread pool.
*/
private String threadpool;
/**
* The core thread size of the thread pool.
*/
private Integer corethreads;
/**
* The fixed size of the thread pool.
*/
private Integer threads;
/**
* The fixed size of the IO thread pool.
*/
private Integer iothreads;
/**
* The keep-alive time for threads in the thread pool (default unit is TimeUnit.MILLISECONDS).
*/
private Integer alive;
/**
* The length of the thread pool's queue.
*/
private Integer queues;
/**
* Listeners for exhausted thread pool.
*/
private String threadPoolExhaustedListeners;
/**
* The maximum acceptable connections.
*/
private Integer accepts;
/**
* The protocol codec.
*/
private String codec;
/**
* The serialization method.
*/
private String serialization;
/**
* Specifies the preferred serialization method for the consumer.
* If specified, the consumer will use this parameter first.
* If the Dubbo Sdk you are using contains the serialization type, the serialization method specified by the argument is used.
* <p>
* When this parameter is null or the serialization type specified by this parameter does not exist in the Dubbo SDK, the serialization type specified by serialization is used.
* If the Dubbo SDK if still does not exist, the default type of the Dubbo SDK is used.
* For Dubbo SDK >= 3.2, <code>preferSerialization</code> takes precedence over <code>serialization</code>
* <p>
* Supports multiple values separated by commas, e.g., "fastjson2,fastjson,hessian2".
*/
private String preferSerialization; // default:fastjson2,hessian2
/**
* The character set used for communication.
*/
private String charset;
/**
* The maximum payload length.
*/
private Integer payload;
/**
* The buffer size.
*/
private Integer buffer;
/**
* The interval for sending heartbeats.
*/
private Integer heartbeat;
/**
* The access log configuration.
*/
private String accesslog;
/**
* The transporter used for communication.
*/
private String transporter;
/**
* The method of information exchange.
*/
private String exchanger;
/**
* The thread dispatch mode.
*/
private String dispatcher;
/**
* The networker implementation.
*/
private String networker;
/**
* The server implementation.
*/
private String server;
/**
* The client implementation.
*/
private String client;
/**
* Supported Telnet commands, separated by commas.
*/
private String telnet;
/**
* The command line prompt.
*/
private String prompt;
/**
* The status check configuration.
*/
private String status;
/**
* Indicates whether the service should be registered.
*/
private Boolean register;
// TODO: Move this property to the provider configuration.
/**
* Indicates whether it is a persistent connection.
*/
private Boolean keepAlive;
// TODO: Move this property to the provider configuration.
/**
* The optimizer used for dubbo protocol.
*/
private String optimizer;
/**
* Additional extensions.
*/
private String extension;
/**
* Custom parameters.
*/
private Map<String, String> parameters;
/**
* Indicates whether SSL is enabled.
*/
private Boolean sslEnabled;
/**
* Extra protocol for this service, using Port Unification Server.
*/
private String extProtocol;
private String preferredProtocol;
/**
* JSON check level for serialization.
*/
private String jsonCheckLevel;
/**
* Indicates whether to support no interface.
*/
private Boolean noInterfaceSupport;
@Nested
private TripleConfig triple;
public ProtocolConfig() {}
public ProtocolConfig(ApplicationModel applicationModel) {
super(applicationModel);
}
public ProtocolConfig(String name) {
setName(name);
}
public ProtocolConfig(ApplicationModel applicationModel, String name) {
super(applicationModel);
setName(name);
}
public ProtocolConfig(String name, int port) {
setName(name);
setPort(port);
}
public ProtocolConfig(ApplicationModel applicationModel, String name, int port) {
super(applicationModel);
setName(name);
setPort(port);
}
@Override
protected void checkDefault() {
super.checkDefault();
if (name == null) {
name = DUBBO_PROTOCOL;
}
if (StringUtils.isBlank(preferSerialization)) {
preferSerialization = serialization != null
? serialization
: getScopeModel()
.getBeanFactory()
.getBean(PreferSerializationProvider.class)
.getPreferSerialization();
}
}
@Parameter(excluded = true)
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Parameter(excluded = true)
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
@Parameter(excluded = true)
public Integer getPort() {
return port;
}
public void setPort(Integer port) {
this.port = port;
}
@Deprecated
@Parameter(excluded = true, attribute = false)
public String getPath() {
return getContextpath();
}
@Deprecated
public void setPath(String path) {
setContextpath(path);
}
@Parameter(excluded = true)
public String getContextpath() {
return contextpath;
}
public void setContextpath(String contextpath) {
this.contextpath = contextpath;
}
public String getThreadpool() {
return threadpool;
}
public void setThreadpool(String threadpool) {
this.threadpool = threadpool;
}
@Parameter(key = JSON_CHECK_LEVEL_KEY)
public String getJsonCheckLevel() {
return jsonCheckLevel;
}
public void setJsonCheckLevel(String jsonCheckLevel) {
this.jsonCheckLevel = jsonCheckLevel;
}
@Parameter(key = THREAD_POOL_EXHAUSTED_LISTENERS_KEY)
public String getThreadPoolExhaustedListeners() {
return threadPoolExhaustedListeners;
}
public void setThreadPoolExhaustedListeners(String threadPoolExhaustedListeners) {
this.threadPoolExhaustedListeners = threadPoolExhaustedListeners;
}
public Integer getCorethreads() {
return corethreads;
}
public void setCorethreads(Integer corethreads) {
this.corethreads = corethreads;
}
public Integer getThreads() {
return threads;
}
public void setThreads(Integer threads) {
this.threads = threads;
}
public Integer getIothreads() {
return iothreads;
}
public void setIothreads(Integer iothreads) {
this.iothreads = iothreads;
}
public Integer getAlive() {
return alive;
}
public void setAlive(Integer alive) {
this.alive = alive;
}
public Integer getQueues() {
return queues;
}
public void setQueues(Integer queues) {
this.queues = queues;
}
public Integer getAccepts() {
return accepts;
}
public void setAccepts(Integer accepts) {
this.accepts = accepts;
}
public String getCodec() {
return codec;
}
public void setCodec(String codec) {
this.codec = codec;
}
public String getSerialization() {
return serialization;
}
public void setSerialization(String serialization) {
this.serialization = serialization;
}
public String getPreferSerialization() {
return preferSerialization;
}
public void setPreferSerialization(String preferSerialization) {
this.preferSerialization = preferSerialization;
}
public String getCharset() {
return charset;
}
public void setCharset(String charset) {
this.charset = charset;
}
public Integer getPayload() {
return payload;
}
public void setPayload(Integer payload) {
this.payload = payload;
}
public Integer getBuffer() {
return buffer;
}
public void setBuffer(Integer buffer) {
this.buffer = buffer;
}
public Integer getHeartbeat() {
return heartbeat;
}
public void setHeartbeat(Integer heartbeat) {
this.heartbeat = heartbeat;
}
public String getServer() {
return server;
}
public void setServer(String server) {
this.server = server;
}
public String getClient() {
return client;
}
public void setClient(String client) {
this.client = client;
}
public String getAccesslog() {
return accesslog;
}
public void setAccesslog(String accesslog) {
this.accesslog = accesslog;
}
public String getTelnet() {
return telnet;
}
public void setTelnet(String telnet) {
this.telnet = telnet;
}
@Parameter(escaped = true)
public String getPrompt() {
return prompt;
}
public void setPrompt(String prompt) {
this.prompt = prompt;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public Boolean isRegister() {
return register;
}
public void setRegister(Boolean register) {
this.register = register;
}
public String getTransporter() {
return transporter;
}
public void setTransporter(String transporter) {
this.transporter = transporter;
}
public String getExchanger() {
return exchanger;
}
public void setExchanger(String exchanger) {
this.exchanger = exchanger;
}
/**
* typo, switch to use {@link #getDispatcher()}
*
* @deprecated {@link #getDispatcher()}
*/
@Deprecated
@Parameter(excluded = true, attribute = false)
public String getDispather() {
return getDispatcher();
}
/**
* typo, switch to use {@link #getDispatcher()}
*
* @deprecated {@link #setDispatcher(String)}
*/
@Deprecated
public void setDispather(String dispather) {
setDispatcher(dispather);
}
public String getDispatcher() {
return dispatcher;
}
public void setDispatcher(String dispatcher) {
this.dispatcher = dispatcher;
}
public String getNetworker() {
return networker;
}
public void setNetworker(String networker) {
this.networker = networker;
}
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
this.parameters = parameters;
}
@Parameter(key = SSL_ENABLED_KEY)
public Boolean getSslEnabled() {
return sslEnabled;
}
public void setSslEnabled(Boolean sslEnabled) {
this.sslEnabled = sslEnabled;
}
public Boolean getKeepAlive() {
return keepAlive;
}
public void setKeepAlive(Boolean keepAlive) {
this.keepAlive = keepAlive;
}
public String getOptimizer() {
return optimizer;
}
public void setOptimizer(String optimizer) {
this.optimizer = optimizer;
}
public String getExtension() {
return extension;
}
public void setExtension(String extension) {
this.extension = extension;
}
@Override
@Parameter(excluded = true, attribute = false)
public boolean isValid() {
return StringUtils.isNotEmpty(name);
}
public String getExtProtocol() {
return extProtocol;
}
public void setExtProtocol(String extProtocol) {
this.extProtocol = extProtocol;
}
public String getPreferredProtocol() {
return preferredProtocol;
}
public void setPreferredProtocol(String preferredProtocol) {
this.preferredProtocol = preferredProtocol;
}
public Boolean isNoInterfaceSupport() {
return noInterfaceSupport;
}
public void setNoInterfaceSupport(Boolean noInterfaceSupport) {
this.noInterfaceSupport = noInterfaceSupport;
}
public TripleConfig getTriple() {
return triple;
}
@Parameter(excluded = true)
public TripleConfig getTripleOrDefault() {
if (triple == null) {
triple = new TripleConfig();
}
return triple;
}
public void setTriple(TripleConfig triple) {
this.triple = triple;
}
public void mergeProtocol(ProtocolConfig sourceConfig) {
if (sourceConfig == null) {
return;
}
Field[] targetFields = getClass().getDeclaredFields();
try {
Map<String, Object> protocolConfigMap = CollectionUtils.objToMap(sourceConfig);
for (Field targetField : targetFields) {
Optional.ofNullable(protocolConfigMap.get(targetField.getName()))
.ifPresent(value -> {
try {
targetField.setAccessible(true);
if (targetField.get(this) == null) {
targetField.set(this, value);
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
});
}
} catch (Exception e) {
logger.error(COMMON_UNEXPECTED_EXCEPTION, "", "", "merge protocol config fail, error: ", e);
}
}
}
| ProtocolConfig |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1904/Issue1904Mapper.java | {
"start": 270,
"end": 402
} | interface ____ {
CarManualDto translateManual(CarManual manual);
/**
* @author Filip Hrisafov
*/
| Issue1904Mapper |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest31_renameIndex.java | {
"start": 911,
"end": 1509
} | class ____ extends TestCase {
public void test_alter_add_key() throws Exception {
String sql = "ALTER TABLE test_table_normal\n" +
"RENAME INDEX old_index_name TO new_index_name";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ALTER TABLE test_table_normal\n" +
"\tRENAME INDEX old_index_name TO new_index_name", output);
}
}
| MySqlAlterTableTest31_renameIndex |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleIndexInformation.java | {
"start": 1116,
"end": 2666
} | class ____ {
// The estimate of `ShuffleIndexInformation` memory footprint which is relevant in case of small
// index files (i.e. storing only 2 offsets = 16 bytes).
static final int INSTANCE_MEMORY_FOOTPRINT = 176;
/** offsets as long buffer */
private final LongBuffer offsets;
public ShuffleIndexInformation(String indexFilePath) throws IOException {
File indexFile = new File(indexFilePath);
ByteBuffer buffer = ByteBuffer.allocate((int)indexFile.length());
offsets = buffer.asLongBuffer();
try (DataInputStream dis = new DataInputStream(Files.newInputStream(indexFile.toPath()))) {
dis.readFully(buffer.array());
}
}
public int getRetainedMemorySize() {
// SPARK-33206: here the offsets' capacity is multiplied by 8 as offsets stores long values.
// Integer overflow won't be an issue here as long as the number of reducers is under
// (Integer.MAX_VALUE - INSTANCE_MEMORY_FOOTPRINT) / 8 - 1 = 268435432.
return (offsets.capacity() << 3) + INSTANCE_MEMORY_FOOTPRINT;
}
/**
* Get index offset for a particular reducer.
*/
public ShuffleIndexRecord getIndex(int reduceId) {
return getIndex(reduceId, reduceId + 1);
}
/**
* Get index offset for the reducer range of [startReduceId, endReduceId).
*/
public ShuffleIndexRecord getIndex(int startReduceId, int endReduceId) {
long offset = offsets.get(startReduceId);
long nextOffset = offsets.get(endReduceId);
return new ShuffleIndexRecord(offset, nextOffset - offset);
}
}
| ShuffleIndexInformation |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/core/context/ReactiveSecurityContextHolderTests.java | {
"start": 1258,
"end": 5778
} | class ____ {
@Test
public void getContextWhenEmpty() {
Mono<SecurityContext> context = ReactiveSecurityContextHolder.getContext();
// @formatter:off
StepVerifier.create(context)
.verifyComplete();
// @formatter:on
}
@Test
public void setContextAndGetContextThenEmitsContext() {
SecurityContext expectedContext = new SecurityContextImpl(
new TestingAuthenticationToken("user", "password", "ROLE_USER"));
Mono<SecurityContext> context = Mono.deferContextual(Mono::just)
.flatMap((c) -> ReactiveSecurityContextHolder.getContext())
.contextWrite(ReactiveSecurityContextHolder.withSecurityContext(Mono.just(expectedContext)));
// @formatter:off
StepVerifier.create(context)
.expectNext(expectedContext)
.verifyComplete();
// @formatter:on
}
@Test
public void demo() {
Authentication authentication = new TestingAuthenticationToken("user", "password", "ROLE_USER");
// @formatter:off
Mono<String> messageByUsername = ReactiveSecurityContextHolder.getContext()
.map(SecurityContext::getAuthentication)
.map(Authentication::getName)
.flatMap(this::findMessageByUsername)
.contextWrite(ReactiveSecurityContextHolder.withAuthentication(authentication));
StepVerifier.create(messageByUsername)
.expectNext("Hi user")
.verifyComplete();
// @formatter:on
}
private Mono<String> findMessageByUsername(String username) {
return Mono.just("Hi " + username);
}
@Test
public void setContextAndClearAndGetContextThenEmitsEmpty() {
SecurityContext expectedContext = new SecurityContextImpl(
new TestingAuthenticationToken("user", "password", "ROLE_USER"));
// @formatter:off
Mono<SecurityContext> context = Mono.deferContextual(Mono::just)
.flatMap((c) -> ReactiveSecurityContextHolder.getContext())
.contextWrite(ReactiveSecurityContextHolder.clearContext())
.contextWrite(ReactiveSecurityContextHolder.withSecurityContext(Mono.just(expectedContext)));
StepVerifier.create(context)
.verifyComplete();
// @formatter:on
}
@Test
public void setAuthenticationAndGetContextThenEmitsContext() {
Authentication expectedAuthentication = new TestingAuthenticationToken("user", "password", "ROLE_USER");
// @formatter:off
Mono<Authentication> authentication = Mono.deferContextual(Mono::just)
.flatMap((c) -> ReactiveSecurityContextHolder.getContext())
.map(SecurityContext::getAuthentication)
.contextWrite(ReactiveSecurityContextHolder.withAuthentication(expectedAuthentication));
StepVerifier.create(authentication)
.expectNext(expectedAuthentication)
.verifyComplete();
// @formatter:on
}
@Test
public void getContextWhenThreadFactoryIsPlatformThenPropagated() {
verifySecurityContextIsPropagated(Executors.defaultThreadFactory());
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void getContextWhenThreadFactoryIsVirtualThenPropagated() {
verifySecurityContextIsPropagated(new VirtualThreadTaskExecutor().getVirtualThreadFactory());
}
private static void verifySecurityContextIsPropagated(ThreadFactory threadFactory) {
Authentication authentication = new TestingAuthenticationToken("user", null);
// @formatter:off
Mono<Authentication> publisher = ReactiveSecurityContextHolder.getContext()
.map(SecurityContext::getAuthentication)
.contextWrite((context) -> ReactiveSecurityContextHolder.withAuthentication(authentication))
.subscribeOn(Schedulers.newSingle(threadFactory));
// @formatter:on
StepVerifier.create(publisher).expectNext(authentication).verifyComplete();
}
@Test
public void clearContextWhenThreadFactoryIsPlatformThenCleared() {
verifySecurityContextIsCleared(Executors.defaultThreadFactory());
}
@Test
@DisabledOnJre(JRE.JAVA_17)
public void clearContextWhenThreadFactoryIsVirtualThenCleared() {
verifySecurityContextIsCleared(new VirtualThreadTaskExecutor().getVirtualThreadFactory());
}
private static void verifySecurityContextIsCleared(ThreadFactory threadFactory) {
Authentication authentication = new TestingAuthenticationToken("user", null);
// @formatter:off
Mono<Authentication> publisher = ReactiveSecurityContextHolder.getContext()
.map(SecurityContext::getAuthentication)
.contextWrite(ReactiveSecurityContextHolder.clearContext())
.contextWrite((context) -> ReactiveSecurityContextHolder.withAuthentication(authentication))
.subscribeOn(Schedulers.newSingle(threadFactory));
// @formatter:on
StepVerifier.create(publisher).verifyComplete();
}
}
| ReactiveSecurityContextHolderTests |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/io/FinalizeOnMaster.java | {
"start": 1052,
"end": 1555
} | interface ____ {
/**
* The method is invoked on the master (JobManager) after all (parallel) instances of an
* OutputFormat finished.
*
* @param context The context to get finalization infos.
* @throws IOException The finalization may throw exceptions, which may cause the job to abort.
*/
void finalizeGlobal(FinalizationContext context) throws IOException;
/** A context that provides parallelism and finished attempts infos. */
@Public
| FinalizeOnMaster |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/RocksDBVersionedStoreSegmentValueFormatterTest.java | {
"start": 19851,
"end": 20660
} | class ____ {
final List<TestRecord> records;
final long nextTimestamp;
final long minTimestamp;
final boolean isDegenerate;
final String name;
TestCase(final String name, final long nextTimestamp, final TestRecord... records) {
this(name, nextTimestamp, Arrays.asList(records));
}
TestCase(final String name, final long nextTimestamp, final List<TestRecord> records) {
this.records = records;
this.nextTimestamp = nextTimestamp;
this.minTimestamp = records.get(records.size() - 1).timestamp;
this.isDegenerate = nextTimestamp == minTimestamp;
this.name = name;
}
@Override
public String toString() {
return name;
}
}
} | TestCase |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/naturalid/NaturalIdAndAssociationTest.java | {
"start": 2626,
"end": 2859
} | class ____ {
@Id
private Long id;
@NaturalId
private String isoCode;
public ZCurrencyEntity1() {
}
public ZCurrencyEntity1(Long id, String isoCode) {
this.id = id;
this.isoCode = isoCode;
}
}
}
| ZCurrencyEntity1 |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/AbstractTriggeringPolicy.java | {
"start": 1076,
"end": 1186
} | class ____ extends AbstractLifeCycle implements TriggeringPolicy {
// empty for now
}
| AbstractTriggeringPolicy |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/file/FileAssert_usingCharset_String_Test.java | {
"start": 908,
"end": 1226
} | class ____ extends FileAssertBaseTest {
@Override
protected FileAssert invoke_api_method() {
return assertions.usingCharset(otherCharset.name());
}
@Override
protected void verify_internal_effects() {
assertThat(getCharset(assertions)).isEqualTo(otherCharset);
}
}
| FileAssert_usingCharset_String_Test |
java | apache__spark | sql/api/src/main/java/org/apache/spark/sql/api/java/UDF18.java | {
"start": 981,
"end": 1277
} | interface ____<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R> extends Serializable {
R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) throws Exception;
}
| UDF18 |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/transaction/event/ReactiveTransactionalEventListenerTests.java | {
"start": 14779,
"end": 15010
} | interface ____ {
// Cannot use #data in condition due to dynamic proxy.
@TransactionalEventListener(condition = "!'SKIP'.equals(#p0)")
void handleAfterCommit(String data);
}
static | TransactionalComponentTestListenerInterface |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/JoinTableTest.java | {
"start": 3383,
"end": 3823
} | class ____ extends Account {
private BigDecimal creditLimit;
//Getters and setters are omitted for brevity
//end::entity-inheritance-joined-table-example[]
public BigDecimal getCreditLimit() {
return creditLimit;
}
public void setCreditLimit(BigDecimal creditLimit) {
this.creditLimit = creditLimit;
}
//tag::entity-inheritance-joined-table-example[]
}
//end::entity-inheritance-joined-table-example[]
}
| CreditAccount |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeInfoFactoryTest.java | {
"start": 10624,
"end": 11771
} | class ____ extends TypeInformation<MyFaulty> {
@Override
public boolean isBasicType() {
return false;
}
@Override
public boolean isTupleType() {
return false;
}
@Override
public int getArity() {
return 0;
}
@Override
public int getTotalFields() {
return 0;
}
@Override
public Class<MyFaulty> getTypeClass() {
return null;
}
@Override
public boolean isKeyType() {
return false;
}
@Override
public TypeSerializer<MyFaulty> createSerializer(SerializerConfig config) {
return null;
}
@Override
public String toString() {
return null;
}
@Override
public boolean equals(Object obj) {
return false;
}
@Override
public int hashCode() {
return 0;
}
@Override
public boolean canEqual(Object obj) {
return false;
}
}
public static | MyFaultyTypeInfo |
java | spring-projects__spring-boot | build-plugin/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/docs/RunningDocumentationTests.java | {
"start": 3461,
"end": 3706
} | class ____ {");
writer.println(" public static void main(String[] args) {");
writer.println(" System.out.println(ExampleApplication.class.getName());");
writer.println(" }");
writer.println("}");
}
}
}
| ExampleApplication |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/co/KeyedCoProcessFunction.java | {
"start": 2369,
"end": 5538
} | class ____<K, IN1, IN2, OUT> extends AbstractRichFunction {
private static final long serialVersionUID = 1L;
/**
* This method is called for each element in the first of the connected streams.
*
* <p>This function can output zero or more elements using the {@link Collector} parameter and
* also update internal state or set timers using the {@link Context} parameter.
*
* @param value The stream element
* @param ctx A {@link Context} that allows querying the timestamp of the element, querying the
* {@link TimeDomain} of the firing timer and getting a {@link TimerService} for registering
* timers and querying the time. The context is only valid during the invocation of this
* method, do not store it.
* @param out The collector to emit resulting elements to
* @throws Exception The function may throw exceptions which cause the streaming program to fail
* and go into recovery.
*/
public abstract void processElement1(IN1 value, Context ctx, Collector<OUT> out)
throws Exception;
/**
* This method is called for each element in the second of the connected streams.
*
* <p>This function can output zero or more elements using the {@link Collector} parameter and
* also update internal state or set timers using the {@link Context} parameter.
*
* @param value The stream element
* @param ctx A {@link Context} that allows querying the timestamp of the element, querying the
* {@link TimeDomain} of the firing timer and getting a {@link TimerService} for registering
* timers and querying the time. The context is only valid during the invocation of this
* method, do not store it.
* @param out The collector to emit resulting elements to
* @throws Exception The function may throw exceptions which cause the streaming program to fail
* and go into recovery.
*/
public abstract void processElement2(IN2 value, Context ctx, Collector<OUT> out)
throws Exception;
/**
* Called when a timer set using {@link TimerService} fires.
*
* @param timestamp The timestamp of the firing timer.
* @param ctx An {@link OnTimerContext} that allows querying the timestamp of the firing timer,
* querying the {@link TimeDomain} of the firing timer and getting a {@link TimerService}
* for registering timers and querying the time. The context is only valid during the
* invocation of this method, do not store it.
* @param out The collector for returning result values.
* @throws Exception This method may throw exceptions. Throwing an exception will cause the
* operation to fail and may trigger recovery.
*/
public void onTimer(long timestamp, OnTimerContext ctx, Collector<OUT> out) throws Exception {}
/**
* Information available in an invocation of {@link #processElement1(Object, Context,
* Collector)}/ {@link #processElement2(Object, Context, Collector)} or {@link #onTimer(long,
* OnTimerContext, Collector)}.
*/
public abstract | KeyedCoProcessFunction |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/spi/SqlAstCreationState.java | {
"start": 564,
"end": 1669
} | interface ____ {
SqlAstCreationContext getCreationContext();
SqlAstProcessingState getCurrentProcessingState();
SqlExpressionResolver getSqlExpressionResolver();
FromClauseAccess getFromClauseAccess();
SqlAliasBaseGenerator getSqlAliasBaseGenerator();
LoadQueryInfluencers getLoadQueryInfluencers();
default SqmCreationContext getSqmCreationContext() {
return getCreationContext().getSessionFactory().getQueryEngine().getCriteriaBuilder();
}
boolean applyOnlyLoadByKeyFilters();
void registerLockMode(String identificationVariable, LockMode explicitLockMode);
/**
* This callback is for handling of filters and is necessary to allow correct treat optimizations.
*/
@Internal
default void registerEntityNameUsage(
TableGroup tableGroup,
EntityNameUse entityNameUse,
String hibernateEntityName) {
// No-op
}
@Internal
default boolean supportsEntityNameUsage() {
return false;
}
@Internal
default void applyOrdering(TableGroup tableGroup, OrderByFragment orderByFragment) {
}
default boolean isProcedureOrNativeQuery(){
return false;
}
}
| SqlAstCreationState |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/legacy/Foo.java | {
"start": 352,
"end": 444
} | class ____ implements FooProxy, Serializable {
private static int count=0;
public static | Foo |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/filter/RestFilter.java | {
"start": 1057,
"end": 1265
} | interface ____ extends RestExtension {
default void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) throws Exception {
chain.doFilter(request, response);
}
| RestFilter |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexMappingUpdateServiceIT.java | {
"start": 1829,
"end": 6594
} | class ____ extends ESIntegTestCase {
@Before
public void beforeEach() {
TestSystemIndexDescriptor.useNewMappings.set(false);
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return CollectionUtils.appendToCopy(super.nodePlugins(), TestSystemIndexPlugin.class);
}
/**
* Check that if the SystemIndexManager finds a managed index with out-of-date mappings, then
* the manager updates those mappings.
*/
public void testSystemIndexManagerUpgradesMappings() throws Exception {
internalCluster().startNodes(1);
// Trigger the creation of the system index
assertAcked(prepareCreate(INDEX_NAME));
ensureGreen(INDEX_NAME);
assertMappingsAndSettings(TestSystemIndexDescriptor.getOldMappings());
// Poke the test descriptor so that the mappings are now "updated"
TestSystemIndexDescriptor.useNewMappings.set(true);
// Cause a cluster state update, so that the SystemIndexManager will update the mappings in our index
triggerClusterStateUpdates();
assertBusy(() -> assertMappingsAndSettings(TestSystemIndexDescriptor.getNewMappings()));
}
/**
* Check that if the SystemIndexManager finds a managed index with mappings that claim to be newer than
* what it expects, then those mappings are left alone.
*/
public void testSystemIndexManagerLeavesNewerMappingsAlone() throws Exception {
TestSystemIndexDescriptor.useNewMappings.set(true);
internalCluster().startNodes(1);
// Trigger the creation of the system index
assertAcked(prepareCreate(INDEX_NAME));
ensureGreen(INDEX_NAME);
assertMappingsAndSettings(TestSystemIndexDescriptor.getNewMappings());
// Poke the test descriptor so that the mappings are now out-dated.
TestSystemIndexDescriptor.useNewMappings.set(false);
// Cause a cluster state update, so that the SystemIndexManager's listener will execute
triggerClusterStateUpdates();
// Mappings should be unchanged.
assertBusy(() -> assertMappingsAndSettings(TestSystemIndexDescriptor.getNewMappings()));
}
/**
* Ensures that we can clear any blocks that get set on managed system indices.
*
* See https://github.com/elastic/elasticsearch/issues/80814
*/
public void testBlocksCanBeClearedFromManagedSystemIndices() throws Exception {
internalCluster().startNodes(1);
// Trigger the creation of the system index
assertAcked(prepareCreate(INDEX_NAME));
ensureGreen(INDEX_NAME);
for (IndexMetadata.APIBlock blockType : IndexMetadata.APIBlock.values()) {
enableIndexBlock(INDEX_NAME, blockType.settingName());
updateIndexSettings(Settings.builder().put(blockType.settingName(), false), INDEX_NAME);
}
}
/**
* Performs a cluster state update in order to trigger any cluster state listeners - specifically, SystemIndexManager.
*/
private void triggerClusterStateUpdates() {
final String name = randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
indicesAdmin().putTemplate(new PutIndexTemplateRequest(name).patterns(List.of(name))).actionGet();
}
/**
* Fetch the mappings and settings for {@link TestSystemIndexDescriptor#INDEX_NAME} and verify that they match the expected values.
*/
private void assertMappingsAndSettings(String expectedMappings) {
final GetMappingsResponse getMappingsResponse = indicesAdmin().getMappings(
new GetMappingsRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME)
).actionGet();
final Map<String, MappingMetadata> mappings = getMappingsResponse.getMappings();
assertThat(
"Expected mappings to contain a key for [" + PRIMARY_INDEX_NAME + "], but found: " + mappings.toString(),
mappings.containsKey(PRIMARY_INDEX_NAME),
equalTo(true)
);
final Map<String, Object> sourceAsMap = mappings.get(PRIMARY_INDEX_NAME).getSourceAsMap();
assertThat(sourceAsMap, equalTo(XContentHelper.convertToMap(XContentType.JSON.xContent(), expectedMappings, false)));
final GetSettingsResponse getSettingsResponse = indicesAdmin().getSettings(
new GetSettingsRequest(TEST_REQUEST_TIMEOUT).indices(INDEX_NAME)
).actionGet();
final Settings actual = getSettingsResponse.getIndexToSettings().get(PRIMARY_INDEX_NAME);
for (String settingName : TestSystemIndexDescriptor.SETTINGS.keySet()) {
assertThat(actual.get(settingName), equalTo(TestSystemIndexDescriptor.SETTINGS.get(settingName)));
}
}
}
| SystemIndexMappingUpdateServiceIT |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/FlightSegmentId.java | {
"start": 237,
"end": 1377
} | class ____ implements Serializable {
private Integer flight;
private Integer segmentNumber;
public FlightSegmentId() {
}
public FlightSegmentId(Integer flight, Integer segmentNumber) {
this.flight = flight;
this.segmentNumber = segmentNumber;
}
public Integer getFlight() {
return flight;
}
public void setFlight(Integer flight) {
this.flight = flight;
}
public Integer getSegmentNumber() {
return segmentNumber;
}
public void setSegmentNumber(Integer segmentNumber) {
this.segmentNumber = segmentNumber;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FlightSegmentId that = (FlightSegmentId) o;
return Objects.equals(flight, that.flight) && Objects.equals(segmentNumber, that.segmentNumber);
}
@Override
public int hashCode() {
return Objects.hash(flight, segmentNumber);
}
@Override
public String toString() {
return new StringJoiner(", ", FlightSegmentId.class.getSimpleName() + "[", "]")
.add("flight=" + flight)
.add("segmentNumber=" + segmentNumber)
.toString();
}
}
| FlightSegmentId |
java | google__error-prone | test_helpers/src/test/java/com/google/errorprone/BugCheckerRefactoringTestHelperTest.java | {
"start": 4390,
"end": 4840
} | class ____ {
public Object foo() {
Integer i = 2 + 1;
return null;
}
}
""")
.doTest(TestMode.TEXT_MATCH);
}
@Test
public void replaceTextMatchFail() {
assertThrows(
AssertionError.class,
() ->
helper
.addInputLines(
"in/Test.java",
"""
public | Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java | {
"start": 5251,
"end": 7108
} | class ____ implements Writeable, ToXContentFragment {
private final Map<String, Recording> recordings;
public Stats(Map<String, Recording> recordings) {
this.recordings = recordings;
}
public Map<String, Recording> getRecordings() {
return recordings;
}
public Stats(StreamInput in) throws IOException {
this(in.readOrderedMap(StreamInput::readString, Recording::new));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("cluster_applier_stats");
builder.startArray("recordings");
for (Map.Entry<String, Recording> entry : recordings.entrySet()) {
builder.startObject();
builder.field("name", entry.getKey());
String name = "cumulative_execution";
builder.field(name + "_count", entry.getValue().count);
builder.humanReadableField(name + "_time_millis", name + "_time", TimeValue.timeValueMillis(entry.getValue().sum));
builder.endObject();
}
builder.endArray();
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeMap(recordings, StreamOutput::writeWriteable);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Stats stats = (Stats) o;
return Objects.equals(recordings, stats.recordings);
}
@Override
public int hashCode() {
return Objects.hash(recordings);
}
public static | Stats |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLRefreshTableStatement.java | {
"start": 251,
"end": 1271
} | class ____ extends SQLStatementImpl {
private SQLExpr name;
private List<SQLAssignItem> partitions;
public SQLRefreshTableStatement() {
partitions = new ArrayList<>();
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
acceptChild(visitor, partitions);
}
visitor.endVisit(this);
}
public SQLExpr getName() {
return name;
}
public void setName(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.name = x;
}
public List<SQLAssignItem> getPartitions() {
return partitions;
}
public void setPartitions(List<SQLAssignItem> partitions) {
this.partitions = partitions;
}
public void addPartition(SQLAssignItem partition) {
if (partition != null) {
partition.setParent(this);
}
this.partitions.add(partition);
}
}
| SQLRefreshTableStatement |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/LoadImageUpdateEvent.java | {
"start": 957,
"end": 1880
} | class ____ extends ProgressUpdateEvent {
private final @Nullable String stream;
private final @Nullable ErrorDetail errorDetail;
@JsonCreator
public LoadImageUpdateEvent(@Nullable String stream, String status, ProgressDetail progressDetail, String progress,
@Nullable ErrorDetail errorDetail) {
super(status, progressDetail, progress);
this.stream = stream;
this.errorDetail = errorDetail;
}
/**
* Return the stream response or {@code null} if no response is available.
* @return the stream response.
*/
public @Nullable String getStream() {
return this.stream;
}
/**
* Return the error detail or {@code null} if no error occurred.
* @return the error detail, if any
* @since 3.2.12
*/
public @Nullable ErrorDetail getErrorDetail() {
return this.errorDetail;
}
/**
* Details of an error embedded in a response stream.
*
* @since 3.2.12
*/
public static | LoadImageUpdateEvent |
java | elastic__elasticsearch | plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureSeedHostsProvider.java | {
"start": 1940,
"end": 2106
} | class ____ implements SeedHostsProvider {
private static final Logger logger = LogManager.getLogger(AzureSeedHostsProvider.class);
public | AzureSeedHostsProvider |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/protocol/AbstractExporter.java | {
"start": 1076,
"end": 2348
} | class ____<T> implements Exporter<T> {
protected final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(getClass());
private final Invoker<T> invoker;
private volatile boolean unexported = false;
public AbstractExporter(Invoker<T> invoker) {
if (invoker == null) {
throw new IllegalStateException("service invoker == null");
}
if (invoker.getInterface() == null) {
throw new IllegalStateException("service type == null");
}
if (invoker.getUrl() == null) {
throw new IllegalStateException("service url == null");
}
this.invoker = invoker;
}
@Override
public Invoker<T> getInvoker() {
return invoker;
}
@Override
public final void unexport() {
if (unexported) {
return;
}
unexported = true;
getInvoker().destroy();
afterUnExport();
}
@Override
public void register() {}
@Override
public void unregister() {}
/**
* subclasses need to override this method to destroy resources.
*/
public void afterUnExport() {}
@Override
public String toString() {
return getInvoker().toString();
}
}
| AbstractExporter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RxReturnValueIgnoredTest.java | {
"start": 1993,
"end": 2190
} | class ____<T> {}
""" //
)
.addSourceLines(
"rx2/Completable.java",
"""
package io.reactivex;
public | Single |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/function/FailableLongConsumer.java | {
"start": 1110,
"end": 2341
} | interface ____<E extends Throwable> {
/** NOP singleton */
@SuppressWarnings("rawtypes")
FailableLongConsumer NOP = t -> { /* NOP */ };
/**
* Gets the NOP singleton.
*
* @param <E> The kind of thrown exception or error.
* @return The NOP singleton.
*/
@SuppressWarnings("unchecked")
static <E extends Throwable> FailableLongConsumer<E> nop() {
return NOP;
}
/**
* Accepts the given arguments.
*
* @param object the parameter for the consumable to accept
* @throws E Thrown when the consumer fails.
*/
void accept(long object) throws E;
/**
* Returns a composed {@link FailableLongConsumer} like {@link LongConsumer#andThen(LongConsumer)}.
*
* @param after the operation to perform after this one.
* @return a composed {@link FailableLongConsumer} like {@link LongConsumer#andThen(LongConsumer)}.
* @throws NullPointerException if {@code after} is null
*/
default FailableLongConsumer<E> andThen(final FailableLongConsumer<E> after) {
Objects.requireNonNull(after);
return (final long t) -> {
accept(t);
after.accept(t);
};
}
}
| FailableLongConsumer |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/DisplayNameGenerator.java | {
"start": 9687,
"end": 10434
} | class ____ extends Standard {
static final DisplayNameGenerator INSTANCE = new Simple();
public Simple() {
}
@Override
public String generateDisplayNameForMethod(List<Class<?>> enclosingInstanceTypes, Class<?> testClass,
Method testMethod) {
String displayName = testMethod.getName();
if (hasParameters(testMethod)) {
displayName += ' ' + parameterTypesAsString(testMethod);
}
return displayName;
}
private static boolean hasParameters(Method method) {
return method.getParameterCount() > 0;
}
}
/**
* {@code DisplayNameGenerator} that replaces underscores with spaces.
*
* <p>This generator extends the functionality of {@link Simple} by
* replacing all underscores ({@code '_'}) found in | Simple |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/discovery/DiskDisruptionIT.java | {
"start": 2635,
"end": 6827
} | class ____ extends FilterFileSystemProvider {
AtomicBoolean injectFailures = new AtomicBoolean();
DisruptTranslogFileSystemProvider(FileSystem inner) {
super("disrupttranslog://", inner);
}
@Override
public FileChannel newFileChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
if (injectFailures.get() && path.toString().endsWith(".ckp")) {
// prevents checkpoint file to be updated
throw new IOException("fake IOException");
}
return super.newFileChannel(path, options, attrs);
}
}
/**
* This test checks that all operations below the global checkpoint are properly persisted.
* It simulates a full power outage by preventing translog checkpoint files to be written and restart the cluster. This means that
* all un-fsynced data will be lost.
*/
public void testGlobalCheckpointIsSafe() throws Exception {
startCluster(rarely() ? 5 : 3);
final int numberOfShards = 1 + randomInt(2);
assertAcked(
prepareCreate("test").setSettings(
Settings.builder()
.put(indexSettings())
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2))
)
);
ensureGreen();
AtomicBoolean stopGlobalCheckpointFetcher = new AtomicBoolean();
Map<Integer, Long> shardToGcp = new ConcurrentHashMap<>();
for (int i = 0; i < numberOfShards; i++) {
shardToGcp.put(i, SequenceNumbers.NO_OPS_PERFORMED);
}
final Thread globalCheckpointSampler = new Thread(() -> {
while (stopGlobalCheckpointFetcher.get() == false) {
try {
for (ShardStats shardStats : indicesAdmin().prepareStats("test").clear().get().getShards()) {
final int shardId = shardStats.getShardRouting().id();
final long globalCheckpoint = shardStats.getSeqNoStats().getGlobalCheckpoint();
shardToGcp.compute(shardId, (i, v) -> Math.max(v, globalCheckpoint));
}
} catch (Exception e) {
// ignore
logger.debug("failed to fetch shard stats", e);
}
}
});
globalCheckpointSampler.start();
try (
BackgroundIndexer indexer = new BackgroundIndexer(
"test",
client(),
-1,
RandomizedTest.scaledRandomIntBetween(2, 5),
false,
random()
)
) {
indexer.setRequestTimeout(TimeValue.ZERO);
indexer.setIgnoreIndexingFailures(true);
indexer.setFailureAssertion(e -> {});
indexer.start(-1);
waitForDocs(randomIntBetween(1, 100), indexer);
logger.info("injecting failures");
injectTranslogFailures();
logger.info("stopping indexing");
}
logger.info("full cluster restart");
internalCluster().fullRestart(new InternalTestCluster.RestartCallback() {
@Override
public void onAllNodesStopped() {
logger.info("stopping failures");
stopTranslogFailures();
}
});
stopGlobalCheckpointFetcher.set(true);
logger.info("waiting for global checkpoint sampler");
globalCheckpointSampler.join();
logger.info("waiting for green");
ensureGreen("test");
for (ShardStats shardStats : indicesAdmin().prepareStats("test").clear().get().getShards()) {
final int shardId = shardStats.getShardRouting().id();
final long maxSeqNo = shardStats.getSeqNoStats().getMaxSeqNo();
if (shardStats.getShardRouting().active()) {
assertThat(maxSeqNo, greaterThanOrEqualTo(shardToGcp.get(shardId)));
}
}
}
}
| DisruptTranslogFileSystemProvider |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java | {
"start": 2389,
"end": 33118
} | class ____ extends ESTestCase {
private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(30);
private DeterministicTaskQueue taskQueue;
@Before
public void init() throws Exception {
taskQueue = new DeterministicTaskQueue();
}
public void testSend_CallsSenderAgain_AfterValidateResponseThrowsAnException() throws IOException {
var httpClient = mock(HttpClient.class);
var httpResponse = mockHttpResponse();
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(httpResponse, new byte[0]));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
doThrow(new RetryException(true, "failed")).doNothing().when(handler).validateResponse(any(), any(), any(), any());
// Mockito.thenReturn() does not compile when returning a
// bounded wild card list, thenAnswer must be used instead.
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_CallsSenderAgain_WhenAFailureStatusCodeIsReturned() throws IOException {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(300).thenReturn(200);
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(statusLine);
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
var handler = new AlwaysRetryingResponseHandler("test", result -> inferenceResults);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_CallsSenderAgain_WhenParsingFailsOnce() throws IOException {
var httpClient = mock(HttpClient.class);
var httpResponse = mockHttpResponse();
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenThrow(new RetryException(true, "failed"))
.thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_DoesNotCallSenderAgain_WhenParsingFailsWithNonRetryableException() throws IOException {
var httpClient = mock(HttpClient.class);
var httpResponse = mockHttpResponse();
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(httpResponse, new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenThrow(new IllegalStateException("failed"))
.thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 0);
var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
assertThat(thrownException.getMessage(), is("failed"));
verify(httpClient, times(1)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce() throws IOException {
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onFailure(new RetryException(true, "failed"));
return Void.TYPE;
}).doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce_WithContentTooLargeException() throws IOException {
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onFailure(new ContentTooLargeException(new IllegalStateException("failed")));
return Void.TYPE;
}).doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnceWithConnectionClosedException() throws IOException {
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onFailure(new ConnectionClosedException("failed"));
return Void.TYPE;
}).doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' }));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
verify(httpClient, times(2)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_ReturnsFailure_WhenHttpResultListenerCallsOnFailureOnceWithUnknownHostException() throws IOException {
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
listener.onFailure(new UnknownHostException("failed"));
return Void.TYPE;
}).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 0);
var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
assertThat(thrownException.getMessage(), is("Invalid host [null], please check that the URL is correct."));
verify(httpClient, times(1)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_ReturnsElasticsearchExceptionFailure_WhenTheHttpClientThrowsAnIllegalStateException() throws IOException {
var httpClient = mock(HttpClient.class);
doAnswer(invocation -> { throw new IllegalStateException("failed"); }).when(httpClient).send(any(), any(), any());
var inferenceResults = mock(InferenceServiceResults.class);
Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
var handler = mock(ResponseHandler.class);
when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
var retrier = createRetrier(httpClient);
var listener = new PlainActionFuture<InferenceServiceResults>();
executeTasks(() -> retrier.send(mock(Logger.class), mockRequest("id"), () -> false, handler, listener), 0);
var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));
assertThat(thrownException.getMessage(), is("Http client failed to send request from inference entity id [id]"));
verify(httpClient, times(1)).send(any(), any(), any());
verifyNoMoreInteractions(httpClient);
}
public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterOneRetry() throws IOException {
    // The first validateResponse call throws a retryable RetryException (one retry runs);
    // the second throws a plain IllegalStateException, which stops retrying. The failure
    // surfaced to the listener is the second exception with the first attached as suppressed.
    var httpResponse = mock(HttpResponse.class);
    when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class));
    var sender = mock(HttpClient.class);
    // The client always "succeeds" at the transport level by delivering an empty HttpResult;
    // failures in this test come from response validation, not the wire.
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
        listener.onResponse(new HttpResult(httpResponse, new byte[0]));
        return Void.TYPE;
    }).when(sender).send(any(), any(), any());
    var inferenceResults = mock(InferenceServiceResults.class);
    Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
    var handler = mock(ResponseHandler.class);
    // Consecutive stubbing: retryable failure on the first call, non-retryable on the second.
    doThrow(new RetryException(true, "failed")).doThrow(new IllegalStateException("failed again"))
        .when(handler)
        .validateResponse(any(), any(), any(), any());
    when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
    var retrier = createRetrier(sender);
    var listener = new PlainActionFuture<InferenceServiceResults>();
    executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
    var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
    assertThat(thrownException.getMessage(), is("failed again"));
    assertThat(thrownException.getSuppressed().length, is(1));
    assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed"));
    verify(sender, times(2)).send(any(), any(), any());
    verifyNoMoreInteractions(sender);
}
public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnElasticsearchException_AfterOneRetry() throws IOException {
    // Same shape as the previous test, but the second validateResponse failure is a
    // non-retryable RetryException (retryable=false). Retrying stops and the second
    // exception is surfaced with the first one suppressed.
    var httpResponse = mock(HttpResponse.class);
    when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class));
    var httpClient = mock(HttpClient.class);
    // Transport always succeeds with an empty body; validation is the failure source.
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
        listener.onResponse(new HttpResult(httpResponse, new byte[0]));
        return Void.TYPE;
    }).when(httpClient).send(any(), any(), any());
    var inferenceResults = mock(InferenceServiceResults.class);
    Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
    var handler = mock(ResponseHandler.class);
    // Consecutive stubbing: retryable first, non-retryable second.
    doThrow(new RetryException(true, "failed")).doThrow(new RetryException(false, "failed again"))
        .when(handler)
        .validateResponse(any(), any(), any(), any());
    when(handler.parseResult(any(Request.class), any(HttpResult.class))).thenAnswer(answer);
    var retrier = createRetrier(httpClient);
    var listener = new PlainActionFuture<InferenceServiceResults>();
    executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
    var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT));
    assertThat(thrownException.getMessage(), is("failed again"));
    assertThat(thrownException.getSuppressed().length, is(1));
    assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed"));
    verify(httpClient, times(2)).send(any(), any(), any());
    verifyNoMoreInteractions(httpClient);
}
public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_AfterOneRetry() throws IOException {
    // The client fails asynchronously via the listener: first with a retryable
    // RetryException (one retry runs), then with a non-retryable one. The final failure
    // is the non-retryable exception with the first failure attached as suppressed.
    var httpClient = mock(HttpClient.class);
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
        listener.onFailure(new RetryException(true, "failed"));
        return Void.TYPE;
    }).doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
        listener.onFailure(new RetryException(false, "failed again"));
        return Void.TYPE;
    }).when(httpClient).send(any(), any(), any());
    var handler = mock(ResponseHandler.class);
    var retrier = createRetrier(httpClient);
    var listener = new PlainActionFuture<InferenceServiceResults>();
    executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 1);
    var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT));
    assertThat(thrownException.getMessage(), is("failed again"));
    assertThat(thrownException.getSuppressed().length, is(1));
    assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed"));
    verify(httpClient, times(2)).send(any(), any(), any());
    verifyNoMoreInteractions(httpClient);
}
public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_WithNonRetryableException() throws IOException {
    // A failure type other than RetryException delivered through the listener is treated
    // as non-retryable: exactly one send, the exception surfaced as-is, nothing suppressed.
    var httpClient = mock(HttpClient.class);
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
        listener.onFailure(new IllegalStateException("failed"));
        return Void.TYPE;
    }).when(httpClient).send(any(), any(), any());
    var handler = mock(ResponseHandler.class);
    var retrier = createRetrier(httpClient);
    var listener = new PlainActionFuture<InferenceServiceResults>();
    executeTasks(() -> retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener), 0);
    var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
    assertThat(thrownException.getMessage(), is("failed"));
    assertThat(thrownException.getSuppressed().length, is(0));
    verify(httpClient, times(1)).send(any(), any(), any());
    verifyNoMoreInteractions(httpClient);
}
public void testStreamSuccess() throws IOException {
    // Streaming happy path: when the request is streaming AND the handler can handle
    // streamed responses, the retrier uses HttpClient#stream (never #send) and hands a
    // Flow.Publisher of results to ResponseHandler#parseResult.
    var httpClient = mock(HttpClient.class);
    StreamingHttpResult streamingHttpResult = new StreamingHttpResult(mockHttpResponse(), randomPublisher());
    doAnswer(ans -> {
        ActionListener<StreamingHttpResult> listener = ans.getArgument(2);
        listener.onResponse(streamingHttpResult);
        return null;
    }).when(httpClient).stream(any(), any(), any());
    var retrier = createRetrier(httpClient);
    ActionListener<InferenceServiceResults> listener = mock();
    var request = mockRequest();
    when(request.isStreaming()).thenReturn(true);
    var responseHandler = mock(ResponseHandler.class);
    when(responseHandler.canHandleStreamingResponses()).thenReturn(true);
    executeTasks(() -> retrier.send(mock(Logger.class), request, () -> false, responseHandler, listener), 0);
    verify(httpClient, times(1)).stream(any(), any(), any());
    verifyNoMoreInteractions(httpClient);
    verify(responseHandler, times(1)).parseResult(any(), ArgumentMatchers.<Flow.Publisher<HttpResult>>any());
}
/**
 * Builds a publisher that emits between 1 and 4 random 3-byte chunks, one chunk per
 * {@code request(n)} call, and then completes.
 */
private Flow.Publisher<byte[]> randomPublisher() {
    var remainingChunks = new AtomicInteger(randomIntBetween(1, 4));
    return subscriber -> subscriber.onSubscribe(new Flow.Subscription() {
        @Override
        public void request(long n) {
            // Note: emits at most one chunk per request, regardless of n.
            if (remainingChunks.getAndDecrement() <= 0) {
                subscriber.onComplete();
            } else {
                subscriber.onNext(randomByteArrayOfLength(3));
            }
        }

        @Override
        public void cancel() {
            // Never cancelled in these tests.
        }
    });
}
public void testStream_ResponseHandlerDoesNotHandleStreams() throws IOException {
    // Even for a streaming request, a handler that reports it cannot handle streamed
    // responses forces the retrier onto the non-streaming HttpClient#send path, and the
    // parsed (non-streaming) result is returned to the listener.
    var httpClient = mock(HttpClient.class);
    doAnswer(ans -> {
        ActionListener<HttpResult> listener = ans.getArgument(2);
        listener.onResponse(new HttpResult(mock(), new byte[0]));
        return null;
    }).when(httpClient).send(any(), any(), any());
    var expectedResponse = mock(InferenceServiceResults.class);
    var retrier = createRetrier(httpClient);
    var listener = new PlainActionFuture<InferenceServiceResults>();
    var request = mockRequest();
    when(request.isStreaming()).thenReturn(true);
    var responseHandler = mock(ResponseHandler.class);
    when(responseHandler.parseResult(any(Request.class), any(HttpResult.class))).thenReturn(expectedResponse);
    when(responseHandler.canHandleStreamingResponses()).thenReturn(false);
    executeTasks(() -> retrier.send(mock(Logger.class), request, () -> false, responseHandler, listener), 0);
    var actualResponse = listener.actionGet(TIMEOUT);
    verify(httpClient, times(1)).send(any(), any(), any());
    verifyNoMoreInteractions(httpClient);
    assertThat(actualResponse, sameInstance(expectedResponse));
}
public void testSend_DoesNotRetryIndefinitely() throws IOException {
    // Uses a real thread pool (not the deterministic task queue) so the retry scheduling
    // actually executes; the client fails every attempt with a retryable connection error.
    var threadPool = new TestThreadPool(getTestName());
    try {
        var httpClient = mock(HttpClient.class);
        doAnswer(invocation -> {
            @SuppressWarnings("unchecked")
            ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
            // respond with a retryable exception
            listener.onFailure(new ConnectionClosedException("failed"));
            return Void.TYPE;
        }).when(httpClient).send(any(), any(), any());
        var handler = mock(ResponseHandler.class);
        var retrier = new RetryingHttpSender(
            httpClient,
            mock(ThrottlerManager.class),
            createDefaultRetrySettings(),
            threadPool,
            EsExecutors.DIRECT_EXECUTOR_SERVICE
        );
        var listener = new PlainActionFuture<InferenceServiceResults>();
        retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener);
        // Assert that the retrying sender stopped after max retries even though the exception is retryable
        var thrownException = expectThrows(UncategorizedExecutionException.class, () -> listener.actionGet(TIMEOUT));
        assertThat(thrownException.getCause(), instanceOf(ConnectionClosedException.class));
        assertThat(thrownException.getMessage(), is("Failed execution"));
        assertThat(thrownException.getSuppressed().length, is(0));
        verify(httpClient, times(RetryingHttpSender.MAX_RETIES)).send(any(), any(), any());
        verifyNoMoreInteractions(httpClient);
    } finally {
        // Always tear down the real thread pool, even on assertion failure.
        terminate(threadPool);
    }
}
public void testStream_DoesNotRetryIndefinitely() throws IOException {
    // Streaming variant of the retry cap test: every stream attempt fails with a
    // retryable connection error, and the sender must stop at the max-retry limit.
    var threadPool = new TestThreadPool(getTestName());
    try {
        var httpClient = mock(HttpClient.class);
        doAnswer(ans -> {
            ActionListener<StreamingHttpResult> listener = ans.getArgument(2);
            listener.onFailure(new ConnectionClosedException("failed"));
            return null;
        }).when(httpClient).stream(any(), any(), any());
        var handler = mock(ResponseHandler.class);
        when(handler.canHandleStreamingResponses()).thenReturn(true);
        var retrier = new RetryingHttpSender(
            httpClient,
            mock(ThrottlerManager.class),
            createDefaultRetrySettings(),
            threadPool,
            EsExecutors.DIRECT_EXECUTOR_SERVICE
        );
        var listener = new PlainActionFuture<InferenceServiceResults>();
        var request = mockRequest();
        when(request.isStreaming()).thenReturn(true);
        retrier.send(mock(Logger.class), request, () -> false, handler, listener);
        // Assert that the retrying sender stopped after max retries even though the exception is retryable
        var thrownException = expectThrows(UncategorizedExecutionException.class, () -> listener.actionGet(TIMEOUT));
        assertThat(thrownException.getCause(), instanceOf(ConnectionClosedException.class));
        assertThat(thrownException.getMessage(), is("Failed execution"));
        assertThat(thrownException.getSuppressed().length, is(0));
        verify(httpClient, times(RetryingHttpSender.MAX_RETIES)).stream(any(), any(), any());
        verifyNoMoreInteractions(httpClient);
    } finally {
        // Always tear down the real thread pool, even on assertion failure.
        terminate(threadPool);
    }
}
public void testSend_DoesNotRetryIndefinitely_WithAlwaysRetryingResponseHandler() throws IOException {
    // The retry cap must also hold when the ResponseHandler itself (not just the
    // transport failure) keeps requesting retries.
    var threadPool = new TestThreadPool(getTestName());
    try {
        var httpClient = mock(HttpClient.class);
        doAnswer(invocation -> {
            @SuppressWarnings("unchecked")
            ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[2];
            listener.onFailure(new ConnectionClosedException("failed"));
            return Void.TYPE;
        }).when(httpClient).send(any(), any(), any());
        // This handler will always tell the sender to retry
        var handler = createRetryingResponseHandler();
        var retrier = new RetryingHttpSender(
            httpClient,
            mock(ThrottlerManager.class),
            createDefaultRetrySettings(),
            threadPool,
            EsExecutors.DIRECT_EXECUTOR_SERVICE
        );
        var listener = new PlainActionFuture<InferenceServiceResults>();
        retrier.send(mock(Logger.class), mockRequest(), () -> false, handler, listener);
        // Assert that the retrying sender stopped after max retries
        var thrownException = expectThrows(UncategorizedExecutionException.class, () -> listener.actionGet(TIMEOUT));
        assertThat(thrownException.getCause(), instanceOf(ConnectionClosedException.class));
        assertThat(thrownException.getMessage(), is("Failed execution"));
        assertThat(thrownException.getSuppressed().length, is(0));
        verify(httpClient, times(RetryingHttpSender.MAX_RETIES)).send(any(), any(), any());
        verifyNoMoreInteractions(httpClient);
    } finally {
        // Always tear down the real thread pool, even on assertion failure.
        terminate(threadPool);
    }
}
/** Builds an {@link HttpResponse} stub whose status line reports HTTP 200. */
private static HttpResponse mockHttpResponse() {
    var okStatus = mock(StatusLine.class);
    var response = mock(HttpResponse.class);
    when(response.getStatusLine()).thenReturn(okStatus);
    when(okStatus.getStatusCode()).thenReturn(200);
    return response;
}
/**
 * Schedules {@code runnable} on the deterministic task queue, runs the initial attempt,
 * then advances the queue's clock and drains it once per expected retry.
 */
private void executeTasks(Runnable runnable, int retries) {
    taskQueue.scheduleNow(runnable);
    // Drain the queue so the attempt scheduled above actually runs.
    taskQueue.runAllRunnableTasks();
    int remaining = retries;
    while (remaining-- > 0) {
        // Retries are scheduled with a delay, so move the clock forward before draining.
        taskQueue.advanceTime();
        taskQueue.runAllRunnableTasks();
    }
}
private static Request mockRequest() {
    // Convenience overload using a fixed inference entity id.
    return mockRequest("inferenceEntityId");
}
/**
 * Builds a {@link Request} mock carrying the given inference entity id. Truncation
 * returns the same mock so retries after truncation reuse identical stubbing.
 */
private static Request mockRequest(String inferenceEntityId) {
    var mockedRequest = mock(Request.class);
    when(mockedRequest.getInferenceEntityId()).thenReturn(inferenceEntityId);
    when(mockedRequest.createHttpRequest()).thenReturn(HttpRequestTests.createMock(inferenceEntityId));
    when(mockedRequest.truncate()).thenReturn(mockedRequest);
    return mockedRequest;
}
/**
 * Builds a sender over the given client that uses the deterministic task-queue thread
 * pool for scheduling and runs work inline on the direct executor.
 */
private RetryingHttpSender createRetrier(HttpClient httpClient) {
    var throttler = mock(ThrottlerManager.class);
    var retrySettings = createDefaultRetrySettings();
    return new RetryingHttpSender(httpClient, throttler, retrySettings, taskQueue.getThreadPool(), EsExecutors.DIRECT_EXECUTOR_SERVICE);
}
private ResponseHandler createRetryingResponseHandler() {
    // Returns a response handler that always asks the sender to retry (validate and
    // parse both throw retryable RetryExceptions). It never needs to parse a real
    // response because it is only used for testing failed requests.
    return new ResponseHandler() {
        @Override
        public void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result)
            throws RetryException {
            throw new RetryException(true, new IOException("response handler validate failed as designed"));
        }

        @Override
        public InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException {
            throw new RetryException(true, new IOException("response handler parse failed as designed"));
        }

        @Override
        public String getRequestType() {
            return "foo";
        }

        @Override
        public boolean canHandleStreamingResponses() {
            return false;
        }
    };
}
}
| RetryingHttpSenderTests |
java | apache__flink | flink-core/src/main/java/org/apache/flink/types/variant/BinaryVariantBuilder.java | {
"start": 4870,
"end": 5859
} | class ____ implements VariantBuilder.VariantObjectBuilder {
private final BinaryVariantInternalBuilder builder;
private final ArrayList<BinaryVariantInternalBuilder.FieldEntry> entries =
new ArrayList<>();
public VariantObjectBuilder(boolean allowDuplicateKeys) {
builder = new BinaryVariantInternalBuilder(allowDuplicateKeys);
}
@Override
public VariantObjectBuilder add(String key, Variant value) {
int id = builder.addKey(key);
entries.add(
new BinaryVariantInternalBuilder.FieldEntry(key, id, builder.getWritePos()));
builder.appendVariant((BinaryVariant) value);
return this;
}
@Override
public Variant build() {
builder.finishWritingObject(0, entries);
return builder.build();
}
}
/** Builder for a variant array. */
@PublicEvolving
public static | VariantObjectBuilder |
java | apache__camel | components/camel-grpc/src/main/java/org/apache/camel/component/grpc/client/GrpcResponseAggregationStreamObserver.java | {
"start": 1192,
"end": 2049
} | class ____ implements StreamObserver<Object> {
private final Exchange exchange;
private final AsyncCallback callback;
private List<Object> responseCollection = new LinkedList<>();
public GrpcResponseAggregationStreamObserver(Exchange exchange, AsyncCallback callback) {
this.exchange = exchange;
this.callback = callback;
}
@Override
public void onNext(Object response) {
responseCollection.add(response);
}
@Override
public void onError(Throwable throwable) {
exchange.setException(throwable);
callback.done(false);
}
@Override
public void onCompleted() {
exchange.getMessage().setHeaders(exchange.getIn().getHeaders());
exchange.getMessage().setBody(responseCollection);
callback.done(false);
}
}
| GrpcResponseAggregationStreamObserver |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_215.java | {
"start": 856,
"end": 1577
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select 7777777777777777777777777777777777777 * 10 from t1;";
System.out.println(sql);
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
assertEquals(1, statementList.size());
SQLStatement stmt = statementList.get(0);
assertEquals("SELECT 7777777777777777777777777777777777777 * 10\n" +
"FROM t1;", stmt.toString());
assertEquals("select 7777777777777777777777777777777777777 * 10\n" +
"from t1;", stmt.clone().toLowerCaseString());
}
}
| MySqlSelectTest_215 |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AutoConfigurationImportSelectorTests.java | {
"start": 2767,
"end": 12245
} | class ____ {
private final TestAutoConfigurationImportSelector importSelector = new TestAutoConfigurationImportSelector(null);
private final ConfigurableListableBeanFactory beanFactory = new DefaultListableBeanFactory();
private final MockEnvironment environment = new MockEnvironment();
private final List<AutoConfigurationImportFilter> filters = new ArrayList<>();
@BeforeEach
void setup() {
setupImportSelector(this.importSelector);
}
@Test
void importsAreSelectedWhenUsingEnableAutoConfiguration() {
String[] imports = selectImports(BasicEnableAutoConfiguration.class);
assertThat(imports).hasSameSizeAs(getAutoConfigurationClassNames());
assertThat(getLastEvent().getExclusions()).isEmpty();
}
@Test
void classExclusionsAreApplied() {
String[] imports = selectImports(EnableAutoConfigurationWithClassExclusions.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 1);
assertThat(getLastEvent().getExclusions()).contains(SeventhAutoConfiguration.class.getName());
}
@Test
void classExclusionsAreAppliedWhenUsingSpringBootApplication() {
String[] imports = selectImports(SpringBootApplicationWithClassExclusions.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 1);
assertThat(getLastEvent().getExclusions()).contains(SeventhAutoConfiguration.class.getName());
}
@Test
void classNamesExclusionsAreApplied() {
String[] imports = selectImports(EnableAutoConfigurationWithClassNameExclusions.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 1);
assertThat(getLastEvent().getExclusions()).contains("com.example.one.FirstAutoConfiguration");
}
@Test
void classNamesExclusionsAreAppliedWhenUsingSpringBootApplication() {
String[] imports = selectImports(SpringBootApplicationWithClassNameExclusions.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 1);
assertThat(getLastEvent().getExclusions()).contains("com.example.three.ThirdAutoConfiguration");
}
@Test
void propertyExclusionsAreApplied() {
this.environment.setProperty("spring.autoconfigure.exclude", "com.example.three.ThirdAutoConfiguration");
String[] imports = selectImports(BasicEnableAutoConfiguration.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 1);
assertThat(getLastEvent().getExclusions()).contains("com.example.three.ThirdAutoConfiguration");
}
@Test
void severalPropertyExclusionsAreApplied() {
this.environment.setProperty("spring.autoconfigure.exclude",
"com.example.two.SecondAutoConfiguration,com.example.four.FourthAutoConfiguration");
testSeveralPropertyExclusionsAreApplied();
}
@Test
void severalPropertyExclusionsAreAppliedWithExtraSpaces() {
this.environment.setProperty("spring.autoconfigure.exclude",
"com.example.two.SecondAutoConfiguration , com.example.four.FourthAutoConfiguration ");
testSeveralPropertyExclusionsAreApplied();
}
@Test
void severalPropertyYamlExclusionsAreApplied() {
this.environment.setProperty("spring.autoconfigure.exclude[0]", "com.example.two.SecondAutoConfiguration");
this.environment.setProperty("spring.autoconfigure.exclude[1]", "com.example.four.FourthAutoConfiguration");
testSeveralPropertyExclusionsAreApplied();
}
private void testSeveralPropertyExclusionsAreApplied() {
String[] imports = selectImports(BasicEnableAutoConfiguration.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 2);
assertThat(getLastEvent().getExclusions()).contains("com.example.two.SecondAutoConfiguration",
"com.example.four.FourthAutoConfiguration");
}
@Test
void combinedExclusionsAreApplied() {
this.environment.setProperty("spring.autoconfigure.exclude", "com.example.one.FirstAutoConfiguration");
String[] imports = selectImports(EnableAutoConfigurationWithClassAndClassNameExclusions.class);
assertThat(imports).hasSize(getAutoConfigurationClassNames().size() - 3);
assertThat(getLastEvent().getExclusions()).contains("com.example.one.FirstAutoConfiguration",
"com.example.five.FifthAutoConfiguration", SeventhAutoConfiguration.class.getName());
}
@Test
@WithTestAutoConfigurationImportsResource
@WithTestAutoConfigurationReplacementsResource
void removedExclusionsAreApplied() {
TestAutoConfigurationImportSelector importSelector = new TestAutoConfigurationImportSelector(
TestAutoConfiguration.class);
setupImportSelector(importSelector);
AnnotationMetadata metadata = AnnotationMetadata.introspect(BasicEnableAutoConfiguration.class);
assertThat(importSelector.selectImports(metadata)).contains(ReplacementAutoConfiguration.class.getName());
this.environment.setProperty("spring.autoconfigure.exclude", DeprecatedAutoConfiguration.class.getName());
assertThat(importSelector.selectImports(metadata)).doesNotContain(ReplacementAutoConfiguration.class.getName());
}
@Test
void nonAutoConfigurationClassExclusionsShouldThrowException() {
assertThatIllegalStateException()
.isThrownBy(() -> selectImports(EnableAutoConfigurationWithFaultyClassExclude.class));
}
@Test
void nonAutoConfigurationClassNameExclusionsWhenPresentOnClassPathShouldThrowException() {
assertThatIllegalStateException()
.isThrownBy(() -> selectImports(EnableAutoConfigurationWithFaultyClassNameExclude.class));
}
@Test
void nonAutoConfigurationPropertyExclusionsWhenPresentOnClassPathShouldThrowException() {
this.environment.setProperty("spring.autoconfigure.exclude",
"org.springframework.boot.autoconfigure.AutoConfigurationImportSelectorTests.TestConfiguration");
assertThatIllegalStateException().isThrownBy(() -> selectImports(BasicEnableAutoConfiguration.class));
}
@Test
void nameAndPropertyExclusionsWhenNotPresentOnClasspathShouldNotThrowException() {
this.environment.setProperty("spring.autoconfigure.exclude",
"org.springframework.boot.autoconfigure.DoesNotExist2");
selectImports(EnableAutoConfigurationWithAbsentClassNameExclude.class);
assertThat(getLastEvent().getExclusions()).containsExactlyInAnyOrder(
"org.springframework.boot.autoconfigure.DoesNotExist1",
"org.springframework.boot.autoconfigure.DoesNotExist2");
}
@Test
void filterShouldFilterImports() {
String[] defaultImports = selectImports(BasicEnableAutoConfiguration.class);
this.filters.add(new TestAutoConfigurationImportFilter(defaultImports, 1));
this.filters.add(new TestAutoConfigurationImportFilter(defaultImports, 3, 4));
String[] filtered = selectImports(BasicEnableAutoConfiguration.class);
assertThat(filtered).hasSize(defaultImports.length - 3);
assertThat(filtered).doesNotContain(defaultImports[1], defaultImports[3], defaultImports[4]);
}
@Test
void filterShouldSupportAware() {
TestAutoConfigurationImportFilter filter = new TestAutoConfigurationImportFilter(new String[] {});
this.filters.add(filter);
selectImports(BasicEnableAutoConfiguration.class);
assertThat(filter.getBeanFactory()).isEqualTo(this.beanFactory);
}
@Test
void getExclusionFilterReuseFilters() {
String[] allImports = new String[] { "com.example.A", "com.example.B", "com.example.C" };
this.filters.add(new TestAutoConfigurationImportFilter(allImports, 0));
this.filters.add(new TestAutoConfigurationImportFilter(allImports, 2));
assertThat(this.importSelector.getExclusionFilter().test("com.example.A")).isTrue();
assertThat(this.importSelector.getExclusionFilter().test("com.example.B")).isFalse();
assertThat(this.importSelector.getExclusionFilter().test("com.example.C")).isTrue();
}
@Test
@WithTestAutoConfigurationImportsResource
@WithTestAutoConfigurationReplacementsResource
void sortingConsidersReplacements() {
TestAutoConfigurationImportSelector importSelector = new TestAutoConfigurationImportSelector(
TestAutoConfiguration.class);
setupImportSelector(importSelector);
AnnotationMetadata metadata = AnnotationMetadata.introspect(BasicEnableAutoConfiguration.class);
assertThat(importSelector.selectImports(metadata)).containsExactly(
AfterDeprecatedAutoConfiguration.class.getName(), ReplacementAutoConfiguration.class.getName());
Group group = BeanUtils.instantiateClass(importSelector.getImportGroup());
((BeanFactoryAware) group).setBeanFactory(this.beanFactory);
group.process(metadata, importSelector);
Stream<Entry> imports = StreamSupport.stream(group.selectImports().spliterator(), false);
assertThat(imports.map(Entry::getImportClassName)).containsExactly(ReplacementAutoConfiguration.class.getName(),
AfterDeprecatedAutoConfiguration.class.getName());
}
private String[] selectImports(Class<?> source) {
return this.importSelector.selectImports(AnnotationMetadata.introspect(source));
}
private List<String> getAutoConfigurationClassNames() {
return ImportCandidates.load(AutoConfiguration.class, Thread.currentThread().getContextClassLoader())
.getCandidates();
}
private void setupImportSelector(TestAutoConfigurationImportSelector importSelector) {
importSelector.setBeanFactory(this.beanFactory);
importSelector.setEnvironment(this.environment);
importSelector.setResourceLoader(new DefaultResourceLoader());
importSelector.setBeanClassLoader(Thread.currentThread().getContextClassLoader());
}
private AutoConfigurationImportEvent getLastEvent() {
AutoConfigurationImportEvent result = this.importSelector.getLastEvent();
assertThat(result).isNotNull();
return result;
}
private final | AutoConfigurationImportSelectorTests |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/RexNodeJsonSerializer.java | {
"start": 24611,
"end": 25264
} | class ____ not be stateful (i.e. containing only transient and "
+ "static fields) and should provide a default constructor. One can "
+ "register the function under a temporary name to resolve this issue.",
summaryName));
}
private static TableException cannotSerializePermanentCatalogFunction(
ObjectIdentifier objectIdentifier) {
return new TableException(
String.format(
"Permanent catalog function '%s' is not serializable. The function's implementation "
+ " | must |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/http/OAuth2LoginBeanDefinitionParser.java | {
"start": 19438,
"end": 20640
} | class ____ implements AuthenticationProvider {
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
OAuth2LoginAuthenticationToken authorizationCodeAuthentication = (OAuth2LoginAuthenticationToken) authentication;
if (!authorizationCodeAuthentication.getAuthorizationExchange()
.getAuthorizationRequest()
.getScopes()
.contains(OidcScopes.OPENID)) {
return null;
}
// Section 3.1.2.1 Authentication Request -
// https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest scope
// REQUIRED. OpenID Connect requests MUST contain the "openid" scope
// value.
OAuth2Error oauth2Error = new OAuth2Error("oidc_provider_not_configured",
"An OpenID Connect Authentication Provider has not been configured. "
+ "Check to ensure you include the dependency 'spring-security-oauth2-jose'.",
null);
throw new OAuth2AuthenticationException(oauth2Error, oauth2Error.toString());
}
@Override
public boolean supports(Class<?> authentication) {
return OAuth2LoginAuthenticationToken.class.isAssignableFrom(authentication);
}
}
/**
* Wrapper bean | OidcAuthenticationRequestChecker |
java | micronaut-projects__micronaut-core | http-client/src/main/java/io/micronaut/http/client/netty/NettyClientCustomizer.java | {
"start": 3488,
"end": 3937
} | enum ____ {
/**
* The channel is a connection channel, e.g. a
* {@link io.netty.channel.socket.SocketChannel}, representing an HTTP connection.
*/
CONNECTION,
/**
* The channel is a HTTP2 stream channel.
*
* @since 4.0.0
*/
HTTP2_STREAM,
}
/**
* Interface implemented by the HTTP client registry to register customizers.
*/
| ChannelRole |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java | {
"start": 1180,
"end": 7316
} | class ____ implements InferenceServiceResults {
public static final String NAME = "rerank_service_results";
public static final String RERANK = "rerank";
List<RankedDoc> rankedDocs;
public RankedDocsResults(List<RankedDoc> rankedDocs) {
this.rankedDocs = rankedDocs;
}
public RankedDocsResults(StreamInput in) throws IOException {
this.rankedDocs = in.readCollectionAsList(RankedDoc::of);
}
public static final ParseField RERANK_FIELD = new ParseField(RERANK);
public static ConstructingObjectParser<RankedDocsResults, Void> createParser(boolean ignoreUnknownFields) {
@SuppressWarnings("unchecked")
ConstructingObjectParser<RankedDocsResults, Void> parser = new ConstructingObjectParser<>(
"ranked_doc_results",
ignoreUnknownFields,
a -> new RankedDocsResults((List<RankedDoc>) a[0])
);
parser.declareObjectArray(
ConstructingObjectParser.constructorArg(),
(p, c) -> RankedDoc.createParser(true).apply(p, c),
RERANK_FIELD
);
return parser;
}
/**
* A record representing a document that has been ranked by the cohere rerank API
* @param index the index of the document when it was passed to the cohere rerank API
* @param relevanceScore
* @param text
*/
public record RankedDoc(int index, float relevanceScore, @Nullable String text)
implements
Comparable<RankedDoc>,
Writeable,
ToXContentObject {
public static ConstructingObjectParser<RankedDoc, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<RankedDoc, Void> parser = new ConstructingObjectParser<>(
"ranked_doc",
ignoreUnknownFields,
a -> new RankedDoc((int) a[0], (float) a[1], (String) a[2])
);
parser.declareInt(ConstructingObjectParser.constructorArg(), INDEX_FIELD);
parser.declareFloat(ConstructingObjectParser.constructorArg(), RELEVANCE_SCORE_FIELD);
parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TEXT_FIELD);
return parser;
}
public static final String NAME = "ranked_doc";
public static final String INDEX = "index";
public static final String RELEVANCE_SCORE = "relevance_score";
public static final String TEXT = "text";
public static final ParseField INDEX_FIELD = new ParseField(INDEX);
public static final ParseField RELEVANCE_SCORE_FIELD = new ParseField(RELEVANCE_SCORE);
public static final ParseField TEXT_FIELD = new ParseField(TEXT);
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(INDEX, index);
builder.field(RELEVANCE_SCORE, relevanceScore);
if (text != null) {
builder.field(TEXT, text);
}
builder.endObject();
return builder;
}
public static RankedDoc of(StreamInput in) throws IOException {
return new RankedDoc(in.readInt(), in.readFloat(), in.readOptionalString());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeInt(index);
out.writeFloat(relevanceScore);
out.writeOptionalString(text);
}
public Map<String, Object> asMap() {
if (text != null) {
return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore, TEXT, text));
} else {
return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore));
}
}
@Override
public int compareTo(RankedDoc other) {
return Float.compare(other.relevanceScore, this.relevanceScore);
}
public String toString() {
return "RankedDoc{"
+ "index='"
+ index
+ '\''
+ ", relevanceScore='"
+ relevanceScore
+ '\''
+ ", text='"
+ text
+ '\''
+ ", hashcode="
+ hashCode()
+ '}';
}
};
public RankedDocsResults() {
this.rankedDocs = new ArrayList<RankedDoc>(0);
}
public List<RankedDoc> getRankedDocs() {
return this.rankedDocs;
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return ChunkedToXContentHelper.array(RERANK, rankedDocs.iterator());
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(rankedDocs);
}
@Override
public List<? extends InferenceResults> transformToCoordinationFormat() {
throw new UnsupportedOperationException("Coordination format not supported by " + NAME);
}
@Override
public Map<String, Object> asMap() {
Map<String, Object> map = new LinkedHashMap<>();
map.put(RERANK, rankedDocs.stream().map(RankedDoc::asMap).collect(Collectors.toList()));
return map;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("RankedDocsResults@");
sb.append(Integer.toHexString(hashCode()));
sb.append("\n");
for (RankedDoc rankedDoc : rankedDocs) {
sb.append(rankedDoc.toString());
}
return sb.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RankedDocsResults that = (RankedDocsResults) o;
return Objects.equals(rankedDocs, that.rankedDocs);
}
@Override
public int hashCode() {
return Objects.hash(rankedDocs);
}
}
| RankedDocsResults |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/PrincipalMethodArgumentResolverTests.java | {
"start": 1441,
"end": 3148
} | class ____ {
private final PrincipalMethodArgumentResolver resolver =
new PrincipalMethodArgumentResolver(ReactiveAdapterRegistry.getSharedInstance());
private final ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build();
@Test
void supportsParameter() {
assertThat(this.resolver.supportsParameter(this.testMethod.arg(Principal.class))).isTrue();
assertThat(this.resolver.supportsParameter(this.testMethod.arg(Mono.class, Principal.class))).isTrue();
assertThat(this.resolver.supportsParameter(this.testMethod.arg(Single.class, Principal.class))).isTrue();
}
@Test
void resolverArgument() {
Principal user = () -> "Joe";
ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/"))
.mutate().principal(Mono.just(user)).build();
BindingContext context = new BindingContext();
MethodParameter param = this.testMethod.arg(Principal.class);
Object actual = this.resolver.resolveArgument(param, context, exchange).block();
assertThat(actual).isSameAs(user);
param = this.testMethod.arg(Mono.class, Principal.class);
actual = this.resolver.resolveArgument(param, context, exchange).block();
assertThat(actual).isInstanceOf(Mono.class).extracting(o -> ((Mono<?>) o).block()).isSameAs(user);
param = this.testMethod.arg(Single.class, Principal.class);
actual = this.resolver.resolveArgument(param, context, exchange).block();
assertThat(actual).isInstanceOf(Single.class).extracting(o -> ((Single<?>) o).blockingGet()).isSameAs(user);
}
@SuppressWarnings("unused")
void handle(
Principal user,
Mono<Principal> userMono,
Single<Principal> singleUser) {
}
}
| PrincipalMethodArgumentResolverTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/renewal/FastMultilockTask.java | {
"start": 948,
"end": 4615
} | class ____ extends LockTask {
public FastMultilockTask(long internalLockLeaseTime, CommandAsyncExecutor executor) {
super(internalLockLeaseTime, executor, 1);
}
@Override
CompletionStage<Void> renew(Iterator<String> iter, int chunkSize) {
if (!iter.hasNext()) {
return CompletableFuture.completedFuture(null);
}
Map<String, Long> name2lockName = new HashMap<>();
List<Object> args = new ArrayList<>();
args.add(internalLockLeaseTime);
args.add(System.currentTimeMillis());
List<String> keys = new ArrayList<>(chunkSize);
while (iter.hasNext()) {
String key = iter.next();
FastMultilockEntry entry = (FastMultilockEntry) name2entry.get(key);
if (entry == null) {
continue;
}
Long threadId = entry.getFirstThreadId();
if (threadId == null) {
continue;
}
keys.add(key);
args.add(entry.getLockName(threadId));
args.addAll(entry.getFields());
name2lockName.put(key, threadId);
if (keys.size() == chunkSize) {
break;
}
}
if (keys.isEmpty()) {
return CompletableFuture.completedFuture(null);
}
String firstName = keys.get(0);
CompletionStage<Boolean> f = executor.syncedEval(firstName, LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local leaseTime = tonumber(ARGV[1]);" +
"local currentTime = tonumber(ARGV[2]);" +
"local currentThread = ARGV[3];" +
"if (redis.call('exists',KEYS[1]) > 0) then" +
" local newExpireTime = leaseTime + currentTime;" +
" for i=4, #ARGV, 1 do " +
" local lockThread = redis.call('hget', KEYS[1], ARGV[i]);" +
" if(lockThread ~= false and lockThread == currentThread) then " +
" local expireFieldName = ARGV[i]..':'..lockThread..':expire_time';" +
" local expireTime = redis.call('hget', KEYS[1], expireFieldName);" +
" if(tonumber(expireTime) < newExpireTime) then " +
" redis.call('hset', KEYS[1],expireFieldName, newExpireTime);" +
" end;" +
" else" +
" return 0;" +
" end;" +
" end; " +
" local expireTime = redis.call('pttl',KEYS[1]);" +
" if(tonumber(expireTime) < tonumber(leaseTime)) then " +
" redis.call('pexpire',KEYS[1], leaseTime);" +
" end;" +
" return 1;" +
"end;" +
"return 0;",
Collections.singletonList(firstName),
args.toArray());
return f.thenCompose(exists -> {
if (!exists) {
cancelExpirationRenewal(firstName, name2lockName.get(firstName));
}
return renew(iter, chunkSize);
});
}
public void add(String rawName, String lockName, long threadId, Collection<String> fields) {
FastMultilockEntry entry = new FastMultilockEntry(fields);
entry.addThreadId(threadId, lockName);
add(rawName, lockName, threadId, entry);
}
}
| FastMultilockTask |
java | apache__camel | components/camel-google/camel-google-calendar/src/test/java/org/apache/camel/component/google/calendar/CalendarCalendarListIT.java | {
"start": 1761,
"end": 4007
} | class ____ extends AbstractGoogleCalendarTestSupport {
private static final String PATH_PREFIX
= GoogleCalendarApiCollection.getCollection().getApiName(CalendarCalendarListApiMethod.class).getName();
@Test
public void testCalendarList() {
Calendar calendar = getCalendar();
assertTrue(isCalendarInList(calendar), "Test calendar should be in the list");
CalendarListEntry calendarFromGet = requestBody("direct://GET", calendar.getId());
assertEquals(calendar.getId(), calendarFromGet.getId());
}
protected boolean isCalendarInList(Calendar calendar) {
CalendarList calendarList = requestBody("direct://LIST", null);
java.util.List<CalendarListEntry> items = calendarList.getItems();
boolean found = false;
for (CalendarListEntry calendarListEntry : items) {
if (calendar.getSummary().equals(calendarListEntry.getSummary())) {
found = true;
break;
}
}
return found;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// test route for delete
from("direct://DELETE").to("google-calendar://" + PATH_PREFIX + "/delete?inBody=calendarId");
// test route for get
from("direct://GET").to("google-calendar://" + PATH_PREFIX + "/get?inBody=calendarId");
// test route for insert
from("direct://INSERT").to("google-calendar://" + PATH_PREFIX + "/insert?inBody=content");
// test route for list
from("direct://LIST").to("google-calendar://" + PATH_PREFIX + "/list");
// test route for patch
from("direct://PATCH").to("google-calendar://" + PATH_PREFIX + "/patch");
// test route for update
from("direct://UPDATE").to("google-calendar://" + PATH_PREFIX + "/update");
// test route for watch
from("direct://WATCH").to("google-calendar://" + PATH_PREFIX + "/watch?inBody=contentChannel");
}
};
}
}
| CalendarCalendarListIT |
java | apache__kafka | trogdor/src/main/java/org/apache/kafka/trogdor/workload/GaussianTimestampConstantPayloadGenerator.java | {
"start": 2291,
"end": 4758
} | class ____ implements PayloadGenerator {
private final int messageSizeAverage;
private final double messageSizeDeviation;
private final int messagesUntilSizeChange;
private final long seed;
private final Random random = new Random();
private final ByteBuffer buffer;
private int messageTracker = 0;
private int messageSize = 0;
@JsonCreator
public GaussianTimestampConstantPayloadGenerator(@JsonProperty("messageSizeAverage") int messageSizeAverage,
@JsonProperty("messageSizeDeviation") double messageSizeDeviation,
@JsonProperty("messagesUntilSizeChange") int messagesUntilSizeChange,
@JsonProperty("seed") long seed) {
this.messageSizeAverage = messageSizeAverage;
this.messageSizeDeviation = messageSizeDeviation;
this.seed = seed;
this.messagesUntilSizeChange = messagesUntilSizeChange;
buffer = ByteBuffer.allocate(Long.BYTES);
buffer.order(ByteOrder.LITTLE_ENDIAN);
}
@JsonProperty
public int messageSizeAverage() {
return messageSizeAverage;
}
@JsonProperty
public double messageSizeDeviation() {
return messageSizeDeviation;
}
@JsonProperty
public int messagesUntilSizeChange() {
return messagesUntilSizeChange;
}
@JsonProperty
public long seed() {
return seed;
}
@Override
public synchronized byte[] generate(long position) {
// Make the random number generator deterministic for unit tests.
random.setSeed(seed + position);
// Calculate the next message size based on a gaussian distribution.
if ((messageSize == 0) || (messageTracker >= messagesUntilSizeChange)) {
messageTracker = 0;
messageSize = Math.max((int) (random.nextGaussian() * messageSizeDeviation) + messageSizeAverage, Long.BYTES);
}
messageTracker += 1;
// Generate the byte array before the timestamp generation.
byte[] result = new byte[messageSize];
// Do the timestamp generation as the very last task.
buffer.clear();
buffer.putLong(Time.SYSTEM.milliseconds());
buffer.rewind();
System.arraycopy(buffer.array(), 0, result, 0, Long.BYTES);
return result;
}
}
| GaussianTimestampConstantPayloadGenerator |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/impl/MethodProperty.java | {
"start": 918,
"end": 6900
} | class ____
extends SettableBeanProperty
{
protected final AnnotatedMember _annotated;
/**
* Setter MethodHandle holder for modifying property value.
*/
protected final SetterHolder _setter = new SetterHolder(methodType(void.class, Object.class, Object.class));
/**
* Setter MethodHandle holder for modifying property value and returning the modified bean.
*/
protected final SetterHolder _setterReturn = new SetterHolder(methodType(Object.class, Object.class, Object.class));
protected final boolean _skipNulls;
public MethodProperty(BeanPropertyDefinition propDef,
JavaType type, TypeDeserializer typeDeser,
Annotations contextAnnotations, AnnotatedMember annotated)
{
super(propDef, type, typeDeser, contextAnnotations);
_annotated = annotated;
_skipNulls = NullsConstantProvider.isSkipper(_nullProvider);
}
protected MethodProperty(MethodProperty src, ValueDeserializer<?> deser,
NullValueProvider nva) {
super(src, deser, nva);
_annotated = src._annotated;
_skipNulls = NullsConstantProvider.isSkipper(nva);
}
protected MethodProperty(MethodProperty src, PropertyName newName) {
super(src, newName);
_annotated = src._annotated;
_skipNulls = src._skipNulls;
}
@Override
public SettableBeanProperty withName(PropertyName newName) {
return new MethodProperty(this, newName);
}
@Override
public SettableBeanProperty withValueDeserializer(ValueDeserializer<?> deser) {
if (_valueDeserializer == deser) {
return this;
}
// 07-May-2019, tatu: As per [databind#2303], must keep VD/NVP in-sync if they were
NullValueProvider nvp = (_valueDeserializer == _nullProvider) ? deser : _nullProvider;
return new MethodProperty(this, deser, nvp);
}
@Override
public SettableBeanProperty withNullProvider(NullValueProvider nva) {
return new MethodProperty(this, _valueDeserializer, nva);
}
@Override
public void fixAccess(DeserializationConfig config) {
_annotated.fixAccess(
config.isEnabled(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS));
}
/*
/**********************************************************
/* BeanProperty impl
/**********************************************************
*/
@Override
public <A extends Annotation> A getAnnotation(Class<A> acls) {
return (_annotated == null) ? null : _annotated.getAnnotation(acls);
}
@Override public AnnotatedMember getMember() { return _annotated; }
/*
/**********************************************************
/* Overridden methods
/**********************************************************
*/
@Override
public void deserializeAndSet(JsonParser p, DeserializationContext ctxt,
Object instance) throws JacksonException
{
Object value;
if (p.hasToken(JsonToken.VALUE_NULL)) {
if (_skipNulls) {
return;
}
value = _nullProvider.getNullValue(ctxt);
} else if (_valueTypeDeserializer == null) {
value = _valueDeserializer.deserialize(p, ctxt);
// 04-May-2018, tatu: [databind#2023] Coercion from String (mostly) can give null
if (value == null) {
if (_skipNulls) {
return;
}
value = _nullProvider.getNullValue(ctxt);
}
} else {
value = _valueDeserializer.deserializeWithType(p, ctxt, _valueTypeDeserializer);
}
try {
_setter.get().invokeExact(instance, value);
} catch (Throwable e) {
_throwAsJacksonE(p, e, value);
}
}
@Override
public Object deserializeSetAndReturn(JsonParser p,
DeserializationContext ctxt, Object instance) throws JacksonException
{
Object value;
if (p.hasToken(JsonToken.VALUE_NULL)) {
if (_skipNulls) {
return instance;
}
value = _nullProvider.getNullValue(ctxt);
} else if (_valueTypeDeserializer == null) {
value = _valueDeserializer.deserialize(p, ctxt);
// 04-May-2018, tatu: [databind#2023] Coercion from String (mostly) can give null
if (value == null) {
if (_skipNulls) {
return instance;
}
value = _nullProvider.getNullValue(ctxt);
}
} else {
value = _valueDeserializer.deserializeWithType(p, ctxt, _valueTypeDeserializer);
}
try {
Object result = _setterReturn.get().invokeExact(instance, value);
return (result == null) ? instance : result;
} catch (Throwable e) {
_throwAsJacksonE(p, e, value);
return null;
}
}
@Override
public final void set(DeserializationContext ctxt, Object instance, Object value)
throws JacksonException
{
if (value == null) {
if (_skipNulls) {
return;
}
}
try {
_setter.get().invokeExact(instance, value);
} catch (Throwable e) {
_throwAsJacksonE(ctxt.getParser(), e, value);
}
}
@Override
public Object setAndReturn(DeserializationContext ctxt,
Object instance, Object value) throws JacksonException
{
if (value == null) {
if (_skipNulls) {
return instance;
}
}
try {
Object result = _setterReturn.get().invokeExact(instance, value);
return (result == null) ? instance : result;
} catch (Throwable e) {
_throwAsJacksonE(ctxt.getParser(), e, value);
return null;
}
}
| MethodProperty |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/transforms/LegacyToNonLegacyTransformation.java | {
"start": 1493,
"end": 2376
} | class ____ implements TypeTransformation {
public static final TypeTransformation INSTANCE = new LegacyToNonLegacyTransformation();
@Override
public DataType transform(DataType typeToTransform) {
return transform(null, typeToTransform);
}
@Override
public DataType transform(@Nullable DataTypeFactory factory, DataType dataType) {
if (factory == null) {
throw new TableException(
"LegacyToNonLegacyTransformation requires access to the data type factory.");
}
final LogicalType type = dataType.getLogicalType();
if (type instanceof LegacyTypeInformationType) {
return TypeInfoDataTypeConverter.toDataType(
factory, ((LegacyTypeInformationType<?>) type).getTypeInformation(), true);
}
return dataType;
}
}
| LegacyToNonLegacyTransformation |
java | spring-projects__spring-framework | spring-core-test/src/test/java/org/springframework/aot/agent/InstrumentedMethodTests.java | {
"start": 13925,
"end": 17704
} | class ____ {
RecordedInvocation getPublicField;
RecordedInvocation stringGetDeclaredField;
RecordedInvocation stringGetDeclaredFields;
RecordedInvocation stringGetFields;
@BeforeEach
void setup() throws Exception {
this.getPublicField = RecordedInvocation.of(InstrumentedMethod.CLASS_GETFIELD)
.onInstance(PublicField.class).withArgument("field")
.returnValue(PublicField.class.getField("field")).build();
this.stringGetDeclaredField = RecordedInvocation.of(InstrumentedMethod.CLASS_GETDECLAREDFIELD)
.onInstance(String.class).withArgument("value").returnValue(String.class.getDeclaredField("value")).build();
this.stringGetDeclaredFields = RecordedInvocation.of(InstrumentedMethod.CLASS_GETDECLAREDFIELDS)
.onInstance(String.class).returnValue(String.class.getDeclaredFields()).build();
this.stringGetFields = RecordedInvocation.of(InstrumentedMethod.CLASS_GETFIELDS)
.onInstance(String.class).returnValue(String.class.getFields()).build();
}
@Test
void classGetDeclaredFieldShouldMatchTypeReflectionHint() {
hints.reflection().registerType(String.class);
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETDECLAREDFIELD, this.stringGetDeclaredField);
}
@Test
void classGetDeclaredFieldShouldMatchFieldHint() {
hints.reflection().registerType(String.class, typeHint -> typeHint.withField("value"));
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETDECLAREDFIELD, this.stringGetDeclaredField);
}
@Test
void classGetDeclaredFieldsShouldMatchTypeReflectionHint() {
hints.reflection().registerType(String.class);
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETDECLAREDFIELDS, this.stringGetDeclaredFields);
}
@Test
void classGetDeclaredFieldsShouldMatchFieldHint() throws Exception {
hints.reflection().registerField(String.class.getDeclaredField("value"));
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETDECLAREDFIELDS, this.stringGetDeclaredFields);
}
@Test
void classGetFieldShouldMatchTypeReflectionHint() {
hints.reflection().registerType(PublicField.class);
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETFIELD, this.getPublicField);
}
@Test
void classGetFieldShouldMatchFieldHint() {
hints.reflection().registerType(PublicField.class, typeHint -> typeHint.withField("field"));
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETFIELD, this.getPublicField);
}
@Test
void classGetFieldShouldNotMatchPublicFieldsHintWhenPrivate() {
RecordedInvocation invocation = RecordedInvocation.of(InstrumentedMethod.CLASS_GETFIELD)
.onInstance(String.class).withArgument("value").returnValue(null).build();
hints.reflection().registerType(String.class);
assertThatInvocationDoesNotMatch(InstrumentedMethod.CLASS_GETFIELD, invocation);
}
@Test
void classGetFieldShouldNotMatchForWrongType() {
RecordedInvocation invocation = RecordedInvocation.of(InstrumentedMethod.CLASS_GETFIELD)
.onInstance(String.class).withArgument("value").returnValue(null).build();
hints.reflection().registerType(Integer.class);
assertThatInvocationDoesNotMatch(InstrumentedMethod.CLASS_GETFIELD, invocation);
}
@Test
void classGetFieldsShouldMatchReflectionHint() {
RecordedInvocation invocation = RecordedInvocation.of(InstrumentedMethod.CLASS_GETFIELDS)
.onInstance(PublicField.class).build();
hints.reflection().registerType(PublicField.class);
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETFIELDS, invocation);
}
@Test
void classGetFieldsShouldMatchTypeHint() {
hints.reflection().registerType(String.class);
assertThatInvocationMatches(InstrumentedMethod.CLASS_GETFIELDS, this.stringGetFields);
}
}
@Nested
| FieldReflectionInstrumentationTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/netty/NettyPartitionRequestClient.java | {
"start": 13701,
"end": 14109
} | class ____ extends ClientOutboundMessage {
private AcknowledgeAllRecordsProcessedMessage(RemoteInputChannel inputChannel) {
super(checkNotNull(inputChannel));
}
@Override
Object buildMessage() {
return new NettyMessage.AckAllUserRecordsProcessed(inputChannel.getInputChannelId());
}
}
private static | AcknowledgeAllRecordsProcessedMessage |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/propertyeditors/URLEditorTests.java | {
"start": 1009,
"end": 2885
} | class ____ {
@Test
void testCtorWithNullResourceEditor() {
assertThatIllegalArgumentException().isThrownBy(() ->
new URLEditor(null));
}
@Test
void testStandardURI() {
PropertyEditor urlEditor = new URLEditor();
urlEditor.setAsText("mailto:juergen.hoeller@interface21.com");
Object value = urlEditor.getValue();
assertThat(value).isInstanceOf(URL.class);
URL url = (URL) value;
assertThat(urlEditor.getAsText()).isEqualTo(url.toExternalForm());
}
@Test
void testStandardURL() {
PropertyEditor urlEditor = new URLEditor();
urlEditor.setAsText("https://www.springframework.org");
Object value = urlEditor.getValue();
assertThat(value).isInstanceOf(URL.class);
URL url = (URL) value;
assertThat(urlEditor.getAsText()).isEqualTo(url.toExternalForm());
}
@Test
void testClasspathURL() {
PropertyEditor urlEditor = new URLEditor();
urlEditor.setAsText("classpath:" + ClassUtils.classPackageAsResourcePath(getClass()) +
"/" + ClassUtils.getShortName(getClass()) + ".class");
Object value = urlEditor.getValue();
assertThat(value).isInstanceOf(URL.class);
URL url = (URL) value;
assertThat(urlEditor.getAsText()).isEqualTo(url.toExternalForm());
assertThat(url.getProtocol()).doesNotStartWith("classpath");
}
@Test
void testWithNonExistentResource() {
PropertyEditor urlEditor = new URLEditor();
assertThatIllegalArgumentException().isThrownBy(() ->
urlEditor.setAsText("gonna:/freak/in/the/morning/freak/in/the.evening"));
}
@Test
void testSetAsTextWithNull() {
PropertyEditor urlEditor = new URLEditor();
urlEditor.setAsText(null);
assertThat(urlEditor.getValue()).isNull();
assertThat(urlEditor.getAsText()).isEmpty();
}
@Test
void testGetAsTextReturnsEmptyStringIfValueNotSet() {
PropertyEditor urlEditor = new URLEditor();
assertThat(urlEditor.getAsText()).isEmpty();
}
}
| URLEditorTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java | {
"start": 9562,
"end": 9932
} | class ____ maintain version information in their meta blocks.
*
* A version number consists of a major version and a minor version. The
* suggested usage of major and minor version number is to increment major
* version number when the new storage format is not backward compatible, and
* increment the minor version otherwise.
*/
public static final | to |
java | quarkusio__quarkus | independent-projects/arc/processor/src/test/java/io/quarkus/arc/processor/types/Baz.java | {
"start": 202,
"end": 346
} | class ____ {
@Inject
Instance<List<String>> list;
public boolean isListResolvable() {
return list.isResolvable();
}
}
| Baz |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_asInstanceOf_with_InstanceOfAssertFactory_Test.java | {
"start": 1281,
"end": 2578
} | class ____ extends AbstractAssertBaseTest
implements NavigationMethodWithComparatorBaseTest<ConcreteAssert> {
@Override
protected ConcreteAssert invoke_api_method() {
assertions.asInstanceOf(LONG);
return null;
}
@Override
protected void verify_internal_effects() {
verify(objects).assertIsInstanceOf(getInfo(assertions), getActual(assertions), Long.class);
}
@Override
public void should_return_this() {
// Test disabled since asInstanceOf does not return this.
}
@Override
public ConcreteAssert getAssertion() {
return assertions;
}
@Override
public AbstractAssert<?, ?> invoke_navigation_method(ConcreteAssert assertion) {
return assertion.asInstanceOf(LONG);
}
@Test
void should_throw_npe_if_no_factory_is_given() {
// WHEN
Throwable thrown = catchThrowable(() -> assertions.asInstanceOf(null));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("instanceOfAssertFactory").create());
}
@Test
void should_return_narrowed_assert_type() {
// WHEN
AbstractAssert<?, ?> result = assertions.asInstanceOf(LONG);
// THEN
then(result).isInstanceOf(AbstractLongAssert.class);
}
}
| AbstractAssert_asInstanceOf_with_InstanceOfAssertFactory_Test |
java | apache__camel | components/camel-leveldb/src/test/java/org/apache/camel/component/leveldb/LevelDBBinaryTest.java | {
"start": 1455,
"end": 3299
} | class ____ extends LevelDBTestSupport {
@Override
public void doPreSetup() {
deleteDirectory("target/data");
}
@Test
public void testLevelDBAggregate() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:aggregated");
byte[] a = new byte[10];
new SecureRandom().nextBytes(a);
byte[] b = new byte[10];
new SecureRandom().nextBytes(b);
byte[] c = new byte[10];
new SecureRandom().nextBytes(c);
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
outputStream.write(a);
outputStream.write(b);
outputStream.write(c);
mock.expectedBodiesReceived(outputStream.toByteArray());
}
template.sendBodyAndHeader("direct:start", a, "id", 123);
template.sendBodyAndHeader("direct:start", b, "id", 123);
template.sendBodyAndHeader("direct:start", c, "id", 123);
MockEndpoint.assertIsSatisfied(context, 30, TimeUnit.SECONDS);
// from endpoint should be preserved
assertEquals("direct://start", mock.getReceivedExchanges().get(0).getFromEndpoint().getEndpointUri());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
// START SNIPPET: e1
public void configure() {
// here is the Camel route where we aggregate
from("direct:start")
.aggregate(header("id"), new ByteAggregationStrategy())
// use our created leveldb repo as aggregation repository
.completionSize(3).aggregationRepository(getRepo())
.to("mock:aggregated");
}
// END SNIPPET: e1
};
}
}
| LevelDBBinaryTest |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/startup/StartupTimeMetricsListenerAutoConfiguration.java | {
"start": 1794,
"end": 2021
} | class ____ {
@Bean
@ConditionalOnMissingBean
StartupTimeMetricsListener startupTimeMetrics(MeterRegistry meterRegistry) {
return new StartupTimeMetricsListener(meterRegistry);
}
}
| StartupTimeMetricsListenerAutoConfiguration |
java | quarkusio__quarkus | extensions/vertx/deployment/src/test/java/io/quarkus/vertx/deployment/VerticleWithClassNameDeploymentTest.java | {
"start": 1330,
"end": 1778
} | class ____ {
@Inject
Vertx vertx;
public void init(@Observes StartupEvent ev) throws InterruptedException {
CountDownLatch latch = new CountDownLatch(1);
vertx.deployVerticle(MyVerticle.class.getName(),
new DeploymentOptions().setInstances(2),
ar -> latch.countDown());
latch.await();
}
}
public static | BeanDeployingAVerticleFromClass |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/annotation/ProfileValueUtils.java | {
"start": 2156,
"end": 4256
} | class ____ which the ProfileValueSource should
* be retrieved
* @return the configured (or default) ProfileValueSource for the specified
* class
* @see SystemProfileValueSource
*/
@SuppressWarnings("unchecked")
public static ProfileValueSource retrieveProfileValueSource(Class<?> testClass) {
Assert.notNull(testClass, "testClass must not be null");
Class<ProfileValueSourceConfiguration> annotationType = ProfileValueSourceConfiguration.class;
ProfileValueSourceConfiguration config = AnnotatedElementUtils.findMergedAnnotation(testClass, annotationType);
if (logger.isDebugEnabled()) {
logger.debug("Retrieved @ProfileValueSourceConfiguration [" + config + "] for test class [" +
testClass.getName() + "]");
}
Class<? extends ProfileValueSource> profileValueSourceType;
if (config != null) {
profileValueSourceType = config.value();
}
else {
profileValueSourceType = (Class<? extends ProfileValueSource>) AnnotationUtils.getDefaultValue(annotationType);
Assert.state(profileValueSourceType != null, "No default ProfileValueSource class");
}
if (logger.isDebugEnabled()) {
logger.debug("Retrieved ProfileValueSource type [" + profileValueSourceType + "] for class [" +
testClass.getName() + "]");
}
ProfileValueSource profileValueSource;
if (SystemProfileValueSource.class == profileValueSourceType) {
profileValueSource = SystemProfileValueSource.getInstance();
}
else {
try {
profileValueSource = ReflectionUtils.accessibleConstructor(profileValueSourceType).newInstance();
}
catch (Exception ex) {
if (logger.isWarnEnabled()) {
logger.warn("Could not instantiate a ProfileValueSource of type [" + profileValueSourceType +
"] for class [" + testClass.getName() + "]: using default.", ex);
}
profileValueSource = SystemProfileValueSource.getInstance();
}
}
return profileValueSource;
}
/**
* Determine if the supplied {@code testClass} is <em>enabled</em> in
* the current environment, as specified by the {@link IfProfileValue
* @IfProfileValue} annotation at the | for |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/SingleOperator.java | {
"start": 938,
"end": 1421
} | interface ____<@NonNull Downstream, @NonNull Upstream> {
/**
* Applies a function to the child {@link SingleObserver} and returns a new parent {@code SingleObserver}.
* @param observer the child {@code SingleObserver} instance
* @return the parent {@code SingleObserver} instance
* @throws Throwable on failure
*/
@NonNull
SingleObserver<? super Upstream> apply(@NonNull SingleObserver<? super Downstream> observer) throws Throwable;
}
| SingleOperator |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Dhis2EndpointBuilderFactory.java | {
"start": 1604,
"end": 22545
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedDhis2EndpointConsumerBuilder advanced() {
return (AdvancedDhis2EndpointConsumerBuilder) this;
}
/**
* DHIS2 server base API URL (e.g.,
* https://play.dhis2.org/2.39.1.1/api).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param baseApiUrl the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder baseApiUrl(String baseApiUrl) {
doSetProperty("baseApiUrl", baseApiUrl);
return this;
}
/**
* Sets the name of a parameter to be passed in the exchange In Body.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param inBody the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder inBody(String inBody) {
doSetProperty("inBody", inBody);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder sendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder sendEmptyMessageWhenIdle(String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffErrorThreshold(int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffErrorThreshold(String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffIdleThreshold(int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffIdleThreshold(String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffMultiplier(int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder backoffMultiplier(String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder initialDelay(String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder runLoggingLevel(org.apache.camel.LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder runLoggingLevel(String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder scheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder scheduledExecutorService(String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder schedulerProperties(String key, Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param values the values
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder startScheduler(boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder startScheduler(String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Password of the DHIS2 username.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Personal access token to authenticate with DHIS2. This option is
* mutually exclusive to username and password.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param personalAccessToken the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder personalAccessToken(String personalAccessToken) {
doSetProperty("personalAccessToken", personalAccessToken);
return this;
}
/**
* Username of the DHIS2 user to operate as.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default Dhis2EndpointConsumerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the DHIS2 component.
*/
public | Dhis2EndpointConsumerBuilder |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3300/Issue3443.java | {
"start": 1280,
"end": 1611
} | class ____ {
private String parameterName;
private String parameterUsage;
// do some work...
public ParameterDesc(String parameterName, String parameterUsage) {
this.parameterName = parameterName;
this.parameterUsage = parameterUsage;
}
}
static | ParameterDesc |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/http/CsrfBeanDefinitionParserTests.java | {
"start": 828,
"end": 1442
} | class ____ {
private static final String CONFIG_LOCATION_PREFIX = "classpath:org/springframework/security/config/http/CsrfBeanDefinitionParserTests";
@Test
public void registerDataValueProcessorOnlyIfNotRegistered() {
try (ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext()) {
context.setAllowBeanDefinitionOverriding(false);
context.setConfigLocation(this.xml("RegisterDataValueProcessorOnyIfNotRegistered"));
context.refresh();
}
}
private String xml(String configName) {
return CONFIG_LOCATION_PREFIX + "-" + configName + ".xml";
}
}
| CsrfBeanDefinitionParserTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java | {
"start": 706,
"end": 2524
} | class ____ extends LegacyActionRequest {
@Nullable
private String realmName;
@Nullable
private String assertionConsumerServiceURL;
@Nullable
private String relayState;
public SamlPrepareAuthenticationRequest(StreamInput in) throws IOException {
super(in);
realmName = in.readOptionalString();
assertionConsumerServiceURL = in.readOptionalString();
relayState = in.readOptionalString();
}
public SamlPrepareAuthenticationRequest() {}
@Override
public ActionRequestValidationException validate() {
return null;
}
public String getRealmName() {
return realmName;
}
public void setRealmName(String realmName) {
this.realmName = realmName;
}
public String getAssertionConsumerServiceURL() {
return assertionConsumerServiceURL;
}
public void setAssertionConsumerServiceURL(String assertionConsumerServiceURL) {
this.assertionConsumerServiceURL = assertionConsumerServiceURL;
}
public String getRelayState() {
return relayState;
}
public void setRelayState(String relayState) {
this.relayState = relayState;
}
@Override
public String toString() {
return getClass().getSimpleName()
+ "{"
+ "realmName="
+ realmName
+ ", assertionConsumerServiceURL="
+ assertionConsumerServiceURL
+ ", relayState="
+ relayState
+ '}';
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(realmName);
out.writeOptionalString(assertionConsumerServiceURL);
out.writeOptionalString(relayState);
}
}
| SamlPrepareAuthenticationRequest |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/DisplayNameGenerationTests.java | {
"start": 1683,
"end": 12499
} | class ____ extends AbstractJupiterTestEngineTests {
@Test
void standardGenerator() {
check(DefaultStyleTestCase.class, //
"CONTAINER: DisplayNameGenerationTests$DefaultStyleTestCase", //
"TEST: @DisplayName prevails", //
"TEST: test()", //
"TEST: test(TestInfo)", //
"TEST: testUsingCamelCaseStyle()", //
"TEST: testUsingCamelCase_and_also_UnderScores()", //
"TEST: testUsingCamelCase_and_also_UnderScores_keepingParameterTypeNamesIntact(TestInfo)", //
"TEST: test_with_underscores()" //
);
}
@Test
void simpleGenerator() {
check(SimpleStyleTestCase.class, //
"CONTAINER: DisplayNameGenerationTests$SimpleStyleTestCase", //
"TEST: @DisplayName prevails", //
"TEST: test", //
"TEST: test (TestInfo)", //
"TEST: testUsingCamelCaseStyle", //
"TEST: testUsingCamelCase_and_also_UnderScores", //
"TEST: testUsingCamelCase_and_also_UnderScores_keepingParameterTypeNamesIntact (TestInfo)", //
"TEST: test_with_underscores" //
);
}
@Test
void underscoreGenerator() {
var expectedDisplayNames = new String[] { //
"<replace me>", //
"TEST: @DisplayName prevails", //
"TEST: test", //
"TEST: test (TestInfo)", //
"TEST: test with underscores", //
"TEST: testUsingCamelCase and also UnderScores", //
"TEST: testUsingCamelCase and also UnderScores keepingParameterTypeNamesIntact (TestInfo)", //
"TEST: testUsingCamelCaseStyle" //
};
expectedDisplayNames[0] = "CONTAINER: DisplayNameGenerationTests$UnderscoreStyleTestCase";
check(UnderscoreStyleTestCase.class, expectedDisplayNames);
expectedDisplayNames[0] = "CONTAINER: DisplayNameGenerationTests$UnderscoreStyleInheritedFromSuperClassTestCase";
check(UnderscoreStyleInheritedFromSuperClassTestCase.class, expectedDisplayNames);
}
@Test
void indicativeSentencesGeneratorOnStaticNestedClass() {
check(IndicativeStyleTestCase.class, //
"CONTAINER: DisplayNameGenerationTests$IndicativeStyleTestCase", //
"TEST: @DisplayName prevails", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> test", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> test (TestInfo)", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> test with underscores", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> testUsingCamelCase and also UnderScores", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> testUsingCamelCase and also UnderScores keepingParameterTypeNamesIntact (TestInfo)", //
"TEST: DisplayNameGenerationTests$IndicativeStyleTestCase -> testUsingCamelCaseStyle" //
);
}
@Test
void indicativeSentencesGeneratorOnTopLevelClass() {
check(IndicativeSentencesTopLevelTestCase.class, //
"CONTAINER: IndicativeSentencesTopLevelTestCase", //
"CONTAINER: IndicativeSentencesTopLevelTestCase -> A year is a leap year", //
"TEST: IndicativeSentencesTopLevelTestCase -> A year is a leap year -> if it is divisible by 4 but not by 100" //
);
}
@Test
void indicativeSentencesGeneratorOnNestedClass() {
check(IndicativeSentencesNestedTestCase.class, //
"CONTAINER: IndicativeSentencesNestedTestCase", //
"CONTAINER: A year is a leap year", //
"TEST: A year is a leap year -> if it is divisible by 4 but not by 100" //
);
}
@Test
void noNameGenerator() {
check(NoNameStyleTestCase.class, //
"CONTAINER: nn", //
"TEST: @DisplayName prevails", //
"TEST: nn", //
"TEST: nn", //
"TEST: nn", //
"TEST: nn", //
"TEST: nn", //
"TEST: nn" //
);
}
@Test
void checkDisplayNameGeneratedForTestingAStackDemo() {
check(StackTestCase.class, //
"CONTAINER: A stack", //
"TEST: is instantiated using its noarg constructor", //
"CONTAINER: A new stack", //
"TEST: throws an EmptyStackException when peeked", //
"TEST: throws an EmptyStackException when popped", //
"TEST: is empty", //
"CONTAINER: After pushing an element to an empty stack", //
"TEST: peek returns that element without removing it from the stack", //
"TEST: pop returns that element and leaves an empty stack", //
"TEST: the stack is no longer empty" //
);
}
@Test
void checkDisplayNameGeneratedForIndicativeGenerator() {
check(IndicativeGeneratorTestCase.class, //
"CONTAINER: A stack", //
"TEST: A stack, is instantiated with its constructor", //
"CONTAINER: A stack, when new", //
"TEST: A stack, when new, throws EmptyStackException when peeked", //
"CONTAINER: A stack, when new, after pushing an element to an empty stack", //
"TEST: A stack, when new, after pushing an element to an empty stack, is no longer empty" //
);
}
@Test
void checkDisplayNameGeneratedForIndicativeGeneratorWithCustomSeparator() {
check(IndicativeGeneratorWithCustomSeparatorTestCase.class, //
"CONTAINER: A stack", //
"TEST: A stack >> is instantiated with its constructor", //
"CONTAINER: A stack >> when new", //
"TEST: A stack >> when new >> throws EmptyStackException when peeked", //
"CONTAINER: A stack >> when new >> after pushing an element to an empty stack", //
"TEST: A stack >> when new >> after pushing an element to an empty stack >> is no longer empty" //
);
}
@Test
void checkDisplayNameGeneratedForIndicativeGeneratorWithCustomSentenceFragments() {
check(IndicativeGeneratorWithCustomSentenceFragmentsTestCase.class, //
"CONTAINER: A stack", //
"TEST: A stack, is instantiated with its constructor", //
"CONTAINER: A stack, when new", //
"TEST: A stack, when new, throws EmptyStackException when peeked", //
"CONTAINER: A stack, when new, after pushing an element to an empty stack", //
"TEST: A stack, when new, after pushing an element to an empty stack, is no longer empty" //
);
}
@Test
void blankSentenceFragmentOnClassYieldsError() {
var results = discoverTests(selectClass(BlankSentenceFragmentOnClassTestCase.class));
var discoveryIssues = results.getDiscoveryIssues();
assertThat(discoveryIssues).hasSize(1);
assertThat(discoveryIssues.getFirst().severity()).isEqualTo(Severity.ERROR);
assertThat(discoveryIssues.getFirst().cause().orElseThrow()) //
.hasMessage("@SentenceFragment on [%s] must be declared with a non-blank value.",
BlankSentenceFragmentOnClassTestCase.class);
}
@Test
void blankSentenceFragmentOnMethodYieldsError() throws Exception {
var results = discoverTests(selectMethod(BlankSentenceFragmentOnMethodTestCase.class, "test"));
var discoveryIssues = results.getDiscoveryIssues();
assertThat(discoveryIssues).hasSize(1);
assertThat(discoveryIssues.getFirst().severity()).isEqualTo(Severity.ERROR);
assertThat(discoveryIssues.getFirst().cause().orElseThrow()) //
.hasMessage("@SentenceFragment on [%s] must be declared with a non-blank value.",
BlankSentenceFragmentOnMethodTestCase.class.getDeclaredMethod("test"));
}
@Test
void displayNameGenerationInheritance() {
check(DisplayNameGenerationInheritanceTestCase.InnerNestedTestCase.class, //
"CONTAINER: DisplayNameGenerationInheritanceTestCase", //
"CONTAINER: InnerNestedTestCase", //
"TEST: this is a test"//
);
check(DisplayNameGenerationInheritanceTestCase.StaticNestedTestCase.class, //
"CONTAINER: DisplayNameGenerationInheritanceTestCase$StaticNestedTestCase", //
"TEST: this_is_a_test()"//
);
}
@Test
void indicativeSentencesGenerationInheritance() {
check(IndicativeSentencesGenerationInheritanceTestCase.InnerNestedTestCase.class, //
"CONTAINER: IndicativeSentencesGenerationInheritanceTestCase", //
"CONTAINER: IndicativeSentencesGenerationInheritanceTestCase -> InnerNestedTestCase", //
"TEST: IndicativeSentencesGenerationInheritanceTestCase -> InnerNestedTestCase -> this is a test"//
);
check(IndicativeSentencesGenerationInheritanceTestCase.StaticNestedTestCase.class, //
"CONTAINER: IndicativeSentencesGenerationInheritanceTestCase$StaticNestedTestCase", //
"TEST: this_is_a_test()"//
);
}
@Test
void indicativeSentencesRuntimeEnclosingType() {
check(IndicativeSentencesRuntimeEnclosingTypeScenarioOneTestCase.class, //
"CONTAINER: Scenario 1", //
"CONTAINER: Scenario 1 -> Level 1", //
"CONTAINER: Scenario 1 -> Level 1 -> Level 2", //
"TEST: Scenario 1 -> Level 1 -> Level 2 -> this is a test"//
);
check(IndicativeSentencesRuntimeEnclosingTypeScenarioTwoTestCase.class, //
"CONTAINER: Scenario 2", //
"CONTAINER: Scenario 2 -> Level 1", //
"CONTAINER: Scenario 2 -> Level 1 -> Level 2", //
"TEST: Scenario 2 -> Level 1 -> Level 2 -> this is a test"//
);
}
@Test
void indicativeSentencesOnSubClass() {
check(IndicativeSentencesOnSubClassScenarioOneTestCase.class, //
"CONTAINER: IndicativeSentencesOnSubClassScenarioOneTestCase", //
"CONTAINER: IndicativeSentencesOnSubClassScenarioOneTestCase -> Level 1", //
"CONTAINER: IndicativeSentencesOnSubClassScenarioOneTestCase -> Level 1 -> Level 2", //
"TEST: IndicativeSentencesOnSubClassScenarioOneTestCase -> Level 1 -> Level 2 -> this is a test"//
);
}
@Test
void indicativeSentencesOnClassTemplate() {
check(ClassTemplateTestCase.class, //
"CONTAINER: Class template", //
"CONTAINER: [1] Class template", //
"TEST: Class template, some test", //
"CONTAINER: Class template, Regular Nested Test Case", //
"TEST: Class template, Regular Nested Test Case, some nested test", //
"CONTAINER: Class template, Nested Class Template", //
"CONTAINER: [1] Class template, Nested Class Template", //
"TEST: Class template, Nested Class Template, some nested test" //
);
assertThat(executeTestsForClass(ClassTemplateTestCase.class).allEvents().started().stream()) //
.map(event -> event.getTestDescriptor().getDisplayName()) //
.containsExactly( //
"JUnit Jupiter", //
"Class template", //
"[1] Class template", //
"Class template, some test", //
"Class template, Regular Nested Test Case", //
"Class template, Regular Nested Test Case, some nested test", //
"Class template, Nested Class Template", //
"[1] Class template, Nested Class Template", //
"Class template, Nested Class Template, some nested test" //
);
}
private void check(Class<?> testClass, String... expectedDisplayNames) {
var results = executeTestsForClass(testClass);
check(results, expectedDisplayNames);
}
private void check(EngineExecutionResults results, String[] expectedDisplayNames) {
var descriptors = results.allEvents().started().stream() //
.map(Event::getTestDescriptor) //
.skip(1); // Skip engine descriptor
assertThat(descriptors) //
.map(it -> it.getType() + ": " + it.getDisplayName()) //
.containsExactlyInAnyOrder(expectedDisplayNames);
}
// -------------------------------------------------------------------------
static | DisplayNameGenerationTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1200/Issue1293.java | {
"start": 627,
"end": 1255
} | class ____{
private long count;
private IdType idType;
private UserType userType;
public long getCount() {
return count;
}
public void setCount(long count) {
this.count = count;
}
public IdType getIdType() {
return idType;
}
public void setIdType(IdType idType) {
this.idType = idType;
}
public UserType getUserType() {
return userType;
}
public void setUserType(UserType userType) {
this.userType = userType;
}
}
static | Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsRequest.java | {
"start": 1219,
"end": 2401
} | class ____ {
@Public
@Unstable
public static AddToClusterNodeLabelsRequest newInstance(List<NodeLabel> nodeLabels) {
AddToClusterNodeLabelsRequest request = Records
.newRecord(AddToClusterNodeLabelsRequest.class);
request.setNodeLabels(nodeLabels);
return request;
}
@Public
@Unstable
public static AddToClusterNodeLabelsRequest newInstance(String subClusterId,
List<NodeLabel> nodeLabels) {
AddToClusterNodeLabelsRequest request = Records
.newRecord(AddToClusterNodeLabelsRequest.class);
request.setNodeLabels(nodeLabels);
request.setSubClusterId(subClusterId);
return request;
}
@Public
@Unstable
public abstract void setNodeLabels(List<NodeLabel> nodeLabels);
@Public
@Unstable
public abstract List<NodeLabel> getNodeLabels();
/**
* Get the subClusterId.
*
* @return subClusterId.
*/
@Public
@InterfaceStability.Evolving
public abstract String getSubClusterId();
/**
* Set the subClusterId.
*
* @param subClusterId subCluster Id.
*/
@Public
@InterfaceStability.Evolving
public abstract void setSubClusterId(String subClusterId);
}
| AddToClusterNodeLabelsRequest |
java | micronaut-projects__micronaut-core | http/src/test/java/io/micronaut/http/cookie/ServerCookieDecoderTest.java | {
"start": 137,
"end": 325
} | class ____ {
@Test
void serverCookieDecoderResolvedViaSpi() {
assertInstanceOf(DefaultServerCookieDecoder.class, ServerCookieDecoder.INSTANCE);
}
}
| ServerCookieDecoderTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java | {
"start": 18551,
"end": 19595
} | class ____ implements JSON.Convertor {
private static final String OWNER_TAG = "owner";
private static final String GROUP_TAG = "group";
private static final String PERMISSIONS_TAG = "permissions";
@Override
public void toJSON(Object obj, JSON.Output out) {
PermissionStatus permissionStatus = (PermissionStatus) obj;
// Don't store group as null, just store it as empty string
// (which is FileStatus behavior).
String group = permissionStatus.getGroupName() == null ? ""
: permissionStatus.getGroupName();
out.add(OWNER_TAG, permissionStatus.getUserName());
out.add(GROUP_TAG, group);
out.add(PERMISSIONS_TAG, permissionStatus.getPermission().toString());
}
@Override
public Object fromJSON(@SuppressWarnings("rawtypes") Map object) {
return PermissionStatusJsonSerializer.fromJSONMap(object);
}
@SuppressWarnings("rawtypes")
public static PermissionStatus fromJSONString(String jsonString) {
// The JSON | PermissionStatusJsonSerializer |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cache/spi/support/AbstractCachedDomainDataAccess.java | {
"start": 522,
"end": 2631
} | class ____ implements CachedDomainDataAccess, AbstractDomainDataRegion.Destructible {
private final DomainDataRegion region;
private final DomainDataStorageAccess storageAccess;
protected AbstractCachedDomainDataAccess(
DomainDataRegion region,
DomainDataStorageAccess storageAccess) {
this.region = region;
this.storageAccess = storageAccess;
}
@Override
public DomainDataRegion getRegion() {
return region;
}
@Internal
public DomainDataStorageAccess getStorageAccess() {
return storageAccess;
}
protected void clearCache() {
L2CACHE_LOGGER.tracef( "Clearing cache data map [region='%s']", region.getName() );
getStorageAccess().evictData();
}
@Override
public boolean contains(Object key) {
return getStorageAccess().contains( key );
}
@Override
public Object get(SharedSessionContractImplementor session, Object key) {
return getStorageAccess().getFromCache( key, session );
}
@Override
public boolean putFromLoad(
SharedSessionContractImplementor session,
Object key,
Object value,
Object version) {
getStorageAccess().putFromLoad( key, value, session );
return true;
}
@Override
public boolean putFromLoad(
SharedSessionContractImplementor session,
Object key,
Object value,
Object version,
boolean minimalPutOverride) {
return putFromLoad( session, key, value, version );
}
private static final SoftLock REGION_LOCK = new SoftLock() {
};
@Override
public SoftLock lockRegion() {
return REGION_LOCK;
}
@Override
public void unlockRegion(SoftLock lock) {
evictAll();
}
@Override
public void remove(SharedSessionContractImplementor session, Object key) {
getStorageAccess().removeFromCache( key, session );
}
@Override
public void removeAll(SharedSessionContractImplementor session) {
getStorageAccess().clearCache( session );
}
@Override
public void evict(Object key) {
getStorageAccess().evictData( key );
}
@Override
public void evictAll() {
getStorageAccess().evictData();
}
@Override
public void destroy() {
getStorageAccess().release();
}
}
| AbstractCachedDomainDataAccess |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java | {
"start": 11191,
"end": 15754
} | interface ____ extends BlockLoader.Builder, Releasable {
/**
* Appends a null value to the block.
*/
Builder appendNull();
/**
* Begins a multivalued entry. Calling this for the first time will put
* the builder into a mode that generates Blocks that return {@code true}
* from {@link Block#mayHaveMultivaluedFields} which can force less
* optimized code paths. So don't call this unless you are sure you are
* emitting more than one value for this position.
*/
Builder beginPositionEntry();
/**
* Ends the current multi-value entry.
*/
Builder endPositionEntry();
/**
* Copy the values in {@code block} from {@code beginInclusive} to
* {@code endExclusive} into this builder.
* <p>
* For single position copies use the faster
* {@link IntBlockBuilder#copyFrom(IntBlock, int)},
* {@link LongBlockBuilder#copyFrom(LongBlock, int)}, etc.
* </p>
*/
Builder copyFrom(Block block, int beginInclusive, int endExclusive);
/**
* How are multivalued fields ordered? This defaults to {@link Block.MvOrdering#UNORDERED}
* but when you set it to {@link Block.MvOrdering#DEDUPLICATED_AND_SORTED_ASCENDING} some operators can optimize
* themselves. This is a <strong>promise</strong> that is never checked. If you set this
* to anything other than {@link Block.MvOrdering#UNORDERED} be sure the values are in
* that order or other operators will make mistakes. The actual ordering isn't checked
* at runtime.
*/
Builder mvOrdering(Block.MvOrdering mvOrdering);
/**
* An estimate of the number of bytes the {@link Block} created by
* {@link #build} will use. This may overestimate the size but shouldn't
* underestimate it.
*/
long estimatedBytes();
/**
* Builds the block. This method can be called multiple times.
*/
Block build();
/**
* Build many {@link Block}s at once, releasing any partially built blocks
* if any fail.
*/
static Block[] buildAll(Block.Builder... builders) {
Block[] blocks = new Block[builders.length];
try {
for (int b = 0; b < blocks.length; b++) {
blocks[b] = builders[b].build();
}
} finally {
if (blocks[blocks.length - 1] == null) {
Releasables.closeExpectNoException(blocks);
}
}
return blocks;
}
}
/**
* Writes only the data of the block to a stream output.
* This method should be used when the type of the block is known during reading.
*/
void writeTo(StreamOutput out) throws IOException;
/**
* Writes the type of the block followed by the block data to a stream output.
* This should be paired with {@link #readTypedBlock(BlockStreamInput)}
*/
static void writeTypedBlock(Block block, StreamOutput out) throws IOException {
if (false == supportsAggregateMetricDoubleBlock(out.getTransportVersion()) && block instanceof AggregateMetricDoubleArrayBlock a) {
block = a.asCompositeBlock();
}
block.elementType().writeTo(out);
block.writeTo(out);
}
/**
* Reads the block type and then the block data from a stream input
* This should be paired with {@link #writeTypedBlock(Block, StreamOutput)}
*/
static Block readTypedBlock(BlockStreamInput in) throws IOException {
ElementType elementType = ElementType.readFrom(in);
Block block = elementType.reader.readBlock(in);
if (false == supportsAggregateMetricDoubleBlock(in.getTransportVersion()) && block instanceof CompositeBlock compositeBlock) {
block = AggregateMetricDoubleArrayBlock.fromCompositeBlock(compositeBlock);
}
return block;
}
static boolean supportsAggregateMetricDoubleBlock(TransportVersion version) {
return version.supports(ESQL_AGGREGATE_METRIC_DOUBLE_BLOCK);
}
/**
* Serialization type for blocks: 0 and 1 replace false/true used in pre-8.14
*/
byte SERIALIZE_BLOCK_VALUES = 0;
byte SERIALIZE_BLOCK_VECTOR = 1;
byte SERIALIZE_BLOCK_ARRAY = 2;
byte SERIALIZE_BLOCK_BIG_ARRAY = 3;
byte SERIALIZE_BLOCK_ORDINAL = 3;
}
| Builder |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/sps/StoragePolicySatisfier.java | {
"start": 34257,
"end": 37378
} | class ____ {
private final StorageType storageType;
private final DatanodeInfo dn;
StorageTypeNodePair(StorageType storageType, DatanodeInfo dn) {
this.storageType = storageType;
this.dn = dn;
}
public DatanodeInfo getDatanodeInfo() {
return dn;
}
public StorageType getStorageType() {
return storageType;
}
@Override
public String toString() {
return new StringBuilder().append("StorageTypeNodePair(\n ")
.append("DatanodeInfo: ").append(dn).append(", StorageType: ")
.append(storageType).toString();
}
}
private EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>>
findTargetsForExpectedStorageTypes(List<StorageType> expected,
DatanodeMap liveDns) {
EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>> targetsMap =
new EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>>(
StorageType.class);
for (StorageType storageType : expected) {
List<DatanodeWithStorage> nodes = liveDns.getTarget(storageType);
if (nodes == null) {
return targetsMap;
}
List<DatanodeWithStorage.StorageDetails> listNodes = targetsMap
.get(storageType);
if (listNodes == null) {
listNodes = new ArrayList<>();
targetsMap.put(storageType, listNodes);
}
for (DatanodeWithStorage n : nodes) {
final DatanodeWithStorage.StorageDetails node = getMaxRemaining(n,
storageType);
if (node != null) {
listNodes.add(node);
}
}
}
return targetsMap;
}
private static DatanodeWithStorage.StorageDetails getMaxRemaining(
DatanodeWithStorage node, StorageType storageType) {
long max = 0L;
DatanodeWithStorage.StorageDetails nodeInfo = null;
List<DatanodeWithStorage.StorageDetails> storages = node
.getNodesWithStorages(storageType);
for (DatanodeWithStorage.StorageDetails n : storages) {
if (n.availableSizeToMove() > max) {
max = n.availableSizeToMove();
nodeInfo = n;
}
}
return nodeInfo;
}
private boolean checkSourceAndTargetTypeExists(DatanodeInfo dn,
List<StorageType> existingStorageTypes,
List<StorageType> expectedStorageTypes, DatanodeMap liveDns) {
boolean isExpectedTypeAvailable = false;
boolean isExistingTypeAvailable = false;
for (DatanodeWithStorage liveDn : liveDns.getTargets()) {
if (dn.equals(liveDn.datanode)) {
for (StorageType eachType : liveDn.getStorageTypes()) {
if (existingStorageTypes.contains(eachType)) {
isExistingTypeAvailable = true;
}
if (expectedStorageTypes.contains(eachType)) {
isExpectedTypeAvailable = true;
}
if (isExistingTypeAvailable && isExpectedTypeAvailable) {
return true;
}
}
}
}
return isExistingTypeAvailable && isExpectedTypeAvailable;
}
/**
* Maintains storage type map with the available datanodes in the cluster.
*/
public static | StorageTypeNodePair |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/TreatAbstractSuperclassTest.java | {
"start": 4668,
"end": 4847
} | class ____ {
@Id
@GeneratedValue
private long id;
}
@Entity(name = "Publication")
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
public abstract static | BaseEntity |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/PropertyNameTest.java | {
"start": 197,
"end": 892
} | class ____
{
@Test
public void testMerging() {
PropertyName name1 = PropertyName.construct("name1", "ns1");
PropertyName name2 = PropertyName.construct("name2", "ns2");
PropertyName empty = PropertyName.construct("", null);
PropertyName nsX = PropertyName.construct("", "nsX");
assertSame(name1, PropertyName.merge(name1, name2));
assertSame(name2, PropertyName.merge(name2, name1));
assertSame(name1, PropertyName.merge(name1, empty));
assertSame(name1, PropertyName.merge(empty, name1));
assertEquals(PropertyName.construct("name1", "nsX"),
PropertyName.merge(nsX, name1));
}
}
| PropertyNameTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/XmlBeanFactoryTests.java | {
"start": 78501,
"end": 78855
} | class ____ implements MethodReplacer {
@Override
public Object reimplement(Object obj, Method method, Object[] args) {
List<String> list = Arrays.stream((Object[]) args[0])
.map(Object::toString)
.collect(Collectors.toCollection(ArrayList::new));
Collections.reverse(list);
return list.toArray(String[]::new);
}
}
| ReverseArrayMethodReplacer |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java | {
"start": 981,
"end": 1626
} | interface ____ flatMap functions. FlatMap functions take elements and transform them, into
* zero, one, or more elements. Typical applications can be splitting elements, or unnesting lists
* and arrays. Operations that produce multiple strictly one result element per input element can
* also use the {@link MapFunction}.
*
* <p>The basic syntax for using a FlatMapFunction is as follows:
*
* <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<Y> result = input.flatMap(new MyFlatMapFunction());
* }</pre>
*
* @param <T> Type of the input elements.
* @param <O> Type of the returned elements.
*/
@Public
@FunctionalInterface
public | for |
java | quarkusio__quarkus | extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/WebSocketClientConnection.java | {
"start": 304,
"end": 432
} | interface ____ extends Connection {
/*
* @return the client id
*/
String clientId();
}
| WebSocketClientConnection |
java | quarkusio__quarkus | integration-tests/redis-client/src/main/java/io/quarkus/redis/it/RedisWithNamedClientResource.java | {
"start": 543,
"end": 1701
} | class ____ {
@Inject
@RedisClientName("named-client-legacy")
RedisClient redisClient;
@Inject
@RedisClientName("named-reactive-client-legacy")
ReactiveRedisClient reactiveRedisClient;
// synchronous
@GET
@Path("/sync/{key}")
public String getSync(@PathParam("key") String key) {
Response response = redisClient.get(key);
return response == null ? null : response.toString();
}
@POST
@Path("/sync/{key}")
public void setSync(@PathParam("key") String key, String value) {
this.redisClient.set(Arrays.asList(key, value));
}
// reactive
@GET
@Path("/reactive/{key}")
public Uni<String> getReactive(@PathParam("key") String key) {
return reactiveRedisClient
.get(key)
.map(response -> response == null ? null : response.toString());
}
@POST
@Path("/reactive/{key}")
public Uni<Void> setReactive(@PathParam("key") String key, String value) {
return this.reactiveRedisClient
.set(Arrays.asList(key, value))
.map(response -> null);
}
}
| RedisWithNamedClientResource |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/reactive/TransactionalEventPublisher.java | {
"start": 1797,
"end": 3508
} | class ____ {
private final ApplicationEventPublisher eventPublisher;
/**
* Create a new delegate for publishing transactional events in a reactive setup.
* @param eventPublisher the actual event publisher to use,
* typically a Spring {@link org.springframework.context.ApplicationContext}
*/
public TransactionalEventPublisher(ApplicationEventPublisher eventPublisher) {
this.eventPublisher = eventPublisher;
}
/**
* Publish an event created through the given function which maps the transaction
* source object (the {@link TransactionContext}) to the event instance.
* @param eventCreationFunction a function mapping the source object to the event instance,
* for example, {@code source -> new PayloadApplicationEvent<>(source, "myPayload")}
* @return the Reactor {@link Mono} for the transactional event publication
*/
public Mono<Void> publishEvent(Function<TransactionContext, ApplicationEvent> eventCreationFunction) {
return TransactionContextManager.currentContext().map(eventCreationFunction)
.doOnSuccess(this.eventPublisher::publishEvent).then();
}
/**
* Publish an event created for the given payload.
* @param payload the payload to publish as an event
* @return the Reactor {@link Mono} for the transactional event publication
*/
public Mono<Void> publishEvent(Object payload) {
if (payload instanceof ApplicationEvent) {
return Mono.error(new IllegalArgumentException("Cannot publish ApplicationEvent with transactional " +
"source - publish payload object or use publishEvent(Function<Object, ApplicationEvent>"));
}
return publishEvent(source -> new PayloadApplicationEvent<>(source, payload));
}
}
| TransactionalEventPublisher |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/annotation/ServerFilter.java | {
"start": 1407,
"end": 2224
} | interface ____ {
/**
* Pattern used to match all requests.
*/
String MATCH_ALL_PATTERN = Filter.MATCH_ALL_PATTERN;
/**
* @return The patterns this filter should match
*/
String[] value() default {};
/**
* @return The style of pattern this filter uses
*/
FilterPatternStyle patternStyle() default FilterPatternStyle.ANT;
/**
* Same as {@link #value()}.
*
* @return The patterns
*/
@AliasFor(member = "value")
String[] patterns() default {};
/**
* @return The methods to match. Defaults to all
*/
HttpMethod[] methods() default {};
/**
* @return Whether the contextPath should be concatenated into the filter pattern
* @since 4.5.1
*/
boolean appendContextPath() default true;
}
| ServerFilter |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointCustomInterceptorTest.java | {
"start": 3581,
"end": 4443
} | class ____ implements InterceptStrategy {
private final AtomicInteger counter = new AtomicInteger();
@Override
public Processor wrapProcessorInInterceptors(
final CamelContext context, final NamedNode definition, final Processor target, final Processor nextTarget) {
return new DelegateAsyncProcessor(target) {
public boolean process(final Exchange exchange, final AsyncCallback callback) {
// we just want to count number of interceptions
counter.incrementAndGet();
// and continue processing the exchange
return super.process(exchange, callback);
}
};
}
public int getCounter() {
return counter.get();
}
}
// END SNIPPET: e1
}
| MyInterceptor |
java | apache__flink | tools/ci/flink-ci-tools/src/main/java/org/apache/flink/tools/ci/utils/shared/DependencyTree.java | {
"start": 1329,
"end": 2272
} | class ____ {
private final Map<String, Node> lookup = new LinkedHashMap<>();
private final List<Node> directDependencies = new ArrayList<>();
public DependencyTree addDirectDependency(Dependency dependency) {
final String key = getKey(dependency);
if (lookup.containsKey(key)) {
return this;
}
final Node node = new Node(dependency, null);
lookup.put(key, node);
directDependencies.add(node);
return this;
}
public DependencyTree addTransitiveDependencyTo(
Dependency transitiveDependency, Dependency parent) {
final String key = getKey(transitiveDependency);
if (lookup.containsKey(key)) {
return this;
}
final Node node = lookup.get(getKey(parent)).addTransitiveDependency(transitiveDependency);
lookup.put(key, node);
return this;
}
private static final | DependencyTree |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/TestRegistryWithCustomPlugin.java | {
"start": 1274,
"end": 5877
} | class ____ extends IndexTemplateRegistry {
public static final int REGISTRY_VERSION = 3;
public static final String TEMPLATE_VERSION_VARIABLE = "xpack.custom_plugin.template.version";
private final AtomicBoolean policyUpgradeRequired = new AtomicBoolean(false);
private final AtomicBoolean applyRollover = new AtomicBoolean(false);
private final Map<ProjectId, AtomicReference<Collection<RolloverResponse>>> rolloverResponses = new ConcurrentHashMap<>();
private final Map<ProjectId, AtomicReference<Exception>> rolloverFailure = new ConcurrentHashMap<>();
private final ThreadPool threadPool;
TestRegistryWithCustomPlugin(
Settings nodeSettings,
ClusterService clusterService,
ThreadPool threadPool,
Client client,
NamedXContentRegistry xContentRegistry
) {
super(nodeSettings, clusterService, threadPool, client, xContentRegistry);
this.threadPool = threadPool;
}
@Override
protected Map<String, ComponentTemplate> getComponentTemplateConfigs() {
String settingsConfigName = "custom-plugin-settings";
IndexTemplateConfig config = new IndexTemplateConfig(
settingsConfigName,
"/org/elasticsearch/xpack/core/template/custom-plugin-settings.json",
REGISTRY_VERSION,
TEMPLATE_VERSION_VARIABLE
);
ComponentTemplate componentTemplate = null;
try {
componentTemplate = ComponentTemplate.parse(
JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())
);
} catch (IOException e) {
throw new AssertionError(e);
}
return Map.of(settingsConfigName, componentTemplate);
}
@Override
protected Map<String, ComposableIndexTemplate> getComposableTemplateConfigs() {
return IndexTemplateRegistry.parseComposableTemplates(
new IndexTemplateConfig(
"custom-plugin-template",
"/org/elasticsearch/xpack/core/template/custom-plugin-template.json",
REGISTRY_VERSION,
TEMPLATE_VERSION_VARIABLE
)
);
}
@Override
protected List<IngestPipelineConfig> getIngestPipelines() {
return List.of(
new JsonIngestPipelineConfig(
"custom-plugin-default_pipeline",
"/org/elasticsearch/xpack/core/template/custom-plugin-default_pipeline.json",
REGISTRY_VERSION,
TEMPLATE_VERSION_VARIABLE,
Collections.singletonList("custom-plugin-final_pipeline")
),
new JsonIngestPipelineConfig(
"custom-plugin-final_pipeline",
"/org/elasticsearch/xpack/core/template/custom-plugin-final_pipeline.json",
REGISTRY_VERSION,
TEMPLATE_VERSION_VARIABLE
)
);
}
@Override
protected List<LifecyclePolicy> getLifecyclePolicies() {
return List.of(
new LifecyclePolicyConfig("custom-plugin-policy", "/org/elasticsearch/xpack/core/template/custom-plugin-policy.json").load(
LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY
)
);
}
@Override
protected boolean isUpgradeRequired(LifecyclePolicy currentPolicy, LifecyclePolicy newPolicy) {
return policyUpgradeRequired.get();
}
public void setPolicyUpgradeRequired(boolean policyUpgradeRequired) {
this.policyUpgradeRequired.set(policyUpgradeRequired);
}
@Override
protected boolean applyRolloverAfterTemplateV2Update() {
return applyRollover.get();
}
public void setApplyRollover(boolean shouldApplyRollover) {
applyRollover.set(shouldApplyRollover);
}
@Override
void onRolloversBulkResponse(ProjectId projectId, Collection<RolloverResponse> rolloverResponses) {
this.rolloverResponses.computeIfAbsent(projectId, k -> new AtomicReference<>()).set(rolloverResponses);
}
public Map<ProjectId, AtomicReference<Collection<RolloverResponse>>> getRolloverResponses() {
return rolloverResponses;
}
@Override
void onRolloverFailure(ProjectId projectId, Exception e) {
rolloverFailure.computeIfAbsent(projectId, k -> new AtomicReference<>()).set(e);
}
public Map<ProjectId, AtomicReference<Exception>> getRolloverFailure() {
return rolloverFailure;
}
@Override
protected String getOrigin() {
return "test";
}
}
| TestRegistryWithCustomPlugin |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/buffer/OutputStreamPublisher.java | {
"start": 1598,
"end": 1814
} | class ____ a near duplicate in
* {@link org.springframework.http.client.OutputStreamPublisher}.
*
* @author Oleh Dokuka
* @author Arjen Poutsma
* @since 6.1
* @param <T> the published byte buffer type
*/
final | has |
java | google__guice | extensions/persist/test/com/google/inject/persist/jpa/EnsureJpaCanTakeObjectsInPropertiesTest.java | {
"start": 1295,
"end": 3014
} | class ____ extends AbstractModule {
final DataSource ds;
final boolean passDataSource;
DBModule(DataSource ds, boolean passDataSource) {
this.ds = ds;
this.passDataSource = passDataSource;
}
@Override
protected void configure() {
Map<String, Object> p = new HashMap<>();
p.put(Environment.CONNECTION_PROVIDER, DatasourceConnectionProviderImpl.class.getName());
if (passDataSource) {
p.put(Environment.DATASOURCE, ds);
}
JpaPersistModule jpaPersistModule = new JpaPersistModule("testProperties").properties(p);
install(jpaPersistModule);
}
}
@Override
public void setUp() {
injector = null;
}
@Override
public final void tearDown() {
if (injector == null) {
return;
}
injector.getInstance(UnitOfWork.class).end();
injector.getInstance(EntityManagerFactory.class).close();
}
private static DataSource getDataSource() {
final JDBCDataSource dataSource = new JDBCDataSource();
dataSource.setDatabase("jdbc:hsqldb:mem:persistence");
dataSource.setUser("sa");
dataSource.setPassword("");
return dataSource;
}
private void startPersistService(boolean passDataSource) {
final DataSource dataSource = getDataSource();
injector = Guice.createInjector(new DBModule(dataSource, passDataSource));
//startup persistence
injector.getInstance(PersistService.class).start();
}
public void testWorksIfPassDataSource() {
startPersistService(true);
}
public void testFailsIfNoDataSource() {
try {
startPersistService(false);
fail();
} catch (PersistenceException ex) {
// Expected
injector = null;
}
}
}
| DBModule |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/enrollment/NodeEnrollmentRequest.java | {
"start": 537,
"end": 831
} | class ____ extends LegacyActionRequest {
public NodeEnrollmentRequest() {}
public NodeEnrollmentRequest(StreamInput in) throws IOException {
super(in);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
| NodeEnrollmentRequest |
java | quarkusio__quarkus | extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/GuardedResult.java | {
"start": 71,
"end": 763
} | class ____ {
private boolean complete;
private NullPointerException withException;
public synchronized Object get() {
while (!complete) {
try {
wait();
} catch (InterruptedException e) {
// Intentionally empty
}
}
if (withException == null) {
return new Object();
}
throw withException;
}
public synchronized void complete() {
complete(null);
}
public synchronized void complete(NullPointerException withException) {
this.complete = true;
this.withException = withException;
notifyAll();
}
}
| GuardedResult |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/ValueJoiner.java | {
"start": 874,
"end": 2034
} | interface ____ joining two values into a new value of arbitrary type.
* This is a stateless operation, i.e, {@link #apply(Object, Object)} is invoked individually for each joining
* record-pair of a {@link KStream}-{@link KStream}, {@link KStream}-{@link KTable}, or {@link KTable}-{@link KTable}
* join.
*
* @param <V1> first value type
* @param <V2> second value type
* @param <VR> joined value type
* @see KStream#join(KStream, ValueJoiner, JoinWindows)
* @see KStream#join(KStream, ValueJoiner, JoinWindows, StreamJoined)
* @see KStream#leftJoin(KStream, ValueJoiner, JoinWindows)
* @see KStream#leftJoin(KStream, ValueJoiner, JoinWindows, StreamJoined)
* @see KStream#outerJoin(KStream, ValueJoiner, JoinWindows)
* @see KStream#outerJoin(KStream, ValueJoiner, JoinWindows, StreamJoined)
* @see KStream#join(KTable, ValueJoiner)
* @see KStream#join(KTable, ValueJoiner, Joined)
* @see KStream#leftJoin(KTable, ValueJoiner)
* @see KStream#leftJoin(KTable, ValueJoiner, Joined)
* @see KTable#join(KTable, ValueJoiner)
* @see KTable#leftJoin(KTable, ValueJoiner)
* @see KTable#outerJoin(KTable, ValueJoiner)
*/
@FunctionalInterface
public | for |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/stream/DelegateHttpMessage.java | {
"start": 987,
"end": 2170
} | class ____ implements HttpMessage {
protected final HttpMessage message;
/**
* @param message The Http message
*/
DelegateHttpMessage(HttpMessage message) {
this.message = message;
}
@Override
@Deprecated
public HttpVersion getProtocolVersion() {
return message.protocolVersion();
}
@Override
public HttpVersion protocolVersion() {
return message.protocolVersion();
}
@Override
public HttpMessage setProtocolVersion(HttpVersion version) {
message.setProtocolVersion(version);
return this;
}
@Override
public HttpHeaders headers() {
return message.headers();
}
@Override
@Deprecated
public DecoderResult getDecoderResult() {
return message.decoderResult();
}
@Override
public DecoderResult decoderResult() {
return message.decoderResult();
}
@Override
public void setDecoderResult(DecoderResult result) {
message.setDecoderResult(result);
}
@Override
public String toString() {
return this.getClass().getName() + "(" + message.toString() + ")";
}
}
| DelegateHttpMessage |
java | apache__camel | components/camel-saxon/src/test/java/org/apache/camel/component/xquery/BeanWithXQueryInjectionUsingHeaderValueTest.java | {
"start": 1812,
"end": 2357
} | class ____ {
public String userName;
public String date;
public String response;
@Handler
public void handler(
@XQuery("/response") String response,
@XQuery(source = "header:invoiceDetails", value = "/invoice/person/name") String userName,
@XQuery(source = "header:invoiceDetails", value = "/invoice/person/date") String date) {
this.response = response;
this.userName = userName;
this.date = date;
}
}
}
| MyBean |
java | spring-projects__spring-security | crypto/src/main/java/org/springframework/security/crypto/encrypt/RsaKeyHolder.java | {
"start": 730,
"end": 783
} | interface ____ {
String getPublicKey();
}
| RsaKeyHolder |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/inheritfromconfig/CarMapperReverseWithExplicitInheritance.java | {
"start": 565,
"end": 934
} | class ____ {
public static final CarMapperReverseWithExplicitInheritance INSTANCE =
Mappers.getMapper( CarMapperReverseWithExplicitInheritance.class );
@InheritInverseConfiguration(name = "baseDtoToEntity")
@Mapping( target = "colour", source = "color" )
public abstract CarDto toCarDto(CarEntity entity);
}
| CarMapperReverseWithExplicitInheritance |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.