language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
|
{
"start": 1316,
"end": 7483
}
|
class ____ implements Comparable<Utf8>, CharSequence, Externalizable {

    private static final byte[] EMPTY = new byte[0];

    // Backing buffer; may be larger than the logical content (see setByteLength).
    private byte[] bytes;
    // Cached hash code; 0 means "not yet computed" (the empty string caches 1).
    private int hash;
    // Logical length of the UTF-8 content, in bytes.
    private int length;
    // Lazily decoded String form of the content; null until first toString().
    private String string;

    public Utf8() {
        this.bytes = EMPTY;
        this.hash = 1;
    }

    public Utf8(String string) {
        byte[] bytes = getBytesFor(string);
        int length = bytes.length;
        SystemLimitException.checkMaxStringLength(length);
        this.bytes = bytes;
        this.length = length;
        this.string = string;
    }

    /** Copy constructor: copies only the logical bytes, not any excess buffer. */
    public Utf8(Utf8 other) {
        this.length = other.length;
        this.bytes = Arrays.copyOf(other.bytes, other.length);
        this.string = other.string;
        this.hash = other.hash;
    }

    /** NOTE: takes ownership of {@code bytes} without copying. */
    public Utf8(byte[] bytes) {
        int length = bytes.length;
        SystemLimitException.checkMaxStringLength(length);
        this.bytes = bytes;
        this.length = length;
    }

    // Package-private: deliberately overrides the length after encoding.
    Utf8(String string, int length) {
        this(string);
        this.length = length;
    }

    /**
     * Return UTF-8 encoded bytes. Only valid through {@link #getByteLength()}
     * assuming the bytes have been fully copied into the underlying buffer from the
     * source.
     *
     * @see #setByteLength(int)
     * @return a reference to the underlying byte array
     */
    public byte[] getBytes() {
        return bytes;
    }

    /** Return length in bytes. */
    public int getByteLength() {
        return length;
    }

    /**
     * Set length in bytes. When calling this method, even if the new length is the
     * same as the current length, the cached contents of this Utf8 object will be
     * wiped out. After calling this method, no assumptions should be made about the
     * internal state (e.g., contents, hashcode, equality, etc.) of this Utf8 String
     * other than the internal buffer being large enough to accommodate a String of
     * the new length. This should be called immediately before reading a String
     * from the underlying data source.
     *
     * @param newLength the new length of the underlying buffer
     * @return a reference to this object.
     * @see org.apache.avro.io.BinaryDecoder#readString(Utf8)
     */
    public Utf8 setByteLength(int newLength) {
        SystemLimitException.checkMaxStringLength(newLength);
        // Note that if the buffer size increases, the internal buffer is zero-ed out.
        // If the buffer is large enough, just the length pointer moves and the old
        // contents remain. For consistency's sake, we could zero-out the buffer in
        // both cases, but would be a perf hit.
        if (this.bytes.length < newLength) {
            this.bytes = new byte[newLength];
        }
        this.length = newLength;
        // Invalidate the caches: content is about to be overwritten.
        this.string = null;
        this.hash = 0;
        return this;
    }

    /** Set to the contents of a String. */
    public Utf8 set(String string) {
        byte[] bytes = getBytesFor(string);
        int length = bytes.length;
        SystemLimitException.checkMaxStringLength(length);
        this.bytes = bytes;
        this.length = length;
        this.string = string;
        this.hash = 0;
        return this;
    }

    /** Set to the contents of another Utf8, reusing this buffer when possible. */
    public Utf8 set(Utf8 other) {
        if (this.bytes.length < other.length) {
            this.bytes = new byte[other.length];
        }
        this.length = other.length;
        System.arraycopy(other.bytes, 0, bytes, 0, length);
        this.string = other.string;
        this.hash = other.hash;
        return this;
    }

    @Override
    public String toString() {
        if (this.length == 0)
            return "";
        if (this.string == null) {
            this.string = new String(bytes, 0, length, StandardCharsets.UTF_8);
        }
        return this.string;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (!(o instanceof Utf8))
            return false;
        Utf8 that = (Utf8) o;
        if (!(this.length == that.length))
            return false;
        // For longer strings, leverage vectorization (JDK 9+) to determine equality
        // For shorter strings, the overhead of this method defeats the value
        if (this.length > 7)
            return Arrays.equals(this.bytes, 0, this.length, that.bytes, 0, that.length);
    byte[] thatBytes = that.bytes;
        for (int i = 0; i < this.length; i++)
            if (bytes[i] != thatBytes[i])
                return false;
        return true;
    }

    @Override
    public int hashCode() {
        int h = hash;
        if (h == 0) {
            byte[] bytes = this.bytes;
            int length = this.length;
            // If the array is filled, use the underlying JDK hash functionality.
            // Starting with JDK 21, the underlying implementation is vectorized.
            if (length > 7 && bytes.length == length) {
                h = Arrays.hashCode(bytes);
            } else {
                h = 1;
                for (int i = 0; i < length; i++) {
                    h = h * 31 + bytes[i];
                }
            }
            this.hash = h;
        }
        return h;
    }

    @Override
    public int compareTo(Utf8 that) {
        return BinaryData.compareBytes(this.bytes, 0, this.length, that.bytes, 0, that.length);
    }

    // CharSequence implementation: all three delegate to the decoded String,
    // so they operate on chars, not bytes.
    @Override
    public char charAt(int index) {
        return toString().charAt(index);
    }

    @Override
    public int length() {
        return toString().length();
    }

    @Override
    public CharSequence subSequence(int start, int end) {
        return toString().subSequence(start, end);
    }

    /** Gets the UTF-8 bytes for a String */
    public static byte[] getBytesFor(String str) {
        return str.getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        // Serialize only the logical content. Writing bytes.length (the previous
        // behavior) leaks trailing buffer garbage whenever the backing array is
        // over-allocated, and deserializes with the wrong logical length. The
        // format (int count + count bytes) is unchanged, so readExternal and
        // previously written data remain compatible.
        out.writeInt(length);
        out.write(bytes, 0, length);
    }

    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        setByteLength(in.readInt());
        in.readFully(bytes);
    }

    /**
     * Null-safe, type-aware comparison of two CharSequences. Same-class Comparable
     * instances use their own ordering; otherwise falls back to per-char comparison.
     * Note: null sorts AFTER non-null here.
     */
    public static int compareSequences(CharSequence cs1, CharSequence cs2) {
        if (cs1 == cs2) {
            return 0;
        }
        if (cs1 == null || cs2 == null) {
            return cs1 == null ? 1 : -1;
        }
        if (cs1.getClass() == cs2.getClass() && cs1 instanceof Comparable) {
            return ((Comparable<Object>) cs1).compareTo(cs2);
        }
        for (int i = 0, len = Math.min(cs1.length(), cs2.length()); i < len; i++) {
            char a = cs1.charAt(i);
            char b = cs2.charAt(i);
            if (a != b) {
                return a - b;
            }
        }
        return cs1.length() - cs2.length();
    }
}
|
Utf8
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/oidc/web/authentication/OidcLogoutAuthenticationConverter.java
|
{
"start": 2130,
"end": 4559
}
|
class ____ implements AuthenticationConverter {

    private static final Authentication ANONYMOUS_AUTHENTICATION = new AnonymousAuthenticationToken("anonymous",
            "anonymousUser", AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS"));

    @Override
    public Authentication convert(HttpServletRequest request) {
        MultiValueMap<String, String> parameters;
        if ("GET".equals(request.getMethod())) {
            parameters = OAuth2EndpointUtils.getQueryParameters(request);
        }
        else {
            parameters = OAuth2EndpointUtils.getFormParameters(request);
        }

        // id_token_hint (REQUIRED) // RECOMMENDED as per spec
        String idTokenHint = parameters.getFirst("id_token_hint");
        if (!StringUtils.hasText(idTokenHint) || parameters.get("id_token_hint").size() != 1) {
            throwError(OAuth2ErrorCodes.INVALID_REQUEST, "id_token_hint");
        }

        Authentication principal = SecurityContextHolder.getContext().getAuthentication();
        if (principal == null) {
            principal = ANONYMOUS_AUTHENTICATION;
        }

        // Only an already-existing session is of interest; never create one here.
        HttpSession session = request.getSession(false);
        String sessionId = (session != null) ? session.getId() : null;

        // client_id (OPTIONAL)
        String clientId = optionalParameter(parameters, OAuth2ParameterNames.CLIENT_ID);

        // post_logout_redirect_uri (OPTIONAL)
        String postLogoutRedirectUri = optionalParameter(parameters, "post_logout_redirect_uri");

        // state (OPTIONAL)
        String state = optionalParameter(parameters, OAuth2ParameterNames.STATE);

        return new OidcLogoutAuthenticationToken(idTokenHint, principal, sessionId, clientId, postLogoutRedirectUri,
                state);
    }

    // Returns the value of an optional parameter; an error is raised only when the
    // parameter is present AND duplicated.
    private static String optionalParameter(MultiValueMap<String, String> parameters, String name) {
        String value = parameters.getFirst(name);
        if (StringUtils.hasText(value) && parameters.get(name).size() != 1) {
            throwError(OAuth2ErrorCodes.INVALID_REQUEST, name);
        }
        return value;
    }

    private static void throwError(String errorCode, String parameterName) {
        OAuth2Error error = new OAuth2Error(errorCode, "OpenID Connect 1.0 Logout Request Parameter: " + parameterName,
                "https://openid.net/specs/openid-connect-rpinitiated-1_0.html#ValidationAndErrorHandling");
        throw new OAuth2AuthenticationException(error);
    }
}
|
OidcLogoutAuthenticationConverter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/AbstractLogicalPlanOptimizerTests.java
|
{
"start": 3642,
"end": 13252
}
|
class ____ extends LogicalPlanOptimizer {
    // A static instance of this would break the EsqlNodeSubclassTests because its
    // initialization requires a Random instance.
    public TestSubstitutionOnlyOptimizer() {
        super(unboundLogicalOptimizerContext());
    }

    @Override
    protected List<Batch<LogicalPlan>> batches() {
        // Restrict the optimizer pipeline to the substitution batch only.
        final Batch<LogicalPlan> substitutionsOnly = substitutions();
        return List.of(substitutionsOnly);
    }
}
@BeforeClass
// One-time setup shared by all optimizer tests: builds the parser, the
// optimizers, the enrich policies, and one Analyzer per test index/mapping.
// Order matters: enrichResolution must be fully populated before any Analyzer
// that references it is constructed.
public static void init() {
    parser = new EsqlParser();
    logicalOptimizerCtx = unboundLogicalOptimizerContext();
    logicalOptimizer = new LogicalPlanOptimizer(logicalOptimizerCtx);
    // Same configuration/fold context, but pinned to the current transport version.
    logicalOptimizerWithLatestVersion = new LogicalPlanOptimizer(
        new LogicalOptimizerContext(logicalOptimizerCtx.configuration(), logicalOptimizerCtx.foldCtx(), TransportVersion.current())
    );
    enrichResolution = new EnrichResolution();
    AnalyzerTestUtils.loadEnrichPolicyResolution(enrichResolution, "languages_idx", "id", "languages_idx", "mapping-languages.json");
    AnalyzerTestUtils.loadEnrichPolicyResolution(
        enrichResolution,
        Enrich.Mode.REMOTE,
        MATCH_TYPE,
        "languages_remote",
        "id",
        "languages_idx",
        "mapping-languages.json"
    );
    AnalyzerTestUtils.loadEnrichPolicyResolution(
        enrichResolution,
        Enrich.Mode.COORDINATOR,
        MATCH_TYPE,
        "languages_coordinator",
        "id",
        "languages_idx",
        "mapping-languages.json"
    );
    // Most tests use either "test" or "employees" as the index name, but for the same mapping
    mapping = loadMapping("mapping-basic.json");
    EsIndex test = EsIndexGenerator.esIndex("test", mapping, Map.of("test", IndexMode.STANDARD));
    EsIndex employees = EsIndexGenerator.esIndex("employees", mapping, Map.of("employees", IndexMode.STANDARD));
    analyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(test, employees),
            defaultLookupResolution(),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Some tests use data from the airports index, so we load it here, and use it in the planAirports() function.
    mappingAirports = loadMapping("mapping-airports.json");
    EsIndex airports = EsIndexGenerator.esIndex("airports", mappingAirports, Map.of("airports", IndexMode.STANDARD));
    analyzerAirports = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(airports),
            defaultLookupResolution(),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Some tests need additional types, so we load that index here and use it in the plan_types() function.
    mappingTypes = loadMapping("mapping-all-types.json");
    EsIndex types = EsIndexGenerator.esIndex("types", mappingTypes, Map.of("types", IndexMode.STANDARD));
    // NOTE(review): unlike the other analyzers this one passes no lookup resolution
    // and uses defaultInferenceResolution() instead of empty — confirm intentional.
    analyzerTypes = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(types),
            enrichResolution,
            defaultInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Some tests use mappings from mapping-extra.json to be able to test more types so we load it here
    mappingExtra = loadMapping("mapping-extra.json");
    EsIndex extra = EsIndexGenerator.esIndex("extra", mappingExtra, Map.of("extra", IndexMode.STANDARD));
    analyzerExtra = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(extra),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Time-series indices; the exponential-histogram index is only added when the
    // capability is enabled in this build.
    List<EsIndex> metricIndices = new ArrayList<>();
    if (EsqlCapabilities.Cap.EXPONENTIAL_HISTOGRAM_PRE_TECH_PREVIEW_V5.isEnabled()) {
        Map<String, EsField> expHistoMetricMapping = loadMapping("exp_histo_sample-mappings.json");
        metricIndices.add(
            EsIndexGenerator.esIndex("exp_histo_sample", expHistoMetricMapping, Map.of("exp_histo_sample", IndexMode.TIME_SERIES))
        );
    }
    metricMapping = loadMapping("k8s-mappings.json");
    metricIndices.add(EsIndexGenerator.esIndex("k8s", metricMapping, Map.of("k8s", IndexMode.TIME_SERIES)));
    metricsAnalyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(metricIndices.toArray(EsIndex[]::new)),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Multi-index setup: one field intentionally only keyword-typed in part of the
    // indices (last constructor argument marks it as partially typed).
    var multiIndexMapping = loadMapping("mapping-basic.json");
    multiIndexMapping.put(
        "partial_type_keyword",
        new EsField("partial_type_keyword", KEYWORD, emptyMap(), true, EsField.TimeSeriesFieldType.NONE)
    );
    var multiIndex = new EsIndex(
        "multi_index",
        multiIndexMapping,
        Map.of("test1", IndexMode.STANDARD, "test2", IndexMode.STANDARD),
        Map.of(),
        Map.of(),
        Set.of("partial_type_keyword")
    );
    multiIndexAnalyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(multiIndex),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    var sampleDataMapping = loadMapping("mapping-sample_data.json");
    var sampleDataIndex = new EsIndex(
        "sample_data",
        sampleDataMapping,
        Map.of("sample_data", IndexMode.STANDARD),
        Map.of(),
        Map.of(),
        Set.of()
    );
    sampleDataIndexAnalyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(sampleDataIndex),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    // Subquery analyzer reuses the "test" index resolution merged with the default
    // subquery resolution.
    subqueryAnalyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            mergeIndexResolutions(indexResolutions(test), defaultSubqueryResolution()),
            defaultLookupResolution(),
            enrichResolution,
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
}
/** Parses, analyzes, and optimizes {@code query} with the default optimizer. */
protected LogicalPlan optimizedPlan(String query) {
    return plan(query);
}
/**
 * Optimizes {@code query} while the analyzer context temporarily reports the
 * given transport version; the previous version is restored on exit.
 */
protected LogicalPlan optimizedPlan(String query, TransportVersion transportVersion) {
    MutableAnalyzerContext ctx = (MutableAnalyzerContext) analyzer.context();
    try (var revert = ctx.setTemporaryTransportVersionOnOrAfter(transportVersion)) {
        return optimizedPlan(query);
    }
}
/** Plans {@code query} with the default logical optimizer. */
protected LogicalPlan plan(String query) {
    return plan(query, logicalOptimizer);
}
/** Parse → analyze → optimize with the supplied optimizer. */
protected LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) {
    return optimizer.optimize(analyzer.analyze(parser.createStatement(query)));
}
/** Plans {@code query} against the airports index. */
protected LogicalPlan planAirports(String query) {
    return logicalOptimizer.optimize(analyzerAirports.analyze(parser.createStatement(query)));
}
/** Plans {@code query} against the "extra" index (mapping-extra.json). */
protected LogicalPlan planExtra(String query) {
    return logicalOptimizer.optimize(analyzerExtra.analyze(parser.createStatement(query)));
}
/** Plans {@code query} against the all-types index. */
protected LogicalPlan planTypes(String query) {
    var analyzed = analyzerTypes.analyze(parser.createStatement(query));
    return logicalOptimizer.optimize(analyzed);
}
/** Plans {@code query} against the multi-index setup with a partially-typed field. */
protected LogicalPlan planMultiIndex(String query) {
    var analyzed = multiIndexAnalyzer.analyze(parser.createStatement(query));
    return logicalOptimizer.optimize(analyzed);
}
/** Plans {@code query} against the sample_data index. */
protected LogicalPlan planSample(String query) {
    return logicalOptimizer.optimize(sampleDataIndexAnalyzer.analyze(parser.createStatement(query)));
}
/** Plans {@code query} with subquery resolution enabled. */
protected LogicalPlan planSubquery(String query) {
    return logicalOptimizer.optimize(subqueryAnalyzer.analyze(parser.createStatement(query)));
}
@Override
// Ignore the implicit default-LIMIT warning emitted for unlimited queries.
protected List<String> filteredWarnings() {
    return withDefaultLimitWarning(super.filteredWarnings());
}
/** Asserts that planning {@code esql} throws {@code exceptionClass} whose message contains {@code reason}. */
protected <T extends Throwable> void failPlan(String esql, Class<T> exceptionClass, String reason) {
    var thrown = expectThrows(exceptionClass, () -> plan(esql));
    assertThat(thrown.getMessage(), containsString(reason));
}
/** Shorthand: expects a {@link VerificationException} containing {@code reason}. */
protected void failPlan(String esql, String reason) {
    failPlan(esql, VerificationException.class, reason);
}
}
|
TestSubstitutionOnlyOptimizer
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/data/conversion/MapMapConverter.java
|
{
"start": 1456,
"end": 5077
}
|
class ____<K, V> implements DataStructureConverter<MapData, Map<K, V>> {

    private static final long serialVersionUID = 1L;

    private final ArrayObjectArrayConverter<K> keyConverter;

    private final ArrayObjectArrayConverter<V> valueConverter;

    // True when both element converters already produce internal data, which
    // allows wrapping the external map directly instead of binary-encoding it.
    private final boolean hasInternalEntries;

    private MapMapConverter(
            ArrayObjectArrayConverter<K> keyConverter,
            ArrayObjectArrayConverter<V> valueConverter) {
        this.keyConverter = keyConverter;
        this.valueConverter = valueConverter;
        this.hasInternalEntries =
                keyConverter.hasInternalElements && valueConverter.hasInternalElements;
    }

    @Override
    public void open(ClassLoader classLoader) {
        keyConverter.open(classLoader);
        valueConverter.open(classLoader);
    }

    @Override
    public MapData toInternal(Map<K, V> external) {
        // Fast path: entries are already internal, just wrap the map.
        return hasInternalEntries ? new GenericMapData(external) : toBinaryMapData(external);
    }

    @Override
    public Map<K, V> toExternal(MapData internal) {
        final ArrayData internalKeys = internal.keyArray();
        final ArrayData internalValues = internal.valueArray();
        final int size = internal.size();
        final Map<K, V> result = new HashMap<>();
        for (int i = 0; i < size; i++) {
            final Object rawKey = keyConverter.elementGetter.getElementOrNull(internalKeys, i);
            final Object rawValue = valueConverter.elementGetter.getElementOrNull(internalValues, i);
            final K externalKey = keyConverter.elementConverter.toExternalOrNull(rawKey);
            final V externalValue = valueConverter.elementConverter.toExternalOrNull(rawValue);
            result.put(externalKey, externalValue);
        }
        return result;
    }

    // --------------------------------------------------------------------------------------------
    // Runtime helper methods
    // --------------------------------------------------------------------------------------------

    // Serializes the external map into a BinaryMapData via two parallel array writers.
    private MapData toBinaryMapData(Map<K, V> external) {
        final int size = external.size();
        keyConverter.allocateWriter(size);
        valueConverter.allocateWriter(size);
        int index = 0;
        for (Map.Entry<K, V> entry : external.entrySet()) {
            keyConverter.writeElement(index, entry.getKey());
            valueConverter.writeElement(index, entry.getValue());
            index++;
        }
        return BinaryMapData.valueOf(
                keyConverter.completeWriter(), valueConverter.completeWriter());
    }

    // --------------------------------------------------------------------------------------------
    // Factory method
    // --------------------------------------------------------------------------------------------

    public static MapMapConverter<?, ?> createForMapType(DataType dataType) {
        final List<DataType> children = dataType.getChildren();
        return new MapMapConverter<>(
                ArrayObjectArrayConverter.createForElement(children.get(0)),
                ArrayObjectArrayConverter.createForElement(children.get(1)));
    }

    public static MapMapConverter<?, ?> createForMultisetType(DataType dataType) {
        // A multiset is modeled as MAP<element, INT NOT NULL> (element -> multiplicity).
        final DataType elementType = dataType.getChildren().get(0);
        return new MapMapConverter<>(
                ArrayObjectArrayConverter.createForElement(elementType),
                ArrayObjectArrayConverter.createForElement(DataTypes.INT().notNull()));
    }
}
|
MapMapConverter
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JavaDurationGetSecondsGetNanoTest.java
|
{
"start": 2982,
"end": 3605
}
|
class ____ {
    // Fixture: getSeconds() and getNano() appear in different scopes; the check
    // must still flag the getNano() call. The "// BUG:" marker below is
    // load-bearing for the Error Prone compilation test — do not alter it.
    public static void foo(Duration duration) {
        long seconds = duration.getSeconds();
        if (true) {
            // BUG: Diagnostic contains: JavaDurationGetSecondsGetNano
            int nanos = duration.getNano();
        }
    }
}
""")
.doTest();
}
@Test
public void getSecondsWithGetNanosInDifferentMethods() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.Duration;
public
|
TestCase
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/util/ErrorProneToken.java
|
{
"start": 1732,
"end": 2414
}
|
class ____ to figure out which hazzers are supported by a given token
return token.getClass().getSimpleName().contentEquals("NamedToken");
}
public boolean hasStringVal() {
    // Only string and numeric tokens carry a string value.
    final String kind = token.getClass().getSimpleName();
    return kind.contentEquals("StringToken") || kind.contentEquals("NumericToken");
}
public boolean hasRadix() {
    // Only numeric tokens have a radix.
    return "NumericToken".contentEquals(token.getClass().getSimpleName());
}
// Delegates to the wrapped token; presumably only valid for named tokens — confirm.
public Name name() {
    return token.name();
}
// Delegates to the wrapped token; see hasStringVal() for which tokens support this.
public String stringVal() {
    return token.stringVal();
}
// Delegates to the wrapped token; see hasRadix() for which tokens support this.
public int radix() {
    return token.radix();
}
@Override
// Mirrors the wrapped javac token's own string form.
public String toString() {
    return token.toString();
}
}
|
names
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/injection/guice/MembersInjectorImpl.java
|
{
"start": 966,
"end": 2160
}
|
class ____<T> implements MembersInjector<T> {

    private final TypeLiteral<T> typeLiteral;
    private final InjectorImpl injector;
    private final List<SingleMethodInjector> memberInjectors;

    MembersInjectorImpl(InjectorImpl injector, TypeLiteral<T> typeLiteral, List<SingleMethodInjector> memberInjectors) {
        this.injector = injector;
        this.typeLiteral = typeLiteral;
        this.memberInjectors = memberInjectors;
    }

    /** Injects members of {@code instance} inside an injector context; null is a no-op. */
    void injectAndNotify(final T instance, final Errors errors) throws ErrorsException {
        if (instance != null) {
            injector.callInContext((ContextualCallable<Void>) context -> {
                injectMembers(instance, errors, context);
                return null;
            });
        }
    }

    void injectMembers(T t, Errors errors, InternalContext context) {
        // Indexed loop kept deliberately: avoids allocating an iterator on this path.
        final int count = memberInjectors.size();
        for (int i = 0; i < count; i++) {
            memberInjectors.get(i).inject(errors, context, t);
        }
    }

    @Override
    public String toString() {
        return "MembersInjector<" + typeLiteral + ">";
    }
}
|
MembersInjectorImpl
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-2135/plugin/src/main/java/coreit/ItMojo.java
|
{
"start": 1812,
"end": 2278
}
|
class ____ extends AbstractMojo {
    /**
     * @parameter default-value="${project.build.directory}/touch.txt"
     */
    private File outputFile;

    /** Creates the touch file, creating any missing parent directories first. */
    public void execute() throws MojoExecutionException {
        try {
            final File parentDir = outputFile.getParentFile();
            parentDir.mkdirs();
            // If mkdirs() failed, this throws IOException and is reported below.
            outputFile.createNewFile();
        } catch (IOException e) {
            throw new MojoExecutionException("Failed to create touch file: " + e.getMessage(), e);
        }
    }
}
|
ItMojo
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/SpringApplicationTests.java
|
{
"start": 71406,
"end": 71541
}
|
class ____ {
    @Bean
    String someBean() {
        // Fixed marker value; presumably the enclosing test asserts this to detect
        // that the definition overrode the original "someBean" — confirm.
        return "override";
    }
}
@Configuration(proxyBeanMethods = false)
static
|
OverrideConfig
|
java
|
apache__camel
|
components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvDataFormatCustomRecordConverterTest.java
|
{
"start": 1358,
"end": 2388
}
|
class ____ extends CamelSpringTestSupport {

    @Test
    void unmarshalTest() throws InterruptedException {
        final MockEndpoint unmarshaled = getMockEndpoint("mock:unmarshaled");
        unmarshaled.expectedMessageCount(1);

        template.sendBody("direct:unmarshal", getData());
        unmarshaled.assertIsSatisfied();

        final Message received = unmarshaled.getReceivedExchanges().get(0).getIn();
        final List<List<String>> rows = CastUtils.cast((List) received.getBody());
        assertNotNull(rows);
        assertEquals(1, rows.size());

        // Single record with the three converted fields.
        final List<String> firstRow = rows.get(0);
        assertEquals(3, firstRow.size());
        assertEquals("[Hello, Again, Democracy]", firstRow.toString());
    }

    // One semicolon-separated CSV record.
    private String getData() {
        return String.join(";", "A1", "B1", "C1");
    }

    @Override
    protected ClassPathXmlApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext(
                "org/apache/camel/dataformat/csv/CsvDataFormatCustomRecordConverter.xml");
    }
}
|
CsvDataFormatCustomRecordConverterTest
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddable/generics/ChildEmbeddable.java
|
{
"start": 246,
"end": 309
}
|
class ____ extends ParentEmbeddable<MyTypeImpl> {
    // Intentionally empty: exercises metamodel generation for an embeddable that
    // only specializes the parent's generic type parameter.
}
|
ChildEmbeddable
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/superbuilder/Passenger.java
|
{
"start": 200,
"end": 531
}
|
class ____ {
private final String name;
// Builder-only construction: copies state from the (super)builder.
protected Passenger(PassengerBuilder<?, ?> b) {
    this.name = b.name;
}
/** Entry point of the SuperBuilder pattern: returns the concrete builder. */
public static PassengerBuilder<?, ?> builder() {
    return new PassengerBuilderImpl();
}
/** Returns the passenger's name. */
public String getName() {
    return this.name;
}
public abstract static
|
Passenger
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/task/TaskSchedulingAutoConfigurationTests.java
|
{
"start": 13845,
"end": 14183
}
|
class ____ implements SchedulingConfigurer {

    // Scheduler supplied programmatically; NOTE(review): presumably expected to
    // take precedence over any auto-configured scheduler — confirm in the test.
    private final TaskScheduler taskScheduler = new TestTaskScheduler();

    @Override
    public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
        // Register this configuration's own scheduler with the registrar.
        taskRegistrar.setScheduler(this.taskScheduler);
    }
}
@Configuration(proxyBeanMethods = false)
static
|
SchedulingConfigurerConfiguration
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cascade/CascadeMergeToProxyEntityCopyAllowedTest.java
|
{
"start": 1977,
"end": 4956
}
|
class ____ {
private Project defaultProject;
@Test
// End-to-end scenario: persist an Event graph, reload it with fetch joins,
// then merge a detached graph containing a proxy reference and verify the
// cascade produces exactly one child and no duplicate-entity errors.
public void test(SessionFactoryScope scope) {
    // Phase 1: persist the root event and reload it with a single join-fetch query.
    final Event root = (Event) persistEntity( scope, new Event( null, defaultProject ) );
    SQLStatementInspector statementInspector = scope.getCollectingStatementInspector();
    statementInspector.clear();
    Event rootFromDB = scope.fromTransaction(
            session -> {
                TypedQuery<Event> eventTypedQuery = session.createQuery(
                        "SELECT e FROM Event e LEFT JOIN FETCH e.speakers LEFT JOIN FETCH e.children LEFT JOIN FETCH e.project WHERE e.objectID = :oid",
                        Event.class
                );
                eventTypedQuery.setParameter( "oid", root.getObjectID() );
                return eventTypedQuery.getSingleResult();
            }
    );
    // Exactly one SQL statement; the "join" count guards against extra lazy loads.
    statementInspector.assertExecutedCount( 1 );
    statementInspector.assertNumberOfOccurrenceInQuery( 0, "join", 4 );
    assertNotNull( rootFromDB );
    assertEquals( 0, rootFromDB.getChildren().size() );
    assertEquals( 0, rootFromDB.getSpeakers().size() );
    assertEquals( root, rootFromDB );

    // Phase 2: persist a speaker and re-read it so it is a managed/detached copy.
    Speaker speaker = (Speaker) persistEntity( scope, new Speaker( defaultProject ) );
    final long speakerId = speaker.getObjectID();
    speaker = scope.fromTransaction(
            session ->
                    session.find( Speaker.class, speakerId )
    );
    assertNotNull( speaker );

    // Phase 3: attach a child (with the speaker) to the detached root, merge, and
    // reload to verify the cascaded state.
    Event child = new Event( rootFromDB, defaultProject );
    child.addSpeaker( speaker );
    rootFromDB = (Event) persistEntity( scope, rootFromDB );
    final long rootFromDBId = rootFromDB.getObjectID();
    rootFromDB = scope.fromTransaction(
            session -> {
                TypedQuery<Event> eventTypedQuery = session.createQuery(
                        "SELECT e FROM Event e LEFT JOIN FETCH e.speakers LEFT JOIN FETCH e.children LEFT JOIN FETCH e.project WHERE e.objectID = :oid",
                        Event.class
                );
                eventTypedQuery.setParameter( "oid", rootFromDBId );
                return eventTypedQuery.getSingleResult();
            }
    );
    assertNotNull( rootFromDB );
    assertEquals( 1, rootFromDB.getChildren().size() );
    assertEquals( 0, rootFromDB.getSpeakers().size() );
}
// Merges (the entity may be detached or reference detached objects), then
// persists and flushes within one transaction; returns the managed copy.
private Object persistEntity(SessionFactoryScope scope, Object entity) {
    return scope.fromTransaction( session -> {
        final Object merged = session.merge( entity );
        session.persist( merged );
        session.flush();
        return merged;
    } );
}
@BeforeEach
// Creates the shared Project in one transaction, then re-reads it in a second
// transaction so defaultProject is a detached instance for the test to merge.
public void setupData(SessionFactoryScope scope) {
    Long objectId = scope.fromTransaction(
            session -> {
                Project project = (Project) session.merge( new Project() );
                session.persist( project );
                session.flush();
                return project.getObjectID();
            }
    );
    scope.inTransaction(
            session -> {
                TypedQuery<Project> projectTypedQuery = session.createQuery(
                        "SELECT p FROM Project p WHERE p.objectID = :oid",
                        Project.class
                );
                projectTypedQuery.setParameter( "oid", objectId );
                defaultProject = projectTypedQuery.getSingleResult();
            }
    );
}
@MappedSuperclass
public static
|
CascadeMergeToProxyEntityCopyAllowedTest
|
java
|
dropwizard__dropwizard
|
dropwizard-logging/src/main/java/io/dropwizard/logging/common/layout/DiscoverableLayoutFactory.java
|
{
"start": 576,
"end": 928
}
|
// Discoverable factory for Logback layouts (registered via Discoverable lookup).
interface ____<E extends DeferredProcessingAware> extends Discoverable {
    /**
     * Creates a {@link LayoutBase} of type E
     *
     * @param context the Logback context
     * @param timeZone the TimeZone
     * @return a new {@link LayoutBase}
     */
    LayoutBase<E> build(LoggerContext context, TimeZone timeZone);
}
|
DiscoverableLayoutFactory
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/view/TestCommonViews.java
|
{
"start": 1022,
"end": 1568
}
|
class ____ {

    /** Smoke test: the error page renders without throwing. */
    @Test
    void testErrorPage() {
        // The returned Injector was previously bound to an unused local; drop it.
        WebAppTests.testPage(ErrorPage.class);
    }

    /** Smoke test: the header block renders without throwing. */
    @Test
    void testHeaderBlock() {
        WebAppTests.testBlock(HeaderBlock.class);
    }

    /** Smoke test: the footer block renders without throwing. */
    @Test
    void testFooterBlock() {
        WebAppTests.testBlock(FooterBlock.class);
    }

    /** Smoke test: the jQuery UI block renders without throwing. */
    @Test
    void testJQueryUI() {
        WebAppTests.testBlock(JQueryUI.class);
    }

    /** Smoke test: ResponseInfo is constructible from a mock injector. */
    @Test
    void testInfoBlock() {
        Injector injector = WebAppTests.createMockInjector(this);
        // The instance itself is not inspected; resolving it is the assertion.
        injector.getInstance(ResponseInfo.class);
    }
}
|
TestCommonViews
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsHeaderFilteringTest.java
|
{
"start": 4927,
"end": 6009
}
|
class ____ implements Processor {

    @Override
    public void process(Exchange exchange) throws Exception {
        final JmsMessage received = exchange.getIn(JmsMessage.class);

        // "testheader" is still on the JMS message; the header filter only applies
        // when copying back to the Camel message.
        assertEquals(1020, received.getJmsMessage().getObjectProperty("testheader"));
        // "anotherheader" has been filtered out.
        assertNull(received.getJmsMessage().getObjectProperty("anotherheader"));
        // Dots in header names are replaced by '_DOT_' on the JMS property side.
        assertEquals(10000, received.getJmsMessage().getObjectProperty("org_DOT_foo_DOT_jms"));
        // Like "testheader", org.apache.camel.test.jms is only filtered by the "in" filter.
        assertEquals(20000, received.getJmsMessage().getObjectProperty("org_DOT_foo_DOT_test_DOT_jms"));
        // JMSX properties are filtered by default.
        assertNull(received.getJmsMessage().getStringProperty("JMSXAppID"));

        latch.countDown();
    }
}
|
OutHeaderChecker
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java
|
{
"start": 759,
"end": 1361
}
|
// SQL CHAR_LENGTH(string): length in characters of the string argument.
class ____ extends UnaryStringFunction {

    public CharLength(Source source, Expression field) {
        super(source, field);
    }

    @Override
    protected NodeInfo<CharLength> info() {
        // Node metadata used by the planner to re-create this node during tree rewrites.
        return NodeInfo.create(this, CharLength::new, field());
    }

    @Override
    protected CharLength replaceChild(Expression newChild) {
        return new CharLength(source(), newChild);
    }

    @Override
    protected StringOperation operation() {
        // Evaluation is delegated to the shared CHAR_LENGTH string operation.
        return StringOperation.CHAR_LENGTH;
    }

    @Override
    public DataType dataType() {
        // CHAR_LENGTH always yields an integer.
        return DataTypes.INTEGER;
    }
}
|
CharLength
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/AllowMultipleCascadedValidationOnReturnValuesTest.java
|
{
"start": 1454,
"end": 1549
}
|
// Fixture: an interface method without validation constraints, used to verify
// cascaded return-value validation rules across implementations.
interface ____ {
    String foo(String s);
}
private static
|
InterfaceWithNoConstraints
|
java
|
apache__camel
|
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailAttachmentPollEnrichTest.java
|
{
"start": 1768,
"end": 5600
}
|
class ____ extends CamelTestSupport {
    private static final MailboxUser james = Mailbox.getOrCreateUser("james", "secret");

    /**
     * Sends a mail with a JPEG attachment via SMTP, then poll-enriches it back via
     * POP3 and verifies body and attachment survive the round trip.
     */
    @Test
    public void testPollEnrichMailWithAttachments() throws Exception {
        // clear mailbox
        Mailbox.clearAll();

        // START SNIPPET: e1
        // create an exchange with a normal body and attachment to be produced as email
        Endpoint endpoint = context.getEndpoint(james.uriPrefix(Protocol.smtp));

        // create the exchange with the mail message that is multipart with a file and a Hello World text/plain message.
        Exchange exchange = endpoint.createExchange();
        AttachmentMessage in = exchange.getIn(AttachmentMessage.class);
        in.setBody("Hello World");
        DefaultAttachment att = new DefaultAttachment(new FileDataSource("src/test/data/logo.jpeg"));
        att.addHeader("Content-Description", "some sample content");
        in.addAttachmentObject("logo.jpeg", att);

        // create a producer that can produce the exchange (= send the mail)
        Producer producer = endpoint.createProducer();
        // start the producer
        producer.start();
        try {
            // and let it go (processes the exchange by sending the email)
            producer.process(exchange);
            // END SNIPPET: e1

            MockEndpoint mock = getMockEndpoint("mock:result");
            mock.expectedMessageCount(1);

            template.sendBody("direct:start", "Trigger");

            mock.assertIsSatisfied();

            Exchange out = mock.assertExchangeReceived(0);

            // plain text
            assertEquals("Hello World", out.getIn().getBody(String.class));

            // attachment
            Map<String, Attachment> attachments = out.getIn(AttachmentMessage.class).getAttachmentObjects();
            assertNotNull(attachments, "Should have attachments");
            assertEquals(1, attachments.size());

            Attachment attachment = out.getIn(AttachmentMessage.class).getAttachmentObject("logo.jpeg");
            DataHandler handler = attachment.getDataHandler();
            assertNotNull(handler, "The logo should be there");

            // content type should match
            boolean match1 = "image/jpeg; name=logo.jpeg".equals(handler.getContentType());
            boolean match2 = "application/octet-stream; name=logo.jpeg".equals(handler.getContentType());
            assertTrue(match1 || match2, "Should match 1 or 2");

            assertEquals("logo.jpeg", handler.getName(), "Handler name should be the file name");
            assertEquals("some sample content", attachment.getHeader("content-description"));
        } finally {
            // Always stop the producer — previously skipped when an assertion
            // failed, leaking the started SMTP producer.
            producer.stop();
        }
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct:start")
                        .pollEnrich(james.uriPrefix(Protocol.pop3) + "&initialDelay=100&delay=100", 5000)
                        .process(e -> {
                            AttachmentMessage attachmentMessage = e.getIn(AttachmentMessage.class);
                            Map<String, DataHandler> attachments = attachmentMessage.getAttachments();
                            assertEquals(1, attachments.size());
                            assertEquals("logo.jpeg", attachments.keySet().iterator().next());
                            DataHandler dh = attachments.values().iterator().next();
                            byte[] data = context.getTypeConverter().convertTo(byte[].class, e, dh.getInputStream());
                            assertNotNull(data);
                            // should be logo that are 10.000 bytes or longer
                            assertTrue(data.length > 10000, "Should be 10000 bytes or more");
                        })
                        .to("mock:result");
            }
        };
    }
}
|
MailAttachmentPollEnrichTest
|
java
|
apache__flink
|
flink-docs/src/main/java/org/apache/flink/docs/rest/SqlGatewayRestAPIDocGenerator.java
|
{
"start": 2842,
"end": 3810
}
|
class ____ {
/**
* Generates the Sql Gateway REST API documentation.
*
* @param args args[0] contains the directory into which the generated files are placed
* @throws IOException if any file operation failed
*/
public static void main(String[] args) throws IOException, ConfigurationException {
String outputDirectory = args[0];
for (final SqlGatewayRestAPIVersion apiVersion : SqlGatewayRestAPIVersion.values()) {
if (apiVersion == SqlGatewayRestAPIVersion.V0) {
// this version exists only for testing purposes
continue;
}
createHtmlFile(
new DocumentingSqlGatewayRestEndpoint(),
apiVersion,
Paths.get(
outputDirectory,
"rest_" + apiVersion.getURLVersionPrefix() + "_sql_gateway.html"));
}
}
}
|
SqlGatewayRestAPIDocGenerator
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/net/NetTest.java
|
{
"start": 3184,
"end": 66156
}
|
class ____ extends VertxTestBase {
private SocketAddress testAddress;
private NetServer server;
private NetClient client;
private ProxyBase proxy;
private File tmp;
@Rule
public TemporaryFolder testFolder = new TemporaryFolder();
@Override
public void setUp() throws Exception {
super.setUp();
if (USE_DOMAIN_SOCKETS) {
assertTrue("Native transport not enabled", TRANSPORT.implementation().supportsDomainSockets());
tmp = TestUtils.tmpFile(".sock");
testAddress = SocketAddress.domainSocketAddress(tmp.getAbsolutePath());
} else {
testAddress = SocketAddress.inetSocketAddress(1234, "localhost");
}
client = vertx.createNetClient(new NetClientOptions().setConnectTimeout(1000));
server = vertx.createNetServer();
}
@Override
protected VertxOptions getOptions() {
VertxOptions options = super.getOptions();
options.getAddressResolverOptions().setHostsValue(Buffer.buffer("" +
"127.0.0.1 localhost\n" +
"127.0.0.1 host1\n" +
"127.0.0.1 host2.com\n" +
"127.0.0.1 example.com"));
return options;
}
@Override
protected void tearDown() throws Exception {
if (tmp != null) {
tmp.delete();
}
if (proxy != null) {
proxy.stop();
}
super.tearDown();
}
@Test
public void testClientOptions() {
NetClientOptions options = new NetClientOptions();
assertEquals(NetworkOptions.DEFAULT_SEND_BUFFER_SIZE, options.getSendBufferSize());
int rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setSendBufferSize(rand));
assertEquals(rand, options.getSendBufferSize());
assertIllegalArgumentException(() -> options.setSendBufferSize(0));
assertIllegalArgumentException(() -> options.setSendBufferSize(-123));
assertEquals(NetworkOptions.DEFAULT_RECEIVE_BUFFER_SIZE, options.getReceiveBufferSize());
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setReceiveBufferSize(rand));
assertEquals(rand, options.getReceiveBufferSize());
assertIllegalArgumentException(() -> options.setReceiveBufferSize(0));
assertIllegalArgumentException(() -> options.setReceiveBufferSize(-123));
assertTrue(options.isReuseAddress());
assertEquals(options, options.setReuseAddress(false));
assertFalse(options.isReuseAddress());
assertEquals(NetworkOptions.DEFAULT_TRAFFIC_CLASS, options.getTrafficClass());
rand = 23;
assertEquals(options, options.setTrafficClass(rand));
assertEquals(rand, options.getTrafficClass());
assertIllegalArgumentException(() -> options.setTrafficClass(-2));
assertIllegalArgumentException(() -> options.setTrafficClass(256));
assertTrue(options.isTcpNoDelay());
assertEquals(options, options.setTcpNoDelay(false));
assertFalse(options.isTcpNoDelay());
boolean tcpKeepAlive = false;
assertEquals(tcpKeepAlive, options.isTcpKeepAlive());
assertEquals(options, options.setTcpKeepAlive(!tcpKeepAlive));
assertEquals(!tcpKeepAlive, options.isTcpKeepAlive());
int soLinger = -1;
assertEquals(soLinger, options.getSoLinger());
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setSoLinger(rand));
assertEquals(rand, options.getSoLinger());
assertIllegalArgumentException(() -> options.setSoLinger(-2));
rand = TestUtils.randomPositiveInt();
assertEquals(0, options.getIdleTimeout());
assertEquals(options, options.setIdleTimeout(rand));
assertEquals(rand, options.getIdleTimeout());
assertFalse(options.isSsl());
assertEquals(options, options.setSsl(true));
assertTrue(options.isSsl());
assertNull(options.getKeyCertOptions());
JksOptions keyStoreOptions = new JksOptions().setPath(TestUtils.randomAlphaString(100)).setPassword(TestUtils.randomAlphaString(100));
assertEquals(options, options.setKeyCertOptions(keyStoreOptions));
assertEquals(keyStoreOptions, options.getKeyCertOptions());
assertNull(options.getTrustOptions());
JksOptions trustStoreOptions = new JksOptions().setPath(TestUtils.randomAlphaString(100)).setPassword(TestUtils.randomAlphaString(100));
assertEquals(options, options.setTrustOptions(trustStoreOptions));
assertEquals(trustStoreOptions, options.getTrustOptions());
assertFalse(options.isTrustAll());
assertEquals(options, options.setTrustAll(true));
// assertTrue(options.isTrustAll());
String randomAlphaString = TestUtils.randomAlphaString(10);
assertNull(options.getHostnameVerificationAlgorithm());
assertEquals(options, options.setHostnameVerificationAlgorithm(randomAlphaString));
assertEquals(randomAlphaString, options.getHostnameVerificationAlgorithm());
assertEquals(0, options.getReconnectAttempts());
assertIllegalArgumentException(() -> options.setReconnectAttempts(-2));
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setReconnectAttempts(rand));
assertEquals(rand, options.getReconnectAttempts());
assertEquals(1000, options.getReconnectInterval());
assertIllegalArgumentException(() -> options.setReconnectInterval(0));
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setReconnectInterval(rand));
assertEquals(rand, options.getReconnectInterval());
assertTrue(options.getEnabledCipherSuites().isEmpty());
assertEquals(options, options.addEnabledCipherSuite("foo"));
assertEquals(options, options.addEnabledCipherSuite("bar"));
assertNotNull(options.getEnabledCipherSuites());
assertTrue(options.getEnabledCipherSuites().contains("foo"));
assertTrue(options.getEnabledCipherSuites().contains("bar"));
assertEquals(false, options.isUseAlpn());
assertEquals(options, options.setUseAlpn(true));
assertEquals(true, options.isUseAlpn());
assertNull(options.getSslEngineOptions());
assertEquals(options, options.setSslEngineOptions(new JdkSSLEngineOptions()));
assertTrue(options.getSslEngineOptions() instanceof JdkSSLEngineOptions);
assertEquals(SSLOptions.DEFAULT_SSL_HANDSHAKE_TIMEOUT, options.getSslHandshakeTimeout());
long randLong = TestUtils.randomPositiveLong();
assertEquals(options, options.setSslHandshakeTimeout(randLong));
assertEquals(randLong, options.getSslHandshakeTimeout());
assertIllegalArgumentException(() -> options.setSslHandshakeTimeout(-123));
testComplete();
}
@Test
public void testServerOptions() {
NetServerOptions options = new NetServerOptions();
assertEquals(NetworkOptions.DEFAULT_SEND_BUFFER_SIZE, options.getSendBufferSize());
int rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setSendBufferSize(rand));
assertEquals(rand, options.getSendBufferSize());
assertIllegalArgumentException(() -> options.setSendBufferSize(0));
assertIllegalArgumentException(() -> options.setSendBufferSize(-123));
assertEquals(NetworkOptions.DEFAULT_RECEIVE_BUFFER_SIZE, options.getReceiveBufferSize());
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setReceiveBufferSize(rand));
assertEquals(rand, options.getReceiveBufferSize());
assertIllegalArgumentException(() -> options.setReceiveBufferSize(0));
assertIllegalArgumentException(() -> options.setReceiveBufferSize(-123));
assertTrue(options.isReuseAddress());
assertEquals(options, options.setReuseAddress(false));
assertFalse(options.isReuseAddress());
assertEquals(NetworkOptions.DEFAULT_TRAFFIC_CLASS, options.getTrafficClass());
rand = 23;
assertEquals(options, options.setTrafficClass(rand));
assertEquals(rand, options.getTrafficClass());
assertIllegalArgumentException(() -> options.setTrafficClass(-2));
assertIllegalArgumentException(() -> options.setTrafficClass(256));
assertTrue(options.isTcpNoDelay());
assertEquals(options, options.setTcpNoDelay(false));
assertFalse(options.isTcpNoDelay());
boolean tcpKeepAlive = false;
assertEquals(tcpKeepAlive, options.isTcpKeepAlive());
assertEquals(options, options.setTcpKeepAlive(!tcpKeepAlive));
assertEquals(!tcpKeepAlive, options.isTcpKeepAlive());
int soLinger = -1;
assertEquals(soLinger, options.getSoLinger());
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setSoLinger(rand));
assertEquals(rand, options.getSoLinger());
assertIllegalArgumentException(() -> options.setSoLinger(-2));
rand = TestUtils.randomPositiveInt();
assertEquals(0, options.getIdleTimeout());
assertEquals(options, options.setIdleTimeout(rand));
assertEquals(rand, options.getIdleTimeout());
assertIllegalArgumentException(() -> options.setIdleTimeout(-1));
assertFalse(options.isSsl());
assertEquals(options, options.setSsl(true));
assertTrue(options.isSsl());
assertNull(options.getKeyCertOptions());
JksOptions keyStoreOptions = new JksOptions().setPath(TestUtils.randomAlphaString(100)).setPassword(TestUtils.randomAlphaString(100));
assertEquals(options, options.setKeyCertOptions(keyStoreOptions));
assertEquals(keyStoreOptions, options.getKeyCertOptions());
assertNull(options.getTrustOptions());
JksOptions trustStoreOptions = new JksOptions().setPath(TestUtils.randomAlphaString(100)).setPassword(TestUtils.randomAlphaString(100));
assertEquals(options, options.setTrustOptions(trustStoreOptions));
assertEquals(trustStoreOptions, options.getTrustOptions());
assertEquals(-1, options.getAcceptBacklog());
rand = TestUtils.randomPositiveInt();
assertEquals(options, options.setAcceptBacklog(rand));
assertEquals(rand, options.getAcceptBacklog());
assertEquals(0, options.getPort());
assertEquals(options, options.setPort(1234));
assertEquals(1234, options.getPort());
assertIllegalArgumentException(() -> options.setPort(65536));
assertEquals("0.0.0.0", options.getHost());
String randString = TestUtils.randomUnicodeString(100);
assertEquals(options, options.setHost(randString));
assertEquals(randString, options.getHost());
assertTrue(options.getEnabledCipherSuites().isEmpty());
assertEquals(options, options.addEnabledCipherSuite("foo"));
assertEquals(options, options.addEnabledCipherSuite("bar"));
assertNotNull(options.getEnabledCipherSuites());
assertTrue(options.getEnabledCipherSuites().contains("foo"));
assertTrue(options.getEnabledCipherSuites().contains("bar"));
assertEquals(false, options.isUseAlpn());
assertEquals(options, options.setUseAlpn(true));
assertEquals(true, options.isUseAlpn());
assertNull(options.getSslEngineOptions());
assertEquals(options, options.setSslEngineOptions(new JdkSSLEngineOptions()));
assertTrue(options.getSslEngineOptions() instanceof JdkSSLEngineOptions);
assertFalse(options.isSni());
assertEquals(options, options.setSni(true));
assertTrue(options.isSni());
assertEquals(SSLOptions.DEFAULT_SSL_HANDSHAKE_TIMEOUT, options.getSslHandshakeTimeout());
long randomSslTimeout = TestUtils.randomPositiveLong();
assertEquals(options, options.setSslHandshakeTimeout(randomSslTimeout));
assertEquals(randomSslTimeout, options.getSslHandshakeTimeout());
assertIllegalArgumentException(() -> options.setSslHandshakeTimeout(-123));
assertFalse(options.isUseProxyProtocol());
assertEquals(options, options.setUseProxyProtocol(true));
assertTrue(options.isUseProxyProtocol());
assertEquals(NetServerOptions.DEFAULT_PROXY_PROTOCOL_TIMEOUT_TIME_UNIT, options.getProxyProtocolTimeoutUnit());
long randomProxyTimeout = TestUtils.randomPositiveLong();
assertEquals(options, options.setProxyProtocolTimeout(randomProxyTimeout));
assertEquals(randomProxyTimeout, options.getProxyProtocolTimeout());
assertIllegalArgumentException(() -> options.setProxyProtocolTimeout(-123));
testComplete();
}
@Test
public void testCopyClientOptions() {
NetClientOptions options = new NetClientOptions();
int sendBufferSize = TestUtils.randomPositiveInt();
int receiverBufferSize = TestUtils.randomPortInt();
Random rand = new Random();
boolean reuseAddress = rand.nextBoolean();
int trafficClass = TestUtils.randomByte() + 128;
boolean tcpNoDelay = rand.nextBoolean();
boolean tcpKeepAlive = rand.nextBoolean();
int soLinger = TestUtils.randomPositiveInt();
int idleTimeout = TestUtils.randomPositiveInt();
boolean ssl = rand.nextBoolean();
String hostnameVerificationAlgorithm = TestUtils.randomAlphaString(10);
JksOptions keyStoreOptions = new JksOptions();
String ksPassword = TestUtils.randomAlphaString(100);
keyStoreOptions.setPassword(ksPassword);
JksOptions trustStoreOptions = new JksOptions();
String tsPassword = TestUtils.randomAlphaString(100);
trustStoreOptions.setPassword(tsPassword);
String enabledCipher = TestUtils.randomAlphaString(100);
int connectTimeout = TestUtils.randomPositiveInt();
boolean trustAll = rand.nextBoolean();
String crlPath = TestUtils.randomUnicodeString(100);
Buffer crlValue = TestUtils.randomBuffer(100);
int reconnectAttempts = TestUtils.randomPositiveInt();
long reconnectInterval = TestUtils.randomPositiveInt();
boolean useAlpn = TestUtils.randomBoolean();
boolean openSslSessionCacheEnabled = rand.nextBoolean();
long sslHandshakeTimeout = TestUtils.randomPositiveLong();
SSLEngineOptions sslEngine = TestUtils.randomBoolean() ? new JdkSSLEngineOptions() : new OpenSSLEngineOptions();
options.setSendBufferSize(sendBufferSize);
options.setReceiveBufferSize(receiverBufferSize);
options.setReuseAddress(reuseAddress);
options.setTrafficClass(trafficClass);
options.setSsl(ssl);
options.setTcpNoDelay(tcpNoDelay);
options.setTcpKeepAlive(tcpKeepAlive);
options.setSoLinger(soLinger);
options.setIdleTimeout(idleTimeout);
options.setKeyCertOptions(keyStoreOptions);
options.setTrustOptions(trustStoreOptions);
options.addEnabledCipherSuite(enabledCipher);
options.setConnectTimeout(connectTimeout);
options.setTrustAll(trustAll);
options.addCrlPath(crlPath);
options.addCrlValue(crlValue);
options.setReconnectAttempts(reconnectAttempts);
options.setReconnectInterval(reconnectInterval);
options.setUseAlpn(useAlpn);
options.setSslEngineOptions(sslEngine);
options.setHostnameVerificationAlgorithm(hostnameVerificationAlgorithm);
options.setSslHandshakeTimeout(sslHandshakeTimeout);
NetClientOptions copy = new NetClientOptions(options);
assertEquals(options.toJson(), copy.toJson());
}
@Test
public void testDefaultClientOptionsJson() {
NetClientOptions def = new NetClientOptions();
NetClientOptions json = new NetClientOptions(new JsonObject());
assertEquals(def.getReconnectAttempts(), json.getReconnectAttempts());
assertEquals(def.getReconnectInterval(), json.getReconnectInterval());
assertEquals(def.isTrustAll(), json.isTrustAll());
assertEquals(def.getCrlPaths(), json.getCrlPaths());
assertEquals(def.getCrlValues(), json.getCrlValues());
assertEquals(def.getConnectTimeout(), json.getConnectTimeout());
assertEquals(def.isTcpNoDelay(), json.isTcpNoDelay());
assertEquals(def.isTcpKeepAlive(), json.isTcpKeepAlive());
assertEquals(def.getSoLinger(), json.getSoLinger());
assertEquals(def.isSsl(), json.isSsl());
assertEquals(def.isUseAlpn(), json.isUseAlpn());
assertEquals(def.getSslEngineOptions(), json.getSslEngineOptions());
assertEquals(def.getHostnameVerificationAlgorithm(), json.getHostnameVerificationAlgorithm());
assertEquals(def.getSslHandshakeTimeout(), json.getSslHandshakeTimeout());
}
@Test
public void testClientOptionsJson() {
int sendBufferSize = TestUtils.randomPositiveInt();
int receiverBufferSize = TestUtils.randomPortInt();
Random rand = new Random();
boolean reuseAddress = rand.nextBoolean();
int trafficClass = TestUtils.randomByte() + 128;
boolean tcpNoDelay = rand.nextBoolean();
boolean tcpKeepAlive = rand.nextBoolean();
int soLinger = TestUtils.randomPositiveInt();
int idleTimeout = TestUtils.randomPositiveInt();
boolean ssl = rand.nextBoolean();
JksOptions keyStoreOptions = new JksOptions();
String ksPassword = TestUtils.randomAlphaString(100);
keyStoreOptions.setPassword(ksPassword);
String ksPath = TestUtils.randomAlphaString(100);
keyStoreOptions.setPath(ksPath);
JksOptions trustStoreOptions = new JksOptions();
String tsPassword = TestUtils.randomAlphaString(100);
trustStoreOptions.setPassword(tsPassword);
String tsPath = TestUtils.randomAlphaString(100);
trustStoreOptions.setPath(tsPath);
String enabledCipher = TestUtils.randomAlphaString(100);
int connectTimeout = TestUtils.randomPositiveInt();
boolean trustAll = rand.nextBoolean();
String crlPath = TestUtils.randomUnicodeString(100);
int reconnectAttempts = TestUtils.randomPositiveInt();
long reconnectInterval = TestUtils.randomPositiveInt();
boolean useAlpn = TestUtils.randomBoolean();
String hostnameVerificationAlgorithm = TestUtils.randomAlphaString(10);
String sslEngine;
JsonObject sslEngineOptions;
if (TestUtils.randomBoolean()) {
sslEngine = "jdkSslEngineOptions";
sslEngineOptions = new JsonObject();
} else {
sslEngine = "openSslEngineOptions";
boolean sessionCacheEnabled = rand.nextBoolean();
sslEngineOptions = new JsonObject()
.put("sessionCacheEnabled", sessionCacheEnabled)
.put("useWorkerThread", SSLEngineOptions.DEFAULT_USE_WORKER_POOL);
}
long sslHandshakeTimeout = TestUtils.randomPositiveLong();
JsonObject json = new JsonObject();
json.put("sendBufferSize", sendBufferSize)
.put("receiveBufferSize", receiverBufferSize)
.put("reuseAddress", reuseAddress)
.put("trafficClass", trafficClass)
.put("tcpNoDelay", tcpNoDelay)
.put("tcpKeepAlive", tcpKeepAlive)
.put("soLinger", soLinger)
.put("idleTimeout", idleTimeout)
.put("ssl", ssl)
.put("enabledCipherSuites", new JsonArray().add(enabledCipher))
.put("connectTimeout", connectTimeout)
.put("trustAll", trustAll)
.put("crlPaths", new JsonArray().add(crlPath))
.put("keyStoreOptions", new JsonObject().put("password", ksPassword).put("path", ksPath))
.put("trustStoreOptions", new JsonObject().put("password", tsPassword).put("path", tsPath))
.put("reconnectAttempts", reconnectAttempts)
.put("reconnectInterval", reconnectInterval)
.put("useAlpn", useAlpn)
.put(sslEngine, sslEngineOptions)
.put("hostnameVerificationAlgorithm", hostnameVerificationAlgorithm)
.put("sslHandshakeTimeout", sslHandshakeTimeout);
JsonObject converted = new NetClientOptions(json).toJson();
for (Map.Entry<String, Object> entry : json) {
assertEquals(entry.getValue(), converted.getValue(entry.getKey()));
}
NetClientOptions options = new NetClientOptions(json);
assertEquals(sendBufferSize, options.getSendBufferSize());
assertEquals(receiverBufferSize, options.getReceiveBufferSize());
assertEquals(reuseAddress, options.isReuseAddress());
assertEquals(trafficClass, options.getTrafficClass());
assertEquals(tcpKeepAlive, options.isTcpKeepAlive());
assertEquals(tcpNoDelay, options.isTcpNoDelay());
assertEquals(soLinger, options.getSoLinger());
assertEquals(idleTimeout, options.getIdleTimeout());
assertEquals(ssl, options.isSsl());
assertEquals(sslHandshakeTimeout, options.getSslHandshakeTimeout());
assertNotSame(keyStoreOptions, options.getKeyCertOptions());
assertEquals(ksPassword, ((JksOptions) options.getKeyCertOptions()).getPassword());
assertEquals(ksPath, ((JksOptions) options.getKeyCertOptions()).getPath());
assertNotSame(trustStoreOptions, options.getTrustOptions());
assertEquals(tsPassword, ((JksOptions) options.getTrustOptions()).getPassword());
assertEquals(tsPath, ((JksOptions) options.getTrustOptions()).getPath());
assertEquals(1, options.getEnabledCipherSuites().size());
assertTrue(options.getEnabledCipherSuites().contains(enabledCipher));
assertEquals(connectTimeout, options.getConnectTimeout());
assertEquals(trustAll, options.isTrustAll());
assertEquals(1, options.getCrlPaths().size());
assertEquals(crlPath, options.getCrlPaths().get(0));
assertEquals(reconnectAttempts, options.getReconnectAttempts());
assertEquals(reconnectInterval, options.getReconnectInterval());
assertEquals(useAlpn, options.isUseAlpn());
switch (sslEngine) {
case "jdkSslEngineOptions":
assertTrue(options.getSslEngineOptions() instanceof JdkSSLEngineOptions);
break;
case "openSslEngineOptions":
assertTrue(options.getSslEngineOptions() instanceof OpenSSLEngineOptions);
break;
default:
fail();
break;
}
assertEquals(hostnameVerificationAlgorithm, options.getHostnameVerificationAlgorithm());
// Test other keystore/truststore types
json.remove("keyStoreOptions");
json.remove("trustStoreOptions");
json.put("pfxKeyCertOptions", new JsonObject().put("password", ksPassword))
.put("pfxTrustOptions", new JsonObject().put("password", tsPassword));
options = new NetClientOptions(json);
assertTrue(options.getTrustOptions() instanceof PfxOptions);
assertTrue(options.getKeyCertOptions() instanceof PfxOptions);
json.remove("pfxKeyCertOptions");
json.remove("pfxTrustOptions");
json.put("pemKeyCertOptions", new JsonObject())
.put("pemTrustOptions", new JsonObject());
options = new NetClientOptions(json);
assertTrue(options.getTrustOptions() instanceof PemTrustOptions);
assertTrue(options.getKeyCertOptions() instanceof PemKeyCertOptions);
}
@Test
public void testCopyServerOptions() {
NetServerOptions options = new NetServerOptions();
int sendBufferSize = TestUtils.randomPositiveInt();
int receiverBufferSize = TestUtils.randomPortInt();
Random rand = new Random();
boolean reuseAddress = rand.nextBoolean();
int trafficClass = TestUtils.randomByte() + 128;
boolean tcpNoDelay = rand.nextBoolean();
boolean tcpKeepAlive = rand.nextBoolean();
int soLinger = TestUtils.randomPositiveInt();
boolean usePooledBuffers = rand.nextBoolean();
int idleTimeout = TestUtils.randomPositiveInt();
boolean ssl = rand.nextBoolean();
JksOptions keyStoreOptions = new JksOptions();
String ksPassword = TestUtils.randomAlphaString(100);
keyStoreOptions.setPassword(ksPassword);
JksOptions trustStoreOptions = new JksOptions();
String tsPassword = TestUtils.randomAlphaString(100);
trustStoreOptions.setPassword(tsPassword);
String enabledCipher = TestUtils.randomAlphaString(100);
String crlPath = TestUtils.randomUnicodeString(100);
Buffer crlValue = TestUtils.randomBuffer(100);
int port = 1234;
String host = TestUtils.randomAlphaString(100);
int acceptBacklog = TestUtils.randomPortInt();
boolean useAlpn = TestUtils.randomBoolean();
boolean openSslSessionCacheEnabled = rand.nextBoolean();
SSLEngineOptions sslEngine = TestUtils.randomBoolean() ? new JdkSSLEngineOptions() : new OpenSSLEngineOptions();
boolean sni = TestUtils.randomBoolean();
long sslHandshakeTimeout = TestUtils.randomPositiveLong();
boolean useProxyProtocol = TestUtils.randomBoolean();
long proxyProtocolTimeout = TestUtils.randomPositiveLong();
options.setSendBufferSize(sendBufferSize);
options.setReceiveBufferSize(receiverBufferSize);
options.setReuseAddress(reuseAddress);
options.setTrafficClass(trafficClass);
options.setTcpNoDelay(tcpNoDelay);
options.setTcpKeepAlive(tcpKeepAlive);
options.setSoLinger(soLinger);
options.setIdleTimeout(idleTimeout);
options.setSsl(ssl);
options.setKeyCertOptions(keyStoreOptions);
options.setTrustOptions(trustStoreOptions);
options.addEnabledCipherSuite(enabledCipher);
options.addCrlPath(crlPath);
options.addCrlValue(crlValue);
options.setPort(port);
options.setHost(host);
options.setAcceptBacklog(acceptBacklog);
options.setUseAlpn(useAlpn);
options.setSslEngineOptions(sslEngine);
options.setSni(sni);
options.setSslHandshakeTimeout(sslHandshakeTimeout);
options.setUseProxyProtocol(useProxyProtocol);
options.setProxyProtocolTimeout(proxyProtocolTimeout);
NetServerOptions copy = new NetServerOptions(options);
assertEquals(options.toJson(), copy.toJson());
}
@Test
@SuppressWarnings("deprecation")
public void testDefaultServerOptionsJson() {
NetServerOptions def = new NetServerOptions();
NetServerOptions json = new NetServerOptions(new JsonObject());
assertEquals(def.getCrlPaths(), json.getCrlPaths());
assertEquals(def.getCrlValues(), json.getCrlValues());
assertEquals(def.getAcceptBacklog(), json.getAcceptBacklog());
assertEquals(def.getPort(), json.getPort());
assertEquals(def.getHost(), json.getHost());
assertEquals(def.getCrlPaths(), json.getCrlPaths());
assertEquals(def.getCrlValues(), json.getCrlValues());
assertEquals(def.getAcceptBacklog(), json.getAcceptBacklog());
assertEquals(def.getPort(), json.getPort());
assertEquals(def.getHost(), json.getHost());
assertEquals(def.isTcpNoDelay(), json.isTcpNoDelay());
assertEquals(def.isTcpKeepAlive(), json.isTcpKeepAlive());
assertEquals(def.getSoLinger(), json.getSoLinger());
assertEquals(def.isSsl(), json.isSsl());
assertEquals(def.isUseAlpn(), json.isUseAlpn());
assertEquals(def.getSslEngineOptions(), json.getSslEngineOptions());
assertEquals(def.isSni(), json.isSni());
assertEquals(def.getSslHandshakeTimeout(), json.getSslHandshakeTimeout());
assertEquals(def.getSslHandshakeTimeoutUnit(), json.getSslHandshakeTimeoutUnit());
assertEquals(def.isUseProxyProtocol(), json.isUseProxyProtocol());
assertEquals(def.getProxyProtocolTimeout(), json.getProxyProtocolTimeout());
assertEquals(def.getProxyProtocolTimeoutUnit(), json.getProxyProtocolTimeoutUnit());
}
@Test
public void testServerOptionsJson() {
int sendBufferSize = TestUtils.randomPositiveInt();
int receiverBufferSize = TestUtils.randomPositiveInt();
Random rand = new Random();
boolean reuseAddress = rand.nextBoolean();
int trafficClass = TestUtils.randomByte() + 128;
boolean tcpNoDelay = rand.nextBoolean();
boolean tcpKeepAlive = rand.nextBoolean();
int soLinger = TestUtils.randomPositiveInt();
boolean usePooledBuffers = rand.nextBoolean();
int idleTimeout = TestUtils.randomPositiveInt();
boolean ssl = rand.nextBoolean();
JksOptions keyStoreOptions = new JksOptions();
String ksPassword = TestUtils.randomAlphaString(100);
keyStoreOptions.setPassword(ksPassword);
String ksPath = TestUtils.randomAlphaString(100);
keyStoreOptions.setPath(ksPath);
JksOptions trustStoreOptions = new JksOptions();
String tsPassword = TestUtils.randomAlphaString(100);
trustStoreOptions.setPassword(tsPassword);
String tsPath = TestUtils.randomAlphaString(100);
trustStoreOptions.setPath(tsPath);
String enabledCipher = TestUtils.randomAlphaString(100);
String crlPath = TestUtils.randomUnicodeString(100);
int port = 1234;
String host = TestUtils.randomAlphaString(100);
int acceptBacklog = TestUtils.randomPortInt();
boolean useAlpn = TestUtils.randomBoolean();
boolean openSslSessionCacheEnabled = rand.nextBoolean();
String sslEngine = TestUtils.randomBoolean() ? "jdkSslEngineOptions" : "openSslEngineOptions";
boolean sni = TestUtils.randomBoolean();
long sslHandshakeTimeout = TestUtils.randomPositiveLong();
boolean useProxyProtocol = TestUtils.randomBoolean();
long proxyProtocolTimeout = TestUtils.randomPositiveLong();
JsonObject json = new JsonObject();
json.put("sendBufferSize", sendBufferSize)
.put("receiveBufferSize", receiverBufferSize)
.put("reuseAddress", reuseAddress)
.put("trafficClass", trafficClass)
.put("tcpNoDelay", tcpNoDelay)
.put("tcpKeepAlive", tcpKeepAlive)
.put("soLinger", soLinger)
.put("usePooledBuffers", usePooledBuffers)
.put("idleTimeout", idleTimeout)
.put("ssl", ssl)
.put("enabledCipherSuites", new JsonArray().add(enabledCipher))
.put("crlPaths", new JsonArray().add(crlPath))
.put("keyStoreOptions", new JsonObject().put("password", ksPassword).put("path", ksPath))
.put("trustStoreOptions", new JsonObject().put("password", tsPassword).put("path", tsPath))
.put("port", port)
.put("host", host)
.put("acceptBacklog", acceptBacklog)
.put("useAlpn", useAlpn)
.put(sslEngine, new JsonObject())
.put("openSslSessionCacheEnabled", openSslSessionCacheEnabled)
.put("sni", sni)
.put("sslHandshakeTimeout", sslHandshakeTimeout)
.put("useProxyProtocol", useProxyProtocol)
.put("proxyProtocolTimeout", proxyProtocolTimeout);
NetServerOptions options = new NetServerOptions(json);
assertEquals(sendBufferSize, options.getSendBufferSize());
assertEquals(receiverBufferSize, options.getReceiveBufferSize());
assertEquals(reuseAddress, options.isReuseAddress());
assertEquals(trafficClass, options.getTrafficClass());
assertEquals(tcpKeepAlive, options.isTcpKeepAlive());
assertEquals(tcpNoDelay, options.isTcpNoDelay());
assertEquals(soLinger, options.getSoLinger());
assertEquals(idleTimeout, options.getIdleTimeout());
assertEquals(ssl, options.isSsl());
assertEquals(sslHandshakeTimeout, options.getSslHandshakeTimeout());
assertNotSame(keyStoreOptions, options.getKeyCertOptions());
assertEquals(ksPassword, ((JksOptions) options.getKeyCertOptions()).getPassword());
assertEquals(ksPath, ((JksOptions) options.getKeyCertOptions()).getPath());
assertNotSame(trustStoreOptions, options.getTrustOptions());
assertEquals(tsPassword, ((JksOptions) options.getTrustOptions()).getPassword());
assertEquals(tsPath, ((JksOptions) options.getTrustOptions()).getPath());
assertEquals(1, options.getEnabledCipherSuites().size());
assertTrue(options.getEnabledCipherSuites().contains(enabledCipher));
assertEquals(1, options.getCrlPaths().size());
assertEquals(crlPath, options.getCrlPaths().get(0));
assertEquals(port, options.getPort());
assertEquals(host, options.getHost());
assertEquals(acceptBacklog, options.getAcceptBacklog());
assertEquals(useAlpn, options.isUseAlpn());
switch (sslEngine) {
case "jdkSslEngineOptions":
assertTrue(options.getSslEngineOptions() instanceof JdkSSLEngineOptions);
break;
case "openSslEngineOptions":
assertTrue(options.getSslEngineOptions() instanceof OpenSSLEngineOptions);
break;
default:
fail();
break;
}
assertEquals(sni, options.isSni());
assertEquals(useProxyProtocol, options.isUseProxyProtocol());
assertEquals(proxyProtocolTimeout, options.getProxyProtocolTimeout());
// Test other keystore/truststore types
json.remove("keyStoreOptions");
json.remove("trustStoreOptions");
json.put("pfxKeyCertOptions", new JsonObject().put("password", ksPassword))
.put("pfxTrustOptions", new JsonObject().put("password", tsPassword));
options = new NetServerOptions(json);
assertTrue(options.getTrustOptions() instanceof PfxOptions);
assertTrue(options.getKeyCertOptions() instanceof PfxOptions);
json.remove("pfxKeyCertOptions");
json.remove("pfxTrustOptions");
json.put("pemKeyCertOptions", new JsonObject())
.put("pemTrustOptions", new JsonObject());
options = new NetServerOptions(json);
assertTrue(options.getTrustOptions() instanceof PemTrustOptions);
assertTrue(options.getKeyCertOptions() instanceof PemKeyCertOptions);
}
@Test
// Server pauses reads so the client's write queue fills up; once the server resumes,
// a buffered write must complete successfully (ack delivered after backpressure clears).
public void testWriteHandlerSuccess() throws Exception {
CompletableFuture<Void> close = new CompletableFuture<>();
server.connectHandler(socket -> {
socket.pause();
close.thenAccept(v -> {
socket.resume();
});
});
startServer();
client.connect(testAddress).onComplete(onSuccess(so -> {
writeUntilFull(so, v -> {
// Queue one more write while full, then unblock the server: the write must succeed.
so.write(Buffer.buffer("lost buffer")).onComplete(onSuccess(ack -> testComplete()));
close.complete(null);
});
}));
await();
}
@Test
// Same setup as testWriteHandlerSuccess, but the server closes the socket instead of
// resuming, so the pending write must be reported as failed.
public void testWriteHandlerFailure() throws Exception {
// Todo : investigate this
Assume.assumeFalse(TRANSPORT == Transport.IO_URING);
CompletableFuture<Void> close = new CompletableFuture<>();
server.connectHandler(socket -> {
socket.pause();
close.thenAccept(v -> {
socket.close();
});
});
startServer(testAddress);
client.connect(testAddress).onComplete(onSuccess(so -> {
writeUntilFull(so, v -> {
so.write(Buffer.buffer("lost buffer")).onComplete(onFailure(err -> testComplete()));
close.complete(null);
});
}));
await();
}
// Repeatedly writes 16KB buffers until the socket reports a full write queue,
// then invokes {@code handler}. Re-checks every 10ms via a timer.
private void writeUntilFull(NetSocket so, Handler<Void> handler) {
if (so.writeQueueFull()) {
handler.handle(null);
} else {
// Give enough time to report a proper full
so.write(TestUtils.randomBuffer(16384));
vertx.setTimer(10, id -> writeUntilFull(so, handler));
}
}
@Test
// Echo a random binary buffer and verify it round-trips unchanged.
public void testEchoBytes() {
Buffer sent = TestUtils.randomBuffer(100);
testEcho(sock -> sock.write(sent), buff -> assertEquals(sent, buff), sent.length());
}
@Test
// Echo a random unicode string written with the default encoding.
public void testEchoString() {
String sent = TestUtils.randomUnicodeString(100);
Buffer buffSent = Buffer.buffer(sent);
testEcho(sock -> sock.write(sent), buff -> assertEquals(buffSent, buff), buffSent.length());
}
@Test
public void testEchoStringUTF8() {
testEchoStringWithEncoding("UTF-8");
}
@Test
public void testEchoStringUTF16() {
testEchoStringWithEncoding("UTF-16");
}
// Echo a random unicode string using an explicit charset; the expected byte length
// depends on the encoding, so it is computed from the encoded buffer.
void testEchoStringWithEncoding(String encoding) {
String sent = TestUtils.randomUnicodeString(100);
Buffer buffSent = Buffer.buffer(sent, encoding);
testEcho(sock -> sock.write(sent, encoding), buff -> assertEquals(buffSent, buff), buffSent.length());
}
// Generic echo driver: starts the echo server, connects a client, sends data via
// {@code writer}, accumulates received bytes until exactly {@code length} bytes arrive,
// then runs {@code dataChecker} on the accumulated buffer. Fails if more than
// {@code length} bytes are ever received.
void testEcho(Consumer<NetSocket> writer, Consumer<Buffer> dataChecker, int length) {
Handler<AsyncResult<NetSocket>> clientHandler = (asyncResult) -> {
if (asyncResult.succeeded()) {
NetSocket sock = asyncResult.result();
Buffer buff = Buffer.buffer();
sock.handler((buffer) -> {
buff.appendBuffer(buffer);
if (buff.length() == length) {
dataChecker.accept(buff);
testComplete();
}
if (buff.length() > length) {
fail("Too many bytes received");
}
});
// Install the handler before writing so no echoed data can be missed.
writer.accept(sock);
} else {
fail("failed to connect");
}
};
startEchoServer(testAddress, s -> client.connect(testAddress).onComplete(clientHandler));
await();
}
// Binds the shared server on {@code address}; every connection simply echoes each
// received buffer back to its sender. The listen outcome is reported to {@code listenHandler}.
void startEchoServer(SocketAddress address, Handler<AsyncResult<NetServer>> listenHandler) {
  server
    .connectHandler(conn -> conn.handler(conn::write))
    .listen(address)
    .onComplete(listenHandler);
}
@Test
public void testConnectLocalHost() {
connect(testAddress);
}
// Opens 100 concurrent connections to an echo server, closing each as soon as it
// connects; the test completes when every connection has succeeded.
void connect(SocketAddress address) {
startEchoServer(testAddress, s -> {
final int numConnections = 100;
final AtomicInteger connCount = new AtomicInteger(0);
for (int i = 0; i < numConnections; i++) {
Handler<AsyncResult<NetSocket>> handler = res -> {
if (res.succeeded()) {
res.result().close();
if (connCount.incrementAndGet() == numConnections) {
testComplete();
}
}
};
client.connect(address).onComplete(handler);
}
});
await();
}
@Test
// Out-of-range ports must be rejected eagerly; a valid but unused port must fail async.
public void testConnectInvalidPort() {
assertIllegalArgumentException(() -> client.connect(-1, "localhost"));
assertIllegalArgumentException(() -> client.connect(65536, "localhost"));
client.connect(9998, "localhost").onComplete(onFailure((err -> testComplete())));
await();
}
@Test
// A null host must be rejected eagerly with an NPE; connecting to an address with
// nothing listening must fail asynchronously.
// Fix: renamed from testConnectInvwalidHost (typo) — JUnit discovers tests by
// annotation, so the rename has no callers to break.
public void testConnectInvalidHost() {
  assertNullPointerException(() -> client.connect(80, null));
  client.connect(1234, "127.0.0.2").onComplete(onFailure(err -> testComplete()));
  await();
}
@Ignore("Now they share the same TCP server port")
@Test
// Binding a NetServer on a port already taken by an HttpServer should fail.
// Ignored because both server types can now share the same TCP port.
public void testListenInvalidPort() {
final int port = 9090;
final HttpServer httpServer = vertx.createHttpServer();
try {
httpServer.requestHandler(ignore -> {})
.listen(port).onComplete(onSuccess(s ->
vertx.createNetServer()
.connectHandler(ignore -> {})
.listen(port).onComplete(onFailure(error -> {
assertNotNull(error);
testComplete();
}))));
await();
} finally {
httpServer.close();
}
}
@Test
// Listening on an unresolvable host name must fail asynchronously.
public void testListenInvalidHost() {
server.close();
server = vertx.createNetServer(new NetServerOptions().setPort(1234).setHost("uhqwduhqwudhqwuidhqwiudhqwudqwiuhd"));
server.connectHandler(netSocket -> {
}).listen().onComplete(onFailure(err -> testComplete()));
await();
}
@Test
// Port 0 requests an ephemeral port; the actual bound port must be a non-privileged one.
public void testListenOnWildcardPort() {
server.close();
server = vertx.createNetServer(new NetServerOptions().setPort(0));
server.connectHandler((netSocket) -> {
}).listen().onComplete(onSuccess(s -> {
assertTrue(server.actualPort() > 1024);
assertEquals(server, s);
testComplete();
}));
await();
}
@Test
public void testClientCloseHandlersCloseFromClient() {
startEchoServer(testAddress, s -> clientCloseHandlers(true));
await();
}
@Test
public void testClientCloseHandlersCloseFromServer() {
server.connectHandler(NetSocket::close).listen(testAddress).onComplete((s) -> clientCloseHandlers(false));
await();
}
// Verifies the client-side ordering contract: endHandler fires first, then closeHandler,
// regardless of which side initiated the close.
void clientCloseHandlers(boolean closeFromClient) {
client.connect(testAddress).onComplete(onSuccess(so -> {
AtomicInteger counter = new AtomicInteger(0);
so.endHandler(v -> assertEquals(1, counter.incrementAndGet()));
so.closeHandler(v -> {
assertEquals(2, counter.incrementAndGet());
testComplete();
});
if (closeFromClient) {
so.close();
}
}));
}
@Test
public void testServerCloseHandlersCloseFromClient() {
serverCloseHandlers(false, s -> client.connect(testAddress).onComplete(ar -> ar.result().close()));
await();
}
@Test
public void testServerCloseHandlersCloseFromServer() {
serverCloseHandlers(true, s -> client.connect(testAddress));
await();
}
// Server-side counterpart of clientCloseHandlers: endHandler then closeHandler, in order.
void serverCloseHandlers(boolean closeFromServer, Handler<NetServer> listenHandler) {
server.connectHandler((sock) -> {
AtomicInteger counter = new AtomicInteger(0);
sock.endHandler(v -> assertEquals(1, counter.incrementAndGet()));
sock.closeHandler(v -> {
assertEquals(2, counter.incrementAndGet());
testComplete();
});
if (closeFromServer) {
sock.close();
}
}).listen(testAddress).onComplete(onSuccess(listenHandler::handle));
}
@Test
// Closing a NetClient must close all of its open connections: open one connection to
// each of three servers, close the client, and wait for every socket's closeHandler.
public void testClientClose() throws Exception {
int num = 3;
List<NetServer> servers = new ArrayList<>();
try {
for (int i = 0;i < num;i++) {
NetServer server = vertx.createNetServer();
server.connectHandler(so -> {
});
startServer(SocketAddress.inetSocketAddress(1234 + i, "localhost"), server);
servers.add(server);
}
NetClient client = vertx.createNetClient();
AtomicInteger inflight = new AtomicInteger();
for (int i = 0;i < num;i++) {
client.connect(1234 + i, "localhost").onComplete(onSuccess(so -> {
inflight.incrementAndGet();
so.closeHandler(v -> {
inflight.decrementAndGet();
});
}));
}
assertWaitUntil(() -> inflight.get() == 3);
CountDownLatch latch = new CountDownLatch(1);
client.close().onComplete(onSuccess(v -> latch.countDown()));
awaitLatch(latch);
// After close() completes, every connection's closeHandler must have run.
assertWaitUntil(() -> inflight.get() == 0);
} finally {
servers.forEach(NetServer::close);
}
}
@Test
// A message already in the Netty pipeline when close() is called must still be
// delivered to the data handler: a pipeline head handler closes the socket before
// propagating the read, and the echoed payload must still arrive.
public void testReceiveMessageAfterExplicitClose() throws Exception {
server.connectHandler(so -> {
so.handler(buff -> {
so.write(buff);
});
});
startServer();
client.connect(testAddress).onComplete(onSuccess(so -> {
NetSocketInternal soi = (NetSocketInternal) so;
soi.channelHandlerContext().pipeline().addFirst(new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
// Close before the read reaches the socket's handler.
so.close();
super.channelRead(ctx, msg);
}
});
so.handler(buff -> {
assertEquals("Hello World", buff.toString());
testComplete();
});
so.write("Hello World");
}));
await();
}
@Test
// Fills the client write queue against a paused server, then verifies the drain
// handler fires (with a non-full queue) once the server resumes reading.
public void testClientDrainHandler() {
pausingServer((s) -> {
client.connect(testAddress).onComplete(onSuccess(sock -> {
assertFalse(sock.writeQueueFull());
sock.setWriteQueueMaxSize(1000);
Buffer buff = TestUtils.randomBuffer(10000);
vertx.setPeriodic(1, id -> {
sock.write(buff.copy());
if (sock.writeQueueFull()) {
vertx.cancelTimer(id);
sock.drainHandler(v -> {
assertFalse(sock.writeQueueFull());
testComplete();
});
// Tell the server to resume
vertx.eventBus().send("server_resume", "");
}
});
}));
});
await();
}
// Server that starts each connection paused and resumes it only when a message
// arrives on the "server_resume" event-bus address.
void pausingServer(Handler<AsyncResult<NetServer>> listenHandler) {
server.connectHandler(sock -> {
sock.pause();
Handler<Message<Buffer>> resumeHandler = (m) -> sock.resume();
MessageConsumer<?> reg = vertx.eventBus().<Buffer>consumer("server_resume").handler(resumeHandler);
sock.closeHandler(v -> reg.unregister());
}).listen(testAddress).onComplete(listenHandler);
}
@Test
// Mirror of testClientDrainHandler: the server writes until full against a paused
// client, and its drain handler must fire after the client resumes.
public void testServerDrainHandler() {
drainingServer(s -> {
client.connect(testAddress).onComplete(onSuccess(sock -> {
sock.pause();
setHandlers(sock);
sock.handler(buf -> {});
}));
});
await();
}
// Registers an event-bus consumer on "client_resume" that resumes {@code sock} when
// any message arrives, and unregisters the consumer once the socket closes.
void setHandlers(NetSocket sock) {
  Handler<Message<Buffer>> resumeHandler = m -> sock.resume();
  // Wildcard type instead of a raw MessageConsumer, consistent with pausingServer().
  MessageConsumer<?> reg = vertx.eventBus().<Buffer>consumer("client_resume").handler(resumeHandler);
  sock.closeHandler(v -> reg.unregister());
}
// Server that floods each connection with 10KB buffers until its write queue is full,
// then waits for the drain handler; the paired client resumes reading when told to
// via the "client_resume" event-bus address.
void drainingServer(Handler<AsyncResult<NetServer>> listenHandler) {
server.connectHandler(sock -> {
assertFalse(sock.writeQueueFull());
sock.setWriteQueueMaxSize(1000);
Buffer buff = TestUtils.randomBuffer(10000);
//Send data until the buffer is full
vertx.setPeriodic(1, id -> {
sock.write(buff.copy());
if (sock.writeQueueFull()) {
vertx.cancelTimer(id);
sock.drainHandler(v -> {
assertFalse(sock.writeQueueFull());
// End test after a short delay to give the client some time to read the data
vertx.setTimer(100, id2 -> testComplete());
});
// Tell the client to resume
vertx.eventBus().send("client_resume", "");
}
});
}).listen(testAddress).onComplete(listenHandler);
}
@Test
public void testReconnectAttemptsInfinite() {
reconnectAttempts(-1);
}
@Test
public void testReconnectAttemptsMany() {
reconnectAttempts(100000);
}
// Configures the client with the given reconnect-attempt budget (-1 = infinite) and a
// 10ms interval; the server only starts 2s later, so the connect must succeed through
// the retry mechanism.
private void reconnectAttempts(int attempts) {
client.close();
client = vertx.createNetClient(new NetClientOptions().setReconnectAttempts(attempts).setReconnectInterval(10));
//The server delays starting for a few seconds, but it should still connect
client.connect(testAddress).onComplete(onSuccess(so -> testComplete()));
// Start the server after a delay
vertx.setTimer(2000, id -> startEchoServer(testAddress, s -> {}));
await();
}
@Test
// With no server ever started, 100 attempts at 10ms intervals must exhaust and fail.
public void testReconnectAttemptsNotEnough() {
// This test does not pass reliably in CI for Windows
Assume.assumeFalse(Utils.isWindows());
client.close();
client = vertx.createNetClient(new NetClientOptions().setReconnectAttempts(100).setReconnectInterval(10));
client.connect(testAddress).onComplete(onFailure(err -> testComplete()));
await();
}
// The twelve tests below exercise idle/read-idle/write-idle timeouts on either the
// server or the client, with the data flowing from the client (last arg true) or from
// the server (last arg false). A read-idle or write-idle timeout on the receiving or
// sending side respectively cuts the transfer short, so only a prefix of "0123456789"
// arrives; otherwise the full payload is expected.
@Test
public void testServerIdleTimeout1() {
testTimeout(new NetClientOptions(), new NetServerOptions().setIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertEquals("0123456789", received.toString()), true);
}
@Test
public void testServerIdleTimeout2() {
testTimeout(new NetClientOptions(), new NetServerOptions().setReadIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertEquals("0123456789", received.toString()), true);
}
@Test
public void testServerIdleTimeout3() {
// Usually 012 but might be 01 or 0123
testTimeout(new NetClientOptions(), new NetServerOptions().setWriteIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertFalse("0123456789".equals(received.toString())), true);
}
@Test
public void testServerIdleTimeout4() {
testTimeout(new NetClientOptions(), new NetServerOptions().setIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertEquals("0123456789", received.toString()), false);
}
@Test
public void testServerIdleTimeout5() {
// Usually 012 but might be 01 or 0123
testTimeout(new NetClientOptions(), new NetServerOptions().setReadIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertFalse("0123456789".equals(received.toString())), false);
}
@Test
public void testServerIdleTimeout6() {
testTimeout(new NetClientOptions(), new NetServerOptions().setWriteIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), received -> assertEquals("0123456789", received.toString()), false);
}
@Test
public void testClientIdleTimeout1() {
testTimeout(new NetClientOptions().setIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertEquals("0123456789", received.toString()), true);
}
@Test
public void testClientIdleTimeout2() {
testTimeout(new NetClientOptions().setWriteIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertEquals("0123456789", received.toString()), true);
}
@Test
public void testClientIdleTimeout3() {
// Usually 012 but might be 01 or 0123
testTimeout(new NetClientOptions().setReadIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertFalse("0123456789".equals(received.toString())), true);
}
@Test
public void testClientIdleTimeout4() {
testTimeout(new NetClientOptions().setIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertEquals("0123456789", received.toString()), false);
}
@Test
public void testClientIdleTimeout5() {
// Usually 012 but might be 01 or 0123
testTimeout(new NetClientOptions().setWriteIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertFalse("0123456789".equals(received.toString())), false);
}
@Test
public void testClientIdleTimeout6() {
testTimeout(new NetClientOptions().setReadIdleTimeout(500).setIdleTimeoutUnit(TimeUnit.MILLISECONDS), new NetServerOptions(), received -> assertEquals("0123456789", received.toString()), false);
}
// Shared driver for the idle-timeout tests: one side sends the digits 0-9 at 100ms
// intervals while the other accumulates them; both sides count IdleStateEvents.
// When the idle timeout fires and closes the connection, the sender's closeHandler
// validates the received data via {@code check} and that exactly one idle event fired.
private void testTimeout(NetClientOptions clientOptions, NetServerOptions serverOptions, Consumer<Buffer> check, boolean clientSends) {
server.close();
server = vertx.createNetServer(serverOptions);
client.close();
client = vertx.createNetClient(clientOptions);
Buffer received = Buffer.buffer();
AtomicInteger idleEvents = new AtomicInteger();
Handler<NetSocket> receiver = so -> {
((NetSocketInternal)so).eventHandler(evt -> {
if (evt instanceof IdleStateEvent) {
idleEvents.incrementAndGet();
}
});
so.handler(received::appendBuffer);
};
Handler<NetSocket> sender = so -> {
AtomicInteger times = new AtomicInteger();
// Send one digit every 100ms, ten in total.
vertx.setPeriodic(100, id -> {
int val = times.getAndIncrement();
if (val < 10) {
so.write("" + val);
} else {
vertx.cancelTimer(id);
}
});
((NetSocketInternal)so).eventHandler(evt -> {
if (evt instanceof IdleStateEvent) {
idleEvents.incrementAndGet();
}
});
so.closeHandler(v -> {
check.accept(received);
assertEquals(1, idleEvents.get());
testComplete();
});
};
Handler<NetSocket> clientHandler = clientSends ? sender : receiver;
Handler<NetSocket> serverHandler = clientSends ? receiver : sender;
server.connectHandler(serverHandler).listen(testAddress).onComplete(onSuccess(s -> {
client.connect(testAddress).onComplete(onSuccess(clientHandler::handle));
}));
await();
}
// TLS handshake matrix: testTLS(clientCert, clientTrust, serverCert, serverTrust,
// requireClientAuth, clientTrustAll, shouldPass, startTLS [, ciphers [, protocols]]).
@Test
// StartTLS
public void testStartTLSClientTrustAll() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, true, true, true);
}
@Test
// Client trusts all server certs
public void testTLSClientTrustAll() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, true, true, false);
}
@Test
// Server specifies cert that the client trusts (not trust all)
public void testTLSClientTrustServerCert() throws Exception {
testTLS(Cert.NONE, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.NONE, false, false, true, false);
}
@Test
// Server specifies cert that the client doesn't trust
public void testTLSClientUntrustedServer() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, false, false, false);
}
@Test
//Client specifies cert even though it's not required
public void testTLSClientCertNotRequired() throws Exception {
testTLS(Cert.CLIENT_JKS, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.CLIENT_JKS, false, false, true, false);
}
@Test
//Client specifies cert and it's required (requireClientAuth = true)
public void testTLSClientCertRequired() throws Exception {
testTLS(Cert.CLIENT_JKS, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.CLIENT_JKS, true, false, true, false);
}
@Test
//Client doesn't specify cert but it's required
public void testTLSClientCertRequiredNoClientCert() throws Exception {
testTLS(Cert.NONE, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.CLIENT_JKS, true, false, false, false);
}
@Test
//Client doesn't specify cert but it's required, over TLS 1.3
public void testTLSClientCertRequiredNoClientCert1_3() throws Exception {
testTLS(Cert.NONE, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.CLIENT_JKS, true, false, false, false, new String[0], new String[]{"TLSv1.3"});
}
@Test
//Client specifies cert but it's not trusted
public void testTLSClientCertClientNotTrusted() throws Exception {
testTLS(Cert.NONE, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.NONE, true, false, false, false);
}
@Test
// StartTLS client specifies cert but it's not trusted
public void testStartTLSClientCertClientNotTrusted() throws Exception {
testTLS(Cert.NONE, Trust.SERVER_JKS, Cert.SERVER_JKS, Trust.CLIENT_JKS, true, false, false, true);
}
@Test
// Specify some cipher suites
public void testTLSCipherSuites() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, true, true, false, ENABLED_CIPHER_SUITES);
}
@Test
// Specify some bogus protocol
public void testInvalidTlsProtocolVersion() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, true, false, false, new String[0],
new String[]{"TLSv1.999"});
}
@Test
// Specify a valid protocol
public void testSpecificTlsProtocolVersion() throws Exception {
testTLS(Cert.NONE, Trust.NONE, Cert.SERVER_JKS, Trust.NONE, false, true, true, false, new String[0],
new String[]{"TLSv1.2"});
}
@Test
// NOTE(review): this test is identical to testSniWithTrailingDotHost below —
// consider removing one of the duplicates.
public void testTLSTrailingDotHost() throws Exception {
// Reuse SNI test certificate because it is convenient
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST2)
.connectAddress(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host2.com."))
.bindAddress(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host2.com"))
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("host2.com", cnOf(test.clientPeerCert()));
assertEquals("host2.com", test.indicatedServerName);
}
@Test
// SNI without server name should use the first keystore entry
public void testSniWithoutServerNameUsesTheFirstKeyStoreEntry1() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SERVER_JKS)
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("localhost", cnOf(test.clientPeerCert()));
}
@Test
// SNI without server name should use the first keystore entry
public void testSniWithoutServerNameUsesTheFirstKeyStoreEntry2() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST1)
.serverCert(Cert.SNI_JKS).sni(true);
test.run(false);
await();
}
@Test
// Connecting by host name implicitly sends that name as SNI.
public void testSniImplicitServerName() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST2)
.address(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host2.com"))
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("host2.com", cnOf(test.clientPeerCert()));
assertEquals("host2.com", test.indicatedServerName);
}
@Test
// Short (dot-less) host names are not sent as implicit SNI, so the handshake fails
// when the client only trusts the per-host certificate.
public void testSniImplicitServerNameDisabledForShortname1() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST1)
.address(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host1"))
.serverCert(Cert.SNI_JKS).sni(true);
test.run(false);
await();
}
@Test
// Short name sends no SNI, so the default (first) certificate is served and trusted.
public void testSniImplicitServerNameDisabledForShortname2() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SERVER_JKS)
.address(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host1"))
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("localhost", cnOf(test.clientPeerCert()));
}
@Test
// An explicit serverName() forces SNI even for a short host name.
public void testSniForceShortname() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST1)
.address(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host1"))
.serverName("host1")
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("host1", cnOf(test.clientPeerCert()));
}
@Test
// An explicit serverName() overrides the connect host for SNI purposes.
public void testSniOverrideServerName() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST2)
.address(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "example.com"))
.serverName("host2.com")
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("host2.com", cnOf(test.clientPeerCert()));
}
@Test
// Connects three times with different SNI names against an SNI-enabled server and
// checks: the certificate served per name (unknown names fall back to the default
// "localhost" entry), the server's SNI cache size (the unknown name is not cached),
// and the server-side view of the indicated names.
public void testClientSniMultipleServerName() throws Exception {
  List<String> receivedServerNames = Collections.synchronizedList(new ArrayList<>());
  server = vertx.createNetServer(new NetServerOptions()
    .setSni(true)
    .setSsl(true)
    .setKeyCertOptions(Cert.SNI_JKS.get())
  ).connectHandler(so -> {
    receivedServerNames.add(so.indicatedServerName());
  });
  startServer();
  List<String> serverNames = Arrays.asList("host1", "host2.com", "fake");
  List<String> cns = new ArrayList<>();
  client = vertx.createNetClient(new NetClientOptions().setSsl(true).setHostnameVerificationAlgorithm("").setTrustAll(true));
  for (String serverName : serverNames) {
    NetSocket so = awaitFuture(client.connect(testAddress, serverName));
    String host = cnOf(so.peerCertificates().get(0));
    cns.add(host);
  }
  assertEquals(Arrays.asList("host1", "host2.com", "localhost"), cns);
  // Only the two known names are cached; "fake" resolves to the default cert.
  assertEquals(2, ((NetServerImpl)server).sniEntrySize());
  assertWaitUntil(() -> receivedServerNames.size() == 3);
  // Fix: expected value first, per the JUnit convention used throughout this file.
  assertEquals(serverNames, receivedServerNames);
}
@Test
// SNI present an unknown server
public void testSniWithUnknownServer1() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SERVER_JKS)
.serverCert(Cert.SNI_JKS).sni(true).serverName("unknown");
test.run(true);
await();
// Unknown SNI name falls back to the default ("localhost") certificate.
assertEquals("localhost", cnOf(test.clientPeerCert()));
}
@Test
// SNI present an unknown server
public void testSniWithUnknownServer2() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST2)
.serverCert(Cert.SNI_JKS).sni(true).serverName("unknown");
test.run(false);
await();
}
@Test
// SNI returns the certificate for the indicated server name
public void testSniWithServerNameStartTLS() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST1)
.startTLS(true)
.serverCert(Cert.SNI_JKS).sni(true).serverName("host1");
test.run(true);
await();
assertEquals("host1", cnOf(test.clientPeerCert()));
}
@Test
// The server trust store selected via SNI accepts the client's certificate.
public void testSniWithServerNameTrust(){
TLSTest test = new TLSTest().clientTrust(Trust.SNI_JKS_HOST2)
.clientCert(Cert.CLIENT_PEM_ROOT_CA)
.requireClientAuth(true)
.serverCert(Cert.SNI_JKS)
.sni(true)
.serverName("host2.com")
.serverTrust(Trust.SNI_SERVER_ROOT_CA_AND_OTHER_CA_1);
test.run(true);
await();
}
@Test
// No SNI-specific trust entry: the fallback trust store accepts the client cert.
public void testSniWithServerNameTrustFallback(){
TLSTest test = new TLSTest().clientTrust(Trust.SNI_JKS_HOST2)
.clientCert(Cert.CLIENT_PEM_ROOT_CA)
.requireClientAuth(true)
.serverCert(Cert.SNI_JKS)
.sni(true)
.serverName("host2.com")
.serverTrust(Trust.SNI_SERVER_ROOT_CA_FALLBACK);
test.run(true);
await();
}
@Test
// Fallback trust store does not trust the client cert: handshake must fail.
public void testSniWithServerNameTrustFallbackFail(){
TLSTest test = new TLSTest().clientTrust(Trust.SNI_JKS_HOST2)
.clientCert(Cert.CLIENT_PEM_ROOT_CA)
.requireClientAuth(true)
.serverCert(Cert.SNI_JKS)
.sni(true)
.serverName("host2.com")
.serverTrust(Trust.SNI_SERVER_OTHER_CA_FALLBACK);
test.run(false);
await();
}
@Test
// SNI-selected trust store does not trust the client cert: handshake must fail.
public void testSniWithServerNameTrustFail(){
TLSTest test = new TLSTest().clientTrust(Trust.SNI_JKS_HOST2)
.clientCert(Cert.CLIENT_PEM_ROOT_CA)
.requireClientAuth(true)
.serverCert(Cert.SNI_JKS)
.sni(true)
.serverName("host2.com")
.serverTrust(Trust.SNI_SERVER_ROOT_CA_AND_OTHER_CA_2);
test.run(false);
await();
}
@Test
// A trailing dot on the connect host is stripped for SNI matching.
public void testSniWithTrailingDotHost() throws Exception {
TLSTest test = new TLSTest()
.clientTrust(Trust.SNI_JKS_HOST2)
.connectAddress(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host2.com."))
.bindAddress(SocketAddress.inetSocketAddress(DEFAULT_HTTPS_PORT, "host2.com"))
.serverCert(Cert.SNI_JKS).sni(true);
test.run(true);
await();
assertEquals("host2.com", cnOf(test.clientPeerCert()));
assertEquals("host2.com", test.indicatedServerName);
}
@Test
// A keystore with multiple entries serves its configured default entry.
public void testServerCertificateMultiple() throws Exception {
TLSTest test = new TLSTest()
.serverCert(Cert.MULTIPLE_JKS)
.clientTrustAll(true);
test.run(true);
await();
assertEquals("precious", cnOf(test.clientPeerCert()));
}
@Test
// A keystore alias that does not exist must fail server startup with a clear message.
public void testServerCertificateMultipleWrongAlias() throws Exception {
TLSTest test = new TLSTest()
.serverCert(Cert.MULTIPLE_JKS_WRONG_ALIAS)
.clientTrustAll(true);
server = vertx
.createNetServer(test.setupServer())
.connectHandler(so -> {
});
server.listen(test.bindAddress).onComplete(onFailure(t -> {
assertThat(t, is(instanceOf(IllegalArgumentException.class)));
assertThat(t.getMessage(), containsString("alias does not exist in the keystore"));
testComplete();
}));
await();
}
@Test
// A keystore entry protected by its own key password is usable.
public void testServerCertificateMultipleWithKeyPassword() throws Exception {
TLSTest test = new TLSTest()
.serverCert(Cert.MULTIPLE_JKS_ALIAS_PASSWORD)
.clientTrustAll(true);
test.run(true);
await();
assertEquals("fonky", cnOf(test.clientPeerCert()));
}
// Convenience overload: no explicit cipher suites, TLSv1.2 only.
void testTLS(Cert<?> clientCert, Trust<?> clientTrust,
Cert<?> serverCert, Trust<?> serverTrust,
boolean requireClientAuth, boolean clientTrustAll,
boolean shouldPass, boolean startTLS) throws Exception {
testTLS(clientCert, clientTrust, serverCert, serverTrust, requireClientAuth, clientTrustAll,
shouldPass, startTLS, new String[0], new String[]{"TLSv1.2"});
}
// Convenience overload: explicit cipher suites, TLSv1.2 only.
void testTLS(Cert<?> clientCert, Trust<?> clientTrust,
Cert<?> serverCert, Trust<?> serverTrust,
boolean requireClientAuth, boolean clientTrustAll,
boolean shouldPass, boolean startTLS,
String[] enabledCipherSuites) throws Exception {
testTLS(clientCert, clientTrust, serverCert, serverTrust, requireClientAuth, clientTrustAll,
shouldPass, startTLS, enabledCipherSuites, new String[]{"TLSv1.2"});
}
// Full variant: builds a TLSTest with the given certificates, trust configuration,
// cipher suites and protocol versions, and runs it expecting success iff shouldPass.
void testTLS(Cert<?> clientCert, Trust<?> clientTrust,
Cert<?> serverCert, Trust<?> serverTrust,
boolean requireClientAuth, boolean clientTrustAll,
boolean shouldPass, boolean startTLS,
String[] enabledCipherSuites,
String[] enabledSecureTransportProtocols) throws Exception {
TLSTest test = new TLSTest()
.clientCert(clientCert)
.clientTrust(clientTrust)
.serverCert(serverCert)
.serverTrust(serverTrust)
.requireClientAuth(requireClientAuth)
.clientTrustAll(clientTrustAll)
.startTLS(startTLS)
.enabledCipherSuites(enabledCipherSuites)
.clientVersion(enabledSecureTransportProtocols)
.serverVersion(enabledSecureTransportProtocols);
test.run(shouldPass);
await();
}
|
NetTest
|
java
|
apache__kafka
|
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/consumer/ConsumerIntegrationTest.java
|
{
"start": 2390,
"end": 17998
}
|
class ____ {
@ClusterTests({
  @ClusterTest(serverProperties = {
    @ClusterConfigProperty(key = "offsets.topic.num.partitions", value = "1"),
    @ClusterConfigProperty(key = "offsets.topic.replication.factor", value = "1"),
    @ClusterConfigProperty(key = "group.coordinator.rebalance.protocols", value = "classic")
  })
})
// When the brokers only support the classic rebalance protocol, an async consumer
// configured with the CONSUMER group protocol must surface an
// UnsupportedVersionException carrying the dedicated "protocol not supported" message.
public void testAsyncConsumerWithConsumerProtocolDisabled(ClusterInstance clusterInstance) throws Exception {
  String topic = "test-topic";
  clusterInstance.createTopic(topic, 1, (short) 1);
  try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(Map.of(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, clusterInstance.bootstrapServers(),
      ConsumerConfig.GROUP_ID_CONFIG, "test-group",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName(),
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName(),
      ConsumerConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name()))) {
    // List.of for consistency with the other tests in this class.
    consumer.subscribe(List.of(topic));
    TestUtils.waitForCondition(() -> {
      try {
        consumer.poll(Duration.ofMillis(1000));
        return false;
      } catch (UnsupportedVersionException e) {
        return e.getMessage().equals(AbstractHeartbeatRequestManager.CONSUMER_PROTOCOL_NOT_SUPPORTED_MSG);
      }
    }, "Should get UnsupportedVersionException and how to revert to classic protocol");
  }
}
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = "offsets.topic.num.partitions", value = "1"),
@ClusterConfigProperty(key = "offsets.topic.replication.factor", value = "1"),
})
// Classic-protocol variant of the failed-listener recovery test below.
public void testFetchPartitionsAfterFailedListenerWithGroupProtocolClassic(ClusterInstance clusterInstance)
throws InterruptedException {
testFetchPartitionsAfterFailedListener(clusterInstance, GroupProtocol.CLASSIC);
}
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = "offsets.topic.num.partitions", value = "1"),
@ClusterConfigProperty(key = "offsets.topic.replication.factor", value = "1"),
})
// Consumer-protocol variant of the failed-listener recovery test below.
public void testFetchPartitionsAfterFailedListenerWithGroupProtocolConsumer(ClusterInstance clusterInstance)
throws InterruptedException {
testFetchPartitionsAfterFailedListener(clusterInstance, GroupProtocol.CONSUMER);
}
// A rebalance listener that throws on its first onPartitionsAssigned invocation must
// not permanently break the consumer: a later rebalance/assignment should succeed and
// the pre-produced record should eventually be polled.
private static void testFetchPartitionsAfterFailedListener(ClusterInstance clusterInstance, GroupProtocol groupProtocol)
throws InterruptedException {
var topic = "topic";
try (var producer = clusterInstance.producer(Map.of(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class,
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class))) {
producer.send(new ProducerRecord<>(topic, "key".getBytes(), "value".getBytes()));
}
try (var consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name()))) {
consumer.subscribe(List.of(topic), new ConsumerRebalanceListener() {
private int count = 0;
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
}
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
count++;
// Fail only the first assignment to simulate a transient listener error.
if (count == 1) throw new IllegalArgumentException("temporary error");
}
});
TestUtils.waitForCondition(() -> consumer.poll(Duration.ofSeconds(1)).count() == 1,
5000,
"failed to poll data");
}
}
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = "offsets.topic.num.partitions", value = "1"),
@ClusterConfigProperty(key = "offsets.topic.replication.factor", value = "1"),
})
// Classic-protocol variant of the always-failing-listener test below.
public void testFetchPartitionsWithAlwaysFailedListenerWithGroupProtocolClassic(ClusterInstance clusterInstance)
throws InterruptedException {
testFetchPartitionsWithAlwaysFailedListener(clusterInstance, GroupProtocol.CLASSIC);
}
@ClusterTest(serverProperties = {
@ClusterConfigProperty(key = "offsets.topic.num.partitions", value = "1"),
@ClusterConfigProperty(key = "offsets.topic.replication.factor", value = "1"),
})
// Consumer-protocol variant of the always-failing-listener test below.
public void testFetchPartitionsWithAlwaysFailedListenerWithGroupProtocolConsumer(ClusterInstance clusterInstance)
throws InterruptedException {
testFetchPartitionsWithAlwaysFailedListener(clusterInstance, GroupProtocol.CONSUMER);
}
// If onPartitionsAssigned always throws, the consumer must never deliver records:
// for 3 seconds every poll either returns zero records or raises the rebalance
// callback KafkaException.
private static void testFetchPartitionsWithAlwaysFailedListener(ClusterInstance clusterInstance, GroupProtocol groupProtocol)
throws InterruptedException {
var topic = "topic";
try (var producer = clusterInstance.producer(Map.of(
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class,
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class))) {
producer.send(new ProducerRecord<>(topic, "key".getBytes(), "value".getBytes()));
}
try (var consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name()))) {
consumer.subscribe(List.of(topic), new ConsumerRebalanceListener() {
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
}
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
throw new IllegalArgumentException("always failed");
}
});
// Poll repeatedly for ~3s; no iteration may ever return a record.
long startTimeMillis = System.currentTimeMillis();
long currentTimeMillis = System.currentTimeMillis();
while (currentTimeMillis < startTimeMillis + 3000) {
currentTimeMillis = System.currentTimeMillis();
try {
// In the async consumer, there is a possibility that the ConsumerRebalanceListenerCallbackCompletedEvent
// has not yet reached the application thread. And a poll operation might still succeed, but it
// should not return any records since none of the assigned topic partitions are marked as fetchable.
assertEquals(0, consumer.poll(Duration.ofSeconds(1)).count());
} catch (KafkaException ex) {
assertEquals("User rebalance callback throws an error", ex.getMessage());
}
Thread.sleep(300);
}
}
}
@ClusterTest(types = {Type.KRAFT}, brokers = 3)
public void testLeaderEpoch(ClusterInstance clusterInstance) throws Exception {
    // Verifies that ConsumerRecord.leaderEpoch() reflects the partition's leader
    // epoch, and that the epoch observed by the consumer increments after the
    // leader broker is shut down and leadership moves to a replica.
    String topic = "test-topic";
    // Replication factor 2 so a second replica can take over leadership.
    clusterInstance.createTopic(topic, 1, (short) 2);
    var msgNum = 10;
    sendMsg(clusterInstance, topic, msgNum);
    try (var consumer = clusterInstance.consumer()) {
        TopicPartition targetTopicPartition = new TopicPartition(topic, 0);
        List<TopicPartition> topicPartitions = List.of(targetTopicPartition);
        consumer.assign(topicPartitions);
        consumer.seekToBeginning(List.of(targetTopicPartition));
        int consumed = 0;
        // First batch: all records were written under the initial leader, epoch 0.
        while (consumed < msgNum) {
            ConsumerRecords<Object, Object> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<Object, Object> record : records) {
                assertTrue(record.leaderEpoch().isPresent());
                assertEquals(0, record.leaderEpoch().get());
            }
            consumed += records.count();
        }
        // make the leader epoch increment by shutdown the leader broker
        clusterInstance.shutdownBroker(clusterInstance.getLeaderBrokerId(targetTopicPartition));
        sendMsg(clusterInstance, topic, msgNum);
        consumed = 0;
        // Second batch: written after leader failover, so epoch must now be 1.
        while (consumed < msgNum) {
            ConsumerRecords<Object, Object> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<Object, Object> record : records) {
                assertTrue(record.leaderEpoch().isPresent());
                assertEquals(1, record.leaderEpoch().get());
            }
            consumed += records.count();
        }
    }
}
// Exercises a rack-aware assignor: each consumer declares a client.rack matching
// one broker's broker.rack, and partitions are expected to be assigned to the
// consumer whose rack matches the partition leader's rack.
@ClusterTest(
    types = {Type.KRAFT},
    brokers = 3,
    serverProperties = {
        @ClusterConfigProperty(id = 0, key = "broker.rack", value = "rack0"),
        @ClusterConfigProperty(id = 1, key = "broker.rack", value = "rack1"),
        @ClusterConfigProperty(id = 2, key = "broker.rack", value = "rack2"),
        @ClusterConfigProperty(key = GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, value = "org.apache.kafka.clients.consumer.RackAwareAssignor")
    }
)
public void testRackAwareAssignment(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
    String topic = "test-topic";
    // Three consumers in the same group, one per rack, using the CONSUMER protocol
    // so the server-side RackAwareAssignor configured above performs the assignment.
    try (Admin admin = clusterInstance.admin();
         Producer<byte[], byte[]> producer = clusterInstance.producer();
         Consumer<byte[], byte[]> consumer0 = clusterInstance.consumer(Map.of(
             ConsumerConfig.GROUP_ID_CONFIG, "group0",
             ConsumerConfig.CLIENT_RACK_CONFIG, "rack0",
             ConsumerConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name()
         ));
         Consumer<byte[], byte[]> consumer1 = clusterInstance.consumer(Map.of(
             ConsumerConfig.GROUP_ID_CONFIG, "group0",
             ConsumerConfig.CLIENT_RACK_CONFIG, "rack1",
             ConsumerConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name()
         ));
         Consumer<byte[], byte[]> consumer2 = clusterInstance.consumer(Map.of(
             ConsumerConfig.GROUP_ID_CONFIG, "group0",
             ConsumerConfig.CLIENT_RACK_CONFIG, "rack2",
             ConsumerConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name()
         ))
    ) {
        // Create a new topic with 1 partition on broker 0.
        admin.createTopics(List.of(new NewTopic(topic, Map.of(0, List.of(0)))));
        clusterInstance.waitTopicCreation(topic, 1);

        producer.send(new ProducerRecord<>(topic, "key".getBytes(), "value".getBytes()));
        producer.flush();

        consumer0.subscribe(List.of(topic));
        consumer1.subscribe(List.of(topic));
        consumer2.subscribe(List.of(topic));
        // Partition 0 lives on broker 0 (rack0), so only consumer0 should own it.
        TestUtils.waitForCondition(() -> {
            consumer0.poll(Duration.ofMillis(1000));
            consumer1.poll(Duration.ofMillis(1000));
            consumer2.poll(Duration.ofMillis(1000));
            return consumer0.assignment().equals(Set.of(new TopicPartition(topic, 0))) &&
                consumer1.assignment().isEmpty() &&
                consumer2.assignment().isEmpty();
        }, "Consumer 0 should be assigned to topic partition 0");

        // Add a new partition 1 and 2 to broker 1.
        admin.createPartitions(
            Map.of(
                topic,
                NewPartitions.increaseTo(3, List.of(List.of(1), List.of(1)))
            )
        );
        clusterInstance.waitTopicCreation(topic, 3);
        // Partitions 1 and 2 lead on broker 1 (rack1) and should go to consumer1.
        TestUtils.waitForCondition(() -> {
            consumer0.poll(Duration.ofMillis(1000));
            consumer1.poll(Duration.ofMillis(1000));
            consumer2.poll(Duration.ofMillis(1000));
            return consumer0.assignment().equals(Set.of(new TopicPartition(topic, 0))) &&
                consumer1.assignment().equals(Set.of(new TopicPartition(topic, 1), new TopicPartition(topic, 2))) &&
                consumer2.assignment().isEmpty();
        }, "Consumer 1 should be assigned to topic partition 1 and 2");

        // Add a new partition 3, 4, and 5 to broker 2.
        admin.createPartitions(
            Map.of(
                topic,
                NewPartitions.increaseTo(6, List.of(List.of(2), List.of(2), List.of(2)))
            )
        );
        clusterInstance.waitTopicCreation(topic, 6);
        // Partitions 3-5 lead on broker 2 (rack2) and should go to consumer2.
        TestUtils.waitForCondition(() -> {
            consumer0.poll(Duration.ofMillis(1000));
            consumer1.poll(Duration.ofMillis(1000));
            consumer2.poll(Duration.ofMillis(1000));
            return consumer0.assignment().equals(Set.of(new TopicPartition(topic, 0))) &&
                consumer1.assignment().equals(Set.of(new TopicPartition(topic, 1), new TopicPartition(topic, 2))) &&
                consumer2.assignment().equals(Set.of(new TopicPartition(topic, 3), new TopicPartition(topic, 4), new TopicPartition(topic, 5)));
        }, "Consumer 2 should be assigned to topic partition 3, 4, and 5");

        // Change partitions to different brokers.
        // partition 0 -> broker 2
        // partition 1 -> broker 2
        // partition 2 -> broker 2
        // partition 3 -> broker 1
        // partition 4 -> broker 1
        // partition 5 -> broker 0
        admin.alterPartitionReassignments(Map.of(
            new TopicPartition(topic, 0), Optional.of(new NewPartitionReassignment(List.of(2))),
            new TopicPartition(topic, 1), Optional.of(new NewPartitionReassignment(List.of(2))),
            new TopicPartition(topic, 2), Optional.of(new NewPartitionReassignment(List.of(2))),
            new TopicPartition(topic, 3), Optional.of(new NewPartitionReassignment(List.of(1))),
            new TopicPartition(topic, 4), Optional.of(new NewPartitionReassignment(List.of(1))),
            new TopicPartition(topic, 5), Optional.of(new NewPartitionReassignment(List.of(0)))
        )).all().get();
        // After reassignment the rack-aware mapping must follow the new leaders.
        TestUtils.waitForCondition(() -> {
            consumer0.poll(Duration.ofMillis(1000));
            consumer1.poll(Duration.ofMillis(1000));
            consumer2.poll(Duration.ofMillis(1000));
            return consumer0.assignment().equals(Set.of(new TopicPartition(topic, 5))) &&
                consumer1.assignment().equals(Set.of(new TopicPartition(topic, 3), new TopicPartition(topic, 4))) &&
                consumer2.assignment().equals(Set.of(new TopicPartition(topic, 0), new TopicPartition(topic, 1), new TopicPartition(topic, 2)));
        }, "Consumer with topic partition mapping should be 0 -> 5 | 1 -> 3, 4 | 2 -> 0, 1, 2");
    }
}
// Produces sendMsgNum String records ("key_i" -> "value_i") to the given topic
// with acks=-1, flushing before close so every record is durably acknowledged.
private void sendMsg(ClusterInstance clusterInstance, String topic, int sendMsgNum) {
    try (var producer = clusterInstance.producer(Map.of(
            ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
            ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class,
            ProducerConfig.ACKS_CONFIG, "-1"))) {
        int sent = 0;
        while (sent < sendMsgNum) {
            producer.send(new ProducerRecord<>(topic, "key_" + sent, "value_" + sent));
            sent++;
        }
        producer.flush();
    }
}
}
|
ConsumerIntegrationTest
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/SubcomponentValidationTest.java
|
{
"start": 10332,
"end": 10676
}
|
class ____ {",
" @Provides String provideString(int i) {",
" return Integer.toString(i);",
" }",
"}");
Source componentFile =
CompilerTests.javaSource("test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"",
"@Component",
"
|
TestModule
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/DateDelegate.java
|
{
"start": 258,
"end": 794
}
|
class ____ implements RuntimeDelegate.HeaderDelegate<Date> {
public static final DateDelegate INSTANCE = new DateDelegate();
@Override
public Date fromString(String value) {
if (value == null)
throw new IllegalArgumentException("Param was null");
return DateUtil.parseDate(value);
}
@Override
public String toString(Date value) {
if (value == null)
throw new IllegalArgumentException("Param was null");
return DateUtil.formatDate(value);
}
}
|
DateDelegate
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/common/FrameMode.java
|
{
"start": 174,
"end": 216
}
|
enum ____ {
ROWS,
RANGE,
GROUPS
}
|
FrameMode
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/SortedSetDocValuesSyntheticFieldLoaderLayer.java
|
{
"start": 3508,
"end": 6166
}
|
class ____ implements DocValuesLoader, DocValuesFieldValues {
private final SortedSetDocValues dv;
private boolean hasValue;
ImmediateDocValuesLoader(SortedSetDocValues dv) {
this.dv = dv;
}
@Override
public boolean advanceToDoc(int docId) throws IOException {
return hasValue = dv.advanceExact(docId);
}
@Override
public int count() {
return hasValue ? dv.docValueCount() : 0;
}
@Override
public void write(XContentBuilder b) throws IOException {
if (hasValue == false) {
return;
}
for (int i = 0; i < dv.docValueCount(); i++) {
BytesRef c = convert(dv.lookupOrd(dv.nextOrd()));
b.utf8Value(c.bytes, c.offset, c.length);
}
}
}
/**
* Load all ordinals for all docs up front and resolve to their string
* values in order. This should be much more disk-friendly than
* {@link ImmediateDocValuesLoader} because it resolves the ordinals in order and
* marginally more cpu friendly because it resolves the ordinals one time.
*/
private SingletonDocValuesLoader buildSingletonDocValuesLoader(SortedDocValues singleton, int[] docIdsInLeaf) throws IOException {
int[] ords = new int[docIdsInLeaf.length];
int found = 0;
for (int d = 0; d < docIdsInLeaf.length; d++) {
if (false == singleton.advanceExact(docIdsInLeaf[d])) {
ords[d] = -1;
continue;
}
ords[d] = singleton.ordValue();
found++;
}
if (found == 0) {
return null;
}
int[] sortedOrds = ords.clone();
Arrays.sort(sortedOrds);
int unique = 0;
int prev = -1;
for (int ord : sortedOrds) {
if (ord != prev) {
prev = ord;
unique++;
}
}
int[] uniqueOrds = new int[unique];
BytesRef[] converted = new BytesRef[unique];
unique = 0;
prev = -1;
for (int ord : sortedOrds) {
if (ord != prev) {
prev = ord;
uniqueOrds[unique] = ord;
converted[unique] = preserve(convert(singleton.lookupOrd(ord)));
unique++;
}
}
logger.debug("loading [{}] on [{}] docs covering [{}] ords", name, docIdsInLeaf.length, uniqueOrds.length);
return new SingletonDocValuesLoader(docIdsInLeaf, ords, uniqueOrds, converted);
}
private static
|
ImmediateDocValuesLoader
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java
|
{
"start": 360,
"end": 941
}
|
enum ____ {
CLI,
PLAIN,
JDBC,
ODBC;
public static Mode fromString(String mode) {
if (mode == null || mode.isEmpty()) {
return PLAIN;
}
return Mode.valueOf(mode.toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return this.name().toLowerCase(Locale.ROOT);
}
public static boolean isDriver(Mode mode) {
return mode == JDBC || mode == ODBC;
}
public static boolean isDedicatedClient(Mode mode) {
return mode == JDBC || mode == ODBC || mode == CLI;
}
}
|
Mode
|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/test/java/io/quarkus/test/junit/util/QuarkusTestProfileAwareClassOrdererTest.java
|
{
"start": 1549,
"end": 14434
}
|
class ____ {
@Mock
ClassOrdererContext contextMock;
@Test
void singleClass() {
doReturn(Collections.singletonList(descriptorMock(Test01.class)))
.when(contextMock)
.getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer().orderClasses(contextMock);
verify(contextMock, never()).getConfigurationParameter(anyString());
}
@Test
void multipleClassloaders() throws IOException {
ByteClassLoader a = new ByteClassLoader("a");
Class cla1 = a.cloneClass(Test08.class);
Class cla2 = a.cloneClass(Test09.class);
ByteClassLoader b = new ByteClassLoader("b");
Class clb1 = b.cloneClass(Test07.class);
Class clb2 = b.cloneClass(Test10.class);
ClassDescriptor quarkusTesta1Desc = quarkusDescriptorMock(cla1, null);
ClassDescriptor quarkusTesta2Desc = quarkusDescriptorMock(cla2, null);
ClassDescriptor quarkusTestb1Desc = quarkusDescriptorMock(clb1, null);
ClassDescriptor quarkusTestb2Desc = quarkusDescriptorMock(clb2, null);
List<ClassDescriptor> input = Arrays.asList(
quarkusTestb2Desc,
quarkusTesta1Desc,
quarkusTestb1Desc,
quarkusTesta2Desc);
doReturn(input).when(contextMock)
.getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer().orderClasses(contextMock);
assertThat(input).containsExactly(
quarkusTesta1Desc,
quarkusTesta2Desc,
quarkusTestb1Desc,
quarkusTestb2Desc);
}
@Test
void multipleClassloadersAndSecondaryOrderer() throws IOException {
ByteClassLoader a = new ByteClassLoader("a");
Class cla1 = a.cloneClass(Test07.class);
Class cla2 = a.cloneClass(Test08.class);
Class cla3 = a.cloneClass(Test09.class);
ByteClassLoader b = new ByteClassLoader("b");
Class clb1 = b.cloneClass(Test10.class);
ClassDescriptor quarkusTesta1Desc = quarkusDescriptorMock(cla1, null, 4);
ClassDescriptor quarkusTesta2Desc = quarkusDescriptorMock(cla2, null, 6);
ClassDescriptor quarkusTesta3Desc = quarkusDescriptorMock(cla3, null, 2);
ClassDescriptor quarkusTestb1Desc = quarkusDescriptorMock(clb1, null, 1);
List<ClassDescriptor> input = Arrays.asList(
quarkusTesta3Desc,
quarkusTesta1Desc,
quarkusTesta2Desc,
quarkusTestb1Desc);
doReturn(input).when(contextMock)
.getClassDescriptors();
// change secondary orderer from ClassName to OrderAnnotation
new QuarkusTestProfileAwareClassOrderer("20_", "40_", "45_", "50_", "60_",
Optional.of(ClassOrderer.OrderAnnotation.class.getName())).orderClasses(contextMock);
assertThat(input).containsExactly(
quarkusTesta3Desc,
quarkusTesta1Desc,
quarkusTesta2Desc,
quarkusTestb1Desc);
}
@Test
void allVariants() {
ClassDescriptor quarkusTest1Desc = quarkusDescriptorMock(Test01.class, null);
ClassDescriptor quarkusTest2b = quarkusDescriptorMock(Test02b.class, Manager3.class, false,
WithTestResource.class);
ClassDescriptor quarkusTest2a = quarkusDescriptorMock(Test02a.class,
Manager3.class, false, QuarkusTestResource.class);
ClassDescriptor quarkusTest2Desc = quarkusDescriptorMock(Test02.class, Manager3.class, false, WithTestResource.class);
ClassDescriptor quarkusTest3Desc = quarkusDescriptorMock(Test03.class, Manager3.class, false,
QuarkusTestResource.class);
ClassDescriptor quarkusTest3aDesc = quarkusDescriptorMock(Test03a.class, Manager4.class, false, WithTestResource.class);
ClassDescriptor quarkusTest3bDesc = quarkusDescriptorMock(Test03b.class, Manager4.class, false,
QuarkusTestResource.class);
ClassDescriptor quarkusTest1aDesc = quarkusDescriptorMock(Test01a.class, null);
ClassDescriptor quarkusTestWithProfile1Desc = quarkusDescriptorMock(Test04.class, Profile1.class);
ClassDescriptor quarkusTestWithProfile2Test4Desc = quarkusDescriptorMock(Test05.class, Profile2.class);
ClassDescriptor quarkusTestWithProfile2Test5Desc = quarkusDescriptorMock(Test06.class, Profile2.class);
ClassDescriptor quarkusTestWithRestrictedResourceDesc = quarkusDescriptorMock(Test07.class, Manager2.class, true,
WithTestResource.class);
ClassDescriptor quarkusTestWithRestrictedResourceDesc2 = quarkusDescriptorMock(Test07.class, Manager2.class, true,
QuarkusTestResource.class);
ClassDescriptor quarkusTestWithMetaResourceDesc = quarkusDescriptorMock(Test08.class, Manager1.class, false,
WithTestResource.class);
ClassDescriptor quarkusTestWithMetaResourceDesc2 = quarkusDescriptorMock(Test08.class, Manager1.class, false,
QuarkusTestResource.class);
ClassDescriptor nonQuarkusTest1Desc = descriptorMock(Test09.class);
ClassDescriptor nonQuarkusTest2Desc = descriptorMock(Test10.class);
List<ClassDescriptor> input = Arrays.asList(
quarkusTestWithRestrictedResourceDesc,
nonQuarkusTest2Desc,
quarkusTest2Desc,
quarkusTestWithRestrictedResourceDesc2,
quarkusTestWithProfile2Test5Desc,
quarkusTest1aDesc,
nonQuarkusTest1Desc,
quarkusTestWithMetaResourceDesc,
quarkusTest1Desc,
quarkusTestWithProfile2Test4Desc,
quarkusTestWithMetaResourceDesc2,
quarkusTest3Desc,
quarkusTest2b,
quarkusTestWithProfile1Desc,
quarkusTest2a,
quarkusTest3bDesc,
quarkusTest3aDesc);
doReturn(input).when(contextMock).getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer().orderClasses(contextMock);
assertThat(input).containsExactly(
quarkusTest1Desc,
quarkusTest1aDesc,
quarkusTest2Desc,
quarkusTest2a,
quarkusTest2b,
quarkusTest3Desc,
quarkusTest3aDesc,
quarkusTest3bDesc,
quarkusTestWithProfile1Desc,
quarkusTestWithProfile2Test4Desc,
quarkusTestWithProfile2Test5Desc,
quarkusTestWithRestrictedResourceDesc,
quarkusTestWithRestrictedResourceDesc2,
quarkusTestWithMetaResourceDesc,
quarkusTestWithMetaResourceDesc2,
nonQuarkusTest1Desc,
nonQuarkusTest2Desc);
}
@Test
void configuredPrefix() {
ClassDescriptor quarkusTestDesc = quarkusDescriptorMock(Test01.class, null);
ClassDescriptor nonQuarkusTestDesc = descriptorMock(Test01a.class);
List<ClassDescriptor> input = Arrays.asList(quarkusTestDesc, nonQuarkusTestDesc);
doReturn(input).when(contextMock)
.getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer("20_", "30_", "40_", "45_", "01_", Optional.empty()).orderClasses(contextMock);
assertThat(input).containsExactly(nonQuarkusTestDesc, quarkusTestDesc);
}
@Test
void secondaryOrderer() {
ClassDescriptor quarkusTest1Desc = quarkusDescriptorMock(Test01.class, null);
ClassDescriptor nonQuarkusTest1Desc = descriptorMock(Test09.class);
ClassDescriptor nonQuarkusTest2Desc = descriptorMock(Test10.class);
var orderMock = Mockito.mock(Order.class);
when(orderMock.value()).thenReturn(1);
when(nonQuarkusTest2Desc.findAnnotation(Order.class)).thenReturn(Optional.of(orderMock));
List<ClassDescriptor> input = Arrays.asList(
nonQuarkusTest1Desc,
nonQuarkusTest2Desc,
quarkusTest1Desc);
doReturn(input).when(contextMock)
.getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer("20_", "30_", "40_", "45_", "60_",
Optional.of(ClassOrderer.OrderAnnotation.class.getName())).orderClasses(contextMock);
assertThat(input).containsExactly(
quarkusTest1Desc,
nonQuarkusTest2Desc,
nonQuarkusTest1Desc);
}
@Test
void customOrderKey() {
ClassDescriptor quarkusTest1Desc = quarkusDescriptorMock(Test01.class, null);
ClassDescriptor quarkusTest2Desc = quarkusDescriptorMock(Test01a.class, null);
List<ClassDescriptor> input = Arrays.asList(quarkusTest1Desc, quarkusTest2Desc);
doReturn(input).when(contextMock)
.getClassDescriptors();
new QuarkusTestProfileAwareClassOrderer() {
@Override
protected Optional<String> getCustomOrderKey(ClassDescriptor classDescriptor, ClassOrdererContext context,
String secondaryOrderSuffix) {
return classDescriptor == quarkusTest2Desc ? Optional.of("00_first") : Optional.empty();
}
}.orderClasses(contextMock);
assertThat(input).containsExactly(quarkusTest2Desc, quarkusTest1Desc);
}
private ClassDescriptor descriptorMock(Class<?> testClass) {
ClassDescriptor mock = Mockito.mock(ClassDescriptor.class,
withSettings().strictness(Strictness.LENIENT)
.name(testClass.getSimpleName()));
doReturn(testClass).when(mock)
.getTestClass();
return mock;
}
private ClassDescriptor quarkusDescriptorMock(Class<?> testClass, Class<? extends QuarkusTestProfile> profileClass) {
return quarkusDescriptorMock(testClass, profileClass, -1);
}
private ClassDescriptor quarkusDescriptorMock(Class<?> testClass, Class<? extends QuarkusTestProfile> profileClass,
int order) {
ClassDescriptor mock = descriptorMock(testClass);
when(mock.isAnnotated(QuarkusTest.class)).thenReturn(true);
if (profileClass != null) {
TestProfile profileMock = Mockito.mock(TestProfile.class);
doReturn(profileClass).when(profileMock)
.value();
when(mock.findAnnotation(TestProfile.class)).thenReturn(Optional.of(profileMock));
}
if (order > 0) {
Order orderMock = Mockito.mock(Order.class);
doReturn(order).when(orderMock).value();
when(mock.findAnnotation(Order.class)).thenReturn(Optional.of(orderMock));
}
return mock;
}
private <A extends Annotation> ClassDescriptor quarkusDescriptorMock(Class<?> testClass,
Class<? extends QuarkusTestResourceLifecycleManager> managerClass, boolean restrictToAnnotatedClass,
Class<A> testResourceClass) {
ClassDescriptor mock = descriptorMock(testClass);
when(mock.isAnnotated(QuarkusTest.class)).thenReturn(true);
if (WithTestResource.class.isAssignableFrom(testResourceClass)) {
quarkusWithTestResourceMock(mock, managerClass, restrictToAnnotatedClass);
} else if (QuarkusTestResource.class.isAssignableFrom(testResourceClass)) {
quarkusTestResourceMock(mock, managerClass, restrictToAnnotatedClass);
}
return mock;
}
private void quarkusWithTestResourceMock(ClassDescriptor mock,
Class<? extends QuarkusTestResourceLifecycleManager> managerClass, boolean restrictToAnnotatedClass) {
WithTestResource withResourceMock = Mockito.mock(WithTestResource.class, withSettings().strictness(Strictness.LENIENT));
doReturn(managerClass).when(withResourceMock).value();
when(withResourceMock.scope()).thenReturn(
restrictToAnnotatedClass ? TestResourceScope.RESTRICTED_TO_CLASS : TestResourceScope.MATCHING_RESOURCES);
when(mock.findRepeatableAnnotations(WithTestResource.class)).thenReturn(List.of(withResourceMock));
}
private void quarkusTestResourceMock(ClassDescriptor mock,
Class<? extends QuarkusTestResourceLifecycleManager> managerClass, boolean restrictToAnnotatedClass) {
QuarkusTestResource testResourceMock = Mockito.mock(QuarkusTestResource.class,
withSettings().strictness(Strictness.LENIENT));
doReturn(managerClass).when(testResourceMock)
.value();
when(testResourceMock.restrictToAnnotatedClass()).thenReturn(restrictToAnnotatedClass);
when(mock.findRepeatableAnnotations(QuarkusTestResource.class)).thenReturn(List.of(testResourceMock));
}
private static
|
QuarkusTestProfileAwareClassOrdererTest
|
java
|
apache__spark
|
sql/core/src/main/java/org/apache/spark/sql/execution/RecordBinaryComparator.java
|
{
"start": 996,
"end": 3334
}
|
class ____ extends RecordComparator {
private static final boolean UNALIGNED = Platform.unaligned();
private static final boolean LITTLE_ENDIAN =
ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);
@Override
public int compare(
Object leftObj, long leftOff, int leftLen, Object rightObj, long rightOff, int rightLen) {
int i = 0;
// If the arrays have different length, the longer one is larger.
if (leftLen != rightLen) {
return leftLen - rightLen;
}
// The following logic uses `leftLen` as the length for both `leftObj` and `rightObj`, since
// we have guaranteed `leftLen` == `rightLen`.
// check if stars align and we can get both offsets to be aligned
if (!UNALIGNED && ((leftOff % 8) == (rightOff % 8))) {
while ((leftOff + i) % 8 != 0 && i < leftLen) {
final int v1 = Platform.getByte(leftObj, leftOff + i);
final int v2 = Platform.getByte(rightObj, rightOff + i);
if (v1 != v2) {
return (v1 & 0xff) > (v2 & 0xff) ? 1 : -1;
}
i += 1;
}
}
// for architectures that support unaligned accesses, chew it up 8 bytes at a time
if (UNALIGNED || (((leftOff + i) % 8 == 0) && ((rightOff + i) % 8 == 0))) {
while (i <= leftLen - 8) {
long v1 = Platform.getLong(leftObj, leftOff + i);
long v2 = Platform.getLong(rightObj, rightOff + i);
if (v1 != v2) {
if (LITTLE_ENDIAN) {
// if read as little-endian, we have to reverse bytes so that the long comparison result
// is equivalent to byte-by-byte comparison result.
// See discussion in https://github.com/apache/spark/pull/26548#issuecomment-554645859
v1 = Long.reverseBytes(v1);
v2 = Long.reverseBytes(v2);
}
return Long.compareUnsigned(v1, v2);
}
i += 8;
}
}
// this will finish off the unaligned comparisons, or do the entire aligned comparison
// whichever is needed.
while (i < leftLen) {
final int v1 = Platform.getByte(leftObj, leftOff + i);
final int v2 = Platform.getByte(rightObj, rightOff + i);
if (v1 != v2) {
return (v1 & 0xff) > (v2 & 0xff) ? 1 : -1;
}
i += 1;
}
// The two arrays are equal.
return 0;
}
}
|
RecordBinaryComparator
|
java
|
elastic__elasticsearch
|
plugins/discovery-gce/src/yamlRestTest/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java
|
{
"start": 1064,
"end": 2441
}
|
class ____ extends ESClientYamlSuiteTestCase {
public static TemporaryFolder temporaryFolder = new TemporaryFolder();
public static GCEFixture gceFixture = new GCEFixture(() -> temporaryFolder.getRoot().toPath().resolve("unicast_hosts.txt"));
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.plugin("discovery-gce")
.nodes(3)
.node(0, n -> n.withConfigDir(() -> temporaryFolder.getRoot().toPath()))
.systemProperty("es.allow_reroute_gce_settings", "true")
.environment("GCE_METADATA_HOST", () -> gceFixture.getHostAndPort())
.setting("discovery.seed_providers", "gce")
.setting("cloud.gce.host", () -> gceFixture.getAddress())
.setting("cloud.gce.root_url", () -> gceFixture.getAddress())
.build();
@ClassRule
public static RuleChain ruleChain = RuleChain.outerRule(temporaryFolder).around(gceFixture).around(cluster);
public DiscoveryGceClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
|
DiscoveryGceClientYamlTestSuiteIT
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/handler/NoUnboundElementsBindHandlerTests.java
|
{
"start": 8023,
"end": 8228
}
|
class ____ {
private @Nullable List<String> foo;
@Nullable List<String> getFoo() {
return this.foo;
}
void setFoo(@Nullable List<String> foo) {
this.foo = foo;
}
}
static
|
ExampleWithList
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/memory/GarbageCollectedMemoryPool.java
|
{
"start": 5833,
"end": 6024
}
|
class ____ {
private final int sizeBytes;
private BufferMetadata(int sizeBytes) {
this.sizeBytes = sizeBytes;
}
}
private static final
|
BufferMetadata
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/inject/BeanConfiguration.java
|
{
"start": 2308,
"end": 3675
}
|
class ____ within this bean configuration.
*
* @param cls The class
* @return True if it is
*/
default boolean isWithin(Class cls) {
return isWithin(cls.getName());
}
/**
* Programmatically create a bean configuration for the given package.
* @param thePackage The package
* @param condition The condition
* @return The bean configuration
* @since 4.8.0
*/
static @NonNull BeanConfiguration of(@NonNull Package thePackage, @NonNull Predicate<BeanContext> condition) {
return of(thePackage.getName(), condition);
}
/**
* Programmatically create a bean configuration for the given package.
* @param thePackage The package
* @param condition The condition
* @return The bean configuration
* @since 4.8.0
*/
static @NonNull BeanConfiguration of(@NonNull String thePackage, @NonNull Predicate<BeanContext> condition) {
return new ConditionalBeanConfiguration(thePackage, condition);
}
/**
* Programmatically disable beans within a package.
*
* @param thePackage The package name
* @return The bean configuration
* @since 4.8.0
*/
static @NonNull BeanConfiguration disabled(@NonNull String thePackage) {
return new ConditionalBeanConfiguration(thePackage, (beanContext -> false));
}
}
|
is
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/IndexOfChar.java
|
{
"start": 1751,
"end": 2866
}
|
class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> MATCHER =
MethodMatchers.instanceMethod()
.onExactClass(Suppliers.STRING_TYPE)
.namedAnyOf("indexOf", "lastIndexOf")
.withParameters("int", "int");
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!MATCHER.matches(tree, state)) {
return NO_MATCH;
}
List<? extends ExpressionTree> arguments = tree.getArguments();
Symtab syms = state.getSymtab();
Types types = state.getTypes();
if (types.isSameType(types.unboxedTypeOrType(getType(arguments.get(0))), syms.intType)
&& types.isSameType(types.unboxedTypeOrType(getType(arguments.get(1))), syms.charType)) {
return describeMatch(
tree,
SuggestedFix.builder()
.replace(arguments.get(0), state.getSourceForNode(arguments.get(1)))
.replace(arguments.get(1), state.getSourceForNode(arguments.get(0)))
.build());
}
return NO_MATCH;
}
}
|
IndexOfChar
|
java
|
FasterXML__jackson-core
|
src/test/java/tools/jackson/core/unittest/io/schubfach/FloatToDecimalChecker.java
|
{
"start": 1379,
"end": 6615
}
|
class ____ extends ToDecimalChecker {
static final int P =
Integer.numberOfTrailingZeros(floatToRawIntBits(3)) + 2;
private static final int W = (SIZE - 1) - (P - 1);
static final int Q_MIN = (-1 << W - 1) - P + 3;
static final int Q_MAX = (1 << W - 1) - P;
static final int C_MIN = 1 << P - 1;
static final int C_MAX = (1 << P) - 1;
static final int K_MIN = MathUtils.flog10pow2(Q_MIN);
static final int K_MAX = MathUtils.flog10pow2(Q_MAX);
static final int H = MathUtils.flog10pow2(P) + 2;
static final float MIN_VALUE = StrictMath.scalb(1.0f, Q_MIN);
static final float MIN_NORMAL = StrictMath.scalb((float) C_MIN, Q_MIN);
static final float MAX_VALUE = StrictMath.scalb((float) C_MAX, Q_MAX);
static final int E_MIN = e(MIN_VALUE);
static final int E_MAX = e(MAX_VALUE);
static final long C_TINY = cTiny(Q_MIN, K_MIN);
private float v;
private final int originalBits;
private FloatToDecimalChecker(float v, String s) {
super(s);
this.v = v;
originalBits = floatToRawIntBits(v);
}
@Override
BigDecimal toBigDecimal() {
return new BigDecimal(v);
}
@Override
boolean recovers(BigDecimal b) {
return b.floatValue() == v;
}
@Override
String hexBits() {
return String.format("0x%01X__%02X__%02X_%04X",
(originalBits >>> 31) & 0x1,
(originalBits >>> 23) & 0xFF,
(originalBits >>> 16) & 0x7F,
originalBits & 0xFFFF);
}
@Override
boolean recovers(String s) {
return parseFloat(s) == v;
}
@Override
int minExp() {
return E_MIN;
}
@Override
int maxExp() {
return E_MAX;
}
@Override
int maxLen10() {
return H;
}
@Override
boolean isZero() {
return v == 0;
}
@Override
boolean isInfinity() {
return v == POSITIVE_INFINITY;
}
@Override
void negate() {
v = -v;
}
@Override
boolean isNegative() {
return originalBits < 0;
}
@Override
boolean isNaN() {
return Float.isNaN(v);
}
static void toDec(float v) {
// String s = Float.toString(v);
String s = FloatToDecimal.toString(v);
new FloatToDecimalChecker(v, s).validate();
}
/*
There are tons of doubles that are rendered incorrectly by the JDK.
While the renderings correctly round back to the original value,
they are longer than needed or are not the closest decimal to the double.
Here are just a very few examples.
*/
static final String[] Anomalies = {
// JDK renders these longer than needed.
"1.1754944E-38", "2.2E-44",
"1.0E16", "2.0E16", "3.0E16", "5.0E16", "3.0E17",
"3.2E18", "3.7E18", "3.7E16", "3.72E17",
// JDK does not render this as the closest.
"9.9E-44",
};
/*
Values are from
Paxson V, "A Program for Testing IEEE Decimal-Binary Conversion"
tables 16 and 17
*/
static final float[] PaxsonSignificands = {
12_676_506,
15_445_013,
13_734_123,
12_428_269,
12_676_506,
15_334_037,
11_518_287,
12_584_953,
15_961_084,
14_915_817,
10_845_484,
16_431_059,
16_093_626,
9_983_778,
12_745_034,
12_706_553,
11_005_028,
15_059_547,
16_015_691,
8_667_859,
14_855_922,
14_855_922,
10_144_164,
13_248_074,
};
static final int[] PaxsonExponents = {
-102,
-103,
86,
-138,
-130,
-146,
-41,
-145,
-125,
-146,
-102,
-61,
69,
25,
104,
72,
45,
71,
-99,
56,
-82,
-83,
-110,
95,
};
/*
Random floats over the whole range.
*/
private static void testRandom(int randomCount, Random r) {
for (int i = 0; i < randomCount; ++i) {
toDec(intBitsToFloat(r.nextInt()));
}
}
/*
All, really all, 2^32 possible floats. Takes between 90 and 120 minutes.
*/
public static void testAll() {
// Avoid wrapping around Integer.MAX_VALUE
int bits = Integer.MIN_VALUE;
for (; bits < Integer.MAX_VALUE; ++bits) {
toDec(intBitsToFloat(bits));
}
toDec(intBitsToFloat(bits));
}
/*
All positive 2^31 floats.
*/
public static void testPositive() {
// Avoid wrapping around Integer.MAX_VALUE
int bits = 0;
for (; bits < Integer.MAX_VALUE; ++bits) {
toDec(intBitsToFloat(bits));
}
toDec(intBitsToFloat(bits));
}
public static void randomNumberTests(int randomCount, Random r) {
testRandom(randomCount, r);
}
}
|
FloatToDecimalChecker
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/spi/CaseEnumTransformationStrategy.java
|
{
"start": 372,
"end": 1277
}
|
class ____ implements EnumTransformationStrategy {
private static final String UPPER = "upper";
private static final String LOWER = "lower";
private static final String CAPITAL = "capital";
private static final String CASE_ENUM_TRANSFORMATION_STRATEGIES = UPPER + ", " + LOWER + ", " + CAPITAL;
@Override
public String getStrategyName() {
return "case";
}
@Override
public String transform(String value, String configuration) {
switch ( configuration.toLowerCase() ) {
case UPPER:
return value.toUpperCase( Locale.ROOT );
case LOWER:
return value.toLowerCase( Locale.ROOT );
case CAPITAL:
return capitalize( value );
default:
throw new IllegalArgumentException(
"Unexpected configuration for
|
CaseEnumTransformationStrategy
|
java
|
alibaba__nacos
|
console/src/main/java/com/alibaba/nacos/console/filter/NacosConsoleAuthFilter.java
|
{
"start": 1070,
"end": 1659
}
|
class ____ extends AbstractWebAuthFilter {
private final NacosAuthConfig authConfig;
public NacosConsoleAuthFilter(NacosAuthConfig authConfig, ControllerMethodsCache methodsCache) {
super(authConfig, methodsCache);
this.authConfig = authConfig;
}
@Override
protected boolean isAuthEnabled() {
return authConfig.isAuthEnabled();
}
@Override
protected ServerIdentityResult checkServerIdentity(HttpServletRequest request, Secured secured) {
return ServerIdentityResult.noMatched();
}
}
|
NacosConsoleAuthFilter
|
java
|
apache__camel
|
components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/CallRecordingEndpointConfiguration.java
|
{
"start": 2171,
"end": 4090
}
|
class ____ extends TwilioConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "creator"), @ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "reader"), @ApiMethod(methodName = "updater")})
private String pathAccountSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "creator"), @ApiMethod(methodName = "creator"), @ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "reader"), @ApiMethod(methodName = "reader"), @ApiMethod(methodName = "updater"), @ApiMethod(methodName = "updater")})
private String pathCallSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "deleter"), @ApiMethod(methodName = "fetcher"), @ApiMethod(methodName = "updater")})
private String pathSid;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "updater")})
private com.twilio.rest.api.v2010.account.call.Recording.Status status;
public String getPathAccountSid() {
return pathAccountSid;
}
public void setPathAccountSid(String pathAccountSid) {
this.pathAccountSid = pathAccountSid;
}
public String getPathCallSid() {
return pathCallSid;
}
public void setPathCallSid(String pathCallSid) {
this.pathCallSid = pathCallSid;
}
public String getPathSid() {
return pathSid;
}
public void setPathSid(String pathSid) {
this.pathSid = pathSid;
}
public com.twilio.rest.api.v2010.account.call.Recording.Status getStatus() {
return status;
}
public void setStatus(com.twilio.rest.api.v2010.account.call.Recording.Status status) {
this.status = status;
}
}
|
CallRecordingEndpointConfiguration
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/complex_type/Item.java
|
{
"start": 732,
"end": 800
}
|
class ____ {
public int id;
public List<Property> properties;
}
|
Item
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoTileGridTiler.java
|
{
"start": 861,
"end": 9056
}
|
class ____ extends GeoGridTiler {
protected final int tiles;
private GeoTileGridTiler(int precision) {
super(precision);
tiles = 1 << precision;
}
/** Factory method to create GeoTileGridTiler objects */
public static GeoTileGridTiler makeGridTiler(int precision, GeoBoundingBox geoBoundingBox) {
return geoBoundingBox == null || geoBoundingBox.isUnbounded()
? new GeoTileGridTiler.UnboundedGeoTileGridTiler(precision)
: new GeoTileGridTiler.BoundedGeoTileGridTiler(precision, geoBoundingBox);
}
/** check if the provided tile is in the solution space of this tiler */
protected abstract boolean validTile(int x, int y, int z);
@Override
public long encode(double x, double y) {
return GeoTileUtils.longEncode(x, y, precision);
}
/**
* Sets the values of the long[] underlying {@link GeoShapeCellValues}.
*
* If the shape resides between <code>GeoTileUtils.NORMALIZED_LATITUDE_MASK</code> and 90 or
* between <code>GeoTileUtils.NORMALIZED_NEGATIVE_LATITUDE_MASK</code> and -90 degree latitudes, then
* the shape is not accounted for since geo-tiles are only defined within those bounds.
*
* @param values the bucket values
* @param geoValue the input shape
*
* @return the number of tiles set by the shape
*/
@Override
public int setValues(GeoShapeCellValues values, GeoShapeValues.GeoShapeValue geoValue) throws IOException {
final GeoShapeValues.BoundingBox bounds = geoValue.boundingBox();
assert bounds.minX() <= bounds.maxX();
// geo tiles are not defined at the extreme latitudes due to them
// tiling the world as a square.
if (bounds.bottom > GeoTileUtils.NORMALIZED_LATITUDE_MASK || bounds.top < GeoTileUtils.NORMALIZED_NEGATIVE_LATITUDE_MASK) {
return 0;
}
if (precision == 0) {
return setValuesByBruteForceScan(values, geoValue, 0, 0, 0, 0);
}
final int minXTile = GeoTileUtils.getXTile(bounds.minX(), tiles);
final int minYTile = GeoTileUtils.getYTile(bounds.maxY(), tiles);
final int maxXTile = GeoTileUtils.getXTile(bounds.maxX(), tiles);
final int maxYTile = GeoTileUtils.getYTile(bounds.minY(), tiles);
final long count = (long) (maxXTile - minXTile + 1) * (maxYTile - minYTile + 1);
if (count == 1) {
return setValue(values, minXTile, minYTile);
} else if (count <= 8L * precision) {
return setValuesByBruteForceScan(values, geoValue, minXTile, minYTile, maxXTile, maxYTile);
} else {
return setValuesByRasterization(0, 0, 0, values, 0, geoValue);
}
}
private GeoRelation relateTile(GeoShapeValues.GeoShapeValue geoValue, int xTile, int yTile, int precision) throws IOException {
if (validTile(xTile, yTile, precision)) {
final int tiles = 1 << precision;
final int minX = GeoEncodingUtils.encodeLongitude(GeoTileUtils.tileToLon(xTile, tiles));
final int maxX = GeoEncodingUtils.encodeLongitude(GeoTileUtils.tileToLon(xTile + 1, tiles));
final int minY = GeoEncodingUtils.encodeLatitude(GeoTileUtils.tileToLat(yTile + 1, tiles));
final int maxY = GeoEncodingUtils.encodeLatitude(GeoTileUtils.tileToLat(yTile, tiles));
return geoValue.relate(
minX,
maxX == Integer.MAX_VALUE ? maxX : maxX - 1,
minY == GeoTileUtils.ENCODED_NEGATIVE_LATITUDE_MASK ? minY : minY + 1,
maxY
);
}
return GeoRelation.QUERY_DISJOINT;
}
/**
* Sets a singular doc-value with the provided x/y.
*/
private int setValue(GeoShapeCellValues docValues, int xTile, int yTile) {
if (validTile(xTile, yTile, precision)) {
docValues.resizeCell(1);
docValues.add(0, GeoTileUtils.longEncodeTiles(precision, xTile, yTile));
return 1;
}
return 0;
}
/**
* Checks all tiles between minXTile/maxXTile and minYTile/maxYTile.
*/
// pack private for testing
int setValuesByBruteForceScan(
GeoShapeCellValues values,
GeoShapeValues.GeoShapeValue geoValue,
int minXTile,
int minYTile,
int maxXTile,
int maxYTile
) throws IOException {
int idx = 0;
for (int i = minXTile; i <= maxXTile; i++) {
for (int j = minYTile; j <= maxYTile; j++) {
final GeoRelation relation = relateTile(geoValue, i, j, precision);
if (relation != GeoRelation.QUERY_DISJOINT) {
values.resizeCell(idx + 1);
values.add(idx++, GeoTileUtils.longEncodeTiles(precision, i, j));
}
}
}
return idx;
}
/**
* Recursively search the tile tree, only following branches that intersect the geometry.
* Once at the required depth, then all cells that intersect are added to the collection.
*/
// pkg protected for testing
int setValuesByRasterization(
int xTile,
int yTile,
int zTile,
GeoShapeCellValues values,
int valuesIndex,
GeoShapeValues.GeoShapeValue geoValue
) throws IOException {
zTile++;
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 2; j++) {
final int nextX = 2 * xTile + i;
final int nextY = 2 * yTile + j;
final GeoRelation relation = relateTile(geoValue, nextX, nextY, zTile);
if (GeoRelation.QUERY_INSIDE == relation) {
if (zTile == precision) {
values.resizeCell(getNewSize(valuesIndex, 1));
values.add(valuesIndex++, GeoTileUtils.longEncodeTiles(zTile, nextX, nextY));
} else {
final int numTilesAtPrecision = getNumTilesAtPrecision(precision, zTile);
values.resizeCell(getNewSize(valuesIndex, numTilesAtPrecision + 1));
valuesIndex = setValuesForFullyContainedTile(nextX, nextY, zTile, values, valuesIndex);
}
} else if (GeoRelation.QUERY_DISJOINT != relation) {
if (zTile == precision) {
values.resizeCell(getNewSize(valuesIndex, 1));
values.add(valuesIndex++, GeoTileUtils.longEncodeTiles(zTile, nextX, nextY));
} else {
valuesIndex = setValuesByRasterization(nextX, nextY, zTile, values, valuesIndex, geoValue);
}
}
}
}
return valuesIndex;
}
private int getNewSize(int valuesIndex, int increment) {
final long newSize = (long) valuesIndex + increment;
if (newSize > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Tile aggregation array overflow");
}
return (int) newSize;
}
private int getNumTilesAtPrecision(int finalPrecision, int currentPrecision) {
final long numTilesAtPrecision = Math.min(1L << (2 * (finalPrecision - currentPrecision)), getMaxCells());
if (numTilesAtPrecision > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Tile aggregation array overflow");
}
return (int) numTilesAtPrecision;
}
protected abstract int setValuesForFullyContainedTile(int xTile, int yTile, int zTile, GeoShapeCellValues values, int valuesIndex);
protected int setValues(GeoShapeCellValues values, int valuesIndex, int minY, int maxY, int minX, int maxX) {
for (int i = minX; i < maxX; i++) {
for (int j = minY; j < maxY; j++) {
assert validTile(i, j, precision);
values.add(valuesIndex++, GeoTileUtils.longEncodeTiles(precision, i, j));
}
}
return valuesIndex;
}
/**
* Bounded geotile aggregation. It accepts tiles that intersects the provided bounds.
*/
private static
|
GeoTileGridTiler
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/logback/StructuredLogEncoderTests.java
|
{
"start": 6207,
"end": 6413
}
|
class ____ or one of the common formats: [ecs, gelf, logstash]");
}
private String encode(LoggingEvent event) {
return new String(this.encoder.encode(event), StandardCharsets.UTF_8);
}
static final
|
name
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/WrappedEntityCriteriaTest.java
|
{
"start": 2543,
"end": 2959
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
protected SimpleEntity() {
}
public SimpleEntity(String name) {
this.name = name;
}
public Integer getId() {
return id;
}
protected void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
public static
|
SimpleEntity
|
java
|
netty__netty
|
codec-http/src/test/java/io/netty/handler/codec/http/QueryStringEncoderTest.java
|
{
"start": 843,
"end": 2918
}
|
class ____ {
@Test
public void testDefaultEncoding() throws Exception {
QueryStringEncoder e;
e = new QueryStringEncoder("/foo");
e.addParam("a", "b=c");
assertEquals("/foo?a=b%3Dc", e.toString());
assertEquals(new URI("/foo?a=b%3Dc"), e.toUri());
e = new QueryStringEncoder("/foo/\u00A5");
e.addParam("a", "\u00A5");
assertEquals("/foo/\u00A5?a=%C2%A5", e.toString());
assertEquals(new URI("/foo/\u00A5?a=%C2%A5"), e.toUri());
e = new QueryStringEncoder("/foo/\u00A5");
e.addParam("a", "abc\u00A5");
assertEquals("/foo/\u00A5?a=abc%C2%A5", e.toString());
assertEquals(new URI("/foo/\u00A5?a=abc%C2%A5"), e.toUri());
e = new QueryStringEncoder("/foo");
e.addParam("a", "1");
e.addParam("b", "2");
assertEquals("/foo?a=1&b=2", e.toString());
assertEquals(new URI("/foo?a=1&b=2"), e.toUri());
e = new QueryStringEncoder("/foo");
e.addParam("a", "1");
e.addParam("b", "");
e.addParam("c", null);
e.addParam("d", null);
assertEquals("/foo?a=1&b=&c&d", e.toString());
assertEquals(new URI("/foo?a=1&b=&c&d"), e.toUri());
e = new QueryStringEncoder("/foo");
e.addParam("test", "a~b");
assertEquals("/foo?test=a~b", e.toString());
assertEquals(new URI("/foo?test=a~b"), e.toUri());
}
@Test
public void testNonDefaultEncoding() throws Exception {
QueryStringEncoder e = new QueryStringEncoder("/foo/\u00A5", StandardCharsets.UTF_16);
e.addParam("a", "\u00A5");
assertEquals("/foo/\u00A5?a=%FE%FF%00%A5", e.toString());
assertEquals(new URI("/foo/\u00A5?a=%FE%FF%00%A5"), e.toUri());
}
@Test
public void testWhitespaceEncoding() throws Exception {
QueryStringEncoder e = new QueryStringEncoder("/foo");
e.addParam("a", "b c");
assertEquals("/foo?a=b%20c", e.toString());
assertEquals(new URI("/foo?a=b%20c"), e.toUri());
}
}
|
QueryStringEncoderTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/pool/TestRemoveAbandoned2.java
|
{
"start": 923,
"end": 3005
}
|
class ____ extends TestCase {
private MockDriver driver;
private DruidDataSource dataSource;
protected void setUp() throws Exception {
driver = new MockDriver();
dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setDriver(driver);
dataSource.setInitialSize(1);
dataSource.setMaxActive(2);
dataSource.setMaxIdle(2);
dataSource.setMinIdle(1);
dataSource.setMinEvictableIdleTimeMillis(300 * 1000); // 300 / 10
dataSource.setTimeBetweenEvictionRunsMillis(1000 * 300); // 180 / 10
dataSource.setRemoveAbandoned(true);
dataSource.setRemoveAbandonedTimeoutMillis(0);
dataSource.setTestWhileIdle(true);
dataSource.setTestOnBorrow(false);
dataSource.setValidationQuery("SELECT 1");
dataSource.setFilters("stat");
assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size());
}
protected void tearDown() throws Exception {
dataSource.close();
assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size());
}
public void test_removeAbandoned() throws Exception {
{
Connection conn = dataSource.getConnection();
conn.close();
}
assertEquals(0, dataSource.getActiveCount());
Thread abandonThread = new Thread("abandoned") {
public void run() {
for (; ; ) {
dataSource.removeAbandoned();
if (Thread.interrupted()) {
break;
}
}
}
};
abandonThread.start();
for (int i = 0; i < 1000 * 100; ++i) {
DruidPooledConnection conn = dataSource.getConnection();
conn.close();
}
assertEquals(0, dataSource.getActiveCount());
abandonThread.interrupt();
System.out.println("removeAbandondedCount : " + dataSource.getRemoveAbandonedCount());
}
}
|
TestRemoveAbandoned2
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnusedMethodTest.java
|
{
"start": 10516,
"end": 10950
}
|
class ____ {
@MethodSource("Test#parameters")
void test() {}
}
private static Stream<String> parameters() {
return Stream.of();
}
}
""")
.doTest();
}
@Test
public void overriddenMethodNotCalledWithinClass() {
helper
.addSourceLines(
"Test.java",
"""
|
NestedTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/exc/CustomExceptionDeser4071Test.java
|
{
"start": 563,
"end": 1906
}
|
class ____ extends Exception { }
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testCustomException() throws Exception
{
String exStr = MAPPER.writeValueAsString(new CustomThrowable4071());
assertNotNull(MAPPER.readValue(exStr, CustomThrowable4071.class));
}
@Test
public void testCustomRuntimeException() throws Exception
{
String exStr = MAPPER.writeValueAsString(new CustomRuntimeException4071());
assertNotNull(MAPPER.readValue(exStr, CustomRuntimeException4071.class));
}
@Test
public void testCustomCheckedException() throws Exception
{
String exStr = MAPPER.writeValueAsString(new CustomCheckedException4071());
assertNotNull(MAPPER.readValue(exStr, CustomCheckedException4071.class));
}
@Test
public void testDeserAsThrowable() throws Exception
{
_testDeserAsThrowable(MAPPER.writeValueAsString(new CustomRuntimeException4071()));
_testDeserAsThrowable(MAPPER.writeValueAsString(new CustomCheckedException4071()));
_testDeserAsThrowable(MAPPER.writeValueAsString(new CustomThrowable4071()));
}
private void _testDeserAsThrowable(String exStr) throws Exception
{
assertNotNull(MAPPER.readValue(exStr, Throwable.class));
}
}
|
CustomCheckedException4071
|
java
|
apache__maven
|
its/core-it-suite/src/test/resources/mng-4347/src/main/java/org/apache/maven/it/mng4347/App.java
|
{
"start": 1662,
"end": 1801
}
|
class ____ {
public static void main(String[] args) {
Importing.main(args);
System.out.println("Hello World!");
}
}
|
App
|
java
|
quarkusio__quarkus
|
extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/TransactionRunnerOptions.java
|
{
"start": 662,
"end": 1794
}
|
interface ____ extends TransactionRunner {
/**
* Sets the transaction timeout for transactions created by this runner. A value of zero refers to the system default.
*
* @throws IllegalArgumentException If seconds is negative
* @param seconds The timeout in seconds
* @return This builder
*/
TransactionRunnerOptions timeout(int seconds);
/**
* Provides an exception handler that can make a decision to rollback or commit based on the type of exception. If the
* predicate returns {@link TransactionExceptionResult#ROLLBACK} the transaction is rolled back,
* otherwise it is committed.
* <p>
* This exception will still be propagated to the caller, so this method should not log or perform any other actions other
* than determine what should happen to the current transaction.
* <p>
* By default, the exception is always rolled back.
*
* @param handler The exception handler
* @return This builder
*/
TransactionRunnerOptions exceptionHandler(Function<Throwable, TransactionExceptionResult> handler);
}
|
TransactionRunnerOptions
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/verification/VerificationOverTimeImplTest.java
|
{
"start": 638,
"end": 2194
}
|
class ____ {
@Mock private VerificationMode delegate;
private VerificationOverTimeImpl impl;
@Before
public void setUp() {
openMocks(this);
impl = new VerificationOverTimeImpl(10, 1000, delegate, true);
}
@Test
public void should_return_on_success() {
impl.verify(null);
verify(delegate).verify(null);
}
@Test
public void should_throw_mockito_assertion_error() {
MockitoAssertionError toBeThrown = new MockitoAssertionError("message");
doThrow(toBeThrown).when(delegate).verify(null);
assertThatThrownBy(
() -> {
impl.verify(null);
})
.isEqualTo(toBeThrown);
}
@Test
public void should_deal_with_junit_assertion_error() {
ArgumentsAreDifferent toBeThrown = new ArgumentsAreDifferent("message", "wanted", "actual");
doThrow(toBeThrown).when(delegate).verify(null);
assertThatThrownBy(
() -> {
impl.verify(null);
})
.isEqualTo(toBeThrown);
}
@Test
public void should_not_wrap_other_exceptions() {
RuntimeException toBeThrown = new RuntimeException();
doThrow(toBeThrown).when(delegate).verify(null);
assertThatThrownBy(
() -> {
impl.verify(null);
})
.isEqualTo(toBeThrown);
}
}
|
VerificationOverTimeImplTest
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/time/FastDatePrinterTest.java
|
{
"start": 1722,
"end": 17237
}
|
enum ____ {
India(INDIA, "+05", "+0530", "+05:30"), Greenwich(TimeZones.GMT, "Z", "Z", "Z"), NewYork(NEW_YORK, "-05", "-0500", "-05:00");
final TimeZone zone;
final String one;
final String two;
final String three;
Expected1806(final TimeZone zone, final String one, final String two, final String three) {
this.zone = zone;
this.one = one;
this.two = two;
this.three = three;
}
}
private static final String YYYY_MM_DD = "yyyy/MM/dd";
private static final TimeZone NEW_YORK = TimeZones.getTimeZone("America/New_York");
private static final TimeZone INDIA = TimeZones.getTimeZone("Asia/Calcutta");
private static final Locale SWEDEN = new Locale("sv", "SE");
private static Calendar initializeCalendar(final TimeZone tz) {
final Calendar cal = Calendar.getInstance(tz);
cal.set(Calendar.YEAR, 2001);
cal.set(Calendar.MONTH, 1); // not daylight savings
cal.set(Calendar.DAY_OF_MONTH, 4);
cal.set(Calendar.HOUR_OF_DAY, 12);
cal.set(Calendar.MINUTE, 8);
cal.set(Calendar.SECOND, 56);
cal.set(Calendar.MILLISECOND, 235);
return cal;
}
private DatePrinter getDateInstance(final int dateStyle, final Locale locale) {
return getInstance(AbstractFormatCache.getPatternForStyle(Integer.valueOf(dateStyle), null, locale), TimeZone.getDefault(), Locale.getDefault());
}
DatePrinter getInstance(final String format) {
return getInstance(format, TimeZone.getDefault(), Locale.getDefault());
}
private DatePrinter getInstance(final String format, final Locale locale) {
return getInstance(format, TimeZone.getDefault(), locale);
}
private DatePrinter getInstance(final String format, final TimeZone timeZone) {
return getInstance(format, timeZone, Locale.getDefault());
}
/**
* Override this method in derived tests to change the construction of instances
*
* @param format the format string to use
* @param timeZone the time zone to use
* @param locale the locale to use
* @return the DatePrinter to use for testing
*/
protected DatePrinter getInstance(final String format, final TimeZone timeZone, final Locale locale) {
return new FastDatePrinter(format, timeZone, locale);
}
@Test
void test1806() {
for (final Expected1806 trial : Expected1806.values()) {
final Calendar cal = initializeCalendar(trial.zone);
DatePrinter printer = getInstance("X", trial.zone);
assertEquals(trial.one, printer.format(cal));
printer = getInstance("XX", trial.zone);
assertEquals(trial.two, printer.format(cal));
printer = getInstance("XXX", trial.zone);
assertEquals(trial.three, printer.format(cal));
}
}
@Test
void test1806Argument() {
assertIllegalArgumentException(() -> getInstance("XXXX"));
}
@Test
void testAppendableOptions() {
final DatePrinter format = getInstance("yyyy-MM-dd HH:mm:ss.SSS Z", TimeZones.GMT);
final Calendar calendar = Calendar.getInstance();
final StringBuilder sb = new StringBuilder();
final String expected = format.format(calendar, sb).toString();
sb.setLength(0);
final Date date = calendar.getTime();
assertEquals(expected, format.format(date, sb).toString());
sb.setLength(0);
final long epoch = date.getTime();
assertEquals(expected, format.format(epoch, sb).toString());
}
@Test
void testDayNumberOfWeek() {
final DatePrinter printer = getInstance("u");
final Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY);
assertEquals("1", printer.format(calendar.getTime()));
calendar.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY);
assertEquals("6", printer.format(calendar.getTime()));
calendar.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
assertEquals("7", printer.format(calendar.getTime()));
}
@Test
void testEquals() {
final DatePrinter printer1 = getInstance(YYYY_MM_DD);
final DatePrinter printer2 = getInstance(YYYY_MM_DD);
assertEquals(printer1, printer2);
assertEquals(printer1.hashCode(), printer2.hashCode());
assertNotEquals(printer1, new Object());
}
@DefaultLocale(language = "en", country = "US")
@DefaultTimeZone("America/New_York")
@Test
void testFormat() {
final GregorianCalendar cal1 = new GregorianCalendar(2003, 0, 10, 15, 33, 20);
final GregorianCalendar cal2 = new GregorianCalendar(2003, 6, 10, 9, 0, 0);
final Date date1 = cal1.getTime();
final Date date2 = cal2.getTime();
final long millis1 = date1.getTime();
final long millis2 = date2.getTime();
DatePrinter fdf = getInstance("yyyy-MM-dd'T'HH:mm:ss");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
assertEquals(sdf.format(date1), fdf.format(date1));
assertEquals("2003-01-10T15:33:20", fdf.format(date1));
assertEquals("2003-01-10T15:33:20", fdf.format(cal1));
assertEquals("2003-01-10T15:33:20", fdf.format(millis1));
assertEquals("2003-07-10T09:00:00", fdf.format(date2));
assertEquals("2003-07-10T09:00:00", fdf.format(cal2));
assertEquals("2003-07-10T09:00:00", fdf.format(millis2));
fdf = getInstance("Z");
assertEquals("-0500", fdf.format(date1));
assertEquals("-0500", fdf.format(cal1));
assertEquals("-0500", fdf.format(millis1));
assertEquals("-0400", fdf.format(date2));
assertEquals("-0400", fdf.format(cal2));
assertEquals("-0400", fdf.format(millis2));
fdf = getInstance("ZZ");
assertEquals("-05:00", fdf.format(date1));
assertEquals("-05:00", fdf.format(cal1));
assertEquals("-05:00", fdf.format(millis1));
assertEquals("-04:00", fdf.format(date2));
assertEquals("-04:00", fdf.format(cal2));
assertEquals("-04:00", fdf.format(millis2));
final String pattern = "GGGG GGG GG G yyyy yyy yy y MMMM MMM MM M LLLL LLL LL L"
+ " dddd ddd dd d DDDD DDD DD D EEEE EEE EE E aaaa aaa aa a zzzz zzz zz z";
fdf = getInstance(pattern);
sdf = new SimpleDateFormat(pattern);
// SDF bug fix starting with Java 7
assertEquals(sdf.format(date1).replace("2003 03 03 03", "2003 2003 03 2003"), fdf.format(date1));
assertEquals(sdf.format(date2).replace("2003 03 03 03", "2003 2003 03 2003"), fdf.format(date2));
}
@Test
void testHourFormats() {
final Calendar calendar = Calendar.getInstance();
calendar.clear();
final DatePrinter printer = getInstance("K k H h");
calendar.set(Calendar.HOUR_OF_DAY, 0);
assertEquals("0 24 0 12", printer.format(calendar));
calendar.set(Calendar.HOUR_OF_DAY, 12);
assertEquals("0 12 12 12", printer.format(calendar));
calendar.set(Calendar.HOUR_OF_DAY, 23);
assertEquals("11 23 23 11", printer.format(calendar));
}
@Test
void testLang1103() {
final Calendar cal = Calendar.getInstance(SWEDEN);
cal.set(Calendar.DAY_OF_MONTH, 2);
assertEquals("2", getInstance("d", SWEDEN).format(cal));
assertEquals("02", getInstance("dd", SWEDEN).format(cal));
assertEquals("002", getInstance("ddd", SWEDEN).format(cal));
assertEquals("0002", getInstance("dddd", SWEDEN).format(cal));
assertEquals("00002", getInstance("ddddd", SWEDEN).format(cal));
}
@Test
void testLang303() {
final Calendar cal = Calendar.getInstance();
cal.set(2004, Calendar.DECEMBER, 31);
DatePrinter format = getInstance(YYYY_MM_DD);
final String output = format.format(cal);
format = SerializationUtils.deserialize(SerializationUtils.serialize((Serializable) format));
assertEquals(output, format.format(cal));
}
@Test
void testLang538() {
// more commonly constructed with: cal = new GregorianCalendar(2009, 9, 16, 8, 42, 16)
// for the unit test to work in any time zone, constructing with GMT-8 rather than default locale time zone
final GregorianCalendar cal = new GregorianCalendar(TimeZones.getTimeZone("GMT-8"));
cal.clear();
cal.set(2009, Calendar.OCTOBER, 16, 8, 42, 16);
final DatePrinter format = getInstance("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", TimeZones.GMT);
assertEquals("2009-10-16T16:42:16.000Z", format.format(cal.getTime()), "dateTime");
assertEquals("2009-10-16T16:42:16.000Z", format.format(cal), "dateTime");
}
@Test
void testLang645() {
final Locale locale = new Locale("sv", "SE");
final Calendar cal = Calendar.getInstance();
cal.set(2010, Calendar.JANUARY, 1, 12, 0, 0);
final Date d = cal.getTime();
final DatePrinter fdf = getInstance("EEEE', week 'ww", locale);
assertEquals("fredag, week 53", fdf.format(d));
}
/**
* According to LANG-916 (https://issues.apache.org/jira/browse/LANG-916), the format method did contain a bug: it did not use the TimeZone data.
*
* This method test that the bug is fixed.
*/
@Test
void testLang916() {
final Calendar cal = Calendar.getInstance(TimeZones.getTimeZone("Europe/Paris"));
cal.clear();
cal.set(2009, 9, 16, 8, 42, 16);
// calendar fast.
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZones.getTimeZone("Europe/Paris")).format(cal);
assertEquals("2009-10-16T08:42:16 +0200", value, "calendar");
}
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZones.getTimeZone("Asia/Kolkata")).format(cal);
assertEquals("2009-10-16T12:12:16 +0530", value, "calendar");
}
{
final String value = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss Z", TimeZones.getTimeZone("Europe/London")).format(cal);
assertEquals("2009-10-16T07:42:16 +0100", value, "calendar");
}
}
@Test
void testLocaleMatches() {
final DatePrinter printer = getInstance(YYYY_MM_DD, SWEDEN);
assertEquals(SWEDEN, printer.getLocale());
}
/**
* Tests that pre-1000AD years get padded with yyyy
*/
@Test
void testLowYearPadding() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance(YYYY_MM_DD);
cal.set(1, Calendar.JANUARY, 1);
assertEquals("0001/01/01", format.format(cal));
cal.set(10, Calendar.JANUARY, 1);
assertEquals("0010/01/01", format.format(cal));
cal.set(100, Calendar.JANUARY, 1);
assertEquals("0100/01/01", format.format(cal));
cal.set(999, Calendar.JANUARY, 1);
assertEquals("0999/01/01", format.format(cal));
}
/**
* Show Bug #39410 is solved
*/
@Test
void testMilleniumBug() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance("dd.MM.yyyy");
cal.set(1000, Calendar.JANUARY, 1);
assertEquals("01.01.1000", format.format(cal));
}
@Test
void testPatternMatches() {
final DatePrinter printer = getInstance(YYYY_MM_DD);
assertEquals(YYYY_MM_DD, printer.getPattern());
}
/**
* Test case for {@link FastDateParser#FastDateParser(String, TimeZone, Locale)}.
*/
@Test
void testShortDateStyleWithLocales() {
final Locale usLocale = Locale.US;
final Locale swedishLocale = new Locale("sv", "SE");
final Calendar cal = Calendar.getInstance();
cal.set(2004, Calendar.FEBRUARY, 3);
DatePrinter fdf = getDateInstance(FastDateFormat.SHORT, usLocale);
assertEquals("2/3/04", fdf.format(cal));
fdf = getDateInstance(FastDateFormat.SHORT, swedishLocale);
assertEquals("2004-02-03", fdf.format(cal));
}
/**
* testLowYearPadding showed that the date was buggy This test confirms it, getting 366 back as a date
*/
@Test
void testSimpleDate() {
final Calendar cal = Calendar.getInstance();
final DatePrinter format = getInstance(YYYY_MM_DD);
cal.set(2004, Calendar.DECEMBER, 31);
assertEquals("2004/12/31", format.format(cal));
cal.set(999, Calendar.DECEMBER, 31);
assertEquals("0999/12/31", format.format(cal));
cal.set(1, Calendar.MARCH, 2);
assertEquals("0001/03/02", format.format(cal));
}
@SuppressWarnings("deprecation")
@Test
void testStringBufferOptions() {
final DatePrinter format = getInstance("yyyy-MM-dd HH:mm:ss.SSS Z", TimeZones.GMT);
final Calendar calendar = Calendar.getInstance();
final StringBuffer sb = new StringBuffer();
final String expected = format.format(calendar, sb, new FieldPosition(0)).toString();
sb.setLength(0);
assertEquals(expected, format.format(calendar, sb).toString());
sb.setLength(0);
final Date date = calendar.getTime();
assertEquals(expected, format.format(date, sb, new FieldPosition(0)).toString());
sb.setLength(0);
assertEquals(expected, format.format(date, sb).toString());
sb.setLength(0);
final long epoch = date.getTime();
assertEquals(expected, format.format(epoch, sb, new FieldPosition(0)).toString());
sb.setLength(0);
assertEquals(expected, format.format(epoch, sb).toString());
}
@DefaultTimeZone("UTC")
@Test
void testTimeZoneAsZ() {
final Calendar c = Calendar.getInstance(FastTimeZone.getGmtTimeZone());
final FastDateFormat noColonFormat = FastDateFormat.getInstance("Z");
assertEquals("+0000", noColonFormat.format(c));
final FastDateFormat isoFormat = FastDateFormat.getInstance("ZZ");
assertEquals("Z", isoFormat.format(c));
final FastDateFormat colonFormat = FastDateFormat.getInstance("ZZZ");
assertEquals("+00:00", colonFormat.format(c));
}
@Test
void testTimeZoneMatches() {
final DatePrinter printer = getInstance(YYYY_MM_DD, NEW_YORK);
assertEquals(NEW_YORK, printer.getTimeZone());
}
@Test
void testToStringContainsName() {
final DatePrinter printer = getInstance(YYYY_MM_DD);
assertTrue(printer.toString().startsWith("FastDate"));
}
@DefaultLocale(language = "en", country = "US")
@DefaultTimeZone("America/New_York")
@Test
void testWeekYear() {
final GregorianCalendar cal = new GregorianCalendar(2020, 12, 31, 0, 0, 0);
final DatePrinter printer4Digits = getInstance("YYYY");
final DatePrinter printer4DigitsFallback = getInstance("YYY");
final DatePrinter printer2Digits = getInstance("YY");
final DatePrinter printer4DigitAnotherFallback = getInstance("Y");
assertEquals("2021", printer4Digits.format(cal));
assertEquals("2021", printer4DigitsFallback.format(cal));
assertEquals("2021", printer4DigitAnotherFallback.format(cal));
assertEquals("21", printer2Digits.format(cal));
}
}
|
Expected1806
|
java
|
google__guava
|
android/guava/src/com/google/common/util/concurrent/ExecutionSequencer.java
|
{
"start": 5020,
"end": 5764
}
|
class ____ {
private ExecutionSequencer() {}
/** Creates a new instance. */
public static ExecutionSequencer create() {
return new ExecutionSequencer();
}
/** This reference acts as a pointer tracking the head of a linked list of ListenableFutures. */
private final AtomicReference<ListenableFuture<@Nullable Void>> ref =
new AtomicReference<>(immediateVoidFuture());
@LazyInit private ThreadConfinedTaskQueue latestTaskQueue = new ThreadConfinedTaskQueue();
/**
* This object is unsafely published, but avoids problematic races by relying exclusively on the
* identity equality of its Thread field so that the task field is only accessed by a single
* thread.
*/
private static final
|
ExecutionSequencer
|
java
|
elastic__elasticsearch
|
benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java
|
{
"start": 3111,
"end": 4108
}
|
class ____ {
private final TermsAggregationBuilder builder = new TermsAggregationBuilder("terms");
private final SearchPhaseController controller = new SearchPhaseController((task, req) -> new AggregationReduceContext.Builder() {
@Override
public AggregationReduceContext forPartialReduction() {
return new AggregationReduceContext.ForPartial(null, null, task, builder, b -> {});
}
@Override
public AggregationReduceContext forFinalReduction() {
final MultiBucketConsumerService.MultiBucketConsumer bucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer(
Integer.MAX_VALUE,
new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)
);
return new AggregationReduceContext.ForFinal(null, null, task, builder, bucketConsumer, PipelineAggregator.PipelineTree.EMPTY);
}
});
@State(Scope.Benchmark)
public static
|
TermsReduceBenchmark
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitCache.java
|
{
"start": 32530,
"end": 45512
}
|
class ____
extends BlockReaderFactory.FailureInjector {
@Override
public boolean getSupportsReceiptVerification() {
return false;
}
}
// Regression test for HDFS-8070
@Test
@Timeout(value = 60)
public void testPreReceiptVerificationDfsClientCanDoScr() throws Exception {
BlockReaderTestUtil.enableShortCircuitShmTracing();
TemporarySocketDirectory sockDir = new TemporarySocketDirectory();
Configuration conf = createShortCircuitConf(
"testPreReceiptVerificationDfsClientCanDoScr", sockDir);
conf.setLong(
HdfsClientConfigKeys.Read.ShortCircuit.STREAMS_CACHE_EXPIRY_MS_KEY,
1000000000L);
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
BlockReaderFactory.setFailureInjectorForTesting(
new TestPreReceiptVerificationFailureInjector());
final Path TEST_PATH1 = new Path("/test_file1");
DFSTestUtil.createFile(fs, TEST_PATH1, 4096, (short)1, 0xFADE2);
final Path TEST_PATH2 = new Path("/test_file2");
DFSTestUtil.createFile(fs, TEST_PATH2, 4096, (short)1, 0xFADE2);
DFSTestUtil.readFileBuffer(fs, TEST_PATH1);
DFSTestUtil.readFileBuffer(fs, TEST_PATH2);
checkNumberOfSegmentsAndSlots(1, 2,
cluster.getDataNodes().get(0).getShortCircuitRegistry());
cluster.shutdown();
sockDir.close();
}
@Test
public void testFetchOrCreateRetries() throws Exception {
try(ShortCircuitCache cache = Mockito
.spy(new ShortCircuitCache(10, 10000000, 10, 10000000, 1, 10000, 0))) {
final TestFileDescriptorPair pair = new TestFileDescriptorPair();
ExtendedBlockId extendedBlockId = new ExtendedBlockId(123, "test_bp1");
SimpleReplicaCreator sRC = new SimpleReplicaCreator(123, cache, pair);
// Arrange that fetch will throw RetriableException for any call
Mockito.doThrow(new RetriableException("Retry")).when(cache)
.fetch(Mockito.eq(extendedBlockId), Mockito.any());
// Act: calling fetchOrCreate two times
// first call: it will create and put entry to replicaInfoMap
// second call: it will call fetch to get info for entry, and should
// retry 3 times because RetriableException thrown
cache.fetchOrCreate(extendedBlockId, sRC);
cache.fetchOrCreate(extendedBlockId, sRC);
// Assert that fetchOrCreate retried to fetch at least 3 times
Mockito.verify(cache, Mockito.atLeast(3))
.fetch(Mockito.eq(extendedBlockId), Mockito.any());
}
}
@Test
public void testRequestFileDescriptorsWhenULimit() throws Exception {
TemporarySocketDirectory sockDir = new TemporarySocketDirectory();
Configuration conf = createShortCircuitConf(
"testRequestFileDescriptorsWhenULimit", sockDir);
final short replicas = 1;
final int fileSize = 3;
final String testFile = "/testfile";
try (MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(replicas).build()) {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
DFSTestUtil.createFile(fs, new Path(testFile), fileSize, replicas, 0L);
LocatedBlock blk = new DFSClient(DFSUtilClient.getNNAddress(conf), conf)
.getLocatedBlocks(testFile, 0, fileSize).get(0);
ClientContext clientContext = Mockito.mock(ClientContext.class);
Mockito.when(clientContext.getPeerCache()).thenAnswer(
(Answer<PeerCache>) peerCacheCall -> {
PeerCache peerCache = new PeerCache(10, Long.MAX_VALUE);
DomainPeer peer = Mockito.spy(getDomainPeerToDn(conf));
peerCache.put(blk.getLocations()[0], peer);
Mockito.when(peer.getDomainSocket()).thenAnswer(
(Answer<DomainSocket>) domainSocketCall -> {
DomainSocket domainSocket = Mockito.mock(DomainSocket.class);
Mockito.when(domainSocket
.recvFileInputStreams(
Mockito.any(FileInputStream[].class),
Mockito.any(byte[].class),
Mockito.anyInt(),
Mockito.anyInt())
).thenAnswer(
// we are mocking the FileOutputStream array with nulls
(Answer<Void>) recvFileInputStreamsCall -> null
);
return domainSocket;
}
);
return peerCache;
});
Mockito.when(clientContext.getShortCircuitCache(
blk.getBlock().getBlockId())).thenAnswer(
(Answer<ShortCircuitCache>) shortCircuitCacheCall -> {
ShortCircuitCache cache = Mockito.mock(ShortCircuitCache.class);
Mockito.when(cache.allocShmSlot(
Mockito.any(DatanodeInfo.class),
Mockito.any(DomainPeer.class),
Mockito.any(MutableBoolean.class),
Mockito.any(ExtendedBlockId.class),
Mockito.anyString()))
.thenAnswer((Answer<Slot>) call -> null);
return cache;
}
);
DatanodeInfo[] nodes = blk.getLocations();
try {
assertNull(new BlockReaderFactory(new DfsClientConf(conf))
.setInetSocketAddress(NetUtils.createSocketAddr(nodes[0]
.getXferAddr()))
.setClientCacheContext(clientContext)
.setDatanodeInfo(blk.getLocations()[0])
.setBlock(blk.getBlock())
.setBlockToken(new Token())
.createShortCircuitReplicaInfo());
} catch (NullPointerException ex) {
fail("Should not throw NPE when the native library is unable " +
"to create new files!");
}
}
}
@Test
@Timeout(value = 60)
public void testDomainSocketClosedByDN() throws Exception {
TemporarySocketDirectory sockDir = new TemporarySocketDirectory();
Configuration conf =
createShortCircuitConf("testDomainSocketClosedByDN", sockDir);
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
final ShortCircuitCache cache =
fs.getClient().getClientContext().getShortCircuitCache();
DomainPeer peer = getDomainPeerToDn(conf);
MutableBoolean usedPeer = new MutableBoolean(false);
ExtendedBlockId blockId = new ExtendedBlockId(123, "xyz");
final DatanodeInfo datanode = new DatanodeInfo.DatanodeInfoBuilder()
.setNodeID(cluster.getDataNodes().get(0).getDatanodeId()).build();
// Allocating the first shm slot requires using up a peer.
Slot slot1 = cache.allocShmSlot(datanode, peer, usedPeer, blockId,
"testReleaseSlotReuseDomainSocket_client");
cluster.getDataNodes().get(0).getShortCircuitRegistry()
.registerSlot(blockId, slot1.getSlotId(), false);
Slot slot2 = cache.allocShmSlot(datanode, peer, usedPeer, blockId,
"testReleaseSlotReuseDomainSocket_client");
cluster.getDataNodes().get(0).getShortCircuitRegistry()
.registerSlot(blockId, slot2.getSlotId(), false);
cache.scheduleSlotReleaser(slot1);
Thread.sleep(2000);
cache.scheduleSlotReleaser(slot2);
Thread.sleep(2000);
assertEquals(0,
cluster.getDataNodes().get(0).getShortCircuitRegistry().getShmNum());
assertEquals(0, cache.getDfsClientShmManager().getShmNum());
} finally {
cluster.shutdown();
}
}
// Regression test for HDFS-16535
@Test
@Timeout(value = 60)
public void testDomainSocketClosedByMultipleDNs() throws Exception {
TemporarySocketDirectory sockDir = new TemporarySocketDirectory();
String testName = "testDomainSocketClosedByMultipleDNs";
Configuration conf = createShortCircuitConf(testName, sockDir);
conf.set(DFS_DOMAIN_SOCKET_PATH_KEY, new File(sockDir.getDir(),
testName + "._PORT").getAbsolutePath());
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
try {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
final ShortCircuitCache cache =
fs.getClient().getClientContext().getShortCircuitCache();
ExtendedBlockId blockId0 = new ExtendedBlockId(123, "xyz");
ExtendedBlockId blockId1 = new ExtendedBlockId(456, "xyz");
DataNode dn0 = cluster.getDataNodes().get(0);
DataNode dn1 = cluster.getDataNodes().get(1);
DomainPeer peer0 = new DomainPeer(DomainSocket.connect(new File(
sockDir.getDir(), testName + "." + dn0.getXferPort()).getAbsolutePath()));
DomainPeer peer1 = new DomainPeer(DomainSocket.connect(new File(
sockDir.getDir(), testName + "." + dn1.getXferPort()).getAbsolutePath()));
final DatanodeInfo dnInfo0 = new DatanodeInfo.DatanodeInfoBuilder()
.setNodeID(dn0.getDatanodeId()).build();
final DatanodeInfo dnInfo1 = new DatanodeInfo.DatanodeInfoBuilder()
.setNodeID(dn1.getDatanodeId()).build();
// Allocate 2 shm slots from DataNode-0
MutableBoolean usedPeer = new MutableBoolean(false);
Slot slot1 = cache.allocShmSlot(dnInfo0, peer0, usedPeer, blockId0,
"testDomainSocketClosedByMultipleDNs_client");
dn0.getShortCircuitRegistry()
.registerSlot(blockId0, slot1.getSlotId(), false);
Slot slot2 = cache.allocShmSlot(dnInfo0, peer0, usedPeer, blockId0,
"testDomainSocketClosedByMultipleDNs_client");
dn0.getShortCircuitRegistry()
.registerSlot(blockId0, slot2.getSlotId(), false);
// Allocate 1 shm slot from DataNode-1
Slot slot3 = cache.allocShmSlot(dnInfo1, peer1, usedPeer, blockId1,
"testDomainSocketClosedByMultipleDNs_client");
dn1.getShortCircuitRegistry()
.registerSlot(blockId1, slot3.getSlotId(), false);
assertEquals(2, cache.getDfsClientShmManager().getShmNum());
assertEquals(1, dn0.getShortCircuitRegistry().getShmNum());
assertEquals(1, dn1.getShortCircuitRegistry().getShmNum());
// Release the slot of DataNode-1 first.
cache.scheduleSlotReleaser(slot3);
Thread.sleep(2000);
assertEquals(1, cache.getDfsClientShmManager().getShmNum());
// Release the slots of DataNode-0.
cache.scheduleSlotReleaser(slot1);
Thread.sleep(2000);
assertEquals(1, cache.getDfsClientShmManager().getShmNum(),
"0 ShmNum means the shm of DataNode-0 is shutdown"
+ " due to slot release failures.");
cache.scheduleSlotReleaser(slot2);
Thread.sleep(2000);
assertEquals(0, dn0.getShortCircuitRegistry().getShmNum());
assertEquals(0, dn1.getShortCircuitRegistry().getShmNum());
assertEquals(0, cache.getDfsClientShmManager().getShmNum());
} finally {
cluster.shutdown();
}
}
@Test
@Timeout(value = 60)
public void testDNRestart() throws Exception {
TemporarySocketDirectory sockDir = new TemporarySocketDirectory();
Configuration conf = createShortCircuitConf("testDNRestart", sockDir);
MiniDFSCluster cluster =
new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
try {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
final ShortCircuitCache cache =
fs.getClient().getClientContext().getShortCircuitCache();
DomainPeer peer = getDomainPeerToDn(conf);
MutableBoolean usedPeer = new MutableBoolean(false);
ExtendedBlockId blockId = new ExtendedBlockId(123, "xyz");
final DatanodeInfo datanode = new DatanodeInfo.DatanodeInfoBuilder()
.setNodeID(cluster.getDataNodes().get(0).getDatanodeId()).build();
// Allocating the first shm slot requires using up a peer.
Slot slot1 = cache.allocShmSlot(datanode, peer, usedPeer, blockId,
"testReleaseSlotReuseDomainSocket_client");
cluster.getDataNodes().get(0).getShortCircuitRegistry()
.registerSlot(blockId, slot1.getSlotId(), false);
// restart the datanode to invalidate the cache
cluster.restartDataNode(0);
Thread.sleep(1000);
// after the restart, new allocation and release should not be affect
cache.scheduleSlotReleaser(slot1);
Slot slot2 = null;
try {
slot2 = cache.allocShmSlot(datanode, peer, usedPeer, blockId,
"testReleaseSlotReuseDomainSocket_client");
} catch (ClosedChannelException ce) {
}
cache.scheduleSlotReleaser(slot2);
Thread.sleep(2000);
assertEquals(0,
cluster.getDataNodes().get(0).getShortCircuitRegistry().getShmNum());
assertEquals(0, cache.getDfsClientShmManager().getShmNum());
} finally {
cluster.shutdown();
}
}
}
|
TestPreReceiptVerificationFailureInjector
|
java
|
apache__camel
|
components/camel-test/camel-test-main-junit5/src/main/java/org/apache/camel/test/main/junit5/AdviceRouteMapping.java
|
{
"start": 2269,
"end": 2393
}
|
class ____ the specific route builder that is used to advice the route.
*/
Class<? extends RouteBuilder> advice();
}
|
of
|
java
|
apache__maven
|
impl/maven-core/src/test/java/org/apache/maven/lifecycle/internal/builder/BuilderCommonTest.java
|
{
"start": 1554,
"end": 3916
}
|
class ____ {
private Logger logger = mock(Logger.class);
@Test
void testResolveBuildPlan() throws Exception {
MavenSession original = ProjectDependencyGraphStub.getMavenSession();
final TaskSegment taskSegment1 = new TaskSegment(false);
final MavenSession session1 = original.clone();
session1.setCurrentProject(ProjectDependencyGraphStub.A);
final BuilderCommon builderCommon = getBuilderCommon(logger);
final MavenExecutionPlan plan =
builderCommon.resolveBuildPlan(session1, ProjectDependencyGraphStub.A, taskSegment1, new HashSet<>());
assertEquals(
LifecycleExecutionPlanCalculatorStub.getProjectAExecutionPlan().size(), plan.size());
}
@Test
void testDefaultBindingPluginsWarning() throws Exception {
MavenSession original = ProjectDependencyGraphStub.getMavenSession();
final TaskSegment taskSegment1 = new TaskSegment(false);
final MavenSession session1 = original.clone();
session1.setCurrentProject(ProjectDependencyGraphStub.A);
getBuilderCommon(logger)
.resolveBuildPlan(session1, ProjectDependencyGraphStub.A, taskSegment1, new HashSet<>());
verify(logger)
.warn("Version not locked for default bindings plugins ["
+ "stub-plugin-initialize, "
+ "stub-plugin-process-resources, "
+ "stub-plugin-compile, "
+ "stub-plugin-process-test-resources, "
+ "stub-plugin-test-compile, "
+ "stub-plugin-test, "
+ "stub-plugin-package, "
+ "stub-plugin-install], "
+ "you should define versions in pluginManagement section of your pom.xml or parent");
}
@Test
void testHandleBuildError() throws Exception {}
@Test
void testAttachToThread() throws Exception {}
@Test
void testGetKey() throws Exception {}
public BuilderCommon getBuilderCommon(Logger logger) {
final LifecycleDebugLogger debugLogger = new LifecycleDebugLogger();
return new BuilderCommon(
debugLogger, new LifecycleExecutionPlanCalculatorStub(), mock(ExecutionEventCatapult.class), logger);
}
}
|
BuilderCommonTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
|
{
"start": 62735,
"end": 63602
}
|
class ____ {
public int invoke() {
return 123;
}
public int foo(Suit suit) {
switch (suit) {
case HEART:
case DIAMOND:
throw new NullPointerException();
case SPADE:
throw new RuntimeException();
default:
throw new NullPointerException();
}
}
}
""")
.setArgs(
"-XepOpt:StatementSwitchToExpressionSwitch:EnableReturnSwitchConversion",
"-XepOpt:StatementSwitchToExpressionSwitch:EnableDirectConversion=false")
.doTest();
}
@Test
public void switchByEnum_returnSwitchWithShouldNeverHappen_errorAndRemoveShouldNeverHappen() {
// The switch has a case for each
|
Test
|
java
|
quarkusio__quarkus
|
extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/CachedResultsTest.java
|
{
"start": 5370,
"end": 5537
}
|
interface ____ {
String ping();
default String pong() {
return UUID.randomUUID().toString();
}
}
@Dependent
static
|
Nora
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/function/DefaultServerRequest.java
|
{
"start": 12013,
"end": 13459
}
|
class ____ implements Headers {
private final HttpHeaders httpHeaders;
public DefaultRequestHeaders(HttpHeaders httpHeaders) {
this.httpHeaders = HttpHeaders.readOnlyHttpHeaders(httpHeaders);
}
@Override
public List<MediaType> accept() {
return this.httpHeaders.getAccept();
}
@Override
public List<Charset> acceptCharset() {
return this.httpHeaders.getAcceptCharset();
}
@Override
public List<Locale.LanguageRange> acceptLanguage() {
return this.httpHeaders.getAcceptLanguage();
}
@Override
public OptionalLong contentLength() {
long value = this.httpHeaders.getContentLength();
return (value != -1 ? OptionalLong.of(value) : OptionalLong.empty());
}
@Override
public Optional<MediaType> contentType() {
return Optional.ofNullable(this.httpHeaders.getContentType());
}
@Override
public @Nullable InetSocketAddress host() {
return this.httpHeaders.getHost();
}
@Override
public List<HttpRange> range() {
return this.httpHeaders.getRange();
}
@Override
public List<String> header(String headerName) {
List<String> headerValues = this.httpHeaders.get(headerName);
return (headerValues != null ? headerValues : Collections.emptyList());
}
@Override
public HttpHeaders asHttpHeaders() {
return this.httpHeaders;
}
@Override
public String toString() {
return this.httpHeaders.toString();
}
}
private static final
|
DefaultRequestHeaders
|
java
|
quarkusio__quarkus
|
extensions/narayana-jta/deployment/src/test/java/io/quarkus/narayana/quarkus/TransactionJdbcObjectStoreValidationFailureTest.java
|
{
"start": 550,
"end": 1581
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource("jdbc-object-store-validation.properties", "application.properties"))
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-jdbc-h2", Version.getVersion())))
.assertException(t -> {
Throwable rootCause = ExceptionUtil.getRootCause(t);
if (rootCause instanceof ConfigurationException) {
assertTrue(rootCause.getMessage().contains(
"The Narayana JTA extension is configured to use the datasource 'test' but that datasource is not configured."));
} else {
fail(t);
}
});
@Test
public void test() {
// needs to be there in order to run test
Assertions.fail("Application was supposed to fail.");
}
}
|
TransactionJdbcObjectStoreValidationFailureTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/datastream/CoGroupedStreams.java
|
{
"start": 17736,
"end": 22265
}
|
class ____<T1, T2> extends TypeSerializer<TaggedUnion<T1, T2>> {
private static final long serialVersionUID = 1L;
private final TypeSerializer<T1> oneSerializer;
private final TypeSerializer<T2> twoSerializer;
public UnionSerializer(TypeSerializer<T1> oneSerializer, TypeSerializer<T2> twoSerializer) {
this.oneSerializer = oneSerializer;
this.twoSerializer = twoSerializer;
}
@Override
public boolean isImmutableType() {
return false;
}
@Override
public TypeSerializer<TaggedUnion<T1, T2>> duplicate() {
TypeSerializer<T1> duplicateOne = oneSerializer.duplicate();
TypeSerializer<T2> duplicateTwo = twoSerializer.duplicate();
// compare reference of nested serializers, if same instances returned, we can reuse
// this instance as well
if (duplicateOne != oneSerializer || duplicateTwo != twoSerializer) {
return new UnionSerializer<>(duplicateOne, duplicateTwo);
} else {
return this;
}
}
@Override
public TaggedUnion<T1, T2> createInstance() {
// we arbitrarily always create instance of one
return TaggedUnion.one(oneSerializer.createInstance());
}
@Override
public TaggedUnion<T1, T2> copy(TaggedUnion<T1, T2> from) {
if (from.isOne()) {
return TaggedUnion.one(oneSerializer.copy(from.getOne()));
} else {
return TaggedUnion.two(twoSerializer.copy(from.getTwo()));
}
}
@Override
public TaggedUnion<T1, T2> copy(TaggedUnion<T1, T2> from, TaggedUnion<T1, T2> reuse) {
if (from.isOne()) {
return TaggedUnion.one(oneSerializer.copy(from.getOne()));
} else {
return TaggedUnion.two(twoSerializer.copy(from.getTwo()));
}
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(TaggedUnion<T1, T2> record, DataOutputView target)
throws IOException {
if (record.isOne()) {
target.writeByte(1);
oneSerializer.serialize(record.getOne(), target);
} else {
target.writeByte(2);
twoSerializer.serialize(record.getTwo(), target);
}
}
@Override
public TaggedUnion<T1, T2> deserialize(DataInputView source) throws IOException {
byte tag = source.readByte();
if (tag == 1) {
return TaggedUnion.one(oneSerializer.deserialize(source));
} else {
return TaggedUnion.two(twoSerializer.deserialize(source));
}
}
@Override
public TaggedUnion<T1, T2> deserialize(TaggedUnion<T1, T2> reuse, DataInputView source)
throws IOException {
byte tag = source.readByte();
if (tag == 1) {
return TaggedUnion.one(oneSerializer.deserialize(source));
} else {
return TaggedUnion.two(twoSerializer.deserialize(source));
}
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
byte tag = source.readByte();
target.writeByte(tag);
if (tag == 1) {
oneSerializer.copy(source, target);
} else {
twoSerializer.copy(source, target);
}
}
@Override
public int hashCode() {
return 31 * oneSerializer.hashCode() + twoSerializer.hashCode();
}
@Override
@SuppressWarnings("unchecked")
public boolean equals(Object obj) {
if (obj instanceof UnionSerializer) {
UnionSerializer<T1, T2> other = (UnionSerializer<T1, T2>) obj;
return oneSerializer.equals(other.oneSerializer)
&& twoSerializer.equals(other.twoSerializer);
} else {
return false;
}
}
@Override
public TypeSerializerSnapshot<TaggedUnion<T1, T2>> snapshotConfiguration() {
return new UnionSerializerSnapshot<>(this);
}
}
/** The {@link TypeSerializerSnapshot} for the {@link UnionSerializer}. */
public static
|
UnionSerializer
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FsLinkResolution.java
|
{
"start": 1461,
"end": 2153
}
|
class ____<T> extends FSLinkResolver<T> {
/**
* The function to invoke in the {@link #next(AbstractFileSystem, Path)} call.
*/
private final FsLinkResolutionFunction<T> fn;
/**
* Construct an instance with the given function.
* @param fn function to invoke.
*/
public FsLinkResolution(final FsLinkResolutionFunction<T> fn) {
this.fn = Preconditions.checkNotNull(fn);
}
@Override
public T next(final AbstractFileSystem fs, final Path p)
throws UnresolvedLinkException, IOException {
return fn.apply(fs, p);
}
/**
* The signature of the function to invoke.
* @param <T> type resolved to
*/
@FunctionalInterface
public
|
FsLinkResolution
|
java
|
google__auto
|
factory/src/test/resources/good/ParameterAnnotations.java
|
{
"start": 1267,
"end": 1352
}
|
interface ____ {}
@Retention(RUNTIME)
@Target({PARAMETER, TYPE_USE})
@
|
NullableType
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/IterablesBaseTest.java
|
{
"start": 1480,
"end": 2310
}
|
class ____ {
protected static final AssertionInfo INFO = someInfo();
protected List<String> actual;
protected Failures failures;
protected Iterables iterables;
protected ComparatorBasedComparisonStrategy comparisonStrategy;
protected Iterables iterablesWithCaseInsensitiveComparisonStrategy;
protected AssertionInfo info;
@BeforeEach
public void setUp() {
actual = newArrayList("Luke", "Yoda", "Leia");
failures = spy(Failures.instance());
iterables = new Iterables();
iterables.failures = failures;
comparisonStrategy = new ComparatorBasedComparisonStrategy(CASE_INSENSITIVE_ORDER);
iterablesWithCaseInsensitiveComparisonStrategy = new Iterables(comparisonStrategy);
iterablesWithCaseInsensitiveComparisonStrategy.failures = failures;
info = someInfo();
}
}
|
IterablesBaseTest
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/server/firewall/StrictServerWebExchangeFirewall.java
|
{
"start": 29151,
"end": 30839
}
|
class ____ implements Builder {
private final Builder delegate;
private StrictFirewallBuilder(Builder delegate) {
this.delegate = delegate;
}
@Override
public Builder method(HttpMethod httpMethod) {
return new StrictFirewallBuilder(this.delegate.method(httpMethod));
}
@Override
public Builder uri(URI uri) {
return new StrictFirewallBuilder(this.delegate.uri(uri));
}
@Override
public Builder path(String path) {
return new StrictFirewallBuilder(this.delegate.path(path));
}
@Override
public Builder contextPath(String contextPath) {
return new StrictFirewallBuilder(this.delegate.contextPath(contextPath));
}
@Override
public Builder header(String headerName, String... headerValues) {
return new StrictFirewallBuilder(this.delegate.header(headerName, headerValues));
}
@Override
public Builder headers(Consumer<HttpHeaders> headersConsumer) {
return new StrictFirewallBuilder(this.delegate.headers(headersConsumer));
}
@Override
public Builder sslInfo(SslInfo sslInfo) {
return new StrictFirewallBuilder(this.delegate.sslInfo(sslInfo));
}
@Override
public Builder remoteAddress(InetSocketAddress remoteAddress) {
return new StrictFirewallBuilder(this.delegate.remoteAddress(remoteAddress));
}
@Override
public Builder localAddress(InetSocketAddress localAddress) {
return new StrictFirewallBuilder(this.delegate.localAddress(localAddress));
}
@Override
public ServerHttpRequest build() {
return new StrictFirewallHttpRequest(this.delegate.build());
}
}
}
}
}
|
StrictFirewallBuilder
|
java
|
elastic__elasticsearch
|
test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/FipsEnabledClusterConfigProvider.java
|
{
"start": 586,
"end": 2559
}
|
class ____ implements LocalClusterConfigProvider {
@Override
public void apply(LocalClusterSpecBuilder<?> builder) {
if (isFipsEnabled()) {
builder.configFile(
"fips_java.security",
Resource.fromClasspath(isOracleJvm() ? "fips/fips_java_oracle.security" : "fips/fips_java.security")
)
.configFile("fips_java.policy", Resource.fromClasspath("fips/fips_java.policy"))
.configFile("cacerts.bcfks", Resource.fromClasspath("fips/cacerts.bcfks"))
.systemProperty("java.security.properties", "=${ES_PATH_CONF}/fips_java.security")
.systemProperty("java.security.policy", "=${ES_PATH_CONF}/fips_java.policy")
.systemProperty("javax.net.ssl.trustStore", "${ES_PATH_CONF}/cacerts.bcfks")
.systemProperty("javax.net.ssl.trustStorePassword", "password")
.systemProperty("javax.net.ssl.keyStorePassword", "password")
.systemProperty("javax.net.ssl.keyStoreType", "BCFKS")
.systemProperty("org.bouncycastle.fips.approved_only", "true")
.setting("network.host", "_local:ipv4_")
.setting("xpack.security.enabled", "false")
.setting("xpack.security.fips_mode.enabled", "true")
.setting("xpack.license.self_generated.type", "trial")
.setting("xpack.security.authc.password_hashing.algorithm", "pbkdf2_stretch")
.setting("xpack.security.fips_mode.required_providers", () -> "[BCFIPS, BCJSSE]", n -> n.getVersion().onOrAfter("8.13.0"))
.keystorePassword("keystore-password");
}
}
private static boolean isFipsEnabled() {
return Boolean.getBoolean("tests.fips.enabled");
}
private static boolean isOracleJvm() {
return System.getProperty("java.vendor").toLowerCase().contains("oracle");
}
}
|
FipsEnabledClusterConfigProvider
|
java
|
spring-projects__spring-framework
|
spring-aop/src/test/java/org/springframework/aop/aspectj/annotation/AbstractAspectJAdvisorFactoryTests.java
|
{
"start": 33120,
"end": 33216
}
|
class ____ implements AnnotatedTarget {
}
@Retention(RetentionPolicy.RUNTIME)
@
|
AnnotatedTargetImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
|
{
"start": 1129,
"end": 5494
}
|
class ____ extends EsqlBinaryComparison implements Negatable<EsqlBinaryComparison> {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
Expression.class,
"LessThan",
EsqlBinaryComparison::readFrom
);
private static final Map<DataType, EsqlArithmeticOperation.BinaryEvaluator> evaluatorMap = Map.ofEntries(
Map.entry(DataType.INTEGER, LessThanIntsEvaluator.Factory::new),
Map.entry(DataType.DOUBLE, LessThanDoublesEvaluator.Factory::new),
Map.entry(DataType.LONG, LessThanLongsEvaluator.Factory::new),
Map.entry(DataType.UNSIGNED_LONG, LessThanLongsEvaluator.Factory::new),
Map.entry(DataType.DATETIME, LessThanLongsEvaluator.Factory::new),
Map.entry(DataType.DATE_NANOS, LessThanLongsEvaluator.Factory::new),
Map.entry(DataType.KEYWORD, LessThanKeywordsEvaluator.Factory::new),
Map.entry(DataType.TEXT, LessThanKeywordsEvaluator.Factory::new),
Map.entry(DataType.VERSION, LessThanKeywordsEvaluator.Factory::new),
Map.entry(DataType.IP, LessThanKeywordsEvaluator.Factory::new)
);
@FunctionInfo(
operator = "<",
returnType = { "boolean" },
description = "Check if one field is less than another. "
+ "If either field is <<esql-multivalued-fields,multivalued>> then the result is `null`.",
note = "This is pushed to the underlying search index if one side of the comparison is constant "
+ "and the other side is a field in the index that has both an <<mapping-index>> and <<doc-values>>."
)
public LessThan(
Source source,
@Param(
name = "lhs",
type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
description = "An expression."
) Expression left,
@Param(
name = "rhs",
type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
description = "An expression."
) Expression right
) {
this(source, left, right, null);
}
public LessThan(Source source, Expression left, Expression right, ZoneId zoneId) {
super(
source,
left,
right,
BinaryComparisonOperation.LT,
zoneId,
evaluatorMap,
LessThanNanosMillisEvaluator.Factory::new,
LessThanMillisNanosEvaluator.Factory::new
);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
protected NodeInfo<LessThan> info() {
return NodeInfo.create(this, LessThan::new, left(), right(), zoneId());
}
@Override
protected LessThan replaceChildren(Expression newLeft, Expression newRight) {
return new LessThan(source(), newLeft, newRight, zoneId());
}
@Override
public GreaterThan swapLeftAndRight() {
return new GreaterThan(source(), right(), left(), zoneId());
}
@Override
public GreaterThanOrEqual negate() {
return new GreaterThanOrEqual(source(), left(), right(), zoneId());
}
@Override
public EsqlBinaryComparison reverse() {
return new GreaterThan(source(), left(), right(), zoneId());
}
@Evaluator(extraName = "Ints")
static boolean processInts(int lhs, int rhs) {
return lhs < rhs;
}
@Evaluator(extraName = "Longs")
static boolean processLongs(long lhs, long rhs) {
return lhs < rhs;
}
@Evaluator(extraName = "MillisNanos")
static boolean processMillisNanos(long lhs, long rhs) {
// Note, parameters are reversed, so we need to invert the check.
return DateUtils.compareNanosToMillis(rhs, lhs) > 0;
}
@Evaluator(extraName = "NanosMillis")
static boolean processNanosMillis(long lhs, long rhs) {
return DateUtils.compareNanosToMillis(lhs, rhs) < 0;
}
@Evaluator(extraName = "Doubles")
static boolean processDoubles(double lhs, double rhs) {
return lhs < rhs;
}
@Evaluator(extraName = "Keywords") // TODO rename to "Bytes"
static boolean processKeywords(BytesRef lhs, BytesRef rhs) {
return lhs.compareTo(rhs) < 0;
}
}
|
LessThan
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/forms/code/javaguide/forms/customconstraint/nopayload/DBAccessForm.java
|
{
"start": 1775,
"end": 1891
}
|
class ____ {
public static String byEmail(String email, Database db) {
return email;
}
}
}
// #user
|
User
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamSettingsAction.java
|
{
"start": 1404,
"end": 1767
}
|
class ____ extends ActionType<GetDataStreamSettingsAction.Response> {
public static final String NAME = "indices:monitor/data_stream/settings/get";
public static final GetDataStreamSettingsAction INSTANCE = new GetDataStreamSettingsAction();
public GetDataStreamSettingsAction() {
super(NAME);
}
public static
|
GetDataStreamSettingsAction
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/generic/Claim.java
|
{
"start": 433,
"end": 612
}
|
interface ____ {
/**
* The value specifies the id name the claim to inject
*
* @return the claim name
*/
@Nonbinding
String value() default "";
}
|
Claim
|
java
|
spring-projects__spring-boot
|
module/spring-boot-integration/src/main/java/org/springframework/boot/integration/autoconfigure/IntegrationProperties.java
|
{
"start": 5363,
"end": 6103
}
|
class ____ {
/**
* TCP RSocket server host to connect to.
*/
private @Nullable String host;
/**
* TCP RSocket server port to connect to.
*/
private @Nullable Integer port;
/**
* WebSocket RSocket server uri to connect to.
*/
private @Nullable URI uri;
public void setHost(@Nullable String host) {
this.host = host;
}
public @Nullable String getHost() {
return this.host;
}
public void setPort(@Nullable Integer port) {
this.port = port;
}
public @Nullable Integer getPort() {
return this.port;
}
public void setUri(@Nullable URI uri) {
this.uri = uri;
}
public @Nullable URI getUri() {
return this.uri;
}
}
public static
|
Client
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/reactive/ServerHttpSecurityConfigurationTests.java
|
{
"start": 19966,
"end": 20173
}
|
class ____ {
@Bean
@Order(Ordered.HIGHEST_PRECEDENCE)
static Customizer<ServerHttpSecurity> httpSecurityCustomizer0() {
return mock(Customizer.class);
}
}
}
|
MultiServerHttpSecurityCustomizerConfig
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/boot/internal/TypeContributorImpl.java
|
{
"start": 426,
"end": 860
}
|
class ____ implements TypeContributor {
@Override
public void contribute(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
final EnversService enversService = serviceRegistry.getService( EnversService.class );
if ( !enversService.isEnabled() ) {
return;
}
typeContributions.contributeType(
new RevisionTypeType(),
new String[] { RevisionTypeType.class.getName() }
);
}
}
|
TypeContributorImpl
|
java
|
apache__camel
|
components/camel-kamelet/src/test/java/org/apache/camel/component/kamelet/KameletDiscoveryTest.java
|
{
"start": 1364,
"end": 3882
}
|
class ____ extends CamelTestSupport {
@Test
public void kameletCanBeDiscovered() throws Exception {
context.getRegistry().bind(
DefaultRoutesLoader.ROUTES_LOADER_KEY_PREFIX + "kamelet.yaml",
new RoutesBuilderLoaderSupport() {
@Override
public String getSupportedExtension() {
return "kamelet.yaml";
}
@Override
public RoutesBuilder loadRoutesBuilder(Resource resource) {
return new RouteBuilder() {
@Override
public void configure() {
routeTemplate("mySetBody")
.from("kamelet:source")
.setBody().constant("discovered");
}
};
}
});
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:discovery")
.toF("kamelet:mySetBody");
}
});
assertThat(fluentTemplate.to("direct:discovery").request(String.class)).isEqualTo("discovered");
}
@Test
public void kameletNotFound() {
context.getRegistry().bind(
DefaultRoutesLoader.ROUTES_LOADER_KEY_PREFIX + "kamelet.yaml",
new RoutesBuilderLoaderSupport() {
@Override
public String getSupportedExtension() {
return "kamelet.yaml";
}
@Override
public RoutesBuilder loadRoutesBuilder(Resource resource) {
return new RouteBuilder() {
@Override
public void configure() {
}
};
}
});
RouteBuilder builder = new RouteBuilder() {
@Override
public void configure() {
from("direct:discovery")
.toF("kamelet:mySetBody");
}
};
assertThatThrownBy(() -> context.addRoutes(builder))
.isInstanceOf(FailedToCreateRouteException.class)
.hasRootCauseMessage("Cannot find RouteTemplate with id mySetBody");
}
}
|
KameletDiscoveryTest
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/AbstractStringBasedJpaQueryUnitTests.java
|
{
"start": 7747,
"end": 7858
}
|
interface ____ {
void times(int invocationCount);
default void never() {
times(0);
}
}
}
}
|
Times
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/InterfacePathInheritanceTest.java
|
{
"start": 743,
"end": 2355
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(Z.class)
.addClass(Y.class)
.addClass(InheritanceTestClient.class))
.addBuildChainCustomizer(new Consumer<BuildChainBuilder>() {
@Override
public void accept(BuildChainBuilder buildChainBuilder) {
buildChainBuilder.addBuildStep(new BuildStep() {
@Override
public void execute(BuildContext context) {
ResourceScanningResultBuildItem consumed = context.consume(ResourceScanningResultBuildItem.class);
context.produce(new FeatureBuildItem("just-here-to-invoke-buildstep"));
Map<DotName, String> clientInterfaces = consumed.getResult().getClientInterfaces();
MatcherAssert.assertThat(clientInterfaces.size(), is(3));
clientInterfaces.forEach((k, v) -> {
MatcherAssert.assertThat("Path of %s needs to match".formatted(k), v, is("hello"));
});
}
}).consumes(ResourceScanningResultBuildItem.class).produces(FeatureBuildItem.class).build();
}
});
@Test
void basicTest() {
// see addBuildChainCustomizer of RegisterExtension for the test logic
}
@Path("hello")
public
|
InterfacePathInheritanceTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/data/TimeConversions.java
|
{
"start": 7704,
"end": 8772
}
|
class ____ extends Conversion<LocalDateTime> {
private final TimestampMillisConversion timestampMillisConversion = new TimestampMillisConversion();
@Override
public Class<LocalDateTime> getConvertedType() {
return LocalDateTime.class;
}
@Override
public String getLogicalTypeName() {
return "local-timestamp-millis";
}
@Override
public LocalDateTime fromLong(Long millisFromEpoch, Schema schema, LogicalType type) {
Instant instant = timestampMillisConversion.fromLong(millisFromEpoch, schema, type);
return LocalDateTime.ofInstant(instant, ZoneOffset.UTC);
}
@Override
public Long toLong(LocalDateTime timestamp, Schema schema, LogicalType type) {
Instant instant = timestamp.toInstant(ZoneOffset.UTC);
return timestampMillisConversion.toLong(instant, schema, type);
}
@Override
public Schema getRecommendedSchema() {
return LogicalTypes.localTimestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
}
}
public static
|
LocalTimestampMillisConversion
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/network/NetworkUtils.java
|
{
"start": 1270,
"end": 9378
}
|
class ____ {
/** no instantiation */
private NetworkUtils() {}
/**
* By default we bind to any addresses on an interface/name, unless restricted by :ipv4 etc.
* This property is unrelated to that, this is about what we *publish*. Today the code pretty much
* expects one address so this is used for the sort order.
* @deprecated transition mechanism only
*/
@Deprecated
static final boolean PREFER_V6 = preferIPv6Addresses();
@SuppressForbidden(
reason = "TODO Deprecate any lenient usage of Boolean#parseBoolean https://github.com/elastic/elasticsearch/issues/128993"
)
private static boolean preferIPv6Addresses() {
return Boolean.parseBoolean(System.getProperty("java.net.preferIPv6Addresses", "false"));
}
/**
* True if we can bind to a v6 address. Its silly, but for *binding* we have a need to know
* if the stack works. this can prevent scary noise on IPv4-only hosts.
* @deprecated transition mechanism only, do not use
*/
@Deprecated
public static final boolean SUPPORTS_V6;
static {
boolean v = false;
try {
for (NetworkInterface nic : getInterfaces()) {
for (InetAddress address : Collections.list(nic.getInetAddresses())) {
if (address instanceof Inet6Address) {
v = true;
break;
}
}
}
} catch (SecurityException | SocketException misconfiguration) {
v = true; // be optimistic, you misconfigure, then you get noise to your screen
}
SUPPORTS_V6 = v;
}
/** Sorts an address by preference. This way code like publishing can just pick the first one */
static int sortKey(InetAddress address, boolean prefer_v6) {
int key = address.getAddress().length;
if (prefer_v6) {
key = -key;
}
if (address.isAnyLocalAddress()) {
key += 5;
}
if (address.isMulticastAddress()) {
key += 4;
}
if (address.isLoopbackAddress()) {
key += 3;
}
if (address.isLinkLocalAddress()) {
key += 2;
}
if (address.isSiteLocalAddress()) {
key += 1;
}
return key;
}
/**
* Sorts addresses by order of preference. This is used to pick the first one for publishing
* @deprecated remove this when multihoming is really correct
*/
@Deprecated
// only public because of silly multicast
public static void sortAddresses(List<InetAddress> list) {
Collections.sort(list, new Comparator<InetAddress>() {
@Override
public int compare(InetAddress left, InetAddress right) {
int cmp = Integer.compare(sortKey(left, PREFER_V6), sortKey(right, PREFER_V6));
if (cmp == 0) {
cmp = new BytesRef(left.getAddress()).compareTo(new BytesRef(right.getAddress()));
}
return cmp;
}
});
}
/** Return all interfaces (and subinterfaces) on the system */
static List<NetworkInterface> getInterfaces() throws SocketException {
List<NetworkInterface> all = new ArrayList<>();
addAllInterfaces(all, Collections.list(NetworkInterface.getNetworkInterfaces()));
all.sort(Comparator.comparingInt(NetworkInterface::getIndex));
return all;
}
/** Helper for getInterfaces, recursively adds subinterfaces to {@code target} */
private static void addAllInterfaces(List<NetworkInterface> target, List<NetworkInterface> level) {
if (level.isEmpty() == false) {
target.addAll(level);
for (NetworkInterface intf : level) {
addAllInterfaces(target, Collections.list(intf.getSubInterfaces()));
}
}
}
/** Returns system default for SO_REUSEADDR */
public static boolean defaultReuseAddress() {
return Constants.WINDOWS ? false : true;
}
private static InetAddress[] filterAllAddresses(final Predicate<InetAddress> predicate, final String message) throws IOException {
final List<NetworkInterface> interfaces = getInterfaces();
final List<InetAddress> list = new ArrayList<>();
for (final NetworkInterface intf : interfaces) {
for (final InetAddress address : Collections.list(intf.getInetAddresses())) {
if (predicate.test(address) && isUp(intf)) {
list.add(address);
}
}
}
if (list.isEmpty()) {
throw new IllegalArgumentException(message + ", got " + interfaces);
}
return list.toArray(new InetAddress[0]);
}
private static boolean isUp(final NetworkInterface intf) throws IOException {
try {
return intf.isUp();
} catch (final SocketException e) {
// virtual ethernet devices come and go, we will treat such a device that disappeared as not being up
if (intf.getName().startsWith("veth") && e.getMessage().equals("No such device (getFlags() failed)")) {
return false;
}
throw new IOException("failed to check if interface [" + intf.getName() + "] is up", e);
}
}
/** Returns all interface-local scope (loopback) addresses for interfaces that are up. */
static InetAddress[] getLoopbackAddresses() throws IOException {
return filterAllAddresses(InetAddress::isLoopbackAddress, "no up-and-running loopback addresses found");
}
/** Returns all site-local scope (private) addresses for interfaces that are up. */
static InetAddress[] getSiteLocalAddresses() throws IOException {
return filterAllAddresses(InetAddress::isSiteLocalAddress, "No up-and-running site-local (private) addresses found");
}
/** Returns all global scope addresses for interfaces that are up. */
static InetAddress[] getGlobalAddresses() throws IOException {
return filterAllAddresses(
address -> address.isLoopbackAddress() == false
&& address.isSiteLocalAddress() == false
&& address.isLinkLocalAddress() == false,
"no up-and-running global-scope (public) addresses found"
);
}
/** Returns all addresses (any scope) for interfaces that are up.
* This is only used to pick a publish address, when the user set network.host to a wildcard */
public static InetAddress[] getAllAddresses() throws IOException {
return filterAllAddresses(Predicates.always(), "no up-and-running addresses found");
}
static Optional<NetworkInterface> maybeGetInterfaceByName(List<NetworkInterface> networkInterfaces, String name) {
return networkInterfaces.stream().filter(netIf -> name.equals(netIf.getName())).findFirst();
}
/** Returns addresses for the given interface (it must be marked up) */
static InetAddress[] getAddressesForInterface(String settingValue, String suffix, String interfaceName) throws SocketException {
Optional<NetworkInterface> networkInterface = maybeGetInterfaceByName(getInterfaces(), interfaceName);
if (networkInterface.isPresent() == false) {
throw new IllegalArgumentException(
"setting ["
+ settingValue
+ "] matched no network interfaces; valid values include ["
+ getInterfaces().stream()
.map(otherInterface -> "_" + otherInterface.getName() + suffix + "_")
.collect(Collectors.joining(", "))
+ "]"
);
}
if (networkInterface.get().isUp() == false) {
throw new IllegalArgumentException(
"setting ["
+ settingValue
+ "] matched network interface ["
+ networkInterface.get().getName()
+ "] but this
|
NetworkUtils
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
|
{
"start": 2637,
"end": 4331
}
|
class ____ {
private static final long TOKEN_VALIDITY_SEC = 1000;
private static final long TOKEN_MAX_INACTIVE_INTERVAL = 1000;
@Test
public void testGetConfiguration() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
FilterConfig config = mock(FilterConfig.class);
when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("");
when(config.getInitParameter("a")).thenReturn("A");
when(config.getInitParameterNames()).thenReturn(
new Vector<String>(Arrays.asList("a")).elements());
Properties props = filter.getConfiguration("", config);
assertEquals("A", props.getProperty("a"));
config = mock(FilterConfig.class);
when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
when(config.getInitParameter("foo.a")).thenReturn("A");
when(config.getInitParameterNames()).thenReturn(
new Vector<String>(Arrays.asList("foo.a")).elements());
props = filter.getConfiguration("foo.", config);
assertEquals("A", props.getProperty("a"));
}
@Test
public void testInitEmpty() throws Exception {
AuthenticationFilter filter = new AuthenticationFilter();
try {
FilterConfig config = mock(FilterConfig.class);
when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
filter.init(config);
fail();
} catch (ServletException ex) {
// Expected
assertEquals("Authentication type must be specified: simple|kerberos|<class>",
ex.getMessage());
} catch (Exception ex) {
fail();
} finally {
filter.destroy();
}
}
public static
|
TestAuthenticationFilter
|
java
|
apache__camel
|
components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/list/HazelcastListComponent.java
|
{
"start": 1190,
"end": 1687
}
|
class ____ extends HazelcastDefaultComponent {
public HazelcastListComponent() {
}
public HazelcastListComponent(final CamelContext context) {
super(context);
}
@Override
protected HazelcastDefaultEndpoint doCreateEndpoint(
String uri, String remaining, Map<String, Object> parameters, HazelcastInstance hzInstance)
throws Exception {
return new HazelcastListEndpoint(hzInstance, uri, this, remaining);
}
}
|
HazelcastListComponent
|
java
|
elastic__elasticsearch
|
build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java
|
{
"start": 2464,
"end": 12465
}
|
class ____ extends DefaultTask {
private final RegularFileProperty reportFile;
private static List<License> conventionalLicenses = Arrays.asList(
// Triple AGPL, SSPLv1 and Elastic
new License(
"TRIPLE",
"AGLP+SSPL+Elastic License",
"2.0\", the \"GNU Affero General Public License v3.0 only\", and the \"Server Side"
)
);
/**
* Allowed license families for this project.
*/
@Input
private List<String> approvedLicenses = new ArrayList<String>(
Arrays.asList("AGLP+SSPL+Elastic License", "Generated", "Vendored", "Apache LZ4-Java")
);
/**
* Files that should be excluded from the license header check. Use with extreme care, only in situations where the license on the
* source file is compatible with the codebase but we do not want to add the license to the list of approved headers (to avoid the
* possibility of inadvertently using the license on our own source files).
*/
@Input
private List<String> excludes = new ArrayList<String>();
private ListProperty<License> additionalLicenses;
@Inject
public LicenseHeadersTask(ObjectFactory objectFactory, ProjectLayout projectLayout) {
additionalLicenses = objectFactory.listProperty(License.class).convention(conventionalLicenses);
reportFile = objectFactory.fileProperty().convention(projectLayout.getBuildDirectory().file("reports/licenseHeaders/rat.xml"));
setDescription("Checks sources for missing, incorrect, or unacceptable license headers");
}
/**
* The list of java files to check. protected so the afterEvaluate closure in the
* constructor can write to it.
*/
@InputFiles
@IgnoreEmptyDirectories
@SkipWhenEmpty
@PathSensitive(PathSensitivity.RELATIVE)
public List<FileCollection> getJavaFiles() {
return getSourceFolders().get();
}
@Internal
public abstract ListProperty<FileCollection> getSourceFolders();
@OutputFile
public RegularFileProperty getReportFile() {
return reportFile;
}
public List<String> getApprovedLicenses() {
return approvedLicenses;
}
public void setApprovedLicenses(List<String> approvedLicenses) {
this.approvedLicenses = approvedLicenses;
}
public List<String> getExcludes() {
return excludes;
}
public void setExcludes(List<String> excludes) {
this.excludes = excludes;
}
/**
* Additional license families that may be found. The key is the license category name (5 characters),
* followed by the family name and the value list of patterns to search for.
*/
@Input
public ListProperty<License> getAdditionalLicenses() {
return additionalLicenses;
}
/**
* Add a new license type.
* <p>
* The license may be added to the {@link #approvedLicenses} using the {@code familyName}.
*
* @param categoryName A 5-character string identifier for the license
* @param familyName An expanded string name for the license
* @param pattern A pattern to search for, which if found, indicates a file contains the license
*/
public void additionalLicense(final String categoryName, String familyName, String pattern) {
if (categoryName.length() != 5) {
throw new IllegalArgumentException("License category name must be exactly 5 characters, got " + categoryName);
}
additionalLicenses.add(new License(categoryName, familyName, pattern));
}
@TaskAction
public void runRat() {
ReportConfiguration reportConfiguration = new ReportConfiguration();
reportConfiguration.setAddingLicenses(true);
List<IHeaderMatcher> matchers = new ArrayList<>();
matchers.add(Defaults.createDefaultMatcher());
// BSD 4-clause stuff (is disallowed below)
// we keep this here, in case someone adds BSD code for some reason, it should never be allowed.
matchers.add(subStringMatcher("BSD4 ", "Original BSD License (with advertising clause)", "All advertising materials"));
// Apache
matchers.add(subStringMatcher("AL ", "Apache", "Licensed to Elasticsearch B.V. under one or more contributor"));
matchers.add(subStringMatcher("AL ", "Apache", "Copyright Elasticsearch B.V., and/or licensed to Elasticsearch B.V."));
// Apache lz4-java
matchers.add(subStringMatcher("ALLZ4", "Apache LZ4-Java", "Copyright 2020 Adrien Grand and the lz4-java contributors"));
// Generated resources
matchers.add(subStringMatcher("GEN ", "Generated", "ANTLR GENERATED CODE"));
// Vendored Code
matchers.add(subStringMatcher("VEN ", "Vendored", "@notice"));
additionalLicenses.get()
.forEach(l -> matchers.add(subStringMatcher(l.licenseFamilyCategory, l.licenseFamilyName, l.substringPattern)));
reportConfiguration.setHeaderMatcher(new HeaderMatcherMultiplexer(matchers.toArray(IHeaderMatcher[]::new)));
reportConfiguration.setApprovedLicenseNames(approvedLicenses.stream().map(license -> {
SimpleLicenseFamily simpleLicenseFamily = new SimpleLicenseFamily();
simpleLicenseFamily.setFamilyName(license);
return simpleLicenseFamily;
}).toArray(SimpleLicenseFamily[]::new));
File repFile = getReportFile().getAsFile().get();
ClaimStatistic stats = generateReport(reportConfiguration, repFile);
boolean unknownLicenses = stats.getNumUnknown() > 0;
boolean unApprovedLicenses = stats.getNumUnApproved() > 0;
if (unknownLicenses || unApprovedLicenses) {
getLogger().error("The following files contain unapproved license headers:");
unapprovedFiles(repFile).forEach(getLogger()::error);
throw new GradleException("Check failed. License header problems were found. Full details: " + repFile.getAbsolutePath());
}
}
private IHeaderMatcher subStringMatcher(String licenseFamilyCategory, String licenseFamilyName, String substringPattern) {
SubstringLicenseMatcher substringLicenseMatcher = new SubstringLicenseMatcher();
substringLicenseMatcher.setLicenseFamilyCategory(licenseFamilyCategory);
substringLicenseMatcher.setLicenseFamilyName(licenseFamilyName);
SubstringLicenseMatcher.Pattern pattern = new SubstringLicenseMatcher.Pattern();
pattern.setSubstring(substringPattern);
substringLicenseMatcher.addConfiguredPattern(pattern);
return substringLicenseMatcher;
}
private ClaimStatistic generateReport(ReportConfiguration config, File xmlReportFile) {
try {
Files.deleteIfExists(reportFile.get().getAsFile().toPath());
BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(xmlReportFile));
return toXmlReportFile(config, bufferedWriter);
} catch (IOException | RatException exception) {
throw new GradleException("Cannot generate license header report for " + getPath(), exception);
}
}
private ClaimStatistic toXmlReportFile(ReportConfiguration config, Writer writer) throws RatException, IOException {
ClaimStatistic stats = new ClaimStatistic();
RatReport standardReport = XmlReportFactory.createStandardReport(new XmlWriter(writer), stats, config);
standardReport.startReport();
for (FileCollection dirSet : getSourceFolders().get()) {
for (File f : dirSet.getAsFileTree().matching(patternFilterable -> patternFilterable.exclude(getExcludes()))) {
standardReport.report(new FileDocument(f));
}
}
standardReport.endReport();
writer.flush();
writer.close();
return stats;
}
private static List<String> unapprovedFiles(File xmlReportFile) {
try {
NodeList resourcesNodes = createXmlDocumentBuilderFactory().newDocumentBuilder()
.parse(xmlReportFile)
.getElementsByTagName("resource");
return elementList(resourcesNodes).stream()
.filter(
resource -> elementList(resource.getChildNodes()).stream()
.anyMatch(n -> n.getTagName().equals("license-approval") && n.getAttribute("name").equals("false"))
)
.map(e -> e.getAttribute("name"))
.sorted()
.collect(Collectors.toList());
} catch (SAXException | IOException | ParserConfigurationException e) {
throw new GradleException("Error parsing xml report " + xmlReportFile.getAbsolutePath());
}
}
private static DocumentBuilderFactory createXmlDocumentBuilderFactory() throws ParserConfigurationException {
final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setXIncludeAware(false);
dbf.setIgnoringComments(true);
dbf.setExpandEntityReferences(false);
dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
return dbf;
}
private static List<Element> elementList(NodeList resourcesNodes) {
List<Element> nodeList = new ArrayList<>(resourcesNodes.getLength());
for (int idx = 0; idx < resourcesNodes.getLength(); idx++) {
nodeList.add((Element) resourcesNodes.item(idx));
}
return nodeList;
}
static
|
LicenseHeadersTask
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/lob/Dog.java
|
{
"start": 180,
"end": 346
}
|
class ____ implements Serializable {
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
Dog
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/lock/prevention/CallMakingResource.java
|
{
"start": 363,
"end": 830
}
|
class ____ {
@RestClient
TestClient client;
@GET
@Path("/block")
public String doABlockingCall() {
return client.blockingCall();
}
@GET
@Path("/non-block")
public CompletionStage<String> doANonBlockingCall() {
return client.nonBlockingCall();
}
@GET
@Path("/block-properly")
@Blocking
public String doABlockingCallFromBlocking() {
return client.blockingCall();
}
}
|
CallMakingResource
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/CamelEvent.java
|
{
"start": 7669,
"end": 7858
}
|
interface ____ extends ExchangeFailureEvent {
@Override
default Type getType() {
return Type.ExchangeFailureHandled;
}
}
|
ExchangeFailureHandledEvent
|
java
|
resilience4j__resilience4j
|
resilience4j-rxjava2/src/test/java/io/github/resilience4j/bulkhead/operator/ObserverBulkheadTest.java
|
{
"start": 486,
"end": 1842
}
|
class ____ {
private Bulkhead bulkhead;
@Before
public void setUp() {
bulkhead = mock(Bulkhead.class, RETURNS_DEEP_STUBS);
}
@Test
public void shouldEmitAllEvents() {
given(bulkhead.tryAcquirePermission()).willReturn(true);
Observable.fromArray("Event 1", "Event 2")
.compose(BulkheadOperator.of(bulkhead))
.test()
.assertResult("Event 1", "Event 2");
then(bulkhead).should().onComplete();
}
@Test
public void shouldPropagateError() {
given(bulkhead.tryAcquirePermission()).willReturn(true);
Observable.error(new IOException("BAM!"))
.compose(BulkheadOperator.of(bulkhead))
.test()
.assertSubscribed()
.assertError(IOException.class)
.assertNotComplete();
then(bulkhead).should().onComplete();
}
@Test
public void shouldEmitErrorWithBulkheadFullException() {
given(bulkhead.tryAcquirePermission()).willReturn(false);
Observable.fromArray("Event 1", "Event 2")
.compose(BulkheadOperator.of(bulkhead))
.test()
.assertSubscribed()
.assertError(BulkheadFullException.class)
.assertNotComplete();
then(bulkhead).should(never()).onComplete();
}
}
|
ObserverBulkheadTest
|
java
|
google__gson
|
extras/src/test/java/com/google/gson/interceptors/InterceptorTest.java
|
{
"start": 6011,
"end": 6206
}
|
class ____ {
static final String DEFAULT_FIRST_LINE = "unknown";
String firstLine;
String secondLine;
String city;
String state;
String zip;
}
public static final
|
Address
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/support/incrementer/AbstractColumnMaxValueIncrementer.java
|
{
"start": 1064,
"end": 2712
}
|
class ____ extends AbstractDataFieldMaxValueIncrementer {
/** The name of the column for this sequence. */
private String columnName;
/** The number of keys buffered in a cache. */
private int cacheSize = 1;
/**
* Default constructor for bean property style usage.
* @see #setDataSource
* @see #setIncrementerName
* @see #setColumnName
*/
@SuppressWarnings("NullAway.Init")
public AbstractColumnMaxValueIncrementer() {
}
/**
* Convenience constructor.
* @param dataSource the DataSource to use
* @param incrementerName the name of the sequence/table to use
* @param columnName the name of the column in the sequence table to use
*/
public AbstractColumnMaxValueIncrementer(DataSource dataSource, String incrementerName, String columnName) {
super(dataSource, incrementerName);
Assert.notNull(columnName, "Column name must not be null");
this.columnName = columnName;
}
/**
* Set the name of the column in the sequence table.
*/
public void setColumnName(String columnName) {
this.columnName = columnName;
}
/**
* Return the name of the column in the sequence table.
*/
public String getColumnName() {
return this.columnName;
}
/**
* Set the number of buffered keys.
*/
public void setCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
}
/**
* Return the number of buffered keys.
*/
public int getCacheSize() {
return this.cacheSize;
}
@Override
public void afterPropertiesSet() {
super.afterPropertiesSet();
if (this.columnName == null) {
throw new IllegalArgumentException("Property 'columnName' is required");
}
}
}
|
AbstractColumnMaxValueIncrementer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/blockloader/docvalues/BytesRefsFromBinaryBlockLoader.java
|
{
"start": 798,
"end": 1807
}
|
class ____ extends BlockDocValuesReader.DocValuesBlockLoader {
// Name of the field whose binary doc values are loaded into blocks.
private final String fieldName;
public BytesRefsFromBinaryBlockLoader(String fieldName) {
this.fieldName = fieldName;
}
// Blocks produced by this loader hold BytesRef values.
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.bytesRefs(expectedCount);
}
@Override
public AllReader reader(LeafReaderContext context) throws IOException {
BinaryDocValues docValues = context.reader().getBinaryDocValues(fieldName);
return createReader(docValues);
}
// A segment with no doc values for this field yields a null for every document.
public static AllReader createReader(@Nullable BinaryDocValues docValues) {
if (docValues == null) {
return new ConstantNullsReader();
}
return new BytesRefsFromBinary(docValues);
}
/**
* Read BinaryDocValues with no additional structure in the BytesRefs.
* Each BytesRef from the doc values maps directly to a value in the block loader.
*/
static
|
BytesRefsFromBinaryBlockLoader
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/io/FileBackedOutputStreamTest.java
|
{
"start": 1474,
"end": 6852
}
|
class ____ extends IoTestCase {
// Matrix of threshold sizes vs. data size, single-byte and bulk writes, no finalize-reset.
public void testThreshold() throws Exception {
testThreshold(0, 100, true, false);
testThreshold(10, 100, true, false);
testThreshold(100, 100, true, false);
testThreshold(1000, 100, true, false);
testThreshold(0, 100, false, false);
testThreshold(10, 100, false, false);
testThreshold(100, 100, false, false);
testThreshold(1000, 100, false, false);
}
/**
 * Writes {@code dataSize} bytes in two chunks — one that stays within the threshold and one
 * that crosses it — and verifies in-memory vs. file-backed behavior, temp-file permissions,
 * and cleanup on {@code reset()}.
 */
private void testThreshold(
int fileThreshold, int dataSize, boolean singleByte, boolean resetOnFinalize)
throws IOException {
byte[] data = newPreFilledByteArray(dataSize);
FileBackedOutputStream out = new FileBackedOutputStream(fileThreshold, resetOnFinalize);
ByteSource source = out.asByteSource();
int chunk1 = min(dataSize, fileThreshold);
int chunk2 = dataSize - chunk1;
// Write just enough to not trip the threshold
if (chunk1 > 0) {
write(out, data, 0, chunk1, singleByte);
assertTrue(ByteSource.wrap(data).slice(0, chunk1).contentEquals(source));
}
// While at or under the threshold, no backing file may exist yet.
File file = out.getFile();
assertThat(file).isNull();
// Write data to go over the threshold
if (chunk2 > 0) {
write(out, data, chunk1, chunk2, singleByte);
file = out.getFile();
assertEquals(dataSize, file.length());
assertTrue(file.exists());
assertThat(file.getName()).contains("FileBackedOutputStream");
// On POSIX platforms the temp file must be readable/writable by the owner only.
if (!isAndroid() && !isWindows()) {
PosixFileAttributes attributes =
java.nio.file.Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class)
.readAttributes();
assertThat(attributes.permissions()).containsExactly(OWNER_READ, OWNER_WRITE);
}
}
out.close();
// Check that source returns the right data
assertThat(source.read()).isEqualTo(data);
// Make sure that reset deleted the file
out.reset();
if (file != null) {
assertFalse(file.exists());
}
}
// Same matrix as testThreshold(), but with resetOnFinalize enabled.
public void testThreshold_resetOnFinalize() throws Exception {
testThreshold(0, 100, true, true);
testThreshold(10, 100, true, true);
testThreshold(100, 100, true, true);
testThreshold(1000, 100, true, true);
testThreshold(0, 100, false, true);
testThreshold(10, 100, false, true);
testThreshold(100, 100, false, true);
testThreshold(1000, 100, false, true);
}
// Writes b[off, off+len) either one byte at a time or as a single bulk call.
static void write(OutputStream out, byte[] b, int off, int len, boolean singleByte)
throws IOException {
if (singleByte) {
for (int i = off; i < off + len; i++) {
out.write(b[i]);
}
} else {
out.write(b, off, len);
}
out.flush(); // for coverage
}
// TODO(chrisn): only works if we ensure we have crossed file threshold
public void testWriteErrorAfterClose() throws Exception {
byte[] data = newPreFilledByteArray(100);
FileBackedOutputStream out = new FileBackedOutputStream(50);
ByteSource source = out.asByteSource();
out.write(data);
assertThat(source.read()).isEqualTo(data);
out.close();
assertThrows(IOException.class, () -> out.write(42));
// Verify that write had no effect
assertThat(source.read()).isEqualTo(data);
out.reset();
}
// reset() must discard previously written data and leave the stream writable again.
public void testReset() throws Exception {
byte[] data = newPreFilledByteArray(100);
FileBackedOutputStream out = new FileBackedOutputStream(Integer.MAX_VALUE);
ByteSource source = out.asByteSource();
out.write(data);
assertThat(source.read()).isEqualTo(data);
out.reset();
assertThat(source.read()).isEmpty();
out.write(data);
assertThat(source.read()).isEqualTo(data);
out.close();
}
private static boolean isAndroid() {
return System.getProperty("java.runtime.name", "").contains("Android");
}
private static boolean isWindows() {
return OS_NAME.value().startsWith("Windows");
}
/**
 * Test that verifies the resource leak fix for <a
 * href="https://github.com/google/guava/issues/5756">Issue #5756</a>.
 *
 * <p>This test covers a scenario where we write a smaller amount of data first, then write a
 * large amount that crosses the threshold (transitioning from "not at threshold" to "over the
 * threshold"). (We then write some more afterward.) This differs from the existing
 * testThreshold() which writes exactly enough bytes to fill the buffer, then immediately writes
 * more bytes.
 *
 * <p>Note: Direct testing of the {@link IOException} scenario during write/flush is challenging
 * without mocking. This test verifies that normal operation with threshold crossing still works
 * correctly with the fix in place.
 */
public void testThresholdCrossing_resourceManagement() throws Exception {
FileBackedOutputStream out = new FileBackedOutputStream(/* fileThreshold= */ 10);
ByteSource source = out.asByteSource();
byte[] chunk1 = newPreFilledByteArray(8); // Below threshold
byte[] chunk2 = newPreFilledByteArray(5); // Crosses threshold
byte[] chunk3 = newPreFilledByteArray(20); // More data to file
out.write(chunk1);
assertThat(out.getFile()).isNull();
out.write(chunk2);
assertThat(out.getFile()).isNotNull();
assertThat(source.read()).isEqualTo(concat(chunk1, chunk2));
out.write(chunk3);
assertThat(source.read()).isEqualTo(concat(chunk1, chunk2, chunk3));
out.reset();
}
}
|
FileBackedOutputStreamTest
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/config/OptimizerConfigOptions.java
|
{
"start": 29269,
"end": 30214
}
|
enum ____ implements DescribedEnum {
// NOTE(review): the AUTO description string starts with a stray leading space — confirm
// against upstream before normalizing, since it feeds generated configuration docs.
AUTO("auto", text(" Flink will automatically perform this optimization.")),
FORCED(
"forced",
text(
"Flink will perform this optimization even if it introduces extra hash shuffling.")),
NONE("none", text("Skewed join optimization will not be performed."));
// Config-string form of the constant, returned by toString().
private final String value;
// Human-readable description consumed by the documentation generator.
private final InlineElement description;
AdaptiveSkewedJoinOptimizationStrategy(String value, InlineElement description) {
this.value = value;
this.description = description;
}
@Override
public String toString() {
return value;
}
@Override
public InlineElement getDescription() {
return description;
}
}
/** Strategy for delta join. */
@PublicEvolving
public
|
AdaptiveSkewedJoinOptimizationStrategy
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/interfaces/hbm/propertiesAudited2/joined/JoinedPropertiesAudited2Test.java
|
{
"start": 539,
"end": 755
}
|
class ____ extends AbstractPropertiesAudited2Test {
// Runs the shared audited-properties test suite against the joined-subclass mapping variant.
@Override
protected String[] getMappings() {
return new String[] {"mappings/interfaces/joinedPropertiesAudited2Mappings.hbm.xml"};
}
}
|
JoinedPropertiesAudited2Test
|
java
|
apache__camel
|
components/camel-huawei/camel-huaweicloud-obs/src/test/java/org/apache/camel/component/huaweicloud/obs/GetObjectFunctionalTest.java
|
{
"start": 1322,
"end": 3396
}
|
class ____ extends CamelTestSupport {
    private static final String ACCESS_KEY = "replace_this_with_access_key";
    private static final String SECRET_KEY = "replace_this_with_secret_key";
    private static final String REGION = "replace_this_with_region";

    /**
     * Route under test: sets the bucket/object exchange properties, fetches the object from
     * HuaweiCloud OBS and forwards the result to a mock endpoint for verification.
     */
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                from("direct:get_object")
                        .setProperty(OBSProperties.BUCKET_NAME, constant("reji-test"))
                        .setProperty(OBSProperties.OBJECT_NAME, constant("test_file.txt"))
                        // Fixed: the "&region=" query parameter had been corrupted to "®ion="
                        // ("&reg" decoded as the HTML entity ®), which silently dropped the
                        // region option from the endpoint URI.
                        .to("hwcloud-obs:getObject?" +
                                "accessKey=" + ACCESS_KEY +
                                "&secretKey=" + SECRET_KEY +
                                "&region=" + REGION +
                                "&ignoreSslVerification=true")
                        .log("Get object successful")
                        .to("log:LOG?showAll=true")
                        .to("mock:get_object_result");
            }
        };
    }

    /**
     * The following test cases should be manually enabled to perform test against the actual HuaweiCloud OBS server
     * with real user credentials. To perform this test, manually comment out the @Ignore annotation and enter relevant
     * service parameters in the placeholders above (static variables of this test class)
     *
     * @throws Exception
     */
    @Disabled("Manually enable this once you configure the parameters in the placeholders above")
    @Test
    public void testGetObjectFunctionalTest() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:get_object_result");
        mock.expectedMinimumMessageCount(1);
        template.sendBody("direct:get_object", null);
        Exchange responseExchange = mock.getExchanges().get(0);
        mock.assertIsSatisfied();
        // The fetched object must have non-empty textual content.
        assertNotNull(responseExchange.getIn().getBody(String.class));
        assertTrue(responseExchange.getIn().getBody(String.class).length() > 0);
    }
}
|
GetObjectFunctionalTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/web/WebTestContextBootstrapper.java
|
{
"start": 1569,
"end": 1717
}
|
class ____ extends DefaultTestContextBootstrapper {
/**
* Returns {@link WebDelegatingSmartContextLoader} if the supplied
|
WebTestContextBootstrapper
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/qualifiers/multiple/MultipleCompositeQualifierSpec.java
|
{
"start": 3009,
"end": 3074
}
|
// Marker qualifier interface: declares no members and is matched purely by type.
interface ____ {
}
@Qualifier
@Retention(RUNTIME)
@
|
RequiresSignature
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/http/SharedHttpClientTest.java
|
{
"start": 6608,
"end": 7854
}
|
class ____ extends AbstractVerticle implements Handler<Message<Integer>> {
// Random event-bus address used to trigger request bursts; unique per test run.
static final String TRIGGER_ADDRESS = UUID.randomUUID().toString();
// Name under which the HTTP client is shared across verticle instances.
static final String SHARED_CLIENT_NAME = UUID.randomUUID().toString();
// Callback invoked once for every completed HTTP request.
final Consumer<ClientVerticle> onResponseReceived;
volatile Context context;
HttpClient client;
ClientVerticle(Consumer<ClientVerticle> onResponseReceived) {
this.onResponseReceived = onResponseReceived;
}
@Override
public void start(Promise<Void> startPromise) throws Exception {
context = super.context;
// Shared client: every instance created with SHARED_CLIENT_NAME reuses the same client/pool.
HttpClientOptions options = new HttpClientOptions(config().getJsonObject("httpClientOptions")).setShared(true).setName(SHARED_CLIENT_NAME);
PoolOptions poolOptions = new PoolOptions(config().getJsonObject("poolOptions"));
client = vertx.createHttpClient(options, poolOptions);
// Deployment completes only once the event-bus consumer is registered.
vertx.eventBus().consumer(TRIGGER_ADDRESS, this).completion().onComplete(startPromise);
}
@Override
public void handle(Message<Integer> message) {
// Fire message.body() GET requests; each completion reports back through the callback.
for (int i = 0; i < message.body(); i++) {
client.request(GET, "/").compose(HttpClientRequest::send).onComplete(ar -> onResponseReceived.accept(this));
}
}
}
private static
|
ClientVerticle
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/iterative/concurrent/SuperstepBarrierTest.java
|
{
"start": 1236,
"end": 2379
}
|
class ____ {
@Test
public void syncAllWorkersDone() throws InterruptedException {
// Repeat to raise the odds of exposing races in the barrier implementation.
for (int n = 0; n < 20; n++) {
sync(new AllWorkersDoneEvent());
}
}
@Test
public void syncTermination() throws InterruptedException {
// Repeat to raise the odds of exposing races in the barrier implementation.
for (int n = 0; n < 20; n++) {
sync(new TerminationEvent());
}
}
// Runs a head thread waiting on the barrier and a sync thread delivering the event, then
// checks that termination is signaled iff the delivered event was a TerminationEvent.
private void sync(TaskEvent event) throws InterruptedException {
TerminationSignaled terminationSignaled = new TerminationSignaled();
SuperstepBarrier barrier = new SuperstepBarrier(getClass().getClassLoader());
barrier.setup();
Thread headThread = new Thread(new IterationHead(barrier, terminationSignaled));
Thread syncThread = new Thread(new IterationSync(barrier, event));
headThread.start();
syncThread.start();
headThread.join();
syncThread.join();
if (event instanceof TerminationEvent) {
assertTrue(terminationSignaled.isTerminationSignaled());
} else {
assertFalse(terminationSignaled.isTerminationSignaled());
}
}
|
SuperstepBarrierTest
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/MultimapAssert_hasSameEntriesAs_Test.java
|
{
"start": 1267,
"end": 4733
}
|
class ____ extends MultimapAssertBaseTest {
// Comparison target rebuilt per test; LinkedHashMultimap keeps insertion order.
private final Multimap<String, String> other = LinkedHashMultimap.create();
@Test
void should_pass_if_actual_has_the_same_entries_as_the_given_multimap() {
// GIVEN
other.putAll("Lakers", List.of("Kobe Bryant", "Magic Johnson", "Kareem Abdul Jabbar"));
other.putAll("Bulls", List.of("Michael Jordan", "Scottie Pippen", "Derrick Rose"));
other.putAll("Spurs", List.of("Tony Parker", "Tim Duncan", "Manu Ginobili"));
// THEN — the assertion must be symmetric when both sides hold identical entries.
assertThat(actual).hasSameEntriesAs(other);
assertThat(other).hasSameEntriesAs(actual);
}
@Test
void should_pass_with_multimaps_having_the_same_entries_with_different_but_compatible_generic_types() {
// GIVEN
Multimap<Object, Object> other = LinkedHashMultimap.create();
other.putAll("Lakers", List.of("Kobe Bryant", "Magic Johnson", "Kareem Abdul Jabbar"));
other.putAll("Bulls", List.of("Michael Jordan", "Scottie Pippen", "Derrick Rose"));
other.putAll("Spurs", List.of("Tony Parker", "Tim Duncan", "Manu Ginobili"));
// THEN
assertThat(other).hasSameEntriesAs(actual);
}
@Test
void should_pass_if_both_multimaps_are_empty() {
// GIVEN
actual.clear();
// THEN
assertThat(actual).hasSameEntriesAs(other);
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).hasSameEntriesAs(other));
// THEN
then(thrown).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
void should_fail_if_multimap_to_compare_actual_with_is_null() {
// GIVEN
Multimap<String, String> other = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).hasSameEntriesAs(other));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("The multimap to compare actual with should not be null");
}
@Test
void should_fail_if_actual_contains_entries_not_in_given_multimap() {
// GIVEN — 'other' deliberately lacks the Spurs entries present in 'actual'.
other.putAll("Lakers", List.of("Kobe Bryant", "Magic Johnson", "Kareem Abdul Jabbar"));
other.putAll("Bulls", List.of("Michael Jordan", "Scottie Pippen", "Derrick Rose"));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).hasSameEntriesAs(other));
// THEN
then(error).hasMessage(shouldContainOnly(actual, other, null,
List.of(entry("Spurs", "Tony Parker"), entry("Spurs", "Tim Duncan"),
entry("Spurs", "Manu Ginobili"))).create());
}
@Test
void should_fail_if_actual_does_not_contain_all_given_multimap_entries() {
// GIVEN — 'other' contains extra Warriors entries missing from 'actual'.
other.putAll("Lakers", List.of("Kobe Bryant", "Magic Johnson", "Kareem Abdul Jabbar"));
other.putAll("Bulls", List.of("Michael Jordan", "Scottie Pippen", "Derrick Rose"));
other.putAll("Spurs", List.of("Tony Parker", "Tim Duncan", "Manu Ginobili"));
other.putAll("Warriors", List.of("Stephen Curry", "Klay Thompson"));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).hasSameEntriesAs(other));
// THEN
then(error).hasMessage(shouldContainOnly(actual, other,
List.of(entry("Warriors", "Stephen Curry"), entry("Warriors", "Klay Thompson")),
null).create());
}
}
|
MultimapAssert_hasSameEntriesAs_Test
|
java
|
apache__spark
|
sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2WithV2Filter.java
|
{
"start": 5655,
"end": 6834
}
|
class ____ implements PartitionReaderFactory {
// Columns requested by the scan; determines which fields get populated per row.
StructType requiredSchema;
AdvancedReaderFactoryWithV2Filter(StructType requiredSchema) {
this.requiredSchema = requiredSchema;
}
// Emits one row per value in [p.start, p.end), projecting only the required columns:
// column "i" holds the current value and column "j" its negation.
@Override
public PartitionReader<InternalRow> createReader(InputPartition partition) {
JavaRangeInputPartition p = (JavaRangeInputPartition) partition;
return new PartitionReader<InternalRow>() {
// Starts one before p.start so the first next() advances to p.start.
private int current = p.start - 1;
@Override
public boolean next() throws IOException {
current += 1;
return current < p.end;
}
@Override
public InternalRow get() {
Object[] values = new Object[requiredSchema.size()];
for (int i = 0; i < values.length; i++) {
if ("i".equals(requiredSchema.apply(i).name())) {
values[i] = current;
} else if ("j".equals(requiredSchema.apply(i).name())) {
values[i] = -current;
}
}
return new GenericInternalRow(values);
}
@Override
public void close() throws IOException {
}
};
}
}
}
|
AdvancedReaderFactoryWithV2Filter
|
java
|
spring-projects__spring-security
|
access/src/main/java/org/springframework/security/web/access/channel/ChannelDecisionManagerImpl.java
|
{
"start": 2127,
"end": 3826
}
|
class ____ implements ChannelDecisionManager, InitializingBean {

	public static final String ANY_CHANNEL = "ANY_CHANNEL";

	private List<ChannelProcessor> channelProcessors;

	@Override
	public void afterPropertiesSet() {
		Assert.notEmpty(this.channelProcessors, "A list of ChannelProcessors is required");
	}

	@Override
	public void decide(FilterInvocation invocation, Collection<ConfigAttribute> config)
			throws IOException, ServletException {
		// ANY_CHANNEL acts as a wildcard: if present, every channel is acceptable.
		if (config.stream().anyMatch(attr -> ANY_CHANNEL.equals(attr.getAttribute()))) {
			return;
		}
		// Consult each processor in turn and stop as soon as one commits a response
		// (for example by redirecting to the required channel).
		for (ChannelProcessor channelProcessor : this.channelProcessors) {
			channelProcessor.decide(invocation, config);
			if (invocation.getResponse().isCommitted()) {
				break;
			}
		}
	}

	protected @Nullable List<ChannelProcessor> getChannelProcessors() {
		return this.channelProcessors;
	}

	@SuppressWarnings("cast")
	public void setChannelProcessors(List<?> channelProcessors) {
		Assert.notEmpty(channelProcessors, "A list of ChannelProcessors is required");
		// Defensive copy with an element-type check so later iteration is type-safe.
		this.channelProcessors = new ArrayList<>(channelProcessors.size());
		for (Object candidate : channelProcessors) {
			Assert.isInstanceOf(ChannelProcessor.class, candidate, () -> "ChannelProcessor "
					+ candidate.getClass().getName() + " must implement ChannelProcessor");
			this.channelProcessors.add((ChannelProcessor) candidate);
		}
	}

	@Override
	public boolean supports(ConfigAttribute attribute) {
		// The wildcard attribute is always supported; otherwise defer to the processors.
		return ANY_CHANNEL.equals(attribute.getAttribute())
				|| this.channelProcessors.stream().anyMatch(processor -> processor.supports(attribute));
	}

}
|
ChannelDecisionManagerImpl
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.