language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestProcedureCatalogFactory.java | {
"start": 8188,
"end": 8713
} | class ____ implements Procedure {
@ProcedureHint(
input = {@DataTypeHint("STRING"), @DataTypeHint("INT")},
output = @DataTypeHint("STRING"),
argumentNames = {"c", "d"})
public String[] call(ProcedureContext procedureContext, String arg1, Integer arg2) {
return new String[] {arg1 + ", " + arg2};
}
}
/** A procedure to generate a user according to the passed parameters for testing purpose. */
public static | NamedArgumentsProcedure |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/SetBasicValueTypeSecondPass.java | {
"start": 329,
"end": 652
} | class ____ implements SecondPass {
private final BasicValueBinder binder;
public SetBasicValueTypeSecondPass(BasicValueBinder val) {
binder = val;
}
@Override
public void doSecondPass(Map<String, PersistentClass> persistentClasses) throws MappingException {
binder.fillSimpleValue();
}
}
| SetBasicValueTypeSecondPass |
java | apache__camel | core/camel-base/src/main/java/org/apache/camel/converter/CollectionConverter.java | {
"start": 1364,
"end": 4722
} | class ____ {
/**
* Utility classes should not have a public constructor.
*/
private CollectionConverter() {
}
/**
* Converts a collection to an array
*/
@Converter(order = 1)
public static Object[] toArray(Collection<?> value) {
return value.toArray();
}
/**
* Converts an array to a collection
*/
@Converter(order = 2)
public static List<Object> toList(Object[] array) {
return Arrays.asList(array);
}
/**
* Converts a collection to a List if it is not already
*/
@Converter(order = 3)
public static <T> List<T> toList(Collection<T> collection) {
return new ArrayList<>(collection);
}
/**
* Converts an {@link Iterator} to a {@link ArrayList}
*/
@Converter(order = 4)
public static <T> ArrayList<T> toArrayList(Iterator<T> it) {
if (it instanceof ArrayList list) {
return list;
}
ArrayList<T> list = new ArrayList<>();
while (it.hasNext()) {
list.add(it.next());
}
return list;
}
/**
* Converts an {@link Iterable} to a {@link ArrayList}
*/
@Converter(order = 5)
public static <T> ArrayList<T> toArrayList(Iterable<T> it) {
if (it instanceof ArrayList list) {
return list;
}
ArrayList<T> list = new ArrayList<>();
for (T value : it) {
list.add(value);
}
return list;
}
@Converter(order = 6)
public static Set<Object> toSet(Object[] array) {
return new HashSet<>(Arrays.asList(array));
}
@Converter(order = 7)
public static <T> Set<T> toSet(Collection<T> collection) {
return new HashSet<>(collection);
}
@Converter(order = 8)
public static <K, V> Set<Map.Entry<K, V>> toSet(Map<K, V> map) {
return map.entrySet();
}
@Converter(order = 9)
public static <K, V> Collection<Map.Entry<K, V>> toCollection(Map<K, V> map) {
return map.entrySet();
}
@Converter(order = 10)
public static Properties toProperties(Map<Object, Object> map) {
Properties answer = new Properties();
answer.putAll(map);
return answer;
}
@Converter(order = 11)
public static <K, V> Hashtable<K, V> toHashtable(Map<? extends K, ? extends V> map) {
return new Hashtable<>(map);
}
@Converter(order = 12)
public static <K, V> HashMap<K, V> toHashMap(Map<? extends K, ? extends V> map) {
return new HashMap<>(map);
}
/**
* Converts an {@link Iterable} into a {@link List}
*/
@Converter(order = 13)
public static <T> List<T> toList(Iterable<T> iterable) {
if (iterable instanceof List list) {
return list;
}
List<T> result = new LinkedList<>();
for (T value : iterable) {
result.add(value);
}
return result;
}
/**
* Converts an {@link Iterator} into a {@link List}
*/
@Converter(order = 14)
public static <T> List<T> toList(Iterator<T> it) {
if (it instanceof List value) {
return value;
}
List<T> result = new LinkedList<>();
while (it.hasNext()) {
result.add(it.next());
}
return result;
}
}
| CollectionConverter |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/assertj/ApplicationContextAssert.java | {
"start": 20155,
"end": 21664
} | class ____<C extends ApplicationContext> extends BasicErrorMessageFactory {
private ContextFailedToStart(C context, Throwable ex, String expectationFormat, Object... arguments) {
super("%nExpecting:%n <%s>%n" + expectationFormat + ":%nbut context failed to start:%n%s",
combineArguments(context.toString(), ex, arguments));
}
private static Object[] combineArguments(String context, Throwable ex, Object[] arguments) {
Object[] combinedArguments = new Object[arguments.length + 2];
combinedArguments[0] = unquotedString(context);
System.arraycopy(arguments, 0, combinedArguments, 1, arguments.length);
combinedArguments[combinedArguments.length - 1] = unquotedString(getIndentedStackTraceAsString(ex));
return combinedArguments;
}
private static String getIndentedStackTraceAsString(Throwable ex) {
String stackTrace = getStackTraceAsString(ex);
return indent(stackTrace);
}
private static String getStackTraceAsString(Throwable ex) {
StringWriter writer = new StringWriter();
PrintWriter printer = new PrintWriter(writer);
ex.printStackTrace(printer);
return writer.toString();
}
private static String indent(String input) {
BufferedReader reader = new BufferedReader(new StringReader(input));
StringWriter writer = new StringWriter();
PrintWriter printer = new PrintWriter(writer);
reader.lines().forEach((line) -> {
printer.print(" ");
printer.println(line);
});
return writer.toString();
}
}
}
| ContextFailedToStart |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MissingCasesInEnumSwitchTest.java | {
"start": 1803,
"end": 2387
} | enum ____ {
ONE,
TWO,
THREE
}
void m(Case c) {
switch (c) {
case Case.ONE:
case Case.TWO:
case Case.THREE:
System.err.println("found it!");
break;
}
}
}
""")
.doTest();
}
@Test
public void exhaustive_multipleCaseExpressions() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Case |
java | apache__kafka | connect/api/src/test/java/org/apache/kafka/connect/data/ConnectSchemaTest.java | {
"start": 16237,
"end": 16418
} | class ____.lang.Object for value");
assertInvalidValueForSchema(fieldName, Schema.INT8_SCHEMA, new Object(),
"Invalid Java object for schema with type INT8: | java |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/paramcheck/AbstractRpcParamExtractor.java | {
"start": 879,
"end": 946
} | class ____ rpc request.
*
* @author zhuoguang
*/
public abstract | for |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/mapping/DefaultLifecycleMapping.java | {
"start": 1099,
"end": 4055
} | class ____ implements LifecycleMapping {
private List<Lifecycle> lifecycles;
private Map<String, Lifecycle> lifecycleMap;
/** @deprecated use lifecycles instead */
@Deprecated
private Map<String, LifecyclePhase> phases;
/**
* Default ctor for plexus compatibility: lifecycles are most commonly defined in Plexus XML, that does field
* injection. Still, for Plexus to be able to instantiate this class, default ctor is needed.
*
* @deprecated Should not be used in Java code.
*/
@Deprecated
public DefaultLifecycleMapping() {}
/**
* Ctor to be used in Java code/providers.
*/
public DefaultLifecycleMapping(final List<Lifecycle> lifecycles) {
this.lifecycleMap =
Collections.unmodifiableMap(lifecycles.stream().collect(toMap(Lifecycle::getId, identity())));
}
/**
* Plexus: Populates the lifecycle map from the injected list of lifecycle mappings (if not already done).
*/
private synchronized void initLifecycleMap() {
if (lifecycleMap == null) {
lifecycleMap = new HashMap<>();
if (lifecycles != null) {
for (Lifecycle lifecycle : lifecycles) {
lifecycleMap.put(lifecycle.getId(), lifecycle);
}
} else {
/*
* NOTE: This is to provide a migration path for implementors of the legacy API which did not know about
* getLifecycles().
*/
String[] lifecycleIds = {"default", "clean", "site"};
for (String lifecycleId : lifecycleIds) {
Map<String, LifecyclePhase> phases = getLifecyclePhases(lifecycleId);
if (phases != null) {
Lifecycle lifecycle = new Lifecycle();
lifecycle.setId(lifecycleId);
lifecycle.setLifecyclePhases(phases);
lifecycleMap.put(lifecycleId, lifecycle);
}
}
}
}
}
@Override
public Map<String, Lifecycle> getLifecycles() {
initLifecycleMap();
return lifecycleMap;
}
@Deprecated
@Override
public List<String> getOptionalMojos(String lifecycle) {
return null;
}
private Map<String, LifecyclePhase> getLifecyclePhases(String lifecycle) {
initLifecycleMap();
Lifecycle lifecycleMapping = lifecycleMap.get(lifecycle);
if (lifecycleMapping != null) {
return lifecycleMapping.getLifecyclePhases();
} else if ("default".equals(lifecycle)) {
return phases;
} else {
return null;
}
}
@Deprecated
@Override
public Map<String, String> getPhases(String lifecycle) {
return LifecyclePhase.toLegacyMap(getLifecyclePhases(lifecycle));
}
}
| DefaultLifecycleMapping |
java | apache__camel | components/camel-telegram/src/test/java/org/apache/camel/component/telegram/util/TelegramApiConfig.java | {
"start": 924,
"end": 2079
} | class ____ {
private final String authorizationToken;
private final int port;
private final String baseUri;
private final String chatId;
public TelegramApiConfig(String baseUri, int port, String authorizationToken, String chatId) {
this.baseUri = baseUri;
this.port = port;
this.authorizationToken = authorizationToken;
this.chatId = chatId;
}
public static TelegramApiConfig fromEnv() {
final String authorizationToken = System.getenv("TELEGRAM_AUTHORIZATION_TOKEN");
final String chatId = System.getenv("TELEGRAM_CHAT_ID");
return new TelegramApiConfig(TelegramComponent.BOT_API_DEFAULT_URL, 443, authorizationToken, chatId);
}
public static TelegramApiConfig mock(int port) {
return new TelegramApiConfig("http://localhost:" + port, port, "mock-token", "-1");
}
public String getAuthorizationToken() {
return authorizationToken;
}
public String getBaseUri() {
return baseUri;
}
public String getChatId() {
return chatId;
}
public int getPort() {
return port;
}
}
| TelegramApiConfig |
java | grpc__grpc-java | benchmarks/src/main/java/io/grpc/benchmarks/driver/LoadClient.java | {
"start": 11463,
"end": 12057
} | class ____ implements Runnable {
final BenchmarkServiceGrpc.BenchmarkServiceBlockingStub stub;
private BlockingUnaryWorker(BenchmarkServiceGrpc.BenchmarkServiceBlockingStub stub) {
this.stub = stub;
}
@Override
public void run() {
while (!shutdown) {
long now = System.nanoTime();
stub.unaryCall(simpleRequest);
delay(System.nanoTime() - now);
}
}
}
/**
* Worker which executes async unary calls. Event timing is the duration between sending the
* request and receiving the response.
*/
private | BlockingUnaryWorker |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/invocation/ResolvableMethod.java | {
"start": 16376,
"end": 19264
} | class ____ {
private final List<Predicate<MethodParameter>> filters = new ArrayList<>(4);
@SafeVarargs
private ArgResolver(Predicate<MethodParameter>... filter) {
this.filters.addAll(Arrays.asList(filter));
}
/**
* Filter on method arguments with annotations.
*/
@SafeVarargs
public final ArgResolver annot(Predicate<MethodParameter>... filters) {
this.filters.addAll(Arrays.asList(filters));
return this;
}
/**
* Filter on method arguments that have the given annotations.
* @param annotationTypes the annotation types
* @see #annot(Predicate[])
*/
@SafeVarargs
public final ArgResolver annotPresent(Class<? extends Annotation>... annotationTypes) {
this.filters.add(param -> Arrays.stream(annotationTypes).allMatch(param::hasParameterAnnotation));
return this;
}
/**
* Filter on method arguments that don't have the given annotations.
* @param annotationTypes the annotation types
*/
@SafeVarargs
public final ArgResolver annotNotPresent(Class<? extends Annotation>... annotationTypes) {
this.filters.add(param ->
(annotationTypes.length > 0 ?
Arrays.stream(annotationTypes).noneMatch(param::hasParameterAnnotation) :
param.getParameterAnnotations().length == 0));
return this;
}
/**
* Resolve the argument also matching to the given type.
* @param type the expected type
*/
public MethodParameter arg(Class<?> type, Class<?>... generics) {
return arg(toResolvableType(type, generics));
}
/**
* Resolve the argument also matching to the given type.
* @param type the expected type
*/
public MethodParameter arg(Class<?> type, ResolvableType generic, ResolvableType... generics) {
return arg(toResolvableType(type, generic, generics));
}
/**
* Resolve the argument also matching to the given type.
* @param type the expected type
*/
public MethodParameter arg(ResolvableType type) {
this.filters.add(p -> type.toString().equals(ResolvableType.forMethodParameter(p).toString()));
return arg();
}
/**
* Resolve the argument.
*/
public final MethodParameter arg() {
List<MethodParameter> matches = applyFilters();
Assert.state(!matches.isEmpty(), () ->
"No matching arg in method\n" + formatMethod());
Assert.state(matches.size() == 1, () ->
"Multiple matching args in method\n" + formatMethod() + "\nMatches:\n\t" + matches);
return matches.get(0);
}
private List<MethodParameter> applyFilters() {
List<MethodParameter> matches = new ArrayList<>();
for (int i = 0; i < method.getParameterCount(); i++) {
MethodParameter param = new SynthesizingMethodParameter(method, i);
param.initParameterNameDiscovery(nameDiscoverer);
if (this.filters.stream().allMatch(p -> p.test(param))) {
matches.add(param);
}
}
return matches;
}
}
private static | ArgResolver |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsSimpleRequestReplyFixedReplyQueueTest.java | {
"start": 1520,
"end": 3572
} | class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected final String componentName = "activemq";
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testWithInOnly() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
// send an InOnly
template.sendBody("direct:start", "World");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testWithInOut() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
// send an InOut
String out = template.requestBody("direct:start", "World", String.class);
assertEquals("Hello World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Override
public String getComponentName() {
return componentName;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.to(ExchangePattern.InOut,
"activemq:queue:JmsSimpleRequestReplyFixedReplyQueueTest?replyTo=queue:JmsSimpleRequestReplyFixedReplyQueueTest.reply")
.to("mock:result");
from("activemq:queue:JmsSimpleRequestReplyFixedReplyQueueTest")
.transform(body().prepend("Hello "));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
| JmsSimpleRequestReplyFixedReplyQueueTest |
java | google__gson | test-shrinker/src/main/java/com/example/ClassWithUnreferencedHasArgsConstructor.java | {
"start": 350,
"end": 615
} | class ____ {
@SerializedName("myField")
public int i;
// Specify explicit constructor with args to remove implicit no-args default constructor
public ClassWithUnreferencedHasArgsConstructor(int i) {
this.i = i;
}
}
| ClassWithUnreferencedHasArgsConstructor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallSuggesterTest.java | {
"start": 8660,
"end": 8963
} | class ____ {
abstract void test();
}
""")
.doTest();
}
@Test
public void annotatedMethod() {
testHelper
.addSourceLines(
"StarlarkMethod.java",
"""
package net.starlark.java.annot;
public @ | Test |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtil.java | {
"start": 20615,
"end": 21971
} | class ____ {
private final String name;
private final int version;
private ExecNodeNameVersion(String name, int version) {
this.name = name;
this.version = version;
}
@Override
public String toString() {
return String.format("name: %s, version: %s", name, version);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ExecNodeNameVersion that = (ExecNodeNameVersion) o;
return version == that.version && Objects.equals(name, that.name);
}
@Override
public int hashCode() {
return Objects.hash(name, version);
}
}
/** Return true if the given class's constructors have @JsonCreator annotation, else false. */
static boolean hasJsonCreatorAnnotation(Class<?> clazz) {
for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
for (Annotation annotation : constructor.getAnnotations()) {
if (annotation instanceof JsonCreator) {
return true;
}
}
}
return false;
}
}
| ExecNodeNameVersion |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/MultiErrorHandlerInRouteNotHandledTest.java | {
"start": 3216,
"end": 3651
} | class ____ implements Processor {
private String name;
@Override
public void process(Exchange exchange) {
if (name.equals("Error")) {
throw new IllegalArgumentException("Forced exception by unit test");
}
exchange.getIn().setHeader("name", name);
}
public void setName(String name) {
this.name = name;
}
}
}
| MyProcessor |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/DefaultApplicationArguments.java | {
"start": 2031,
"end": 2355
} | class ____ extends SimpleCommandLinePropertySource {
Source(String[] args) {
super(args);
}
@Override
public List<String> getNonOptionArgs() {
return super.getNonOptionArgs();
}
@Override
public @Nullable List<String> getOptionValues(String name) {
return super.getOptionValues(name);
}
}
}
| Source |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/MultipleAttributeNaturesException.java | {
"start": 377,
"end": 1194
} | class ____ extends AnnotationException {
private final String attributeName;
public MultipleAttributeNaturesException(
String attributeName,
EnumSet<AttributeNature> natures) {
super( craftMessage( attributeName, natures ) );
this.attributeName = attributeName;
}
public String getAttributeName() {
return attributeName;
}
private static String craftMessage(String attributeName, EnumSet<AttributeNature> natures) {
final StringBuilder buffer = new StringBuilder( "Attribute `" )
.append( attributeName )
.append( "` expressed multiple natures [" );
String separator = "";
for ( AttributeNature nature : natures ) {
buffer.append( separator );
buffer.append( nature.name() );
separator = ",";
}
return buffer.append( "]" ).toString();
}
}
| MultipleAttributeNaturesException |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/results/LegacyFetchBuilder.java | {
"start": 458,
"end": 910
} | interface ____ extends FetchBuilder {
/**
* The table-alias associated with the fetch modeled by this builder.
*/
String getTableAlias();
/**
* The alias for the node (result or fetch) which owns the fetch modeled by this builder.
*/
String getOwnerAlias();
/**
* The name of the model-part being fetched.
*/
String getFetchableName();
@Override
LegacyFetchBuilder cacheKeyInstance();
Fetchable getFetchable();
}
| LegacyFetchBuilder |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GraphqlComponentBuilderFactory.java | {
"start": 6053,
"end": 7271
} | class ____
extends AbstractComponentBuilder<GraphqlComponent>
implements GraphqlComponentBuilder {
@Override
protected GraphqlComponent buildConcreteComponent() {
return new GraphqlComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "lazyStartProducer": ((GraphqlComponent) component).setLazyStartProducer((boolean) value); return true;
case "throwExceptionOnFailure": ((GraphqlComponent) component).setThrowExceptionOnFailure((boolean) value); return true;
case "autowiredEnabled": ((GraphqlComponent) component).setAutowiredEnabled((boolean) value); return true;
case "httpClient": ((GraphqlComponent) component).setHttpClient((org.apache.hc.client5.http.classic.HttpClient) value); return true;
case "headerFilterStrategy": ((GraphqlComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
default: return false;
}
}
}
} | GraphqlComponentBuilderImpl |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/JoinReorderITCaseBase.java | {
"start": 2453,
"end": 17689
} | class ____ {
private static final int DEFAULT_PARALLELISM = 4;
@RegisterExtension
private static final MiniClusterExtension MINI_CLUSTER_EXTENSION =
new MiniClusterExtension(
new MiniClusterResourceConfiguration.Builder()
.setNumberTaskManagers(1)
.setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)
.build());
protected TableEnvironment tEnv;
private Catalog catalog;
protected abstract TableEnvironment getTableEnvironment();
protected abstract void assertEquals(String query, List<String> expectedList);
@BeforeEach
public void before() throws Exception {
tEnv = getTableEnvironment();
catalog = tEnv.getCatalog(tEnv.getCurrentCatalog()).get();
tEnv.getConfig()
.getConfiguration()
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED, true);
tEnv.getConfig()
.getConfiguration()
.set(
ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM,
DEFAULT_PARALLELISM);
// Test data
String dataId2 = TestValuesTableFactory.registerData(TestData.data2());
tEnv.executeSql(
String.format(
"CREATE TABLE T1 (\n"
+ " a1 INT,\n"
+ " b1 BIGINT,\n"
+ " c1 INT,\n"
+ " d1 STRING,\n"
+ " e1 BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'data-id' = '%s',\n"
+ " 'bounded' = 'true'\n"
+ ")",
dataId2));
catalog.alterTableStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T1"),
new CatalogTableStatistics(100000, 1, 1, 1),
false);
tEnv.executeSql(
String.format(
"CREATE TABLE T2 (\n"
+ " a2 INT,\n"
+ " b2 BIGINT,\n"
+ " c2 INT,\n"
+ " d2 STRING,\n"
+ " e2 BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'data-id' = '%s',\n"
+ " 'bounded' = 'true'\n"
+ ")",
dataId2));
catalog.alterTableStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T2"),
new CatalogTableStatistics(10000, 1, 1, 1),
false);
String dataId3 = TestValuesTableFactory.registerData(TestData.smallData3());
tEnv.executeSql(
String.format(
"CREATE TABLE T3 (\n"
+ " a3 INT,\n"
+ " b3 BIGINT,\n"
+ " c3 STRING\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'data-id' = '%s',\n"
+ " 'bounded' = 'true'\n"
+ ")",
dataId3));
catalog.alterTableStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T3"),
new CatalogTableStatistics(1000, 1, 1, 1),
false);
String dataId5 = TestValuesTableFactory.registerData(TestData.data5());
tEnv.executeSql(
String.format(
"CREATE TABLE T4 (\n"
+ " a4 INT,\n"
+ " b4 BIGINT,\n"
+ " c4 INT,\n"
+ " d4 STRING,\n"
+ " e4 BIGINT\n"
+ ") WITH (\n"
+ " 'connector' = 'values',\n"
+ " 'data-id' = '%s',\n"
+ " 'bounded' = 'true'\n"
+ ")",
dataId5));
catalog.alterTableStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T4"),
new CatalogTableStatistics(100, 1, 1, 1),
false);
}
@AfterEach
public void after() {
TestValuesTableFactory.clearAllData();
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithFullOuterJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "FULL OUTER JOIN T3 ON T4.b4 = T3.b3 "
+ "FULL OUTER JOIN T2 ON T4.b4 = T2.b2 "
+ "FULL OUTER JOIN T1 ON T2.b2 = T1.b1";
List<String> expectedList =
Arrays.asList(
"ABC,null,ABC,ABC",
"BCD,null,BCD,BCD",
"CDE,null,CDE,CDE",
"DEF,null,DEF,DEF",
"EFG,null,EFG,EFG",
"FGH,null,FGH,FGH",
"GHI,null,GHI,GHI",
"HIJ,null,HIJ,HIJ",
"Hallo Welt wie gehts?,null,Hallo Welt wie gehts?,Hallo Welt wie gehts?",
"Hallo Welt wie,null,Hallo Welt wie,Hallo Welt wie",
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo,Hi,Hallo,Hallo",
"IJK,null,IJK,IJK",
"JKL,null,JKL,JKL",
"KLM,null,KLM,KLM");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithInnerAndFullOuterJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "JOIN T3 ON T4.b4 = T3.b3 "
+ "FULL OUTER JOIN T2 ON T4.b4 = T2.b2 "
+ "JOIN T1 ON T4.b4 = T1.b1";
List<String> expectedList =
Arrays.asList(
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo,Hi,Hallo,Hallo");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithInnerJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "JOIN T3 ON T4.b4 = T3.b3 "
+ "JOIN T2 ON T4.b4 = T2.b2 "
+ "JOIN T1 ON T4.b4 = T1.b1 WHERE T1.a1 > 0 AND T3.a3 > 0";
List<String> expectedList =
Arrays.asList(
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo,Hi,Hallo,Hallo");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithLeftOuterJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
// can reorder, all join keys will not generate null.
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "LEFT OUTER JOIN T3 ON T4.b4 = T3.b3 "
+ "LEFT OUTER JOIN T2 ON T4.b4 = T2.b2 "
+ "LEFT OUTER JOIN T1 ON T4.b4 = T1.b1 WHERE T4.a4 < 3";
List<String> expectedList =
Arrays.asList(
"Hallo,Hi,Hallo,Hallo",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt",
"Hallo Welt wie,null,Hallo Welt wie,Hallo Welt wie");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithInnerAndLeftOuterJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "JOIN T3 ON T4.b4 = T3.b3 "
+ "LEFT OUTER JOIN T2 ON T4.b4 = T2.b2 "
+ "JOIN T1 ON T4.b4 = T1.b1 "
+ "WHERE T4.a4 < 3";
List<String> expectedList =
Arrays.asList(
"Hallo,Hi,Hallo,Hallo",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithRightOuterJoin(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4 "
+ "RIGHT OUTER JOIN T3 ON T4.b4 = T3.b3 "
+ "JOIN T2 ON T3.b3 = T2.b2 "
+ "JOIN T1 ON T2.b2 = T1.b1 WHERE T2.a2 <= 2";
List<String> expectedList =
Arrays.asList(
"Hallo,Hi,Hallo,Hallo",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt");
assertEquals(query, expectedList);
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithTrueCondition(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT T4.d4, T3.c3, T2.d2, T1.d1 FROM T4, T3, T2, T1 "
+ "WHERE T4.a4 <= 1 AND T3.a3 <= 1 AND T2.a2 <= 1 AND T1.a1 <= 1";
assertEquals(query, Collections.singletonList("Hallo,Hi,Hallo,Hallo"));
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithInnerJoinAndTrueCondition(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT tab1.d4, tab1.c3, T2.d2, T1.d1 FROM T1, "
+ "(SELECT * FROM T3 JOIN T4 ON T4.b4 = T3.b3) tab1, T2 "
+ "WHERE tab1.a4 <= 1 AND tab1.a3 <= 1 AND T2.a2 <= 1 AND T1.a1 <= 1";
assertEquals(query, Collections.singletonList("Hallo,Hi,Hallo,Hallo"));
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testJoinReorderWithMixedJoinTypeAndCondition(boolean isBushyJoinReorder) {
setIsBushyJoinReorder(isBushyJoinReorder);
String query =
"SELECT tab2.d4, tab2.c3, tab2.d2, T1.d1 FROM T1, (SELECT * FROM T4 "
+ "LEFT OUTER JOIN T3 ON T4.b4 = T3.b3 "
+ "JOIN T2 ON T4.b4 = T2.b2) tab2 "
+ "WHERE tab2.a4 <= 1 AND tab2.a3 <= 1 AND tab2.a2 <= 1 AND T1.a1 <= 1";
assertEquals(query, Collections.singletonList("Hallo,Hi,Hallo,Hallo"));
}
@ParameterizedTest(name = "Is bushy join reorder: {0}")
@ValueSource(booleans = {true, false})
public void testBushyTreeJoinReorder(boolean isBushyJoinReorder)
throws TableNotExistException, TablePartitionedException {
setIsBushyJoinReorder(isBushyJoinReorder);
CatalogColumnStatisticsDataLong longColStats =
new CatalogColumnStatisticsDataLong(100L, 100L, 50L, 1000L);
Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>(1);
colStatsMap.put("b1", longColStats);
catalog.alterTableColumnStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T1"),
new CatalogColumnStatistics(colStatsMap),
false);
longColStats = new CatalogColumnStatisticsDataLong(100L, 100L, 500000L, 1000L);
colStatsMap = new HashMap<>(1);
colStatsMap.put("b2", longColStats);
catalog.alterTableColumnStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T2"),
new CatalogColumnStatistics(colStatsMap),
false);
longColStats = new CatalogColumnStatisticsDataLong(100L, 100L, 50L, 1000L);
colStatsMap = new HashMap<>(1);
colStatsMap.put("b3", longColStats);
catalog.alterTableColumnStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T3"),
new CatalogColumnStatistics(colStatsMap),
false);
longColStats = new CatalogColumnStatisticsDataLong(100L, 100L, 500000L, 1000L);
colStatsMap = new HashMap<>(1);
colStatsMap.put("b4", longColStats);
catalog.alterTableColumnStatistics(
new ObjectPath(tEnv.getCurrentDatabase(), "T4"),
new CatalogColumnStatistics(colStatsMap),
false);
String query =
"SELECT tab2.d4, tab2.c3, tab1.d2, tab1.d1 FROM "
+ "(SELECT * FROM T1 JOIN T2 ON T1.b1 = T2.b2) tab1 "
+ "JOIN (SELECT * FROM T3 JOIN T4 ON T3.b3 = T4.b4) tab2 "
+ "ON tab1.b2 = tab2.b4";
List<String> expectedList =
Arrays.asList(
"Hallo,Hi,Hallo,Hallo",
"Hallo Welt,Hello,Hallo Welt,Hallo Welt",
"Hallo Welt,Hello world,Hallo Welt,Hallo Welt");
assertEquals(query, expectedList);
}
private void setIsBushyJoinReorder(boolean isBushyJoinReorder) {
if (!isBushyJoinReorder) {
tEnv.getConfig()
.getConfiguration()
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_BUSHY_JOIN_REORDER_THRESHOLD, 3);
} else {
tEnv.getConfig()
.getConfiguration()
.set(OptimizerConfigOptions.TABLE_OPTIMIZER_BUSHY_JOIN_REORDER_THRESHOLD, 1000);
}
}
}
| JoinReorderITCaseBase |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java | {
"start": 2310,
"end": 9399
} | class ____ extends AggregatorTestCase {
public void testTopLevel() throws Exception {
Aggregation result;
if (randomBoolean()) {
result = testCase(new MatchAllDocsQuery(), topHits("_name").sort("string", SortOrder.DESC));
} else {
Query query = new QueryParser("string", new KeywordAnalyzer()).parse("d^1000 c^100 b^10 a^1");
result = testCase(query, topHits("_name"));
}
SearchHits searchHits = ((TopHits) result).getHits();
assertEquals(3L, searchHits.getTotalHits().value());
assertEquals("3", searchHits.getAt(0).getId());
assertEquals("2", searchHits.getAt(1).getId());
assertEquals("1", searchHits.getAt(2).getId());
assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) result)));
}
public void testNoResults() throws Exception {
TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC));
SearchHits searchHits = result.getHits();
assertEquals(0L, searchHits.getTotalHits().value());
assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result)));
}
/**
* Tests {@code top_hits} inside of {@code terms}. While not strictly a unit test this is a fairly common way to run {@code top_hits}
* and serves as a good example of running {@code top_hits} inside of another aggregation.
*/
public void testInsideTerms() throws Exception {
Aggregation result;
if (randomBoolean()) {
result = testCase(
new MatchAllDocsQuery(),
terms("term").field("string").subAggregation(topHits("top").sort("string", SortOrder.DESC))
);
} else {
Query query = new QueryParser("string", new KeywordAnalyzer()).parse("d^1000 c^100 b^10 a^1");
result = testCase(query, terms("term").field("string").subAggregation(topHits("top")));
}
Terms terms = (Terms) result;
// The "a" bucket
TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top");
SearchHits searchHits = (hits).getHits();
assertEquals(2L, searchHits.getTotalHits().value());
assertEquals("2", searchHits.getAt(0).getId());
assertEquals("1", searchHits.getAt(1).getId());
assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top"))));
// The "b" bucket
searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits();
assertEquals(2L, searchHits.getTotalHits().value());
assertEquals("3", searchHits.getAt(0).getId());
assertEquals("1", searchHits.getAt(1).getId());
assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top"))));
// The "c" bucket
searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits();
assertEquals(1L, searchHits.getTotalHits().value());
assertEquals("2", searchHits.getAt(0).getId());
assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("c").getAggregations().get("top"))));
// The "d" bucket
searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits();
assertEquals(1L, searchHits.getTotalHits().value());
assertEquals("3", searchHits.getAt(0).getId());
assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top"))));
}
private static final MappedFieldType STRING_FIELD_TYPE = new KeywordFieldMapper.KeywordFieldType("string");
private Aggregation testCase(Query query, AggregationBuilder builder) throws IOException {
Directory directory = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), directory);
iw.addDocument(document("1", "a", "b"));
iw.addDocument(document("2", "c", "a"));
iw.addDocument(document("3", "b", "d"));
iw.close();
IndexReader indexReader = DirectoryReader.open(directory);
Aggregation result = searchAndReduce(indexReader, new AggTestConfig(builder, STRING_FIELD_TYPE).withQuery(query));
indexReader.close();
directory.close();
return result;
}
private Document document(String id, String... stringValues) {
Document document = new Document();
document.add(new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Store.YES));
for (String stringValue : stringValues) {
document.add(new Field("string", new BytesRef(stringValue), KeywordFieldMapper.Defaults.FIELD_TYPE));
}
return document;
}
public void testSetScorer() throws Exception {
Directory directory = newDirectory();
IndexWriter w = new IndexWriter(
directory,
newIndexWriterConfig()
// only merge adjacent segments
.setMergePolicy(newLogMergePolicy())
);
// first window (see BooleanScorer) has matches on one clause only
for (int i = 0; i < 2048; ++i) {
Document doc = new Document();
doc.add(new StringField("_id", Uid.encodeId(Integer.toString(i)), Store.YES));
if (i == 1000) { // any doc in 0..2048
doc.add(new StringField("string", "bar", Store.NO));
}
w.addDocument(doc);
}
// second window has matches in two clauses
for (int i = 0; i < 2048; ++i) {
Document doc = new Document();
doc.add(new StringField("_id", Uid.encodeId(Integer.toString(2048 + i)), Store.YES));
if (i == 500) { // any doc in 0..2048
doc.add(new StringField("string", "baz", Store.NO));
} else if (i == 1500) {
doc.add(new StringField("string", "bar", Store.NO));
}
w.addDocument(doc);
}
w.forceMerge(1); // we need all docs to be in the same segment
IndexReader reader = DirectoryReader.open(w);
w.close();
Query query = new BooleanQuery.Builder().add(new TermQuery(new Term("string", "bar")), Occur.SHOULD)
.add(new TermQuery(new Term("string", "baz")), Occur.SHOULD)
.build();
AggregationBuilder agg = AggregationBuilders.topHits("top_hits");
TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query));
assertEquals(3, result.getHits().getTotalHits().value());
reader.close();
directory.close();
}
public void testSortByScore() throws Exception {
// just check that it does not fail with exceptions
testCase(new MatchAllDocsQuery(), topHits("_name").sort("_score", SortOrder.DESC));
testCase(new MatchAllDocsQuery(), topHits("_name").sort("_score"));
}
}
| TopHitsAggregatorTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/converter/stream/StreamSourceCacheTest.java | {
"start": 1260,
"end": 2789
} | class ____ extends ContextTestSupport {
@Test
public void testStreamSourceCache() throws Exception {
Exchange exchange = new DefaultExchange(context);
StreamSource source = context.getTypeConverter().convertTo(StreamSource.class, "<foo>bar</foo>");
StreamSourceCache cache = new StreamSourceCache(source, exchange);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
cache.writeTo(bos);
String s = context.getTypeConverter().convertTo(String.class, bos);
assertEquals("<foo>bar</foo>", s);
}
@Test
public void testStreamSourceCacheIsEmpty() throws Exception {
Exchange exchange = new DefaultExchange(context);
StreamSource source = context.getTypeConverter().convertTo(StreamSource.class, "");
StreamSourceCache cache = new StreamSourceCache(source, exchange);
Assertions.assertTrue(cache.isEmpty());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
cache.writeTo(bos);
String s = context.getTypeConverter().convertTo(String.class, bos);
assertEquals("", s);
source = context.getTypeConverter().convertTo(StreamSource.class, "<foo>bar</foo>");
cache = new StreamSourceCache(source, exchange);
Assertions.assertFalse(cache.isEmpty());
bos = new ByteArrayOutputStream();
cache.writeTo(bos);
s = context.getTypeConverter().convertTo(String.class, bos);
assertEquals("<foo>bar</foo>", s);
}
}
| StreamSourceCacheTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/db2/DB2SelectTest_24.java | {
"start": 1037,
"end": 3369
} | class ____ extends DB2Test {
public void test_0() throws Exception {
String sql = "SELECT BANK_CODE, CONN_LOCATION_TYPE,\n" +
"OUTER_KEYLABEL_NAME, INNER_KEYLABEL_NAME,\n" +
"DESC, COMMENT, STATUS, DB_TIMESTAMP\n" +
"FROM EGL_SYS_KEYLABEL_CONVERT_DEF\n" +
"WHERE STATUS='1'";
DB2StatementParser parser = new DB2StatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
System.out.println(SQLUtils.toDB2String(stmt));
assertEquals(1, statementList.size());
DB2SchemaStatVisitor visitor = new DB2SchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(8, visitor.getColumns().size());
assertEquals(1, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("EGL_SYS_KEYLABEL_CONVERT_DEF")));
// assertTrue(visitor.getColumns().contains(new Column("DSN8B10.EMP", "WORKDEPT")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "first_name")));
// assertTrue(visitor.getColumns().contains(new Column("mytable", "full_name")));
assertEquals("SELECT BANK_CODE, CONN_LOCATION_TYPE, OUTER_KEYLABEL_NAME, INNER_KEYLABEL_NAME, DESC\n" +
"\t, COMMENT, STATUS, DB_TIMESTAMP\n" +
"FROM EGL_SYS_KEYLABEL_CONVERT_DEF\n" +
"WHERE STATUS = '1'", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2));
assertEquals("select BANK_CODE, CONN_LOCATION_TYPE, OUTER_KEYLABEL_NAME, INNER_KEYLABEL_NAME, DESC\n" +
"\t, COMMENT, STATUS, DB_TIMESTAMP\n" +
"from EGL_SYS_KEYLABEL_CONVERT_DEF\n" +
"where STATUS = '1'", //
SQLUtils.toSQLString(stmt, JdbcConstants.DB2, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
}
}
| DB2SelectTest_24 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/BidirectionalBagTest.java | {
"start": 1399,
"end": 2062
} | class ____ {
@Id
private Long id;
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL)
private List<Phone> phones = new ArrayList<>();
//Getters and setters are omitted for brevity
//end::collections-bidirectional-bag-example[]
public Person() {
}
public Person(Long id) {
this.id = id;
}
public List<Phone> getPhones() {
return phones;
}
//tag::collections-bidirectional-bag-example[]
public void addPhone(Phone phone) {
phones.add(phone);
phone.setPerson(this);
}
public void removePhone(Phone phone) {
phones.remove(phone);
phone.setPerson(null);
}
}
@Entity(name = "Phone")
public static | Person |
java | spring-projects__spring-framework | spring-core/src/testFixtures/java/org/springframework/core/testfixture/io/buffer/AbstractDataBufferAllocatingTests.java | {
"start": 2397,
"end": 5924
} | class ____ {
private static UnpooledByteBufAllocator netty4OffHeapUnpooled;
private static UnpooledByteBufAllocator netty4OnHeapUnpooled;
private static PooledByteBufAllocator netty4OffHeapPooled;
private static PooledByteBufAllocator netty4OnHeapPooled;
@RegisterExtension
AfterEachCallback leakDetector = context -> waitForDataBufferRelease(Duration.ofSeconds(2));
protected DataBufferFactory bufferFactory;
protected DataBuffer createDataBuffer(int capacity) {
return this.bufferFactory.allocateBuffer(capacity);
}
protected DataBuffer stringBuffer(String value) {
return byteBuffer(value.getBytes(StandardCharsets.UTF_8));
}
protected Mono<DataBuffer> deferStringBuffer(String value) {
return Mono.defer(() -> Mono.just(stringBuffer(value)));
}
protected DataBuffer byteBuffer(byte[] value) {
DataBuffer buffer = this.bufferFactory.allocateBuffer(value.length);
buffer.write(value);
return buffer;
}
protected void release(DataBuffer... buffers) {
Arrays.stream(buffers).forEach(DataBufferUtils::release);
}
protected Consumer<DataBuffer> stringConsumer(String expected) {
return stringConsumer(expected, UTF_8);
}
protected Consumer<DataBuffer> stringConsumer(String expected, Charset charset) {
return dataBuffer -> {
String value = dataBuffer.toString(charset);
DataBufferUtils.release(dataBuffer);
assertThat(value).isEqualTo(expected);
};
}
/**
* Wait until allocations are at 0, or the given duration elapses.
*/
private void waitForDataBufferRelease(Duration duration) throws InterruptedException {
Instant start = Instant.now();
while (true) {
try {
verifyAllocations();
break;
}
catch (AssertionError ex) {
if (Instant.now().isAfter(start.plus(duration))) {
throw ex;
}
}
Thread.sleep(50);
}
}
private void verifyAllocations() {
if (this.bufferFactory instanceof NettyDataBufferFactory) {
ByteBufAllocator allocator = ((NettyDataBufferFactory) this.bufferFactory).getByteBufAllocator();
if (allocator instanceof PooledByteBufAllocator) {
Instant start = Instant.now();
while (true) {
PooledByteBufAllocatorMetric metric = ((PooledByteBufAllocator) allocator).metric();
long total = getAllocations(metric.directArenas()) + getAllocations(metric.heapArenas());
if (total == 0) {
return;
}
if (Instant.now().isBefore(start.plus(Duration.ofSeconds(5)))) {
try {
Thread.sleep(50);
}
catch (InterruptedException ex) {
// ignore
}
continue;
}
assertThat(total).as("ByteBuf Leak: " + total + " unreleased allocations").isEqualTo(0);
}
}
}
}
private static long getAllocations(List<PoolArenaMetric> metrics) {
return metrics.stream().mapToLong(PoolArenaMetric::numActiveAllocations).sum();
}
@BeforeAll
@SuppressWarnings("deprecation") // PooledByteBufAllocator no longer supports tinyCacheSize.
public static void createAllocators() {
netty4OnHeapUnpooled = new UnpooledByteBufAllocator(false);
netty4OffHeapUnpooled = new UnpooledByteBufAllocator(true);
netty4OnHeapPooled = new PooledByteBufAllocator(false, 1, 1, 4096, 4, 0, 0, 0, true);
netty4OffHeapPooled = new PooledByteBufAllocator(true, 1, 1, 4096, 4, 0, 0, 0, true);
}
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@ParameterizedTest
@MethodSource("org.springframework.core.testfixture.io.buffer.AbstractDataBufferAllocatingTests#dataBufferFactories()")
public @ | AbstractDataBufferAllocatingTests |
java | mockito__mockito | mockito-extensions/mockito-errorprone/src/test/java/org/mockito/errorprone/bugpatterns/MockitoAnyIncorrectPrimitiveTypeTest.java | {
"start": 1617,
"end": 2092
} | class ____ {",
" public void test() {",
" Foo foo = mock(Foo.class);",
" // BUG: Diagnostic contains:",
" when(foo.run(anyInt())).thenReturn(5);",
" // BUG: Diagnostic contains:",
" when(foo.runWithBoth(any(String.class), anyInt())).thenReturn(5);",
" }",
" static | Test |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithAbstractClassWithInterfaceInterceptionAndBindingsSourceTest.java | {
"start": 3040,
"end": 3262
} | class ____ {
@Produces
MyNonbeanBase produce(@BindingsSource(MyNonbeanBindings.class) InterceptionProxy<MyNonbeanBase> proxy) {
return proxy.create(new MyNonbeanImpl());
}
}
}
| MyProducer |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/FieldComputer.java | {
"start": 1182,
"end": 1430
} | interface ____ an expression to compute the field of the table
* schema of a {@link TableSource} from one or more fields of the {@link TableSource}'s return type.
*
* @param <T> The result type of the provided expression.
* @deprecated This | returns |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/function/FailableCallable.java | {
"start": 869,
"end": 1087
} | interface ____ {@link java.util.concurrent.Callable} that declares a {@link Throwable}.
*
* @param <R> Return type.
* @param <E> The kind of thrown exception or error.
* @since 3.11
*/
@FunctionalInterface
public | like |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeIdTest.java | {
"start": 2505,
"end": 2753
} | class ____ {
public int value;
public ValueBean() { }
public ValueBean(int v) { value = v; }
}
@JsonTypeName("funk")
@JsonTypeInfo(use=Id.NAME, include=As.EXTERNAL_PROPERTY, property="extType")
static | ValueBean |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/search/SearchExecutionStatsCollector.java | {
"start": 1145,
"end": 2942
} | class ____ extends DelegatingActionListener<SearchPhaseResult, SearchPhaseResult> {
private final String nodeId;
private final ResponseCollectorService collector;
private final long startNanos;
SearchExecutionStatsCollector(ActionListener<SearchPhaseResult> listener, ResponseCollectorService collector, String nodeId) {
super(Objects.requireNonNull(listener, "listener cannot be null"));
this.collector = Objects.requireNonNull(collector, "response collector cannot be null");
this.startNanos = System.nanoTime();
this.nodeId = nodeId;
}
@SuppressWarnings("unchecked")
public static
BiFunction<Transport.Connection, ActionListener<? super SearchPhaseResult>, ActionListener<? super SearchPhaseResult>>
makeWrapper(ResponseCollectorService service) {
return (connection, originalListener) -> new SearchExecutionStatsCollector(
(ActionListener<SearchPhaseResult>) originalListener,
service,
connection.getNode().getId()
);
}
@Override
public void onResponse(SearchPhaseResult response) {
QuerySearchResult queryResult = response.queryResult();
if (nodeId != null && queryResult != null) {
final long serviceTimeEWMA = queryResult.serviceTimeEWMA();
final int queueSize = queryResult.nodeQueueSize();
final long responseDuration = System.nanoTime() - startNanos;
// EWMA/queue size may be -1 if the query node doesn't support capturing it
if (serviceTimeEWMA > 0 && queueSize >= 0) {
collector.addNodeStatistics(nodeId, queueSize, responseDuration, serviceTimeEWMA);
}
}
delegate.onResponse(response);
}
}
| SearchExecutionStatsCollector |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/MulticastGroupedExchangeExceptionTest.java | {
"start": 1255,
"end": 3532
} | class ____ extends ContextTestSupport {
@Test
public void testBothGood() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
template.sendBody("direct:start", "dummy");
assertMockEndpointsSatisfied();
Exchange received = result.getReceivedExchanges().get(0);
MatcherAssert.assertThat("no exception", received.isFailed(), is(false));
}
@Test
public void testBFail() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
MockEndpoint endpointB = getMockEndpoint("mock:endpointB");
endpointB.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
throw new IllegalArgumentException("Fake exception");
}
});
template.sendBody("direct:start", "dummy");
assertMockEndpointsSatisfied();
Exchange received = result.getReceivedExchanges().get(0);
MatcherAssert.assertThat("no exception", received.isFailed(), is(false));
}
@Test
public void testAFail() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
MockEndpoint endpointA = getMockEndpoint("mock:endpointA");
endpointA.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
throw new IllegalArgumentException("Fake exception");
}
});
template.sendBody("direct:start", "dummy");
assertMockEndpointsSatisfied();
Exchange received = result.getReceivedExchanges().get(0);
MatcherAssert.assertThat("no exception", received.isFailed(), is(false));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").multicast(new GroupedExchangeAggregationStrategy()).to("mock:endpointA", "mock:endpointB")
.end().to("mock:result");
}
};
}
}
| MulticastGroupedExchangeExceptionTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java | {
"start": 15247,
"end": 15480
} | class ____ extends RPC.Server {
static final ThreadLocal<ProtobufRpcEngineCallback2> CURRENT_CALLBACK =
new ThreadLocal<>();
static final ThreadLocal<CallInfo> CURRENT_CALL_INFO = new ThreadLocal<>();
static | Server |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/LegacyKeyedProcessOperator.java | {
"start": 1703,
"end": 3870
} | class ____<K, IN, OUT>
extends AbstractUdfStreamOperator<OUT, ProcessFunction<IN, OUT>>
implements OneInputStreamOperator<IN, OUT>, Triggerable<K, VoidNamespace> {
private static final long serialVersionUID = 1L;
private transient TimestampedCollector<OUT> collector;
private transient ContextImpl context;
private transient OnTimerContextImpl onTimerContext;
public LegacyKeyedProcessOperator(ProcessFunction<IN, OUT> function) {
super(function);
}
@Override
public void open() throws Exception {
super.open();
collector = new TimestampedCollector<>(output);
InternalTimerService<VoidNamespace> internalTimerService =
getInternalTimerService("user-timers", VoidNamespaceSerializer.INSTANCE, this);
TimerService timerService = new SimpleTimerService(internalTimerService);
context = new ContextImpl(userFunction, timerService);
onTimerContext = new OnTimerContextImpl(userFunction, timerService);
}
@Override
public void onEventTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.setAbsoluteTimestamp(timer.getTimestamp());
invokeUserFunction(TimeDomain.EVENT_TIME, timer);
}
@Override
public void onProcessingTime(InternalTimer<K, VoidNamespace> timer) throws Exception {
collector.eraseTimestamp();
invokeUserFunction(TimeDomain.PROCESSING_TIME, timer);
}
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
collector.setTimestamp(element);
context.element = element;
userFunction.processElement(element.getValue(), context, collector);
context.element = null;
}
private void invokeUserFunction(TimeDomain timeDomain, InternalTimer<K, VoidNamespace> timer)
throws Exception {
onTimerContext.timeDomain = timeDomain;
onTimerContext.timer = timer;
userFunction.onTimer(timer.getTimestamp(), onTimerContext, collector);
onTimerContext.timeDomain = null;
onTimerContext.timer = null;
}
private | LegacyKeyedProcessOperator |
java | elastic__elasticsearch | modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java | {
"start": 3277,
"end": 5388
} | class ____ implements Processor.Factory {
private final ScriptService scriptService;
public Factory(ScriptService scriptService) {
this.scriptService = scriptService;
}
@Override
public SetProcessor create(
Map<String, Processor.Factory> registry,
String processorTag,
String description,
Map<String, Object> config,
ProjectId projectId
) throws Exception {
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
String copyFrom = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "copy_from");
String mediaType = ConfigurationUtils.readMediaTypeProperty(TYPE, processorTag, config, "media_type", "application/json");
ValueSource valueSource = null;
if (copyFrom == null) {
Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value");
valueSource = ValueSource.wrap(value, scriptService, Map.of(Script.CONTENT_TYPE_OPTION, mediaType));
} else {
Object value = config.remove("value");
if (value != null) {
throw newConfigurationException(
TYPE,
processorTag,
"copy_from",
"cannot set both `copy_from` and `value` in the same processor"
);
}
}
boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", true);
TemplateScript.Factory compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", field, scriptService);
boolean ignoreEmptyValue = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_empty_value", false);
return new SetProcessor(processorTag, description, compiledTemplate, valueSource, copyFrom, overrideEnabled, ignoreEmptyValue);
}
}
}
| Factory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/FieldDeserTest.java | {
"start": 1432,
"end": 1641
} | class ____
extends SimpleFieldBean
{
@JsonProperty("x")
protected int myX = 10;
@SuppressWarnings("hiding")
public int y = 11;
}
abstract static | OkDupFieldBean |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithInputStatefulSetResourcesTest.java | {
"start": 687,
"end": 3791
} | class ____ {
static final String APP_NAME = "kubernetes-with-input-statefulset-resource";
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName(APP_NAME)
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("kubernetes-with-statefulset-resource.properties")
.setLogFileName("k8s.log")
.addCustomResourceEntry(Path.of("src", "main", "kubernetes", "kubernetes.yml"),
"manifests/custom-deployment/kubernetes-with-stateful.yml")
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-kubernetes", Version.getVersion())))
.addBuildChainCustomizerEntries(
new QuarkusProdModeTest.BuildChainCustomizerEntry(
KubernetesWithCustomResourcesTest.CustomProjectRootBuildItemProducerProdMode.class,
Collections.singletonList(CustomProjectRootBuildItem.class), Collections.emptyList()));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
final Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("kubernetes.yml"));
assertThat(kubernetesList).filteredOn(i -> i instanceof StatefulSet).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(StatefulSet.class, s -> {
assertThat(s.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo(APP_NAME);
});
assertThat(s.getSpec()).satisfies(statefulSetSpec -> {
assertThat(statefulSetSpec.getServiceName()).isEqualTo(APP_NAME);
assertThat(statefulSetSpec.getReplicas()).isEqualTo(42);
assertThat(statefulSetSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getTerminationGracePeriodSeconds()).isEqualTo(10);
assertThat(podSpec.getContainers()).allMatch(c -> APP_NAME.equals(c.getName()));
});
});
assertThat(statefulSetSpec.getSelector()).satisfies(ls -> {
assertThat(ls.getMatchLabels()).containsEntry("app.kubernetes.io/name", APP_NAME);
assertThat(ls.getMatchLabels()).containsEntry("custom-label", "my-label");
});
});
});
});
}
}
| KubernetesWithInputStatefulSetResourcesTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java | {
"start": 7931,
"end": 11605
} | class ____ extends CombineFileInputFormat<Text, Text> {
@SuppressWarnings("unchecked")
@Override
public RecordReader<Text,Text> createRecordReader(InputSplit split,
TaskAttemptContext context) throws IOException {
return new CombineFileRecordReader((CombineFileSplit) split, context,
(Class) DummyRecordReader.class);
}
}
@Test
public void testRecordReaderInit() throws InterruptedException, IOException {
// Test that we properly initialize the child recordreader when
// CombineFileInputFormat and CombineFileRecordReader are used.
TaskAttemptID taskId = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
Configuration conf1 = new Configuration();
conf1.set(DUMMY_KEY, "STATE1");
TaskAttemptContext context1 = new TaskAttemptContextImpl(conf1, taskId);
// This will create a CombineFileRecordReader that itself contains a
// DummyRecordReader.
InputFormat inputFormat = new ChildRRInputFormat();
Path [] files = { new Path("file1") };
long [] lengths = { 1 };
CombineFileSplit split = new CombineFileSplit(files, lengths);
RecordReader rr = inputFormat.createRecordReader(split, context1);
assertTrue(rr instanceof CombineFileRecordReader, "Unexpected RR type!");
// Verify that the initial configuration is the one being used.
// Right after construction the dummy key should have value "STATE1"
assertEquals("STATE1", rr.getCurrentKey().toString(),
"Invalid initial dummy key value");
// Switch the active context for the RecordReader...
Configuration conf2 = new Configuration();
conf2.set(DUMMY_KEY, "STATE2");
TaskAttemptContext context2 = new TaskAttemptContextImpl(conf2, taskId);
rr.initialize(split, context2);
// And verify that the new context is updated into the child record reader.
assertEquals("STATE2", rr.getCurrentKey().toString(),
"Invalid secondary dummy key value");
}
@Test
public void testReinit() throws Exception {
// Test that a split containing multiple files works correctly,
// with the child RecordReader getting its initialize() method
// called a second time.
TaskAttemptID taskId = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
Configuration conf = new Configuration();
TaskAttemptContext context = new TaskAttemptContextImpl(conf, taskId);
// This will create a CombineFileRecordReader that itself contains a
// DummyRecordReader.
InputFormat inputFormat = new ChildRRInputFormat();
Path [] files = { new Path("file1"), new Path("file2") };
long [] lengths = { 1, 1 };
CombineFileSplit split = new CombineFileSplit(files, lengths);
RecordReader rr = inputFormat.createRecordReader(split, context);
assertTrue(rr instanceof CombineFileRecordReader, "Unexpected RR type!");
// first initialize() call comes from MapTask. We'll do it here.
rr.initialize(split, context);
// First value is first filename.
assertTrue(rr.nextKeyValue());
assertEquals("file1", rr.getCurrentValue().toString());
// The inner RR will return false, because it only emits one (k, v) pair.
// But there's another sub-split to process. This returns true to us.
assertTrue(rr.nextKeyValue());
// And the 2nd rr will have its initialize method called correctly.
assertEquals("file2", rr.getCurrentValue().toString());
// But after both child RR's have returned their singleton (k, v), this
// should also return false.
assertFalse(rr.nextKeyValue());
}
/**
* For testing each split has the expected name, length, and offset.
*/
private final | ChildRRInputFormat |
java | spring-projects__spring-boot | module/spring-boot-http-converter/src/test/java/org/springframework/boot/http/converter/autoconfigure/HttpMessageConvertersAutoConfigurationTests.java | {
"start": 20060,
"end": 20295
} | class ____ {
@Bean
XmlMapper xmlMapper() {
return new XmlMapper();
}
@Bean
XmlMapper.Builder builder() {
return XmlMapper.builder();
}
}
@Configuration(proxyBeanMethods = false)
static | JacksonXmlMapperBuilderConfig |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/launch/LauncherTests.java | {
"start": 4191,
"end": 6100
} | class ____ {
@BeforeEach
void setup() {
System.setProperty(JarModeRunner.DISABLE_SYSTEM_EXIT, "true");
}
@AfterEach
void cleanup() {
System.clearProperty("jarmode");
System.clearProperty(JarModeRunner.DISABLE_SYSTEM_EXIT);
}
@Test
void launchWhenJarModePropertyIsSetLaunchesJarMode(CapturedOutput out) throws Exception {
System.setProperty("jarmode", "test");
new JarModeTestLauncher().launch(new String[] { "boot" });
assertThat(out).contains("running in test jar mode [boot]");
assertThat(System.getProperty(JarModeRunner.SUPPRESSED_SYSTEM_EXIT_CODE)).isEqualTo("0");
}
@Test
void launchWhenJarModePropertyIsNotAcceptedThrowsException(CapturedOutput out) throws Exception {
System.setProperty("jarmode", "idontexist");
new JarModeTestLauncher().launch(new String[] { "boot" });
assertThat(out).contains("Unsupported jarmode 'idontexist'");
assertThat(System.getProperty(JarModeRunner.SUPPRESSED_SYSTEM_EXIT_CODE)).isEqualTo("1");
}
@Test
void launchWhenJarModeRunFailsWithErrorExceptionPrintsSimpleMessage(CapturedOutput out) throws Exception {
System.setProperty("jarmode", "test");
new JarModeTestLauncher().launch(new String[] { "error" });
assertThat(out).contains("running in test jar mode [error]");
assertThat(out).contains("Error: error message");
assertThat(System.getProperty(JarModeRunner.SUPPRESSED_SYSTEM_EXIT_CODE)).isEqualTo("1");
}
@Test
void launchWhenJarModeRunFailsWithErrorExceptionPrintsStackTrace(CapturedOutput out) throws Exception {
System.setProperty("jarmode", "test");
new JarModeTestLauncher().launch(new String[] { "fail" });
assertThat(out).contains("running in test jar mode [fail]");
assertThat(out).contains("java.lang.IllegalStateException: bad");
assertThat(System.getProperty(JarModeRunner.SUPPRESSED_SYSTEM_EXIT_CODE)).isEqualTo("1");
}
private static final | JarMode |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/observers/InnerQueuedObserverTest.java | {
"start": 784,
"end": 952
} | class ____ extends RxJavaTest {
@Test
public void dispose() {
TestHelper.checkDisposed(new InnerQueuedObserver<>(null, 1));
}
}
| InnerQueuedObserverTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java | {
"start": 1388,
"end": 1554
} | interface ____ find the available
* KeyProviders and create them based on the list of URIs.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract | to |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/Property.java | {
"start": 1051,
"end": 4467
} | class ____ {
public final int ordinal;
public final Class<?> type;
public final String name;
public final boolean primaryKey;
public final String columnName;
public Property(int ordinal, Class<?> type, String name, boolean primaryKey, String columnName) {
this.ordinal = ordinal;
this.type = type;
this.name = name;
this.primaryKey = primaryKey;
this.columnName = columnName;
}
/** Creates an "equal ('=')" condition for this property. */
public WhereCondition eq(Object value) {
return new PropertyCondition(this, "=?", value);
}
/** Creates an "not equal ('<>')" condition for this property. */
public WhereCondition notEq(Object value) {
return new PropertyCondition(this, "<>?", value);
}
/** Creates an "LIKE" condition for this property. */
public WhereCondition like(String value) {
return new PropertyCondition(this, " LIKE ?", value);
}
/** Creates an "BETWEEN ... AND ..." condition for this property. */
public WhereCondition between(Object value1, Object value2) {
Object[] values = { value1, value2 };
return new PropertyCondition(this, " BETWEEN ? AND ?", values);
}
/** Creates an "IN (..., ..., ...)" condition for this property. */
public WhereCondition in(Object... inValues) {
StringBuilder condition = new StringBuilder(" IN (");
SqlUtils.appendPlaceholders(condition, inValues.length).append(')');
return new PropertyCondition(this, condition.toString(), inValues);
}
/** Creates an "IN (..., ..., ...)" condition for this property. */
public WhereCondition in(Collection<?> inValues) {
return in(inValues.toArray());
}
/** Creates an "NOT IN (..., ..., ...)" condition for this property. */
public WhereCondition notIn(Object... notInValues) {
StringBuilder condition = new StringBuilder(" NOT IN (");
SqlUtils.appendPlaceholders(condition, notInValues.length).append(')');
return new PropertyCondition(this, condition.toString(), notInValues);
}
/** Creates an "NOT IN (..., ..., ...)" condition for this property. */
public WhereCondition notIn(Collection<?> notInValues) {
return notIn(notInValues.toArray());
}
/** Creates an "greater than ('>')" condition for this property. */
public WhereCondition gt(Object value) {
return new PropertyCondition(this, ">?", value);
}
/** Creates an "less than ('<')" condition for this property. */
public WhereCondition lt(Object value) {
return new PropertyCondition(this, "<?", value);
}
/** Creates an "greater or equal ('>=')" condition for this property. */
public WhereCondition ge(Object value) {
return new PropertyCondition(this, ">=?", value);
}
/** Creates an "less or equal ('<=')" condition for this property. */
public WhereCondition le(Object value) {
return new PropertyCondition(this, "<=?", value);
}
/** Creates an "IS NULL" condition for this property. */
public WhereCondition isNull() {
return new PropertyCondition(this, " IS NULL");
}
/** Creates an "IS NOT NULL" condition for this property. */
public WhereCondition isNotNull() {
return new PropertyCondition(this, " IS NOT NULL");
}
}
| Property |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/matchers/GreaterThan.java | {
"start": 197,
"end": 523
} | class ____<T extends Comparable<T>> extends CompareTo<T> implements Serializable {
public GreaterThan(T value) {
super(value);
}
@Override
protected String getName() {
return "gt";
}
@Override
protected boolean matchResult(int result) {
return result > 0;
}
}
| GreaterThan |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/functions/Functions.java | {
"start": 24010,
"end": 24311
} | class ____ implements Consumer<Subscription> {
final int bufferSize;
BoundedConsumer(int bufferSize) {
this.bufferSize = bufferSize;
}
@Override
public void accept(Subscription s) {
s.request(bufferSize);
}
}
}
| BoundedConsumer |
java | apache__camel | components/camel-stream/src/generated/java/org/apache/camel/component/stream/StreamEndpointConfigurer.java | {
"start": 733,
"end": 8729
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
StreamEndpoint target = (StreamEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "appendnewline":
case "appendNewLine": target.setAppendNewLine(property(camelContext, boolean.class, value)); return true;
case "autoclosecount":
case "autoCloseCount": target.setAutoCloseCount(property(camelContext, int.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "closeondone":
case "closeOnDone": target.setCloseOnDone(property(camelContext, boolean.class, value)); return true;
case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
case "encoding": target.setEncoding(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "filename":
case "fileName": target.setFileName(property(camelContext, java.lang.String.class, value)); return true;
case "filewatcher":
case "fileWatcher": target.setFileWatcher(property(camelContext, boolean.class, value)); return true;
case "grouplines":
case "groupLines": target.setGroupLines(property(camelContext, int.class, value)); return true;
case "groupstrategy":
case "groupStrategy": target.setGroupStrategy(property(camelContext, org.apache.camel.component.stream.GroupStrategy.class, value)); return true;
case "httpheaders":
case "httpHeaders": target.setHttpHeaders(property(camelContext, java.lang.String.class, value)); return true;
case "httpurl":
case "httpUrl": target.setHttpUrl(property(camelContext, java.lang.String.class, value)); return true;
case "initialpromptdelay":
case "initialPromptDelay": target.setInitialPromptDelay(property(camelContext, long.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "promptdelay":
case "promptDelay": target.setPromptDelay(property(camelContext, long.class, value)); return true;
case "promptmessage":
case "promptMessage": target.setPromptMessage(property(camelContext, java.lang.String.class, value)); return true;
case "readline":
case "readLine": target.setReadLine(property(camelContext, boolean.class, value)); return true;
case "readtimeout":
case "readTimeout": target.setReadTimeout(property(camelContext, int.class, value)); return true;
case "retry": target.setRetry(property(camelContext, boolean.class, value)); return true;
case "scanstream":
case "scanStream": target.setScanStream(property(camelContext, boolean.class, value)); return true;
case "scanstreamdelay":
case "scanStreamDelay": target.setScanStreamDelay(property(camelContext, long.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "appendnewline":
case "appendNewLine": return boolean.class;
case "autoclosecount":
case "autoCloseCount": return int.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "closeondone":
case "closeOnDone": return boolean.class;
case "delay": return long.class;
case "encoding": return java.lang.String.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "filename":
case "fileName": return java.lang.String.class;
case "filewatcher":
case "fileWatcher": return boolean.class;
case "grouplines":
case "groupLines": return int.class;
case "groupstrategy":
case "groupStrategy": return org.apache.camel.component.stream.GroupStrategy.class;
case "httpheaders":
case "httpHeaders": return java.lang.String.class;
case "httpurl":
case "httpUrl": return java.lang.String.class;
case "initialpromptdelay":
case "initialPromptDelay": return long.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "promptdelay":
case "promptDelay": return long.class;
case "promptmessage":
case "promptMessage": return java.lang.String.class;
case "readline":
case "readLine": return boolean.class;
case "readtimeout":
case "readTimeout": return int.class;
case "retry": return boolean.class;
case "scanstream":
case "scanStream": return boolean.class;
case "scanstreamdelay":
case "scanStreamDelay": return long.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
StreamEndpoint target = (StreamEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "appendnewline":
case "appendNewLine": return target.isAppendNewLine();
case "autoclosecount":
case "autoCloseCount": return target.getAutoCloseCount();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "closeondone":
case "closeOnDone": return target.isCloseOnDone();
case "delay": return target.getDelay();
case "encoding": return target.getEncoding();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "filename":
case "fileName": return target.getFileName();
case "filewatcher":
case "fileWatcher": return target.isFileWatcher();
case "grouplines":
case "groupLines": return target.getGroupLines();
case "groupstrategy":
case "groupStrategy": return target.getGroupStrategy();
case "httpheaders":
case "httpHeaders": return target.getHttpHeaders();
case "httpurl":
case "httpUrl": return target.getHttpUrl();
case "initialpromptdelay":
case "initialPromptDelay": return target.getInitialPromptDelay();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "promptdelay":
case "promptDelay": return target.getPromptDelay();
case "promptmessage":
case "promptMessage": return target.getPromptMessage();
case "readline":
case "readLine": return target.isReadLine();
case "readtimeout":
case "readTimeout": return target.getReadTimeout();
case "retry": return target.isRetry();
case "scanstream":
case "scanStream": return target.isScanStream();
case "scanstreamdelay":
case "scanStreamDelay": return target.getScanStreamDelay();
default: return null;
}
}
}
| StreamEndpointConfigurer |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/CamelContextHelper.java | {
"start": 21414,
"end": 29761
} | class ____ convert the value to
* @param text the text
* @return the boolean vale, or <tt>null</tt> if the text was <tt>null</tt>
* @throws IllegalArgumentException is thrown if illegal argument or type conversion not possible
*/
public static <T> T parse(CamelContext camelContext, Class<T> clazz, String text) {
// ensure we support property placeholders
String s = camelContext.resolvePropertyPlaceholders(text);
if (s != null) {
try {
return camelContext.getTypeConverter().mandatoryConvertTo(clazz, s);
} catch (Exception e) {
if (s.equals(text)) {
throw new IllegalArgumentException("Error parsing [" + s + "] as a " + clazz.getName() + ".", e);
} else {
throw new IllegalArgumentException(
"Error parsing [" + s + "] from property " + text + " as a " + clazz.getName() + ".", e);
}
}
}
return null;
}
/**
* Gets the route startup order for the given route id
*
* @param camelContext the camel context
* @param routeId the id of the route
* @return the startup order, or <tt>0</tt> if not possible to determine
*/
public static int getRouteStartupOrder(CamelContext camelContext, String routeId) {
for (RouteStartupOrder order : camelContext.getCamelContextExtension().getRouteStartupOrder()) {
if (order.getRoute().getId().equals(routeId)) {
return order.getStartupOrder();
}
}
return 0;
}
/**
* A helper method to access a camel context properties with a prefix
*
* @param prefix the prefix
* @param camelContext the camel context
* @return the properties which holds the camel context properties with the prefix, and the key omit
* the prefix part
*/
public static Properties getCamelPropertiesWithPrefix(String prefix, CamelContext camelContext) {
Properties answer = new Properties();
Map<String, String> camelProperties = camelContext.getGlobalOptions();
if (camelProperties != null) {
for (Map.Entry<String, String> entry : camelProperties.entrySet()) {
String key = entry.getKey();
if (key != null && key.startsWith(prefix)) {
answer.put(key.substring(prefix.length()), entry.getValue());
}
}
}
return answer;
}
/**
* Gets the route id the given node belongs to.
*
* @param node the node
* @return the route id, or <tt>null</tt> if not possible to find
*/
public static String getRouteId(NamedNode node) {
NamedNode parent = node;
while (parent != null && parent.getParent() != null) {
parent = parent.getParent();
}
return parent != null ? parent.getId() : null;
}
/**
* Gets the route the given node belongs to.
*
* @param node the node
* @return the route, or <tt>null</tt> if not possible to find
*/
public static NamedRoute getRoute(NamedNode node) {
NamedNode parent = node;
while (parent != null && parent.getParent() != null) {
parent = parent.getParent();
}
if (parent instanceof NamedRoute namedRoute) {
return namedRoute;
}
return null;
}
/**
* Gets the {@link RestConfiguration} from the {@link CamelContext} and check if the component which consumes the
* configuration is compatible with the one for which the rest configuration is set-up.
*
* @param camelContext the camel context
* @param component the component that will consume the {@link RestConfiguration}
* @return the {@link RestConfiguration}
* @throws IllegalArgumentException is the component is not compatible with the {@link RestConfiguration} set-up
*/
public static RestConfiguration getRestConfiguration(CamelContext camelContext, String component) {
RestConfiguration configuration = camelContext.getRestConfiguration();
validateRestConfigurationComponent(component, configuration.getComponent());
return configuration;
}
/**
* Gets the {@link RestConfiguration} from the {@link CamelContext} and check if the component which consumes the
* configuration is compatible with the one for which the rest configuration is set-up.
*
* @param camelContext the camel context
* @param component the component that will consume the {@link RestConfiguration}
* @param producerComponent the producer component that will consume the {@link RestConfiguration}
* @return the {@link RestConfiguration}
* @throws IllegalArgumentException is the component is not compatible with the {@link RestConfiguration} set-up
*/
public static RestConfiguration getRestConfiguration(
CamelContext camelContext, String component, String producerComponent) {
RestConfiguration configuration = camelContext.getRestConfiguration();
validateRestConfigurationComponent(component, configuration.getComponent());
validateRestConfigurationComponent(producerComponent, configuration.getProducerComponent());
return configuration;
}
/**
* Gets the components from the given {@code CamelContext} that match with the given predicate.
*
* @param camelContext the camel context
* @param predicate the predicate to evaluate to know whether a given component should be returned or not.
* @return the existing components that match the predicate.
*/
public static List<Component> getComponents(CamelContext camelContext, Predicate<Component> predicate) {
return camelContext.getComponentNames().stream()
.map(camelContext::getComponent)
.filter(predicate)
.toList();
}
/**
* Gets the endpoints from the given {@code CamelContext} that match with the given predicate
*
* @param camelContext the camel context
* @param predicate the predicate to evaluate to know whether a given endpoint should be returned or not.
* @return the existing endpoints that match the predicate.
*/
public static List<Endpoint> getEndpoints(CamelContext camelContext, Predicate<Endpoint> predicate) {
return camelContext.getEndpoints().stream()
.filter(predicate)
.toList();
}
private static void validateRestConfigurationComponent(String component, String configurationComponent) {
if (ObjectHelper.isEmpty(component) || ObjectHelper.isEmpty(configurationComponent)) {
return;
}
if (!Objects.equals(component, configurationComponent)) {
throw new IllegalArgumentException(
"No RestConfiguration for component: " + component + " found, RestConfiguration targets: "
+ configurationComponent);
}
}
/**
* Gets the uptime in a human-readable format
*
* @return the uptime in days/hours/minutes
*/
public static String getUptime(CamelContext context) {
long delta = context.getUptime().toMillis();
if (delta == 0) {
return "0ms";
}
return TimeUtils.printDuration(delta);
}
/**
* Gets the uptime in milliseconds
*
* @return the uptime in milliseconds
*/
public static long getUptimeMillis(CamelContext context) {
return context.getUptime().toMillis();
}
/**
* Gets the date and time Camel was started up.
*/
public static Date getStartDate(CamelContext context) {
EventClock<ContextEvents> contextClock = context.getClock();
final Clock clock = contextClock.get(ContextEvents.START);
if (clock == null) {
return null;
}
return clock.asDate();
}
}
| to |
java | spring-projects__spring-security | messaging/src/test/java/org/springframework/security/messaging/web/csrf/XorCsrfChannelInterceptorTests.java | {
"start": 1573,
"end": 8321
} | class ____ {
private static final String XOR_CSRF_TOKEN_VALUE = "wpe7zB62-NCpcA==";
private static final String INVALID_XOR_CSRF_TOKEN_VALUE = "KneoaygbRZtfHQ==";
private CsrfToken token;
private SimpMessageHeaderAccessor messageHeaders;
private MessageChannel channel;
private XorCsrfChannelInterceptor interceptor;
@BeforeEach
public void setup() {
this.token = new DefaultCsrfToken("header", "param", "token");
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.CONNECT);
this.messageHeaders.setSessionAttributes(new HashMap<>());
this.channel = mock(MessageChannel.class);
this.interceptor = new XorCsrfChannelInterceptor();
}
@Test
public void preSendWhenConnectWithValidTokenThenSuccess() {
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), XOR_CSRF_TOKEN_VALUE);
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenConnectWithInvalidTokenThenThrowsInvalidCsrfTokenException() {
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), INVALID_XOR_CSRF_TOKEN_VALUE);
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
@Test
public void preSendWhenConnectWithNoTokenThenThrowsInvalidCsrfTokenException() {
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
@Test
public void preSendWhenConnectWithMissingTokenThenThrowsMissingCsrfTokenException() {
// @formatter:off
assertThatExceptionOfType(MissingCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
@Test
public void preSendWhenConnectWithNullSessionAttributesThenThrowsMissingCsrfTokenException() {
this.messageHeaders.setSessionAttributes(null);
// @formatter:off
assertThatExceptionOfType(MissingCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
@Test
public void preSendWhenAckThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.CONNECT_ACK);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenDisconnectThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.DISCONNECT);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenHeartbeatThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.HEARTBEAT);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenMessageThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.MESSAGE);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenOtherThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.OTHER);
this.interceptor.preSend(message(), this.channel);
}
@Test
public void preSendWhenUnsubscribeThenIgnores() {
this.messageHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.UNSUBSCRIBE);
this.interceptor.preSend(message(), this.channel);
}
// gh-13310, gh-15184
@Test
public void preSendWhenCsrfBytesIsShorterThanRandomBytesThenThrowsInvalidCsrfTokenException() {
/*
* Token format: 3 random pad bytes + 2 padded bytes.
*/
byte[] actualBytes = { 1, 1, 1, 96, 99 };
String actualToken = Base64.getEncoder().encodeToString(actualBytes);
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), actualToken);
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
// gh-13310, gh-15184
@Test
public void preSendWhenCsrfBytesIsLongerThanRandomBytesThenThrowsInvalidCsrfTokenException() {
/*
* Token format: 3 random pad bytes + 4 padded bytes.
*/
byte[] actualBytes = { 1, 1, 1, 96, 99, 98, 97 };
String actualToken = Base64.getEncoder().encodeToString(actualBytes);
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), actualToken);
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
// gh-13310, gh-15184
@Test
public void preSendWhenTokenBytesIsShorterThanActualBytesThenThrowsInvalidCsrfTokenException() {
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), XOR_CSRF_TOKEN_VALUE);
CsrfToken csrfToken = new DefaultCsrfToken("header", "param", "a");
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), csrfToken);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
// gh-13310, gh-15184
@Test
public void preSendWhenTokenBytesIsLongerThanActualBytesThenThrowsInvalidCsrfTokenException() {
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), XOR_CSRF_TOKEN_VALUE);
CsrfToken csrfToken = new DefaultCsrfToken("header", "param", "abcde");
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), csrfToken);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
// gh-13310, gh-15184
@Test
public void preSendWhenActualBytesIsEmptyThenThrowsInvalidCsrfTokenException() {
this.messageHeaders.setNativeHeader(this.token.getHeaderName(), "");
this.messageHeaders.getSessionAttributes().put(CsrfToken.class.getName(), this.token);
// @formatter:off
assertThatExceptionOfType(InvalidCsrfTokenException.class)
.isThrownBy(() -> this.interceptor.preSend(message(), mock(MessageChannel.class)));
// @formatter:on
}
private Message<String> message() {
return MessageBuilder.withPayload("message").copyHeaders(this.messageHeaders.toMap()).build();
}
}
| XorCsrfChannelInterceptorTests |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/BeanPropertyRowMapper.java | {
"start": 1964,
"end": 2198
} | class ____ {@code static} nested class, and it must have a default or
* no-arg constructor.
*
* <p>Column values are mapped based on matching the column name (as obtained from
* result set meta-data) to public setters in the target | or |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/vertx/OpenApiDefaultPathTestCase.java | {
"start": 256,
"end": 1584
} | class ____ {
private static final String OPEN_API_PATH = "/q/openapi";
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(OpenApiRoute.class));
@Test
public void testOpenApiPathAccessResource() {
RestAssured.given().header("Accept", "application/yaml")
.when().get(OPEN_API_PATH)
.then().header("Content-Type", "application/yaml;charset=UTF-8");
RestAssured.given().queryParam("format", "YAML")
.when().get(OPEN_API_PATH)
.then().header("Content-Type", "application/yaml;charset=UTF-8");
RestAssured.given().header("Accept", "application/json")
.when().get(OPEN_API_PATH)
.then().header("Content-Type", "application/json;charset=UTF-8");
RestAssured.given().queryParam("format", "JSON")
.when().get(OPEN_API_PATH)
.then()
.header("Content-Type", "application/json;charset=UTF-8")
.body("openapi", Matchers.startsWith("3.1"))
.body("info.title", Matchers.equalTo("quarkus-smallrye-openapi-deployment API"))
.body("paths", Matchers.hasKey("/resource"));
}
}
| OpenApiDefaultPathTestCase |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProvider2Test.java | {
"start": 38225,
"end": 38342
} | interface ____ {
public Insurance<Mustang> create(Mustang car, double premium);
}
public | MustangInsuranceFactory |
java | apache__kafka | server-common/src/test/java/org/apache/kafka/server/util/InterBrokerSendThreadTest.java | {
"start": 13804,
"end": 14107
} | class ____<T extends AbstractRequest>
extends AbstractRequest.Builder<T> {
private StubRequestBuilder() {
super(ApiKeys.END_TXN);
}
@Override
public T build(short version) {
return null;
}
}
private static | StubRequestBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptTermStats.java | {
"start": 1148,
"end": 8055
} | class ____ {
private final IntSupplier docIdSupplier;
private final Term[] terms;
private final IndexSearcher searcher;
private final LeafReaderContext leafReaderContext;
private final StatsSummary statsSummary = new StatsSummary();
private final Supplier<TermStates[]> termContextsSupplier;
private final Supplier<PostingsEnum[]> postingsSupplier;
private final Supplier<StatsSummary> docFreqSupplier;
private final Supplier<StatsSummary> totalTermFreqSupplier;
public ScriptTermStats(IndexSearcher searcher, LeafReaderContext leafReaderContext, IntSupplier docIdSupplier, Set<Term> terms) {
this.searcher = searcher;
this.leafReaderContext = leafReaderContext;
this.docIdSupplier = docIdSupplier;
this.terms = terms.toArray(new Term[0]);
this.termContextsSupplier = CachedSupplier.wrap(this::loadTermContexts);
this.postingsSupplier = CachedSupplier.wrap(this::loadPostings);
this.docFreqSupplier = CachedSupplier.wrap(this::loadDocFreq);
this.totalTermFreqSupplier = CachedSupplier.wrap(this::loadTotalTermFreq);
}
/**
* Number of unique terms in the query.
*
* @return the number of unique terms
*/
public int uniqueTermsCount() {
return terms.length;
}
/**
* Number of terms that are matched im the query.
*
* @return the number of matched terms
*/
public int matchedTermsCount() {
final int docId = docIdSupplier.getAsInt();
int matchedTerms = 0;
advancePostings(docId);
for (PostingsEnum postingsEnum : postingsSupplier.get()) {
if (postingsEnum != null && postingsEnum.docID() == docId) {
matchedTerms++;
}
}
return matchedTerms;
}
/**
* Collect docFreq (number of documents a term occurs in) for the terms of the query and returns statistics for them.
*
* @return statistics on docFreq for the terms of the query.
*/
public StatsSummary docFreq() {
return docFreqSupplier.get();
}
private StatsSummary loadDocFreq() {
StatsSummary docFreqStats = new StatsSummary();
TermStates[] termContexts = termContextsSupplier.get();
try {
for (int i = 0; i < termContexts.length; i++) {
if (searcher instanceof ContextIndexSearcher contextIndexSearcher) {
docFreqStats.accept(contextIndexSearcher.docFreq(terms[i], termContexts[i].docFreq()));
} else {
docFreqStats.accept(termContexts[i].docFreq());
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return docFreqStats;
}
/**
* Collect totalTermFreq (total number of occurrence of a term in the index) for the terms of the query and returns statistics for them.
*
* @return statistics on totalTermFreq for the terms of the query.
*/
public StatsSummary totalTermFreq() {
return this.totalTermFreqSupplier.get();
}
private StatsSummary loadTotalTermFreq() {
StatsSummary totalTermFreqStats = new StatsSummary();
TermStates[] termContexts = termContextsSupplier.get();
try {
for (int i = 0; i < termContexts.length; i++) {
if (searcher instanceof ContextIndexSearcher contextIndexSearcher) {
totalTermFreqStats.accept(contextIndexSearcher.totalTermFreq(terms[i], termContexts[i].totalTermFreq()));
} else {
totalTermFreqStats.accept(termContexts[i].totalTermFreq());
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return totalTermFreqStats;
}
/**
* Collect totalFreq (number of occurrence of a term in the current doc for the terms of the query and returns statistics for them.
*
* @return statistics on totalTermFreq for the terms of the query in the current dac
*/
public StatsSummary termFreq() {
statsSummary.reset();
final int docId = docIdSupplier.getAsInt();
try {
advancePostings(docId);
for (PostingsEnum postingsEnum : postingsSupplier.get()) {
if (postingsEnum == null || postingsEnum.docID() != docId) {
statsSummary.accept(0);
} else {
statsSummary.accept(postingsEnum.freq());
}
}
return statsSummary;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
* Collect termPositions (positions of a term in the current document) for the terms of the query and returns statistics for them.
*
* @return statistics on termPositions for the terms of the query in the current dac
*/
public StatsSummary termPositions() {
statsSummary.reset();
int docId = docIdSupplier.getAsInt();
try {
advancePostings(docId);
for (PostingsEnum postingsEnum : postingsSupplier.get()) {
if (postingsEnum == null || postingsEnum.docID() != docId) {
continue;
}
for (int i = 0; i < postingsEnum.freq(); i++) {
statsSummary.accept(postingsEnum.nextPosition() + 1);
}
}
return statsSummary;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private TermStates[] loadTermContexts() {
try {
TermStates[] termContexts = new TermStates[terms.length];
for (int i = 0; i < terms.length; i++) {
termContexts[i] = TermStates.build(searcher, terms[i], true);
}
return termContexts;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private PostingsEnum[] loadPostings() {
try {
PostingsEnum[] postings = new PostingsEnum[terms.length];
for (int i = 0; i < terms.length; i++) {
postings[i] = leafReaderContext.reader().postings(terms[i], PostingsEnum.POSITIONS);
}
return postings;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void advancePostings(int targetDocId) {
try {
for (PostingsEnum posting : postingsSupplier.get()) {
if (posting != null && posting.docID() < targetDocId && posting.docID() != DocIdSetIterator.NO_MORE_DOCS) {
posting.advance(targetDocId);
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
| ScriptTermStats |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/filter/NoMarkerFilter.java | {
"start": 1611,
"end": 6314
} | class ____ extends AbstractFilter {
private NoMarkerFilter(final Result onMatch, final Result onMismatch) {
super(onMatch, onMismatch);
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final String msg, final Object... params) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final Object msg, final Throwable t) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final Message msg, final Throwable t) {
return filter(marker);
}
@Override
public Result filter(final LogEvent event) {
return filter(event.getMarker());
}
@Override
public Result filter(
final Logger logger, final Level level, final Marker marker, final String msg, final Object p0) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8) {
return filter(marker);
}
@Override
public Result filter(
final Logger logger,
final Level level,
final Marker marker,
final String msg,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8,
final Object p9) {
return filter(marker);
}
private Result filter(final Marker marker) {
return null == marker ? onMatch : onMismatch;
}
@PluginBuilderFactory
public static Builder newBuilder() {
return new Builder();
}
public static | NoMarkerFilter |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/struct/TestPOJOAsArray.java | {
"start": 2139,
"end": 2476
} | class ____ extends JacksonAnnotationIntrospector
{
private static final long serialVersionUID = 1L;
@Override
public JsonFormat.Value findFormat(MapperConfig<?> config, Annotated a) {
return new JsonFormat.Value().withShape(JsonFormat.Shape.ARRAY);
}
}
static | ForceArraysIntrospector |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/PrimitiveBasicTypeImpl.java | {
"start": 262,
"end": 649
} | class ____<J> extends BasicTypeImpl<J> {
private final Class<J> primitiveClass;
public PrimitiveBasicTypeImpl(JavaType<J> javaType, JdbcType jdbcType, Class<J> primitiveClass) {
super( javaType, jdbcType );
assert primitiveClass.isPrimitive();
this.primitiveClass = primitiveClass;
}
@Override
public Class<J> getJavaType() {
return primitiveClass;
}
}
| PrimitiveBasicTypeImpl |
java | apache__camel | components/camel-xslt/src/main/java/org/apache/camel/component/xslt/DefaultTransformErrorHandler.java | {
"start": 1546,
"end": 2864
} | class ____ implements ErrorHandler, ErrorListener {
private final Exchange exchange;
public DefaultTransformErrorHandler(Exchange exchange) {
this.exchange = exchange;
}
@Override
public void error(SAXParseException exception) throws SAXException {
exchange.setProperty(Exchange.XSLT_ERROR, exception);
throw exception;
}
@Override
public void fatalError(SAXParseException exception) throws SAXException {
exchange.setProperty(Exchange.XSLT_FATAL_ERROR, exception);
throw exception;
}
@Override
public void warning(SAXParseException exception) throws SAXException {
exchange.setProperty(Exchange.XSLT_WARNING, exception);
}
@Override
public void error(TransformerException exception) throws TransformerException {
exchange.setProperty(Exchange.XSLT_ERROR, exception);
throw exception;
}
@Override
public void fatalError(TransformerException exception) throws TransformerException {
exchange.setProperty(Exchange.XSLT_FATAL_ERROR, exception);
throw exception;
}
@Override
public void warning(TransformerException exception) throws TransformerException {
exchange.setProperty(Exchange.XSLT_WARNING, exception);
}
}
| DefaultTransformErrorHandler |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesContainers.java | {
"start": 3716,
"end": 8051
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
try {
userName = UserGroupInformation.getCurrentUser().getShortUserName();
} catch (IOException ioe) {
throw new RuntimeException("Unable to get current user name "
+ ioe.getMessage(), ioe);
}
Configuration conf = new Configuration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
ResourceScheduler.class);
conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
rm = new MockRM(conf);
bind(rm).to(ResourceManager.class).named("rm");
bind(conf).to(Configuration.class).named("conf");
HttpServletRequest request = mock(HttpServletRequest.class);
Principal principal = () -> userName;
when(request.getUserPrincipal()).thenReturn(principal);
HttpServletResponse response = mock(HttpServletResponse.class);
bind(request).to(HttpServletRequest.class);
bind(response).to(HttpServletResponse.class);
}
}
@BeforeEach
@Override
public void setUp() throws Exception {
super.setUp();
}
public TestRMWebServicesContainers() {
}
@Test
public void testSignalContainer() throws Exception {
rm.start();
MockNM nm = rm.registerNode("127.0.0.1:1234", 2048);
RMApp app = MockRMAppSubmitter.submit(rm,
MockRMAppSubmissionData.Builder.createWithMemory(1024, rm).build());
nm.nodeHeartbeat(true);
MockRM
.waitForState(app.getCurrentAppAttempt(), RMAppAttemptState.ALLOCATED);
rm.sendAMLaunched(app.getCurrentAppAttempt().getAppAttemptId());
WebTarget r = target();
// test error command
Response response =
r.path("ws").path("v1").path("cluster").path("containers").path(
app.getCurrentAppAttempt().getMasterContainer().getId().toString())
.path("signal")
.path("not-exist-signal")
.queryParam("user.name", userName)
.request(MediaType.APPLICATION_JSON).post(null, Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus());
assertTrue(response.readEntity(String.class).contains("Invalid command: NOT-EXIST-SIGNAL"));
// test error containerId
response =
r.path("ws").path("v1").path("cluster").path("containers").path("XXX")
.path("signal")
.path(SignalContainerCommand.OUTPUT_THREAD_DUMP.name())
.queryParam("user.name", userName)
.request()
.accept(MediaType.APPLICATION_JSON)
.post(null, Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus());
assertTrue(
response.readEntity(String.class).contains("Invalid ContainerId"));
// test correct signal by owner
response =
r.path("ws").path("v1").path("cluster").path("containers").path(
app.getCurrentAppAttempt().getMasterContainer().getId().toString())
.path("signal")
.path(SignalContainerCommand.OUTPUT_THREAD_DUMP.name())
.queryParam("user.name", userName)
.request()
.accept(MediaType.APPLICATION_JSON)
.post(null, Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
// test correct signal by admin
response =
r.path("ws").path("v1").path("cluster").path("containers").path(
app.getCurrentAppAttempt().getMasterContainer().getId().toString())
.path("signal")
.path(SignalContainerCommand.OUTPUT_THREAD_DUMP.name())
.queryParam("user.name", "admin")
.request(MediaType.APPLICATION_JSON).post(null, Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
rm.stop();
}
}
| JerseyBinder |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/tests/StreamsSmokeTest.java | {
"start": 1288,
"end": 4535
} | class ____ {
/**
* args ::= kafka propFileName command disableAutoTerminate
* command := "run" | "process"
*
* @param args
*/
public static void main(final String[] args) throws IOException {
if (args.length < 2) {
System.err.println("StreamsSmokeTest are expecting two parameters: propFile, command; but only see " + args.length + " parameter");
Exit.exit(1);
}
final String propFileName = args[0];
final String command = args[1];
final boolean disableAutoTerminate = args.length > 2;
final Properties streamsProperties = Utils.loadProps(propFileName);
final String kafka = streamsProperties.getProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
final String processingGuarantee = streamsProperties.getProperty(StreamsConfig.PROCESSING_GUARANTEE_CONFIG);
if (kafka == null) {
System.err.println("No bootstrap kafka servers specified in " + StreamsConfig.BOOTSTRAP_SERVERS_CONFIG);
Exit.exit(1);
}
if ("process".equals(command)) {
if (!StreamsConfig.AT_LEAST_ONCE.equals(processingGuarantee) &&
!StreamsConfig.EXACTLY_ONCE_V2.equals(processingGuarantee)) {
System.err.println("processingGuarantee must be either " +
StreamsConfig.AT_LEAST_ONCE + ", " +
StreamsConfig.EXACTLY_ONCE_V2);
Exit.exit(1);
}
}
System.out.println("StreamsTest instance started (StreamsSmokeTest)");
System.out.println("command=" + command);
System.out.println("props=" + streamsProperties);
System.out.println("disableAutoTerminate=" + disableAutoTerminate);
switch (command) {
case "run":
// this starts the driver (data generation and result verification)
final int numKeys = 20;
final int maxRecordsPerKey = 1000;
if (disableAutoTerminate) {
generatePerpetually(kafka, numKeys, maxRecordsPerKey);
} else {
// slow down data production so that system tests have time to
// do their bounces, etc.
final Map<String, Set<Integer>> allData =
generate(kafka, numKeys, maxRecordsPerKey, Duration.ofSeconds(90));
SmokeTestDriver.verify(
kafka,
allData,
maxRecordsPerKey,
StreamsConfig.EXACTLY_ONCE_V2.equals(processingGuarantee)
);
}
break;
case "process":
// this starts the stream processing app
new SmokeTestClient(UUID.randomUUID().toString()).start(streamsProperties);
break;
case "close-deadlock-test":
final ShutdownDeadlockTest test = new ShutdownDeadlockTest(kafka);
test.start();
break;
default:
System.out.println("unknown command: " + command);
}
}
}
| StreamsSmokeTest |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/devmode/DevModeConstraintValidationTest.java | {
"start": 1352,
"end": 4393
} | class ____ message"));
}
@Test
public void testPropertyConstraintHotReplacement() {
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("ok"));
TEST.modifySourceFile("TestBean.java", s -> s.replace("// <placeholder2>",
"@jakarta.validation.constraints.NotNull(message=\"My property message\")"));
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("My property message"));
}
@Test
public void testMethodConstraintHotReplacement() {
RestAssured.given()
.when()
.get("/test/mymessage")
.then()
.body(containsString("mymessage"));
TEST.modifySourceFile("DependentTestBean.java", s -> s.replace("/* <placeholder> */",
"@jakarta.validation.constraints.Size(max=1, message=\"My method message\")"));
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.get("/test/mymessage")
.then()
.body(containsString("My method message"));
}
@Test
public void testNewBeanHotReplacement() {
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("ok"));
TEST.addSourceFile(NewTestBean.class);
TEST.modifySourceFile("DevModeTestResource.java", s -> s.replace("@Valid TestBean",
"@Valid NewTestBean"));
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("My new bean message"));
}
@Test
public void testNewConstraintHotReplacement() {
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("ok"));
TEST.addSourceFile(NewConstraint.class);
TEST.addSourceFile(NewValidator.class);
TEST.modifySourceFile("TestBean.java", s -> s.replace("// <placeholder2>",
"@NewConstraint"));
RestAssured.given()
.header("Content-Type", "application/json")
.when()
.body("{}")
.post("/test/validate")
.then()
.body(containsString("My new constraint message"));
}
}
| constraint |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/SqlProducerInMultiExpressionTest.java | {
"start": 898,
"end": 1509
} | class ____ extends SqlProducerInMultiTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// required for the sql component
getContext().getComponent("sql", SqlComponent.class).setDataSource(db);
from("direct:query")
.to("sql:classpath:sql/selectProjectsInMultiExpression.sql")
.to("log:query")
.to("mock:query");
}
};
}
}
| SqlProducerInMultiExpressionTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedMethodTest.java | {
"start": 15364,
"end": 15629
} | class ____ extends A {}
}
""")
.doTest();
}
@Test
public void effectivelyPrivateMethodMadeVisible_bySubclassImplementingPublicInterface() {
helper
.addSourceLines(
"Test.java",
"""
| B |
java | elastic__elasticsearch | qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartTestOrdering.java | {
"start": 607,
"end": 1001
} | class ____ implements Comparator<TestMethodAndParams> {
@Override
public int compare(TestMethodAndParams o1, TestMethodAndParams o2) {
return Integer.compare(getOrdinal(o1), getOrdinal(o2));
}
private int getOrdinal(TestMethodAndParams t) {
return ((FullClusterRestartUpgradeStatus) t.getInstanceArguments().get(0)).ordinal();
}
}
| FullClusterRestartTestOrdering |
java | google__guava | android/guava/src/com/google/common/base/CharMatcher.java | {
"start": 49509,
"end": 50203
} | class ____ extends CharMatcher {
final CharMatcher first;
final CharMatcher second;
Or(CharMatcher a, CharMatcher b) {
first = checkNotNull(a);
second = checkNotNull(b);
}
@GwtIncompatible // used only from other GwtIncompatible code
@Override
void setBits(BitSet table) {
first.setBits(table);
second.setBits(table);
}
@Override
public boolean matches(char c) {
return first.matches(c) || second.matches(c);
}
@Override
public String toString() {
return first + ".or(" + second + ")";
}
}
// Static factory implementations
/** Implementation of {@link #is(char)}. */
private static final | Or |
java | apache__camel | components/camel-jackson-avro/src/test/java/org/apache/camel/component/jackson/avro/JacksonAvroMarshalUnmarshalPojoTest.java | {
"start": 1395,
"end": 3355
} | class ____ extends CamelTestSupport {
@Test
public void testMarshalUnmarshalPojo() throws Exception {
MockEndpoint mock1 = getMockEndpoint("mock:serialized");
mock1.expectedMessageCount(1);
Pojo pojo = new Pojo("Hello");
template.sendBody("direct:pojo", pojo);
mock1.assertIsSatisfied();
byte[] serialized = mock1.getReceivedExchanges().get(0).getIn().getBody(byte[].class);
assertNotNull(serialized);
assertEquals(6, serialized.length);
MockEndpoint mock2 = getMockEndpoint("mock:pojo");
mock2.expectedMessageCount(1);
mock2.message(0).body().isInstanceOf(Pojo.class);
template.sendBody("direct:serialized", serialized);
mock2.assertIsSatisfied();
Pojo back = mock2.getReceivedExchanges().get(0).getIn().getBody(Pojo.class);
assertEquals(pojo.getText(), back.getText());
}
@Override
protected void bindToRegistry(Registry registry) {
String schemaJson = "{\n"
+ "\"type\": \"record\",\n"
+ "\"name\": \"Pojo\",\n"
+ "\"fields\": [\n"
+ " {\"name\": \"text\", \"type\": \"string\"}\n"
+ "]}";
Schema raw = new Schema.Parser(NameValidator.UTF_VALIDATOR).parse(schemaJson);
AvroSchema schema = new AvroSchema(raw);
SchemaResolver resolver = ex -> schema;
registry.bind("schema-resolver", SchemaResolver.class, resolver);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:serialized").unmarshal().avro(Pojo.class).to("mock:pojo");
from("direct:pojo").marshal().avro().to("mock:serialized");
}
};
}
public static | JacksonAvroMarshalUnmarshalPojoTest |
java | apache__spark | common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBTypeInfoSuite.java | {
"start": 5857,
"end": 5945
} | class ____ {
@KVIndex("id")
public String id;
}
public static | NoNaturalIndex2 |
java | qos-ch__slf4j | slf4j-migrator/src/main/java/org/slf4j/migrator/Constant.java | {
"start": 1239,
"end": 1492
} | class ____ {
public final static int JCL_TO_SLF4J = 0;
public final static int LOG4J_TO_SLF4J = 1;
public final static int JUL_TO_SLF4J = 2;
public final static int NOP_TO_SLF4J = 3;
public final static int NB_FILES_MAX = 1;
} | Constant |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/KotlinReflectionUtils.java | {
"start": 1133,
"end": 2758
} | class ____ {
private static final String DEFAULT_IMPLS_CLASS_NAME = "DefaultImpls";
private static final @Nullable Class<? extends Annotation> kotlinMetadata;
private static final @Nullable Class<?> kotlinCoroutineContinuation;
private static final boolean kotlinReflectPresent;
private static final boolean kotlinxCoroutinesPresent;
static {
var metadata = tryToLoadKotlinMetadataClass();
kotlinMetadata = metadata.toOptional().orElse(null);
kotlinCoroutineContinuation = metadata //
.andThen(__ -> tryToLoadClass("kotlin.coroutines.Continuation")) //
.toOptional() //
.orElse(null);
kotlinReflectPresent = metadata.andThen(__ -> tryToLoadClass("kotlin.reflect.jvm.ReflectJvmMapping")) //
.toOptional() //
.isPresent();
kotlinxCoroutinesPresent = metadata.andThen(__ -> tryToLoadClass("kotlinx.coroutines.BuildersKt")) //
.toOptional() //
.isPresent();
}
@SuppressWarnings("unchecked")
private static Try<Class<? extends Annotation>> tryToLoadKotlinMetadataClass() {
return tryToLoadClass("kotlin.Metadata") //
.andThenTry(it -> (Class<? extends Annotation>) it);
}
/**
* @since 6.0
*/
@API(status = INTERNAL, since = "6.0")
public static boolean isKotlinSuspendingFunction(Method method) {
if (!method.isSynthetic() && kotlinCoroutineContinuation != null && isKotlinType(method.getDeclaringClass())) {
int parameterCount = method.getParameterCount();
return parameterCount > 0 //
&& method.getParameterTypes()[parameterCount - 1] == kotlinCoroutineContinuation;
}
return false;
}
/**
* Determines whether the supplied | KotlinReflectionUtils |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/DebuggingClassWriter.java | {
"start": 1073,
"end": 3581
} | class ____ extends ClassVisitor {
public static final String DEBUG_LOCATION_PROPERTY = "cglib.debugLocation";
private static String debugLocation;
private static Constructor traceCtor;
private String className;
private String superName;
static {
debugLocation = System.getProperty(DEBUG_LOCATION_PROPERTY);
if (debugLocation != null) {
System.err.println("CGLIB debugging enabled, writing to '" + debugLocation + "'");
try {
Class clazz = Class.forName("org.springframework.asm.util.TraceClassVisitor");
traceCtor = clazz.getConstructor(new Class[]{ClassVisitor.class, PrintWriter.class});
} catch (Throwable ignore) {
}
}
}
public DebuggingClassWriter(int flags) {
super(Constants.ASM_API, new ClassWriter(flags));
}
@Override
public void visit(int version,
int access,
String name,
String signature,
String superName,
String[] interfaces) {
className = name.replace('/', '.');
this.superName = superName.replace('/', '.');
super.visit(version, access, name, signature, superName, interfaces);
}
public String getClassName() {
return className;
}
public String getSuperName() {
return superName;
}
public byte[] toByteArray() {
byte[] b = ((ClassWriter) DebuggingClassWriter.super.cv).toByteArray();
if (debugLocation != null) {
String dirs = className.replace('.', File.separatorChar);
try {
new File(debugLocation + File.separatorChar + dirs).getParentFile().mkdirs();
File file = new File(new File(debugLocation), dirs + ".class");
OutputStream out = new BufferedOutputStream(new FileOutputStream(file));
try {
out.write(b);
} finally {
out.close();
}
if (traceCtor != null) {
file = new File(new File(debugLocation), dirs + ".asm");
out = new BufferedOutputStream(new FileOutputStream(file));
try {
ClassReader cr = new ClassReader(b);
PrintWriter pw = new PrintWriter(new OutputStreamWriter(out));
ClassVisitor tcv = (ClassVisitor)traceCtor.newInstance(new Object[]{null, pw});
cr.accept(tcv, 0);
pw.flush();
} finally {
out.close();
}
}
} catch (Exception e) {
throw new CodeGenerationException(e);
}
}
return b;
}
}
| DebuggingClassWriter |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmConfigPersistenceUnit.java | {
"start": 25099,
"end": 25331
} | interface ____ {
/**
* The maximum time before an object of the cache is considered expired.
*/
Optional<Duration> maxIdle();
}
@ConfigGroup
| HibernateOrmConfigPersistenceUnitCacheExpiration |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/extensions/NamespaceTemplateExtensionTest.java | {
"start": 3041,
"end": 3098
} | enum ____ {
ONE,
TWO
}
public | MyEnum |
java | elastic__elasticsearch | libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/URLConnectionFileActions.java | {
"start": 818,
"end": 9247
} | class ____ {
private static void withJdkFileConnection(CheckedConsumer<URLConnection, Exception> connectionConsumer) throws Exception {
var conn = EntitledActions.createFileURLConnection();
// Be sure we got the connection implementation we want
assert conn.getClass().getSimpleName().equals("FileURLConnection");
try {
connectionConsumer.accept(conn);
} catch (IOException e) {
// It's OK, it means we passed entitlement checks, and we tried to perform some operation
}
}
private static void withJarConnection(CheckedConsumer<JarURLConnection, Exception> connectionConsumer) throws Exception {
var conn = EntitledActions.createJarURLConnection();
// Be sure we got the connection implementation we want
assert JarURLConnection.class.isAssignableFrom(conn.getClass());
connectionConsumer.accept((JarURLConnection) conn);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionConnect() throws Exception {
withJdkFileConnection(URLConnection::connect);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFields() throws Exception {
withJdkFileConnection(URLConnection::getHeaderFields);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFieldWithName() throws Exception {
withJdkFileConnection(urlConnection -> urlConnection.getHeaderField("date"));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFieldWithIndex() throws Exception {
withJdkFileConnection(urlConnection -> urlConnection.getHeaderField(0));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContentLength() throws Exception {
withJdkFileConnection(URLConnection::getContentLength);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContentLengthLong() throws Exception {
withJdkFileConnection(URLConnection::getContentLengthLong);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFieldKey() throws Exception {
withJdkFileConnection(urlConnection -> urlConnection.getHeaderFieldKey(0));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetLastModified() throws Exception {
withJdkFileConnection(URLConnection::getLastModified);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetInputStream() throws Exception {
withJdkFileConnection(URLConnection::getInputStream);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContentType() throws Exception {
withJdkFileConnection(URLConnection::getContentType);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContentEncoding() throws Exception {
withJdkFileConnection(URLConnection::getContentEncoding);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetExpiration() throws Exception {
withJdkFileConnection(URLConnection::getExpiration);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetDate() throws Exception {
withJdkFileConnection(URLConnection::getDate);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFieldInt() throws Exception {
withJdkFileConnection(conn -> conn.getHeaderFieldInt("field", 0));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetHeaderFieldLong() throws Exception {
withJdkFileConnection(conn -> conn.getHeaderFieldLong("field", 0));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContent() throws Exception {
withJdkFileConnection(URLConnection::getContent);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunFileURLConnectionGetContentWithClasses() throws Exception {
withJdkFileConnection(conn -> conn.getContent(new Class<?>[] { String.class }));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetManifest() throws Exception {
withJarConnection(JarURLConnection::getManifest);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetJarEntry() throws Exception {
withJarConnection(JarURLConnection::getJarEntry);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetAttributes() throws Exception {
withJarConnection(JarURLConnection::getAttributes);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetMainAttributes() throws Exception {
withJarConnection(JarURLConnection::getMainAttributes);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetCertificates() throws Exception {
withJarConnection(JarURLConnection::getCertificates);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetJarFile() throws Exception {
withJarConnection(JarURLConnection::getJarFile);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetJarEntry() throws Exception {
withJarConnection(JarURLConnection::getJarEntry);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionConnect() throws Exception {
withJarConnection(JarURLConnection::connect);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetInputStream() throws Exception {
withJarConnection(JarURLConnection::getInputStream);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetContentLength() throws Exception {
withJarConnection(JarURLConnection::getContentLength);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetContentLengthLong() throws Exception {
withJarConnection(JarURLConnection::getContentLengthLong);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetContent() throws Exception {
withJarConnection(JarURLConnection::getContent);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetContentType() throws Exception {
withJarConnection(JarURLConnection::getContentType);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void sunJarURLConnectionGetHeaderFieldWithName() throws Exception {
withJarConnection(conn -> conn.getHeaderField("field"));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetContentEncoding() throws Exception {
withJarConnection(URLConnection::getContentEncoding);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetExpiration() throws Exception {
withJarConnection(URLConnection::getExpiration);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetDate() throws Exception {
withJarConnection(URLConnection::getDate);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetLastModified() throws Exception {
withJarConnection(URLConnection::getLastModified);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetHeaderFieldInt() throws Exception {
withJarConnection(conn -> conn.getHeaderFieldInt("field", 0));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetHeaderFieldLong() throws Exception {
withJarConnection(conn -> conn.getHeaderFieldLong("field", 0));
}
// Exercises URLConnection#getHeaderFieldDate with a named field and default 0; expected entitlement access: PLUGINS.
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetHeaderFieldDate() throws Exception {
    withJarConnection(connection -> connection.getHeaderFieldDate("field", 0));
}
// Exercises URLConnection#getContent(Class[]) with a String target type; expected entitlement access: PLUGINS.
@EntitlementTest(expectedAccess = PLUGINS)
static void netJarURLConnectionGetContent() throws Exception {
    withJarConnection(connection -> connection.getContent(new Class<?>[] { String.class }));
}
}
| URLConnectionFileActions |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/naming/pojo/healthcheck/HealthCheckType.java | {
"start": 1047,
"end": 1447
} | enum ____ {
/**
* TCP type.
*/
TCP(Tcp.class),
/**
* HTTP type.
*/
HTTP(Http.class),
/**
* MySQL type.
*/
MYSQL(Mysql.class),
/**
* No check.
*/
NONE(AbstractHealthChecker.None.class);
private final Class<? extends AbstractHealthChecker> healthCheckerClass;
/**
* In JDK 1.6, the map need full | HealthCheckType |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/testjar/ErrorHandlingSubmissionJob.java | {
"start": 1623,
"end": 3074
} | class ____ {
private static final AtomicReference<Exception> SUBMISSION_EXCEPTION = new AtomicReference<>();
public static PackagedProgram createPackagedProgram() throws FlinkException {
try {
return PackagedProgram.newBuilder()
.setUserClassPaths(
Collections.singletonList(
new File(CliFrontendTestUtils.getTestJarPath())
.toURI()
.toURL()))
.setEntryPointClassName(ErrorHandlingSubmissionJob.class.getName())
.build();
} catch (ProgramInvocationException | FileNotFoundException | MalformedURLException e) {
throw new FlinkException("Could not load the provided entrypoint class.", e);
}
}
public static void main(String[] args) throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.fromData(Arrays.asList(1, 2, 3))
.map(element -> element + 1)
.sinkTo(new DiscardingSink<>());
try {
env.execute();
} catch (Exception e) {
SUBMISSION_EXCEPTION.set(e);
throw e;
}
}
public static Exception getSubmissionException() {
return SUBMISSION_EXCEPTION.get();
}
}
| ErrorHandlingSubmissionJob |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryTypeArgumentTest.java | {
"start": 2206,
"end": 2636
} | class ____ extends B {
public <T> C() {
// BUG: Diagnostic contains: /*START*/ super()
/*START*/ <String>super();
}
}
}
""")
.doTest();
}
@Test
public void positiveInstantiation() {
compilationHelper
.addSourceLines(
"Test.java",
"""
package foo.bar;
| C |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/util/ConnectUtils.java | {
"start": 1752,
"end": 9624
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(ConnectUtils.class);
public static Long checkAndConvertTimestamp(Long timestamp) {
if (timestamp == null || timestamp >= 0)
return timestamp;
else if (timestamp == RecordBatch.NO_TIMESTAMP)
return null;
else
throw new InvalidRecordException(String.format("Invalid record timestamp %d", timestamp));
}
/**
* Ensure that the {@link Map properties} contain an expected value for the given key, inserting the
* expected value into the properties if necessary.
*
* <p>If there is a pre-existing value for the key in the properties, log a warning to the user
* that this value will be ignored, and the expected value will be used instead.
*
* @param props the configuration properties provided by the user; may not be null
* @param key the name of the property to check on; may not be null
* @param expectedValue the expected value for the property; may not be null
* @param justification the reason the property cannot be overridden.
* Will follow the phrase "The value... for the... property will be ignored as it cannot be overridden ".
* For example, one might supply the message "in connectors with the DLQ feature enabled" for this parameter.
* May be null (in which case, no justification is given to the user in the logged warning message)
* @param caseSensitive whether the value should match case-insensitively
*/
public static void ensureProperty(
Map<String, ? super String> props,
String key,
String expectedValue,
String justification,
boolean caseSensitive
) {
ensurePropertyAndGetWarning(props, key, expectedValue, justification, caseSensitive).ifPresent(log::warn);
}
// Visible for testing
/**
* Ensure that a given key has an expected value in the properties, inserting the expected value into the
* properties if necessary. If a user-supplied value is overridden, return a warning message that can
* be logged to the user notifying them of this fact.
*
* @return an {@link Optional} containing a warning that should be logged to the user if a value they
* supplied in the properties is being overridden, or {@link Optional#empty()} if no such override has
* taken place
*/
static Optional<String> ensurePropertyAndGetWarning(
Map<String, ? super String> props,
String key,
String expectedValue,
String justification,
boolean caseSensitive) {
if (!props.containsKey(key)) {
// Insert the expected value
props.put(key, expectedValue);
// But don't issue a warning to the user
return Optional.empty();
}
String value = Objects.toString(props.get(key));
boolean matchesExpectedValue = caseSensitive ? expectedValue.equals(value) : expectedValue.equalsIgnoreCase(value);
if (matchesExpectedValue) {
return Optional.empty();
}
// Insert the expected value
props.put(key, expectedValue);
justification = justification != null ? " " + justification : "";
// And issue a warning to the user
return Optional.of(String.format(
"The value '%s' for the '%s' property will be ignored as it cannot be overridden%s. "
+ "The value '%s' will be used instead.",
value, key, justification, expectedValue
));
}
/**
* Adds Connect metrics context properties.
* @param prop the properties map to which the metrics context properties are to be added
* @param config the worker config
* @param clusterId the Connect cluster's backing Kafka cluster ID
*
* @see <a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-606%3A+Add+Metadata+Context+to+MetricsReporter">KIP-606</a>
*/
public static void addMetricsContextProperties(Map<String, Object> prop, WorkerConfig config, String clusterId) {
//add all properties predefined with "metrics.context."
prop.putAll(config.originalsWithPrefix(CommonClientConfigs.METRICS_CONTEXT_PREFIX, false));
//add connect properties
prop.put(CommonClientConfigs.METRICS_CONTEXT_PREFIX + WorkerConfig.CONNECT_KAFKA_CLUSTER_ID, clusterId);
Object groupId = config.originals().get(DistributedConfig.GROUP_ID_CONFIG);
if (groupId != null) {
prop.put(CommonClientConfigs.METRICS_CONTEXT_PREFIX + WorkerConfig.CONNECT_GROUP_ID, groupId);
}
}
public static boolean isSinkConnector(Connector connector) {
return SinkConnector.class.isAssignableFrom(connector.getClass());
}
public static boolean isSourceConnector(Connector connector) {
return SourceConnector.class.isAssignableFrom(connector.getClass());
}
/**
* Apply a specified transformation {@link Function} to every value in a Map.
* @param map the Map to be transformed
* @param transformation the transformation function
* @return the transformed Map
* @param <K> the key type
* @param <I> the pre-transform value type
* @param <O> the post-transform value type
*/
public static <K, I, O> Map<K, O> transformValues(Map<K, I> map, Function<I, O> transformation) {
return map.entrySet().stream().collect(Collectors.toMap(
Map.Entry::getKey,
transformation.compose(Map.Entry::getValue)
));
}
public static <I> List<I> combineCollections(Collection<Collection<I>> collections) {
return combineCollections(collections, Function.identity());
}
public static <I, T> List<T> combineCollections(Collection<I> collection, Function<I, Collection<T>> extractCollection) {
return combineCollections(collection, extractCollection, Collectors.toList());
}
public static <I, T, C> C combineCollections(
Collection<I> collection,
Function<I, Collection<T>> extractCollection,
Collector<T, ?, C> collector
) {
return collection.stream()
.map(extractCollection)
.flatMap(Collection::stream)
.collect(collector);
}
public static ConnectException maybeWrap(Throwable t, String message) {
if (t == null) {
return null;
}
if (t instanceof ConnectException) {
return (ConnectException) t;
}
return new ConnectException(message, t);
}
/**
* Create the base of a {@link CommonClientConfigs#CLIENT_ID_DOC client ID} that can be
* used for Kafka clients instantiated by this worker. Workers should append an extra identifier
* to the end of this base ID to include extra information on what they are using it for; for example,
* {@code clientIdBase(config) + "configs"} could be used as the client ID for a consumer, producer,
* or admin client used to interact with a worker's config topic.
* @param config the worker config; may not be null
* @return the base client ID for this worker; never null, never empty, and will always end in a
* hyphen ('-')
*/
public static String clientIdBase(WorkerConfig config) {
String result = Optional.ofNullable(config.groupId())
.orElse("connect");
String userSpecifiedClientId = config.getString(CLIENT_ID_CONFIG);
if (userSpecifiedClientId != null && !userSpecifiedClientId.trim().isEmpty()) {
result += "-" + userSpecifiedClientId;
}
return result + "-";
}
/**
* Get the | ConnectUtils |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java | {
"start": 1722,
"end": 7170
} | class ____ implements RpcServerFactory {
private static final Logger LOG =
LoggerFactory.getLogger(RpcServerFactoryPBImpl.class);
private static final String PROTO_GEN_PACKAGE_NAME = "org.apache.hadoop.yarn.proto";
private static final String PROTO_GEN_CLASS_SUFFIX = "Service";
private static final String PB_IMPL_PACKAGE_SUFFIX = "impl.pb.service";
private static final String PB_IMPL_CLASS_SUFFIX = "PBServiceImpl";
private static final RpcServerFactoryPBImpl self = new RpcServerFactoryPBImpl();
private ConcurrentMap<Class<?>, Constructor<?>> serviceCache = new ConcurrentHashMap<Class<?>, Constructor<?>>();
private ConcurrentMap<Class<?>, Method> protoCache = new ConcurrentHashMap<Class<?>, Method>();
public static RpcServerFactoryPBImpl get() {
return RpcServerFactoryPBImpl.self;
}
private RpcServerFactoryPBImpl() {
}
public Server getServer(Class<?> protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager, int numHandlers) {
return getServer(protocol, instance, addr, conf, secretManager, numHandlers,
null);
}
@Override
public Server getServer(Class<?> protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
String portRangeConfig) {
Constructor<?> constructor = serviceCache.get(protocol);
if (constructor == null) {
Class<?> pbServiceImplClazz = null;
try {
pbServiceImplClazz = conf
.getClassByName(getPbServiceImplClassName(protocol));
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException("Failed to load class: ["
+ getPbServiceImplClassName(protocol) + "]", e);
}
try {
constructor = pbServiceImplClazz.getConstructor(protocol);
constructor.setAccessible(true);
serviceCache.putIfAbsent(protocol, constructor);
} catch (NoSuchMethodException e) {
throw new YarnRuntimeException("Could not find constructor with params: "
+ Long.TYPE + ", " + InetSocketAddress.class + ", "
+ Configuration.class, e);
}
}
Object service = null;
try {
service = constructor.newInstance(instance);
} catch (InvocationTargetException e) {
throw new YarnRuntimeException(e);
} catch (IllegalAccessException e) {
throw new YarnRuntimeException(e);
} catch (InstantiationException e) {
throw new YarnRuntimeException(e);
}
Class<?> pbProtocol = service.getClass().getInterfaces()[0];
Method method = protoCache.get(protocol);
if (method == null) {
Class<?> protoClazz = null;
try {
protoClazz = conf.getClassByName(getProtoClassName(protocol));
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException("Failed to load class: ["
+ getProtoClassName(protocol) + "]", e);
}
try {
method = protoClazz.getMethod("newReflectiveBlockingService",
pbProtocol.getInterfaces()[0]);
method.setAccessible(true);
protoCache.putIfAbsent(protocol, method);
} catch (NoSuchMethodException e) {
throw new YarnRuntimeException(e);
}
}
try {
return createServer(pbProtocol, addr, conf, secretManager, numHandlers,
(BlockingService)method.invoke(null, service), portRangeConfig);
} catch (InvocationTargetException e) {
throw new YarnRuntimeException(e);
} catch (IllegalAccessException e) {
throw new YarnRuntimeException(e);
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
}
private String getProtoClassName(Class<?> clazz) {
String srcClassName = getClassName(clazz);
return PROTO_GEN_PACKAGE_NAME + "." + srcClassName + "$" + srcClassName + PROTO_GEN_CLASS_SUFFIX;
}
private String getPbServiceImplClassName(Class<?> clazz) {
String srcPackagePart = getPackageName(clazz);
String srcClassName = getClassName(clazz);
String destPackagePart = srcPackagePart + "." + PB_IMPL_PACKAGE_SUFFIX;
String destClassPart = srcClassName + PB_IMPL_CLASS_SUFFIX;
return destPackagePart + "." + destClassPart;
}
private String getClassName(Class<?> clazz) {
String fqName = clazz.getName();
return (fqName.substring(fqName.lastIndexOf(".") + 1, fqName.length()));
}
private String getPackageName(Class<?> clazz) {
return clazz.getPackage().getName();
}
private Server createServer(Class<?> pbProtocol, InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
BlockingService blockingService, String portRangeConfig) throws IOException {
RPC.setProtocolEngine(conf, pbProtocol, ProtobufRpcEngine2.class);
RPC.Server server = new RPC.Builder(conf).setProtocol(pbProtocol)
.setInstance(blockingService).setBindAddress(addr.getHostName())
.setPort(addr.getPort()).setNumHandlers(numHandlers).setVerbose(false)
.setSecretManager(secretManager).setPortRangeConfig(portRangeConfig)
.build();
LOG.info("Adding protocol "+pbProtocol.getCanonicalName()+" to the server");
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
return server;
}
}
| RpcServerFactoryPBImpl |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/float_/FloatAssert_isNotCloseTo_Float_Test.java | {
"start": 988,
"end": 1423
} | class ____ extends FloatAssertBaseTest {
private final Offset<Float> offset = offset(5f);
private final Float value = 33f;
@Override
protected FloatAssert invoke_api_method() {
return assertions.isNotCloseTo(value, offset);
}
@Override
protected void verify_internal_effects() {
verify(floats).assertIsNotCloseTo(getInfo(assertions), getActual(assertions), value, offset);
}
}
| FloatAssert_isNotCloseTo_Float_Test |
java | google__guava | android/guava/src/com/google/common/collect/ForwardingObject.java | {
"start": 744,
"end": 961
} | class ____ implementing the <a
* href="http://en.wikipedia.org/wiki/Decorator_pattern">decorator pattern</a>. The {@link
* #delegate()} method must be overridden to return the instance being decorated.
*
* <p>This | for |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/MemoryUsageEstimationProcessManager.java | {
"start": 1106,
"end": 6417
} | class ____ {
private static final Logger LOGGER = LogManager.getLogger(MemoryUsageEstimationProcessManager.class);
private final ExecutorService executorServiceForJob;
private final ExecutorService executorServiceForProcess;
private final AnalyticsProcessFactory<MemoryUsageEstimationResult> processFactory;
public MemoryUsageEstimationProcessManager(
ExecutorService executorServiceForJob,
ExecutorService executorServiceForProcess,
AnalyticsProcessFactory<MemoryUsageEstimationResult> processFactory
) {
this.executorServiceForJob = Objects.requireNonNull(executorServiceForJob);
this.executorServiceForProcess = Objects.requireNonNull(executorServiceForProcess);
this.processFactory = Objects.requireNonNull(processFactory);
}
public void runJobAsync(
String jobId,
DataFrameAnalyticsConfig config,
DataFrameDataExtractorFactory dataExtractorFactory,
ActionListener<MemoryUsageEstimationResult> listener
) {
executorServiceForJob.execute(() -> {
try {
MemoryUsageEstimationResult result = runJob(jobId, config, dataExtractorFactory);
listener.onResponse(result);
} catch (Exception e) {
listener.onFailure(e);
}
});
}
private MemoryUsageEstimationResult runJob(
String jobId,
DataFrameAnalyticsConfig config,
DataFrameDataExtractorFactory dataExtractorFactory
) {
DataFrameDataExtractor dataExtractor = dataExtractorFactory.newExtractor(false);
DataFrameDataExtractor.DataSummary dataSummary = dataExtractor.collectDataSummary();
if (dataSummary.rows == 0) {
throw ExceptionsHelper.badRequestException(
"[{}] Unable to estimate memory usage as no documents in the source indices [{}] contained all the fields selected for "
+ "analysis. If you are relying on automatic field selection then there are currently mapped fields that do not exist "
+ "in any indexed documents, and you will have to switch to explicit field selection and include only fields that "
+ "exist in indexed documents.",
jobId,
Strings.arrayToCommaDelimitedString(config.getSource().getIndex())
);
}
Set<String> categoricalFields = dataExtractor.getCategoricalFields(config.getAnalysis());
AnalyticsProcessConfig processConfig = new AnalyticsProcessConfig(
jobId,
dataSummary.rows,
dataSummary.cols,
// For memory estimation the model memory limit here should be set high enough not to trigger an error when C++ code
// compares the limit to the result of estimation.
ByteSizeValue.ofPb(1),
1,
"",
categoricalFields,
config.getAnalysis(),
dataExtractorFactory.getExtractedFields()
);
AnalyticsProcess<MemoryUsageEstimationResult> process = processFactory.createAnalyticsProcess(
config,
processConfig,
false,
executorServiceForProcess,
// The handler passed here will never be called as AbstractNativeProcess.detectCrash method returns early when
// (processInStream == null) which is the case for MemoryUsageEstimationProcess.
reason -> {}
);
try {
return readResult(jobId, process);
} catch (Exception e) {
String errorMsg = format(
"[%s] Error while processing process output [%s], process errors: [%s]",
jobId,
e.getMessage(),
process.readError()
);
throw ExceptionsHelper.serverError(errorMsg, e);
} finally {
try {
LOGGER.debug("[{}] Closing process", jobId);
process.close();
LOGGER.debug("[{}] Closed process", jobId);
} catch (Exception e) {
String errorMsg = format(
"[%s] Error while closing process [%s], process errors: [%s]",
jobId,
e.getMessage(),
process.readError()
);
throw ExceptionsHelper.serverError(errorMsg, e);
}
}
}
/**
* Extracts {@link MemoryUsageEstimationResult} from process' output.
*/
private static MemoryUsageEstimationResult readResult(String jobId, AnalyticsProcess<MemoryUsageEstimationResult> process) {
Iterator<MemoryUsageEstimationResult> iterator = process.readAnalyticsResults();
if (iterator.hasNext() == false) {
String errorMsg = "[" + jobId + "] Memory usage estimation process returned no results";
throw ExceptionsHelper.serverError(errorMsg);
}
MemoryUsageEstimationResult result = iterator.next();
if (iterator.hasNext()) {
String errorMsg = "[" + jobId + "] Memory usage estimation process returned more than one result";
throw ExceptionsHelper.serverError(errorMsg);
}
return result;
}
}
| MemoryUsageEstimationProcessManager |
java | processing__processing4 | java/test/processing/mode/java/AutoFormatTests.java | {
"start": 310,
"end": 1739
} | class ____ {
@BeforeClass
public static void init() {
ProcessingTestUtil.init();
}
static void expectGood(final String id) {
try {
final String formattedProgram = ProcessingTestUtil.format(res(id + ".pde"));
final File goldenFile = res(id + ".expected");
checkGolden(formattedProgram, goldenFile);
// check that the formatted text doesn't change
checkGolden(ProcessingTestUtil.format(formattedProgram), goldenFile);
} catch (Exception e) {
if (!e.equals(e.getCause()) && e.getCause() != null)
fail(e.getCause().toString());
else
e.printStackTrace(System.err);
fail(e.toString());
}
}
private static void checkGolden(final String expectedText,
final File goldenFile) throws IOException {
if (goldenFile.exists()) {
final String expected = ProcessingTestUtil.read(goldenFile);
assertEquals(expected, expectedText);
} else {
System.err.println("WARN: golden file " + goldenFile
+ " does not exist. Generating.");
final FileWriter sug = new FileWriter(goldenFile);
sug.write(ProcessingTestUtil.normalize(expectedText));
sug.close();
}
}
@Test
public void bug109() {
expectGood("bug109");
}
@Test
public void bug405() {
expectGood("bug405");
}
@Test
public void bug420() {
expectGood("bug420");
}
}
| AutoFormatTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/TablePerClassInheritancePersistTest.java | {
"start": 1909,
"end": 3901
} | class ____ {
private final Man john = new Man( "John", "Riding Roller Coasters" );
private final Woman jane = new Woman( "Jane", "Hippotherapist" );
private final Child susan = new Child( "Susan", "Super Mario retro Mushroom" );
private final Child mark = new Child( "Mark", "Fidget Spinner" );
private final Family family = new Family( "McCloud" );
private final List<Child> children = new ArrayList<>( Arrays.asList( susan, mark ) );
private final List<Person> familyMembers = Arrays.asList( john, jane, susan, mark );
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
jane.setHusband( john );
jane.setChildren( children );
john.setWife( jane );
john.setChildren( children );
for ( Child child : children ) {
child.setFather( john );
child.setMother( jane );
}
for ( Person person : familyMembers ) {
family.add( person );
}
session.persist( family );
} );
}
@Test
public void testPolymorphicAssociation(SessionFactoryScope scope) {
scope.inTransaction( session -> {
Family family = session.createQuery( "FROM Family f", Family.class ).getSingleResult();
List<Person> members = family.getMembers();
assertThat( members.size(), is( familyMembers.size() ) );
for ( Person person : members ) {
if ( person instanceof Man ) {
assertThat( ( (Man) person ).getHobby(), is( john.getHobby() ) );
}
else if ( person instanceof Woman ) {
assertThat( ( (Woman) person ).getJob(), is( jane.getJob() ) );
}
else if ( person instanceof Child ) {
if ( person.getName().equals( "Susan" ) ) {
assertThat( ( (Child) person ).getFavouriteToy(), is( susan.getFavouriteToy() ) );
}
else {
assertThat( ( (Child) person ).getFavouriteToy(), is( mark.getFavouriteToy() ) );
}
}
else {
fail( "Unexpected result: " + person );
}
}
} );
}
@Entity(name = "Family")
public static | TablePerClassInheritancePersistTest |
java | apache__camel | components/camel-ai/camel-djl/src/main/java/org/apache/camel/component/djl/DJLComponent.java | {
"start": 1071,
"end": 1562
} | class ____ extends DefaultComponent {
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
if (ObjectHelper.isEmpty(remaining)) {
throw new IllegalArgumentException("Application must be configured on endpoint using syntax djl:application");
}
Endpoint endpoint = new DJLEndpoint(uri, this, remaining);
setProperties(endpoint, parameters);
return endpoint;
}
}
| DJLComponent |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyBasicFieldMergeTest.java | {
"start": 2500,
"end": 3294
} | class ____ {
@Id
@GeneratedValue
@Column(name = "MAN_ID")
private Long id;
@Column(name = "NAME")
private String name;
@Lob
@Column(name = "RESUME")
@Basic(fetch = FetchType.LAZY)
private byte[] resume;
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "COMP_ID")
private Company company;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public byte[] getResume() {
return resume;
}
public void setResume(byte[] resume) {
this.resume = resume;
}
public Company getCompany() {
return company;
}
public void setCompany(Company company) {
this.company = company;
}
}
}
| Manager |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-26/src/test/java/org/redisson/spring/data/connection/RedissonSubscribeTest.java | {
"start": 1002,
"end": 7797
} | class ____ extends BaseConnectionTest {
@Test
public void testContainer() {
RedissonConnectionFactory f = new RedissonConnectionFactory(redisson);
RedisMessageListenerContainer container = new RedisMessageListenerContainer();
container.setConnectionFactory(f);
container.afterPropertiesSet();
container.start();
// for (int i = 0; i < 2; i++) {
// container.addMessageListener(new MessageListener() {
// @Override
// public void onMessage(Message message, byte[] pattern) {
// }
// }, ChannelTopic.of("test"));
// }
//
// container.stop();
//
// container = new RedisMessageListenerContainer();
// container.setConnectionFactory(f);
// container.afterPropertiesSet();
// container.start();
// for (int i = 0; i < 2; i++) {
// container.addMessageListener(new MessageListener() {
// @Override
// public void onMessage(Message message, byte[] pattern) {
// }
// }, PatternTopic.of("*"));
// }
// container.stop();
//
// container= new RedisMessageListenerContainer();
// container.setConnectionFactory(f);
// container.afterPropertiesSet();
// container.start();
for (int i = 0; i < 2; i++) {
container.addMessageListener(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
}
}, ChannelTopic.of("test"+i));
}
container.stop();
container= new RedisMessageListenerContainer();
container.setConnectionFactory(f);
container.afterPropertiesSet();
container.start();
for (int i = 0; i < 2; i++) {
container.addMessageListener(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
}
}, PatternTopic.of("*" + i));
}
container.stop();
}
@Test
public void testListenersDuplication() throws InterruptedException {
Queue<byte[]> msg = new ConcurrentLinkedQueue<>();
MessageListener aListener = (message, pattern) -> {
msg.add(message.getBody());
};
RedissonConnectionFactory factory = new RedissonConnectionFactory(redisson);
RedisMessageListenerContainer container = new RedisMessageListenerContainer();
container.setConnectionFactory(factory);
container.addMessageListener(aListener,
Arrays.asList(new ChannelTopic("a"), new ChannelTopic("b")));
container.addMessageListener(aListener,
Arrays.asList(new PatternTopic("c*")));
container.afterPropertiesSet();
container.start();
Thread.sleep(200);
RedisConnection c = factory.getConnection();
c.publish("a".getBytes(), "msg".getBytes());
Awaitility.await().atMost(Durations.ONE_SECOND)
.untilAsserted(() -> {
assertThat(msg).containsExactly("msg".getBytes());
});
}
@Test
public void testPatterTopic() throws IOException, InterruptedException {
RedisRunner.RedisProcess instance = new RedisRunner()
.nosave()
.randomPort()
.randomDir()
.notifyKeyspaceEvents(
RedisRunner.KEYSPACE_EVENTS_OPTIONS.K,
RedisRunner.KEYSPACE_EVENTS_OPTIONS.g,
RedisRunner.KEYSPACE_EVENTS_OPTIONS.E,
RedisRunner.KEYSPACE_EVENTS_OPTIONS.$)
.run();
Config config = new Config();
config.useSingleServer().setAddress(instance.getRedisServerAddressAndPort()).setPingConnectionInterval(0);
RedissonClient redisson = Redisson.create(config);
RedissonConnectionFactory factory = new RedissonConnectionFactory(redisson);
RedisMessageListenerContainer container = new RedisMessageListenerContainer();
container.setConnectionFactory(factory);
AtomicInteger counterTest = new AtomicInteger();
container.addMessageListener(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
counterTest.incrementAndGet();
}
}, new PatternTopic("__keyspace@0__:mykey"));
container.addMessageListener(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
counterTest.incrementAndGet();
}
}, new PatternTopic("__keyevent@0__:del"));
container.afterPropertiesSet();
container.start();
Assertions.assertThat(container.isRunning()).isTrue();
RedisConnection c = factory.getConnection();
c.set("mykey".getBytes(), "2".getBytes());
c.del("mykey".getBytes());
Awaitility.await().atMost(Durations.FIVE_SECONDS).until(() -> {
return counterTest.get() == 3;
});
container.stop();
redisson.shutdown();
}
@Test
public void testSubscribe() {
RedissonConnection connection = new RedissonConnection(redisson);
AtomicReference<byte[]> msg = new AtomicReference<byte[]>();
connection.subscribe(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
msg.set(message.getBody());
}
}, "test".getBytes());
connection.publish("test".getBytes(), "msg".getBytes());
Awaitility.await().atMost(Durations.ONE_SECOND)
.until(() -> Arrays.equals("msg".getBytes(), msg.get()));
connection.getSubscription().unsubscribe();
connection.publish("test".getBytes(), "msg".getBytes());
}
@Test
public void testUnSubscribe() {
RedissonConnection connection = new RedissonConnection(redisson);
AtomicReference<byte[]> msg = new AtomicReference<byte[]>();
connection.subscribe(new MessageListener() {
@Override
public void onMessage(Message message, byte[] pattern) {
msg.set(message.getBody());
}
}, "test".getBytes());
connection.publish("test".getBytes(), "msg".getBytes());
Awaitility.await().atMost(Durations.ONE_SECOND)
.until(() -> Arrays.equals("msg".getBytes(), msg.get()));
connection.getSubscription().unsubscribe();
}
}
| RedissonSubscribeTest |
java | google__dagger | dagger-producers/main/java/dagger/producers/monitoring/TimingRecorders.java | {
"start": 3210,
"end": 4160
} | class ____ implements ProductionComponentTimingRecorder.Factory {
private final ProductionComponentTimingRecorder.Factory delegate;
Factory(ProductionComponentTimingRecorder.Factory delegate) {
this.delegate = delegate;
}
@Override
public ProductionComponentTimingRecorder create(Object component) {
try {
ProductionComponentTimingRecorder recorder = delegate.create(component);
return recorder == null
? noOpProductionComponentTimingRecorder()
: new NonThrowingProductionComponentTimingRecorder(recorder);
} catch (RuntimeException e) {
logCreateException(e, delegate, component);
return noOpProductionComponentTimingRecorder();
}
}
}
}
/**
* A producer recorder that delegates to a single recorder, and catches and logs all exceptions
* that the delegate throws.
*/
private static final | Factory |
java | apache__camel | components/camel-jt400/src/main/java/org/apache/camel/component/jt400/Jt400Configuration.java | {
"start": 1551,
"end": 1719
} | enum ____ {
EQ,
NE,
LT,
LE,
GT,
GE
}
/**
* Enumeration of supported data formats
*/
public | SearchType |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/federation/resolver/ExtendedApi.java | {
"start": 241,
"end": 862
} | class ____ {
@Resolver
public ExtendedType extendedTypeById(String id) {
ExtendedType extendedType = new ExtendedType();
extendedType.setId(id);
extendedType.setDescription("extendedTypeById");
return extendedType;
}
@Resolver
@Blocking
public ExtendedType extendedTypeByIdNameKey(String id, String name, String key) {
ExtendedType extendedType = new ExtendedType();
extendedType.setId(id);
extendedType.setValue(id + name + key);
extendedType.setDescription("extendedTypeByIdNameKey");
return extendedType;
}
}
| ExtendedApi |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BoxEndpointBuilderFactory.java | {
"start": 1479,
"end": 1606
} | interface ____ {
/**
* Builder for endpoint consumers for the Box component.
*/
public | BoxEndpointBuilderFactory |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java | {
"start": 11471,
"end": 27150
} | class ____ extends CharacterCodingException{
private TestIOException(String cause){
}
TestIOException(){
}
}
IOException e = new TestIOException();
IOException wrapped = verifyExceptionClass(e, TestIOException.class);
assertEquals(null, wrapped.getMessage());
}
@Test
public void testWrapSocketException() throws Throwable {
IOException wrapped = verifyExceptionClass(new SocketException("failed"),
SocketException.class);
assertInException(wrapped, "failed");
assertWikified(wrapped);
assertInException(wrapped, "localhost");
assertRemoteDetailsIncluded(wrapped);
assertInException(wrapped, "/SocketException");
}
@Test
public void testGetConnectAddress() throws IOException {
NetUtils.addStaticResolution("host", "127.0.0.1");
InetSocketAddress addr = NetUtils.createSocketAddrForHost("host", 1);
InetSocketAddress connectAddr = NetUtils.getConnectAddress(addr);
assertEquals(addr.getHostName(), connectAddr.getHostName());
addr = new InetSocketAddress(1);
connectAddr = NetUtils.getConnectAddress(addr);
assertEquals(InetAddress.getLocalHost().getHostName(),
connectAddr.getHostName());
}
@Test
public void testCreateSocketAddress() throws Throwable {
InetSocketAddress addr = NetUtils.createSocketAddr(
"127.0.0.1:12345", 1000, "myconfig");
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(12345, addr.getPort());
addr = NetUtils.createSocketAddr(
"127.0.0.1", 1000, "myconfig");
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(1000, addr.getPort());
try {
NetUtils.createSocketAddr(
"127.0.0.1:blahblah", 1000, "myconfig");
fail("Should have failed to parse bad port");
} catch (IllegalArgumentException iae) {
assertInException(iae, "myconfig");
}
}
@Test
public void testCreateSocketAddressWithURICache() throws Throwable {
InetSocketAddress addr = NetUtils.createSocketAddr(
"127.0.0.1:12345", 1000, "myconfig", true);
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(12345, addr.getPort());
addr = NetUtils.createSocketAddr(
"127.0.0.1:12345", 1000, "myconfig", true);
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(12345, addr.getPort());
// ----------------------------------------------------
addr = NetUtils.createSocketAddr(
"127.0.0.1", 1000, "myconfig", true);
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(1000, addr.getPort());
addr = NetUtils.createSocketAddr(
"127.0.0.1", 1000, "myconfig", true);
assertEquals("127.0.0.1", addr.getAddress().getHostAddress());
assertEquals(1000, addr.getPort());
// ----------------------------------------------------
try {
NetUtils.createSocketAddr(
"127.0.0.1:blahblah", 1000, "myconfig", true);
fail("Should have failed to parse bad port");
} catch (IllegalArgumentException iae) {
assertInException(iae, "myconfig");
}
try {
NetUtils.createSocketAddr(
"127.0.0.1:blahblah", 1000, "myconfig", true);
fail("Should have failed to parse bad port");
} catch (IllegalArgumentException iae) {
assertInException(iae, "myconfig");
}
}
private void assertRemoteDetailsIncluded(IOException wrapped)
throws Throwable {
assertInException(wrapped, "desthost");
assertInException(wrapped, DEST_PORT_NAME);
}
private void assertLocalDetailsIncluded(IOException wrapped)
throws Throwable {
assertInException(wrapped, "localhost");
assertInException(wrapped, LOCAL_PORT_NAME);
}
private void assertWikified(Exception e) throws Throwable {
assertInException(e, NetUtils.HADOOP_WIKI);
}
private void assertInException(Exception e, String text) throws Throwable {
String message = extractExceptionMessage(e);
if (!(message.contains(text))) {
throw new AssertionError("Wrong text in message "
+ "\"" + message + "\""
+ " expected \"" + text + "\"")
.initCause(e);
}
}
private String extractExceptionMessage(Exception e) throws Throwable {
assertNotNull(e, "Null Exception");
String message = e.getMessage();
if (message == null) {
throw new AssertionError("Empty text in exception " + e)
.initCause(e);
}
return message;
}
private void assertNotInException(Exception e, String text)
throws Throwable{
String message = extractExceptionMessage(e);
if (message.contains(text)) {
throw new AssertionError("Wrong text in message "
+ "\"" + message + "\""
+ " did not expect \"" + text + "\"")
.initCause(e);
}
}
private IOException verifyExceptionClass(IOException e,
Class expectedClass)
throws Throwable {
assertNotNull(e, "Null Exception");
IOException wrapped = NetUtils.wrapException("desthost", DEST_PORT,
"localhost", LOCAL_PORT, e);
LOG.info(wrapped.toString(), wrapped);
if(!(wrapped.getClass().equals(expectedClass))) {
throw new AssertionError("Wrong exception class; expected "
+ expectedClass
+ " got " + wrapped.getClass() + ": " + wrapped).initCause(wrapped);
}
return wrapped;
}
static NetUtilsTestResolver resolver;
static Configuration config;
@BeforeAll
public static void setupResolver() {
resolver = NetUtilsTestResolver.install();
}
@BeforeEach
public void resetResolver() {
resolver.reset();
config = new Configuration();
}
// getByExactName
private void verifyGetByExactNameSearch(String host, String ... searches) {
assertNull(resolver.getByExactName(host));
assertBetterArrayEquals(searches, resolver.getHostSearches());
}
@Test
public void testResolverGetByExactNameUnqualified() {
verifyGetByExactNameSearch("unknown", "unknown.");
}
@Test
public void testResolverGetByExactNameUnqualifiedWithDomain() {
verifyGetByExactNameSearch("unknown.domain", "unknown.domain.");
}
@Test
public void testResolverGetByExactNameQualified() {
verifyGetByExactNameSearch("unknown.", "unknown.");
}
@Test
public void testResolverGetByExactNameQualifiedWithDomain() {
verifyGetByExactNameSearch("unknown.domain.", "unknown.domain.");
}
// getByNameWithSearch
private void verifyGetByNameWithSearch(String host, String ... searches) {
assertNull(resolver.getByNameWithSearch(host));
assertBetterArrayEquals(searches, resolver.getHostSearches());
}
@Test
public void testResolverGetByNameWithSearchUnqualified() {
String host = "unknown";
verifyGetByNameWithSearch(host, host+".a.b.", host+".b.", host+".c.");
}
@Test
public void testResolverGetByNameWithSearchUnqualifiedWithDomain() {
String host = "unknown.domain";
verifyGetByNameWithSearch(host, host+".a.b.", host+".b.", host+".c.");
}
@Test
public void testResolverGetByNameWithSearchQualified() {
String host = "unknown.";
verifyGetByNameWithSearch(host, host);
}
@Test
public void testResolverGetByNameWithSearchQualifiedWithDomain() {
String host = "unknown.domain.";
verifyGetByNameWithSearch(host, host);
}
// getByName
private void verifyGetByName(String host, String ... searches) {
InetAddress addr = null;
try {
addr = resolver.getByName(host);
} catch (UnknownHostException e) {} // ignore
assertNull(addr);
assertBetterArrayEquals(searches, resolver.getHostSearches());
}
@Test
public void testResolverGetByNameQualified() {
String host = "unknown.";
verifyGetByName(host, host);
}
@Test
public void testResolverGetByNameQualifiedWithDomain() {
verifyGetByName("unknown.domain.", "unknown.domain.");
}
@Test
public void testResolverGetByNameUnqualified() {
String host = "unknown";
verifyGetByName(host, host+".a.b.", host+".b.", host+".c.", host+".");
}
@Test
public void testResolverGetByNameUnqualifiedWithDomain() {
String host = "unknown.domain";
verifyGetByName(host, host+".", host+".a.b.", host+".b.", host+".c.");
}
// resolving of hosts
private InetAddress verifyResolve(String host, String ... searches) {
InetAddress addr = null;
try {
addr = resolver.getByName(host);
} catch (UnknownHostException e) {} // ignore
assertNotNull(addr);
assertBetterArrayEquals(searches, resolver.getHostSearches());
return addr;
}
private void
verifyInetAddress(InetAddress addr, String host, String ip) {
assertNotNull(addr);
assertEquals(host, addr.getHostName());
assertEquals(ip, addr.getHostAddress());
}
@Test
public void testResolverUnqualified() {
String host = "host";
InetAddress addr = verifyResolve(host, host+".a.b.");
verifyInetAddress(addr, "host.a.b", "1.1.1.1");
}
@Test
public void testResolverUnqualifiedWithDomain() {
String host = "host.a";
InetAddress addr = verifyResolve(host, host+".", host+".a.b.", host+".b.");
verifyInetAddress(addr, "host.a.b", "1.1.1.1");
}
@Test
public void testResolverUnqualifedFull() {
String host = "host.a.b";
InetAddress addr = verifyResolve(host, host+".");
verifyInetAddress(addr, host, "1.1.1.1");
}
@Test
public void testResolverQualifed() {
String host = "host.a.b.";
InetAddress addr = verifyResolve(host, host);
verifyInetAddress(addr, host, "1.1.1.1");
}
// localhost
@Test
public void testResolverLoopback() {
String host = "Localhost";
InetAddress addr = verifyResolve(host); // no lookup should occur
verifyInetAddress(addr, "Localhost", "127.0.0.1");
}
@Test
public void testResolverIP() {
String host = "1.1.1.1";
InetAddress addr = verifyResolve(host); // no lookup should occur for ips
verifyInetAddress(addr, host, host);
}
//
@Test
public void testCanonicalUriWithPort() {
URI uri;
uri = NetUtils.getCanonicalUri(URI.create("scheme://host:123"), 456);
assertEquals("scheme://host.a.b:123", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host:123/"), 456);
assertEquals("scheme://host.a.b:123/", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host:123/path"), 456);
assertEquals("scheme://host.a.b:123/path", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host:123/path?q#frag"), 456);
assertEquals("scheme://host.a.b:123/path?q#frag", uri.toString());
}
@Test
public void testCanonicalUriWithDefaultPort() {
URI uri;
uri = NetUtils.getCanonicalUri(URI.create("scheme://host"), 123);
assertEquals("scheme://host.a.b:123", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host/"), 123);
assertEquals("scheme://host.a.b:123/", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host/path"), 123);
assertEquals("scheme://host.a.b:123/path", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme://host/path?q#frag"), 123);
assertEquals("scheme://host.a.b:123/path?q#frag", uri.toString());
}
@Test
public void testCanonicalUriWithPath() {
URI uri;
uri = NetUtils.getCanonicalUri(URI.create("path"), 2);
assertEquals("path", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("/path"), 2);
assertEquals("/path", uri.toString());
}
@Test
public void testCanonicalUriWithNoAuthority() {
URI uri;
uri = NetUtils.getCanonicalUri(URI.create("scheme:/"), 2);
assertEquals("scheme:/", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme:/path"), 2);
assertEquals("scheme:/path", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme:///"), 2);
assertEquals("scheme:///", uri.toString());
uri = NetUtils.getCanonicalUri(URI.create("scheme:///path"), 2);
assertEquals("scheme:///path", uri.toString());
}
@Test
public void testCanonicalUriWithNoHost() {
URI uri = NetUtils.getCanonicalUri(URI.create("scheme://:123/path"), 2);
assertEquals("scheme://:123/path", uri.toString());
}
@Test
public void testCanonicalUriWithNoPortNoDefaultPort() {
URI uri = NetUtils.getCanonicalUri(URI.create("scheme://host/path"), -1);
assertEquals("scheme://host.a.b/path", uri.toString());
}
/**
* Test for {@link NetUtils#normalizeHostNames}
*/
@Test
public void testNormalizeHostName() {
String oneHost = "1.kanyezone.appspot.com";
try {
InetAddress.getByName(oneHost);
} catch (UnknownHostException e) {
assumeTrue(false, "Network not resolving " + oneHost);
}
List<String> hosts = Arrays.asList("127.0.0.1",
"localhost", oneHost, "UnknownHost123");
List<String> normalizedHosts = NetUtils.normalizeHostNames(hosts);
String summary = "original [" + StringUtils.join(hosts, ", ") + "]"
+ " normalized [" + StringUtils.join(normalizedHosts, ", ") + "]";
// when ipaddress is normalized, same address is expected in return
assertEquals(hosts.get(0), normalizedHosts.get(0), summary);
// for normalizing a resolvable hostname, resolved ipaddress is expected in return
assertFalse(normalizedHosts.get(1).equals(hosts.get(1)),
"Element 1 equal "+ summary);
assertEquals(hosts.get(0), normalizedHosts.get(1), summary);
// this address HADOOP-8372: when normalizing a valid resolvable hostname start with numeric,
// its ipaddress is expected to return
assertFalse(normalizedHosts.get(2).equals(hosts.get(2)),
"Element 2 equal " + summary);
// return the same hostname after normalizing a irresolvable hostname.
assertEquals(hosts.get(3), normalizedHosts.get(3), summary);
}
@Test
public void testGetHostNameOfIP() {
assertNull(NetUtils.getHostNameOfIP(null));
assertNull(NetUtils.getHostNameOfIP(""));
assertNull(NetUtils.getHostNameOfIP("crazytown"));
assertNull(NetUtils.getHostNameOfIP("127.0.0.1:")); // no port
assertNull(NetUtils.getHostNameOfIP("127.0.0.1:-1")); // bogus port
assertNull(NetUtils.getHostNameOfIP("127.0.0.1:A")); // bogus port
assertNotNull(NetUtils.getHostNameOfIP("127.0.0.1"));
assertNotNull(NetUtils.getHostNameOfIP("127.0.0.1:1"));
}
@Test
public void testTrimCreateSocketAddress() {
Configuration conf = new Configuration();
NetUtils.addStaticResolution("host", "127.0.0.1");
final String defaultAddr = "host:1 ";
InetSocketAddress addr = NetUtils.createSocketAddr(defaultAddr);
conf.setSocketAddr("myAddress", addr);
assertEquals(defaultAddr.trim(), NetUtils.getHostPortString(addr));
}
@Test
public void testGetPortFromHostPortString() throws Exception {
assertEquals(1002, NetUtils.getPortFromHostPortString("testHost:1002"));
LambdaTestUtils.intercept(IllegalArgumentException.class,
() -> NetUtils.getPortFromHostPortString("testHost"));
LambdaTestUtils.intercept(IllegalArgumentException.class,
() -> NetUtils.getPortFromHostPortString("testHost:randomString"));
}
@Test
public void testBindToLocalAddress() throws Exception {
assertNotNull(NetUtils
.bindToLocalAddress(NetUtils.getLocalInetAddress("127.0.0.1"), false));
assertNull(NetUtils
.bindToLocalAddress(NetUtils.getLocalInetAddress("127.0.0.1"), true));
}
public static | TestIOException |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/convert/support/StringToUUIDConverter.java | {
"start": 985,
"end": 1196
} | class ____ implements Converter<String, UUID> {
@Override
public @Nullable UUID convert(String source) {
return (StringUtils.hasText(source) ? UUID.fromString(source.trim()) : null);
}
}
| StringToUUIDConverter |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/plugin/ExtensionRealmCache.java | {
"start": 1268,
"end": 1343
} | interface ____ be changed or deleted
* without prior notice.
*
*/
public | can |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSWithKerberos.java | {
"start": 2502,
"end": 11062
} | class ____ extends HFSTestCase {
@AfterEach
public void resetUGI() {
Configuration conf = new Configuration();
UserGroupInformation.setConfiguration(conf);
}
private void createHttpFSServer() throws Exception {
File homeDir = TestDirHelper.getTestDir();
assertTrue(new File(homeDir, "conf").mkdir());
assertTrue(new File(homeDir, "log").mkdir());
assertTrue(new File(homeDir, "temp").mkdir());
HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
File secretFile = new File(new File(homeDir, "conf"), "secret");
Writer w = new FileWriter(secretFile);
w.write("secret");
w.close();
//HDFS configuration
File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
hadoopConfDir.mkdirs();
String fsDefaultName = TestHdfsHelper.getHdfsConf()
.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
Configuration conf = new Configuration(false);
conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
OutputStream os = new FileOutputStream(hdfsSite);
conf.writeXml(os);
os.close();
conf = new Configuration(false);
conf.set("httpfs.proxyuser.client.hosts", "*");
conf.set("httpfs.proxyuser.client.groups", "*");
conf.set("httpfs.authentication.type", "kerberos");
conf.set("httpfs.authentication.signature.secret.file",
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite);
conf.writeXml(os);
os.close();
ClassLoader cl = Thread.currentThread().getContextClassLoader();
URL url = cl.getResource("webapp");
WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
Server server = TestJettyHelper.getJettyServer();
server.setHandler(context);
server.start();
HttpFSServerWebApp.get().setAuthority(TestJettyHelper.getAuthority());
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testValidHttpFSAccess() throws Exception {
createHttpFSServer();
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY");
AuthenticatedURL aUrl = new AuthenticatedURL();
AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
HttpURLConnection conn = aUrl.openConnection(url, aToken);
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
return null;
}
});
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testInvalidadHttpFSAccess() throws Exception {
createHttpFSServer();
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenHttpFSAccess() throws Exception {
createHttpFSServer();
KerberosTestUtils.doAsClient(new Callable<Void>() {
@Override
public Void call() throws Exception {
//get delegation token doing SPNEGO authentication
URL url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETDELEGATIONTOKEN");
AuthenticatedURL aUrl = new AuthenticatedURL();
AuthenticatedURL.Token aToken = new AuthenticatedURL.Token();
HttpURLConnection conn = aUrl.openConnection(url, aToken);
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
JSONObject json = (JSONObject) new JSONParser()
.parse(new InputStreamReader(conn.getInputStream()));
json =
(JSONObject) json
.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_JSON);
String tokenStr = (String) json
.get(DelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON);
//access httpfs using the delegation token
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//try to renew the delegation token without SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
//renew the delegation token with SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=RENEWDELEGATIONTOKEN&token=" + tokenStr);
conn = aUrl.openConnection(url, aToken);
conn.setRequestMethod("PUT");
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//cancel delegation token, no need for SPNEGO credentials
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=CANCELDELEGATIONTOKEN&token=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("PUT");
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
//try to access httpfs with the canceled delegation token
url = new URL(TestJettyHelper.getJettyURL(),
"/webhdfs/v1/?op=GETHOMEDIRECTORY&delegation=" +
tokenStr);
conn = (HttpURLConnection) url.openConnection();
assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
return null;
}
});
}
@SuppressWarnings("deprecation")
private void testDelegationTokenWithFS(Class fileSystemClass)
throws Exception {
createHttpFSServer();
Configuration conf = new Configuration();
conf.set("fs.webhdfs.impl", fileSystemClass.getName());
conf.set("fs.hdfs.impl.disable.cache", "true");
URI uri = new URI( "webhdfs://" +
TestJettyHelper.getJettyURL().toURI().getAuthority());
FileSystem fs = FileSystem.get(uri, conf);
Token<?> tokens[] = fs.addDelegationTokens("foo", null);
fs.close();
assertEquals(1, tokens.length);
fs = FileSystem.get(uri, conf);
((DelegationTokenRenewer.Renewable) fs).setDelegationToken(tokens[0]);
fs.listStatus(new Path("/"));
fs.close();
}
private void testDelegationTokenWithinDoAs(
final Class fileSystemClass, boolean proxyUser) throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.security.authentication", "kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab("client",
"/Users/tucu/tucu.keytab");
UserGroupInformation ugi = UserGroupInformation.getLoginUser();
if (proxyUser) {
ugi = UserGroupInformation.createProxyUser("foo", ugi);
}
conf = new Configuration();
UserGroupInformation.setConfiguration(conf);
ugi.doAs(
new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
testDelegationTokenWithFS(fileSystemClass);
return null;
}
});
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithHttpFSFileSystem() throws Exception {
testDelegationTokenWithinDoAs(HttpFSFileSystem.class, false);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithWebhdfsFileSystem() throws Exception {
testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, false);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testDelegationTokenWithHttpFSFileSystemProxyUser()
throws Exception {
testDelegationTokenWithinDoAs(HttpFSFileSystem.class, true);
}
// TODO: WebHdfsFilesystem does work with ProxyUser HDFS-3509
// @Test
// @TestDir
// @TestJetty
// @TestHdfs
// public void testDelegationTokenWithWebhdfsFileSystemProxyUser()
// throws Exception {
// testDelegationTokenWithinDoAs(WebHdfsFileSystem.class, true);
// }
}
| TestHttpFSWithKerberos |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ConstantPatternCompileTest.java | {
"start": 15353,
"end": 15639
} | class ____ {
@SuppressWarnings("ConstantPatternCompile")
boolean isCar(String input) {
return Pattern.compile("car").matcher(input).matches();
}
}
""")
.expectUnchanged()
.doTest();
}
}
| Test |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/ServiceState.java | {
"start": 1183,
"end": 1526
} | enum ____ {
ACCEPTED, STARTED, STABLE, STOPPED, FAILED, FLEX, UPGRADING,
UPGRADING_AUTO_FINALIZE, EXPRESS_UPGRADING, SUCCEEDED, CANCEL_UPGRADING;
public static boolean isUpgrading(ServiceState state) {
return state.equals(UPGRADING) || state.equals(UPGRADING_AUTO_FINALIZE)
|| state.equals(EXPRESS_UPGRADING);
}
}
| ServiceState |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/transformations/GetTransitivePredecessorsTest.java | {
"start": 5694,
"end": 6195
} | class ____<T> extends AbstractStreamOperator<T>
implements TwoInputStreamOperator<T, T, T> {
@Override
public void processElement1(StreamRecord<T> element) throws Exception {
output.collect(element);
}
@Override
public void processElement2(StreamRecord<T> element) throws Exception {
output.collect(element);
}
}
/** A test implementation of {@link TypeInformation}. */
private static | DummyTwoInputOperator |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/query/BootQueryLogging.java | {
"start": 455,
"end": 584
} | interface ____ {
String NAME = BootLogging.NAME + ".query";
Logger BOOT_QUERY_LOGGER = Logger.getLogger( NAME );
}
| BootQueryLogging |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/JSONValidator.java | {
"start": 12335,
"end": 13863
} | class ____ extends JSONValidator {
private final String str;
public UTF16Validator(String str) {
this.str = str;
next();
skipWhiteSpace();
}
void next() {
++pos;
if (pos >= str.length()) {
ch = '\0';
eof = true;
} else {
ch = str.charAt(pos);
}
}
protected final void fieldName()
{
for (int i = pos + 1; i < str.length(); ++i) {
char ch = str.charAt(i);
if (ch == '\\') {
break;
}
if (ch == '\"') {
this.ch = str.charAt(i + 1);
pos = i + 1;
return;
}
}
next();
for (; ; ) {
if (ch == '\\') {
next();
if (ch == 'u') {
next();
next();
next();
next();
next();
} else {
next();
}
}
else if (ch == '"') {
next();
break;
}
else if(eof){
break;
}else {
next();
}
}
}
}
static | UTF16Validator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/AttributeConverterDefinitionTest.java | {
"start": 3780,
"end": 4087
} | class ____ implements AttributeConverter<Collection<String>, String> {
@Override
public String convertToDatabaseColumn(Collection<String> attribute) {
return "";
}
@Override
public Collection<String> convertToEntityAttribute(String dbData) {
return List.of();
}
}
}
| CollectionAttrConverter |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.