language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_extractingResultOf_Test.java | {
"start": 1150,
"end": 2706
} | class ____ {
private static FluentJedi yoda;
private static FluentJedi vader;
private static AtomicReferenceArray<FluentJedi> jedis;
@BeforeAll
static void setUpOnce() {
yoda = new FluentJedi(new Name("Yoda"), 800, false);
vader = new FluentJedi(new Name("Darth Vader"), 50, true);
jedis = new AtomicReferenceArray<>(array(yoda, vader));
}
@Test
void should_allow_assertions_on_method_invocation_result_extracted_from_given_iterable() {
// extract method result
assertThat(jedis).extractingResultOf("age").containsOnly(800, 50);
// extract if method result is primitive
assertThat(jedis).extractingResultOf("darkSide").containsOnly(false, true);
// extract if method result is also a property
assertThat(jedis).extractingResultOf("name").containsOnly(new Name("Yoda"), new Name("Darth Vader"));
// extract toString method result
assertThat(jedis).extractingResultOf("toString").containsOnly("Yoda", "Darth Vader");
}
@Test
void should_allow_assertions_on_method_invocation_result_extracted_from_given_iterable_with_enforcing_return_type() {
assertThat(jedis).extractingResultOf("name", Name.class).containsOnly(new Name("Yoda"), new Name("Darth Vader"));
}
@Test
void should_throw_error_if_no_method_with_given_name_can_be_extracted() {
assertThatIllegalArgumentException().isThrownBy(() -> assertThat(jedis).extractingResultOf("unknown"))
.withMessage("Can't find method 'unknown' in | AtomicReferenceArrayAssert_extractingResultOf_Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/source/legacy/SourceFunction.java | {
"start": 5122,
"end": 7961
} | class ____} for an example.
*
* @param ctx The context to emit elements to and for accessing locks.
*/
void run(SourceContext<T> ctx) throws Exception;
/**
* Cancels the source. Most sources will have a while loop inside the {@link
* #run(SourceContext)} method. The implementation needs to ensure that the source will break
* out of that loop after this method is called.
*
* <p>A typical pattern is to have an {@code "volatile boolean isRunning"} flag that is set to
* {@code false} in this method. That flag is checked in the loop condition.
*
* <p>In case of an ungraceful shutdown (cancellation of the source operator, possibly for
* failover), the thread that calls {@link #run(SourceContext)} will also be {@link
* Thread#interrupt() interrupted}) by the Flink runtime, in order to speed up the cancellation
* (to ensure threads exit blocking methods fast, like I/O, blocking queues, etc.). The
* interruption happens strictly after this method has been called, so any interruption handler
* can rely on the fact that this method has completed (for example to ignore exceptions that
* happen after cancellation).
*
* <p>During graceful shutdown (for example stopping a job with a savepoint), the program must
* cleanly exit the {@link #run(SourceContext)} method soon after this method was called. The
* Flink runtime will NOT interrupt the source thread during graceful shutdown. Source
* implementors must ensure that no thread interruption happens on any thread that emits records
* through the {@code SourceContext} from the {@link #run(SourceContext)} method; otherwise the
* clean shutdown may fail when threads are interrupted while processing the final records.
*
* <p>Because the {@code SourceFunction} cannot easily differentiate whether the shutdown should
* be graceful or ungraceful, we recommend that implementors refrain from interrupting any
* threads that interact with the {@code SourceContext} at all. You can rely on the Flink
* runtime to interrupt the source thread in case of ungraceful cancellation. Any additionally
* spawned threads that directly emit records through the {@code SourceContext} should use a
* shutdown method that does not rely on thread interruption.
*/
void cancel();
// ------------------------------------------------------------------------
// source context
// ------------------------------------------------------------------------
/**
* Interface that source functions use to emit elements, and possibly watermarks.
*
* @param <T> The type of the elements produced by the source.
*/
@Public // Interface might be extended in the future with additional methods.
| docs |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java | {
"start": 1105,
"end": 4732
} | class ____ extends ActionResponse implements ToXContentObject {
private static final ParseField CONNECTED_FIELD = new ParseField("connected");
private static final ParseField SKIP_UNAVAILABLE_FIELD = new ParseField("skip_unavailable");
private static final ParseField MATCHING_INDICES_FIELD = new ParseField("matching_indices");
private static final ParseField ES_VERSION_FIELD = new ParseField("version");
private static final ParseField ERROR_FIELD = new ParseField("error");
private final Map<String, ResolveClusterInfo> infoMap;
public ResolveClusterActionResponse(Map<String, ResolveClusterInfo> infoMap) {
this.infoMap = infoMap;
}
public ResolveClusterActionResponse(StreamInput in) throws IOException {
this.infoMap = in.readImmutableMap(ResolveClusterInfo::new);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) {
throw new UnsupportedOperationException(
"ResolveClusterAction requires at least version "
+ TransportVersions.V_8_13_0.toReleaseVersion()
+ " but was "
+ out.getTransportVersion().toReleaseVersion()
);
}
out.writeMap(infoMap, StreamOutput::writeWriteable);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (Map.Entry<String, ResolveClusterInfo> entry : infoMap.entrySet()) {
String clusterAlias = entry.getKey();
if (clusterAlias.equals(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) {
clusterAlias = SearchResponse.LOCAL_CLUSTER_NAME_REPRESENTATION;
}
builder.startObject(clusterAlias);
ResolveClusterInfo clusterInfo = entry.getValue();
builder.field(CONNECTED_FIELD.getPreferredName(), clusterInfo.isConnected());
builder.field(SKIP_UNAVAILABLE_FIELD.getPreferredName(), clusterInfo.getSkipUnavailable());
if (clusterInfo.getError() != null) {
builder.field(ERROR_FIELD.getPreferredName(), clusterInfo.getError());
}
if (clusterInfo.getMatchingIndices() != null) {
builder.field(MATCHING_INDICES_FIELD.getPreferredName(), clusterInfo.getMatchingIndices());
}
Build build = clusterInfo.getBuild();
if (build != null) {
builder.startObject(ES_VERSION_FIELD.getPreferredName())
.field("number", build.qualifiedVersion())
.field("build_flavor", build.flavor()) // is "stateless" for stateless projects
.field("minimum_wire_compatibility_version", build.minWireCompatVersion())
.field("minimum_index_compatibility_version", build.minIndexCompatVersion())
.endObject();
}
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ResolveClusterActionResponse response = (ResolveClusterActionResponse) o;
return infoMap.equals(response.infoMap);
}
@Override
public int hashCode() {
return Objects.hash(infoMap);
}
public Map<String, ResolveClusterInfo> getResolveClusterInfo() {
return infoMap;
}
}
| ResolveClusterActionResponse |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/monitor/jvm/JvmStats.java | {
"start": 11882,
"end": 14361
} | class ____ {
static final String JVM = "jvm";
static final String TIMESTAMP = "timestamp";
static final String UPTIME = "uptime";
static final String UPTIME_IN_MILLIS = "uptime_in_millis";
static final String MEM = "mem";
static final String HEAP_USED = "heap_used";
static final String HEAP_USED_IN_BYTES = "heap_used_in_bytes";
static final String HEAP_USED_PERCENT = "heap_used_percent";
static final String HEAP_MAX = "heap_max";
static final String HEAP_MAX_IN_BYTES = "heap_max_in_bytes";
static final String HEAP_COMMITTED = "heap_committed";
static final String HEAP_COMMITTED_IN_BYTES = "heap_committed_in_bytes";
static final String NON_HEAP_USED = "non_heap_used";
static final String NON_HEAP_USED_IN_BYTES = "non_heap_used_in_bytes";
static final String NON_HEAP_COMMITTED = "non_heap_committed";
static final String NON_HEAP_COMMITTED_IN_BYTES = "non_heap_committed_in_bytes";
static final String POOLS = "pools";
static final String USED = "used";
static final String USED_IN_BYTES = "used_in_bytes";
static final String MAX = "max";
static final String MAX_IN_BYTES = "max_in_bytes";
static final String PEAK_USED = "peak_used";
static final String PEAK_USED_IN_BYTES = "peak_used_in_bytes";
static final String PEAK_MAX = "peak_max";
static final String PEAK_MAX_IN_BYTES = "peak_max_in_bytes";
static final String THREADS = "threads";
static final String COUNT = "count";
static final String PEAK_COUNT = "peak_count";
static final String GC = "gc";
static final String COLLECTORS = "collectors";
static final String COLLECTION_COUNT = "collection_count";
static final String COLLECTION_TIME = "collection_time";
static final String COLLECTION_TIME_IN_MILLIS = "collection_time_in_millis";
static final String BUFFER_POOLS = "buffer_pools";
static final String TOTAL_CAPACITY = "total_capacity";
static final String TOTAL_CAPACITY_IN_BYTES = "total_capacity_in_bytes";
static final String CLASSES = "classes";
static final String CURRENT_LOADED_COUNT = "current_loaded_count";
static final String TOTAL_LOADED_COUNT = "total_loaded_count";
static final String TOTAL_UNLOADED_COUNT = "total_unloaded_count";
}
public static | Fields |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/NestedAssociationEmbeddableTest.java | {
"start": 3356,
"end": 3706
} | class ____ {
private Integer distance;
@ManyToOne
private Poi poi;
public Location() {
}
public Location(Integer distance, Poi poi) {
this.distance = distance;
this.poi = poi;
}
public Integer getDistance() {
return distance;
}
public Poi getPoi() {
return poi;
}
}
@Entity( name = "Poi" )
public static | Location |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/security/HttpUpgradeSelectAuthMechOnMethodValidationFailureTest.java | {
"start": 1737,
"end": 1907
} | class ____ {
@BasicAuthentication
@OnTextMessage
void onMessage(String message) {
// ignored
}
}
}
| BasicAuthOnMethodEndpoint |
java | spring-projects__spring-framework | spring-jms/src/test/java/org/springframework/jms/listener/endpoint/StubJmsActivationSpecFactory.java | {
"start": 862,
"end": 1091
} | class ____ implements JmsActivationSpecFactory {
@Override
public ActivationSpec createActivationSpec(ResourceAdapter adapter, JmsActivationSpecConfig config) {
return new StubActivationSpec();
}
}
| StubJmsActivationSpecFactory |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/arguments/ExplainArgs.java | {
"start": 601,
"end": 776
} | class ____<K, V> {
private QueryDialects dialect = QueryDialects.DIALECT2;
/**
* Builder entry points for {@link ExplainArgs}.
*/
public static | ExplainArgs |
java | google__dagger | javatests/dagger/internal/codegen/kotlin/KspComponentProcessorTest.java | {
"start": 12675,
"end": 13956
} | class ____");
CompilerTests.daggerCompiler(componentSrc)
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(
CompilerTests.javaSource(
"test/DaggerMyComponent",
"package test;",
"",
"import dagger.internal.DaggerGenerated;",
"import dagger.internal.Preconditions;",
"import javax.annotation.processing.Generated;",
"",
"@DaggerGenerated",
"@Generated(",
" value = \"dagger.internal.codegen.ComponentProcessor\",",
" comments = \"https://dagger.dev\"",
")",
"@SuppressWarnings({",
" \"unchecked\",",
" \"rawtypes\",",
" \"KotlinInternal\",",
" \"KotlinInternalInJava\",",
" \"cast\",",
" \"deprecation\",",
" \"nullness:initialization.field.uninitialized\"",
"})",
"public final | Bar |
java | spring-projects__spring-boot | module/spring-boot-data-neo4j-test/src/dockerTest/java/org/springframework/boot/data/neo4j/test/autoconfigure/DataNeo4jTestWithIncludeFilterIntegrationTests.java | {
"start": 1602,
"end": 1942
} | class ____ {
@Container
@ServiceConnection
static final Neo4jContainer neo4j = TestImage.container(Neo4jContainer.class).withoutAuthentication();
@Autowired
private ExampleService service;
@Test
void testService() {
assertThat(this.service.hasNode(ExampleGraph.class)).isFalse();
}
}
| DataNeo4jTestWithIncludeFilterIntegrationTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/fairscheduler/FairSchedulerXmlVerifications.java | {
"start": 1782,
"end": 6169
} | class ____ {
private static final Set<String> RESOURCE_FIELDS = Sets.newHashSet(
"minResources", "amUsedResources", "amMaxResources", "fairResources",
"clusterResources", "reservedResources", "maxResources", "usedResources",
"steadyFairResources", "demandResources");
private final Set<String> customResourceTypes;
FairSchedulerXmlVerifications(List<String> customResourceTypes) {
this.customResourceTypes = Sets.newHashSet(customResourceTypes);
}
public void verify(Element element) {
verifyResourcesContainDefaultResourceTypes(element, RESOURCE_FIELDS);
verifyResourcesContainCustomResourceTypes(element, RESOURCE_FIELDS);
}
private void verifyResourcesContainDefaultResourceTypes(Element queue,
Set<String> resourceCategories) {
for (String resourceCategory : resourceCategories) {
boolean hasResourceCategory = hasChild(queue, resourceCategory);
assertTrue(hasResourceCategory, "Queue " + queue + " does not have resource category key: "
+ resourceCategory);
verifyResourceContainsDefaultResourceTypes(
(Element) queue.getElementsByTagName(resourceCategory).item(0));
}
}
private void verifyResourceContainsDefaultResourceTypes(
Element element) {
Object memory = opt(element, "memory");
Object vCores = opt(element, "vCores");
assertNotNull(memory, "Key 'memory' not found in: " + element);
assertNotNull(vCores, "Key 'vCores' not found in: " + element);
}
private void verifyResourcesContainCustomResourceTypes(Element queue,
Set<String> resourceCategories) {
for (String resourceCategory : resourceCategories) {
assertTrue(hasChild(queue, resourceCategory),
"Queue " + queue + " does not have key for resourceCategory: "
+ resourceCategory);
verifyResourceContainsCustomResourceTypes(
(Element) queue.getElementsByTagName(resourceCategory).item(0));
}
}
private void verifyResourceContainsCustomResourceTypes(
Element resourceCategory) {
assertEquals(1, resourceCategory.getElementsByTagName("resourceInformations")
.getLength(), toXml(resourceCategory)
+ " should have only one resourceInformations child!");
Element resourceInformations = (Element) resourceCategory
.getElementsByTagName("resourceInformations").item(0);
NodeList customResources =
resourceInformations.getElementsByTagName("resourceInformation");
// customResources will include vcores / memory as well
assertEquals(customResourceTypes.size(), customResources.getLength() - 2,
"Different number of custom resource types found than expected");
for (int i = 0; i < customResources.getLength(); i++) {
Element customResource = (Element) customResources.item(i);
String name = getXmlString(customResource, "name");
String unit = getXmlString(customResource, "units");
String resourceType = getXmlString(customResource, "resourceType");
Long value = getXmlLong(customResource, "value");
if (ResourceInformation.MEMORY_URI.equals(name)
|| ResourceInformation.VCORES_URI.equals(name)) {
continue;
}
assertTrue(customResourceTypes.contains(name),
"Custom resource type " + name + " not found");
assertEquals("k", unit);
assertEquals(ResourceTypes.COUNTABLE,
ResourceTypes.valueOf(resourceType));
assertNotNull(value, "Resource value should not be null for resource type "
+ resourceType + ", listing xml contents: " + toXml(customResource));
}
}
private Object opt(Node node, String child) {
NodeList nodes = getElementsByTagNameInternal(node, child);
if (nodes.getLength() > 0) {
return nodes.item(0);
}
return null;
}
private boolean hasChild(Node node, String child) {
return getElementsByTagNameInternal(node, child).getLength() > 0;
}
private NodeList getElementsByTagNameInternal(Node node, String child) {
if (node instanceof Element) {
return ((Element) node).getElementsByTagName(child);
} else if (node instanceof Document) {
return ((Document) node).getElementsByTagName(child);
} else {
throw new IllegalStateException("Unknown type of wrappedObject: " + node
+ ", type: " + node.getClass());
}
}
}
| FairSchedulerXmlVerifications |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 94190,
"end": 94480
} | class ____ {
@Bean
@ConfigurationPropertiesBinding
static WithObjectToObjectMethodConverter withObjectToObjectMethodConverter() {
return new WithObjectToObjectMethodConverter();
}
}
@ConfigurationProperties("test")
static | WithCustomConverterAndObjectToObjectMethodConfiguration |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_wangran1.java | {
"start": 185,
"end": 905
} | class ____ extends TestCase {
public void test_0() throws Exception {
Entity entity = new Entity();
entity.setId(11);
entity.setName("xx");
Queue q = new Queue();
q.setId(55);
entity.getQueue().put(q.getId(), q);
String text = JSON.toJSONString(entity);
System.out.println(text);
Entity entity2 = JSON.parseObject(text, Entity.class);
Assert.assertNotNull(entity2.getQueue());
Assert.assertEquals(1, entity2.getQueue().size());
Assert.assertEquals(true, entity2.getQueue().values().iterator().next() instanceof Queue);
}
public static | Bug_for_wangran1 |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithMethodLevelInterceptorsAndBindingsSourceTest.java | {
"start": 2317,
"end": 2616
} | class ____ {
String hello1() {
return "hello1";
}
String hello2() {
return "hello2";
}
String hello3() {
return "hello3";
}
String hello4() {
return "hello4";
}
}
static | MyNonbean |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/conversion/FallbackStringToObjectConverter.java | {
"start": 2531,
"end": 7093
} | class ____ implements StringToObjectConverter {
/**
* Implementation of the NULL Object Pattern.
*/
private static final Function<String, Object> NULL_EXECUTABLE = source -> source;
/**
* Cache for factory methods and factory constructors.
*
* <p>Searches that do not find a factory method or constructor are tracked
* by the presence of a {@link #NULL_EXECUTABLE} object stored in the map.
* This prevents the framework from repeatedly searching for things which
* are already known not to exist.
*/
private static final ConcurrentHashMap<Class<?>, Function<String, @Nullable Object>> factoryExecutableCache //
= new ConcurrentHashMap<>(64);
@Override
public boolean canConvertTo(Class<?> targetType) {
return findFactoryExecutable(targetType) != NULL_EXECUTABLE;
}
@Override
public @Nullable Object convert(String source, Class<?> targetType) throws Exception {
Function<String, @Nullable Object> executable = findFactoryExecutable(targetType);
Preconditions.condition(executable != NULL_EXECUTABLE,
"Illegal state: convert() must not be called if canConvert() returned false");
return executable.apply(source);
}
private static Function<String, @Nullable Object> findFactoryExecutable(Class<?> targetType) {
return factoryExecutableCache.computeIfAbsent(targetType, type -> {
// First, search for exact String argument matches.
Function<String, @Nullable Object> factory = findFactoryExecutable(type, String.class);
if (factory != null) {
return factory;
}
// Second, fall back to CharSequence argument matches.
factory = findFactoryExecutable(type, CharSequence.class);
if (factory != null) {
return factory;
}
// Else, nothing found.
return NULL_EXECUTABLE;
});
}
private static @Nullable Function<String, @Nullable Object> findFactoryExecutable(Class<?> targetType,
Class<?> parameterType) {
Method factoryMethod = findFactoryMethod(targetType, parameterType);
if (factoryMethod != null) {
return source -> invokeMethod(factoryMethod, null, source);
}
Constructor<?> constructor = findFactoryConstructor(targetType, parameterType);
if (constructor != null) {
return source -> newInstance(constructor, source);
}
return null;
}
private static @Nullable Method findFactoryMethod(Class<?> targetType, Class<?> parameterType) {
List<Method> factoryMethods = findMethods(targetType, new IsFactoryMethod(targetType, parameterType),
BOTTOM_UP);
if (factoryMethods.size() == 1) {
return factoryMethods.get(0);
}
return null;
}
private static @Nullable Constructor<?> findFactoryConstructor(Class<?> targetType, Class<?> parameterType) {
List<Constructor<?>> constructors = findConstructors(targetType,
new IsFactoryConstructor(targetType, parameterType));
if (constructors.size() == 1) {
return constructors.get(0);
}
return null;
}
/**
* {@link Predicate} that determines if the {@link Method} supplied to
* {@link #test(Method)} is a non-private static factory method for the
* supplied {@link #targetType} and {@link #parameterType}.
*/
record IsFactoryMethod(Class<?> targetType, Class<?> parameterType) implements Predicate<Method> {
@Override
public boolean test(Method method) {
// Please do not collapse the following into a single statement.
if (!method.getReturnType().equals(this.targetType)) {
return false;
}
if (isNotStatic(method)) {
return false;
}
return isFactoryCandidate(method, this.parameterType);
}
}
/**
* {@link Predicate} that determines if the {@link Constructor} supplied to
* {@link #test(Constructor)} is a non-private factory constructor for the
* supplied {@link #targetType} and {@link #parameterType}.
*/
record IsFactoryConstructor(Class<?> targetType, Class<?> parameterType) implements Predicate<Constructor<?>> {
@Override
public boolean test(Constructor<?> constructor) {
// Please do not collapse the following into a single statement.
if (!constructor.getDeclaringClass().equals(this.targetType)) {
return false;
}
return isFactoryCandidate(constructor, this.parameterType);
}
}
/**
* Determine if the supplied {@link Executable} is not private and accepts a
* single argument of the supplied parameter type.
*/
private static boolean isFactoryCandidate(Executable executable, Class<?> parameterType) {
return isNotPrivate(executable) //
&& (executable.getParameterCount() == 1) //
&& (executable.getParameterTypes()[0] == parameterType);
}
}
| FallbackStringToObjectConverter |
java | quarkusio__quarkus | extensions/smallrye-openapi/runtime/src/main/java/io/quarkus/smallrye/openapi/runtime/OpenApiDocumentService.java | {
"start": 789,
"end": 3248
} | class ____ {
private final OpenApiDocumentHolder documentHolder;
@Inject
public OpenApiDocumentService(OASFilter autoSecurityFilter,
OpenApiRecorder.UserDefinedRuntimeFilters runtimeFilters, Config config) {
ClassLoader loader = Optional.ofNullable(OpenApiConstants.classLoader)
.orElseGet(Thread.currentThread()::getContextClassLoader);
try (InputStream source = loader.getResourceAsStream(OpenApiConstants.BASE_NAME + "JSON")) {
if (source != null) {
Set<String> userFilters = new LinkedHashSet<>(runtimeFilters.filters());
boolean dynamic = config.getOptionalValue("quarkus.smallrye-openapi.always-run-filter", Boolean.class)
.orElse(Boolean.FALSE);
SmallRyeOpenAPI.Builder builder = new OpenAPIRuntimeBuilder()
.withConfig(config)
.withApplicationClassLoader(loader)
.enableModelReader(false)
.enableStandardStaticFiles(false)
.enableAnnotationScan(false)
.enableStandardFilter(false)
.withCustomStaticFile(() -> source);
// Auth-security and disabled endpoint filters will only run once
Optional.ofNullable(autoSecurityFilter)
.ifPresent(builder::addFilter);
DisabledRestEndpointsFilter.maybeGetInstance()
.ifPresent(builder::addFilter);
if (dynamic && !userFilters.isEmpty()) {
// Only regenerate the OpenAPI document when configured and there are filters to run
this.documentHolder = new DynamicDocument(builder, loader, userFilters);
} else {
userFilters.forEach(name -> builder.addFilter(name, loader, (IndexView) null));
this.documentHolder = new StaticDocument(builder.build());
}
} else {
this.documentHolder = new EmptyDocument();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public byte[] getDocument(Format format) {
if (format.equals(Format.JSON)) {
return documentHolder.getJsonDocument();
}
return documentHolder.getYamlDocument();
}
static | OpenApiDocumentService |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/deployment/ThreadSettingsControlMessagePytorchActionTests.java | {
"start": 1498,
"end": 6113
} | class ____ extends ESTestCase {
public void testBuildControlMessage() throws IOException {
DeploymentManager.ProcessContext processContext = mock(DeploymentManager.ProcessContext.class);
ThreadPool tp = mock(ThreadPool.class);
ThreadSettingsControlMessagePytorchAction action = new ThreadSettingsControlMessagePytorchAction(
"model_id",
1,
4,
TimeValue.MINUS_ONE,
processContext,
tp,
ActionListener.noop()
);
var message = action.buildControlMessage("foo");
assertEquals("{\"request_id\":\"foo\",\"control\":0,\"num_allocations\":4}", message.utf8ToString());
}
@SuppressWarnings("unchecked")
public void testRunNotCalledAfterNotified() {
DeploymentManager.ProcessContext processContext = mock(DeploymentManager.ProcessContext.class);
PyTorchResultProcessor resultProcessor = mock(PyTorchResultProcessor.class);
when(processContext.getResultProcessor()).thenReturn(resultProcessor);
AtomicInteger timeoutCount = new AtomicInteger();
when(processContext.getTimeoutCount()).thenReturn(timeoutCount);
final var deterministicTaskQueue = new DeterministicTaskQueue();
ThreadPool tp = deterministicTaskQueue.getThreadPool();
{
ActionListener<ThreadSettings> listener = mock(ActionListener.class);
ThreadSettingsControlMessagePytorchAction action = new ThreadSettingsControlMessagePytorchAction(
"test-model",
1,
1,
TimeValue.MAX_VALUE,
processContext,
tp,
listener
);
action.init();
action.onTimeout();
action.run();
verify(resultProcessor, times(1)).ignoreResponseWithoutNotifying("1");
verify(resultProcessor, never()).registerRequest(anyString(), any());
verify(listener, times(1)).onFailure(any());
verify(listener, never()).onResponse(any());
}
{
ActionListener<ThreadSettings> listener = mock(ActionListener.class);
ThreadSettingsControlMessagePytorchAction action = new ThreadSettingsControlMessagePytorchAction(
"test-model",
1,
1,
TimeValue.MAX_VALUE,
processContext,
tp,
listener
);
action.init();
action.onFailure(new IllegalStateException());
action.run();
verify(resultProcessor, never()).registerRequest(anyString(), any());
verify(listener, times(1)).onFailure(any());
verify(listener, never()).onResponse(any());
}
}
@SuppressWarnings("unchecked")
public void testDoRun() throws IOException {
DeploymentManager.ProcessContext processContext = mock(DeploymentManager.ProcessContext.class);
PyTorchResultProcessor resultProcessor = mock(PyTorchResultProcessor.class);
when(processContext.getResultProcessor()).thenReturn(resultProcessor);
AtomicInteger timeoutCount = new AtomicInteger();
when(processContext.getTimeoutCount()).thenReturn(timeoutCount);
SetOnce<PyTorchProcess> process = new SetOnce<>();
PyTorchProcess pp = mock(PyTorchProcess.class);
process.set(pp);
when(processContext.getProcess()).thenReturn(process);
Scheduler.ScheduledCancellable cancellable = mock(Scheduler.ScheduledCancellable.class);
ThreadPool tp = mock(ThreadPool.class);
when(tp.schedule(any(), any(), any(Executor.class))).thenReturn(cancellable);
ActionListener<ThreadSettings> listener = mock(ActionListener.class);
ArgumentCaptor<BytesReference> messageCapture = ArgumentCaptor.forClass(BytesReference.class);
doNothing().when(pp).writeInferenceRequest(messageCapture.capture());
ThreadSettingsControlMessagePytorchAction action = new ThreadSettingsControlMessagePytorchAction(
"test-model",
1,
1,
TimeValue.MAX_VALUE,
processContext,
tp,
listener
);
action.init();
action.run();
verify(resultProcessor).registerRequest(eq("1"), any());
verify(listener, never()).onFailure(any());
assertEquals("{\"request_id\":\"1\",\"control\":0,\"num_allocations\":1}", messageCapture.getValue().utf8ToString());
}
}
| ThreadSettingsControlMessagePytorchActionTests |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/support/QualifierAnnotationAutowireContextTests.java | {
"start": 30107,
"end": 30152
} | interface ____ {
}
private static | MyAutowired |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/cache/query/QueryCacheStats.java | {
"start": 987,
"end": 4826
} | class ____ implements Writeable, ToXContentFragment {
private long ramBytesUsed;
private long hitCount;
private long missCount;
private long cacheCount;
private long cacheSize;
public QueryCacheStats() {}
public QueryCacheStats(StreamInput in) throws IOException {
ramBytesUsed = in.readLong();
hitCount = in.readLong();
missCount = in.readLong();
cacheCount = in.readLong();
cacheSize = in.readLong();
}
public QueryCacheStats(long ramBytesUsed, long hitCount, long missCount, long cacheCount, long cacheSize) {
this.ramBytesUsed = ramBytesUsed;
this.hitCount = hitCount;
this.missCount = missCount;
this.cacheCount = cacheCount;
this.cacheSize = cacheSize;
}
public void add(QueryCacheStats stats) {
if (stats == null) {
return;
}
ramBytesUsed += stats.ramBytesUsed;
hitCount += stats.hitCount;
missCount += stats.missCount;
cacheCount += stats.cacheCount;
cacheSize += stats.cacheSize;
}
public void addRamBytesUsed(long additionalRamBytesUsed) {
ramBytesUsed += additionalRamBytesUsed;
}
public long getMemorySizeInBytes() {
return ramBytesUsed;
}
public ByteSizeValue getMemorySize() {
return ByteSizeValue.ofBytes(ramBytesUsed);
}
/**
* The total number of lookups in the cache.
*/
public long getTotalCount() {
return hitCount + missCount;
}
/**
* The number of successful lookups in the cache.
*/
public long getHitCount() {
return hitCount;
}
/**
* The number of lookups in the cache that failed to retrieve a {@link DocIdSet}.
*/
public long getMissCount() {
return missCount;
}
/**
* The number of {@link DocIdSet}s that have been cached.
*/
public long getCacheCount() {
return cacheCount;
}
/**
* The number of {@link DocIdSet}s that are in the cache.
*/
public long getCacheSize() {
return cacheSize;
}
/**
* The number of {@link DocIdSet}s that have been evicted from the cache.
*/
public long getEvictions() {
return cacheCount - cacheSize;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(ramBytesUsed);
out.writeLong(hitCount);
out.writeLong(missCount);
out.writeLong(cacheCount);
out.writeLong(cacheSize);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
QueryCacheStats that = (QueryCacheStats) o;
return ramBytesUsed == that.ramBytesUsed
&& hitCount == that.hitCount
&& missCount == that.missCount
&& cacheCount == that.cacheCount
&& cacheSize == that.cacheSize;
}
@Override
public int hashCode() {
return Objects.hash(ramBytesUsed, hitCount, missCount, cacheCount, cacheSize);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.QUERY_CACHE);
builder.humanReadableField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, getMemorySize());
builder.field(Fields.TOTAL_COUNT, getTotalCount());
builder.field(Fields.HIT_COUNT, getHitCount());
builder.field(Fields.MISS_COUNT, getMissCount());
builder.field(Fields.CACHE_SIZE, getCacheSize());
builder.field(Fields.CACHE_COUNT, getCacheCount());
builder.field(Fields.EVICTIONS, getEvictions());
builder.endObject();
return builder;
}
static final | QueryCacheStats |
java | apache__rocketmq | tools/src/main/java/org/apache/rocketmq/tools/command/cluster/ClusterListSubCommand.java | {
"start": 1527,
"end": 14540
} | class ____ implements SubCommand {
@Override
public String commandName() {
return "clusterList";
}
@Override
public String commandDesc() {
return "List cluster infos.";
}
@Override
public Options buildCommandlineOptions(Options options) {
Option opt = new Option("m", "moreStats", false, "Print more stats");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("i", "interval", true, "specify intervals numbers, it is in seconds");
opt.setRequired(false);
options.addOption(opt);
opt = new Option("c", "clusterName", true, "which cluster");
opt.setRequired(false);
options.addOption(opt);
return options;
}
@Override
public void execute(final CommandLine commandLine, final Options options,
RPCHook rpcHook) throws SubCommandException {
DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
long printInterval = 1;
boolean enableInterval = commandLine.hasOption('i');
if (enableInterval) {
printInterval = Long.parseLong(commandLine.getOptionValue('i')) * 1000;
}
String clusterName = commandLine.hasOption('c') ? commandLine.getOptionValue('c').trim() : "";
try {
defaultMQAdminExt.start();
long i = 0;
do {
if (i++ > 0) {
Thread.sleep(printInterval);
}
ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo();
Set<String> clusterNames = getTargetClusterNames(clusterName, clusterInfo);
if (commandLine.hasOption('m')) {
this.printClusterMoreStats(clusterNames, defaultMQAdminExt, clusterInfo);
} else {
this.printClusterBaseInfo(clusterNames, defaultMQAdminExt, clusterInfo);
}
}
while (enableInterval);
} catch (Exception e) {
throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
} finally {
defaultMQAdminExt.shutdown();
}
}
private Set<String> getTargetClusterNames(String clusterName, ClusterInfo clusterInfo) {
if (StringUtils.isEmpty(clusterName)) {
return clusterInfo.getClusterAddrTable().keySet();
} else {
Set<String> clusterNames = new TreeSet<>();
clusterNames.add(clusterName);
return clusterNames;
}
}
private void printClusterMoreStats(final Set<String> clusterNames,
final DefaultMQAdminExt defaultMQAdminExt,
final ClusterInfo clusterInfo) {
System.out.printf("%-16s %-32s %14s %14s %14s %14s%n",
"#Cluster Name",
"#Broker Name",
"#InTotalYest",
"#OutTotalYest",
"#InTotalToday",
"#OutTotalToday"
);
for (String clusterName : clusterNames) {
TreeSet<String> brokerNameTreeSet = new TreeSet<>();
Set<String> brokerNameSet = clusterInfo.getClusterAddrTable().get(clusterName);
if (brokerNameSet != null && !brokerNameSet.isEmpty()) {
brokerNameTreeSet.addAll(brokerNameSet);
}
for (String brokerName : brokerNameTreeSet) {
BrokerData brokerData = clusterInfo.getBrokerAddrTable().get(brokerName);
if (brokerData != null) {
Iterator<Map.Entry<Long, String>> itAddr = brokerData.getBrokerAddrs().entrySet().iterator();
while (itAddr.hasNext()) {
Map.Entry<Long, String> next1 = itAddr.next();
long inTotalYest = 0;
long outTotalYest = 0;
long inTotalToday = 0;
long outTotalToday = 0;
try {
KVTable kvTable = defaultMQAdminExt.fetchBrokerRuntimeStats(next1.getValue());
String msgPutTotalYesterdayMorning = kvTable.getTable().get("msgPutTotalYesterdayMorning");
String msgPutTotalTodayMorning = kvTable.getTable().get("msgPutTotalTodayMorning");
String msgPutTotalTodayNow = kvTable.getTable().get("msgPutTotalTodayNow");
String msgGetTotalYesterdayMorning = kvTable.getTable().get("msgGetTotalYesterdayMorning");
String msgGetTotalTodayMorning = kvTable.getTable().get("msgGetTotalTodayMorning");
String msgGetTotalTodayNow = kvTable.getTable().get("msgGetTotalTodayNow");
inTotalYest = Long.parseLong(msgPutTotalTodayMorning) - Long.parseLong(msgPutTotalYesterdayMorning);
outTotalYest = Long.parseLong(msgGetTotalTodayMorning) - Long.parseLong(msgGetTotalYesterdayMorning);
inTotalToday = Long.parseLong(msgPutTotalTodayNow) - Long.parseLong(msgPutTotalTodayMorning);
outTotalToday = Long.parseLong(msgGetTotalTodayNow) - Long.parseLong(msgGetTotalTodayMorning);
} catch (Exception ignored) {
}
System.out.printf("%-16s %-32s %14d %14d %14d %14d%n",
clusterName,
brokerName,
inTotalYest,
outTotalYest,
inTotalToday,
outTotalToday
);
}
}
}
}
}
private void printClusterBaseInfo(final Set<String> clusterNames,
final DefaultMQAdminExt defaultMQAdminExt,
final ClusterInfo clusterInfo) {
System.out.printf("%-22s %-22s %-4s %-22s %-16s %16s %30s %-22s %-11s %-12s %-8s %-10s%n",
"#Cluster Name",
"#Broker Name",
"#BID",
"#Addr",
"#Version",
"#InTPS(LOAD)",
"#OutTPS(LOAD)",
"#Timer(Progress)",
"#PCWait(ms)",
"#Hour",
"#SPACE",
"#ACTIVATED"
);
for (String clusterName : clusterNames) {
TreeSet<String> brokerNameTreeSet = new TreeSet<>();
Set<String> brokerNameSet = clusterInfo.getClusterAddrTable().get(clusterName);
if (brokerNameSet != null && !brokerNameSet.isEmpty()) {
brokerNameTreeSet.addAll(brokerNameSet);
}
for (String brokerName : brokerNameTreeSet) {
BrokerData brokerData = clusterInfo.getBrokerAddrTable().get(brokerName);
if (brokerData != null) {
Iterator<Map.Entry<Long, String>> itAddr = brokerData.getBrokerAddrs().entrySet().iterator();
while (itAddr.hasNext()) {
Map.Entry<Long, String> next1 = itAddr.next();
double in = 0;
double out = 0;
String version = "";
String sendThreadPoolQueueSize = "";
String pullThreadPoolQueueSize = "";
String ackThreadPoolQueueSize = "";
String sendThreadPoolQueueHeadWaitTimeMills = "";
String pullThreadPoolQueueHeadWaitTimeMills = "";
String ackThreadPoolQueueHeadWaitTimeMills = "";
String pageCacheLockTimeMills = "";
String earliestMessageTimeStamp = "";
String commitLogDiskRatio = "";
long timerReadBehind = 0;
long timerOffsetBehind = 0;
long timerCongestNum = 0;
float timerEnqueueTps = 0.0f;
float timerDequeueTps = 0.0f;
boolean isBrokerActive = false;
try {
KVTable kvTable = defaultMQAdminExt.fetchBrokerRuntimeStats(next1.getValue());
isBrokerActive = Boolean.parseBoolean(kvTable.getTable().get("brokerActive"));
String putTps = kvTable.getTable().get("putTps");
String getTransferredTps = kvTable.getTable().get("getTransferredTps");
sendThreadPoolQueueSize = kvTable.getTable().get("sendThreadPoolQueueSize");
pullThreadPoolQueueSize = kvTable.getTable().get("pullThreadPoolQueueSize");
ackThreadPoolQueueSize = kvTable.getTable().getOrDefault("ackThreadPoolQueueSize", "N");
sendThreadPoolQueueHeadWaitTimeMills = kvTable.getTable().get("sendThreadPoolQueueHeadWaitTimeMills");
pullThreadPoolQueueHeadWaitTimeMills = kvTable.getTable().get("pullThreadPoolQueueHeadWaitTimeMills");
ackThreadPoolQueueHeadWaitTimeMills = kvTable.getTable().getOrDefault("ackThreadPoolQueueHeadWaitTimeMills", "N");
pageCacheLockTimeMills = kvTable.getTable().get("pageCacheLockTimeMills");
earliestMessageTimeStamp = kvTable.getTable().get("earliestMessageTimeStamp");
commitLogDiskRatio = kvTable.getTable().get("commitLogDiskRatio");
try {
timerReadBehind = Long.parseLong(kvTable.getTable().get("timerReadBehind"));
timerOffsetBehind = Long.parseLong(kvTable.getTable().get("timerOffsetBehind"));
timerCongestNum = Long.parseLong(kvTable.getTable().get("timerCongestNum"));
timerEnqueueTps = Float.parseFloat(kvTable.getTable().get("timerEnqueueTps"));
timerDequeueTps = Float.parseFloat(kvTable.getTable().get("timerDequeueTps"));
} catch (Throwable ignored) {
}
version = kvTable.getTable().get("brokerVersionDesc");
if (StringUtils.isNotBlank(putTps)) {
String[] tpss = putTps.split(" ");
if (tpss.length > 0) {
in = Double.parseDouble(tpss[0]);
}
}
if (StringUtils.isNotBlank(getTransferredTps)) {
String[] tpss = getTransferredTps.split(" ");
if (tpss.length > 0) {
out = Double.parseDouble(tpss[0]);
}
}
} catch (Exception e) {
e.printStackTrace();
}
double hour = 0.0;
double space = 0.0;
if (earliestMessageTimeStamp != null && earliestMessageTimeStamp.length() > 0) {
long mills = System.currentTimeMillis() - Long.parseLong(earliestMessageTimeStamp);
hour = mills / 1000.0 / 60.0 / 60.0;
}
if (commitLogDiskRatio != null && commitLogDiskRatio.length() > 0) {
space = Double.parseDouble(commitLogDiskRatio);
}
System.out.printf("%-22s %-22s %-4s %-22s %-16s %16s %30s %-22s %11s %-12s %-8s %10s%n",
clusterName,
brokerName,
next1.getKey(),
next1.getValue(),
version,
String.format("%9.2f(%s,%sms)", in, sendThreadPoolQueueSize, sendThreadPoolQueueHeadWaitTimeMills),
String.format("%9.2f(%s,%sms|%s,%sms)", out, pullThreadPoolQueueSize, pullThreadPoolQueueHeadWaitTimeMills, ackThreadPoolQueueSize, ackThreadPoolQueueHeadWaitTimeMills),
String.format("%d-%d(%.1fw, %.1f, %.1f)", timerReadBehind, timerOffsetBehind, timerCongestNum / 10000.0f, timerEnqueueTps, timerDequeueTps),
pageCacheLockTimeMills,
String.format("%2.2f", hour),
String.format("%.4f", space),
isBrokerActive
);
}
}
}
}
}
}
| ClusterListSubCommand |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/signature/ObjectKey.java | {
"start": 637,
"end": 1323
} | class ____ implements Key {
private final Object object;
public ObjectKey(@NonNull Object object) {
this.object = Preconditions.checkNotNull(object);
}
@Override
public String toString() {
return "ObjectKey{" + "object=" + object + '}';
}
@Override
public boolean equals(Object o) {
if (o instanceof ObjectKey) {
ObjectKey other = (ObjectKey) o;
return object.equals(other.object);
}
return false;
}
@Override
public int hashCode() {
return object.hashCode();
}
@Override
public void updateDiskCacheKey(@NonNull MessageDigest messageDigest) {
messageDigest.update(object.toString().getBytes(CHARSET));
}
}
| ObjectKey |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/util/Annotations.java | {
"start": 475,
"end": 1042
} | interface ____
{
/**
* Main access method used to find value for given annotation.
*/
public <A extends Annotation> A get(Class<A> cls);
/**
* Access method that returns a stream of all annotations contained.
*
* @since 3.0
*/
public abstract Stream<Annotation> values();
public boolean has(Class<? extends Annotation> cls);
public boolean hasOneOf(Class<? extends Annotation>[] annoClasses);
/**
* Returns number of annotation entries in this collection.
*/
public int size();
}
| Annotations |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/origin/OriginLookup.java | {
"start": 721,
"end": 996
} | interface ____ may be implemented by an object that can lookup {@link Origin}
* information from a given key. Can be used to add origin support to existing classes.
*
* @param <K> the lookup key type
* @author Phillip Webb
* @since 2.0.0
*/
@FunctionalInterface
public | that |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/DefaultValueComponents.java | {
"start": 1185,
"end": 14549
} | class ____ {
private static final Logger log = Logger.getLogger( DefaultValueComponents.class );
private Integer id0;
private Integer id1;
private Integer id2;
private Integer id3;
private Integer id4;
private Integer id5;
private Integer id6;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
// Revision 1
em.getTransaction().begin();
DefaultValueComponentTestEntity cte0 = DefaultValueComponentTestEntity
.of( null );
DefaultValueComponentTestEntity cte1 = DefaultValueComponentTestEntity
.of( DefaultValueComponent1.of( "c1-str1", null ) );
DefaultValueComponentTestEntity cte2 = DefaultValueComponentTestEntity
.of(
DefaultValueComponent1.of(
"c1-str1", DefaultValueComponent2
.of( "c2-str1", "c2-str2" )
)
);
DefaultValueComponentTestEntity cte3 = DefaultValueComponentTestEntity
.of(
DefaultValueComponent1.of(
null, DefaultValueComponent2.of(
"c2-str1", "c2-str2"
)
)
);
DefaultValueComponentTestEntity cte4 = DefaultValueComponentTestEntity
.of(
DefaultValueComponent1.of(
null, DefaultValueComponent2.of(
null, "c2-str2"
)
)
);
DefaultValueComponentTestEntity cte5 = DefaultValueComponentTestEntity
.of(
DefaultValueComponent1.of(
null, DefaultValueComponent2.of(
"c2-str1", null
)
)
);
DefaultValueComponentTestEntity cte6 = DefaultValueComponentTestEntity
.of(
DefaultValueComponent1.of(
null, DefaultValueComponent2.of(
null, null
)
)
);
em.persist( cte0 );
em.persist( cte1 );
em.persist( cte2 );
em.persist( cte3 );
em.persist( cte4 );
em.persist( cte5 );
em.persist( cte6 );
em.getTransaction().commit();
// Revision 2
em.getTransaction().begin();
cte0 = em.find( DefaultValueComponentTestEntity.class, cte0.getId() );
cte1 = em.find( DefaultValueComponentTestEntity.class, cte1.getId() );
cte2 = em.find( DefaultValueComponentTestEntity.class, cte2.getId() );
cte3 = em.find( DefaultValueComponentTestEntity.class, cte3.getId() );
cte4 = em.find( DefaultValueComponentTestEntity.class, cte4.getId() );
cte5 = em.find( DefaultValueComponentTestEntity.class, cte5.getId() );
cte6 = em.find( DefaultValueComponentTestEntity.class, cte6.getId() );
cte0.setComp1( DefaultValueComponent1.of( "upd-c1-str1", null ) );
cte1.setComp1(
DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", "upd-c2-str2" )
)
);
cte2.getComp1().getComp2().setStr1( "upd-c2-str1" );
cte3.getComp1().getComp2().setStr1( "upd-c2-str1" );
cte4.getComp1().getComp2().setStr1( "upd-c2-str1" );
cte5.getComp1().getComp2().setStr1( "upd-c2-str1" );
cte6.getComp1().getComp2().setStr1( "upd-c2-str1" );
em.getTransaction().commit();
// afterwards
id0 = cte0.getId();
id1 = cte1.getId();
id2 = cte2.getId();
id3 = cte3.getId();
id4 = cte4.getId();
id5 = cte5.getId();
id6 = cte6.getId();
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id0 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id1 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id2 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id3 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id4 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id5 ).toString() );
log.error( auditReader.getRevisions( DefaultValueComponentTestEntity.class, id6 ).toString() );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id0 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id1 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id2 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id3 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id4 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id5 ) );
assertEquals( Arrays.asList( 1, 2 ),
auditReader.getRevisions( DefaultValueComponentTestEntity.class, id6 ) );
} );
}
@Test
public void testHistoryOfId0(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id0, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id0, 2 );
log.error( "------------ id0 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
checkCorrectlyPersisted( em, id0, null, null );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of( id0, DefaultValueComponent1.of( null, null ) );
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of( id0, DefaultValueComponent1.of( "upd-c1-str1", null ) );
assertEquals( expectedVer1, ent1 );
assertEquals( expectedVer2, ent2 );
} );
}
@Test
public void testHistoryOfId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id1, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id1, 2 );
log.error( "------------ id1 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
checkCorrectlyPersisted( em, id1, null, "upd-c2-str1" );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of( id1, DefaultValueComponent1.of( "c1-str1", null ) );
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id1, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", "upd-c2-str2" )
)
);
assertEquals( expectedVer2, ent2 );
assertEquals( expectedVer1, ent1 );
} );
}
@Test
public void testHistoryOfId2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id2, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id2, 2 );
log.error( "------------ id2 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of(
id2, DefaultValueComponent1.of(
"c1-str1",
DefaultValueComponent2.of( "c2-str1", "c2-str2" )
)
);
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id2, DefaultValueComponent1.of(
"c1-str1",
DefaultValueComponent2.of( "upd-c2-str1", "c2-str2" )
)
);
assertEquals( expectedVer1, ent1 );
assertEquals( expectedVer2, ent2 );
} );
}
@Test
public void testHistoryOfId3(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id3, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id3, 2 );
log.error( "------------ id3 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of(
id3, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "c2-str1", "c2-str2" )
)
);
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id3, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", "c2-str2" )
)
);
assertEquals( expectedVer1, ent1 );
assertEquals( expectedVer2, ent2 );
} );
}
@Test
public void testHistoryOfId4(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id4, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id4, 2 );
log.error( "------------ id4 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of(
id4, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( null, "c2-str2" )
)
);
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id4, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", "c2-str2" )
)
);
assertEquals( expectedVer1, ent1 );
assertEquals( expectedVer2, ent2 );
} );
}
@Test
public void testHistoryOfId5(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id5, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id5, 2 );
log.error( "------------ id5 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of(
id5, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "c2-str1", null )
)
);
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id5, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", null )
)
);
assertEquals( expectedVer1, ent1 );
assertEquals( expectedVer2, ent2 );
} );
}
@Test
public void testHistoryOfId6(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
DefaultValueComponentTestEntity ent1 = auditReader.find( DefaultValueComponentTestEntity.class, id6, 1 );
DefaultValueComponentTestEntity ent2 = auditReader.find( DefaultValueComponentTestEntity.class, id6, 2 );
log.error( "------------ id6 -------------" );
log.error( ent1.toString() );
log.error( ent2.toString() );
DefaultValueComponentTestEntity expectedVer1 = DefaultValueComponentTestEntity
.of( id6, DefaultValueComponent1.of( null, null ) );
DefaultValueComponentTestEntity expectedVer2 = DefaultValueComponentTestEntity
.of(
id6, DefaultValueComponent1.of(
null, DefaultValueComponent2
.of( "upd-c2-str1", null )
)
);
assertEquals( expectedVer2, ent2 );
assertEquals( expectedVer1, ent1 );
} );
}
private void checkCorrectlyPersisted(
jakarta.persistence.EntityManager em,
Integer expectedId,
String expectedComp2Str1Rev1, String expectedComp2Str1Rev2) {
// Verify that the entity was correctly persisted
Long entCount = (Long) em.createQuery(
"select count(s) from DefaultValueComponentTestEntity s where s.id = "
+ expectedId.toString()
).getSingleResult();
Number auditCount = (Number) em.createNativeQuery(
"select count(id) from DefaultValueComponent_AUD s where s.id = "
+ expectedId.toString()
).getSingleResult();
String comp2Str1Rev1 = (String) em
.createNativeQuery(
"select COMP2_STR1 from DefaultValueComponent_AUD s where REV=1 and s.id = "
+ expectedId.toString()
).getSingleResult();
String comp2Str1Rev2 = (String) em
.createNativeQuery(
"select COMP2_STR1 from DefaultValueComponent_AUD s where REV=2 and s.id = "
+ expectedId.toString()
).getSingleResult();
assertEquals( Long.valueOf( 1L ), entCount );
assertEquals( Integer.valueOf( 2 ), auditCount.intValue() );
if ( expectedComp2Str1Rev1 == null ) {
assertNull( comp2Str1Rev1 );
}
else {
assertEquals( expectedComp2Str1Rev1, comp2Str1Rev1 );
}
if ( expectedComp2Str1Rev2 == null ) {
assertNull( comp2Str1Rev2 );
}
else {
assertEquals( expectedComp2Str1Rev2, comp2Str1Rev2 );
}
}
}
| DefaultValueComponents |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/orderby/OrderByExpressionNotInSelectTest.java | {
"start": 4158,
"end": 4237
} | class ____ {
@Id
private Integer id;
private String destination;
}
}
| Travel |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/subscribers/DisposableSubscriberTest.java | {
"start": 1078,
"end": 3239
} | class ____<T> extends DisposableSubscriber<T> {
int start;
final List<T> values = new ArrayList<>();
final List<Throwable> errors = new ArrayList<>();
int completions;
@Override
protected void onStart() {
request(1);
start++;
}
@Override
public void onNext(T value) {
values.add(value);
}
@Override
public void onError(Throwable e) {
errors.add(e);
}
@Override
public void onComplete() {
completions++;
}
}
@Test
public void normal() {
TestDisposableSubscriber<Integer> tc = new TestDisposableSubscriber<>();
assertFalse(tc.isDisposed());
assertEquals(0, tc.start);
assertTrue(tc.values.isEmpty());
assertTrue(tc.errors.isEmpty());
Flowable.just(1).subscribe(tc);
assertFalse(tc.isDisposed());
assertEquals(1, tc.start);
assertEquals(1, tc.values.get(0).intValue());
assertTrue(tc.errors.isEmpty());
}
@Test
public void startOnce() {
List<Throwable> error = TestHelper.trackPluginErrors();
try {
TestDisposableSubscriber<Integer> tc = new TestDisposableSubscriber<>();
tc.onSubscribe(new BooleanSubscription());
BooleanSubscription bs = new BooleanSubscription();
tc.onSubscribe(bs);
assertTrue(bs.isCancelled());
assertEquals(1, tc.start);
TestHelper.assertError(error, 0, IllegalStateException.class, EndConsumerHelper.composeMessage(tc.getClass().getName()));
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void dispose() {
TestDisposableSubscriber<Integer> tc = new TestDisposableSubscriber<>();
assertFalse(tc.isDisposed());
tc.dispose();
assertTrue(tc.isDisposed());
BooleanSubscription bs = new BooleanSubscription();
tc.onSubscribe(bs);
assertTrue(bs.isCancelled());
assertEquals(0, tc.start);
}
}
| TestDisposableSubscriber |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/spi/SubselectFetch.java | {
"start": 903,
"end": 3687
} | class ____ {
private final QuerySpec loadingSqlAst;
private final TableGroup ownerTableGroup;
private final JdbcParametersList loadingJdbcParameters;
private final JdbcParameterBindings loadingJdbcParameterBindings;
private final Set<EntityKey> resultingEntityKeys;
public SubselectFetch(
QuerySpec loadingSqlAst,
TableGroup ownerTableGroup,
JdbcParametersList loadingJdbcParameters,
JdbcParameterBindings loadingJdbcParameterBindings,
Set<EntityKey> resultingEntityKeys) {
this.loadingSqlAst = loadingSqlAst;
this.ownerTableGroup = ownerTableGroup;
this.loadingJdbcParameters = loadingJdbcParameters;
this.loadingJdbcParameterBindings = loadingJdbcParameterBindings;
this.resultingEntityKeys = resultingEntityKeys;
}
public JdbcParametersList getLoadingJdbcParameters() {
// todo (6.0) : do not believe this is needed
// - see org.hibernate.loader.ast.internal.LoaderSelectBuilder.generateSelect(org.hibernate.engine.spi.SubselectFetch)
return loadingJdbcParameters;
}
/**
* The SQL AST select from which the owner was loaded
*/
public QuerySpec getLoadingSqlAst() {
return loadingSqlAst;
}
/**
* The TableGroup for the owner within the {@link #getLoadingSqlAst()}
*/
public TableGroup getOwnerTableGroup() {
return ownerTableGroup;
}
/**
* The JDBC parameter bindings related to {@link #getLoadingSqlAst()} for
* the specific execution that loaded the owners
*/
public JdbcParameterBindings getLoadingJdbcParameterBindings() {
return loadingJdbcParameterBindings;
}
/**
*The entity-keys of all owners loaded from a particular execution
* <p>
* Used for "empty collection" handling mostly
*/
public Set<EntityKey> getResultingEntityKeys() {
return resultingEntityKeys;
}
@Override
public String toString() {
return "SubselectFetch(" + ownerTableGroup.getNavigablePath() + ")";
}
public static RegistrationHandler createRegistrationHandler(
BatchFetchQueue batchFetchQueue,
SelectStatement sqlAst,
TableGroup tableGroup,
JdbcParametersList jdbcParameters,
JdbcParameterBindings jdbcParameterBindings) {
return new StandardRegistrationHandler(
batchFetchQueue,
sqlAst,
tableGroup,
jdbcParameters,
jdbcParameterBindings
);
}
public static RegistrationHandler createRegistrationHandler(
BatchFetchQueue batchFetchQueue,
SelectStatement sqlAst,
JdbcParametersList jdbcParameters,
JdbcParameterBindings jdbcParameterBindings) {
final List<TableGroup> roots = sqlAst.getQuerySpec().getFromClause().getRoots();
if ( roots.isEmpty() ) {
// we allow this now
return NO_OP_REG_HANDLER;
}
return createRegistrationHandler( batchFetchQueue, sqlAst, roots.get( 0 ), jdbcParameters, jdbcParameterBindings );
}
public | SubselectFetch |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/hint/BindingReflectionHintsRegistrarTests.java | {
"start": 14681,
"end": 14747
} | class ____ extends SampleEmptyClass {
}
static | SampleExtendingClass |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeCache.java | {
"start": 948,
"end": 4910
} | class ____<T> extends Maybe<T> implements MaybeObserver<T> {
@SuppressWarnings("rawtypes")
static final CacheDisposable[] EMPTY = new CacheDisposable[0];
@SuppressWarnings("rawtypes")
static final CacheDisposable[] TERMINATED = new CacheDisposable[0];
final AtomicReference<MaybeSource<T>> source;
final AtomicReference<CacheDisposable<T>[]> observers;
T value;
Throwable error;
@SuppressWarnings("unchecked")
public MaybeCache(MaybeSource<T> source) {
this.source = new AtomicReference<>(source);
this.observers = new AtomicReference<>(EMPTY);
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
CacheDisposable<T> parent = new CacheDisposable<>(observer, this);
observer.onSubscribe(parent);
if (add(parent)) {
if (parent.isDisposed()) {
remove(parent);
return;
}
} else {
if (!parent.isDisposed()) {
Throwable ex = error;
if (ex != null) {
observer.onError(ex);
} else {
T v = value;
if (v != null) {
observer.onSuccess(v);
} else {
observer.onComplete();
}
}
}
return;
}
MaybeSource<T> src = source.getAndSet(null);
if (src != null) {
src.subscribe(this);
}
}
@Override
public void onSubscribe(Disposable d) {
// deliberately ignored
}
@SuppressWarnings("unchecked")
@Override
public void onSuccess(T value) {
this.value = value;
for (CacheDisposable<T> inner : observers.getAndSet(TERMINATED)) {
if (!inner.isDisposed()) {
inner.downstream.onSuccess(value);
}
}
}
@SuppressWarnings("unchecked")
@Override
public void onError(Throwable e) {
this.error = e;
for (CacheDisposable<T> inner : observers.getAndSet(TERMINATED)) {
if (!inner.isDisposed()) {
inner.downstream.onError(e);
}
}
}
@SuppressWarnings("unchecked")
@Override
public void onComplete() {
for (CacheDisposable<T> inner : observers.getAndSet(TERMINATED)) {
if (!inner.isDisposed()) {
inner.downstream.onComplete();
}
}
}
boolean add(CacheDisposable<T> inner) {
for (;;) {
CacheDisposable<T>[] a = observers.get();
if (a == TERMINATED) {
return false;
}
int n = a.length;
@SuppressWarnings("unchecked")
CacheDisposable<T>[] b = new CacheDisposable[n + 1];
System.arraycopy(a, 0, b, 0, n);
b[n] = inner;
if (observers.compareAndSet(a, b)) {
return true;
}
}
}
@SuppressWarnings("unchecked")
void remove(CacheDisposable<T> inner) {
for (;;) {
CacheDisposable<T>[] a = observers.get();
int n = a.length;
if (n == 0) {
return;
}
int j = -1;
for (int i = 0; i < n; i++) {
if (a[i] == inner) {
j = i;
break;
}
}
if (j < 0) {
return;
}
CacheDisposable<T>[] b;
if (n == 1) {
b = EMPTY;
} else {
b = new CacheDisposable[n - 1];
System.arraycopy(a, 0, b, 0, j);
System.arraycopy(a, j + 1, b, j, n - j - 1);
}
if (observers.compareAndSet(a, b)) {
return;
}
}
}
static final | MaybeCache |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/typeutils/RowDataSerializerTest.java | {
"start": 8184,
"end": 9006
} | class ____ extends RowDataSerializerTest {
public SimpleRowDataSerializerTest() {
super(getRowSerializer(), getData());
}
private static RowData[] getData() {
GenericRowData row1 = new GenericRowData(2);
row1.setField(0, 1);
row1.setField(1, fromString("a"));
GenericRowData row2 = new GenericRowData(2);
row2.setField(0, 2);
row2.setField(1, null);
return new RowData[] {row1, row2};
}
private static RowDataSerializer getRowSerializer() {
InternalTypeInfo<RowData> typeInfo =
InternalTypeInfo.ofFields(new IntType(), VarCharType.STRING_TYPE);
return typeInfo.toRowSerializer();
}
}
static final | SimpleRowDataSerializerTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/visitors/PluginVisitors.java | {
"start": 1244,
"end": 1442
} | class ____ {
private static final Logger LOGGER = StatusLogger.getLogger();
private PluginVisitors() {}
/**
* Creates a PluginVisitor instance for the given annotation | PluginVisitors |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/PathMatcherFactory.java | {
"start": 2020,
"end": 7004
} | interface ____ extends Service {
/**
* Creates a path matcher for filtering files based on include and exclude patterns.
* <p>
* The pathnames used for matching will be relative to the specified base directory
* and use {@code '/'} as separator, regardless of the hosting operating system.
*
* @param baseDirectory the base directory for relativizing paths during matching
* @param includes the patterns of files to include, or null/empty for including all files
* @param excludes the patterns of files to exclude, or null/empty for no exclusion
* @param useDefaultExcludes whether to augment excludes with default SCM exclusion patterns
* @return a PathMatcher that can be used to test if paths should be included
* @throws NullPointerException if baseDirectory is null
*/
@Nonnull
PathMatcher createPathMatcher(
@Nonnull Path baseDirectory,
Collection<String> includes,
Collection<String> excludes,
boolean useDefaultExcludes);
/**
* Creates a path matcher for filtering files based on include and exclude patterns,
* without using default exclusion patterns.
* <p>
* This is equivalent to calling {@link #createPathMatcher(Path, Collection, Collection, boolean)}
* with {@code useDefaultExcludes = false}.
*
* @param baseDirectory the base directory for relativizing paths during matching
* @param includes the patterns of files to include, or null/empty for including all files
* @param excludes the patterns of files to exclude, or null/empty for no exclusion
* @return a PathMatcher that can be used to test if paths should be included
* @throws NullPointerException if baseDirectory is null
*/
@Nonnull
default PathMatcher createPathMatcher(
@Nonnull Path baseDirectory, Collection<String> includes, Collection<String> excludes) {
return createPathMatcher(baseDirectory, includes, excludes, false);
}
/**
* Creates a path matcher that includes all files except those matching the exclude patterns.
* <p>
* This is equivalent to calling {@link #createPathMatcher(Path, Collection, Collection, boolean)}
* with {@code includes = null}.
*
* @param baseDirectory the base directory for relativizing paths during matching
* @param excludes the patterns of files to exclude, or null/empty for no exclusion
* @param useDefaultExcludes whether to augment excludes with default SCM exclusion patterns
* @return a PathMatcher that can be used to test if paths should be included
* @throws NullPointerException if baseDirectory is null
*/
@Nonnull
default PathMatcher createExcludeOnlyMatcher(
@Nonnull Path baseDirectory, Collection<String> excludes, boolean useDefaultExcludes) {
return createPathMatcher(baseDirectory, null, excludes, useDefaultExcludes);
}
/**
* Creates a path matcher that only includes files matching the include patterns.
* <p>
* This is equivalent to calling {@link #createPathMatcher(Path, Collection, Collection, boolean)}
* with {@code excludes = null} and {@code useDefaultExcludes = false}.
*
* @param baseDirectory the base directory for relativizing paths during matching
* @param includes the patterns of files to include, or null/empty for including all files
* @return a PathMatcher that can be used to test if paths should be included
* @throws NullPointerException if baseDirectory is null
*/
@Nonnull
default PathMatcher createIncludeOnlyMatcher(@Nonnull Path baseDirectory, Collection<String> includes) {
return createPathMatcher(baseDirectory, includes, null, false);
}
/**
* Returns a filter for directories that may contain paths accepted by the given matcher.
* The given path matcher should be an instance created by this service.
* The path matcher returned by this method expects <em>directory</em> paths.
* If that matcher returns {@code false}, then the directory will definitively not contain
* the paths selected by the matcher given in argument to this method.
* In such case, the whole directory and all its sub-directories can be skipped.
* In case of doubt, or if the matcher given in argument is not recognized by this method,
* then the matcher returned by this method will return {@code true}.
*
* @param fileMatcher a matcher created by one of the other methods of this interface
* @return filter for directories that may contain the selected files
* @throws NullPointerException if fileMatcher is null
*/
@Nonnull
PathMatcher deriveDirectoryMatcher(@Nonnull PathMatcher fileMatcher);
/**
* Returns the path matcher that unconditionally returns {@code true} for all files.
* It should be the matcher returned by the other methods of this | PathMatcherFactory |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/csv/BindyCsvDataFormat.java | {
"start": 2142,
"end": 15522
} | class ____ extends BindyAbstractDataFormat {
private static final Logger LOG = LoggerFactory.getLogger(BindyCsvDataFormat.class);
public BindyCsvDataFormat() {
}
public BindyCsvDataFormat(Class<?> type) {
super(type);
}
@Override
public String getDataFormatName() {
return "bindyCsv";
}
@Override
@SuppressWarnings("unchecked")
public void marshal(Exchange exchange, Object body, OutputStream outputStream) throws Exception {
BindyCsvFactory factory = (BindyCsvFactory) getFactory();
org.apache.camel.util.ObjectHelper.notNull(factory, "not instantiated");
// Get CRLF
byte[] bytesCRLF = ConverterUtils.getByteReturn(factory.getCarriageReturn());
if (factory.getGenerateHeaderColumnNames()) {
String result = factory.generateHeader();
byte[] bytes = exchange.getContext().getTypeConverter().convertTo(byte[].class, exchange, result);
outputStream.write(bytes);
// Add a carriage return
outputStream.write(bytesCRLF);
}
List<Map<String, Object>> models = new ArrayList<>();
// the body is not a prepared list of map that bindy expects so help a
// bit here and create one for us
if (body instanceof Map) {
// the body is already a map, and we do not want to iterate each element in the map,
// but keep the body as a map, so wrap as iterator
body = Collections.singleton(body).iterator();
}
for (Object model : ObjectHelper.createIterable(body)) {
if (model instanceof Map) {
models.add((Map<String, Object>) model);
} else {
String name = model.getClass().getName();
Map<String, Object> row = new HashMap<>(1);
row.put(name, model);
row.putAll(createLinkedFieldsModel(model));
models.add(row);
}
}
Iterator<Map<String, Object>> modelsMap = models.iterator();
while (modelsMap.hasNext()) {
String result = factory.unbind(getCamelContext(), modelsMap.next());
byte[] bytes = exchange.getContext().getTypeConverter().convertTo(byte[].class, exchange, result);
outputStream.write(bytes);
if (factory.isEndWithLineBreak() || modelsMap.hasNext()) {
// Add a carriage return
outputStream.write(bytesCRLF);
}
}
}
/**
* check emptyStream and if CVSRecord is allow to process emptyStreams avoid IllegalArgumentException and return
* empty list when unmarshalling
*/
private boolean checkEmptyStream(BindyCsvFactory factory, InputStream inputStream) throws IOException {
boolean allowEmptyStream = factory.isAllowEmptyStream();
boolean isStreamEmpty = false;
boolean canReturnEmptyListOfModels = false;
if (inputStream == null || inputStream.available() == 0) {
isStreamEmpty = true;
}
if (isStreamEmpty && allowEmptyStream) {
canReturnEmptyListOfModels = true;
}
return canReturnEmptyListOfModels;
}
@Override
public Object unmarshal(Exchange exchange, InputStream inputStream) throws Exception {
BindyCsvFactory factory = (BindyCsvFactory) getFactory();
org.apache.camel.util.ObjectHelper.notNull(factory, "not instantiated");
// List of Pojos
List<Map<String, Object>> models = new ArrayList<>();
InputStreamReader in = null;
try {
if (checkEmptyStream(factory, inputStream)) {
return models;
}
in = new InputStreamReader(inputStream, ExchangeHelper.getCharsetName(exchange));
// Retrieve the separator defined to split the record
String separator = factory.getSeparator();
String quote = factory.getQuote();
org.apache.camel.util.ObjectHelper.notNull(separator,
"The separator has not been defined in the annotation @CsvRecord or not instantiated during initModel.");
Boolean removeQuotes = factory.getRemoveQuotes();
AtomicInteger count = new AtomicInteger();
// Use a Stream to stream a file across.
try (Stream<String> lines = new BufferedReader(in).lines()) {
int linesToSkip = 0;
// If the first line of the CSV file contains columns name, then we
// skip this line
if (factory.getSkipFirstLine()) {
linesToSkip = 1;
}
// Consume the lines in the file via a consumer method, passing in state as necessary.
// If the internals of the consumer fail, we unrap the checked exception upstream.
try {
lines.skip(linesToSkip)
.forEachOrdered(consumeFile(factory, models, separator, removeQuotes, quote, count));
} catch (WrappedException e) {
throw e.getWrappedException();
}
// BigIntegerFormatFactory if models list is empty or not
// If this is the case (correspond to an empty stream, ...)
if (models.isEmpty() && !isAllowEmptyStream()) {
throw new java.lang.IllegalArgumentException("No records have been defined in the CSV");
} else {
return extractUnmarshalResult(models);
}
}
} finally {
if (in != null) {
IOHelper.close(in, "in", LOG);
}
}
}
private Consumer<String> consumeFile(
BindyCsvFactory factory, List<Map<String, Object>> models,
String separator, Boolean removeQuotes, String quote, AtomicInteger count) {
return line -> {
try {
String trimmedLine;
// Trim the line coming in to remove any trailing whitespace
if (factory.isTrimLine()) {
// if separator is a tab, don't trim any leading whitespaces (could be empty values separated by tabs)
if (separator.equals("\t")) {
// trim only trailing whitespaces (remove new lines etc but keep tab character)
trimmedLine = line.replaceAll("[ \\n\\x0B\\f\\r]+$", "");
} else {
trimmedLine = line.trim();
}
} else {
// no trim
trimmedLine = line;
}
// Increment counter
count.incrementAndGet();
Map<String, Object> model;
// Create POJO where CSV data will be stored
model = factory.factory();
// Split the CSV record according to the separator defined in
// annotated class @CSVRecord
Pattern pattern = Pattern.compile(separator);
Matcher matcher = pattern.matcher(trimmedLine);
List<String> separators = new ArrayList<>();
// Retrieve separators for each match
while (matcher.find()) {
separators.add(matcher.group());
}
// Add terminal separator
if (!separators.isEmpty()) {
separators.add(separators.get(separators.size() - 1));
}
Pattern delimiterPattern = Pattern.compile(Pattern.quote(quote) + "(.*?)" + Pattern.quote(quote));
Matcher delimiterMatcher = delimiterPattern.matcher(trimmedLine);
int escapedSubstringToHandle = 0;
// Find and print delimited substrings
while (delimiterMatcher.find()) {
String substring = delimiterMatcher.group();
escapedSubstringToHandle += pattern.split(substring).length - 1;
}
String[] tokens = pattern.split(trimmedLine,
factory.getAutospanLine() ? factory.getMaxpos() + escapedSubstringToHandle : -1);
List<String> result = Arrays.asList(tokens);
// must unquote tokens before use
if (Boolean.TRUE.equals(removeQuotes)) {
result = unquoteTokens(result, separators, quote);
}
if (result.isEmpty()) {
throw new IllegalArgumentException("No records have been defined in the CSV");
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Size of the record splitted : {}", result.size());
}
// Bind data from CSV record with model classes
factory.bind(getCamelContext(), result, model, count.get());
// Link objects together
factory.link(model);
// Add objects graph to the list
models.add(model);
LOG.debug("Graph of objects created: {}", model);
}
} catch (Exception e) {
throw new WrappedException(e);
}
};
}
/**
* Unquote the tokens, by removing leading and trailing quote chars, as will handling fixing broken tokens which may
* have been split by a separator inside a quote.
*/
private List<String> unquoteTokens(List<String> result, List<String> separators, String quote) {
// a current quoted token which we assemble from the broken pieces
// we need to do this as we use the split method on the String class
// to split the line using regular expression, and it does not handle
// if the separator char is also inside a quoted token, therefore we
// need
// to fix this afterwards
StringBuilder current = new StringBuilder(256);
boolean inProgress = false;
List<String> answer = new ArrayList<>();
int idxSeparator = 0;
// parsing assumes matching close and end quotes
for (String s : result) {
boolean canStart = false;
boolean canClose = false;
boolean cutStart = false;
boolean cutEnd = false;
if (s.startsWith(quote)) {
// token is just a quote
if (s.length() == 1) {
s = "";
// if token is a quote then it can only close processing if
// it has begun
if (inProgress) {
canClose = true;
} else {
canStart = true;
}
} else {
// quote+"not empty"
cutStart = true;
canStart = true;
}
}
// "not empty"+quote
if (s.endsWith(quote)) {
boolean escaped = false;
if (quote.equals("\"")) {
int i;
for (i = s.length() - 2; i > 0; i--) {
char ch = s.charAt(i);
if (ch == '"' && (canStart || inProgress)) {
escaped = !escaped;
} else if (ch == '\\') {
continue;
} else {
break;
}
}
if (i == 0 && s.charAt(i) == '"' && inProgress) {
escaped = !escaped;
}
}
if (!escaped) {
cutEnd = true;
canClose = true;
}
}
// optimize to only substring once
if (cutEnd || cutStart) {
s = s.substring(cutStart ? 1 : 0, cutEnd ? s.length() - 1 : s.length());
}
// are we in progress of rebuilding a broken token
if (inProgress) {
current.append(separators.get(idxSeparator));
current.append(s);
if (canClose) {
answer.add(current.toString());
current.setLength(0);
inProgress = false;
}
} else {
if (canStart && !canClose) {
current.append(s);
inProgress = true;
} else {
// case where no quotes
answer.add(s);
}
}
idxSeparator++;
}
// any left over from current?
if (current.length() > 0) {
answer.add(current.toString());
current.setLength(0);
}
return answer;
}
@Override
protected BindyAbstractFactory createModelFactory(FormatFactory formatFactory) throws Exception {
BindyCsvFactory bindyCsvFactory = new BindyCsvFactory(getClassType());
bindyCsvFactory.setFormatFactory(formatFactory);
return bindyCsvFactory;
}
}
| BindyCsvDataFormat |
java | micronaut-projects__micronaut-core | router/src/main/java/io/micronaut/web/router/exceptions/DuplicateRouteException.java | {
"start": 872,
"end": 1946
} | class ____ extends RoutingException {
private final String uri;
private final List<UriRouteMatch<Object, Object>> uriRoutes;
/**
* @param uri The URI
* @param uriRoutes The routes
*/
public DuplicateRouteException(String uri, List<UriRouteMatch<Object, Object>> uriRoutes) {
super(buildMessage(uri, uriRoutes));
this.uri = uri;
this.uriRoutes = uriRoutes;
}
/**
* @return The uri
*/
public String getUri() {
return uri;
}
/**
* @return The routes which caused this exception
*/
public List<UriRouteMatch<Object, Object>> getUriRoutes() {
return this.uriRoutes;
}
private static String buildMessage(String uri, List<UriRouteMatch<Object, Object>> uriRoutes) {
return "More than 1 route matched the incoming request. The following routes matched "
+ uri + ": "
+ uriRoutes.stream()
.map((Object::toString))
.collect(Collectors.joining(", "));
}
}
| DuplicateRouteException |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/ExceptionDepthComparatorTests.java | {
"start": 3504,
"end": 3559
} | class ____ extends TargetException {
}
}
| NoDepthException |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/method/annotation/CurrentSecurityContextArgumentResolverTests.java | {
"start": 17389,
"end": 17810
} | class ____ implements SecurityContext {
private Authentication authentication;
@Override
public Authentication getAuthentication() {
return this.authentication;
}
@Override
public void setAuthentication(Authentication authentication) {
this.authentication = authentication;
}
}
@Target({ ElementType.PARAMETER })
@Retention(RetentionPolicy.RUNTIME)
@CurrentSecurityContext
@ | CustomSecurityContext |
java | quarkusio__quarkus | integration-tests/oidc-dev-services/src/main/java/io/quarkus/it/oidc/dev/services/ExpiredIdentityAugmentor.java | {
"start": 530,
"end": 2064
} | class ____ implements SecurityIdentityAugmentor {
private volatile int invocationCount = 0;
@Override
public Uni<SecurityIdentity> augment(SecurityIdentity identity, AuthenticationRequestContext context,
Map<String, Object> attributes) {
if (shouldNotAugment(attributes)) {
return Uni.createFrom().item(identity);
}
return Uni
.createFrom()
.item(QuarkusSecurityIdentity
.builder(identity)
.addAttribute("quarkus.identity.expire-time", expireIn2Seconds())
.build());
}
@Override
public Uni<SecurityIdentity> augment(SecurityIdentity securityIdentity,
AuthenticationRequestContext authenticationRequestContext) {
throw new IllegalStateException();
}
private boolean shouldNotAugment(Map<String, Object> attributes) {
RoutingContext routingContext = HttpSecurityUtils.getRoutingContextAttribute(attributes);
if (routingContext == null) {
return true;
}
if (!routingContext.normalizedPath().contains("/expired-updated-identity")) {
return true;
}
invocationCount++;
boolean firstInvocation = invocationCount == 1;
return firstInvocation;
}
private static long expireIn2Seconds() {
return Duration.ofMillis(System.currentTimeMillis())
.plusSeconds(2)
.toSeconds();
}
}
| ExpiredIdentityAugmentor |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/TransformationStageTest.java | {
"start": 1574,
"end": 3395
} | class ____ {
private final SourceRecord initial = new SourceRecord(Map.of("initial", 1), null, null, null, null);
private final SourceRecord transformed = new SourceRecord(Map.of("transformed", 2), null, null, null, null);
@Test
public void apply() throws Exception {
applyAndAssert(true, false, transformed);
applyAndAssert(true, true, initial);
applyAndAssert(false, false, initial);
applyAndAssert(false, true, transformed);
}
@SuppressWarnings("unchecked")
private void applyAndAssert(boolean predicateResult, boolean negate, SourceRecord expectedResult) throws Exception {
Plugin<Predicate<SourceRecord>> predicatePlugin = mock(Plugin.class);
Predicate<SourceRecord> predicate = mock(Predicate.class);
when(predicate.test(any())).thenReturn(predicateResult);
when(predicatePlugin.get()).thenReturn(predicate);
Plugin<Transformation<SourceRecord>> transformationPlugin = mock(Plugin.class);
Transformation<SourceRecord> transformation = mock(Transformation.class);
if (expectedResult == transformed) {
when(transformationPlugin.get()).thenReturn(transformation);
when(transformation.apply(any())).thenReturn(transformed);
}
TransformationStage<SourceRecord> stage = new TransformationStage<>(
predicatePlugin,
"testPredicate",
null,
negate,
transformationPlugin,
"testTransformation",
null,
TestPlugins.noOpLoaderSwap()
);
assertEquals(expectedResult, stage.apply(initial));
stage.close();
verify(predicatePlugin).close();
verify(transformationPlugin).close();
}
}
| TransformationStageTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/event/EventPublishingTestExecutionListenerIntegrationTests.java | {
"start": 7584,
"end": 7963
} | class ____ {
@Traceable
@Test
public void traceableTest() {
/* no-op */
}
@Test
public void standardTest() {
/* no-op */
}
@Test
public void testWithFailingEventListener() {
/* no-op */
}
@Test
public void testWithFailingAsyncEventListener() {
/* no-op */
}
}
@Configuration
@EnableAsync(proxyTargetClass = true)
static | ExampleTestCase |
java | google__dagger | javatests/dagger/internal/codegen/DependencyCycleValidationTest.java | {
"start": 20749,
"end": 21160
} | interface ____ {",
" Child build();",
" }",
"}");
// Grandchild has no entry point that depends on the cycle. http://b/111317986
Source grandchild =
CompilerTests.javaSource(
"test.Grandchild",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
" | Builder |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/authentication/JwtIssuerReactiveAuthenticationManagerResolver.java | {
"start": 2514,
"end": 2792
} | class ____ the Issuer from the `iss` claim found in the
* {@link ServerWebExchange}'s
* <a href="https://tools.ietf.org/html/rfc6750#section-1.2" target="_blank">Bearer
* Token</a>.
*
* @author Josh Cummings
* @author Roman Matiushchenko
* @since 5.3
*/
public final | derives |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/exc/CollectedProblem.java | {
"start": 1457,
"end": 3839
} | class ____ {
/**
* Maximum length for raw value strings before truncation.
*/
private static final int MAX_RAW_VALUE_LENGTH = 200;
private final JsonPointer path;
private final String message;
private final JavaType targetType;
private final TokenStreamLocation location;
private final Object rawValue; // @Nullable
private final JsonToken token; // @Nullable
public CollectedProblem(JsonPointer path, String message,
JavaType targetType, TokenStreamLocation location,
Object rawValue, JsonToken token) {
this.path = Objects.requireNonNull(path, "path");
this.message = Objects.requireNonNull(message, "message");
this.targetType = targetType;
this.location = location;
this.rawValue = truncateIfNeeded(rawValue);
this.token = token;
}
/**
* @return JSON Pointer path to the problematic field (e.g., "/items/1/date").
* Empty string ("") for root-level problems.
*/
public JsonPointer getPath() { return path; }
/**
* @return Human-readable error message
*/
public String getMessage() { return message; }
/**
* @return Expected Java type for the field (may be null)
*/
public JavaType getTargetType() { return targetType; }
/**
* @return Location in source JSON where problem occurred (may be null)
*/
public TokenStreamLocation getLocation() { return location; }
/**
* @return Raw value from JSON that caused the problem (may be null or truncated).
* For unknown properties, this is null; use the path to identify the property name.
*/
public Object getRawValue() { return rawValue; }
/**
* @return JSON token type at the error location (may be null)
*/
public JsonToken getToken() { return token; }
private static Object truncateIfNeeded(Object value) {
if (value instanceof String) {
String s = (String) value;
if (s.length() > MAX_RAW_VALUE_LENGTH) {
return s.substring(0, MAX_RAW_VALUE_LENGTH - 3) + "...";
}
}
return value;
}
@Override
public String toString() {
return String.format("CollectedProblem[path=%s, message=%s, targetType=%s]",
path, message, targetType);
}
}
| CollectedProblem |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java | {
"start": 4666,
"end": 20208
} | class ____ extends GeneratorAdapter {
private final BitSet statements;
private final CompilerSettings settings;
private final Deque<List<Type>> stringConcatArgs = new ArrayDeque<>();
public MethodWriter(int access, Method method, ClassVisitor cw, BitSet statements, CompilerSettings settings) {
super(
Opcodes.ASM5,
cw.visitMethod(access, method.getName(), method.getDescriptor(), null, null),
access,
method.getName(),
method.getDescriptor()
);
this.statements = statements;
this.settings = settings;
}
/**
* Marks a new statement boundary.
* <p>
* This is invoked for each statement boundary (leaf {@code S*} nodes).
*/
public void writeStatementOffset(Location location) {
int offset = location.getOffset();
// ensure we don't have duplicate stuff going in here. can catch bugs
// (e.g. nodes get assigned wrong offsets by antlr walker)
// TODO: introduce a way to ignore internal statements so this assert is not triggered
// TODO: https://github.com/elastic/elasticsearch/issues/51836
// assert statements.get(offset) == false;
statements.set(offset);
}
/**
* Encodes the offset into the line number table as {@code offset + 1}.
* <p>
* This is invoked before instructions that can hit exceptions.
*/
public void writeDebugInfo(Location location) {
// TODO: maybe track these in bitsets too? this is trickier...
Label label = new Label();
visitLabel(label);
visitLineNumber(location.getOffset() + 1, label);
}
public void writeLoopCounter(int slot, Location location) {
assert slot != -1;
writeDebugInfo(location);
final Label end = new Label();
iinc(slot, -1);
visitVarInsn(Opcodes.ILOAD, slot);
push(0);
ifICmp(GeneratorAdapter.GT, end);
throwException(PAINLESS_ERROR_TYPE, "The maximum number of statements that can be executed in a loop has been reached.");
mark(end);
}
public void writeCast(PainlessCast cast) {
if (cast == null) {
return;
}
if (cast.originalType == char.class && cast.targetType == String.class) {
invokeStatic(UTILITY_TYPE, CHAR_TO_STRING);
} else if (cast.originalType == String.class && cast.targetType == char.class) {
invokeStatic(UTILITY_TYPE, STRING_TO_CHAR);
} else if (cast.unboxOriginalType != null && cast.boxTargetType != null) {
unbox(getType(cast.unboxOriginalType));
writeCast(cast.unboxOriginalType, cast.boxTargetType);
box(getType(cast.boxTargetType));
} else if (cast.unboxOriginalType != null) {
unbox(getType(cast.unboxOriginalType));
writeCast(cast.originalType, cast.targetType);
} else if (cast.unboxTargetType != null) {
writeCast(cast.originalType, cast.targetType);
unbox(getType(cast.unboxTargetType));
} else if (cast.boxOriginalType != null) {
box(getType(cast.boxOriginalType));
writeCast(cast.originalType, cast.targetType);
} else if (cast.boxTargetType != null) {
writeCast(cast.originalType, cast.targetType);
box(getType(cast.boxTargetType));
} else if (cast.originalType == def.class) {
if (cast.explicitCast) {
if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN);
else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_EXPLICIT);
else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_EXPLICIT);
else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_EXPLICIT);
else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_EXPLICIT);
else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_EXPLICIT);
else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_EXPLICIT);
else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_EXPLICIT);
else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN);
else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_EXPLICIT);
else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_EXPLICIT);
else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_CHARACTER_EXPLICIT);
else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_EXPLICIT);
else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_EXPLICIT);
else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_EXPLICIT);
else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_EXPLICIT);
else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_EXPLICIT);
else {
writeCast(cast.originalType, cast.targetType);
}
} else {
if (cast.targetType == boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BOOLEAN);
else if (cast.targetType == byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_BYTE_IMPLICIT);
else if (cast.targetType == short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_SHORT_IMPLICIT);
else if (cast.targetType == char.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_CHAR_IMPLICIT);
else if (cast.targetType == int.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_INT_IMPLICIT);
else if (cast.targetType == long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_LONG_IMPLICIT);
else if (cast.targetType == float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_FLOAT_IMPLICIT);
else if (cast.targetType == double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_P_DOUBLE_IMPLICIT);
else if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BOOLEAN);
else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT);
else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT);
else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_CHARACTER_IMPLICIT);
else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_INTEGER_IMPLICIT);
else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_LONG_IMPLICIT);
else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_FLOAT_IMPLICIT);
else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_DOUBLE_IMPLICIT);
else if (cast.targetType == String.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_STRING_IMPLICIT);
else {
writeCast(cast.originalType, cast.targetType);
}
}
} else {
writeCast(cast.originalType, cast.targetType);
}
}
private void writeCast(Class<?> from, Class<?> to) {
if (from.equals(to)) {
return;
}
if (from != boolean.class && from.isPrimitive() && to != boolean.class && to.isPrimitive()) {
cast(getType(from), getType(to));
} else {
if (to.isAssignableFrom(from) == false) {
checkCast(getType(to));
}
}
}
/**
* Proxy the box method to use valueOf instead to ensure that the modern boxing methods are used.
*/
@Override
public void box(Type type) {
valueOf(type);
}
public static Type getType(Class<?> clazz) {
if (clazz.isArray()) {
Class<?> component = clazz.getComponentType();
int dimensions = 1;
while (component.isArray()) {
component = component.getComponentType();
++dimensions;
}
if (component == def.class) {
char[] braces = new char[dimensions];
Arrays.fill(braces, '[');
return Type.getType(new String(braces) + Type.getType(Object.class).getDescriptor());
}
} else if (clazz == def.class) {
return Type.getType(Object.class);
}
return Type.getType(clazz);
}
/** Starts a new string concat.
* @return the size of arguments pushed to stack (the object that does string concats, e.g. a StringBuilder)
*/
public List<Type> writeNewStrings() {
List<Type> list = new ArrayList<>();
stringConcatArgs.push(list);
return list;
}
public void writeAppendStrings(Class<?> clazz) {
List<Type> currentConcat = stringConcatArgs.peek();
currentConcat.add(getType(clazz));
// prevent too many concat args.
// If there are too many, do the actual concat:
if (currentConcat.size() >= MAX_STRING_CONCAT_ARGS) {
writeToStrings();
currentConcat = writeNewStrings();
// add the return value type as new first param for next concat:
currentConcat.add(STRING_TYPE);
}
}
public void writeToStrings() {
final String desc = Type.getMethodDescriptor(STRING_TYPE, stringConcatArgs.pop().toArray(Type[]::new));
invokeDynamic("concat", desc, STRING_CONCAT_BOOTSTRAP_HANDLE);
}
/** Writes a dynamic binary instruction: returnType, lhs, and rhs can be different */
public void writeDynamicBinaryInstruction(
Location location,
Class<?> returnType,
Class<?> lhs,
Class<?> rhs,
Operation operation,
int flags
) {
Type methodType = Type.getMethodType(getType(returnType), getType(lhs), getType(rhs));
switch (operation) {
case MUL -> invokeDefCall("mul", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case DIV -> invokeDefCall("div", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case REM -> invokeDefCall("rem", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case ADD -> {
// if either side is primitive, then the + operator should always throw NPE on null,
// so we don't need a special NPE guard.
// otherwise, we need to allow nulls for possible string concatenation.
boolean hasPrimitiveArg = lhs.isPrimitive() || rhs.isPrimitive();
if (hasPrimitiveArg == false) {
flags |= DefBootstrap.OPERATOR_ALLOWS_NULL;
}
invokeDefCall("add", methodType, DefBootstrap.BINARY_OPERATOR, flags);
}
case SUB -> invokeDefCall("sub", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case LSH -> invokeDefCall("lsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
case USH -> invokeDefCall("ush", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
case RSH -> invokeDefCall("rsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
case BWAND -> invokeDefCall("and", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case XOR -> invokeDefCall("xor", methodType, DefBootstrap.BINARY_OPERATOR, flags);
case BWOR -> invokeDefCall("or", methodType, DefBootstrap.BINARY_OPERATOR, flags);
default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
}
}
/** Writes a static binary instruction */
public void writeBinaryInstruction(Location location, Class<?> clazz, Operation operation) {
if ((clazz == float.class || clazz == double.class)
&& (operation == Operation.LSH
|| operation == Operation.USH
|| operation == Operation.RSH
|| operation == Operation.BWAND
|| operation == Operation.XOR
|| operation == Operation.BWOR)) {
throw location.createError(new IllegalStateException("Illegal tree structure."));
}
switch (operation) {
case MUL -> math(GeneratorAdapter.MUL, getType(clazz));
case DIV -> math(GeneratorAdapter.DIV, getType(clazz));
case REM -> math(GeneratorAdapter.REM, getType(clazz));
case ADD -> math(GeneratorAdapter.ADD, getType(clazz));
case SUB -> math(GeneratorAdapter.SUB, getType(clazz));
case LSH -> math(GeneratorAdapter.SHL, getType(clazz));
case USH -> math(GeneratorAdapter.USHR, getType(clazz));
case RSH -> math(GeneratorAdapter.SHR, getType(clazz));
case BWAND -> math(GeneratorAdapter.AND, getType(clazz));
case XOR -> math(GeneratorAdapter.XOR, getType(clazz));
case BWOR -> math(GeneratorAdapter.OR, getType(clazz));
default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
}
}
public void writeDup(final int size, final int xsize) {
if (size == 1) {
if (xsize == 2) {
dupX2();
} else if (xsize == 1) {
dupX1();
} else {
dup();
}
} else if (size == 2) {
if (xsize == 2) {
dup2X2();
} else if (xsize == 1) {
dup2X1();
} else {
dup2();
}
}
}
public void writePop(final int size) {
if (size == 1) {
pop();
} else if (size == 2) {
pop2();
}
}
@Override
public void endMethod() {
if (stringConcatArgs != null && stringConcatArgs.isEmpty() == false) {
throw new IllegalStateException("String concat bytecode not completed.");
}
super.endMethod();
}
@Override
public void visitEnd() {
throw new AssertionError("Should never call this method on MethodWriter, use endMethod() instead");
}
/**
* Writes a dynamic call for a def method.
* @param name method name
* @param methodType callsite signature
* @param flavor type of call
* @param params flavor-specific parameters
*/
public void invokeDefCall(String name, Type methodType, int flavor, Object... params) {
Object[] args = new Object[params.length + 2];
args[0] = settings.getInitialCallSiteDepth();
args[1] = flavor;
System.arraycopy(params, 0, args, 2, params.length);
invokeDynamic(name, methodType.getDescriptor(), DEF_BOOTSTRAP_HANDLE, args);
}
public void invokeMethodCall(PainlessMethod painlessMethod) {
Type type = Type.getType(painlessMethod.javaMethod().getDeclaringClass());
Method method = Method.getMethod(painlessMethod.javaMethod());
if (Modifier.isStatic(painlessMethod.javaMethod().getModifiers())) {
// invokeStatic assumes that the owner | MethodWriter |
java | apache__camel | components/camel-jgroups/src/test/java/org/apache/camel/component/jgroups/JGroupsClusterRouteTest.java | {
"start": 1646,
"end": 4682
} | class ____ extends RouteBuilder {
@Override
public void configure() {
from("jgroups:" + clusterName + "?enableViewMessages=true").filter(dropNonCoordinatorViews()).threads()
.delay(delayIfContextNotStarted(SECONDS.toMillis(15)))
.to("controlbus:route?routeId=masterRoute&action=start&async=true");
from("timer://master?repeatCount=1").routeId("masterRoute").autoStartup(false).to(masterMockUri);
}
}
@BeforeEach
public void setUp() throws Exception {
firstCamelContext = new DefaultCamelContext();
firstCamelContext.addRoutes(new Builder());
secondCamelContext = new DefaultCamelContext();
secondCamelContext.addRoutes(new Builder());
}
// Tests
@Test
public void shouldElectSecondNode() throws Exception {
expectMasterIs(firstCamelContext);
firstCamelContext.start();
assertMasterIs(firstCamelContext);
expectMasterIsNot(secondCamelContext);
secondCamelContext.start();
assertMasterIsNot(secondCamelContext);
expectMasterIs(secondCamelContext);
firstCamelContext.stop();
assertMasterIs(secondCamelContext);
}
@Test
public void shouldKeepMaster() throws Exception {
expectMasterIs(firstCamelContext);
firstCamelContext.start();
assertMasterIs(firstCamelContext);
expectMasterIsNot(secondCamelContext);
secondCamelContext.start();
assertMasterIsNot(secondCamelContext);
expectMasterIs(firstCamelContext);
secondCamelContext.stop();
assertMasterIs(firstCamelContext);
}
@Test
public void shouldElectSecondNodeAndReturnToFirst() throws Exception {
expectMasterIs(firstCamelContext);
firstCamelContext.start();
assertMasterIs(firstCamelContext);
expectMasterIsNot(secondCamelContext);
secondCamelContext.start();
assertMasterIsNot(secondCamelContext);
expectMasterIsNot(firstCamelContext);
firstCamelContext.stop();
firstCamelContext.start();
assertMasterIsNot(firstCamelContext);
expectMasterIs(firstCamelContext);
secondCamelContext.stop();
assertMasterIs(firstCamelContext);
}
// Helpers
private void expectMasterIs(CamelContext camelContext) {
camelContext.getEndpoint(masterMockUri, MockEndpoint.class).expectedMessageCount(1);
}
private void expectMasterIsNot(CamelContext camelContext) {
camelContext.getEndpoint(masterMockUri, MockEndpoint.class).expectedMessageCount(0);
}
private void assertMasterIs(CamelContext camelContext) throws InterruptedException {
camelContext.getEndpoint(masterMockUri, MockEndpoint.class).assertIsSatisfied();
}
private void assertMasterIsNot(CamelContext camelContext) throws InterruptedException {
camelContext.getEndpoint(masterMockUri, MockEndpoint.class).assertIsSatisfied();
}
}
| Builder |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesCharSequenceToObjectConverterTests.java | {
"start": 4246,
"end": 4441
} | class ____ implements Converter<CharSequence, Long> {
@Override
public Long convert(CharSequence source) {
return Long.parseLong(source.toString()) + 1;
}
}
}
| CharSequenceToLongConverter |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/clause/ModelClause.java | {
"start": 8086,
"end": 9293
} | class ____ extends OracleSQLObjectImpl {
private final List<ModelRuleOption> options = new ArrayList<ModelRuleOption>();
private SQLExpr iterate;
private SQLExpr until;
private final List<CellAssignmentItem> cellAssignmentItems = new ArrayList<CellAssignmentItem>();
public SQLExpr getUntil() {
return until;
}
public void setUntil(SQLExpr until) {
this.until = until;
}
public SQLExpr getIterate() {
return iterate;
}
public void setIterate(SQLExpr iterate) {
this.iterate = iterate;
}
public List<ModelRuleOption> getOptions() {
return options;
}
public List<CellAssignmentItem> getCellAssignmentItems() {
return cellAssignmentItems;
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, iterate);
acceptChild(visitor, until);
acceptChild(visitor, cellAssignmentItems);
}
visitor.endVisit(this);
}
}
public static | ModelRulesClause |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithoutLogicalTypes.java | {
"start": 302,
"end": 6519
} | class ____ extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"TestRecordWithoutLogicalTypes\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"b\",\"type\":\"boolean\"},{\"name\":\"i32\",\"type\":\"int\"},{\"name\":\"i64\",\"type\":\"long\"},{\"name\":\"f32\",\"type\":\"float\"},{\"name\":\"f64\",\"type\":\"double\"},{\"name\":\"s\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"d\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},{\"name\":\"t\",\"type\":{\"type\":\"int\",\"logicalType\":\"time-millis\"}},{\"name\":\"ts\",\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"}},{\"name\":\"dec\",\"type\":{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":9,\"scale\":2}}]}");
public static org.apache.avro.Schema getClassSchema() {
return SCHEMA$;
}
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<TestRecordWithoutLogicalTypes> ENCODER = new BinaryMessageEncoder<TestRecordWithoutLogicalTypes>(
MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<TestRecordWithoutLogicalTypes> DECODER = new BinaryMessageDecoder<TestRecordWithoutLogicalTypes>(
MODEL$, SCHEMA$);
/** Serializes this ${schema.getName()} to a ByteBuffer. */
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/** Deserializes a ${schema.getName()} from a ByteBuffer. */
public static TestRecordWithoutLogicalTypes fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
private boolean b;
private int i32;
private long i64;
private float f32;
private double f64;
private java.lang.String s;
private int d;
private int t;
private long ts;
private ByteBuffer dec;
/**
* Default constructor. Note that this does not initialize fields to their
* default values from the schema. If that is desired then one should use
* {@link \#newBuilder()}.
*/
public TestRecordWithoutLogicalTypes() {
}
/**
* All-args constructor.
*/
public TestRecordWithoutLogicalTypes(java.lang.Boolean b, java.lang.Integer i32, java.lang.Long i64,
java.lang.Float f32, java.lang.Double f64, java.lang.String s, java.lang.Integer d, java.lang.Integer t,
java.lang.Long ts, java.nio.ByteBuffer dec) {
this.b = b;
this.i32 = i32;
this.i64 = i64;
this.f32 = f32;
this.f64 = f64;
this.s = s;
this.d = d;
this.t = t;
this.ts = ts;
this.dec = dec;
}
public org.apache.avro.Schema getSchema() {
return SCHEMA$;
}
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0:
return b;
case 1:
return i32;
case 2:
return i64;
case 3:
return f32;
case 4:
return f64;
case 5:
return s;
case 6:
return d;
case 7:
return t;
case 8:
return ts;
case 9:
return dec;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value = "unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0:
b = (java.lang.Boolean) value$;
break;
case 1:
i32 = (java.lang.Integer) value$;
break;
case 2:
i64 = (java.lang.Long) value$;
break;
case 3:
f32 = (java.lang.Float) value$;
break;
case 4:
f64 = (java.lang.Double) value$;
break;
case 5:
s = (java.lang.String) value$;
break;
case 6:
d = (java.lang.Integer) value$;
break;
case 7:
t = (java.lang.Integer) value$;
break;
case 8:
ts = (java.lang.Long) value$;
break;
case 9:
dec = (java.nio.ByteBuffer) value$;
break;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'b' field.
*/
public java.lang.Boolean getB() {
return b;
}
/**
* Gets the value of the 'i32' field.
*/
public java.lang.Integer getI32() {
return i32;
}
/**
* Gets the value of the 'i64' field.
*/
public java.lang.Long getI64() {
return i64;
}
/**
* Gets the value of the 'f32' field.
*/
public java.lang.Float getF32() {
return f32;
}
/**
* Gets the value of the 'f64' field.
*/
public java.lang.Double getF64() {
return f64;
}
/**
* Gets the value of the 's' field.
*/
public java.lang.String getS() {
return s;
}
/**
* Gets the value of the 'd' field.
*/
public java.lang.Integer getD() {
return d;
}
/**
* Gets the value of the 't' field.
*/
public java.lang.Integer getT() {
return t;
}
/**
* Gets the value of the 'ts' field.
*/
public java.lang.Long getTs() {
return ts;
}
/**
* Gets the value of the 'ts' field.
*/
public java.nio.ByteBuffer getDec() {
return dec;
}
/** Creates a new TestRecordWithoutLogicalTypes RecordBuilder */
public static TestRecordWithoutLogicalTypes.Builder newBuilder() {
return new TestRecordWithoutLogicalTypes.Builder();
}
/**
* Creates a new TestRecordWithoutLogicalTypes RecordBuilder by copying an
* existing Builder
*/
public static TestRecordWithoutLogicalTypes.Builder newBuilder(TestRecordWithoutLogicalTypes.Builder other) {
return new TestRecordWithoutLogicalTypes.Builder(other);
}
/**
* Creates a new TestRecordWithoutLogicalTypes RecordBuilder by copying an
* existing TestRecordWithoutLogicalTypes instance
*/
public static TestRecordWithoutLogicalTypes.Builder newBuilder(TestRecordWithoutLogicalTypes other) {
return new TestRecordWithoutLogicalTypes.Builder(other);
}
/**
* RecordBuilder for TestRecordWithoutLogicalTypes instances.
*/
public static | TestRecordWithoutLogicalTypes |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ServerEndpointBuilderFactory.java | {
"start": 19432,
"end": 25251
} | interface ____
extends
EndpointConsumerBuilder {
default ServerEndpointConsumerBuilder basic() {
return (ServerEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedServerEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the IEC 60870 Server component.
*/
public | AdvancedServerEndpointConsumerBuilder |
java | netty__netty | handler/src/test/java/io/netty/handler/timeout/IdleStateEventTest.java | {
"start": 834,
"end": 1486
} | class ____ {
@Test
public void testHumanReadableToString() {
assertEquals("IdleStateEvent(READER_IDLE, first)", FIRST_READER_IDLE_STATE_EVENT.toString());
assertEquals("IdleStateEvent(READER_IDLE)", READER_IDLE_STATE_EVENT.toString());
assertEquals("IdleStateEvent(WRITER_IDLE, first)", FIRST_WRITER_IDLE_STATE_EVENT.toString());
assertEquals("IdleStateEvent(WRITER_IDLE)", WRITER_IDLE_STATE_EVENT.toString());
assertEquals("IdleStateEvent(ALL_IDLE, first)", FIRST_ALL_IDLE_STATE_EVENT.toString());
assertEquals("IdleStateEvent(ALL_IDLE)", ALL_IDLE_STATE_EVENT.toString());
}
}
| IdleStateEventTest |
java | redisson__redisson | redisson/src/test/java/org/redisson/RedissonScriptReactiveTest.java | {
"start": 434,
"end": 3253
} | class ____ extends BaseReactiveTest {
@Test
public void testEval() {
RScriptReactive script = redisson.getScript(StringCodec.INSTANCE);
List<Object> res = sync(script.eval(RScript.Mode.READ_ONLY, "return {'1','2','3.3333','foo',nil,'bar'}", RScript.ReturnType.MULTI, Collections.emptyList()));
assertThat(res).containsExactly("1", "2", "3.3333", "foo");
}
@Test
public void testScriptExists() {
RScriptReactive s = redisson.getScript();
String r = sync(s.scriptLoad("return redis.call('get', 'foo')"));
Assertions.assertEquals("282297a0228f48cd3fc6a55de6316f31422f5d17", r);
List<Boolean> r1 = sync(s.scriptExists(r));
Assertions.assertEquals(1, r1.size());
Assertions.assertTrue(r1.get(0));
sync(s.scriptFlush());
List<Boolean> r2 = sync(s.scriptExists(r));
Assertions.assertEquals(1, r2.size());
Assertions.assertFalse(r2.get(0));
}
@Test
public void testScriptFlush() {
sync(redisson.getBucket("foo").set("bar"));
String r = sync(redisson.getScript().scriptLoad("return redis.call('get', 'foo')"));
Assertions.assertEquals("282297a0228f48cd3fc6a55de6316f31422f5d17", r);
String r1 = sync(redisson.getScript().evalSha(RScript.Mode.READ_ONLY, "282297a0228f48cd3fc6a55de6316f31422f5d17", RScript.ReturnType.VALUE, Collections.emptyList()));
Assertions.assertEquals("bar", r1);
sync(redisson.getScript().scriptFlush());
Assertions.assertThrows(RedisNoScriptException.class, () -> {
redisson.getScript().evalSha(RScript.Mode.READ_ONLY, "282297a0228f48cd3fc6a55de6316f31422f5d17",
RScript.ReturnType.VALUE, Collections.emptyList()).block();
});
}
@Test
public void testScriptLoad() {
sync(redisson.getBucket("foo").set("bar"));
String r = sync(redisson.getScript().scriptLoad("return redis.call('get', 'foo')"));
Assertions.assertEquals("282297a0228f48cd3fc6a55de6316f31422f5d17", r);
String r1 = sync(redisson.getScript().evalSha(RScript.Mode.READ_ONLY, "282297a0228f48cd3fc6a55de6316f31422f5d17", RScript.ReturnType.VALUE, Collections.emptyList()));
Assertions.assertEquals("bar", r1);
}
@Test
public void testEvalSha() {
RScriptReactive s = redisson.getScript();
String res = sync(s.scriptLoad("return redis.call('get', 'foo')"));
Assertions.assertEquals("282297a0228f48cd3fc6a55de6316f31422f5d17", res);
sync(redisson.getBucket("foo").set("bar"));
String r1 = sync(s.evalSha(RScript.Mode.READ_ONLY, "282297a0228f48cd3fc6a55de6316f31422f5d17", RScript.ReturnType.VALUE, Collections.emptyList()));
Assertions.assertEquals("bar", r1);
}
}
| RedissonScriptReactiveTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/layout/XmlLayout.java | {
"start": 3137,
"end": 3258
} | class ____ extends AbstractJacksonLayout {
private static final String ROOT_TAG = "Events";
public static | XmlLayout |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/AbstractFailureAnalyzerTests.java | {
"start": 2672,
"end": 2731
} | class ____ extends TestException {
}
}
| SpecificTestException |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilderTests.java | {
"start": 1145,
"end": 3876
} | class ____ extends AbstractXContentSerializingTestCase<RateAggregationBuilder> {
String aggregationName;
@Before
public void setupName() {
aggregationName = randomAlphaOfLength(10);
}
@Override
protected RateAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
assertSame(XContentParser.Token.START_OBJECT, parser.nextToken());
AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser);
assertThat(parsed.getAggregatorFactories(), hasSize(1));
assertThat(parsed.getPipelineAggregatorFactories(), hasSize(0));
RateAggregationBuilder agg = (RateAggregationBuilder) parsed.getAggregatorFactories().iterator().next();
assertNull(parser.nextToken());
assertNotNull(agg);
return agg;
}
@Override
protected RateAggregationBuilder createTestInstance() {
RateAggregationBuilder aggregationBuilder = new RateAggregationBuilder(aggregationName);
if (randomBoolean()) {
if (randomBoolean()) {
aggregationBuilder.field(randomAlphaOfLength(10));
} else {
aggregationBuilder.script(new Script(randomAlphaOfLength(10)));
}
if (randomBoolean()) {
aggregationBuilder.rateMode(randomFrom(RateMode.values()));
}
}
if (randomBoolean()) {
aggregationBuilder.rateUnit(randomFrom(Rounding.DateTimeUnit.values()));
}
return aggregationBuilder;
}
@Override
protected RateAggregationBuilder mutateInstance(RateAggregationBuilder instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Writeable.Reader<RateAggregationBuilder> instanceReader() {
return RateAggregationBuilder::new;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables());
}
@Override
protected NamedXContentRegistry xContentRegistry() {
List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
namedXContent.add(
new NamedXContentRegistry.Entry(
BaseAggregationBuilder.class,
new ParseField(RateAggregationBuilder.NAME),
(p, n) -> RateAggregationBuilder.PARSER.apply(p, (String) n)
)
);
namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());
return new NamedXContentRegistry(namedXContent);
}
}
| RateAggregationBuilderTests |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/bytecode/Mixin.java | {
"start": 1384,
"end": 1431
} | class ____ be public.
*
* @param ics | must |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/FloatFormatFactory.java | {
"start": 1815,
"end": 2799
} | class ____ extends AbstractNumberFormat<Float> {
FloatFormat(boolean impliedDecimalPosition, int precision, Locale locale) {
super(impliedDecimalPosition, precision, locale);
}
@Override
public String format(Float object) throws Exception {
return !super.hasImpliedDecimalPosition()
? super.getFormat().format(object)
: super.getFormat().format(object * super.getMultiplier());
}
@Override
public Float parse(String string) throws Exception {
float value;
if (!super.hasImpliedDecimalPosition()) {
value = Float.parseFloat(string.trim());
} else {
BigDecimal tmp = new BigDecimal(string.trim());
BigDecimal div = BigDecimal.valueOf(super.getMultiplier());
value = tmp.divide(div).floatValue();
}
return value;
}
}
}
| FloatFormat |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/ListPreloader.java | {
"start": 8744,
"end": 9377
} | class ____ {
@Synthetic final Queue<PreloadTarget> queue;
// The loop is short and the only point is to create the objects.
@SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
PreloadTargetQueue(int size) {
queue = Util.createQueue(size);
for (int i = 0; i < size; i++) {
queue.offer(new PreloadTarget());
}
}
public PreloadTarget next(int width, int height) {
final PreloadTarget result = queue.poll();
queue.offer(result);
result.photoWidth = width;
result.photoHeight = height;
return result;
}
}
private static final | PreloadTargetQueue |
java | lettuce-io__lettuce-core | src/test/jmh/io/lettuce/core/cluster/models/partitions/RedisClusterNodeBenchmark.java | {
"start": 412,
"end": 1405
} | class ____ {
private static final List<Integer> ALL_SLOTS = IntStream.range(0, SlotHash.SLOT_COUNT).boxed().collect(Collectors.toList());
private static final List<Integer> LOWER_SLOTS = IntStream.range(0, 8192).boxed().collect(Collectors.toList());
private static final RedisClusterNode NODE = new RedisClusterNode(null, null, true, null, 0, 0, 0, ALL_SLOTS,
Collections.emptySet());
@Benchmark
public RedisClusterNode createClusterNodeAllSlots() {
return new RedisClusterNode(null, null, true, null, 0, 0, 0, ALL_SLOTS, Collections.emptySet());
}
@Benchmark
public RedisClusterNode createClusterNodeLowerSlots() {
return new RedisClusterNode(null, null, true, null, 0, 0, 0, LOWER_SLOTS, Collections.emptySet());
}
@Benchmark
public void querySlotStatusPresent() {
NODE.hasSlot(1234);
}
@Benchmark
public void querySlotStatusAbsent() {
NODE.hasSlot(8193);
}
}
| RedisClusterNodeBenchmark |
java | netty__netty | codec-smtp/src/main/java/io/netty/handler/codec/smtp/DefaultLastSmtpContent.java | {
"start": 888,
"end": 2073
} | class ____ extends DefaultSmtpContent implements LastSmtpContent {
/**
* Creates a new instance using the given data.
*/
public DefaultLastSmtpContent(ByteBuf data) {
super(data);
}
@Override
public LastSmtpContent copy() {
return (LastSmtpContent) super.copy();
}
@Override
public LastSmtpContent duplicate() {
return (LastSmtpContent) super.duplicate();
}
@Override
public LastSmtpContent retainedDuplicate() {
return (LastSmtpContent) super.retainedDuplicate();
}
@Override
public LastSmtpContent replace(ByteBuf content) {
return new DefaultLastSmtpContent(content);
}
@Override
public DefaultLastSmtpContent retain() {
super.retain();
return this;
}
@Override
public DefaultLastSmtpContent retain(int increment) {
super.retain(increment);
return this;
}
@Override
public DefaultLastSmtpContent touch() {
super.touch();
return this;
}
@Override
public DefaultLastSmtpContent touch(Object hint) {
super.touch(hint);
return this;
}
}
| DefaultLastSmtpContent |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java | {
"start": 1290,
"end": 1604
} | class ____ extends ActionType<PutFilterAction.Response> {
public static final UpdateFilterAction INSTANCE = new UpdateFilterAction();
public static final String NAME = "cluster:admin/xpack/ml/filters/update";
private UpdateFilterAction() {
super(NAME);
}
public static | UpdateFilterAction |
java | micronaut-projects__micronaut-core | http-server-netty/src/main/java/io/micronaut/http/server/netty/NonReentrantLock.java | {
"start": 956,
"end": 1682
} | class ____ extends Semaphore implements Lock {
public NonReentrantLock() {
super(1);
}
@Override
public void lock() {
acquireUninterruptibly();
}
@Override
public void lockInterruptibly() throws InterruptedException {
acquire();
}
@Override
public boolean tryLock() {
return tryAcquire();
}
@Override
public boolean tryLock(long time, @NonNull TimeUnit unit) throws InterruptedException {
return tryAcquire(time, unit);
}
@Override
public void unlock() {
release();
}
@NonNull
@Override
public Condition newCondition() {
throw new UnsupportedOperationException();
}
}
| NonReentrantLock |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/framework/ProxyCreatorSupport.java | {
"start": 775,
"end": 978
} | class ____ proxy factories.
* Provides convenient access to a configurable AopProxyFactory.
*
* @author Juergen Hoeller
* @since 2.0.3
* @see #createAopProxy()
*/
@SuppressWarnings("serial")
public | for |
java | micronaut-projects__micronaut-core | http-tck/src/main/java/io/micronaut/http/tck/BodyAssertion.java | {
"start": 2907,
"end": 3042
} | interface ____ typed BodyAssertion Builders.
*
* @param <T> The body type
* @param <E> The error type
*/
public | for |
java | apache__camel | components/camel-azure/camel-azure-files/src/test/java/org/apache/camel/component/file/azure/FilesPathTest.java | {
"start": 996,
"end": 1927
} | class ____ {
@Test
void splitAbsolutePreservingRootShouldReturnRootAndSteps() {
assertArrayEquals(new String[] { "/", "1", "2" }, FilesPath.splitToSteps("/1/2", true));
}
@Test
void splitAbsoluteWithoutPreservingRootShouldReturnStepsOnly() {
assertArrayEquals(new String[] { "1", "2" }, FilesPath.splitToSteps("/1/2", false));
}
@Test
void splitRelativePreservingRootShouldReturnStepsOnly() {
assertArrayEquals(new String[] { "1", "2" }, FilesPath.splitToSteps("1/2", true));
}
@Test
void splitRootPreservingRootShouldReturnRoot() {
assertArrayEquals(new String[] { "/" }, FilesPath.splitToSteps("/", true));
}
@Test
void splitWithoutSeparatorShouldReturnInput() {
// by observation, Camel devs were uncertain what is returned ...
assertArrayEquals(new String[] { "a path" }, FilesPath.split("a path"));
}
}
| FilesPathTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/ActionListenerTests.java | {
"start": 1648,
"end": 28692
} | class ____ extends ESTestCase {
public void testWrapConsumers() {
AtomicReference<Boolean> reference = new AtomicReference<>();
AtomicReference<Exception> exReference = new AtomicReference<>();
ActionListener<Boolean> wrap = ActionListener.wrap(new CheckedConsumer<>() {
@Override
public void accept(Boolean o) {
if (Boolean.FALSE.equals(o)) {
throw new IllegalArgumentException("must not be false");
}
reference.set(o);
}
@Override
public String toString() {
return "test handler";
}
}, new Consumer<>() {
@Override
public void accept(Exception newValue) {
exReference.set(newValue);
}
@Override
public String toString() {
return "test exception handler";
}
});
assertEquals("WrappedActionListener{test handler}{test exception handler}", wrap.toString());
wrap.onResponse(Boolean.FALSE);
assertNull(reference.getAndSet(null));
assertEquals("must not be false", exReference.getAndSet(null).getMessage());
wrap.onResponse(Boolean.TRUE);
assertTrue(reference.getAndSet(null));
assertNull(exReference.getAndSet(null));
wrap.onFailure(new RuntimeException("test exception"));
assertNull(reference.getAndSet(null));
assertEquals("test exception", exReference.getAndSet(null).getMessage());
}
public void testWrapRunnable() {
var executed = new AtomicBoolean();
var listener = ActionListener.running(new Runnable() {
@Override
public void run() {
assertTrue(executed.compareAndSet(false, true));
}
@Override
public String toString() {
return "test runnable";
}
});
assertEquals("RunnableWrappingActionListener{test runnable}", listener.toString());
listener.onResponse(new Object());
assertTrue(executed.getAndSet(false));
listener.onFailure(new Exception("simulated"));
assertTrue(executed.getAndSet(false));
expectThrows(
AssertionError.class,
() -> ActionListener.running(() -> { throw new UnsupportedOperationException(); }).onResponse(null)
);
}
public void testOnResponse() {
final int numListeners = randomIntBetween(1, 20);
List<AtomicReference<Boolean>> refList = new ArrayList<>();
List<AtomicReference<Exception>> excList = new ArrayList<>();
List<ActionListener<Boolean>> listeners = new ArrayList<>();
List<Boolean> failOnTrue = new ArrayList<>();
AtomicInteger exceptionCounter = new AtomicInteger(0);
for (int i = 0; i < numListeners; i++) {
boolean doFailOnTrue = rarely();
failOnTrue.add(doFailOnTrue);
AtomicReference<Boolean> reference = new AtomicReference<>();
AtomicReference<Exception> exReference = new AtomicReference<>();
refList.add(reference);
excList.add(exReference);
CheckedConsumer<Boolean, ? extends Exception> handler = (o) -> {
if (Boolean.FALSE.equals(o)) {
throw new IllegalArgumentException("must not be false " + exceptionCounter.getAndIncrement());
}
if (doFailOnTrue) {
throw new IllegalStateException("must not be true");
}
reference.set(o);
};
listeners.add(ActionListener.wrap(handler, exReference::set));
}
ActionListener.onResponse(listeners, Boolean.TRUE);
for (int i = 0; i < numListeners; i++) {
if (failOnTrue.get(i) == false) {
assertTrue("listener index " + i, refList.get(i).get());
refList.get(i).set(null);
} else {
assertNull("listener index " + i, refList.get(i).get());
}
}
for (int i = 0; i < numListeners; i++) {
if (failOnTrue.get(i) == false) {
assertNull("listener index " + i, excList.get(i).get());
} else {
assertEquals("listener index " + i, "must not be true", excList.get(i).get().getMessage());
}
}
ActionListener.onResponse(listeners, Boolean.FALSE);
for (int i = 0; i < numListeners; i++) {
assertNull("listener index " + i, refList.get(i).get());
}
assertEquals(numListeners, exceptionCounter.get());
for (int i = 0; i < numListeners; i++) {
assertNotNull(excList.get(i).get());
assertEquals("listener index " + i, "must not be false " + i, excList.get(i).get().getMessage());
}
}
public void testOnFailure() {
final int numListeners = randomIntBetween(1, 20);
List<AtomicReference<Boolean>> refList = new ArrayList<>();
List<AtomicReference<Exception>> excList = new ArrayList<>();
List<ActionListener<Boolean>> listeners = new ArrayList<>();
final int listenerToFail = randomBoolean() ? -1 : randomIntBetween(0, numListeners - 1);
for (int i = 0; i < numListeners; i++) {
AtomicReference<Boolean> reference = new AtomicReference<>();
AtomicReference<Exception> exReference = new AtomicReference<>();
refList.add(reference);
excList.add(exReference);
boolean fail = i == listenerToFail;
listeners.add(new ActionListener<>() {
@Override
public void onResponse(Boolean result) {
reference.set(result);
}
@Override
public void onFailure(Exception e) {
exReference.set(e);
if (fail) {
throw new RuntimeException("double boom");
}
}
});
}
try {
ActionListener.onFailure(listeners, new Exception("booom"));
assertEquals("unexpected succces listener to fail: " + listenerToFail, -1, listenerToFail);
} catch (RuntimeException ex) {
assertTrue("listener to fail: " + listenerToFail, listenerToFail >= 0);
assertNotNull(ex.getCause());
assertEquals("double boom", ex.getCause().getMessage());
}
for (int i = 0; i < numListeners; i++) {
assertNull("listener index " + i, refList.get(i).get());
}
for (int i = 0; i < numListeners; i++) {
assertEquals("listener index " + i, "booom", excList.get(i).get().getMessage());
}
}
public void testRunAfter() {
{
AtomicBoolean afterSuccess = new AtomicBoolean();
ActionListener<Object> listener = ActionListener.runAfter(ActionListener.noop(), () -> afterSuccess.set(true));
listener.onResponse(null);
assertThat(afterSuccess.get(), equalTo(true));
}
{
AtomicBoolean afterFailure = new AtomicBoolean();
ActionListener<Object> listener = ActionListener.runAfter(ActionListener.noop(), () -> afterFailure.set(true));
listener.onFailure(new RuntimeException("test"));
assertThat(afterFailure.get(), equalTo(true));
}
}
public void testRunBefore() {
{
AtomicBoolean afterSuccess = new AtomicBoolean();
ActionListener<Object> listener = ActionListener.runBefore(ActionListener.noop(), () -> afterSuccess.set(true));
listener.onResponse(null);
assertThat(afterSuccess.get(), equalTo(true));
}
{
AtomicBoolean afterFailure = new AtomicBoolean();
ActionListener<Object> listener = ActionListener.runBefore(ActionListener.noop(), () -> afterFailure.set(true));
listener.onFailure(new RuntimeException("test"));
assertThat(afterFailure.get(), equalTo(true));
}
}
public void testNotifyOnce() {
AtomicInteger onResponseTimes = new AtomicInteger();
AtomicInteger onFailureTimes = new AtomicInteger();
ActionListener<Object> listener = ActionListener.notifyOnce(new ActionListener<Object>() {
@Override
public void onResponse(Object o) {
onResponseTimes.getAndIncrement();
}
@Override
public void onFailure(Exception e) {
onFailureTimes.getAndIncrement();
}
});
boolean success = randomBoolean();
if (success) {
listener.onResponse(null);
} else {
listener.onFailure(new RuntimeException("test"));
}
for (int iters = between(0, 10), i = 0; i < iters; i++) {
if (randomBoolean()) {
listener.onResponse(null);
} else {
listener.onFailure(new RuntimeException("test"));
}
}
if (success) {
assertThat(onResponseTimes.get(), equalTo(1));
assertThat(onFailureTimes.get(), equalTo(0));
} else {
assertThat(onResponseTimes.get(), equalTo(0));
assertThat(onFailureTimes.get(), equalTo(1));
}
}
public void testNotifyOnceReleasesDelegate() {
final var reachabilityChecker = new ReachabilityChecker();
final var listener = ActionListener.notifyOnce(reachabilityChecker.register(ActionListener.running(() -> {})));
reachabilityChecker.checkReachable();
listener.onResponse(null);
reachabilityChecker.ensureUnreachable();
assertEquals("notifyOnce[null]", listener.toString());
}
public void testConcurrentNotifyOnce() throws InterruptedException {
final var completed = new AtomicBoolean();
final var listener = ActionListener.notifyOnce(new ActionListener<Void>() {
@Override
public void onResponse(Void o) {
assertTrue(completed.compareAndSet(false, true));
}
@Override
public void onFailure(Exception e) {
assertTrue(completed.compareAndSet(false, true));
}
@Override
public String toString() {
return "inner-listener";
}
});
assertThat(listener.toString(), equalTo("notifyOnce[inner-listener]"));
startInParallel(between(1, 10), i -> {
if (randomBoolean()) {
listener.onResponse(null);
} else {
listener.onFailure(new RuntimeException("test"));
}
});
assertTrue(completed.get());
}
public void testCompleteWith() {
PlainActionFuture<Integer> onResponseListener = new PlainActionFuture<>();
ActionListener.completeWith(onResponseListener, () -> 100);
assertThat(onResponseListener.isDone(), equalTo(true));
assertThat(onResponseListener.actionGet(), equalTo(100));
PlainActionFuture<Integer> onFailureListener = new PlainActionFuture<>();
ActionListener.completeWith(onFailureListener, () -> { throw new IOException("not found"); });
assertThat(onFailureListener.isDone(), equalTo(true));
assertThat(expectThrows(ExecutionException.class, onFailureListener::get).getCause(), instanceOf(IOException.class));
AtomicReference<Exception> exReference = new AtomicReference<>();
ActionListener<String> listener = new ActionListener<>() {
@Override
public void onResponse(String s) {
if (s == null) {
throw new IllegalArgumentException("simulate onResponse exception");
}
}
@Override
public void onFailure(Exception e) {
exReference.set(e);
if (e instanceof IllegalArgumentException iae) {
throw iae;
}
}
};
AssertionError assertionError = expectThrows(AssertionError.class, () -> ActionListener.completeWith(listener, () -> null));
assertThat(assertionError.getCause(), instanceOf(IllegalArgumentException.class));
assertNull(exReference.get());
assertionError = expectThrows(AssertionError.class, () -> ActionListener.completeWith(listener, () -> {
throw new IllegalArgumentException();
}));
assertThat(assertionError.getCause(), instanceOf(IllegalArgumentException.class));
assertThat(exReference.get(), instanceOf(IllegalArgumentException.class));
}
public void testAssertAtLeastOnceWillLogAssertionErrorWhenNotResolved() throws Exception {
assumeTrue("assertAtLeastOnce will be a no-op when assertions are disabled", Assertions.ENABLED);
ActionListener<Object> listenerRef = ActionListener.assertAtLeastOnce(ActionListener.running(() -> {
// Do nothing, but don't use ActionListener.noop() as it'll never be garbage collected
}));
// Nullify reference so it becomes unreachable
listenerRef = null;
assertBusy(() -> {
System.gc();
assertLeakDetected();
});
}
public void testAssertAtLeastOnceWillNotLogWhenResolvedOrFailed() {
assumeTrue("assertAtLeastOnce will be a no-op when assertions are disabled", Assertions.ENABLED);
ReachabilityChecker reachabilityChecker = new ReachabilityChecker();
ActionListener<Object> listenerRef = reachabilityChecker.register(ActionListener.assertAtLeastOnce(ActionListener.running(() -> {
// Do nothing, but don't use ActionListener.noop() as it'll never be garbage collected
})));
// Call onResponse and/or onFailure at least once
int times = randomIntBetween(1, 3);
for (int i = 0; i < times; i++) {
if (randomBoolean()) {
listenerRef.onResponse("succeeded");
} else {
listenerRef.onFailure(new RuntimeException("Failed"));
}
}
// Nullify reference so it becomes unreachable
listenerRef = null;
reachabilityChecker.ensureUnreachable();
}
public void testAssertAtLeastOnceWillDelegateResponses() {
final var response = new Object();
assertSame(response, safeAwait(SubscribableListener.newForked(l -> ActionListener.assertAtLeastOnce(l).onResponse(response))));
}
public void testAssertAtLeastOnceWillDelegateFailures() {
final var exception = new RuntimeException();
assertSame(
exception,
safeAwaitFailure(SubscribableListener.newForked(l -> ActionListener.assertAtLeastOnce(l).onFailure(exception)))
);
}
/**
* Test that map passes the output of the function to its delegate listener and that exceptions in the function are propagated to the
* onFailure handler. Also verify that exceptions from ActionListener.onResponse does not invoke onFailure, since it is the
* responsibility of the ActionListener implementation (the client of the API) to handle exceptions in onResponse and onFailure.
*/
public void testMap() {
AtomicReference<Exception> exReference = new AtomicReference<>();
ActionListener<String> listener = new ActionListener<>() {
@Override
public void onResponse(String s) {
if (s == null) {
throw new IllegalArgumentException("simulate onResponse exception");
}
}
@Override
public void onFailure(Exception e) {
exReference.set(e);
if (e instanceof IllegalArgumentException) {
throw (IllegalArgumentException) e;
}
}
};
ActionListener<Boolean> mapped = listener.map(b -> {
if (b == null) {
return null;
} else if (b) {
throw new IllegalStateException("simulate map function exception");
} else {
return b.toString();
}
});
AssertionError assertionError = expectThrows(AssertionError.class, () -> mapped.onResponse(null));
assertThat(assertionError.getCause(), instanceOf(IllegalArgumentException.class));
assertNull(exReference.get());
mapped.onResponse(false);
assertNull(exReference.get());
mapped.onResponse(true);
assertThat(exReference.get(), instanceOf(IllegalStateException.class));
assertionError = expectThrows(AssertionError.class, () -> mapped.onFailure(new IllegalArgumentException()));
assertThat(assertionError.getCause(), instanceOf(IllegalArgumentException.class));
assertThat(exReference.get(), instanceOf(IllegalArgumentException.class));
mapped.onFailure(new IllegalStateException());
assertThat(exReference.get(), instanceOf(IllegalStateException.class));
}
public void testRunBeforeThrowsAssertionErrorIfExecutedMoreThanOnce() {
assumeTrue("test only works with assertions enabled", Assertions.ENABLED);
final String description = randomAlphaOfLength(10);
final ActionListener<Void> runBefore = ActionListener.runBefore(ActionListener.noop(), makeCheckedRunnable(description));
completeListener(randomBoolean(), runBefore);
var error = expectThrows(AssertionError.class, () -> completeListener(true, runBefore));
assertThat(error.getMessage(), containsString(description));
}
public void testRunAfterThrowsAssertionErrorIfExecutedMoreThanOnce() {
assumeTrue("test only works with assertions enabled", Assertions.ENABLED);
final String description = randomAlphaOfLength(10);
final ActionListener<Void> runAfter = randomBoolean()
? ActionListener.runAfter(ActionListener.noop(), makeRunnable(description))
: ActionListener.releaseAfter(ActionListener.noop(), makeReleasable(description, new AtomicBoolean()));
completeListener(randomBoolean(), runAfter);
var error = expectThrows(AssertionError.class, () -> completeListener(true, runAfter));
assertThat(error.getMessage(), containsString(description));
}
public void testWrappedRunBeforeOrAfterThrowsAssertionErrorIfExecutedMoreThanOnce() {
assumeTrue("test only works with assertions enabled", Assertions.ENABLED);
final ActionListener<Void> throwingListener = new ActionListener<>() {
@Override
public void onResponse(Void o) {
throw new AlreadyClosedException("throwing on purpose");
}
@Override
public void onFailure(Exception e) {
throw new AssertionError("should not be called");
}
};
final String description = randomAlphaOfLength(10);
final ActionListener<Void> runBeforeOrAfterListener = randomBoolean()
? ActionListener.runBefore(throwingListener, makeCheckedRunnable(description))
: ActionListener.runAfter(throwingListener, makeRunnable(description));
final ActionListener<Void> wrappedListener = ActionListener.running(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
runBeforeOrAfterListener.onFailure(e);
}
@Override
protected void doRun() {
runBeforeOrAfterListener.onResponse(null);
}
});
var error = expectThrows(AssertionError.class, () -> completeListener(true, wrappedListener));
assertThat(error.getMessage(), containsString(description));
}
public void testReleasing() {
runReleasingTest(true);
runReleasingTest(false);
}
private static void runReleasingTest(boolean successResponse) {
final AtomicBoolean releasedFlag = new AtomicBoolean();
final String description = randomAlphaOfLength(10);
final ActionListener<Void> l = ActionListener.releasing(makeReleasable(description, releasedFlag));
assertThat(l.toString(), containsString("release[" + description + "]}"));
completeListener(successResponse, l);
assertTrue(releasedFlag.get());
}
private static void completeListener(boolean successResponse, ActionListener<Void> listener) {
if (successResponse) {
try {
listener.onResponse(null);
} catch (Exception e) {
// ok
}
} else {
listener.onFailure(new RuntimeException("simulated"));
}
}
public void testRun() throws Exception {
final var successFuture = new PlainActionFuture<>();
final var successResult = new Object();
ActionListener.run(successFuture, l -> l.onResponse(successResult));
assertTrue(successFuture.isDone());
assertSame(successResult, successFuture.get());
final var failFuture = new PlainActionFuture<>();
final var failException = new ElasticsearchException("simulated");
ActionListener.run(failFuture, l -> {
if (randomBoolean()) {
l.onFailure(failException);
} else {
throw failException;
}
});
assertTrue(failFuture.isDone());
assertSame(failException, expectThrows(ExecutionException.class, ElasticsearchException.class, failFuture::get));
}
public void testRunWithResource() {
final var future = new PlainActionFuture<>();
final var successResult = new Object();
final var failException = new ElasticsearchException("simulated");
final var resourceIsClosed = new AtomicBoolean(false);
ActionListener.runWithResource(ActionListener.runBefore(future, () -> assertTrue(resourceIsClosed.get())), () -> new Releasable() {
@Override
public void close() {
assertTrue(resourceIsClosed.compareAndSet(false, true));
}
@Override
public String toString() {
return "test releasable";
}
}, (l, r) -> {
assertFalse(resourceIsClosed.get());
assertEquals("test releasable", r.toString());
if (randomBoolean()) {
l.onResponse(successResult);
} else {
if (randomBoolean()) {
l.onFailure(failException);
} else {
throw failException;
}
}
});
assertTrue(future.isDone());
try {
assertSame(successResult, future.get());
} catch (ExecutionException e) {
assertSame(failException, e.getCause());
} catch (InterruptedException e) {
fail(e);
}
final var failureFuture = new PlainActionFuture<>();
ActionListener.runWithResource(
failureFuture,
() -> { throw new ElasticsearchException("resource creation failure"); },
(l, r) -> fail("should not be called")
);
assertTrue(failureFuture.isDone());
assertEquals(
"resource creation failure",
expectThrows(ExecutionException.class, ElasticsearchException.class, failureFuture::get).getMessage()
);
}
public void testReleaseBefore() {
runReleaseListenerTest(true, false, (delegate, releasable) -> ActionListener.releaseBefore(releasable, delegate));
runReleaseListenerTest(true, true, (delegate, releasable) -> ActionListener.releaseBefore(releasable, delegate));
runReleaseListenerTest(false, false, (delegate, releasable) -> ActionListener.releaseBefore(releasable, delegate));
}
public void testReleaseAfter() {
runReleaseListenerTest(true, false, ActionListener::releaseAfter);
runReleaseListenerTest(true, true, ActionListener::releaseAfter);
runReleaseListenerTest(false, false, ActionListener::releaseAfter);
}
private static void runReleaseListenerTest(
boolean successResponse,
final boolean throwFromOnResponse,
BiFunction<ActionListener<Void>, Releasable, ActionListener<Void>> releaseListenerProvider
) {
final AtomicBoolean released = new AtomicBoolean();
final String description = randomAlphaOfLength(10);
final ActionListener<Void> l = releaseListenerProvider.apply(new ActionListener<>() {
@Override
public void onResponse(Void unused) {
if (throwFromOnResponse) {
throw new RuntimeException("onResponse");
}
}
@Override
public void onFailure(Exception e) {
// ok
}
@Override
public String toString() {
return "test listener";
}
}, makeReleasable(description, released));
assertThat(l.toString(), containsString("test listener/release[" + description + "]"));
if (successResponse) {
try {
l.onResponse(null);
} catch (Exception e) {
// ok
} catch (AssertionError e) {
// ensure this was only thrown by ActionListener#assertOnce
assertThat(e.getMessage(), endsWith("must handle its own exceptions"));
}
} else {
l.onFailure(new RuntimeException("supplied"));
}
assertTrue(released.get());
}
private static Releasable makeReleasable(String description, AtomicBoolean releasedFlag) {
return new Releasable() {
@Override
public void close() {
assertTrue(releasedFlag.compareAndSet(false, true));
}
@Override
public String toString() {
return description;
}
};
}
private static Runnable makeRunnable(String description) {
return new Runnable() {
@Override
public void run() {}
@Override
public String toString() {
return description;
}
};
}
private static CheckedRunnable<?> makeCheckedRunnable(String description) {
return new CheckedRunnable<>() {
@Override
public void run() {}
@Override
public String toString() {
return description;
}
};
}
public static <T> Matcher<T> isMappedActionListener() {
return instanceOf(ActionListenerImplementations.MappedActionListener.class);
}
}
| ActionListenerTests |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/chain/ChainMapper.java | {
"start": 3137,
"end": 3278
} | class ____<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends
Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
/**
* Adds a {@link Mapper} | ChainMapper |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableOuterJoin.java | {
"start": 3126,
"end": 6387
} | class ____ extends ContextualProcessor<K, Change<V1>, K, Change<VOut>> {
private final KTableValueGetter<K, V2> valueGetter;
private Sensor droppedRecordsSensor;
KTableKTableOuterJoinProcessor(final KTableValueGetter<K, V2> valueGetter) {
this.valueGetter = valueGetter;
}
@Override
public void init(final ProcessorContext<K, Change<VOut>> context) {
super.init(context);
droppedRecordsSensor = droppedRecordsSensor(
Thread.currentThread().getName(),
context.taskId().toString(),
(StreamsMetricsImpl) context.metrics()
);
valueGetter.init(context);
}
@Override
public void process(final Record<K, Change<V1>> record) {
// we do join iff keys are equal, thus, if key is null we cannot join and just ignore the record
if (record.key() == null) {
if (context().recordMetadata().isPresent()) {
final RecordMetadata recordMetadata = context().recordMetadata().get();
LOG.warn(
"Skipping record due to null key. "
+ "topic=[{}] partition=[{}] offset=[{}]",
recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset()
);
} else {
LOG.warn(
"Skipping record due to null key. Topic, partition, and offset not known."
);
}
droppedRecordsSensor.record();
return;
}
// drop out-of-order records from versioned tables (cf. KIP-914)
if (useVersionedSemantics && !record.value().isLatest) {
LOG.info("Skipping out-of-order record from versioned table while performing table-table join.");
droppedRecordsSensor.record();
return;
}
VOut newValue = null;
final long resultTimestamp;
VOut oldValue = null;
final ValueAndTimestamp<V2> valueAndTimestamp2 = valueGetter.get(record.key());
final V2 value2 = getValueOrNull(valueAndTimestamp2);
if (value2 == null) {
if (record.value().newValue == null && record.value().oldValue == null) {
return;
}
resultTimestamp = record.timestamp();
} else {
resultTimestamp = Math.max(record.timestamp(), valueAndTimestamp2.timestamp());
}
if (value2 != null || record.value().newValue != null) {
newValue = joiner.apply(record.value().newValue, value2);
}
if (sendOldValues && (value2 != null || record.value().oldValue != null)) {
oldValue = joiner.apply(record.value().oldValue, value2);
}
context().forward(record.withValue(new Change<>(newValue, oldValue, record.value().isLatest)).withTimestamp(resultTimestamp));
}
@Override
public void close() {
valueGetter.close();
}
}
private | KTableKTableOuterJoinProcessor |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/socket/client/StandardWebSocketClient.java | {
"start": 5874,
"end": 6504
} | class ____ extends Configurator {
private final HttpHeaders requestHeaders;
private final HttpHeaders responseHeaders = new HttpHeaders();
public DefaultConfigurator(HttpHeaders requestHeaders) {
this.requestHeaders = requestHeaders;
}
public HttpHeaders getResponseHeaders() {
return this.responseHeaders;
}
@Override
public void beforeRequest(Map<String, List<String>> requestHeaders) {
this.requestHeaders.forEach(requestHeaders::put);
}
@Override
public void afterResponse(HandshakeResponse response) {
response.getHeaders().forEach(this.responseHeaders::put);
}
}
}
| DefaultConfigurator |
java | google__error-prone | core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessPropagationTest.java | {
"start": 52735,
"end": 52788
} | class ____ {
int field;
}
private static | MyClass |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/PgEventEndpointBuilderFactory.java | {
"start": 21875,
"end": 25154
} | interface ____ {
/**
* PostgresSQL Event (camel-pgevent)
* Send and receive PostgreSQL events via LISTEN and NOTIFY commands.
*
* Category: database
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-pgevent
*
* @return the dsl builder for the headers' name.
*/
default PgEventHeaderNameBuilder pgevent() {
return PgEventHeaderNameBuilder.INSTANCE;
}
/**
* PostgresSQL Event (camel-pgevent)
* Send and receive PostgreSQL events via LISTEN and NOTIFY commands.
*
* Category: database
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-pgevent
*
* Syntax: <code>pgevent:host:port/database/channel</code>
*
* Path parameter: host
* To connect using hostname and port to the database.
* Default value: localhost
*
* Path parameter: port
* To connect using hostname and port to the database.
* Default value: 5432
*
* Path parameter: database (required)
* The database name. The database name can take any characters because
* it is sent as a quoted identifier. It is part of the endpoint URI, so
* diacritical marks and non-Latin letters have to be URL encoded.
*
* Path parameter: channel (required)
* The channel name
*
* @param path host:port/database/channel
* @return the dsl builder
*/
default PgEventEndpointBuilder pgevent(String path) {
return PgEventEndpointBuilderFactory.endpointBuilder("pgevent", path);
}
/**
* PostgresSQL Event (camel-pgevent)
* Send and receive PostgreSQL events via LISTEN and NOTIFY commands.
*
* Category: database
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-pgevent
*
* Syntax: <code>pgevent:host:port/database/channel</code>
*
* Path parameter: host
* To connect using hostname and port to the database.
* Default value: localhost
*
* Path parameter: port
* To connect using hostname and port to the database.
* Default value: 5432
*
* Path parameter: database (required)
* The database name. The database name can take any characters because
* it is sent as a quoted identifier. It is part of the endpoint URI, so
* diacritical marks and non-Latin letters have to be URL encoded.
*
* Path parameter: channel (required)
* The channel name
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path host:port/database/channel
* @return the dsl builder
*/
default PgEventEndpointBuilder pgevent(String componentName, String path) {
return PgEventEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the PostgresSQL Event component.
*/
public static | PgEventBuilders |
java | elastic__elasticsearch | modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java | {
"start": 2279,
"end": 4502
} | class ____ implements LongCounterBuilder {
@Override
public LongCounterBuilder setDescription(String description) {
return this;
}
@Override
public LongCounterBuilder setUnit(String unit) {
return this;
}
@Override
public DoubleCounterBuilder ofDoubles() {
return null;
}
@Override
public LongCounter build() {
try {
buildLatch.countDown();
registerLatch.await();
} catch (Exception e) {
throw new RuntimeException(e);
}
return null;
}
@Override
public ObservableLongCounter buildWithCallback(Consumer<ObservableLongMeasurement> callback) {
return null;
}
}
public void testLockingWhenRegistering() throws Exception {
APMMeterRegistry meterRegistrar = new APMMeterRegistry(lockingMeter);
var registerThread = new Thread(() -> meterRegistrar.registerLongCounter(name, description, unit));
// registerThread has a countDown latch that is simulating a long-running registration
registerThread.start();
buildLatch.await(); // wait for registerThread to hold the lock
var setProviderThread = new Thread(() -> meterRegistrar.setProvider(noopMeter));
// a setProviderThread will attempt to override a meter, but will wait to acquireLock
setProviderThread.start();
// assert that a thread is waiting for a lock during long-running registration
assertBusy(() -> assertThat(setProviderThread.getState(), equalTo(Thread.State.WAITING)));
// assert that the old lockingMeter is still in place
assertThat(meterRegistrar.getMeter(), sameInstance(lockingMeter));
// finish long-running registration
registerLatch.countDown();
// wait for everything to quiesce, registerLatch.countDown() doesn't ensure lock has been released
setProviderThread.join();
registerThread.join();
// assert that a meter was overriden
assertThat(meterRegistrar.getMeter(), sameInstance(noopMeter));
}
}
| LockingLongCounterBuilder |
java | apache__flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/functions/TimedOutPartialMatchHandler.java | {
"start": 1232,
"end": 1421
} | class ____ extends PatternProcessFunction<IN, OUT> implements TimedOutPartialMatchHandler<IN> {
*
* }
* }</pre>
*
* @param <IN> type of input elements
*/
@PublicEvolving
public | MyFunction |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java | {
"start": 1454,
"end": 1520
} | class ____ supposed to be substituted by a {@link Join}.
*/
public | is |
java | apache__camel | components/camel-opentelemetry/src/test/java/org/apache/camel/opentelemetry/OpenTelemetryTracingStrategyTest.java | {
"start": 1172,
"end": 3822
} | class ____ extends CamelOpenTelemetryTestSupport {
private static final SpanTestData[] testdata = {
new SpanTestData().setLabel("camel-process").setOperation("third-party-span")
.setParentId(1),
new SpanTestData().setLabel("camel-process").setOperation("third-party-processor")
.setParentId(6),
new SpanTestData().setLabel("camel-process").setOperation("direct-processor")
.setParentId(3),
new SpanTestData().setLabel("direct:serviceB").setOperation("serviceB")
.setParentId(4),
new SpanTestData().setLabel("direct:serviceB").setOperation("serviceB")
.setKind(SpanKind.CLIENT)
.setParentId(5),
new SpanTestData().setLabel("to:serviceB").setOperation("to-serviceB")
.setParentId(6),
new SpanTestData().setLabel("direct:serviceA").setUri("direct://start").setOperation("serviceA")
.setParentId(7),
new SpanTestData().setLabel("direct:serviceA").setUri("direct://start").setOperation("serviceA")
.setKind(SpanKind.CLIENT)
};
OpenTelemetryTracingStrategyTest() {
super(testdata);
}
@Test
void testTracingOfProcessors() {
template.requestBody("direct:serviceA", "Hello");
verify();
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:serviceA").routeId("serviceA")
.process(exchange -> {
callThirdPartyInstrumentation();
}).id("third-party-processor")
.to("direct:serviceB").id("to-serviceB");
from("direct:serviceB").routeId("serviceB")
.process(exchange -> {
// noop
}).id("direct-processor");
}
private void callThirdPartyInstrumentation() {
Span span = getTracer().spanBuilder("third-party-span").startSpan();
try (Scope ignored = span.makeCurrent()) {
span.setAttribute(COMPONENT_KEY, "third-party-component");
} finally {
span.end();
}
}
};
}
@Override
protected Function<OpenTelemetryTracer, InterceptStrategy> getTracingStrategy() {
return OpenTelemetryTracingStrategy::new;
}
}
| OpenTelemetryTracingStrategyTest |
java | square__javapoet | src/test/java/com/squareup/javapoet/TypeSpecTest.java | {
"start": 20554,
"end": 22509
} | class ____ {\n"
+ " void throwOne() throws IOException {\n"
+ " }\n"
+ "\n"
+ " void throwTwo() throws IOException, SourCreamException {\n"
+ " }\n"
+ "\n"
+ " abstract void abstractThrow() throws IOException;\n"
+ "\n"
+ " native void nativeThrow() throws IOException;\n"
+ "}\n");
}
@Test public void typeVariables() throws Exception {
TypeVariableName t = TypeVariableName.get("T");
TypeVariableName p = TypeVariableName.get("P", Number.class);
ClassName location = ClassName.get(tacosPackage, "Location");
TypeSpec typeSpec = TypeSpec.classBuilder("Location")
.addTypeVariable(t)
.addTypeVariable(p)
.addSuperinterface(ParameterizedTypeName.get(ClassName.get(Comparable.class), p))
.addField(t, "label")
.addField(p, "x")
.addField(p, "y")
.addMethod(MethodSpec.methodBuilder("compareTo")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(int.class)
.addParameter(p, "p")
.addCode("return 0;\n")
.build())
.addMethod(MethodSpec.methodBuilder("of")
.addModifiers(Modifier.PUBLIC, Modifier.STATIC)
.addTypeVariable(t)
.addTypeVariable(p)
.returns(ParameterizedTypeName.get(location, t, p))
.addParameter(t, "label")
.addParameter(p, "x")
.addParameter(p, "y")
.addCode("throw new $T($S);\n", UnsupportedOperationException.class, "TODO")
.build())
.build();
assertThat(toString(typeSpec)).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.lang.Comparable;\n"
+ "import java.lang.Number;\n"
+ "import java.lang.Override;\n"
+ "import java.lang.UnsupportedOperationException;\n"
+ "\n"
+ " | Taco |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3000/Issue3066.java | {
"start": 195,
"end": 763
} | class ____ extends TestCase {
public void test_for_jsonpath() throws Exception {
String str = "{ 'id' : 0, 'items' : [ {'name': 'apple', 'price' : 30 }, {'name': 'pear', 'price' : 40 } ] }";
JSONObject root = JSON.parseObject(str);
Object max = JSONPath.eval(root, "$.items[*].price.max()");
assertEquals(40, max);
Object min = JSONPath.eval(root, "$.items[*].price.min()");
assertEquals(30, min);
Object count = JSONPath.eval(root, "$.items[*].price.size()");
assertEquals(2, count);
}
}
| Issue3066 |
java | resilience4j__resilience4j | resilience4j-spring/src/test/java/io/github/resilience4j/ratelimiter/configure/RateLimiterConfigurationTest.java | {
"start": 1033,
"end": 9187
} | class ____ {
@Test
public void testRateLimiterRegistry() {
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties1 = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
instanceProperties1.setLimitForPeriod(2);
instanceProperties1.setSubscribeForEvents(true);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties2 = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
instanceProperties2.setLimitForPeriod(4);
instanceProperties2.setSubscribeForEvents(true);
RateLimiterConfigurationProperties rateLimiterConfigurationProperties = new RateLimiterConfigurationProperties();
rateLimiterConfigurationProperties.getInstances().put("backend1", instanceProperties1);
rateLimiterConfigurationProperties.getInstances().put("backend2", instanceProperties2);
rateLimiterConfigurationProperties.setRateLimiterAspectOrder(300);
RateLimiterConfiguration rateLimiterConfiguration = new RateLimiterConfiguration();
DefaultEventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>();
RateLimiterRegistry rateLimiterRegistry = rateLimiterConfiguration
.rateLimiterRegistry(rateLimiterConfigurationProperties, eventConsumerRegistry,
new CompositeRegistryEventConsumer<>(emptyList()),
compositeRateLimiterCustomizerTest());
assertThat(rateLimiterConfigurationProperties.getRateLimiterAspectOrder()).isEqualTo(300);
assertThat(rateLimiterRegistry.getAllRateLimiters().size()).isEqualTo(2);
RateLimiter rateLimiter = rateLimiterRegistry.rateLimiter("backend1");
assertThat(rateLimiter).isNotNull();
assertThat(rateLimiter.getRateLimiterConfig().getLimitForPeriod()).isEqualTo(2);
RateLimiter rateLimiter2 = rateLimiterRegistry.rateLimiter("backend2");
assertThat(rateLimiter2).isNotNull();
assertThat(rateLimiter2.getRateLimiterConfig().getLimitForPeriod()).isEqualTo(4);
assertThat(eventConsumerRegistry.getAllEventConsumer()).hasSize(2);
}
@Test
public void testCreateRateLimiterRegistryWithSharedConfigs() {
//Given
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties defaultProperties = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
defaultProperties.setLimitForPeriod(3);
defaultProperties.setLimitRefreshPeriod(Duration.ofNanos(5000000));
defaultProperties.setSubscribeForEvents(true);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties sharedProperties = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
sharedProperties.setLimitForPeriod(2);
sharedProperties.setLimitRefreshPeriod(Duration.ofNanos(6000000));
sharedProperties.setSubscribeForEvents(true);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties backendWithDefaultConfig = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
backendWithDefaultConfig.setBaseConfig("default");
backendWithDefaultConfig.setLimitForPeriod(200);
backendWithDefaultConfig.setSubscribeForEvents(true);
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties backendWithSharedConfig = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
backendWithSharedConfig.setBaseConfig("sharedConfig");
backendWithSharedConfig.setLimitForPeriod(300);
backendWithSharedConfig.setSubscribeForEvents(true);
RateLimiterConfigurationProperties rateLimiterConfigurationProperties = new RateLimiterConfigurationProperties();
rateLimiterConfigurationProperties.getConfigs().put("default", defaultProperties);
rateLimiterConfigurationProperties.getConfigs().put("sharedConfig", sharedProperties);
rateLimiterConfigurationProperties.getInstances()
.put("backendWithDefaultConfig", backendWithDefaultConfig);
rateLimiterConfigurationProperties.getInstances()
.put("backendWithSharedConfig", backendWithSharedConfig);
RateLimiterConfiguration rateLimiterConfiguration = new RateLimiterConfiguration();
DefaultEventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>();
//When
RateLimiterRegistry rateLimiterRegistry = rateLimiterConfiguration
.rateLimiterRegistry(rateLimiterConfigurationProperties, eventConsumerRegistry,
new CompositeRegistryEventConsumer<>(emptyList()),
compositeRateLimiterCustomizerTest());
//Then
assertThat(rateLimiterRegistry.getAllRateLimiters().size()).isEqualTo(2);
// Should get default config and override LimitForPeriod
RateLimiter rateLimiter1 = rateLimiterRegistry.rateLimiter("backendWithDefaultConfig");
assertThat(rateLimiter1).isNotNull();
assertThat(rateLimiter1.getRateLimiterConfig().getLimitForPeriod()).isEqualTo(200);
assertThat(rateLimiter1.getRateLimiterConfig().getLimitRefreshPeriod())
.isEqualTo(Duration.ofMillis(5));
// Should get shared config and override LimitForPeriod
RateLimiter rateLimiter2 = rateLimiterRegistry.rateLimiter("backendWithSharedConfig");
assertThat(rateLimiter2).isNotNull();
assertThat(rateLimiter2.getRateLimiterConfig().getLimitForPeriod()).isEqualTo(300);
assertThat(rateLimiter2.getRateLimiterConfig().getLimitRefreshPeriod())
.isEqualTo(Duration.ofMillis(6));
// Unknown backend should get default config of Registry
RateLimiter rerateLimiter3 = rateLimiterRegistry.rateLimiter("unknownBackend");
assertThat(rerateLimiter3).isNotNull();
assertThat(rerateLimiter3.getRateLimiterConfig().getLimitForPeriod()).isEqualTo(3);
assertThat(eventConsumerRegistry.getAllEventConsumer()).hasSize(2);
}
@Test
public void testCreateRateLimiterRegistryWithUnknownConfig() {
RateLimiterConfigurationProperties rateLimiterConfigurationProperties = new RateLimiterConfigurationProperties();
io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties();
instanceProperties.setBaseConfig("unknownConfig");
rateLimiterConfigurationProperties.getInstances().put("backend", instanceProperties);
RateLimiterConfiguration rateLimiterConfiguration = new RateLimiterConfiguration();
DefaultEventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>();
assertThatThrownBy(() -> rateLimiterConfiguration
.rateLimiterRegistry(rateLimiterConfigurationProperties, eventConsumerRegistry,
new CompositeRegistryEventConsumer<>(emptyList()),
compositeRateLimiterCustomizerTest()))
.isInstanceOf(ConfigurationNotFoundException.class)
.hasMessage("Configuration with name 'unknownConfig' does not exist");
}
public CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizerTest() {
return new CompositeCustomizer<>(Collections.emptyList());
}
} | RateLimiterConfigurationTest |
java | quarkusio__quarkus | integration-tests/openapi/src/main/java/io/quarkus/it/openapi/jaxrs/ByteArrayResource.java | {
"start": 508,
"end": 2484
} | class ____ {
@GET
@Path("/justByteArray/{fileName}")
public byte[] justByteArray(@PathParam("fileName") String filename) {
return toByteArray(filename);
}
@POST
@Path("/justByteArray")
public byte[] justByteArray(byte[] bs) {
return bs;
}
@GET
@Path("/restResponseByteArray/{fileName}")
public RestResponse<byte[]> restResponseByteArray(@PathParam("fileName") String filename) {
return RestResponse.ok(toByteArray(filename));
}
@POST
@Path("/restResponseByteArray")
public RestResponse<byte[]> restResponseByteArray(byte[] bs) {
return RestResponse.ok(bs);
}
@GET
@Path("/optionalByteArray/{fileName}")
public Optional<byte[]> optionalByteArray(@PathParam("fileName") String filename) {
return Optional.of(toByteArray(filename));
}
@POST
@Path("/optionalByteArray")
public Optional<byte[]> optionalByteArray(Optional<byte[]> inputStream) {
return inputStream;
}
@GET
@Path("/uniByteArray/{fileName}")
public Uni<byte[]> uniByteArray(@PathParam("fileName") String filename) {
return Uni.createFrom().item(toByteArray(filename));
}
@GET
@Path("/completionStageByteArray/{fileName}")
public CompletionStage<byte[]> completionStageByteArray(@PathParam("fileName") String filename) {
return CompletableFuture.completedStage(toByteArray(filename));
}
@GET
@Path("/completedFutureByteArray/{fileName}")
public CompletableFuture<byte[]> completedFutureByteArray(@PathParam("fileName") String filename) {
return CompletableFuture.completedFuture(toByteArray(filename));
}
private byte[] toByteArray(String filename) {
try {
String f = URLDecoder.decode(filename, "UTF-8");
return Files.readAllBytes(Paths.get(f));
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
| ByteArrayResource |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmCorrelatedCteJoin.java | {
"start": 693,
"end": 3284
} | class ____<T> extends SqmCteJoin<T> implements SqmCorrelation<T, T>, SqmCorrelatedSingularValuedJoin<T, T> {
private final SqmCorrelatedRootJoin<T> correlatedRootJoin;
private final SqmCteJoin<T> correlationParent;
public SqmCorrelatedCteJoin(SqmCteJoin<T> correlationParent) {
//noinspection unchecked
super(
correlationParent.getCte(),
correlationParent.getExplicitAlias(),
correlationParent.getSqmJoinType(),
(SqmRoot<T>) correlationParent.getRoot()
);
this.correlatedRootJoin = SqmCorrelatedDerivedRootJoin.create( correlationParent, this );
this.correlationParent = correlationParent;
}
private SqmCorrelatedCteJoin(
NavigablePath navigablePath,
SqmCteStatement<T> cte,
SqmPathSource<T> pathSource,
@Nullable String alias,
SqmJoinType joinType,
SqmRoot<T> sqmRoot,
SqmCorrelatedRootJoin<T> correlatedRootJoin,
SqmCteJoin<T> correlationParent) {
super( navigablePath, cte, pathSource, alias, joinType, sqmRoot );
this.correlatedRootJoin = correlatedRootJoin;
this.correlationParent = correlationParent;
}
@Override
public SqmCorrelatedCteJoin<T> copy(SqmCopyContext context) {
final SqmCorrelatedCteJoin<T> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmCorrelatedCteJoin<T> path = context.registerCopy(
this,
new SqmCorrelatedCteJoin<>(
getNavigablePath(),
getCte().copy( context ),
getReferencedPathSource(),
getExplicitAlias(),
getSqmJoinType(),
(SqmRoot<T>) findRoot().copy( context ),
correlatedRootJoin.copy( context ),
correlationParent.copy( context )
)
);
copyTo( path, context );
return path;
}
@Override
public SqmCteJoin<T> getCorrelationParent() {
return correlationParent;
}
@Override
public SqmPath<T> getWrappedPath() {
return correlationParent;
}
@Override
public boolean isCorrelated() {
return true;
}
@Override
public SqmRoot<T> getCorrelatedRoot() {
return correlatedRootJoin;
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitCorrelatedCteJoin( this );
}
@Override
public boolean deepEquals(SqmFrom<?, ?> other) {
return super.deepEquals( other )
&& other instanceof SqmCorrelatedCteJoin<?> that
&& correlationParent.equals( that.correlationParent );
}
@Override
public boolean isDeepCompatible(SqmFrom<?, ?> other) {
return super.isDeepCompatible( other )
&& other instanceof SqmCorrelatedCteJoin<?> that
&& correlationParent.isCompatible( that.correlationParent );
}
}
| SqmCorrelatedCteJoin |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/DirectWriteRolloverStrategy.java | {
"start": 2987,
"end": 3235
} | class ____ extends AbstractRolloverStrategy implements DirectFileRolloverStrategy {
private static final int DEFAULT_MAX_FILES = 7;
/**
* Builds DirectWriteRolloverStrategy instances.
*/
public static | DirectWriteRolloverStrategy |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/MockStorageInterface.java | {
"start": 6437,
"end": 9188
} | class ____ extends CloudBlobContainerWrapper {
private boolean created = false;
private HashMap<String, String> metadata;
private final String baseUri;
private final String name;
public MockCloudBlobContainerWrapper(String baseUri, String name) {
this.baseUri = baseUri;
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public boolean exists(OperationContext opContext) throws StorageException {
return created;
}
@Override
public void create(OperationContext opContext) throws StorageException {
created = true;
backingStore.setContainerMetadata(metadata);
}
@Override
public HashMap<String, String> getMetadata() {
return metadata;
}
@Override
public void setMetadata(HashMap<String, String> metadata) {
this.metadata = metadata;
}
@Override
public void downloadAttributes(OperationContext opContext)
throws StorageException {
metadata = backingStore.getContainerMetadata();
}
@Override
public void uploadMetadata(OperationContext opContext)
throws StorageException {
backingStore.setContainerMetadata(metadata);
}
@Override
public CloudBlobDirectoryWrapper getDirectoryReference(String relativePath)
throws URISyntaxException, StorageException {
return new MockCloudBlobDirectoryWrapper(new URI(fullUriString(
relativePath, true)));
}
@Override
public CloudBlockBlobWrapper getBlockBlobReference(String relativePath)
throws URISyntaxException, StorageException {
return new MockCloudBlockBlobWrapper(new URI(fullUriString(relativePath,
false)), null, 0);
}
@Override
public CloudPageBlobWrapper getPageBlobReference(String blobAddressUri)
throws URISyntaxException, StorageException {
return new MockCloudPageBlobWrapper(new URI(blobAddressUri), null, 0);
}
// helper to create full URIs for directory and blob.
// use withTrailingSlash=true to get a good path for a directory.
private String fullUriString(String relativePath, boolean withTrailingSlash) {
String baseUri = this.baseUri;
if (!baseUri.endsWith("/")) {
baseUri += "/";
}
if (withTrailingSlash && !relativePath.equals("")
&& !relativePath.endsWith("/")) {
relativePath += "/";
}
try {
URIBuilder builder = new URIBuilder(baseUri);
return builder.setPath(builder.getPath() + relativePath).toString();
} catch (URISyntaxException e) {
throw new RuntimeException("problem encoding fullUri", e);
}
}
}
private static | MockCloudBlobContainerWrapper |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/Header.java | {
"start": 1237,
"end": 1317
} | interface ____ {
/**
* Name of header
*/
String value();
}
| Header |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/AbstractReturnValueIgnored.java | {
"start": 5656,
"end": 18098
} | class ____ extends BugChecker
implements MethodInvocationTreeMatcher,
MemberReferenceTreeMatcher,
ReturnTreeMatcher,
NewClassTreeMatcher,
ResultUsePolicyAnalyzer<ExpressionTree, VisitorState> {
private final Supplier<UnusedReturnValueMatcher> unusedReturnValueMatcher =
memoize(() -> UnusedReturnValueMatcher.get(allowInExceptionThrowers()));
private final Supplier<Matcher<ExpressionTree>> matcher =
memoize(() -> allOf(unusedReturnValueMatcher.get(), this::isCheckReturnValue));
private final Supplier<Matcher<MemberReferenceTree>> lostReferenceTreeMatcher =
memoize(
() ->
allOf(
(t, s) -> isObjectReturningMethodReferenceExpression(t, s),
not((t, s) -> isExemptedInterfaceType(getType(t), s)),
not((t, s) -> isThrowingFunctionalInterface(getType(t), s)),
specializedMatcher()));
private final ConstantExpressions constantExpressions;
protected AbstractReturnValueIgnored(ConstantExpressions constantExpressions) {
this.constantExpressions = constantExpressions;
}
@Override
public Description matchMethodInvocation(
MethodInvocationTree methodInvocationTree, VisitorState state) {
Description description =
matcher.get().matches(methodInvocationTree, state)
? describeReturnValueIgnored(methodInvocationTree, state)
: NO_MATCH;
if (!description.equals(NO_MATCH)) {
return description;
}
return checkLostType(methodInvocationTree, state);
}
@Override
public Description matchNewClass(NewClassTree newClassTree, VisitorState state) {
return matcher.get().matches(newClassTree, state)
? describeReturnValueIgnored(newClassTree, state)
: NO_MATCH;
}
@Override
public Description matchMemberReference(MemberReferenceTree tree, VisitorState state) {
Description description =
matcher.get().matches(tree, state) ? describeReturnValueIgnored(tree, state) : NO_MATCH;
if (lostType(state).isEmpty() || !description.equals(NO_MATCH)) {
return description;
}
if (lostReferenceTreeMatcher.get().matches(tree, state)) {
return describeMatch(tree);
}
return description;
}
@Override
public boolean isCovered(ExpressionTree tree, VisitorState state) {
return isCheckReturnValue(tree, state);
}
@Override
public ResultUsePolicy getMethodPolicy(ExpressionTree expression, VisitorState state) {
return isCheckReturnValue(expression, state) ? EXPECTED : UNSPECIFIED;
}
/**
* Returns whether the given expression's return value should be used according to this checker,
* regardless of whether or not the return value is actually used.
*/
private boolean isCheckReturnValue(ExpressionTree tree, VisitorState state) {
// TODO(cgdecker): Just replace specializedMatcher with this?
return specializedMatcher().matches(tree, state);
}
/**
* Match whatever additional conditions concrete subclasses want to match (a list of known
* side-effect-free methods, has a @CheckReturnValue annotation, etc.).
*/
protected abstract Matcher<? super ExpressionTree> specializedMatcher();
/** Check for occurrences of this type being lost, i.e. cast to {@link Object}. */
protected Optional<Type> lostType(VisitorState state) {
return Optional.empty();
}
protected String lostTypeMessage(String returnedType, String declaredReturnType) {
return format("Returning %s from method that returns %s.", returnedType, declaredReturnType);
}
/**
* Override this to return false to forbid discarding return values in testers that are testing
* whether an exception is thrown.
*/
protected boolean allowInExceptionThrowers() {
return true;
}
/**
* Fixes the error by assigning the result of the call to the receiver reference, or deleting the
* method call. Subclasses may override if they prefer a different description.
*/
protected Description describeReturnValueIgnored(
MethodInvocationTree methodInvocationTree, VisitorState state) {
return buildDescription(methodInvocationTree)
.addAllFixes(fixesAtCallSite(methodInvocationTree, state))
.setMessage(getMessage(getSymbol(methodInvocationTree).getSimpleName()))
.build();
}
final ImmutableList<Fix> fixesAtCallSite(ExpressionTree invocationTree, VisitorState state) {
checkArgument(
invocationTree instanceof MethodInvocationTree || invocationTree instanceof NewClassTree,
"unexpected kind: %s",
invocationTree.getKind());
Tree parent = state.getPath().getParentPath().getLeaf();
Type resultType = getType(invocationTree);
// Find the root of the field access chain, i.e. a.intern().trim() ==> a.
/*
* TODO(cpovirk): Enhance getRootAssignable to return array accesses (e.g., `x[y]`)? If we do,
* then we'll also need to accept `symbol == null` (which is fine, since all we need the symbol
* for is to check against `this`, and `x[y]` is not `this`.)
*/
ExpressionTree identifierExpr =
invocationTree instanceof MethodInvocationTree methodInvocationTree
? getRootAssignable(methodInvocationTree)
: null; // null root assignable for constructor calls (as well as some method calls)
Symbol symbol = getSymbol(identifierExpr);
Type identifierType = getType(identifierExpr);
/*
* A map from short description to fix instance (even though every short description ultimately
* will become _part of_ a fix instance later).
*
* As always, the order of suggested fixes can matter. In practice, it probably matters mostly
* just to the checker's own tests. But it also affects the order in which the fixes are printed
* during compile errors, and it affects which fix is chosen for automatically generated fix CLs
* (though those should be rare inside Google: b/244334502#comment13).
*
* Note that, when possible, we have separate code that suggests adding @CanIgnoreReturnValue in
* preference to all the fixes below.
*
* The _names_ of the fixes probably don't actually matter inside Google: b/204435834#comment4.
* Luckily, they're not a ton harder to include than plain code comments would be.
*/
ImmutableMap.Builder<String, SuggestedFix> fixes = ImmutableMap.builder();
if (MOCKITO_VERIFY.matches(invocationTree, state)) {
ExpressionTree maybeCallToMock =
((MethodInvocationTree) invocationTree).getArguments().getFirst();
if (maybeCallToMock instanceof MethodInvocationTree methodInvocationTree) {
ExpressionTree maybeMethodSelectOnMock = methodInvocationTree.getMethodSelect();
if (maybeMethodSelectOnMock instanceof MemberSelectTree maybeSelectOnMock) {
// For this suggestion, we want to move the closing parenthesis:
// verify(foo .bar())
// ^ v
// +------+
//
// The result is:
// verify(foo).bar()
//
// TODO(cpovirk): Suggest this only if `foo` looks like an actual mock object.
SuggestedFix.Builder fix = SuggestedFix.builder();
fix.postfixWith(maybeSelectOnMock.getExpression(), ")");
int closingParen =
reverse(state.getOffsetTokensForNode(invocationTree)).stream()
.filter(t -> t.kind() == RPAREN)
.findFirst()
.get()
.pos();
fix.replace(closingParen, closingParen + 1, "");
fixes.put(
format("Verify that %s was called", maybeSelectOnMock.getIdentifier()), fix.build());
}
}
}
boolean considerBlanketFixes = true;
if (resultType != null && resultType.getKind() == TypeKind.BOOLEAN) {
// Fix by calling either assertThat(...).isTrue() or verify(...).
if (state.errorProneOptions().isTestOnlyTarget()) {
SuggestedFix.Builder fix = SuggestedFix.builder();
fix.prefixWith(
invocationTree,
qualifyStaticImport("com.google.common.truth.Truth.assertThat", fix, state) + "(")
.postfixWith(invocationTree, ").isTrue()");
fixes.put("Assert that the result is true", fix.build());
} else {
SuggestedFix.Builder fix = SuggestedFix.builder();
fix.prefixWith(
invocationTree,
qualifyStaticImport("com.google.common.base.Verify.verify", fix, state) + "(")
.postfixWith(invocationTree, ")");
fixes.put("Insert a runtime check that the result is true", fix.build());
}
} else if (resultType != null
// By looking for any isTrue() method, we handle not just Truth but also AssertJ.
&& matchingMethods(
NAME_OF_IS_TRUE.get(state),
m -> m.getParameters().isEmpty(),
resultType,
state.getTypes())
.anyMatch(m -> true)) {
fixes.put("Assert that the result is true", postfixWith(invocationTree, ".isTrue()"));
considerBlanketFixes = false;
}
if (identifierExpr != null
&& symbol != null
&& !symbol.name.contentEquals("this")
&& resultType != null
&& state.getTypes().isAssignable(resultType, identifierType)) {
fixes.put(
"Assign result back to variable",
prefixWith(invocationTree, state.getSourceForNode(identifierExpr) + " = "));
}
/*
* TODO(cpovirk): Suggest returning the value from the enclosing method where possible... *if*
* we can find a good heuristic. We could consider "Is the return type a protobuf" and/or "Is
* this a constructor call or build() call?"
*/
if (parent instanceof ExpressionStatementTree
&& constantExpressions.constantExpression(invocationTree, state).isEmpty()
&& considerBlanketFixes) {
ImmutableSet<String> identifiersInScope =
findAllIdents(state).stream().map(v -> v.name.toString()).collect(toImmutableSet());
concat(Stream.of("unused"), range(2, 10).mapToObj(i -> "unused" + i))
// TODO(b/72928608): Handle even local variables declared *later* within this scope.
// TODO(b/250568455): Also check whether we have suggested this name before in this scope.
.filter(n -> !identifiersInScope.contains(n))
.findFirst()
.ifPresent(
n ->
fixes.put(
"Suppress error by assigning to a variable",
prefixWith(parent, format("var %s = ", n))));
}
if (parent instanceof ExpressionStatementTree && considerBlanketFixes) {
if (constantExpressions.constantExpression(invocationTree, state).isPresent()) {
fixes.put("Delete call", delete(parent));
} else {
fixes.put("Delete call and any side effects", delete(parent));
}
}
return fixes.buildOrThrow().entrySet().stream()
.map(e -> e.getValue().toBuilder().setShortDescription(e.getKey()).build())
.collect(toImmutableList());
}
/**
* Uses the default description for results ignored via a method reference. Subclasses may
* override if they prefer a different description.
*/
protected Description describeReturnValueIgnored(
MemberReferenceTree memberReferenceTree, VisitorState state) {
return buildDescription(memberReferenceTree)
.setMessage(
getMessage(
state.getName(descriptiveNameForMemberReference(memberReferenceTree, state))))
.build();
}
/**
* Uses the default description for results ignored via a constructor call. Subclasses may
* override if they prefer a different description.
*/
protected Description describeReturnValueIgnored(NewClassTree newClassTree, VisitorState state) {
return buildDescription(newClassTree)
.setMessage(
format(
"Ignored return value of '%s'",
state.getSourceForNode(newClassTree.getIdentifier())))
.build();
}
private static String descriptiveNameForMemberReference(
MemberReferenceTree memberReferenceTree, VisitorState state) {
if (memberReferenceTree.getMode() == ReferenceMode.NEW) {
// The qualifier expression *should* just be the name of the | AbstractReturnValueIgnored |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Result.java | {
"start": 521,
"end": 1084
} | class ____<T> {
/**
* Serialisation fields
*/
public static final ParseField TYPE = new ParseField("result");
public static final ParseField RESULT_TYPE = new ParseField("result_type");
public static final ParseField TIMESTAMP = new ParseField("timestamp");
public static final ParseField IS_INTERIM = new ParseField("is_interim");
@Nullable
public final String index;
@Nullable
public final T result;
public Result(String index, T result) {
this.index = index;
this.result = result;
}
}
| Result |
java | apache__camel | components/camel-hazelcast/src/test/java/org/apache/camel/component/hazelcast/HazelcastListConsumerTest.java | {
"start": 1719,
"end": 4679
} | class ____ extends HazelcastCamelTestSupport {
@Mock
private IList<String> list;
@Captor
private ArgumentCaptor<ItemListener<String>> argument;
@Override
protected void trainHazelcastInstance(HazelcastInstance hazelcastInstance) {
when(hazelcastInstance.<String> getList("foo")).thenReturn(list);
when(list.addItemListener(any(), eq(true))).thenReturn(UUID.randomUUID());
}
@Override
@SuppressWarnings("unchecked")
protected void verifyHazelcastInstance(HazelcastInstance hazelcastInstance) {
verify(hazelcastInstance).getList("foo");
verify(list).addItemListener(any(ItemListener.class), eq(true));
}
@Test
public void add() throws InterruptedException {
MockEndpoint out = getMockEndpoint("mock:added");
out.expectedMessageCount(1);
verify(list).addItemListener(argument.capture(), eq(true));
final ItemEvent<String> event = new ItemEvent<>("mm", ItemEventType.ADDED, "foo", null);
argument.getValue().itemAdded(event);
MockEndpoint.assertIsSatisfied(context, 2000, TimeUnit.MILLISECONDS);
this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.ADDED);
}
@Test
public void remove() throws InterruptedException {
MockEndpoint out = getMockEndpoint("mock:removed");
out.expectedMessageCount(1);
verify(list).addItemListener(argument.capture(), eq(true));
final ItemEvent<String> event = new ItemEvent<>("mm", ItemEventType.REMOVED, "foo", null);
argument.getValue().itemRemoved(event);
MockEndpoint.assertIsSatisfied(context, 2000, TimeUnit.MILLISECONDS);
this.checkHeaders(out.getExchanges().get(0).getIn().getHeaders(), HazelcastConstants.REMOVED);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from(String.format("hazelcast-%sfoo", HazelcastConstants.LIST_PREFIX)).log("object...").choice()
.when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.ADDED))
.log("...added").to("mock:added")
.when(header(HazelcastConstants.LISTENER_ACTION).isEqualTo(HazelcastConstants.REMOVED))
.log("...removed").to("mock:removed").otherwise()
.log("fail!");
}
};
}
private void checkHeaders(Map<String, Object> headers, String action) {
assertEquals(action, headers.get(HazelcastConstants.LISTENER_ACTION));
assertEquals(HazelcastConstants.CACHE_LISTENER, headers.get(HazelcastConstants.LISTENER_TYPE));
assertNull(headers.get(HazelcastConstants.OBJECT_ID));
assertNotNull(headers.get(HazelcastConstants.LISTENER_TIME));
}
}
| HazelcastListConsumerTest |
java | junit-team__junit5 | documentation/src/test/java/example/exception/MultipleHandlersTestCase.java | {
"start": 1341,
"end": 1582
} | class ____ implements LifecycleMethodExecutionExceptionHandler {
@Override
public void handleBeforeEachMethodExecutionException(ExtensionContext context, Throwable ex)
throws Throwable {
throw ex;
}
}
static | SecondExecutedHandler |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagement.java | {
"start": 1669,
"end": 12950
} | class ____ {
private static final Logger logger = LogManager.getLogger(PhaseCacheManagement.class);
private PhaseCacheManagement() {}
/**
* Rereads the phase JSON for the given index, returning a new cluster state.
*/
public static ProjectMetadata refreshPhaseDefinition(
final ProjectMetadata projectMetadata,
final String index,
final LifecyclePolicyMetadata updatedPolicy
) {
IndexMetadata idxMeta = projectMetadata.index(index);
ProjectMetadata.Builder projectMetadataBuilder = ProjectMetadata.builder(projectMetadata);
refreshPhaseDefinition(projectMetadataBuilder, idxMeta, updatedPolicy);
return projectMetadataBuilder.build();
}
/**
* Rereads the phase JSON for the given index, and updates the provided metadata.
*/
public static void refreshPhaseDefinition(
final ProjectMetadata.Builder projectMetadataBuilder,
final IndexMetadata idxMeta,
final LifecyclePolicyMetadata updatedPolicy
) {
String index = idxMeta.getIndex().getName();
assert eligibleToCheckForRefresh(idxMeta) : "index " + index + " is missing crucial information needed to refresh phase definition";
logger.trace("[{}] updating cached phase definition for policy [{}]", index, updatedPolicy.getName());
LifecycleExecutionState currentExState = idxMeta.getLifecycleExecutionState();
String currentPhase = currentExState.phase();
PhaseExecutionInfo pei = new PhaseExecutionInfo(
updatedPolicy.getName(),
updatedPolicy.getPolicy().getPhases().get(currentPhase),
updatedPolicy.getVersion(),
updatedPolicy.getModifiedDate()
);
LifecycleExecutionState newExState = LifecycleExecutionState.builder(currentExState)
.setPhaseDefinition(Strings.toString(pei, false, false))
.build();
projectMetadataBuilder.put(IndexMetadata.builder(idxMeta).putCustom(ILM_CUSTOM_METADATA_KEY, newExState.asMap()));
}
/**
* Ensure that we have the minimum amount of metadata necessary to check for cache phase
* refresh. This includes:
* - An execution state
* - Existing phase definition JSON
* - A current step key
* - A current phase in the step key
* - Not currently in the ERROR step
*/
public static boolean eligibleToCheckForRefresh(final IndexMetadata metadata) {
LifecycleExecutionState executionState = metadata.getLifecycleExecutionState();
if (executionState == null || executionState.phaseDefinition() == null) {
return false;
}
Step.StepKey currentStepKey = Step.getCurrentStepKey(executionState);
if (currentStepKey == null || currentStepKey.phase() == null) {
return false;
}
return ErrorStep.NAME.equals(currentStepKey.name()) == false;
}
/**
* For the given new policy, returns a new cluster with all updateable indices' phase JSON refreshed.
*/
public static ProjectMetadata updateIndicesForPolicy(
final ProjectMetadata projectMetadata,
final NamedXContentRegistry xContentRegistry,
final Client client,
final LifecyclePolicy oldPolicy,
final LifecyclePolicyMetadata newPolicy,
XPackLicenseState licenseState
) {
ProjectMetadata.Builder projectMetadataBuilder = ProjectMetadata.builder(projectMetadata);
if (updateIndicesForPolicy(projectMetadataBuilder, projectMetadata, xContentRegistry, client, oldPolicy, newPolicy, licenseState)) {
return projectMetadataBuilder.build();
}
return projectMetadata;
}
/**
* For the given new policy, update the provided metadata to reflect the refreshed phase JSON for all updateable indices.
* Returns true if any indices were updated and false otherwise.
* Users of this API should consider the returned value and only create a new {@link ClusterState} if `true` is returned.
*/
public static boolean updateIndicesForPolicy(
final ProjectMetadata.Builder projectMetadataBuilder,
final ProjectMetadata projectMetadata,
final NamedXContentRegistry xContentRegistry,
final Client client,
final LifecyclePolicy oldPolicy,
final LifecyclePolicyMetadata newPolicy,
final XPackLicenseState licenseState
) {
assert oldPolicy.getName().equals(newPolicy.getName())
: "expected both policies to have the same id but they were: [" + oldPolicy.getName() + "] vs. [" + newPolicy.getName() + "]";
// No need to update anything if the policies are identical in contents
if (oldPolicy.equals(newPolicy.getPolicy())) {
logger.debug("policy [{}] is unchanged and no phase definition refresh is needed", oldPolicy.getName());
return false;
}
final List<IndexMetadata> indicesThatCanBeUpdated = projectMetadata.indices()
.values()
.stream()
.filter(meta -> newPolicy.getName().equals(meta.getLifecyclePolicyName()))
.filter(meta -> isIndexPhaseDefinitionUpdatable(xContentRegistry, client, meta, newPolicy.getPolicy(), licenseState))
.toList();
final List<String> refreshedIndices = new ArrayList<>(indicesThatCanBeUpdated.size());
for (IndexMetadata index : indicesThatCanBeUpdated) {
try {
refreshPhaseDefinition(projectMetadataBuilder, index, newPolicy);
refreshedIndices.add(index.getIndex().getName());
} catch (Exception e) {
logger.warn(() -> format("[%s] unable to refresh phase definition for updated policy [%s]", index, newPolicy.getName()), e);
}
}
logger.debug("refreshed policy [{}] phase definition for [{}] indices", newPolicy.getName(), refreshedIndices.size());
return refreshedIndices.size() > 0;
}
/**
* Returns 'true' if the index's cached phase JSON can be safely reread, 'false' otherwise.
*/
public static boolean isIndexPhaseDefinitionUpdatable(
final NamedXContentRegistry xContentRegistry,
final Client client,
final IndexMetadata metadata,
final LifecyclePolicy newPolicy,
final XPackLicenseState licenseState
) {
final String index = metadata.getIndex().getName();
if (eligibleToCheckForRefresh(metadata) == false) {
logger.debug("[{}] does not contain enough information to check for eligibility of refreshing phase", index);
return false;
}
final String policyId = newPolicy.getName();
final LifecycleExecutionState executionState = metadata.getLifecycleExecutionState();
final Step.StepKey currentStepKey = Step.getCurrentStepKey(executionState);
final String currentPhase = currentStepKey.phase();
final Set<Step.StepKey> newStepKeys = newPolicy.toSteps(client, licenseState)
.stream()
.map(Step::getKey)
.collect(Collectors.toCollection(LinkedHashSet::new));
if (newStepKeys.contains(currentStepKey) == false) {
// The index is on a step that doesn't exist in the new policy, we
// can't safely re-read the JSON
logger.debug(
"[{}] updated policy [{}] does not contain the current step key [{}], so the policy phase will not be refreshed",
index,
policyId,
currentStepKey
);
return false;
}
final String phaseDef = executionState.phaseDefinition();
final Set<Step.StepKey> oldStepKeys = readStepKeys(xContentRegistry, client, phaseDef, currentPhase, licenseState);
if (oldStepKeys == null) {
logger.debug(
"[{}] unable to parse phase definition for cached policy [{}], policy phase will not be refreshed",
index,
policyId
);
return false;
}
final Set<Step.StepKey> oldPhaseStepKeys = oldStepKeys.stream()
.filter(sk -> currentPhase.equals(sk.phase()))
.collect(Collectors.toCollection(LinkedHashSet::new));
final PhaseExecutionInfo phaseExecutionInfo = new PhaseExecutionInfo(policyId, newPolicy.getPhases().get(currentPhase), 1L, 1L);
final String peiJson = Strings.toString(phaseExecutionInfo);
final Set<Step.StepKey> newPhaseStepKeys = readStepKeys(xContentRegistry, client, peiJson, currentPhase, licenseState);
if (newPhaseStepKeys == null) {
logger.debug(
() -> format("[%s] unable to parse phase definition for policy [%s] to determine if it could be refreshed", index, policyId)
);
return false;
}
if (newPhaseStepKeys.equals(oldPhaseStepKeys)) {
// The new and old phase have the same stepkeys for this current phase, so we can
// refresh the definition because we know it won't change the execution flow.
logger.debug("[{}] updated policy [{}] contains the same phase step keys and can be refreshed", index, policyId);
return true;
} else {
logger.debug(
"[{}] updated policy [{}] has different phase step keys and will NOT refresh phase "
+ "definition as it differs too greatly. old: {}, new: {}",
index,
policyId,
oldPhaseStepKeys,
newPhaseStepKeys
);
return false;
}
}
/**
* Parse the {@code phaseDef} phase definition to get the stepkeys for the given phase.
* If there is an error parsing or if the phase definition is missing the required
* information, returns null.
*/
@Nullable
static Set<Step.StepKey> readStepKeys(
final NamedXContentRegistry xContentRegistry,
final Client client,
final String phaseDef,
final String currentPhase,
final XPackLicenseState licenseState
) {
if (phaseDef == null) {
return null;
}
final PhaseExecutionInfo phaseExecutionInfo;
try (
XContentParser parser = JsonXContent.jsonXContent.createParser(
XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry),
phaseDef
)
) {
phaseExecutionInfo = PhaseExecutionInfo.parse(parser, currentPhase);
} catch (Exception e) {
logger.trace(() -> format("exception reading step keys checking for refreshability, phase definition: %s", phaseDef), e);
return null;
}
if (phaseExecutionInfo == null || phaseExecutionInfo.getPhase() == null) {
return null;
}
return phaseExecutionInfo.getPhase()
.getActions()
.values()
.stream()
.flatMap(a -> a.toSteps(client, phaseExecutionInfo.getPhase().getName(), null, licenseState).stream())
.map(Step::getKey)
.collect(Collectors.toCollection(LinkedHashSet::new));
}
}
| PhaseCacheManagement |
java | apache__camel | components/camel-mybatis/src/test/java/org/apache/camel/component/mybatis/MyBatisInsertTest.java | {
"start": 1101,
"end": 2874
} | class ____ extends MyBatisTestSupport {
@Test
public void testInsert() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
Account account = new Account();
account.setId(444);
account.setFirstName("Willem");
account.setLastName("Jiang");
account.setEmailAddress("Faraway@gmail.com");
template.sendBody("direct:start", account);
MockEndpoint.assertIsSatisfied(context);
// there should be 3 rows now
Integer rows = template.requestBody("mybatis:count?statementType=SelectOne", null, Integer.class);
assertEquals(3, rows.intValue(), "There should be 3 rows");
}
@Test
public void testInsertMap() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
Map<String, Object> map = new HashMap<>();
map.put("id", 555);
map.put("firstName", "Donald");
map.put("lastName", "Duck");
map.put("emailAddress", "donald@duck.com");
template.sendBody("direct:start", map);
MockEndpoint.assertIsSatisfied(context);
// there should be 3 rows now
Integer rows = template.requestBody("mybatis:count?statementType=SelectOne", null, Integer.class);
assertEquals(3, rows.intValue(), "There should be 3 rows");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.to("mybatis:insertAccount?statementType=Insert")
.to("mock:result");
}
};
}
}
| MyBatisInsertTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/objectid/TestObjectIdDeserialization.java | {
"start": 760,
"end": 1017
} | class ____ extends DatabindTestUtil
{
private static final String POOL_KEY = "POOL";
// // Classes for external id use
@JsonIdentityInfo(generator=ObjectIdGenerators.IntSequenceGenerator.class, property="id")
static | TestObjectIdDeserialization |
java | google__guava | guava-tests/test/com/google/common/collect/ImmutableRangeMapTest.java | {
"start": 1233,
"end": 9670
} | class ____ extends TestCase {
private static final ImmutableList<Range<Integer>> RANGES;
private static final int MIN_BOUND = 0;
private static final int MAX_BOUND = 10;
static {
ImmutableList.Builder<Range<Integer>> builder = ImmutableList.builder();
builder.add(Range.<Integer>all());
// Add one-ended ranges
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
for (BoundType type : BoundType.values()) {
builder.add(Range.upTo(i, type));
builder.add(Range.downTo(i, type));
}
}
// Add two-ended ranges
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
for (int j = i + 1; j <= MAX_BOUND; j++) {
for (BoundType lowerType : BoundType.values()) {
for (BoundType upperType : BoundType.values()) {
if (i == j & lowerType == OPEN & upperType == OPEN) {
continue;
}
builder.add(Range.range(i, lowerType, j, upperType));
}
}
}
}
RANGES = builder.build();
}
public void testBuilderRejectsEmptyRanges() {
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
int ii = i;
ImmutableRangeMap.Builder<Integer, Integer> builder = ImmutableRangeMap.builder();
assertThrows(IllegalArgumentException.class, () -> builder.put(Range.closedOpen(ii, ii), 1));
assertThrows(IllegalArgumentException.class, () -> builder.put(Range.openClosed(ii, ii), 1));
}
}
public void testOverlapRejection() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
boolean expectRejection =
range1.isConnected(range2) && !range1.intersection(range2).isEmpty();
ImmutableRangeMap.Builder<Integer, Integer> builder = ImmutableRangeMap.builder();
builder.put(range1, 1).put(range2, 2);
try {
ImmutableRangeMap<Integer, Integer> unused = builder.build();
assertFalse(expectRejection);
} catch (IllegalArgumentException e) {
assertTrue(expectRejection);
}
}
}
}
public void testGet() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
if (!range1.isConnected(range2) || range1.intersection(range2).isEmpty()) {
ImmutableRangeMap<Integer, Integer> rangeMap =
ImmutableRangeMap.<Integer, Integer>builder().put(range1, 1).put(range2, 2).build();
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
Integer expectedValue = null;
if (range1.contains(i)) {
expectedValue = 1;
} else if (range2.contains(i)) {
expectedValue = 2;
}
assertEquals(expectedValue, rangeMap.get(i));
}
}
}
}
}
public void testSpanEmpty() {
assertThrows(NoSuchElementException.class, () -> ImmutableRangeMap.of().span());
}
public void testSpanSingleRange() {
for (Range<Integer> range : RANGES) {
RangeMap<Integer, Integer> rangemap =
ImmutableRangeMap.<Integer, Integer>builder().put(range, 1).build();
assertEquals(range, rangemap.span());
}
}
public void testSpanTwoRanges() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
if (!range1.isConnected(range2) || range1.intersection(range2).isEmpty()) {
RangeMap<Integer, Integer> rangemap =
ImmutableRangeMap.<Integer, Integer>builder().put(range1, 1).put(range2, 2).build();
assertEquals(range1.span(range2), rangemap.span());
}
}
}
}
public void testGetEntry() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
if (!range1.isConnected(range2) || range1.intersection(range2).isEmpty()) {
ImmutableRangeMap<Integer, Integer> rangeMap =
ImmutableRangeMap.<Integer, Integer>builder().put(range1, 1).put(range2, 2).build();
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
Entry<Range<Integer>, Integer> expectedEntry = null;
if (range1.contains(i)) {
expectedEntry = immutableEntry(range1, 1);
} else if (range2.contains(i)) {
expectedEntry = immutableEntry(range2, 2);
}
assertEquals(expectedEntry, rangeMap.getEntry(i));
}
}
}
}
}
public void testGetLargeRangeMap() {
ImmutableRangeMap.Builder<Integer, Integer> builder = ImmutableRangeMap.builder();
for (int i = 0; i < 100; i++) {
builder.put(Range.closedOpen(i, i + 1), i);
}
ImmutableRangeMap<Integer, Integer> map = builder.build();
for (int i = 0; i < 100; i++) {
assertEquals(Integer.valueOf(i), map.get(i));
}
}
@AndroidIncompatible // slow
public void testAsMapOfRanges() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
if (!range1.isConnected(range2) || range1.intersection(range2).isEmpty()) {
ImmutableRangeMap<Integer, Integer> rangeMap =
ImmutableRangeMap.<Integer, Integer>builder().put(range1, 1).put(range2, 2).build();
ImmutableMap<Range<Integer>, Integer> expectedAsMap =
ImmutableMap.of(range1, 1, range2, 2);
ImmutableMap<Range<Integer>, Integer> asMap = rangeMap.asMapOfRanges();
ImmutableMap<Range<Integer>, Integer> descendingMap = rangeMap.asDescendingMapOfRanges();
assertEquals(expectedAsMap, asMap);
assertEquals(expectedAsMap, descendingMap);
SerializableTester.reserializeAndAssert(asMap);
SerializableTester.reserializeAndAssert(descendingMap);
assertEquals(
ImmutableList.copyOf(asMap.entrySet()).reverse(),
ImmutableList.copyOf(descendingMap.entrySet()));
for (Range<Integer> query : RANGES) {
assertEquals(expectedAsMap.get(query), asMap.get(query));
}
}
}
}
}
public void testSubRangeMap() {
for (Range<Integer> range1 : RANGES) {
for (Range<Integer> range2 : RANGES) {
if (!range1.isConnected(range2) || range1.intersection(range2).isEmpty()) {
for (Range<Integer> subRange : RANGES) {
ImmutableRangeMap<Integer, Integer> rangeMap =
ImmutableRangeMap.<Integer, Integer>builder().put(range1, 1).put(range2, 2).build();
ImmutableRangeMap.Builder<Integer, Integer> expectedBuilder =
ImmutableRangeMap.builder();
for (Entry<Range<Integer>, Integer> entry : rangeMap.asMapOfRanges().entrySet()) {
if (entry.getKey().isConnected(subRange)
&& !entry.getKey().intersection(subRange).isEmpty()) {
expectedBuilder.put(entry.getKey().intersection(subRange), entry.getValue());
}
}
ImmutableRangeMap<Integer, Integer> expected = expectedBuilder.build();
assertEquals(expected, rangeMap.subRangeMap(subRange));
}
}
}
}
}
public void testSerialization() {
ImmutableRangeMap<Integer, Integer> emptyRangeMap = ImmutableRangeMap.of();
SerializableTester.reserializeAndAssert(emptyRangeMap);
ImmutableRangeMap<Integer, Integer> nonEmptyRangeMap =
new ImmutableRangeMap.Builder<Integer, Integer>()
.put(Range.closed(2, 4), 5)
.put(Range.open(6, 7), 3)
.put(Range.closedOpen(8, 10), 4)
.put(Range.openClosed(15, 17), 2)
.build();
ImmutableMap<Range<Integer>, Integer> test = nonEmptyRangeMap.asMapOfRanges();
for (Range<Integer> range : test.keySet()) {
SerializableTester.reserializeAndAssert(range);
}
SerializableTester.reserializeAndAssert(test.keySet());
SerializableTester.reserializeAndAssert(nonEmptyRangeMap);
}
public void testToImmutableRangeMap() {
Range<Integer> rangeOne = Range.closedOpen(1, 5);
Range<Integer> rangeTwo = Range.openClosed(6, 7);
ImmutableRangeMap<Integer, Integer> rangeMap =
new ImmutableRangeMap.Builder<Integer, Integer>().put(rangeOne, 1).put(rangeTwo, 6).build();
CollectorTester.of(
ImmutableRangeMap.<Range<Integer>, Integer, Integer>toImmutableRangeMap(
k -> k, k -> k.lowerEndpoint()))
.expectCollects(rangeMap, rangeOne, rangeTwo);
}
}
| ImmutableRangeMapTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2117/Issue2117Mapper.java | {
"start": 372,
"end": 605
} | interface ____ {
Issue2117Mapper INSTANCE = Mappers.getMapper( Issue2117Mapper.class );
@Mapping(target = "accessMode", source = "accessMode")
Target toTarget(AccessMode accessMode, String otherSource);
| Issue2117Mapper |
java | elastic__elasticsearch | x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java | {
"start": 4206,
"end": 5455
} | enum ____ {
/**
* The first chunk sent. Used to indicate that the verification has successfully started, and therefore we should start to send a
* 200 OK response to the client.
*/
START_RESPONSE,
/**
* This chunk contains the raw {@link SnapshotInfo} for a snapshot.
*/
SNAPSHOT_INFO,
/**
* This chunk contains information about the restorability of an index.
*/
INDEX_RESTORABILITY,
/**
* This chunk describes an anomaly found during verification.
*/
ANOMALY,
}
public RepositoryVerifyIntegrityResponseChunk {
if (fileLength == null
|| partLength == null
|| blobLength == null
|| shardId < -1
|| partIndex < -1
|| partCount < -1
|| totalSnapshotCount < -1
|| restorableSnapshotCount < -1
|| (totalSnapshotCount >= 0 != restorableSnapshotCount >= 0)) {
throw new IllegalArgumentException("invalid: " + this);
}
}
public RepositoryVerifyIntegrityResponseChunk(StreamInput in) throws IOException {
this(
in.readVLong(),
// TODO | Type |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 27749,
"end": 27828
} | interface ____<T> extends GenericDao<T> {
}
public static | ConvenienceGenericDao |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/resume/ResumableCompletion.java | {
"start": 1208,
"end": 4146
} | class ____ implements Synchronization {
private static final Logger LOG = LoggerFactory.getLogger(ResumableCompletion.class);
private final ResumeStrategy resumeStrategy;
private final LoggingLevel loggingLevel;
private final boolean intermittent;
public ResumableCompletion(ResumeStrategy resumeStrategy, LoggingLevel loggingLevel, boolean intermittent) {
this.resumeStrategy = resumeStrategy;
this.loggingLevel = loggingLevel;
this.intermittent = intermittent;
}
@Override
public void onComplete(Exchange exchange) {
if (ExchangeHelper.isFailureHandled(exchange)) {
return;
}
Object offset = ExchangeHelper.getResultMessage(exchange).getHeader(Exchange.OFFSET);
if (offset instanceof Resumable resumable) {
if (LOG.isTraceEnabled()) {
LOG.trace("Processing the resumable: {}", resumable.getOffsetKey());
LOG.trace("Processing the resumable of type: {}", resumable.getLastOffset().getValue());
}
try {
resumeStrategy.updateLastOffset(resumable);
} catch (Exception e) {
LOG.error("Unable to update the offset: {}", e.getMessage(), e);
}
} else {
if (!intermittent) {
exchange.setException(new NoOffsetException(exchange));
LOG.warn("Cannot update the last offset because it's not available");
}
}
}
@Override
public void onFailure(Exchange exchange) {
Exception e = exchange.getException();
Object resObj = exchange.getMessage().getHeader(Exchange.OFFSET);
if (resObj instanceof Resumable resumable) {
String logMessage = String.format(
"Skipping offset update with address '%s' and offset value '%s' due to failure in processing: %s",
resumable.getOffsetKey(), resumable.getLastOffset().getValue(), e.getMessage());
if (LOG.isDebugEnabled() || CamelLogger.shouldLog(LOG, loggingLevel)) {
CamelLogger.log(LOG, LoggingLevel.DEBUG, logMessage, e);
} else {
logMessage += " (stacktrace available in DEBUG logging level)";
CamelLogger.log(LOG, loggingLevel, logMessage);
}
} else {
String logMessage = String.format("Skipping offset update of '%s' due to failure in processing: %s",
resObj == null ? "type null" : "unspecified type", e.getMessage());
if (LOG.isDebugEnabled() || CamelLogger.shouldLog(LOG, loggingLevel)) {
CamelLogger.log(LOG, LoggingLevel.DEBUG, logMessage, e);
} else {
logMessage += " (stacktrace available in DEBUG logging level)";
CamelLogger.log(LOG, loggingLevel, logMessage);
}
}
}
}
| ResumableCompletion |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/validation/ConstraintValidators.java | {
"start": 1242,
"end": 3685
} | class ____ {
private ConstraintValidators() {}
/**
* Finds all relevant {@link ConstraintValidator} objects from an array of annotations. All validators will be
* {@link ConstraintValidator#initialize(java.lang.annotation.Annotation) initialized} before being returned.
*
* @param annotations the annotations to find constraint validators for
* @return a collection of ConstraintValidators for the given annotations
*/
public static Collection<ConstraintValidator<?>> findValidators(final Annotation... annotations) {
final Collection<ConstraintValidator<?>> validators = new ArrayList<>();
for (final Annotation annotation : annotations) {
final Class<? extends Annotation> type = annotation.annotationType();
if (type.isAnnotationPresent(Constraint.class)) {
final ConstraintValidator<?> validator = getValidator(annotation, type);
if (validator != null) {
validators.add(validator);
}
}
}
return validators;
}
private static <A extends Annotation> ConstraintValidator<A> getValidator(
final A annotation, final Class<? extends A> type) {
final Constraint constraint = type.getAnnotation(Constraint.class);
final Class<? extends ConstraintValidator<?>> validatorClass = constraint.value();
if (type.equals(getConstraintValidatorAnnotationType(validatorClass))) {
@SuppressWarnings("unchecked") // I don't think we could be any more thorough in validation here
final ConstraintValidator<A> validator =
(ConstraintValidator<A>) ReflectionUtil.instantiate(validatorClass);
validator.initialize(annotation);
return validator;
}
return null;
}
private static Type getConstraintValidatorAnnotationType(final Class<? extends ConstraintValidator<?>> type) {
for (final Type parentType : type.getGenericInterfaces()) {
if (parentType instanceof ParameterizedType) {
final ParameterizedType parameterizedType = (ParameterizedType) parentType;
if (ConstraintValidator.class.equals(parameterizedType.getRawType())) {
return parameterizedType.getActualTypeArguments()[0];
}
}
}
return Void.TYPE;
}
}
| ConstraintValidators |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/Mappings.java | {
"start": 687,
"end": 1019
} | interface ____ {
* @Mappings({
* @Mapping(target = "firstProperty", source = "first"),
* @Mapping(target = "secondProperty", source = "second")
* })
* HumanDto toHumanDto(Human human);
* }
* </code></pre>
* <pre><code class='java'>
* // Java 8 and later
* @Mapper
* public | MyMapper |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/impl/utils/ConcurrentCyclicSequence.java | {
"start": 5101,
"end": 5546
} | class ____ implements Iterator<T> {
final int start;
int cursor;
public Iter(int start) {
this.start = start;
cursor = 0;
}
@Override
public boolean hasNext() {
return cursor != elements.length;
}
@Override
public T next() {
if (cursor >= elements.length) {
throw new NoSuchElementException();
}
return (T) elements[computeIndex(start + cursor++)];
}
}
}
| Iter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java | {
"start": 17789,
"end": 17949
} | class ____ qdisc at root
*/
private String getStringForAddClassToRootQDisc(int rateMbit) {
String rateMbitStr = rateMbit + MBIT_SUFFIX;
//example : " | to |
java | google__dagger | javatests/dagger/internal/codegen/MapMultibindingValidationTest.java | {
"start": 1768,
"end": 3447
} | class ____ {",
" @Provides @IntoMap @StringKey(\"AKey\") Object provideObjectForAKey() {",
" return \"one\";",
" }",
"",
" @Provides @IntoMap @StringKey(\"AKey\") Object provideObjectForAKeyAgain() {",
" return \"one again\";",
" }",
"}");
// If they're all there, report only Map<K, V>.
CompilerTests.daggerCompiler(
module,
component(
"Map<String, Object> objects();",
"Map<String, Provider<Object>> objectProviders();",
"Producer<Map<String, Producer<Object>>> objectProducers();"))
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"The same map key is bound more than once for Map<String,Object>");
subject.hasErrorContaining("provideObjectForAKey()");
subject.hasErrorContaining("provideObjectForAKeyAgain()");
});
CompilerTests.daggerCompiler(module)
.withProcessingOptions(
ImmutableMap.<String, String>builder()
.putAll(compilerMode.processorOptions())
.put("dagger.fullBindingGraphValidation", "ERROR")
.buildOrThrow())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"The same map key is bound more than once for Map<String,Object>")
.onSource(module)
.onLineContaining(" | MapModule |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/routing/RoutingAppender.java | {
"start": 21027,
"end": 21356
} | class ____ extends RouteAppenderControl {
ReferencedRouteAppenderControl(final Appender appender) {
super(appender);
}
@Override
void checkout() {
// nop
}
@Override
void release() {
// nop
}
}
}
| ReferencedRouteAppenderControl |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java | {
"start": 19957,
"end": 50670
} | class ____ perform validation for
*/
@SuppressWarnings("unchecked")
private <T> ConfigInfos validateConverterConfig(
Map<String, String> connectorConfig,
ConfigValue pluginConfigValue,
ConfigValue pluginVersionValue,
Class<T> pluginInterface,
Function<T, ConfigDef> configDefAccessor,
String pluginName,
String pluginProperty,
String pluginVersionProperty,
Map<String, String> defaultProperties,
ClassLoader connectorLoader,
Function<String, TemporaryStage> reportStage
) {
Objects.requireNonNull(connectorConfig);
Objects.requireNonNull(pluginInterface);
Objects.requireNonNull(configDefAccessor);
Objects.requireNonNull(pluginName);
Objects.requireNonNull(pluginProperty);
Objects.requireNonNull(pluginVersionProperty);
String pluginClass = connectorConfig.get(pluginProperty);
String pluginVersion = connectorConfig.get(pluginVersionProperty);
if (pluginClass == null
|| pluginConfigValue == null
|| !pluginConfigValue.errorMessages().isEmpty()
|| !pluginVersionValue.errorMessages().isEmpty()
) {
// Either no custom converter was specified, or one was specified but there's a problem with it.
// No need to proceed any further.
return null;
}
T pluginInstance;
String stageDescription = "instantiating the connector's " + pluginName + " for validation";
try (TemporaryStage stage = reportStage.apply(stageDescription)) {
VersionRange range = PluginUtils.connectorVersionRequirement(pluginVersion);
pluginInstance = (T) plugins().newPlugin(pluginClass, range, connectorLoader);
} catch (VersionedPluginLoadingException e) {
log.error("Failed to load {} class {} with version {}", pluginName, pluginClass, pluginVersion, e);
pluginConfigValue.addErrorMessage(e.getMessage());
pluginVersionValue.addErrorMessage(e.getMessage());
return null;
} catch (ClassNotFoundException | RuntimeException e) {
log.error("Failed to instantiate {} class {}; this should have been caught by prior validation logic", pluginName, pluginClass, e);
pluginConfigValue.addErrorMessage("Failed to load class " + pluginClass + (e.getMessage() != null ? ": " + e.getMessage() : ""));
return null;
} catch (InvalidVersionSpecificationException e) {
// this should have been caught by prior validation logic
log.error("Invalid version range for {} class {} with version {}", pluginName, pluginClass, pluginVersion, e);
pluginVersionValue.addErrorMessage(e.getMessage());
return null;
}
try {
ConfigDef configDef;
stageDescription = "retrieving the configuration definition from the connector's " + pluginName;
try (TemporaryStage stage = reportStage.apply(stageDescription)) {
configDef = configDefAccessor.apply(pluginInstance);
} catch (RuntimeException e) {
log.error("Failed to load ConfigDef from {} of type {}", pluginName, pluginClass, e);
pluginConfigValue.addErrorMessage("Failed to load ConfigDef from " + pluginName + (e.getMessage() != null ? ": " + e.getMessage() : ""));
return null;
}
if (configDef == null) {
log.warn("{}.config() has returned a null ConfigDef; no further preflight config validation for this converter will be performed", pluginClass);
// Older versions of Connect didn't do any converter validation.
// Even though converters are technically required to return a non-null ConfigDef object from their config() method,
// we permit this case in order to avoid breaking existing converters that, despite not adhering to this requirement,
// can be used successfully with a connector.
return null;
}
final String pluginPrefix = pluginProperty + ".";
Map<String, String> pluginConfig = Utils.entriesWithPrefix(connectorConfig, pluginPrefix);
if (defaultProperties != null)
defaultProperties.forEach(pluginConfig::putIfAbsent);
List<ConfigValue> configValues;
stageDescription = "performing config validation for the connector's " + pluginName;
try (TemporaryStage stage = reportStage.apply(stageDescription)) {
configValues = configDef.validate(pluginConfig);
} catch (RuntimeException e) {
log.error("Failed to perform config validation for {} of type {}", pluginName, pluginClass, e);
pluginConfigValue.addErrorMessage("Failed to perform config validation for " + pluginName + (e.getMessage() != null ? ": " + e.getMessage() : ""));
return null;
}
return prefixedConfigInfos(configDef.configKeys(), configValues, pluginPrefix);
} finally {
Utils.maybeCloseQuietly(pluginInstance, pluginName + " " + pluginClass);
}
}
private ConfigInfos validateAllConverterConfigs(
Map<String, String> connectorProps,
Map<String, ConfigValue> validatedConnectorConfig,
ClassLoader connectorLoader,
Function<String, TemporaryStage> reportStage
) {
String connType = connectorProps.get(CONNECTOR_CLASS_CONFIG);
// do custom converter-specific validation
ConfigInfos headerConverterConfigInfos = validateConverterConfig(
connectorProps,
validatedConnectorConfig.get(HEADER_CONVERTER_CLASS_CONFIG),
validatedConnectorConfig.get(HEADER_CONVERTER_VERSION_CONFIG),
HeaderConverter.class,
HeaderConverter::config,
"header converter",
HEADER_CONVERTER_CLASS_CONFIG,
HEADER_CONVERTER_VERSION_CONFIG,
Map.of(ConverterConfig.TYPE_CONFIG, ConverterType.HEADER.getName()),
connectorLoader,
reportStage
);
ConfigInfos keyConverterConfigInfos = validateConverterConfig(
connectorProps,
validatedConnectorConfig.get(KEY_CONVERTER_CLASS_CONFIG),
validatedConnectorConfig.get(KEY_CONVERTER_VERSION_CONFIG),
Converter.class,
Converter::config,
"key converter",
KEY_CONVERTER_CLASS_CONFIG,
KEY_CONVERTER_VERSION_CONFIG,
Map.of(ConverterConfig.TYPE_CONFIG, ConverterType.KEY.getName()),
connectorLoader,
reportStage
);
ConfigInfos valueConverterConfigInfos = validateConverterConfig(
connectorProps,
validatedConnectorConfig.get(VALUE_CONVERTER_CLASS_CONFIG),
validatedConnectorConfig.get(VALUE_CONVERTER_VERSION_CONFIG),
Converter.class,
Converter::config,
"value converter",
VALUE_CONVERTER_CLASS_CONFIG,
VALUE_CONVERTER_VERSION_CONFIG,
Map.of(ConverterConfig.TYPE_CONFIG, ConverterType.VALUE.getName()),
connectorLoader,
reportStage
);
return mergeConfigInfos(connType, headerConverterConfigInfos, keyConverterConfigInfos, valueConverterConfigInfos);
}
    /**
     * Asynchronously validates the given connector configuration with config-value
     * logging enabled, reporting the result (or failure) through {@code callback}.
     *
     * @param connectorProps the connector configuration to validate
     * @param callback invoked with the resulting {@link ConfigInfos}, or with the
     *        error if validation failed
     */
    @Override
    public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback) {
        // Delegate with doLog=true; the flag is passed through to config parsing.
        validateConnectorConfig(connectorProps, callback, true);
    }
    /**
     * Asynchronously validates the given connector configuration on the connector
     * executor, reporting the result (or any failure) through {@code callback}.
     *
     * @param connectorProps the connector configuration to validate
     * @param callback invoked with the resulting {@link ConfigInfos}, or with the
     *        error if validation threw; also receives progress stages
     * @param doLog whether configuration values should be logged during parsing
     */
    @Override
    public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback, boolean doLog) {
        // Record the queueing stage before submitting so callers can observe time
        // spent waiting for an executor thread, separately from validation itself.
        Stage waitingForThread = new Stage(
                "waiting for a new thread to become available for connector validation",
                time.milliseconds()
        );
        callback.recordStage(waitingForThread);
        connectorExecutor.submit(() -> {
            // Now running on the executor thread; the wait stage is complete.
            waitingForThread.complete(time.milliseconds());
            try {
                Function<String, TemporaryStage> reportStage = description ->
                        new TemporaryStage(description, callback, time);
                ConfigInfos result = validateConnectorConfig(connectorProps, reportStage, doLog);
                callback.onCompletion(null, result);
            } catch (Throwable t) {
                // Surface any failure (including Errors) through the callback rather
                // than letting it vanish inside the executor.
                callback.onCompletion(t, null);
            }
        });
    }
/**
* Build the {@link RestartPlan} that describes what should and should not be restarted given the restart request
* and the current status of the connector and task instances.
*
* @param request the restart request; may not be null
* @return the restart plan, or empty if this worker has no status for the connector named in the request and therefore the
* connector cannot be restarted
*/
public Optional<RestartPlan> buildRestartPlan(RestartRequest request) {
String connectorName = request.connectorName();
ConnectorStatus connectorStatus = statusBackingStore.get(connectorName);
if (connectorStatus == null) {
return Optional.empty();
}
// If requested, mark the connector as restarting
AbstractStatus.State connectorState = request.shouldRestartConnector(connectorStatus) ? AbstractStatus.State.RESTARTING : connectorStatus.state();
ConnectorStateInfo.ConnectorState connectorInfoState = new ConnectorStateInfo.ConnectorState(
connectorState.toString(),
connectorStatus.workerId(),
connectorStatus.trace(),
connectorStatus.version()
);
// Collect the task states, If requested, mark the task as restarting
List<ConnectorStateInfo.TaskState> taskStates = statusBackingStore.getAll(connectorName)
.stream()
.map(taskStatus -> {
AbstractStatus.State taskState = request.shouldRestartTask(taskStatus) ? AbstractStatus.State.RESTARTING : taskStatus.state();
return new ConnectorStateInfo.TaskState(
taskStatus.id().task(),
taskState.toString(),
taskStatus.workerId(),
taskStatus.trace(),
taskStatus.version()
);
})
.collect(Collectors.toList());
// Construct the response from the various states
Map<String, String> conf = rawConfig(connectorName);
ConnectorStateInfo stateInfo = new ConnectorStateInfo(
connectorName,
connectorInfoState,
taskStates,
connectorType(conf)
);
return Optional.of(new RestartPlan(request, stateInfo));
}
protected boolean connectorUsesConsumer(org.apache.kafka.connect.health.ConnectorType connectorType, Map<String, String> connProps) {
return connectorType == org.apache.kafka.connect.health.ConnectorType.SINK;
}
protected boolean connectorUsesAdmin(org.apache.kafka.connect.health.ConnectorType connectorType, Map<String, String> connProps) {
if (connectorType == org.apache.kafka.connect.health.ConnectorType.SOURCE) {
return SourceConnectorConfig.usesTopicCreation(connProps);
} else {
return SinkConnectorConfig.hasDlqTopicConfig(connProps);
}
}
protected boolean connectorUsesProducer(org.apache.kafka.connect.health.ConnectorType connectorType, Map<String, String> connProps) {
return connectorType == org.apache.kafka.connect.health.ConnectorType.SOURCE
|| SinkConnectorConfig.hasDlqTopicConfig(connProps);
}
    /**
     * Validates the producer/consumer/admin client override sections of a connector
     * config against the worker's client-config override policy. Only the client
     * types this connector actually uses (as decided by the {@code connectorUses*}
     * predicates) are validated.
     *
     * @param connectorProps raw connector configuration
     * @param connectorType the connector's type (source/sink); may be null
     * @param connectorClass the resolved connector class; may be null
     * @param reportStage factory for progress-reporting stages
     * @param doLog whether configuration values should be logged during parsing
     * @return the merged override validation results, or null when the connector
     *         class or type could not be determined
     */
    private ConfigInfos validateClientOverrides(
            Map<String, String> connectorProps,
            org.apache.kafka.connect.health.ConnectorType connectorType,
            Class<? extends Connector> connectorClass,
            Function<String, TemporaryStage> reportStage,
            boolean doLog
    ) {
        if (connectorClass == null || connectorType == null) {
            // Nothing to validate against; the connector itself failed to resolve.
            return null;
        }
        // Wrap the raw props so prefixed client overrides can be extracted below.
        AbstractConfig connectorConfig = new AbstractConfig(new ConfigDef(), connectorProps, doLog);
        String connName = connectorProps.get(ConnectorConfig.NAME_CONFIG);
        String connType = connectorProps.get(CONNECTOR_CLASS_CONFIG);
        ConfigInfos producerConfigInfos = null;
        ConfigInfos consumerConfigInfos = null;
        ConfigInfos adminConfigInfos = null;
        String stageDescription = null;
        // Producer overrides (source connectors and sinks with a DLQ topic).
        if (connectorUsesProducer(connectorType, connectorProps)) {
            stageDescription = "validating producer config overrides for the connector";
            try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                producerConfigInfos = validateClientOverrides(
                    connName,
                    ConnectorConfig.CONNECTOR_CLIENT_PRODUCER_OVERRIDES_PREFIX,
                    connectorConfig,
                    ProducerConfig.configDef(),
                    connectorClass,
                    connectorType,
                    ConnectorClientConfigRequest.ClientType.PRODUCER,
                    connectorClientConfigOverridePolicyPlugin);
            }
        }
        // Admin overrides (topic creation for sources, DLQ management for sinks).
        if (connectorUsesAdmin(connectorType, connectorProps)) {
            stageDescription = "validating admin config overrides for the connector";
            try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                adminConfigInfos = validateClientOverrides(
                    connName,
                    ConnectorConfig.CONNECTOR_CLIENT_ADMIN_OVERRIDES_PREFIX,
                    connectorConfig,
                    AdminClientConfig.configDef(),
                    connectorClass,
                    connectorType,
                    ConnectorClientConfigRequest.ClientType.ADMIN,
                    connectorClientConfigOverridePolicyPlugin);
            }
        }
        // Consumer overrides (sink connectors only).
        if (connectorUsesConsumer(connectorType, connectorProps)) {
            stageDescription = "validating consumer config overrides for the connector";
            try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                consumerConfigInfos = validateClientOverrides(
                    connName,
                    ConnectorConfig.CONNECTOR_CLIENT_CONSUMER_OVERRIDES_PREFIX,
                    connectorConfig,
                    ConsumerConfig.configDef(),
                    connectorClass,
                    connectorType,
                    ConnectorClientConfigRequest.ClientType.CONSUMER,
                    connectorClientConfigOverridePolicyPlugin);
            }
        }
        return mergeConfigInfos(connType,
            producerConfigInfos,
            consumerConfigInfos,
            adminConfigInfos
        );
    }
private ConfigInfos validateConnectorPluginSpecifiedConfigs(
Map<String, String> connectorProps,
Map<String, ConfigValue> validatedConnectorConfig,
ConfigDef enrichedConfigDef,
Connector connector,
Function<String, TemporaryStage> reportStage
) {
List<ConfigValue> configValues = new ArrayList<>(validatedConnectorConfig.values());
Map<String, ConfigKey> configKeys = new LinkedHashMap<>(enrichedConfigDef.configKeys());
Set<String> allGroups = new LinkedHashSet<>(enrichedConfigDef.groups());
String connType = connectorProps.get(CONNECTOR_CLASS_CONFIG);
// do custom connector-specific validation
ConfigDef configDef;
String stageDescription = "retrieving the configuration definition from the connector";
try (TemporaryStage stage = reportStage.apply(stageDescription)) {
configDef = connector.config();
}
if (null == configDef) {
throw new BadRequestException(
String.format(
"%s.config() must return a ConfigDef that is not null.",
connector.getClass().getName()
)
);
}
Config config;
stageDescription = "performing multi-property validation for the connector";
try (TemporaryStage stage = reportStage.apply(stageDescription)) {
config = connector.validate(connectorProps);
}
if (null == config) {
throw new BadRequestException(
String.format(
"%s.validate() must return a Config that is not null.",
connector.getClass().getName()
)
);
}
configKeys.putAll(configDef.configKeys());
allGroups.addAll(configDef.groups());
configValues.addAll(config.configValues());
return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups));
}
private void addNullValuedErrors(Map<String, String> connectorProps, Map<String, ConfigValue> validatedConfig) {
connectorProps.entrySet().stream()
.filter(e -> e.getValue() == null)
.map(Map.Entry::getKey)
.forEach(prop ->
validatedConfig.computeIfAbsent(prop, ConfigValue::new)
.addErrorMessage("Null value can not be supplied as the configuration value."));
}
    /**
     * Fallback validation for a connector whose requested version could not be
     * loaded. Validates the supplied properties against the enriched base
     * {@link ConfigDef} only, then attaches the loading failure to both the
     * connector class and connector version properties, recording the versions
     * that are actually available as recommended values for the version field.
     *
     * @param connectorProps raw connector configuration
     * @param e the version-loading failure to report
     * @param reportStage factory for progress-reporting stages
     * @return the validation result with the version error folded in
     */
    private ConfigInfos invalidVersionedConnectorValidation(
            Map<String, String> connectorProps,
            VersionedPluginLoadingException e,
            Function<String, TemporaryStage> reportStage
    ) {
        String connType = connectorProps.get(CONNECTOR_CLASS_CONFIG);
        ConfigDef configDef = ConnectorConfig.enrichedConfigDef(worker.getPlugins(), connType);
        Map<String, ConfigValue> validatedConfig;
        try (TemporaryStage stage = reportStage.apply("validating connector configuration")) {
            validatedConfig = configDef.validateAll(connectorProps);
        }
        // Surface the loading failure on both the class and version properties, and
        // offer the available versions as recommendations for the version field.
        validatedConfig.get(CONNECTOR_CLASS_CONFIG).addErrorMessage(e.getMessage());
        validatedConfig.get(CONNECTOR_VERSION).addErrorMessage(e.getMessage());
        validatedConfig.get(CONNECTOR_VERSION).recommendedValues(e.availableVersions().stream().map(v -> (Object) v).collect(Collectors.toList()));
        addNullValuedErrors(connectorProps, validatedConfig);
        return generateResult(connType, configDef.configKeys(), new ArrayList<>(validatedConfig.values()), new ArrayList<>(configDef.groups()));
    }
    /**
     * Synchronously validates a connector configuration. In order:
     * <ol>
     *   <li>resolves externalized config values via the worker's config transformer</li>
     *   <li>loads the requested connector plugin, falling back to a reduced
     *       validation when the requested version cannot be loaded</li>
     *   <li>runs source- or sink-specific validation of the connector's properties</li>
     *   <li>validates converter configs, client override configs, and the plugin's
     *       own custom validation, merging all results</li>
     * </ol>
     *
     * @param connectorProps raw connector configuration
     * @param reportStage factory for progress-reporting stages
     * @param doLog whether configuration values should be logged during parsing
     * @return the merged validation result
     * @throws BadRequestException if the connector type is missing, or the plugin
     *         cannot be resolved for a reason other than a bad version
     */
    ConfigInfos validateConnectorConfig(
            Map<String, String> connectorProps,
            Function<String, TemporaryStage> reportStage,
            boolean doLog
    ) {
        String stageDescription;
        if (worker.configTransformer() != null) {
            stageDescription = "resolving transformed configuration properties for the connector";
            try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                connectorProps = worker.configTransformer().transform(connectorProps);
            }
        }
        String connType = connectorProps.get(CONNECTOR_CLASS_CONFIG);
        if (connType == null) {
            throw new BadRequestException("Connector config " + connectorProps + " contains no connector type");
        }
        VersionRange connVersion;
        Connector connector;
        ClassLoader connectorLoader;
        try {
            connVersion = PluginUtils.connectorVersionRequirement(connectorProps.get(CONNECTOR_VERSION));
            connector = cachedConnectors.getConnector(connType, connVersion);
            connectorLoader = plugins().connectorLoader(connType, connVersion);
            log.info("Validating connector {}, version {}", connType, connector.version());
        } catch (VersionedPluginLoadingException e) {
            // A bad/unavailable version is reported as validation errors rather than
            // a request failure, so the caller still receives a ConfigInfos.
            log.warn("Failed to load connector {} with version {}, skipping additional validations (connector, converters, transformations, client overrides) ",
                connType, connectorProps.get(CONNECTOR_VERSION), e);
            return invalidVersionedConnectorValidation(connectorProps, e, reportStage);
        } catch (Exception e) {
            throw new BadRequestException(e.getMessage(), e);
        }
        // Run the remaining validation with the connector's classloader active.
        try (LoaderSwap loaderSwap = plugins().withClassLoader(connectorLoader)) {
            ConfigDef enrichedConfigDef;
            Map<String, ConfigValue> validatedConnectorConfig;
            org.apache.kafka.connect.health.ConnectorType connectorType;
            if (connector instanceof SourceConnector) {
                connectorType = org.apache.kafka.connect.health.ConnectorType.SOURCE;
                enrichedConfigDef = ConnectorConfig.enrich(plugins(), SourceConnectorConfig.enrichedConfigDef(plugins(), connectorProps, worker.config()), connectorProps, false);
                stageDescription = "validating source connector-specific properties for the connector";
                try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                    validatedConnectorConfig = validateSourceConnectorConfig((SourceConnector) connector, enrichedConfigDef, connectorProps);
                }
            } else {
                connectorType = org.apache.kafka.connect.health.ConnectorType.SINK;
                enrichedConfigDef = ConnectorConfig.enrich(plugins(), SinkConnectorConfig.enrichedConfigDef(plugins(), connectorProps, worker.config()), connectorProps, false);
                stageDescription = "validating sink connector-specific properties for the connector";
                try (TemporaryStage stage = reportStage.apply(stageDescription)) {
                    validatedConnectorConfig = validateSinkConnectorConfig((SinkConnector) connector, enrichedConfigDef, connectorProps);
                }
            }
            addNullValuedErrors(connectorProps, validatedConnectorConfig);
            // the order of operations here is important, converter validations can add error messages to the connector config
            // which are collected and converted to ConfigInfos in validateConnectorPluginSpecifiedConfigs
            ConfigInfos converterConfigInfo = validateAllConverterConfigs(connectorProps, validatedConnectorConfig, connectorLoader, reportStage);
            ConfigInfos clientOverrideInfo = validateClientOverrides(connectorProps, connectorType, connector.getClass(), reportStage, doLog);
            ConfigInfos connectorConfigInfo = validateConnectorPluginSpecifiedConfigs(connectorProps, validatedConnectorConfig, enrichedConfigDef, connector, reportStage);
            return mergeConfigInfos(connType,
                connectorConfigInfo,
                clientOverrideInfo,
                converterConfigInfo
            );
        }
    }
private static ConfigInfos mergeConfigInfos(String connType, ConfigInfos... configInfosList) {
int errorCount = 0;
List<ConfigInfo> configInfoList = new LinkedList<>();
Set<String> groups = new LinkedHashSet<>();
for (ConfigInfos configInfos : configInfosList) {
if (configInfos != null) {
errorCount += configInfos.errorCount();
configInfoList.addAll(configInfos.configs());
groups.addAll(configInfos.groups());
}
}
return new ConfigInfos(connType, errorCount, new ArrayList<>(groups), configInfoList);
}
private static ConfigInfos validateClientOverrides(String connName,
String prefix,
AbstractConfig connectorConfig,
ConfigDef configDef,
Class<? extends Connector> connectorClass,
org.apache.kafka.connect.health.ConnectorType connectorType,
ConnectorClientConfigRequest.ClientType clientType,
Plugin<ConnectorClientConfigOverridePolicy> connectorClientConfigOverridePolicyPlugin) {
Map<String, Object> clientConfigs = new HashMap<>();
for (Map.Entry<String, Object> rawClientConfig : connectorConfig.originalsWithPrefix(prefix).entrySet()) {
String configName = rawClientConfig.getKey();
Object rawConfigValue = rawClientConfig.getValue();
ConfigKey configKey = configDef.configKeys().get(configName);
Object parsedConfigValue = configKey != null
? ConfigDef.parseType(configName, rawConfigValue, configKey.type)
: rawConfigValue;
clientConfigs.put(configName, parsedConfigValue);
}
ConnectorClientConfigRequest connectorClientConfigRequest = new ConnectorClientConfigRequest(
connName, connectorType, connectorClass, clientConfigs, clientType);
List<ConfigValue> configValues = connectorClientConfigOverridePolicyPlugin.get().validate(connectorClientConfigRequest);
return prefixedConfigInfos(configDef.configKeys(), configValues, prefix);
}
private static ConfigInfos prefixedConfigInfos(Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, String prefix) {
int errorCount = 0;
Set<String> groups = new LinkedHashSet<>();
List<ConfigInfo> configInfos = new ArrayList<>();
if (configValues == null) {
return new ConfigInfos("", 0, new ArrayList<>(groups), configInfos);
}
for (ConfigValue validatedConfigValue : configValues) {
ConfigKey configKey = configKeys.get(validatedConfigValue.name());
ConfigKeyInfo configKeyInfo = null;
if (configKey != null) {
if (configKey.group != null) {
groups.add(configKey.group);
}
configKeyInfo = convertConfigKey(configKey, prefix);
}
ConfigValue configValue = new ConfigValue(prefix + validatedConfigValue.name(), validatedConfigValue.value(),
validatedConfigValue.recommendedValues(), validatedConfigValue.errorMessages());
if (configValue.errorMessages().size() > 0) {
errorCount++;
}
ConfigValueInfo configValueInfo = convertConfigValue(configValue, configKey != null ? configKey.type : null);
configInfos.add(new ConfigInfo(configKeyInfo, configValueInfo));
}
return new ConfigInfos("", errorCount, new ArrayList<>(groups), configInfos);
}
// public for testing
public static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups) {
int errorCount = 0;
List<ConfigInfo> configInfoList = new LinkedList<>();
Map<String, ConfigValue> configValueMap = new HashMap<>();
for (ConfigValue configValue: configValues) {
String configName = configValue.name();
configValueMap.put(configName, configValue);
if (!configKeys.containsKey(configName)) {
configInfoList.add(new ConfigInfo(null, convertConfigValue(configValue, null)));
errorCount += configValue.errorMessages().size();
}
}
for (Map.Entry<String, ConfigKey> entry : configKeys.entrySet()) {
String configName = entry.getKey();
ConfigKeyInfo configKeyInfo = convertConfigKey(entry.getValue());
Type type = entry.getValue().type;
ConfigValueInfo configValueInfo = null;
if (configValueMap.containsKey(configName)) {
ConfigValue configValue = configValueMap.get(configName);
configValueInfo = convertConfigValue(configValue, type);
errorCount += configValue.errorMessages().size();
}
configInfoList.add(new ConfigInfo(configKeyInfo, configValueInfo));
}
return new ConfigInfos(connType, errorCount, groups, configInfoList);
}
    /**
     * Converts a {@link ConfigKey} to its REST representation with no name prefix.
     *
     * @param configKey the key definition to convert
     * @return the REST representation of the key
     */
    public static ConfigKeyInfo convertConfigKey(ConfigKey configKey) {
        return convertConfigKey(configKey, "");
    }
private static ConfigKeyInfo convertConfigKey(ConfigKey configKey, String prefix) {
String name = prefix + configKey.name;
Type type = configKey.type;
String typeName = configKey.type.name();
boolean required = false;
String defaultValue;
if (ConfigDef.NO_DEFAULT_VALUE.equals(configKey.defaultValue)) {
defaultValue = null;
required = true;
} else {
defaultValue = ConfigDef.convertToString(configKey.defaultValue, type);
}
String importance = configKey.importance.name();
String documentation = configKey.documentation;
String group = configKey.group;
int orderInGroup = configKey.orderInGroup;
String width = configKey.width.name();
String displayName = configKey.displayName;
List<String> dependents = configKey.dependents;
return new ConfigKeyInfo(name, typeName, required, defaultValue, importance, documentation, group, orderInGroup, width, displayName, dependents);
}
private static ConfigValueInfo convertConfigValue(ConfigValue configValue, Type type) {
String value = ConfigDef.convertToString(configValue.value(), type);
List<String> recommendedValues = new LinkedList<>();
if (type == Type.LIST) {
for (Object object: configValue.recommendedValues()) {
recommendedValues.add(ConfigDef.convertToString(object, Type.STRING));
}
} else {
for (Object object : configValue.recommendedValues()) {
recommendedValues.add(ConfigDef.convertToString(object, type));
}
}
return new ConfigValueInfo(configValue.name(), value, recommendedValues, configValue.errorMessages(), configValue.visible());
}
/**
* Retrieves ConnectorType for the | to |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.